code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
#!/usr/bin/env python2
# This file is part of the OpenMV project.
#
# Copyright (c) 2013-2021 <NAME> <<EMAIL>>
# Copyright (c) 2013-2021 <NAME> <<EMAIL>>
#
# This work is licensed under the MIT license, see the file LICENSE for details.
#
# This script creates smaller patches from images.
import os, sys
import argparse
import random
import numpy as np
from skimage import io
from skimage import exposure
from sklearn.feature_extraction import image
def main():
    """Generate random patches from the images in --input.

    Samples candidate patches from each input image and keeps only
    sufficiently high-contrast ones, writing them to --output as
    patch_0000.ppm, patch_0001.ppm, ... until --patches are saved.
    """
    # CMD args parser
    parser = argparse.ArgumentParser(description='Generate smaller patches from images')
    parser.add_argument("--input", action="store", help="Input images dir")
    parser.add_argument("--output", action="store", help="Output images dir")
    parser.add_argument("--width", action="store", help="Patch width", type=int, default=32)
    parser.add_argument("--height", action="store", help="Patch height", type=int, default=32)
    parser.add_argument("--patches", action="store", help="Number of patches", type=int, default=10)
    # Parse CMD args
    args = parser.parse_args()
    if args.input is None or args.output is None:
        parser.print_help()
        sys.exit(1)
    count = 0
    images = os.listdir(args.input)
    while count < args.patches:
        random.shuffle(images)
        # Iterate the image names directly instead of py2-only xrange indexing.
        for name in images:
            img = io.imread(os.path.join(args.input, name))
            patches = image.extract_patches_2d(img,
                    patch_size=(args.width, args.height),
                    max_patches=100, random_state=np.random.RandomState(0))
            random.shuffle(patches)
            for p in patches:
                # Keep only patches that are NOT low contrast (the original
                # comment stated the opposite of what the condition does).
                if not exposure.is_low_contrast(p):
                    io.imsave(os.path.join(args.output, 'patch_%.4d.ppm' % count), p)
                    count += 1
                    # At most one patch is taken per image per pass.
                    break
            if count == args.patches:
                break
if __name__ == '__main__':
    main()
|
[
"argparse.ArgumentParser",
"random.shuffle",
"numpy.random.RandomState",
"sys.exit",
"skimage.exposure.is_low_contrast",
"skimage.io.imsave",
"os.listdir",
"skimage.io.imread"
] |
[((500, 575), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate smaller patches from images"""'}), "(description='Generate smaller patches from images')\n", (523, 575), False, 'import argparse\n'), ((1243, 1265), 'os.listdir', 'os.listdir', (['args.input'], {}), '(args.input)\n', (1253, 1265), False, 'import os, sys\n'), ((1203, 1214), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1211, 1214), False, 'import os, sys\n'), ((1308, 1330), 'random.shuffle', 'random.shuffle', (['images'], {}), '(images)\n', (1322, 1330), False, 'import random\n'), ((1387, 1426), 'skimage.io.imread', 'io.imread', (["(args.input + '/' + images[i])"], {}), "(args.input + '/' + images[i])\n", (1396, 1426), False, 'from skimage import io\n'), ((1637, 1660), 'random.shuffle', 'random.shuffle', (['patches'], {}), '(patches)\n', (1651, 1660), False, 'import random\n'), ((1599, 1623), 'numpy.random.RandomState', 'np.random.RandomState', (['(0)'], {}), '(0)\n', (1620, 1623), True, 'import numpy as np\n'), ((1760, 1787), 'skimage.exposure.is_low_contrast', 'exposure.is_low_contrast', (['p'], {}), '(p)\n', (1784, 1787), False, 'from skimage import exposure\n'), ((1819, 1872), 'skimage.io.imsave', 'io.imsave', (["(args.output + '/patch_%.4d.ppm' % count)", 'p'], {}), "(args.output + '/patch_%.4d.ppm' % count, p)\n", (1828, 1872), False, 'from skimage import io\n')]
|
from urllib.parse import urlparse
def host(url):
result = urlparse(url)
if result.netloc:
return result.netloc
if not url:
return ""
res = result.path.split("/")[0]
if ":" not in res or "@" not in res:
return res
from_ = res.find("@") + 1
for_ = res.find(":")
return res[from_:for_]
|
[
"urllib.parse.urlparse"
] |
[((64, 77), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (72, 77), False, 'from urllib.parse import urlparse\n')]
|
""" Unit tests for pointing
"""
import logging
import unittest
import astropy.units as u
import numpy
from astropy.coordinates import SkyCoord
from rascil.data_models.memory_data_models import Skycomponent
from rascil.data_models.polarisation import PolarisationFrame
from rascil.processing_components.calibration.pointing import create_pointingtable_from_blockvisibility
from rascil.processing_components.imaging.primary_beams import create_vp
from rascil.processing_components.simulation import create_named_configuration
from rascil.processing_components.simulation.pointing import simulate_gaintable_from_pointingtable
from rascil.processing_components.simulation.pointing import simulate_pointingtable_from_timeseries
from rascil.processing_components.visibility.base import create_blockvisibility
from rascil.processing_components import create_image
# Module-level logger; only WARNING and above are emitted while testing.
log = logging.getLogger('logger')
log.setLevel(logging.WARNING)
class TestPointing(unittest.TestCase):
    """Unit tests for pointing-table simulation from time series."""
    def setUp(self):
        # Build a small MID configuration, an empty block visibility and a
        # model image shared by the test below.
        from rascil.data_models.parameters import rascil_path, rascil_data_path
        self.doplot = False
        self.midcore = create_named_configuration('MID', rmax=100.0)
        self.nants = len(self.midcore.names)
        self.dir = rascil_path('test_results')
        self.ntimes = 100
        interval = 10.0
        # Sample times in seconds, converted to hour angle in radians
        # (86400 s/day -> pi/43200 rad per second).
        self.times = numpy.arange(0.0, float(self.ntimes)) * interval
        self.times *= numpy.pi / 43200.0
        self.frequency = numpy.array([1.4e9])
        self.channel_bandwidth = numpy.array([1e7])
        self.phasecentre = SkyCoord(ra=+15.0 * u.deg, dec=-45.0 * u.deg, frame='icrs', equinox='J2000')
        self.vis = create_blockvisibility(self.midcore, self.times, self.frequency,
                                          channel_bandwidth=self.channel_bandwidth,
                                          phasecentre=self.phasecentre, weight=1.0,
                                          polarisation_frame=PolarisationFrame('stokesI'))
        # Zero the visibilities; only the pointing-induced gains matter here.
        self.vis.data['vis'] *= 0.0
        # Create model
        self.model = create_image(npixel=512, cellsize=0.001, polarisation_frame=PolarisationFrame("stokesI"),
                                  frequency=self.frequency, channel_bandwidth=self.channel_bandwidth,
                                  phasecentre=self.phasecentre)
    def test_simulate_gaintable_from_time_series(self):
        """Simulate a 'wind' pointing time series and derive gain tables."""
        numpy.random.seed(18051955)
        # Component slightly offset from the phase centre in declination.
        offset_phasecentre = SkyCoord(ra=+15.0 * u.deg, dec=-44.58 * u.deg, frame='icrs', equinox='J2000')
        component = [Skycomponent(frequency=self.frequency, direction=offset_phasecentre,
                                  polarisation_frame=PolarisationFrame("stokesI"), flux=[[1.0]])]
        # NOTE(review): loop variable shadows the builtin `type`.
        for type in ['wind']:
            pt = create_pointingtable_from_blockvisibility(self.vis)
            import matplotlib.pyplot as plt
            ant = 15
            plt.clf()
            plt.plot(pt.time, pt.nominal[:, ant, 0, 0, 0], '.')
            plt.plot(pt.time, pt.nominal[:, ant, 0, 0, 1], '.')
            plt.xlabel('Time (s)')
            plt.ylabel('Nominal (rad)')
            plt.title("Nominal pointing for %s" % (type))
            plt.show(block=False)
            for reference_pointing in [False, True]:
                pt = simulate_pointingtable_from_timeseries(pt, type=type, reference_pointing=reference_pointing)
                import matplotlib.pyplot as plt
                ant = 15
                plt.clf()
                # Radians to arcseconds conversion factor.
                r2a = 180.0 * 3600.0 / numpy.pi
                plt.plot(pt.time, r2a * pt.pointing[:, ant, 0, 0, 0], '.')
                plt.plot(pt.time, r2a * pt.pointing[:, ant, 0, 0, 1], '.')
                plt.xlabel('Time (s)')
                plt.ylabel('Pointing (arcsec)')
                plt.title("Pointing for %s, reference pointing %s" % (type, reference_pointing))
                plt.show(block=False)
                vp = create_vp(self.model, 'MID')
                gt = simulate_gaintable_from_pointingtable(self.vis, component, pt, vp)
                # One gain per time, antenna and (single) frequency/pol.
                assert gt[0].gain.shape == (self.ntimes, self.nants, 1, 1, 1), gt[0].gain.shape
                plt.clf()
                plt.plot(gt[0].time, 1.0 / numpy.real(gt[0].gain[:, ant, 0, 0, 0]), '.')
                plt.xlabel('Time (s)')
                plt.ylabel('Gain')
                plt.title("Gain for %s, reference pointing %s" % (type, reference_pointing))
                plt.show(block=False)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
[
"matplotlib.pyplot.title",
"numpy.random.seed",
"matplotlib.pyplot.clf",
"rascil.data_models.parameters.rascil_path",
"unittest.main",
"rascil.processing_components.simulation.create_named_configuration",
"numpy.real",
"rascil.data_models.polarisation.PolarisationFrame",
"matplotlib.pyplot.show",
"rascil.processing_components.imaging.primary_beams.create_vp",
"rascil.processing_components.simulation.pointing.simulate_gaintable_from_pointingtable",
"rascil.processing_components.calibration.pointing.create_pointingtable_from_blockvisibility",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.plot",
"numpy.array",
"rascil.processing_components.simulation.pointing.simulate_pointingtable_from_timeseries",
"matplotlib.pyplot.xlabel",
"astropy.coordinates.SkyCoord",
"logging.getLogger"
] |
[((868, 895), 'logging.getLogger', 'logging.getLogger', (['"""logger"""'], {}), "('logger')\n", (885, 895), False, 'import logging\n'), ((4527, 4542), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4540, 4542), False, 'import unittest\n'), ((1128, 1173), 'rascil.processing_components.simulation.create_named_configuration', 'create_named_configuration', (['"""MID"""'], {'rmax': '(100.0)'}), "('MID', rmax=100.0)\n", (1154, 1173), False, 'from rascil.processing_components.simulation import create_named_configuration\n'), ((1238, 1265), 'rascil.data_models.parameters.rascil_path', 'rascil_path', (['"""test_results"""'], {}), "('test_results')\n", (1249, 1265), False, 'from rascil.data_models.parameters import rascil_path, rascil_data_path\n'), ((1461, 1488), 'numpy.array', 'numpy.array', (['[1400000000.0]'], {}), '([1400000000.0])\n', (1472, 1488), False, 'import numpy\n'), ((1515, 1540), 'numpy.array', 'numpy.array', (['[10000000.0]'], {}), '([10000000.0])\n', (1526, 1540), False, 'import numpy\n'), ((1561, 1637), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': '(+15.0 * u.deg)', 'dec': '(-45.0 * u.deg)', 'frame': '"""icrs"""', 'equinox': '"""J2000"""'}), "(ra=+15.0 * u.deg, dec=-45.0 * u.deg, frame='icrs', equinox='J2000')\n", (1569, 1637), False, 'from astropy.coordinates import SkyCoord\n'), ((2395, 2422), 'numpy.random.seed', 'numpy.random.seed', (['(18051955)'], {}), '(18051955)\n', (2412, 2422), False, 'import numpy\n'), ((2452, 2529), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': '(+15.0 * u.deg)', 'dec': '(-44.58 * u.deg)', 'frame': '"""icrs"""', 'equinox': '"""J2000"""'}), "(ra=+15.0 * u.deg, dec=-44.58 * u.deg, frame='icrs', equinox='J2000')\n", (2460, 2529), False, 'from astropy.coordinates import SkyCoord\n'), ((2779, 2830), 'rascil.processing_components.calibration.pointing.create_pointingtable_from_blockvisibility', 'create_pointingtable_from_blockvisibility', (['self.vis'], {}), '(self.vis)\n', (2820, 2830), False, 'from 
rascil.processing_components.calibration.pointing import create_pointingtable_from_blockvisibility\n'), ((2909, 2918), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2916, 2918), True, 'import matplotlib.pyplot as plt\n'), ((2931, 2982), 'matplotlib.pyplot.plot', 'plt.plot', (['pt.time', 'pt.nominal[:, ant, 0, 0, 0]', '"""."""'], {}), "(pt.time, pt.nominal[:, ant, 0, 0, 0], '.')\n", (2939, 2982), True, 'import matplotlib.pyplot as plt\n'), ((2995, 3046), 'matplotlib.pyplot.plot', 'plt.plot', (['pt.time', 'pt.nominal[:, ant, 0, 0, 1]', '"""."""'], {}), "(pt.time, pt.nominal[:, ant, 0, 0, 1], '.')\n", (3003, 3046), True, 'import matplotlib.pyplot as plt\n'), ((3059, 3081), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (3069, 3081), True, 'import matplotlib.pyplot as plt\n'), ((3094, 3121), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Nominal (rad)"""'], {}), "('Nominal (rad)')\n", (3104, 3121), True, 'import matplotlib.pyplot as plt\n'), ((3134, 3177), 'matplotlib.pyplot.title', 'plt.title', (["('Nominal pointing for %s' % type)"], {}), "('Nominal pointing for %s' % type)\n", (3143, 3177), True, 'import matplotlib.pyplot as plt\n'), ((3192, 3213), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (3200, 3213), True, 'import matplotlib.pyplot as plt\n'), ((1951, 1979), 'rascil.data_models.polarisation.PolarisationFrame', 'PolarisationFrame', (['"""stokesI"""'], {}), "('stokesI')\n", (1968, 1979), False, 'from rascil.data_models.polarisation import PolarisationFrame\n'), ((2130, 2158), 'rascil.data_models.polarisation.PolarisationFrame', 'PolarisationFrame', (['"""stokesI"""'], {}), "('stokesI')\n", (2147, 2158), False, 'from rascil.data_models.polarisation import PolarisationFrame\n'), ((3293, 3390), 'rascil.processing_components.simulation.pointing.simulate_pointingtable_from_timeseries', 'simulate_pointingtable_from_timeseries', (['pt'], {'type': 'type', 'reference_pointing': 
'reference_pointing'}), '(pt, type=type, reference_pointing=\n reference_pointing)\n', (3331, 3390), False, 'from rascil.processing_components.simulation.pointing import simulate_pointingtable_from_timeseries\n'), ((3476, 3485), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3483, 3485), True, 'import matplotlib.pyplot as plt\n'), ((3550, 3608), 'matplotlib.pyplot.plot', 'plt.plot', (['pt.time', '(r2a * pt.pointing[:, ant, 0, 0, 0])', '"""."""'], {}), "(pt.time, r2a * pt.pointing[:, ant, 0, 0, 0], '.')\n", (3558, 3608), True, 'import matplotlib.pyplot as plt\n'), ((3625, 3683), 'matplotlib.pyplot.plot', 'plt.plot', (['pt.time', '(r2a * pt.pointing[:, ant, 0, 0, 1])', '"""."""'], {}), "(pt.time, r2a * pt.pointing[:, ant, 0, 0, 1], '.')\n", (3633, 3683), True, 'import matplotlib.pyplot as plt\n'), ((3700, 3722), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (3710, 3722), True, 'import matplotlib.pyplot as plt\n'), ((3739, 3770), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Pointing (arcsec)"""'], {}), "('Pointing (arcsec)')\n", (3749, 3770), True, 'import matplotlib.pyplot as plt\n'), ((3787, 3872), 'matplotlib.pyplot.title', 'plt.title', (["('Pointing for %s, reference pointing %s' % (type, reference_pointing))"], {}), "('Pointing for %s, reference pointing %s' % (type, reference_pointing)\n )\n", (3796, 3872), True, 'import matplotlib.pyplot as plt\n'), ((3884, 3905), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (3892, 3905), True, 'import matplotlib.pyplot as plt\n'), ((3944, 3972), 'rascil.processing_components.imaging.primary_beams.create_vp', 'create_vp', (['self.model', '"""MID"""'], {}), "(self.model, 'MID')\n", (3953, 3972), False, 'from rascil.processing_components.imaging.primary_beams import create_vp\n'), ((3994, 4060), 'rascil.processing_components.simulation.pointing.simulate_gaintable_from_pointingtable', 'simulate_gaintable_from_pointingtable', (['self.vis', 
'component', 'pt', 'vp'], {}), '(self.vis, component, pt, vp)\n', (4031, 4060), False, 'from rascil.processing_components.simulation.pointing import simulate_gaintable_from_pointingtable\n'), ((4190, 4199), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (4197, 4199), True, 'import matplotlib.pyplot as plt\n'), ((4305, 4327), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (4315, 4327), True, 'import matplotlib.pyplot as plt\n'), ((4344, 4362), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Gain"""'], {}), "('Gain')\n", (4354, 4362), True, 'import matplotlib.pyplot as plt\n'), ((4379, 4455), 'matplotlib.pyplot.title', 'plt.title', (["('Gain for %s, reference pointing %s' % (type, reference_pointing))"], {}), "('Gain for %s, reference pointing %s' % (type, reference_pointing))\n", (4388, 4455), True, 'import matplotlib.pyplot as plt\n'), ((4472, 4493), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (4480, 4493), True, 'import matplotlib.pyplot as plt\n'), ((2673, 2701), 'rascil.data_models.polarisation.PolarisationFrame', 'PolarisationFrame', (['"""stokesI"""'], {}), "('stokesI')\n", (2690, 2701), False, 'from rascil.data_models.polarisation import PolarisationFrame\n'), ((4243, 4282), 'numpy.real', 'numpy.real', (['gt[0].gain[:, ant, 0, 0, 0]'], {}), '(gt[0].gain[:, ant, 0, 0, 0])\n', (4253, 4282), False, 'import numpy\n')]
|
# Take input here: a Python literal such as [[1, 2, 3], 2] where the first
# element is the data list and the second is the value to check.
import ast
input_str = input()
input_list = ast.literal_eval(input_str)
data = input_list[0]
check = input_list[1]
# Print "True" when check is strictly above the average of data.
# Use the builtin sum() rather than a manual accumulation loop (the
# original also shadowed the builtin name `sum`).
average = sum(data) / len(data)
if average < check:
    print("True")
else:
    print("False")
|
[
"ast.literal_eval"
] |
[((95, 122), 'ast.literal_eval', 'ast.literal_eval', (['input_str'], {}), '(input_str)\n', (111, 122), False, 'import ast\n')]
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""Test lvmutil.funcfits.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
# The line above will help with 2to3 support.
import unittest
import numpy as np
from warnings import catch_warnings, simplefilter
from ..funcfits import func_fit, func_val, iter_fit, mk_fit_dict
class TestFuncFits(unittest.TestCase):
    """Test lvmutil.funcfits
    """
    @classmethod
    def setUpClass(cls):
        pass
    @classmethod
    def tearDownClass(cls):
        pass
    def test_mk_fit_dict(self):
        """Test fit dict
        """
        fdict = mk_fit_dict(np.arange(10), 5, 'legendre', xmin=0., xmax=5000.)
        assert isinstance(fdict, dict)
    def test_poly_fit(self):
        """Test polynomial fit.
        """
        # Fit sin(x) on [0, pi] and spot-check the midpoint of the
        # evaluation grid against a regression value.
        x = np.linspace(0, np.pi, 50)
        y = np.sin(x)
        # Fit
        dfit = func_fit(x, y, 'polynomial', 3)
        x2 = np.linspace(0, np.pi, 100)
        y2 = func_val(x2, dfit)
        np.testing.assert_allclose(y2[50], 0.97854984428713754)
    def test_legendre_fit(self):
        """Test Legendre fit.
        """
        # Generate data
        x = np.linspace(0, np.pi, 50)
        y = np.sin(x)
        # Fit
        dfit = func_fit(x, y, 'legendre', 4)
        x2 = np.linspace(0, np.pi, 100)
        y2 = func_val(x2, dfit)
        np.testing.assert_allclose(y2[50], 0.99940823486206976)
    def test_cheby_fit(self):
        """Test Chebyshev fit.
        """
        # Generate data
        x = np.linspace(0, np.pi, 50)
        y = np.sin(x)
        # Fit
        dfit = func_fit(x, y, 'chebyshev', 4)
        x2 = np.linspace(0, np.pi, 100)
        y2 = func_val(x2, dfit)
        np.testing.assert_allclose(y2[50], 0.99940823486206942)
    def test_fit_with_sigma(self):
        """Test fit with sigma.
        """
        # Generate data
        x = np.linspace(0, np.pi, 50)
        y = np.sin(x)
        # Alternating measurement errors; weights are 1/sigma.
        sigy = np.ones_like(y)*0.1
        sigy[::2] = 0.15
        # Fit
        dfit = func_fit(x, y, 'legendre', 4, w=1./sigy)
        x2 = np.linspace(0, np.pi, 100)
        y2 = func_val(x2, dfit)
        np.testing.assert_allclose(y2[50], 0.99941056289796115)
    def test_func_fit_other(self):
        """Test corner cases in fitting.
        """
        # Generate data
        x = np.linspace(0, np.pi, 50)
        y = np.sin(x)
        # Fit: unknown function names must raise ValueError, both at fit
        # and at evaluation time.
        with self.assertRaises(ValueError):
            dfit = func_fit(x, y, 'fourier', 4)
        dfit = func_fit(x, y, 'polynomial', 3)
        dfit['func'] = 'fourier'
        x2 = np.linspace(0, np.pi, 100)
        with self.assertRaises(ValueError):
            y2 = func_val(x2, dfit)
        # A single-point fit should warn about conditioning and fall back
        # to the default [-1, 1] domain.
        x = np.array([1.0])
        y = np.array([2.0])
        with catch_warnings(record=True) as w:
            # simplefilter("always")
            dfit = func_fit(x, y, 'polynomial', 1)
            self.assertEqual(len(w), 1)
            self.assertIn('conditioned', str(w[-1].message))
        self.assertEqual(dfit['xmin'], -1.0)
        self.assertEqual(dfit['xmax'], 1.0)
    def test_iterfit(self):
        """Test iter fit with Legendre.
        """
        # Generate data
        x = np.linspace(0, np.pi, 100)
        y = np.sin(x)
        # Inject a single outlier; iter_fit should mask exactly one point.
        y[50] = 3.
        # Fit
        dfit, mask = iter_fit(x, y, 'legendre', 4)
        self.assertEqual(mask.sum(), 1)
        x2 = np.linspace(0, np.pi, 100)
        y2 = func_val(x2, dfit)
        np.testing.assert_allclose(y2[50], 0.99941444872371643)
    def test_iterfit2(self):
        """Test iter fit with some special cases.
        """
        # Generate data
        x = np.linspace(0, np.pi, 100)
        y = np.sin(x)
        # Inject a single outlier.
        y[50] = 3.
        # Fit: forceimask without an initial mask should warn and proceed.
        with catch_warnings(record=True) as w:
            # simplefilter("always")
            dfit, mask = iter_fit(x, y, 'legendre', 4, forceimask=True)
            self.assertEqual(len(w), 1)
            self.assertEqual(str(w[-1].message),
                             "Initial mask cannot be enforced -- " +
                             "no initital mask supplied")
        x2 = np.linspace(0, np.pi, 100)
        y2 = func_val(x2, dfit)
        np.testing.assert_allclose(y2[50], 0.99941444872371643)
def test_suite():
    """Allows testing of only this module with the command::
        python setup.py test -m <modulename>
    """
    loader = unittest.defaultTestLoader
    return loader.loadTestsFromName(__name__)
|
[
"numpy.ones_like",
"numpy.sin",
"numpy.array",
"numpy.arange",
"numpy.linspace",
"warnings.catch_warnings",
"numpy.testing.assert_allclose",
"unittest.defaultTestLoader.loadTestsFromName"
] |
[((4444, 4498), 'unittest.defaultTestLoader.loadTestsFromName', 'unittest.defaultTestLoader.loadTestsFromName', (['__name__'], {}), '(__name__)\n', (4488, 4498), False, 'import unittest\n'), ((890, 915), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(50)'], {}), '(0, np.pi, 50)\n', (901, 915), True, 'import numpy as np\n'), ((928, 937), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (934, 937), True, 'import numpy as np\n'), ((1012, 1038), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(100)'], {}), '(0, np.pi, 100)\n', (1023, 1038), True, 'import numpy as np\n'), ((1079, 1133), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['y2[50]', '(0.9785498442871375)'], {}), '(y2[50], 0.9785498442871375)\n', (1105, 1133), True, 'import numpy as np\n'), ((1247, 1272), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(50)'], {}), '(0, np.pi, 50)\n', (1258, 1272), True, 'import numpy as np\n'), ((1285, 1294), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (1291, 1294), True, 'import numpy as np\n'), ((1367, 1393), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(100)'], {}), '(0, np.pi, 100)\n', (1378, 1393), True, 'import numpy as np\n'), ((1434, 1488), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['y2[50]', '(0.9994082348620698)'], {}), '(y2[50], 0.9994082348620698)\n', (1460, 1488), True, 'import numpy as np\n'), ((1600, 1625), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(50)'], {}), '(0, np.pi, 50)\n', (1611, 1625), True, 'import numpy as np\n'), ((1638, 1647), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (1644, 1647), True, 'import numpy as np\n'), ((1721, 1747), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(100)'], {}), '(0, np.pi, 100)\n', (1732, 1747), True, 'import numpy as np\n'), ((1788, 1842), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['y2[50]', '(0.9994082348620694)'], {}), '(y2[50], 0.9994082348620694)\n', (1814, 1842), True, 'import numpy as np\n'), ((1960, 1985), 
'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(50)'], {}), '(0, np.pi, 50)\n', (1971, 1985), True, 'import numpy as np\n'), ((1998, 2007), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (2004, 2007), True, 'import numpy as np\n'), ((2151, 2177), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(100)'], {}), '(0, np.pi, 100)\n', (2162, 2177), True, 'import numpy as np\n'), ((2218, 2272), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['y2[50]', '(0.9994105628979612)'], {}), '(y2[50], 0.9994105628979612)\n', (2244, 2272), True, 'import numpy as np\n'), ((2399, 2424), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(50)'], {}), '(0, np.pi, 50)\n', (2410, 2424), True, 'import numpy as np\n'), ((2437, 2446), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (2443, 2446), True, 'import numpy as np\n'), ((2646, 2672), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(100)'], {}), '(0, np.pi, 100)\n', (2657, 2672), True, 'import numpy as np\n'), ((2765, 2780), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (2773, 2780), True, 'import numpy as np\n'), ((2793, 2808), 'numpy.array', 'np.array', (['[2.0]'], {}), '([2.0])\n', (2801, 2808), True, 'import numpy as np\n'), ((3251, 3277), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(100)'], {}), '(0, np.pi, 100)\n', (3262, 3277), True, 'import numpy as np\n'), ((3290, 3299), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (3296, 3299), True, 'import numpy as np\n'), ((3447, 3473), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(100)'], {}), '(0, np.pi, 100)\n', (3458, 3473), True, 'import numpy as np\n'), ((3514, 3568), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['y2[50]', '(0.9994144487237164)'], {}), '(y2[50], 0.9994144487237164)\n', (3540, 3568), True, 'import numpy as np\n'), ((3698, 3724), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(100)'], {}), '(0, np.pi, 100)\n', (3709, 3724), True, 'import numpy as np\n'), ((3737, 3746), 'numpy.sin', 'np.sin', 
(['x'], {}), '(x)\n', (3743, 3746), True, 'import numpy as np\n'), ((4175, 4201), 'numpy.linspace', 'np.linspace', (['(0)', 'np.pi', '(100)'], {}), '(0, np.pi, 100)\n', (4186, 4201), True, 'import numpy as np\n'), ((4242, 4296), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['y2[50]', '(0.9994144487237164)'], {}), '(y2[50], 0.9994144487237164)\n', (4268, 4296), True, 'import numpy as np\n'), ((714, 727), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (723, 727), True, 'import numpy as np\n'), ((2023, 2038), 'numpy.ones_like', 'np.ones_like', (['y'], {}), '(y)\n', (2035, 2038), True, 'import numpy as np\n'), ((2822, 2849), 'warnings.catch_warnings', 'catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (2836, 2849), False, 'from warnings import catch_warnings, simplefilter\n'), ((3803, 3830), 'warnings.catch_warnings', 'catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (3817, 3830), False, 'from warnings import catch_warnings, simplefilter\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-06 01:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
class Migration(migrations.Migration):
    # Auto-generated migration: resets categorytranslation to plain
    # managers, drops Category.name, and re-points Category.translation
    # at sponsorsModule.CategoryTranslation (nullable, cascade delete).
    dependencies = [
        ('sponsorsModule', '0003_auto_20180605_2122'),
    ]
    operations = [
        migrations.AlterModelManagers(
            name='categorytranslation',
            managers=[
                ('objects', django.db.models.manager.Manager()),
                ('_plain_manager', django.db.models.manager.Manager()),
            ],
        ),
        migrations.RemoveField(
            model_name='category',
            name='name',
        ),
        migrations.AlterField(
            model_name='category',
            name='translation',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE,
                                      to='sponsorsModule.CategoryTranslation'),
        ),
    ]
|
[
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey"
] |
[((578, 636), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""category"""', 'name': '"""name"""'}), "(model_name='category', name='name')\n", (600, 636), False, 'from django.db import migrations, models\n'), ((751, 869), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""sponsorsModule.CategoryTranslation"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='sponsorsModule.CategoryTranslation')\n", (768, 869), False, 'from django.db import migrations, models\n')]
|
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import json
import os
import subprocess
from pex.interpreter import PythonInterpreter
from pants.testutil.pants_run_integration_test import PantsRunIntegrationTest
from pants.util.collections import assert_single_element
from pants.util.contextutil import open_zip, temporary_dir
class PexBuildUtilIntegrationTest(PantsRunIntegrationTest):
    """Integration test for .ipex generation via `binary`."""
    # Address of the python binary target built into an .ipex below.
    binary_target_address = "testprojects/src/python/python_targets:test"
    def test_ipex_gets_imprecise_constraint(self) -> None:
        cur_interpreter_id = PythonInterpreter.get().identity
        interpreter_name = cur_interpreter_id.requirement.name
        major, minor, patch = cur_interpreter_id.version
        # Pin the selected interpreter to the one used by pants to execute this test.
        cur_interpreter_constraint = f"{interpreter_name}=={major}.{minor}.{patch}"
        # Validate that the .ipex file specifically matches the major and minor versions, but allows
        # any patch version.
        imprecise_constraint = f"{interpreter_name}=={major}.{minor}.*"
        with temporary_dir() as tmp_dir:
            self.do_command(
                "--binary-py-generate-ipex",
                "binary",
                self.binary_target_address,
                config={
                    "GLOBAL": {"pants_distdir": tmp_dir},
                    "python-setup": {"interpreter_constraints": [cur_interpreter_constraint]},
                },
            )
            pex_path = os.path.join(tmp_dir, "test.ipex")
            assert os.path.isfile(pex_path)
            # The generated .ipex must be directly runnable.
            pex_execution_result = subprocess.run([pex_path], stdout=subprocess.PIPE, check=True)
            assert pex_execution_result.stdout.decode() == "test!\n"
            # Inspect the embedded PEX-INFO metadata for the constraint.
            with open_zip(pex_path) as zf:
                info = json.loads(zf.read("PEX-INFO"))
            constraint = assert_single_element(info["interpreter_constraints"])
            assert constraint == imprecise_constraint
|
[
"subprocess.run",
"pants.util.contextutil.temporary_dir",
"pants.util.contextutil.open_zip",
"pants.util.collections.assert_single_element",
"os.path.isfile",
"pex.interpreter.PythonInterpreter.get",
"os.path.join"
] |
[((640, 663), 'pex.interpreter.PythonInterpreter.get', 'PythonInterpreter.get', ([], {}), '()\n', (661, 663), False, 'from pex.interpreter import PythonInterpreter\n'), ((1180, 1195), 'pants.util.contextutil.temporary_dir', 'temporary_dir', ([], {}), '()\n', (1193, 1195), False, 'from pants.util.contextutil import open_zip, temporary_dir\n'), ((1587, 1621), 'os.path.join', 'os.path.join', (['tmp_dir', '"""test.ipex"""'], {}), "(tmp_dir, 'test.ipex')\n", (1599, 1621), False, 'import os\n'), ((1641, 1665), 'os.path.isfile', 'os.path.isfile', (['pex_path'], {}), '(pex_path)\n', (1655, 1665), False, 'import os\n'), ((1701, 1763), 'subprocess.run', 'subprocess.run', (['[pex_path]'], {'stdout': 'subprocess.PIPE', 'check': '(True)'}), '([pex_path], stdout=subprocess.PIPE, check=True)\n', (1715, 1763), False, 'import subprocess\n'), ((1851, 1869), 'pants.util.contextutil.open_zip', 'open_zip', (['pex_path'], {}), '(pex_path)\n', (1859, 1869), False, 'from pants.util.contextutil import open_zip, temporary_dir\n'), ((1961, 2015), 'pants.util.collections.assert_single_element', 'assert_single_element', (["info['interpreter_constraints']"], {}), "(info['interpreter_constraints'])\n", (1982, 2015), False, 'from pants.util.collections import assert_single_element\n')]
|
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command line flags for parsing kubectl config files commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
from googlecloudsdk.api_lib.container import kubeconfig as kconfig
from googlecloudsdk.calliope import base
from googlecloudsdk.core import exceptions as core_exceptions
# Error types for kubeconfig handling; all derive from core_exceptions.Error.
class MissingEnvVarError(core_exceptions.Error):
  """An exception raised when required environment variables are missing."""
class ConfigParsingError(core_exceptions.Error):
  """An exception raised when parsing kubeconfig file."""
class MissingConfigError(core_exceptions.Error):
  """An exception raised when kubeconfig file is missing."""
def GetKubeConfigFlag():
  """Return the optional --kubeconfig command-line argument."""
  help_text = 'The path to the Kubeconfig file to use.'
  return base.Argument('--kubeconfig', required=False, help=help_text)
def GetKubeContextFlag():
  """Return the optional --context command-line argument."""
  help_text = 'The Kubernetes context to use.'
  return base.Argument('--context', required=False, help=help_text)
def GetKubeconfigAndContext(kubeconfig=None, context=None):
  """Get the Kubeconfig path and context.

  Args:
    kubeconfig: path to a kubeconfig file; defaults to the standard location.
    context: context name to use; defaults to the file's current-context.

  Returns:
    A (config_path, context_name) tuple.

  Raises:
    MissingConfigError: if the kubeconfig file is absent or unreadable.
    ConfigParsingError: if the requested context is not in the kubeconfig.
  """
  config = kubeconfig if kubeconfig else kconfig.Kubeconfig.DefaultPath()
  readable = bool(config) and os.access(config, os.R_OK)
  if not readable:
    raise MissingConfigError(
        'kubeconfig file not found or is not readable : [{}]'.format(config))
  kc = kconfig.Kubeconfig.LoadFromFile(config)
  context_name = context if context else 'current-context'
  # Validate that passed context exists in specified kubeconfig
  if context_name == 'current-context':
    context_name = kc.current_context
  elif context_name not in kc.contexts:
    raise ConfigParsingError(
        'context [{}] does not exist in kubeconfig [{}]'.format(
            context_name, kubeconfig))
  return config, context_name
|
[
"googlecloudsdk.api_lib.container.kubeconfig.Kubeconfig.DefaultPath",
"googlecloudsdk.api_lib.container.kubeconfig.Kubeconfig.LoadFromFile",
"googlecloudsdk.calliope.base.Argument",
"os.access"
] |
[((1368, 1466), 'googlecloudsdk.calliope.base.Argument', 'base.Argument', (['"""--kubeconfig"""'], {'required': '(False)', 'help': '"""The path to the Kubeconfig file to use."""'}), "('--kubeconfig', required=False, help=\n 'The path to the Kubeconfig file to use.')\n", (1381, 1466), False, 'from googlecloudsdk.calliope import base\n'), ((1518, 1604), 'googlecloudsdk.calliope.base.Argument', 'base.Argument', (['"""--context"""'], {'required': '(False)', 'help': '"""The Kubernetes context to use."""'}), "('--context', required=False, help=\n 'The Kubernetes context to use.')\n", (1531, 1604), False, 'from googlecloudsdk.calliope import base\n'), ((1985, 2024), 'googlecloudsdk.api_lib.container.kubeconfig.Kubeconfig.LoadFromFile', 'kconfig.Kubeconfig.LoadFromFile', (['config'], {}), '(config)\n', (2016, 2024), True, 'from googlecloudsdk.api_lib.container import kubeconfig as kconfig\n'), ((1739, 1771), 'googlecloudsdk.api_lib.container.kubeconfig.Kubeconfig.DefaultPath', 'kconfig.Kubeconfig.DefaultPath', ([], {}), '()\n', (1769, 1771), True, 'from googlecloudsdk.api_lib.container import kubeconfig as kconfig\n'), ((1795, 1821), 'os.access', 'os.access', (['config', 'os.R_OK'], {}), '(config, os.R_OK)\n', (1804, 1821), False, 'import os\n')]
|
from datetime import datetime, timedelta
from sqlalchemy import create_engine, Column, Integer, String, Date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
base = declarative_base()
class Table(base):
__tablename__ = "task"
id = Column(Integer, primary_key=True)
task = Column(String, default='default_value')
deadline = Column(Date, default=datetime.today())
def __repr__(self):
return self.task
class ToDo:
def __init__(self):
self.session: None = None
self.init_db()
self.today: datetime.date = datetime.today().date()
self.actions: dict = {'1': self.today_tasks,
'2': self.week_tasks,
'3': self.all_tasks,
'4': self.missed_tasks,
'5': self.create_task,
'6': self.delete_task,
'0': exit}
def init_db(self):
engine = create_engine('sqlite:///todo.db?check_same_thread=False')
base.metadata.create_all(engine)
self.session = sessionmaker(bind=engine)()
def create_task(self):
task, deadline = input('Enter task:\n'), input('Enter deadline:\n')
new_row = Table(task=task, deadline=datetime.strptime(deadline, '%Y-%m-%d').date())
self.session.add(new_row)
self.session.commit()
print('The task has been added!')
def today_tasks(self):
tasks = self.session.query(Table).filter(Table.deadline == self.today).all()
print(f'\nToday {datetime.today().day} {datetime.today().strftime("%b")}:')
if not tasks:
print('Nothing to do!')
else:
for i, todo in enumerate(tasks, 1):
print(f'{i}. {todo}')
def week_tasks(self):
for day in [self.today + timedelta(days=x) for x in range(7)]:
tasks = self.session.query(Table).filter(Table.deadline == day).all()
print(f"\n{day.strftime('%A %-d %b:')}")
if tasks:
for x, todo in enumerate(tasks, 1):
print(f'{x}. {todo}')
else:
print('Nothing to do!\n')
def all_tasks(self):
tasks = self.session.query(Table).filter(Table.deadline).order_by(Table.deadline).all()
print('\nAll tasks:')
for i, todo in enumerate(tasks, 1):
print(f'{i}. {todo}. {todo.deadline.strftime("%-d %b")}')
def missed_tasks(self):
tasks = self.session.query(Table).filter(Table.deadline < self.today).all()
print('Missed tasks:')
if tasks:
for x, todo in enumerate(tasks, 1):
print(f'{x}. {todo}. {todo.deadline.strftime("%-d %b")}')
else:
print('Nothing is missed!')
def delete_task(self):
rows = self.session.query(Table).filter(Table.deadline).all()
if rows:
for x, todelete in enumerate(rows, 1):
print(f'{x}. {todelete}. {todelete.deadline.strftime("%-d %b")}')
choice = input()
self.session.delete(rows[int(choice) - 1])
self.session.commit()
print('The task has been deleted!')
return
print('Nothing to delete')
return
def menu(self):
while True:
print()
choice: str = input('1) Today\'s tasks\n2) Week\'s tasks\n3) All tasks\n'
'4) Missed tasks\n5) Add task\n6) Delete task\n0) Exit\n')
if choice in self.actions:
self.actions[choice]()
else:
print('Unknown option.')
if __name__ == "__main__":
ToDo().menu()
|
[
"datetime.datetime.today",
"sqlalchemy.ext.declarative.declarative_base",
"datetime.datetime.strptime",
"datetime.timedelta",
"sqlalchemy.Column",
"sqlalchemy.create_engine",
"sqlalchemy.orm.sessionmaker"
] |
[((214, 232), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (230, 232), False, 'from sqlalchemy.ext.declarative import declarative_base\n'), ((290, 323), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (296, 323), False, 'from sqlalchemy import create_engine, Column, Integer, String, Date\n'), ((335, 374), 'sqlalchemy.Column', 'Column', (['String'], {'default': '"""default_value"""'}), "(String, default='default_value')\n", (341, 374), False, 'from sqlalchemy import create_engine, Column, Integer, String, Date\n'), ((1033, 1091), 'sqlalchemy.create_engine', 'create_engine', (['"""sqlite:///todo.db?check_same_thread=False"""'], {}), "('sqlite:///todo.db?check_same_thread=False')\n", (1046, 1091), False, 'from sqlalchemy import create_engine, Column, Integer, String, Date\n'), ((411, 427), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (425, 427), False, 'from datetime import datetime, timedelta\n'), ((1156, 1181), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'engine'}), '(bind=engine)\n', (1168, 1181), False, 'from sqlalchemy.orm import sessionmaker\n'), ((611, 627), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (625, 627), False, 'from datetime import datetime, timedelta\n'), ((1901, 1918), 'datetime.timedelta', 'timedelta', ([], {'days': 'x'}), '(days=x)\n', (1910, 1918), False, 'from datetime import datetime, timedelta\n'), ((1332, 1371), 'datetime.datetime.strptime', 'datetime.strptime', (['deadline', '"""%Y-%m-%d"""'], {}), "(deadline, '%Y-%m-%d')\n", (1349, 1371), False, 'from datetime import datetime, timedelta\n'), ((1624, 1640), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1638, 1640), False, 'from datetime import datetime, timedelta\n'), ((1647, 1663), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1661, 1663), False, 'from datetime import datetime, timedelta\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
from time import sleep
import RPi.GPIO as GPIO
class Servo:
def __init__(self, gpioPin, initDegrees):
# GPIO.setmode(GPIO.BCM) # pin numbering scheme that uses GPIO numbers
# GPIO.setwarnings(False)
GPIO.setup(gpioPin, GPIO.OUT) # set GPIO 25 as output
self.servo = GPIO.PWM(gpioPin, 50) # instantiate PWM output to GPIO `pin` @ 50Hz
self.degree_sign = u'\N{DEGREE SIGN}' # unicode for the degree symbol
self.degree_text = "deg" # unicode for the degree symbol
self.dc_min = 2.1 # the min duty cycle corresponding to 210deg rotation
self.dc_max = 12.3 # the max duty cycle corresponding to 0deg rotation
#self.deg_min = 0
#self.deg_mid = 105
#self.deg_max = 210
self.deg_min = 0
self.deg_mid = 105
self.deg_max = 360
# start at the provided rotation
self._initRotateTo(initDegrees)
def _degreesToDutyCycle(self, degrees):
# assert proper use
assert isinstance(degrees, int)
assert (self.deg_min <= degrees <= self.deg_max)
# convert degrees to fraction out of 210
rotation = degrees / 210.0
# convert fraction to the duty cycle from that corresponding to 0deg
# looks odd since max_dc corresponds to min degree position,
# and vice-versa
dc_from_max = ((self.dc_max - self.dc_min) * rotation)
# return the dc for the deg position
return self.dc_max - dc_from_max
def _initRotateTo(self, degrees):
duty_cycle = self._degreesToDutyCycle(degrees)
self.servo.start(duty_cycle) # start at init degree position
print("Servo: Rotated to " + str(degrees) + self.degree_text)
def rotateTo(self, degrees):
# convert to the duty cycle
duty_cycle = self._degreesToDutyCycle(degrees)
# adjust duty cycle
self.servo.ChangeDutyCycle(duty_cycle)
print("Servo: Rotated to " + str(degrees) + self.degree_text)
def test(self):
positions = [105, 210, 105, 0]
for position in positions:
self.rotateTo(position)
time.sleep(1) # wait until rotation is finished
def __exit__(self, exc_type, exc, traceback):
print("Servo: Exiting, cleaning up")
self.servo.stop()
GPIO.cleanup()
if __name__ == '__main__':
GPIO.setmode(GPIO.BOARD)
try:
s = Servo(7, 0)
while True:
degree = input("Rotate to degrees : ")
print("Trying to rotate to {}".format(degree))
s.rotateTo(int(degree))
sleep(5)
except KeyboardInterrupt:
print("Exit.")
#sleep(3)
#s.rotateTo(90)
#sleep(3)
#s.rotateTo(180)
#sleep(3)
#s.rotateTo(350)
#sleep(0)
#s.rotateTo(0)
|
[
"RPi.GPIO.setmode",
"RPi.GPIO.cleanup",
"RPi.GPIO.setup",
"time.sleep",
"RPi.GPIO.PWM"
] |
[((2435, 2459), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BOARD'], {}), '(GPIO.BOARD)\n', (2447, 2459), True, 'import RPi.GPIO as GPIO\n'), ((289, 318), 'RPi.GPIO.setup', 'GPIO.setup', (['gpioPin', 'GPIO.OUT'], {}), '(gpioPin, GPIO.OUT)\n', (299, 318), True, 'import RPi.GPIO as GPIO\n'), ((365, 386), 'RPi.GPIO.PWM', 'GPIO.PWM', (['gpioPin', '(50)'], {}), '(gpioPin, 50)\n', (373, 386), True, 'import RPi.GPIO as GPIO\n'), ((2387, 2401), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ([], {}), '()\n', (2399, 2401), True, 'import RPi.GPIO as GPIO\n'), ((2208, 2221), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2218, 2221), False, 'import time\n'), ((2673, 2681), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (2678, 2681), False, 'from time import sleep\n')]
|
"""
백준 2644번 : 촌수계산
"""
from collections import deque
def bfs(first, end):
queue = deque()
queue.append([first, 0])
while queue:
person, chonsu = queue.popleft()
visited[person] = True
if person == end:
return chonsu
for i in home[person]:
if visited[i] is False:
queue.append([i, chonsu+1])
return -1
n = int(input())
a, b = map(int, input().split())
home = [[] for _ in range(n+1)]
visited = [False] * (n+1)
m = int(input())
for _ in range(m):
c, d = map(int, input().split())
home[c].append(d)
home[d].append(c)
print(bfs(a, b))
|
[
"collections.deque"
] |
[((88, 95), 'collections.deque', 'deque', ([], {}), '()\n', (93, 95), False, 'from collections import deque\n')]
|
from modeltranslation.translator import translator, TranslationOptions
from swingtime.models import Note, EventType, Event
from .models import AtriaEventProgram, AtriaEvent, AtriaOrganization
class NoteTranslationOptions(TranslationOptions):
fields = ('note',)
class EventTypeTranslationOptions(TranslationOptions):
fields = ('label',)
class EventTranslationOptions(TranslationOptions):
fields = ('title', 'description',)
class AtriaEventProgramTranslationOptions(TranslationOptions):
fields = ('label',)
class AtriaEventTranslationOptions(TranslationOptions):
fields = ('program',)
class AtriaOrganizationTranslationOptions(TranslationOptions):
fields = ('org_name',)
translator.register(Note, NoteTranslationOptions)
translator.register(EventType, EventTypeTranslationOptions)
translator.register(Event, EventTranslationOptions)
translator.register(AtriaEventProgram, AtriaEventProgramTranslationOptions)
translator.register(AtriaEvent, AtriaEventTranslationOptions)
translator.register(AtriaOrganization, AtriaOrganizationTranslationOptions)
|
[
"modeltranslation.translator.translator.register"
] |
[((701, 750), 'modeltranslation.translator.translator.register', 'translator.register', (['Note', 'NoteTranslationOptions'], {}), '(Note, NoteTranslationOptions)\n', (720, 750), False, 'from modeltranslation.translator import translator, TranslationOptions\n'), ((751, 810), 'modeltranslation.translator.translator.register', 'translator.register', (['EventType', 'EventTypeTranslationOptions'], {}), '(EventType, EventTypeTranslationOptions)\n', (770, 810), False, 'from modeltranslation.translator import translator, TranslationOptions\n'), ((811, 862), 'modeltranslation.translator.translator.register', 'translator.register', (['Event', 'EventTranslationOptions'], {}), '(Event, EventTranslationOptions)\n', (830, 862), False, 'from modeltranslation.translator import translator, TranslationOptions\n'), ((863, 938), 'modeltranslation.translator.translator.register', 'translator.register', (['AtriaEventProgram', 'AtriaEventProgramTranslationOptions'], {}), '(AtriaEventProgram, AtriaEventProgramTranslationOptions)\n', (882, 938), False, 'from modeltranslation.translator import translator, TranslationOptions\n'), ((939, 1000), 'modeltranslation.translator.translator.register', 'translator.register', (['AtriaEvent', 'AtriaEventTranslationOptions'], {}), '(AtriaEvent, AtriaEventTranslationOptions)\n', (958, 1000), False, 'from modeltranslation.translator import translator, TranslationOptions\n'), ((1001, 1076), 'modeltranslation.translator.translator.register', 'translator.register', (['AtriaOrganization', 'AtriaOrganizationTranslationOptions'], {}), '(AtriaOrganization, AtriaOrganizationTranslationOptions)\n', (1020, 1076), False, 'from modeltranslation.translator import translator, TranslationOptions\n')]
|
## в два потока
import time
from threading import Thread
def countup(N):
n = 0
while n < N:
n += 1
if __name__ == '__main__':
max_for_thread = 30000000//2
first_thread = Thread(target=countup, args=(max_for_thread,))
second_thread = Thread(target=countup, args=(max_for_thread,))
st_time = time.time()
first_thread.start()
second_thread.start()
first_thread.join()
second_thread.join()
end_time = time.time()
print(f'Время выполнения: {end_time-st_time}')
|
[
"threading.Thread",
"time.time"
] |
[((198, 244), 'threading.Thread', 'Thread', ([], {'target': 'countup', 'args': '(max_for_thread,)'}), '(target=countup, args=(max_for_thread,))\n', (204, 244), False, 'from threading import Thread\n'), ((265, 311), 'threading.Thread', 'Thread', ([], {'target': 'countup', 'args': '(max_for_thread,)'}), '(target=countup, args=(max_for_thread,))\n', (271, 311), False, 'from threading import Thread\n'), ((326, 337), 'time.time', 'time.time', ([], {}), '()\n', (335, 337), False, 'import time\n'), ((453, 464), 'time.time', 'time.time', ([], {}), '()\n', (462, 464), False, 'import time\n')]
|
from yoi.application import Application
from yoi.globals import g
from yoi.response import file_resp
import hashlib
app = Application()
@app.router(r"^/$", r"^/home/?$", methods=["GET"])
def index():
return file_resp('./file.html')
@app.router(r"^/upload/?$", methods=["POST"])
def upload():
form = g["request"].form
# print(request.file["name"])
file = form["file"]
data = file.file.read()
file_name = hashlib.md5(data).hexdigest()
with open("./files/" + file_name + "_" + file.filename, "wb") as f:
f.write(data)
return 'success'
# def show(filename):
# pass
def gallery(page_num):
pass
if __name__ == '__main__':
from wsgiref.simple_server import make_server
# httpd = make_server("127.0.0.1", 8000, app)
httpd = make_server("localhost", 8000, app)
try:
httpd.serve_forever()
except:
httpd.shutdown()
raise
|
[
"wsgiref.simple_server.make_server",
"hashlib.md5",
"yoi.response.file_resp",
"yoi.application.Application"
] |
[((123, 136), 'yoi.application.Application', 'Application', ([], {}), '()\n', (134, 136), False, 'from yoi.application import Application\n'), ((214, 238), 'yoi.response.file_resp', 'file_resp', (['"""./file.html"""'], {}), "('./file.html')\n", (223, 238), False, 'from yoi.response import file_resp\n'), ((786, 821), 'wsgiref.simple_server.make_server', 'make_server', (['"""localhost"""', '(8000)', 'app'], {}), "('localhost', 8000, app)\n", (797, 821), False, 'from wsgiref.simple_server import make_server\n'), ((432, 449), 'hashlib.md5', 'hashlib.md5', (['data'], {}), '(data)\n', (443, 449), False, 'import hashlib\n')]
|
from django.contrib import admin
from .models import Student, Schedule, MealStatus
# Register your models here.
admin.site.register(Student)
admin.site.register(Schedule)
admin.site.register(MealStatus)
from rest_framework.authtoken.admin import TokenAdmin
TokenAdmin.raw_id_fields = ['user']
|
[
"django.contrib.admin.site.register"
] |
[((114, 142), 'django.contrib.admin.site.register', 'admin.site.register', (['Student'], {}), '(Student)\n', (133, 142), False, 'from django.contrib import admin\n'), ((143, 172), 'django.contrib.admin.site.register', 'admin.site.register', (['Schedule'], {}), '(Schedule)\n', (162, 172), False, 'from django.contrib import admin\n'), ((173, 204), 'django.contrib.admin.site.register', 'admin.site.register', (['MealStatus'], {}), '(MealStatus)\n', (192, 204), False, 'from django.contrib import admin\n')]
|
from dictionaryutils import dictionary
def test_no_mixed_type_in_enum():
# An enum is said "mixed type" if the enum items don't all have the same type. The only
# exception to this is NoneType, which is allowed in enums regardless of the type of other
# items. This allows us to set the value to None when the property is not required
for schema in dictionary.schema.values():
for prop in schema["properties"].values():
try:
some_object_iterator = iter(prop)
except TypeError as te:
assert False, "{}: has non iterable property".format(schema["id"])
# print some_object, 'is not iterable'
if "enum" in prop:
assert all(
[type(i) == str or i == None for i in prop["enum"]]
), "{}: enum values should all be string".format(schema["id"])
def test_lowercase_ids():
for schema in dictionary.schema.values():
if "id" in schema:
assert (
schema["id"] == schema["id"].lower()
), "The id in {} should be lower case".format(schema["id"])
def test_nodeid_length():
# prepended to node id: (https://github.com/uc-cdis/psqlgraph/blob/3.0.0/psqlgraph/base.py#L14)
prefix_len = len("node_")
# postpended to node id: (https://github.com/uc-cdis/psqlgraph/blob/3.0.0/psqlgraph/node.py#L121)
postfix_len = len("_node_id_idx")
# maximum identifier allowed in postgres is 63 characters:
max_len = 63 - (prefix_len + postfix_len)
for schema in dictionary.schema.values():
if "id" in schema:
# (https://github.com/uc-cdis/gdcdatamodel/blob/daa709e1a71e0b8985f306c0a6bffe439ee18be7/gdcdatamodel/models/__init__.py#L161)
nodeid_len = len(schema["id"].replace("_", ""))
assert (
nodeid_len <= max_len
), "The id in {} should be at most {} characters (not counting underscores)".format(
schema["id"], max_len
)
|
[
"dictionaryutils.dictionary.schema.values"
] |
[((367, 393), 'dictionaryutils.dictionary.schema.values', 'dictionary.schema.values', ([], {}), '()\n', (391, 393), False, 'from dictionaryutils import dictionary\n'), ((945, 971), 'dictionaryutils.dictionary.schema.values', 'dictionary.schema.values', ([], {}), '()\n', (969, 971), False, 'from dictionaryutils import dictionary\n'), ((1571, 1597), 'dictionaryutils.dictionary.schema.values', 'dictionary.schema.values', ([], {}), '()\n', (1595, 1597), False, 'from dictionaryutils import dictionary\n')]
|
'''
Author: <NAME>
Affliction: Australia National University, DATA61 CSIRO
'''
import torch
import argparse
import datetime
from utils.loading import *
from utils.setup import *
from utils.loss import FusionLoss, NeuralFusionLoss
from torch.utils.data import DataLoader
from torch.optim import RMSprop, Adam
from torch.optim.lr_scheduler import StepLR, ExponentialLR
from modules.pipeline import Pipeline
from tqdm import tqdm
def arg_parse():
parser = argparse.ArgumentParser()
parser.add_argument('--config')
parser.add_argument('--experiment', type=str, default="experiment/")
args = parser.parse_args()
return vars(args)
def train_fusion(args):
config = load_config_from_yaml(args['config'])
config.TIMESTAMP = datetime.datetime.now().strftime('%y%m%d-%H%M%S')
# get workspace
workspace = get_workspace(config)
# save config before training
workspace.save_config(config)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
config.MODEL.device = device
# get datasets
# get train dataset
train_data_config = get_data_config(config, mode='train')
train_dataset = get_data(config.DATA.dataset, train_data_config)
train_loader = DataLoader(train_dataset, config.TRAINING.train_batch_size, num_workers=1)
# get val dataset
val_data_config = get_data_config(config, mode='val')
val_dataset = get_data(config.DATA.dataset, val_data_config)
val_loader = DataLoader(val_dataset, config.TRAINING.val_batch_size, num_workers=1)
# get database
# get train database
train_database = get_database(train_dataset, config, mode='train')
val_database = get_database(val_dataset, config, mode='val')
# setup pipeline
pipeline = Pipeline(config)
pipeline = pipeline.to(device)
# optimization
criterion = NeuralFusionLoss(config)
# optimizer
optimizer = Adam(
[
{'params': pipeline._fusion_network.parameters()},
{'params': pipeline._translator.parameters()}
],
config.OPTIMIZATION.lr
)
scheduler = ExponentialLR(optimizer=optimizer,
gamma=config.OPTIMIZATION.scheduler.gamma)
# optimizer = RMSprop(
# pipeline._fusion_network.parameters(),
# config.OPTIMIZATION.lr,
# config.OPTIMIZATION.rho,
# config.OPTIMIZATION.eps,
# momentum=config.OPTIMIZATION.momentum,
# weight_decay=config.OPTIMIZATION.weight_decay)
# scheduler = StepLR(optimizer=optimizer,
# step_size=config.OPTIMIZATION.scheduler.step_size,
# gamma=config.OPTIMIZATION.scheduler.gamma)
# define some parameters
n_batches = float(len(train_dataset) / config.TRAINING.train_batch_size)
# evaluation metrics
best_iou = 0.
for epoch in range(0, config.TRAINING.n_epochs):
print('Training on epoch {}/{}'.format(epoch, config.TRAINING.n_epochs))
pipeline.train()
# resetting databases before each epoch starts
train_database.reset()
val_database.reset()
for i, batch in tqdm(enumerate(train_loader), total=len(train_dataset)):
# put all data on GPU
batch = transform.to_device(batch, device)
# fusion pipline
output = pipeline.fuse_training(batch, train_database, device)
loss = criterion(output)
loss.backward()
if config.TRAINING.clipping:
torch.nn.utils.clip_grad_norm_(
pipeline._fusion_network.parameters(), max_norm=1., norm_type=2)
if (i + 1) % config.OPTIMIZATION.accumulation_steps == 0 or i == n_batches - 1:
optimizer.step()
optimizer.zero_grad()
scheduler.step()
# zero out all grads
optimizer.zero_grad()
# train_database.filter(value=3.)
pipeline.translate(train_database, device)
train_eval = train_database.evaluate(mode='train', workspace=workspace)
train_database.save_to_workspace(workspace)
print(train_eval)
pipeline.eval()
# validation step - fusion
for i, batch in tqdm(enumerate(val_loader), total=len(val_dataset)):
# put all data on GPU
batch = transform.to_device(batch, device)
# fusion pipeline
pipeline.fuse(batch, val_database, device)
# val_database.filter(value=3.)
pipeline.translate(val_database, device)
val_eval = val_database.evaluate(mode='val', workspace=workspace)
print(val_eval)
# check if current checkpoint is best
if val_eval['iou'] >= best_iou:
is_best = True
best_iou = val_eval['iou']
workspace.log('found new best model with iou {} at epoch {}'.format(
best_iou, epoch), mode='val')
else:
is_best = False
# save models
val_database.save_to_workspace(workspace)
# save checkpoint
workspace.save_model_state({
'pipeline_state_dict': pipeline.state_dict(),
'optimizer_state_dict': optimizer.state_dict(),
'epoch': epoch},
is_best=is_best)
if __name__ == '__main__':
args = arg_parse()
print(args['config'])
train_fusion(args)
|
[
"argparse.ArgumentParser",
"torch.utils.data.DataLoader",
"utils.loss.NeuralFusionLoss",
"torch.cuda.is_available",
"torch.optim.lr_scheduler.ExponentialLR",
"modules.pipeline.Pipeline",
"datetime.datetime.now"
] |
[((462, 487), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (485, 487), False, 'import argparse\n'), ((1236, 1310), 'torch.utils.data.DataLoader', 'DataLoader', (['train_dataset', 'config.TRAINING.train_batch_size'], {'num_workers': '(1)'}), '(train_dataset, config.TRAINING.train_batch_size, num_workers=1)\n', (1246, 1310), False, 'from torch.utils.data import DataLoader\n'), ((1474, 1544), 'torch.utils.data.DataLoader', 'DataLoader', (['val_dataset', 'config.TRAINING.val_batch_size'], {'num_workers': '(1)'}), '(val_dataset, config.TRAINING.val_batch_size, num_workers=1)\n', (1484, 1544), False, 'from torch.utils.data import DataLoader\n'), ((1763, 1779), 'modules.pipeline.Pipeline', 'Pipeline', (['config'], {}), '(config)\n', (1771, 1779), False, 'from modules.pipeline import Pipeline\n'), ((1851, 1875), 'utils.loss.NeuralFusionLoss', 'NeuralFusionLoss', (['config'], {}), '(config)\n', (1867, 1875), False, 'from utils.loss import FusionLoss, NeuralFusionLoss\n'), ((2110, 2187), 'torch.optim.lr_scheduler.ExponentialLR', 'ExponentialLR', ([], {'optimizer': 'optimizer', 'gamma': 'config.OPTIMIZATION.scheduler.gamma'}), '(optimizer=optimizer, gamma=config.OPTIMIZATION.scheduler.gamma)\n', (2123, 2187), False, 'from torch.optim.lr_scheduler import StepLR, ExponentialLR\n'), ((754, 777), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (775, 777), False, 'import datetime\n'), ((971, 996), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (994, 996), False, 'import torch\n')]
|
"""
Contains all classes and functions pertaining to methods of genome mutation.
"""
from abc import abstractmethod
from functools import partial
from random import random
from evored.algorithm import EvolvingAlgorithm
class Mutator(EvolvingAlgorithm):
"""
Represents a mechanism for altering a genome in some way, with the
stipulation that such changes are made in an attempt to improve its
overall fitness.
"""
def evolve(self, genomes, pool, params):
binding = partial(self.mutate, params=params)
return pool.map(binding, genomes)
@abstractmethod
def mutate(self, genome, params):
"""
Mutates the specified genome in some way, influenced by the specified
user-selected parameters.
All implementations of this method must determine if the specified
genome is allowed to be mutated before any operations are performed.
:param genome: The object to mutate.
:param params: A dictionary of parameters.
:return: A mutated genome.
"""
pass
class HeapDownMutator(Mutator):
"""
Represents an implementation of Mutator that applies a single pass of a
heap-down operation to a genome, swapping the parent with the largest
child.
In terms of performance, this mutator is intended to strike a balance
between modifying the genomes in a meaningful way without becoming bogged
down by the potential pitfalls of an unknown tree structure. As such,
this mutator performs a single traversal from an arbitrary starting node
- usually the root - and applies a heap-down operation at each additional
node it encounters. This allows the overall fitness of a genome to
improve by moving better performing instructions further up the tree,
but this effect is mediated due to the lack of percolation.
As previously stated, this mutator may be configured to choose a random
starting node instead of solely the root. This has the effect of further
mitigating any fitness increase structure alteration may bring.
"""
def get_largest_child(self, node):
"""
Computes which child of the specified node is the largest and returns
it.
:param node: The node to obtain the largest child from.
:return: The largest child of a node.
"""
if node.has_left() and not node.has_right():
return node.left
elif not node.has_left() and node.has_right():
return node.right
else:
return node.left if node.left.item >= node.right.item else \
node.right
def heapify(self, node):
"""
Performs a single heap-down operation on the specified node and its
immediate children, selecting the largest child and replacing its
parent as necessary.
:param node: The node to apply a heap-down operation to.
"""
chosen_child = self.get_largest_child(node)
if node.item < chosen_child.item:
node.swap_items(chosen_child)
def mutate(self, genome, params):
if random() > params["mutator.rate"]:
return
node = genome.root if params.get("mutator.root_only", True) else \
genome.choose_node()
queue = [node]
while queue:
current = queue.pop(0)
self.heapify(current)
if current.has_left() and not current.left.is_leaf():
queue.append(current.left)
if current.has_right() and not current.right.is_leaf():
queue.append(current.right)
return genome
class NoMutator(Mutator):
"""
Represents an implementation of Mutator that does nothing.
"""
def mutate(self, genome, params):
return genome
|
[
"functools.partial",
"random.random"
] |
[((500, 535), 'functools.partial', 'partial', (['self.mutate'], {'params': 'params'}), '(self.mutate, params=params)\n', (507, 535), False, 'from functools import partial\n'), ((3129, 3137), 'random.random', 'random', ([], {}), '()\n', (3135, 3137), False, 'from random import random\n')]
|
import numpy as np
import pytest
from numpy.testing import assert_array_equal, assert_array_almost_equal
from ManipulatorCore import Joint, ManipulatorCore
def test_arm_1():
ph = np.pi / 2
bot = ManipulatorCore([
Joint('prismatic', -ph, 10, 0, ph),
Joint('prismatic', -ph, 20, 0, ph),
Joint('prismatic', np.pi, 30, 0, 0)
])
assert_array_almost_equal(bot.arm_matrix,
np.array([[0, 1, 0, -20],
[0, 0, 1, 30],
[1, 0, 0, 10],
[0, 0, 0, 1]]))
def test_arm_2():
ph = np.pi / 2
bot = ManipulatorCore([
Joint('revolute', ph, 450, 0, ph),
Joint('prismatic', 0, 20, 0, ph),
Joint('revolute', 0, 250, 0, 0)
])
assert_array_almost_equal(bot.arm_matrix,
np.array([[0, 1, 0, 20],
[1, 0, 0, 0],
[0, 0, -1, 200],
[0, 0, 0, 1]]))
def test_arm_3():
ph = np.pi / 2
bot = ManipulatorCore([
Joint('revolute', 0, 0, 0, 0),
Joint('prismatic', 0, 20, 0, ph),
Joint('prismatic', 0, 30, 0, ph),
Joint('revolute', ph, 40, 0, 0)
])
assert_array_almost_equal(bot.arm_matrix,
np.array([[0, -1, 0, 0],
[-1, 0, 0, -30],
[0, 0, -1, -20],
[0, 0, 0, 1]]))
|
[
"ManipulatorCore.Joint",
"numpy.array"
] |
[((441, 511), 'numpy.array', 'np.array', (['[[0, 1, 0, -20], [0, 0, 1, 30], [1, 0, 0, 10], [0, 0, 0, 1]]'], {}), '([[0, 1, 0, -20], [0, 0, 1, 30], [1, 0, 0, 10], [0, 0, 0, 1]])\n', (449, 511), True, 'import numpy as np\n'), ((911, 981), 'numpy.array', 'np.array', (['[[0, 1, 0, 20], [1, 0, 0, 0], [0, 0, -1, 200], [0, 0, 0, 1]]'], {}), '([[0, 1, 0, 20], [1, 0, 0, 0], [0, 0, -1, 200], [0, 0, 0, 1]])\n', (919, 981), True, 'import numpy as np\n'), ((1419, 1492), 'numpy.array', 'np.array', (['[[0, -1, 0, 0], [-1, 0, 0, -30], [0, 0, -1, -20], [0, 0, 0, 1]]'], {}), '([[0, -1, 0, 0], [-1, 0, 0, -30], [0, 0, -1, -20], [0, 0, 0, 1]])\n', (1427, 1492), True, 'import numpy as np\n'), ((234, 268), 'ManipulatorCore.Joint', 'Joint', (['"""prismatic"""', '(-ph)', '(10)', '(0)', 'ph'], {}), "('prismatic', -ph, 10, 0, ph)\n", (239, 268), False, 'from ManipulatorCore import Joint, ManipulatorCore\n'), ((278, 312), 'ManipulatorCore.Joint', 'Joint', (['"""prismatic"""', '(-ph)', '(20)', '(0)', 'ph'], {}), "('prismatic', -ph, 20, 0, ph)\n", (283, 312), False, 'from ManipulatorCore import Joint, ManipulatorCore\n'), ((322, 357), 'ManipulatorCore.Joint', 'Joint', (['"""prismatic"""', 'np.pi', '(30)', '(0)', '(0)'], {}), "('prismatic', np.pi, 30, 0, 0)\n", (327, 357), False, 'from ManipulatorCore import Joint, ManipulatorCore\n'), ((710, 743), 'ManipulatorCore.Joint', 'Joint', (['"""revolute"""', 'ph', '(450)', '(0)', 'ph'], {}), "('revolute', ph, 450, 0, ph)\n", (715, 743), False, 'from ManipulatorCore import Joint, ManipulatorCore\n'), ((753, 785), 'ManipulatorCore.Joint', 'Joint', (['"""prismatic"""', '(0)', '(20)', '(0)', 'ph'], {}), "('prismatic', 0, 20, 0, ph)\n", (758, 785), False, 'from ManipulatorCore import Joint, ManipulatorCore\n'), ((795, 826), 'ManipulatorCore.Joint', 'Joint', (['"""revolute"""', '(0)', '(250)', '(0)', '(0)'], {}), "('revolute', 0, 250, 0, 0)\n", (800, 826), False, 'from ManipulatorCore import Joint, ManipulatorCore\n'), ((1179, 1208), 'ManipulatorCore.Joint', 
'Joint', (['"""revolute"""', '(0)', '(0)', '(0)', '(0)'], {}), "('revolute', 0, 0, 0, 0)\n", (1184, 1208), False, 'from ManipulatorCore import Joint, ManipulatorCore\n'), ((1218, 1250), 'ManipulatorCore.Joint', 'Joint', (['"""prismatic"""', '(0)', '(20)', '(0)', 'ph'], {}), "('prismatic', 0, 20, 0, ph)\n", (1223, 1250), False, 'from ManipulatorCore import Joint, ManipulatorCore\n'), ((1260, 1292), 'ManipulatorCore.Joint', 'Joint', (['"""prismatic"""', '(0)', '(30)', '(0)', 'ph'], {}), "('prismatic', 0, 30, 0, ph)\n", (1265, 1292), False, 'from ManipulatorCore import Joint, ManipulatorCore\n'), ((1302, 1333), 'ManipulatorCore.Joint', 'Joint', (['"""revolute"""', 'ph', '(40)', '(0)', '(0)'], {}), "('revolute', ph, 40, 0, 0)\n", (1307, 1333), False, 'from ManipulatorCore import Joint, ManipulatorCore\n')]
|
import re
import pandas as pd
import numpy as np
import argparse
def tlgHistoryRecall(table, pointID):
init = table[table.pointID == pointID]
track = ":"+str(init.frameNumber.values[0])+"-"+str(init.pointID.values[0])
parental = init.parentID.values[0]
while (parental>0) :
init = table[table.pointID == parental]
track = ","+str(init.frameNumber.values[0])+"-"+str(init.pointID.values[0]) + track
parental = init.parentID.values[0]
return(track)
def getAncestory(table,pointID):
init = table[table.pointID == pointID]
parental = init.parentID.values[0]
div=init.Div.values[0]
ttLast = 0
if (div==1):
div=0;
while (parental>0)&(div ==0) :
init = table[table.pointID == parental]
parental = init.parentID.values[0]
div=init.Div.values[0]
ttLast = ttLast+1
lcAncestor = init.pointID.values[0]
outFrame = pd.DataFrame({"pointID": [pointID], "timefromDiv": [ttLast], "lcAncest": [lcAncestor]})
return(outFrame)
def detailTLGoutput(table):
    """Augment a TimelapseGUI track table with division/end/ancestry data.

    Adds a ``Div`` flag (1 when a point has more than one child row,
    i.e. a division), an ``end`` flag (1 for leaf points with no
    children), and for every point the output of :func:`getAncestory`
    (``timefromDiv`` and ``lcAncest`` columns).

    Args:
        table: DataFrame with ``pointID`` and ``parentID`` columns.

    Returns:
        pandas.DataFrame: the input table merged with the new columns;
        missing flags are filled with 0 and cast to int.
    """
    parents = table.parentID.unique()[table.parentID.unique() != 0]
    # Div flag: a parent with more than one child row underwent a division.
    # Collect the per-parent rows in a list and concatenate once;
    # DataFrame.append was removed in pandas 2.0.
    div_rows = [
        pd.DataFrame(
            {"pointID": [i], "Div": [0 if len(table[table.parentID == i]) == 1 else 1]}
        )
        for i in parents
    ]
    addon1 = pd.concat(div_rows) if div_rows else pd.DataFrame()
    # end flag: points that never appear as a parent are track ends.
    points = table.pointID.unique()
    res = [i for i in points if i not in parents]
    addon2 = pd.DataFrame({"pointID": res, "end": 1})
    tst2 = pd.merge(pd.merge(table, addon1, how="outer"), addon2, how="outer")
    tst2 = tst2.fillna(0)
    tst2 = tst2.astype({'Div': 'int', 'end': 'int'})
    # attach the nearest dividing ancestor of every point
    ancestry = [getAncestory(tst2, j) for j in tst2.pointID.values]
    nde = pd.concat(ancestry) if ancestry else pd.DataFrame()
    final = pd.merge(tst2, nde, how="outer")
    return final
def ancestorStr(refTable, ancestor):
    """Render the children of a division ancestor as a Newick fragment.

    Looks up all rows whose ``lcAncest`` equals ``ancestor`` and formats
    up to two of them as ``(child:branch,child:branch)ancestor``.  When
    several rows match, the ancestor's own self-referencing row is
    discarded first.

    Args:
        refTable: DataFrame with ``pointID``, ``timefromDiv`` and
            ``lcAncest`` columns.
        ancestor: pointID of the ancestor node.

    Returns:
        str: ``(p0:t0)ancestor`` or ``(p0:t0,p1:t1)ancestor``.
    """
    rows = refTable[refTable.lcAncest == ancestor]
    if len(rows.pointID.values) > 1:
        # drop the ancestor's own row before formatting its children
        rows = rows[rows.lcAncest != rows.pointID]
    parts = [
        "{}:{}".format(p, t)
        for p, t in zip(rows.pointID.values, rows.timefromDiv.values)
    ]
    if len(parts) > 1:
        return "(" + parts[0] + "," + parts[1] + ")" + str(ancestor)
    return "(" + parts[0] + ")" + str(ancestor)
def generateNewick(TLGfile):
    """Build a Newick-formatted lineage tree from a TimelapseGUI CSV export.

    Args:
        TLGfile: path to the CSV file produced by TimelapseGUI.

    Returns:
        str: the lineage tree in Newick notation; branch lengths are
        frame counts since the last division.
    """
    # Load output from TimelapseGUI
    tstng = pd.read_csv(TLGfile)
    # Use function to generate extra data
    tlg = detailTLGoutput(tstng)
    # Determine the ancestor and end nodes
    ancestorNodes = tlg.lcAncest.unique()
    endNodes = tlg[tlg.end == 1].pointID.unique()
    # Refine the search to only include ancestors and ends
    refined = tlg[tlg['pointID'].isin(np.concatenate([ancestorNodes, endNodes]))]
    refined = refined.fillna(0)
    refined = refined.astype(
        {'lcAncest': 'int', 'pointID': 'int', 'parentID': 'int', 'frameNumber': 'int'}
    )
    # frame numbers are shifted by one so branch lengths start at 1
    refined.frameNumber = refined.frameNumber + 1
    # string fragment "(child:len,child:len)ancestor" for every node
    refined['coupling'] = refined['lcAncest'].apply(lambda x: ancestorStr(refined, x))
    # root-level nodes get a synthetic top-level coupling ending in ";"
    toplvlIDs = refined[
        (refined.lcAncest == refined.pointID) & (refined.parentID == 0)
    ][:-1].pointID.values
    replaceForTop = "(" + ":1,".join(str(x) for x in toplvlIDs) + ":1)0;"
    refined.loc[refined.lcAncest == refined.pointID, 'coupling'] = replaceForTop
    refined = refined.loc[~refined['pointID'].isin(toplvlIDs)]
    # keep only the earliest row per ancestor; collect then concatenate once
    # (DataFrame.append was removed in pandas 2.0)
    lca_rows = []
    for i in refined.lcAncest.unique():
        tmpLCA = refined[refined.lcAncest == i]
        tmpLCA = tmpLCA.loc[:, ['lcAncest', 'frameNumber', 'coupling']].drop_duplicates()
        lca_rows.append(tmpLCA[tmpLCA.frameNumber == tmpLCA.frameNumber.min()])
    LCAdf = pd.concat(lca_rows) if lca_rows else pd.DataFrame()
    LCAsort = LCAdf.sort_values(by=['frameNumber'])
    # splice each ancestor's subtree string into the growing tree, in
    # chronological order of first appearance
    initial_line = LCAsort.iloc[0].coupling
    for i in range(1, len(LCAsort)):
        # the node id may follow either an opening paren or a comma
        pattern = r"\(" + str(LCAsort.iloc[i].lcAncest) + ":"
        replace = "(" + LCAsort.iloc[i].coupling + ":"
        newline = re.sub(pattern, replace, initial_line)
        if newline == initial_line:
            pattern = r"\," + str(LCAsort.iloc[i].lcAncest) + ":"
            replace = "," + LCAsort.iloc[i].coupling + ":"
            newline = re.sub(pattern, replace, initial_line)
        initial_line = newline
    return initial_line
def main():
    """Command-line entry point: parse --in and print the Newick tree."""
    arg_parser = argparse.ArgumentParser('Generate Newick-formatter tree.')
    arg_parser.add_argument(
        '--in', type=str, dest='file',
        help='Path and file in timelapse GUI output form (CSV).')
    cli_args = arg_parser.parse_args()
    print(generateNewick(cli_args.file))


if __name__ == '__main__':
    main()
|
[
"pandas.DataFrame",
"argparse.ArgumentParser",
"pandas.read_csv",
"pandas.merge",
"re.sub",
"numpy.concatenate"
] |
[((922, 1014), 'pandas.DataFrame', 'pd.DataFrame', (["{'pointID': [pointID], 'timefromDiv': [ttLast], 'lcAncest': [lcAncestor]}"], {}), "({'pointID': [pointID], 'timefromDiv': [ttLast], 'lcAncest': [\n lcAncestor]})\n", (934, 1014), True, 'import pandas as pd\n'), ((1140, 1154), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1152, 1154), True, 'import pandas as pd\n'), ((1485, 1525), 'pandas.DataFrame', 'pd.DataFrame', (["{'pointID': res, 'end': 1}"], {}), "({'pointID': res, 'end': 1})\n", (1497, 1525), True, 'import pandas as pd\n'), ((1692, 1706), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1704, 1706), True, 'import pandas as pd\n'), ((1837, 1869), 'pandas.merge', 'pd.merge', (['tst2', 'nde'], {'how': '"""outer"""'}), "(tst2, nde, how='outer')\n", (1845, 1869), True, 'import pandas as pd\n'), ((2658, 2678), 'pandas.read_csv', 'pd.read_csv', (['TLGfile'], {}), '(TLGfile)\n', (2669, 2678), True, 'import pandas as pd\n'), ((3772, 3786), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (3784, 3786), True, 'import pandas as pd\n'), ((4646, 4704), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Generate Newick-formatter tree."""'], {}), "('Generate Newick-formatter tree.')\n", (4669, 4704), False, 'import argparse\n'), ((1547, 1583), 'pandas.merge', 'pd.merge', (['table', 'addon1'], {'how': '"""outer"""'}), "(table, addon1, how='outer')\n", (1555, 1583), True, 'import pandas as pd\n'), ((4310, 4348), 're.sub', 're.sub', (['pattern', 'replace', 'initial_line'], {}), '(pattern, replace, initial_line)\n', (4316, 4348), False, 'import re\n'), ((1340, 1384), 'pandas.DataFrame', 'pd.DataFrame', (["{'pointID': [i], 'Div': [div]}"], {}), "({'pointID': [i], 'Div': [div]})\n", (1352, 1384), True, 'import pandas as pd\n'), ((2990, 3031), 'numpy.concatenate', 'np.concatenate', (['[ancestorNodes, endNodes]'], {}), '([ancestorNodes, endNodes])\n', (3004, 3031), True, 'import numpy as np\n'), ((4523, 4561), 're.sub', 're.sub', (['pattern', 
'replace', 'initial_line'], {}), '(pattern, replace, initial_line)\n', (4529, 4561), False, 'import re\n')]
|
# -*- coding: utf-8 -*-
"""Package initialisation for the ``interact`` module.

Exposes package metadata, loads the bundled CSV reference data sets into
``reference_data`` and re-exports :class:`System` and ``constants``.
"""
import os
# Package metadata.
__module__ = 'interact'
__docformat__ = 'restructuredtext'
__version__ = '{major:d}.{minor:d}'.format(major=0, minor=1)
__author__ = '<NAME>'
__status__ = 'pre-release beta1'
__date__ = '13 August 2018'
__licence__ = 'Apache Software License 2.0'
__url__ = 'https://github.com/MD-Studio/MDInteract'
__copyright__ = "Copyright (c) VU University, Amsterdam"
# Absolute directory of this package; used to locate the data/ folder.
__rootpath__ = os.path.dirname(os.path.abspath(__file__))
__all__ = ['System', 'reference_data', 'constants', '__module__']
from glob import glob
from pandas import read_csv
# Import constants
from interact.constants import constants
# Import reference data sets
# The datasets are loaded once into Pandas DataFrame's and may be changed
# by the user.
reference_data = {}
for ref_data_file in glob(os.path.join(__rootpath__, 'data/*.csv')):
    # dataset name = CSV file name without its extension
    dataset_name = os.path.basename(ref_data_file).split('.')[0]
    reference_data[dataset_name] = read_csv(ref_data_file, na_filter=False)
# Imported last: md_system may depend on the reference data above being
# loaded — NOTE(review): confirm before reordering imports.
from interact.md_system import System
|
[
"pandas.read_csv",
"os.path.abspath",
"os.path.join",
"os.path.basename"
] |
[((423, 448), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (438, 448), False, 'import os\n'), ((793, 833), 'os.path.join', 'os.path.join', (['__rootpath__', '"""data/*.csv"""'], {}), "(__rootpath__, 'data/*.csv')\n", (805, 833), False, 'import os\n'), ((936, 976), 'pandas.read_csv', 'read_csv', (['ref_data_file'], {'na_filter': '(False)'}), '(ref_data_file, na_filter=False)\n', (944, 976), False, 'from pandas import read_csv\n'), ((855, 886), 'os.path.basename', 'os.path.basename', (['ref_data_file'], {}), '(ref_data_file)\n', (871, 886), False, 'import os\n')]
|
#!/usr/bin/env python
import argparse
import codecs
import os
import re
import shutil
import warnings
from collections import defaultdict
from pathlib import Path
from typing import Dict, Generator, List, Optional, Tuple
import keyboardlayout as kl
import keyboardlayout.pygame as klp
import librosa
import numpy
import pygame
import soundfile
# Suffix marking the anchor key in a keyboard file ("<key> anchor").
ANCHOR_INDICATOR = " anchor"
# Matches "<key> c" style lines where a trailing note letter marks the anchor.
ANCHOR_NOTE_REGEX = re.compile(r"\s[abcdefg]$")
DESCRIPTION = 'Use your computer keyboard as a "piano"'
# Sample format/width passed to soundfile when caching transposed notes.
DESCRIPTOR_32BIT = "FLOAT"
BITS_32BIT = 32
# 0 = forbid pygame.mixer from adjusting format automatically.
AUDIO_ALLOWED_CHANGES_HARDWARE_DETERMINED = 0
SOUND_FADE_MILLISECONDS = 50
# RGBA colors used for the on-screen keyboard keys.
CYAN = (0, 255, 255, 255)
BLACK = (0, 0, 0, 255)
WHITE = (255, 255, 255, 255)
# Default asset folders, resolved relative to this file in process_args.
AUDIO_ASSET_PREFIX = "audio_files/"
KEYBOARD_ASSET_PREFIX = "keyboards/"
CURRENT_WORKING_DIR = Path(__file__).parent.absolute()
# Only these pygame events are let through the event queue.
ALLOWED_EVENTS = {pygame.KEYDOWN, pygame.KEYUP, pygame.QUIT}
def get_parser() -> argparse.ArgumentParser:
    """Create the command-line argument parser for pianoputer.

    Options: ``--wav/-w`` (sample WAV file), ``--keyboard/-k`` (keyboard
    layout file), ``--clear-cache/-c`` (regenerate transposed samples)
    and ``--verbose/-v``.

    Returns:
        argparse.ArgumentParser: the configured parser.
    """
    parser = argparse.ArgumentParser(description=DESCRIPTION)
    wav_default = "audio_files/piano_c4.wav"
    keyboard_default = "keyboards/qwerty_piano.txt"
    parser.add_argument(
        "--wav",
        "-w",
        metavar="FILE",
        type=str,
        default=wav_default,
        help="WAV file (default: {})".format(wav_default),
    )
    parser.add_argument(
        "--keyboard",
        "-k",
        metavar="FILE",
        type=str,
        default=keyboard_default,
        help="keyboard file (default: {})".format(keyboard_default),
    )
    parser.add_argument(
        "--clear-cache",
        "-c",
        default=False,
        action="store_true",
        help="deletes stored transposed audio files and recalculates them",
    )
    parser.add_argument("--verbose", "-v", action="store_true", help="verbose mode")
    return parser
def get_or_create_key_sounds(
    wav_path: str,
    sample_rate_hz: int,
    channels: int,
    tones: List[int],
    clear_cache: bool,
    keys: List[str],
) -> Generator[pygame.mixer.Sound, None, None]:
    """Load, or pitch-shift and cache, one sound per keyboard key.

    For each semitone offset in ``tones`` the anchor WAV is transposed
    with librosa and the result is cached as ``<wav_dir>/<wav_name>/<tone>.wav``
    so subsequent runs can load it directly.

    Args:
        wav_path: path to the anchor WAV sample.
        sample_rate_hz: sample rate to load/transpose at.
        channels: 1 for mono, otherwise each channel is shifted separately.
        tones: semitone offsets from the anchor, one per key.
        clear_cache: when True, delete and regenerate the cache folder.
        keys: key names, used only for progress messages.

    Returns:
        An iterable of pygame Sound objects, one per tone (a lazy ``map``
        despite the Generator annotation).
    """
    sounds = []
    y, sr = librosa.load(wav_path, sr=sample_rate_hz, mono=channels == 1)
    # cache folder named after the wav file, next to it on disk
    file_name = os.path.splitext(os.path.basename(wav_path))[0]
    folder_containing_wav = Path(wav_path).parent.absolute()
    cache_folder_path = Path(folder_containing_wav, file_name)
    if clear_cache and cache_folder_path.exists():
        shutil.rmtree(cache_folder_path)
    if not cache_folder_path.exists():
        print("Generating samples for each key")
        os.mkdir(cache_folder_path)
    for i, tone in enumerate(tones):
        cached_path = Path(cache_folder_path, "{}.wav".format(tone))
        if Path(cached_path).exists():
            # cache hit: load the previously transposed sample
            print("Loading note {} out of {} for {}".format(i + 1, len(tones), keys[i]))
            sound, sr = librosa.load(cached_path, sr=sample_rate_hz, mono=channels == 1)
            if channels > 1:
                # the shape must be [length, 2]
                sound = numpy.transpose(sound)
        else:
            print(
                "Transposing note {} out of {} for {}".format(
                    i + 1, len(tones), keys[i]
                )
            )
            # NOTE(review): librosa >= 0.10 requires sr/n_steps as keyword
            # arguments to pitch_shift — confirm the pinned librosa version.
            if channels == 1:
                sound = librosa.effects.pitch_shift(y, sr, n_steps=tone)
            else:
                # shift each channel independently, then interleave
                new_channels = [
                    librosa.effects.pitch_shift(y[i], sr, n_steps=tone)
                    for i in range(channels)
                ]
                sound = numpy.ascontiguousarray(numpy.vstack(new_channels).T)
            soundfile.write(cached_path, sound, sample_rate_hz, DESCRIPTOR_32BIT)
        sounds.append(sound)
    sounds = map(pygame.sndarray.make_sound, sounds)
    return sounds
# Semitone offsets (relative to C) that fall on black keys in one octave.
BLACK_INDICES_C_SCALE = [1, 3, 6, 8, 10]
# Semitone offset of each natural note letter relative to C.
LETTER_KEYS_TO_INDEX = {"c": 0, "d": 2, "e": 4, "f": 5, "g": 7, "a": 9, "b": 11}


def __get_black_key_indices(key_name: str) -> set:
    """Return the semitone offsets (0-11) that land on black keys when the
    octave is re-anchored on the note named by ``key_name``."""
    shift = LETTER_KEYS_TO_INDEX[key_name]
    # subtracting the shift can go negative; wrap back into the octave
    return {(index - shift) % 12 for index in BLACK_INDICES_C_SCALE}
def get_keyboard_info(keyboard_file: str):
    """Parse a keyboard layout file into keys, tones and key colors.

    Each non-empty line names one keyboard key.  Exactly one line must
    mark the anchor, either as "<key> <note letter>" (piano-style black
    and white coloring) or "<key> anchor" (grey-scale coloring).

    Args:
        keyboard_file: path to the layout text file (UTF-8).

    Returns:
        tuple: ``(keys, tones, color_to_key, key_color, key_txt_color)``
        where ``tones`` is each key's semitone offset from the anchor and
        ``color_to_key`` maps RGBA colors to the keys drawn in that color.

    Raises:
        ValueError: if no line marks an anchor.
    """
    with codecs.open(keyboard_file, encoding="utf-8") as k_file:
        lines = k_file.readlines()
    keys = []
    anchor_index = -1
    black_key_indices = set()
    # NOTE(review): i indexes raw file lines while blank lines are skipped
    # when building keys — a blank line before the anchor would skew
    # anchor_index relative to keys; confirm layout files have no blanks.
    for i, line in enumerate(lines):
        line = line.strip()
        if not line:
            continue
        match = ANCHOR_NOTE_REGEX.search(line)
        if match:
            # "<key> c" style anchor: note letter selects black-key layout
            anchor_index = i
            black_key_indices = __get_black_key_indices(line[-1])
            key = kl.Key(line[: match.start(0)])
        elif line.endswith(ANCHOR_INDICATOR):
            # "<key> anchor" style anchor: grey-scale coloring
            anchor_index = i
            key = kl.Key(line[: -len(ANCHOR_INDICATOR)])
        else:
            key = kl.Key(line)
        keys.append(key)
    if anchor_index == -1:
        raise ValueError(
            "Invalid keyboard file, one key must have an anchor note or the "
            "word anchor written next to it.\n"
            "For example 'm c OR m anchor'.\n"
            "That tells the program that the wav file will be used for key m, "
            "and all other keys will be pitch shifted higher or lower from "
            "that anchor. If an anchor note is used then keys are colored black "
            "and white like a piano. If the word anchor is used, then the "
            "highest key is white, and keys get darker as they descend in pitch."
        )
    # semitone offset of every key relative to the anchor line
    tones = [i - anchor_index for i in range(len(keys))]
    color_to_key = defaultdict(list)
    if black_key_indices:
        key_color = (120, 120, 120, 255)
        key_txt_color = (50, 50, 50, 255)
    else:
        key_color = (65, 65, 65, 255)
        key_txt_color = (0, 0, 0, 255)
    for index, key in enumerate(keys):
        if index == anchor_index:
            # the anchor key is always highlighted in cyan
            color_to_key[CYAN].append(key)
            continue
        if black_key_indices:
            used_index = (index - anchor_index) % 12
            if used_index in black_key_indices:
                color_to_key[BLACK].append(key)
                continue
            color_to_key[WHITE].append(key)
            continue
        # anchor mode, keys go up in half steps and we do not color black keys
        # instead we color from grey low to white high
        rgb_val = 255 - (len(keys) - 1 - index) * 3
        color = (rgb_val, rgb_val, rgb_val, 255)
        color_to_key[color].append(key)
    return keys, tones, color_to_key, key_color, key_txt_color
def configure_pygame_audio_and_set_ui(
    framerate_hz: int,
    channels: int,
    keyboard_arg: str,
    color_to_key: Dict[str, List[kl.Key]],
    key_color: Tuple[int, int, int, int],
    key_txt_color: Tuple[int, int, int, int],
) -> Tuple[pygame.Surface, klp.KeyboardLayout]:
    """Initialise pygame display/audio and draw the on-screen keyboard.

    Args:
        framerate_hz: audio sample rate for the mixer.
        channels: number of audio channels.
        keyboard_arg: keyboard file path; must contain "qwerty" or "azerty".
        color_to_key: RGBA color -> keys to draw with that color override.
        key_color: default key color (RGBA).
        key_txt_color: default key label color (RGBA).

    Returns:
        tuple: (screen surface, drawn KeyboardLayout).

    Raises:
        ValueError: if keyboard_arg names neither a qwerty nor azerty layout.
    """
    # ui
    pygame.display.init()
    pygame.display.set_caption("pianoputer")
    # block events that we don't want, this must be after display.init
    pygame.event.set_blocked(None)
    pygame.event.set_allowed(list(ALLOWED_EVENTS))
    # fonts
    pygame.font.init()
    # audio
    pygame.mixer.init(
        framerate_hz,
        BITS_32BIT,
        channels,
        allowedchanges=AUDIO_ALLOWED_CHANGES_HARDWARE_DETERMINED,
    )
    screen_width = 50
    screen_height = 50
    if "qwerty" in keyboard_arg:
        layout_name = kl.LayoutName.QWERTY
    elif "azerty" in keyboard_arg:
        layout_name = kl.LayoutName.AZERTY_LAPTOP
    else:
        # BUG FIX: the ValueError was previously constructed but never
        # raised, leading to a confusing NameError on layout_name below.
        raise ValueError("keyboard must have qwerty or azerty in its name")
    margin = 4
    key_size = 60
    overrides = {}
    for color_value, keys in color_to_key.items():
        override_color = pygame.Color(color_value)
        inverted_color = list(~override_color)
        other_val = 255
        # pick whichever label color contrasts more with the key color;
        # the cyan anchor key always uses the inverted color
        if (
            abs(color_value[0] - inverted_color[0]) > abs(color_value[0] - other_val)
        ) or color_value == CYAN:
            override_txt_color = pygame.Color(inverted_color)
        else:
            # biases grey override keys to use white as txt_color
            override_txt_color = pygame.Color([other_val] * 3 + [255])
        override_key_info = kl.KeyInfo(
            margin=margin,
            color=override_color,
            txt_color=override_txt_color,
            txt_font=pygame.font.SysFont("Arial", key_size // 4),
            txt_padding=(key_size // 10, key_size // 10),
        )
        for key in keys:
            overrides[key.value] = override_key_info
    key_txt_color = pygame.Color(key_txt_color)
    keyboard_info = kl.KeyboardInfo(position=(0, 0), padding=2, color=key_txt_color)
    key_info = kl.KeyInfo(
        margin=margin,
        color=pygame.Color(key_color),
        txt_color=pygame.Color(key_txt_color),
        txt_font=pygame.font.SysFont("Arial", key_size // 4),
        txt_padding=(key_size // 6, key_size // 10),
    )
    letter_key_size = (key_size, key_size)  # width, height
    keyboard = klp.KeyboardLayout(
        layout_name, keyboard_info, letter_key_size, key_info, overrides
    )
    screen_width = keyboard.rect.width
    screen_height = keyboard.rect.height
    screen = pygame.display.set_mode((screen_width, screen_height))
    screen.fill(pygame.Color("black"))
    if keyboard:
        keyboard.draw(screen)
    pygame.display.update()
    return screen, keyboard
def play_until_user_exits(
    keys: List[kl.Key],
    key_sounds: List[pygame.mixer.Sound],
    keyboard: klp.KeyboardLayout,
):
    """Run the main event loop, playing sounds for key presses.

    Plays the sound mapped to a key on KEYDOWN (restarting it if already
    playing) and fades it out on KEYUP.  Exits on window close or ESC,
    then shuts pygame down.

    Args:
        keys: keyboard keys, aligned index-for-index with key_sounds.
        key_sounds: one pygame Sound per key.
        keyboard: layout used to translate pygame events to keys.
    """
    sound_by_key = dict(zip(keys, key_sounds))
    playing = True
    while playing:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                playing = False
                break
            elif event.type == pygame.KEYDOWN:
                # BUG FIX: compare the pressed key (event.key) against
                # K_ESCAPE; the old code compared event.type, which is
                # always KEYDOWN here, so ESC could never quit.
                if event.key == pygame.K_ESCAPE:
                    playing = False
                    break
            key = keyboard.get_key(event)
            if key is None:
                continue
            try:
                sound = sound_by_key[key]
            except KeyError:
                continue
            if event.type == pygame.KEYDOWN:
                # restart the note from the beginning on every press
                sound.stop()
                sound.play(fade_ms=SOUND_FADE_MILLISECONDS)
            elif event.type == pygame.KEYUP:
                sound.fadeout(SOUND_FADE_MILLISECONDS)
    pygame.quit()
    print("Goodbye")
def get_audio_data(wav_path: str) -> Tuple:
    """Read a WAV file and report its samples, sample rate and channels.

    Args:
        wav_path: path to the WAV file.

    Returns:
        tuple: (sample array, sample rate in Hz, channel count).
    """
    samples, sample_rate_hz = soundfile.read(wav_path)
    shape = samples.shape
    # mono data comes back 1-D; multi-channel as (frames, channels)
    channel_count = 1 if len(shape) == 1 else shape[1]
    return samples, sample_rate_hz, channel_count
def process_args(parser: argparse.ArgumentParser, args: Optional[List]) -> Tuple:
    """Parse CLI arguments and resolve bundled asset paths.

    Args:
        parser: the parser built by :func:`get_parser`.
        args: explicit argument list, or a falsy value to read sys.argv.

    Returns:
        tuple: (wav_path, keyboard_path, clear_cache) with default asset
        paths made absolute relative to this file.
    """
    parsed = parser.parse_args(args) if args else parser.parse_args()
    if not parsed.verbose:
        # silence library warnings (e.g. from scipy) unless -v is given
        warnings.simplefilter("ignore")
    wav_path = parsed.wav
    if wav_path.startswith(AUDIO_ASSET_PREFIX):
        wav_path = os.path.join(CURRENT_WORKING_DIR, wav_path)
    keyboard_path = parsed.keyboard
    if keyboard_path.startswith(KEYBOARD_ASSET_PREFIX):
        keyboard_path = os.path.join(CURRENT_WORKING_DIR, keyboard_path)
    return wav_path, keyboard_path, parsed.clear_cache
def play_pianoputer(args: Optional[List[str]] = None):
    """Run the pianoputer application.

    Loads the anchor sample, builds (or reads cached) pitch-shifted
    sounds for every key, sets up the pygame UI and enters the event
    loop until the user exits.

    Args:
        args: optional CLI argument list; defaults to sys.argv.
    """
    parser = get_parser()
    wav_path, keyboard_path, clear_cache = process_args(parser, args)
    audio_data, framerate_hz, channels = get_audio_data(wav_path)
    results = get_keyboard_info(keyboard_path)
    keys, tones, color_to_key, key_color, key_txt_color = results
    key_sounds = get_or_create_key_sounds(
        wav_path, framerate_hz, channels, tones, clear_cache, keys
    )
    _screen, keyboard = configure_pygame_audio_and_set_ui(
        framerate_hz, channels, keyboard_path, color_to_key, key_color, key_txt_color
    )
    print(
        "Ready for you to play!\n"
        "Press the keys on your keyboard. "
        # fixed typo: "presss" -> "press"
        "To exit press ESC or close the pygame window"
    )
    play_until_user_exits(keys, key_sounds, keyboard)


if __name__ == "__main__":
    play_pianoputer()
|
[
"os.mkdir",
"argparse.ArgumentParser",
"keyboardlayout.KeyboardInfo",
"keyboardlayout.pygame.KeyboardLayout",
"pygame.event.get",
"pygame.mixer.init",
"collections.defaultdict",
"pathlib.Path",
"pygame.font.init",
"pygame.display.update",
"shutil.rmtree",
"os.path.join",
"codecs.open",
"warnings.simplefilter",
"pygame.font.SysFont",
"pygame.display.set_mode",
"pygame.event.set_blocked",
"numpy.transpose",
"soundfile.write",
"pygame.display.set_caption",
"librosa.effects.pitch_shift",
"pygame.quit",
"soundfile.read",
"os.path.basename",
"librosa.load",
"numpy.vstack",
"pygame.display.init",
"re.compile",
"pygame.Color",
"keyboardlayout.Key"
] |
[((396, 423), 're.compile', 're.compile', (['"""\\\\s[abcdefg]$"""'], {}), "('\\\\s[abcdefg]$')\n", (406, 423), False, 'import re\n'), ((926, 974), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'DESCRIPTION'}), '(description=DESCRIPTION)\n', (949, 974), False, 'import argparse\n'), ((2045, 2106), 'librosa.load', 'librosa.load', (['wav_path'], {'sr': 'sample_rate_hz', 'mono': '(channels == 1)'}), '(wav_path, sr=sample_rate_hz, mono=channels == 1)\n', (2057, 2106), False, 'import librosa\n'), ((2256, 2294), 'pathlib.Path', 'Path', (['folder_containing_wav', 'file_name'], {}), '(folder_containing_wav, file_name)\n', (2260, 2294), False, 'from pathlib import Path\n'), ((5586, 5603), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5597, 5603), False, 'from collections import defaultdict\n'), ((6843, 6864), 'pygame.display.init', 'pygame.display.init', ([], {}), '()\n', (6862, 6864), False, 'import pygame\n'), ((6869, 6909), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""pianoputer"""'], {}), "('pianoputer')\n", (6895, 6909), False, 'import pygame\n'), ((6986, 7016), 'pygame.event.set_blocked', 'pygame.event.set_blocked', (['None'], {}), '(None)\n', (7010, 7016), False, 'import pygame\n'), ((7085, 7103), 'pygame.font.init', 'pygame.font.init', ([], {}), '()\n', (7101, 7103), False, 'import pygame\n'), ((7121, 7237), 'pygame.mixer.init', 'pygame.mixer.init', (['framerate_hz', 'BITS_32BIT', 'channels'], {'allowedchanges': 'AUDIO_ALLOWED_CHANGES_HARDWARE_DETERMINED'}), '(framerate_hz, BITS_32BIT, channels, allowedchanges=\n AUDIO_ALLOWED_CHANGES_HARDWARE_DETERMINED)\n', (7138, 7237), False, 'import pygame\n'), ((8514, 8541), 'pygame.Color', 'pygame.Color', (['key_txt_color'], {}), '(key_txt_color)\n', (8526, 8541), False, 'import pygame\n'), ((8562, 8626), 'keyboardlayout.KeyboardInfo', 'kl.KeyboardInfo', ([], {'position': '(0, 0)', 'padding': '(2)', 'color': 'key_txt_color'}), '(position=(0, 0), 
padding=2, color=key_txt_color)\n', (8577, 8626), True, 'import keyboardlayout as kl\n'), ((8959, 9047), 'keyboardlayout.pygame.KeyboardLayout', 'klp.KeyboardLayout', (['layout_name', 'keyboard_info', 'letter_key_size', 'key_info', 'overrides'], {}), '(layout_name, keyboard_info, letter_key_size, key_info,\n overrides)\n', (8977, 9047), True, 'import keyboardlayout.pygame as klp\n'), ((9152, 9206), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(screen_width, screen_height)'], {}), '((screen_width, screen_height))\n', (9175, 9206), False, 'import pygame\n'), ((9297, 9320), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (9318, 9320), False, 'import pygame\n'), ((10313, 10326), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (10324, 10326), False, 'import pygame\n'), ((10425, 10449), 'soundfile.read', 'soundfile.read', (['wav_path'], {}), '(wav_path)\n', (10439, 10449), False, 'import soundfile\n'), ((2354, 2386), 'shutil.rmtree', 'shutil.rmtree', (['cache_folder_path'], {}), '(cache_folder_path)\n', (2367, 2386), False, 'import shutil\n'), ((2483, 2510), 'os.mkdir', 'os.mkdir', (['cache_folder_path'], {}), '(cache_folder_path)\n', (2491, 2510), False, 'import os\n'), ((4202, 4246), 'codecs.open', 'codecs.open', (['keyboard_file'], {'encoding': '"""utf-8"""'}), "(keyboard_file, encoding='utf-8')\n", (4213, 4246), False, 'import codecs\n'), ((7695, 7720), 'pygame.Color', 'pygame.Color', (['color_value'], {}), '(color_value)\n', (7707, 7720), False, 'import pygame\n'), ((9223, 9244), 'pygame.Color', 'pygame.Color', (['"""black"""'], {}), "('black')\n", (9235, 9244), False, 'import pygame\n'), ((9588, 9606), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (9604, 9606), False, 'import pygame\n'), ((10887, 10918), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (10908, 10918), False, 'import warnings\n'), ((11011, 11054), 'os.path.join', 'os.path.join', (['CURRENT_WORKING_DIR', 'wav_path'], 
{}), '(CURRENT_WORKING_DIR, wav_path)\n', (11023, 11054), False, 'import os\n'), ((11170, 11218), 'os.path.join', 'os.path.join', (['CURRENT_WORKING_DIR', 'keyboard_path'], {}), '(CURRENT_WORKING_DIR, keyboard_path)\n', (11182, 11218), False, 'import os\n'), ((772, 786), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (776, 786), False, 'from pathlib import Path\n'), ((2140, 2166), 'os.path.basename', 'os.path.basename', (['wav_path'], {}), '(wav_path)\n', (2156, 2166), False, 'import os\n'), ((2769, 2833), 'librosa.load', 'librosa.load', (['cached_path'], {'sr': 'sample_rate_hz', 'mono': '(channels == 1)'}), '(cached_path, sr=sample_rate_hz, mono=channels == 1)\n', (2781, 2833), False, 'import librosa\n'), ((3512, 3581), 'soundfile.write', 'soundfile.write', (['cached_path', 'sound', 'sample_rate_hz', 'DESCRIPTOR_32BIT'], {}), '(cached_path, sound, sample_rate_hz, DESCRIPTOR_32BIT)\n', (3527, 3581), False, 'import soundfile\n'), ((7958, 7986), 'pygame.Color', 'pygame.Color', (['inverted_color'], {}), '(inverted_color)\n', (7970, 7986), False, 'import pygame\n'), ((8100, 8137), 'pygame.Color', 'pygame.Color', (['([other_val] * 3 + [255])'], {}), '([other_val] * 3 + [255])\n', (8112, 8137), False, 'import pygame\n'), ((8691, 8714), 'pygame.Color', 'pygame.Color', (['key_color'], {}), '(key_color)\n', (8703, 8714), False, 'import pygame\n'), ((8734, 8761), 'pygame.Color', 'pygame.Color', (['key_txt_color'], {}), '(key_txt_color)\n', (8746, 8761), False, 'import pygame\n'), ((8780, 8823), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""Arial"""', '(key_size // 4)'], {}), "('Arial', key_size // 4)\n", (8799, 8823), False, 'import pygame\n'), ((2199, 2213), 'pathlib.Path', 'Path', (['wav_path'], {}), '(wav_path)\n', (2203, 2213), False, 'from pathlib import Path\n'), ((2628, 2645), 'pathlib.Path', 'Path', (['cached_path'], {}), '(cached_path)\n', (2632, 2645), False, 'from pathlib import Path\n'), ((2935, 2957), 'numpy.transpose', 'numpy.transpose', 
(['sound'], {}), '(sound)\n', (2950, 2957), False, 'import numpy\n'), ((3187, 3235), 'librosa.effects.pitch_shift', 'librosa.effects.pitch_shift', (['y', 'sr'], {'n_steps': 'tone'}), '(y, sr, n_steps=tone)\n', (3214, 3235), False, 'import librosa\n'), ((4839, 4851), 'keyboardlayout.Key', 'kl.Key', (['line'], {}), '(line)\n', (4845, 4851), True, 'import keyboardlayout as kl\n'), ((8302, 8345), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""Arial"""', '(key_size // 4)'], {}), "('Arial', key_size // 4)\n", (8321, 8345), False, 'import pygame\n'), ((3307, 3358), 'librosa.effects.pitch_shift', 'librosa.effects.pitch_shift', (['y[i]', 'sr'], {'n_steps': 'tone'}), '(y[i], sr, n_steps=tone)\n', (3334, 3358), False, 'import librosa\n'), ((3470, 3496), 'numpy.vstack', 'numpy.vstack', (['new_channels'], {}), '(new_channels)\n', (3482, 3496), False, 'import numpy\n')]
|
#!/usr/bin/env python
from setuptools import setup, find_packages
# Distribution metadata for the pokebattle nameko service.
setup(
    name='pokebattle',
    version='0.0.1',
    url='https://github.com/skooda/poke-battle',
    # ship every package except the test tree
    packages=find_packages(exclude=['test', 'test.*']),
    # runtime dependencies: nameko framework plus its sqlalchemy bridge
    install_requires=["nameko>=2.1.2", "nameko_sqlalchemy>=0.0.1"],
)
|
[
"setuptools.find_packages"
] |
[((181, 222), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['test', 'test.*']"}), "(exclude=['test', 'test.*'])\n", (194, 222), False, 'from setuptools import setup, find_packages\n')]
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2005-2006 TINY SPRL. (http://tiny.be) All Rights Reserved.
#
# $Id: product_expiry.py 4304 2006-10-25 09:54:51Z ged $
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
from openerp.osv import fields,osv
import pooler
import netsvc
import time
from xml import dom
# Default body for the 'code_exec' field: a sample snippet that is meant to
# run with the database cursor 'cr' in scope and store its outcome in
# 'result'.
CODE_EXEC_DEFAULT = '''\
res = []
cr.execute("select id, code from account_journal")
for record in cr.dictfetchall():
    res.append(record['code'])
result = res
'''
class accounting_assert_test(osv.osv):
    """OpenERP model storing user-defined accounting consistency tests.

    Each record holds a Python snippet ('code_exec') describing one
    accounting invariant to verify against the database.
    """
    _name = "accounting.assert.test"
    _order = "sequence"
    _columns = {
        'name': fields.char('Test Name', size=256, required=True, select=True, translate=True),
        'desc': fields.text('Test Description', select=True, translate=True),
        'code_exec': fields.text('Python code', required=True),
        'active': fields.boolean('Active'),
        'sequence': fields.integer('Sequence'),
    }
    _defaults = {
        'code_exec': CODE_EXEC_DEFAULT,
        'active': True,
        'sequence': 10,
    }
# Instantiation registers the model with the ORM (old-style OpenERP API).
accounting_assert_test()
|
[
"openerp.osv.fields.text",
"openerp.osv.fields.boolean",
"openerp.osv.fields.char",
"openerp.osv.fields.integer"
] |
[((1804, 1882), 'openerp.osv.fields.char', 'fields.char', (['"""Test Name"""'], {'size': '(256)', 'required': '(True)', 'select': '(True)', 'translate': '(True)'}), "('Test Name', size=256, required=True, select=True, translate=True)\n", (1815, 1882), False, 'from openerp.osv import fields, osv\n'), ((1900, 1960), 'openerp.osv.fields.text', 'fields.text', (['"""Test Description"""'], {'select': '(True)', 'translate': '(True)'}), "('Test Description', select=True, translate=True)\n", (1911, 1960), False, 'from openerp.osv import fields, osv\n'), ((1983, 2024), 'openerp.osv.fields.text', 'fields.text', (['"""Python code"""'], {'required': '(True)'}), "('Python code', required=True)\n", (1994, 2024), False, 'from openerp.osv import fields, osv\n'), ((2044, 2068), 'openerp.osv.fields.boolean', 'fields.boolean', (['"""Active"""'], {}), "('Active')\n", (2058, 2068), False, 'from openerp.osv import fields, osv\n'), ((2090, 2116), 'openerp.osv.fields.integer', 'fields.integer', (['"""Sequence"""'], {}), "('Sequence')\n", (2104, 2116), False, 'from openerp.osv import fields, osv\n')]
|
import logging
from os import path
from scout.utils.md5 import generate_md5_key
from scout.utils.handle import get_file_handle
LOG = logging.getLogger(__name__)
def load_cytobands(resource, build, adapter):
    """Parse a cytoband definition file and store its bands via the adapter.

    Args:
        resource(str): path to cytobands file (either build 37 or 38)
        build(str): "37" or "38"
        adapter(MongoAdapter)
    """
    LOG.debug(f"Reading cytoband file for genome build {build}")
    if not path.exists(resource):
        LOG.error(f"Resource {resource} could not be found.")
        return
    cytobands = []
    for raw_line in get_file_handle(resource):
        # skip comment/header lines
        if raw_line.startswith("#"):
            continue
        # Example line: (chr)3 <tab> 58600000 <tab> 63800000 <tab> p14.2 <tab> gneg
        columns = raw_line.split("\t")
        chromosome = columns[0].lstrip("chr")
        band_name = columns[3]
        cytobands.append(
            {
                "_id": generate_md5_key([build, chromosome, band_name]),
                "band": band_name,
                "chrom": str(chromosome),
                # coordinates are shifted to 1-based
                "start": str(int(columns[1]) + 1),
                "stop": str(int(columns[2]) + 1),
                "build": str(build),
            }
        )
    LOG.debug(f"Found {len(cytobands)} cytobands in the file.")
    adapter.add_cytobands(cytobands)
|
[
"scout.utils.handle.get_file_handle",
"os.path.exists",
"scout.utils.md5.generate_md5_key",
"logging.getLogger"
] |
[((136, 163), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (153, 163), False, 'import logging\n'), ((622, 647), 'scout.utils.handle.get_file_handle', 'get_file_handle', (['resource'], {}), '(resource)\n', (637, 647), False, 'from scout.utils.handle import get_file_handle\n'), ((500, 521), 'os.path.exists', 'path.exists', (['resource'], {}), '(resource)\n', (511, 521), False, 'from os import path\n'), ((953, 991), 'scout.utils.md5.generate_md5_key', 'generate_md5_key', (['[build, chrom, band]'], {}), '([build, chrom, band])\n', (969, 991), False, 'from scout.utils.md5 import generate_md5_key\n')]
|
from flask import render_template, request, redirect, url_for
# from app import app
from . import main
from ..requests import get_source, get_article
# views
@main.route('/')
def index():
    """Render the homepage listing all available news sources."""
    page_title = 'WithIt- Get current news.'
    sources = get_source('sources')
    print(sources)  # debug output of the fetched sources
    return render_template('source.html', title=page_title, sources=sources)
@main.route('/article')
def article():
    """Render the articles page with the fetched news articles."""
    articles = get_article('articles')
    print(articles)  # debug output of the fetched articles
    return render_template('article.html', articles=articles)
@main.route('/search')
def search():
    """Render the source search-results page."""
    return render_template('search.html')
@main.route('/favourite')
def favourite():
    """Render the page listing the user's favourite news sources."""
    return render_template('favourite.html')
@main.route('/contact')
def contact():
    """Render the contact page."""
    return render_template('contact.html')
|
[
"flask.render_template"
] |
[((366, 431), 'flask.render_template', 'render_template', (['"""source.html"""'], {'title': 'title', 'sources': 'news_sources'}), "('source.html', title=title, sources=news_sources)\n", (381, 431), False, 'from flask import render_template, request, redirect, url_for\n'), ((608, 663), 'flask.render_template', 'render_template', (['"""article.html"""'], {'articles': 'news_articles'}), "('article.html', articles=news_articles)\n", (623, 663), False, 'from flask import render_template, request, redirect, url_for\n'), ((764, 794), 'flask.render_template', 'render_template', (['"""search.html"""'], {}), "('search.html')\n", (779, 794), False, 'from flask import render_template, request, redirect, url_for\n'), ((905, 938), 'flask.render_template', 'render_template', (['"""favourite.html"""'], {}), "('favourite.html')\n", (920, 938), False, 'from flask import render_template, request, redirect, url_for\n'), ((1045, 1076), 'flask.render_template', 'render_template', (['"""contact.html"""'], {}), "('contact.html')\n", (1060, 1076), False, 'from flask import render_template, request, redirect, url_for\n')]
|
import PySimpleGUI as Gui
# layout is type(class 'list') made up of lists
layout = [
[Gui.Text('Text typed below will be shown here.', key='out')],
[Gui.InputText('Type here. Clears first.', key='in', enable_events=True)],
[Gui.Button('Button')]
]
window = Gui.Window('title', layout, size=[500, 100],
return_keyboard_events=True)
is_input_clear = False
in_list = []
while True:
event, values = window.Read()
if event == 'Button':
# update the "'text' of 'key' 'out'"
# with window.read()s' returned 'values'
# "'key' 'in'.
window['out'].Update(values['in'])
in_list.append(values['in'])
elif event == 'in':
# do this only once, using counter, to clear input field.
if not is_input_clear:
window['in'].Update('')
is_input_clear = True
elif event is None:
print('window closed')
break
window.close()
print("\nItems entered are saved to:", str(type(in_list)),
"in_list[]" + ":\n", in_list, "\n")
exit(0)
|
[
"PySimpleGUI.InputText",
"PySimpleGUI.Text",
"PySimpleGUI.Window",
"PySimpleGUI.Button"
] |
[((271, 344), 'PySimpleGUI.Window', 'Gui.Window', (['"""title"""', 'layout'], {'size': '[500, 100]', 'return_keyboard_events': '(True)'}), "('title', layout, size=[500, 100], return_keyboard_events=True)\n", (281, 344), True, 'import PySimpleGUI as Gui\n'), ((91, 150), 'PySimpleGUI.Text', 'Gui.Text', (['"""Text typed below will be shown here."""'], {'key': '"""out"""'}), "('Text typed below will be shown here.', key='out')\n", (99, 150), True, 'import PySimpleGUI as Gui\n'), ((158, 229), 'PySimpleGUI.InputText', 'Gui.InputText', (['"""Type here. Clears first."""'], {'key': '"""in"""', 'enable_events': '(True)'}), "('Type here. Clears first.', key='in', enable_events=True)\n", (171, 229), True, 'import PySimpleGUI as Gui\n'), ((237, 257), 'PySimpleGUI.Button', 'Gui.Button', (['"""Button"""'], {}), "('Button')\n", (247, 257), True, 'import PySimpleGUI as Gui\n')]
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import pandas as pd
import random
import json
IMAGE_SIZE = 24
NUM_CLASSES = 5
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = 2046
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = 512
NUM_PREPROCESS_THREADS = 8
IMAGE_HEIGHT = 480
IMAGE_WIDTH = 640
DOWNSIZE_FACTOR = 1.0
random.seed(1337)
def readCSV(fn):
csv = pd.read_csv(fn, keep_default_na=False, na_values=['NaN'])
df_test = csv
imagePaths_test = list(df_test['image_dir'])
pathsNumpy_test = np.array(imagePaths_test)
labelFnsNumpy_test = pathsNumpy_test
return pathsNumpy_test, labelFnsNumpy_test
def segmentation_readImages(input_queue, mode):
file_contents = tf.read_file(input_queue[0])
label_file_contents = tf.read_file(input_queue[1])
example = tf.cast(tf.image.decode_image(file_contents, channels=3), tf.float32)
labelMask = tf.cast(tf.image.decode_image(label_file_contents, channels=3), tf.float32)
if mode == "train":
chance_lr = tf.random_normal([1])
chance_ud = tf.random_normal([1])
if tf.reduce_all(tf.greater(chance_lr, tf.Variable(1.0))) is True:
example = tf.image.flip_left_right(example)
labelMask = tf.image.flip_left_right(labelMask)
if tf.reduce_all(tf.greater(chance_ud, tf.Variable(1.0))) is True:
example = tf.image.flip_up_down(example)
labelMask = tf.image.flip_up_down(labelMask)
example = tf.image.random_brightness(example, 10.0)
example.set_shape([IMAGE_HEIGHT, IMAGE_WIDTH, 3])
labelMask.set_shape([IMAGE_HEIGHT, IMAGE_WIDTH, 3])
return input_queue[0], example, labelMask
def _segmentation_generate_image_and_label_batch(_name, image, label, min_queue_examples, batch_size, shuffle):
num_preprocess_threads = NUM_PREPROCESS_THREADS
if shuffle:
n, images, label_batch = tf.train.shuffle_batch([_name, image, label], batch_size=batch_size, num_threads=num_preprocess_threads, capacity=min_queue_examples + 3 * batch_size, min_after_dequeue=min_queue_examples)
return n, images, label_batch
else:
n, images, label_batch = tf.train.batch([_name, image, label], batch_size=batch_size, num_threads=num_preprocess_threads, capacity=min_queue_examples + 3 * batch_size)
return n, images, label_batch
def segmentation_distorted_inputs(imageFns, labelMasks, batch_size, num_examples_per_epoch, shuffle, _condition, mode):
imageFnsTensor = tf.convert_to_tensor(imageFns, dtype=tf.string)
labelMasksTensor = tf.convert_to_tensor(labelMasks, dtype=tf.string)
inputQueueTrain = tf.train.slice_input_producer([imageFnsTensor, labelMasksTensor], shuffle=False)
name, raw_image, labelMask = segmentation_readImages(inputQueueTrain, mode)
float_image = tf.image.per_image_standardization(raw_image)
min_fraction_of_examples_in_queue = 0.4
min_queue_examples = int(num_examples_per_epoch * min_fraction_of_examples_in_queue)
print ('Filling queue with %d images before starting to train. This will take a few minutes.' % min_queue_examples)
_name, _image, _mask = _segmentation_generate_image_and_label_batch(name, float_image, labelMask, min_queue_examples, batch_size, shuffle=shuffle)
images = tf.image.resize_bicubic(_image, tf.convert_to_tensor([int(480 / DOWNSIZE_FACTOR), int(640 / DOWNSIZE_FACTOR)], dtype=tf.int32))
labelsRGB = tf.image.resize_bicubic(_mask, tf.convert_to_tensor([int(480 / DOWNSIZE_FACTOR), int(640 / DOWNSIZE_FACTOR)], dtype=tf.int32))
labelsFullRange = tf.image.rgb_to_grayscale(labelsRGB)
_max = tf.clip_by_value(tf.reduce_max(labelsFullRange), 1, 255)
labels = tf.divide(labelsFullRange, _max)
return _name, images, labels
|
[
"tensorflow.image.flip_left_right",
"tensorflow.image.rgb_to_grayscale",
"tensorflow.image.flip_up_down",
"tensorflow.train.shuffle_batch",
"pandas.read_csv",
"tensorflow.convert_to_tensor",
"tensorflow.reduce_max",
"tensorflow.divide",
"random.seed",
"numpy.array",
"tensorflow.image.per_image_standardization",
"tensorflow.image.decode_image",
"tensorflow.random_normal",
"tensorflow.Variable",
"tensorflow.read_file",
"tensorflow.train.batch",
"tensorflow.image.random_brightness",
"tensorflow.train.slice_input_producer"
] |
[((401, 418), 'random.seed', 'random.seed', (['(1337)'], {}), '(1337)\n', (412, 418), False, 'import random\n'), ((445, 502), 'pandas.read_csv', 'pd.read_csv', (['fn'], {'keep_default_na': '(False)', 'na_values': "['NaN']"}), "(fn, keep_default_na=False, na_values=['NaN'])\n", (456, 502), True, 'import pandas as pd\n'), ((586, 611), 'numpy.array', 'np.array', (['imagePaths_test'], {}), '(imagePaths_test)\n', (594, 611), True, 'import numpy as np\n'), ((762, 790), 'tensorflow.read_file', 'tf.read_file', (['input_queue[0]'], {}), '(input_queue[0])\n', (774, 790), True, 'import tensorflow as tf\n'), ((814, 842), 'tensorflow.read_file', 'tf.read_file', (['input_queue[1]'], {}), '(input_queue[1])\n', (826, 842), True, 'import tensorflow as tf\n'), ((2416, 2463), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['imageFns'], {'dtype': 'tf.string'}), '(imageFns, dtype=tf.string)\n', (2436, 2463), True, 'import tensorflow as tf\n'), ((2484, 2533), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['labelMasks'], {'dtype': 'tf.string'}), '(labelMasks, dtype=tf.string)\n', (2504, 2533), True, 'import tensorflow as tf\n'), ((2553, 2638), 'tensorflow.train.slice_input_producer', 'tf.train.slice_input_producer', (['[imageFnsTensor, labelMasksTensor]'], {'shuffle': '(False)'}), '([imageFnsTensor, labelMasksTensor], shuffle=False\n )\n', (2582, 2638), True, 'import tensorflow as tf\n'), ((2726, 2771), 'tensorflow.image.per_image_standardization', 'tf.image.per_image_standardization', (['raw_image'], {}), '(raw_image)\n', (2760, 2771), True, 'import tensorflow as tf\n'), ((3466, 3502), 'tensorflow.image.rgb_to_grayscale', 'tf.image.rgb_to_grayscale', (['labelsRGB'], {}), '(labelsRGB)\n', (3491, 3502), True, 'import tensorflow as tf\n'), ((3578, 3610), 'tensorflow.divide', 'tf.divide', (['labelsFullRange', '_max'], {}), '(labelsFullRange, _max)\n', (3587, 3610), True, 'import tensorflow as tf\n'), ((862, 910), 'tensorflow.image.decode_image', 'tf.image.decode_image', 
(['file_contents'], {'channels': '(3)'}), '(file_contents, channels=3)\n', (883, 910), True, 'import tensorflow as tf\n'), ((945, 999), 'tensorflow.image.decode_image', 'tf.image.decode_image', (['label_file_contents'], {'channels': '(3)'}), '(label_file_contents, channels=3)\n', (966, 999), True, 'import tensorflow as tf\n'), ((1049, 1070), 'tensorflow.random_normal', 'tf.random_normal', (['[1]'], {}), '([1])\n', (1065, 1070), True, 'import tensorflow as tf\n'), ((1087, 1108), 'tensorflow.random_normal', 'tf.random_normal', (['[1]'], {}), '([1])\n', (1103, 1108), True, 'import tensorflow as tf\n'), ((1453, 1494), 'tensorflow.image.random_brightness', 'tf.image.random_brightness', (['example', '(10.0)'], {}), '(example, 10.0)\n', (1479, 1494), True, 'import tensorflow as tf\n'), ((1846, 2042), 'tensorflow.train.shuffle_batch', 'tf.train.shuffle_batch', (['[_name, image, label]'], {'batch_size': 'batch_size', 'num_threads': 'num_preprocess_threads', 'capacity': '(min_queue_examples + 3 * batch_size)', 'min_after_dequeue': 'min_queue_examples'}), '([_name, image, label], batch_size=batch_size,\n num_threads=num_preprocess_threads, capacity=min_queue_examples + 3 *\n batch_size, min_after_dequeue=min_queue_examples)\n', (1868, 2042), True, 'import tensorflow as tf\n'), ((2101, 2248), 'tensorflow.train.batch', 'tf.train.batch', (['[_name, image, label]'], {'batch_size': 'batch_size', 'num_threads': 'num_preprocess_threads', 'capacity': '(min_queue_examples + 3 * batch_size)'}), '([_name, image, label], batch_size=batch_size, num_threads=\n num_preprocess_threads, capacity=min_queue_examples + 3 * batch_size)\n', (2115, 2248), True, 'import tensorflow as tf\n'), ((3528, 3558), 'tensorflow.reduce_max', 'tf.reduce_max', (['labelsFullRange'], {}), '(labelsFullRange)\n', (3541, 3558), True, 'import tensorflow as tf\n'), ((1193, 1226), 'tensorflow.image.flip_left_right', 'tf.image.flip_left_right', (['example'], {}), '(example)\n', (1217, 1226), True, 'import tensorflow as 
tf\n'), ((1242, 1277), 'tensorflow.image.flip_left_right', 'tf.image.flip_left_right', (['labelMask'], {}), '(labelMask)\n', (1266, 1277), True, 'import tensorflow as tf\n'), ((1361, 1391), 'tensorflow.image.flip_up_down', 'tf.image.flip_up_down', (['example'], {}), '(example)\n', (1382, 1391), True, 'import tensorflow as tf\n'), ((1407, 1439), 'tensorflow.image.flip_up_down', 'tf.image.flip_up_down', (['labelMask'], {}), '(labelMask)\n', (1428, 1439), True, 'import tensorflow as tf\n'), ((1152, 1168), 'tensorflow.Variable', 'tf.Variable', (['(1.0)'], {}), '(1.0)\n', (1163, 1168), True, 'import tensorflow as tf\n'), ((1320, 1336), 'tensorflow.Variable', 'tf.Variable', (['(1.0)'], {}), '(1.0)\n', (1331, 1336), True, 'import tensorflow as tf\n')]
|
"""
File Name: UnoPytorch/file_downloading.py
Author: <NAME> (xduan7)
Email: <EMAIL>
Date: 8/13/18
Python Version: 3.6.6
File Description:
"""
import errno
import os
import urllib
import logging
FTP_ROOT = 'http://ftp.mcs.anl.gov/pub/candle/public/' \
'benchmarks/Pilot1/combo/'
logger = logging.getLogger(__name__)
def download_files(filenames: str or iter,
target_folder: str,
ftp_root: str = FTP_ROOT, ):
"""download_files(['some', 'file', 'names'], './data/, 'ftp://some-server')
This function download one or more files from given FTP server to target
folder. Note that the filenames wil be the same with FTP server.
Args:
filenames (str or iter): a string of filename or an iterable structure
of multiple filenames for downloading.
target_folder (str): target folder for storing downloaded data.
ftp_root (str): address for FTP server.
Returns:
None
"""
if type(filenames) is str:
filenames = [filenames, ]
# Create target folder if not exist
try:
os.makedirs(target_folder)
except OSError as e:
if e.errno != errno.EEXIST:
logger.error('Failed to create data folders', exc_info=True)
raise
# Download each file in the list
for filename in filenames:
file_path = os.path.join(target_folder, filename)
if not os.path.exists(file_path):
logger.debug('File does not exit. Downloading %s ...' % filename)
url = ftp_root + filename
try:
url_data = urllib.request.urlopen(url)
with open(file_path, 'wb') as f:
f.write(url_data.read())
except IOError:
logger.error('Failed to open and download url %s.' % url,
exc_info=True)
raise
|
[
"os.makedirs",
"os.path.exists",
"urllib.request.urlopen",
"os.path.join",
"logging.getLogger"
] |
[((378, 405), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (395, 405), False, 'import logging\n'), ((1186, 1212), 'os.makedirs', 'os.makedirs', (['target_folder'], {}), '(target_folder)\n', (1197, 1212), False, 'import os\n'), ((1454, 1491), 'os.path.join', 'os.path.join', (['target_folder', 'filename'], {}), '(target_folder, filename)\n', (1466, 1491), False, 'import os\n'), ((1508, 1533), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (1522, 1533), False, 'import os\n'), ((1696, 1723), 'urllib.request.urlopen', 'urllib.request.urlopen', (['url'], {}), '(url)\n', (1718, 1723), False, 'import urllib\n')]
|
from __future__ import annotations
import dataclasses
from typing import Callable
# mypy hints
# e.g. "tests.FlatModel"
ModelName = str
# e.g. "django.db.models.fields.DateField"
ModelClassPath = str
AttributeName = str
AttributeValue = object
AttributeList = list[AttributeName]
IsoTimestamp = str
# {"date_registered": "django.db.models.fields.DateField"}
MetaFieldMap = dict[AttributeName, ModelClassPath]
FrozenModel = object
# see https://docs.python.org/3/library/pickle.html#object.__reduce__
PickleReducer = tuple[Callable, tuple[dict]]
# function call return value
DeconstructTuple = tuple[str, str, list, dict]
# used to define functions that overwrite default field to_python
FieldConverter = Callable[[AttributeName], AttributeValue]
FieldConverterMap = dict[AttributeName, FieldConverter]
def klass_str(klass: object) -> ModelClassPath:
"""Return fully-qualified (namespaced) name for a class."""
return f"{klass.__class__.__module__}.{klass.__class__.__qualname__}"
def is_dataclass_instance(obj: object, cls_name: str | None = None) -> bool:
"""
Return True if obj is a dataclass - taken from docs.
See https://docs.python.org/3/library/dataclasses.html#dataclasses.is_dataclass
"""
is_instance = dataclasses.is_dataclass(obj) and not isinstance(obj, type)
if cls_name:
return is_instance and obj.__class__.__name__ == cls_name
return is_instance
|
[
"dataclasses.is_dataclass"
] |
[((1249, 1278), 'dataclasses.is_dataclass', 'dataclasses.is_dataclass', (['obj'], {}), '(obj)\n', (1273, 1278), False, 'import dataclasses\n')]
|
from typing import Any, Callable, List, Optional, TypeVar, Union
from types import TracebackType
import os
import sys
import traceback
import requests
from requests.models import Response
from .ci_exception import CiException, CriticalCiException, SilentAbortException
from .gravity import Module
__all__ = [
"strip_path_start",
"parse_path",
"calculate_file_absolute_path",
"detect_environment",
"create_driver",
"format_traceback",
"catch_exception",
"trim_and_convert_to_unicode",
"convert_to_str",
"unify_argument_list",
"Uninterruptible",
"make_block",
"make_request"
]
ReturnT = TypeVar('ReturnT')
DecoratorT = Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]
def strip_path_start(line: str) -> str:
if line.startswith("./"):
return line[2:]
return line
def parse_path(path: str, starting_point: str) -> str:
if path.startswith('/'):
path = os.path.join(path)
else:
path = os.path.join(starting_point, path)
return os.path.abspath(path)
def calculate_file_absolute_path(target_directory: str, file_basename: str) -> str:
name = file_basename.replace(" ", "_")
name = name.replace("/", "\\")
if name.startswith('_'):
name = name[1:]
return os.path.join(target_directory, name)
def detect_environment() -> str:
"""
:return: "tc" if the script is launched on TeamCity agent,
"jenkins" is launched on Jenkins agent,
"github" is launched on Github Actions,
"terminal" otherwise
"""
teamcity = "TEAMCITY_VERSION" in os.environ
jenkins = "JENKINS_HOME" in os.environ
pycharm = "PYCHARM_HOSTED" in os.environ
github = "GITHUB_WORKFLOW" in os.environ
if pycharm:
return "terminal"
if teamcity and not jenkins:
return "tc"
if not teamcity and jenkins:
return "jenkins"
if github:
return "github"
return "terminal"
LocalFactoryT = TypeVar('LocalFactoryT', bound=Module)
TeamcityFactoryT = TypeVar('TeamcityFactoryT', bound=Module)
JenkinsFactoryT = TypeVar('JenkinsFactoryT', bound=Module)
GithubFactoryT = TypeVar('GithubFactoryT', bound=Module)
def create_driver(local_factory: Callable[[], LocalFactoryT],
teamcity_factory: Callable[[], TeamcityFactoryT],
jenkins_factory: Callable[[], JenkinsFactoryT],
github_factory: Callable[[], GithubFactoryT],
env_type: str = "") -> Union[LocalFactoryT, TeamcityFactoryT, JenkinsFactoryT, GithubFactoryT]:
if not env_type:
env_type = detect_environment()
if env_type == "tc":
return teamcity_factory()
if env_type == "jenkins":
return jenkins_factory()
if env_type == "github":
return github_factory()
return local_factory()
def format_traceback(exc: Exception, trace: Optional[TracebackType]) -> str:
tb_lines: List[str] = traceback.format_exception(exc.__class__, exc, trace)
tb_text: str = ''.join(tb_lines)
return tb_text
def catch_exception(exception_name: str, ignore_if: str = None) -> DecoratorT:
def decorated_function(function):
def function_to_run(*args, **kwargs):
result: ReturnT = None
try:
result = function(*args, **kwargs)
return result
except Exception as e:
if not type(e).__name__ == exception_name:
raise
if ignore_if is not None:
if ignore_if in str(e):
return result
raise CriticalCiException(str(e)) from e
return function_to_run
return decorated_function
def trim_and_convert_to_unicode(line: Union[bytes, str]) -> str:
if isinstance(line, bytes):
line = line.decode("utf-8", "replace")
elif not isinstance(line, str):
line = str(line)
if line.endswith("\n"):
line = line[:-1]
return line
def convert_to_str(line: Union[bytes, str]) -> str:
if isinstance(line, bytes):
return line.decode("utf8", "replace")
return str(line)
def unify_argument_list(source_list: Optional[List[str]], separator: str = ',',
additional_list: Optional[List[str]] = None) -> List[str]:
resulting_list: List[str] = additional_list if additional_list else []
# Add arguments parsed by ModuleArgumentParser, including list elements generated by nargs='+'
if source_list is not None:
for item in source_list:
if isinstance(item, list):
resulting_list.extend(item)
else:
resulting_list.append(item)
# Remove None and empty elements added by previous steps
resulting_list = [item for item in resulting_list if item]
# Split one-element arguments and merge to one list
resulting_list = [item.strip() for entry in resulting_list for item in entry.strip('"\'').split(separator)]
# Remove empty elements that might have been introduced by splitting
resulting_list = [item for item in resulting_list if item]
return resulting_list
class Uninterruptible:
def __init__(self, error_logger: Callable[[str], None]) -> None:
self.return_code: int = 0
self.error_logger: Callable[[str], None] = error_logger
self.exceptions: List[str] = []
def __enter__(self) -> Callable[..., None]:
def excepted_function(func: Callable[..., None], *args, **kwargs):
try:
func(*args, **kwargs)
except SilentAbortException as e:
self.return_code = max(self.return_code, e.application_exit_code)
except (KeyboardInterrupt, SystemExit):
self.error_logger("Interrupted from outer scope\n")
self.return_code = 3
except Exception as e:
ex_traceback = sys.exc_info()[2]
self.exceptions.append(format_traceback(e, ex_traceback))
self.return_code = max(self.return_code, 2)
return excepted_function
def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
if self.return_code == 2:
for entry in self.exceptions:
sys.stderr.write(entry)
if self.return_code != 0:
raise SilentAbortException(application_exit_code=self.return_code)
def make_block(block_name: str, pass_errors: bool = True) -> DecoratorT:
def decorated_function(func):
def function_in_block(self, *args, **kwargs):
return self.structure.run_in_block(func, block_name, pass_errors, self, *args, **kwargs)
return function_in_block
return decorated_function
def make_request(url: str, request_method: str = "GET", critical: bool = True, **kwargs) -> Response:
try:
response: Response = requests.request(method=request_method, url=url, **kwargs)
response.raise_for_status()
return response
except requests.RequestException as error:
text = f"Error opening URL, got '{type(error).__name__}' with following message:\n{error}"
if critical:
raise CriticalCiException(text) from error
raise CiException(text) from error
|
[
"os.path.abspath",
"traceback.format_exception",
"requests.request",
"sys.exc_info",
"typing.TypeVar",
"sys.stderr.write",
"os.path.join"
] |
[((641, 659), 'typing.TypeVar', 'TypeVar', (['"""ReturnT"""'], {}), "('ReturnT')\n", (648, 659), False, 'from typing import Any, Callable, List, Optional, TypeVar, Union\n'), ((1990, 2028), 'typing.TypeVar', 'TypeVar', (['"""LocalFactoryT"""'], {'bound': 'Module'}), "('LocalFactoryT', bound=Module)\n", (1997, 2028), False, 'from typing import Any, Callable, List, Optional, TypeVar, Union\n'), ((2048, 2089), 'typing.TypeVar', 'TypeVar', (['"""TeamcityFactoryT"""'], {'bound': 'Module'}), "('TeamcityFactoryT', bound=Module)\n", (2055, 2089), False, 'from typing import Any, Callable, List, Optional, TypeVar, Union\n'), ((2108, 2148), 'typing.TypeVar', 'TypeVar', (['"""JenkinsFactoryT"""'], {'bound': 'Module'}), "('JenkinsFactoryT', bound=Module)\n", (2115, 2148), False, 'from typing import Any, Callable, List, Optional, TypeVar, Union\n'), ((2166, 2205), 'typing.TypeVar', 'TypeVar', (['"""GithubFactoryT"""'], {'bound': 'Module'}), "('GithubFactoryT', bound=Module)\n", (2173, 2205), False, 'from typing import Any, Callable, List, Optional, TypeVar, Union\n'), ((1036, 1057), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (1051, 1057), False, 'import os\n'), ((1286, 1322), 'os.path.join', 'os.path.join', (['target_directory', 'name'], {}), '(target_directory, name)\n', (1298, 1322), False, 'import os\n'), ((2960, 3013), 'traceback.format_exception', 'traceback.format_exception', (['exc.__class__', 'exc', 'trace'], {}), '(exc.__class__, exc, trace)\n', (2986, 3013), False, 'import traceback\n'), ((945, 963), 'os.path.join', 'os.path.join', (['path'], {}), '(path)\n', (957, 963), False, 'import os\n'), ((989, 1023), 'os.path.join', 'os.path.join', (['starting_point', 'path'], {}), '(starting_point, path)\n', (1001, 1023), False, 'import os\n'), ((6889, 6947), 'requests.request', 'requests.request', ([], {'method': 'request_method', 'url': 'url'}), '(method=request_method, url=url, **kwargs)\n', (6905, 6947), False, 'import requests\n'), ((6283, 6306), 
'sys.stderr.write', 'sys.stderr.write', (['entry'], {}), '(entry)\n', (6299, 6306), False, 'import sys\n'), ((5931, 5945), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (5943, 5945), False, 'import sys\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkeipanycast.endpoint import endpoint_data
class AllocateAnycastEipAddressRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Eipanycast', '2020-03-09', 'AllocateAnycastEipAddress','eipanycast')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Bandwidth(self):
return self.get_query_params().get('Bandwidth')
def set_Bandwidth(self,Bandwidth):
self.add_query_param('Bandwidth',Bandwidth)
def get_ServiceLocation(self):
return self.get_query_params().get('ServiceLocation')
def set_ServiceLocation(self,ServiceLocation):
self.add_query_param('ServiceLocation',ServiceLocation)
def get_ClientToken(self):
return self.get_query_params().get('ClientToken')
def set_ClientToken(self,ClientToken):
self.add_query_param('ClientToken',ClientToken)
def get_Description(self):
return self.get_query_params().get('Description')
def set_Description(self,Description):
self.add_query_param('Description',Description)
def get_InternetChargeType(self):
return self.get_query_params().get('InternetChargeType')
def set_InternetChargeType(self,InternetChargeType):
self.add_query_param('InternetChargeType',InternetChargeType)
def get_Name(self):
return self.get_query_params().get('Name')
def set_Name(self,Name):
self.add_query_param('Name',Name)
def get_InstanceChargeType(self):
return self.get_query_params().get('InstanceChargeType')
def set_InstanceChargeType(self,InstanceChargeType):
self.add_query_param('InstanceChargeType',InstanceChargeType)
|
[
"aliyunsdkcore.request.RpcRequest.__init__",
"aliyunsdkeipanycast.endpoint.endpoint_data.getEndpointRegional",
"aliyunsdkeipanycast.endpoint.endpoint_data.getEndpointMap"
] |
[((969, 1069), 'aliyunsdkcore.request.RpcRequest.__init__', 'RpcRequest.__init__', (['self', '"""Eipanycast"""', '"""2020-03-09"""', '"""AllocateAnycastEipAddress"""', '"""eipanycast"""'], {}), "(self, 'Eipanycast', '2020-03-09',\n 'AllocateAnycastEipAddress', 'eipanycast')\n", (988, 1069), False, 'from aliyunsdkcore.request import RpcRequest\n'), ((1161, 1191), 'aliyunsdkeipanycast.endpoint.endpoint_data.getEndpointMap', 'endpoint_data.getEndpointMap', ([], {}), '()\n', (1189, 1191), False, 'from aliyunsdkeipanycast.endpoint import endpoint_data\n'), ((1272, 1307), 'aliyunsdkeipanycast.endpoint.endpoint_data.getEndpointRegional', 'endpoint_data.getEndpointRegional', ([], {}), '()\n', (1305, 1307), False, 'from aliyunsdkeipanycast.endpoint import endpoint_data\n')]
|
# Generated by Django 3.1.6 on 2021-04-10 10:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0027_auto_20210409_1947'),
]
operations = [
migrations.AddField(
model_name='gallery',
name='show',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='partner',
name='class_id',
field=models.CharField(default='BCvZL', max_length=150),
),
migrations.AlterField(
model_name='sponsor',
name='class_id',
field=models.CharField(default='HMlGm', max_length=150),
),
]
|
[
"django.db.models.CharField",
"django.db.models.BooleanField"
] |
[((330, 364), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (349, 364), False, 'from django.db import migrations, models\n'), ((489, 538), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""BCvZL"""', 'max_length': '(150)'}), "(default='BCvZL', max_length=150)\n", (505, 538), False, 'from django.db import migrations, models\n'), ((663, 712), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""HMlGm"""', 'max_length': '(150)'}), "(default='HMlGm', max_length=150)\n", (679, 712), False, 'from django.db import migrations, models\n')]
|
"""
Allocate apertures to targets.
Contains code originally written by 2021 Akamai intern, <NAME>.
.. include:: ../include/links.rst
"""
from pathlib import Path
from IPython import embed
import numpy
def random_targets(r, n=None, density=5., rng=None):
r"""
Draw a set of random x and y coordinates within a circle.
Args:
r (:obj:`float`):
Radius of the circle.
n (:obj:`int`, optional):
The total number of points to draw. If None, number drawn
based on the density requested.
density (:obj:`float`, optional):
The average density of targets within the circle. This is
used to calculate the number of points to generate within
the circle: ``n = int(numpy.ceil(density*numpy.pi*r**2))``. Units
must be appropriate match radius units.
rng (`numpy.random.Generator`_, optional):
Random number generator to use. If None, a new one is
instantiated using `numpy.random.default_rng`_.
Returns:
:obj:`tuple`: Two vectors of length :math:`N_{\rm targ}` (the number of
targets). Cartesian x coordinates are in the first vector, y
coordinates in the second.
"""
# Calculate the number of points to match an expected density
if n is None:
n = int(numpy.ceil(density*numpy.pi*r**2))
if rng is None:
rng = numpy.random.default_rng()
c = numpy.empty((0,2), dtype=float)
overdraw = 1.5
while c.shape[0] != n:
# Draw more points than needed within the unit square until the correct
# number is reached.
# TODO: Probably a less brute-force way of doing this...
c = rng.uniform(low=-1, high=1, size=(int(n*overdraw),2))
# Find those within the r = 1
indx = c[:,0]**2 + c[:,1]**2 < 1
c = c[indx][:n]
# Increase overdraw for next iteration
overdraw *= 1.1
return r*c[:,0], r*c[:,1]
def parse_targets(ifile, ra_c=1, dec_c=2, ap_c=None, default_ap=0):
"""
Parse target coordinates and aperture types from an input file.
Args:
ifile (:obj:`str`):
Columnated ascii file with the target coordinates.
ra_c (:obj:`int`, optional):
1-indexed column with the RA coordinates. Assumed to be in decimal
degrees.
dec_c (:obj:`int`, optional):
1-indexed column with the declination coordinates. Assumed to be in
decimal degrees.
ap_c (:obj:`int`, optional):
1-indexed column with the aperture type to assign to each target.
If None, the type is not available in the input file and the
``default_types`` is used for all targets. Apertures must be 0 for
a single fiber or 1 for an IFU.
default_ap (:obj:`int`, optional):
If the aperture types are not provided in the file, this sets the
type to assign to *all* apertures. Apertures must be 0 for a single
fiber or 1 for an IFU.
Returns:
:obj:`tuple`: Three numpy vectors with the coordinates and aperture type
for each target.
"""
# Check the input
if default_ap not in [0, 1]:
raise ValueError('Default aperture type must be 0 (single-fiber) or 1 (IFU).')
# Instantiate the file path
p = Path(ifile).resolve()
# Check it exists
if not p.exists():
raise FileNotFoundError(f'{str(p)}')
# Read the file
db = numpy.genfromtxt(str(p), dtype=str).T
# Check the requested columns exist
if numpy.any(numpy.array([ra_c, dec_c, 1 if ap_c is None else ap_c]) > db.shape[0]):
raise ValueError(f'{p.name} only contains {db.shape[0]} columns. Check column requests.')
# Collect the data and convert to the correct type
ra = db[ra_c-1].astype(float)
dec = db[dec_c-1].astype(float)
ap = numpy.full(ra.size, default_ap, dtype=int) if ap_c is None else db[ap_c-1].astype(int)
return ra, dec, ap
|
[
"numpy.full",
"numpy.ceil",
"numpy.empty",
"numpy.random.default_rng",
"pathlib.Path",
"numpy.array"
] |
[((1457, 1489), 'numpy.empty', 'numpy.empty', (['(0, 2)'], {'dtype': 'float'}), '((0, 2), dtype=float)\n', (1468, 1489), False, 'import numpy\n'), ((1421, 1447), 'numpy.random.default_rng', 'numpy.random.default_rng', ([], {}), '()\n', (1445, 1447), False, 'import numpy\n'), ((3919, 3961), 'numpy.full', 'numpy.full', (['ra.size', 'default_ap'], {'dtype': 'int'}), '(ra.size, default_ap, dtype=int)\n', (3929, 3961), False, 'import numpy\n'), ((1352, 1391), 'numpy.ceil', 'numpy.ceil', (['(density * numpy.pi * r ** 2)'], {}), '(density * numpy.pi * r ** 2)\n', (1362, 1391), False, 'import numpy\n'), ((3377, 3388), 'pathlib.Path', 'Path', (['ifile'], {}), '(ifile)\n', (3381, 3388), False, 'from pathlib import Path\n'), ((3614, 3669), 'numpy.array', 'numpy.array', (['[ra_c, dec_c, 1 if ap_c is None else ap_c]'], {}), '([ra_c, dec_c, 1 if ap_c is None else ap_c])\n', (3625, 3669), False, 'import numpy\n')]
|
import json
import os
from configurations import CONFIG
class GameAssets:
@staticmethod
def load(filename):
path = os.path.join(CONFIG.get('game_assets_folder'), filename)
with open(path, encoding='utf8') as f:
return json.load(f)
@staticmethod
def path(filename):
return os.path.join(CONFIG.get('game_assets_folder'), filename)
@staticmethod
def exists(filename):
path = os.path.join(CONFIG.get('game_assets_folder'), filename)
return os.path.exists(path)
|
[
"configurations.CONFIG.get",
"json.load",
"os.path.exists"
] |
[((517, 537), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (531, 537), False, 'import os\n'), ((147, 179), 'configurations.CONFIG.get', 'CONFIG.get', (['"""game_assets_folder"""'], {}), "('game_assets_folder')\n", (157, 179), False, 'from configurations import CONFIG\n'), ((257, 269), 'json.load', 'json.load', (['f'], {}), '(f)\n', (266, 269), False, 'import json\n'), ((341, 373), 'configurations.CONFIG.get', 'CONFIG.get', (['"""game_assets_folder"""'], {}), "('game_assets_folder')\n", (351, 373), False, 'from configurations import CONFIG\n'), ((458, 490), 'configurations.CONFIG.get', 'CONFIG.get', (['"""game_assets_folder"""'], {}), "('game_assets_folder')\n", (468, 490), False, 'from configurations import CONFIG\n')]
|
import time, random, re, sys
from lxml import etree
class Chatbot:
def __init__(self, initialStateID=None, botPrompt="", userPrompt="", context={}):
self.initialStateID=initialStateID
self.currentState=None
self.botPrompt=botPrompt
self.userPrompt=userPrompt
self.finished = False
self.waitingForInput = False
self.states = {}
self.messageQueue = []
self.functions = []
self.isValid = None
self.externChatbots = []
self.context = context
globals()['context'] = self.context
def getBotPrompt(self):
return self.botPrompt
def getUserPrompt(self):
return self.userPrompt
def isWaitingForInput(self):
return self.waitingForInput
def addFunction(self, function):
self.functions.append(function)
def addFunctionsFromCode(self, code):
ind1 = code.find("def")
while ind1 != -1:
indn = code.find("(",ind1+3)
name = code[ind1+3:indn].strip()
ind2 = code.find("def", ind1+3)
if ind2 == -1:
self.addFunction(Function(code,name=name))
else:
self.addFunction(Function(code,name=name))
ind1=ind2
def addExternChatbot(self, externChatbot):
self.externChatbots.append(externChatbot)
if externChatbot.getName() not in self.states:
self.states[externChatbot.getName()] = externChatbot
def isFinished(self):
return self.finished
def getStateFromId(self, stateId):
return self.states[stateId]
def setReady(self):
self.currentState = self.getStateFromId(self.initialStateID)
self.processState(self.currentState)
def processState(self, state):
startTP = state.getStart()
messages = startTP.processAsList()
self.messageQueue.extend(messages)
self.waitingForInput = False
def addState(self, state):
self.states[state.getId()] = state
def getNextMessage(self):
if len(self.messageQueue) > 0:
message = self.messageQueue.pop(0)
delay = message.delay
if delay > 0: time.sleep(delay)
if len(self.messageQueue) == 0: self.waitingForInput = True
return self.processMessage(message.text)
self.waitingForInput = True
return None
def processMessage(self, text):
if "${" in text:
indx1 = text.find("${")
resp = text[:indx1]
while indx1 != -1:
indx2 = text.find("}", indx1+2)
varname = text[indx1+2:indx2].strip()
resp += context[varname]
indx1 = text.find("${", indx2)
if indx1!=-1: resp+=text[indx2+1:indx1]
else: resp+=text[indx2+1:]
return resp
else:
return text
def processInput(self, string):
state = self.currentState
for case in state.getCases():
nextStateId = case.match(string, self)
if nextStateId != None:
messages = []
case.getOutput().processAsList(messages)
self.messageQueue.extend(messages)
if nextStateId == None:
self.finished = True
self.waitingForInput = False
else:
nextState = self.getStateFromId(nextStateId)
if isinstance(nextState, State):
self.currentState = nextState
messages = []
self.currentState.getStart().processAsList(messages)
self.messageQueue.extend(messages)
if self.currentState.isTerminalState(): self.finished = True
else: self.finished = False
self.waitingForInput = False
elif isinstance(nextState, ExternChatbot):
self.load(nextState.getSrc())
return
if state.hasLoopback():
messages = []
state.getLoopback().getOutput().processAsList(messages)
self.messageQueue.extend(messages)
self.finished = False
self.waitingForInput = False
def clear(self):
pass
############################################################################################################
#
# XML loading methods
############################################################################################################
def load(self, filename):
try:
parser = etree.XMLParser()
tree = etree.parse(filename, parser)
root = tree.getroot()
# TODO: validate with an Schema or a DTD
start=root.attrib["start"]
if "bot-prompt" in root.attrib:
botPrompt=root.attrib["bot-prompt"]
else:
botPrompt="chatbot> "
if "user-prompt" in root.attrib:
userPrompt=root.attrib["user-prompt"]
else:
userPrompt="you> "
self.clear()
self.__init__(start, botPrompt, userPrompt, context=self.context)
for el in root.getchildren():
if el.tag == "state":
self.readState(el)
elif el.tag == "function":
self.readFunction(el)
elif el.tag == "code":
self.readCode(el)
elif el.tag == "extern-chatbot":
self.readExternalChatbot(el)
self.executeOnStart()
self.setReady()
return self
except FileNotFoundError as err:
print("File not found: '"+filename+"'")
sys.exit(2)
def executeOnStart(self):
for funct in self.functions:
if funct.getName() == "onStart":
exec(funct.getSource())
exec("onStart()")
return
def readExternalChatbot(self, root):
self.addExternChatbot(ExternChatbot(root.attrib['name'], root.attrib['src']))
def readCode(self, root):
try:
exec(root.text)
except SyntaxError as e:
print("Error loading the following code:\n\n"+root.text)
print(e)
sys.exit(2)
self.addFunctionsFromCode(root.text)
def readFunction(self, root):
try:
exec(root.text)
except SyntaxError as e:
print("Error loading the following function:\n\n"+root.text)
print(e)
sys.exit(2)
self.addFunction(Function(root.text))
def readState(self, root):
state = State(root.attrib["id"])
for el in root.getchildren():
if el.tag == "start":
state.setStart( self.readStart(el) )
elif el.tag == "input":
state.setInputProcessor( self.readInput(el) )
self.addState(state)
def readStart(self, root):
resp = TextContainer()
for el in root.getchildren():
if el.tag == "sentence":
resp.addElement( self.readSentence(el) )
elif el.tag in ["random","sequence"]:
self.readContainer(el, resp)
return resp
def readInput(self,root):
resp = InputProcessor()
for el in root.getchildren():
if el.tag == "case":
resp.addCase( self.readCase(el) )
elif el.tag == "loopback":
resp.setLoopback( self.readLoopback(el))
return resp
def readLoopback(self, root):
resp = Case()
outp = TextContainer()
self.readContainer(root, outp)
resp.setOutput(outp)
return resp
def readCase(self, root):
resp = Case()
outp = TextContainer()
self.readContainer(root, outp)
resp.setOutput(outp)
try:
ctype = root.attrib["type"]
except KeyError:
print("Type attribute is mandatory for case elements, line "+str(root.sourceline))
sys.exit(2)
resp.setType(ctype)
if ctype == "pattern":
try:
resp.setPattern(root.attrib["pattern"])
except KeyError:
print("A pattern should be specified for a case of type pattern, line "+str(root.sourceline))
sys.exit(2)
elif ctype == "function":
try:
resp.setFunction(root.attrib["name"])
except KeyError:
print("The name of a function should be specified for a pattern of type function, line "+str(root.sourceline))
sys.exit(2)
try:
nextState = root.attrib["next"]
resp.setNextStateId(nextState)
except:
pass
return resp
def readSentence(self,root):
delay = 0
try:
delay = float(root.attrib["delay"])
except KeyError: pass
sent = Sentence(root.text, delay)
return sent
def readContainer(self, root, container=None):
if root.tag == "random":
cont = TextContainer("random")
else:
cont = TextContainer("sequence")
if container!=None:
container.addElement(cont)
for el in root.getchildren():
if el.tag == "sentence":
cont.addElement( self.readSentence(el) )
elif el.tag in ["random","sequence"]:
self.readContainer(el, cont)
return cont
#################################################################
# Class Message
#################################################################
class Message:
def __init__(self, text, delay=0):
self.text=text
self.delay=delay
#################################################################
# Class ExternChatbot
#################################################################
class ExternChatbot:
def __init__(self, name, src):
self.name=name
self.src=src
def getName(self):
return self.name
def getSrc(self):
return self.src
def setName(self, name):
self.name=name
def setSrc(self, src):
self.src=src
#################################################################
# Class State
#################################################################
class Function:
def __init__(self, src, name=None):
if name==None:
self.name = self.getFunctionNameFromSource(src)
else:
self.name = name
self.src = src
def getFunctionNameFromSource(self, src):
resp = ""
ind1 = src.find("def")
ind2 = src.find("(", ind1+3)
return src[ind1+3:ind2].strip()
def getName(self):
return self.name
def getSource(self):
return self.src
#################################################################
# Class State
#################################################################
class State:
def __init__(self, id):
self.id=id
self.inputProcessor=None
def setInputProcessor(self, inputElement):
self.inputProcessor=inputElement
def setStart(self, start):
self.start = start
def getStart(self):
return self.start
def getId(self):
return self.id
def getCases(self):
if self.inputProcessor == None: return []
return self.inputProcessor.cases
def hasLoopback(self):
return self.getLoopback() != None
def getLoopback(self):
if self.inputProcessor == None: return None
return self.inputProcessor.getLoopback()
def isTerminalState(self):
return len(self.getCases())==0 and not self.hasLoopback()
#################################################################
# Class Sentence
#################################################################
class Sentence:
def __init__(self, text, delay=0):
self.text=text
self.delay=delay
#################################################################
# Class Code
#################################################################
class Code:
def __init__(self, code):
self.code=code
#################################################################
# Class InputProcessor
#################################################################
class InputProcessor:
def __init__(self):
self.cases = []
self.loopback = None
def addCase(self, case):
self.cases.append(case)
def setLoopback(self, case):
self.loopback = case
def getLoopback(self):
return self.loopback
#################################################################
# Class Case
#################################################################
class Case:
def __init__(self):
self.pattern = None
self.type = None
self.function = None
self.output = None
self.nextStateId = None
def setOutput(self, output):
self.output = output
def getOutput(self):
return self.output
def setType(self, typeStr):
self.type = typeStr
def setPattern(self, pattern):
self.pattern = pattern
self.compiled = re.compile(pattern)
def setFunction(self, function):
self.function = function
def match(self, string, bot):
if self.type == "pattern":
if self.compiled.match(string) != None:
return self.nextStateId
else: return None
elif self.type == "function":
for funct in bot.functions:
if funct.getName() == self.function:
exec(funct.getSource())
exec("global resp\nresp = "+self.function+"(string)")
return resp
def setNextStateId(self, string):
self.nextStateId = string
def getNextStateId(self):
return self.nextStateId
#################################################################
# Class TextContainer
#################################################################
class TextContainer:
def __init__(self, type=None):
self.container = []
if type == "random":
self.type="random"
else:
self.type="sequence"
def addElement(self,element):
self.container.append(element)
def processAsList(self, resp=[]):
if self.type == "random":
sel = random.randint(0, len(self.container)) - 1
self.processElement(self.container[sel], resp)
else:
for elem in self.container:
self.processElement(elem, resp)
return resp
def processElement(self, elem, resp):
if isinstance(elem, TextContainer):
elem.processAsList(resp)
elif isinstance(elem, Sentence):
resp.append( Message(elem.text, elem.delay) )
elif isinstance(elem, Code):
pass
#################################################################
# Class Output
#################################################################
class Output:
def __init__(self, type):
pass
def addElement(self, element):
pass
|
[
"time.sleep",
"lxml.etree.XMLParser",
"lxml.etree.parse",
"sys.exit",
"re.compile"
] |
[((12765, 12784), 're.compile', 're.compile', (['pattern'], {}), '(pattern)\n', (12775, 12784), False, 'import time, random, re, sys\n'), ((4507, 4524), 'lxml.etree.XMLParser', 'etree.XMLParser', ([], {}), '()\n', (4522, 4524), False, 'from lxml import etree\n'), ((4539, 4568), 'lxml.etree.parse', 'etree.parse', (['filename', 'parser'], {}), '(filename, parser)\n', (4550, 4568), False, 'from lxml import etree\n'), ((2110, 2127), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (2120, 2127), False, 'import time, random, re, sys\n'), ((5516, 5527), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (5524, 5527), False, 'import time, random, re, sys\n'), ((6026, 6037), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (6034, 6037), False, 'import time, random, re, sys\n'), ((6269, 6280), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (6277, 6280), False, 'import time, random, re, sys\n'), ((7736, 7747), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (7744, 7747), False, 'import time, random, re, sys\n'), ((8019, 8030), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (8027, 8030), False, 'import time, random, re, sys\n'), ((8292, 8303), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (8300, 8303), False, 'import time, random, re, sys\n')]
|
# This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) <NAME> <<EMAIL>>
# Copyright (C) <NAME> <<EMAIL>>
# This program is published under a GPLv2 license
"""
Native Microsoft Windows sockets (L3 only)
## Notice: ICMP packets
DISCLAIMER: Please use Npcap/Winpcap to send/receive ICMP. It is going to work.
Below is some additional information, mainly implemented in a testing purpose.
When in native mode, everything goes through the Windows kernel.
This firstly requires that the Firewall is open. Be sure it allows ICMPv4/6
packets in and out.
Windows may drop packets that it finds wrong. for instance, answers to
ICMP packets with id=0 or seq=0 may be dropped. It means that sent packets
should (most of the time) be perfectly built.
A perfectly built ICMP req packet on Windows means that its id is 1, its
checksum (IP and ICMP) are correctly built, but also that its seq number is
in the "allowed range".
In fact, every time an ICMP packet is sent on Windows, a global sequence
number is increased, which is only reset at boot time. The seq number of the
received ICMP packet must be in the range [current, current + 3] to be valid,
and received by the socket. The current number is quite hard to get, thus we
provide in this module the get_actual_icmp_seq() function.
Example:
>>> conf.use_pcap = False
>>> a = conf.L3socket()
# This will (most likely) work:
>>> current = get_current_icmp_seq()
>>> a.sr(IP(dst="www.google.com", ttl=128)/ICMP(id=1, seq=current))
# This won't:
>>> a.sr(IP(dst="www.google.com", ttl=128)/ICMP())
PS: on computers where the firewall isn't open, Windows temporarily opens it
when using the `ping` util from cmd.exe. One can first call a ping on cmd,
then do custom calls through the socket using get_current_icmp_seq(). See
the tests (windows.uts) for an example.
"""
import io
import os
import socket
import subprocess
import time
from scapy.automaton import SelectableObject
from scapy.arch.common import _select_nonblock
from scapy.arch.windows.structures import GetIcmpStatistics
from scapy.compat import raw
from scapy.config import conf
from scapy.data import MTU
from scapy.error import Scapy_Exception, warning
from scapy.supersocket import SuperSocket
# Watch out for import loops (inet...)
class L3WinSocket(SuperSocket, SelectableObject):
desc = "a native Layer 3 (IPv4) raw socket under Windows"
nonblocking_socket = True
__slots__ = ["promisc", "cls", "ipv6", "proto"]
def __init__(self, iface=None, proto=socket.IPPROTO_IP,
ttl=128, ipv6=False, promisc=True, **kwargs):
from scapy.layers.inet import IP
from scapy.layers.inet6 import IPv6
for kwarg in kwargs:
warning("Dropping unsupported option: %s" % kwarg)
af = socket.AF_INET6 if ipv6 else socket.AF_INET
self.proto = proto
if ipv6:
from scapy.arch import get_if_addr6
self.host_ip6 = get_if_addr6(conf.iface) or "::1"
if proto == socket.IPPROTO_IP:
# We'll restrict ourselves to UDP, as TCP isn't bindable
# on AF_INET6
self.proto = socket.IPPROTO_UDP
# On Windows, with promisc=False, you won't get much
self.ipv6 = ipv6
self.cls = IPv6 if ipv6 else IP
self.promisc = promisc
# Notes:
# - IPPROTO_RAW only works to send packets.
# - IPPROTO_IPV6 exists in MSDN docs, but using it will result in
# no packets being received. Same for its options (IPV6_HDRINCL...)
# However, using IPPROTO_IP with AF_INET6 will still receive
# the IPv6 packets
try:
self.ins = socket.socket(af,
socket.SOCK_RAW,
self.proto)
self.outs = socket.socket(af,
socket.SOCK_RAW,
socket.IPPROTO_RAW)
except OSError as e:
if e.errno == 10013:
raise OSError("Windows native L3 Raw sockets are only "
"usable as administrator ! "
"Install Winpcap/Npcap to workaround !")
raise
self.ins.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.outs.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.ins.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 2**30)
self.outs.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 2**30)
# IOCTL Include IP headers
self.ins.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
self.outs.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
# set TTL
self.ins.setsockopt(socket.IPPROTO_IP, socket.IP_TTL, ttl)
self.outs.setsockopt(socket.IPPROTO_IP, socket.IP_TTL, ttl)
# Bind on all ports
iface = iface or conf.iface
host = iface.ip if iface.ip else socket.gethostname()
self.ins.bind((host, 0))
self.ins.setblocking(False)
# Get as much data as possible: reduce what is cropped
if ipv6:
try: # Not all Windows versions
self.ins.setsockopt(socket.IPPROTO_IPV6,
socket.IPV6_RECVTCLASS, 1)
self.ins.setsockopt(socket.IPPROTO_IPV6,
socket.IPV6_HOPLIMIT, 1)
except (OSError, socket.error):
pass
else:
try: # Not Windows XP
self.ins.setsockopt(socket.IPPROTO_IP,
socket.IP_RECVDSTADDR, 1)
except (OSError, socket.error):
pass
try: # Windows 10+ recent builds only
self.ins.setsockopt(socket.IPPROTO_IP, socket.IP_RECVTTL, 1)
except (OSError, socket.error):
pass
if promisc:
# IOCTL Receive all packets
self.ins.ioctl(socket.SIO_RCVALL, socket.RCVALL_ON)
def send(self, x):
data = raw(x)
if self.cls not in x:
raise Scapy_Exception("L3WinSocket can only send IP/IPv6 packets !"
" Install Npcap/Winpcap to send more")
dst_ip = str(x[self.cls].dst)
self.outs.sendto(data, (dst_ip, 0))
def nonblock_recv(self, x=MTU):
return self.recv()
# https://docs.microsoft.com/en-us/windows/desktop/winsock/tcp-ip-raw-sockets-2 # noqa: E501
# - For IPv4 (address family of AF_INET), an application receives the IP
# header at the front of each received datagram regardless of the
# IP_HDRINCL socket option.
# - For IPv6 (address family of AF_INET6), an application receives
# everything after the last IPv6 header in each received datagram
# regardless of the IPV6_HDRINCL socket option. The application does
# not receive any IPv6 headers using a raw socket.
def recv_raw(self, x=MTU):
try:
data, address = self.ins.recvfrom(x)
except io.BlockingIOError:
return None, None, None
from scapy.layers.inet import IP
from scapy.layers.inet6 import IPv6
if self.ipv6:
# AF_INET6 does not return the IPv6 header. Let's build it
# (host, port, flowinfo, scopeid)
host, _, flowinfo, _ = address
header = raw(IPv6(src=host,
dst=self.host_ip6,
fl=flowinfo,
nh=self.proto, # fixed for AF_INET6
plen=len(data)))
return IPv6, header + data, time.time()
else:
return IP, data, time.time()
def check_recv(self):
return True
def close(self):
if not self.closed and self.promisc:
self.ins.ioctl(socket.SIO_RCVALL, socket.RCVALL_OFF)
super(L3WinSocket, self).close()
@staticmethod
def select(sockets, remain=None):
return _select_nonblock(sockets, remain=remain)
class L3WinSocket6(L3WinSocket):
desc = "a native Layer 3 (IPv6) raw socket under Windows"
def __init__(self, **kwargs):
super(L3WinSocket6, self).__init__(ipv6=True, **kwargs)
def open_icmp_firewall(host):
"""Temporarily open the ICMP firewall. Tricks Windows into allowing
ICMP packets for a short period of time (~ 1 minute)"""
# We call ping with a timeout of 1ms: will return instantly
with open(os.devnull, 'wb') as DEVNULL:
return subprocess.Popen("ping -4 -w 1 -n 1 %s" % host,
shell=True,
stdout=DEVNULL,
stderr=DEVNULL).wait()
def get_current_icmp_seq():
"""See help(scapy.arch.windows.native) for more information.
Returns the current ICMP seq number."""
return GetIcmpStatistics()['stats']['icmpOutStats']['dwEchos']
|
[
"scapy.arch.get_if_addr6",
"subprocess.Popen",
"scapy.error.warning",
"socket.socket",
"time.time",
"scapy.arch.common._select_nonblock",
"socket.gethostname",
"scapy.error.Scapy_Exception",
"scapy.arch.windows.structures.GetIcmpStatistics",
"scapy.compat.raw"
] |
[((6127, 6133), 'scapy.compat.raw', 'raw', (['x'], {}), '(x)\n', (6130, 6133), False, 'from scapy.compat import raw\n'), ((8087, 8127), 'scapy.arch.common._select_nonblock', '_select_nonblock', (['sockets'], {'remain': 'remain'}), '(sockets, remain=remain)\n', (8103, 8127), False, 'from scapy.arch.common import _select_nonblock\n'), ((2780, 2830), 'scapy.error.warning', 'warning', (["('Dropping unsupported option: %s' % kwarg)"], {}), "('Dropping unsupported option: %s' % kwarg)\n", (2787, 2830), False, 'from scapy.error import Scapy_Exception, warning\n'), ((3744, 3790), 'socket.socket', 'socket.socket', (['af', 'socket.SOCK_RAW', 'self.proto'], {}), '(af, socket.SOCK_RAW, self.proto)\n', (3757, 3790), False, 'import socket\n'), ((3889, 3943), 'socket.socket', 'socket.socket', (['af', 'socket.SOCK_RAW', 'socket.IPPROTO_RAW'], {}), '(af, socket.SOCK_RAW, socket.IPPROTO_RAW)\n', (3902, 3943), False, 'import socket\n'), ((5022, 5042), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (5040, 5042), False, 'import socket\n'), ((6182, 6289), 'scapy.error.Scapy_Exception', 'Scapy_Exception', (['"""L3WinSocket can only send IP/IPv6 packets ! Install Npcap/Winpcap to send more"""'], {}), "(\n 'L3WinSocket can only send IP/IPv6 packets ! 
Install Npcap/Winpcap to send more'\n )\n", (6197, 6289), False, 'from scapy.error import Scapy_Exception, warning\n'), ((3008, 3032), 'scapy.arch.get_if_addr6', 'get_if_addr6', (['conf.iface'], {}), '(conf.iface)\n', (3020, 3032), False, 'from scapy.arch import get_if_addr6\n'), ((7728, 7739), 'time.time', 'time.time', ([], {}), '()\n', (7737, 7739), False, 'import time\n'), ((7783, 7794), 'time.time', 'time.time', ([], {}), '()\n', (7792, 7794), False, 'import time\n'), ((8611, 8706), 'subprocess.Popen', 'subprocess.Popen', (["('ping -4 -w 1 -n 1 %s' % host)"], {'shell': '(True)', 'stdout': 'DEVNULL', 'stderr': 'DEVNULL'}), "('ping -4 -w 1 -n 1 %s' % host, shell=True, stdout=DEVNULL,\n stderr=DEVNULL)\n", (8627, 8706), False, 'import subprocess\n'), ((8956, 8975), 'scapy.arch.windows.structures.GetIcmpStatistics', 'GetIcmpStatistics', ([], {}), '()\n', (8973, 8975), False, 'from scapy.arch.windows.structures import GetIcmpStatistics\n')]
|
"""Text Processing Helper"""
import re
class TextProcessingHelper():
"""Text Processing Helper"""
@staticmethod
def cleanhtml(raw_html):
"""Clean HTML"""
cleanr = re.compile('</.*?>')
cleantext = re.sub(cleanr, '>', raw_html)
cleanr = re.compile('<.*?>')
cleantext = re.sub(cleanr, '<', cleantext)
return cleantext
|
[
"re.sub",
"re.compile"
] |
[((195, 215), 're.compile', 're.compile', (['"""</.*?>"""'], {}), "('</.*?>')\n", (205, 215), False, 'import re\n'), ((236, 265), 're.sub', 're.sub', (['cleanr', '""">"""', 'raw_html'], {}), "(cleanr, '>', raw_html)\n", (242, 265), False, 'import re\n'), ((283, 302), 're.compile', 're.compile', (['"""<.*?>"""'], {}), "('<.*?>')\n", (293, 302), False, 'import re\n'), ((323, 353), 're.sub', 're.sub', (['cleanr', '"""<"""', 'cleantext'], {}), "(cleanr, '<', cleantext)\n", (329, 353), False, 'import re\n')]
|
from flask import Flask
import os
from flask import request
from flask import render_template
import requests
from collections import OrderedDict
from flask_wtf import Form
from wtforms import StringField
import numpy as np
from manage_db import Status, Stage, Subject, Committee, Deputy, Period
token = "<KEY>"
app_token = "appc3d09adec00286a901b919e73842807f3c7e81c4"
app = Flask(__name__)
path = os.path.dirname(os.path.abspath(__file__))
db_path = os.path.join(path, 'duma.db')
app.config['SECRET_KEY'] = '123457'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + db_path
class SearchForm(Form):
name = StringField(u"Название законопроекта")
@app.route('/periods')
def periods():
all_period = Period.query.all()
all_stage = Stage.query.all()
all_subject = Subject.query.all()
path = os.path.dirname(os.path.abspath(__file__))
info_subj = np.load(os.path.join(path, 'info_period_subject.npy'))
info_stage = np.load(os.path.join(path, 'info_period_stage.npy'))
stage_dict = {period: {stage: 0 for stage in all_stage} for period in all_period}
subj_dict = {period: {subj: 0 for subj in all_subject} for period in all_period}
for i, period in enumerate(all_period):
for j, subj in enumerate(all_subject):
subj_dict[period][subj] = info_subj[i][j]
for k, stage in enumerate(all_stage):
stage_dict[period][stage] = info_stage[i][k]
return render_template("convocation.html", subj_dict=subj_dict, dict=stage_dict, all_periods=all_period,
all_stage=all_stage, all_subject=all_subject)
@app.route('/committees', methods=['GET', 'POST'])
def committees():
all_com = Committee.query.all()
all_dep = Deputy.query.all()
names = set()
for dep in all_dep:
names.add(dep.faction_name)
names = sorted(names)
dict = {com: {name: [] for name in names} for com in all_com}
for dep in all_dep:
name = dep.faction_name
all_dep_com = dep.committees
for com in all_dep_com:
dict[com][name].append(dep.name + ' ' + dep.family)
for com in all_com:
count = 0
for name in names:
count += len(dict[com][name])
if count == 0:
dict.pop(com)
for com in all_com:
try:
dict[com] = OrderedDict(sorted(dict[com].items(), key=lambda t: t[0]))
except:
continue
dict = OrderedDict(sorted(dict.items(), key=lambda t: t[0].name))
return render_template("komitet.html", all_com=all_com, names=names, dict=dict)
def get_subject_names_lists(subject):
gd = []
sf = []
subject_names = {'Депутаты ГД': [], 'Члены СФ': [], 'Органы': []}
for dep in subject['deputies']:
if dep['position'] == 'Депутат ГД':
if len(subject_names['Депутаты ГД']) != 0:
subject_names['Депутаты ГД'].append(', ')
subject_names['Депутаты ГД'].append(dep['name'])
elif dep['position'] == 'Член СФ':
if len(subject_names['Члены СФ']) != 0:
subject_names['Члены СФ'].append(', ')
subject_names['Члены СФ'].append(dep['name'])
for department in subject['departments']:
name = department['name']
subject = Subject.query.filter(Subject.name == name).first()
if len(subject_names['Органы']) != 0:
subject_names['Органы'].append(', ')
if subject is None:
subject_names['Органы'].append('Региональные органы')
else:
subject_names['Органы'].append(subject.name)
return subject_names
@app.route('/', methods=['GET', 'POST'])
def bills():
form = SearchForm()
url = "http://api.duma.gov.ru/api/" + token + "/search.json"
r = requests.get(url, {'app_token': app_token})
data = r.json()
bills_count = data['count']
bills = []
all_status = Status.query.all()
status_dict = {}
for st in all_status:
status_dict[st.id] = st.name
all_stage = Stage.query.all()
stage_dict = {}
for st in all_stage:
stage_dict[st.id] = st.name
special_subjects = {'6231101': 'Региональные органы'}
all_subject = Subject.query.all()
subject_dict = {}
for s in all_subject:
if str(s.id) not in special_subjects.keys():
subject_dict[s.id] = s.name
return render_template("laws.html", bills=bills, bills_count=bills_count,
status_dict=status_dict, subject_dict=subject_dict,
stage_dict=stage_dict, form=form)
def get_law_param_dict(law):
if law['lastEvent']['document'] is None or law['lastEvent']['document'] == 'None':
document_name = ''
document_type = ''
else:
document_type = law['lastEvent']['document']['type']
document_name = law['lastEvent']['document']['name']
law_dict = {'name': law['name'], 'stage': law['lastEvent']['stage'],
'number': law['number'], 'url': law['url'],
'date': law['lastEvent']['date'],
'indate': law['introductionDate'],
'subject': get_subject_names_lists(law['subject']),
'solution': law['lastEvent']['solution'],
'document_name': document_name,
'document_type': document_type}
for key in law_dict.keys():
if law_dict[key] == 'None' or law_dict[key] is None:
law_dict[key] = ''
if law_dict['date'] != '':
tmp = law_dict['date'].split('-')
law_dict['date'] = tmp[2] + '-' + tmp[1] + '-' + tmp[0]
if law_dict['indate'] != '':
tmp = law_dict['indate'].split('-')
law_dict['indate'] = tmp[2] + '-' + tmp[1] + '-' + tmp[0]
return law_dict
@app.route('/search', methods=['GET', 'POST'])
def search():
url = "http://api.duma.gov.ru/api/" + token + "/search.json"
name = request.args.get('name')
stage = request.args.get('stage')
subject = request.args.get('subject')
special_subjects = {'6231101': 'Региональные органы', '6231102': 'Депутат ГД', '6231103': 'Член СФ'}
page = 1
laws = []
if subject == '6231102' or subject == '6231103':
if stage == '':
while True:
query_dict = {'app_token': app_token, 'page': page, 'sort': 'date', 'name': name}
r = requests.get(url, query_dict)
data = r.json()
if 'laws' in data:
# print('page=', page, 'len=', len(data['laws']))
page += 1
for law in data['laws']:
flag = False
for dep in law['subject']['deputies']:
if dep['position'] == special_subjects[subject]:
flag = True
break
if flag:
law_dict = get_law_param_dict(law)
laws.append(law_dict)
else:
break
else:
while True:
query_dict = {'app_token': app_token, 'page': page, 'sort': 'date', 'name': name,
'stage': stage, 'search_mode': '2'}
r = requests.get(url, query_dict)
data = r.json()
if 'laws' in data:
print('page=', page, 'len=', len(data['laws']))
page += 1
for law in data['laws']:
flag = False
for dep in law['subject']['deputies']:
if dep['position'] == special_subjects[subject]:
flag = True
break
if flag:
law_dict = get_law_param_dict(law)
laws.append(law_dict)
else:
break
return render_template("section_for_load.html", laws=laws)
page = 1
laws = []
if subject != '' and stage != '':
while True:
query_dict = {'app_token': app_token, 'page': page, 'sort': 'date', 'name': name,
'stage': stage, 'federal_subject': subject, 'search_mode': '2'}
r = requests.get(url, query_dict)
data = r.json()
if 'laws' in data:
print('page=', page, 'len=', len(data['laws']))
page += 1
for law in data['laws']:
law_dict = get_law_param_dict(law)
laws.append(law_dict)
else:
break
if subject != '' and stage == '':
while True:
query_dict = {'app_token': app_token, 'page': page, 'sort': 'date', 'name': name,
'federal_subject': subject}
r = requests.get(url, query_dict)
data = r.json()
if 'laws' in data:
print('page=', page, 'len=', len(data['laws']))
page += 1
for law in data['laws']:
law_dict = get_law_param_dict(law)
laws.append(law_dict)
else:
break
if subject == '' and stage != '':
while True:
query_dict = {'app_token': app_token, 'page': page, 'sort': 'date', 'name': name,
'stage': stage, 'search_mode': '2'}
r = requests.get(url, query_dict)
data = r.json()
if 'laws' in data:
print('page=', page, 'len=', len(data['laws']))
page += 1
for law in data['laws']:
law_dict = get_law_param_dict(law)
laws.append(law_dict)
else:
break
if subject == '' and stage == '':
while True:
query_dict = {'app_token': app_token, 'page': page, 'sort': 'date', 'name': name}
r = requests.get(url, query_dict)
data = r.json()
if 'laws' in data:
print('page=', page, 'len=', len(data['laws']))
page += 1
for law in data['laws']:
law_dict = get_law_param_dict(law)
laws.append(law_dict)
else:
break
return render_template("section_for_load.html", laws=laws)
@app.route('/diagram', methods=['GET'])
def diagram():
    """Render the membership diagram for one committee.

    Expects a ``komitet_id`` query parameter shaped like ``<prefix>-<id>``;
    only the numeric part after the dash is used.  Builds, per faction, a
    member count and a rectangular table of member names padded with
    blanks so all columns have equal height.
    """
    # Fix: locals previously shadowed the builtins `id` and `dict`.
    committee_id = int(request.args.get('komitet_id').split('-')[1])

    # Collect every known faction name so factions without members in this
    # committee still get a (zero) column.
    faction_names = {dep.faction_name for dep in Deputy.query.all()}

    com = Committee.query.filter(Committee.id == committee_id).first()
    members = com.deputies

    # Per-faction member count and member list for this committee.
    faction_counts = {name: 0 for name in faction_names}
    faction_members = {name: [] for name in faction_names}
    for dep in members:
        faction_counts[dep.faction_name] += 1
        faction_members[dep.faction_name].append(dep)

    # Stable alphabetical column order for the template.
    faction_counts = OrderedDict(sorted(faction_counts.items(), key=lambda t: t[0]))

    # Build the table row by row: row i holds the i-th member of each
    # faction (as "name family"), or ' ' when the faction has fewer than
    # i+1 members.  The loop always emits one trailing all-blank row,
    # which the [:-1] slice below drops.
    rows_with_data = []
    row_has_data = True
    i = 0
    while row_has_data:
        row_has_data = False
        cur_row = []
        for faction_name in faction_counts:
            if faction_counts[faction_name] > i:
                member = faction_members[faction_name][i]
                cur_row.append(member.name + ' ' + member.family)
                row_has_data = True
            else:
                cur_row.append(' ')
        rows_with_data.append(cur_row)
        i += 1

    return render_template("komitet_diagramms.html", rows_with_data=rows_with_data[:-1],
                           dict=faction_counts, com=com)
def get_subject_index(subject):
    """Return the position of *subject* within ``Subject.query.all()``.

    Falls through (returning None) when the subject is not found.
    """
    position = 0
    for candidate in Subject.query.all():
        if candidate == subject:
            return position
        position += 1
@app.route('/diagram_conv', methods=['GET'])
def diagram_conv():
    """Render per-stage law counts for one federal subject in one period.

    Query parameters:
        period_id -- 1-based convocation period number; periods 1-5 use the
                     historical stage names, later periods the current ones.
        name      -- name of the federal subject.

    Fix: the result mapping previously shadowed the builtin ``dict``, and
    the two branches duplicated an identical loop body.
    """
    period_id = request.args.get('period_id')
    subject_name = request.args.get('name')
    subject = Subject.query.filter(Subject.name == subject_name).first()
    subject_ind = get_subject_index(subject)

    # info.npy holds precomputed counts indexed [period][subject][stage].
    path = os.path.dirname(os.path.abspath(__file__))
    info = np.load(os.path.join(path, 'info.npy'))
    all_stage = Stage.query.all()  # NOTE(review): result unused -- kept to preserve behavior; candidate for removal
    stage_prev_periods = [
        'Законопроекты, внесенные в Государственную Думу, работа над которыми не завершена',
        'Законопроекты, отклоненные Государственной Думой на стадии предварительного рассмотрения',
        'Законопроекты, прошедшие первое чтение и отклоненные Государственной Думой',
        'Законопроекты, прошедшие второе чтение и отклоненные Государственной Думой',
        'Законопроекты, прошедшие третье чтение и отклоненные Государственной Думой',
        'Законопроекты, отклоненные Советом Федерации',
        'Законы, снятые с рассмотрения Государственной Думы, после того, как они были отклонены Советом Федерации',
        'Окончательное принятие закона',
        'Законы, снятые с рассмотрения Государственной Думы, после того, как они были отклонены Президентом Российской Федерации']
    stage_cur_period = [
        'Законопроекты, внесенные в Государственную Думу, работа над которыми не завершена',
        'Предварительное рассмотрение законопроекта, внесенного в Государственную Думу',
        'Рассмотрение законопроекта в первом чтении',
        'Рассмотрение законопроекта во втором чтении',
        'Рассмотрение законопроекта в третьем чтении',
        'Рассмотрение закона в Совете Федерации',
        'Законы, снятые с рассмотрения Государственной Думы, после того, как они были отклонены Советом Федерации',
        'Окончательное принятие закона',
        'Законы, снятые с рассмотрения Государственной Думы, после того, как они были отклонены Президентом Российской Федерации']

    period_num = int(period_id)
    # Periods 1-5 predate the current stage naming scheme.
    stage_names = stage_prev_periods if 1 <= period_num <= 5 else stage_cur_period
    counts = {}
    for i, name in enumerate(stage_names):
        counts[name] = info[period_num - 1][subject_ind][i]
    return render_template('conv_diagramms.html', dict=counts, period_id=period_id, subject_name=subject_name)
|
[
"os.path.abspath",
"manage_db.Committee.query.all",
"flask.request.args.get",
"flask.Flask",
"manage_db.Subject.query.all",
"manage_db.Subject.query.filter",
"manage_db.Stage.query.all",
"flask.render_template",
"manage_db.Deputy.query.all",
"requests.get",
"manage_db.Status.query.all",
"wtforms.StringField",
"manage_db.Committee.query.filter",
"os.path.join",
"manage_db.Period.query.all"
] |
[((378, 393), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (383, 393), False, 'from flask import Flask\n'), ((455, 484), 'os.path.join', 'os.path.join', (['path', '"""duma.db"""'], {}), "(path, 'duma.db')\n", (467, 484), False, 'import os\n'), ((418, 443), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (433, 443), False, 'import os\n'), ((622, 660), 'wtforms.StringField', 'StringField', (['u"""Название законопроекта"""'], {}), "(u'Название законопроекта')\n", (633, 660), False, 'from wtforms import StringField\n'), ((718, 736), 'manage_db.Period.query.all', 'Period.query.all', ([], {}), '()\n', (734, 736), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((753, 770), 'manage_db.Stage.query.all', 'Stage.query.all', ([], {}), '()\n', (768, 770), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((789, 808), 'manage_db.Subject.query.all', 'Subject.query.all', ([], {}), '()\n', (806, 808), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((1439, 1586), 'flask.render_template', 'render_template', (['"""convocation.html"""'], {'subj_dict': 'subj_dict', 'dict': 'stage_dict', 'all_periods': 'all_period', 'all_stage': 'all_stage', 'all_subject': 'all_subject'}), "('convocation.html', subj_dict=subj_dict, dict=stage_dict,\n all_periods=all_period, all_stage=all_stage, all_subject=all_subject)\n", (1454, 1586), False, 'from flask import render_template\n'), ((1695, 1716), 'manage_db.Committee.query.all', 'Committee.query.all', ([], {}), '()\n', (1714, 1716), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((1731, 1749), 'manage_db.Deputy.query.all', 'Deputy.query.all', ([], {}), '()\n', (1747, 1749), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((2511, 2583), 'flask.render_template', 'render_template', (['"""komitet.html"""'], {'all_com': 'all_com', 
'names': 'names', 'dict': 'dict'}), "('komitet.html', all_com=all_com, names=names, dict=dict)\n", (2526, 2583), False, 'from flask import render_template\n'), ((3773, 3816), 'requests.get', 'requests.get', (['url', "{'app_token': app_token}"], {}), "(url, {'app_token': app_token})\n", (3785, 3816), False, 'import requests\n'), ((3904, 3922), 'manage_db.Status.query.all', 'Status.query.all', ([], {}), '()\n', (3920, 3922), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((4024, 4041), 'manage_db.Stage.query.all', 'Stage.query.all', ([], {}), '()\n', (4039, 4041), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((4200, 4219), 'manage_db.Subject.query.all', 'Subject.query.all', ([], {}), '()\n', (4217, 4219), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((4374, 4535), 'flask.render_template', 'render_template', (['"""laws.html"""'], {'bills': 'bills', 'bills_count': 'bills_count', 'status_dict': 'status_dict', 'subject_dict': 'subject_dict', 'stage_dict': 'stage_dict', 'form': 'form'}), "('laws.html', bills=bills, bills_count=bills_count,\n status_dict=status_dict, subject_dict=subject_dict, stage_dict=\n stage_dict, form=form)\n", (4389, 4535), False, 'from flask import render_template\n'), ((5917, 5941), 'flask.request.args.get', 'request.args.get', (['"""name"""'], {}), "('name')\n", (5933, 5941), False, 'from flask import request\n'), ((5954, 5979), 'flask.request.args.get', 'request.args.get', (['"""stage"""'], {}), "('stage')\n", (5970, 5979), False, 'from flask import request\n'), ((5994, 6021), 'flask.request.args.get', 'request.args.get', (['"""subject"""'], {}), "('subject')\n", (6010, 6021), False, 'from flask import request\n'), ((10403, 10454), 'flask.render_template', 'render_template', (['"""section_for_load.html"""'], {'laws': 'laws'}), "('section_for_load.html', laws=laws)\n", (10418, 10454), False, 'from flask import render_template\n'), 
((10586, 10604), 'manage_db.Deputy.query.all', 'Deputy.query.all', ([], {}), '()\n', (10602, 10604), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((11504, 11606), 'flask.render_template', 'render_template', (['"""komitet_diagramms.html"""'], {'rows_with_data': 'rows_with_data[:-1]', 'dict': 'dict', 'com': 'com'}), "('komitet_diagramms.html', rows_with_data=rows_with_data[:-1\n ], dict=dict, com=com)\n", (11519, 11606), False, 'from flask import render_template\n'), ((11681, 11700), 'manage_db.Subject.query.all', 'Subject.query.all', ([], {}), '()\n', (11698, 11700), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((11885, 11914), 'flask.request.args.get', 'request.args.get', (['"""period_id"""'], {}), "('period_id')\n", (11901, 11914), False, 'from flask import request\n'), ((11935, 11959), 'flask.request.args.get', 'request.args.get', (['"""name"""'], {}), "('name')\n", (11951, 11959), False, 'from flask import request\n'), ((12200, 12217), 'manage_db.Stage.query.all', 'Stage.query.all', ([], {}), '()\n', (12215, 12217), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((14312, 14413), 'flask.render_template', 'render_template', (['"""conv_diagramms.html"""'], {'dict': 'dict', 'period_id': 'period_id', 'subject_name': 'subject_name'}), "('conv_diagramms.html', dict=dict, period_id=period_id,\n subject_name=subject_name)\n", (14327, 14413), False, 'from flask import render_template\n'), ((837, 862), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (852, 862), False, 'import os\n'), ((889, 934), 'os.path.join', 'os.path.join', (['path', '"""info_period_subject.npy"""'], {}), "(path, 'info_period_subject.npy')\n", (901, 934), False, 'import os\n'), ((961, 1004), 'os.path.join', 'os.path.join', (['path', '"""info_period_stage.npy"""'], {}), "(path, 'info_period_stage.npy')\n", (973, 1004), False, 'import os\n'), ((8001, 8052), 
'flask.render_template', 'render_template', (['"""section_for_load.html"""'], {'laws': 'laws'}), "('section_for_load.html', laws=laws)\n", (8016, 8052), False, 'from flask import render_template\n'), ((12104, 12129), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (12119, 12129), False, 'import os\n'), ((12151, 12181), 'os.path.join', 'os.path.join', (['path', '"""info.npy"""'], {}), "(path, 'info.npy')\n", (12163, 12181), False, 'import os\n'), ((8340, 8369), 'requests.get', 'requests.get', (['url', 'query_dict'], {}), '(url, query_dict)\n', (8352, 8369), False, 'import requests\n'), ((8920, 8949), 'requests.get', 'requests.get', (['url', 'query_dict'], {}), '(url, query_dict)\n', (8932, 8949), False, 'import requests\n'), ((9508, 9537), 'requests.get', 'requests.get', (['url', 'query_dict'], {}), '(url, query_dict)\n', (9520, 9537), False, 'import requests\n'), ((10034, 10063), 'requests.get', 'requests.get', (['url', 'query_dict'], {}), '(url, query_dict)\n', (10046, 10063), False, 'import requests\n'), ((10710, 10752), 'manage_db.Committee.query.filter', 'Committee.query.filter', (['(Committee.id == id)'], {}), '(Committee.id == id)\n', (10732, 10752), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((11974, 12024), 'manage_db.Subject.query.filter', 'Subject.query.filter', (['(Subject.name == subject_name)'], {}), '(Subject.name == subject_name)\n', (11994, 12024), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((3282, 3324), 'manage_db.Subject.query.filter', 'Subject.query.filter', (['(Subject.name == name)'], {}), '(Subject.name == name)\n', (3302, 3324), False, 'from manage_db import Status, Stage, Subject, Committee, Deputy, Period\n'), ((6376, 6405), 'requests.get', 'requests.get', (['url', 'query_dict'], {}), '(url, query_dict)\n', (6388, 6405), False, 'import requests\n'), ((7293, 7322), 'requests.get', 'requests.get', (['url', 'query_dict'], {}), '(url, 
query_dict)\n', (7305, 7322), False, 'import requests\n'), ((10525, 10555), 'flask.request.args.get', 'request.args.get', (['"""komitet_id"""'], {}), "('komitet_id')\n", (10541, 10555), False, 'from flask import request\n')]
|
from flask_wtf import Form
from wtforms import StringField, IntegerField, SelectField, TextAreaField
from flask.ext.wtf.file import FileField, FileAllowed
from wtforms.validators import DataRequired, Length, Required
from wtforms_html5 import DateField
from wtforms_html5 import DateRange
from datetime import date
class RecordForm(Form):
    """
    A Form for uploading a Record.

    Fix: the date fields previously used ``default=date.today()``, which
    evaluates once at import time and freezes "today" for the life of the
    process; passing the callable ``date.today`` makes WTForms evaluate it
    each time the form is instantiated.
    """
    patient_id = SelectField('Patient ID', validators=[DataRequired()], coerce=int)
    doctor_id = SelectField('Doctor ID', validators=[DataRequired()], coerce=int)
    radiologist_id = SelectField('Radiologist ID', validators=[DataRequired()], coerce=int)
    test_type = StringField('Test Type', validators=[DataRequired(), Length(max=24)])
    # Accepted date window is hard-coded to [1900-01-01, 2016-01-01).
    prescribing_date = DateField('Prescribing Date', default=date.today,
                                  validators=[DateRange(date(1900, 1, 1), date(2016, 1, 1))])
    test_date = DateField('Test Date', default=date.today,
                          validators=[DateRange(date(1900, 1, 1), date(2016, 1, 1))])
    diagnosis = StringField('Diagnosis', validators=[DataRequired(), Length(max=128)])
    description = TextAreaField('Description', validators=[DataRequired(), Length(max=1024)])
    images = FileField(u'Image Files', validators=[FileAllowed(['jpg'], 'We only support jpg!')])
|
[
"wtforms.validators.Length",
"datetime.date",
"datetime.date.today",
"flask.ext.wtf.file.FileAllowed",
"wtforms.validators.DataRequired"
] |
[((796, 808), 'datetime.date.today', 'date.today', ([], {}), '()\n', (806, 808), False, 'from datetime import date\n'), ((950, 962), 'datetime.date.today', 'date.today', ([], {}), '()\n', (960, 962), False, 'from datetime import date\n'), ((446, 460), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (458, 460), False, 'from wtforms.validators import DataRequired, Length, Required\n'), ((528, 542), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (540, 542), False, 'from wtforms.validators import DataRequired, Length, Required\n'), ((620, 634), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (632, 634), False, 'from wtforms.validators import DataRequired, Length, Required\n'), ((702, 716), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (714, 716), False, 'from wtforms.validators import DataRequired, Length, Required\n'), ((718, 732), 'wtforms.validators.Length', 'Length', ([], {'max': '(24)'}), '(max=24)\n', (724, 732), False, 'from wtforms.validators import DataRequired, Length, Required\n'), ((1103, 1117), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1115, 1117), False, 'from wtforms.validators import DataRequired, Length, Required\n'), ((1119, 1134), 'wtforms.validators.Length', 'Length', ([], {'max': '(128)'}), '(max=128)\n', (1125, 1134), False, 'from wtforms.validators import DataRequired, Length, Required\n'), ((1196, 1210), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1208, 1210), False, 'from wtforms.validators import DataRequired, Length, Required\n'), ((1212, 1228), 'wtforms.validators.Length', 'Length', ([], {'max': '(1024)'}), '(max=1024)\n', (1218, 1228), False, 'from wtforms.validators import DataRequired, Length, Required\n'), ((1282, 1326), 'flask.ext.wtf.file.FileAllowed', 'FileAllowed', (["['jpg']", '"""We only support jpg!"""'], {}), "(['jpg'], 'We only support jpg!')\n", (1293, 1326), False, 'from 
flask.ext.wtf.file import FileField, FileAllowed\n'), ((865, 881), 'datetime.date', 'date', (['(1900)', '(1)', '(1)'], {}), '(1900, 1, 1)\n', (869, 881), False, 'from datetime import date\n'), ((883, 899), 'datetime.date', 'date', (['(2016)', '(1)', '(1)'], {}), '(2016, 1, 1)\n', (887, 899), False, 'from datetime import date\n'), ((1012, 1028), 'datetime.date', 'date', (['(1900)', '(1)', '(1)'], {}), '(1900, 1, 1)\n', (1016, 1028), False, 'from datetime import date\n'), ((1030, 1046), 'datetime.date', 'date', (['(2016)', '(1)', '(1)'], {}), '(2016, 1, 1)\n', (1034, 1046), False, 'from datetime import date\n')]
|
# python3
import JobsMapResultsFilesToContainerObjs as JRS
from nltk.metrics.agreement import AnnotationTask
from collections import OrderedDict
import re
import importlib
importlib.reload(JRS)
import pandas as pd
from math import ceil
import sys
## This function calculates the agreeability of share patterns across different albums
## i.e. to what degree do albums agree on the same images.
def getReliabilityMatImg(gidAlbumMapFl):
    """Build a (coder, item, label) reliability matrix over albums.

    Only images with a strong consensus (share rate >= 80% or <= 20%) are
    kept; the label is 1 for "shared" and 0 for "not shared".
    """
    result_dict = JRS.createResultDict(1, 100)
    img_album_dict = JRS.genImgAlbumDictFromMap(gidAlbumMapFl)
    share_counts, _ = JRS.imgShareCountsPerAlbum(img_album_dict, result_dict)
    reliability_matrix = []
    for row in share_counts:
        share_rate = row[4]
        if share_rate >= 80.0 or share_rate <= 20.0:
            reliability_matrix.append((row[0], row[1], 1 if share_rate >= 80 else 0))
    return reliability_matrix
## This function returns reliability matrix for calculating if turkers agree on the way the same image is shared.
## i.e. to what degree do turkers agree on the same images
def getReliabilityMatTurker():
    """Build a (worker, image gid, share/no-share) reliability matrix.

    Answer keys look like ``Answer.<gid>``; entries whose response string
    is not of the form ``...|share`` / ``...|<other>`` are treated as
    "no response captured" and dropped from the result.
    """
    print("Constructing reliability matrix")
    resultDict = JRS.createResultDict(1, 100, workerData=True)
    reliabilityMatrx = []
    for album, responses in resultDict.items():
        workers = responses['workerid']
        for key in responses.keys():
            # Only per-image answer columns: 'Answer.<numeric gid>'.
            if 'Answer' not in key or not key.split(".")[1].isdigit():
                continue
            gid = key.split(".")[1]
            answers = responses[key]
            for i in range(len(workers)):
                parts = answers[i].split("|")
                if len(parts) != 2:
                    continue  # no response captured for this worker
                label = 1 if parts[1] == 'share' else 0
                reliabilityMatrx.append((workers[i], gid, label))
    print("Constructing reliability matrix complete")
    return reliabilityMatrx
def __main__(argv):
    """Compute inter-rater agreement scores (alpha, S, pi, kappa).

    ``argv[1]`` selects the reliability matrix: 'img' for image-level
    agreement, anything else for turker-level agreement.
    """
    # Guard clause: sys.exit raises, so the rest only runs with a valid arg.
    if len(argv) != 2:
        print("Specify cmd arg")
        sys.exit(2)
    if argv[1] == 'img':
        reliability_mat = getReliabilityMatImg("../data/imageGID_job_map_expt2_corrected.csv")
    else:
        reliability_mat = getReliabilityMatTurker()
    task = AnnotationTask(data=reliability_mat)
    print("Calculating the agreement scores")
    print("Alpha = %f" % task.alpha())
    print("S = %f" % task.S())
    print("Pi = %f" % task.pi())
    print("kappa = %f" % task.kappa())

if __name__ == "__main__":
    __main__(sys.argv)
|
[
"JobsMapResultsFilesToContainerObjs.imgShareCountsPerAlbum",
"nltk.metrics.agreement.AnnotationTask",
"importlib.reload",
"JobsMapResultsFilesToContainerObjs.genImgAlbumDictFromMap",
"JobsMapResultsFilesToContainerObjs.createResultDict",
"sys.exit"
] |
[((173, 194), 'importlib.reload', 'importlib.reload', (['JRS'], {}), '(JRS)\n', (189, 194), False, 'import importlib\n'), ((447, 475), 'JobsMapResultsFilesToContainerObjs.createResultDict', 'JRS.createResultDict', (['(1)', '(100)'], {}), '(1, 100)\n', (467, 475), True, 'import JobsMapResultsFilesToContainerObjs as JRS\n'), ((491, 532), 'JobsMapResultsFilesToContainerObjs.genImgAlbumDictFromMap', 'JRS.genImgAlbumDictFromMap', (['gidAlbumMapFl'], {}), '(gidAlbumMapFl)\n', (517, 532), True, 'import JobsMapResultsFilesToContainerObjs as JRS\n'), ((555, 604), 'JobsMapResultsFilesToContainerObjs.imgShareCountsPerAlbum', 'JRS.imgShareCountsPerAlbum', (['imgAlbumDict', 'results'], {}), '(imgAlbumDict, results)\n', (581, 604), True, 'import JobsMapResultsFilesToContainerObjs as JRS\n'), ((1075, 1120), 'JobsMapResultsFilesToContainerObjs.createResultDict', 'JRS.createResultDict', (['(1)', '(100)'], {'workerData': '(True)'}), '(1, 100, workerData=True)\n', (1095, 1120), True, 'import JobsMapResultsFilesToContainerObjs as JRS\n'), ((2108, 2119), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (2116, 2119), False, 'import sys\n'), ((2317, 2353), 'nltk.metrics.agreement.AnnotationTask', 'AnnotationTask', ([], {'data': 'reliability_mat'}), '(data=reliability_mat)\n', (2331, 2353), False, 'from nltk.metrics.agreement import AnnotationTask\n')]
|
from PIL import Image
def to16(r, g, b):
    """Reduce an 8-bit RGB triple to 4 bits per channel, as hex text.

    Each channel (expected range 0..255) keeps only its top 4 bits
    (``value >> 4``) and is emitted as one lowercase hex digit followed
    by ','.  The string starts with a newline so each pixel lands on its
    own line in the .coe output, e.g. ``to16(255, 0, 16) -> '\\nf,0,1,'``.

    Replaces the original bin()->zero-pad->int(,2)->hex() round trip with
    the equivalent shift, producing identical output for 0..255.
    """
    out = '\n'
    for channel in (r, g, b):
        # channel >> 4 keeps the 4 most significant bits (0..15), which
        # formats as exactly one hex digit.
        out += format(channel >> 4, 'x') + ','
    return out
def img2coe(name):
    """Convert an image file to a Xilinx .coe memory initialization file.

    The output uses radix 16; every pixel is reduced to 4 bits per channel
    via to16().  The output file shares the image's base name with a .coe
    extension.

    Fix: the output file (and the PIL image handle) were never closed;
    both are now managed with ``with`` blocks.
    """
    with Image.open(name) as src:
        img = src.convert("RGB")  # normalize to RGB regardless of source mode
    width, height = img.size
    output_file = name.split('.')[0] + ".coe"
    with open(output_file, "w") as f:
        # radix 16 here; .coe also allows 2 (binary) and 10 (decimal)
        f.write("memory_initialization_radix=16;\nmemory_initialization_vector=")
        for x in range(0, height):
            for y in range(0, width):
                r, g, b = img.getpixel((y, x))
                f.write(to16(r, g, b))
        f.seek(f.tell() - 1, 0)  # step back over the final trailing ','
        f.truncate()             # delete it
        f.write(";")             # terminate the vector
if __name__ == "__main__":
name = input("Input image name:")
img2coe(name)
# else:
# print("Insert at least one image path\nFormat: python img2coe.py <path>")
|
[
"PIL.Image.open"
] |
[((435, 451), 'PIL.Image.open', 'Image.open', (['name'], {}), '(name)\n', (445, 451), False, 'from PIL import Image\n')]
|
from apps.processing.ala.models import SamplingFeature, Observation
from django.contrib.gis.geos import GEOSGeometry
from apps.common.models import Process
from psycopg2.extras import DateTimeTZRange
from datetime import timedelta, datetime
from django.conf import settings
from apps.common.models import Property, Topic, TimeSlots
from apps.ad.anomaly_detection import get_timeseries
from rest_framework import status
from rest_framework.test import APITestCase
import dateutil.parser
import pytz
from apps.mc.tasks import import_time_slots_from_config
utc = pytz.UTC
# --- URL fragments used to compose the API endpoints under test -------------
NAME_ID = 'name_id=air_temperature'
DATE_FROM = '&phenomenon_date_from=2018-06-15'
DATE_TO = '&phenomenon_date_to=2018-06-15'
TIME_SLOT_10H = '&time_slots=10_hour_slot'
TIME_SLOT_24H = '&time_slots=24_hour_slot'
TOPIC_NAME = 'drought'
TOPIC_NAME_NOT_EXISTS = 'xxxx'
URL_TIMESERIES = '/api/v2/timeseries/?topic=' + TOPIC_NAME + DATE_FROM + DATE_TO
URL_TIMESERIES_TOPIC_NOT_EXISTS = '/api/v2/timeseries/?topic=' + TOPIC_NAME_NOT_EXISTS + DATE_FROM + DATE_TO
URL_TIMESERIES_TIME_SLOTS_NOT_EXISTS = '/api/v2/timeseries/?topic=' + TOPIC_NAME + DATE_FROM + DATE_TO + TIME_SLOT_10H
URL_TIMESERIES_TIME_SLOTS_24H = '/api/v2/timeseries/?topic=' + TOPIC_NAME + DATE_FROM + '&phenomenon_date_to=2018-06-17' + TIME_SLOT_24H
URL_TIMESERIES_30 = '/api/v2/timeseries/?topic=' + TOPIC_NAME + '&phenomenon_date_from=2018-06-16' + '&phenomenon_date_to=2018-06-16' + '&time_slots=30_days_daily'
# Deliberately malformed date fragments for error-handling tests.
DATE_FROM_ERROR = '&phenomenon_date_from=00000-06-15'
DATE_TO_ERROR = '&phenomenon_date_to=XXX'
# NOTE(review): these append a second date_from/date_to parameter on top of
# the ones already in URL_TIMESERIES -- presumably intentional for the error
# tests; confirm the API takes the last occurrence.
URL_TIMESERIES_WRONG_DATE_FROM = URL_TIMESERIES + DATE_FROM_ERROR + DATE_TO
URL_TIMESERIES_WRONG_DATE_TO = URL_TIMESERIES + DATE_FROM + DATE_TO_ERROR
URL_TIMESERIES_INTERVAL_NO_VALUES = '/api/v2/timeseries/?topic=' + TOPIC_NAME + '&phenomenon_date_from=2000-06-15' + '&phenomenon_date_to=2000-06-15'
# Bounding boxes (EPSG:3857): one covering the stations, one empty, one with
# min x > max x, and one missing a coordinate.
URL_TIMESERIES_BBOX = URL_TIMESERIES + '&bbox=1826997.8501,6306589.8927,1856565.7293,6521189.3651'
URL_TIMESERIES_BBOX_NO_DATA = URL_TIMESERIES + '&bbox=1826997.8501,6306589.8927,1836565.7293,6521189.3651'
URL_TIMESERIES_BBOX_WRONG_VALUES = URL_TIMESERIES + '&bbox=1856997.8501,6306589.8927,1836565.7293,6521189.3651'
URL_TIMESERIES_BBOX_MISSING_VALUES = URL_TIMESERIES + '&bbox=1856997.8501,6306589.8927,1836565.7293'
URL_PROPERTIES = '/api/v2/properties/?topic=' + TOPIC_NAME
URL_TOPICS = '/api/v2/topics/'
# Station fixtures keyed by id_by_provider.  NOTE(review): both stations
# share the same coordinates -- presumably intentional for the bbox tests.
STATION_PROPS = {
    '11359201': {
        'id_by_provider': '11359201',
        'geom_type': 'Point',
        'name': 'Brno',
        'coordinates': [1847520.94, 6309563.27]
    },
    'brno2_id_by_provider': {
        'id_by_provider': 'brno2_id_by_provider',
        'geom_type': 'Point',
        'name': 'Brno2',
        'coordinates': [1847520.94, 6309563.27]
    }
}
# Default queried interval: 24 hours from 2018-06-15 00:00, half-open '[)'.
time_range_boundary = '[)'
time_from = datetime(2018, 6, 15, 00, 00, 00)
date_time_range = DateTimeTZRange(
    time_from,
    time_from + timedelta(hours=24),
    time_range_boundary
)
def create_station(key):
    """Create and return a SamplingFeature for the station configured
    under *key* in STATION_PROPS (geometry in EPSG:3857)."""
    props = STATION_PROPS[key]
    x, y = props['coordinates']
    wkt = props['geom_type'] + ' (' + str(x) + ' ' + str(y) + ')'
    return SamplingFeature.objects.create(
        id_by_provider=props['id_by_provider'],
        name=props['name'],
        geometry=GEOSGeometry(wkt, srid=3857)
    )
def get_time_series_test():
    """Fetch the air-temperature time series for the 'Brno' station over
    the module-level date_time_range, using the topic's configured value
    frequency and the arithmetic-mean aggregation process."""
    topic_config = settings.APPLICATION_MC.TOPICS.get(TOPIC_NAME)
    return get_timeseries(
        observed_property=Property.objects.get(name_id='air_temperature'),
        observation_provider_model=Observation,
        feature_of_interest=SamplingFeature.objects.get(name="Brno"),
        phenomenon_time_range=date_time_range,
        process=Process.objects.get(name_id="apps.common.aggregate.arithmetic_mean"),
        frequency=topic_config['value_frequency']
    )
class RestApiTestCase(APITestCase):
def setUp(self):
Topic.objects.create(
name_id='drought',
name='drought'
)
am_process = Process.objects.create(
name_id='apps.common.aggregate.arithmetic_mean',
name='arithmetic mean'
)
station_key = '11359201'
station = create_station(station_key)
station_key = 'brno2_id_by_provider'
station_2 = create_station(station_key)
at_prop = Property.objects.create(
name_id='air_temperature',
name='air temperature',
unit='°C',
default_mean=am_process
)
Property.objects.create(
name_id='ground_air_temperature',
name='ground_air_temperature',
unit='°C',
default_mean=am_process
)
import_time_slots_from_config()
t = TimeSlots.objects.get(name_id='1_hour_slot')
t30 = TimeSlots.objects.get(name_id='30_days_daily')
#"["2019-05-01 23:00:00+00","2019-05-31 23:00:00+00")"
'''
"["2019-04-30 23:00:00+00","2019-05-30 23:00:00+00")"
"["2019-04-29 23:00:00+00","2019-05-29 23:00:00+00")"
"["2019-04-28 23:00:00+00","2019-05-28 23:00:00+00")"
"["2019-04-27 23:00:00+00","2019-05-27 23:00:00+00")"
"["2019-04-26 23:00:00+00","2019-05-26 23:00:00+00")"
'''
time_from = datetime(2018, 6, 10, 23, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station_2,
procedure=am_process,
result=2,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(days=30),
time_range_boundary
),
time_slots=t30
)
time_from = datetime(2018, 6, 11, 23, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station_2,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(days=30),
time_range_boundary
),
time_slots=t30
)
time_from = datetime(2018, 6, 12, 23, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station_2,
procedure=am_process,
result=3.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(days=30),
time_range_boundary
),
time_slots=t30
)
time_from = datetime(2018, 6, 13, 23, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station_2,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(days=30),
time_range_boundary
),
time_slots=t30
)
time_from = datetime(2018, 6, 14, 23, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station_2,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(days=30),
time_range_boundary
),
time_slots=t30
)
time_from = datetime(2018, 6, 15, 23, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station_2,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(days=30),
time_range_boundary
),
time_slots=t30
)
time_from = datetime(2018, 6, 16, 23, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station_2,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(days=30),
time_range_boundary
),
time_slots=t30
)
time_from = datetime(2018, 6, 17, 23, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station_2,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(days=30),
time_range_boundary
),
time_slots=t30
)
time_from = datetime(2018, 6, 15, 11, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station_2,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(hours=1),
time_range_boundary
),
time_slots=t
)
time_from = datetime(2018, 6, 15, 12, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station_2,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(hours=1),
time_range_boundary
),
time_slots=t
)
time_from = datetime(2018, 6, 14, 13, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(hours=1),
time_range_boundary
),
time_slots=t
)
time_from = datetime(2018, 6, 15, 10, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station,
procedure=am_process,
result=1,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(hours=1),
time_range_boundary
),
time_slots=t
)
time_from = datetime(2018, 6, 15, 11, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station,
procedure=am_process,
result=1000,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(hours=1),
time_range_boundary
),
time_slots=t
)
time_from = datetime(2018, 6, 15, 12, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(hours=1),
time_range_boundary
),
time_slots=t
)
time_from = datetime(2018, 6, 16, 13, 00, 00)
Observation.objects.create(
observed_property=at_prop,
feature_of_interest=station,
procedure=am_process,
result=1.5,
phenomenon_time_range=DateTimeTZRange(
time_from,
time_from + timedelta(hours=1),
time_range_boundary
),
time_slots=t
)
    def test_properties_response_status(self):
        # The properties endpoint must answer 200 for an existing topic.
        response = self.client.get(URL_PROPERTIES)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_properties_response_content(self):
        # Both properties created in setUp are returned.
        response = self.client.get(URL_PROPERTIES)
        expected_response = [
            {"name_id": "air_temperature", "name": "air temperature", "unit": "°C" },
            {"name_id": "ground_air_temperature", "name": "ground_air_temperature", "unit": "°C"}
        ]
        self.assertEquals(response.data, expected_response)

    def test_topics_response_status(self):
        # The topics endpoint must answer 200.
        response = self.client.get(URL_TOPICS)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_topics_response_content(self):
        # Only the single 'drought' topic created in setUp is listed.
        response = self.client.get(URL_TOPICS)
        expected_response = [
            {"name_id": "drought", "name": "drought"}
        ]
        self.assertEquals(response.data, expected_response)

    def test_timeseries_response_status(self):
        # The timeseries endpoint must answer 200 for a valid query.
        response = self.client.get(URL_TIMESERIES)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_timeseries_property_values_length_equals_anomaly_rates(self):
response = self.client.get(URL_TIMESERIES)
data = response.data
fc = data['feature_collection']
features = fc['features']
props = data['properties']
for f in features:
properties = f.get('properties', None)
for p in props:
property = properties.get(p, None)
if property:
property_values = property.get('values', None)
property_anomaly_rates = property.get('anomaly_rates', None)
self.assertEquals(len(property_values), len(property_anomaly_rates))
def test_timeseries_feature_output(self):
response = self.client.get(URL_TIMESERIES, format='json')
data = response.data
fc = data['feature_collection']
features = fc['features']
for f in features:
props = f.get('properties', None)
for p in props:
self.assertIsNotNone(props.get(p, None))
id = f.get('id', None)
id_by_provider = id.split(':')[-1]
geom = f.get('geometry', None)
coordinates = geom.get('coordinates', None)
geom_type = geom.get('type', None)
props = STATION_PROPS[id_by_provider]
self.assertEquals(id_by_provider, props['id_by_provider'])
self.assertEquals(geom_type, 'Point')
self.assertEquals(len(coordinates), len(props['coordinates']))
self.assertEquals(coordinates[0], props['coordinates'][0])
self.assertEquals(coordinates[1], props['coordinates'][1])
def test_timeseries_time_range_output(self):
response = self.client.get(URL_TIMESERIES, format='json')
data = response.data
phenomenon_time_from = dateutil.parser.parse(data['phenomenon_time_from'])
phenomenon_time_to = dateutil.parser.parse(data['phenomenon_time_to'])
date_time_range_from_utc = date_time_range.lower.replace(tzinfo=utc)
date_time_range_to_utc = date_time_range.upper.replace(tzinfo=utc)
self.assertGreaterEqual(phenomenon_time_from, date_time_range_from_utc)
self.assertLessEqual(phenomenon_time_to, date_time_range_to_utc)
def test_timeseries_bbox_param_data(self):
response = self.client.get(URL_TIMESERIES_BBOX, format='json')
data = response.data
fc = data['feature_collection']
features = fc['features']
self.assertNotEquals(len(features), 0)
def test_timeseries_bbox_param_no_data_in_area(self):
response = self.client.get(URL_TIMESERIES_BBOX_NO_DATA, format='json')
data = response.data
phenomenon_time_from = data['phenomenon_time_from']
phenomenon_time_to = data['phenomenon_time_to']
fc = data['feature_collection']
features = fc['features']
self.assertEquals(len(features), 0)
self.assertEquals(phenomenon_time_from, None)
self.assertEquals(phenomenon_time_to, None)
def test_timeseries_interval_no_data(self):
response = self.client.get(URL_TIMESERIES_INTERVAL_NO_VALUES, format='json')
data = response.data
fc = data['feature_collection']
features = fc['features']
self.assertEquals(len(features), 0)
def test_timeseries_bbox_wrong_params(self):
response = self.client.get(URL_TIMESERIES_BBOX_WRONG_VALUES, format='json')
self.assertEquals(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
def test_timeseries_bbox_missing_param(self):
response = self.client.get(URL_TIMESERIES_BBOX_MISSING_VALUES, format='json')
self.assertEquals(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
def test_timeseries_phenomenon_date_from_wrong_param(self):
response = self.client.get(URL_TIMESERIES_WRONG_DATE_FROM, format='json')
self.assertEquals(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
def test_timeseries_phenomenon_date_to_wrong_param(self):
response = self.client.get(URL_TIMESERIES_WRONG_DATE_TO, format='json')
self.assertEquals(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
def test_timeseries_topic_not_exists(self):
response = self.client.get(URL_TIMESERIES_TOPIC_NOT_EXISTS, format='json')
self.assertEquals(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
def test_timeseries_time_slots_24h(self):
response = self.client.get(URL_TIMESERIES_TIME_SLOTS_24H, format='json')
self.assertEquals(response.status_code, status.HTTP_200_OK)
def test_timeseries_time_slots_not_exists(self):
response = self.client.get(URL_TIMESERIES_TIME_SLOTS_NOT_EXISTS, format='json')
self.assertEquals(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
def test_30_days_timeslots(self):
response = self.client.get(URL_TIMESERIES_30)
data = response.data
fc = data['feature_collection']
features = fc['features']
props = data['properties']
for f in features:
properties = f.get('properties', None)
for p in props:
property = properties.get(p, None)
if property:
self.assertEquals(property.get('values', None), [2.000, 1.500, 3.500, 1.500, 1.500, 1.500])
break
break
|
[
"apps.common.models.Process.objects.create",
"apps.common.models.TimeSlots.objects.get",
"django.conf.settings.APPLICATION_MC.TOPICS.get",
"apps.common.models.Property.objects.create",
"apps.processing.ala.models.SamplingFeature.objects.get",
"datetime.datetime",
"apps.common.models.Process.objects.get",
"apps.common.models.Property.objects.get",
"datetime.timedelta",
"apps.mc.tasks.import_time_slots_from_config",
"apps.ad.anomaly_detection.get_timeseries",
"apps.common.models.Topic.objects.create"
] |
[((2778, 2808), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(15)', '(0)', '(0)', '(0)'], {}), '(2018, 6, 15, 0, 0, 0)\n', (2786, 2808), False, 'from datetime import timedelta, datetime\n'), ((3379, 3419), 'apps.processing.ala.models.SamplingFeature.objects.get', 'SamplingFeature.objects.get', ([], {'name': '"""Brno"""'}), "(name='Brno')\n", (3406, 3419), False, 'from apps.processing.ala.models import SamplingFeature, Observation\n'), ((3431, 3478), 'apps.common.models.Property.objects.get', 'Property.objects.get', ([], {'name_id': '"""air_temperature"""'}), "(name_id='air_temperature')\n", (3451, 3478), False, 'from apps.common.models import Property, Topic, TimeSlots\n'), ((3531, 3577), 'django.conf.settings.APPLICATION_MC.TOPICS.get', 'settings.APPLICATION_MC.TOPICS.get', (['TOPIC_NAME'], {}), '(TOPIC_NAME)\n', (3565, 3577), False, 'from django.conf import settings\n'), ((3592, 3660), 'apps.common.models.Process.objects.get', 'Process.objects.get', ([], {'name_id': '"""apps.common.aggregate.arithmetic_mean"""'}), "(name_id='apps.common.aggregate.arithmetic_mean')\n", (3611, 3660), False, 'from apps.common.models import Process\n'), ((3727, 3922), 'apps.ad.anomaly_detection.get_timeseries', 'get_timeseries', ([], {'observed_property': 'prop', 'observation_provider_model': 'Observation', 'feature_of_interest': 'station', 'phenomenon_time_range': 'time_range', 'process': 'process', 'frequency': 'value_frequency'}), '(observed_property=prop, observation_provider_model=\n Observation, feature_of_interest=station, phenomenon_time_range=\n time_range, process=process, frequency=value_frequency)\n', (3741, 3922), False, 'from apps.ad.anomaly_detection import get_timeseries\n'), ((2878, 2897), 'datetime.timedelta', 'timedelta', ([], {'hours': '(24)'}), '(hours=24)\n', (2887, 2897), False, 'from datetime import timedelta, datetime\n'), ((4039, 4094), 'apps.common.models.Topic.objects.create', 'Topic.objects.create', ([], {'name_id': '"""drought"""', 'name': 
'"""drought"""'}), "(name_id='drought', name='drought')\n", (4059, 4094), False, 'from apps.common.models import Property, Topic, TimeSlots\n'), ((4151, 4250), 'apps.common.models.Process.objects.create', 'Process.objects.create', ([], {'name_id': '"""apps.common.aggregate.arithmetic_mean"""', 'name': '"""arithmetic mean"""'}), "(name_id='apps.common.aggregate.arithmetic_mean',\n name='arithmetic mean')\n", (4173, 4250), False, 'from apps.common.models import Process\n'), ((4474, 4588), 'apps.common.models.Property.objects.create', 'Property.objects.create', ([], {'name_id': '"""air_temperature"""', 'name': '"""air temperature"""', 'unit': '"""°C"""', 'default_mean': 'am_process'}), "(name_id='air_temperature', name='air temperature',\n unit='°C', default_mean=am_process)\n", (4497, 4588), False, 'from apps.common.models import Property, Topic, TimeSlots\n'), ((4652, 4781), 'apps.common.models.Property.objects.create', 'Property.objects.create', ([], {'name_id': '"""ground_air_temperature"""', 'name': '"""ground_air_temperature"""', 'unit': '"""°C"""', 'default_mean': 'am_process'}), "(name_id='ground_air_temperature', name=\n 'ground_air_temperature', unit='°C', default_mean=am_process)\n", (4675, 4781), False, 'from apps.common.models import Property, Topic, TimeSlots\n'), ((4846, 4877), 'apps.mc.tasks.import_time_slots_from_config', 'import_time_slots_from_config', ([], {}), '()\n', (4875, 4877), False, 'from apps.mc.tasks import import_time_slots_from_config\n'), ((4890, 4934), 'apps.common.models.TimeSlots.objects.get', 'TimeSlots.objects.get', ([], {'name_id': '"""1_hour_slot"""'}), "(name_id='1_hour_slot')\n", (4911, 4934), False, 'from apps.common.models import Property, Topic, TimeSlots\n'), ((4949, 4995), 'apps.common.models.TimeSlots.objects.get', 'TimeSlots.objects.get', ([], {'name_id': '"""30_days_daily"""'}), "(name_id='30_days_daily')\n", (4970, 4995), False, 'from apps.common.models import Property, Topic, TimeSlots\n'), ((5415, 5446), 
'datetime.datetime', 'datetime', (['(2018)', '(6)', '(10)', '(23)', '(0)', '(0)'], {}), '(2018, 6, 10, 23, 0, 0)\n', (5423, 5446), False, 'from datetime import timedelta, datetime\n'), ((5858, 5889), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(11)', '(23)', '(0)', '(0)'], {}), '(2018, 6, 11, 23, 0, 0)\n', (5866, 5889), False, 'from datetime import timedelta, datetime\n'), ((6303, 6334), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(12)', '(23)', '(0)', '(0)'], {}), '(2018, 6, 12, 23, 0, 0)\n', (6311, 6334), False, 'from datetime import timedelta, datetime\n'), ((6748, 6779), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(13)', '(23)', '(0)', '(0)'], {}), '(2018, 6, 13, 23, 0, 0)\n', (6756, 6779), False, 'from datetime import timedelta, datetime\n'), ((7193, 7224), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(14)', '(23)', '(0)', '(0)'], {}), '(2018, 6, 14, 23, 0, 0)\n', (7201, 7224), False, 'from datetime import timedelta, datetime\n'), ((7638, 7669), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(15)', '(23)', '(0)', '(0)'], {}), '(2018, 6, 15, 23, 0, 0)\n', (7646, 7669), False, 'from datetime import timedelta, datetime\n'), ((8084, 8115), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(16)', '(23)', '(0)', '(0)'], {}), '(2018, 6, 16, 23, 0, 0)\n', (8092, 8115), False, 'from datetime import timedelta, datetime\n'), ((8529, 8560), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(17)', '(23)', '(0)', '(0)'], {}), '(2018, 6, 17, 23, 0, 0)\n', (8537, 8560), False, 'from datetime import timedelta, datetime\n'), ((8974, 9005), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(15)', '(11)', '(0)', '(0)'], {}), '(2018, 6, 15, 11, 0, 0)\n', (8982, 9005), False, 'from datetime import timedelta, datetime\n'), ((9417, 9448), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(15)', '(12)', '(0)', '(0)'], {}), '(2018, 6, 15, 12, 0, 0)\n', (9425, 9448), False, 'from datetime import timedelta, datetime\n'), ((9860, 9891), 
'datetime.datetime', 'datetime', (['(2018)', '(6)', '(14)', '(13)', '(0)', '(0)'], {}), '(2018, 6, 14, 13, 0, 0)\n', (9868, 9891), False, 'from datetime import timedelta, datetime\n'), ((10301, 10332), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(15)', '(10)', '(0)', '(0)'], {}), '(2018, 6, 15, 10, 0, 0)\n', (10309, 10332), False, 'from datetime import timedelta, datetime\n'), ((10740, 10771), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(15)', '(11)', '(0)', '(0)'], {}), '(2018, 6, 15, 11, 0, 0)\n', (10748, 10771), False, 'from datetime import timedelta, datetime\n'), ((11182, 11213), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(15)', '(12)', '(0)', '(0)'], {}), '(2018, 6, 15, 12, 0, 0)\n', (11190, 11213), False, 'from datetime import timedelta, datetime\n'), ((11623, 11654), 'datetime.datetime', 'datetime', (['(2018)', '(6)', '(16)', '(13)', '(0)', '(0)'], {}), '(2018, 6, 16, 13, 0, 0)\n', (11631, 11654), False, 'from datetime import timedelta, datetime\n'), ((5729, 5747), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (5738, 5747), False, 'from datetime import timedelta, datetime\n'), ((6174, 6192), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (6183, 6192), False, 'from datetime import timedelta, datetime\n'), ((6619, 6637), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (6628, 6637), False, 'from datetime import timedelta, datetime\n'), ((7064, 7082), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (7073, 7082), False, 'from datetime import timedelta, datetime\n'), ((7509, 7527), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (7518, 7527), False, 'from datetime import timedelta, datetime\n'), ((7954, 7972), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (7963, 7972), False, 'from datetime import timedelta, datetime\n'), ((8400, 8418), 'datetime.timedelta', 'timedelta', ([], 
{'days': '(30)'}), '(days=30)\n', (8409, 8418), False, 'from datetime import timedelta, datetime\n'), ((8845, 8863), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (8854, 8863), False, 'from datetime import timedelta, datetime\n'), ((9290, 9308), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (9299, 9308), False, 'from datetime import timedelta, datetime\n'), ((9733, 9751), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (9742, 9751), False, 'from datetime import timedelta, datetime\n'), ((10174, 10192), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (10183, 10192), False, 'from datetime import timedelta, datetime\n'), ((10613, 10631), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (10622, 10631), False, 'from datetime import timedelta, datetime\n'), ((11055, 11073), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (11064, 11073), False, 'from datetime import timedelta, datetime\n'), ((11496, 11514), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (11505, 11514), False, 'from datetime import timedelta, datetime\n'), ((11937, 11955), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (11946, 11955), False, 'from datetime import timedelta, datetime\n')]
|
from abc import abstractmethod, ABCMeta
from skeema.intermediate import ClassContext, Representation
class Compiler(metaclass=ABCMeta):
    """Abstract base for schema compilers.

    Subclasses supply the compilation keys; :meth:`compile` runs them against
    a schema and registers the resulting representation.
    """

    @abstractmethod
    def get_compilation_keys(self):
        """Return the compilation keys this compiler applies to a schema."""
        pass

    def compile(self, schema, compilation_context):
        """Compile *schema* and register its representation.

        Each compilation key is run against a fresh :class:`ClassContext`;
        keys that fail to compile emit a warning when they carry an error
        message. The populated context is then turned into a
        :class:`Representation` and registered under the schema's class name.
        """
        class_name = schema.class_name
        class_context = ClassContext(class_name)

        succeeded = dict()
        for key in self.get_compilation_keys():
            if key.compile(schema, class_context, compilation_context):
                succeeded[key.key] = key
            elif key.error_message is not None:
                print(
                    f"Warning during schema compilation for {schema.class_name} - "
                    f"compilation key \"{key.key}\" failed to compile:\n"
                    f"{key.error_message}")

        representation = Representation(
            class_name,
            class_context.base_classes,
            class_context.constructor_parameters,
            class_context.data_members,
        )
        compilation_context.register_representation(class_name, representation)
|
[
"skeema.intermediate.Representation",
"skeema.intermediate.ClassContext"
] |
[((324, 348), 'skeema.intermediate.ClassContext', 'ClassContext', (['class_name'], {}), '(class_name)\n', (336, 348), False, 'from skeema.intermediate import ClassContext, Representation\n'), ((1160, 1256), 'skeema.intermediate.Representation', 'Representation', (['class_name', 'class_context.base_classes', 'constructor_parameters', 'data_members'], {}), '(class_name, class_context.base_classes,\n constructor_parameters, data_members)\n', (1174, 1256), False, 'from skeema.intermediate import ClassContext, Representation\n')]
|
import tensorflow as tf
import numpy as np
from tensorflow.keras import backend as K
from sklearn.linear_model import LassoLars
from timeit import default_timer as timer
from sklearn.linear_model import LinearRegression
from compression_tools.pruning.helper_functions import rel_error, get_layer_index, load_model_param
from compression_tools.pruning.delete_filters import delete_filter_before
from tools.progress.bar import Bar
def extract_inputs_and_outputs(
        model,
        layer,
        layer_index_dic,
        get_dataset="food20",
        batches_n=80,
        activation=False):
    # Collect sampled (input patch, output activation) pairs for `layer` by
    # running `batches_n` batches of training data through the model; the
    # pairs are later used to fit a linear reconstruction of the layer.
    #
    # NOTE(review): despite the string default, `get_dataset` is invoked as a
    # zero-argument callable returning (train_data, _).  The caller
    # prune_kernel_lasso passes the keyword `dataset=`, which this signature
    # does not accept -- TODO confirm the intended parameter name.
    index = get_layer_index(layer_index_dic, layer)
    if layer.use_bias:
        # Bias is subtracted from the sampled outputs below so the targets
        # contain only the convolution term.
        bias = layer.get_weights()[1]
    [_, H, W, C] = layer.output_shape
    [h, w] = layer.kernel_size
    # Find the layer feeding this one; inbound_layers may be a single layer
    # or a list, hence the fallback to fore_layer[0].
    fore_layer = layer.inbound_nodes[0].inbound_layers
    try:
        fore_layer_index = get_layer_index(layer_index_dic, fore_layer)
    except Exception:
        fore_layer_index = get_layer_index(layer_index_dic, fore_layer[0])
    train_data, _ = get_dataset()
    inputs = []
    outputs = []
    if activation:
        # With activation=True, sample the output of the *following* layer
        # (index+1), i.e. after the activation has been applied.
        get_layer_input = K.function([model.layers[0].input],
                                    [model.layers[fore_layer_index].output])
        get_layer_output = K.function([model.layers[0].input],
                                     [model.layers[index+1].output])
    else:
        get_layer_input = K.function([model.layers[0].input],
                                    [model.layers[index].output])  # see note below
    for batch in range(batches_n):
        # NOTE(review): `it` is never used, and the loop counter `batch` is
        # immediately overwritten by the next data batch.
        it = iter(train_data)
        batch = next(train_data)
        layer_input = get_layer_input([batch[0]])[0]
        layer_output = get_layer_output([batch[0]])[0]
        if activation:
            # Flatten activations channel-wise; inputs aliases outputs here.
            X = []
            Y = layer_output.reshape((-1, layer_output.shape[3]))
            outputs.append(np.vstack(Y))
            inputs = outputs
        else:
            # Sample 10 random spatial positions per batch and cut the
            # matching receptive-field patch out of the input feature map.
            hh = (h-1)/2
            hw = (w-1)/2
            x_samples = np.random.randint(0, H - h, 10)
            y_samples = np.random.randint(0, W - w, 10)
            if layer.use_bias:
                # Remove the bias contribution from every output channel.
                for b in layer_output:
                    for l1 in range(b.shape[2]):
                        b[:, :, l1] = b[:, :, l1]-bias[l1]
            Xs = []
            Ys = []
            for n, x in enumerate(x_samples):
                Y = layer_output[:, x, y_samples[n], :]
                # Map the sampled output coordinate back to the input
                # coordinate via the layer strides.
                x = x*layer.strides[0]
                y_samples[n] = y_samples[n]*layer.strides[1]
                X = layer_input[
                    :, int(x-hh):int(x+hh+1), int(y_samples[n]-hw):int(y_samples[n]+hw+1), :]
                Xs.append(X)
                Ys.append(Y)
            inputs.append(np.stack(Xs))
            outputs.append(np.vstack(Ys))
    return [np.vstack(np.vstack(inputs)), np.vstack(outputs)]
def featuremap_reconstruction(x, y, copy_x=True, fit_intercept=False):
    """Fit a linear map reconstructing the original feature map from pruned input.

    Args:
        x: The pruned input
        y: The original feature map of the convolution layer
    Return:
        new weights and bias which can reconstruct the feature map with small loss given X
    """
    regressor = LinearRegression(
        n_jobs=-1, copy_X=copy_x, fit_intercept=fit_intercept)
    regressor.fit(x, y)
    return regressor.coef_, regressor.intercept_
def compute_pruned_kernel(
        X,
        W2,
        Y,
        alpha=1e-4,
        c_new=None,
        idx=None,
        tolerance=0.02):
    """Select which input channels to keep via LASSO, then refit the kernel.

    A LassoLars solver is run on per-channel products of sampled inputs and
    the kernel; the regularisation strength ``alpha`` is tuned by a bracketed
    search until roughly ``c_new`` channels (within ``tolerance * c_in / 2``)
    survive.  Returns a boolean keep-mask ``idxs`` and the refitted weights.
    """
    nb_samples = X.shape[0]
    c_in = X.shape[-1]
    c_out = W2.shape[-1]
    # Subsample rows to keep the LASSO problem small (at most 400 samples).
    samples = np.random.randint(0, nb_samples, min(400, nb_samples // 20))
    # Rearrange so each of the c_in channels contributes one column block.
    reshape_X = np.rollaxis(
        np.transpose(X, (0, 3, 1, 2)).reshape((nb_samples, c_in, -1))[samples], 1, 0)
    reshape_W2 = np.transpose(np.transpose(W2, (3, 2, 0, 1)).reshape((c_out, c_in, -1)), [1, 2, 0])
    product = np.matmul(reshape_X, reshape_W2).reshape((c_in, -1)).T
    reshape_Y = Y[samples].reshape(-1)
    solver = LassoLars(alpha=alpha, fit_intercept=False, max_iter=3000)

    def solve(alpha):
        """ Solve the Lasso"""
        # Re-fit with the given alpha and report the surviving channels.
        solver.alpha = alpha
        solver.fit(product, reshape_Y)
        idxs = solver.coef_ != 0.
        tmp = sum(idxs)
        return idxs, tmp, solver.coef_
    # print("pruned channel selecting")
    # NOTE(review): `start` is never read -- leftover timing scaffolding.
    start = timer()
    if c_new == c_in:
        # No pruning requested: keep every channel.
        idxs = np.array([True] * c_new)
        # newW2 = W2.reshape(W2.shape[-1],)
    else:
        left = 0
        right = alpha
        lbound = c_new - tolerance * c_in / 2
        rbound = c_new + tolerance * c_in / 2
        # Grow `right` until the penalty is strong enough to drop below c_new.
        while True:
            _, tmp, coef = solve(right)
            if tmp < c_new:
                break
            else:
                right *= 2
                # print("relax right to {}".format(right))
        # Bisect alpha in [left, right] until the surviving channel count
        # lands inside [lbound, rbound]; bounds are relaxed when the
        # bracket collapses without converging.
        while True:
            if lbound < 0:
                lbound = 1
            idxs, tmp, coef = solve(alpha)
            # print loss
            # NOTE(review): `loss` is computed for diagnostics only.
            loss = 1 / (2 * float(product.shape[0])) * np.sqrt(
                np.sum((reshape_Y - np.matmul(
                    product, coef)) ** 2, axis=0)) + alpha * np.sum(np.fabs(coef))
            if lbound <= tmp and tmp <= rbound:
                # The `if False` arm is intentionally dead (disabled
                # experiment snapping the count to a multiple of 4).
                if False:
                    if tmp % 4 == 0:
                        break
                    elif tmp % 4 <= 2:
                        rbound = tmp - 1
                        lbound = lbound - 2
                    else:
                        lbound = tmp + 1
                        rbound = rbound + 2
                else:
                    break
            elif abs(left - right) <= right * 0.1:
                # Bracket has collapsed: widen the acceptance window and
                # re-expand the alpha bracket.
                if lbound > 1:
                    lbound = lbound - 1
                if rbound < c_in:
                    rbound = rbound + 1
                left = left / 1.2
                right = right * 1.2
            elif tmp > rbound:
                # Too many channels survive: increase regularisation.
                left = left + (alpha - left) / 2
            else:
                # Too few channels survive: decrease regularisation.
                right = right - (right - alpha) / 2
                if alpha < 1e-10:
                    break
            alpha = (left + right) / 2
        c_new = tmp
    # Refit the kernel on the kept channels so the output is reconstructed.
    newW2, _ = featuremap_reconstruction(
        X[:, :, :, idxs].reshape((nb_samples, -1)), Y, fit_intercept=False)
    return idxs, newW2
def prune_kernel_lasso(
        model,
        index,
        layer_params,
        prune_ratio,
        layer_types,
        layer_bias,
        layer_output_shape,
        filters,
        layer_index_dic,
        cp_lasso=True,
        dataset="food20"):
    # Prune the input channels of the Conv2D layer at `index` by the given
    # ratio.  Layers sitting on residual edges (fed by / feeding an Add, or
    # with a branching predecessor) are skipped because channel removal
    # there would break the skip connection.
    #
    # Returns (new_model_param, num_new_filter, layer_output_shape, filters).
    if prune_ratio < 1:
        left_edge_flag = False
        after_add = False
        layer_index = index
        current_layer = layer_index_dic[layer_index]
        fore_layer = current_layer.inbound_nodes[0].inbound_layers
        # Walk backwards past pass-through layers until we hit a Conv2D, an
        # Add, the model input, or a layer that branches (2 outbound nodes).
        while((not fore_layer == []
                and not isinstance(fore_layer, tf.keras.layers.Conv2D)
                and not isinstance(fore_layer, tf.keras.layers.Add)
                or isinstance(fore_layer, tf.keras.layers.DepthwiseConv2D))
                and not len(fore_layer.outbound_nodes) == 2):
            # TODO:: Batch normalization
            fore_layer = fore_layer.inbound_nodes[0].inbound_layers
        if fore_layer == []:
            # Reached the model input: the first conv layer is never pruned.
            new_model_param = layer_params
            num_new_filter = layer_params[index][0].shape[-1]
            # print("No pruning implemented for start conv layers")
            return new_model_param, num_new_filter, layer_output_shape, filters
        if isinstance(fore_layer, tf.keras.layers.Add):
            after_add = True
        if len(fore_layer.outbound_nodes) == 2:
            # print("This conv2D is at the beginning edge")
            # Predecessor branches: check whether this conv starts the left
            # (shortcut) edge of a residual block.
            next_layer = current_layer.outbound_nodes[0].layer
            while(not isinstance(next_layer, tf.compat.v1.keras.layers.Conv2D)
                    and not isinstance(next_layer, tf.keras.layers.Add)):
                next_layer = next_layer.outbound_nodes[0].layer
            if isinstance(next_layer, tf.compat.v1.keras.layers.Conv2D):
                # print("left edge")
                left_edge_flag = True
        ############################################
        if not left_edge_flag and not after_add:
            layer = model.layers[index]
            W = layer_params[index][0]
            # NOTE(review): extract_inputs_and_outputs declares the parameter
            # as `get_dataset`, not `dataset` -- confirm this call works.
            [inputs, outputs] = extract_inputs_and_outputs(
                model, layer, layer_index_dic, dataset=dataset)
            # Reconstruction error before pruning (diagnostic only).
            error1 = rel_error(
                inputs.reshape(inputs.shape[0], -1).dot(W.reshape(-1, W.shape[-1])), outputs)
            # print('feature map rmse: {}'.format(error1))
            error2 = 1
            # while(error2 > 0.05 and prune_ratio < 1):
            nb_channel_new = int((1-prune_ratio)*(layer.input_shape[3]))
            if cp_lasso is True:
                # LASSO-based channel selection plus least-squares refit.
                idxs, newW2 = compute_pruned_kernel(inputs, W, outputs, c_new=nb_channel_new)
            else:
                # Magnitude-based fallback: keep channels with largest
                # summed absolute weight, then refit linearly.
                idxs = np.argsort(-np.abs(W).sum((0, 1, 3)))
                mask = np.zeros(len(idxs), bool)
                idxs = idxs[:nb_channel_new]
                mask[idxs] = True
                # NOTE(review): `idxsz` is assigned but never used.
                idxsz = mask
                reg = LinearRegression(fit_intercept=False)
                reg.fit(inputs[:, :, :, idxs].reshape(inputs.shape[0], -1), outputs)
                newW2 = reg.coef_
            # Reconstruction error after pruning (diagnostic only).
            error2 = rel_error(
                inputs[:, :, :, idxs].reshape(inputs.shape[0], -1).dot(newW2.T), outputs)
            # print('feature map rmse: {}'.format(error2))
            # print('prune_ratio is: {}'.format(prune_ratio))
            # prune_ratio += 0.1
            '''
            if error2 > 0.1 or prune_ratio > 0.9:
                print("BIG ERROR")
                print('Prune {} c_in from {} to {}'.format(layer.name, inputs.shape[-1], sum(idxs)))
                new_model_param = layer_params
                num_new_filter = layer_params[index][0].shape[-1]
                print("No pruning implemented for left edge conv layers")
            else:
            '''
            # print("PRUN IT")
            # print('Prune {} c_in from {} to {}'.format(layer.name, inputs.shape[-1], sum(idxs)))
            # Collect the indices of the channels that were dropped.
            prun_filter = []
            for i, idx in enumerate(idxs):
                if not idx:
                    prun_filter.append(i)
            filters[index] = prun_filter
            num_new_filter = W.shape[-1]-len(prun_filter)
            h, w = layer.kernel_size
            # Reshape the refitted flat weights back to (h, w, c_kept, c_out).
            newW2 = newW2.reshape(-1, h, w, np.sum(idxs))
            newW2 = np.transpose(newW2, [1, 2, 3, 0])
            layer_params[index][0] = newW2
            prun_filter = [prun_filter]
            # Propagate the channel removal to the preceding layer's filters.
            for i in range(len(prun_filter)-1, -1, -1):
                new_model_param, layer_output_shape = delete_filter_before(
                    layer_params, layer_types, layer_output_shape, layer_bias,
                    index, prun_filter[i], layer_index_dic)
        else:
            # Residual-edge layer: leave untouched.
            new_model_param = layer_params
            num_new_filter = layer_params[index][0].shape[-1]
            # print("No pruning implemented for left edge conv layers")
    else:
        # prune_ratio >= 1 means "keep everything".
        new_model_param = layer_params
        num_new_filter = layer_params[index][0].shape[-1]
        # print("No pruning implemented for conv layers")
    return new_model_param, num_new_filter, layer_output_shape, filters
def channel_prune_model_lasso(
        my_model,
        prune_ratio,
        min_index=3,
        max_index=None,
        dataset="food20"):
    """Prune input channels of every eligible Conv2D layer via LASSO selection.

    Args:
        my_model: The Keras model to prune.
        prune_ratio: Per-layer-index prune ratios, indexed by layer position.
        min_index: First layer index considered for pruning.
        max_index: Last layer index considered (defaults to the last layer).
        dataset: Dataset identifier forwarded to prune_kernel_lasso.

    Returns:
        Tuple of (pruned layer parameters, layer types).
    """
    layer_types, layer_params, layer_output_shape, layer_bias, layer_index_dic = load_model_param(
        my_model)
    max_index = len(my_model.layers) if max_index is None else max_index
    filters = {}
    with Bar('Lasso channel pruning...') as bar:
        for index, layer in enumerate(my_model.layers):
            # Only regular (non-depthwise) Conv2D layers inside
            # [min_index, max_index] are pruned.  The original code carried a
            # second `if index >= min_index` with an unreachable else branch
            # (the outer condition already guarantees it); removed.
            if isinstance(layer, tf.keras.layers.Conv2D) and\
                    not isinstance(layer, tf.keras.layers.DepthwiseConv2D) and\
                    layer.kernel_size[0] >= 1 and\
                    min_index <= index <= max_index:
                layer_params, _, layer_output_shape, filters = prune_kernel_lasso(
                    my_model,
                    index,
                    layer_params,
                    prune_ratio[index],
                    layer_types,
                    layer_bias,
                    layer_output_shape,
                    filters=filters,
                    layer_index_dic=layer_index_dic,
                    dataset=dataset)
            bar.next(100 / len(my_model.layers))
    return layer_params, layer_types
|
[
"numpy.stack",
"numpy.sum",
"numpy.abs",
"timeit.default_timer",
"sklearn.linear_model.LassoLars",
"compression_tools.pruning.helper_functions.get_layer_index",
"numpy.transpose",
"compression_tools.pruning.delete_filters.delete_filter_before",
"sklearn.linear_model.LinearRegression",
"tensorflow.keras.backend.function",
"numpy.random.randint",
"numpy.array",
"numpy.fabs",
"numpy.matmul",
"tools.progress.bar.Bar",
"numpy.vstack",
"compression_tools.pruning.helper_functions.load_model_param"
] |
[((657, 696), 'compression_tools.pruning.helper_functions.get_layer_index', 'get_layer_index', (['layer_index_dic', 'layer'], {}), '(layer_index_dic, layer)\n', (672, 696), False, 'from compression_tools.pruning.helper_functions import rel_error, get_layer_index, load_model_param\n'), ((3399, 3470), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {'n_jobs': '(-1)', 'copy_X': 'copy_x', 'fit_intercept': 'fit_intercept'}), '(n_jobs=-1, copy_X=copy_x, fit_intercept=fit_intercept)\n', (3415, 3470), False, 'from sklearn.linear_model import LinearRegression\n'), ((4216, 4274), 'sklearn.linear_model.LassoLars', 'LassoLars', ([], {'alpha': 'alpha', 'fit_intercept': '(False)', 'max_iter': '(3000)'}), '(alpha=alpha, fit_intercept=False, max_iter=3000)\n', (4225, 4274), False, 'from sklearn.linear_model import LassoLars\n'), ((4547, 4554), 'timeit.default_timer', 'timer', ([], {}), '()\n', (4552, 4554), True, 'from timeit import default_timer as timer\n'), ((11784, 11810), 'compression_tools.pruning.helper_functions.load_model_param', 'load_model_param', (['my_model'], {}), '(my_model)\n', (11800, 11810), False, 'from compression_tools.pruning.helper_functions import rel_error, get_layer_index, load_model_param\n'), ((919, 963), 'compression_tools.pruning.helper_functions.get_layer_index', 'get_layer_index', (['layer_index_dic', 'fore_layer'], {}), '(layer_index_dic, fore_layer)\n', (934, 963), False, 'from compression_tools.pruning.helper_functions import rel_error, get_layer_index, load_model_param\n'), ((1174, 1250), 'tensorflow.keras.backend.function', 'K.function', (['[model.layers[0].input]', '[model.layers[fore_layer_index].output]'], {}), '([model.layers[0].input], [model.layers[fore_layer_index].output])\n', (1184, 1250), True, 'from tensorflow.keras import backend as K\n'), ((1315, 1384), 'tensorflow.keras.backend.function', 'K.function', (['[model.layers[0].input]', '[model.layers[index + 1].output]'], {}), '([model.layers[0].input], 
[model.layers[index + 1].output])\n', (1325, 1384), True, 'from tensorflow.keras import backend as K\n'), ((1458, 1534), 'tensorflow.keras.backend.function', 'K.function', (['[model.layers[0].input]', '[model.layers[fore_layer_index].output]'], {}), '([model.layers[0].input], [model.layers[fore_layer_index].output])\n', (1468, 1534), True, 'from tensorflow.keras import backend as K\n'), ((1599, 1664), 'tensorflow.keras.backend.function', 'K.function', (['[model.layers[0].input]', '[model.layers[index].output]'], {}), '([model.layers[0].input], [model.layers[index].output])\n', (1609, 1664), True, 'from tensorflow.keras import backend as K\n'), ((2997, 3015), 'numpy.vstack', 'np.vstack', (['outputs'], {}), '(outputs)\n', (3006, 3015), True, 'import numpy as np\n'), ((4593, 4617), 'numpy.array', 'np.array', (['([True] * c_new)'], {}), '([True] * c_new)\n', (4601, 4617), True, 'import numpy as np\n'), ((11935, 11967), 'tools.progress.bar.Bar', 'Bar', (['f"""Lasso channel pruning..."""'], {}), "(f'Lasso channel pruning...')\n", (11938, 11967), False, 'from tools.progress.bar import Bar\n'), ((1013, 1060), 'compression_tools.pruning.helper_functions.get_layer_index', 'get_layer_index', (['layer_index_dic', 'fore_layer[0]'], {}), '(layer_index_dic, fore_layer[0])\n', (1028, 1060), False, 'from compression_tools.pruning.helper_functions import rel_error, get_layer_index, load_model_param\n'), ((2178, 2209), 'numpy.random.randint', 'np.random.randint', (['(0)', '(H - h)', '(10)'], {}), '(0, H - h, 10)\n', (2195, 2209), True, 'import numpy as np\n'), ((2234, 2265), 'numpy.random.randint', 'np.random.randint', (['(0)', '(W - w)', '(10)'], {}), '(0, W - w, 10)\n', (2251, 2265), True, 'import numpy as np\n'), ((2977, 2994), 'numpy.vstack', 'np.vstack', (['inputs'], {}), '(inputs)\n', (2986, 2994), True, 'import numpy as np\n'), ((10720, 10753), 'numpy.transpose', 'np.transpose', (['newW2', '[1, 2, 3, 0]'], {}), '(newW2, [1, 2, 3, 0])\n', (10732, 10753), True, 'import numpy as 
np\n'), ((2046, 2058), 'numpy.vstack', 'np.vstack', (['Y'], {}), '(Y)\n', (2055, 2058), True, 'import numpy as np\n'), ((2898, 2910), 'numpy.stack', 'np.stack', (['Xs'], {}), '(Xs)\n', (2906, 2910), True, 'import numpy as np\n'), ((2939, 2952), 'numpy.vstack', 'np.vstack', (['Ys'], {}), '(Ys)\n', (2948, 2952), True, 'import numpy as np\n'), ((4024, 4054), 'numpy.transpose', 'np.transpose', (['W2', '(3, 2, 0, 1)'], {}), '(W2, (3, 2, 0, 1))\n', (4036, 4054), True, 'import numpy as np\n'), ((4108, 4140), 'numpy.matmul', 'np.matmul', (['reshape_X', 'reshape_W2'], {}), '(reshape_X, reshape_W2)\n', (4117, 4140), True, 'import numpy as np\n'), ((9371, 9408), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {'fit_intercept': '(False)'}), '(fit_intercept=False)\n', (9387, 9408), False, 'from sklearn.linear_model import LinearRegression\n'), ((10686, 10698), 'numpy.sum', 'np.sum', (['idxs'], {}), '(idxs)\n', (10692, 10698), True, 'import numpy as np\n'), ((10947, 11070), 'compression_tools.pruning.delete_filters.delete_filter_before', 'delete_filter_before', (['layer_params', 'layer_types', 'layer_output_shape', 'layer_bias', 'index', 'prun_filter[i]', 'layer_index_dic'], {}), '(layer_params, layer_types, layer_output_shape,\n layer_bias, index, prun_filter[i], layer_index_dic)\n', (10967, 11070), False, 'from compression_tools.pruning.delete_filters import delete_filter_before\n'), ((3916, 3945), 'numpy.transpose', 'np.transpose', (['X', '(0, 3, 1, 2)'], {}), '(X, (0, 3, 1, 2))\n', (3928, 3945), True, 'import numpy as np\n'), ((5335, 5348), 'numpy.fabs', 'np.fabs', (['coef'], {}), '(coef)\n', (5342, 5348), True, 'import numpy as np\n'), ((9166, 9175), 'numpy.abs', 'np.abs', (['W'], {}), '(W)\n', (9172, 9175), True, 'import numpy as np\n'), ((5256, 5280), 'numpy.matmul', 'np.matmul', (['product', 'coef'], {}), '(product, coef)\n', (5265, 5280), True, 'import numpy as np\n')]
|
"""
read_excel_book.py
:copyright: (c) 2014-2017 by Onni Software Ltd.
:license: New BSD License, see LICENSE for more details
This shows how to use get_book to go through a multiple
sheet spreadsheet.
Please install pyexcel-ods3
"""
import os
import pyexcel as pe
def main(base_dir):
# Simply give a name to the Book class
book = pe.get_book(file_name=os.path.join(base_dir,
"multiple-sheets-example.xls"))
# the default iterator for a **Book* instance is a SheetIterator
for sheet in book:
# Each sheet has name
print("sheet: %s" % sheet.name)
# Once you have a sheet instance, you can regard it as
# a Reader instance. You can iterate its member in the way
# you wanted it
for row in sheet:
print(row)
if __name__ == '__main__':
main(os.getcwd())
# Here's the output
# sheet: Sheet 2
# [u'X', u'Y', u'Z']
# [1.0, 2.0, 3.0]
# [4.0, 5.0, 6.0]
# sheet: Sheet 3
# [u'O', u'P', u'Q']
# [3.0, 2.0, 1.0]
# [4.0, 3.0, 2.0]
# sheet: Sheet 1
# [1.0, 2.0, 3.0]
# [4.0, 5.0, 6.0]
# [7.0, 8.0, 9.0]
|
[
"os.getcwd",
"os.path.join"
] |
[((870, 881), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (879, 881), False, 'import os\n'), ((365, 418), 'os.path.join', 'os.path.join', (['base_dir', '"""multiple-sheets-example.xls"""'], {}), "(base_dir, 'multiple-sheets-example.xls')\n", (377, 418), False, 'import os\n')]
|
# -*- coding: utf-8 -*- #
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list all folder IDs associated with the active user."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.resource_manager import folders
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.resource_manager import flags
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class List(base.ListCommand):
"""List folders accessible by the active account.
List all folders to which the user has access under the specified
parent (either an Organization or a Folder). Exactly one of --folder
or --organization must be provided.
## EXAMPLES
The following command lists folders under org with ID `123456789`:
$ {command} --organization=123456789
The following command lists folders under folder with ID `123456789`:
$ {command} --folder=123456789
"""
@staticmethod
def Args(parser):
flags.FolderIdFlag('to list folders under').AddToParser(parser)
flags.OrganizationIdFlag('to list folders under').AddToParser(parser)
parser.display_info.AddFormat("""
table(
displayName:label=DISPLAY_NAME,
parent:label=PARENT_NAME,
name.segment():label=ID:align=right:sort=1
)
""")
def Run(self, args):
"""Run the list command."""
flags.CheckParentFlags(args)
return list_pager.YieldFromList(
folders.FoldersService(),
folders.FoldersMessages().CloudresourcemanagerFoldersListRequest(
parent=flags.GetParentFromFlags(args)),
limit=args.limit,
batch_size_attribute='pageSize',
batch_size=args.page_size,
field='folders')
|
[
"googlecloudsdk.command_lib.resource_manager.flags.FolderIdFlag",
"googlecloudsdk.command_lib.resource_manager.flags.GetParentFromFlags",
"googlecloudsdk.api_lib.resource_manager.folders.FoldersMessages",
"googlecloudsdk.calliope.base.ReleaseTracks",
"googlecloudsdk.command_lib.resource_manager.flags.OrganizationIdFlag",
"googlecloudsdk.api_lib.resource_manager.folders.FoldersService",
"googlecloudsdk.command_lib.resource_manager.flags.CheckParentFlags"
] |
[((1010, 1053), 'googlecloudsdk.calliope.base.ReleaseTracks', 'base.ReleaseTracks', (['base.ReleaseTrack.ALPHA'], {}), '(base.ReleaseTrack.ALPHA)\n', (1028, 1053), False, 'from googlecloudsdk.calliope import base\n'), ((1998, 2026), 'googlecloudsdk.command_lib.resource_manager.flags.CheckParentFlags', 'flags.CheckParentFlags', (['args'], {}), '(args)\n', (2020, 2026), False, 'from googlecloudsdk.command_lib.resource_manager import flags\n'), ((2072, 2096), 'googlecloudsdk.api_lib.resource_manager.folders.FoldersService', 'folders.FoldersService', ([], {}), '()\n', (2094, 2096), False, 'from googlecloudsdk.api_lib.resource_manager import folders\n'), ((1597, 1640), 'googlecloudsdk.command_lib.resource_manager.flags.FolderIdFlag', 'flags.FolderIdFlag', (['"""to list folders under"""'], {}), "('to list folders under')\n", (1615, 1640), False, 'from googlecloudsdk.command_lib.resource_manager import flags\n'), ((1665, 1714), 'googlecloudsdk.command_lib.resource_manager.flags.OrganizationIdFlag', 'flags.OrganizationIdFlag', (['"""to list folders under"""'], {}), "('to list folders under')\n", (1689, 1714), False, 'from googlecloudsdk.command_lib.resource_manager import flags\n'), ((2106, 2131), 'googlecloudsdk.api_lib.resource_manager.folders.FoldersMessages', 'folders.FoldersMessages', ([], {}), '()\n', (2129, 2131), False, 'from googlecloudsdk.api_lib.resource_manager import folders\n'), ((2191, 2221), 'googlecloudsdk.command_lib.resource_manager.flags.GetParentFromFlags', 'flags.GetParentFromFlags', (['args'], {}), '(args)\n', (2215, 2221), False, 'from googlecloudsdk.command_lib.resource_manager import flags\n')]
|
"""Tests for migration_utils."""
from keras.initializers import GlorotUniform as V2GlorotUniform
from keras.legacy_tf_layers import migration_utils
import tensorflow as tf
class DeterministicRandomTestToolTest(tf.test.TestCase):
def test_constant_mode_no_seed(self):
"""Test random tensor generation consistancy in constant mode.
Verify that the random tensor generated without using the seed is
consistant between graph and eager mode
"""
# Generate three random tensors to show how the stateful random number
# generation and glorot_uniform_initializer match between sessions and
# eager execution.
random_tool = migration_utils.DeterministicRandomTestTool()
with random_tool.scope():
graph = tf.Graph()
with graph.as_default(), tf.compat.v1.Session(graph=graph) as sess:
a = tf.compat.v1.random.uniform(shape=(3, 1))
# adding additional computation/ops to the graph and ensuring consistant
# random number generation
a = a * 3
b = tf.compat.v1.random.uniform(shape=(3, 3))
b = b * 3
c = tf.compat.v1.random.uniform(shape=(3, 3))
c = c * 3
d = tf.compat.v1.glorot_uniform_initializer()(
shape=(6, 6), dtype=tf.float32)
graph_a, graph_b, graph_c, graph_d = sess.run([a, b, c, d])
a = tf.compat.v2.random.uniform(shape=(3, 1))
a = a * 3
b = tf.compat.v2.random.uniform(shape=(3, 3))
b = b * 3
c = tf.compat.v2.random.uniform(shape=(3, 3))
c = c * 3
d = V2GlorotUniform()(shape=(6, 6), dtype=tf.float32)
# validate that the generated random tensors match
self.assertAllClose(graph_a, a)
self.assertAllClose(graph_b, b)
self.assertAllClose(graph_c, c)
self.assertAllClose(graph_d, d)
# In constant mode, because b and c were generated with the same seed within
# the same scope and have the same shape, they will have exactly the same
# values.
# validate that b and c are the same, also graph_b and graph_c
self.assertAllClose(b, c)
self.assertAllClose(graph_b, graph_c)
def test_constant_mode_seed_argument(self):
"""Test random tensor generation consistancy in constant mode.
Verify that the random tensor generated by setting the global seeed
in the args is consistant between graph and eager mode.
"""
random_tool = migration_utils.DeterministicRandomTestTool()
with random_tool.scope():
graph = tf.Graph()
with graph.as_default(), tf.compat.v1.Session(graph=graph) as sess:
# adding additional computation/ops to the graph and ensuring consistant
# random number generation
a = tf.compat.v1.random.uniform(shape=(3, 1), seed=1234)
a = a * 3
b = tf.compat.v1.random.uniform(shape=(3, 3), seed=1234)
b = b * 3
c = tf.compat.v1.glorot_uniform_initializer(seed=1234)(
shape=(6, 6), dtype=tf.float32)
graph_a, graph_b, graph_c = sess.run([a, b, c])
a = tf.compat.v2.random.uniform(shape=(3, 1), seed=1234)
a = a * 3
b = tf.compat.v2.random.uniform(shape=(3, 3), seed=1234)
b = b * 3
c = V2GlorotUniform(seed=1234)(shape=(6, 6), dtype=tf.float32)
# validate that the generated random tensors match
self.assertAllClose(graph_a, a)
self.assertAllClose(graph_b, b)
self.assertAllClose(graph_c, c)
def test_num_rand_ops(self):
"""Test random tensor generation consistancy in num_random_ops mode.
Verify that the random tensor generated without using the seed is
consistant between graph and eager mode.
Random tensor generated should be different based on random ops ordering
"""
random_tool = migration_utils.DeterministicRandomTestTool(
mode="num_random_ops")
with random_tool.scope():
graph = tf.Graph()
with graph.as_default(), tf.compat.v1.Session(graph=graph) as sess:
# adding additional computation/ops to the graph and ensuring consistant
# random number generation
a = tf.compat.v1.random.uniform(shape=(3, 1))
a = a * 3
b = tf.compat.v1.random.uniform(shape=(3, 3))
b = b * 3
c = tf.compat.v1.random.uniform(shape=(3, 3))
c = c * 3
d = tf.compat.v1.glorot_uniform_initializer()(
shape=(6, 6), dtype=tf.float32)
graph_a, graph_b, graph_c, graph_d = sess.run([a, b, c, d])
random_tool = migration_utils.DeterministicRandomTestTool(
mode="num_random_ops")
with random_tool.scope():
a = tf.compat.v2.random.uniform(shape=(3, 1))
a = a * 3
b = tf.compat.v2.random.uniform(shape=(3, 3))
b = b * 3
c = tf.compat.v2.random.uniform(shape=(3, 3))
c = c * 3
d = V2GlorotUniform()(shape=(6, 6), dtype=tf.float32)
# validate that the generated random tensors match
self.assertAllClose(graph_a, a)
self.assertAllClose(graph_b, b)
self.assertAllClose(graph_c, c)
self.assertAllClose(graph_d, d)
# validate that the tensors differ based on ops ordering
self.assertNotAllClose(b, c)
self.assertNotAllClose(graph_b, graph_c)
def test_num_rand_ops_program_order(self):
"""Test random tensor generation consistancy in num_random_ops mode.
validate that in this mode random number generation is sensitive to program
order, so the generated random tesnors should not match.
"""
random_tool = migration_utils.DeterministicRandomTestTool(
mode="num_random_ops")
with random_tool.scope():
a = tf.random.uniform(shape=(3, 1))
# adding additional computation/ops to the graph and ensuring consistant
# random number generation
a = a * 3
b = tf.random.uniform(shape=(3, 3))
b = b * 3
random_tool = migration_utils.DeterministicRandomTestTool(
mode="num_random_ops")
with random_tool.scope():
b_prime = tf.random.uniform(shape=(3, 3))
# adding additional computation/ops to the graph and ensuring consistant
# random number generation
b_prime = b_prime * 3
a_prime = tf.random.uniform(shape=(3, 1))
a_prime = a_prime * 3
# validate that the tensors are different
self.assertNotAllClose(a, a_prime)
self.assertNotAllClose(b, b_prime)
def test_num_rand_ops_operation_seed(self):
"""Test random tensor generation consistancy in num_random_ops mode.
validate if random number generation match across two different program
orders.
"""
random_tool = migration_utils.DeterministicRandomTestTool(
mode="num_random_ops")
with random_tool.scope():
# operation seed = 0
a = tf.random.uniform(shape=(3, 1))
a = a * 3
# operation seed = 1
b = tf.random.uniform(shape=(3, 3))
b = b * 3
random_tool = migration_utils.DeterministicRandomTestTool(
mode="num_random_ops")
with random_tool.scope():
random_tool.operation_seed = 1
b_prime = tf.random.uniform(shape=(3, 3))
b_prime = b_prime * 3
random_tool.operation_seed = 0
a_prime = tf.random.uniform(shape=(3, 1))
a_prime = a_prime * 3
self.assertAllClose(a, a_prime)
self.assertAllClose(b, b_prime)
def test_num_rand_ops_disallow_repeated_ops_seed(self):
"""Test random tensor generation consistancy in num_random_ops mode.
validate if DeterministicRandomTestTool disallows reusing already-used
operation seeds.
"""
random_tool = migration_utils.DeterministicRandomTestTool(
mode="num_random_ops")
with random_tool.scope():
random_tool.operation_seed = 1
b_prime = tf.random.uniform(shape=(3, 3))
b_prime = b_prime * 3
random_tool.operation_seed = 0
a_prime = tf.random.uniform(shape=(3, 1))
a_prime = a_prime * 3
error_string = "An exception should have been raised before this"
error_raised = "An exception should have been raised before this"
try:
c = tf.random.uniform(shape=(3, 1))
raise RuntimeError(error_string)
except ValueError as err:
err_raised = err
self.assertNotEqual(err_raised, error_string)
if __name__ == "__main__":
tf.test.main()
|
[
"tensorflow.test.main",
"keras.legacy_tf_layers.migration_utils.DeterministicRandomTestTool",
"tensorflow.random.uniform",
"tensorflow.compat.v1.glorot_uniform_initializer",
"tensorflow.compat.v1.random.uniform",
"tensorflow.compat.v2.random.uniform",
"tensorflow.compat.v1.Session",
"tensorflow.Graph",
"keras.initializers.GlorotUniform"
] |
[((8185, 8199), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (8197, 8199), True, 'import tensorflow as tf\n'), ((655, 700), 'keras.legacy_tf_layers.migration_utils.DeterministicRandomTestTool', 'migration_utils.DeterministicRandomTestTool', ([], {}), '()\n', (698, 700), False, 'from keras.legacy_tf_layers import migration_utils\n'), ((2378, 2423), 'keras.legacy_tf_layers.migration_utils.DeterministicRandomTestTool', 'migration_utils.DeterministicRandomTestTool', ([], {}), '()\n', (2421, 2423), False, 'from keras.legacy_tf_layers import migration_utils\n'), ((3714, 3780), 'keras.legacy_tf_layers.migration_utils.DeterministicRandomTestTool', 'migration_utils.DeterministicRandomTestTool', ([], {'mode': '"""num_random_ops"""'}), "(mode='num_random_ops')\n", (3757, 3780), False, 'from keras.legacy_tf_layers import migration_utils\n'), ((4437, 4503), 'keras.legacy_tf_layers.migration_utils.DeterministicRandomTestTool', 'migration_utils.DeterministicRandomTestTool', ([], {'mode': '"""num_random_ops"""'}), "(mode='num_random_ops')\n", (4480, 4503), False, 'from keras.legacy_tf_layers import migration_utils\n'), ((5432, 5498), 'keras.legacy_tf_layers.migration_utils.DeterministicRandomTestTool', 'migration_utils.DeterministicRandomTestTool', ([], {'mode': '"""num_random_ops"""'}), "(mode='num_random_ops')\n", (5475, 5498), False, 'from keras.legacy_tf_layers import migration_utils\n'), ((5785, 5851), 'keras.legacy_tf_layers.migration_utils.DeterministicRandomTestTool', 'migration_utils.DeterministicRandomTestTool', ([], {'mode': '"""num_random_ops"""'}), "(mode='num_random_ops')\n", (5828, 5851), False, 'from keras.legacy_tf_layers import migration_utils\n'), ((6515, 6581), 'keras.legacy_tf_layers.migration_utils.DeterministicRandomTestTool', 'migration_utils.DeterministicRandomTestTool', ([], {'mode': '"""num_random_ops"""'}), "(mode='num_random_ops')\n", (6558, 6581), False, 'from keras.legacy_tf_layers import migration_utils\n'), ((6810, 6876), 
'keras.legacy_tf_layers.migration_utils.DeterministicRandomTestTool', 'migration_utils.DeterministicRandomTestTool', ([], {'mode': '"""num_random_ops"""'}), "(mode='num_random_ops')\n", (6853, 6876), False, 'from keras.legacy_tf_layers import migration_utils\n'), ((7471, 7537), 'keras.legacy_tf_layers.migration_utils.DeterministicRandomTestTool', 'migration_utils.DeterministicRandomTestTool', ([], {'mode': '"""num_random_ops"""'}), "(mode='num_random_ops')\n", (7514, 7537), False, 'from keras.legacy_tf_layers import migration_utils\n'), ((745, 755), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (753, 755), True, 'import tensorflow as tf\n'), ((1340, 1381), 'tensorflow.compat.v2.random.uniform', 'tf.compat.v2.random.uniform', ([], {'shape': '(3, 1)'}), '(shape=(3, 1))\n', (1367, 1381), True, 'import tensorflow as tf\n'), ((1408, 1449), 'tensorflow.compat.v2.random.uniform', 'tf.compat.v2.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (1435, 1449), True, 'import tensorflow as tf\n'), ((1476, 1517), 'tensorflow.compat.v2.random.uniform', 'tf.compat.v2.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (1503, 1517), True, 'import tensorflow as tf\n'), ((2468, 2478), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (2476, 2478), True, 'import tensorflow as tf\n'), ((3009, 3061), 'tensorflow.compat.v2.random.uniform', 'tf.compat.v2.random.uniform', ([], {'shape': '(3, 1)', 'seed': '(1234)'}), '(shape=(3, 1), seed=1234)\n', (3036, 3061), True, 'import tensorflow as tf\n'), ((3088, 3140), 'tensorflow.compat.v2.random.uniform', 'tf.compat.v2.random.uniform', ([], {'shape': '(3, 3)', 'seed': '(1234)'}), '(shape=(3, 3), seed=1234)\n', (3115, 3140), True, 'import tensorflow as tf\n'), ((3834, 3844), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (3842, 3844), True, 'import tensorflow as tf\n'), ((4553, 4594), 'tensorflow.compat.v2.random.uniform', 'tf.compat.v2.random.uniform', ([], {'shape': '(3, 1)'}), '(shape=(3, 1))\n', (4580, 4594), 
True, 'import tensorflow as tf\n'), ((4621, 4662), 'tensorflow.compat.v2.random.uniform', 'tf.compat.v2.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (4648, 4662), True, 'import tensorflow as tf\n'), ((4689, 4730), 'tensorflow.compat.v2.random.uniform', 'tf.compat.v2.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (4716, 4730), True, 'import tensorflow as tf\n'), ((5548, 5579), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 1)'}), '(shape=(3, 1))\n', (5565, 5579), True, 'import tensorflow as tf\n'), ((5718, 5749), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (5735, 5749), True, 'import tensorflow as tf\n'), ((5907, 5938), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (5924, 5938), True, 'import tensorflow as tf\n'), ((6095, 6126), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 1)'}), '(shape=(3, 1))\n', (6112, 6126), True, 'import tensorflow as tf\n'), ((6658, 6689), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 1)'}), '(shape=(3, 1))\n', (6675, 6689), True, 'import tensorflow as tf\n'), ((6743, 6774), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (6760, 6774), True, 'import tensorflow as tf\n'), ((6969, 7000), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (6986, 7000), True, 'import tensorflow as tf\n'), ((7082, 7113), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 1)'}), '(shape=(3, 1))\n', (7099, 7113), True, 'import tensorflow as tf\n'), ((7630, 7661), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (7647, 7661), True, 'import tensorflow as tf\n'), ((7743, 7774), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 1)'}), '(shape=(3, 1))\n', (7760, 7774), True, 'import 
tensorflow as tf\n'), ((787, 820), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {'graph': 'graph'}), '(graph=graph)\n', (807, 820), True, 'import tensorflow as tf\n'), ((842, 883), 'tensorflow.compat.v1.random.uniform', 'tf.compat.v1.random.uniform', ([], {'shape': '(3, 1)'}), '(shape=(3, 1))\n', (869, 883), True, 'import tensorflow as tf\n'), ((1030, 1071), 'tensorflow.compat.v1.random.uniform', 'tf.compat.v1.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (1057, 1071), True, 'import tensorflow as tf\n'), ((1102, 1143), 'tensorflow.compat.v1.random.uniform', 'tf.compat.v1.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (1129, 1143), True, 'import tensorflow as tf\n'), ((1544, 1561), 'keras.initializers.GlorotUniform', 'V2GlorotUniform', ([], {}), '()\n', (1559, 1561), True, 'from keras.initializers import GlorotUniform as V2GlorotUniform\n'), ((2510, 2543), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {'graph': 'graph'}), '(graph=graph)\n', (2530, 2543), True, 'import tensorflow as tf\n'), ((2681, 2733), 'tensorflow.compat.v1.random.uniform', 'tf.compat.v1.random.uniform', ([], {'shape': '(3, 1)', 'seed': '(1234)'}), '(shape=(3, 1), seed=1234)\n', (2708, 2733), True, 'import tensorflow as tf\n'), ((2764, 2816), 'tensorflow.compat.v1.random.uniform', 'tf.compat.v1.random.uniform', ([], {'shape': '(3, 3)', 'seed': '(1234)'}), '(shape=(3, 3), seed=1234)\n', (2791, 2816), True, 'import tensorflow as tf\n'), ((3167, 3193), 'keras.initializers.GlorotUniform', 'V2GlorotUniform', ([], {'seed': '(1234)'}), '(seed=1234)\n', (3182, 3193), True, 'from keras.initializers import GlorotUniform as V2GlorotUniform\n'), ((3876, 3909), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {'graph': 'graph'}), '(graph=graph)\n', (3896, 3909), True, 'import tensorflow as tf\n'), ((4047, 4088), 'tensorflow.compat.v1.random.uniform', 'tf.compat.v1.random.uniform', ([], {'shape': '(3, 1)'}), '(shape=(3, 1))\n', (4074, 
4088), True, 'import tensorflow as tf\n'), ((4119, 4160), 'tensorflow.compat.v1.random.uniform', 'tf.compat.v1.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (4146, 4160), True, 'import tensorflow as tf\n'), ((4191, 4232), 'tensorflow.compat.v1.random.uniform', 'tf.compat.v1.random.uniform', ([], {'shape': '(3, 3)'}), '(shape=(3, 3))\n', (4218, 4232), True, 'import tensorflow as tf\n'), ((4757, 4774), 'keras.initializers.GlorotUniform', 'V2GlorotUniform', ([], {}), '()\n', (4772, 4774), True, 'from keras.initializers import GlorotUniform as V2GlorotUniform\n'), ((7970, 8001), 'tensorflow.random.uniform', 'tf.random.uniform', ([], {'shape': '(3, 1)'}), '(shape=(3, 1))\n', (7987, 8001), True, 'import tensorflow as tf\n'), ((1174, 1215), 'tensorflow.compat.v1.glorot_uniform_initializer', 'tf.compat.v1.glorot_uniform_initializer', ([], {}), '()\n', (1213, 1215), True, 'import tensorflow as tf\n'), ((2847, 2897), 'tensorflow.compat.v1.glorot_uniform_initializer', 'tf.compat.v1.glorot_uniform_initializer', ([], {'seed': '(1234)'}), '(seed=1234)\n', (2886, 2897), True, 'import tensorflow as tf\n'), ((4263, 4304), 'tensorflow.compat.v1.glorot_uniform_initializer', 'tf.compat.v1.glorot_uniform_initializer', ([], {}), '()\n', (4302, 4304), True, 'import tensorflow as tf\n')]
|
# ***************************************************
# LED test for color mixing
#
# ProtoStax Air Quality Monitor.
# using Raspberry Pi A+, Micro Servo SG92R, RGB LED and ProtoStax Enclosure for Raspberry Pi
# --> https://www.protostax.com/products/protostax-for-raspberry-pi-a
# You can also use
# --> https://www.protostax.com/products/protostax-for-raspberry-pi-b
# --> https://www.protostax.com/products/protostax-for-raspberry-pi-zero
#
# Use this program to test the color mixing for the RGB LED
#
# In the AQI monitor, we use the following colors:
# green, yellow, orange, red, purple and maroon
#
# Using this program, you can figure out the R,G,B values for the perfect
# color of your choice, and plug those values in aqi_monitor.py
# should you desire to change the default values there
#
# Written by <NAME> for ProtoStax.
#
#
# BSD license. All text above must be included in any redistribution
import RPi.GPIO as GPIO
import time
# Configure the Pi to use pin names (i.e. BOARD) and allocate I/O
# We are utilizing the BOARD pin numbering, which means
# connect the RGB LED pins to physical pin numbers 11, 13 and 15 on your Raspberry Pi
# (Or change the RED_PIN, GREEN_PIN and BLUE_PIN values below to correspond to the physical pin number you
# are using)
GPIO.setmode(GPIO.BOARD)
#closing the warnings when you are compiling the code
GPIO.setwarnings(False)
#defining the pins
RED_PIN = 11
GREEN_PIN = 13
BLUE_PIN = 15
#defining the pins as output
GPIO.setup(RED_PIN, GPIO.OUT)
GPIO.setup(GREEN_PIN, GPIO.OUT)
GPIO.setup(BLUE_PIN, GPIO.OUT)
#choosing a frequency for pwm
Freq = 2000
#defining the pins that are going to be used with PWM
pwm_red = GPIO.PWM(RED_PIN, Freq)
pwm_green = GPIO.PWM(GREEN_PIN, Freq)
pwm_blue = GPIO.PWM(BLUE_PIN, Freq)
def setRGBled(red, green, blue):
pwm_red.ChangeDutyCycle(100.0 - (100.0*red)/255.0)
pwm_green.ChangeDutyCycle(100.0 - (100.0*green)/255.0)
pwm_blue.ChangeDutyCycle(100.0 - (100.0*blue)/255.0)
# We are using a Common Anode (CA) RGB LED, so the logic is reversed
# setting a pin HIGH turns OFF the pin, and seting it LOW turns it ON
# Starting with a duty cycle of 100 therefore turns OFF the corresponding
# color LED
pwm_red.start(100)
pwm_green.start(100)
pwm_blue.start(100)
try:
#we are starting with the loop
while True:
values = input("Input comma seprated RGB numbers : ")
setRGBled(values[0], values[1], values[2])
time.sleep(0.5)
except KeyboardInterrupt:
print("CTRL-C: Terminating program.")
finally:
print("Cleaning up GPIO...")
pwm_red.stop()
pwm_green.stop()
pwm_blue.stop()
GPIO.cleanup()
|
[
"RPi.GPIO.setmode",
"RPi.GPIO.cleanup",
"RPi.GPIO.setup",
"time.sleep",
"RPi.GPIO.PWM",
"RPi.GPIO.setwarnings"
] |
[((1308, 1332), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BOARD'], {}), '(GPIO.BOARD)\n', (1320, 1332), True, 'import RPi.GPIO as GPIO\n'), ((1389, 1412), 'RPi.GPIO.setwarnings', 'GPIO.setwarnings', (['(False)'], {}), '(False)\n', (1405, 1412), True, 'import RPi.GPIO as GPIO\n'), ((1505, 1534), 'RPi.GPIO.setup', 'GPIO.setup', (['RED_PIN', 'GPIO.OUT'], {}), '(RED_PIN, GPIO.OUT)\n', (1515, 1534), True, 'import RPi.GPIO as GPIO\n'), ((1536, 1567), 'RPi.GPIO.setup', 'GPIO.setup', (['GREEN_PIN', 'GPIO.OUT'], {}), '(GREEN_PIN, GPIO.OUT)\n', (1546, 1567), True, 'import RPi.GPIO as GPIO\n'), ((1568, 1598), 'RPi.GPIO.setup', 'GPIO.setup', (['BLUE_PIN', 'GPIO.OUT'], {}), '(BLUE_PIN, GPIO.OUT)\n', (1578, 1598), True, 'import RPi.GPIO as GPIO\n'), ((1707, 1730), 'RPi.GPIO.PWM', 'GPIO.PWM', (['RED_PIN', 'Freq'], {}), '(RED_PIN, Freq)\n', (1715, 1730), True, 'import RPi.GPIO as GPIO\n'), ((1745, 1770), 'RPi.GPIO.PWM', 'GPIO.PWM', (['GREEN_PIN', 'Freq'], {}), '(GREEN_PIN, Freq)\n', (1753, 1770), True, 'import RPi.GPIO as GPIO\n'), ((1782, 1806), 'RPi.GPIO.PWM', 'GPIO.PWM', (['BLUE_PIN', 'Freq'], {}), '(BLUE_PIN, Freq)\n', (1790, 1806), True, 'import RPi.GPIO as GPIO\n'), ((2671, 2685), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ([], {}), '()\n', (2683, 2685), True, 'import RPi.GPIO as GPIO\n'), ((2476, 2491), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2486, 2491), False, 'import time\n')]
|
from model.user import User
def test_modify_user_fname(app):
if app.user.count() == 0:
app.user.create(User(lname="Testman"))
app.user.modify_first_user(User(fname="New firstname"))
def test_modify_user_lastname(app):
if app.user.count() == 0:
app.user.create(User(lname="Testman"))
app.user.modify_first_user(User (lname ="New lastname"))
def test_modify_user_address(app):
if app.user.count() == 0:
app.user.create(User(lname="Testman"))
app.user.modify_first_user(User (address="New address"))
|
[
"model.user.User"
] |
[((170, 197), 'model.user.User', 'User', ([], {'fname': '"""New firstname"""'}), "(fname='New firstname')\n", (174, 197), False, 'from model.user import User\n'), ((345, 371), 'model.user.User', 'User', ([], {'lname': '"""New lastname"""'}), "(lname='New lastname')\n", (349, 371), False, 'from model.user import User\n'), ((520, 547), 'model.user.User', 'User', ([], {'address': '"""New address"""'}), "(address='New address')\n", (524, 547), False, 'from model.user import User\n'), ((116, 137), 'model.user.User', 'User', ([], {'lname': '"""Testman"""'}), "(lname='Testman')\n", (120, 137), False, 'from model.user import User\n'), ((291, 312), 'model.user.User', 'User', ([], {'lname': '"""Testman"""'}), "(lname='Testman')\n", (295, 312), False, 'from model.user import User\n'), ((466, 487), 'model.user.User', 'User', ([], {'lname': '"""Testman"""'}), "(lname='Testman')\n", (470, 487), False, 'from model.user import User\n')]
|
import urllib.parse
from typing import TYPE_CHECKING
import tsutils
if TYPE_CHECKING:
from dbcog.models.monster_model import MonsterModel
INFO_PDX_TEMPLATE = 'http://www.puzzledragonx.com/en/monster.asp?n={}'
YT_SEARCH_TEMPLATE = 'https://www.youtube.com/results?search_query={}'
SKYOZORA_TEMPLATE = 'http://pad.skyozora.com/pets/{}'
ILMINA_TEMPLATE = 'https://ilmina.com/#/CARD/{}'
def puzzledragonx(m: "MonsterModel"):
return INFO_PDX_TEMPLATE.format(tsutils.get_pdx_id(m))
def youtube_search(m: "MonsterModel"):
return YT_SEARCH_TEMPLATE.format(urllib.parse.quote(m.name_ja))
def skyozora(m: "MonsterModel"):
return SKYOZORA_TEMPLATE.format(m.monster_no_jp)
def ilmina(m: "MonsterModel"):
return ILMINA_TEMPLATE.format(m.monster_no_jp)
def ilmina_skill(m: "MonsterModel"):
return "https://ilmina.com/#/SKILL/{}".format(m.active_skill.active_skill_id) if m.active_skill else None
|
[
"tsutils.get_pdx_id"
] |
[((466, 487), 'tsutils.get_pdx_id', 'tsutils.get_pdx_id', (['m'], {}), '(m)\n', (484, 487), False, 'import tsutils\n')]
|
from comet_ml import Experiment
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
from torch.optim.lr_scheduler import ExponentialLR, CosineAnnealingLR
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from enchanter import addons
from enchanter import tasks
from enchanter.addons import layers
from enchanter.engine.modules import fix_seed, get_dataset
fix_seed(0)
experiment = Experiment()
model = layers.MLP([4, 512, 128, 3], addons.mish)
optimizer = optim.Adam(model.parameters())
runner = tasks.ClassificationRunner(
model,
optimizer=optimizer,
criterion=nn.CrossEntropyLoss(),
experiment=experiment,
scheduler=[
CosineAnnealingLR(optimizer, T_max=10, eta_min=1e-10),
ExponentialLR(optimizer, gamma=0.9),
]
)
x, y = load_iris(return_X_y=True)
x = x.astype("float32")
y = y.astype("int64")
x_train, x_test, y_train, y_test = train_test_split(x, y, random_state=0)
x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, random_state=0)
train_ds = get_dataset(x_train, y_train)
val_ds = get_dataset(x_val, y_val)
test_ds = get_dataset(x_test, y_test)
train_loader = DataLoader(train_ds, batch_size=8)
val_loader = DataLoader(val_ds, batch_size=8)
test_loader = DataLoader(test_ds, batch_size=8)
runner.add_loader("train", train_loader)
runner.add_loader("val", val_loader)
runner.add_loader("test", test_loader)
runner.train_config(
epochs=50
)
runner.run()
|
[
"sklearn.datasets.load_iris",
"enchanter.addons.layers.MLP",
"torch.utils.data.DataLoader",
"sklearn.model_selection.train_test_split",
"torch.nn.CrossEntropyLoss",
"enchanter.engine.modules.get_dataset",
"torch.optim.lr_scheduler.CosineAnnealingLR",
"comet_ml.Experiment",
"torch.optim.lr_scheduler.ExponentialLR",
"enchanter.engine.modules.fix_seed"
] |
[((443, 454), 'enchanter.engine.modules.fix_seed', 'fix_seed', (['(0)'], {}), '(0)\n', (451, 454), False, 'from enchanter.engine.modules import fix_seed, get_dataset\n'), ((470, 482), 'comet_ml.Experiment', 'Experiment', ([], {}), '()\n', (480, 482), False, 'from comet_ml import Experiment\n'), ((491, 532), 'enchanter.addons.layers.MLP', 'layers.MLP', (['[4, 512, 128, 3]', 'addons.mish'], {}), '([4, 512, 128, 3], addons.mish)\n', (501, 532), False, 'from enchanter.addons import layers\n'), ((854, 880), 'sklearn.datasets.load_iris', 'load_iris', ([], {'return_X_y': '(True)'}), '(return_X_y=True)\n', (863, 880), False, 'from sklearn.datasets import load_iris\n'), ((964, 1002), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'random_state': '(0)'}), '(x, y, random_state=0)\n', (980, 1002), False, 'from sklearn.model_selection import train_test_split\n'), ((1036, 1086), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x_train', 'y_train'], {'random_state': '(0)'}), '(x_train, y_train, random_state=0)\n', (1052, 1086), False, 'from sklearn.model_selection import train_test_split\n'), ((1099, 1128), 'enchanter.engine.modules.get_dataset', 'get_dataset', (['x_train', 'y_train'], {}), '(x_train, y_train)\n', (1110, 1128), False, 'from enchanter.engine.modules import fix_seed, get_dataset\n'), ((1138, 1163), 'enchanter.engine.modules.get_dataset', 'get_dataset', (['x_val', 'y_val'], {}), '(x_val, y_val)\n', (1149, 1163), False, 'from enchanter.engine.modules import fix_seed, get_dataset\n'), ((1174, 1201), 'enchanter.engine.modules.get_dataset', 'get_dataset', (['x_test', 'y_test'], {}), '(x_test, y_test)\n', (1185, 1201), False, 'from enchanter.engine.modules import fix_seed, get_dataset\n'), ((1219, 1253), 'torch.utils.data.DataLoader', 'DataLoader', (['train_ds'], {'batch_size': '(8)'}), '(train_ds, batch_size=8)\n', (1229, 1253), False, 'from torch.utils.data import DataLoader\n'), ((1267, 1299), 
'torch.utils.data.DataLoader', 'DataLoader', (['val_ds'], {'batch_size': '(8)'}), '(val_ds, batch_size=8)\n', (1277, 1299), False, 'from torch.utils.data import DataLoader\n'), ((1314, 1347), 'torch.utils.data.DataLoader', 'DataLoader', (['test_ds'], {'batch_size': '(8)'}), '(test_ds, batch_size=8)\n', (1324, 1347), False, 'from torch.utils.data import DataLoader\n'), ((663, 684), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (682, 684), True, 'import torch.nn as nn\n'), ((737, 790), 'torch.optim.lr_scheduler.CosineAnnealingLR', 'CosineAnnealingLR', (['optimizer'], {'T_max': '(10)', 'eta_min': '(1e-10)'}), '(optimizer, T_max=10, eta_min=1e-10)\n', (754, 790), False, 'from torch.optim.lr_scheduler import ExponentialLR, CosineAnnealingLR\n'), ((800, 835), 'torch.optim.lr_scheduler.ExponentialLR', 'ExponentialLR', (['optimizer'], {'gamma': '(0.9)'}), '(optimizer, gamma=0.9)\n', (813, 835), False, 'from torch.optim.lr_scheduler import ExponentialLR, CosineAnnealingLR\n')]
|
from django.urls import path
from vawc import views
app_name = 'vawc'
urlpatterns = [
# path('', katarungan_views.katarunganHome, name='katarunganHome'),
# path('dashboard/', katarungan_views.katarunganDashboard, name='katarunganDashboard'),
# path('add/', katarungan_views.katarunganAddCase, name='katarunganAddCase'),
path('',views.VawcListView.as_view(), name='list'),
path('<int:pk>/', views.VawcDetailView.as_view(), name='detail'),
path('create/', views.VawcCreateView.as_view(), name='create'),
path('update/<int:pk>/', views.VawcUpdateView.as_view(), name='update'),
path('delete/<int:pk>/', views.VawcDeleteView.as_view(), name='delete'),
]
|
[
"vawc.views.VawcDeleteView.as_view",
"vawc.views.VawcDetailView.as_view",
"vawc.views.VawcCreateView.as_view",
"vawc.views.VawcUpdateView.as_view",
"vawc.views.VawcListView.as_view"
] |
[((338, 366), 'vawc.views.VawcListView.as_view', 'views.VawcListView.as_view', ([], {}), '()\n', (364, 366), False, 'from vawc import views\n'), ((401, 431), 'vawc.views.VawcDetailView.as_view', 'views.VawcDetailView.as_view', ([], {}), '()\n', (429, 431), False, 'from vawc import views\n'), ((466, 496), 'vawc.views.VawcCreateView.as_view', 'views.VawcCreateView.as_view', ([], {}), '()\n', (494, 496), False, 'from vawc import views\n'), ((540, 570), 'vawc.views.VawcUpdateView.as_view', 'views.VawcUpdateView.as_view', ([], {}), '()\n', (568, 570), False, 'from vawc import views\n'), ((614, 644), 'vawc.views.VawcDeleteView.as_view', 'views.VawcDeleteView.as_view', ([], {}), '()\n', (642, 644), False, 'from vawc import views\n')]
|
#!/usr/bin/env python3
import argparse
parser = argparse.ArgumentParser(description='RDFize RefEx tissue specificity data')
parser.add_argument('input_file', help='RefEx tissue specificity data file')
args = parser.parse_args()
fp = open(args.input_file, 'r')
checked_header = False
prefix = ''
for line in fp:
fields = line.strip().split('\t')
if not checked_header:
checked_header = True
print('@prefix refexo: <http://purl.jp/bio/01/refexo#>')
if fields[0] == 'Affymetrix_probesetID':
prefix = 'affy'
print('@prefix affy: <http://identifiers.org/affy.probeset/>')
elif fields[0] == 'NCBI_RefSeqID':
prefix = 'refseq'
print('@prefix refseq: <http://identifiers.org/refseq/>')
print()
continue
for i in range(2, len(fields)):
name = fields[0]
val = fields[i]
uri = f'{prefix}:{name}'
if prefix == 'affy' and '/' in name:
uri = f'<http://identifiers.org/affy.probeset/{name}>';
if val == '1':
print(f'{uri} refexo:isPositivelySpecificTo refexo:v{i-1:02}_40 .')
elif val == '-1':
print(f'{uri} refexo:isNegativelySpecificTo refexo:v{i-1:02}_40 .')
|
[
"argparse.ArgumentParser"
] |
[((49, 124), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""RDFize RefEx tissue specificity data"""'}), "(description='RDFize RefEx tissue specificity data')\n", (72, 124), False, 'import argparse\n')]
|
from typing import Optional, Tuple
import numpy as np
from scipy import integrate
from scipy.optimize import minimize
from paramak import ExtrudeMixedShape
from paramak.utils import add_thickness
class ToroidalFieldCoilPrincetonD(ExtrudeMixedShape):
"""Toroidal field coil based on Princeton-D curve
Args:
R1: smallest radius (cm)
R2: largest radius (cm)
thickness: magnet thickness (cm)
distance: extrusion distance (cm)
number_of_coils: the number of tf coils. This changes by the
azimuth_placement_angle dividing up 360 degrees by the number of
coils.
vertical_displacement: vertical displacement (cm). Defaults to 0.0.
with_inner_leg: Include the inner tf leg. Defaults to True.
"""
def __init__(
self,
R1: float,
R2: float,
thickness: float,
distance: float,
number_of_coils: int,
vertical_displacement: float = 0.0,
with_inner_leg: bool = True,
color: Tuple[float, float, float, Optional[float]] = (0.0, 0.0, 1.0),
**kwargs
) -> None:
super().__init__(distance=distance, color=color, **kwargs)
self.R1 = R1
self.R2 = R2
self.thickness = thickness
self.distance = distance
self.number_of_coils = number_of_coils
self.vertical_displacement = vertical_displacement
self.with_inner_leg = with_inner_leg
@property
def inner_points(self):
self.points
return self._inner_points
@inner_points.setter
def inner_points(self, value):
self._inner_points = value
@property
def outer_points(self):
self.points
return self._outer_points
@outer_points.setter
def outer_points(self, value):
self._outer_points = value
@property
def azimuth_placement_angle(self):
self.find_azimuth_placement_angle()
return self._azimuth_placement_angle
@azimuth_placement_angle.setter
def azimuth_placement_angle(self, value):
self._azimuth_placement_angle = value
def _compute_inner_points(self, R1, R2):
"""Computes the inner curve points
Args:
R1 (float): smallest radius (cm)
R2 (float): largest radius (cm)
Returns:
(list, list, list): R, Z and derivative lists for outer curve
points
"""
def error(z_0, R0, R2):
segment = get_segment(R0, R2, z_0)
return abs(segment[1][-1])
def get_segment(a, b, z_0):
a_R = np.linspace(a, b, num=70, endpoint=True)
asol = integrate.odeint(solvr, [z_0, 0], a_R)
return a_R, asol[:, 0], asol[:, 1]
def solvr(Y, R):
return [Y[1], -1 / (k * R) * (1 + Y[1] ** 2) ** (3 / 2)]
R0 = (R1 * R2) ** 0.5
k = 0.5 * np.log(R2 / R1)
# computing of z_0
# z_0 is computed by ensuring outer segment end is zero
z_0 = 10 # initial guess for z_0
res = minimize(error, z_0, args=(R0, R2))
z_0 = res.x
# compute inner and outer segments
segment1 = get_segment(R0, R1, z_0)
segment2 = get_segment(R0, R2, z_0)
r_values = np.concatenate(
[
np.flip(segment1[0]),
segment2[0][1:],
np.flip(segment2[0])[1:],
segment1[0][1:],
]
)
z_values = np.concatenate(
[
np.flip(segment1[1]),
segment2[1][1:],
-np.flip(segment2[1])[1:],
-segment1[1][1:],
]
)
return r_values, z_values
def find_points(self):
"""Finds the XZ points joined by connections that describe the 2D
profile of the toroidal field coil shape."""
# compute inner points
r_inner, z_inner = self._compute_inner_points(self.R1 + self.thickness, self.R2)
# compute outer points
dz_dr = np.diff(z_inner) / np.diff(r_inner)
dz_dr[0] = float("-inf")
dz_dr = np.append(dz_dr, float("inf"))
r_outer, z_outer = add_thickness(r_inner, z_inner, self.thickness, dy_dx=dz_dr)
r_outer, z_outer = np.flip(r_outer), np.flip(z_outer)
# add vertical displacement
z_outer += self.vertical_displacement
z_inner += self.vertical_displacement
# extract helping points for inner leg
inner_leg_connection_points = [
(r_inner[0], z_inner[0]),
(r_inner[-1], z_inner[-1]),
(r_outer[0], z_outer[0]),
(r_outer[-1], z_outer[-1]),
]
self.inner_leg_connection_points = inner_leg_connection_points
# add the leg to the points
if self.with_inner_leg:
r_inner = np.append(r_inner, r_inner[0])
z_inner = np.append(z_inner, z_inner[0])
r_outer = np.append(r_outer, r_outer[0])
z_outer = np.append(z_outer, z_outer[0])
# add connections
inner_points = [[r, z, "spline"] for r, z in zip(r_inner, z_inner)]
outer_points = [[r, z, "spline"] for r, z in zip(r_outer, z_outer)]
if self.with_inner_leg:
outer_points[-2][2] = "straight"
inner_points[-2][2] = "straight"
inner_points[-1][2] = "straight"
outer_points[-1][2] = "straight"
points = inner_points + outer_points
self.outer_points = np.vstack((r_outer, z_outer)).T
self.inner_points = np.vstack((r_inner, z_inner)).T
self.points = points
def find_azimuth_placement_angle(self):
"""Calculates the azimuth placement angles based on the number of tf
coils"""
angles = list(np.linspace(0, 360, self.number_of_coils, endpoint=False))
self.azimuth_placement_angle = angles
|
[
"scipy.optimize.minimize",
"numpy.flip",
"numpy.log",
"scipy.integrate.odeint",
"numpy.append",
"paramak.utils.add_thickness",
"numpy.diff",
"numpy.linspace",
"numpy.vstack"
] |
[((3059, 3094), 'scipy.optimize.minimize', 'minimize', (['error', 'z_0'], {'args': '(R0, R2)'}), '(error, z_0, args=(R0, R2))\n', (3067, 3094), False, 'from scipy.optimize import minimize\n'), ((4188, 4248), 'paramak.utils.add_thickness', 'add_thickness', (['r_inner', 'z_inner', 'self.thickness'], {'dy_dx': 'dz_dr'}), '(r_inner, z_inner, self.thickness, dy_dx=dz_dr)\n', (4201, 4248), False, 'from paramak.utils import add_thickness\n'), ((2605, 2645), 'numpy.linspace', 'np.linspace', (['a', 'b'], {'num': '(70)', 'endpoint': '(True)'}), '(a, b, num=70, endpoint=True)\n', (2616, 2645), True, 'import numpy as np\n'), ((2665, 2703), 'scipy.integrate.odeint', 'integrate.odeint', (['solvr', '[z_0, 0]', 'a_R'], {}), '(solvr, [z_0, 0], a_R)\n', (2681, 2703), False, 'from scipy import integrate\n'), ((2895, 2910), 'numpy.log', 'np.log', (['(R2 / R1)'], {}), '(R2 / R1)\n', (2901, 2910), True, 'import numpy as np\n'), ((4045, 4061), 'numpy.diff', 'np.diff', (['z_inner'], {}), '(z_inner)\n', (4052, 4061), True, 'import numpy as np\n'), ((4064, 4080), 'numpy.diff', 'np.diff', (['r_inner'], {}), '(r_inner)\n', (4071, 4080), True, 'import numpy as np\n'), ((4276, 4292), 'numpy.flip', 'np.flip', (['r_outer'], {}), '(r_outer)\n', (4283, 4292), True, 'import numpy as np\n'), ((4294, 4310), 'numpy.flip', 'np.flip', (['z_outer'], {}), '(z_outer)\n', (4301, 4310), True, 'import numpy as np\n'), ((4856, 4886), 'numpy.append', 'np.append', (['r_inner', 'r_inner[0]'], {}), '(r_inner, r_inner[0])\n', (4865, 4886), True, 'import numpy as np\n'), ((4909, 4939), 'numpy.append', 'np.append', (['z_inner', 'z_inner[0]'], {}), '(z_inner, z_inner[0])\n', (4918, 4939), True, 'import numpy as np\n'), ((4963, 4993), 'numpy.append', 'np.append', (['r_outer', 'r_outer[0]'], {}), '(r_outer, r_outer[0])\n', (4972, 4993), True, 'import numpy as np\n'), ((5016, 5046), 'numpy.append', 'np.append', (['z_outer', 'z_outer[0]'], {}), '(z_outer, z_outer[0])\n', (5025, 5046), True, 'import numpy as np\n'), ((5504, 
5533), 'numpy.vstack', 'np.vstack', (['(r_outer, z_outer)'], {}), '((r_outer, z_outer))\n', (5513, 5533), True, 'import numpy as np\n'), ((5564, 5593), 'numpy.vstack', 'np.vstack', (['(r_inner, z_inner)'], {}), '((r_inner, z_inner))\n', (5573, 5593), True, 'import numpy as np\n'), ((5787, 5844), 'numpy.linspace', 'np.linspace', (['(0)', '(360)', 'self.number_of_coils'], {'endpoint': '(False)'}), '(0, 360, self.number_of_coils, endpoint=False)\n', (5798, 5844), True, 'import numpy as np\n'), ((3313, 3333), 'numpy.flip', 'np.flip', (['segment1[0]'], {}), '(segment1[0])\n', (3320, 3333), True, 'import numpy as np\n'), ((3532, 3552), 'numpy.flip', 'np.flip', (['segment1[1]'], {}), '(segment1[1])\n', (3539, 3552), True, 'import numpy as np\n'), ((3384, 3404), 'numpy.flip', 'np.flip', (['segment2[0]'], {}), '(segment2[0])\n', (3391, 3404), True, 'import numpy as np\n'), ((3604, 3624), 'numpy.flip', 'np.flip', (['segment2[1]'], {}), '(segment2[1])\n', (3611, 3624), True, 'import numpy as np\n')]
|
#------------------------------------------------------------------------------
# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# Query.py
#
# Demonstrate how to perform a query in different ways.
#------------------------------------------------------------------------------
from __future__ import print_function
import cx_Oracle
import SampleEnv
connection = cx_Oracle.connect(SampleEnv.GetMainConnectString())
sql = """
select * from SampleQueryTab
where id < 6
order by id"""
print("Get all rows via iterator")
cursor = connection.cursor()
for result in cursor.execute(sql):
print(result)
print()
print("Query one row at a time")
cursor.execute(sql)
row = cursor.fetchone()
print(row)
row = cursor.fetchone()
print(row)
print()
print("Fetch many rows")
cursor.execute(sql)
res = cursor.fetchmany(numRows=3)
print(res)
|
[
"SampleEnv.GetMainConnectString"
] |
[((575, 607), 'SampleEnv.GetMainConnectString', 'SampleEnv.GetMainConnectString', ([], {}), '()\n', (605, 607), False, 'import SampleEnv\n')]
|
import os,sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
#from app import create_app, engine
#from app.core.api import initialize
#from app.core.user import auth, user_connector
import unittest
import tempfile
import app
import base64
from app.core.api import initialize
from flask import jsonify, json
from app.core.device import device_connector
from werkzeug.datastructures import Headers
failAuth = "Could not authenticate."
from requests.auth import HTTPBasicAuth
class FlaskrTestCase(unittest.TestCase):
def create_app(self):
# pass in test configuration
return app.create_app(self)
setup_done = False
def setUp(self):
self.db_fd, app.app.config['DATABASE'] = tempfile.mkstemp()
app.app.testing = True
self.app = app.app.test_client()
with app.app.app_context():
app.init_db()
if not FlaskrTestCase.setup_done:
initialize.initialize_APIs()
FlaskrTestCase.setup_done = True
## adding users
app.core.user.auth.add_user("fawaz", "password","<EMAIL>", "ADMIN", "test", "ADMIN", "192.168.1.1")
def tearDown(self):
os.close(self.db_fd)
os.unlink(app.app.config['DATABASE'])
def test_Dalive(self):
rv = self.app.get('/users')
data = json.loads(rv.data)
print(data)
assert(data['status'] == 401)
assert(str(data['message']) == str("Unauthorized"))
##### Devices
def test_Devices_no_auth_get(self):
response = self.app.get('/devices')
data = json.loads(response.data)
print(data)
assert(data['status'] == 401)
assert(data['message'] == "Unauthorized")
def test_Devices_no_auth_put(self):
response = self.app.put('/devices')
data = json.loads(response.data)
print(data)
assert(data['status'] == 401)
assert(data['message'] == "Unauthorized")
"""
# Device Groups
def test_get_Device_Groups_no_auth_post(self):
response = self.app.post('/device_groups')
assert response == jsonify(
status=400,
message=failAuth
)
def test_get_Device_Groups_no_auth_put(self):
response = self.app.put('/devices_groups')
assert response == jsonify(
status=400,
message=failAuth
)
def test_get_Device_Groups_no_auth_post(self):
response = self.app.put('/devices')
assert response == jsonify(
status=400,
message=failAuth
)
def test_login_auth_post(self):
username = "test"
password = "password"
response = self.app.post('/login', data=dict(
username="fawaz",
password="password"
), follow_redirects=True)
data = json.loads(response.data)
assert(data['status'] == 200)
assert(data['message'] == 'User logged in')
def test_users_auth_put(self):
response = self.app.put('/users', data=dict(
username="guy",
password="<PASSWORD>",
retyppassword = "<PASSWORD>",
email = "<EMAIL>",
role = "ADMIN"
))
assert response == jsonify(
status=200,
message="User added"
)
assert db_connector.get_user("guy") != None
def test_users_auth_delete(self):
db_connector.add_user("man", "<PASSWORD>", "<EMAIL>", "OPERATOR")
response = self.app.delete('/users', data=dict(
rmusr="guy",
))
assert response == jsonify(
status=200,
message="User Deleted"
)
assert db_connector.get_user("guy") == None
#DEVICE AUTHED
def test_get_Device_Groups_auth_get(self):
response = self.app.get('/device_groups')
assert response == jsonify(
status=200,
message="Sent Devices"
)
data = json.loads(response.data)
assert(data['status'] == 400)
assert(data['message'] == 'Sent Devices')
assert(data['data'] != None)
def test_get_Device_auth_put(self):
response = self.app.put('/devices', data=dict(
vendor_id="1",
serial_num = "2",
model_num = "3"
))
assert response == jsonify(
status=200,
message="Device Added"
)
data = json.loads(response.data)
assert(data['status'] == 200)
assert(data['message'] == 'Sent Devices')
assert(data['data'] != None)
assert db_connector.get_all_devices() != None
def test_get_Device_auth_get(self):
db_connector.add_device("111", "2222", "3333")
response = self.app.get('/devices')
assert response == jsonify(
status=200,
message="Device Added"
)
data = json.loads(response.data)
assert(data['status'] == 200)
assert(data['message'] == 'Sent Devices')
assert(data['data'] != None)
assert db_connector.get_all_devices() != None
def test_get_Device_auth_put(self):
response = self.app.put('/devices', data=dict(
vendor_id="1",
serial_num = "2",
model_num = "3"
))
assert response == jsonify(
status=200,
message="Device Added"
)
data = json.loads(response.data)
assert(data['status'] == 200)
assert(data['message'] == 'Sent Devices')
assert(data['data'] != None)
assert db_connector.get_all_devices() != None
"""
#### Parameters
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"app.core.user.auth.add_user",
"os.path.abspath",
"app.core.api.initialize.initialize_APIs",
"app.app.test_client",
"tempfile.mkstemp",
"os.unlink",
"app.create_app",
"os.close",
"flask.json.loads",
"app.app.app_context",
"app.init_db"
] |
[((5825, 5840), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5838, 5840), False, 'import unittest\n'), ((626, 646), 'app.create_app', 'app.create_app', (['self'], {}), '(self)\n', (640, 646), False, 'import app\n'), ((741, 759), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (757, 759), False, 'import tempfile\n'), ((810, 831), 'app.app.test_client', 'app.app.test_client', ([], {}), '()\n', (829, 831), False, 'import app\n'), ((1056, 1160), 'app.core.user.auth.add_user', 'app.core.user.auth.add_user', (['"""fawaz"""', '"""password"""', '"""<EMAIL>"""', '"""ADMIN"""', '"""test"""', '"""ADMIN"""', '"""192.168.1.1"""'], {}), "('fawaz', 'password', '<EMAIL>', 'ADMIN', 'test',\n 'ADMIN', '192.168.1.1')\n", (1083, 1160), False, 'import app\n'), ((1189, 1209), 'os.close', 'os.close', (['self.db_fd'], {}), '(self.db_fd)\n', (1197, 1209), False, 'import os, sys\n'), ((1218, 1255), 'os.unlink', 'os.unlink', (["app.app.config['DATABASE']"], {}), "(app.app.config['DATABASE'])\n", (1227, 1255), False, 'import os, sys\n'), ((1336, 1355), 'flask.json.loads', 'json.loads', (['rv.data'], {}), '(rv.data)\n', (1346, 1355), False, 'from flask import jsonify, json\n'), ((1588, 1613), 'flask.json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (1598, 1613), False, 'from flask import jsonify, json\n'), ((1822, 1847), 'flask.json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (1832, 1847), False, 'from flask import jsonify, json\n'), ((62, 87), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (77, 87), False, 'import os, sys\n'), ((845, 866), 'app.app.app_context', 'app.app.app_context', ([], {}), '()\n', (864, 866), False, 'import app\n'), ((880, 893), 'app.init_db', 'app.init_db', ([], {}), '()\n', (891, 893), False, 'import app\n'), ((948, 976), 'app.core.api.initialize.initialize_APIs', 'initialize.initialize_APIs', ([], {}), '()\n', (974, 976), False, 'from app.core.api import initialize\n')]
|
# -*- coding: utf-8 -*-
from kivy.lang import Builder
from kivy.uix.textinput import TextInput
from kivy.properties import ObjectProperty, NumericProperty, StringProperty, \
ListProperty, BooleanProperty
from kivy.metrics import sp, dp
from kivy.animation import Animation
from kivymd.label import MDLabel
from kivymd.theming import ThemableBehavior
from kivy.clock import Clock
Builder.load_string('''
<SingleLineTextField>:
canvas.before:
Clear
Color:
rgba: self.line_color_normal
Line:
id: "the_line"
points: self.x, self.y + dp(8), self.x + self.width, self.y + dp(8)
width: 1
dash_length: dp(3)
dash_offset: 2 if self.disabled else 0
Color:
rgba: self._current_line_color
Rectangle:
size: self._line_width, dp(2)
pos: self.center_x - (self._line_width / 2), self.y + dp(8)
Color:
rgba: self._current_error_color
Rectangle:
texture: self._msg_lbl.texture
size: self._msg_lbl.texture_size
pos: self.x, self.y - dp(8)
Color:
rgba: (self._current_line_color if self.focus and not self.cursor_blink \
else (0, 0, 0, 0))
Rectangle:
pos: [int(x) for x in self.cursor_pos]
size: 1, -self.line_height
Color:
#rgba: self._hint_txt_color if not self.text and not self.focus\
#else (self.line_color_focus if not self.text or self.focus\
#else self.line_color_normal)
rgba: self._current_hint_text_color
Rectangle:
texture: self._hint_lbl.texture
size: self._hint_lbl.texture_size
pos: self.x, self.y + self._hint_y
Color:
rgba: self.disabled_foreground_color if self.disabled else \
(self.hint_text_color if not self.text and not self.focus else \
self.foreground_color)
font_name: 'Roboto'
foreground_color: app.theme_cls.text_color
font_size: sp(16)
bold: False
padding: 0, dp(16), 0, dp(10)
multiline: False
size_hint_y: None
height: dp(48)
''')
class SingleLineTextField(ThemableBehavior, TextInput):
line_color_normal = ListProperty()
line_color_focus = ListProperty()
error_color = ListProperty()
error = BooleanProperty(False)
message = StringProperty("")
message_mode = StringProperty("none")
mode = message_mode
_hint_txt_color = ListProperty()
_hint_lbl = ObjectProperty()
_hint_lbl_font_size = NumericProperty(sp(16))
_hint_y = NumericProperty(dp(10))
_error_label = ObjectProperty()
_line_width = NumericProperty(0)
_hint_txt = StringProperty('')
_current_line_color = line_color_focus
_current_error_color = ListProperty([0.0, 0.0, 0.0, 0.0])
_current_hint_text_color = _hint_txt_color
def __init__(self, **kwargs):
Clock.schedule_interval(self._update_color, 5)
self._msg_lbl = MDLabel(font_style='Caption',
theme_text_color='Error',
halign='left',
valign='middle',
text=self.message)
self._hint_lbl = MDLabel(font_style='Subhead',
halign='left',
valign='middle')
super(SingleLineTextField, self).__init__(**kwargs)
self.line_color_normal = self.theme_cls.divider_color
self.line_color_focus = list(self.theme_cls.primary_color)
self.base_line_color_focus = list(self.theme_cls.primary_color)
self.error_color = self.theme_cls.error_color
self._hint_txt_color = self.theme_cls.disabled_hint_text_color
self.hint_text_color = (1, 1, 1, 0)
self.cursor_color = self.theme_cls.primary_color
self.bind(message=self._set_msg,
hint_text=self._set_hint,
_hint_lbl_font_size=self._hint_lbl.setter('font_size'),
message_mode=self._set_mode)
self.hint_anim_in = Animation(_hint_y=dp(34),
_hint_lbl_font_size=sp(12), duration=.2,
t='out_quad')
self.hint_anim_out = Animation(_hint_y=dp(10),
_hint_lbl_font_size=sp(16), duration=.2,
t='out_quad')
def _update_color(self, *args):
self.line_color_normal = self.theme_cls.divider_color
self.base_line_color_focus = list(self.theme_cls.primary_color)
if not self.focus and not self.error:
self.line_color_focus = self.theme_cls.primary_color
Animation(duration=.2, _current_hint_text_color=self.theme_cls.disabled_hint_text_color).start(self)
if self.mode == "persistent":
Animation(duration=.1, _current_error_color=self.theme_cls.disabled_hint_text_color).start(self)
if self.focus and not self.error:
self.cursor_color = self.theme_cls.primary_color
def on_hint_text_color(self, instance, color):
self._hint_txt_color = self.theme_cls.disabled_hint_text_color
self.hint_text_color = (1, 1, 1, 0)
def on_width(self, instance, width):
if self.focus and instance is not None or self.error and instance is not None:
self._line_width = width
self.anim = Animation(_line_width=width, duration=.2, t='out_quad')
self._msg_lbl.width = self.width
self._hint_lbl.width = self.width
def on_pos(self, *args):
self.hint_anim_in = Animation(_hint_y=dp(34),
_hint_lbl_font_size=sp(12), duration=.2,
t='out_quad')
self.hint_anim_out = Animation(_hint_y=dp(10),
_hint_lbl_font_size=sp(16), duration=.2,
t='out_quad')
def on_focus(self, *args):
if self.focus:
Animation.cancel_all(self, '_line_width', '_hint_y',
'_hint_lbl_font_size')
if len(self.text) == 0:
self.hint_anim_in.start(self)
if self.error:
Animation(duration=.2, _current_hint_text_color=self.error_color).start(self)
if self.mode == "on_error":
Animation(duration=.2, _current_error_color=self.error_color).start(self)
elif self.mode == "persistent":
Animation(duration=.2, _current_error_color=self.theme_cls.disabled_hint_text_color).start(self)
elif self.mode == "on_focus":
Animation(duration=.2, _current_error_color=self.theme_cls.disabled_hint_text_color).start(self)
else:
pass
elif not self.error:
self.on_width(None, self.width)
self.anim.start(self)
Animation(duration=.2, _current_hint_text_color=self.line_color_focus).start(self)
if self.mode == "on_error":
Animation(duration=.2, _current_error_color=(0, 0, 0, 0)).start(self)
if self.mode == "persistent":
Animation(duration=.2, _current_error_color=self.theme_cls.disabled_hint_text_color).start(self)
elif self.mode == "on_focus":
Animation(duration=.2, _current_error_color=self.theme_cls.disabled_hint_text_color).start(self)
else:
pass
else:
Animation.cancel_all(self, '_line_width', '_hint_y',
'_hint_lbl_font_size')
if len(self.text) == 0:
self.hint_anim_out.start(self)
if not self.error:
self.line_color_focus = self.base_line_color_focus
Animation(duration=.2, _current_line_color=self.line_color_focus,
_current_hint_text_color=self.theme_cls.disabled_hint_text_color).start(self)
if self.mode == "on_error":
Animation(duration=.2, _current_error_color=(0, 0, 0, 0)).start(self)
elif self.mode == "persistent":
Animation(duration=.2, _current_error_color=self.theme_cls.disabled_hint_text_color).start(self)
elif self.mode == "on_focus":
Animation(duration=.2, _current_error_color=(0, 0, 0, 0)).start(self)
self.on_width(None, 0)
self.anim.start(self)
elif self.error:
Animation(duration=.2, _current_line_color=self.error_color,
_current_hint_text_color=self.error_color).start(self)
if self.mode == "on_error":
Animation(duration=.2, _current_error_color=self.error_color).start(self)
elif self.mode == "persistent":
Animation(duration=.2, _current_error_color=self.theme_cls.disabled_hint_text_color).start(self)
elif self.mode == "on_focus":
Animation(duration=.2, _current_error_color=(0, 0, 0, 0)).start(self)
def _set_hint(self, instance, text):
self._hint_lbl.text = text
def _set_msg(self, instance, text):
self._msg_lbl.text = text
self.message = text
def _set_mode(self, instance, text):
self.mode = text
if self.mode == "persistent":
Animation(duration=.1, _current_error_color=self.theme_cls.disabled_hint_text_color).start(self)
|
[
"kivy.properties.ListProperty",
"kivy.animation.Animation.cancel_all",
"kivy.lang.Builder.load_string",
"kivy.metrics.sp",
"kivy.clock.Clock.schedule_interval",
"kivy.properties.BooleanProperty",
"kivy.properties.StringProperty",
"kivymd.label.MDLabel",
"kivy.animation.Animation",
"kivy.metrics.dp",
"kivy.properties.ObjectProperty",
"kivy.properties.NumericProperty"
] |
[((385, 2223), 'kivy.lang.Builder.load_string', 'Builder.load_string', (['"""\n<SingleLineTextField>:\n canvas.before:\n Clear\n Color:\n rgba: self.line_color_normal\n Line:\n id: "the_line"\n points: self.x, self.y + dp(8), self.x + self.width, self.y + dp(8)\n width: 1\n dash_length: dp(3)\n dash_offset: 2 if self.disabled else 0\n Color:\n rgba: self._current_line_color\n Rectangle:\n size: self._line_width, dp(2)\n pos: self.center_x - (self._line_width / 2), self.y + dp(8)\n Color:\n rgba: self._current_error_color\n Rectangle:\n texture: self._msg_lbl.texture\n size: self._msg_lbl.texture_size\n pos: self.x, self.y - dp(8)\n Color:\n rgba: (self._current_line_color if self.focus and not self.cursor_blink else (0, 0, 0, 0))\n Rectangle:\n pos: [int(x) for x in self.cursor_pos]\n size: 1, -self.line_height\n Color:\n #rgba: self._hint_txt_color if not self.text and not self.focus #else (self.line_color_focus if not self.text or self.focus #else self.line_color_normal)\n rgba: self._current_hint_text_color\n Rectangle:\n texture: self._hint_lbl.texture\n size: self._hint_lbl.texture_size\n pos: self.x, self.y + self._hint_y\n Color:\n rgba: self.disabled_foreground_color if self.disabled else (self.hint_text_color if not self.text and not self.focus else self.foreground_color)\n\n font_name: \'Roboto\'\n foreground_color: app.theme_cls.text_color\n font_size: sp(16)\n bold: False\n padding: 0, dp(16), 0, dp(10)\n multiline: False\n size_hint_y: None\n height: dp(48)\n"""'], {}), '(\n """\n<SingleLineTextField>:\n canvas.before:\n Clear\n Color:\n rgba: self.line_color_normal\n Line:\n id: "the_line"\n points: self.x, self.y + dp(8), self.x + self.width, self.y + dp(8)\n width: 1\n dash_length: dp(3)\n dash_offset: 2 if self.disabled else 0\n Color:\n rgba: self._current_line_color\n Rectangle:\n size: self._line_width, dp(2)\n pos: self.center_x - (self._line_width / 2), self.y + dp(8)\n Color:\n rgba: self._current_error_color\n Rectangle:\n texture: 
self._msg_lbl.texture\n size: self._msg_lbl.texture_size\n pos: self.x, self.y - dp(8)\n Color:\n rgba: (self._current_line_color if self.focus and not self.cursor_blink else (0, 0, 0, 0))\n Rectangle:\n pos: [int(x) for x in self.cursor_pos]\n size: 1, -self.line_height\n Color:\n #rgba: self._hint_txt_color if not self.text and not self.focus #else (self.line_color_focus if not self.text or self.focus #else self.line_color_normal)\n rgba: self._current_hint_text_color\n Rectangle:\n texture: self._hint_lbl.texture\n size: self._hint_lbl.texture_size\n pos: self.x, self.y + self._hint_y\n Color:\n rgba: self.disabled_foreground_color if self.disabled else (self.hint_text_color if not self.text and not self.focus else self.foreground_color)\n\n font_name: \'Roboto\'\n foreground_color: app.theme_cls.text_color\n font_size: sp(16)\n bold: False\n padding: 0, dp(16), 0, dp(10)\n multiline: False\n size_hint_y: None\n height: dp(48)\n"""\n )\n', (404, 2223), False, 'from kivy.lang import Builder\n'), ((2306, 2320), 'kivy.properties.ListProperty', 'ListProperty', ([], {}), '()\n', (2318, 2320), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2344, 2358), 'kivy.properties.ListProperty', 'ListProperty', ([], {}), '()\n', (2356, 2358), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2377, 2391), 'kivy.properties.ListProperty', 'ListProperty', ([], {}), '()\n', (2389, 2391), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2404, 2426), 'kivy.properties.BooleanProperty', 'BooleanProperty', (['(False)'], {}), '(False)\n', (2419, 2426), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2441, 2459), 'kivy.properties.StringProperty', 'StringProperty', (['""""""'], {}), "('')\n", (2455, 
2459), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2479, 2501), 'kivy.properties.StringProperty', 'StringProperty', (['"""none"""'], {}), "('none')\n", (2493, 2501), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2549, 2563), 'kivy.properties.ListProperty', 'ListProperty', ([], {}), '()\n', (2561, 2563), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2580, 2596), 'kivy.properties.ObjectProperty', 'ObjectProperty', ([], {}), '()\n', (2594, 2596), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2704, 2720), 'kivy.properties.ObjectProperty', 'ObjectProperty', ([], {}), '()\n', (2718, 2720), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2739, 2757), 'kivy.properties.NumericProperty', 'NumericProperty', (['(0)'], {}), '(0)\n', (2754, 2757), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2774, 2792), 'kivy.properties.StringProperty', 'StringProperty', (['""""""'], {}), "('')\n", (2788, 2792), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2863, 2897), 'kivy.properties.ListProperty', 'ListProperty', (['[0.0, 0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0, 0.0])\n', (2875, 2897), False, 'from kivy.properties import ObjectProperty, NumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2639, 2645), 'kivy.metrics.sp', 'sp', (['(16)'], {}), '(16)\n', (2641, 2645), False, 'from kivy.metrics import sp, dp\n'), ((2677, 2683), 'kivy.metrics.dp', 'dp', (['(10)'], {}), '(10)\n', (2679, 2683), False, 'from kivy.metrics import sp, dp\n'), ((2988, 3034), 
'kivy.clock.Clock.schedule_interval', 'Clock.schedule_interval', (['self._update_color', '(5)'], {}), '(self._update_color, 5)\n', (3011, 3034), False, 'from kivy.clock import Clock\n'), ((3059, 3169), 'kivymd.label.MDLabel', 'MDLabel', ([], {'font_style': '"""Caption"""', 'theme_text_color': '"""Error"""', 'halign': '"""left"""', 'valign': '"""middle"""', 'text': 'self.message'}), "(font_style='Caption', theme_text_color='Error', halign='left',\n valign='middle', text=self.message)\n", (3066, 3169), False, 'from kivymd.label import MDLabel\n'), ((3320, 3381), 'kivymd.label.MDLabel', 'MDLabel', ([], {'font_style': '"""Subhead"""', 'halign': '"""left"""', 'valign': '"""middle"""'}), "(font_style='Subhead', halign='left', valign='middle')\n", (3327, 3381), False, 'from kivymd.label import MDLabel\n'), ((5522, 5578), 'kivy.animation.Animation', 'Animation', ([], {'_line_width': 'width', 'duration': '(0.2)', 't': '"""out_quad"""'}), "(_line_width=width, duration=0.2, t='out_quad')\n", (5531, 5578), False, 'from kivy.animation import Animation\n'), ((6131, 6206), 'kivy.animation.Animation.cancel_all', 'Animation.cancel_all', (['self', '"""_line_width"""', '"""_hint_y"""', '"""_hint_lbl_font_size"""'], {}), "(self, '_line_width', '_hint_y', '_hint_lbl_font_size')\n", (6151, 6206), False, 'from kivy.animation import Animation\n'), ((7707, 7782), 'kivy.animation.Animation.cancel_all', 'Animation.cancel_all', (['self', '"""_line_width"""', '"""_hint_y"""', '"""_hint_lbl_font_size"""'], {}), "(self, '_line_width', '_hint_y', '_hint_lbl_font_size')\n", (7727, 7782), False, 'from kivy.animation import Animation\n'), ((4188, 4194), 'kivy.metrics.dp', 'dp', (['(34)'], {}), '(34)\n', (4190, 4194), False, 'from kivy.metrics import sp, dp\n'), ((4254, 4260), 'kivy.metrics.sp', 'sp', (['(12)'], {}), '(12)\n', (4256, 4260), False, 'from kivy.metrics import sp, dp\n'), ((4375, 4381), 'kivy.metrics.dp', 'dp', (['(10)'], {}), '(10)\n', (4377, 4381), False, 'from kivy.metrics import sp, 
dp\n'), ((4442, 4448), 'kivy.metrics.sp', 'sp', (['(16)'], {}), '(16)\n', (4444, 4448), False, 'from kivy.metrics import sp, dp\n'), ((5737, 5743), 'kivy.metrics.dp', 'dp', (['(34)'], {}), '(34)\n', (5739, 5743), False, 'from kivy.metrics import sp, dp\n'), ((5803, 5809), 'kivy.metrics.sp', 'sp', (['(12)'], {}), '(12)\n', (5805, 5809), False, 'from kivy.metrics import sp, dp\n'), ((5923, 5929), 'kivy.metrics.dp', 'dp', (['(10)'], {}), '(10)\n', (5925, 5929), False, 'from kivy.metrics import sp, dp\n'), ((5990, 5996), 'kivy.metrics.sp', 'sp', (['(16)'], {}), '(16)\n', (5992, 5996), False, 'from kivy.metrics import sp, dp\n'), ((4810, 4904), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_hint_text_color': 'self.theme_cls.disabled_hint_text_color'}), '(duration=0.2, _current_hint_text_color=self.theme_cls.\n disabled_hint_text_color)\n', (4819, 4904), False, 'from kivy.animation import Animation\n'), ((9619, 9709), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.1)', '_current_error_color': 'self.theme_cls.disabled_hint_text_color'}), '(duration=0.1, _current_error_color=self.theme_cls.\n disabled_hint_text_color)\n', (9628, 9709), False, 'from kivy.animation import Animation\n'), ((4969, 5059), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.1)', '_current_error_color': 'self.theme_cls.disabled_hint_text_color'}), '(duration=0.1, _current_error_color=self.theme_cls.\n disabled_hint_text_color)\n', (4978, 5059), False, 'from kivy.animation import Animation\n'), ((6365, 6431), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_hint_text_color': 'self.error_color'}), '(duration=0.2, _current_hint_text_color=self.error_color)\n', (6374, 6431), False, 'from kivy.animation import Animation\n'), ((8013, 8149), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_line_color': 'self.line_color_focus', '_current_hint_text_color': 
'self.theme_cls.disabled_hint_text_color'}), '(duration=0.2, _current_line_color=self.line_color_focus,\n _current_hint_text_color=self.theme_cls.disabled_hint_text_color)\n', (8022, 8149), False, 'from kivy.animation import Animation\n'), ((6507, 6569), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_error_color': 'self.error_color'}), '(duration=0.2, _current_error_color=self.error_color)\n', (6516, 6569), False, 'from kivy.animation import Animation\n'), ((7091, 7162), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_hint_text_color': 'self.line_color_focus'}), '(duration=0.2, _current_hint_text_color=self.line_color_focus)\n', (7100, 7162), False, 'from kivy.animation import Animation\n'), ((8247, 8305), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_error_color': '(0, 0, 0, 0)'}), '(duration=0.2, _current_error_color=(0, 0, 0, 0))\n', (8256, 8305), False, 'from kivy.animation import Animation\n'), ((8741, 8849), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_line_color': 'self.error_color', '_current_hint_text_color': 'self.error_color'}), '(duration=0.2, _current_line_color=self.error_color,\n _current_hint_text_color=self.error_color)\n', (8750, 8849), False, 'from kivy.animation import Animation\n'), ((6649, 6739), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_error_color': 'self.theme_cls.disabled_hint_text_color'}), '(duration=0.2, _current_error_color=self.theme_cls.\n disabled_hint_text_color)\n', (6658, 6739), False, 'from kivy.animation import Animation\n'), ((7238, 7296), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_error_color': '(0, 0, 0, 0)'}), '(duration=0.2, _current_error_color=(0, 0, 0, 0))\n', (7247, 7296), False, 'from kivy.animation import Animation\n'), ((7374, 7464), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', 
'_current_error_color': 'self.theme_cls.disabled_hint_text_color'}), '(duration=0.2, _current_error_color=self.theme_cls.\n disabled_hint_text_color)\n', (7383, 7464), False, 'from kivy.animation import Animation\n'), ((8385, 8475), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_error_color': 'self.theme_cls.disabled_hint_text_color'}), '(duration=0.2, _current_error_color=self.theme_cls.\n disabled_hint_text_color)\n', (8394, 8475), False, 'from kivy.animation import Animation\n'), ((8947, 9009), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_error_color': 'self.error_color'}), '(duration=0.2, _current_error_color=self.error_color)\n', (8956, 9009), False, 'from kivy.animation import Animation\n'), ((6812, 6902), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_error_color': 'self.theme_cls.disabled_hint_text_color'}), '(duration=0.2, _current_error_color=self.theme_cls.\n disabled_hint_text_color)\n', (6821, 6902), False, 'from kivy.animation import Animation\n'), ((7537, 7627), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_error_color': 'self.theme_cls.disabled_hint_text_color'}), '(duration=0.2, _current_error_color=self.theme_cls.\n disabled_hint_text_color)\n', (7546, 7627), False, 'from kivy.animation import Animation\n'), ((8548, 8606), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_error_color': '(0, 0, 0, 0)'}), '(duration=0.2, _current_error_color=(0, 0, 0, 0))\n', (8557, 8606), False, 'from kivy.animation import Animation\n'), ((9089, 9179), 'kivy.animation.Animation', 'Animation', ([], {'duration': '(0.2)', '_current_error_color': 'self.theme_cls.disabled_hint_text_color'}), '(duration=0.2, _current_error_color=self.theme_cls.\n disabled_hint_text_color)\n', (9098, 9179), False, 'from kivy.animation import Animation\n'), ((9252, 9310), 'kivy.animation.Animation', 'Animation', ([], {'duration': 
'(0.2)', '_current_error_color': '(0, 0, 0, 0)'}), '(duration=0.2, _current_error_color=(0, 0, 0, 0))\n', (9261, 9310), False, 'from kivy.animation import Animation\n')]
|
#!/usr/bin/env python
# Packaging script for the 'zwdb' distribution.
# NOTE(review): running this script has a side effect beyond packaging: it
# bumps the patch component of zwdb/__version__.py on every invocation.
import os
from setuptools import setup, find_packages
from codecs import open

pkg_name = 'zwdb'
here = os.path.abspath(os.path.dirname(__file__))
packages = find_packages()
# Runtime requirements, one per line of requirements.txt.
requires = [s.strip() for s in open('requirements.txt').readlines()]
# Dev/test requirements; the first 4 lines are skipped — presumably
# tooling-only entries (TODO: confirm against requirements_dev.txt).
test_requirements = [s.strip() for s in open('requirements_dev.txt').readlines()][4:]
about = {}
lines = []
with open(os.path.join(here, pkg_name, '__version__.py'), 'r', 'utf-8') as f:
    # Execute the version module so `about` holds its metadata
    # (__title__, __version__, __author__, ...).
    exec(f.read(), about)
    # auto update min version number for every dist upload
    verarr = about['__version__'].split('.')
    verarr[2] = str(int(verarr[2])+1)
    about['__version__'] = '.'.join(verarr)
    # Re-read the file and replace only its first line, which is assumed
    # to be the `__version__ = '...'` assignment.
    f.seek(0)
    lines = f.readlines()
    lines[0] = "__version__ = '%s'\n"%about['__version__']
# Write the bumped version back so the next build increments again.
with open(os.path.join(here, pkg_name, '__version__.py'), 'w', 'utf-8') as f:
    f.writelines(lines)
with open('README.md', 'r') as f:
    readme = f.read()
setup(
    name=about['__title__'],
    version=about['__version__'],
    description=about['__description__'],
    long_description=readme,
    long_description_content_type='text/markdown',
    author=about['__author__'],
    author_email=about['__author_email__'],
    url=about['__url__'],
    license=about['__license__'],
    packages=packages,
    package_data={'': ['LICENSE', 'NOTICE']},
    package_dir={pkg_name:pkg_name},
    include_package_data=True,
    install_requires=requires,
    tests_require=test_requirements,
    python_requires='>=3.6',
    platforms=["all"],
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
    ],
)
|
[
"codecs.open",
"setuptools.setup",
"os.path.dirname",
"os.path.join",
"setuptools.find_packages"
] |
[((181, 196), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (194, 196), False, 'from setuptools import setup, find_packages\n'), ((925, 1630), 'setuptools.setup', 'setup', ([], {'name': "about['__title__']", 'version': "about['__version__']", 'description': "about['__description__']", 'long_description': 'readme', 'long_description_content_type': '"""text/markdown"""', 'author': "about['__author__']", 'author_email': "about['__author_email__']", 'url': "about['__url__']", 'license': "about['__license__']", 'packages': 'packages', 'package_data': "{'': ['LICENSE', 'NOTICE']}", 'package_dir': '{pkg_name: pkg_name}', 'include_package_data': '(True)', 'install_requires': 'requires', 'tests_require': 'test_requirements', 'python_requires': '""">=3.6"""', 'platforms': "['all']", 'classifiers': "['Programming Language :: Python :: 3',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: OS Independent']"}), "(name=about['__title__'], version=about['__version__'], description=\n about['__description__'], long_description=readme,\n long_description_content_type='text/markdown', author=about[\n '__author__'], author_email=about['__author_email__'], url=about[\n '__url__'], license=about['__license__'], packages=packages,\n package_data={'': ['LICENSE', 'NOTICE']}, package_dir={pkg_name:\n pkg_name}, include_package_data=True, install_requires=requires,\n tests_require=test_requirements, python_requires='>=3.6', platforms=[\n 'all'], classifiers=['Programming Language :: Python :: 3',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: OS Independent'])\n", (930, 1630), False, 'from setuptools import setup, find_packages\n'), ((143, 168), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (158, 168), False, 'import os\n'), ((873, 895), 'codecs.open', 'open', (['"""README.md"""', '"""r"""'], {}), "('README.md', 'r')\n", (877, 895), False, 'from codecs import open\n'), ((385, 431), 
'os.path.join', 'os.path.join', (['here', 'pkg_name', '"""__version__.py"""'], {}), "(here, pkg_name, '__version__.py')\n", (397, 431), False, 'import os\n'), ((775, 821), 'os.path.join', 'os.path.join', (['here', 'pkg_name', '"""__version__.py"""'], {}), "(here, pkg_name, '__version__.py')\n", (787, 821), False, 'import os\n'), ((228, 252), 'codecs.open', 'open', (['"""requirements.txt"""'], {}), "('requirements.txt')\n", (232, 252), False, 'from codecs import open\n'), ((306, 334), 'codecs.open', 'open', (['"""requirements_dev.txt"""'], {}), "('requirements_dev.txt')\n", (310, 334), False, 'from codecs import open\n')]
|
from couchdbkit import MultipleResultsFound
from dimagi.ext.couchdbkit import *
from django.conf import settings
from django.template.loader import render_to_string
from corehq.apps.users.models import WebUser, MultiMembershipMixin, Invitation
from corehq.util.view_utils import absolute_reverse
from dimagi.utils.couch.cache import cache_core
from dimagi.utils.couch.undo import UndoableDocument, DeleteDocRecord
from corehq.apps.hqwebapp.tasks import send_html_email_async
class Organization(Document):
    """Couch-backed organization record.

    ``name`` is the short identifier (e.g. "worldvision"), ``title`` the
    display form (e.g. "World Vision").
    """
    name = StringProperty()   # short identifier, e.g. "worldvision"
    title = StringProperty()  # display title, e.g. "World Vision"
    # metadata
    email = StringProperty()
    url = StringProperty()
    location = StringProperty()
    logo_filename = StringProperty()
    verified = BooleanProperty(default=False)

    @classmethod
    def get_by_name(cls, name, strict=False):
        """Return the single organization with this name, or None.

        With ``strict=False`` a stale view read is allowed.
        Raises MultipleResultsFound if the view yields more than one doc.
        """
        view_kwargs = {} if strict else {'stale': settings.COUCH_STALE_QUERY}
        matches = cache_core.cached_view(
            cls.get_db(), "orgs/by_name", key=name, reduce=False,
            include_docs=True, wrapper=cls.wrap, **view_kwargs)
        n_found = len(matches)
        if n_found == 0:
            return None
        if n_found == 1:
            return list(matches)[0]
        raise MultipleResultsFound("Error, Organization.get_by_name returned more than 1 result for %s" % name)

    @classmethod
    def get_all(cls):
        """This will eventually be a big operation"""
        # deliberately not using stale=settings.COUCH_STALE_QUERY here
        return cls.view("orgs/by_name",
            reduce=False,
            include_docs=True,
        ).all()

    def get_logo(self):
        """Return (logo bytes, content type), or (None, None) if no logo is set."""
        if not self.logo_filename:
            return None, None
        blob = self.fetch_attachment(self.logo_filename)
        content_type = self._attachments[self.logo_filename]['content_type']
        return blob, content_type

    def __str__(self):
        return self.title

    def get_members(self):
        """All web users that belong to this organization."""
        return WebUser.by_organization(self.name)
class Team(UndoableDocument, MultiMembershipMixin):
    """A named team inside an organization; soft-deletable."""
    name = StringProperty()
    organization = StringProperty()

    def get_members(self):
        """Web users of this team's organization, scoped to this team."""
        return WebUser.by_organization(self.organization, team_id=self.get_id)

    @classmethod
    def get_by_org_and_name(cls, org_name, name):
        """Fetch the single team identified by (organization, name)."""
        return cls.view(
            "orgs/team_by_org_and_name",
            key=[org_name, name],
            reduce=False,
            include_docs=True,
        ).one()

    @classmethod
    def get_by_org(cls, org_name):
        """All teams of the given organization (cached view read)."""
        return cache_core.cached_view(
            cls.get_db(), "orgs/team_by_org_and_name",
            startkey=[org_name],
            endkey=[org_name, {}],
            reduce=False,
            include_docs=True,
            wrapper=cls.wrap,
        )

    @classmethod
    def get_by_domain(cls, domain):
        """All teams attached to the given domain (cached view read)."""
        return cache_core.cached_view(
            cls.get_db(), "orgs/team_by_domain", key=domain,
            reduce=False, include_docs=True, wrapper=cls.wrap)

    def save(self, *args, **kwargs):
        """Persist the team; an empty name is replaced with '-'."""
        self.name = self.name or '-'
        super(Team, self).save()

    def create_delete_record(self, *args, **kwargs):
        """Build the undo record used when this team is soft-deleted."""
        return DeleteTeamRecord(*args, **kwargs)

    def soft_delete(self):
        """Soft-delete this team without recording a domain."""
        return super(Team, self).soft_delete(domain_included=False)
class DeleteTeamRecord(DeleteDocRecord):
    """Undo record for a soft-deleted Team."""

    def get_doc(self):
        """Resolve this record back to the Team document it refers to."""
        deleted_team = Team.get(self.doc_id)
        return deleted_team
class OrgInvitation(Invitation):
    """Invitation for a web user to join an organization."""
    doc_type = "OrgInvitation"
    organization = StringProperty()

    def send_activation_email(self):
        """Email the invitee a link for accepting this invitation."""
        accept_url = absolute_reverse("orgs_accept_invitation",
                                       args=[self.organization, self.get_id])
        context = {
            "organization": self.organization,
            "url": accept_url,
            "inviter": self.get_inviter().formatted_name,
        }
        text_content = render_to_string("orgs/email/org_invite.txt", context)
        html_content = render_to_string("orgs/email/org_invite.html", context)
        subject = 'Invitation from %s to join CommCareHQ' % self.get_inviter().formatted_name
        # Delivery happens off-request via the async email task.
        send_html_email_async.delay(subject, self.email, html_content,
                                    text_content=text_content,
                                    email_from=settings.DEFAULT_FROM_EMAIL)
class OrgRequest(Document):
    """A request by a user/domain to join an organization."""
    doc_type = "OrgRequest"
    organization = StringProperty()
    domain = StringProperty()
    requested_by = StringProperty()
    requested_on = DateTimeProperty()
    seen = BooleanProperty(default=False)

    @classmethod
    def get_requests(cls, organization, domain=None, user_id=None):
        """Fetch org requests, narrowed by optional domain and user id.

        Without ``user_id`` the raw view result is returned; with it a
        single request (or None) is returned, and MultipleResultsFound is
        raised when more than one matches.
        """
        view_key = [organization]
        if domain:
            view_key.append(domain)
        if user_id:
            view_key.append(user_id)
        # todo - forcing invalidating on all requests while we turn these features on slowly
        matches = cache_core.cached_view(
            cls.get_db(), "orgs/org_requests",
            startkey=view_key,
            endkey=view_key + [{}],
            reduce=False,
            include_docs=True,
            wrapper=cls.wrap,
        )
        if not user_id:
            return matches
        try:
            n_found = len(matches)
            if n_found == 1:
                return matches[0]
            if n_found > 1:
                raise MultipleResultsFound("OrgRequests found multiple results for %s" % view_key)
        except IndexError:
            return None
|
[
"corehq.util.view_utils.absolute_reverse",
"django.template.loader.render_to_string",
"corehq.apps.hqwebapp.tasks.send_html_email_async.delay",
"couchdbkit.MultipleResultsFound",
"corehq.apps.users.models.WebUser.by_organization"
] |
[((1996, 2030), 'corehq.apps.users.models.WebUser.by_organization', 'WebUser.by_organization', (['self.name'], {}), '(self.name)\n', (2019, 2030), False, 'from corehq.apps.users.models import WebUser, MultiMembershipMixin, Invitation\n'), ((2192, 2255), 'corehq.apps.users.models.WebUser.by_organization', 'WebUser.by_organization', (['self.organization'], {'team_id': 'self.get_id'}), '(self.organization, team_id=self.get_id)\n', (2215, 2255), False, 'from corehq.apps.users.models import WebUser, MultiMembershipMixin, Invitation\n'), ((3625, 3711), 'corehq.util.view_utils.absolute_reverse', 'absolute_reverse', (['"""orgs_accept_invitation"""'], {'args': '[self.organization, self.get_id]'}), "('orgs_accept_invitation', args=[self.organization, self.\n get_id])\n", (3641, 3711), False, 'from corehq.util.view_utils import absolute_reverse\n'), ((3890, 3943), 'django.template.loader.render_to_string', 'render_to_string', (['"""orgs/email/org_invite.txt"""', 'params'], {}), "('orgs/email/org_invite.txt', params)\n", (3906, 3943), False, 'from django.template.loader import render_to_string\n'), ((3967, 4021), 'django.template.loader.render_to_string', 'render_to_string', (['"""orgs/email/org_invite.html"""', 'params'], {}), "('orgs/email/org_invite.html', params)\n", (3983, 4021), False, 'from django.template.loader import render_to_string\n'), ((4124, 4258), 'corehq.apps.hqwebapp.tasks.send_html_email_async.delay', 'send_html_email_async.delay', (['subject', 'self.email', 'html_content'], {'text_content': 'text_content', 'email_from': 'settings.DEFAULT_FROM_EMAIL'}), '(subject, self.email, html_content, text_content\n =text_content, email_from=settings.DEFAULT_FROM_EMAIL)\n', (4151, 4258), False, 'from corehq.apps.hqwebapp.tasks import send_html_email_async\n'), ((1211, 1318), 'couchdbkit.MultipleResultsFound', 'MultipleResultsFound', (["('Error, Organization.get_by_name returned more than 1 result for %s' % name)"], {}), "(\n 'Error, Organization.get_by_name returned 
more than 1 result for %s' % name\n )\n", (1231, 1318), False, 'from couchdbkit import MultipleResultsFound\n'), ((5435, 5506), 'couchdbkit.MultipleResultsFound', 'MultipleResultsFound', (["('OrgRequests found multiple results for %s' % key)"], {}), "('OrgRequests found multiple results for %s' % key)\n", (5455, 5506), False, 'from couchdbkit import MultipleResultsFound\n')]
|
''' Some matrix utilities.
'''
import autograd.numpy as np
def is_pos_def(mat):
    ''' Return True if ``mat`` is (symmetric) positive definite.

    Non-symmetric input is symmetrized via ``mat + mat.T``; its
    definiteness matches that of the symmetric part of ``mat``.
    Positive definiteness itself is tested by attempting a Cholesky
    factorization, which only succeeds for SPD matrices.
    '''
    if not np.allclose(mat, mat.T):
        # Decide via the (scaled) symmetric part; recursion depth is 1.
        return is_pos_def(mat + mat.T)
    try:
        np.linalg.cholesky(mat)
    except np.linalg.LinAlgError:
        return False
    return True
|
[
"autograd.numpy.allclose",
"autograd.numpy.linalg.cholesky"
] |
[((157, 180), 'autograd.numpy.allclose', 'np.allclose', (['mat', 'mat.T'], {}), '(mat, mat.T)\n', (168, 180), True, 'import autograd.numpy as np\n'), ((207, 230), 'autograd.numpy.linalg.cholesky', 'np.linalg.cholesky', (['mat'], {}), '(mat)\n', (225, 230), True, 'import autograd.numpy as np\n')]
|
import fileinput
import os
from datetime import date as d
from shutil import copyfile
import sass
import helpers
from article import Article
class Builder:
    """Static blog builder: compiles the SCSS theme and renders the article
    and overview pages from HTML templates.

    NOTE(review): ``__init__``/``_compile_theme`` write into ``build/`` while
    the render methods write into ``../build/`` — presumably they run from
    different working directories; TODO confirm against the calling script.
    """

    def __init__(self, **kwargs):
        """Store blog settings and compile the selected theme.

        Keyword Args:
            theme: theme name; ``themes/<theme>.scss`` must exist.
            name: blog title substituted into the templates.
            description: blog description for the overview page.
            lang: language code substituted into the templates.
        """
        self.selected_theme = kwargs.get('theme')
        self.blog_name = kwargs.get('name')
        self.description = kwargs.get('description')
        self.language = kwargs.get('lang')
        self.blog_entries = []
        os.makedirs('build/', exist_ok=True)
        theme_file = 'themes/{}.scss'.format(self.selected_theme)
        helpers.check_file(theme_file)
        self._compile_theme(theme_file)

    def _compile_theme(self, theme_file):
        """Compile ``theme_file`` (SCSS) into ``build/<theme>.css``.

        Fix: use context managers so both file handles are closed even when
        reading or ``sass.compile`` raises (the previous version leaked the
        handles on error, and created an empty .css before compiling).
        """
        with open(theme_file, 'r') as scss:
            compiled_css = sass.compile(string=scss.read())
        css_path = 'build/{}.css'.format(str.lower(self.selected_theme))
        with open(css_path, 'w') as css:
            css.write(compiled_css)

    def build_article(self, article_file):
        """Render one article to ``../build/<subfolder>/<name>.html``.

        Unpublished articles are skipped. Rendered articles are appended to
        ``self.blog_entries`` so the overview page can list them.
        """
        article = Article(article_file)
        if not article.is_publish:
            return
        subfolder = '../build/{}'.format(article.get_subfolder())
        os.makedirs(subfolder, exist_ok=True)
        file = '{}/{}.html'.format(subfolder, article.name)
        template = '../templates/article.html'
        helpers.check_file(template)
        copyfile(template, file)
        # Substitute the {{ ... }} placeholders of the copied template
        # in place, line by line.
        with fileinput.FileInput(file, inplace=1) as file_input:
            for line in file_input:
                print(line.replace(
                    '{{ theme }}',
                    '../../../{}.css'.format(str.lower(self.selected_theme))
                ).replace('{{ title }}', article.title).replace(
                    '{{ date }}', article.get_formatted_date()).replace(
                        '{{ text }}', article.text).replace(
                            '{{ time_to_read }}',
                            article.calculate_time_to_read()).replace(
                                '{{ blog_name }}', self.blog_name).replace(
                                    '{{ year }}', str(d.today().year)).replace(
                                        '{{ language }}',
                                        str.lower(self.language)),
                      end='')
        helpers.minify_html(file)
        self.blog_entries.append(article)

    def build_overview(self):
        """Render the overview page (``../build/index.html``) that lists all
        previously built articles, newest first, grouped by year."""
        file = '../build/index.html'
        template = '../templates/overview.html'
        helpers.check_file(template)
        copyfile(template, file)
        entries_html = ''
        # Newest first; emit a year heading whenever the year changes.
        self.blog_entries.sort(key=lambda x: x.date, reverse=True)
        current_year = self.blog_entries[0].date.year
        entries_html += '<h2>{}</h2><ul>'.format(current_year)
        for blog_entry in self.blog_entries:
            if blog_entry.date.year != current_year:
                entries_html += '</ul><h2>{}</h2><ul>'.format(
                    blog_entry.date.year)
                current_year = blog_entry.date.year
            entries_html += '<li><a href="{}/{}.html"> \
                <span class="date">{}</span>{} \
                </a></li>\n'.format(blog_entry.get_subfolder(),
                                    blog_entry.name,
                                    blog_entry.get_formatted_date(),
                                    blog_entry.title)
        # Substitute the template placeholders in place.
        with fileinput.FileInput(file, inplace=1) as file_input:
            for line in file_input:
                print(line.replace(
                    '{{ theme }}',
                    '{}.css'.format(str.lower(self.selected_theme))).replace(
                        '{{ blog_entries }}', entries_html).replace(
                            '{{ blog_name }}', self.blog_name).replace(
                                '{{ description }}', self.description).replace(
                                    '{{ year }}', str(d.today().year)).replace(
                                        '{{ language }}',
                                        str.lower(self.language)),
                      end='')
        helpers.minify_html(file)
|
[
"os.makedirs",
"datetime.date.today",
"helpers.minify_html",
"fileinput.FileInput",
"article.Article",
"shutil.copyfile",
"helpers.check_file"
] |
[((424, 460), 'os.makedirs', 'os.makedirs', (['"""build/"""'], {'exist_ok': '(True)'}), "('build/', exist_ok=True)\n", (435, 460), False, 'import os\n'), ((535, 565), 'helpers.check_file', 'helpers.check_file', (['theme_file'], {}), '(theme_file)\n', (553, 565), False, 'import helpers\n'), ((921, 942), 'article.Article', 'Article', (['article_file'], {}), '(article_file)\n', (928, 942), False, 'from article import Article\n'), ((1073, 1110), 'os.makedirs', 'os.makedirs', (['subfolder'], {'exist_ok': '(True)'}), '(subfolder, exist_ok=True)\n', (1084, 1110), False, 'import os\n'), ((1228, 1256), 'helpers.check_file', 'helpers.check_file', (['template'], {}), '(template)\n', (1246, 1256), False, 'import helpers\n'), ((1265, 1289), 'shutil.copyfile', 'copyfile', (['template', 'file'], {}), '(template, file)\n', (1273, 1289), False, 'from shutil import copyfile\n'), ((2180, 2205), 'helpers.minify_html', 'helpers.minify_html', (['file'], {}), '(file)\n', (2199, 2205), False, 'import helpers\n'), ((2373, 2401), 'helpers.check_file', 'helpers.check_file', (['template'], {}), '(template)\n', (2391, 2401), False, 'import helpers\n'), ((2410, 2434), 'shutil.copyfile', 'copyfile', (['template', 'file'], {}), '(template, file)\n', (2418, 2434), False, 'from shutil import copyfile\n'), ((4002, 4027), 'helpers.minify_html', 'helpers.minify_html', (['file'], {}), '(file)\n', (4021, 4027), False, 'import helpers\n'), ((1304, 1340), 'fileinput.FileInput', 'fileinput.FileInput', (['file'], {'inplace': '(1)'}), '(file, inplace=1)\n', (1323, 1340), False, 'import fileinput\n'), ((3300, 3336), 'fileinput.FileInput', 'fileinput.FileInput', (['file'], {'inplace': '(1)'}), '(file, inplace=1)\n', (3319, 3336), False, 'import fileinput\n'), ((1990, 1999), 'datetime.date.today', 'd.today', ([], {}), '()\n', (1997, 1999), True, 'from datetime import date as d\n'), ((3812, 3821), 'datetime.date.today', 'd.today', ([], {}), '()\n', (3819, 3821), True, 'from datetime import date as d\n')]
|
""" Docstring for the open_weather_class_tests.py module.
"""
import unittest
from .open_weather_class import OpenWeather
class TestOpenWeatherClass(unittest.TestCase):
    """Exercises the OpenWeather class against a known and an unknown city."""

    def setUp(self) -> None:
        """Prepare an OpenWeather instance plus the two test locations."""
        self.open_weather = OpenWeather()
        self.location = 'London,uk'
        self.wrong_location = 'SaoPaulo,br'

    def test_fetch(self) -> None:
        """fetch_weather returns a non-None response for a valid city."""
        response, _ = self.open_weather.fetch_weather(location=self.location)
        self.assertIsNotNone(response)

    def test_response_format(self) -> None:
        """Every expected field of the response is populated."""
        response, _ = self.open_weather.fetch_weather(location=self.location)
        for field in ('min', 'max', 'avg', 'feels_like'):
            self.assertIsNotNone(response[field])
        for city_field in ('name', 'country'):
            self.assertIsNotNone(response['city'][city_field])

    def test_wrong_city(self) -> None:
        """An unknown location yields an empty response dict."""
        response, _ = self.open_weather.fetch_weather(location=self.wrong_location)
        self.assertEqual(response, {})


if __name__ == '__main__':
    unittest.main()
|
[
"unittest.main"
] |
[((2007, 2022), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2020, 2022), False, 'import unittest\n')]
|
import numpy as np
import pytest
from packaging import version
import qcodes as qc
from qcodes import load_or_create_experiment, initialise_or_create_database_at
from plottr.data.datadict import DataDict
from plottr.utils import testdata
from plottr.node.tools import linearFlowchart
from plottr.data.qcodes_dataset import (
QCodesDSLoader,
get_ds_structure,
get_ds_info,
get_runs_from_db,
ds_to_datadict)
@pytest.fixture(scope='function')
def empty_db_path(tmp_path):
    """Yield the path of a freshly initialised QCoDeS database under tmp_path."""
    path = str(tmp_path / 'some.db')
    initialise_or_create_database_at(path)
    yield path
@pytest.fixture
def experiment(empty_db_path):
    """Yield a '2d_softsweep' experiment in the temp DB; close it on teardown."""
    created_exp = load_or_create_experiment('2d_softsweep', sample_name='no sample')
    yield created_exp
    created_exp.conn.close()
@pytest.fixture
def database_with_three_datasets(empty_db_path):
    """Fixture of a database file with 3 DataSets.

    Yields ``(db_path, (dataset11, dataset12, dataset2))`` and closes all
    dataset and experiment connections on teardown.
    """
    exp1 = load_or_create_experiment('get_runs_from_db', sample_name='qubit')
    m1 = qc.Measurement(exp=exp1)
    m1.register_custom_parameter('x', unit='cm')
    m1.register_custom_parameter('y')
    m1.register_custom_parameter('foo')
    for n in range(2):
        m1.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
    # First dataset: parameters registered but no results added (empty run).
    with m1.run() as datasaver:
        dataset11 = datasaver.dataset
    # Second dataset: same measurement, a single result row.
    with m1.run() as datasaver:
        datasaver.add_result(('x', 1.), ('y', 2.), ('z_0', 42.), ('z_1', 0.2))
        dataset12 = datasaver.dataset
    # Third dataset lives in a second experiment with a different parameter
    # set and two result rows.
    exp2 = load_or_create_experiment('give_em', sample_name='now')
    m2 = qc.Measurement(exp=exp2)
    m2.register_custom_parameter('a')
    m2.register_custom_parameter('b', unit='mm')
    m2.register_custom_parameter('c', setpoints=['a', 'b'])
    with m2.run() as datasaver:
        datasaver.add_result(('a', 1.), ('b', 2.), ('c', 42.))
        datasaver.add_result(('a', 4.), ('b', 5.), ('c', 77.))
        dataset2 = datasaver.dataset
    datasets = (dataset11, dataset12, dataset2)
    yield empty_db_path, datasets
    # Teardown: release every open database connection.
    for ds in datasets:
        ds.conn.close()
    exp1.conn.close()
    exp2.conn.close()
def test_load_2dsoftsweep(experiment):
    """ds_to_datadict converts a 2D soft-sweep dataset into the expected DataDict."""
    N = 5
    m = qc.Measurement(exp=experiment)
    m.register_custom_parameter('x', unit='cm')
    m.register_custom_parameter('y')
    # check that unused parameters don't mess with
    m.register_custom_parameter('foo')
    # Build the DataDict we expect ds_to_datadict to produce; it is filled
    # in lockstep with the datasaver below.
    dd_expected = DataDict(x=dict(values=np.array([]), unit='cm'),
                         y=dict(values=np.array([])))
    for n in range(N):
        m.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
        dd_expected[f'z_{n}'] = dict(values=np.array([]), axes=['x', 'y'])
    dd_expected.validate()
    with m.run() as datasaver:
        for result in testdata.generate_2d_scalar_simple(3, 3, N):
            # 'foo' is added to each saved row but must not appear in the
            # resulting DataDict (it is a standalone parameter).
            row = [(k, v) for k, v in result.items()] + [('foo', 1)]
            datasaver.add_result(*row)
            dd_expected.add_data(**result)
    # retrieve data as data dict
    ddict = ds_to_datadict(datasaver.dataset)
    assert ddict == dd_expected
@pytest.mark.skipif(version.parse(qc.__version__)
                    < version.parse("0.20.0"),
                    reason="Requires QCoDes 0.20.0 or later")
def test_load_2dsoftsweep_known_shape(experiment):
    """Like test_load_2dsoftsweep, but with the dataset shape declared up
    front via Measurement.set_shapes, so the loaded arrays come back
    reshaped to that grid."""
    N = 1
    m = qc.Measurement(exp=experiment)
    m.register_custom_parameter('x', unit='cm')
    m.register_custom_parameter('y')
    # check that unused parameters don't mess with
    m.register_custom_parameter('foo')
    dd_expected = DataDict(x=dict(values=np.array([]), unit='cm'),
                         y=dict(values=np.array([])))
    for n in range(N):
        m.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
        dd_expected[f'z_{n}'] = dict(values=np.array([]), axes=['x', 'y'])
    dd_expected.validate()
    shape = (3, 3)
    m.set_shapes({'z_0': shape})
    with m.run() as datasaver:
        for result in testdata.generate_2d_scalar_simple(*shape, N):
            row = [(k, v) for k, v in result.items()] + [('foo', 1)]
            datasaver.add_result(*row)
            dd_expected.add_data(**result)
    # With a declared shape the loader returns gridded data, so the flat
    # expected arrays must be reshaped to match.
    dd_expected['x']['values'] = dd_expected['x']['values'].reshape(*shape)
    dd_expected['y']['values'] = dd_expected['y']['values'].reshape(*shape)
    dd_expected['z_0']['values'] = dd_expected['z_0']['values'].reshape(*shape)
    # retrieve data as data dict
    ddict = ds_to_datadict(datasaver.dataset)
    assert ddict == dd_expected
def test_get_ds_structure(experiment):
    """get_ds_structure reports units, labels, axes and (empty) values for
    every dependent/independent parameter, excluding standalone ones."""
    N = 5
    m = qc.Measurement(exp=experiment)
    m.register_custom_parameter('x', unit='cm',label='my_x_param')
    m.register_custom_parameter('y')
    # check that unused parameters don't mess with
    m.register_custom_parameter('foo')
    for n in range(N):
        m.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
    with m.run() as datasaver:
        dataset = datasaver.dataset
    # test dataset structure function
    expected_structure = {
        'x': {
            'unit': 'cm',
            'label': 'my_x_param',
            'values': []
        },
        'y': {
            'unit': '',
            'label': '',
            'values': []
        }
        # note that parameter 'foo' is not expected to be included
        # because it's a "standalone" parameter
    }
    for n in range(N):
        expected_structure.update(
            {f'z_{n}': {
                'unit': '',
                'label': '',
                'axes': ['x', 'y'],
                'values': []
            }
            }
        )
    structure = get_ds_structure(dataset)
    assert structure == expected_structure
def test_get_ds_info(experiment):
N = 5
m = qc.Measurement(exp=experiment)
m.register_custom_parameter('x', unit='cm')
m.register_custom_parameter('y')
m.register_custom_parameter('foo')
for n in range(N):
m.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
with m.run() as datasaver:
dataset = datasaver.dataset
ds_info_with_empty_timestamps = get_ds_info(dataset,
get_structure=False)
assert ds_info_with_empty_timestamps['completed_date'] == ''
assert ds_info_with_empty_timestamps['completed_time'] == ''
# timestamps are difficult to test for, so we will cheat here and
# instead of hard-coding timestamps we will just get them from the dataset
# The same applies to the guid as it contains the timestamp
started_ts = dataset.run_timestamp()
completed_ts = dataset.completed_timestamp()
expected_ds_info = {
'experiment': '2d_softsweep',
'sample': 'no sample',
'completed_date': completed_ts[:10],
'completed_time': completed_ts[11:],
'started_date': started_ts[:10],
'started_time': started_ts[11:],
'name': 'results',
'structure': None,
'records': 0,
'guid': dataset.guid
}
ds_info = get_ds_info(dataset, get_structure=False)
assert ds_info == expected_ds_info
expected_ds_info_with_structure = expected_ds_info.copy()
expected_ds_info_with_structure['structure'] = get_ds_structure(dataset)
ds_info_with_structure = get_ds_info(dataset)
assert ds_info_with_structure == expected_ds_info_with_structure
def test_get_runs_from_db(database_with_three_datasets):
db_path, datasets = database_with_three_datasets
# Prepare an expected overview of the created database
expected_overview = {ds.run_id: get_ds_info(ds, get_structure=False)
for ds in datasets}
# Get the actual overview of the created database
overview = get_runs_from_db(db_path) # get_structure=False is the default
# Finally, assert
assert overview == expected_overview
# Prepare an expected overview of the created database WITH STRUCTURE
expected_overview_with_structure = {
ds.run_id: get_ds_info(ds, get_structure=True)
for ds in datasets
}
# Get the actual overview of the created database WITH STRUCTURE
overview_with_structure = get_runs_from_db(db_path, get_structure=True)
# Finally, assert WITH STRUCTURE
assert overview_with_structure == expected_overview_with_structure
def test_update_qcloader(qtbot, empty_db_path):
db_path = empty_db_path
exp = load_or_create_experiment('2d_softsweep', sample_name='no sample')
N = 2
m = qc.Measurement(exp=exp)
m.register_custom_parameter('x')
m.register_custom_parameter('y')
dd_expected = DataDict(x=dict(values=np.array([])),
y=dict(values=np.array([])))
for n in range(N):
m.register_custom_parameter(f'z_{n}', setpoints=['x', 'y'])
dd_expected[f'z_{n}'] = dict(values=np.array([]), axes=['x', 'y'])
dd_expected.validate()
# setting up the flowchart
fc = linearFlowchart(('loader', QCodesDSLoader))
loader = fc.nodes()['loader']
def check():
nresults = ds.number_of_results
loader.update()
ddict = fc.output()['dataOut']
if ddict is not None and nresults > 0:
z_in = dd_expected.data_vals('z_1')
z_out = ddict.data_vals('z_1')
if z_out is not None:
assert z_in.size == z_out.size
assert np.allclose(z_in, z_out, atol=1e-15)
with m.run() as datasaver:
ds = datasaver.dataset
run_id = datasaver.dataset.captured_run_id
loader.pathAndId = db_path, run_id
for result in testdata.generate_2d_scalar_simple(3, 3, N):
row = [(k, v) for k, v in result.items()]
datasaver.add_result(*row)
dd_expected.add_data(**result)
check()
check()
# insert data in small chunks, and check
# while True:
# try:
# ninsertions = np.random.randint(0, 5)
# for n in range(ninsertions):
# _ds.add_result(next(results))
# except StopIteration:
# _ds.mark_complete()
# break
# check()
# check()
|
[
"qcodes.Measurement",
"plottr.data.qcodes_dataset.get_ds_info",
"qcodes.load_or_create_experiment",
"packaging.version.parse",
"numpy.allclose",
"pytest.fixture",
"plottr.data.qcodes_dataset.get_ds_structure",
"plottr.data.qcodes_dataset.ds_to_datadict",
"plottr.data.qcodes_dataset.get_runs_from_db",
"numpy.array",
"plottr.utils.testdata.generate_2d_scalar_simple",
"plottr.node.tools.linearFlowchart",
"qcodes.initialise_or_create_database_at"
] |
[((431, 463), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (445, 463), False, 'import pytest\n'), ((537, 578), 'qcodes.initialise_or_create_database_at', 'initialise_or_create_database_at', (['db_path'], {}), '(db_path)\n', (569, 578), False, 'from qcodes import load_or_create_experiment, initialise_or_create_database_at\n'), ((656, 722), 'qcodes.load_or_create_experiment', 'load_or_create_experiment', (['"""2d_softsweep"""'], {'sample_name': '"""no sample"""'}), "('2d_softsweep', sample_name='no sample')\n", (681, 722), False, 'from qcodes import load_or_create_experiment, initialise_or_create_database_at\n'), ((889, 955), 'qcodes.load_or_create_experiment', 'load_or_create_experiment', (['"""get_runs_from_db"""'], {'sample_name': '"""qubit"""'}), "('get_runs_from_db', sample_name='qubit')\n", (914, 955), False, 'from qcodes import load_or_create_experiment, initialise_or_create_database_at\n'), ((965, 989), 'qcodes.Measurement', 'qc.Measurement', ([], {'exp': 'exp1'}), '(exp=exp1)\n', (979, 989), True, 'import qcodes as qc\n'), ((1444, 1499), 'qcodes.load_or_create_experiment', 'load_or_create_experiment', (['"""give_em"""'], {'sample_name': '"""now"""'}), "('give_em', sample_name='now')\n", (1469, 1499), False, 'from qcodes import load_or_create_experiment, initialise_or_create_database_at\n'), ((1509, 1533), 'qcodes.Measurement', 'qc.Measurement', ([], {'exp': 'exp2'}), '(exp=exp2)\n', (1523, 1533), True, 'import qcodes as qc\n'), ((2114, 2144), 'qcodes.Measurement', 'qc.Measurement', ([], {'exp': 'experiment'}), '(exp=experiment)\n', (2128, 2144), True, 'import qcodes as qc\n'), ((2933, 2966), 'plottr.data.qcodes_dataset.ds_to_datadict', 'ds_to_datadict', (['datasaver.dataset'], {}), '(datasaver.dataset)\n', (2947, 2966), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, ds_to_datadict\n'), ((3229, 3259), 'qcodes.Measurement', 'qc.Measurement', ([], 
{'exp': 'experiment'}), '(exp=experiment)\n', (3243, 3259), True, 'import qcodes as qc\n'), ((4337, 4370), 'plottr.data.qcodes_dataset.ds_to_datadict', 'ds_to_datadict', (['datasaver.dataset'], {}), '(datasaver.dataset)\n', (4351, 4370), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, ds_to_datadict\n'), ((4463, 4493), 'qcodes.Measurement', 'qc.Measurement', ([], {'exp': 'experiment'}), '(exp=experiment)\n', (4477, 4493), True, 'import qcodes as qc\n'), ((5510, 5535), 'plottr.data.qcodes_dataset.get_ds_structure', 'get_ds_structure', (['dataset'], {}), '(dataset)\n', (5526, 5535), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, ds_to_datadict\n'), ((5634, 5664), 'qcodes.Measurement', 'qc.Measurement', ([], {'exp': 'experiment'}), '(exp=experiment)\n', (5648, 5664), True, 'import qcodes as qc\n'), ((6919, 6960), 'plottr.data.qcodes_dataset.get_ds_info', 'get_ds_info', (['dataset'], {'get_structure': '(False)'}), '(dataset, get_structure=False)\n', (6930, 6960), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, ds_to_datadict\n'), ((7115, 7140), 'plottr.data.qcodes_dataset.get_ds_structure', 'get_ds_structure', (['dataset'], {}), '(dataset)\n', (7131, 7140), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, ds_to_datadict\n'), ((7171, 7191), 'plottr.data.qcodes_dataset.get_ds_info', 'get_ds_info', (['dataset'], {}), '(dataset)\n', (7182, 7191), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, ds_to_datadict\n'), ((7622, 7647), 'plottr.data.qcodes_dataset.get_runs_from_db', 'get_runs_from_db', (['db_path'], {}), '(db_path)\n', (7638, 7647), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, 
ds_to_datadict\n'), ((8054, 8099), 'plottr.data.qcodes_dataset.get_runs_from_db', 'get_runs_from_db', (['db_path'], {'get_structure': '(True)'}), '(db_path, get_structure=True)\n', (8070, 8099), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, ds_to_datadict\n'), ((8298, 8364), 'qcodes.load_or_create_experiment', 'load_or_create_experiment', (['"""2d_softsweep"""'], {'sample_name': '"""no sample"""'}), "('2d_softsweep', sample_name='no sample')\n", (8323, 8364), False, 'from qcodes import load_or_create_experiment, initialise_or_create_database_at\n'), ((8384, 8407), 'qcodes.Measurement', 'qc.Measurement', ([], {'exp': 'exp'}), '(exp=exp)\n', (8398, 8407), True, 'import qcodes as qc\n'), ((8828, 8871), 'plottr.node.tools.linearFlowchart', 'linearFlowchart', (["('loader', QCodesDSLoader)"], {}), "(('loader', QCodesDSLoader))\n", (8843, 8871), False, 'from plottr.node.tools import linearFlowchart\n'), ((2691, 2734), 'plottr.utils.testdata.generate_2d_scalar_simple', 'testdata.generate_2d_scalar_simple', (['(3)', '(3)', 'N'], {}), '(3, 3, N)\n', (2725, 2734), False, 'from plottr.utils import testdata\n'), ((3860, 3905), 'plottr.utils.testdata.generate_2d_scalar_simple', 'testdata.generate_2d_scalar_simple', (['*shape', 'N'], {}), '(*shape, N)\n', (3894, 3905), False, 'from plottr.utils import testdata\n'), ((3021, 3050), 'packaging.version.parse', 'version.parse', (['qc.__version__'], {}), '(qc.__version__)\n', (3034, 3050), False, 'from packaging import version\n'), ((3073, 3096), 'packaging.version.parse', 'version.parse', (['"""0.20.0"""'], {}), "('0.20.0')\n", (3086, 3096), False, 'from packaging import version\n'), ((5990, 6031), 'plottr.data.qcodes_dataset.get_ds_info', 'get_ds_info', (['dataset'], {'get_structure': '(False)'}), '(dataset, get_structure=False)\n', (6001, 6031), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, 
ds_to_datadict\n'), ((7470, 7506), 'plottr.data.qcodes_dataset.get_ds_info', 'get_ds_info', (['ds'], {'get_structure': '(False)'}), '(ds, get_structure=False)\n', (7481, 7506), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, ds_to_datadict\n'), ((7885, 7920), 'plottr.data.qcodes_dataset.get_ds_info', 'get_ds_info', (['ds'], {'get_structure': '(True)'}), '(ds, get_structure=True)\n', (7896, 7920), False, 'from plottr.data.qcodes_dataset import QCodesDSLoader, get_ds_structure, get_ds_info, get_runs_from_db, ds_to_datadict\n'), ((9487, 9530), 'plottr.utils.testdata.generate_2d_scalar_simple', 'testdata.generate_2d_scalar_simple', (['(3)', '(3)', 'N'], {}), '(3, 3, N)\n', (9521, 9530), False, 'from plottr.utils import testdata\n'), ((2579, 2591), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (2587, 2591), True, 'import numpy as np\n'), ((3694, 3706), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3702, 3706), True, 'import numpy as np\n'), ((8729, 8741), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (8737, 8741), True, 'import numpy as np\n'), ((9270, 9306), 'numpy.allclose', 'np.allclose', (['z_in', 'z_out'], {'atol': '(1e-15)'}), '(z_in, z_out, atol=1e-15)\n', (9281, 9306), True, 'import numpy as np\n'), ((2362, 2374), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (2370, 2374), True, 'import numpy as np\n'), ((2429, 2441), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (2437, 2441), True, 'import numpy as np\n'), ((3477, 3489), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3485, 3489), True, 'import numpy as np\n'), ((3544, 3556), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3552, 3556), True, 'import numpy as np\n'), ((8523, 8535), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (8531, 8535), True, 'import numpy as np\n'), ((8579, 8591), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (8587, 8591), True, 'import numpy as np\n')]
|
"""
FactSet Procure to Pay API
Allows for Provisioning and Entitlement of FactSet accounts. Authentication is provided via FactSet's [API Key System](https://developer.factset.com/authentication) Please note that the on-page \"Try it out\" features do not function. You must authorize against our API and make requests directly againt the endpoints. # noqa: E501
The version of the OpenAPI document: 1S
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.ProcuretoPayProvisioning.api_client import ApiClient, Endpoint as _Endpoint
from fds.sdk.ProcuretoPayProvisioning.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from fds.sdk.ProcuretoPayProvisioning.model.cancel_individual import CancelIndividual
from fds.sdk.ProcuretoPayProvisioning.model.create_individual import CreateIndividual
from fds.sdk.ProcuretoPayProvisioning.model.get_individual import GetIndividual
from fds.sdk.ProcuretoPayProvisioning.model.inline_response202 import InlineResponse202
from fds.sdk.ProcuretoPayProvisioning.model.list_individuals import ListIndividuals
from fds.sdk.ProcuretoPayProvisioning.model.modify_individual import ModifyIndividual
class UserManagementApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.cancel_individual_post_endpoint = _Endpoint(
settings={
'response_type': (InlineResponse202,),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/cancelIndividual',
'operation_id': 'cancel_individual_post',
'http_method': 'POST',
'servers': [
{
'url': "https://api.factset.com/procuretopay/provisioning/",
'description': "No description provided",
},
]
},
params_map={
'all': [
'cancel_individual',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'cancel_individual':
(CancelIndividual,),
},
'attribute_map': {
},
'location_map': {
'cancel_individual': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json; charset=utf-8',
'text/plain'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
self.create_individual_post_endpoint = _Endpoint(
settings={
'response_type': (InlineResponse202,),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/createIndividual',
'operation_id': 'create_individual_post',
'http_method': 'POST',
'servers': [
{
'url': "https://api.factset.com/procuretopay/provisioning/",
'description': "No description provided",
},
]
},
params_map={
'all': [
'create_individual',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'create_individual':
(CreateIndividual,),
},
'attribute_map': {
},
'location_map': {
'create_individual': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json; charset=utf-8',
'text/plain'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
self.get_individual_get_endpoint = _Endpoint(
settings={
'response_type': (GetIndividual,),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/getIndividual',
'operation_id': 'get_individual_get',
'http_method': 'GET',
'servers': [
{
'url': "https://api.factset.com/procuretopay/provisioning/",
'description': "No description provided",
},
]
},
params_map={
'all': [
'uniqueid',
],
'required': [
'uniqueid',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'uniqueid':
(str,),
},
'attribute_map': {
'uniqueid': 'uniqueid',
},
'location_map': {
'uniqueid': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json; charset=utf-8',
'text/plain'
],
'content_type': [],
},
api_client=api_client
)
self.list_individuals_get_endpoint = _Endpoint(
settings={
'response_type': (ListIndividuals,),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/listIndividuals',
'operation_id': 'list_individuals_get',
'http_method': 'GET',
'servers': [
{
'url': "https://api.factset.com/procuretopay/provisioning/",
'description': "No description provided",
},
]
},
params_map={
'all': [
'include_product_ids',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'include_product_ids':
(bool,),
},
'attribute_map': {
'include_product_ids': 'includeProductIds',
},
'location_map': {
'include_product_ids': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json; charset=utf-8',
'text/plain'
],
'content_type': [],
},
api_client=api_client
)
self.modify_individual_post_endpoint = _Endpoint(
settings={
'response_type': (InlineResponse202,),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/modifyIndividual',
'operation_id': 'modify_individual_post',
'http_method': 'POST',
'servers': [
{
'url': "https://api.factset.com/procuretopay/provisioning/",
'description': "No description provided",
},
]
},
params_map={
'all': [
'modify_individual',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'modify_individual':
(ModifyIndividual,),
},
'attribute_map': {
},
'location_map': {
'modify_individual': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json; charset=utf-8',
'text/plain'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
def cancel_individual_post(
self,
**kwargs
):
"""Cancels an individual's serial and all productIds # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.cancel_individual_post(async_req=True)
>>> result = thread.get()
Keyword Args:
cancel_individual (CancelIndividual): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
InlineResponse202
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.cancel_individual_post_endpoint.call_with_http_info(**kwargs)
def create_individual_post(
self,
**kwargs
):
"""Provisions an individual for FactSet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_individual_post(async_req=True)
>>> result = thread.get()
Keyword Args:
create_individual (CreateIndividual): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
InlineResponse202
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.create_individual_post_endpoint.call_with_http_info(**kwargs)
def get_individual_get(
self,
uniqueid,
**kwargs
):
"""Returns an individual's details by uniqueId # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_individual_get(uniqueid, async_req=True)
>>> result = thread.get()
Args:
uniqueid (str): uniqueId to query
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
GetIndividual
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['uniqueid'] = \
uniqueid
return self.get_individual_get_endpoint.call_with_http_info(**kwargs)
def list_individuals_get(
self,
**kwargs
):
"""Lists all individuals with details at all locations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_individuals_get(async_req=True)
>>> result = thread.get()
Keyword Args:
include_product_ids (bool): <br>Optional, if =TRUE will return additional product array per object with all productIds for all returned individuals.</br>. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ListIndividuals
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.list_individuals_get_endpoint.call_with_http_info(**kwargs)
def modify_individual_post(
self,
**kwargs
):
"""Modifies an individual's attributes as determined by the uniqueId in the body of the request. Please note that the uniqueId may not be changed. Fields not changing may be passed as NULL but never empty. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_individual_post(async_req=True)
>>> result = thread.get()
Keyword Args:
modify_individual (ModifyIndividual): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
InlineResponse202
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.modify_individual_post_endpoint.call_with_http_info(**kwargs)
|
[
"fds.sdk.ProcuretoPayProvisioning.api_client.Endpoint",
"fds.sdk.ProcuretoPayProvisioning.api_client.ApiClient"
] |
[((1719, 2546), 'fds.sdk.ProcuretoPayProvisioning.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (InlineResponse202,), 'auth': ['FactSetApiKey',\n 'FactSetOAuth2'], 'endpoint_path': '/cancelIndividual', 'operation_id':\n 'cancel_individual_post', 'http_method': 'POST', 'servers': [{'url':\n 'https://api.factset.com/procuretopay/provisioning/', 'description':\n 'No description provided'}]}", 'params_map': "{'all': ['cancel_individual'], 'required': [], 'nullable': [], 'enum': [],\n 'validation': []}", 'root_map': "{'validations': {}, 'allowed_values': {}, 'openapi_types': {\n 'cancel_individual': (CancelIndividual,)}, 'attribute_map': {},\n 'location_map': {'cancel_individual': 'body'}, 'collection_format_map': {}}", 'headers_map': "{'accept': ['application/json; charset=utf-8', 'text/plain'],\n 'content_type': ['application/json']}", 'api_client': 'api_client'}), "(settings={'response_type': (InlineResponse202,), 'auth': [\n 'FactSetApiKey', 'FactSetOAuth2'], 'endpoint_path': '/cancelIndividual',\n 'operation_id': 'cancel_individual_post', 'http_method': 'POST',\n 'servers': [{'url':\n 'https://api.factset.com/procuretopay/provisioning/', 'description':\n 'No description provided'}]}, params_map={'all': ['cancel_individual'],\n 'required': [], 'nullable': [], 'enum': [], 'validation': []}, root_map\n ={'validations': {}, 'allowed_values': {}, 'openapi_types': {\n 'cancel_individual': (CancelIndividual,)}, 'attribute_map': {},\n 'location_map': {'cancel_individual': 'body'}, 'collection_format_map':\n {}}, headers_map={'accept': ['application/json; charset=utf-8',\n 'text/plain'], 'content_type': ['application/json']}, api_client=api_client\n )\n", (1728, 2546), True, 'from fds.sdk.ProcuretoPayProvisioning.api_client import ApiClient, Endpoint as _Endpoint\n'), ((3496, 4323), 'fds.sdk.ProcuretoPayProvisioning.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (InlineResponse202,), 'auth': ['FactSetApiKey',\n 
'FactSetOAuth2'], 'endpoint_path': '/createIndividual', 'operation_id':\n 'create_individual_post', 'http_method': 'POST', 'servers': [{'url':\n 'https://api.factset.com/procuretopay/provisioning/', 'description':\n 'No description provided'}]}", 'params_map': "{'all': ['create_individual'], 'required': [], 'nullable': [], 'enum': [],\n 'validation': []}", 'root_map': "{'validations': {}, 'allowed_values': {}, 'openapi_types': {\n 'create_individual': (CreateIndividual,)}, 'attribute_map': {},\n 'location_map': {'create_individual': 'body'}, 'collection_format_map': {}}", 'headers_map': "{'accept': ['application/json; charset=utf-8', 'text/plain'],\n 'content_type': ['application/json']}", 'api_client': 'api_client'}), "(settings={'response_type': (InlineResponse202,), 'auth': [\n 'FactSetApiKey', 'FactSetOAuth2'], 'endpoint_path': '/createIndividual',\n 'operation_id': 'create_individual_post', 'http_method': 'POST',\n 'servers': [{'url':\n 'https://api.factset.com/procuretopay/provisioning/', 'description':\n 'No description provided'}]}, params_map={'all': ['create_individual'],\n 'required': [], 'nullable': [], 'enum': [], 'validation': []}, root_map\n ={'validations': {}, 'allowed_values': {}, 'openapi_types': {\n 'create_individual': (CreateIndividual,)}, 'attribute_map': {},\n 'location_map': {'create_individual': 'body'}, 'collection_format_map':\n {}}, headers_map={'accept': ['application/json; charset=utf-8',\n 'text/plain'], 'content_type': ['application/json']}, api_client=api_client\n )\n", (3505, 4323), True, 'from fds.sdk.ProcuretoPayProvisioning.api_client import ApiClient, Endpoint as _Endpoint\n'), ((5269, 6054), 'fds.sdk.ProcuretoPayProvisioning.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (GetIndividual,), 'auth': ['FactSetApiKey',\n 'FactSetOAuth2'], 'endpoint_path': '/getIndividual', 'operation_id':\n 'get_individual_get', 'http_method': 'GET', 'servers': [{'url':\n 
'https://api.factset.com/procuretopay/provisioning/', 'description':\n 'No description provided'}]}", 'params_map': "{'all': ['uniqueid'], 'required': ['uniqueid'], 'nullable': [], 'enum': [],\n 'validation': []}", 'root_map': "{'validations': {}, 'allowed_values': {}, 'openapi_types': {'uniqueid': (\n str,)}, 'attribute_map': {'uniqueid': 'uniqueid'}, 'location_map': {\n 'uniqueid': 'query'}, 'collection_format_map': {}}", 'headers_map': "{'accept': ['application/json; charset=utf-8', 'text/plain'],\n 'content_type': []}", 'api_client': 'api_client'}), "(settings={'response_type': (GetIndividual,), 'auth': [\n 'FactSetApiKey', 'FactSetOAuth2'], 'endpoint_path': '/getIndividual',\n 'operation_id': 'get_individual_get', 'http_method': 'GET', 'servers':\n [{'url': 'https://api.factset.com/procuretopay/provisioning/',\n 'description': 'No description provided'}]}, params_map={'all': [\n 'uniqueid'], 'required': ['uniqueid'], 'nullable': [], 'enum': [],\n 'validation': []}, root_map={'validations': {}, 'allowed_values': {},\n 'openapi_types': {'uniqueid': (str,)}, 'attribute_map': {'uniqueid':\n 'uniqueid'}, 'location_map': {'uniqueid': 'query'},\n 'collection_format_map': {}}, headers_map={'accept': [\n 'application/json; charset=utf-8', 'text/plain'], 'content_type': []},\n api_client=api_client)\n", (5278, 6054), True, 'from fds.sdk.ProcuretoPayProvisioning.api_client import ApiClient, Endpoint as _Endpoint\n'), ((7031, 7868), 'fds.sdk.ProcuretoPayProvisioning.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (ListIndividuals,), 'auth': ['FactSetApiKey',\n 'FactSetOAuth2'], 'endpoint_path': '/listIndividuals', 'operation_id':\n 'list_individuals_get', 'http_method': 'GET', 'servers': [{'url':\n 'https://api.factset.com/procuretopay/provisioning/', 'description':\n 'No description provided'}]}", 'params_map': "{'all': ['include_product_ids'], 'required': [], 'nullable': [], 'enum': [],\n 'validation': []}", 'root_map': "{'validations': {}, 
'allowed_values': {}, 'openapi_types': {\n 'include_product_ids': (bool,)}, 'attribute_map': {\n 'include_product_ids': 'includeProductIds'}, 'location_map': {\n 'include_product_ids': 'query'}, 'collection_format_map': {}}", 'headers_map': "{'accept': ['application/json; charset=utf-8', 'text/plain'],\n 'content_type': []}", 'api_client': 'api_client'}), "(settings={'response_type': (ListIndividuals,), 'auth': [\n 'FactSetApiKey', 'FactSetOAuth2'], 'endpoint_path': '/listIndividuals',\n 'operation_id': 'list_individuals_get', 'http_method': 'GET', 'servers':\n [{'url': 'https://api.factset.com/procuretopay/provisioning/',\n 'description': 'No description provided'}]}, params_map={'all': [\n 'include_product_ids'], 'required': [], 'nullable': [], 'enum': [],\n 'validation': []}, root_map={'validations': {}, 'allowed_values': {},\n 'openapi_types': {'include_product_ids': (bool,)}, 'attribute_map': {\n 'include_product_ids': 'includeProductIds'}, 'location_map': {\n 'include_product_ids': 'query'}, 'collection_format_map': {}},\n headers_map={'accept': ['application/json; charset=utf-8', 'text/plain'\n ], 'content_type': []}, api_client=api_client)\n", (7040, 7868), True, 'from fds.sdk.ProcuretoPayProvisioning.api_client import ApiClient, Endpoint as _Endpoint\n'), ((8806, 9633), 'fds.sdk.ProcuretoPayProvisioning.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (InlineResponse202,), 'auth': ['FactSetApiKey',\n 'FactSetOAuth2'], 'endpoint_path': '/modifyIndividual', 'operation_id':\n 'modify_individual_post', 'http_method': 'POST', 'servers': [{'url':\n 'https://api.factset.com/procuretopay/provisioning/', 'description':\n 'No description provided'}]}", 'params_map': "{'all': ['modify_individual'], 'required': [], 'nullable': [], 'enum': [],\n 'validation': []}", 'root_map': "{'validations': {}, 'allowed_values': {}, 'openapi_types': {\n 'modify_individual': (ModifyIndividual,)}, 'attribute_map': {},\n 'location_map': {'modify_individual': 
'body'}, 'collection_format_map': {}}", 'headers_map': "{'accept': ['application/json; charset=utf-8', 'text/plain'],\n 'content_type': ['application/json']}", 'api_client': 'api_client'}), "(settings={'response_type': (InlineResponse202,), 'auth': [\n 'FactSetApiKey', 'FactSetOAuth2'], 'endpoint_path': '/modifyIndividual',\n 'operation_id': 'modify_individual_post', 'http_method': 'POST',\n 'servers': [{'url':\n 'https://api.factset.com/procuretopay/provisioning/', 'description':\n 'No description provided'}]}, params_map={'all': ['modify_individual'],\n 'required': [], 'nullable': [], 'enum': [], 'validation': []}, root_map\n ={'validations': {}, 'allowed_values': {}, 'openapi_types': {\n 'modify_individual': (ModifyIndividual,)}, 'attribute_map': {},\n 'location_map': {'modify_individual': 'body'}, 'collection_format_map':\n {}}, headers_map={'accept': ['application/json; charset=utf-8',\n 'text/plain'], 'content_type': ['application/json']}, api_client=api_client\n )\n", (8815, 9633), True, 'from fds.sdk.ProcuretoPayProvisioning.api_client import ApiClient, Endpoint as _Endpoint\n'), ((1623, 1634), 'fds.sdk.ProcuretoPayProvisioning.api_client.ApiClient', 'ApiClient', ([], {}), '()\n', (1632, 1634), False, 'from fds.sdk.ProcuretoPayProvisioning.api_client import ApiClient, Endpoint as _Endpoint\n')]
|
"""Project Static Views.
This view host static file generate from WebModule/ui
"""
import logging
import os
from django.conf import settings
from django.http import HttpResponse
from django.views.generic import View
class UIAppView(View):
    """Serve the production build of the single-page UI."""

    def get(self, request):
        """Return the built ``index.html``, or a 501 when no build exists."""
        index_path = os.path.join(settings.UI_DIR, 'index.html')
        try:
            ui_file = open(index_path)
        except FileNotFoundError:
            # No production build present: explain how to run the dev server
            # or produce a build instead of returning a bare 404.
            logging.exception('Production build of app not found')
            return HttpResponse(
                '''
                This URL is only used when you have built the production
                version of the app. Visit http://localhost:3000/ instead, or
                run `yarn run build` to test the production version.
                ''',
                status=501,
            )
        with ui_file:
            return HttpResponse(ui_file.read())
|
[
"logging.exception",
"os.path.join",
"django.http.HttpResponse"
] |
[((513, 567), 'logging.exception', 'logging.exception', (['"""Production build of app not found"""'], {}), "('Production build of app not found')\n", (530, 567), False, 'import logging\n'), ((587, 865), 'django.http.HttpResponse', 'HttpResponse', (['"""\n This URL is only used when you have built the production\n version of the app. Visit http://localhost:3000/ instead, or\n run `yarn run build` to test the production version.\n """'], {'status': '(501)'}), '(\n """\n This URL is only used when you have built the production\n version of the app. Visit http://localhost:3000/ instead, or\n run `yarn run build` to test the production version.\n """\n , status=501)\n', (599, 865), False, 'from django.http import HttpResponse\n'), ((358, 401), 'os.path.join', 'os.path.join', (['settings.UI_DIR', '"""index.html"""'], {}), "(settings.UI_DIR, 'index.html')\n", (370, 401), False, 'import os\n')]
|
from pygame._sdl2 import get_num_audio_devices, get_audio_device_name
from pygame import mixer
from gtts import gTTS
import time
import os
class TTS:
    """Text-to-speech playback routed through the VB-CABLE virtual device."""

    # Temporary file that holds the synthesized speech between save and play.
    _AUDIO_FILE = "text.mp3"

    def __init__(self) -> None:
        """Initialise the mixer on the VB-CABLE output device.

        Raises:
            Exception: if the VB-CABLE virtual audio device is unavailable.
        """
        # Init/quit once first so pygame enumerates devices before we
        # re-initialise on the specific virtual-cable output.
        mixer.init()
        mixer.quit()
        try:
            mixer.init(devicename="CABLE Input (VB-Audio Virtual Cable)")
        except Exception as exc:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; chain the original cause for debugging.
            raise Exception("Please make sure VB-CABLE is installed, you can download it at: https://vb-audio.com/Cable/") from exc

    def speak(self, text: str) -> None:
        """Synthesize *text* with gTTS and play it, blocking until finished."""
        gTTS(text=text, lang='en-uk').save(self._AUDIO_FILE)
        mixer.music.load(self._AUDIO_FILE)
        mixer.music.play()
        # Poll until playback ends so the file can be unloaded safely.
        while mixer.music.get_busy():
            time.sleep(0.3)
        mixer.music.unload()

    def removeAudioFile(self) -> None:
        """Delete the temporary audio file if it exists."""
        if os.path.exists(self._AUDIO_FILE):
            os.remove(self._AUDIO_FILE)
|
[
"os.remove",
"gtts.gTTS",
"pygame.mixer.init",
"os.path.exists",
"pygame.mixer.music.play",
"time.sleep",
"pygame.mixer.music.unload",
"pygame.mixer.music.get_busy",
"pygame.mixer.music.load",
"pygame.mixer.quit"
] |
[((191, 203), 'pygame.mixer.init', 'mixer.init', ([], {}), '()\n', (201, 203), False, 'from pygame import mixer\n'), ((212, 224), 'pygame.mixer.quit', 'mixer.quit', ([], {}), '()\n', (222, 224), False, 'from pygame import mixer\n'), ((577, 605), 'pygame.mixer.music.load', 'mixer.music.load', (['"""text.mp3"""'], {}), "('text.mp3')\n", (593, 605), False, 'from pygame import mixer\n'), ((614, 632), 'pygame.mixer.music.play', 'mixer.music.play', ([], {}), '()\n', (630, 632), False, 'from pygame import mixer\n'), ((656, 678), 'pygame.mixer.music.get_busy', 'mixer.music.get_busy', ([], {}), '()\n', (676, 678), False, 'from pygame import mixer\n'), ((729, 749), 'pygame.mixer.music.unload', 'mixer.music.unload', ([], {}), '()\n', (747, 749), False, 'from pygame import mixer\n'), ((801, 827), 'os.path.exists', 'os.path.exists', (['"""text.mp3"""'], {}), "('text.mp3')\n", (815, 827), False, 'import os\n'), ((250, 311), 'pygame.mixer.init', 'mixer.init', ([], {'devicename': '"""CABLE Input (VB-Audio Virtual Cable)"""'}), "(devicename='CABLE Input (VB-Audio Virtual Cable)')\n", (260, 311), False, 'from pygame import mixer\n'), ((692, 707), 'time.sleep', 'time.sleep', (['(0.3)'], {}), '(0.3)\n', (702, 707), False, 'import time\n'), ((841, 862), 'os.remove', 'os.remove', (['"""text.mp3"""'], {}), "('text.mp3')\n", (850, 862), False, 'import os\n'), ((513, 542), 'gtts.gTTS', 'gTTS', ([], {'text': 'text', 'lang': '"""en-uk"""'}), "(text=text, lang='en-uk')\n", (517, 542), False, 'from gtts import gTTS\n')]
|
'''
TauREx v2 - Development version - DO NOT DISTRIBUTE
TauREx create spectrum
Developers: <NAME>, <NAME> (University College London)
'''
# loading libraries
import sys
import os
import argparse
import logging
# loading classes
sys.path.append('./classes')
sys.path.append('./library')
from parameters import *
from transmission import *
from emission import *
from atmosphere import *
from data import *
class create_spectrum(object):
    """TauREx forward-model driver.

    Builds the ``data``, ``atmosphere`` and forward-model (``transmission``
    or ``emission``) objects from a ``parameters`` instance and generates a
    model spectrum plus diagnostic atmospheric profiles.
    """
    def __init__(self, params=None, param_filename=None, nthreads=0,full_init=True):
        """Build the driver.

        params: a ``parameters`` instance to use directly.
        param_filename: parameter-file path, used when *params* is None.
        nthreads: thread count forwarded to the atmosphere object.
        full_init: when True, build data/atmosphere/forward-model immediately.
        """
        logging.info('Initialise object create_spectrum')
        if params:
            self.params = params
        elif hasattr(options, 'param_filename'):
            # NOTE(review): falls back to the module-global `options` that is
            # only created in the __main__ section below; if this class is
            # imported without running the script, `options` is undefined and
            # this hasattr() call raises NameError -- confirm intended usage.
            self.params = parameters(options.param_filename,mpi=False)
        elif param_filename:
            self.params = parameters(param_filename, mpi=False)
        if full_init:
            # Build the three collaborating objects up front.
            self.init_data()
            self.init_atmosphere(nthreads)
            self.init_fmob()
        # self.dataob = data(self.params)
        # self.atmosphereob = atmosphere(self.dataob, nthreads=nthreads)
        # if self.params.gen_type == 'transmission':
        #     self.fmob = transmission(self.atmosphereob)
        # elif self.params.gen_type == 'emission':
        #     self.fmob = emission(self.atmosphereob)
    def init_data(self):
        """Create the data object from the current parameters."""
        self.dataob = data(self.params)
    def init_atmosphere(self,nthreads):
        """Create the atmosphere object (requires init_data to have run)."""
        self.atmosphereob = atmosphere(self.dataob,nthreads=nthreads)
    def init_fmob(self):
        """Create the forward-model object matching ``params.gen_type``."""
        if self.params.gen_type == 'transmission':
            self.fmob = transmission(self.atmosphereob)
        elif self.params.gen_type == 'emission':
            self.fmob = emission(self.atmosphereob)
    def generate_spectrum(self, save_instance=False, instance_filename=None, contrib_func=False, transmittance=False,
                          opacity_contrib=False):
        """Compute the spectrum and diagnostics; return the instance dict.

        The dictionary holds the spectrum, optional contribution function /
        transmittance / per-source opacity contributions, and the atmospheric
        profiles used.  It is also stored on ``self.SPECTRUM_INSTANCE_out``
        and, when *save_instance* is True, pickled to disk.
        """
        # this function returns the SPECTRUM_INSTANCE_out dictionary
        # if filename is not specified, store in out_path/SPECTRUM_INSTANCE_out.pickle
        # also stored in self.SPECTRUM_INSTANCE_out
        # create SPECTRUM_out
        instance = {'type': 'create_spectrum',
                    'params': self.params.params_to_dict()}
        instance_data = {}
        # compute spectrum
        instance_data['spectrum'] = np.zeros((self.fmob.atmosphere.int_nwlgrid, 3))
        instance_data['spectrum'][:,0] = self.fmob.atmosphere.int_wlgrid
        instance_data['spectrum'][:,1] = self.fmob.model()
        # freeze the mixing ratio profiles, disable gen_ace
        if self.fmob.params.gen_ace:
            self.fmob.params.gen_ace = False
        # compute contribution function
        if contrib_func:
            instance_data['contrib_func'] = self.fmob.model(return_tau=True)
        if transmittance:
            instance_data['transmittance'] = self.fmob.model(return_tau=True)
        # calculate opacity contributions
        if opacity_contrib:
            instance_data['opacity_contrib'] = {}
            active_mixratio_profile = self.fmob.atmosphere.active_mixratio_profile
            atm_rayleigh = self.fmob.params.atm_rayleigh
            atm_cia = self.fmob.params.atm_cia
            atm_clouds = self.fmob.params.atm_clouds
            atm_mie = self.fmob.params.atm_mie
            # opacity from molecules
            # Suppress every absorber except `val` so the resulting model is
            # that single molecule's contribution.
            for idx, val in enumerate(self.atmosphereob.active_gases):
                mask = np.ones(len(self.atmosphereob.active_gases), dtype=bool)
                mask[idx] = 0
                active_mixratio_profile_mask = np.copy(active_mixratio_profile)
                active_mixratio_profile_mask[mask, :] = 0
                self.fmob.atmosphere.active_mixratio_profile = active_mixratio_profile_mask
                self.fmob.params.atm_rayleigh = False
                self.fmob.params.atm_cia = False
                #self.fmob.params.atm_clouds = False
                instance_data['opacity_contrib'][val] = np.zeros((self.fmob.atmosphere.int_nwlgrid, 2))
                instance_data['opacity_contrib'][val][:,0] = self.fmob.atmosphere.int_wlgrid
                instance_data['opacity_contrib'][val][:,1] = self.fmob.model()
            self.fmob.atmosphere.active_mixratio_profile = np.copy(active_mixratio_profile)
        if opacity_contrib and self.params.gen_type == 'transmission':
            # Zero all absorbers so only the continuum sources below remain.
            self.fmob.atmosphere.active_mixratio_profile[:, :] = 0
            # opacity from rayleigh
            if atm_rayleigh:
                self.fmob.params.atm_rayleigh = True
                self.fmob.params.atm_cia = False
                self.fmob.params.atm_mie = False
                self.fmob.params.atm_clouds = False
                instance_data['opacity_contrib']['rayleigh'] = np.zeros((self.fmob.atmosphere.int_nwlgrid, 2))
                instance_data['opacity_contrib']['rayleigh'][:,0] = self.fmob.atmosphere.int_wlgrid
                instance_data['opacity_contrib']['rayleigh'][:,1] = self.fmob.model()
            # opacity from cia
            if atm_cia:
                self.fmob.params.atm_rayleigh = False
                self.fmob.params.atm_cia = True
                self.fmob.params.atm_mie = False
                self.fmob.params.atm_clouds = False
                instance_data['opacity_contrib']['cia'] = np.zeros((self.fmob.atmosphere.int_nwlgrid, 2))
                instance_data['opacity_contrib']['cia'][:,0] = self.fmob.atmosphere.int_wlgrid
                instance_data['opacity_contrib']['cia'][:,1] = self.fmob.model()
            # opacity from clouds
            if atm_clouds:
                self.fmob.params.atm_rayleigh = False
                self.fmob.params.atm_cia = False
                self.fmob.params.atm_clouds = True
                self.fmob.params.atm_mie = False
                instance_data['opacity_contrib']['clouds'] = np.zeros((self.fmob.atmosphere.int_nwlgrid, 2))
                instance_data['opacity_contrib']['clouds'][:,0] = self.fmob.atmosphere.int_wlgrid
                instance_data['opacity_contrib']['clouds'][:,1] = self.fmob.model()
            #opacity from Mie scattering
            if atm_mie:
                self.fmob.params.atm_rayleigh = False
                self.fmob.params.atm_cia = False
                self.fmob.params.atm_clouds = False
                self.fmob.params.atm_mie = True
                instance_data['opacity_contrib']['mie'] = np.zeros((self.fmob.atmosphere.int_nwlgrid, 2))
                instance_data['opacity_contrib']['mie'][:,0] = self.fmob.atmosphere.int_wlgrid
                instance_data['opacity_contrib']['mie'][:,1] = self.fmob.model()
            # Restore the original mixing ratios and opacity switches so the
            # forward-model object is left unchanged by the diagnostics above.
            self.fmob.atmosphere.active_mixratio_profile = np.copy(active_mixratio_profile)
            self.fmob.params.atm_rayleigh = atm_rayleigh
            self.fmob.params.atm_cia = atm_cia
            self.fmob.params.atm_clouds = atm_clouds
            self.fmob.params.atm_mie = atm_mie
        # tp profile
        instance_data['temperature_profile'] = np.zeros((self.atmosphereob.nlayers, 2))
        instance_data['temperature_profile'][:,0] = self.fmob.atmosphere.pressure_profile
        instance_data['temperature_profile'][:,1] = self.fmob.atmosphere.temperature_profile
        # altitude
        instance_data['altitude_profile'] = np.zeros((self.atmosphereob.nlayers, 2))
        instance_data['altitude_profile'][:,0] = self.fmob.atmosphere.pressure_profile
        instance_data['altitude_profile'][:,1] = self.fmob.atmosphere.altitude_profile
        # planet_grav
        instance_data['gravity_profile'] = np.zeros((self.atmosphereob.nlayers, 2))
        instance_data['gravity_profile'][:,0] = self.fmob.atmosphere.pressure_profile
        instance_data['gravity_profile'][:,1] = self.fmob.atmosphere.gravity_profile
        # scale_height
        instance_data['scaleheight_profile'] = np.zeros((self.atmosphereob.nlayers, 2))
        instance_data['scaleheight_profile'][:,0] = self.fmob.atmosphere.pressure_profile
        instance_data['scaleheight_profile'][:,1] = self.fmob.atmosphere.scaleheight_profile
        # mu profile
        instance_data['mu_profile'] = np.zeros((self.atmosphereob.nlayers, 2))
        instance_data['mu_profile'][:,0] = self.fmob.atmosphere.pressure_profile
        instance_data['mu_profile'][:,1] = self.fmob.atmosphere.mu_profile
        # mixing ratios
        instance_data['active_mixratio_profile'] = np.zeros((len(self.atmosphereob.active_gases), self.atmosphereob.nlayers, 2))
        instance_data['inactive_mixratio_profile'] = np.zeros((len(self.atmosphereob.inactive_gases), self.atmosphereob.nlayers, 2))
        for i in range(len(self.atmosphereob.active_gases)):
            instance_data['active_mixratio_profile'][i,:,0] = self.fmob.atmosphere.pressure_profile
            instance_data['active_mixratio_profile'][i,:,1] = self.fmob.atmosphere.active_mixratio_profile[i,:]
        for i in range(len(self.atmosphereob.inactive_gases)):
            instance_data['inactive_mixratio_profile'][i,:,0] = self.fmob.atmosphere.pressure_profile
            instance_data['inactive_mixratio_profile'][i,:,1] = self.fmob.atmosphere.inactive_mixratio_profile[i,:]
        # store data
        instance['data'] = instance_data
        self.SPECTRUM_INSTANCE_out = instance
        if save_instance:
            if not instance_filename:
                instance_filename = os.path.join(self.params.out_path, 'SPECTRUM_INSTANCE_out.pickle')
            logging.info('Store spectrum instance in %s ' % instance_filename)
            pickle.dump(instance, open(instance_filename, 'wb'), protocol=2)
        return instance
    def save_spectrum(self, sp_filename=None, pickled=False):
        """Write the spectrum to disk (ascii by default, pickle if *pickled*).

        Generates the spectrum first if it has not been computed yet.
        """
        if not hasattr(self, 'SPECTRUM_INSTANCE_out'):
            self.generate_spectrum()
        if not sp_filename:
            if pickled:
                sp_filename = os.path.join(self.params.out_path , 'SPECTRUM_out.pickle')
            else:
                sp_filename = os.path.join(self.params.out_path , 'SPECTRUM_out.dat')
        logging.info('Store spectrum in %s ' % sp_filename)
        if pickled:
            pickle.dump(self.SPECTRUM_INSTANCE_out['data']['spectrum'], open(sp_filename, 'wb'), protocol=2)
        else:
            np.savetxt(sp_filename, self.SPECTRUM_INSTANCE_out['data']['spectrum'])
# gets called when running from command line
# Command-line entry point: parse options, build a create_spectrum driver,
# compute and save the spectrum, and optionally plot it.
if __name__ == '__main__':
    #loading parameter file parser
    parser = argparse.ArgumentParser()
    parser.add_argument('-p',
                      dest='param_filename',
                      default='Parfiles/default.par',
                      help='Input parameter file'
                      )
    # parser.add_argument('--save_sp', # spectrum is always saved!
    #                   action='store_true',
    #                   dest='save_sp',
    #                   default=True)
    parser.add_argument('--pickle', # store spectrum in its pickled version (faster for highres spectra)
                      action='store_true',
                      dest='pickle_save_sp',
                      default=False,
                      help='Store the final output spectrum in Python pickle format. This is much faster for high resolution '
                           'spectra than using ascii files. See also --sp_filename.')
    parser.add_argument('--sp_filename',
                      dest='sp_filename',
                      default=False,
                      help='Specify a custom file path and filename for the output spectrum (note that the path is relative'
                           'to the current working folder). Remember that if using --pickle_save_sp the ouput spectrum '
                           'is stored in Python pickle format.')
    parser.add_argument('--save_instance',
                      action='store_true',
                      dest='save_instance',
                      default=False,
                      help = 'Save a dictionary in .pickle format containing the full spectrum instance, including mixing ratio and ' \
                             'temperature profiles used, and all the paramaters used to generate the spectrum.'
                             'See also the options --opacity__contriv, --contrib_func. Default file path is in the'
                             'Output folder specified in parameter file, and the default filename is '
                             'SPECTRUM_INSTANCE_out.pickle. Otherwise see --instance_filename')
    parser.add_argument('--instance_filename',
                      dest='instance_filename',
                      default=False,
                      help = 'Specify a custom file path and filename for the output spectrum instance dictionary (stored'
                             'in Python pickle format.')
    # default to true
    # parser.add_argument('--opacity_contrib', # calculates the opacity contribution for each opacity source.
    #                   dest='opacity_contrib', # stored in SPECTRUM_INSTANCE_out.pickle, so use '--save_instance' as well
    #                   action='store_true',
    #                   default=False,
    #                   help = 'Calculates the opacity contribution for each opacity source. It computes one spectrum for '
    #                          'each opacity source, suppressing the contribution from all the other sources. Stored in the instance'
    #                          'dictionary (see option --save_instance) under ["data"]["opacity_contrib"][<opacity_source>]')
    parser.add_argument('--contrib_func',
                      dest='contrib_func',
                      action='store_true',
                      default=False,
                      help = 'Only valid for emission. Store the contribution function as a funciton of pressure and wavelength.'
                             'The 2d array is stored in the instance dictionary (see option --save_instance), '
                             'under ["data"]["contrib_func"].',)
    parser.add_argument('--transmittance',
                      dest='transmittance',
                      action='store_true',
                      default=False,
                      help = 'Only valid for transmission. Store the spectral transmittance as a function of pressure'
                             ' as a funciton of pressure and wavelength. The transmittance is integrated over the path '
                             'parallel to the line of sight. The 2d array is stored in the instance dictionary (see option --save_instance), '
                             'under ["data"]["transmittance"].',)
    parser.add_argument('--nthreads', # run forward model in multithreaded mode (use Python multiprocesing for sigma array interpolation
                      dest='nthreads', # and openmp parallel version of cpp code). You need to spcify the number of cores to use,
                      default=0,
                      type=int,
                      help = 'Run forward model in multithreaded mode (using NTHREADS cores). NTHREADS should not '
                             'be larger than the number of cores available.')
    # plotting parameters
    parser.add_argument('--plot',
                      dest='plot_spectrum',
                      action='store_true',
                      default=False,
                      help='Display an instantanous plot after the spectrum is computed.')
    parser.add_argument('--save_plots',
                      dest='save_plots',
                      action='store_true',
                      default=False,
                      help='Save a range of plots in the Output folder specified.')
    parser.add_argument('--plot_profiles',
                      action='store_true',
                      dest='plot_profiles',
                      default=False)
    parser.add_argument('--plot_resolution',
                      dest='plot_resolution',
                      default=100,
                      help='Output plot spectrum resolution. Set to 0 for native resolution.')
    parser.add_argument('--plot_contrib',
                      action='store_true',
                      dest='plot_contrib',
                      default=False,
                      help='Plot the contribution of each molecule to the spectrum')
    parser.add_argument('--plot_title',
                      dest='plot_title',
                      default=False)
    parser.add_argument('--plot_prefix',
                      dest='plot_prefix',
                      default=False)
    parser.add_argument('--plot_out_folder',
                      dest='plot_out_folder',
                      default=False)
    # add command line interface to parameter file
    # Every key of the parameter file becomes a --<key> CLI option so any
    # parameter can be overridden from the command line.
    params_tmp = parameters(mpi=False, log=False)
    params_dict = params_tmp.params_to_dict() # get all param names
    for param in params_dict:
        if type(params_dict[param]) == list:
            # manage lists
            parser.add_argument('--%s' % param,
                                action='append',
                                dest=param,
                                default=None,
                                type = type(params_dict[param][0])
                                )
        else:
            parser.add_argument('--%s' % param,
                                dest=param,
                                default=None,
                                type = type(params_dict[param])
                                )
    options = parser.parse_args()
    # Initialise parameters instance
    params = parameters(options.param_filename, mode='forward_model', mpi=False)
    # Override params object from command line input
    for param in params_dict:
        if getattr(options, param) != None:
            value = getattr(options, param)
            # Convert user-friendly units (Jupiter/solar/atomic) to SI.
            if param == 'planet_mass':
                value *= MJUP
            if param == 'planet_radius':
                value *= RJUP
            if param == 'star_radius':
                value *= RSOL
            if param == 'atm_mu':
                value *= AMU
            setattr(params, param, value)
    # checks
    # NOTE(review): these checks only log an error and continue; the invalid
    # option is effectively ignored rather than aborting the run.
    if params.gen_type == 'transmission' and options.contrib_func:
        logging.error('Options --contrib_func is only valid in emission. Maybe you wanted to use --transmittance? ')
    if params.gen_type == 'emission' and options.transmittance:
        logging.error('Options --transmittance is only valid in transmission. Maybe you wanted to use --contrib_func ?')
    if (options.transmittance or options.contrib_func) and not options.save_instance:
        logging.warning('Options --transmittance and --contrib_func require --save_instance. This options is '
                        'switched on automatically. The instance of the spectrum will be stored in a .pickle file'
                        'in the Output folder.')
        options.save_instance = True
    spectrumob = create_spectrum(params=params, nthreads=options.nthreads)
    sp_instance = spectrumob.generate_spectrum(save_instance=options.save_instance,
                                               instance_filename=options.instance_filename,
                                               opacity_contrib=True,
                                               contrib_func=options.contrib_func,
                                               transmittance=options.transmittance)
    spectrumob.save_spectrum(sp_filename=options.sp_filename, pickled=options.pickle_save_sp)
    # plotting
    if options.save_plots:
        sys.path.append('./tools')
        from taurex_plots import taurex_plots
        logging.info('Initialising plotting')
        plot = taurex_plots(plot_type='create_spectrum',pickle_file=sp_instance, title=options.plot_title,
                            prefix=options.plot_prefix, out_folder=options.plot_out_folder,
                            plot_resolution=options.plot_resolution)
        # plot spectrum
        plot.plot_forward_spectrum(plot_contrib=options.plot_contrib)
        # plot mixing ratio profiles
        if options.plot_profiles:
            plot.plot_forward_xp()
    # do plot transmissivity contrib funct
    if options.plot_spectrum:
        logging.info('Plot spectrum... Close the Plot window to terminate the script.')
        sp = spectrumob.SPECTRUM_INSTANCE_out['data']['spectrum']
        plt.plot(sp[:,0], sp[:,1])
        plt.xscale('log')
        plt.show()
|
[
"sys.path.append",
"logging.error",
"argparse.ArgumentParser",
"logging.warning",
"taurex_plots.taurex_plots",
"logging.info",
"os.path.join"
] |
[((246, 274), 'sys.path.append', 'sys.path.append', (['"""./classes"""'], {}), "('./classes')\n", (261, 274), False, 'import sys\n'), ((275, 303), 'sys.path.append', 'sys.path.append', (['"""./library"""'], {}), "('./library')\n", (290, 303), False, 'import sys\n'), ((10510, 10535), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (10533, 10535), False, 'import argparse\n'), ((551, 600), 'logging.info', 'logging.info', (['"""Initialise object create_spectrum"""'], {}), "('Initialise object create_spectrum')\n", (563, 600), False, 'import logging\n'), ((10107, 10158), 'logging.info', 'logging.info', (["('Store spectrum in %s ' % sp_filename)"], {}), "('Store spectrum in %s ' % sp_filename)\n", (10119, 10158), False, 'import logging\n'), ((18311, 18429), 'logging.error', 'logging.error', (['"""Options --contrib_func is only valid in emission. Maybe you wanted to use --transmittance? """'], {}), "(\n 'Options --contrib_func is only valid in emission. Maybe you wanted to use --transmittance? '\n )\n", (18324, 18429), False, 'import logging\n'), ((18493, 18615), 'logging.error', 'logging.error', (['"""Options --transmittance is only valid in transmission. Maybe you wanted to use --contrib_func ?"""'], {}), "(\n 'Options --transmittance is only valid in transmission. Maybe you wanted to use --contrib_func ?'\n )\n", (18506, 18615), False, 'import logging\n'), ((18701, 18923), 'logging.warning', 'logging.warning', (['"""Options --transmittance and --contrib_func require --save_instance. This options is switched on automatically. The instance of the spectrum will be stored in a .pickle filein the Output folder."""'], {}), "(\n 'Options --transmittance and --contrib_func require --save_instance. This options is switched on automatically. 
The instance of the spectrum will be stored in a .pickle filein the Output folder.'\n )\n", (18716, 18923), False, 'import logging\n'), ((19647, 19673), 'sys.path.append', 'sys.path.append', (['"""./tools"""'], {}), "('./tools')\n", (19662, 19673), False, 'import sys\n'), ((19729, 19766), 'logging.info', 'logging.info', (['"""Initialising plotting"""'], {}), "('Initialising plotting')\n", (19741, 19766), False, 'import logging\n'), ((19782, 19989), 'taurex_plots.taurex_plots', 'taurex_plots', ([], {'plot_type': '"""create_spectrum"""', 'pickle_file': 'sp_instance', 'title': 'options.plot_title', 'prefix': 'options.plot_prefix', 'out_folder': 'options.plot_out_folder', 'plot_resolution': 'options.plot_resolution'}), "(plot_type='create_spectrum', pickle_file=sp_instance, title=\n options.plot_title, prefix=options.plot_prefix, out_folder=options.\n plot_out_folder, plot_resolution=options.plot_resolution)\n", (19794, 19989), False, 'from taurex_plots import taurex_plots\n'), ((20329, 20408), 'logging.info', 'logging.info', (['"""Plot spectrum... Close the Plot window to terminate the script."""'], {}), "('Plot spectrum... 
Close the Plot window to terminate the script.')\n", (20341, 20408), False, 'import logging\n'), ((9528, 9594), 'logging.info', 'logging.info', (["('Store spectrum instance in %s ' % instance_filename)"], {}), "('Store spectrum instance in %s ' % instance_filename)\n", (9540, 9594), False, 'import logging\n'), ((9449, 9515), 'os.path.join', 'os.path.join', (['self.params.out_path', '"""SPECTRUM_INSTANCE_out.pickle"""'], {}), "(self.params.out_path, 'SPECTRUM_INSTANCE_out.pickle')\n", (9461, 9515), False, 'import os\n'), ((9936, 9993), 'os.path.join', 'os.path.join', (['self.params.out_path', '"""SPECTRUM_out.pickle"""'], {}), "(self.params.out_path, 'SPECTRUM_out.pickle')\n", (9948, 9993), False, 'import os\n'), ((10043, 10097), 'os.path.join', 'os.path.join', (['self.params.out_path', '"""SPECTRUM_out.dat"""'], {}), "(self.params.out_path, 'SPECTRUM_out.dat')\n", (10055, 10097), False, 'import os\n')]
|
import argparse
import contextlib
import faw_pipelines_util
import gridfs
import io
import pickle
import sys
import torch
from .dask_util import CachedParser
def main(api_info, cmd_args):
    """Train (or resume training) the byte-level model over sampled files.

    Loads the ``model.chkpt`` checkpoint from the task file store when it
    exists, otherwise builds a fresh model from the CLI arguments; then
    trains for ``data['total']`` batches of sampled files, checkpointing
    periodically and on the final batch.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('--header-bytes', type=int, default=100)
    from . import model as model_module
    model_module.Model.argparse_setup(ap)
    args = ap.parse_args(cmd_args)
    api = faw_pipelines_util.Api(api_info)
    batch_size = 8
    header_bytes = args.header_bytes
    data = None
    # Try to resume from an existing checkpoint; gridfs.NoFile means this is
    # a fresh run.
    try:
        with api.task_file_read('model.chkpt') as f:
            buf = io.BytesIO(f.read())
            data = torch.load(buf)
    except gridfs.NoFile:
        pass
    if data is None:
        data = {
            'files': api.file_count(),
            'header_bytes': header_bytes,
        }
        # With 2 uniform iterations, ~10% chance any given file isn't included
        data['total'] = 100 # max(data['files'] * 2, 100000) // batch_size + 1
        data['current'] = 0
        # Create model
        model_args = args.__dict__.copy()
        model_args.pop('header_bytes')
        model = model_module.Model.argparse_create(model_args)
        model.build()
        # Remember enough to rebuild model
        data['model_args'] = model_args
        data['model'] = None
    else:
        # Rebuild the model exactly as checkpointed: weights, extra state,
        # and optimizer state.
        model = model_module.Model.argparse_create(data['model_args'])
        model.build()
        model.load_state_dict(data['model']['state_dict'])
        model.state_restore(pickle.loads(data['model']['model_extra']))
        for optim, state_dict in zip([model._optim], data['model']['optims']):
            optim.load_state_dict(state_dict)
    # Load model
    while data['current'] < data['total']:
        print(f'Training batch {data["current"]}')
        batch = api.file_sample(batch_size)
        # Polyfile examples
        # NOTE(review): dead code kept as usage examples (guarded by `if False`).
        if False:
            if False:
                # Non-distribued
                polyfile_json = [polyfile_run(api_info, f) for f in batch]
            else:
                # Distributed
                import dask
                polyfile_json = dask.compute([
                        dask.delayed(polyfile_run)(api_info, f)
                        for f in batch])[0]
        # ExitStack keeps every fetched file alive for the duration of the
        # training step, then releases them all at once.
        with contextlib.ExitStack() as stack:
            file_paths = [stack.enter_context(api.file_fetch(f)) for f in batch]
            # Train on this batch of files
            train_step(model, file_paths, header_bytes)
        data['current'] += 1
        api.task_status_set_message(f"{data['current']} out of {data['total']}")
        if data['current'] % 100 == 1 or data['current'] == data['total']:
            # Write out model to save most recent work (Save first batch to
            # bootstrap viewing file detail views for debugging UI)
            data['model'] = {
                'state_dict': model.state_dict(),
                'model_extra': pickle.dumps(model.state_to_save(),
                        protocol=pickle.HIGHEST_PROTOCOL),
                'optims': [o.state_dict() for o in [model._optim]],
            }
            buf = io.BytesIO()
            torch.save(data, buf)
            with api.task_file_write('model.chkpt') as f:
                f.write(buf.getvalue())
            # Bust the cache for any downstream applications
            CachedParser.purge(api, 'model.chkpt')
def train_step(model, file_paths, header_bytes):
    """Run one training step of *model* on the leading bytes of each file.

    Each file contributes a "sentence": the first *header_bytes* bytes of
    the file, split into single-byte tokens.
    """
    def _byte_tokens(path):
        # Read at most header_bytes and split into 1-byte tokens.
        with open(path, 'rb') as handle:
            header = handle.read(header_bytes)
        return [header[i:i + 1] for i in range(len(header))]

    model.train_batch([_byte_tokens(path) for path in file_paths])
# Get polyfile results for each file, or None for unable to
# parse.
def polyfile_run(api_info, f_in_db):
    """Run ``polyfile`` on a file stored in the DB and return its JSON report.

    Returns ``None`` when polyfile exits non-zero.  (This could also be done
    in dask, but that's a pretty small change since the functionality lives
    in a top-level module function.)
    """
    import json
    import subprocess
    api = faw_pipelines_util.Api(api_info)
    with api.file_fetch(f_in_db) as local_path:
        proc = subprocess.Popen(['polyfile', local_path],
                stdout=subprocess.PIPE,
                stderr=subprocess.DEVNULL,
                stdin=subprocess.PIPE,
                text=True)
        stdout, _stderr = proc.communicate()
        exit_code = proc.wait()
    if exit_code != 0:
        return None
    try:
        return json.loads(stdout)
    except json.decoder.JSONDecodeError:
        # Surface a truncated copy of the bad output before re-raising.
        print(f'BAD JSON: {stdout[:1000]}', file=sys.stderr)
        raise
|
[
"pickle.loads",
"subprocess.Popen",
"io.BytesIO",
"dask.delayed",
"argparse.ArgumentParser",
"json.loads",
"torch.load",
"contextlib.ExitStack",
"torch.save",
"faw_pipelines_util.Api"
] |
[((199, 224), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (222, 224), False, 'import argparse\n'), ((417, 449), 'faw_pipelines_util.Api', 'faw_pipelines_util.Api', (['api_info'], {}), '(api_info)\n', (439, 449), False, 'import faw_pipelines_util\n'), ((3937, 3969), 'faw_pipelines_util.Api', 'faw_pipelines_util.Api', (['api_info'], {}), '(api_info)\n', (3959, 3969), False, 'import faw_pipelines_util\n'), ((639, 654), 'torch.load', 'torch.load', (['buf'], {}), '(buf)\n', (649, 654), False, 'import torch\n'), ((4027, 4156), 'subprocess.Popen', 'subprocess.Popen', (["['polyfile', f_local]"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.DEVNULL', 'stdin': 'subprocess.PIPE', 'text': '(True)'}), "(['polyfile', f_local], stdout=subprocess.PIPE, stderr=\n subprocess.DEVNULL, stdin=subprocess.PIPE, text=True)\n", (4043, 4156), False, 'import json, subprocess\n'), ((1513, 1555), 'pickle.loads', 'pickle.loads', (["data['model']['model_extra']"], {}), "(data['model']['model_extra'])\n", (1525, 1555), False, 'import pickle\n'), ((2261, 2283), 'contextlib.ExitStack', 'contextlib.ExitStack', ([], {}), '()\n', (2281, 2283), False, 'import contextlib\n'), ((3124, 3136), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (3134, 3136), False, 'import io\n'), ((3149, 3170), 'torch.save', 'torch.save', (['data', 'buf'], {}), '(data, buf)\n', (3159, 3170), False, 'import torch\n'), ((4357, 4375), 'json.loads', 'json.loads', (['stdout'], {}), '(stdout)\n', (4367, 4375), False, 'import json, subprocess\n'), ((2162, 2188), 'dask.delayed', 'dask.delayed', (['polyfile_run'], {}), '(polyfile_run)\n', (2174, 2188), False, 'import dask\n')]
|
import random
from Games.game import Game
import utilities
class NumberGuessingGame(Game):
"""Manage Number Guessing Game game play"""
ID = 3
TITLE = "Number Guessing Game"
DIFFICULTIES = {
'e': {'guesses_allowed': 5, 'max_number': 10, 'min_number': 1},
'm': {'guesses_allowed': 4, 'max_number': 20, 'min_number': 1},
'h': {'guesses_allowed': 3, 'max_number': 50, 'min_number': 1},
}
game_difficulty = None
guess = None
guesses = []
guesses_allowed = None
max_number = None
min_number = None
secret_number = None
def cleanup(self):
"""Clean up the game to prepare for another."""
self.guesses = []
def get_guess(self):
"""Get player guess"""
self.guess = input("Guess a number between {0.min_number} and {0.max_number}: ".format(self))
def setup(self):
"""Set up the game"""
self.set_difficulty()
self.guesses_allowed = self.DIFFICULTIES[self.game_difficulty]['guesses_allowed']
self.max_number = self.DIFFICULTIES[self.game_difficulty]['max_number']
self.min_number = self.DIFFICULTIES[self.game_difficulty]['min_number']
self.secret_number = random.randint(self.min_number, self.max_number)
def start(self):
"""Start and manage the game."""
super().start()
utilities.clear_screen()
self.show_welcome()
self.setup()
print("I'm thinking of a number between {0.min_number} and {0.max_number}.".format(self))
print("Can you guess what it is with {0.guesses_allowed} guesses?".format(self))
print("Let's see.")
while self.playing and len(self.guesses) < self.guesses_allowed:
self.get_guess()
try:
self.guess = int(self.guess)
except ValueError:
print("{} isn't a number!".format(self.guess))
else:
# add guess to guesses
self.guesses.append(self.guess)
# compare guess to secret number
if self.guess == self.secret_number:
print("Wow! You got it! My number was {}. You are pretty amazing.".format(self.secret_number))
break
# print hit/miss
elif self.guess < self.secret_number:
print("Nope. Think higher.")
else:
print("Nope. Lower.")
else:
print("I win! You didn't guess it! My number was {}. Try again if you think you can beat me.".format(
self.secret_number))
self.play_again_prompt()
|
[
"utilities.clear_screen",
"random.randint"
] |
[((1214, 1262), 'random.randint', 'random.randint', (['self.min_number', 'self.max_number'], {}), '(self.min_number, self.max_number)\n', (1228, 1262), False, 'import random\n'), ((1358, 1382), 'utilities.clear_screen', 'utilities.clear_screen', ([], {}), '()\n', (1380, 1382), False, 'import utilities\n')]
|
import tkinter as tk
root = tk.Tk()
# top frame ...
tFrame = tk.Frame(root)
tFrame.pack()
# bottom frame ...
bFrame = tk.Frame(root)
bFrame.pack(side=tk.BOTTOM)
# adding widgets
# buttons ...
btn1 = tk.Button(tFrame, text="Button 1", fg="red")
btn2 = tk.Button(tFrame, text="Button 2", fg="blue")
btn3 = tk.Button(tFrame, text="Button 3", fg="green")
btn4 = tk.Button(bFrame, text="Button 4", fg="purple")
# packing widgets same as displaying them ....
btn1.pack(side=tk.LEFT)
btn2.pack(side=tk.LEFT)
btn3.pack(side=tk.LEFT)
btn4.pack(side=tk.BOTTOM)
root.mainloop()
|
[
"tkinter.Button",
"tkinter.Tk",
"tkinter.Frame"
] |
[((30, 37), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (35, 37), True, 'import tkinter as tk\n'), ((64, 78), 'tkinter.Frame', 'tk.Frame', (['root'], {}), '(root)\n', (72, 78), True, 'import tkinter as tk\n'), ((122, 136), 'tkinter.Frame', 'tk.Frame', (['root'], {}), '(root)\n', (130, 136), True, 'import tkinter as tk\n'), ((205, 249), 'tkinter.Button', 'tk.Button', (['tFrame'], {'text': '"""Button 1"""', 'fg': '"""red"""'}), "(tFrame, text='Button 1', fg='red')\n", (214, 249), True, 'import tkinter as tk\n'), ((257, 302), 'tkinter.Button', 'tk.Button', (['tFrame'], {'text': '"""Button 2"""', 'fg': '"""blue"""'}), "(tFrame, text='Button 2', fg='blue')\n", (266, 302), True, 'import tkinter as tk\n'), ((310, 356), 'tkinter.Button', 'tk.Button', (['tFrame'], {'text': '"""Button 3"""', 'fg': '"""green"""'}), "(tFrame, text='Button 3', fg='green')\n", (319, 356), True, 'import tkinter as tk\n'), ((364, 411), 'tkinter.Button', 'tk.Button', (['bFrame'], {'text': '"""Button 4"""', 'fg': '"""purple"""'}), "(bFrame, text='Button 4', fg='purple')\n", (373, 411), True, 'import tkinter as tk\n')]
|
import sys
from PyQt5.QtCore import QDir, Qt, QUrl
from PyQt5 import QtGui, QtCore, QtWidgets, QtMultimedia, QtMultimediaWidgets
from PyQt5.QtMultimedia import QMediaContent, QMediaPlayer
from PyQt5.QtMultimediaWidgets import QVideoWidget
from PyQt5.QtWidgets import (QApplication, QFileDialog, QHBoxLayout, QLabel, QMessageBox,
QPushButton, QSizePolicy, QSlider, QStyle, QVBoxLayout, QWidget)
from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton
from PyQt5.QtGui import QIcon, QPixmap, QImage, QImageReader
from PyQt5.QtWidgets import QVBoxLayout
import mysql.connector
from PyQt5.QtMultimedia import QSound
import tkinter as tk
from new_qus import Ui_MainWindow
class App(QWidget):
curFileId = 1
ObjectID = 1
total = 3
img1 = None
img2 = None
img3 = None
video = None
audio = None
objNameImg = None
img = list()
alreadyLearned = list()
def __init__(self):
super().__init__()
root = tk.Tk()
self.width = root.winfo_screenwidth()
self.height = root.winfo_screenheight()
self.left = 0
self.top = 0
print(self.width, self.height)
self.title = 'WELCOME TO AUDIO-VISUAL LEARNING'
self.initUI()
def initUI(self):
horUnit = int(self.width / 12)
verUnit = int(self.height / 12)
self.setWindowTitle(self.title)
self.setGeometry(self.left, self.top, self.width, self.height)
# INITIAL DATABASE
mydb = mysql.connector.connect(
host = 'localhost',
user = "root",
#passwd = "<PASSWORD>",
database="spl"
)
myCursor = mydb.cursor(buffered=True)
sql = "SELECT image_name_1, image_name_2, image_name_3, \
object_image, audio_name, video_name FROM object where object_id = %s"
val = (App.ObjectID,)
myCursor.execute(sql, val)
myresult = myCursor.fetchone()
App.img.clear()
App.img.append(myresult[0])
App.img.append(myresult[1])
App.img.append(myresult[2])
App.objNameImg = myresult[3]
App.audio = myresult[4]
App.video = myresult[5]
myCursor.close()
mydb.close()
#=========================================Video Part===============================================#
self.mediaPlayer = QMediaPlayer(None, QMediaPlayer.VideoSurface)
videoWidget = QVideoWidget()
self.playButton = QPushButton()
self.playButton.setEnabled(False)
self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay))
self.playButton.clicked.connect(self.play)
self.positionSlider = QSlider(Qt.Horizontal)
self.positionSlider.setRange(0, 0)
self.positionSlider.sliderMoved.connect(self.setPosition)
self.errorLabel = QLabel()
self.errorLabel.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Maximum)
self.mediaPlayer.setMedia(QMediaContent(QUrl.fromLocalFile(App.video))) # NECESSARY
self.playButton.setEnabled(True)
# Create layouts to place inside widget
controlLayout = QHBoxLayout()
controlLayout.setContentsMargins(0, 0, 0, 0)
controlLayout.addWidget(self.playButton)
controlLayout.addWidget(self.positionSlider)
layout = QVBoxLayout()
layout.addWidget(videoWidget)
layout.addLayout(controlLayout)
layout.addWidget(self.errorLabel)
self.mediaPlayer.setVideoOutput(videoWidget)
self.mediaPlayer.stateChanged.connect(self.mediaStateChanged)
self.mediaPlayer.positionChanged.connect(self.positionChanged)
self.mediaPlayer.durationChanged.connect(self.durationChanged)
self.mediaPlayer.error.connect(self.handleError)
# ===============================Image widget========================================#
self.imglabel = QLabel(self)
self.imglabel.setGeometry(0.5*horUnit, 3*verUnit, 4.5*horUnit, 6*verUnit)
# self.pixmap = QPixmap('C:/Users/dell/Downloads/Python-master/Python-master/ImageShow/mango.jpg')
self.pixmap = QPixmap(App.img[0])
self.pixmap = self.pixmap.scaled(self.imglabel.width(), self.imglabel.height(), QtCore.Qt.KeepAspectRatio)
self.imglabel.setPixmap(self.pixmap)
self.imglabel.setStyleSheet("background-color: lightgray;")
self.imglabel.setAlignment(QtCore.Qt.AlignCenter)
# audio button widget
self.audioButton = QPushButton('\t PLAY AUDIO \t', self)
self.audioButton.setToolTip('play audio')
self.audioButton.setStyleSheet("background-color: lightgray; font-size: 18px; font-weight: bold;")
self.audioButton.setGeometry(7*horUnit, 9.5*verUnit, 4.5*horUnit, 0.5*verUnit)
self.audioButton.clicked.connect(lambda: self.play_audio(App.audio)) # NECESSARY
# Previous button widget
self.buttonP = QPushButton('\t PREVIOUS IMAGE \t', self)
self.buttonP.setToolTip('Go to previous picture')
self.buttonP.setStyleSheet("background-color: lightgray; font-size: 18px; font-weight: bold;")
self.buttonP.setGeometry(0.5*horUnit, 9.5*verUnit, 1.2*horUnit, 0.5*verUnit)
self.buttonP.clicked.connect(self.on_click_prev)
# Skip button widget
self.buttonS = QPushButton('\t SKIP THIS OBJECT \t', self)
self.buttonS.setToolTip('Skip this object')
self.buttonS.setStyleSheet("background-color: lightgray; font-size: 18px; font-weight: bold; color: blue;")
self.buttonS.setGeometry(2*horUnit, 9.8*verUnit, 1.5*horUnit, 0.7*verUnit)
self.buttonS.clicked.connect(self.on_click_skip)
# Next button widget
self.buttonN = QPushButton('\t NEXT IMAGE \t', self)
self.buttonN.setToolTip('Go to next picture')
self.buttonN.setStyleSheet("background-color: lightgray; font-size: 18px; font-weight: bold;")
self.buttonN.setGeometry(3.8*horUnit, 9.5*verUnit, 1.2*horUnit, 0.5*verUnit)
self.buttonN.clicked.connect(self.on_click_next)
'''
# Question Window load button widget
self.btnQues = QPushButton('\t TAKE A TEST \t', self)
self.btnQues.setToolTip('Answer Question on this Object')
self.btnQues.setStyleSheet("background-color: lightgray; font-size: 18px; font-weight: bold; color: blue;")
self.btnQues.setGeometry(5.3*horUnit, 9.4*verUnit, 1.4*horUnit, 0.7*verUnit)
self.btnQues.clicked.connect(self.on_click_test)
self.btnQues.hide()
'''
# OBJECT NAME LABEL
self.lblObjName = QLabel(self)
self.pixmap = QPixmap(App.objNameImg)
self.lblObjName.setPixmap(self.pixmap)
self.lblObjName.setGeometry(3*horUnit, 0*verUnit, 6*horUnit, 2*verUnit)
self.lblObjName.setAlignment(QtCore.Qt.AlignCenter)
# Create a widget for window contents
self.wid = QWidget(self)
self.wid.setGeometry(7*horUnit, 3*verUnit, 4.5*horUnit, 6*verUnit)
self.wid.setStyleSheet("background-color: lightgray;")
self.wid.setLayout(layout)
self.wid.show()
self.show()
# ================== All User Defined Functions ====================== #
def exitCall(self):
sys.exit(app.exec_())
def play(self):
if self.mediaPlayer.state() == QMediaPlayer.PlayingState:
self.mediaPlayer.pause()
else:
self.mediaPlayer.play()
def mediaStateChanged(self, state):
if self.mediaPlayer.state() == QMediaPlayer.PlayingState:
self.playButton.setIcon(
self.style().standardIcon(QStyle.SP_MediaPause))
else:
self.playButton.setIcon(
self.style().standardIcon(QStyle.SP_MediaPlay))
def positionChanged(self, position):
self.positionSlider.setValue(position)
def durationChanged(self, duration):
self.positionSlider.setRange(0, duration)
def setPosition(self, position):
self.mediaPlayer.setPosition(position)
def handleError(self):
self.playButton.setEnabled(False)
self.errorLabel.setText("Error: " + self.mediaPlayer.errorString())
def showImage(self, filepath):
self.imglabel.clear()
pixmap = QtGui.QPixmap(filepath)
pixmap = pixmap.scaled(self.imglabel.width(), self.imglabel.height(), QtCore.Qt.KeepAspectRatio)
self.imglabel.setPixmap(pixmap)
self.imglabel.setAlignment(QtCore.Qt.AlignCenter)
def showObjectNameImage(self, filepath):
print(filepath)
self.lblObjName.clear()
pixmap = QPixmap(filepath)
pixmap = pixmap.scaled(self.imglabel.width(), self.imglabel.height(), QtCore.Qt.KeepAspectRatio)
self.lblObjName.setPixmap(pixmap)
self.imglabel.setAlignment(QtCore.Qt.AlignCenter)
def play_audio(self, path):
QSound.play(path)
def on_click_prev(self):
mydb = mysql.connector.connect(
host = 'localhost',
user = "root",
#passwd = "<PASSWORD>",
database="spl"
)
myCursor = mydb.cursor()
if (App.curFileId - 1) < 1:
if App.ObjectID - 1 < 1:
self.buttonP.hide()
else:
sql = "SELECT image_name_1, image_name_2, image_name_3, \
object_image, audio_name, video_name FROM object where object_id = %s"
val = (App.ObjectID - 1,)
myCursor.execute(sql, val)
myresult = myCursor.fetchone()
myCursor.close()
mydb.close()
App.img.clear()
App.img.append(myresult[0])
App.img.append(myresult[1])
App.img.append(myresult[2])
App.objNameImg = myresult[3]
App.audio = myresult[4]
App.video = myresult[5]
App.ObjectID -= 1
App.curFileId = 1
self.showImage(App.img[App.curFileId - 1])
self.mediaPlayer.setMedia(QMediaContent(QUrl.fromLocalFile(App.video))) # NECESSARY
self.playButton.setEnabled(True)
self.showObjectNameImage(App.objNameImg)
# self.buttonP.hide()
else:
self.buttonN.show()
App.curFileId -= 1
self.showImage(App.img[App.curFileId - 1])
def on_click_next(self):
#self.btnQues.hide()
if (App.curFileId + 1) > App.total:
mydb = mysql.connector.connect(
host = 'localhost',
user = "root",
#passwd = "<PASSWORD>",
database="spl"
)
myCursor = mydb.cursor()
sql = "SELECT image_name_1, image_name_2, image_name_3, \
object_image, audio_name, video_name FROM object where object_id = %s"
val = (App.ObjectID + 1,)
myCursor.execute(sql, val)
myresult = myCursor.fetchone()
myCursor.close()
mydb.close()
App.img.clear()
App.img.append(myresult[0])
App.img.append(myresult[1])
App.img.append(myresult[2])
App.objNameImg = myresult[3]
App.audio = myresult[4]
App.video = myresult[5]
App.curFileId = 1
App.ObjectID += 1
if (App.ObjectID - 1) in App.alreadyLearned:
self.showImage(App.img[App.curFileId - 1])
self.mediaPlayer.setMedia(QMediaContent(QUrl.fromLocalFile(App.video))) # NECESSARY
self.playButton.setEnabled(True)
self.showObjectNameImage(App.objNameImg)
# self.buttonN.hide()
else:
title = "Answer Question!"
ques = "Want to take a Test on this Object?\t"
reply = QMessageBox.question(self, title, ques, QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QtWidgets.QMessageBox.Yes:
print('Yes clicked.')
self.on_click_test()
else:
print('No clicked.')
App.alreadyLearned.append(App.ObjectID - 1)
self.showImage(App.img[App.curFileId - 1])
self.mediaPlayer.setMedia(QMediaContent(QUrl.fromLocalFile(App.video))) # NECESSARY
self.playButton.setEnabled(True)
self.showObjectNameImage(App.objNameImg)
else:
self.buttonP.show()
App.curFileId += 1
self.showImage(App.img[App.curFileId - 1])
def on_click_skip(self):
mydb = mysql.connector.connect(
host = 'localhost',
user = "root",
#passwd = "<PASSWORD>",
database="spl"
)
myCursor = mydb.cursor(buffered=True)
sql = "SELECT image_name_1, image_name_2, image_name_3, \
object_image, audio_name, video_name FROM object where object_id = %s"
val = (App.ObjectID + 1,)
myCursor.execute(sql, val)
myresult = myCursor.fetchone()
App.img.clear()
App.img.append(myresult[0])
App.img.append(myresult[1])
App.img.append(myresult[2])
App.objNameImg = myresult[3]
App.audio = myresult[4]
App.video = myresult[5]
myCursor.close()
mydb.close()
App.curFileId = 1
App.ObjectID += 1
self.showImage(App.img[App.curFileId - 1])
self.mediaPlayer.setMedia(QMediaContent(QUrl.fromLocalFile(App.video))) # NECESSARY
self.playButton.setEnabled(True)
self.showObjectNameImage(App.objNameImg)
def on_click_test(self):
#self.btnQues.hide()
App.alreadyLearned.append(App.ObjectID - 1)
self.showImage(App.img[App.curFileId - 1])
self.mediaPlayer.setMedia(QMediaContent(QUrl.fromLocalFile(App.video))) # NECESSARY
self.playButton.setEnabled(True)
self.showObjectNameImage(App.objNameImg)
self.showQuestionWindow(App.ObjectID - 1)
def showQuestionWindow(self,objectID):
self.QuesWindow = QtWidgets.QMainWindow()
self.ui = Ui_MainWindow()
#self.ui.setupUi()
select=False
self.ui.setDB(objectID,select)
self.QuesWindow.show()
if __name__ == '__main__':
app = QApplication(sys.argv)
obj = App()
sys.exit(app.exec_())
|
[
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtMultimediaWidgets.QVideoWidget",
"new_qus.Ui_MainWindow",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QMessageBox.question",
"PyQt5.QtWidgets.QMainWindow",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtMultimedia.QMediaPlayer",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QSlider",
"PyQt5.QtGui.QPixmap",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtMultimedia.QSound.play",
"tkinter.Tk",
"PyQt5.QtCore.QUrl.fromLocalFile"
] |
[((14621, 14643), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (14633, 14643), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton\n'), ((1001, 1008), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (1006, 1008), True, 'import tkinter as tk\n'), ((2401, 2446), 'PyQt5.QtMultimedia.QMediaPlayer', 'QMediaPlayer', (['None', 'QMediaPlayer.VideoSurface'], {}), '(None, QMediaPlayer.VideoSurface)\n', (2413, 2446), False, 'from PyQt5.QtMultimedia import QMediaContent, QMediaPlayer\n'), ((2470, 2484), 'PyQt5.QtMultimediaWidgets.QVideoWidget', 'QVideoWidget', ([], {}), '()\n', (2482, 2484), False, 'from PyQt5.QtMultimediaWidgets import QVideoWidget\n'), ((2512, 2525), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ([], {}), '()\n', (2523, 2525), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton\n'), ((2730, 2752), 'PyQt5.QtWidgets.QSlider', 'QSlider', (['Qt.Horizontal'], {}), '(Qt.Horizontal)\n', (2737, 2752), False, 'from PyQt5.QtWidgets import QApplication, QFileDialog, QHBoxLayout, QLabel, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QVBoxLayout, QWidget\n'), ((2889, 2897), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (2895, 2897), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton\n'), ((3190, 3203), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (3201, 3203), False, 'from PyQt5.QtWidgets import QApplication, QFileDialog, QHBoxLayout, QLabel, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QVBoxLayout, QWidget\n'), ((3377, 3390), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (3388, 3390), False, 'from PyQt5.QtWidgets import QVBoxLayout\n'), ((3966, 3978), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['self'], {}), '(self)\n', (3972, 3978), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton\n'), ((4190, 4209), 'PyQt5.QtGui.QPixmap', 'QPixmap', (['App.img[0]'], {}), 
'(App.img[0])\n', (4197, 4209), False, 'from PyQt5.QtGui import QIcon, QPixmap, QImage, QImageReader\n'), ((4562, 4599), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""\t PLAY AUDIO \t"""', 'self'], {}), "('\\t PLAY AUDIO \\t', self)\n", (4573, 4599), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton\n'), ((5005, 5046), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""\t PREVIOUS IMAGE \t"""', 'self'], {}), "('\\t PREVIOUS IMAGE \\t', self)\n", (5016, 5046), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton\n'), ((5403, 5446), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""\t SKIP THIS OBJECT \t"""', 'self'], {}), "('\\t SKIP THIS OBJECT \\t', self)\n", (5414, 5446), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton\n'), ((5808, 5845), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""\t NEXT IMAGE \t"""', 'self'], {}), "('\\t NEXT IMAGE \\t', self)\n", (5819, 5845), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton\n'), ((6684, 6696), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['self'], {}), '(self)\n', (6690, 6696), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton\n'), ((6719, 6742), 'PyQt5.QtGui.QPixmap', 'QPixmap', (['App.objNameImg'], {}), '(App.objNameImg)\n', (6726, 6742), False, 'from PyQt5.QtGui import QIcon, QPixmap, QImage, QImageReader\n'), ((7001, 7014), 'PyQt5.QtWidgets.QWidget', 'QWidget', (['self'], {}), '(self)\n', (7008, 7014), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton\n'), ((8359, 8382), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['filepath'], {}), '(filepath)\n', (8372, 8382), False, 'from PyQt5 import QtGui, QtCore, QtWidgets, QtMultimedia, QtMultimediaWidgets\n'), ((8706, 8723), 'PyQt5.QtGui.QPixmap', 'QPixmap', (['filepath'], {}), '(filepath)\n', (8713, 8723), False, 'from PyQt5.QtGui import QIcon, QPixmap, QImage, QImageReader\n'), ((8970, 8987), 
'PyQt5.QtMultimedia.QSound.play', 'QSound.play', (['path'], {}), '(path)\n', (8981, 8987), False, 'from PyQt5.QtMultimedia import QSound\n'), ((14406, 14429), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (14427, 14429), False, 'from PyQt5 import QtGui, QtCore, QtWidgets, QtMultimedia, QtMultimediaWidgets\n'), ((14448, 14463), 'new_qus.Ui_MainWindow', 'Ui_MainWindow', ([], {}), '()\n', (14461, 14463), False, 'from new_qus import Ui_MainWindow\n'), ((3028, 3057), 'PyQt5.QtCore.QUrl.fromLocalFile', 'QUrl.fromLocalFile', (['App.video'], {}), '(App.video)\n', (3046, 3057), False, 'from PyQt5.QtCore import QDir, Qt, QUrl\n'), ((12064, 12157), 'PyQt5.QtWidgets.QMessageBox.question', 'QMessageBox.question', (['self', 'title', 'ques', '(QMessageBox.Yes | QMessageBox.No)', 'QMessageBox.No'], {}), '(self, title, ques, QMessageBox.Yes | QMessageBox.No,\n QMessageBox.No)\n', (12084, 12157), False, 'from PyQt5.QtWidgets import QApplication, QFileDialog, QHBoxLayout, QLabel, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QVBoxLayout, QWidget\n'), ((13785, 13814), 'PyQt5.QtCore.QUrl.fromLocalFile', 'QUrl.fromLocalFile', (['App.video'], {}), '(App.video)\n', (13803, 13814), False, 'from PyQt5.QtCore import QDir, Qt, QUrl\n'), ((14146, 14175), 'PyQt5.QtCore.QUrl.fromLocalFile', 'QUrl.fromLocalFile', (['App.video'], {}), '(App.video)\n', (14164, 14175), False, 'from PyQt5.QtCore import QDir, Qt, QUrl\n'), ((10193, 10222), 'PyQt5.QtCore.QUrl.fromLocalFile', 'QUrl.fromLocalFile', (['App.video'], {}), '(App.video)\n', (10211, 10222), False, 'from PyQt5.QtCore import QDir, Qt, QUrl\n'), ((11697, 11726), 'PyQt5.QtCore.QUrl.fromLocalFile', 'QUrl.fromLocalFile', (['App.video'], {}), '(App.video)\n', (11715, 11726), False, 'from PyQt5.QtCore import QDir, Qt, QUrl\n'), ((12545, 12574), 'PyQt5.QtCore.QUrl.fromLocalFile', 'QUrl.fromLocalFile', (['App.video'], {}), '(App.video)\n', (12563, 12574), False, 'from PyQt5.QtCore import QDir, Qt, QUrl\n')]
|
import unittest
from prompt_toolkit.widgets import Button
from unittest.mock import patch, MagicMock
from hummingbot.client.tab.data_types import CommandTab
from hummingbot.client.ui.hummingbot_cli import HummingbotCLI
from hummingbot.client.ui.custom_widgets import CustomTextArea
class HummingbotCLITest(unittest.TestCase):
command_name = "command_1"
def setUp(self) -> None:
super().setUp()
tabs = {self.command_name: CommandTab(self.command_name, None, None, None, MagicMock())}
self.mock_hb = MagicMock()
self.app = HummingbotCLI(None, None, None, tabs)
self.app.app = MagicMock()
def test_handle_tab_command_on_close_argument(self):
tab = self.app.command_tabs[self.command_name]
tab.close_button = MagicMock()
tab.button = MagicMock()
tab.output_field = MagicMock()
self.app.handle_tab_command(self.mock_hb, self.command_name, {"close": True})
self.assertIsNone(tab.button)
self.assertIsNone(tab.close_button)
self.assertIsNone(tab.output_field)
self.assertFalse(tab.is_selected)
self.assertEqual(tab.tab_index, 0)
def test_handle_tab_command_create_new_tab_and_display(self):
tab = self.app.command_tabs[self.command_name]
self.app.handle_tab_command(self.mock_hb, self.command_name, {"close": False})
self.assertIsInstance(tab.button, Button)
self.assertIsInstance(tab.close_button, Button)
self.assertIsInstance(tab.output_field, CustomTextArea)
self.assertEqual(tab.tab_index, 1)
self.assertTrue(tab.is_selected)
self.assertTrue(tab.tab_class.display.called)
@patch("hummingbot.client.ui.layout.Layout")
@patch("hummingbot.client.ui.layout.FloatContainer")
@patch("hummingbot.client.ui.layout.ConditionalContainer")
@patch("hummingbot.client.ui.layout.Box")
@patch("hummingbot.client.ui.layout.HSplit")
@patch("hummingbot.client.ui.layout.VSplit")
def test_handle_tab_command_on_existing_tab(self, mock_vsplit, mock_hsplit, mock_box, moc_cc, moc_fc, mock_layout):
tab = self.app.command_tabs[self.command_name]
tab.button = MagicMock()
tab.output_field = MagicMock()
tab.close_button = MagicMock()
tab.is_selected = False
self.app.handle_tab_command(self.mock_hb, self.command_name, {"close": False})
self.assertTrue(tab.is_selected)
self.assertTrue(tab.tab_class.display.call_count == 1)
# Test display not called if there is a running task
tab.is_selected = False
tab.task = MagicMock()
tab.task.done.return_value = False
self.app.handle_tab_command(self.mock_hb, self.command_name, {"close": False})
self.assertTrue(tab.is_selected)
self.assertTrue(tab.tab_class.display.call_count == 1)
@patch("hummingbot.client.ui.layout.Layout")
@patch("hummingbot.client.ui.layout.FloatContainer")
@patch("hummingbot.client.ui.layout.ConditionalContainer")
@patch("hummingbot.client.ui.layout.Box")
@patch("hummingbot.client.ui.layout.HSplit")
@patch("hummingbot.client.ui.layout.VSplit")
def test_tab_navigation(self, mock_vsplit, mock_hsplit, mock_box, moc_cc, moc_fc, mock_layout):
tab2 = CommandTab("command_2", None, None, None, MagicMock(), False)
self.app.command_tabs["command_2"] = tab2
tab1 = self.app.command_tabs[self.command_name]
self.app.handle_tab_command(self.mock_hb, self.command_name, {"close": False})
self.app.handle_tab_command(self.mock_hb, "command_2", {"close": False})
self.assertTrue(tab2.is_selected)
self.app.tab_navigate_left()
self.assertTrue(tab1.is_selected)
self.assertFalse(tab2.is_selected)
self.app.tab_navigate_left()
self.assertTrue(all(not t.is_selected for t in self.app.command_tabs.values()))
self.app.tab_navigate_left()
self.assertTrue(all(not t.is_selected for t in self.app.command_tabs.values()))
self.app.tab_navigate_right()
self.assertTrue(tab1.is_selected)
self.app.tab_navigate_right()
self.assertFalse(tab1.is_selected)
self.assertTrue(tab2.is_selected)
self.app.tab_navigate_right()
self.assertFalse(tab1.is_selected)
self.assertTrue(tab2.is_selected)
|
[
"unittest.mock.patch",
"unittest.mock.MagicMock",
"hummingbot.client.ui.hummingbot_cli.HummingbotCLI"
] |
[((1683, 1726), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.Layout"""'], {}), "('hummingbot.client.ui.layout.Layout')\n", (1688, 1726), False, 'from unittest.mock import patch, MagicMock\n'), ((1732, 1783), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.FloatContainer"""'], {}), "('hummingbot.client.ui.layout.FloatContainer')\n", (1737, 1783), False, 'from unittest.mock import patch, MagicMock\n'), ((1789, 1846), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.ConditionalContainer"""'], {}), "('hummingbot.client.ui.layout.ConditionalContainer')\n", (1794, 1846), False, 'from unittest.mock import patch, MagicMock\n'), ((1852, 1892), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.Box"""'], {}), "('hummingbot.client.ui.layout.Box')\n", (1857, 1892), False, 'from unittest.mock import patch, MagicMock\n'), ((1898, 1941), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.HSplit"""'], {}), "('hummingbot.client.ui.layout.HSplit')\n", (1903, 1941), False, 'from unittest.mock import patch, MagicMock\n'), ((1947, 1990), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.VSplit"""'], {}), "('hummingbot.client.ui.layout.VSplit')\n", (1952, 1990), False, 'from unittest.mock import patch, MagicMock\n'), ((2865, 2908), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.Layout"""'], {}), "('hummingbot.client.ui.layout.Layout')\n", (2870, 2908), False, 'from unittest.mock import patch, MagicMock\n'), ((2914, 2965), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.FloatContainer"""'], {}), "('hummingbot.client.ui.layout.FloatContainer')\n", (2919, 2965), False, 'from unittest.mock import patch, MagicMock\n'), ((2971, 3028), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.ConditionalContainer"""'], {}), "('hummingbot.client.ui.layout.ConditionalContainer')\n", (2976, 3028), False, 'from unittest.mock import patch, MagicMock\n'), 
((3034, 3074), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.Box"""'], {}), "('hummingbot.client.ui.layout.Box')\n", (3039, 3074), False, 'from unittest.mock import patch, MagicMock\n'), ((3080, 3123), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.HSplit"""'], {}), "('hummingbot.client.ui.layout.HSplit')\n", (3085, 3123), False, 'from unittest.mock import patch, MagicMock\n'), ((3129, 3172), 'unittest.mock.patch', 'patch', (['"""hummingbot.client.ui.layout.VSplit"""'], {}), "('hummingbot.client.ui.layout.VSplit')\n", (3134, 3172), False, 'from unittest.mock import patch, MagicMock\n'), ((535, 546), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (544, 546), False, 'from unittest.mock import patch, MagicMock\n'), ((566, 603), 'hummingbot.client.ui.hummingbot_cli.HummingbotCLI', 'HummingbotCLI', (['None', 'None', 'None', 'tabs'], {}), '(None, None, None, tabs)\n', (579, 603), False, 'from hummingbot.client.ui.hummingbot_cli import HummingbotCLI\n'), ((627, 638), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (636, 638), False, 'from unittest.mock import patch, MagicMock\n'), ((779, 790), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (788, 790), False, 'from unittest.mock import patch, MagicMock\n'), ((812, 823), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (821, 823), False, 'from unittest.mock import patch, MagicMock\n'), ((851, 862), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (860, 862), False, 'from unittest.mock import patch, MagicMock\n'), ((2187, 2198), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2196, 2198), False, 'from unittest.mock import patch, MagicMock\n'), ((2226, 2237), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2235, 2237), False, 'from unittest.mock import patch, MagicMock\n'), ((2265, 2276), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2274, 2276), False, 'from unittest.mock import patch, MagicMock\n'), 
((2613, 2624), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2622, 2624), False, 'from unittest.mock import patch, MagicMock\n'), ((3330, 3341), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3339, 3341), False, 'from unittest.mock import patch, MagicMock\n'), ((498, 509), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (507, 509), False, 'from unittest.mock import patch, MagicMock\n')]
|
"""TN3270 terminal class.
Low level APIs for telnet-3270 for Z tool and library.
Usage:
from tnz import tnz
Environment variables used:
SESSION_PS_SIZE
TNZ_COLORS
TNZ_LOGGING
ZTI_SECLEVEL
Copyright 2021 IBM Inc. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
"""
import asyncio
import enum
import json
import logging
import os
import platform
import re
import ssl
import sys
from . import __version__
__author__ = "<NAME>"
_wait_event = None
_loop = None
class Tnz:
    """One instance for each terminal and the connection.

    Methods for actions taken at the terminal:
        attn                    ATTN
        enter                   ENTER
        clear                   CLEAR
        key_backspace           Backspace
        key_backtab             Backtab
        key_curdown             Down
        key_curleft             Left
        key_curright            Right
        key_curup               Up
        key_data                characters/string
        key_delete              Delete
        key_end                 (End)
        key_eraseeof            Erase EOF
        key_eraseinput          ERASE INPUT
        key_home                Home
        key_insdata             insert characters/string
        key_newline             NL
        key_tab                 Tab
        key_wordl               (Word Left)
        key_wordr               (Word Right)
        pa1, pa2, pa3           PAx
        pfX (pf1, pf2, ... pf24) PFxx

    Additional methods to enhance the terminal experience:
        paste_data              multiline characters/string

    Characters in the terminal:
        scrstr                  Method to get characters/string

    Attributes in the terminal:
        plane_fa                Field attributes
        plane_eh                Extended highlighting
        plane_fg                Forground color
        plane_bg                Background color

    Various methods perform actions taken by the remote application.
    Use the wait method to wait for the remote application to request
    actions and for the instance to process them.

    The data in the 3270 terminal is located by an address. This may
    be different from other applications that use a row and column or
    an x position and a y position. In addition, vectors described
    using addresses typically wrap - for instance from address 2 to
    address 1 produces a vector start at address 2, continues to the
    end of the terminal, continues at the beginning of the terminal,
    and terminates at address 1.

    The send method and the send_* methods send data to the remote host.
    """
    def __init__(self, name=None):
        """Create a new Tnz object.

        name: optional session name; defaults to str(hash(self)).

        Initializes connection state, IND$FILE/DDM transfer state,
        the six screen planes, and "smart" defaults (colors,
        alternate screen size) derived from the environment.
        """
        self.use_tn3270e = False
        self.lu_name = None
        try:
            # TNZ_COLORS overrides the terminal color capability count
            self.colors = int(os.getenv("TNZ_COLORS", "768"))
        except ValueError:
            self.colors = 768

        # --- connection state ---
        self.__secure = False
        self.__host_verified = False
        self.__loop = None
        self.__connect_task = None
        self.__zti = None
        self.__waiting = False
        self.__wait_rv = None
        self._transport = None  # asyncio.Transport
        self.__rec = []
        self.__pndrec = b""
        self.__eor = False
        self.__tn3270e = False
        self.__work_buffer = b""
        self._binary_local = False
        self._binary_remote = False

        # file transfer
        self.file_appends = None  # File to write downloads to
        self.__indstemp = False
        self.__indsfile = None
        self.__indsisf = None
        self.__inds_rm = None
        self.__indsenc = None
        self.__indspend = b""
        self.ddmrecv = False  # allow host-initiated ind$file get
        self.ddmsend = False  # allow host-initiated ind$file put
        self.__ddmtdat = ""
        self.ddmdata = None
        self.__ddmdata = False
        self.__ddmopen = False
        self.__ddmrecnum = 0
        self.__ddmascii = False
        self.__ddmmsg = None
        self.lastcmd = None
        self.__ddmfile = None
        self.__ddmupload = False

        # session status
        self.updated = False  # character buffer updated
        self.seslost = False  # session/connection lost

        # readlines
        self.readlines = None
        self.readlines_pa2 = True
        self.__readlines_row = 0
        self.__readlines_maxrow = 0
        self.__indstr = ""
        self.__indsdict = {}

        # ddm limits for inbound and outbound
        # 32767 is what HOD documents as the max
        # 0000 results in no data coming across
        # 2500 is what HOD sets by default
        # 2048 is what x3270 seems to set
        # Experience with TSO (under VICOM) has shown
        # that TSO IND$FILE may not handle 32767. It
        # seems like 32639 works. Is there a higher
        # number that also works? Is there something
        # that can be done to get 32767 to work?
        self._limin = 32639
        self._limout = 32767

        # telnet negotiation bookkeeping
        self._sendbuf = []
        self.local_do = []
        self.local_will = []
        self.local_wont = []
        self.local_dont = []
        self.remote_do = []
        self.remote_will = []
        self.remote_wont = []
        self.remote_dont = []
        self.bytes_sent = 0
        self.bytes_received = 0
        self.terminal_type = "IBM-DYNAMIC"  # will be ascii-encoded

        # --- screen geometry ---
        self.dmaxrow = 24  # 3270 Default Screen Size
        self.dmaxcol = 80  # 3270 Default Screen Size
        self.amaxrow = 24  # default Alternate number of rows
        self.amaxcol = 80  # default Alternate number of columns
        self.maxrow = self.dmaxrow  # default
        self.maxcol = self.dmaxcol  # default
        self.buffer_size = self.maxrow * self.maxcol
        self.curadd = 0  # cursor address
        self.bufadd = 0  # buffer address
        self.addr16bit = False
        self.aid = 0x60  # AID_NONE
        self.__reply_mode = 0  # Field mode
        self.__reply_cattrs = b""  # Character mode attrs of interest
        self.pwait = False  # PWAIT/TWAIT input inhibit
        # SYSTEM LOCK is associated with partition
        # SYSTEM LOCK can be reset by operator
        self.system_lock_wait = True  # System Lock input inhibit
        self.read_state = self.__ReadState.NORMAL
        self.inop = 0x06  # (RM) right initialization
        self.inpid = 0

        # Claim not capable of color
        # for situations where the terminal (tty)
        # is not used (i.e. the only view of the
        # 3270 screen is plain text only).
        self.capable_color = False
        self.__extended_color_mode = False

        # the six parallel screen planes, one byte per screen position
        buffer_size = self.buffer_size
        self.plane_dc = bytearray(buffer_size)  # data characters
        self.plane_fa = bytearray(buffer_size)  # field attributes
        self.plane_eh = bytearray(buffer_size)  # extended hilite
        self.plane_cs = bytearray(buffer_size)  # character set
        self.plane_fg = bytearray(buffer_size)  # foreground color
        self.plane_bg = bytearray(buffer_size)  # background color
        self.__pt_erase = False
        self.__proc_eh = 0  # extended highlighting
        self.__proc_cs = 0  # character set
        self.__proc_fg = 0  # foreground color
        self.__proc_bg = 0  # background color

        # NOTE(review): 'encoding' appears to be a property defined
        # outside this view; the tuple form presumably also selects
        # the GE (alt) codec -- confirm.
        self.codec_info = {}
        self.encoding = "cp037"
        self.encoding = "cp037", 1
        self.alt = 0  # No support for GE (default)
        if sys.stdout.isatty():
            if str(sys.stdout.encoding).upper().startswith("UTF"):
                from . import cp310 as _
                self.encoding = "cp310", 1
        else:
            import locale
            preferredencoding = locale.getpreferredencoding()
            if preferredencoding.upper().startswith("UTF"):
                from . import cp310 as _
                self.encoding = "cp310", 1

        if name:
            self.name = name
        else:
            self.name = str(hash(self))

        self.need_shutdown = False

        # Begin "smart" detection of default properties
        try:
            self.encoding = "cp1047"
        except LookupError:
            pass

        if self.colors >= 8 and sys.stdin.isatty():
            # Claim capable of color for zti
            self.capable_color = True

        # alternate screen size: SESSION_PS_SIZE wins, else tty size
        ps_size = os.getenv("SESSION_PS_SIZE", None)
        if ps_size:
            try:
                from . import _util
                asize = _util.session_ps_size(ps_size)
                self.amaxrow, self.amaxcol = asize

            except ValueError:
                self.__logger.exception("SESSION_PS_SIZE error")
                ps_size = None

        if ps_size is None:
            try:
                columns, lines = os.get_terminal_size()
                # pick the largest standard 3270 model that fits
                if lines >= 62 and columns >= 160:
                    self.amaxrow = 62
                    self.amaxcol = 160
                elif lines >= 27 and columns >= 132:
                    self.amaxrow = 27
                    self.amaxcol = 132
                elif lines >= 43:
                    self.amaxrow = 43
                    self.amaxcol = 80
                elif lines >= 32:
                    self.amaxrow = 32
                    self.amaxcol = 80
                else:
                    self.amaxrow = 24
                    self.amaxcol = 80

            except OSError:
                pass
# Methods
def address(self, address_bytes):
"""
Return an integer buffer address for the input encoded byte
string address.
"""
if not isinstance(address_bytes, bytes):
raise TypeError("input address_bytes must be bytes")
if len(address_bytes) != 2:
raise ValueError("input address_bytes must be 2 bytes")
if not self.addr16bit and address_bytes[0] & 0x40:
high_6bits = address_bytes[0] & 0x3f
low_6bits = address_bytes[1] & 0x3f
return high_6bits * 64 + low_6bits
if not self.addr16bit and address_bytes[0] & 0x80:
raise ValueError("reserved address mode")
addr = int.from_bytes(address_bytes, "big")
if self.addr16bit and addr > self.buffer_size: # weird?
self.addr16bit = False
addr = self.address(address_bytes)
self.addr16bit = True
return addr
def address_bytes(self, address):
"""
Return an encoded byte string for the input integer buffer
address.
"""
if isinstance(address, bytes):
raise TypeError("bytes not expected")
value = address
if not self.addr16bit and self.buffer_size <= 4095:
_bit6 = bit6
high_6bits, low_6bits = divmod(address, 64)
value = _bit6(low_6bits) + 256 * _bit6(high_6bits)
return value.to_bytes(2, byteorder="big")
def attn(self):
"""Send 3270 ATTN
"""
if self.__tn3270e:
self.send_command(244) # IP (Interrupt Process)
else:
self.send_command(243) # BRK (Break)
def clear(self, zti=None):
"""Send CLEAR
"""
self.__extended_color_mode = False
buffer_size = self.buffer_size
self.plane_dc = bytearray(buffer_size) # data characters
self.plane_fa = bytearray(buffer_size) # field attributes
self.plane_eh = bytearray(buffer_size) # extended hilite
self.plane_cs = bytearray(buffer_size) # character set
self.plane_fg = bytearray(buffer_size) # foreground color
self.plane_bg = bytearray(buffer_size) # background color
self.curadd = 0 # cursor address
if zti:
zti.erase(self)
self.updated = True
return self.key_aid(0x6d) # AID_CLEAR
def close(self):
"""Close the connection.
"""
transport = self._transport
if transport:
self._transport = None
transport.abort()
    def connect(self, host=None, port=None,
                secure=False, verifycert=True):
        """Connect to the host.

        host: defaults to 127.0.0.1
        port: defaults to 23 (insecure) or 992 (secure)
        secure: when True, wrap the connection in TLS
        verifycert: when True, verify against ibm-cacerts.pem

        Raises TnzError when already connected.  The connect is
        scheduled as an asyncio task; it does not block.
        """
        if self._transport:
            raise TnzError("Already connected")

        if host is None:
            host = "127.0.0.1"  # default host

        if port is None:
            if secure is False:
                port = 23  # default insecure port
            else:
                port = 992  # default port

        self.__secure = False
        self.__host_verified = False

        # protocol callbacks close over self; asyncio drives them
        class _TnzProtocol(asyncio.BaseProtocol):
            @staticmethod
            def connection_made(transport):
                self._transport = transport
                self.need_shutdown = True
                self.seslost = False

            @staticmethod
            def connection_lost(exc):
                if not self.seslost:
                    if exc:
                        # preserve exc_info-style tuple for callers
                        self.seslost = (type(exc), exc, None)
                    else:
                        self.seslost = True

                _wait_event.set()  # wake any wait() caller

            @staticmethod
            def data_received(data):
                """Handle received data
                """
                self._data_received(data)

            @staticmethod
            def eof_received():
                """Handle EOF
                """
                self._log_warn("eof_received()")

            @staticmethod
            def pause_writing():
                """Handle pause writing
                """
                self._log_warn("pause_writing")

            @staticmethod
            def resume_writing():
                """Handle resume writing
                """
                self._log_warn("resume_writing")

        if secure:
            context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
            # ZTI_SECLEVEL=1 relaxes OpenSSL security level for old hosts
            if os.getenv("ZTI_SECLEVEL", "2") == "1":
                context.set_ciphers("DEFAULT@SECLEVEL=1")

            if verifycert:
                context.load_verify_locations("ibm-cacerts.pem")
                self.__host_verified = True  # ? too soon ?
            else:
                context.check_hostname = False  # insecure FIXME
                context.verify_mode = ssl.CERT_NONE  # insecure FIXME

        else:
            context = None

        coro = self.__connect(_TnzProtocol, host, port, context)
        loop = self.__get_event_loop()
        task = loop.create_task(coro)
        self.__connect_task = task
def ddm_in_progress(self):
"""Return bool indicating if ddm is in progress.
"""
return self.__ddmopen
def download(self):
"""Return current Download (or None).
"""
if not self.__indstemp:
return None
return self.__indsfile
def enter(self, text=None, zti=None):
"""Send ENTER
"""
if text:
self.key_data(text, zti=zti)
return self.key_aid(0x7d) # AID_ENTER
def extended_color_mode(self):
"""Return bool indicating if screen using extended colors.
"""
return self.__extended_color_mode
def field(self, address):
"""Return field address and attribute value as tuple.
Get the field address and attribute value associated with the
input address.
"""
self.__check_address(address)
search = self.__patn0l.search
plane_fa = self.plane_fa
endpos = address + 1
mat = search(plane_fa, 0, endpos)
if not mat:
if endpos >= self.buffer_size:
return -1, 0 # no fields
mat = search(plane_fa, endpos)
if not mat:
return -1, 0 # no fields
return mat.start(), mat[0][0]
    def fields(self, saddr=None, eaddr=None):
        """A generator of all fields as (address, attribute).

        With no arguments, every field on the screen is yielded
        exactly once, starting with the first field at or after
        address 0.  ``saddr``/``eaddr`` bound the (wrapping) range.
        """
        next_field = self.next_field
        if saddr is None:
            # offset=0 starts the search at address 0 itself
            faddr, fattr = next_field(0, eaddr, offset=0)
            if eaddr is None:
                eaddr = faddr  # stop once we come back around
        else:
            if eaddr is None:
                eaddr = saddr
            faddr, fattr = next_field(saddr, eaddr, offset=0)

        paddr = (eaddr - 1) % self.buffer_size  # stop before end
        while faddr >= 0:
            yield faddr, fattr
            if faddr == paddr:  # if stop because next is end
                return

            faddr, fattr = next_field(faddr, eaddr)
    def char_addrs(self, saddr=None, eaddr=None):
        """A generator of character data address ranges.

        Yields (start, end) address pairs for the character data
        that follows each field attribute.  With no arguments the
        whole screen is covered once.
        """
        next_data = self.next_data
        if saddr is None:
            # offset=-1 starts the field search at the last buffer
            # address so a field attribute at/before 0 is considered
            faddr, _ = self.next_field(0, eaddr, offset=-1)
            if faddr < 0:  # no fields
                faddr = 0
            start, end = next_data(faddr, eaddr)
            if eaddr is None:
                eaddr = start  # stop once back at the first range
        else:
            if eaddr is None:
                eaddr = saddr
            start, end = next_data(saddr, eaddr)

        while start >= 0:
            yield start, end
            if end == eaddr:
                return

            start, end = next_data(end, eaddr)
    def group_addrs(self, saddr, eaddr):
        """A generator of character data address range groups.

        A generator function that iterates through groups of
        characters. The characters in each group have all the same
        attributes. Each iteration returns a tuple with the start and
        end addresses.

        It is assumed that the input address(es) are valid for the
        current buffer size. They are in the range from 0 to
        buffer_size-1, inclusive.
        """
        buffer_size = self.buffer_size
        # NOTE(review): __iterbs is defined outside this view;
        # it presumably yields the end position of each run of
        # equal bytes in a plane -- confirm.
        iterbs = self.__iterbs
        iterow = self.iterow
        plane_eh = self.plane_eh
        plane_fg = self.plane_fg
        plane_bg = self.plane_bg
        for saddr1, eaddr1 in self.char_addrs(saddr, eaddr):
            for rsp, rep in iterow(saddr1, eaddr1):
                # per-row run iterators for each attribute plane
                ieh = iterbs(plane_eh, rsp, rep)
                ifg = iterbs(plane_fg, rsp, rep)
                ibg = iterbs(plane_bg, rsp, rep)
                tsa = rsp
                eeh = -1
                efg = -1
                ebg = -1
                while tsa != rep:
                    # advance each run iterator past current position
                    if tsa >= eeh:
                        eeh = next(ieh)

                    if tsa >= efg:
                        efg = next(ifg)

                    if tsa >= ebg:
                        ebg = next(ibg)

                    # the group ends at the nearest attribute change
                    tea = min(eeh, efg, ebg)
                    if tea == buffer_size:
                        yield tsa, 0  # wrap: buffer end as address 0
                    else:
                        yield tsa, tea

                    tsa = tea
    def get_file(self, parms, filename, assist=True):
        """
        Get host file into local file.

        parms: IND$FILE GET parameter string (host file + options)
        filename: local file to write
        assist: when True, intercept ASCII CRLF options and do the
            codepage translation locally (see below)

        Returns the final transfer status message (also logged).

        Would be nice, when the local system is Z, to transfer in
        binary. However, when transfer is done in binary, no
        information about the record format comes across. So, there
        is no knowledge if the host records were fixed-80 or variable
        length - so there is no indication as to where the line
        breaks should be.

        Transfering using the ASCII CRLF options seems to cause the
        host to convert between cp037 and iso8859-1. If something
        other than cp037 is expected/needed, support must be added to
        translate back from iso8859-1 to cp037. Then using the
        desired code page (e.g. cp1047) to translate to Unicode.
        """
        self.__log_info("get_file(%r, %r, assist=%r)",
                        filename, parms, assist)
        from . import rexx
        parmsu = parms.upper()
        opts = rexx.subword(parmsu, 2)
        # options may start at word 2 or 3, after an open paren
        if rexx.word(opts, 2).startswith("("):
            opts = rexx.subword(opts, 2)[1:]
        elif rexx.word(opts, 3).startswith("("):
            opts = rexx.subword(opts, 3)[1:]

        opts = " "+opts+" "
        encoding = None
        if assist:
            # See assist comment in put_file
            parmsu = parms.upper()
            preopt = rexx.subword(parms, 1, 1)
            opts = rexx.subword(parmsu, 2).split()
            paren = False
            if "(" in opts:
                i = opts.index("(")
                paren = True
                preopt = rexx.subword(parms, 1, i+1)
                opts = opts[i+1:]
            elif len(opts) > 1 and opts[1].startswith("("):
                opts[1] = opts[1][1:]
                paren = True
                preopt = rexx.subword(parms, 1, 2)
                opts = opts[1:]
            elif len(opts) > 2 and opts[2].startswith("("):
                opts[2] = opts[2][1:]
                paren = True
                preopt = rexx.subword(parms, 1, 3)
                opts = opts[2:]

            if paren and opts[-1].endswith(")"):
                opts[-1] = opts[-1][:-1].strip()
                if not opts[-1]:
                    opts.pop()

            if ("ASCII" in opts) and ("CRLF" in opts):
                # strip ASCII: do the translation locally instead
                encoding = self.encoding
                parms = [preopt]
                if paren:
                    parms.append("(")

                opts.remove("ASCII")
                parms.extend(opts)
                parms = " ".join(parms)
                opts = " "+" ".join(opts)+" "
                # NOTE(review): opts is only re-joined to a padded
                # string on this branch; the APPEND check below uses
                # " APPEND " and would miss when assist=True without
                # ASCII/CRLF -- confirm intended.

        self.__log_debug("parms=%r", parms)
        self.__ddmmsg = None
        try:
            self.__indsenc = encoding
            if encoding:
                if " APPEND " in opts:
                    mode = "a"
                else:
                    mode = "w"

                enc = "UTF-8"  # avoid encoding errors
            else:
                if " APPEND " in opts:
                    mode = "ab"
                else:
                    mode = "wb"

                enc = None

            with open(filename, mode, encoding=enc) as self.__indsfile:
                # do not enter IND$FILE until file is open
                # (avoids an inconsistent state if error opening file)
                self.__log_debug("entering IND$FILE GET %s", parms)
                self.enter("IND$FILE GET "+parms)
                while (not self.__ddmmsg and not self.seslost):
                    self.wait(3)  # query reply, command acknowledge

        finally:
            self.__log_debug("clearing __indsfile")
            self.__indsfile = None

        self.__log_info("get_file: %s", self.__ddmmsg)
        return self.__ddmmsg
def getpeername(self):
"""Return remote address to which socket is connected.
"""
transport = self._transport
if not transport:
return "?", "?"
return transport.get_extra_info("peername")
def getsockettype(self):
"""Return type of socket.
"""
transport = self._transport
if not transport:
return "?"
return type(transport.get_extra_info("socket"))
def is_pen_detectable(self, address):
"""
Return True or False to indicate if the address is
selector-pen detectable
"""
_, fattr = self.field(address)
return self.is_detectable_attr(fattr)
def is_protected(self, address):
"""
Return True or False to indicate if the address is protected
"""
fa1, fattr = self.field(address)
return (fa1 == address) or bool(fattr & 0x20)
def is_unprotected(self):
"""
Return True or False to indicate if all fields are
unprotected
"""
for _, fattr in self.fields():
if self.is_protected_attr(fattr):
return False
return True
def iterow(self, saddr, eaddr):
"""
Iterate through rows of bytes in the input bytearray.
Note that the inputs are addresses and the outputs are
positions. In other words, the outputs for each iteration
will always have the start less than the end and buffer_size
is used when the vector goes to the end of the bytearray.
"""
saddr1 = saddr
if saddr >= eaddr:
eaddr1 = self.buffer_size
else:
eaddr1 = eaddr
maxcol = self.maxcol
while saddr1 != eaddr1:
row = saddr1 // maxcol
tea = min(eaddr1, (row+1) * maxcol)
yield saddr1, tea
saddr1 = tea
if saddr >= eaddr:
saddr1 = 0
while saddr1 != eaddr:
row = saddr1 // maxcol
tea = min(eaddr, (row+1) * maxcol)
yield saddr1, tea
saddr1 = tea
def key_aid(self, aid):
"""Process an aid key.
"""
if self.pwait:
raise TnzError("PWAIT Input Inhibit")
if self.system_lock_wait:
raise TnzError("System Lock Input Inhibit")
if self.read_state == self.__ReadState.RENTER:
raise TnzError("Retry Enter State")
self.inpid = 0 # Inbound Partition Identifier (INPID)
self.inop = 0x06 # (RM) INOP = Read Modified
if aid != 0x7f: # not trigger action (AID_TRIGA)
self.system_lock_wait = True # System Lock Condition
self.pwait = True # Partition Wait Condition (PWAIT)
self.read_state = self.__ReadState.RENTER # Retry Enter State
self.send_aid(aid) # transmit data inbound
def key_backspace(self, zti=None):
"""
Process backspace key, which behaves like a cursor left
followed by a delete as long as cursor is not on a field
attribute nor is there a field attribute to the left.
"""
self.__log_debug(" backspace")
addr0 = self.curadd
faddr, fattr = self.field(addr0)
if faddr == addr0: # if on field attribute
return False
if fattr & 0x20: # if protected
return False
addr1 = (addr0 - 1) % self.buffer_size # left
if faddr == addr1: # left is field attribute
return False
self.curadd = addr1
self.key_delete(zti=zti)
return True
    def key_backtab(self, zti=None):
        """Process backtab key.

        Move the cursor back to the start of the previous unprotected
        input field, or to address 0 when the screen has no fields or
        no unprotected field is found after a full wrap.
        """
        self.__log_debug(" backtab")
        addr = self.curadd
        faddr, fav = self.field(addr)
        if faddr < 0:
            self.curadd = 0
            return

        buffer_size = self.buffer_size
        addrm1 = (addr-1) % buffer_size
        if faddr in (addr, addrm1):
            # on (or just after) an attribute: skip to previous field
            addr = (faddr-1) % buffer_size
            faddr, fav = self.field(addr)

        fa1 = faddr  # remember where we started to detect full wrap
        plane_fa = self.plane_fa
        field = self.field
        while True:
            if not fav & 0x20:  # if unprotected
                addr = (faddr+1) % buffer_size
                fav = plane_fa[addr]
                # only land when position after attribute is data
                if fav == 0:
                    self.curadd = addr
                    return

            faddr = (faddr-1) % buffer_size
            faddr, fav = field(faddr)
            if faddr == fa1:  # wrapped all the way around
                self.curadd = 0
                return
def key_curdown(self, zti=None):
"""Process cursor down key.
"""
self.__log_debug(" curdown")
addr = self.curadd
addr = (addr + self.maxcol) % self.buffer_size
self.curadd = addr
def key_curleft(self, zti=None):
"""Process cursor left key.
"""
self.__log_debug(" curleft")
addr = self.curadd
addr = (addr - 1) % self.buffer_size
self.curadd = addr
def key_curright(self, zti=None):
"""Process cursor right key.
"""
self.__log_debug(" curright")
addr = self.curadd
addr = (addr+1) % self.buffer_size
self.curadd = addr
def key_curup(self, zti=None):
"""Process cursor up key.
"""
self.__log_debug(" curup")
addr = self.curadd
addr = (addr - self.maxcol) % self.buffer_size
self.curadd = addr
    def key_data(self, text, onerow=False, zti=None):
        """Process keyboard character data.

        Encode ``text`` with the registered codecs (trying each in
        codec_info order) and type the resulting bytes at the cursor.
        With onerow=True, stop when the cursor leaves the starting
        row.  Returns the number of characters consumed.
        """
        start = 0
        strlen = len(text)
        ypos = self.curadd // self.maxcol
        while start < strlen:
            elist = []
            for cii, codec in self.codec_info.items():
                try:
                    bstr, consumed = codec.encode(text[start:])
                except UnicodeEncodeError as exc:
                    elist.append(exc)
                    bstr = None
                    # partial encode up to the first bad character
                    if exc.start:
                        end = start + exc.start
                        bstr, consumed = codec.encode(text[start:end])

                if bstr:
                    self.__key_bytes(bstr, cii, onerow, zti)
                    break

            else:
                # no codec could encode anything at this position
                self.__log_error("Could not encode: %r", elist)
                raise elist[0]

            start += consumed
            if onerow and self.curadd // self.maxcol != ypos:
                break

        return start
    def key_delete(self, zti=None):
        """Process delete key.

        Delete the character under the cursor, shifting the rest of
        the field one position left in every plane and setting the
        field MDT.  Returns True when a character was deleted.
        """
        self.__log_debug(" delete")
        addr0 = self.curadd
        faddr, fattr = self.field(addr0)
        if faddr == addr0:  # if on field attribute
            return False

        if fattr & 0x20:  # if protected field
            return False

        if faddr < 0:  # if no fields
            addr3 = addr0
        else:  # else faddr is field address
            addr3, _ = self.next_field(addr0)

        # NOTE(review): when there are no fields, faddr is -1 and the
        # next statement writes the LAST byte of plane_fa -- confirm
        # this is intended.
        self.plane_fa[faddr] = bit6(fattr | 1)  # Set MDT
        self.__log_debug(" delete %d %d %d", faddr, addr0, addr3)
        buffer_size = self.buffer_size
        addr1 = (addr0+1) % buffer_size  # address of source for copy
        addr2 = (addr3-1) % buffer_size  # address of last char in field
        if addr1 != addr3:
            # shift the remainder of the field one position left,
            # in every plane (data, hilite, charset, fg, bg)
            self.ucba(self.plane_dc, addr0,
                      self.rcba(self.plane_dc, addr1, addr3))
            self.ucba(self.plane_eh, addr0,
                      self.rcba(self.plane_eh, addr1, addr3))
            self.ucba(self.plane_cs, addr0,
                      self.rcba(self.plane_cs, addr1, addr3))
            self.ucba(self.plane_fg, addr0,
                      self.rcba(self.plane_fg, addr1, addr3))
            self.ucba(self.plane_bg, addr0,
                      self.rcba(self.plane_bg, addr1, addr3))

        # vacate the last position of the field
        self.plane_dc[addr2] = 0
        self.plane_eh[addr2] = 0
        self.plane_cs[addr2] = 0
        self.plane_fg[addr2] = 0
        self.plane_bg[addr2] = 0
        if zti:
            zti.write(self, faddr, addr0, addr3)

        return True
    def key_end(self):
        """Process End key - go to end of text in field.

        End is not a 3270 key, but common on modern keyboard.

        Cursor will move onto a null character in the current field
        where the character to the left is
        not a null character and all characters to the right
        are null characters.

        If cursor is in an unprotected field and the field does
        not end in a null character, the cursor will be put on
        the last character in the field.

        If cursor is in a protected field and the field does not
        end in a null character, the cursor will be put on the
        field attribute of the next field.
        """
        self.__log_debug(" key_end")
        caddr = self.curadd
        faddr, fattr = self.field(caddr)
        if faddr == -1:  # no fields
            return

        buffer_size = self.buffer_size
        faddr1 = (faddr+1) % buffer_size  # first data position
        eaddr, _ = self.next_field(caddr)
        if faddr1 == eaddr:  # 0-length field
            return

        field_dc = self.rcba(self.plane_dc, faddr1, eaddr)
        # count of characters up to the trailing run of nulls
        offset = len(field_dc.rstrip(b"\x00"))
        caddr = (faddr1+offset) % buffer_size
        if caddr == eaddr and not self.is_protected_attr(fattr):
            # full unprotected field: land on its last character
            caddr = (caddr-1) % buffer_size

        self.curadd = caddr
    def key_eraseeof(self, zti=None):
        """Process eraseeof key.

        Erase from the cursor to the end of the current field and
        set the field MDT.  Returns True on success.
        """
        self.__log_debug(" eraseeof")
        addr0 = self.curadd
        faddr, fattr = self.field(addr0)
        if faddr == addr0:  # if on field attribute
            return False

        if fattr & 0x20:  # if protected field
            return False

        if faddr < 0:
            addr2 = addr0
        else:
            addr2, _ = self.next_field(addr0)

        fattr = bit6(fattr | 1)  # Set MDT (Modified Data Tag)
        # NOTE(review): when there are no fields, faddr is -1 and the
        # next statement writes the LAST byte of plane_fa -- confirm.
        self.plane_fa[faddr] = fattr
        self.__erase(addr0, addr2)
        if zti:
            zti.write(self, faddr, addr0, addr2)

        return True
    def key_eraseinput(self, zti=None):
        """Process ERASE INPUT key.

        Erase input (presumably all unprotected fields -- helper is
        defined outside this view), reset every MDT, then home the
        cursor.
        """
        self.__erase_input(0, 0, zti=zti)
        self._reset_mdt()
        self.key_home(zti=zti)
def key_home(self, zti=None):
"""Process home key.
"""
if self.is_protected(0):
curadd = self.__tab(0)
else:
curadd = 0
self.__log_debug(" home -> %r", curadd)
self.curadd = curadd
    def key_ins_data(self, text, zti=None):
        """Process keyboard character data in insert mode.

        Insert ``text`` at the cursor, shifting existing field data
        right into trailing nulls/spaces.  Text is truncated to the
        insert room available.  Returns the number of characters
        inserted.
        """
        self.__log_debug(" key_ins_data")
        if self.pwait:
            raise TnzError("PWAIT Input Inhibit")

        if self.system_lock_wait:
            raise TnzError("System Lock Input Inhibit")

        addr0 = self.curadd
        faddr, fattr = self.field(addr0)
        if faddr == addr0:  # if on field attribute
            return 0

        if fattr & 0x20:  # if protected field
            return 0

        buffer_size = self.buffer_size
        plane_dc = self.plane_dc
        plane_eh = self.plane_eh
        plane_cs = self.plane_cs
        plane_fg = self.plane_fg
        plane_bg = self.plane_bg
        if faddr < 0:
            addr2 = addr0
            datalen = buffer_size
        else:
            addr2, _ = self.next_field(addr0)
            if addr0 < addr2:
                datalen = addr2 - addr0
            else:
                datalen = buffer_size - addr0 + addr2

        # cannot insert more than fits in the field
        if datalen < len(text):
            text = text[:datalen]

        # count trailing nulls/spaces = available insert room
        inslen = 0
        i = (addr2 - 1) % buffer_size
        while inslen < len(text):
            dc_byte = plane_dc[i]
            if dc_byte not in (0, 0x40):  # not 0 or space
                break

            inslen += 1
            i = (i-1) % buffer_size

        if inslen <= 0:
            return 0

        text = text[:inslen]
        # copy existing data to the right
        addr1 = (addr0+inslen) % buffer_size  # copy target address
        addr3 = (i+1) % buffer_size  # copy source end address
        ucba = self.ucba
        rcba = self.rcba
        ucba(plane_dc, addr1, rcba(plane_dc, addr0, addr3))
        ucba(plane_eh, addr1, rcba(plane_eh, addr0, addr3))
        ucba(plane_cs, addr1, rcba(plane_cs, addr0, addr3))
        ucba(plane_fg, addr1, rcba(plane_fg, addr0, addr3))
        ucba(plane_bg, addr1, rcba(plane_bg, addr0, addr3))
        # type the new characters into the vacated positions
        self.key_data(text)
        if zti:
            zti.write(self, faddr, addr0, addr2)
            zti.rewrite_cursor = True

        return len(text)
def key_newline(self, zti=None):
"""Process newline key.
"""
self.__log_debug(" newline")
addr0 = self.curadd
line = addr0 // self.maxcol # current line
addr1 = (line+1) * self.maxcol # first col in next row
buffer_size = self.buffer_size
if self.field(0) == (-1, 0): # if no fields
self.curadd = addr1 % buffer_size
else:
addr1 = (addr1 - 1) % buffer_size # last col current row
self.curadd = addr1
self.key_tab()
def key_tab(self, zti=None):
"""Process tab key.
"""
curadd = self.curadd
self.curadd = self.__tab(curadd)
self.__log_debug(" tab @ %r -> %r", curadd, self.curadd)
def key_word_left(self):
"""Move cursor left to the previous word.
"""
self.__log_debug(" key_word_left")
addr1 = (self.curadd-1) % self.buffer_size
text = self.scrstr(addr1, addr1)
mat = re.search(r"(?<=\s)\S(?=[\S]*[\s]*\Z)", text)
if mat:
self.curadd = (addr1+mat.start()) % self.buffer_size
def key_word_right(self):
"""Move cursor right to the next word.
"""
self.__log_debug(" key_word_right")
caddr = self.curadd
text = self.scrstr(caddr, caddr)
mat = re.search(r"(?<=\s)\S", text)
if mat:
self.curadd = (caddr+mat.start()) % self.buffer_size
    def next_data(self, saddr, eaddr=None):
        """Get the address range of the next field data.

        Returns (start, end) of the next run of data (non-attribute)
        positions at or after ``saddr``, wrapping through the buffer
        and stopping at ``eaddr``.  Returns (-1, -1) with no fields.

        NOTE(review): __pat0s is compiled outside this view; usage
        here implies it matches runs of zero bytes in plane_fa.
        """
        self.__check_address(saddr)
        if eaddr is None:
            eaddr = saddr
        else:
            self.__check_address(eaddr)

        if saddr < eaddr:
            # simple, non-wrapping case
            mat = self.__pat0s.search(self.plane_fa, saddr, eaddr)
            if mat:
                return mat.start(), mat.end()

            return -1, -1  # no fields

        pat0s = self.__pat0s
        plane_fa = self.plane_fa
        buffer_size = self.buffer_size
        mat = pat0s.search(plane_fa, saddr, buffer_size)
        if mat:
            start = mat.start()
            end = mat.end() % buffer_size
            if eaddr and not end:
                # run reached the buffer end; join with a run that
                # continues from address 0, if any
                mat = pat0s.match(plane_fa, 0, eaddr)
                if mat:
                    return start, mat.end()

            return start, end

        if eaddr:
            mat = pat0s.search(plane_fa, 0, eaddr)
            if mat:
                return mat.start(), mat.end()

        return -1, -1  # no fields
def next_field(self, saddr, eaddr=None, offset=1):
"""Get the address and attribute value of the next field.
Get the address and attribute value associated with the next
field relative to the input field address. The first location
for searching is the address AFTER the input saddr. If
eaddr is specified.
"""
self.__check_address(saddr)
buffer_size = self.buffer_size
saddr = (saddr + offset) % buffer_size
if eaddr is None:
eaddr = saddr
else:
self.__check_address(eaddr)
if saddr < eaddr:
mat = self.__patn0.search(self.plane_fa, saddr, eaddr)
else:
search = self.__patn0.search
plane_fa = self.plane_fa
mat = search(plane_fa, saddr, buffer_size)
if not mat and eaddr:
mat = search(plane_fa, 0, eaddr)
if not mat:
return -1, 0 # no fields
return mat.start(), mat[0][0]
    # --- Program attention (PA) keys -------------------------------
    # Each transmits its AID via key_aid; PA AIDs cause a short read
    # (see send_aid: 0x6b..0x6f is "PAx or CLEAR").
    def pa1(self):
        """Send PA1
        """
        return self.key_aid(0x6c)  # AID_PA1

    def pa2(self):
        """Send PA2 (CNCL)
        """
        return self.key_aid(0x6e)  # AID_PA2 (CNCL)

    def pa3(self):
        """Send PA3
        """
        return self.key_aid(0x6b)  # AID_PA3
def paste_data(self, data, zti=None):
"""Process pasted character data.
"""
if self.pwait:
raise TnzError("PWAIT Input Inhibit")
if self.system_lock_wait:
raise TnzError("System Lock Input Inhibit")
ca0 = self.curadd
ypos, xpos = divmod(ca0, self.maxcol)
chars_pasted = 0
datal = data.splitlines()
for lined in datal:
if lined:
self.set_cursor_position(ypos+1, xpos+1)
rrv = self.key_data(lined, onerow=True, zti=zti)
if rrv == 0:
break
chars_pasted += rrv
ypos += 1
if ypos >= self.maxrow:
break
self.curadd = ca0
return chars_pasted
    # --- Program function (PF) keys --------------------------------
    # pf1..pf24 each transmit the corresponding PF AID via key_aid.
    def pf1(self):
        """Send PF1
        """
        return self.key_aid(0xf1)  # AID_PF1

    def pf2(self):
        """Send PF2
        """
        return self.key_aid(0xf2)  # AID_PF2

    def pf3(self):
        """Send PF3
        """
        return self.key_aid(0xf3)  # AID_PF3

    def pf4(self):
        """Send PF4
        """
        return self.key_aid(0xf4)  # AID_PF4

    def pf5(self):
        """Send PF5
        """
        return self.key_aid(0xf5)  # AID_PF5

    def pf6(self):
        """Send PF6
        """
        return self.key_aid(0xf6)  # AID_PF6

    def pf7(self):
        """Send PF7
        """
        return self.key_aid(0xf7)  # AID_PF7

    def pf8(self):
        """Send PF8
        """
        return self.key_aid(0xf8)  # AID_PF8

    def pf9(self):
        """Send PF9
        """
        return self.key_aid(0xf9)  # AID_PF9

    def pf10(self):
        """Send PF10
        """
        return self.key_aid(0x7a)  # AID_PF10

    def pf11(self):
        """Send PF11
        """
        return self.key_aid(0x7b)  # AID_PF11

    def pf12(self):
        """Send PF12
        """
        return self.key_aid(0x7c)  # AID_PF12

    def pf13(self):
        """Send PF13
        """
        return self.key_aid(0xc1)  # AID_PF13

    def pf14(self):
        """Send PF14
        """
        return self.key_aid(0xc2)  # AID_PF14

    def pf15(self):
        """Send PF15
        """
        return self.key_aid(0xc3)  # AID_PF15

    def pf16(self):
        """Send PF16
        """
        return self.key_aid(0xc4)  # AID_PF16

    def pf17(self):
        """Send PF17
        """
        return self.key_aid(0xc5)  # AID_PF17

    def pf18(self):
        """Send PF18
        """
        return self.key_aid(0xc6)  # AID_PF18

    def pf19(self):
        """Send PF19
        """
        return self.key_aid(0xc7)  # AID_PF19

    def pf20(self):
        """Send PF20
        """
        return self.key_aid(0xc8)  # AID_PF20

    def pf21(self):
        """Send PF21
        """
        return self.key_aid(0xc9)  # AID_PF21

    def pf22(self):
        """Send PF22
        """
        return self.key_aid(0x4a)  # AID_PF22

    def pf23(self):
        """Send PF23
        """
        return self.key_aid(0x4b)  # AID_PF23

    def pf24(self):
        """Send PF24
        """
        return self.key_aid(0x4c)  # AID_PF24
    def put_file(self, filename, parms, assist=True):
        """Put host file from local file.

        filename: local file to read
        parms: IND$FILE PUT parameter string (host file + options)
        assist: when True, intercept ASCII CRLF options and do the
            codepage translation locally (see comment below)

        Returns the final transfer status message (also logged).
        """
        self.__log_info("put_file(%r, %r, assist=%r)",
                        filename, parms, assist)
        from . import rexx
        encoding = None
        if assist:
            # Seems that only reasonable way to transfer
            # a chararacter oriented in a host record/line
            # oriented "file" is to transfer it to the
            # host as ASCII. AND line separators MUST be
            # CRLF. Well, not quite. It seems like the host
            # translation table does strange things - like
            # translate 7c (ascii '|') to 6a (ebcdic 'B&'),
            # which is incorrect since the 'B&' character
            # is unicode 00A6. It SHOULD translate 7c to
            # 4f. To avoid bad translations, we will
            # transfer as BINARY CRLF - __next_get will
            # do the ascii->ebcdic translation. The strange
            # thing is that the CRLF still needs to be in
            # ascii. The CR is not a problem since it is
            # the same in both ascii and ebcdic. The LF
            # should NOT be a problem since ebcdic 0A is RPT.
            # What sort of file would have a RPT???
            parmsu = parms.upper()
            preopt = rexx.subword(parms, 1, 1)
            opts = rexx.subword(parmsu, 2).split()
            paren = False
            # the "(" marking the options may be its own word or
            # glued to the front of the 2nd or 3rd word
            if "(" in opts:
                i = opts.index("(")
                paren = True
                preopt = rexx.subword(parms, 1, i+1)
                opts = opts[i+1:]
            elif len(opts) > 1 and opts[1].startswith("("):
                opts[1] = opts[1][1:]
                paren = True
                preopt = rexx.subword(parms, 1, 2)
                opts = opts[1:]
            elif len(opts) > 2 and opts[2].startswith("("):
                opts[2] = opts[2][1:]
                paren = True
                preopt = rexx.subword(parms, 1, 3)
                opts = opts[2:]

            if paren and opts[-1].endswith(")"):
                opts[-1] = opts[-1][:-1].strip()
                if not opts[-1]:
                    opts.pop()

            if ("ASCII" in opts) and ("CRLF" in opts):
                # strip ASCII: do the translation locally instead
                encoding = self.encoding
                parms = [preopt]
                if paren:
                    parms.append("(")

                opts.remove("ASCII")
                parms.extend(opts)
                parms = " ".join(parms)

        self.__log_debug("parms=%r", parms)
        self.__ddmmsg = None
        try:
            self.__indsenc = encoding
            if encoding:
                mode = "r"
            else:
                mode = "rb"

            self.__indspend = b""
            with open(filename, mode) as self.__indsfile:
                # do not enter IND$FILE until file is open
                # (avoids an inconsistent state if error opening file)
                self.__log_debug("entering IND$FILE PUT %s", parms)
                self.enter("IND$FILE PUT "+parms)
                while (not self.__ddmmsg and not self.seslost):
                    self.wait(3)  # query reply, command acknowledge

        finally:
            self.__log_debug("clearing __indsfile")
            self.__indsfile = None

        self.__log_info("put_file: %s", self.__ddmmsg)
        return self.__ddmmsg
    def scrstr(self, saddr=0, eaddr=0, rstrip=None):
        """Return a string representation of the character buffer.

        Decodes plane_dc to text, honoring the per-position
        character set in plane_cs.  By default (no addresses) the
        whole screen is returned with each row right-stripped and
        joined by newlines; otherwise the raw decoded vector.
        """
        if rstrip is None:
            rstrip = (not saddr and not eaddr)

        rcba = self.rcba
        plane_dc = self.plane_dc
        plane_cs = self.plane_cs
        codec_info = self.codec_info
        trans_dc_to_c = self.__trans_dc_to_c
        strl = []
        addr0 = saddr
        # decode each run that uses a single character set
        for addr1 in self.__iterbs_addr(plane_cs, saddr, eaddr):
            bytes1 = rcba(plane_dc, addr0, addr1)
            bytes1 = bytes1.translate(trans_dc_to_c)
            cii = plane_cs[addr0]
            strl.append(codec_info[cii].decode(bytes1)[0])
            addr0 = addr1

        str1 = "".join(strl).translate(self.__trans_ords)
        if not rstrip:
            return str1

        # full-screen form: strip each row and join with newlines
        maxcol = self.maxcol
        strl = []
        for i in range(0, self.buffer_size, maxcol):
            strl.append(str1[i:i+maxcol].rstrip())

        strl.append("")
        return "\n".join(strl)
def send(self, data=None):
"""
Send input byte array as data to the host. This method will
escape IAC bytes.
"""
if data:
data = data.replace(b"\xff", b"\xff\xff") # IAC -> IAC IAC
self._sendbuf.append(data)
transport = self._transport
if not transport:
self._log_warn("transport not available yet")
return
if transport.is_closing():
self._log_warn("transport is closing")
return
data = b"".join(self._sendbuf)
transport.write(data)
self.bytes_sent += len(data)
self._sendbuf.clear()
def send_3270_data(self, value):
"""
Send input byte array as a 3270-DATA record to the host.
This method will escape IAC bytes and send EOR after the
data.
"""
append = self._sendbuf.append
value = value.replace(b"\xff", b"\xff\xff") # IAC -> IAC IAC
if self.__tn3270e:
append(bytes(5)) # 3270-DATA TN3270E Header
append(value)
append(b"\xff\xef") # IAC EOR
self.send()
    def send_aid(self, aid, short=None):
        """
        Process the sending of an AID (enter command). Used by
        methods with the name of the AID (e.g. enter).

        A "short" read (PAx/CLEAR by default) sends only the AID
        byte; otherwise the cursor address and every modified field
        (MDT bit on) are included, formatted according to the
        current reply mode (Field, Extended Field, or Character).
        """
        self.__log_debug("o>> send_aid 0x%02x", aid)
        rec = bytes([aid])
        gotcmd = False
        reply_mode = self.__reply_mode
        reply_cattrs = self.__reply_cattrs
        if self.inpid:
            raise TnzError(f"PID={self.inpid} not implemented")
        if self.__indstr:
            # discard any accumulated SPECIAL (file transfer) text
            self.__log_debug("Removing SPECIAL: %s", self.__indstr)
            self.__indstr = ""
        if short is None:
            short = 0x6b <= aid <= 0x6f  # PAx or CLEAR
        if short:
            self.aid = aid
            self.send_3270_data(rec)
            return
        baddr = self.address_bytes(self.curadd)
        self.__log_debug(" cursor %r", self.curadd)
        rec += baddr
        for (sa1, ea1) in self.char_addrs():
            fattr = self.plane_fa[sa1-1]
            if fattr & 1 == 0:  # if MDT is off
                continue
            if not gotcmd:
                # remember the first visible modified field as the
                # "last command" for convenience/debugging
                if self.is_displayable_attr(fattr):
                    gotcmd = True
                    self.lastcmd = self.scrstr(sa1, ea1).strip()
            rec += b"\x11"  # SBA (Set Buffer Address)
            baddr = self.address_bytes(sa1)
            self.__log_debug(" SBA(x11) %r", sa1)
            rec += baddr
            blst = []
            append = blst.append
            if reply_mode in (0x00, 0x01):  # [Extended] Field mode
                self.__append_char_bytes(blst, sa1, ea1)
            elif reply_mode == 2:  # Character mode
                # TODO following needs to NOT append null characters
                # (nor their attributes)
                eh_attr = 0
                fg_attr = 0
                bg_attr = 0
                for sa2, ea2 in self.group_addrs(sa1, ea1):
                    eh1 = eh_attr
                    fg1 = fg_attr
                    bg1 = bg_attr
                    if b"\x41" in reply_cattrs:
                        eh1 = self.plane_eh[sa2]
                    if b"\x42" in reply_cattrs:
                        fg1 = self.plane_fg[sa2]
                    if b"\x45" in reply_cattrs:
                        bg1 = self.plane_bg[sa2]
                    # emit SA (Set Attribute) orders only on change
                    if eh1 != eh_attr:
                        append(bytes([0x28, 0x41, eh1]))  # SA 41 eh
                    if fg1 != fg_attr:
                        append(bytes([0x28, 0x42, fg1]))  # SA 42 fg
                    if bg1 != bg_attr:
                        append(bytes([0x28, 0x45, bg1]))  # SA 45 bg
                    self.__append_char_bytes(blst, sa2, ea2)
            else:
                raise TnzError(f"bad reply mode {reply_mode}")
            data = b"".join(blst)
            data = data.replace(b"\x00", b"")  # remove nulls
            if len(data) != 0:
                self.__log_debug(" AID: %d byte(s) of data @ %r",
                                 len(data), sa1)
                rec += data
        self.aid = aid
        self.send_3270_data(rec)
def send_command(self, code):
"""Send telnet command to the host.
Send a single-byte telnet command to the remote.
Supported commands:
NAME CODE MEANING
NOP 241 No operation.
Data Mark 242 The data stream portion of a Synch.
Break 243 NVT character BRK.
Interrupt Process 244 The function IP.
Abort output 245 The function AO.
Are You There 246 The function AYT.
Erase character 247 The function EC.
Erase Line 248 The function EL.
Go ahead 249 The GA signal.
See other methods for other commands:
NAME METHOD
EOR send_rec()
SE send_sub()
SB send_sub()
WILL send_will()
WON'T send_wont()
DO send_do()
DON'T send_dont()
"""
if not 241 <= code <= 249:
raise TnzError(f"Telnet command {code} not valid")
self.__log_info("o>> IAC %d", code)
self._sendbuf.append(bytes([0xff, code])) # IAC code
self.send()
def send_do(self, opt, buffer=False):
"""Send DO to the host.
"""
if opt == 0: # TRANSMIT-BINARY
self._binary_remote = True
elif opt == 25: # END-OF-RECORD
self.__eor = True
if opt not in self.local_do:
self.local_do.append(opt)
if opt in self.local_dont:
self.local_dont.remove(opt)
self.__log_info("o>> IAC DO %s", self.__tnon(opt))
self._sendbuf.append(bytes([0xff, 0xfd, opt])) # IAC DO opt
if not buffer:
self.send()
def send_dont(self, opt, buffer=False):
"""Send DON'T to the host.
"""
if opt == 0:
self._binary_remote = False
elif opt == 25: # END-OF-RECORD
self.__eor = False
if opt not in self.local_dont:
self.local_dont.append(opt)
if opt in self.local_do:
self.local_do.remove(opt)
self.__log_info("o>> IAC DONT %s", self.__tnon(opt))
self._sendbuf.append(bytes([0xff, 0xfe, opt]))
if not buffer:
self.send()
def send_rec(self, value):
"""
Send input byte array as a record to the host. This method
will escape IAC bytes and send EOR after the data.
"""
value = value.replace(b"\xff", b"\xff\xff") # IAC -> IAC IAC
append = self._sendbuf.append
append(value)
append(b"\xff\xef") # IAC EOR
self.send()
def send_sub(self, value, buffer=False):
"""
Send input subcommand data to the host. This method will
bookend the data with IAC SB adn IAC SE.
"""
value = value.replace(b"\xff", b"\xff\xff") # IAC -> IAC IAC
append = self._sendbuf.append
append(b"\xff\xfa") # IAC SB
append(value)
append(b"\xff\xf0") # IAC SE
if not buffer:
self.send()
def send_terminal_type(self, buffer=False):
"""Send the terminal type to the host.
"""
self.__log_info("o>> TERMINAL-TYPE IS %s",
self.terminal_type)
data = b"".join([b"\x18", # TERMINAL-TYPE
b"\x00", # IS
self.terminal_type.encode("ascii")])
return self.send_sub(data, buffer=buffer)
def send_will(self, opt, buffer=False):
"""Send WILL to the host.
"""
if opt not in self.local_will:
self.local_will.append(opt)
if opt in self.local_wont:
self.local_wont.remove(opt)
self.__log_info("o>> IAC WILL %s", self.__tnon(opt))
self._sendbuf.append(bytes([0xff, 0xfb, opt])) # IAC WILL opt
if not buffer:
self.send()
def send_wont(self, opt, buffer=False):
"""Send WON'T to the host.
"""
if opt not in self.local_wont:
self.local_wont.append(opt)
if opt in self.local_will:
self.local_will.remove(opt)
self.__log_info("o>> IAC WONT %s", self.__tnon(opt))
self._sendbuf.append(bytes([0xff, 0xfc, opt]))
if not buffer:
self.send()
    def set_cursor_address(self, address):
        """Set the cursor address to the input address.

        The address is validated first so curadd is never left
        holding an out-of-range value.
        """
        self.__check_address(address)
        self.curadd = address
def set_cursor_position(self, row, col):
"""Set the cursor address from the input row and column.
First (row, col) is (1, 1).
"""
maxrow = self.maxrow
if not 0 < row <= maxrow:
raise ValueError(f"{row} not in range 0-{maxrow}")
maxcol = self.maxcol
if not 0 < col <= maxcol:
raise ValueError(f"{col} not in range 0-{maxcol}")
self.curadd = ((row-1)*maxcol) + col - 1
def shutdown(self):
"""Shut down the connection.
"""
task = self.__connect_task
if task:
task.cancel()
loop = self.__get_event_loop()
if not loop.is_running():
# skip if ANY loop is running?
loop.run_until_complete(task)
transport = self._transport
if transport:
self._transport = None
# any way to handle need_shutdown?
transport.abort()
    def start_readlines(self):
        """Initialize readlines.

        Resets the collected-lines list and the row scan position,
        then performs an initial scan of the current screen.
        """
        self.readlines = []
        self.__readlines_row = 0
        self.__readlines_check()
    def upload(self, file):
        """Upload data from file-like object to remote server.

        A DDM upload must already be in progress (the host opened
        the transfer) and no other file may be attached.  Reads the
        first chunk from the file and answers the host's pending
        Get either with data or with a Get-Past-End-of-File error.

        Raises ValueError when no upload transfer is in progress or
        a file is already attached.
        """
        if not self.__ddmopen:
            raise ValueError("file transfer not in progress")
        if not self.__ddmupload:
            raise ValueError("download in progress")
        if self.__indsfile is not None:
            raise ValueError("file transfer in progress")
        self.__indsfile = file
        self.__next_get()
        data = self.__indsisf
        rec = b"\x88"  # SF (Structured Field AID)
        if not data:
            # nothing to send: report end-of-file to the host
            isf = b"\xd0\x46\x08"  # D04608 Get Error
            isf += b"\x69\x04"  # Error Code Header
            isf += b"\x22\x00"  # Error Code Get Past End of File
            isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
            rec += isf
            self.__log_debug("DDM Get Past End Of File send")
            self.__inds_rm = None
        else:
            self.__log_debug("DDM Data for Get send")
            self.__log_debug("SF: %r", data)
            rec += data
            # remember the record in case the host asks for a resend
            self.__inds_rm = rec
        self.send_3270_data(rec)
        if data:
            # read BEFORE next ddm get
            # so file data transfer is in parallel with
            # network data transfer.
            self.__next_get()
def uploading(self):
"""Return bool indicating if currently uploading.
"""
if not self.__indstemp:
return False
return self.__indsfile
    def wait(self, timeout=None, zti=None, key=None):
        """Wait for event.

        Returns after timeout seconds or when data is received.
        Returns True when the session is lost or on a received
        event; returns False when the timeout expired first.
        Raises TnzError if a wait is already in progress.
        """
        self.__log_debug("tnz.wait(%r, %r, %r)", timeout, zti, key)
        loop = self.__get_event_loop()
        wait_event = _wait_event
        if not wait_event and self.__connect_task:
            # give the connect task a chance to create the event
            loop.stop()
            loop.run_forever()
            wait_event = _wait_event
        if not wait_event:
            self.__log_error("nothing to wait on")
            return True
        if self.seslost:
            # session already lost: do not block
            self.__log_debug("tnz.wait setting timeout=0")
            timeout = 0
        if self.__waiting:
            raise TnzError("Already waiting")
        event_wait = wait_event.wait()
        if timeout is None:
            timeout_handle = None
        else:
            def callback():
                # timer fired before any event: record False result
                if not wait_event.is_set():
                    self.__wait_rv = False
                    wait_event.set()
            timeout_handle = loop.call_later(timeout, callback)
        try:
            self.__waiting = True
            if zti:
                self.__zti = zti
                self.ddmrecv = zti.ddmrecv  # host-initiated get
                self.ddmsend = zti.ddmsend  # host-initiated put
            self.__loop.run_until_complete(event_wait)
            if self.seslost:
                return True
            return self.__wait_rv
        finally:
            # always restore wait state, even on exception
            wait_event.clear()
            self.__wait_rv = None
            self.__waiting = False
            if zti:
                self.__zti = None
                self.ddmrecv = False
                self.ddmsend = False
            if timeout_handle:
                timeout_handle.cancel()
    def word_at(self, address):
        """Return the word at the input address.

        The word is the run of non-blank characters (within the
        field containing the address) that covers the address.
        Returns "" when the address is on a field attribute or on a
        blank.
        """
        fa1, _ = self.field(address)
        if fa1 == address:
            # address is the field attribute itself
            return ""
        if fa1 > 0:
            buffer_size = self.buffer_size
            fa1 = (fa1+1) % buffer_size  # first character after attr
            fa2, _ = self.next_field(fa1)
            if fa1 < address:
                pos = address - fa1
            else:
                # field wraps around the end of the buffer
                pos = address + buffer_size - fa1
        else:
            # unformatted screen: treat whole buffer as one field
            fa1 = 0
            fa2 = 0
            pos = address
        scr = self.scrstr(fa1, fa2)
        idx1 = scr.find(" ", pos)
        if idx1 == pos:
            # address itself is a blank
            return ""
        if idx1 < 0:
            idx1 = len(scr)
        idx2 = scr.rfind(" ", 0, pos)
        if idx2 < 0:
            idx2 = 0
        else:
            idx2 += 1
        return scr[idx2:idx1]
# Internal methods
def _data_received(self, buff):
zti = self.__zti
bcnt = len(buff)
cmd = []
buff = self.__work_buffer + buff[:bcnt]
bcnt += len(self.__work_buffer)
self.__work_buffer = b""
b_start = 0
b_start_data = 0
b_last = bcnt - 1
while b_start < bcnt:
pos_iac = buff.find(0xff, b_start, bcnt) # find IAC
if pos_iac < 0: # IAC not found
self.__log_info("RECV: IAC not found")
if self.__eor:
self.__pndrec += buff[b_start_data:bcnt]
self.__log_debug("RECV: %d bytes pending",
len(self.__pndrec))
else:
self.__log_error("Unexpected data: %r",
buff[b_start, pos_iac])
b_start = bcnt
b_start_data = bcnt
break
if pos_iac >= b_last:
# what follows IAC?
break
byte2 = buff[pos_iac+1]
if byte2 == 239: # if EOR (end of record for tn3270)
data = self.__pndrec + buff[b_start_data:pos_iac]
self.__pndrec = b""
data = data.replace(b"\xff\xff", b"\xff") # 0xff=IAC
self.__rec.append(data)
b_start = pos_iac + 2
b_start_data = b_start
continue
if byte2 == 0xff: # escaped 0xff (IAC) part of data
b_start = pos_iac + 2
continue
if pos_iac != b_start_data: # some data before IAC
self._log_warn("RECV: Data interrupted by command")
if self.__eor:
self.__pndrec += buff[b_start_data:pos_iac]
self._log_warn("RECV: %d bytes pending",
len(self.__pndrec))
else:
self.__log_error("Unexpected data: %r",
buff[b_start_data, pos_iac])
if 251 <= byte2 <= 254: # //WILL or WON'T or DO or DON'T
cmd_end = pos_iac + 3
if cmd_end > bcnt:
self._log_warn("RECV: Command %02x incomplete",
byte2)
b_start_data = bcnt
break
cmd.append(buff[pos_iac:cmd_end])
b_start = pos_iac + 3
b_start_data = b_start
continue
if byte2 == 250: # if SB (subcommand begin)
pos_se = buff.find(b"\xff\xf0", # IAC SE
pos_iac+2, bcnt)
if pos_se < 0: # if subcommand end not found
self._log_warn("RECV: Subcommand %r incomplete",
buff[pos_iac:bcnt])
b_start_data = bcnt
break
cmd.append(buff[b_start:pos_se])
b_start = pos_se+2
b_start_data = b_start
continue
# single-byte command (IAC + one byte)
b_start = pos_iac + 2
b_start_data = b_start
cmd.append(buff[pos_iac:b_start])
if b_start < bcnt:
rcnt = bcnt - b_start # remaining bytes
self._log_warn("RECV: %d byte(s) not processed", rcnt)
self.__work_buffer = buff[b_start:bcnt]
for cmd1 in cmd:
self._process(cmd1)
rec = self.__rec
if self.__waiting and rec:
self.__wait_rv = True
_wait_event.set()
while rec:
rec_bytes = rec.pop(0)
try:
self._proc3270ds(rec_bytes, zti=zti)
except TnzError:
self.__logger.exception("3270 command/order error")
self.__log_error("Record: %s", rec_bytes.hex())
self.seslost = sys.exc_info()
if zti:
raise
import traceback
traceback.print_exc()
break
return bcnt
    def _log_warn(self, *args, **kwargs):
        """Log a WARNING-level message through the instance logger."""
        return self.__log(logging.WARN, *args, **kwargs)
    def _proc3270ds(self, b_str, zti=None):
        """Process input byte array as a 3270 data stream.

        In TN3270E mode the first 5 bytes are the TN3270E header
        (data-type, request-flag, response-flag, sequence number);
        only DATA-TYPE 3270-DATA is implemented.  The first data
        byte selects the 3270 command handler
        (_process_command_0x<code>).  When the host asked for a
        response (response-flag 2), a positive TN3270E RESPONSE is
        sent back.
        """
        if not b_str:
            return

        data_type = 0
        # request_flag = 0
        response_flag = 0
        seq_number = 0
        if self.__tn3270e:
            header = b_str[:5]
            b_str = b_str[5:]
            self.__log_debug("TN3270E Header: %r", header)
            data_type = header[0]
            # request_flag = header[1]
            response_flag = header[2]
            seq_number = int.from_bytes(header[3:5], "big")

        if data_type == 0:  # 3270-DATA
            pass
        elif data_type == 1:  # SCS-DATA
            raise TnzError("DATA-TYPE SCS-DATA not implemented")
        elif data_type == 2:  # RESPONSE
            raise TnzError("DATA-TYPE RESPONSE not implemented")
        elif data_type == 3:  # BIND-IMAGE
            raise TnzError("DATA-TYPE BIND-IMAGE not implemented")
        elif data_type == 4:  # UNBIND
            raise TnzError("DATA-TYPE UNDINB not implemented")
        elif data_type == 5:  # NVT-DATA
            raise TnzError("DATA-TYPE NVT-DATA not implemented")
        elif data_type == 6:  # REQUEST
            raise TnzError("DATA-TYPE REQUEST not implemented")
        elif data_type == 7:  # SSCP-LU-DATA
            raise TnzError("DATA-TYPE SSCP-LU-DATA not implemented")
        else:
            raise TnzError(f"DATA-TYPE {data_type} not implemented")

        # dispatch on the 3270 command byte
        command_hex = hex(b_str[0])
        rtn_name = "_process_command_" + command_hex
        rtn = getattr(self, rtn_name, self._process_command_unknown)
        rtn(b_str, 0, len(b_str), zti=zti)

        if response_flag == 2:
            rsp = (b"\x02\x00" +  # DATA-TYPE=RESPONSE REQUEST-FLAG=0
                   b"\x00" +  # success (use x01 for error)
                   seq_number.to_bytes(2, byteorder="big") +
                   b"\x00")  # successful (Device End)
            self.__log_debug("Sending TN3270E response: %r", rsp)
            self.send_rec(rsp)
def _process(self, data):
"""Process host data.
"""
if data[:2] == b"\xff\xfd": # IAC DO
self.__log_info("i<< IAC DO %s", self.__tnon(data[2]))
opt = data[2]
if data[2] == 40: # TN3270E
if self.use_tn3270e:
self.send_will(40, buffer=True)
else:
self.send_wont(40, buffer=True)
elif data[2] == 0: # TRANSMIT-BINARY
if opt not in self.local_will:
self.send_will(0, buffer=True)
elif data[2] == 24: # TERMINAL-TYPE
if opt not in self.local_will:
self.send_will(24, buffer=True)
elif data[2] == 25: # END-OF-RECORD
if opt not in self.local_will:
self.send_will(25, buffer=True)
if opt not in self.local_do:
self.send_do(25, buffer=True)
elif data[2] == 46: # START_TLS
if not hasattr(self.__loop, "start_tls"):
self._log_warn("START_TLS unsupported.")
self._log_warn("Python >= 3.7 required")
self.send_wont(data[2], buffer=True)
else:
if opt not in self.local_will:
self.send_will(data[2], buffer=True)
self.__log_info("o>> START_TLS FOLLOWS")
self.send_sub(b"\x2e\x01", buffer=True)
else:
# e.g. Timing mark (6)
self.send_wont(data[2], buffer=True)
if opt not in self.remote_do:
self.remote_do.append(opt)
if opt in self.remote_dont:
self.remote_dont.remove(opt)
elif data[:2] == b"\xff\xfe": # //IAC DON'T
self.__log_info("i<< IAC DONT "+self.__tnon(data[2]))
opt = data[2]
if opt not in self.remote_dont:
self.remote_dont.append(opt)
if opt in self.remote_do:
self.remote_do.remove(opt)
if opt not in (0, 25): # END-OF_RECORD nor TRANSMIT-BINARY
if opt not in self.local_wont:
self.send_wont(data[2], buffer=True)
elif data[:2] == b"\xff\xfb": # IAC WILL
self.__log_info("i<< IAC WILL %s", self.__tnon(data[2]))
# requesting permission
# or confirming request
if data[2] == 0 and not self._binary_remote:
# requesting permission
# confirm/grant permission
self.send_do(0, buffer=True)
opt = data[2]
if opt not in self.remote_will:
self.remote_will.append(opt)
if opt in self.remote_wont:
self.remote_wont.remove(opt)
elif data[:2] == b"\xff\xfc": # //IAC WON'T
self.__log_info("i<< IAC WONT %s", self.__tnon(data[2]))
opt = data[2]
if opt not in self.remote_wont:
self.remote_wont.append(opt)
if opt in self.remote_will:
self.remote_will.remove(opt)
elif data[:2] == b"\xff\xef": # IAC EOR
pass
elif data == b"\xff\xfa\x28\x08\x02": # IAC SB ...
self.__log_info("i<< TN3270E SEND DEVICE-TYPE")
rsp = b"\x28\x02\x07"+self.terminal_type.encode("ascii")
if self.lu_name:
self.__log_info("o>> %s %s %s %s %s %s",
"TN3270E", "DEVICE-TYPE", "REQUEST",
self.terminal_type,
"CONNECT", self.lu_name)
rsp += b"\x01"+self.lu_name.encode("ascii")
else:
self.__log_info("o>> %s %s %s %s",
"TN3270E", "DEVICE-TYPE", "REQUEST",
self.terminal_type)
self.send_sub(rsp)
elif data[:5] == b"\xff\xfa\x28\x02\x04": # IAC SB ...
i = data.find(b"\x01") # find CONNECT
if i < 0:
device_type = data[5:].decode("ascii")
device_name = ""
else:
device_type = data[5:i].decode("ascii")
device_name = data[i+1:].decode("ascii")
device_name = " CONNECT "+device_name
self.__log_info("i<<" +
" TN3270E" + # x28
" DEVICE-TYPE" + # x02
" IS %s%s", # x04
device_type, device_name)
funb = b""
funb = b"\x02"
funl = []
for fun in funb:
if fun == 0:
funl.append("BIND-IMAGE")
elif fun == 1:
funl.append("DATA-STREAM-CTL")
elif fun == 2:
funl.append("RESPONSES")
elif fun == 3:
funl.append("SCS-CTL-CODES")
elif fun == 4:
funl.append("SYSREQ")
else:
funl.append(repr(fun))
self.__log_info("o>>" +
" TN3270E" + # x28
" FUNCTIONS" + # x03
" REQUEST %r", # x07
funl)
self.send_sub(b"\x28\x03\x07"+funb)
self._binary_local = True
self._binary_remote = True
self.__eor = True
self.__tn3270e = True
elif data[:5] == b"\xff\xfa\x28\x03\x04": # IAC SB ...
funl = []
for fun in data[5:]:
if fun == 0:
funl.append("BIND-IMAGE")
elif fun == 1:
funl.append("DATA-STREAM-CTL")
elif fun == 2:
funl.append("RESPONSES")
elif fun == 3:
funl.append("SCS-CTL-CODES")
elif fun == 4:
funl.append("SYSREQ")
else:
funl.append(repr(fun))
self.__log_info("i<<" +
" TN3270E" + # x28
" FUNCTIONS" + # x03
" IS %r", # x04
funl)
elif data == b"\xff\xfa\x18\x01": # IAC SB TERMINAL-TYPE SEND
self.__log_info("i<< TERMINAL-TYPE SEND")
self.send_terminal_type(buffer=True)
elif data == b"\xff\xfa\x2e\x01": # IAC SB ...
self.__log_info("i<< START_TLS FOLLOWS")
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
coro = self.__start_tls(context)
task = self.__loop.create_task(coro)
self.__connect_task = task
elif data[0] == 255 and 241 <= data[1] <= 249: # IAC command
self.__log_info("i<< IAC %d", data[1])
else:
self._log_warn("i<< UNKNOWN! %s", data.hex())
self.send() # any any buffered data
    def _process_cmnd_0xf1(self, b_str, start, stop, pid, zti=None):
        """
        Process WSF Outbound 3270DS W (Write) partition command.
        Input is Outbound 3270DS structured field. Called by
        process_wsf_0x40.
        """
        # skip the 4-byte structured-field prefix, then treat as W
        self._process_w(b_str, start+4, stop, pid, zti=zti)
    def _process_cmnd_0xf5(self, b_str, start, stop, pid, zti=None):
        """
        Process WSF Outbound 3270DS EW (Erase/Write) partition
        command. Input is Outbound 3270DS structured field. Called
        by process_wsf_0x40.

        Raises TnzError for a non-zero partition id (unsupported).
        """
        if pid:
            raise TnzError("Non-zero PID not implemented")

        # skip the 4-byte structured-field prefix, then treat as EW
        self._process_ew(b_str, start+4, stop, zti=zti)
    def _process_cmnd_0x7e(self, b_str, start, stop, pid, zti=None):
        """
        Process WSF Outbound 3270DS EWA (Erase/Write Alternate)
        partition command. Input is Outbound 3270DS structured field.
        Called by process_wsf_0x40.

        Raises TnzError for a non-zero partition id (unsupported).
        """
        if pid:
            raise TnzError("Non-zero PID not implemented")

        # skip the 4-byte structured-field prefix, then treat as EWA
        self._process_ewa(b_str, start+4, stop, zti=zti)
def _process_cmnd_0x6f(self, _, start, stop, pid, zti=None):
"""
Process WSF Outbound 3270DS EAU (Erase All Unprotected)
partition command. Input is Outbound 3270DS structured field.
Called by process_wsf_0x40.
"""
cmnd_len = stop - start
if cmnd_len != 5:
raise TnzError("EAU must be 5 bytes, got {cmnd_len}")
self._process_eau(pid=pid, zti=zti)
    def _process_cmnd_unknown(self, b_str, start, stop, pid, zti=None):
        """Raise TnzError for an unrecognized WSF partition command."""
        raise TnzError("Unknown CMND: "+hex(b_str[start+4]))
def _process_command_0x6e(self, _, start, stop, zti=None):
"""
Process RMA (Read Modified All) 3270 Data Stream Command.
Input is 3270 data stream data.
"""
cmnd_len = stop - start
if cmnd_len != 1:
raise TnzError("RMA must be 1 byte, got {cmnd_len}")
self.__log_debug("Read Modified All (RMA)")
self.read_state = self.__ReadState.NORMAL # Normal Read state
self.send_aid(self.aid, short=False)
    def _process_command_0xf1(self, b_str, start, stop, zti=None):
        """
        Process W (Write) 3270 Data Stream Command. Input is 3270
        data stream data.
        """
        return self._process_w(b_str, start, stop, zti=zti)
def _process_command_0xf2(self, _, start, stop, zti=None):
"""
Process RB (Read Buffer) 3270 Data Stream Command. Input is
3270 data stream data.
"""
cmnd_len = stop - start
if cmnd_len != 1:
raise TnzError("RB must be 1 byte, got {cmnd_len}")
self.read_state = self.__ReadState.NORMAL # Normal Read state
self.__read_buffer() # send reply
def _process_command_0xf3(self, b_str, start, stop, zti=None):
"""
Process WSF (Write Structured Field) 3270 Data Stream
Command. Input is 3270 data stream data.
"""
self.__log_debug("WSF")
cmnd_len = stop - start
if cmnd_len < 4:
raise TnzError("WSF needs 4 bytes, got {cmnd_len}")
from_bytes = int.from_bytes
i = start + 1
while i < stop:
sfl = from_bytes(b_str[i:(i+2)], "big") # length
if sfl == 0:
sfl = stop - i
if sfl < 3:
raise TnzError(f"Bad structured field length: {sfl}")
if i + sfl > stop:
self.__log_error("sfl=%s", sfl)
self.__log_error("sf=%s", b_str[i:stop].hex())
raise TnzError("WSF len and data inconsistent")
wsf_hex = hex(b_str[i+2])
rtn_name = "_process_wsf_" + wsf_hex
rtn = getattr(self, rtn_name, self._process_wsf_unknown)
rtn(b_str, i, i+sfl, zti=zti)
i += sfl
    def _process_command_0xf5(self, b_str, start, stop, zti=None):
        """
        Process EW (Erase/Write) 3270 Data Stream Command. Input is
        3270 data stream data.
        """
        return self._process_ew(b_str, start, stop, zti=zti)
def _process_command_0xf6(self, _, start, stop, zti=None):
"""
Process RM (Read Modified) 3270 Data Stream Command. Input is
3270 data stream data.
"""
self.__log_debug("Read Modified (RM)")
command_len = stop - start
if command_len != 1:
raise TnzError("RM must be 1 byte, got {command_len}")
# TODO: if read_state is Retry Enter, do retry of INOP
# TODO: if read_state is Retry Read, do retry of read
if self.__inds_rm and self.__indsfile:
# When using IND$FILE PUT in TSO, there have
# been occurrances where a RM is requested
# during the upload. This appears to be a
# request to resend the last DDM DATA-FOR-GET.
# Why are we forced to send the data twice?
# Seems to occur only for larger amounts of
# data. Perhaps the host did not have a large
# enough buffer and it needs to be sent again
# once the larger buffer is obtained?
self.send_3270_data(self.__inds_rm)
else:
self.send_aid(self.aid)
def _process_command_0x6f(self, b_str, start, stop, zti=None):
"""
Process EAU (Erase All Unprotected) 3270 Data Stream Command.
Input is 3270 data stream data.
"""
command_len = stop - start
if command_len != 1:
raise TnzError("EAU must be 1 byte, got {command_len}")
return self._process_eau(pid=0, zti=zti)
    def _process_command_0x7e(self, b_str, start, stop, zti=None):
        """
        Process EWA (Erase/Write Alternate) 3270 Data Stream Command.
        Input is 3270 data stream data.
        """
        return self._process_ewa(b_str, start, stop, zti=zti)
_process_command_0x1 = _process_command_0xf1 # W
_process_command_0x2 = _process_command_0xf2 # RB
_process_command_0x5 = _process_command_0xf5 # EW
_process_command_0x6 = _process_command_0xf6 # RM
_process_command_0xd = _process_command_0x7e # EWA
_process_command_0xf = _process_command_0x6f # EAU
_process_command_0x11 = _process_command_0xf3 # WSF
    def _process_command_unknown(self, b_str, start, stop, zti=None):
        """Handle an unrecognized 3270 command byte.

        Logs the raw record in hex and as best-effort decoded text,
        then raises TnzError.
        """
        command_hex = hex(b_str[start])
        self.__log_error(b_str[start:stop].hex())
        self.__log_error(self.codec_info[0].
                         decode(b_str[start:stop], errors="ignore")[0])
        raise TnzError(f"Unknown command: {command_hex}")
    def _process_data(self, data, begidx, endidx, zti=None):
        """Process host character data.

        Writes data[begidx:endidx] into the character plane at the
        current buffer address, clearing field attributes and
        applying the currently-processed extended highlighting,
        character set, and color attributes to the same range, then
        advances the buffer address (with wrap).

        Raises TypeError for non-bytes input and ValueError for an
        empty range.
        """
        if not isinstance(data, bytes):
            raise TypeError("input data must be bytes")

        datalen = endidx - begidx
        if datalen <= 0:
            # or better to just return?
            raise ValueError("no input data to process")

        if self.plane_fa[self.bufadd-1] == 124:
            # field attribute 124 marks SPECIAL (file transfer) data;
            # accumulate its decoded text separately
            spc = self.codec_info[0].decode(data[begidx:endidx])[0]
            self.__log_debug("SPECIAL DATA: %s", spc)
            self.__indstr += spc

        row = self.bufadd // self.maxcol
        if row < self.__readlines_row:
            # data written above the readlines scan point: rescan
            self.__readlines_row = 0

        saddr = self.bufadd
        if zti:
            zti.write_data_prep(self, saddr, datalen)

        # character data plus per-character attribute planes
        self.ucba(self.plane_dc, saddr, data, begidx, endidx)
        self.ucba(self.plane_fa, saddr, b"\x00"*datalen)
        self.ucba(self.plane_eh, saddr, [self.__proc_eh]*datalen)
        self.ucba(self.plane_cs, saddr, [self.__proc_cs]*datalen)
        self.ucba(self.plane_fg, saddr, [self.__proc_fg]*datalen)
        self.ucba(self.plane_bg, saddr, [self.__proc_bg]*datalen)
        oldadd = self.bufadd
        self.bufadd = (self.bufadd + datalen) % self.buffer_size
        self.__log_debug(" %d byte(s) of data @ %r -> %r",
                         datalen, oldadd, self.bufadd)
        if zti:
            # Use force=True to indicate that the data that was just
            # updated may have removed a field attribute. It also
            # indicates that the update did not change the cursor.
            zti.write_data(self, saddr, datalen, force=True)
def _process_ddm_0xd00012(self, b_str, start, stop, zti=None):
"""Process DDM Open request
Called by process_wsf_0xd0.
"""
ddmupload = (b_str[start+14] == 1)
if ddmupload:
self.__log_debug("DDM Open Request (Upload)")
else:
self.__log_debug("DDM Open Request (Download)")
ddm_len = stop - start
if ddm_len < 35:
raise TnzError("DDM-Open needs 35 bytes, got {ddm_len}")
# b_str[start:stop]
# 5:11 = 010601010403
# Fixed Parameter Changes File, etc.
# 11:21 = 0A0A0000000011010100
# Functions Required Sequential Insert
# 21:26 = 50055203F0
# Data Not Compressed
# 26:28 = 0309
# File Nm. Header w/length
# 26:30 = 08062704
# Record Size Header w/length
# 28:35 = "FT:DATA"
# File Name for File Xfr. Data
# 30:32 = llll
# Size (LIMIN-17)
# 32:34 = 0309
# File Nm. Header w/length
# 34:41 = "FT:DATA"
# File Name for File Xfr. Data
if b_str[start+26] == 3: # no size
ft_bytes = b_str[start+28:(start+35)]
elif b_str[start+26] == 8: # has size
ft_bytes = b_str[start+34:(start+41)]
else: # ?
# Open Failed Exception
rec = b"\x88" # SF (Structured Field AID)
isf = b"\xd0\x00\x08" # D00008 Open Error
isf += b"\x69\x04" # Error Code Header
isf += b"\x01\x00" # Open Failed Exception
isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
rec += isf
self.__log_debug("DDM Open Failed Exception send")
self.send_3270_data(rec)
self.__log_error("DDM Open unexpected")
return
ft_str = ft_bytes.decode("iso8859-1", errors="replace")
self.__log_debug("ft: %s", ft_str)
rec = b"\x88" # SF (Structured Field AID)
isf = b"\xd0\x00\x09" # D00009 Open Acknowledgement
isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
rec += isf
indstr = self.__indstr
if not self.__indsfile and indstr:
self.__indstr = ""
try:
indsdict = json.loads(indstr)
except ValueError:
self.__log_error("Not valid json: %r", indstr)
else:
self.__log_debug("indsdict: %r", indsdict)
filename = indsdict.get("filename", None)
enc = indsdict.get("encoding", None)
cmd = indsdict.get("command", None)
mode = "r" if ddmupload else "w"
if enc == "binary":
mode += "b"
enc = None
elif not enc:
enc = self.encoding
if ddmupload and cmd and filename:
os.system(cmd)
try:
if ddmupload and cmd and not filename:
self.__indsfile = os.popen(cmd)
elif filename:
self.__indsfile = open(filename, mode)
except OSError:
self.__indstemp = False
# Open Request Error
rec = b"\x88" # SF (Structured Field AID)
isf = b"\xd0\x00\x08" # D00008 Open Error
isf += b"\x69\x04" # Error Code Header
isf += b"\x01\x00" # Open Failed Exception
isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
rec += isf
self.__log_debug("DDM Open Failed Exception send")
self.send_3270_data(rec)
self.__log_error("sf[14]=%r", b_str[start+14])
self.__log_error("DDM Open File failed")
return
except BaseException:
self.__indstemp = False
self.__logger.exception("open file error")
else:
self.__indstemp = True
self.__indspend = b""
self.__indsenc = enc
self.__indsdict = {} if ddmupload else indsdict
if (not self.__indsfile and
((not ddmupload and not self.ddmrecv) or
(ddmupload and not self.ddmsend))): # unexpected
# Open Request Error
rec = b"\x88" # SF (Structured Field AID)
isf = b"\xd0\x00\x08" # D00008 Open Error
isf += b"\x69\x04" # Error Code Header
isf += b"\x01\x00" # Open Failed Exception
isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
rec += isf
self.__log_debug("DDM Open Failed Exception send")
self.send_3270_data(rec)
self.__log_error("sf[14]=%r", b_str[start+14])
self.__log_error("DDM Open unexpected")
return
self.__log_debug("DDM Open Ack send")
self.send_3270_data(rec) # send ddm open acknowledgement
oldupload = self.__ddmupload
self.__ddmupload = ddmupload
self.__ddmdata = (ft_str == "FT:DATA")
self.__ddmascii = (ft_str != "FT:DATA")
self.__ddmopen = True
self.__ddmrecnum = 0
self.__inds_rm = None
if self.__ddmfile and oldupload == ddmupload:
self.__ddmfile = None
if zti: # zti for user interaction
zti.rewrite_status = True
if not self.__indsfile:
if self.__ddmfile and ddmupload:
self.__indstemp = True
self.__indsenc = None
# same temp file back to remote
self.__ddmfile.start_upload()
self.__indsfile = self.__ddmfile
self.__ddmfile = None
elif not ddmupload:
self.__indstemp = True
self.__indsenc = None
self.__indsfile = zti.Download(zti, self)
if ddmupload and self.__indsfile:
# Sequential Get: x"0A0A0001000000000100"
# read BEFORE first ddm get
# so file data transfer is in parallel with
# network data transfer.
if self.__indsfile:
self.__indsisf = b""
self.__next_get()
else:
self.__indsisf = None
if not self.__indsfile and self.file_appends:
self.__indsenc = None
self.__indsfile = self.file_appends
    def _process_ddm_0xd04511(self, b_str, start, stop, zti=None):
        """Process DDM Set Cursor request
        Called by process_wsf_0xd0.

        Replies with a Set Cursor syntax error when no transfer is
        open, and logs (without failing) when the request does not
        carry the expected Rel-Pos./Next parameters.
        """
        self.__log_debug("DDM Set Cursor Request")
        if self.__ddmopen and self.__indsfile is None:
            return  # upload file must be provided by user

        if not self.__indsfile or not self.__ddmopen:
            # Set Cursor Error
            rec = b"\x88"  # SF (Structured Field AID)
            isf = b"\xd0\x45\x08"  # D04508 Set Cursor Error
            isf += b"\x69\x04"  # Error Code Header
            isf += b"\x60\x00"  # Command Syntax Error
            isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
            rec += isf
            self.__log_debug("DDM Set Cursor Syntax Error send")
            self.send_3270_data(rec)
            self.__log_error("DDM Set Cursor unexpected")
            return

        if b_str[(start+5):(start+10)] != b"\x01\x05\x00\x06\x00":
            self.__log_error("Not 'Rel-Pos.' for Set Cursor")
            self.__log_error("sf=%r", b_str[start:stop])

        if b_str[(start+10):(start+15)] != b"\x09\x05\x01\x03\x00":
            self.__log_error("Not 'Next' for Set Cursor")
            self.__log_error("sf=%r", b_str[start:stop])
def _process_ddm_0xd04611(self, b_str, start, stop, zti=None):
"""Process DDM Get request
Called by process_wsf_0xd0.
"""
self.__log_debug("DDM Get Request")
if self.__ddmopen and self.__indsfile is None:
return # upload file must be provided by user
if not self.__indsfile or not self.__ddmopen:
# Get Request Error
rec = b"\x88" # SF (Structured Field AID)
isf = b"\xd0\x46\x08" # D04608 Get Request Error
isf += b"\x69\x04" # Error Code Header
isf += b"\x60\x00" # Command Syntax Error
isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
rec += isf
self.__log_debug("DDM Get Syntax Error send")
self.send_3270_data(rec)
self.__log_error("DDM Get unexpected")
return
if b_str[(start+5):(start+16)] != b"\x01\x04\x00\x80":
self.__log_error("No Feedback Requested for Get")
self.__log_error("sf=%r", b_str[start:stop])
data = self.__indsisf
rec = b"\x88" # SF (Structured Field AID)
if not data:
isf = b"\xd0\x46\x08" # D04608 Get Error
isf += b"\x69\x04" # Error Code Header
isf += b"\x22\x00" # Error Code Get Past End of File
isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
rec += isf
self.__log_debug("DDM Get Past End Of File send")
self.__inds_rm = None
else:
self.__log_debug("DDM Data for Get send")
self.__log_debug("SF: %r", data)
rec += data
self.__inds_rm = rec
self.send_3270_data(rec)
if zti:
zti.rewrite_status = True
if data:
# read BEFORE next ddm get
# so file data transfer is in parallel with
# network data transfer.
self.__next_get()
def _process_ddm_0xd04711(self, _, __, ___, zti=None):
"""Process DDM Insert request
Called by process_wsf_0xd0.
"""
# b_str[i1:i2]
# 5:10 = 0105008000
# Feedback Requested
self.__log_debug("DDM Insert Request")
if not self.__ddmopen:
# Insert Request Error
rec = b"\x88" # SF (Structured Field AID)
isf = b"\xd0\x47\x08" # D04708 Insert Error
isf += b"\x69\x04" # Error Code Header
isf += b"\x60\x00" # Command Syntax Error
isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
rec += isf
self.__log_debug("DDM Insert Syntax Error send")
self.send_3270_data(rec)
self.__log_error("DDM insert unexpected")
def _process_ddm_0xd04704(self, b_str, start, stop, zti=None):
"""Process DDM Data To Insert request
Called by process_wsf_0xd0.
"""
self.__log_debug("DDM Data To Insert")
ddm_len = stop - start
if ddm_len < 11:
raise TnzError("DDM-Open needs 11 bytes, got {ddm_len}")
if not self.__ddmopen:
# Insert Request Error
rec = b"\x88" # SF (Structured Field AID)
isf = b"\xd0\x47\x08" # D04708 Insert Error
isf += b"\x69\x04" # Error Code Header
isf += b"\x60\x00" # Command Syntax Error
isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
rec += isf
self.__log_debug("DDM Data to Insert Syntax Error send")
self.send_3270_data(rec)
self.__log_error("DDM Data To Insert unexpected")
return
# b_str[start:stop]
# 5:7 = C080
# Data Not Compressed
# 7:8 = 61
# Begin Data Code
# 8:10 = dddd
# Data Length plus 5
# 10: = data
datalen = int.from_bytes(b_str[(start+8):(start+10)], "big")
if datalen <= 5:
# seems like this may happen when the host
# does not like the limin or limout value
raise TnzError("DDM data length is bad")
datalen -= 5
data = b_str[(start+10):(start+10+datalen)]
if len(data) != datalen:
raise TnzError("DDM data length is inconsistent")
self.__log_debug("DDM Inserting %d byte(s)", len(data))
if self.__ddmascii:
data_str = data.decode("iso8859-1", errors="ignore")
else:
data_str = self.codec_info[0].decode(data,
errors="ignore")[0]
if not self.__ddmdata: # DDM MSG (not DATA)
self.__log_debug("DDM MSG: %r", data_str)
self.__ddmmsg = data_str
self.__ddmrecnum += 1
rec = b"\x88" # SF (Structured Field AID)
isf = b"\xd0\x47\x05" # D04705 Data Acknowledgement
isf += b"\x63\x06" # record number header
isf += self.__ddmrecnum.to_bytes(4, byteorder="big")
isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
rec += isf
self.__log_debug("DDM Data Ack send")
self.send_3270_data(rec)
if self.__ddmdata: # DDM DATA (not MSG)
# do file data transfer in parallel with
# network data transfer.
if self.__indsenc: # if decoding needed
if data.endswith(b"\x1a"):
data = data[:-1]
# IND$FILE uses CRLF to mark record
# boundries. To be consistent with
# universal newlines, we want to convert
# CRLF to LF. There is also the
# complexity of translating data that
# is EBCDIC except for the ascii CRLF
# bytes marking the records. And is it
# possible that CRLF could be split
# across two data blocks? And consider
# that CR is the same byte value in both
# ascii and EBCDIC. And consider that
# EBCDIC translation often equates
# EBCDIC NL with unicode LF.
data = data.replace(b"\n", b"")
data = data.decode(self.__indsenc)
data = data.replace("\r", "\n")
if self.__indsfile: # if have file for saving
self.__log_debug("ddm writing file")
self.__indsfile.write(data)
self.__log_debug("ddm wrote file")
if zti:
zti.rewrite_status = True
if isinstance(data, bytes) and (not self.__indsfile or
self.__indstemp):
# unsolicited
if self.__ddmrecnum == 1:
self.__ddmtdat = ""
data2 = data[:2]
elif self.__ddmtdat:
self._log_warn("clearing ddmtdat for 2nd rec")
else: # DDM MSG (not DATA)
self.__ddmopen = False
self.__log_debug("DDM closed (got MSG)")
if self.__indstemp:
self.__indsfile.close()
self.__ddmfile = self.__indsfile
self.__indsfile = None
self.__indstemp = False
indsdict = self.__indsdict
if indsdict:
self.__indsdict = None
cmd = indsdict.get("command", None)
nowait = indsdict.get("async", None)
try:
if cmd and not nowait:
os.system(cmd)
elif cmd and nowait:
import subprocess
subprocess.Popen(cmd,
stdin=None,
stdout=None,
stderr=None,
close_fds=True)
except BaseException:
self.__logger.exception("command error")
else:
if self.__indsfile is self.file_appends:
self.__indsfile.flush()
else:
self.__indsfile.close()
self.__indsfile = None
self.ddmdata = self.__ddmtdat
self.__ddmtdat = ""
def _process_ddm_0xd04112(self, _, __, ___, zti=None):
"""Process DDM Close request
Called by process_wsf_0xd0.
"""
self.__log_debug("DDM Close Request")
# send close reply/acknowledgement
rec = b"\x88" # SF (Structured Field AID)
isf = b"\xd0\x41\x09" # D04109 Close Acknowledgement
isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
rec += isf
self.__log_debug("DDM Close Ack send")
self.send_3270_data(rec)
def _process_ddm_unknown(self, b_str, start, stop, zti=None):
ddm_req = b_str[(start+2):min((start+5), stop)]
raise TnzError(f"Bad DDM request: {ddm_req}")
    def _process_eau(self, pid=0, zti=None):
        """Perform host-initiated EAU (Erase All Unprotected)

        Clears every unprotected position, resets all MDT bits,
        moves the cursor home, and restores the keyboard.
        pid: partition identifier; only partition 0 is supported.
        """
        self.__log_debug("Erase All Unprotected (EAU)")
        if pid:
            raise TnzError("Non-zero PID not implemented")
        self.__erase_input(0, 0, zti=zti)  # 0,0 = whole buffer
        self._reset_mdt()
        self.key_home(zti=zti)
        self._restore_keyboard(zti=zti)
    def _process_ew(self, b_str, start, stop, zti=None):
        """Perform host-initiated EW (Erase/Write)

        Erase/reset to the default screen size, process the
        orders/data that follow the WCC, then process the WCC
        itself (alarm, keyboard restore).
        """
        if stop - start <= 1:  # if no WCC
            return
        self.__log_debug("Erase/Write (EW) 0x%02x", b_str[start])
        self.lastcmd = ""
        self.__erase_reset(ipz=False, zti=zti)  # default screen size
        self._process_orders_data(b_str, start+2, stop, zti=zti)
        self._process_wcc(b_str[start+1], zti=zti)
        self.updated = True
        self.__readlines_row = 0  # readlines capture restarts at top
        self.__readlines_check()
    def _process_ewa(self, b_str, start, stop, zti=None):
        """Perform host-initiated EWA (Erase/Write Alternate)

        Like EW (_process_ew) but the erase/reset uses the
        alternate screen size (ipz=True).
        """
        if stop - start <= 1:  # if no WCC
            return
        self.__log_debug("Erase/Write Alternate (EWA) 0x%02x",
                         b_str[start])
        self.lastcmd = ""
        self.__erase_reset(ipz=True, zti=zti)  # alternate screen size
        self._process_orders_data(b_str, start+2, stop, zti=zti)
        self._process_wcc(b_str[start+1], zti=zti)
        self.updated = True
        self.__readlines_row = 0  # readlines capture restarts at top
        self.__readlines_check()
def _process_order(self, order, start, stop, zti=None):
"""Process outbound data stream order.
Call the appropriate method to process the input order.
Args:
order (bytes): outbound data stream orders and data
start (int): index of first byte of order in input bytes
stop (int): stop index indicating end of orders and data
Returns:
The index after the last byte process by the order.
"""
rtn_name = "_process_order_" + hex(order[start])
rtn = getattr(self, rtn_name, self._process_order_unknown)
return rtn(order, start, stop, zti=zti)
    def _process_order_0x5(self, _, start, stop, zti=None):
        """Process Program Tab (PT) order.
        Called by process_order.

        Advances the buffer address to the next unprotected field.
        When the PT follows data from a preceding order (__pt_erase
        set in _process_orders_data) and the current position is not
        a field attribute, the remainder of the current field is
        erased first.
        """
        order_len = stop - start
        if order_len < 1:
            raise TnzError(f"PT requires 1 byte, got {order_len}")
        oldadd = self.bufadd
        if not self.plane_fa[oldadd] and self.__pt_erase:
            addr0, _ = self.next_field(oldadd, 0)
            if addr0 > 0:
                self.__pt_erase = False
            elif addr0 < 0:
                addr0 = 0  # no next field - erase to end of buffer
            self.__erase(oldadd, addr0)
            if zti:
                zti.write_data(self, oldadd, addr0)
        bufadd = self.__tab(oldadd, 0)
        self.bufadd = bufadd
        self.__log_debug("  Program Tab @ %r -> %r", oldadd, bufadd)
        return start + 1
def _process_order_0x8(self, order, start, stop, zti=None):
"""Process Graphic Escape (GE) order.
Called by process_order.
"""
order_len = stop - start
if order_len < 2:
raise TnzError(f"GE requires 2 bytes, got {order_len}")
self.__pt_erase = False
ge_byte = order[start+1]
addr1 = self.bufadd
self.__log_debug(" Graphic Escape 0x%02x @ %r", ge_byte, addr1)
if zti:
zti.write_data_prep(self, addr1, 1)
self.plane_dc[addr1] = ge_byte
self.plane_fa[addr1] = 0
self.plane_eh[addr1] = self.__proc_eh
self.plane_cs[addr1] = 1
self.plane_fg[addr1] = self.__proc_fg
self.plane_bg[addr1] = self.__proc_bg
self.bufadd = (addr1+1) % self.buffer_size
if zti:
# Use force=True to indicate that the data that was
# just updated may have removed a field attribute. It
# also indicates that the update did not change the
# cursor.
zti.write_data(self, addr1, 1, force=True)
return start+2
def _process_order_0x11(self, order, start, stop, zti=None):
"""Process Set Buffer Address (SBA) order.
Called by process_order.
"""
order_len = stop - start
if order_len < 3:
raise TnzError(f"SBA requires 3 bytes, got {order_len}")
self.__pt_erase = False
newaddr = self.address(order[(start+1):(start+3)])
self.__log_debug(" Set Buffer Address %r", newaddr)
self.__check_address(newaddr)
self.bufadd = newaddr
return start+3
def _process_order_0x12(self, order, start, stop, zti=None):
"""Process Erase Unprotected to Address (EUA) order.
Called by process_order.
"""
order_len = stop - start
if order_len < 3:
raise TnzError(f"EUA requires 3 bytes, got {order_len}")
self.__pt_erase = False
addr = self.address(order[(start+1):(start+3)])
self.__log_debug(" Erase Unprotected to Address (EUA) %r", addr)
self.__check_address(addr)
self.__erase_input(self.bufadd, addr, zti=zti)
self.bufadd = addr
return start+3
def _process_order_0x13(self, _, start, stop, zti=None):
"""Process Insert Cursor (IC) order.
Called by process_order.
"""
order_len = stop - start
if order_len < 1:
raise TnzError(f"IC requires 1 byte, got {order_len}")
self.__pt_erase = False
bufadd = self.bufadd
self.__log_debug(" Insert Cursor %r", bufadd)
self.curadd = bufadd
if zti:
zti.rewrite_cursor = True
return start+1
def _process_order_0x1d(self, order, start, stop, zti=None):
"""Process Start Field (SF) order.
Called by process_order.
"""
order_len = stop - start
if order_len < 1:
raise TnzError(f"SF requires 1 bytes, got {order_len}")
self.__pt_erase = False
fattr = order[start+1]
bufadd = self.bufadd
self.__log_debug(" Start Field Value=x%02x @ %r", fattr, bufadd)
self.plane_dc[bufadd] = 0
self.plane_fa[bufadd] = bit6(fattr)
self.plane_eh[bufadd] = 0
self.plane_cs[bufadd] = 0
self.plane_fg[bufadd] = 0
self.plane_bg[bufadd] = 0
self.bufadd = (bufadd+1) % self.buffer_size
if zti:
zti.field(self, bufadd)
return start+2
def _process_order_0x28(self, order, start, stop, zti=None):
"""Process Set Attribute (SA) order.
Called by process_order.
"""
order_len = stop - start
if order_len < 3:
raise TnzError(f"SA requires 3 bytes, got {order_len}")
self.__pt_erase = False
cat = order[start+1]
cav = order[start+2]
self.__log_debug(" Set Attribute Type=0x%x Value=0x%x",
cat, cav)
if cat == 0x00: # all character attributes
self.__proc_eh = 0 # extended highlighting
self.__proc_cs = 0 # character set
self.__proc_fg = 0 # foreground color
self.__proc_bg = 0 # background color
elif cat == 0x41: # extended highlighting
self.__proc_eh = cav
elif cat == 0x42: # foreground color
if not self.__extended_color_mode:
if zti:
zti.extended_color(self)
self.__extended_color_mode = True
self.__proc_fg = cav # foreground color
elif cat == 0x43: # character set
self.__proc_cs = cav
elif cat == 0x45: # background color
if not self.__extended_color_mode:
if zti:
zti.extended_color(self)
self.__extended_color_mode = True
self.__proc_bg = cav # background color
else:
raise TnzError(f"Bad character attribute type: {cat}")
return start+3
    def _process_order_0x29(self, order, start, stop, zti=None):
        """Process Start Field Extended (SFE) order.
        Called by process_order.

        Writes a default field attribute at the current buffer
        address, then applies the attribute type/value pairs that
        follow (decoded by __set_attributes, which returns the
        index after the pairs).
        """
        order_len = stop - start
        # NOTE(review): only the order byte itself is length-checked
        # here, while __set_attributes reads further bytes starting
        # at start+1 - confirm a truncated SFE cannot reach this.
        if order_len < 1:
            raise TnzError(f"SFE requires 1 byte, got {order_len}")
        self.__pt_erase = False
        bufadd = self.bufadd
        self.plane_dc[bufadd] = 0
        self.plane_fa[bufadd] = 0x40  # bit6(0) default
        self.plane_eh[bufadd] = 0
        self.plane_cs[bufadd] = 0
        self.plane_fg[bufadd] = 0
        self.plane_bg[bufadd] = 0
        start, pairs = self.__set_attributes(bufadd,
                                             order, start + 1, zti=zti)
        self.__log_debug("  Start Field Extended Value=%r @ %r",
                         pairs, bufadd)
        self.bufadd = (bufadd+1) % self.buffer_size
        if zti:
            zti.field(self, bufadd)
        return start
    def _process_order_0x2c(self, order, start, stop, zti=None):
        """Process Modify Field (MF) order.
        Called by process_order.

        Applies attribute type/value pairs to the existing field
        attribute at the current buffer address; the current
        position must be a field attribute.

        Raises:
            TnzTerminalError: current position is not a field
                attribute.
        """
        order_len = stop - start
        # NOTE(review): like SFE, only the order byte is
        # length-checked while __set_attributes reads more bytes at
        # start+1 - confirm a truncated MF cannot reach this.
        if order_len < 1:
            raise TnzError(f"MF requires 1 byte, got {order_len}")
        self.__pt_erase = False
        bufadd = self.bufadd
        if not self.plane_fa[bufadd]:
            raise TnzTerminalError(f"Not a field: {bufadd}")
        i, pairs = self.__set_attributes(bufadd,
                                         order, start + 1, zti=zti)
        self.__log_debug("  Modify Field=%r @ %r",
                         pairs, bufadd)
        self.bufadd = (bufadd+1) % self.buffer_size
        if zti:
            zti.field(self, bufadd)
        return i
    def _process_order_0x3c(self, order, start, stop, zti=None):
        """Process Repeat to Address (RA) order.
        Called by process_order.

        Fills the buffer from the current address up to (not
        including) the decoded stop address with one repeated
        character - optionally a GE/character-set-1 character -
        and the active character attributes. Equal addresses mean
        the whole buffer.
        """
        order_len = stop - start
        if order_len < 4:
            raise TnzError(f"RA requires 4 bytes, got {order_len}")
        self.__pt_erase = False
        stop_address = self.address(order[(start+1):(start+3)])
        cs_attr = self.__proc_cs
        getxt = ""
        return_value = start + 4
        data_byte = order[start+3]
        if data_byte == 0x08:  # if GE (Graphic Escape)
            # NOTE(review): the GE form is 5 bytes but only 4 were
            # checked above, so order[return_value] could read past
            # stop - confirm the host never sends a truncated RA.
            cs_attr = 1
            data_byte = order[return_value]
            getxt = "GE "
            return_value += 1
        self.__log_debug("  Repeat to Address %r, %s0x%02x",
                         stop_address, getxt, data_byte)
        self.__check_address(stop_address)
        bufadd = self.bufadd
        # repeat length; equal addresses fill the whole buffer
        if bufadd < stop_address:
            rlen = stop_address - bufadd
        elif stop_address < bufadd:
            rlen = stop_address + self.buffer_size - bufadd
        else:
            rlen = self.buffer_size
        if zti:
            zti.write_data_prep(self, bufadd, rlen)
        ucba = self.ucba
        ucba(self.plane_dc, bufadd, bytes([data_byte]*rlen))
        ucba(self.plane_fa, bufadd, b"\x00"*rlen)
        ucba(self.plane_eh, bufadd, [self.__proc_eh]*rlen)
        ucba(self.plane_cs, bufadd, bytes([cs_attr]*rlen))
        ucba(self.plane_fg, bufadd, [self.__proc_fg]*rlen)
        ucba(self.plane_bg, bufadd, [self.__proc_bg]*rlen)
        self.bufadd = stop_address
        if zti:
            # Use force=True to indicate that the data that was
            # just updated may have removed a field attribute. It
            # also indicates that the update did not change the
            # cursor.
            zti.write_data(self, bufadd, rlen, force=True)
        return return_value
def _process_order_unknown(self, order, start, stop, zti=None):
raise TnzError("Unknown order: "+hex(order[start]))
    def _process_orders_data(self, b_str, start, end, zti=None):
        """Process a byte array of a stream of orders and data.

        Resets the active character attributes, then alternates
        between runs of data characters (_process_data) and order
        bytes (dispatched through _process_order) until end is
        reached. __pt_erase is set after a data run so that a
        following Program Tab erases the rest of the field (see
        _process_order_0x5).
        """
        self.bufadd = self.curadd
        self.__proc_eh = 0
        self.__proc_cs = 0
        self.__proc_fg = 0
        self.__proc_bg = 0
        self.__pt_erase = False
        # hoist lookups used by the scan loop
        patord = self.__patord
        process_data = self._process_data
        process_order = self._process_order
        while start < end:
            pat = patord.search(b_str, start, end)  # next order byte
            if not pat:
                process_data(b_str, start, end, zti=zti)
                return
            ordidx = pat.start()
            if start < ordidx:
                process_data(b_str, start, ordidx, zti=zti)
                self.__pt_erase = True
            start = process_order(b_str, ordidx, end, zti=zti)
    def _process_w(self, b_str, start, stop, pid=0, zti=None):
        """Perform host-initiated W (Write)

        Processes the WCC twice: first with for_mdt=True for its
        reset-MDT bit (before the orders), then the orders/data,
        then the WCC again for its remaining functions (alarm,
        keyboard restore).
        pid: partition identifier; only partition 0 is supported.
        """
        if stop - start <= 1:  # if no WCC
            return
        self.__log_debug("Write (W) 0x%02x", b_str[start])
        if pid:
            raise TnzError("Non-zero PID not implemented")
        self._process_wcc(b_str[start+1], for_mdt=True)
        self._process_orders_data(b_str, start+2, stop, zti=zti)
        self._process_wcc(b_str[start+1], zti=zti)
        self.updated = True
        self.__readlines_check()
    def _process_wcc(self, wcc, for_mdt=False, zti=None):
        """Process a WCC (Write Control Character).

        wcc: the WCC byte value.
        for_mdt: when True, handle only the reset-MDT bit (bit 7);
            when False, handle the remaining WCC functions (reset
            partition, start printer, alarm, keyboard restore).
        """
        self.__log_debug("  WCC 0x%02x for_mdt=%r", wcc, for_mdt)
        if for_mdt:
            if wcc & 0x01:  # if bit 7 is 1
                self.__log_debug("  WCC reset modified data bit = 1")
                self._reset_mdt()
        else:
            if wcc & 0x40:  # if bit 1 is 1
                self._reset_partition()
            if wcc & 0x08:  # if bit 4 is 1
                self.__log_error("  Start printer not implemented.")
            if wcc & 0x04:  # if bit 5 is 1
                self.__log_info("<--- ALARM --->")
            if wcc & 0x02:  # if bit 6 is 1
                self.__log_debug("  WCC keyboard restore bit = 1")
                self._restore_keyboard(zti=zti)
    def _process_wsf_0x1(self, b_str, start, stop, zti=None):
        """Process Read Partition structured field.
        Called by process_command_0xf3. Input is outbound structured
        field data.

        Dispatches on the operation type byte:
            0x02 Query, 0x03 Query List -> query reply
            0x6e Read Modified All, 0xf2 Read Buffer,
            0xf6 Read Modified -> read-operation replies

        Raises:
            TnzTerminalError: query type with a pid other than
                0xff, or an unknown operation type.
        """
        self.__log_debug("Read Partition")
        pid = b_str[start+3]  # 00-7e (read ops) or ff (query ops)
        rp_type = b_str[start+4]
        if rp_type in (0x02, 0x03) and pid != 255:
            raise TnzTerminalError(f"pid={pid}, type={rp_type}")
        if self.read_state == self.__ReadState.RENTER:
            self.__log_error("Read Partition in Retry Enter state")
            # TODO: reject
        self.read_state = self.__ReadState.RREAD  # Retry Read state
        if rp_type == 0x02:  # Query
            self.inop = rp_type  # type of operation
            self.__query_reply()
        elif rp_type == 0x03:  # Query List
            self.inop = rp_type  # type of operation
            reqtype = b_str[start+5]
            qcode = b_str[start+6:stop]
            self.__query_reply(reqtype=reqtype, qcode=qcode)
        elif rp_type == 0x6e:  # Read Modified All (RMA)
            self.inpid = pid
            self.inop = rp_type  # type of operation
            self.send_aid(0x61, short=False)  # AID_READP
        elif rp_type == 0xf2:  # Read Buffer (RB)
            self.inpid = pid
            self.inop = rp_type  # type of operation
            self.aid = 0x61  # AID_READP
            self.__read_buffer()  # send reply
        elif rp_type == 0xf6:  # Read Modified (RM)
            self.inpid = pid
            self.inop = rp_type  # type of operation
            self.send_aid(0x61)  # AID_READP
        else:
            raise TnzTerminalError(f"Unknown type=0x{rp_type:02x}")
def _process_wsf_0x3(self, b_str, start, stop, zti=None):
"""Process Erase/Reset structured field.
Called by process_command_0xf3.
Input is outbound structured field data.
"""
self.__extended_color_mode = False
sf_len = stop - start
if sf_len < 4:
raise TnzError(f"Erase/Reset needs 4 bytes, got {sf_len}")
self.__erase_reset(ipz=bool(b_str[start+3] & 0x80), zti=zti)
self.updated = True
self.__readlines_row = 0
self.__readlines_check()
def _process_wsf_0x9(self, b_str, start, stop, zti=None):
"""Process Set Reply Mode structured field.
Called by process_command_0xf3.
Input is outbound structured field data.
"""
pid = b_str[start+3] # Partition identifier (OO through 7E)
if pid:
raise TnzError("Non-zero PID not implemented")
mode = b_str[start+4]
if mode <= 1: # Field or Extended Field
self.__reply_cattrs = b""
elif mode == 2: # Character mode
self.__reply_cattrs = b_str[start+5:stop]
else:
raise TnzError(f"Bad reply mode: {mode}")
self.__reply_mode = mode
def _process_wsf_0x40(self, b_str, start, stop, zti=None):
"""Process Outbound 3270DS structured field.
Called by process_command_0xf3.
Input is outbound structured field data.
"""
self.__log_debug("Outbound 3270DS")
pid = b_str[start+3] # Partition identifier (OO through 7E)
rtn_name = "_process_cmnd_" + hex(b_str[start+4])
rtn = getattr(self, rtn_name, self._process_cmnd_unknown)
rtn(b_str, start, stop, pid=pid, zti=zti)
def _process_wsf_0xd0(self, b_str, start, stop, zti=None):
"""Process DDM structured field.
Called by process_command_0xf3.
Input is outbound structured field data.
See http://x3270.bgp.nu/SS-HCS12-1372-00.pdf
Only DFT file transfers are supported. DFT file transfers use
structured fields as opposed to other strange encodings.
Outbound transmission = host to PC = download
Inbound transmission = PC to host = upload
Downloads use IND$FILE GET, which use DDM DOWNLOAD requests
Uploads use IND$FILE PUT, which use DDM GET requests
General idea of download:
IND$FILE GET
Read Partition Query from host
Query Reply from PC
Open for Download from host
Open Acknowledgement from PC
Download Data Buffer from host
Data Acknowledgement from PC
- repeat at Download Data Buffer -
Close Request from host
Close Acknowledgement from PC
Open for Messages from host
Open Acknowledgement from PC
MSG : File Transfer Complete from host
Data Acknowledgement from PC
General idea of upload:
IND$FILE PUT
Read Partition Query from host
Query Reply from PC
Open for Upload from host
Open Acknowledgement from PC
Set Cursor And Get from host
Upload Data Buffer from PC
- repeat at Sest Cursor And Get -
Get Past End of File Error from PC
Close Request from host
Close Reply from PC
Open for Messages from host
Open Acknowledgement from PC
MSG : Transfer Complete from host
Data Acknowledgement from PC
"""
ddm_req = b_str[(start+2):(start+5)]
rtn_name = "_process_ddm_0x" + ddm_req.hex()
rtn = getattr(self, rtn_name, self._process_ddm_unknown)
rtn(b_str, start, stop, zti=zti)
def _process_wsf_unknown(self, b_str, start, stop, zti=None):
raise TnzError(f"Bad Structured Field ID: {b_str[start+2]}")
def _reset_mdt(self):
"""Reset the MDT (modified data tag) for all fields.
"""
plane_fa = self.plane_fa
_bit6 = bit6
for faddr, fattr in self.fields():
nattr = _bit6(fattr & (255 ^ 1)) # turn off MDT
if fattr != nattr:
plane_fa[faddr] = nattr
def _reset_partition(self):
"""Perform host-initiated Reset Partition.
"""
self.__reply_mode = 0 # Field mode
self.__reply_cattrs = b""
def _restore_keyboard(self, zti=None):
"""Perform host-initiated Restore Keyboard
"""
self.aid = 0x60 # AID_NONE
self.read_state = self.__ReadState.NORMAL
self.system_lock_wait = False
# Acknowledgment of an inbound transmission ?
self.inop = 0x06 # (RM) INOP = Read Modified
self.pwait = False
if zti:
zti.rewrite_keylock = True
# Private methods
    def __addlines(self, keep_all=False):
        """Append screen rows to self.readlines.

        Rows from __readlines_row up to __readlines_maxrow are
        captured as right-stripped strings. Unless keep_all is
        True, trailing blank rows are dropped again and not counted
        as consumed (so they can be re-captured later with
        content). When the capture reaches the bottom and
        readlines_pa2 is set, PA2 is pressed to request more output
        unless a file transfer is in progress.
        """
        # hint: use keep_all=True before pressing pa2
        # to include blank lines at the bottom of the screen
        row = self.__readlines_row
        maxrow = self.__readlines_maxrow
        if row >= maxrow:
            return
        maxcol = self.maxcol
        saddr = row * maxcol
        eaddr = maxrow * maxcol
        rowcnt = (eaddr - saddr) // maxcol
        newstr = self.scrstr(saddr, eaddr)
        for i in range(0, eaddr-saddr, maxcol):
            self.readlines.append(newstr[i:i+maxcol].rstrip())
        if not keep_all:
            # pop trailing blank rows; rowcnt ends up as the number
            # of rows actually kept/consumed
            readlines = self.readlines
            while rowcnt:
                line = readlines.pop()
                if line:
                    readlines.append(line)
                    break
                rowcnt -= 1
        row += rowcnt
        self.__readlines_row = row
        if (keep_all or row >= maxrow) and self.readlines_pa2:
            if not self.__indsfile:  # if get/put not in progress
                self.pa2()
    def __append_char_bytes(self, blst, saddr, eaddr):
        """
        Append data character bytes to the input list (blst) starting
        at saddr and ending at (not including) eaddr. A GE (Graphic
        Escape) is appended when the character byte is from character
        set 1.

        Iterates runs of equal character-set values: a character
        set 0 run is appended as one slice, while each character in
        a set 1 run is prefixed with a GE order byte.

        Raises:
            TnzError: for any character set other than 0 or 1.
        """
        plane_dc = self.plane_dc
        rcba = self.rcba
        addr0 = saddr
        for addr1 in self.__iterbs_addr(self.plane_cs, saddr, eaddr):
            cii = self.plane_cs[addr0]  # character set of this run
            if cii == 0:
                blst.append(rcba(plane_dc, addr0, addr1))
            elif cii == 1:
                for addr2 in self.__range_addr(addr0, addr1):
                    blst.append(b"\x08")  # GE (Graphic Escape)
                    blst.append(plane_dc[addr2:addr2+1])
            else:
                raise TnzError(f"cs={cii} not implemented")
            addr0 = addr1
def __check_address(self, address):
if not 0 <= address < self.buffer_size:
raise TnzTerminalError(f"Invalid address: {address}")
    async def __connect(self, protocol, host, port, ssl_context):
        """Establish the connection on the event loop.

        Creates a connection using the given protocol factory. On
        cancellation, or on OSError/UnicodeError, the session is
        marked lost and the module wait event is set instead of
        propagating the exception. Always clears __connect_task
        when this coroutine was the active connect task.
        """
        self.__log_debug("__connect(%r, %r, %r, %r)",
                         protocol, host, port, ssl_context)
        loop = asyncio.get_event_loop()
        if hasattr(asyncio, "current_task"):
            task = asyncio.current_task()
        else:
            # fallback for Python versions without
            # asyncio.current_task (pre-3.7)
            task = asyncio.Task.current_task()
        # initialize using running loop implicitly
        global _wait_event
        if not _wait_event and loop is _loop:
            _wait_event = asyncio.Event()
        # connect
        try:
            await loop.create_connection(protocol, host, port,
                                         ssl=ssl_context)
        except asyncio.CancelledError:
            self.seslost = True
            _wait_event.set()
            return  # assume from shutdown/close
        except (OSError, UnicodeError):
            self.seslost = sys.exc_info()
            self.__logger.exception("create_connection error")
            _wait_event.set()
            return  # exception consumed
        finally:
            if self.__connect_task is task:
                self.__connect_task = None
def __erase(self, saddr, eaddr):
"""Process erase function.
Assume input range contains no fields.
"""
size = eaddr - saddr
if size <= 0:
size += self.buffer_size
zeros = b"\x00" * size
ucba = self.ucba
ucba(self.plane_dc, saddr, zeros)
ucba(self.plane_eh, saddr, zeros)
ucba(self.plane_cs, saddr, zeros)
ucba(self.plane_fg, saddr, zeros)
ucba(self.plane_bg, saddr, zeros)
    def __erase_input(self, saddr, eaddr, zti=None):
        """Erase unprotected character positions in the range.

        Walks the character runs between saddr and eaddr; runs in a
        protected field are skipped, the rest are zeroed via
        __erase.
        NOTE(review): EAU passes saddr == eaddr == 0, apparently
        meaning the whole buffer - confirm char_addrs semantics.
        """
        self.__log_debug("  ERASE INPUT %d %d", saddr, eaddr)
        field = self.field
        plane_fa = self.plane_fa
        erase = self.__erase
        for sa1, ea1 in self.char_addrs(saddr, eaddr):
            if sa1 != saddr:
                # a run after the first starts just past its
                # field attribute
                faddr = sa1 - 1
                fav = plane_fa[faddr]
            else:
                faddr, fav = field(sa1)  # locate governing field
            if fav & 0x20:  # if protected field
                continue
            erase(sa1, ea1)
            if zti:
                zti.write(self, faddr, sa1, ea1)
        self.updated = True
def __erase_reset(self, ipz=False, zti=None):
"""Erase/Reset
Resets the device to implicit partition state destroying
all existing (implicit or explicit) partitions. Function
creates an implicit partition zero with default partition
characteristics and a default size if ips is False or of
alternate size if ipz is True.
"""
self.__extended_color_mode = False
if ipz:
self.maxrow = self.amaxrow
self.maxcol = self.amaxcol
else:
self.maxrow = self.dmaxrow
self.maxcol = self.dmaxcol
buffer_size = self.maxrow * self.maxcol
self.buffer_size = buffer_size
self.plane_dc = bytearray(buffer_size) # data characters
self.plane_fa = bytearray(buffer_size) # field attributes
self.plane_eh = bytearray(buffer_size) # extended hilite
self.plane_cs = bytearray(buffer_size) # character set
self.plane_fg = bytearray(buffer_size) # foreground color
self.plane_bg = bytearray(buffer_size) # background color
self.addr16bit = buffer_size >= 16384
self.curadd = 0
if zti:
zti.erase(self)
    def __get_event_loop(self):
        """Return the event loop for this session.

        Prefers the instance loop, then the module-level loop;
        otherwise creates one (installing a selector loop policy on
        Windows, since the default policy's loop does not support
        add_reader) and caches it on the instance and, when unset,
        the module.
        """
        global _loop
        loop = self.__loop
        if not loop:
            loop = _loop
            if not loop:
                if platform.system() == "Windows":
                    # default policy does not support add_reader
                    pol = asyncio.WindowsSelectorEventLoopPolicy()
                    asyncio.set_event_loop_policy(pol)
                loop = asyncio.get_event_loop()
            self.__loop = loop
            if not _loop:
                _loop = loop
        return loop
def __iterbs(self, bav, pos, endpos):
"""
Iterate through sequences of same-value bytes in the input
bytearray.
Each iteration is represented by a single end position that
can be used to describe a vector. For the first iteration,
the start position of the vector is the input start address.
For subsequent iterations, the start address is the position
for the previos iteration.
"""
if pos >= endpos:
raise ValueError("pos >= endpos")
for mat in self.__patbs.finditer(bav, pos, endpos):
yield mat.end()
    def __iterbs_addr(self, bav, saddr=0, eaddr=None):
        """
        Iterate through sequences of same-value bytes in the input
        bytearray.

        Each iteration is represented by a single end address that
        can be used to describe a vector. For the first iteration,
        the start address of the vector is the input start address.
        For subsequent iterations, the start address is the address
        for the previous iteration.

        When saddr >= eaddr the scan wraps through the end of the
        buffer back around to eaddr; a run spanning the wrap point
        (last byte equal to first byte) is reported as one vector
        rather than being split at address 0.
        """
        if eaddr is None:
            eaddr = saddr
        if saddr >= eaddr:
            endpos = len(bav)
        else:
            endpos = eaddr
        finditer = self.__patbs.finditer
        for mat in finditer(bav, saddr, endpos):
            taddr = mat.end()
            if saddr >= eaddr and taddr >= endpos:
                # run reaches the physical end of the buffer
                if eaddr != 0 and bav[-1] == bav[0]:
                    break  # run continues at 0; second loop ends it
                taddr = 0  # report end-of-buffer as wrapped address 0
            yield taddr
        if saddr >= eaddr and eaddr != 0:
            for mat in finditer(bav, 0, eaddr):
                yield mat.end()
    def __key_bytes(self, data, codec_index, onerow, zti):
        """Type encoded character bytes at the cursor position.

        data: bytes already encoded for the data character plane.
        codec_index: character set plane value for the typed bytes.
        onerow: when True, stop at the end of the cursor's row
            instead of continuing through the buffer.
        Returns the number of characters actually keyed. Typing
        stops early when the cursor is on a field attribute or in a
        protected field. Keying into a field sets its MDT bit.

        Raises:
            TnzError: input inhibited (PWAIT or system lock).
        """
        if self.pwait:
            raise TnzError("PWAIT Input Inhibit")
        if self.system_lock_wait:
            raise TnzError("System Lock Input Inhibit")
        buffer_size = self.buffer_size
        if onerow:
            # cax = first address past the cursor's row
            cax = self.curadd // self.maxcol  # y
            cax += 1
            cax *= self.maxcol
            cax %= buffer_size
        else:
            cax = self.curadd
        chars_keyed = 0
        # hoist plane/method lookups for the typing loop
        plane_bg = self.plane_bg
        plane_dc = self.plane_dc
        plane_cs = self.plane_cs
        plane_eh = self.plane_eh
        plane_fa = self.plane_fa
        plane_fg = self.plane_fg
        field = self.field
        next_field = self.next_field
        while True:
            if not data:
                return chars_keyed
            ca1 = self.curadd
            if plane_fa[ca1]:
                self.__log_debug("  data rejected, on field attribute")
                return chars_keyed  # on field attribute
            datalen = len(data)
            # ca2 = (ca1 + datalen) % buffer_size
            fa1, fattr = field(ca1)
            if fattr & 0x20:  # if protected
                self.__log_debug("Rejected - Field protected @ %r", fa1)
                return chars_keyed  # on protected field
            fa2, _ = next_field(ca1, cax)
            if fa2 < 0:
                fa2 = cax  # no next field before the limit
            if ca1 < fa2:
                fieldlen = fa2 - ca1
            else:
                fieldlen = buffer_size + fa2 - ca1
            # fill at most to the end of the current field
            usedlen = min(fieldlen, datalen)
            zeros = b"\x00" * usedlen
            self.ucba(plane_dc, ca1, data[:usedlen])
            self.ucba(plane_eh, ca1, zeros)
            self.ucba(plane_cs, ca1, bytes([codec_index]) * usedlen)
            self.ucba(plane_fg, ca1, zeros)
            self.ucba(plane_bg, ca1, zeros)
            fattr = bit6(fattr | 1)  # Set MDT (Modified Data Tag)
            plane_fa[fa1] = fattr
            self.curadd = (self.curadd + usedlen) % buffer_size
            if zti:
                zti.write(self, fa1, ca1, self.curadd)
                zti.rewrite_cursor = True
            chars_keyed += usedlen
            data = data[usedlen:]
            if self.curadd == cax:
                return chars_keyed
            fattr = plane_fa[self.curadd]
            if fattr:  # if on field attribute
                if not fattr & 0x10:  # if alphanumeric field
                    self.curadd += 1
                    self.curadd %= buffer_size
                else:
                    self.key_tab()
def __log(self, lvl, *args, **kwargs):
self.__log_check()
self.__logger.log(lvl, "%s "+args[0],
self.name, *args[1:], **kwargs)
    def __log_debug(self, *args, **kwargs):
        # DEBUG-level wrapper around __log (session name prefixed)
        return self.__log(logging.DEBUG, *args, **kwargs)
    def __log_error(self, *args, **kwargs):
        # ERROR-level wrapper around __log (session name prefixed)
        return self.__log(logging.ERROR, *args, **kwargs)
    def __log_info(self, *args, **kwargs):
        # INFO-level wrapper around __log (session name prefixed)
        return self.__log(logging.INFO, *args, **kwargs)
    def __next_get(self):
        """
        Set up for next get structure field to transfer data to the
        host for a put.

        Builds the D04605 "Data for Get" structured field for the
        next record into __indsisf (left empty when the input file
        is exhausted). When __indsenc is set, file text is encoded
        and LF is expanded to CRLF, buffered through __indspend so
        records honor the _limin size limit.
        """
        self.__ddmrecnum += 1
        # 2 bytes for the structure field length
        # ...plus...
        isf = b"\xd0\x46\x05"  # D04605 Data for Get
        isf += b"\x63\x06"  # Record Number Header
        isf += self.__ddmrecnum.to_bytes(4, byteorder="big")
        isf += b"\xc0\x80"  # Data Not Compressed
        isf += b"\x61"  # Begin Data Code
        # ...plus...
        # 2 bytes for length of data (plus 5)
        # and end with the actual data
        maxlen = self._limin - len(isf) - 2 - 2
        if self.__indsenc:
            if len(self.__indspend) >= maxlen:
                # enough buffered from a previous read
                data = self.__indspend[:maxlen]
                self.__indspend = self.__indspend[maxlen:]
            else:
                rem = maxlen - len(self.__indspend)
                rem *= 4  # at most 4 bytes per
                data = self.__indsfile.read(rem)
                # IND$FILE takes CRLF to delimit record
                # boundaries. Universal newlines enables
                # this by converting LF to CRLF. Consider
                # that data will be EBCDIC except for the
                # LF in the CRLF sequence - which will be
                # the byte value for EBCDIC RPT (ascii LF).
                # Note that CR is the same in both ascii
                # and EBCDIC. And consider that EBCDIC
                # translation often equates EBCDIC NL with
                # unicode LF.
                data = data.replace("\n", "\r")
                data = data.encode(self.__indsenc)
                data = data.replace(b"\r", b"\r\n")
                self.__indspend += data
                if len(self.__indspend) >= maxlen:
                    data = self.__indspend[:maxlen]
                    self.__indspend = self.__indspend[maxlen:]
                else:
                    data = self.__indspend
                    self.__indspend = b""
        else:
            data = self.__indsfile.read(maxlen)
        if not data:
            self.__log_debug("DDM NEXT none (no record %d)",
                             self.__ddmrecnum)
            self.__indsisf = b""
            return
        self.__log_debug("DDM NEXT record (%d) is %d byte(s)",
                         self.__ddmrecnum, len(data))
        isf += (len(data)+5).to_bytes(2, byteorder="big")
        isf += data
        isf = (len(isf)+2).to_bytes(2, byteorder="big")+isf
        self.__indsisf = isf
    def __query_reply(self, reqtype=None, qcode=None):
        """Perform query reply.

        Build and send one inbound structured-field record containing
        the Query Reply structured fields this emulated terminal
        supports: Summary, Usable Area, Implicit Partitions, Character
        Sets, Highlight, Reply Modes, DDM (file transfer) and, when
        capable_color is set, Color.  The reqtype/qcode parameters are
        only logged here.
        """
        self.__log_debug("query reply %r %r", reqtype, qcode)
        rec = b"\x88"  # SF (Structured Field AID)
        # 80 Query Reply (Summary)
        sfb = b"\x80"  # Summary Query Reply
        sfb += b"\x80"  # Summary
        sfb += b"\x81"  # Usable Area
        sfb += b"\x85"  # Character Sets
        if self.capable_color:
            sfb += b"\x86"  # Color
        sfb += b"\x87"  # Highlight/Highlighting
        sfb += b"\x88"  # Reply Modes
        sfb += b"\x95"  # DDM (for file transfer)
        sfb += b"\xa6"  # Implicit Partitions
        # End of Summary
        sfb = b"\x81"+sfb  # Query Reply
        sfb = (len(sfb)+2).to_bytes(2, byteorder="big")+sfb
        rec += sfb
        # 81 Query Reply (Usable Area)
        sfb = b"\x81"  # QCODE for the Usable Area Query Reply
        # Flags  bit 0 - Reserved
        # (4)    bit 1 - Page printer
        #        bit 2 - Reserved
        #        bit 3 - HC A "hard copy" device
        #        bit 4-7 - ADDR Addressing modes
        #                  0 - Reserved
        #                  1 - 12/14-bit addressing allowed
        #                  2 - Reserved
        #                  3 - 12/14/16-bit addressing allowed
        #                  F - Unmapped (no explicit address)
        sfb += b"\x01"  # Flags (4)
        # Flags  bit 0 - VCP Variable cells supported
        # (5)    bit 1 - CHAR Non-matrix character
        #        bit 2 - CELLUNITS Value in bytes 6 & 7, 8 and 9; pels
        #        bit 3-7 - Reserved
        sfb += b"\x00"  # Flags (5)
        sfb += self.amaxcol.to_bytes(2, byteorder="big")  # W
        sfb += self.amaxrow.to_bytes(2, byteorder="big")  # H
        sfb += b"\x00"  # UNITS Pel measurement - inches
        # Horizontal distance between points as fraction ?
        sfb += (1).to_bytes(2, byteorder="big")  # Xr numerator
        sfb += (96).to_bytes(2, byteorder="big")  # Xr denominator
        # Vertical distance between points as fraction ?
        sfb += (1).to_bytes(2, byteorder="big")  # Yr numerator
        sfb += (96).to_bytes(2, byteorder="big")  # Yr denominator
        sfb += b"\x06"  # AW Number of X units in default cell?
        sfb += b"\x0c"  # AH Number of Y units in default cell?
        # BUFSZ onward may not be needed
        # Only set BUFSZ non-zero if partitions NOT supported
        # End of Usable Area
        sfb = b"\x81"+sfb  # Query Reply
        sfb = (len(sfb)+2).to_bytes(2, byteorder="big")+sfb
        rec += sfb
        # A6 Query Reply (Implicit Partitions)
        sfb = b"\xa6"  # Implicit Partition Query Reply
        sfb += b"\x00\x00"  # Flags (Reserved)
        # Implicit Partition Screen Sizes for Display Devices
        sfb += b"\x0b"  # Length of this self-defining parameter
        sfb += b"\x01"  # Implicit Partition Sizes
        sfb += b"\x00"  # Flags (Reserved)
        sfb += self.dmaxcol.to_bytes(2, byteorder="big")  # WD
        sfb += self.dmaxrow.to_bytes(2, byteorder="big")  # HD
        sfb += self.amaxcol.to_bytes(2, byteorder="big")  # WA
        sfb += self.amaxrow.to_bytes(2, byteorder="big")  # HA
        # End of Implicit Partitions
        sfb = b"\x81"+sfb  # Query Reply
        sfb = (len(sfb)+2).to_bytes(2, byteorder="big")+sfb
        rec += sfb
        # 85 Query Reply (Character Sets)
        sfb = b"\x85"  # Character Sets Query Reply
        # Flags  bit 0 - ALT Graphic Escape (GE) supported
        # (1)    bit 1 - MULTID Multiple LCIDs are supported
        #        bit 2 - LOADABLE LOAD PS are supported
        #        bit 3 - EXT LOAD PS EXTENDED is supported
        #        bit 4 - MS >1 size of character slot are supported
        #        bit 5 - CH2 2-byte coded character sets are supported
        #        bit 6 - GF CGCSGID is present
        #        bit 7 - Reserved
        if self.alt:
            sfb += b"\x82"  # Flags (1)
        else:
            sfb += b"\x02"  # Flags (1)
        # Flags  bit 0 - Reserved
        # (2)    bit 1 - PSCS Load PS slot size match not required
        #        bit 2-7 - Reserved
        sfb += b"\x00"  # Flags (2)
        sfb += b"\x06"  # SDW Default character slot width
        sfb += b"\x0c"  # SDH Default character slot height
        sfb += b"\x00" * 4  # FORM Supported LOAD PS format types
        sfb += b"\x07"  # DL Length of each descriptor (one build below)
        # Character Set Descriptor 1
        # GA23-0059-4 describes SET as being at byte 1
        # does that mean byte 0 is reserved? or a misprint?
        sfb += b"\x00"  # SET Device specific Char Set ID (PS store No.)
        # Flags  bit 0 - LOAD Loadable character set
        # (3)    bit 1 - TRIPLE Triple-plane character set
        #        bit 2 - CHAR Double-byte coded character set
        #        bit 3 - CB No LCID compare
        #        bit 4-7 - Reserved
        sfb += b"\x00"  # Flags (3)
        sfb += b"\x00"  # LCID Local character set ID (alias)
        # SW and SH only present if MS=1
        # sfb += b"\x00"  # SW Width of the character slots
        # sfb += b"\x00"  # SH Height of the character slots
        # SUBSN only present if CH2=1
        # sfb += b"\x00"  # SUBSN Starting subsection
        # sfb += b"\x00"  # SUBSN Ending subsection
        # CGCSGID made up of 2-byte character set number followed
        # by 2-byte code page number.
        sfb += (self.cs_00).to_bytes(2, byteorder="big")
        sfb += (self.cp_00).to_bytes(2, byteorder="big")
        if self.alt:
            # Character Set Descriptor 2
            sfb += b"\x01"  # SET Device specific Char Set ID
            # Flags  bit 0 - LOAD Loadable character set
            # (3)    bit 1 - TRIPLE Triple-plane character set
            #        bit 2 - CHAR Double-byte coded character set
            #        bit 3 - CB No LCID compare
            #        bit 4-7 - Reserved
            sfb += b"\x00"  # Flags (3)
            sfb += b"\xF1"  # LCID Local character set ID (alias)
            # SW and SH only present if MS=1
            # sfb += b"\x00"  # SW Width of the character slots
            # sfb += b"\x00"  # SH Height of the character slots
            # SUBSN only present if CH2=1
            # sfb += b"\x00"  # SUBSN Starting subsection
            # sfb += b"\x00"  # SUBSN Ending subsection
            # CGCSGID made up of 2-byte character set number followed
            # by 2-byte code page number.
            sfb += (self.cs_01).to_bytes(2, byteorder="big")
            sfb += (self.cp_01).to_bytes(2, byteorder="big")
        # End of Character Sets)
        sfb = b"\x81"+sfb  # Query Reply
        sfb = (len(sfb)+2).to_bytes(2, byteorder="big")+sfb
        rec += sfb
        # 87 Query Reply (Highlight)
        # from some testing done on 5/1/2019, it seems that
        # mvs "console switch" will not take place unless
        # blink highlighting is supported. Blink highlighting
        # is probably the kind of highlighting LEAST supported
        # by modern terminals. Since claiming support seems
        # to be required, claim support for all highlighting.
        # this was the case for a 62x160 screen size.
        # it did not seem to be the case for smaller screen sizes.
        sfb = b"\x87"  # QCODE for the Highlight Query Reply
        sfb += b"\x05"  # attribute-value/action pairs that follow
        sfb += b"\x00\xf0"  # value 00 -> action f0 = normal
        sfb += b"\xf1\xf1"  # value f1 -> action f1 = blink
        sfb += b"\xf2\xf2"  # value f2 -> action f2 = reverse
        sfb += b"\xf4\xf4"  # value f4 -> action f4 = underscore
        sfb += b"\xf8\xf8"  # value f8 -> action f8 = intensify
        # End of Highlight
        sfb = b"\x81"+sfb  # Query Reply
        sfb = (len(sfb)+2).to_bytes(2, byteorder="big")+sfb
        rec += sfb
        # 88 Query Reply (Reply Modes)
        sfb = b"\x88"  # QCODE for the Reply Modes Query Reply
        sfb += b"\x00"  # Field Mode
        sfb += b"\x01"  # Extended Field Mode
        sfb += b"\x02"  # Character mode
        # End of Reply Modes
        sfb = b"\x81"+sfb  # Query Reply
        sfb = (len(sfb)+2).to_bytes(2, byteorder="big")+sfb
        rec += sfb
        # 95 Query Reply (DDM)
        # (for file transfer)
        sfb = b"\x95"  # QCODE for the DDM Query Reply
        sfb += b"\x00\x00"  # Flags reserved
        sfb += self._limin.to_bytes(2, byteorder="big")  # LIMIN
        sfb += self._limout.to_bytes(2, byteorder="big")  # LIMOUT
        sfb += b"\x01"  # NSS Number of subsets supported
        sfb += b"\x01"  # DDMSS DDM subset identifier
        # End of DDM
        sfb = b"\x81"+sfb  # Query Reply
        sfb = (len(sfb)+2).to_bytes(2, byteorder="big")+sfb
        rec += sfb
        # 86 Query Reply (Color)
        # Where are these color identifiers defined?
        # F1 241 Blue
        # F2 242 Red
        # F3 243 Pink
        # F4 244 Green
        # F5 245 Turquoise
        # F6 246 Yellow
        # F7 247 White
        if self.capable_color:
            sfb = b"\x86"  # QCODE for the Color Query Reply
            sfb += b"\x00"  # flags (none defined for terminal)
            sfb += b"\x08"  # NP number of CAV/COLOR pairs
            sfb += b"\x00\xf4"  # Default -> Green
            sfb += b"\xf1\xf1"  # F1 -> Blue
            sfb += b"\xf2\xf2"  # F2 -> Red
            sfb += b"\xf3\xf3"  # F3 -> Pink
            sfb += b"\xf4\xf4"  # F4 -> Green
            sfb += b"\xf5\xf5"  # F5 -> Turquoise
            sfb += b"\xf6\xf6"  # F6 -> Yellow
            sfb += b"\xf7\xf7"  # F7 -> White?
            # End of Color
            sfb = b"\x81"+sfb  # Query Reply
            sfb = (len(sfb)+2).to_bytes(2, byteorder="big")+sfb
            rec += sfb
        self.send_3270_data(rec)
def __range_addr(self, saddr, eaddr):
if saddr >= eaddr:
endpos = self.buffer_size
else:
endpos = eaddr
for addr in range(saddr, endpos):
yield addr
if 0 < eaddr <= saddr:
for addr in range(0, eaddr):
yield addr
def __read_buffer(self):
"""Process RB (Read Buffer) 3270 Data Stream Command.
"""
self.__log_debug("Read Buffer (RB)")
if self.inpid:
raise TnzError(f"PID={self.inpid} not implemented")
# similiar to send_aid but sends SF but no SBA
self.__log_debug(" aid: 0x%02x", self.aid)
baddr = self.address_bytes(self.curadd)
self.__log_debug(" cursor %r", baddr)
rec = bytes([self.aid]) + baddr
reply_mode = self.__reply_mode
reply_cattrs = self.__reply_cattrs
buffer_size = self.buffer_size
addr = 0
while addr < buffer_size:
while addr < buffer_size:
fattr = self.plane_fa[addr]
if not fattr:
break
self.__log_debug(" SF %s", hex(fattr))
sfb = [0x1d, fattr] # SF (Start Field) fattr
if reply_mode: # Extended Field or Character
sfe = [0x29, 0] # SFE (Start Field Extended) 0
attr = self.plane_eh[addr]
if attr: # if not default
sfe[1] += 1
sfe.append(0x41, attr)
attr = self.plane_fg[addr]
if attr: # if not default
sfe[1] += 1
sfe.append(0x42, attr)
attr = self.plane_cs[addr]
if attr: # if not default
sfe[1] += 1
sfe.append(0x43, attr)
attr = self.plane_bg[addr]
if attr: # if not default
sfe[1] += 1
sfe.append(0x45, attr)
if sfe[1] != 0x40: # if not default`
if fattr:
sfe[1] += 1
sfe.append(0xc0, fattr)
sfb = sfe
rec += bytes(sfb)
addr += 1
else:
break
eindex = self.__pat0s.search(self.plane_fa, addr).end()
eaddr = eindex % buffer_size
blst = []
append = blst.append
if reply_mode in (0x00, 0x01): # [Extended] Field mode
# TODO following needs to NOT append null characters
self.__append_char_bytes(blst, addr, eaddr)
elif reply_mode == 0x02: # Character mode
# TODO following needs to NOT append null characters
# (nor their attributes)
eh_attr = 0
fg_attr = 0
bg_attr = 0
for sa2, ea2 in self.group_addrs(addr, eaddr):
eh1 = eh_attr
fg1 = fg_attr
bg1 = bg_attr
if 0x41 in reply_cattrs:
eh1 = self.plane_eh[sa2]
if 0x42 in reply_cattrs:
fg1 = self.plane_fg[sa2]
if 0x45 in reply_cattrs:
bg1 = self.plane_bg[sa2]
if eh1 != eh_attr:
append(bytes([0x28, 0x41, eh1])) # SA 41 eh
if fg1 != fg_attr:
append(bytes([0x28, 0x42, fg1])) # SA 42 fg
if bg1 != bg_attr:
append(bytes([0x28, 0x45, bg1])) # SA 45 bg
self.__append_char_bytes(blst, sa2, ea2)
else:
raise TnzError(f"bad reply mode {reply_mode}")
data = b"".join(blst)
self.__log_debug(" RB: %s byte(s) of data @ %r",
len(data), addr)
rec += data
addr = eindex
self.__log_debug(" End of RB response logging")
self.send_3270_data(rec)
    def __readlines_check(self):
        """Harvest screen lines into self.readlines when appropriate.

        No-op unless readlines capture is active (self.readlines is a
        list).  Inspects the status area (last 22 buffer positions)
        for CP/VM status words; on MORE/HOLDING/READ/RUNNING (or an
        unprotected screen, presumed TSO) it calls __addlines to
        capture screen rows.  A trailing "***"/" ***" marker line is
        dropped from the captured list.
        """
        if self.readlines is None:
            return
        # VM-style status area occupies the last 22 positions.
        saddr = self.buffer_size - 22
        vm_status = self.scrstr(saddr, 0).rstrip().upper()
        keep_all = (" MORE" in vm_status or
                    " HOLDING" in vm_status)
        if (" READ" in vm_status or
                " RUNNING" in vm_status or
                keep_all):
            self.__readlines_maxrow = self.maxrow - 2
            addr = self.__readlines_maxrow * self.maxcol
            # Only capture when the input area boundary looks sane:
            # unprotected at addr, protected just before it.
            if (not self.is_protected(addr) and
                    self.is_protected(addr - 1)):
                self.__addlines(keep_all)
        elif self.is_unprotected():  # TSO?
            self.__readlines_maxrow = self.maxrow - 1
            self.__addlines()
        if self.readlines:
            # Drop a trailing "***" continuation marker line.
            line = self.readlines.pop()
            if line not in ("***", " ***"):
                self.readlines.append(line)
def __set_attributes(self, addr, b_str, b_idx, zti=None):
"""
Set field attributes according to input attributes in the
format used by MF and SFE.
"""
pairs = []
start = b_idx + 1
stop = start + b_str[b_idx] * 2
for pair_index in range(start, stop, 2):
fat = b_str[pair_index]
fav = b_str[pair_index + 1]
pairs.append((bytes([fat]), bytes([fav])))
if fat == 0xc0: # 3270 field attribute
self.plane_fa[addr] = bit6(fav)
elif fat == 0x41: # extended highlighting
self.plane_eh[addr] = fav
elif fat == 0x42: # foreground color
if not self.__extended_color_mode:
if zti:
zti.extended_color(self)
self.__extended_color_mode = True
self.plane_fg[addr] = fav
elif fat == 0x43: # character set
self.plane_cs[addr] = fav
elif fat == 0x45: # background color
if not self.__extended_color_mode:
if zti:
zti.extended_color(self)
self.__extended_color_mode = True
self.plane_bg[addr] = fav
else:
raise TnzError(f"Bad field attribute type: {fat}")
return stop, pairs
    async def __start_tls(self, context):
        """Upgrade the existing transport to TLS.

        Hands the current transport/protocol pair to
        loop.start_tls() with the given SSLContext.  On success the
        new transport replaces self._transport and __secure is set;
        on cancellation or OSError the session is marked lost and any
        Wait callers are woken via _wait_event.  Clears
        __connect_task when this coroutine is that task.
        """
        self.__log_debug("__start_tls(%r)", context)
        loop = asyncio.get_event_loop()
        # asyncio.current_task replaced Task.current_task in 3.7+.
        if hasattr(asyncio, "current_task"):
            task = asyncio.current_task()
        else:
            task = asyncio.Task.current_task()
        transport = self._transport
        protocol = transport.get_protocol()
        self._transport = None  # cleared until the upgrade succeeds
        try:
            transport = await loop.start_tls(transport,
                                             protocol,
                                             context)
        except asyncio.CancelledError:
            self.seslost = True
            _wait_event.set()
        except OSError:  # what could this be?
            self.seslost = sys.exc_info()
            self.__logger.exception("start_tls error")
            _wait_event.set()
        else:
            self._transport = transport
            self.__secure = True
            self.__log_debug("__start_tls transport: %r", transport)
        finally:
            if self.__connect_task is task:
                self.__connect_task = None
    def __tab(self, saddr, eaddr=None):
        """Process tab action starting at input address.

        Returns result address: the first character position of the
        next unprotected field at or after saddr (searching within
        [saddr, eaddr)), or 0 if none is found.
        """
        plane_fa = self.plane_fa
        if not plane_fa[saddr]:  # if start on character
            # Advance to the next field attribute position first.
            saddr, _ = self.next_field(saddr, eaddr)
            if saddr < 0 or saddr == eaddr:
                return 0
        is_protected_attr = self.is_protected_attr
        saddr = (saddr+1) % self.buffer_size
        # Scan character positions; the byte just before each group
        # start is the governing field attribute.
        for sa1, _ in self.char_addrs(saddr, eaddr):
            fattr = plane_fa[sa1-1]
            if not is_protected_attr(fattr):
                return sa1
        return 0
# Class methods
    @classmethod
    def logging(cls):
        """Initialize logging.

        Delegates to __log_check, which configures the "tnz" logger
        the first time it is called.  Note: within the class namespace
        this method name shadows the stdlib ``logging`` module.
        """
        cls.__log_check()
# Private class methods
@classmethod
def __log_check(cls):
if cls.__logger:
return
logger = logging.getLogger("tnz")
cls.__logger = logger
tnz_logging = os.getenv("TNZ_LOGGING")
if tnz_logging == "":
return
if tnz_logging is None:
dirname = os.path.expanduser(__file__)
dirname = os.path.abspath(dirname)
dirname = os.path.dirname(dirname)
tnz_logging = os.path.join(dirname, "logging.json")
with open(tnz_logging) as file:
logd = json.load(file)
from logging.config import dictConfig
logd["disable_existing_loggers"] = False
dictConfig(logd)
@classmethod
def __tnon(cls, value):
"""Translate input byte to a telnet option name.
"""
if isinstance(value, int):
value = bytes([value])
name = cls.__tn_options.get(value)
if name:
return name
return "0x"+value.hex()
# Static methods
@staticmethod
def fav_repr(fav):
"""Return string representation of field attribute value.
"""
fa_str = ""
if fav & 0x20: # if protected field
fa_str += "P" # protected
else:
fa_str += "u" # unprotected
if fav & 0x10: # if numeric-only field
fa_str += "N" # numeric
else:
fa_str += "a" # alphanumeric
if not fav & 12: # if b00..
fa_str += "00" # Display/not selector-pen-detectable
elif fav & 12 == 4: # if b01..
fa_str += "01" # Display/selector-pen-detectable
elif fav & 12 == 8: # if b10..
fa_str += "10" # Intensified display/pen-detect
else: # b11..
fa_str += "11" # Nondisplay, nondetectable (nonprint)
if fav & 1 != 0:
fa_str += "M" # Modified
else:
fa_str += "m" # Not modified
return fa_str
@staticmethod
def is_detectable_attr(attr):
"""
Return whether or not the input field attribute is for a
selector-pen-detectable field.
"""
return attr & 0x0c in (0x08, 0x04) # b....10.. or b....01..
@staticmethod
def is_displayable_attr(attr):
"""
Return whether or not the input field attribute is for a
displayable field.
"""
return attr & 0x0c != 0x0c # not b....11..
@staticmethod
def is_intensified_attr(attr):
"""
Return whether or not the input field attribute is for an
intensified field.
"""
return attr & 0x0c == 0x08 # b....10..
@staticmethod
def is_modified_attr(attr):
"""
Return whether or not the input field attribute is for a
modified field.
"""
return bool(attr & 0x01) # b.......1 MDT (Modified Data Tag)
@staticmethod
def is_normal_attr(attr):
"""
Return whether or not the input field attribute is for a
normal field.
"""
return not attr & 0x0c # b....00..
@staticmethod
def is_numeric_attr(attr):
"""
Return whether or not the input field attribute is for a
numeric field.
"""
return attr & 0x10 # b...1....
@staticmethod
def is_protected_attr(attr):
"""
Return whether or not the input field attribute is for a
protected field.
"""
return bool(attr & 0x20) # b..1.....
@staticmethod
def rcba(value, start, stop):
"""read circular byte array
value: byte array
start: first index into byte array
stop: last index into byte array
returns copy of data from array
"""
bal = len(value)
if not bal:
raise ValueError("no array")
if start > bal:
raise ValueError("start too big")
if stop > bal:
raise ValueError("stop too big")
if start < stop:
return value[start:stop]
return value[start:]+value[:stop]
@staticmethod
def ucba(dst, start, src, begidx=0, endidx=None):
"""update circular byte array
dst: target circular byte array
start: first index into target byte array
src: data to copy to array
begidx: start index in data to copy (default is 0)
endidx: end index in data to copy (default is len(src))
"""
if endidx is None:
endidx = len(src)
bdl = endidx - begidx
if bdl <= 0:
raise ValueError("no data")
bal = len(dst)
if bdl > bal:
raise ValueError("too much data")
if start >= bal:
raise ValueError("start too big")
len1 = bal - start
if len1 < bdl:
len2 = bdl - len1
else:
len1 = bdl
len2 = 0
enda = start + len1
endd = begidx + len1
dst[start:enda] = src[begidx:endd]
if len2:
dst[:len2] = src[endd:endidx]
# Readonly properties
    @property
    def host_verified(self):
        """Bool indicating if secure and host was verified.

        Read-only view of the internal __host_verified flag.
        """
        return self.__host_verified
    @property
    def secure(self):
        """Bool indicating if connection is secure.

        Read-only; set internally after a TLS upgrade succeeds.
        """
        return self.__secure
    @property
    def tn3270(self):
        """Bool indicating if NOT NVT mode.

        Read-only view of the internal end-of-record (__eor) flag.
        """
        return self.__eor
    @property
    def tn3270e(self):
        """Bool indicating if using TN3270E.

        Read-only view of the internal __tn3270e flag.
        """
        return self.__tn3270e
# Data descriptors
    @property
    def encoding(self):
        """Name of encoding.

        The primary (index 0) character encoding name.  Set via the
        encoding setter, which also updates codec_info and the
        character-set/code-page numbers.
        """
        return self.__encoding
@encoding.setter
def encoding(self, value):
if isinstance(value, tuple):
encoding, idx = value
else:
encoding, idx = value, 0
code_page = re.findall("\\d+$", encoding)
if len(code_page) != 1:
raise ValueError("Does not end in code page number")
code_page = int(code_page[0])
import codecs
self.codec_info[idx] = codecs.lookup(encoding)
if idx == 0:
self.__encoding = encoding
self.cs_00 = 697 # FIXME how do we determine?
self.cp_00 = code_page
elif idx == 1:
if code_page == 310:
self.alt = 1 # Support GE for char set ID 01
self.cs_01 = 963
else:
self.cs_01 = 697 # FIXME how do we determine?
self.cp_01 = code_page
# Class data
__logger = None # will be set by __log_check
# Private class data
# Translate data characters to printable characters
# NULL 0x00 -> space
# SUB 0x3f -> a solid circle
# DUP 0x1c -> an overscore asterisk
# FM 0x1e -> an overscore semicolon
# FF 0x0c -> space
# CR 0x0d -> space
# NL 0x15 -> space
# EM 0x19 -> space
# EO 0xff -> space
__trans_dc_to_c = bytes.maketrans(
b"\x00\x0c\x0d\x15\x19\xff",
b"\x40\x40\x40\x40\x40\x40")
# The translation to characters that are not in the
# code page must be done by unicode ordinal.
__trans_ords = {0x1a: 0x2218, # SUB -> solid circle
0x1c: 0x2611, # DUP -> check-mark???
0x1e: 0x2612} # FM -> x-mark???
# compiled regular expression patterns
__pat0s = re.compile(b"\x00+")
__patn0 = re.compile(b"[^\x00]")
__patn0l = re.compile(b"[^\x00][\x00]*\\Z")
__patbs = re.compile(b"(.)\\1*")
__patord = re.compile(b"[\x05\x08\x11\x12\x13\x1d\x28\x29\x2c\x3c]")
__pat0n0s = re.compile(b"[^\x00]\x00+")
    class __ReadState(enum.Enum):
        """3270 DS READ STATE

        Distinct read-processing states; the exact transition
        semantics are handled by the read-state machinery elsewhere
        in the class.
        """
        NORMAL = enum.auto()
        RENTER = enum.auto()
        RREAD = enum.auto()
__tn_options = {b"\x00": "TRANSMIT-BINARY",
b"\x01": "ECHO",
b"\x03": "SUPPRESS-GO-AHEAD",
b"\x06": "TIMING-MARK",
b"\x18": "TERMINAL-TYPE",
b"\x19": "END-OF-RECORD",
b"\x1d": "3270-REGIME",
b"\x28": "TN3270E",
b"\x2e": "START_TLS",
}
class TnzError(RuntimeError):
    """General Tnz error.

    Base class for exceptions raised by this module.
    """
class TnzTerminalError(TnzError):
    """May be related to terminal characteristics.

    A TnzError subclass for failures tied to the emulated terminal.
    """
# Functions
def bit6(control_int):
    """Translate 6-bit control characters to printable characters.

    This is used for bytes that have bits 0 and 1 reserved in order
    to make the byte a printable character. See figure D-1 in Data
    Stream Programmers Reference.  Only the low 6 bits of the input
    are significant; the result has bits 0,1 set to 11 or 01.
    """
    low6 = control_int & 0x3f  # keep the 6 significant bits
    if low6 == 0x30:
        return low6 | 0xc0  # 11 0000 -> xF0
    if low6 == 0x21:
        return low6 | 0x40  # 10 0001 -> x61
    # low nibble 1-9 gets the 11 prefix, everything else gets 01
    nibble = low6 & 0x0f
    return low6 | (0xc0 if 1 <= nibble <= 9 else 0x40)
def connect(host=None, port=None,
            secure=None, verifycert=None,
            name=None):
    """Create a new Tnz object and connect to the host.

    secure = False if do not care about security
    secure = True for encrypted connection
    verifycert only has meaning when secure is True
    """
    tnz = Tnz(name=name)
    if port is None and secure is not False:
        port = 992  # default secure telnet port
    if verifycert is None:
        verifycert = False
    # NOTE(review): the original code followed with
    #     if secure and verifycert is None: verifycert = True
    # which was unreachable (verifycert can no longer be None here).
    # Removed as dead code.  If the intent was to default verifycert
    # to True for secure connections, that would change behavior and
    # must be confirmed before enabling.
    if secure is None:
        secure = bool(port != 23)  # plain telnet port -> not secure
    tnz.connect(host, port, secure=secure, verifycert=verifycert)
    return tnz
def selector_set(fileno, data=None):
    """Add input fd for wait read events.

    Registers fileno with the module event loop so that
    _read_available(data) runs when the fd becomes readable, which
    wakes any pending Wait calls via _wait_event.
    """
    _loop.add_reader(fileno, _read_available, data)
def selector_del(fileno):
    """Remove input fd from wait read events.

    Undoes a selector_set registration on the module event loop.
    """
    _loop.remove_reader(fileno)
def wakeup_wait(*_, **__):
    """Trigger wait event.

    Safe to call from any thread: schedules _wait_event.set() on the
    module event loop instead of setting it directly.  All arguments
    are accepted and ignored so this can be used as a callback.
    """
    if _wait_event:
        _loop.call_soon_threadsafe(_wait_event.set)
# Private functions
def _read_available(_):
    """Event-loop reader callback: wake any pending Wait calls."""
    if _wait_event:
        _wait_event.set()
# Private data (initialized elsewhere before use)
_loop = None  # event loop for all sessions (see selector_set/_del)
_wait_event = None  # event for all Wait calls (set by wakeup_wait)
|
[
"os.get_terminal_size",
"ssl.SSLContext",
"os.popen",
"asyncio.set_event_loop_policy",
"sys.exc_info",
"os.path.join",
"os.path.abspath",
"traceback.print_exc",
"sys.stdin.isatty",
"json.loads",
"os.path.dirname",
"re.findall",
"locale.getpreferredencoding",
"re.search",
"subprocess.Popen",
"asyncio.get_event_loop",
"codecs.lookup",
"os.system",
"asyncio.current_task",
"sys.stdout.isatty",
"enum.auto",
"platform.system",
"os.getenv",
"re.compile",
"asyncio.WindowsSelectorEventLoopPolicy",
"json.load",
"asyncio.Event",
"asyncio.Task.current_task",
"logging.config.dictConfig",
"os.path.expanduser",
"logging.getLogger"
] |
[((159702, 159722), 're.compile', 're.compile', (["b'\\x00+'"], {}), "(b'\\x00+')\n", (159712, 159722), False, 'import re\n'), ((159737, 159759), 're.compile', 're.compile', (["b'[^\\x00]'"], {}), "(b'[^\\x00]')\n", (159747, 159759), False, 'import re\n'), ((159775, 159807), 're.compile', 're.compile', (["b'[^\\x00][\\x00]*\\\\Z'"], {}), "(b'[^\\x00][\\x00]*\\\\Z')\n", (159785, 159807), False, 'import re\n'), ((159822, 159844), 're.compile', 're.compile', (["b'(.)\\\\1*'"], {}), "(b'(.)\\\\1*')\n", (159832, 159844), False, 'import re\n'), ((159860, 159905), 're.compile', 're.compile', (["b'[\\x05\\x08\\x11\\x12\\x13\\x1d(),<]'"], {}), "(b'[\\x05\\x08\\x11\\x12\\x13\\x1d(),<]')\n", (159870, 159905), False, 'import re\n'), ((159934, 159961), 're.compile', 're.compile', (["b'[^\\x00]\\x00+'"], {}), "(b'[^\\x00]\\x00+')\n", (159944, 159961), False, 'import re\n'), ((7621, 7640), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (7638, 7640), False, 'import sys\n'), ((8484, 8518), 'os.getenv', 'os.getenv', (['"""SESSION_PS_SIZE"""', 'None'], {}), "('SESSION_PS_SIZE', None)\n", (8493, 8518), False, 'import os\n'), ((36295, 36344), 're.search', 're.search', (['"""(?<=\\\\s)\\\\S(?=[\\\\S]*[\\\\s]*\\\\Z)"""', 'text'], {}), "('(?<=\\\\s)\\\\S(?=[\\\\S]*[\\\\s]*\\\\Z)', text)\n", (36304, 36344), False, 'import re\n'), ((36640, 36670), 're.search', 're.search', (['"""(?<=\\\\s)\\\\S"""', 'text'], {}), "('(?<=\\\\s)\\\\S', text)\n", (36649, 36670), False, 'import re\n'), ((122698, 122722), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (122720, 122722), False, 'import asyncio\n'), ((150401, 150425), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (150423, 150425), False, 'import asyncio\n'), ((152297, 152321), 'logging.getLogger', 'logging.getLogger', (['"""tnz"""'], {}), "('tnz')\n", (152314, 152321), False, 'import logging\n'), ((152375, 152399), 'os.getenv', 'os.getenv', (['"""TNZ_LOGGING"""'], {}), "('TNZ_LOGGING')\n", 
(152384, 152399), False, 'import os\n'), ((152872, 152888), 'logging.config.dictConfig', 'dictConfig', (['logd'], {}), '(logd)\n', (152882, 152888), False, 'from logging.config import dictConfig\n'), ((158170, 158199), 're.findall', 're.findall', (['"""\\\\d+$"""', 'encoding'], {}), "('\\\\d+$', encoding)\n", (158180, 158199), False, 'import re\n'), ((158390, 158413), 'codecs.lookup', 'codecs.lookup', (['encoding'], {}), '(encoding)\n', (158403, 158413), False, 'import codecs\n'), ((160056, 160067), 'enum.auto', 'enum.auto', ([], {}), '()\n', (160065, 160067), False, 'import enum\n'), ((160085, 160096), 'enum.auto', 'enum.auto', ([], {}), '()\n', (160094, 160096), False, 'import enum\n'), ((160113, 160124), 'enum.auto', 'enum.auto', ([], {}), '()\n', (160122, 160124), False, 'import enum\n'), ((7865, 7894), 'locale.getpreferredencoding', 'locale.getpreferredencoding', ([], {}), '()\n', (7892, 7894), False, 'import locale\n'), ((8362, 8380), 'sys.stdin.isatty', 'sys.stdin.isatty', ([], {}), '()\n', (8378, 8380), False, 'import sys\n'), ((13913, 13952), 'ssl.SSLContext', 'ssl.SSLContext', (['ssl.PROTOCOL_TLS_CLIENT'], {}), '(ssl.PROTOCOL_TLS_CLIENT)\n', (13927, 13952), False, 'import ssl\n'), ((122787, 122809), 'asyncio.current_task', 'asyncio.current_task', ([], {}), '()\n', (122807, 122809), False, 'import asyncio\n'), ((122843, 122870), 'asyncio.Task.current_task', 'asyncio.Task.current_task', ([], {}), '()\n', (122868, 122870), False, 'import asyncio\n'), ((123023, 123038), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (123036, 123038), False, 'import asyncio\n'), ((150490, 150512), 'asyncio.current_task', 'asyncio.current_task', ([], {}), '()\n', (150510, 150512), False, 'import asyncio\n'), ((150546, 150573), 'asyncio.Task.current_task', 'asyncio.Task.current_task', ([], {}), '()\n', (150571, 150573), False, 'import asyncio\n'), ((152504, 152532), 'os.path.expanduser', 'os.path.expanduser', (['__file__'], {}), '(__file__)\n', (152522, 152532), False, 
'import os\n'), ((152555, 152579), 'os.path.abspath', 'os.path.abspath', (['dirname'], {}), '(dirname)\n', (152570, 152579), False, 'import os\n'), ((152602, 152626), 'os.path.dirname', 'os.path.dirname', (['dirname'], {}), '(dirname)\n', (152617, 152626), False, 'import os\n'), ((152653, 152690), 'os.path.join', 'os.path.join', (['dirname', '"""logging.json"""'], {}), "(dirname, 'logging.json')\n", (152665, 152690), False, 'import os\n'), ((152751, 152766), 'json.load', 'json.load', (['file'], {}), '(file)\n', (152760, 152766), False, 'import json\n'), ((2988, 3018), 'os.getenv', 'os.getenv', (['"""TNZ_COLORS"""', '"""768"""'], {}), "('TNZ_COLORS', '768')\n", (2997, 3018), False, 'import os\n'), ((8904, 8926), 'os.get_terminal_size', 'os.get_terminal_size', ([], {}), '()\n', (8924, 8926), False, 'import os\n'), ((13968, 13998), 'os.getenv', 'os.getenv', (['"""ZTI_SECLEVEL"""', '"""2"""'], {}), "('ZTI_SECLEVEL', '2')\n", (13977, 13998), False, 'import os\n'), ((85104, 85122), 'json.loads', 'json.loads', (['indstr'], {}), '(indstr)\n', (85114, 85122), False, 'import json\n'), ((123412, 123426), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (123424, 123426), False, 'import sys\n'), ((126410, 126434), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (126432, 126434), False, 'import asyncio\n'), ((151040, 151054), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (151052, 151054), False, 'import sys\n'), ((64842, 64856), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (64854, 64856), False, 'import sys\n'), ((64957, 64978), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (64976, 64978), False, 'import traceback\n'), ((85749, 85763), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (85758, 85763), False, 'import os\n'), ((126167, 126184), 'platform.system', 'platform.system', ([], {}), '()\n', (126182, 126184), False, 'import platform\n'), ((126290, 126330), 'asyncio.WindowsSelectorEventLoopPolicy', 
'asyncio.WindowsSelectorEventLoopPolicy', ([], {}), '()\n', (126328, 126330), False, 'import asyncio\n'), ((126351, 126385), 'asyncio.set_event_loop_policy', 'asyncio.set_event_loop_policy', (['pol'], {}), '(pol)\n', (126380, 126385), False, 'import asyncio\n'), ((85887, 85900), 'os.popen', 'os.popen', (['cmd'], {}), '(cmd)\n', (85895, 85900), False, 'import os\n'), ((98308, 98322), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (98317, 98322), False, 'import os\n'), ((98442, 98517), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdin': 'None', 'stdout': 'None', 'stderr': 'None', 'close_fds': '(True)'}), '(cmd, stdin=None, stdout=None, stderr=None, close_fds=True)\n', (98458, 98517), False, 'import subprocess\n'), ((73951, 73990), 'ssl.SSLContext', 'ssl.SSLContext', (['ssl.PROTOCOL_TLS_CLIENT'], {}), '(ssl.PROTOCOL_TLS_CLIENT)\n', (73965, 73990), False, 'import ssl\n')]
|
import re

# Compiled pattern for a Markdown-style bold span: two asterisks, a
# lazily matched body (captured as group 1), two asterisks.
bold = re.compile(r'\*{2}(.*?)\*{2}')
text = 'Make this **bold**. This **too**.'

# Show the original line, then the line with bold spans converted to
# HTML <b> tags.
for label, value in (('Text:', text), ('Bold:', bold.sub(r'<b>\1</b>', text))):
    print(label, value)
|
[
"re.compile"
] |
[((18, 49), 're.compile', 're.compile', (['"""\\\\*{2}(.*?)\\\\*{2}"""'], {}), "('\\\\*{2}(.*?)\\\\*{2}')\n", (28, 49), False, 'import re\n')]
|
#!/usr/bin/env python3
#
import argparse
import datetime
import os
import re
import signal
import subprocess
import sys
import time
import traceback
import ivr
# Real-time recording container format: mkv, mp4, avi
FOOTAGE_FILE_EXT = "avi"
# Handle to the currently running FFmpeg subprocess (None when idle)
ffmpeg_process = None
# Exception raised when FFmpeg does not exit before the specified
# timeout elapses.
class TimeoutException(Exception):
    """Raised by timeout_handler when the FFmpeg wait times out."""
    pass
# A handler that raises TimeoutException when an FFmpeg timeout is
# detected (intended for installation via signal.signal).
def timeout_handler(signum, frame):
    """Raise TimeoutException; standard (signum, frame) signal API."""
    raise TimeoutException("")
# Start recording the footage.
# Returns the FFmpeg exit-code and the name of the generated footage file.
def start_camera_recording(
    dev_video,
    dev_audio,
    telop_file,
    dir,
    video_resolution,
    video_fps,
    video_input_format,
    video_bitrate,
    sampling_rate,
):
    """Record footage from the camera until the top of the next hour.

    Builds and runs an FFmpeg command that captures video (and optionally
    audio) with a text overlay read from `telop_file`, writing a uniquely
    named file into `dir`.  The recording length is clamped so each file
    ends on an hour boundary; if fewer than 60 seconds remain in the
    current hour, recording extends through the following hour as well.

    Args:
        dev_video: V4L2 device path, e.g. "/dev/video0".
        dev_audio: ALSA "card,device" string, or None to record video only.
        telop_file: text file whose contents are overlaid on the footage.
        dir: output directory for footage files.
        video_resolution: capture size as "WIDTHxHEIGHT".
        video_fps: output frame rate, or None for the camera default.
        video_input_format: camera pixel format (e.g. "mjpeg"), or None.
        video_bitrate: target bitrate such as "2M", or None.
        sampling_rate: audio sampling rate, or None for the default.

    Returns:
        Tuple of (FFmpeg exit code, path of the generated footage file).
    """
    global ffmpeg_process
    # determine unique file name
    output = new_footage_file(dir, datetime.datetime.now(), FOOTAGE_FILE_EXT)
    # calculate the number of seconds remaining in this hour
    delta = datetime.timedelta(hours=1)
    now = datetime.datetime.now()
    end = datetime.datetime(now.year, now.month, now.day, now.hour) + delta
    interval = (end - now).seconds
    if interval < 60:
        # to avoid a recording time of less than one minute
        # to avoid running with -t 0 in cases like now=20:59:59.940
        end = end + delta
        interval = (end - now).seconds
    t1 = now.strftime("%F %T")
    t2 = end.time()
    # scale text size according to resolution (factors are tuned for 360p)
    m = re.fullmatch(r"(\d+)[xX](\d+)", video_resolution)
    height = int(m.group(2))
    text_resolution = height / 360
    font_size = int(text_resolution * 12)
    p16 = int(text_resolution * 16)
    p4 = int(text_resolution * 4)
    # overlay filter chain: dark band at the bottom + telop text on top of it
    telop = [
        "format=pix_fmts=yuv420p",
        "drawbox=y=ih-{0}:w=iw:h={0}:t=fill:color=black@0.4".format(p16),
        "drawtext=textfile={0}:fontsize={1}:reload=1:fontcolor=#DDDDDD:x={2}:y=h-{3}".format(
            telop_file, font_size, p4, font_size
        ),
    ]
    if video_fps is not None:
        telop.extend(["framerate={}".format(video_fps)])
    command = ["ffmpeg"]
    command.extend(["-y"])
    command.extend(["-nostdin"])
    command.extend(["-loglevel", "warning"])
    command.extend(["-t", str(interval)])
    # video input options
    # -vsync: When a frame isn't received from the camera at the specified frame rate, it
    # deletes or duplicates the frame to achieve the specified frame rate.
    # -ss 0:00: To avoid the error "application provided invalid, non monotonically increasing dts
    # to muxer in stream" in Logitech C922n.
    command.extend(["-f", "v4l2"])
    command.extend(["-thread_queue_size", "8192"])
    command.extend(["-s", video_resolution])
    if video_input_format is not None:
        command.extend(["-input_format", video_input_format])
    command.extend(["-ss", "0:00"])
    command.extend(["-i", dev_video])
    # audio input options
    # [mono/stereo] The "-ac 1" and "-channel_layout mono" are added to avoid warning message
    # "Guessed Channel Layout for Input Stream #1.0 : mono", but they cause an error when C922n
    # uses a stereo microphone, so they aren't fixed to mono but left to auto-recognition.
    if dev_audio is not None:
        command.extend(["-f", "alsa"])
        command.extend(["-thread_queue_size", "8192"])
        if sampling_rate is not None:
            command.extend(["-ar", sampling_rate])  # audio sampling rate
        command.extend(["-i", "hw:{}".format(dev_audio)])
    # video filter
    command.extend(["-vf", ",".join(telop)])
    # video / audio output options (codec depends on the container format)
    if FOOTAGE_FILE_EXT == "mkv":
        command.extend(["-c:v", "mjpeg"])
        command.extend(["-q:v", "3"])  # -q:v: JPEG quality (2-31)
    elif FOOTAGE_FILE_EXT == "mp4":
        command.extend(["-c:v", "h264_v4l2m2m"])
        command.extend(["-pix_fmt", "yuv420p"])
    elif FOOTAGE_FILE_EXT == "avi":
        command.extend(["-c:v", "h264_v4l2m2m"])
        command.extend(["-pix_fmt", "yuv420p"])
        # NOTE: If the OS hangs-up or repeatedly stops with the following errors, increase the value
        # of -qmax or comment it out.
        # > [h264_v4l2m2m @ 0x55a384e750] === poll unexpected TIMEOUT: events=0x147, cap buffers=4
        # command.extend(["-qmin", "0"])
        # command.extend(["-qmax", "51"])
    if video_bitrate is not None:
        # NOTE: By making buffer size (in bit) to be same as bit rate, it'll be flushed once a
        # second. This may cause underflow, but is necessary to save last-minute records in an
        # environment with frequent power off.
        command.extend(["-b:v", video_bitrate])
        command.extend(["-bufsize", video_bitrate])
    # output file
    command.extend([output])
    proc = subprocess.Popen(
        command,
        stdin=subprocess.DEVNULL,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.PIPE,
    )
    ffmpeg_process = proc
    try:
        # SIGALRM acts as a watchdog: fire 15s after the expected end time
        signal.signal(signal.SIGALRM, timeout_handler)
        signal.alarm(interval + 15)
        ivr.save_pid("ffmpeg", proc.pid)
        ivr.log("start recording[{}]: {}".format(proc.pid, " ".join(proc.args)))
        ivr.log("  to {} between {} and {} ({} sec)".format(output, t1, t2, interval))
        # relay FFmpeg's stderr into the IVR log until the process exits
        line = proc.stderr.readline()
        while line:
            ivr.log("FFmpeg: {}".format(line.decode("utf-8").strip()))
            line = proc.stderr.readline()
    except TimeoutException:
        ivr.log("FFmpeg didn't finish after {} sec; sending SIGTERM".format(interval))
    finally:
        ffmpeg_process = None
        signal.alarm(0)
        if proc.returncode is None:
            proc.terminate()
        ivr.remove_pid("ffmpeg")
    try:
        proc.wait(10)
    except subprocess.TimeoutExpired:
        # terminate() was ignored; force-kill so we never hang here
        proc.kill()
    return (proc.returncode, output)
# Create a new file name based on the specified datetime that doesn't overlap with any existing
# footage file.
def new_footage_file(dir, now, ext):
    """Reserve and return a footage-file path for `now` that doesn't collide
    with any existing file in `dir`.

    A sequence number persisted in the ".control" file under the data
    directory keeps names unique across restarts; it is advanced modulo
    1,000,000 after each successful reservation.  The chosen file is
    created empty to claim the name.
    """
    control = os.path.join(ivr.data_dir(), ".control")
    seq = 0
    if os.path.exists(control):
        with open(control, mode="r") as f:
            seq = int(f.read())
    while True:
        candidate = os.path.join(dir, ivr.footage_file_name(now, seq, ext))
        try:
            # mode "x" atomically claims the name; it fails if the file exists
            open(candidate, mode="x").close()
        except FileExistsError:
            seq = (seq + 1) % 1000000
            continue
        # persist the next sequence number for subsequent runs
        with open(control, mode="w") as f:
            f.write(str((seq + 1) % 1000000))
        return candidate
# Well-known display sizes, mapped from "WIDTHxHEIGHT" to their aliases.
# Order matters: when an alias appears twice (e.g. WXGA), the first entry wins.
SCREEN_SIZE_ALIASES = {
    "320x180": ["QVGA"],
    "400x240": ["WQVGA"],
    "352x288": ["CIF"],
    "640x200": ["CGA"],
    "480x320": ["HVGA"],
    "640x350": ["EGA"],
    "640x400": ["DCGA"],
    "640x480": ["VGA", "480p"],
    "720x480": ["DVD", "NTSC480"],
    "720x483": ["NTSC"],
    "800x480": ["WVGA"],
    "854x480": ["FWVGA"],
    "864x480": ["FWVGA+"],
    "800x600": ["SVGA"],
    "1024x480": ["UWVGA"],
    "1024x576": ["WSVGA"],
    "1280x600": ["UWSVGA"],
    "1024x768": ["XGA"],
    "1280x720": ["720p", "HD", "HDTV"],
    "1280x768": ["WXGA"],
    "1152x864": ["XGA+"],
    "1280x800": ["WXGA"],
    "1366x768": ["FWXGA"],
    "1280x1024": ["SXGA"],
    "1280x1050": ["SXGA+"],
    "1920x1080": ["1080p", "1080i", "FHD", "Full HD", "2k"],
    "3840x2160": ["2160p", "4k"],
}


def screen_resolution(name):
    """Return `name` normalized to "WIDTHxHEIGHT" notation, or None.

    `name` may already be in WIDTHxHEIGHT form, or it may be one of the
    aliases in SCREEN_SIZE_ALIASES (matched case-insensitively).
    """
    if re.fullmatch(r"\d+x\d+", name):
        return name
    wanted = name.upper()
    for size, aliases in SCREEN_SIZE_ALIASES.items():
        if any(alias.upper() == wanted for alias in aliases):
            return size
    return None
# Returns the device with the lowest number among the USB connected video devices from captured
# video device list using v4l2-ctl --list-devices.
# If no such device was detected, returns `None`.
# This function returns a map with a list of devices, such as /dev/video0, using the title as its
# key.
def detect_default_usb_camera():
    """Return (title, device) of the USB camera with the lowest /dev/videoN.

    Parses `v4l2-ctl --list-devices` output, where titles look like
    "C270 HD WEBCAM (usb-3f980000.usb-1.3):" followed by tab-indented
    device paths.  Returns (None, None) when no USB camera is found.
    """
    cmd = ["v4l2-ctl", "--list-devices"]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    current_title = None
    min_n = None
    device = None
    title = None
    pattern_device = r"/dev/video([0-9]+)"
    pattern_title = r"\(usb-[^\)]*\):"
    while True:
        line = proc.stdout.readline()
        if not line and proc.poll() is not None:
            break
        line = line.decode("utf-8")
        if not line.startswith("\t") and line.endswith(":\n"):
            # BUG FIX: pattern_title has no capture group, so the previous
            # "min_n > int(matcher[1])" check raised IndexError once a device
            # had been found.  A title line only needs to match the pattern.
            matcher = re.search(pattern_title, line)
            if matcher is not None:
                current_title = line.strip()
            else:
                current_title = None
        elif line.startswith("\t"):
            if current_title is not None:
                # Keep the device with the smallest N in /dev/videoN.
                matcher = re.search(pattern_device, line)
                if matcher is not None and (min_n is None or min_n > int(matcher[1])):
                    title = current_title
                    device = line.strip()
                    min_n = int(matcher[1])
            elif len(line.strip()) != 0:
                ivr.log("WARNING: unknown device: %s" % line)
    if device is not None:
        return (title, device)
    return (None, None)
# Get the card number and device number of the first USB audio device detected.
def detect_default_usb_audio():
    """Return (description, "card,device") of the first USB audio device
    reported by `arecord --list-devices`, or (None, None) if none exists."""
    usb_line = r"^card ([0-9]+): .*, device ([0-9]+): USB Audio.*$"
    proc = subprocess.Popen(["arecord", "--list-devices"], stdout=subprocess.PIPE)
    while True:
        raw = proc.stdout.readline()
        if not raw and proc.poll() is not None:
            # output drained and process finished: nothing matched
            return (None, None)
        text = raw.decode("utf-8").strip()
        found = re.fullmatch(usb_line, text, re.IGNORECASE)
        if found:
            hw = "%d,%d" % (int(found[1]), int(found[2]))
            return (text.strip(), hw)
# Stop the FFmpeg subprocess if it's running and a TermException will be thrown.
def term_handler(signum, frame):
    """Signal handler: stop a running FFmpeg (if any), then raise
    ivr.TermException so the main loop can shut down cleanly."""
    global ffmpeg_process
    proc = ffmpeg_process
    if proc is not None:
        proc.terminate()
    raise ivr.TermException("")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Save the footage from USB camera")
parser.add_argument(
"-d",
"--dir",
metavar="DIR",
default=ivr.data_dir(),
help="Directory where footage files are stored (default: {})".format(
ivr.data_dir()
),
)
parser.add_argument(
"-t",
"--telop",
metavar="FILE",
default=ivr.telop_file(),
help="File that contains text to overlay on the footage (default: {})".format(
ivr.telop_file()
),
)
parser.add_argument(
"-v",
"--video",
metavar="DEVICE",
help="Camera device to be used, such as /dev/video0 (default: auto detect)",
)
parser.add_argument(
"-vr",
"--video-resolution",
metavar="RESOLUTION",
default="640x360",
help="Screen resolution for video recording, such as 1280x720, 720p, or HD (default: 640x360)",
)
parser.add_argument(
"-vf",
"--video-fps",
metavar="FPS",
help="Output frames per second for video recording, such as 30, 60 (default: depends on runtime)",
)
parser.add_argument(
"-vif",
"--video-input-format",
metavar="FORMAT",
help="Input format from camera, such as yuyv422, mjpeg (default: depends on runtime). See `ffmpeg -f v4l2 -list_formats all -i /dev/video0` for valid values.",
)
parser.add_argument(
"-vbr",
"--video-bitrate",
metavar="BITRATE",
default="2M",
help="Bitrate for video recording (default: 2M)",
)
parser.add_argument(
"-a",
"--without-audio",
action="store_true",
help="Don't record audio along with video (default: with audio)",
)
parser.add_argument(
"-as",
"--audio-sampling-rate",
metavar="SAMPLING_RATE",
help="Sampling rate for audio recording (default: depends on runtime)",
)
try:
ivr.save_pid()
# register SIGTERM handler
signal.signal(signal.SIGTERM, term_handler)
signal.signal(signal.SIGINT, term_handler)
args = parser.parse_args()
dir = args.dir
telop = args.telop
dev_video = args.video
video_resolution = args.video_resolution
video_fps = args.video_fps
video_input_format = args.video_input_format
video_bitrate = args.video_bitrate
without_audio = args.without_audio
sampling_rate = args.audio_sampling_rate
# resolve screen resolution name
res = screen_resolution(video_resolution)
if res is None:
print("ERROR: invalid screen resolution: {}".format(video_resolution))
exit(1)
video_resolution = res
if len(video_bitrate) == 0:
video_bitrate = None
# auto-detect video and audio devices
if dev_video is None:
dev_video_title, dev_video = detect_default_usb_camera()
ivr.log("detected USB camera: {} = {}".format(dev_video, dev_video_title))
dev_audio = None
if not without_audio:
dev_autio_title, dev_audio = detect_default_usb_audio()
ivr.log("detected Audio: {} = {}".format(dev_audio, dev_autio_title))
# create an empty telop file assuming that it's before the GPS logger is started
if not os.path.isfile(telop):
ivr.write(telop, ivr.DEFAULT_TELOP)
ivr.beep("IVR starts to recording.")
while True:
start = datetime.datetime.now()
ret, file = start_camera_recording(
dev_video,
dev_audio,
telop,
dir,
video_resolution,
video_fps,
video_input_format,
video_bitrate,
sampling_rate,
)
ivr.beep("")
ivr.log(
"the recording of {} has been terminated with: {}".format(file, ret)
)
# to avoid reporting error consecutively in a short period of time
if ret != 0:
interval = max(0, 3 - (datetime.datetime.now() - start).total_seconds())
if interval > 0:
time.sleep(interval)
except ivr.TermException as e:
ivr.log("IVR terminates the recording")
ivr.beep("footage recording has stopped")
except Exception as e:
t = "".join(list(traceback.TracebackException.from_exception(e).format()))
ivr.log("ERROR: {}".format(t))
ivr.log("IVR terminates the recording by an error")
ivr.beep("footage recording has stopped due to an error")
sys.exit(1)
finally:
ivr.remove_pid()
|
[
"ivr.TermException",
"argparse.ArgumentParser",
"ivr.telop_file",
"os.path.isfile",
"os.path.join",
"ivr.write",
"traceback.TracebackException.from_exception",
"re.fullmatch",
"ivr.beep",
"ivr.remove_pid",
"os.path.exists",
"datetime.timedelta",
"signal.alarm",
"re.search",
"datetime.datetime.now",
"ivr.save_pid",
"subprocess.Popen",
"datetime.datetime",
"time.sleep",
"signal.signal",
"sys.exit",
"ivr.footage_file_name",
"ivr.data_dir",
"ivr.log"
] |
[((1051, 1078), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (1069, 1078), False, 'import datetime\n'), ((1089, 1112), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1110, 1112), False, 'import datetime\n'), ((1545, 1595), 're.fullmatch', 're.fullmatch', (['"""(\\\\d+)[xX](\\\\d+)"""', 'video_resolution'], {}), "('(\\\\d+)[xX](\\\\d+)', video_resolution)\n", (1557, 1595), False, 'import re\n'), ((4842, 4949), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'stdin': 'subprocess.DEVNULL', 'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.PIPE'}), '(command, stdin=subprocess.DEVNULL, stdout=subprocess.\n DEVNULL, stderr=subprocess.PIPE)\n', (4858, 4949), False, 'import subprocess\n'), ((6170, 6199), 'os.path.exists', 'os.path.exists', (['sequence_file'], {}), '(sequence_file)\n', (6184, 6199), False, 'import os\n'), ((8312, 8357), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE'}), '(cmd, stdout=subprocess.PIPE)\n', (8328, 8357), False, 'import subprocess\n'), ((9809, 9854), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE'}), '(cmd, stdout=subprocess.PIPE)\n', (9825, 9854), False, 'import subprocess\n'), ((10526, 10547), 'ivr.TermException', 'ivr.TermException', (['""""""'], {}), "('')\n", (10543, 10547), False, 'import ivr\n'), ((10590, 10661), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Save the footage from USB camera"""'}), "(description='Save the footage from USB camera')\n", (10613, 10661), False, 'import argparse\n'), ((934, 957), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (955, 957), False, 'import datetime\n'), ((1123, 1180), 'datetime.datetime', 'datetime.datetime', (['now.year', 'now.month', 'now.day', 'now.hour'], {}), '(now.year, now.month, now.day, now.hour)\n', (1140, 1180), False, 'import datetime\n'), ((5027, 5073), 'signal.signal', 'signal.signal', 
(['signal.SIGALRM', 'timeout_handler'], {}), '(signal.SIGALRM, timeout_handler)\n', (5040, 5073), False, 'import signal\n'), ((5082, 5109), 'signal.alarm', 'signal.alarm', (['(interval + 15)'], {}), '(interval + 15)\n', (5094, 5109), False, 'import signal\n'), ((5119, 5151), 'ivr.save_pid', 'ivr.save_pid', (['"""ffmpeg"""', 'proc.pid'], {}), "('ffmpeg', proc.pid)\n", (5131, 5151), False, 'import ivr\n'), ((5660, 5675), 'signal.alarm', 'signal.alarm', (['(0)'], {}), '(0)\n', (5672, 5675), False, 'import signal\n'), ((5749, 5773), 'ivr.remove_pid', 'ivr.remove_pid', (['"""ffmpeg"""'], {}), "('ffmpeg')\n", (5763, 5773), False, 'import ivr\n'), ((6125, 6139), 'ivr.data_dir', 'ivr.data_dir', ([], {}), '()\n', (6137, 6139), False, 'import ivr\n'), ((6371, 6405), 'ivr.footage_file_name', 'ivr.footage_file_name', (['now', 'i', 'ext'], {}), '(now, i, ext)\n', (6392, 6405), False, 'import ivr\n'), ((6421, 6449), 'os.path.join', 'os.path.join', (['dir', 'file_name'], {}), '(dir, file_name)\n', (6433, 6449), False, 'import os\n'), ((7673, 7704), 're.fullmatch', 're.fullmatch', (['"""\\\\d+x\\\\d+"""', 'name'], {}), "('\\\\d+x\\\\d+', name)\n", (7685, 7704), False, 'import re\n'), ((10105, 10147), 're.fullmatch', 're.fullmatch', (['pattern', 'line', 're.IGNORECASE'], {}), '(pattern, line, re.IGNORECASE)\n', (10117, 10147), False, 'import re\n'), ((12594, 12608), 'ivr.save_pid', 'ivr.save_pid', ([], {}), '()\n', (12606, 12608), False, 'import ivr\n'), ((12653, 12696), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'term_handler'], {}), '(signal.SIGTERM, term_handler)\n', (12666, 12696), False, 'import signal\n'), ((12705, 12747), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'term_handler'], {}), '(signal.SIGINT, term_handler)\n', (12718, 12747), False, 'import signal\n'), ((14080, 14116), 'ivr.beep', 'ivr.beep', (['"""IVR starts to recording."""'], {}), "('IVR starts to recording.')\n", (14088, 14116), False, 'import ivr\n'), ((15363, 15379), 'ivr.remove_pid', 
'ivr.remove_pid', ([], {}), '()\n', (15377, 15379), False, 'import ivr\n'), ((8834, 8864), 're.search', 're.search', (['pattern_title', 'line'], {}), '(pattern_title, line)\n', (8843, 8864), False, 'import re\n'), ((10757, 10771), 'ivr.data_dir', 'ivr.data_dir', ([], {}), '()\n', (10769, 10771), False, 'import ivr\n'), ((10993, 11009), 'ivr.telop_file', 'ivr.telop_file', ([], {}), '()\n', (11007, 11009), False, 'import ivr\n'), ((14000, 14021), 'os.path.isfile', 'os.path.isfile', (['telop'], {}), '(telop)\n', (14014, 14021), False, 'import os\n'), ((14035, 14070), 'ivr.write', 'ivr.write', (['telop', 'ivr.DEFAULT_TELOP'], {}), '(telop, ivr.DEFAULT_TELOP)\n', (14044, 14070), False, 'import ivr\n'), ((14157, 14180), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (14178, 14180), False, 'import datetime\n'), ((14512, 14524), 'ivr.beep', 'ivr.beep', (['""""""'], {}), "('')\n", (14520, 14524), False, 'import ivr\n'), ((14957, 14996), 'ivr.log', 'ivr.log', (['"""IVR terminates the recording"""'], {}), "('IVR terminates the recording')\n", (14964, 14996), False, 'import ivr\n'), ((15005, 15046), 'ivr.beep', 'ivr.beep', (['"""footage recording has stopped"""'], {}), "('footage recording has stopped')\n", (15013, 15046), False, 'import ivr\n'), ((15204, 15255), 'ivr.log', 'ivr.log', (['"""IVR terminates the recording by an error"""'], {}), "('IVR terminates the recording by an error')\n", (15211, 15255), False, 'import ivr\n'), ((15264, 15321), 'ivr.beep', 'ivr.beep', (['"""footage recording has stopped due to an error"""'], {}), "('footage recording has stopped due to an error')\n", (15272, 15321), False, 'import ivr\n'), ((15330, 15341), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (15338, 15341), False, 'import sys\n'), ((10863, 10877), 'ivr.data_dir', 'ivr.data_dir', ([], {}), '()\n', (10875, 10877), False, 'import ivr\n'), ((11110, 11126), 'ivr.telop_file', 'ivr.telop_file', ([], {}), '()\n', (11124, 11126), False, 'import ivr\n'), ((9220, 9251), 
're.search', 're.search', (['pattern_device', 'line'], {}), '(pattern_device, line)\n', (9229, 9251), False, 'import re\n'), ((9516, 9561), 'ivr.log', 'ivr.log', (["('WARNING: unknown device: %s' % line)"], {}), "('WARNING: unknown device: %s' % line)\n", (9523, 9561), False, 'import ivr\n'), ((14892, 14912), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (14902, 14912), False, 'import time\n'), ((15099, 15145), 'traceback.TracebackException.from_exception', 'traceback.TracebackException.from_exception', (['e'], {}), '(e)\n', (15142, 15145), False, 'import traceback\n'), ((14789, 14812), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (14810, 14812), False, 'import datetime\n')]
|
#!/usr/bin/env python
"""
sample_metadata.txt example:
Sample,PrimerF,PrimerR
IgG,D702_F,D501_R
IgM,D702_F,D502_R
IgK,D702_F,D503_R
IgL,D702_F,D504_R
IgGsub,D705_F,D501_R
Command example:
python bcrtcr_process_post_lima.py output.D702_F--D501_R.bam.fastq IgG -n -w 50 > IgG.log
python bcrtcr_process_post_lima.py output.D702_F--D501_R.bam.fastq IgM -n -w 50 > IgM.log
"""
import os, sys
from csv import DictReader
# Allowed sample names (upper-cased) mapped to the -w value used downstream.
VALID_REP_NAMES = {'IGG':50,'IGM':50,'IGK':50,'IGL':50,'IGA':50,'IGGSUB':120}
# UMI count cutoffs for which filtering commands are generated.
UMI_CUTOFFS = [2, 3, 5, 10]
def read_metadata(filename):
    """Read the sample metadata CSV and return {(PrimerF, PrimerR): Sample}.

    The file must have a Sample,PrimerF,PrimerR header, and every Sample
    must be one of VALID_REP_NAMES (case-insensitive); otherwise an error
    is printed to stderr and the program exits.
    """
    reader = DictReader(open(filename), delimiter=',')
    if 'Sample' not in reader.fieldnames or \
       'PrimerF' not in reader.fieldnames or \
       'PrimerR' not in reader.fieldnames:
        # BUG FIX: the message used a {0} placeholder without calling
        # .format(), so the literal "{0}" was printed instead of the filename.
        print("metadata file {0} must have headers Sample,PrimerF,PrimerR. Abort!".format(filename), file=sys.stderr)
        sys.exit(-1)
    d = {}  # (F,R) --> sample
    for r in reader:
        if r['Sample'].upper() not in VALID_REP_NAMES:
            print("{0} is not a valid sample name! Must be one of {1}".format(r['Sample'], list(VALID_REP_NAMES.keys())), file=sys.stderr)
            sys.exit(-1)
        d[r['PrimerF'],r['PrimerR']] = r['Sample']
    return d
def main(args):
    """Generate a shell-command script for post-lima BCR/TCR processing.

    Writes, to args.output_cmd: (1) bamtools BAM->FASTQ conversion commands
    for every demultiplexed BAM that exists, (2) per-sample processing
    commands with the -w window from VALID_REP_NAMES, and (3) UMI-filtering
    commands for every cutoff in UMI_CUTOFFS.

    Expected args attributes: lima_prefix, metadata, output_cmd.
    """
    # FIX: use a context manager so the command file is closed (and flushed)
    # even if an error occurs part-way through; the old code leaked the handle.
    with open(args.output_cmd, 'w') as f_cmd:
        meta_info = read_metadata(args.metadata)
        good_files = []
        for (pF, pR), sample in meta_info.items():
            bam_file = "{0}.{1}--{2}.bam".format(args.lima_prefix, pF, pR)
            if not os.path.exists(bam_file):
                print("WARNING: expected {0} but not found. Ignoring.".format(bam_file), file=sys.stderr)
            else:
                f_cmd.write("bamtools convert -format fastq -in {0} > {0}.fastq\n".format(bam_file))
                good_files.append((bam_file, sample))
        for bam_file, sample in good_files:
            f_cmd.write("python bcrtcr_process_post_lima.py {0}.fastq {1} -n -w {2} > {1}.log\n".format(
                bam_file, sample, VALID_REP_NAMES[sample.upper()]))
        for bam_file, sample in good_files:
            determined_fq = sample + '_' + sample.upper()[:3] + '.determined.fq'
            for cutoff in UMI_CUTOFFS:
                f_cmd.write("python bcr_filter_by_umi_count.py {0} -c {1}\n".format(determined_fq, cutoff))
if __name__ == "__main__":
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument("-p", "--lima_prefix", help="Lima output prefix")
parser.add_argument("-m", "--metadata", help="Sample metadata file")
parser.add_argument("-o", "--output_cmd", help="Output command file")
args = parser.parse_args()
main(args)
|
[
"os.path.exists",
"argparse.ArgumentParser",
"sys.exit"
] |
[((2311, 2327), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (2325, 2327), False, 'from argparse import ArgumentParser\n'), ((859, 871), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (867, 871), False, 'import os, sys\n'), ((1130, 1142), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (1138, 1142), False, 'import os, sys\n'), ((1460, 1484), 'os.path.exists', 'os.path.exists', (['bam_file'], {}), '(bam_file)\n', (1474, 1484), False, 'import os, sys\n')]
|
import torch
from ..distances import CosineSimilarity
from ..reducers import DivisorReducer
from ..utils import common_functions as c_f
from .base_regularizer import BaseRegularizer
class SparseCentersRegularizer(BaseRegularizer):
    """Regularizer that pushes apart the multiple centers of each class.

    Expects a weight matrix holding `centers_per_class` centers for each
    of `num_classes` classes, laid out class by class, and penalizes high
    cosine similarity between centers that belong to the same class.
    """

    def __init__(self, num_classes, centers_per_class, **kwargs):
        super().__init__(**kwargs)
        assert centers_per_class > 1
        # This regularizer is only defined for cosine similarity.
        c_f.assert_distance_type(self, CosineSimilarity)
        self.set_class_masks(num_classes, centers_per_class)
        self.add_to_recordable_attributes(
            list_of_names=["num_classes", "centers_per_class"], is_stat=False
        )
        self.add_to_recordable_attributes(
            list_of_names=["same_class_center_sim", "diff_class_center_sim"],
            is_stat=True,
        )

    def compute_loss(self, weights):
        """Return the loss dict penalizing similar same-class centers.

        The "divisor" entry is consumed by the DivisorReducer returned
        from get_default_reducer().
        """
        center_similarities = self.distance(weights)
        small_val = c_f.small_val(weights.dtype)
        # Cap at 2 so the sqrt argument below stays >= small_val (no NaNs).
        center_similarities_masked = torch.clamp(
            2.0 * center_similarities[self.same_class_mask], max=2
        )
        divisor = 2 * torch.sum(self.same_class_mask)
        reg = torch.sqrt(2.0 + small_val - center_similarities_masked)
        self.set_stats(center_similarities)
        return {
            "loss": {
                "losses": reg,
                "indices": c_f.torch_arange_from_size(reg),
                "reduction_type": "element",
                "divisor": divisor,
            }
        }

    def set_class_masks(self, num_classes, centers_per_class):
        """Build boolean masks over center pairs.

        same_class_mask marks the strict upper triangle of each class's
        diagonal block (each same-class pair counted once);
        diff_class_mask marks everything outside the diagonal blocks.
        """
        total_num_centers = num_classes * centers_per_class
        self.diff_class_mask = torch.ones(
            total_num_centers, total_num_centers, dtype=torch.bool
        )
        self.same_class_mask = torch.zeros(
            total_num_centers, total_num_centers, dtype=torch.bool
        )
        for i in range(num_classes):
            s, e = i * centers_per_class, (i + 1) * centers_per_class
            curr_block = torch.ones(centers_per_class, centers_per_class)
            # diagonal=1 keeps only the strict upper triangle of the block
            curr_block = torch.triu(curr_block, diagonal=1)
            self.same_class_mask[s:e, s:e] = curr_block
            self.diff_class_mask[s:e, s:e] = 0

    def set_stats(self, center_similarities):
        """Record mean same-/different-class center similarities (stats only)."""
        if self.collect_stats:
            with torch.no_grad():
                self.same_class_center_sim = torch.mean(
                    center_similarities[self.same_class_mask]
                ).item()
                self.diff_class_center_sim = torch.mean(
                    center_similarities[self.diff_class_mask]
                ).item()

    def get_default_distance(self):
        """Cosine similarity is the only supported distance."""
        return CosineSimilarity()

    def get_default_reducer(self):
        """DivisorReducer consumes the "divisor" entry of the loss dict."""
        return DivisorReducer()
|
[
"torch.mean",
"torch.ones",
"torch.sqrt",
"torch.clamp",
"torch.triu",
"torch.zeros",
"torch.no_grad",
"torch.sum"
] |
[((955, 1022), 'torch.clamp', 'torch.clamp', (['(2.0 * center_similarities[self.same_class_mask])'], {'max': '(2)'}), '(2.0 * center_similarities[self.same_class_mask], max=2)\n', (966, 1022), False, 'import torch\n'), ((1113, 1169), 'torch.sqrt', 'torch.sqrt', (['(2.0 + small_val - center_similarities_masked)'], {}), '(2.0 + small_val - center_similarities_masked)\n', (1123, 1169), False, 'import torch\n'), ((1604, 1670), 'torch.ones', 'torch.ones', (['total_num_centers', 'total_num_centers'], {'dtype': 'torch.bool'}), '(total_num_centers, total_num_centers, dtype=torch.bool)\n', (1614, 1670), False, 'import torch\n'), ((1724, 1791), 'torch.zeros', 'torch.zeros', (['total_num_centers', 'total_num_centers'], {'dtype': 'torch.bool'}), '(total_num_centers, total_num_centers, dtype=torch.bool)\n', (1735, 1791), False, 'import torch\n'), ((1067, 1098), 'torch.sum', 'torch.sum', (['self.same_class_mask'], {}), '(self.same_class_mask)\n', (1076, 1098), False, 'import torch\n'), ((1946, 1994), 'torch.ones', 'torch.ones', (['centers_per_class', 'centers_per_class'], {}), '(centers_per_class, centers_per_class)\n', (1956, 1994), False, 'import torch\n'), ((2020, 2054), 'torch.triu', 'torch.triu', (['curr_block'], {'diagonal': '(1)'}), '(curr_block, diagonal=1)\n', (2030, 2054), False, 'import torch\n'), ((2253, 2268), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2266, 2268), False, 'import torch\n'), ((2315, 2368), 'torch.mean', 'torch.mean', (['center_similarities[self.same_class_mask]'], {}), '(center_similarities[self.same_class_mask])\n', (2325, 2368), False, 'import torch\n'), ((2459, 2512), 'torch.mean', 'torch.mean', (['center_similarities[self.diff_class_mask]'], {}), '(center_similarities[self.diff_class_mask])\n', (2469, 2512), False, 'import torch\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import ssh_config
from ssh_config import SSHConfig, Host
from os.path import expanduser
from pathlib import Path
from os import path
import subprocess
import shutil
import shlex
import sys
import os
from tempor import ROOT_DIR, DATA_DIR
from tempor.console import console
# Per-user OpenSSH client configuration file managed by this module.
SSH_CONFIG_PATH = expanduser("~/.ssh/config")
def add_config_entry(hostname, attr):
    """Append a Host entry for `hostname` (with options `attr`) to the
    user's ~/.ssh/config, creating the file and ~/.ssh/ when missing."""
    cfg_path = expanduser(SSH_CONFIG_PATH)
    if path.isfile(cfg_path):
        cfg = SSHConfig.load(cfg_path)
    else:
        cfg_dir = os.path.dirname(SSH_CONFIG_PATH)
        if not path.exists(cfg_dir):
            os.makedirs(cfg_dir)
        # start a brand-new config file
        cfg = SSHConfig(cfg_path)
    cfg.append(Host(hostname, attr))
    cfg.write()
def remove_config_entry(hostname):
    """Delete `hostname` from ~/.ssh/config (if present) and clean up its
    data directory and the playbook artifacts directory, best effort."""
    cfg_path = expanduser(SSH_CONFIG_PATH)
    if not path.isfile(cfg_path):
        # no config file -> nothing was registered; leave everything alone
        return
    cfg = SSHConfig.load(cfg_path)
    try:
        cfg.remove(hostname)
        cfg.write()
    except KeyError:
        # entry was not in the config; nothing to remove
        pass
    # best-effort removal of per-host data and generated artifacts
    for leftover in (f"{DATA_DIR}/{hostname}", f"{ROOT_DIR}/playbooks/artifacts"):
        try:
            shutil.rmtree(leftover)
        except OSError:
            pass
def check_sshkeys(provider):
    """Ensure an ed25519 key pair exists for `provider`.

    Generates providers/<provider>/files/.ssh/id_ed25519 with ssh-keygen
    when it is missing.  Returns False if ssh-keygen is unavailable,
    True otherwise.
    """
    prog = shutil.which("ssh-keygen")
    if not prog:
        console.print("[red bold]ssh-keygen not available. Is OpenSSH installed?")
        return False
    out_dir = f"{ROOT_DIR}/providers/{provider}/files/.ssh"
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    out_file = f"{out_dir}/id_ed25519"
    if not os.path.exists(out_file):
        console.print("Generating new key pair...", end="", style="bold italic")
        # "yes |" auto-answers the overwrite prompt; -N ""/-C "" give an
        # empty passphrase and comment
        subprocess.call(
            f'yes | ssh-keygen -t ed25519 -N "" -C "" -f {out_file}',
            stdout=subprocess.DEVNULL,
            shell=True,
        )
        console.print("Done.")
    # BUG FIX: the function previously fell through returning None (falsy),
    # so callers using the result as a success flag saw failure on success.
    return True
def install_ssh_keys(provider, hostname, ip_address):
    """Copy the provider's .ssh files into the host's data directory and
    register a matching Host entry in ~/.ssh/config."""
    src_dir = f"{ROOT_DIR}/providers/{provider}/files/.ssh"
    dst_dir = f"{DATA_DIR}/{hostname}/ssh"
    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)
    # copy every file from the provider's .ssh directory (key pair etc.)
    for entry in os.listdir(src_dir):
        shutil.copy(os.path.join(src_dir, entry), dst_dir)
    add_config_entry(
        hostname,
        {
            "Hostname": ip_address,
            "User": "root",
            "Port": 22,
            "Compression": "yes",
            "StrictHostKeyChecking": "no",
            "UserKnownHostsFile": "/dev/null",
            "IdentityFile": f"{dst_dir}/id_ed25519",
        },
    )
|
[
"os.makedirs",
"os.path.join",
"os.path.dirname",
"os.path.exists",
"shutil.which",
"tempor.console.console.print",
"subprocess.call",
"ssh_config.Host",
"shutil.rmtree",
"os.path.expanduser",
"os.listdir"
] |
[((340, 367), 'os.path.expanduser', 'expanduser', (['"""~/.ssh/config"""'], {}), "('~/.ssh/config')\n", (350, 367), False, 'from os.path import expanduser\n'), ((423, 443), 'ssh_config.Host', 'Host', (['hostname', 'attr'], {}), '(hostname, attr)\n', (427, 443), False, 'from ssh_config import SSHConfig, Host\n'), ((1408, 1434), 'shutil.which', 'shutil.which', (['"""ssh-keygen"""'], {}), "('ssh-keygen')\n", (1420, 1434), False, 'import shutil\n'), ((2281, 2300), 'os.listdir', 'os.listdir', (['old_dir'], {}), '(old_dir)\n', (2291, 2300), False, 'import os\n'), ((1043, 1070), 'os.path.expanduser', 'expanduser', (['SSH_CONFIG_PATH'], {}), '(SSH_CONFIG_PATH)\n', (1053, 1070), False, 'from os.path import expanduser\n'), ((1183, 1222), 'shutil.rmtree', 'shutil.rmtree', (['f"""{DATA_DIR}/{hostname}"""'], {}), "(f'{DATA_DIR}/{hostname}')\n", (1196, 1222), False, 'import shutil\n'), ((1279, 1327), 'shutil.rmtree', 'shutil.rmtree', (['f"""{ROOT_DIR}/playbooks/artifacts"""'], {}), "(f'{ROOT_DIR}/playbooks/artifacts')\n", (1292, 1327), False, 'import shutil\n'), ((1460, 1534), 'tempor.console.console.print', 'console.print', (['"""[red bold]ssh-keygen not available. Is OpenSSH installed?"""'], {}), "('[red bold]ssh-keygen not available. 
Is OpenSSH installed?')\n", (1473, 1534), False, 'from tempor.console import console\n'), ((1628, 1651), 'os.path.exists', 'os.path.exists', (['out_dir'], {}), '(out_dir)\n', (1642, 1651), False, 'import os\n'), ((1661, 1681), 'os.makedirs', 'os.makedirs', (['out_dir'], {}), '(out_dir)\n', (1672, 1681), False, 'import os\n'), ((1733, 1757), 'os.path.exists', 'os.path.exists', (['out_file'], {}), '(out_file)\n', (1747, 1757), False, 'import os\n'), ((1767, 1839), 'tempor.console.console.print', 'console.print', (['"""Generating new key pair..."""'], {'end': '""""""', 'style': '"""bold italic"""'}), "('Generating new key pair...', end='', style='bold italic')\n", (1780, 1839), False, 'from tempor.console import console\n'), ((1848, 1964), 'subprocess.call', 'subprocess.call', (['f"""yes | ssh-keygen -t ed25519 -N "" -C "" -f {out_file}"""'], {'stdout': 'subprocess.DEVNULL', 'shell': '(True)'}), '(f\'yes | ssh-keygen -t ed25519 -N "" -C "" -f {out_file}\',\n stdout=subprocess.DEVNULL, shell=True)\n', (1863, 1964), False, 'import subprocess\n'), ((2016, 2038), 'tempor.console.console.print', 'console.print', (['"""Done."""'], {}), "('Done.')\n", (2029, 2038), False, 'from tempor.console import console\n'), ((2209, 2232), 'os.path.exists', 'os.path.exists', (['out_dir'], {}), '(out_dir)\n', (2223, 2232), False, 'import os\n'), ((2242, 2262), 'os.makedirs', 'os.makedirs', (['out_dir'], {}), '(out_dir)\n', (2253, 2262), False, 'import os\n'), ((500, 527), 'os.path.expanduser', 'expanduser', (['SSH_CONFIG_PATH'], {}), '(SSH_CONFIG_PATH)\n', (510, 527), False, 'from os.path import expanduser\n'), ((725, 752), 'os.path.expanduser', 'expanduser', (['SSH_CONFIG_PATH'], {}), '(SSH_CONFIG_PATH)\n', (735, 752), False, 'from os.path import expanduser\n'), ((793, 820), 'os.path.expanduser', 'expanduser', (['SSH_CONFIG_PATH'], {}), '(SSH_CONFIG_PATH)\n', (803, 820), False, 'from os.path import expanduser\n'), ((972, 999), 'os.path.expanduser', 'expanduser', (['SSH_CONFIG_PATH'], 
{}), '(SSH_CONFIG_PATH)\n', (982, 999), False, 'from os.path import expanduser\n'), ((2322, 2350), 'os.path.join', 'os.path.join', (['old_dir', 'fname'], {}), '(old_dir, fname)\n', (2334, 2350), False, 'import os\n'), ((577, 609), 'os.path.dirname', 'os.path.dirname', (['SSH_CONFIG_PATH'], {}), '(SSH_CONFIG_PATH)\n', (592, 609), False, 'import os\n'), ((636, 668), 'os.path.dirname', 'os.path.dirname', (['SSH_CONFIG_PATH'], {}), '(SSH_CONFIG_PATH)\n', (651, 668), False, 'import os\n')]
|
import os
from collections import defaultdict
from event.io.dataset.base import (
Span,
DataLoader,
DEDocument,
)
class Conll(DataLoader):
def __init__(self, params, corpus, with_doc=False):
super().__init__(params, corpus, with_doc)
self.params = params
def parse_conll_data(self, corpus, conll_in):
text = ""
offset = 0
arg_text = []
sent_predicates = []
sent_args = defaultdict(list)
doc = DEDocument(corpus)
props = []
for line in conll_in:
parts = line.strip().split()
if len(parts) < 8:
text += "\n"
offset += 1
for index, predicate in enumerate(sent_predicates):
arg_content = sent_args[index]
props.append((predicate, arg_content))
sent_predicates.clear()
sent_args.clear()
arg_text.clear()
continue
fname, _, index, token, pos, parse, lemma, sense = parts[:8]
pb_annos = parts[8:]
if len(arg_text) == 0:
arg_text = [None] * len(pb_annos)
domain = fname.split("/")[1]
start = offset
end = start + len(token)
text += token + " "
offset += len(token) + 1
for index, t in enumerate(arg_text):
if t:
arg_text[index] += " " + token
if not sense == "-":
sent_predicates.append((start, end, token))
for index, anno in enumerate(pb_annos):
if anno == "(V*)":
continue
if anno.startswith("("):
role = anno.strip("(").strip(")").strip("*")
sent_args[index].append([role, start])
arg_text[index] = token
if anno.endswith(")"):
sent_args[index][-1].append(end)
sent_args[index][-1].append(arg_text[index])
arg_text[index] = ""
doc.set_text(text)
for (p_start, p_end, p_token), args in props:
hopper = doc.add_hopper()
pred = doc.add_predicate(hopper, Span(p_start, p_end), p_token)
if pred is not None:
for role, arg_start, arg_end, arg_text in args:
filler = doc.add_filler(Span(arg_start, arg_end), arg_text)
doc.add_argument_mention(pred, filler.aid, role)
return doc
def get_doc(self):
super().get_doc()
for dirname in os.listdir(self.params.in_dir):
full_dir = os.path.join(self.params.in_dir, dirname)
for root, dirs, files in os.walk(full_dir):
for f in files:
if not f.endswith("gold_conll"):
continue
full_path = os.path.join(root, f)
out_dir = os.path.join(self.params.out_dir, dirname)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
docid = f.replace("gold_conll", "")
with open(full_path) as conll_in:
doc = self.parse_conll_data(self.corpus, conll_in)
doc.set_id(docid)
yield doc
|
[
"event.io.dataset.base.DEDocument",
"event.io.dataset.base.Span",
"os.makedirs",
"os.walk",
"os.path.exists",
"collections.defaultdict",
"os.path.join",
"os.listdir"
] |
[((450, 467), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (461, 467), False, 'from collections import defaultdict\n'), ((482, 500), 'event.io.dataset.base.DEDocument', 'DEDocument', (['corpus'], {}), '(corpus)\n', (492, 500), False, 'from event.io.dataset.base import Span, DataLoader, DEDocument\n'), ((2644, 2674), 'os.listdir', 'os.listdir', (['self.params.in_dir'], {}), '(self.params.in_dir)\n', (2654, 2674), False, 'import os\n'), ((2699, 2740), 'os.path.join', 'os.path.join', (['self.params.in_dir', 'dirname'], {}), '(self.params.in_dir, dirname)\n', (2711, 2740), False, 'import os\n'), ((2778, 2795), 'os.walk', 'os.walk', (['full_dir'], {}), '(full_dir)\n', (2785, 2795), False, 'import os\n'), ((2273, 2293), 'event.io.dataset.base.Span', 'Span', (['p_start', 'p_end'], {}), '(p_start, p_end)\n', (2277, 2293), False, 'from event.io.dataset.base import Span, DataLoader, DEDocument\n'), ((2948, 2969), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (2960, 2969), False, 'import os\n'), ((3001, 3043), 'os.path.join', 'os.path.join', (['self.params.out_dir', 'dirname'], {}), '(self.params.out_dir, dirname)\n', (3013, 3043), False, 'import os\n'), ((2446, 2470), 'event.io.dataset.base.Span', 'Span', (['arg_start', 'arg_end'], {}), '(arg_start, arg_end)\n', (2450, 2470), False, 'from event.io.dataset.base import Span, DataLoader, DEDocument\n'), ((3072, 3095), 'os.path.exists', 'os.path.exists', (['out_dir'], {}), '(out_dir)\n', (3086, 3095), False, 'import os\n'), ((3121, 3141), 'os.makedirs', 'os.makedirs', (['out_dir'], {}), '(out_dir)\n', (3132, 3141), False, 'import os\n')]
|
import os
import csv
import copy
import tempfile
import warnings
from optparse import make_option
import MySQLdb
from django.db import connection
from calaccess_raw.models import ExpnCd
from calaccess_raw import get_download_directory
from calaccess_campaign_browser.management.commands import CalAccessCommand
from calaccess_campaign_browser.models import Expenditure, Filing, Committee
custom_options = (
make_option(
"--skip-transform-quarterly",
action="store_false",
dest="transform_quarterly",
default=True,
help="Skip transforming quarterly CSV"
),
make_option(
"--skip-load-quarterly",
action="store_false",
dest="load_quarterly",
default=True,
help="Skip loading quarterly CSV to db"
),
)
class Command(CalAccessCommand):
help = "Load refined campaign expenditures from CAL-ACCESS raw data"
option_list = CalAccessCommand.option_list + custom_options
def set_options(self, *args, **kwargs):
self.data_dir = os.path.join(get_download_directory(), 'csv')
# Make sure directory exists
os.path.exists(self.data_dir) or os.mkdir(self.data_dir)
self.cursor = connection.cursor()
# Quarterlies stuff
self.quarterly_tmp_csv = tempfile.NamedTemporaryFile().name
self.quarterly_target_csv = os.path.join(
self.data_dir,
'expn_cd_transformed.csv'
)
def transform_quarterly_expenditures_csv(self):
self.log(" Marking duplicates")
self.log(" Dumping CSV sorted by unique identifier")
sql = """
SELECT
`agent_namf`,
`agent_naml`,
`agent_nams`,
`agent_namt`,
`amend_id`,
`amount`,
`bakref_tid`,
`bal_juris`,
`bal_name`,
`bal_num`,
`cand_namf`,
`cand_naml`,
`cand_nams`,
`cand_namt`,
`cmte_id`,
`cum_oth`,
`cum_ytd`,
`dist_no`,
`entity_cd`,
`expn_chkno`,
`expn_code`,
`expn_date`,
`expn_dscr`,
`filing_id`,
`form_type`,
`g_from_e_f`,
`juris_cd`,
`juris_dscr`,
`line_item`,
`memo_code`,
`memo_refno`,
`off_s_h_cd`,
`offic_dscr`,
`office_cd`,
`payee_city`,
`payee_namf`,
`payee_naml`,
`payee_nams`,
`payee_namt`,
`payee_st`,
`payee_zip4`,
`rec_type`,
`sup_opp_cd`,
`tran_id`,
`tres_city`,
`tres_namf`,
`tres_naml`,
`tres_nams`,
`tres_namt`,
`tres_st`,
`tres_zip4`,
`xref_match`,
`xref_schnm`
FROM %(raw_model)s
ORDER BY filing_id, tran_id, amend_id DESC
INTO OUTFILE '%(tmp_csv)s'
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
""" % dict(
raw_model=ExpnCd._meta.db_table,
tmp_csv=self.quarterly_tmp_csv,
)
self.cursor.execute(sql)
INHEADERS = [
"agent_namf",
"agent_naml",
"agent_nams",
"agent_namt",
"amend_id",
"amount",
"bakref_tid",
"bal_juris",
"bal_name",
"bal_num",
"cand_namf",
"cand_naml",
"cand_nams",
"cand_namt",
"cmte_id",
"cum_oth",
"cum_ytd",
"dist_no",
"entity_cd",
"expn_chkno",
"expn_code",
"expn_date",
"expn_dscr",
"filing_id",
"form_type",
"g_from_e_f",
"juris_cd",
"juris_dscr",
"line_item",
"memo_code",
"memo_refno",
"off_s_h_cd",
"offic_dscr",
"office_cd",
"payee_city",
"payee_namf",
"payee_naml",
"payee_nams",
"payee_namt",
"payee_st",
"payee_zip4",
"rec_type",
"sup_opp_cd",
"tran_id",
"tres_city",
"tres_namf",
"tres_naml",
"tres_nams",
"tres_namt",
"tres_st",
"tres_zip4",
"xref_match",
"xref_schnm"
]
OUTHEADERS = copy.copy(INHEADERS)
OUTHEADERS.append("is_duplicate")
self.log(" Marking duplicates in a new CSV")
# `rU` is read Universal
# see: https://docs.python.org/2/library/functions.html#open
with open(self.quarterly_tmp_csv, 'rU') as fin:
fout = csv.DictWriter(
open(self.quarterly_target_csv, 'wb'),
fieldnames=OUTHEADERS
)
fout.writeheader()
last_uid = ''
reader = csv.DictReader(fin, fieldnames=INHEADERS)
for row in reader:
row.pop(None, None)
uid = '{}-{}'.format(
row['filing_id'],
row['tran_id']
)
if uid != last_uid:
row['is_duplicate'] = 0
last_uid = uid
else:
row['is_duplicate'] = 1
try:
fout.writerow(row)
except ValueError:
continue
def load_quarterly_expenditures(self):
self.log(" Loading CSV")
self.cursor.execute("DROP TABLE IF EXISTS TMP_EXPN_CD;")
sql = """
CREATE TABLE `TMP_EXPN_CD` (
`AGENT_NAMF` varchar(45),
`AGENT_NAML` varchar(200),
`AGENT_NAMS` varchar(10),
`AGENT_NAMT` varchar(10),
`AMEND_ID` int(11),
`AMOUNT` decimal(14,2),
`BAKREF_TID` varchar(20),
`BAL_JURIS` varchar(40),
`BAL_NAME` varchar(200),
`BAL_NUM` varchar(7),
`CAND_NAMF` varchar(45),
`CAND_NAML` varchar(200),
`CAND_NAMS` varchar(10),
`CAND_NAMT` varchar(10),
`CMTE_ID` varchar(9),
`CUM_OTH` decimal(14,2) DEFAULT NULL,
`CUM_YTD` decimal(14,2) DEFAULT NULL,
`DIST_NO` varchar(3),
`ENTITY_CD` varchar(3),
`EXPN_CHKNO` varchar(20),
`EXPN_CODE` varchar(3),
`EXPN_DATE` date DEFAULT NULL,
`EXPN_DSCR` varchar(400),
`FILING_ID` int(11),
`FORM_TYPE` varchar(6),
`G_FROM_E_F` varchar(1),
`JURIS_CD` varchar(3),
`JURIS_DSCR` varchar(40),
`LINE_ITEM` int(11),
`MEMO_CODE` varchar(1),
`MEMO_REFNO` varchar(20),
`OFF_S_H_CD` varchar(1),
`OFFIC_DSCR` varchar(40),
`OFFICE_CD` varchar(3),
`PAYEE_CITY` varchar(30),
`PAYEE_NAMF` varchar(45),
`PAYEE_NAML` varchar(200),
`PAYEE_NAMS` varchar(10),
`PAYEE_NAMT` varchar(10),
`PAYEE_ST` varchar(2),
`PAYEE_ZIP4` varchar(10),
`REC_TYPE` varchar(4),
`SUP_OPP_CD` varchar(1),
`TRAN_ID` varchar(20),
`TRES_CITY` varchar(30),
`TRES_NAMF` varchar(45),
`TRES_NAML` varchar(200),
`TRES_NAMS` varchar(10),
`TRES_NAMT` varchar(10),
`TRES_ST` varchar(2),
`TRES_ZIP4` varchar(10),
`XREF_MATCH` varchar(1),
`XREF_SCHNM` varchar(2),
`IS_DUPLICATE` bool
)
"""
self.cursor.execute(sql)
sql = """
LOAD DATA LOCAL INFILE '%s'
INTO TABLE TMP_EXPN_CD
FIELDS TERMINATED BY ','
OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\\n'
IGNORE 1 LINES (
`agent_namf`,
`agent_naml`,
`agent_nams`,
`agent_namt`,
`amend_id`,
`amount`,
`bakref_tid`,
`bal_juris`,
`bal_name`,
`bal_num`,
`cand_namf`,
`cand_naml`,
`cand_nams`,
`cand_namt`,
`cmte_id`,
`cum_oth`,
`cum_ytd`,
`dist_no`,
`entity_cd`,
`expn_chkno`,
`expn_code`,
`expn_date`,
`expn_dscr`,
`filing_id`,
`form_type`,
`g_from_e_f`,
`juris_cd`,
`juris_dscr`,
`line_item`,
`memo_code`,
`memo_refno`,
`off_s_h_cd`,
`offic_dscr`,
`office_cd`,
`payee_city`,
`payee_namf`,
`payee_naml`,
`payee_nams`,
`payee_namt`,
`payee_st`,
`payee_zip4`,
`rec_type`,
`sup_opp_cd`,
`tran_id`,
`tres_city`,
`tres_namf`,
`tres_naml`,
`tres_nams`,
`tres_namt`,
`tres_st`,
`tres_zip4`,
`xref_match`,
`xref_schnm`,
`is_duplicate`
)
""" % (
self.quarterly_target_csv
)
self.cursor.execute(sql)
self.log(" Merging CSV data with other tables")
sql = """
INSERT INTO %(expenditure_model)s (
cycle_id,
committee_id,
filing_id,
filing_id_raw,
transaction_id,
amend_id,
backreference_transaction_id,
is_crossreference,
crossreference_schedule,
is_duplicate,
date_received,
expenditure_description,
amount,
candidate_full_name,
candidate_is_person,
candidate_committee_id,
candidate_prefix,
candidate_first_name,
candidate_last_name,
candidate_suffix,
candidate_entity_type,
candidate_expense_code,
payee_prefix,
payee_first_name,
payee_last_name,
payee_suffix,
payee_city,
payee_state,
payee_zipcode
)
SELECT
f.cycle_id as cycle_id,
f.committee_id as committee_id,
f.id as filing_id,
f.filing_id_raw,
e.tran_id,
e.amend_id,
e.bakref_tid,
e.xref_match,
e.xref_schnm,
e.is_duplicate,
e.expn_date,
e.expn_dscr,
e.amount,
CASE
WHEN e.cand_namf <> '' THEN e.cand_naml
END as candidate_full_name,
CASE
WHEN e.cand_namf <> '' THEN true
ELSE false
END as candidate_is_person,
c.id,
COALESCE(e.cand_namt, ''),
COALESCE(e.cand_namf, ''),
COALESCE(e.cand_naml, ''),
COALESCE(e.cand_nams, ''),
COALESCE(e.entity_cd, ''),
COALESCE(e.expn_code, ''),
COALESCE(e.payee_namt, ''),
COALESCE(e.payee_namf, ''),
COALESCE(e.payee_naml, ''),
COALESCE(e.payee_nams, ''),
COALESCE(e.payee_city, ''),
COALESCE(e.payee_st, ''),
COALESCE(e.payee_zip4, '')
FROM %(filing_model)s as f
INNER JOIN %(raw_model)s as e
ON f.filing_id_raw = e.filing_id
AND f.amend_id = e.amend_id
LEFT OUTER JOIN %(committee_model)s as c
ON e.cmte_id = c.xref_filer_id
""" % dict(
expenditure_model=Expenditure._meta.db_table,
filing_model=Filing._meta.db_table,
raw_model='TMP_EXPN_CD',
committee_model=Committee._meta.db_table,
)
self.cursor.execute(sql)
self.cursor.execute('DROP TABLE TMP_EXPN_CD;')
def handle(self, *args, **options):
self.header("Loading expenditures")
self.set_options(*args, **options)
warnings.filterwarnings("ignore", category=MySQLdb.Warning)
self.log(" Quarterly filings")
if options['transform_quarterly']:
self.transform_quarterly_expenditures_csv()
if options['load_quarterly']:
self.load_quarterly_expenditures()
|
[
"os.mkdir",
"tempfile.NamedTemporaryFile",
"warnings.filterwarnings",
"optparse.make_option",
"csv.DictReader",
"calaccess_raw.get_download_directory",
"copy.copy",
"os.path.exists",
"django.db.connection.cursor",
"os.path.join"
] |
[((416, 571), 'optparse.make_option', 'make_option', (['"""--skip-transform-quarterly"""'], {'action': '"""store_false"""', 'dest': '"""transform_quarterly"""', 'default': '(True)', 'help': '"""Skip transforming quarterly CSV"""'}), "('--skip-transform-quarterly', action='store_false', dest=\n 'transform_quarterly', default=True, help='Skip transforming quarterly CSV'\n )\n", (427, 571), False, 'from optparse import make_option\n'), ((613, 754), 'optparse.make_option', 'make_option', (['"""--skip-load-quarterly"""'], {'action': '"""store_false"""', 'dest': '"""load_quarterly"""', 'default': '(True)', 'help': '"""Skip loading quarterly CSV to db"""'}), "('--skip-load-quarterly', action='store_false', dest=\n 'load_quarterly', default=True, help='Skip loading quarterly CSV to db')\n", (624, 754), False, 'from optparse import make_option\n'), ((1213, 1232), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (1230, 1232), False, 'from django.db import connection\n'), ((1365, 1419), 'os.path.join', 'os.path.join', (['self.data_dir', '"""expn_cd_transformed.csv"""'], {}), "(self.data_dir, 'expn_cd_transformed.csv')\n", (1377, 1419), False, 'import os\n'), ((4697, 4717), 'copy.copy', 'copy.copy', (['INHEADERS'], {}), '(INHEADERS)\n', (4706, 4717), False, 'import copy\n'), ((12872, 12931), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'MySQLdb.Warning'}), "('ignore', category=MySQLdb.Warning)\n", (12895, 12931), False, 'import warnings\n'), ((1054, 1078), 'calaccess_raw.get_download_directory', 'get_download_directory', ([], {}), '()\n', (1076, 1078), False, 'from calaccess_raw import get_download_directory\n'), ((1133, 1162), 'os.path.exists', 'os.path.exists', (['self.data_dir'], {}), '(self.data_dir)\n', (1147, 1162), False, 'import os\n'), ((1166, 1189), 'os.mkdir', 'os.mkdir', (['self.data_dir'], {}), '(self.data_dir)\n', (1174, 1189), False, 'import os\n'), ((1294, 1323), 'tempfile.NamedTemporaryFile', 
'tempfile.NamedTemporaryFile', ([], {}), '()\n', (1321, 1323), False, 'import tempfile\n'), ((5195, 5236), 'csv.DictReader', 'csv.DictReader', (['fin'], {'fieldnames': 'INHEADERS'}), '(fin, fieldnames=INHEADERS)\n', (5209, 5236), False, 'import csv\n')]
|
# FACE DETECTION ON VIDEO-CAMERA SAMPLE
# My own imports
import face_detect_hc as fdhc
# General module imports
import cv2 as cv
import time
VIDEO_CAPTURE = cv.VideoCapture(0)
while True:
# Get specific read image from each instant from video capture
_, image = VIDEO_CAPTURE.read()
# Apply face_detect algorithm to each image capture
fd = fdhc.FaceDetector(image, show_results=True, only_biggest_face=True)
print(fd.face_detect())
# Apply delay to obtain desired frequency
time.sleep(0.1)
# Exit when "q" is pressed
if (cv.waitKey(1) & 0xFF == ord("q")):
cv.destroyAllWindows()
break
|
[
"cv2.waitKey",
"time.sleep",
"cv2.VideoCapture",
"cv2.destroyAllWindows",
"face_detect_hc.FaceDetector"
] |
[((160, 178), 'cv2.VideoCapture', 'cv.VideoCapture', (['(0)'], {}), '(0)\n', (175, 178), True, 'import cv2 as cv\n'), ((361, 428), 'face_detect_hc.FaceDetector', 'fdhc.FaceDetector', (['image'], {'show_results': '(True)', 'only_biggest_face': '(True)'}), '(image, show_results=True, only_biggest_face=True)\n', (378, 428), True, 'import face_detect_hc as fdhc\n'), ((508, 523), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (518, 523), False, 'import time\n'), ((607, 629), 'cv2.destroyAllWindows', 'cv.destroyAllWindows', ([], {}), '()\n', (627, 629), True, 'import cv2 as cv\n'), ((564, 577), 'cv2.waitKey', 'cv.waitKey', (['(1)'], {}), '(1)\n', (574, 577), True, 'import cv2 as cv\n')]
|
#!/usr/bin/env python
# encoding: utf-8
"""
Minimal GUI for Ipaaca Plot Visualisation.
Allows to add several channels (ipaaca topcis) to listen to and plot the raw float
data. However, it does not further processing of the messages other than trying to
extract floats from it.
@author: jpoeppel
Last Modified: 17.12.14
10.08.2016
Fixed a bug that caused ipaaca to crash
11.08.2016
Added distribution plots as alternative (still early testing) and refactored channelBoxes
accordingly.
Also refactored back to 4 spaces=1 tab, to be more consitent with the python standard.
12.08.2016
Finished saving and loading of channel configurations.
15.08.2016
- Huge performance improvement by removing EVT_UPDATE_UI
- Allow to specify a positional argument to immediately load a given config file.
16.08.2016
Channels can now be shown in detached figures(windows)
Additional windows and their position are also stored in the config file
Now another optional argument can be given to specify the update rate.
"""
import os
import wx
from wx.lib.agw.pycollapsiblepane import PyCollapsiblePane
import ipaaca
import time
import threading
# The recommended way to use wx with mpl is with the WXAgg
# backend.
#
import matplotlib
matplotlib.use('WXAgg')
from matplotlib.figure import Figure
from matplotlib.backends.backend_wxagg import \
FigureCanvasWxAgg as FigCanvas, \
NavigationToolbar2WxAgg as NavigationToolbar
import pylab
import json #For config files
class ChannelBox(PyCollapsiblePane):
#class ChannelBox(wx.CollapsiblePane):
"""
Abstract class for a channel box, which provides the commong functionality for both timelines
and distribution plots.
"""
def __init__(self, parent, ID, ctrl, name="Channel", figurePanel=None, config=None):
# wx.Panel.__init__(self, parent, ID)
super(ChannelBox, self).__init__(parent, ID, name, agwStyle=wx.CP_GTK_EXPANDER)
self.name = name
self.ctrl = ctrl
self.xData = []
self.yData = []
self.lastData = 0
self.colour = (0,0,0)
self._isActive = False
self.minVal = 0.0
self.maxVal = 0.0
self.style= "-"
self.dataLock = threading.Lock()
self.category = ""
self.title = ""
if figurePanel == None:
figurePanel = ctrl.figurePanel
self.figurePanel = figurePanel
self._create_elements(config)
try:
if not config["collapsed"]:
self.Expand()
else:
self.SetLabel(self.name + "({})".format(self.category))
except (KeyError, TypeError):
#If collapsed not defined or no config given, also expand
self.Expand()
self.Bind(wx.EVT_COLLAPSIBLEPANE_CHANGED, self.on_change)
def OnButton(self, event):
"""
Intercept on Button pressin order to set the labels correctly!
"""
if self.IsCollapsed():
self.SetLabel(self.name)
else:
self.SetLabel(self.name + "({})".format(self.category))
#Call original method
super(ChannelBox,self).OnButton(event)
def on_change(self, event):
"Notify control to resize accordingly"
self.ctrl.update_layout()
def _create_elements(self):
raise NotImplementedError
@property
def isActive(self):
return (self._isActive and len(self.yData) > 0)
@isActive.setter
def isActive(self, value):
self._isActive = value
self.activeCB.SetValue(value)
def on_StyleSelect(self, event):
self.style = event.GetString()
self._update_style()
def _update_style(self):
if self.style in ['*','.','d']:
pylab.setp(self.plot_data, linestyle= '')
pylab.setp(self.plot_data, marker= self.style)
else:
pylab.setp(self.plot_data, linestyle= self.style)
pylab.setp(self.plot_data, marker= '')
self.figurePanel.canvas.draw()
def on_catText_enter(self, event):
self.category = self.catText.GetValue()
def on_colourChange(self, event):
col=self.colourPicker.GetColour()
self.colour = (float(col[0])/255,float(col[1])/255,float(col[2])/255)
pylab.setp(self.plot_data, color=self.colour)
def on_clear_button(self, event):
self.xData = []
self.yData= []
self.lastData = 0
self.maxVal = 0.0
self.minVal = 0.0
self.plot_data.set_xdata([])
self.plot_data.set_ydata([])
self.figurePanel.canvas.draw()
def on_remove_button(self,event):
self.remove_button.Unbind(wx.EVT_BUTTON)
self.figurePanel.remove_channel(self)
self.ctrl.removeChannel(self)
def updatePlotData(self):
raise NotImplementedError
def on_checkActive(self, event):
sender = event.GetEventObject()
isChecked = sender.GetValue()
self._change_activity(isChecked)
def _change_activity(self, activity):
self.isActive = activity
if self._isActive:
self.ctrl.activate_channel(self)
if self.plot_data not in self.figurePanel.axes.lines:
self.figurePanel.axes.lines.append(self.plot_data)
else:
if self.plot_data in self.figurePanel.axes.lines:
self.figurePanel.axes.lines.remove(self.plot_data)
def update_data(self, firstTimestep, payload):
raise NotImplementedError
class DistributionChannelBox(ChannelBox):
"""
A channel box for plotting "distributions". Expects to find x and y values in the payload
designated by their respective keys. The x values are used as labels for the bins in
the y values. x and y values MUST have identical dimensions!
"""
defaults = {"category": "",
"xKey": "x",
"yKey":"y",
"color": [0,0,0],
"style": "*",
"active": False}
def __init__(self, parent, ID, ctrl, figurePanel=None, config=None):
self.xKey = "x"
self.yKey = "y"
self.xMin = -1
super(DistributionChannelBox, self).__init__(parent, ID, ctrl, name="Distribution", figurePanel=figurePanel, config=config)
self.plot_data = self.figurePanel.axes.plot([], linewidth=1,color=self.colour,marker="*", linestyle="")[0]
def _create_elements(self, config):
if config == None:
#Create empty config, since it will be filled with defaults afterwards anyways
config = {}
# Fill potentially missing defaults:
for k,v in self.defaults.items():
if not k in config:
config[k] = v
parent = self.GetPane()
self.activeCB = wx.CheckBox(parent, -1, "Active")
self.activeCB.SetValue(config["active"])
self.activeCB.Bind(wx.EVT_CHECKBOX, self.on_checkActive)
box = wx.StaticBox(parent, -1, "")
sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
catLabel = wx.StaticText(parent, -1, "Category: ")
self.category = config["category"]
self.catText = wx.TextCtrl(parent, -1, self.category, size=(100,-1))
self.catText.Bind(wx.EVT_TEXT_ENTER, self.on_catText_enter)
self.catText.Bind(wx.EVT_KILL_FOCUS, self.on_catText_enter)
category_box = wx.BoxSizer(wx.HORIZONTAL)
category_box.Add(catLabel, flag=wx.ALIGN_CENTER_VERTICAL)
category_box.Add(self.catText, flag=wx.ALIGN_CENTER_VERTICAL)
keyLabel = wx.StaticText(parent, -1, "X Key: ")
self.xKey = config["xKey"]
self.xKeyText = wx.TextCtrl(parent, -1, self.xKey, size=(100,-1))
self.xKeyText.Bind(wx.EVT_TEXT_ENTER, self.on_xKeyText_enter)
self.xKeyText.Bind(wx.EVT_KILL_FOCUS, self.on_xKeyText_enter)
x_key_box = wx.BoxSizer(wx.HORIZONTAL)
x_key_box.Add(keyLabel, flag=wx.ALIGN_CENTER_VERTICAL)
x_key_box.Add(self.xKeyText, flag=wx.ALIGN_CENTER_VERTICAL)
keyLabel = wx.StaticText(parent, -1, "Y Key: ")
self.yKey = config["yKey"]
self.yKeyText = wx.TextCtrl(parent, -1, self.yKey, size=(100,-1))
self.yKeyText.Bind(wx.EVT_TEXT_ENTER, self.on_yKeyText_enter)
self.yKeyText.Bind(wx.EVT_KILL_FOCUS, self.on_yKeyText_enter)
y_key_box = wx.BoxSizer(wx.HORIZONTAL)
y_key_box.Add(keyLabel, flag=wx.ALIGN_CENTER_VERTICAL)
y_key_box.Add(self.yKeyText, flag=wx.ALIGN_CENTER_VERTICAL)
self.keyText = self.yKeyText #Make it so the ctrl can focus this one
self.colourPicker = wx.ColourPickerCtrl(parent, -1)
self.colour = (float(config["color"][0])/255, float(config["color"][1])/255,float(config["color"][2])/255)
self.colourPicker.SetColour(config["color"])
self.colourPicker.Bind(wx.EVT_COLOURPICKER_CHANGED, self.on_colourChange)
self.remove_button = wx.Button(parent, -1, "Remove")
self.Bind(wx.EVT_BUTTON, self.on_remove_button, self.remove_button)
figureLabel = wx.StaticText(parent, -1, "Show in figure: ")
self.figureCB = wx.ComboBox(parent, value=self.style, size=(80, 30), choices=self.ctrl.get_figures(),
style=wx.CB_READONLY)
self.figureCB.SetValue("Main")
self.figureCB.Bind(wx.EVT_COMBOBOX, self.on_figure_select)
figureBox = wx.BoxSizer(wx.HORIZONTAL)
figureBox.Add(figureLabel, flag=wx.ALIGN_CENTER_VERTICAL)
figureBox.Add(self.figureCB, flag=wx.ALIGN_CENTER_VERTICAL)
styles = ['-','*','.','--', ':', 'd']
self.style = config["style"]
self.lineStyleCB = wx.ComboBox(parent, value=self.style, size=(60, 30), choices=styles,
style=wx.CB_READONLY)
self.lineStyleCB.Bind(wx.EVT_COMBOBOX, self.on_StyleSelect)
sizer.Add(category_box, 0, wx.ALL, 10)
sizer.Add(x_key_box, 0, wx.ALL, 10)
sizer.Add(y_key_box, 0, wx.ALL, 10)
hbox = wx.BoxSizer(wx.HORIZONTAL)
hbox.Add(self.colourPicker, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
hbox.Add(self.lineStyleCB, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
sizer.Add(hbox, 0, wx.ALL, 10)
sizer.Add(figureBox, 0, wx.ALL, 10)
hbox2 = wx.BoxSizer(wx.HORIZONTAL)
hbox2.Add(self.remove_button, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
sizer.Add(hbox2, 0, wx.ALL, 10)
sizer.Add(self.activeCB, 0, wx.ALL, 10)
parent.SetSizer(sizer)
sizer.Fit(parent)
self.catText.SetFocus()
def on_xKeyText_enter(self, event):
self.xKey = self.xKeyText.GetValue()
def on_yKeyText_enter(self, event):
self.yKey = self.yKeyText.GetValue()
def on_figure_select(self, event):
figure = event.GetString()
self.ctrl.change_figure(self, figure)
def updatePlotData(self):
if self._isActive:
with self.dataLock:
self.plot_data.set_xdata(range(len(self.xData)))
self.plot_data.set_ydata(self.yData)
self.figurePanel.axes.set_xticks(range(len(self.xData)))
self.figurePanel.axes.set_xticklabels(self.xData)
self.figurePanel.axes.draw_artist(self.figurePanel.axes.patch)
self.figurePanel.axes.draw_artist(self.plot_data)
# self.figurePanel.fig.canvas.update()
# self.figurePanel.fig.flush_events()
pylab.setp(self.figurePanel.axes, title=self.title)
def update_data(self, firstTimestamp, payload):
self.figurePanel.newData = True
try:
xData = list(payload[self.xKey])
yData = list(payload[self.yKey])
self.maxVal = max(yData)
self.minVal = min(yData)
with self.dataLock:
self.xData = xData
self.yData = yData
self.lastData = len(self.xData)
except KeyError:
self.ctrl.prepFlashMessage = "Invalid key(s) (xKey: {}, yKey: {}) for category: {}. Channel will be disabled".format(self.xKey, self.yKey, self.category)
self.ctrl.disableChannelBuffer = self
if "title" in payload:
self.title = payload["title"]
class TimeLineChannelBox(ChannelBox):
"""
A channel box for time series. Expects to find a single new data point in the payload
designated by the specified key which is added to the timeline that is being drawn.
"""
defaults= {"category": "",
"key": "",
"color": [0,0,0],
"style": "-",
"active": False,
"useTime": True}
def __init__(self, parent, ID, ctrl, figurePanel=None, config=None):
self.key = ""
self.xMin = 0
self.useTime = True
super(TimeLineChannelBox, self).__init__(parent, ID, ctrl,
name="Timeline", figurePanel=figurePanel, config=config)
self.plot_data = self.figurePanel.axes.plot([], linewidth=1,color=self.colour,)[0]
def _create_elements(self, config):
if config == None:
#Create empty config, since it will be filled with defaults afterwards anyways
config = {}
# Fill potentially missing defaults:
for k,v in self.defaults.items():
if not k in config:
config[k] = v
parent = self.GetPane()
self.activeCB = wx.CheckBox(parent, -1, "Active")
self.activeCB.SetValue(config["active"])
self.activeCB.Bind(wx.EVT_CHECKBOX, self.on_checkActive)
self.useTimeCB = wx.CheckBox(parent, -1, "Use Time")
self.useTime = config["useTime"]
self.useTimeCB.SetValue(self.useTime)
self.useTimeCB.Bind(wx.EVT_CHECKBOX, self.on_checkUseTime)
box = wx.StaticBox(parent, -1, "")
sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
catLabel = wx.StaticText(parent, -1, "Category: ")
self.category = config["category"]
self.catText = wx.TextCtrl(parent, -1, self.category, size=(100,-1))
self.catText.Bind(wx.EVT_TEXT_ENTER, self.on_catText_enter)
self.catText.Bind(wx.EVT_KILL_FOCUS, self.on_catText_enter)
category_box = wx.BoxSizer(wx.HORIZONTAL)
category_box.Add(catLabel, flag=wx.ALIGN_CENTER_VERTICAL)
category_box.Add(self.catText, flag=wx.ALIGN_CENTER_VERTICAL)
keyLabel = wx.StaticText(parent, -1, "Payload Key: ")
self.key = config["key"]
self.keyText = wx.TextCtrl(parent, -1, self.key, size=(100,-1))
self.keyText.Bind(wx.EVT_TEXT_ENTER, self.on_keyText_enter)
self.keyText.Bind(wx.EVT_KILL_FOCUS, self.on_keyText_enter)
key_box = wx.BoxSizer(wx.HORIZONTAL)
key_box.Add(keyLabel, flag=wx.ALIGN_CENTER_VERTICAL)
key_box.Add(self.keyText, flag=wx.ALIGN_CENTER_VERTICAL)
self.colourPicker = wx.ColourPickerCtrl(parent, -1)
self.colour = (float(config["color"][0])/255, float(config["color"][1])/255,float(config["color"][2])/255)
self.colourPicker.SetColour(config["color"])
self.colourPicker.Bind(wx.EVT_COLOURPICKER_CHANGED, self.on_colourChange)
self.clear_button = wx.Button(parent, -1, "Clear")
self.Bind(wx.EVT_BUTTON, self.on_clear_button, self.clear_button)
self.remove_button = wx.Button(parent, -1, "Remove")
self.Bind(wx.EVT_BUTTON, self.on_remove_button, self.remove_button)
figureLabel = wx.StaticText(parent, -1, "Show in figure: ")
self.figureCB = wx.ComboBox(parent, value=self.style, size=(80, 30), choices=self.ctrl.get_figures(),
style=wx.CB_READONLY)
self.figureCB.SetValue("Main")
self.figureCB.Bind(wx.EVT_COMBOBOX, self.on_figure_select)
figureBox = wx.BoxSizer(wx.HORIZONTAL)
figureBox.Add(figureLabel, flag=wx.ALIGN_CENTER_VERTICAL)
figureBox.Add(self.figureCB, flag=wx.ALIGN_CENTER_VERTICAL)
styles = ['-','*','.','--', ':', 'd']
self.style = config["style"]
self.lineStyleCB = wx.ComboBox(parent, value=self.style, size=(60, 30), choices=styles,
style=wx.CB_READONLY)
self.lineStyleCB.Bind(wx.EVT_COMBOBOX, self.on_StyleSelect)
sizer.Add(category_box, 0, wx.ALL, 10)
sizer.Add(key_box, 0, wx.ALL, 10)
hbox = wx.BoxSizer(wx.HORIZONTAL)
hbox.Add(self.colourPicker, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
hbox.Add(self.lineStyleCB, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
sizer.Add(hbox, 0, wx.ALL, 10)
sizer.Add(figureBox, 0, wx.ALL, 10)
hbox2 = wx.BoxSizer(wx.HORIZONTAL)
hbox2.Add(self.clear_button, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
hbox2.Add(self.remove_button, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
sizer.Add(hbox2, 0, wx.ALL, 10)
hbox3 = wx.BoxSizer(wx.HORIZONTAL)
hbox3.Add(self.activeCB,border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
hbox3.Add(self.useTimeCB,border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
sizer.Add(hbox3, 0, wx.ALL, 10)
parent.SetSizer(sizer)
sizer.Fit(parent)
# sizer.SetSizeHints(parent)
self.catText.SetFocus()
def updatePlotData(self):
if self._isActive:
with self.dataLock:
if self.useTime:
self.plot_data.set_xdata(self.xData)
else:
self.plot_data.set_xdata(range(len(self.yData)))
self.plot_data.set_ydata(self.yData)
pylab.setp(self.figurePanel.axes, title=self.title)
def on_keyText_enter(self, event):
self.key = self.keyText.GetValue()
def on_figure_select(self, event):
figure = event.GetString()
self.ctrl.change_figure(self, figure)
def on_checkUseTime(self, event):
self.useTime = self.useTimeCB.GetValue()
def _addData(self, timestamp, data):
if self.useTime:
self.lastData = timestamp
else:
self.lastData= len(self.yData)+1
self.maxVal = max(self.maxVal, data)
self.minVal = min(self.minVal, data)
with self.dataLock:
self.xData.append(timestamp)
self.yData.append(data)
def update_data(self, firstTimestamp, payload):
self.figurePanel.newData = True
timestamp = time.time()-firstTimestamp
try:
data = float(payload[self.key])
self._addData(timestamp, data)
except KeyError:
if self.ctrl.missingKeyValue:
self.ctrl.prepFlashMessage = "Key {} for category: {} not found. Spezified default {} will be used".format(self.key, self.category, self.ctrl.missingKeyValue)
self._addData(timestamp, self.ctrl.missingKeyValue)
else:
self.ctrl.prepFlashMessage = "Key {} for category: {} not found. Will be ignored".format(self.key, self.category)
if "title" in payload:
self.title = payload["title"]
# self.ctrl.disableChannelBuffer = self
class ChildFrame(wx.Frame):
    """Top-level window hosting a single FigurePanel detached from the main frame."""

    def __init__(self, parent, name, channel, position=None, options=None):
        """Create the frame, embed a FigurePanel and optionally place it on screen.

        parent   -- the GraphFrame acting as controller
        name     -- figure name, also used as the window title
        channel  -- channel box to attach to the new figure
        position -- optional screen position for the frame
        options  -- display options forwarded to the FigurePanel
        """
        super(ChildFrame, self).__init__(parent, -1, size=(400, 200), title=name)
        self.parent = parent
        self.panel = FigurePanel(self, name, parent, channel, options)
        if position:
            self.SetPosition(position)
        self.Bind(wx.EVT_CLOSE, self.on_close)

    def on_close(self, event):
        """Notify the controller so its channels can be moved back to the main figure."""
        self.parent.child_closed(self)
class FigurePanel(wx.Panel):
    """Panel embedding one matplotlib figure, its navigation toolbar and a
    row of display-option checkboxes.

    Every channel attached to this panel draws one line into the shared
    axes.  The main window owns the "Main" instance; further instances are
    hosted inside ChildFrame windows.
    """
    def __init__(self, parent, name, ctrl, channel=None, options=None):
        """Set up figure, canvas, toolbar, pause button and option checkboxes.

        parent  -- containing window (main panel or a ChildFrame)
        name    -- figure name, shown in the channels' figure combo boxes
        ctrl    -- the GraphFrame controller
        channel -- optional channel to attach right away
        options -- optional 4-element bool list for the checkboxes:
                   [show grid, show x labels, fixed x window, fixed y window]
        """
        super(FigurePanel, self).__init__(parent)
        self.parent = parent
        self.name = name
        self.dpi = 100
        self.fig = Figure((3.0, 3.0), dpi=self.dpi, )
        self.axes = self.fig.add_subplot(111)
        # Set by channels when fresh samples arrive; draw_plot() is a no-op
        # until then, keeping the periodic redraw timer cheap.
        self.newData = False
        self.ctrl = ctrl
        pylab.setp(self.axes.get_xticklabels(), fontsize=8)
        pylab.setp(self.axes.get_yticklabels(), fontsize=8)
        self.axes.grid(True, color='gray')
        # self.Bind(wx.EVT_CLOSE, self.on_close)
        self.channels = []
        self.paused = False
        self.canvas = FigCanvas(self, -1, self.fig)
        # self.canvas = FigCanvas(self.fig)
        if channel:
            self.add_channel(channel)
        # self.background = self.canvas.copy_from_bbox(self.axes.bbox)
        self.toolbar = NavigationToolbar(self.canvas)
        # self.toolbar = NavigationToolbar(self.canvas, )
        self.toolbar.Realize()
        # Strip navigation tools that make no sense for a continuously
        # updating live plot (positions are those of the default toolbar).
        self.toolbar.DeleteToolByPos(7) #Deletes the adjust subplots button
        self.toolbar.DeleteToolByPos(2) #Deletes the forward button
        self.toolbar.DeleteToolByPos(1) #Deletes the backward button
        self.toolbar.DeleteToolByPos(0) #Deletes the home button
        self.pause_button = wx.Button(self, -1, "Pause")
        self.Bind(wx.EVT_BUTTON, self.on_pause_button, self.pause_button)
        if not options:
            # Defaults: grid on, x labels on, free-running x and y bounds.
            options = [True, True, False, False]
        #
        self.cb_grid = wx.CheckBox(self, -1, "Show Grid", style=wx.ALIGN_RIGHT)
        self.Bind(wx.EVT_CHECKBOX, self.on_cb_grid, self.cb_grid)
        self.cb_grid.SetValue(options[0])
        self.cb_xlab = wx.CheckBox(self, -1, "Show X labels", style=wx.ALIGN_RIGHT)
        self.Bind(wx.EVT_CHECKBOX, self.on_cb_xlab, self.cb_xlab)
        self.cb_xlab.SetValue(options[1])
        self.cb_x_window = wx.CheckBox(self, -1, "Fixed X window", style=wx.ALIGN_RIGHT)
        self.Bind(wx.EVT_CHECKBOX, self.on_cb_window, self.cb_x_window)
        self.cb_x_window.SetValue(options[2])
        self.cb_y_window = wx.CheckBox(self, -1, "Fixed Y window", style=wx.ALIGN_RIGHT)
        self.Bind(wx.EVT_CHECKBOX, self.on_cb_window, self.cb_y_window)
        self.cb_y_window.SetValue(options[3])
        self.hboxCtrl = wx.BoxSizer(wx.HORIZONTAL)
        self.hboxCtrl.Add(self.toolbar, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
        self.hboxCtrl.Add(self.pause_button, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
        self.hboxCtrl.AddSpacer(20)
        self.hboxCtrl.Add(self.cb_grid, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
        self.hboxCtrl.AddSpacer(10)
        self.hboxCtrl.Add(self.cb_xlab, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
        self.hboxCtrl.AddSpacer(10)
        self.hboxCtrl.Add(self.cb_x_window, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
        self.hboxCtrl.AddSpacer(10)
        self.hboxCtrl.Add(self.cb_y_window, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.canvas, 1, flag=wx.LEFT | wx.TOP | wx.GROW)
        self.vbox.Add(self.hboxCtrl, 0, flag=wx.ALIGN_LEFT | wx.TOP)
        self.SetSizer(self.vbox)
        self.vbox.Fit(self.parent)
    def add_channel(self, channel):
        """Attach *channel*: create its plot line, apply its style and sync its combo box."""
        self.channels.append(channel)
        channel.figurePanel = self
        channel.plot_data = self.axes.plot([], linewidth=1, color=channel.colour, marker="*", linestyle="")[0]
        channel._update_style()
        channel.figureCB.SetValue(self.name)
    def remove_channel(self, channel):
        """Detach *channel*; destroy this figure once its last channel is gone.

        NOTE(review): this reads ``channel.isActive`` while other code uses
        the ``_isActive`` attribute directly -- presumably ``isActive`` is a
        property on the channel box; confirm in the channel class.
        """
        self.channels.remove(channel)
        if channel.isActive:
            self.axes.lines.remove(channel.plot_data)
        if len(self.channels) == 0:
            self.ctrl.remove_figure(self)
    def draw_plot(self):
        """ Redraws the plot.

        Skips the (expensive) canvas redraw while paused or when no channel
        has produced new data since the last draw.
        """
        if self.paused or not self.newData:
            return
        maxSize = 0
        ymin = 0
        ymax = 0
        xmin = 0
        #
        # Aggregate the bounds over all attached channels.
        for channel in self.channels:
            maxSize = max(maxSize, channel.lastData)
            ymin = round(min(channel.minVal,ymin),0)
            ymax = round(max(channel.maxVal,ymax),1)
            xmin = min(xmin, channel.xMin)
        #
        # Keep at least a 5-unit wide x range so early plots are readable.
        xmax = maxSize if maxSize > 5 else 5
        if self.cb_x_window.IsChecked():
            # Sliding 5-unit window that follows the newest sample.
            xmin = xmin if maxSize - 5 < 0 else maxSize - 5
        if self.cb_y_window.IsChecked():
            # Fixed window suited for normalized [0, 1] signals.
            ymin = -0.1
            ymax = 1.1
        else:
            # Free-running mode: double the observed max as headroom.
            ymax *= 2
        self.axes.set_xbound(lower=xmin, upper=xmax)
        self.axes.set_ybound(lower=ymin, upper=ymax)
        for channel in self.channels:
            channel.updatePlotData()
        self.canvas.draw() #Costs a lot of CPU performance :-(
        self.newData = False
    def on_pause_button(self, event):
        """Toggle pausing of the live redraw and relabel the button accordingly."""
        self.paused = not self.paused
        label = "Resume" if self.paused else "Pause"
        self.pause_button.SetLabel(label)
    def on_cb_grid(self, event):
        # anecdote: axes.grid assumes b=True if any other flag is
        # given even if b is set to False.
        # so just passing the flag into the first statement won't
        # work.
        if self.cb_grid.IsChecked():
            self.axes.grid(True, color='gray')
        else:
            self.axes.grid(False)
        self.canvas.draw()
    def on_cb_xlab(self, event):
        # Using setp here is convenient, because get_xticklabels
        # returns a list over which one needs to explicitly
        # iterate, and setp already handles this.
        pylab.setp(self.axes.get_xticklabels(),
                   visible=self.cb_xlab.IsChecked())
        self.canvas.draw()
    def on_cb_window(self, event):
        """Force a redraw so a changed x/y-window mode takes effect immediately."""
        self.newData = True
        self.draw_plot()
    def get_figure_options(self):
        """
        Returns a binary array, containing flags for the 4 options
        "Show Grid", "Show X labels", "Fixed X window", "Fixed Y window"
        """
        res = []
        res.append(self.cb_grid.IsChecked())
        res.append(self.cb_xlab.IsChecked())
        res.append(self.cb_x_window.IsChecked())
        res.append(self.cb_y_window.IsChecked())
        return res
    def set_figure_options(self, options):
        """Apply a 4-element flag list as produced by get_figure_options()."""
        self.cb_grid.SetValue(options[0])
        self.cb_xlab.SetValue(options[1])
        self.cb_x_window.SetValue(options[2])
        self.cb_y_window.SetValue(options[3])
class GraphFrame(wx.Frame):
    """ The main frame of the application.

    Acts as the controller: owns the ipaaca buffers, the list of channel
    boxes, the main FigurePanel plus any detached ChildFrame figures, and
    the periodic redraw timer.
    """
    title = 'Ipaaca Plot Visualisation.'
    def __init__(self, updateRate=100, configPath=None, missingKeyValue=None):
        """Build the UI and start the redraw timer.

        updateRate      -- redraw interval in milliseconds
        configPath      -- optional .pconf file to load on startup
        missingKeyValue -- substitute value for missing payload keys
                           (None means such payloads are ignored)
        """
        wx.Frame.__init__(self, None, -1, self.title)
        self.outputBuffer = ipaaca.OutputBuffer("Ipaaca_Plot")
        self.inputBuffer = ipaaca.InputBuffer("Ipaaca_Plot")
        self.inputBuffer.register_handler(self.update_data)
        #self.data = [1]
        self.channels = []
        self.paused = False
        # Wall-clock time of the very first IU; all samples are relative to it.
        self.firstTime = None
        self.figurePlots = []
        self.create_menu()
        self.create_status_bar()
        self.create_main_panel()
        self.redraw_timer = wx.Timer(self)
        self.Bind(wx.EVT_TIMER, self.on_redraw_timer, self.redraw_timer)
        self.redraw_timer.Start(updateRate)
        self.Bind(wx.EVT_CLOSE, self.on_exit)
        # Messages/actions queued by the ipaaca handler thread and executed
        # on the GUI thread inside on_redraw_timer.
        self.prepFlashMessage = None
        self.disableChannelBuffer = None
        self.newData = False
        self.missingKeyValue = missingKeyValue
        if configPath:
            self._handle_config(configPath)
        #Force another layout for strange laptop behaviour.
        self.panel.Layout()
    def get_figures(self):
        """Return the names of all existing figures plus the "New" pseudo-entry."""
        res = []
        for f in self.figurePlots:
            res.append(f.name)
        res.append("New")
        return res
    def activate_channel(self, channel):
        """Make sure the input buffer listens to the channel's category."""
        if not channel.category in self.inputBuffer._category_interests:
            self.inputBuffer.add_category_interests(channel.category)
    def update_data(self, iu, event_type, local):
        """ipaaca handler: dispatch an incoming IU to all matching active channels."""
        if self.firstTime == None:
            self.firstTime = time.time()
        if event_type in ['ADDED', 'UPDATED', 'MESSAGE']:
            category = iu.category
            for channel in self.channels:
                if channel._isActive and channel.category == category:
                    channel.update_data(self.firstTime, iu.payload)
    def create_menu(self):
        """Build the File menu (export, save/load config, exit)."""
        self.menubar = wx.MenuBar()
        menu_file = wx.Menu()
        m_expt = menu_file.Append(-1, "Export plot\tCtrl-E", "Save main plot to file")
        self.Bind(wx.EVT_MENU, self.on_save_plot, m_expt)
        m_saveConfig = menu_file.Append(-1, "Save configuration\tCtrl-S", "Save plot configuration")
        self.Bind(wx.EVT_MENU, self.on_save_config, m_saveConfig)
        m_loadConfig = menu_file.Append(-1, "Load configuration\tCtrl-O", "Load plot configuration")
        self.Bind(wx.EVT_MENU, self.on_load_config, m_loadConfig)
        menu_file.AppendSeparator()
        m_exit = menu_file.Append(-1, "Exit\tCtrl-Q", "Exit")
        self.Bind(wx.EVT_MENU, self.on_exit, m_exit)
        self.menubar.Append(menu_file, "&File")
        self.SetMenuBar(self.menubar)
    def create_main_panel(self):
        """Build the main panel: figure, channel control buttons and send row."""
        self.panel = wx.Panel(self)
        #Monkey patch panel position getter and setter to allow treating the control
        #window the same as the child windows when loading and saving configs
        self.panel.SetPosition = self.SetPosition
        self.panel.GetPosition = self.GetPosition
        self.figurePanel = FigurePanel(self.panel, "Main", self)
        self.figurePlots.append(self.figurePanel)
        self.addChannel_button = wx.Button(self.panel, -1, "Add Channel")
        self.Bind(wx.EVT_BUTTON, self.on_addTimelineChannel_button, self.addChannel_button)
        self.addDistChannel_button = wx.Button(self.panel, -1, "Add Distribution Channel")
        self.Bind(wx.EVT_BUTTON, self.on_addDistChannel_button, self.addDistChannel_button)
        self.clearAll_button = wx.Button(self.panel, -1, "Clear all")
        self.Bind(wx.EVT_BUTTON, self.on_clearAll_button, self.clearAll_button)
        self.hboxChannelCtrl = wx.BoxSizer(wx.HORIZONTAL)
        self.hboxChannelCtrl.Add(self.addChannel_button, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
        self.hboxChannelCtrl.Add(self.addDistChannel_button, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
        self.hboxChannelCtrl.Add(self.clearAll_button, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
        self.hboxChannels = wx.BoxSizer(wx.HORIZONTAL)
        # Row of widgets for manually publishing a test message via ipaaca.
        self.hboxSend = wx.BoxSizer(wx.HORIZONTAL)
        catLabel = wx.StaticText(self.panel, -1, "Category: ")
        self.catText = wx.TextCtrl(self.panel, -1, "", size=(100,-1))
        keyLabel = wx.StaticText(self.panel, -1, "Payload Key: ")
        self.keyText = wx.TextCtrl(self.panel, -1, "", size=(100,-1))
        payloadLabel = wx.StaticText(self.panel, -1, "Payload Value: ")
        self.payloadText = wx.TextCtrl(self.panel, -1, "", size=(200,-1))
        self.send_button = wx.Button(self.panel, -1, "Send")
        self.Bind(wx.EVT_BUTTON, self.on_send_button, self.send_button)
        self.hboxSend.Add(catLabel, 0, wx.ALL, 5)
        self.hboxSend.Add(self.catText, 0, wx.ALL, 5)
        self.hboxSend.Add(keyLabel, 0, wx.ALL, 5)
        self.hboxSend.Add(self.keyText, 0, wx.ALL, 5)
        self.hboxSend.Add(payloadLabel, 0, wx.ALL, 5)
        self.hboxSend.Add(self.payloadText, 0, wx.ALL, 5)
        self.hboxSend.Add(self.send_button, 0, wx.ALL, 5)
        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(self.figurePanel, 1, flag=wx.ALL | wx.EXPAND)
        self.vbox.Add(self.hboxChannelCtrl, 0, flag=wx.ALIGN_LEFT | wx.TOP)
        self.vbox.Add(self.hboxChannels, 0, flag=wx.ALIGN_LEFT | wx.TOP)
        self.vbox.Add(wx.StaticLine(self.panel), 0, wx.ALL|wx.EXPAND, 5)
        self.vbox.Add(self.hboxSend, 0, flag=wx.ALIGN_LEFT | wx.TOP)
        self.vbox.SetSizeHints(self)
        self.panel.SetSizer(self.vbox)
        # self.vbox.Fit(self)
    def child_closed(self, child):
        """A ChildFrame was closed: move its channels back to the main figure."""
        for c in child.panel.channels:
            self.change_figure(c, "Main")
    def remove_figure(self, figure):
        """Drop an (empty) figure and destroy its frame; the main figure is kept."""
        if figure.name != "Main":
            self.figurePlots.remove(figure)
            self.update_available_figures()
            figure.parent.Destroy()
    def change_figure(self, channel, figure, position=None, options=None):
        """Move *channel* to the figure named *figure*, creating it if needed.

        position/options are only used when the target figure exists or is
        newly created; "New" creates a figure with a generated name.
        """
        oldFigure = channel.figurePanel
        oldFigure.remove_channel(channel)
        for f in self.figurePlots:
            if f.name == figure:
                f.add_channel(channel)
                if position:
                    f.parent.SetPosition(position)
                if options:
                    f.set_figure_options(options)
                break
        else:
            #Figure not found -> Create new one
            if figure == "New":
                figure = "Figure"+str(len(self.figurePlots))
            newFrame = ChildFrame(self, figure, channel, position=position, options=options)
            newFrame.Show()
            self.figurePlots.append(newFrame.panel)
            self.update_available_figures()
        channel.figureCB.SetValue(figure)
    def update_available_figures(self):
        """Refresh every channel's figure combo box with the current figure names."""
        for c in self.channels:
            c.figureCB.Clear()
            for f in self.get_figures():
                c.figureCB.Append(f)
    def create_status_bar(self):
        self.statusbar = self.CreateStatusBar()
    def init_plot(self):
        # NOTE(review): appears unused/superseded by FigurePanel.__init__;
        # kept for reference -- confirm before removing.
        self.dpi = 100
        self.fig = Figure((3.0, 3.0), dpi=self.dpi)
        self.axes = self.fig.add_subplot(111)
        pylab.setp(self.axes.get_xticklabels(), fontsize=8)
        pylab.setp(self.axes.get_yticklabels(), fontsize=8)
    def draw_plots(self):
        """ Trigger redraw in all figures
        """
        for f in self.figurePlots:
            f.draw_plot()
    def on_clearAll_button(self, event):
        """Clear every channel's buffers and reset the global time origin."""
        for channel in self.channels:
            channel.on_clear_button(event)
        self.firstTime = None
        self.newData = True
    def on_send_button(self, event):
        """Publish the category/key/value from the send row as an ipaaca message."""
        cat = self.catText.GetValue()
        key = self.keyText.GetValue()
        payload = self.payloadText.GetValue()
        print("Sending %s to category %s" % ({key:payload}, cat))
        msg = ipaaca.Message(cat)
        msg.payload = {key:payload}
        self.outputBuffer.add(msg)
    def on_addDistChannel_button(self, event):
        """Add a new distribution (x/y scatter) channel box."""
        newDistChannelBox = DistributionChannelBox(self.panel, -1, self)
        self._add_channelBox(newDistChannelBox)
    def on_addTimelineChannel_button(self, event):
        """Add a new timeline channel box."""
        newChannelBox = TimeLineChannelBox(self.panel, -1, self)
        self._add_channelBox(newChannelBox)
    def _add_channelBox(self, channelBox):
        """Insert a channel box into the channel row and attach it to the main figure."""
        self.hboxChannels.Add(channelBox, 0, wx.ALL, 5)
        self.channels.append(channelBox)
        self.figurePanel.channels.append(channelBox)
        self.update_layout()
    def update_layout(self):
        """Re-layout after channels were added or removed."""
        self.vbox.SetSizeHints(self)
        self.vbox.Layout()
        self.Refresh()
    def removeChannel(self, channel):
        """Remove a channel box; drop the category interest if no other channel uses it.

        NOTE(review): ``self.hboxChannels.Remove(channel)`` passes a window to
        Sizer.Remove, which is deprecated in wxPython Phoenix (Detach is the
        documented replacement) -- confirm against the wx version in use.
        """
        for c in self.channels:
            if c != channel and c.category == channel.category:
                break
        else:
            # No other channel shares the category -> stop listening to it.
            self.inputBuffer.remove_category_interests(channel.category)
        self.channels.remove(channel)
        self.hboxChannels.Remove(channel)
        pylab.setp(self.figurePanel.axes, title="")
        channel.Destroy()
        if len(self.channels) == 0:
            self.firstTime = None
        self.update_layout()
    def on_save_plot(self, event):
        """Export the main figure to PNG/SVG/PDF via a save dialog.

        NOTE(review): ``wx.SAVE`` is the classic-wx constant; Phoenix uses
        ``wx.FD_SAVE`` -- confirm against the wx version in use.
        """
        file_choices = "PNG (*.png)|*.png|SVG (*.svg)|*.svg|PDF (*.pdf)|*.pdf"
        fileFormats = ["png", "svg", "pdf"]
        dlg = wx.FileDialog(
            self,
            message="Save plot as...",
            defaultDir=os.getcwd(),
            defaultFile="plot.png",
            wildcard=file_choices,
            style=wx.SAVE)
        if dlg.ShowModal() == wx.ID_OK:
            path = dlg.GetPath()
            self.figurePanel.canvas.print_figure(path, dpi=self.figurePanel.dpi, format=fileFormats[dlg.GetFilterIndex()])
            self.flash_status_message("Saved to %s" % path)
    def on_redraw_timer(self, event):
        """Periodic GUI-thread tick: redraw figures and run actions queued by
        the ipaaca handler thread (status messages, channel deactivation)."""
        self.draw_plots()
        if self.prepFlashMessage != None:
            self.flash_status_message(self.prepFlashMessage)
            self.prepFlashMessage = None
        if self.disableChannelBuffer != None:
            self.disableChannelBuffer.isActive = False
            self.disableChannelBuffer.keyText.SetFocus()
            self.disableChannelBuffer = None
    def on_exit(self, event):
        # NOTE(review): wx.Timer has no Destroy() in Phoenix -- Stop() is
        # likely the intended call here; confirm against the wx version.
        self.redraw_timer.Destroy()
        self.Destroy()
    def on_load_config(self, event):
        """Pick a .pconf file via a dialog and apply it."""
        openFileDialog = wx.FileDialog(self,
            message="Open plot config",
            defaultDir=os.getcwd(),
            defaultFile="",
            wildcard="Plot config files (*.pconf)|*.pconf",
            style=wx.FD_OPEN | wx.FILE_MUST_EXIST)
        if openFileDialog.ShowModal() == wx.ID_OK:
            path = openFileDialog.GetPath()
            self._handle_config(path)
    def _handle_config(self, configPath):
        """Recreate channels and figures from a JSON .pconf file.

        NOTE(review): ``dict.has_key`` below is Python-2-only syntax, so this
        module targets Python 2; use ``"figure" in ...`` for Python 3.
        """
        config = json.load(open(configPath))
        handledFigures = []
        for channel in config["channels"]:
            if channel["channeltype"] == "Timeline":
                newChannelBox = TimeLineChannelBox(self.panel, -1, self, config=channel["config"])
            elif channel["channeltype"] == "Distribution":
                newChannelBox = DistributionChannelBox(self.panel, -1, self, config=channel["config"])
            self._add_channelBox(newChannelBox)
            newChannelBox._update_style() #Needs to be done after it was created.
            try:
                newChannelBox._change_activity(channel["config"]["active"]) #To potentially start the recording
            except KeyError:
                #Ignore potentially missing active
                pass
            if channel["config"].has_key("figure"):
                figureName = channel["config"]["figure"]
                try:
                    self.change_figure(newChannelBox, figureName,
                        position=config["figures"][figureName]["position"],
                        options=config["figures"][figureName]["options"])
                except KeyError:
                    #Referenzed figure was not specified, will be ignored.
                    print("There are no stored configurations for figure {}. " \
                          "Defaults will be used.".format(figureName))
                    self.change_figure(newChannelBox, figureName)
                handledFigures.append(figureName)
        #Make sure main window options are set even if no channel used it!
        if not "Main" in handledFigures and "Main" in config["figures"]:
            self.figurePlots[0].parent.SetPosition(config["figures"]["Main"]["position"])
            self.figurePlots[0].set_figure_options(config["figures"]["Main"]["options"])
    def on_save_config(self, event):
        """Serialize all channels and figure placements to a JSON .pconf file."""
        saveFileDialog = wx.FileDialog(
            self,
            message="Save plot config",
            defaultDir=os.getcwd(),
            defaultFile="config.pconf",
            wildcard="Plot config files (*.pconf)|*.pconf",
            style=wx.SAVE)
        if saveFileDialog.ShowModal() == wx.ID_OK:
            path = saveFileDialog.GetPath()
            config = {"channels":[]}
            for channel in self.channels:
                # Colours are stored as 0-255 RGB ints; channel.colour holds 0-1 floats.
                channelObject = {"config": {"category": channel.category,
                                            "color": [int(channel.colour[0]*255),
                                                      int(channel.colour[1]*255),
                                                      int(channel.colour[2]*255)],
                                            "figure": channel.figurePanel.name,
                                            # "figurePos": list(channel.figurePanel.parent.GetPosition()),
                                            # "figureOptions": channel.figurePanel.get_figure_options(),
                                            "style":channel.style,
                                            "active":channel._isActive,
                                            "collapsed": channel.IsCollapsed()}}
                if isinstance(channel, TimeLineChannelBox):
                    channelObject["channeltype"] = "Timeline"
                    channelObject["config"].update({"key": channel.key,
                                                    "useTime": channel.useTime})
                elif isinstance(channel, DistributionChannelBox):
                    channelObject["channeltype"] = "Distribution"
                    channelObject["config"].update({"xKey": channel.xKey,
                                                    "yKey": channel.yKey})
                config["channels"].append(channelObject)
            config["figures"] = {}
            for fig in self.figurePlots:
                figureObject = {"position": list(fig.parent.GetPosition()),
                                "options": fig.get_figure_options()}
                config["figures"][fig.name] = figureObject
            json.dump(config, open(path,"w"))
    def flash_status_message(self, msg, flash_len_ms=1500):
        """Show *msg* in the status bar and clear it after *flash_len_ms* ms."""
        self.statusbar.SetStatusText(msg)
        self.timeroff = wx.Timer(self)
        self.Bind(
            wx.EVT_TIMER,
            self.on_flash_status_off,
            self.timeroff)
        self.timeroff.Start(flash_len_ms, oneShot=True)
    def on_flash_status_off(self, event):
        self.statusbar.SetStatusText('')
if __name__ == '__main__':
    import argparse
    # Command line interface for the visualisation tool.
    parser = argparse.ArgumentParser(description="Plots timelines and distributions sent via Ipaaca")
    parser.add_argument("config", nargs="?", default=None, help="Path to config file that should be loaded directly.")
    parser.add_argument("-u", "--updateRate", default=100, type=int, help="Update rate in ms that should be used. Default 100ms.")
    parser.add_argument("-m", "--missingKeyValue", default=None, type=float, help="Defines what value should be used for " \
                        "missing payload keys for timeline plots. If not given, the payload will be ignored " \
                        "(i.e. the corresponding channel will not be updated)")
    args = parser.parse_args()
    app = wx.App(False)
    app.frame = GraphFrame(updateRate=args.updateRate, configPath=args.config, missingKeyValue=args.missingKeyValue)
    app.frame.Show()
    app.MainLoop()
    # Hard exit: skips normal interpreter teardown so lingering non-daemon
    # threads (e.g. from the ipaaca transport) cannot keep the process alive.
    os._exit(0)
|
[
"wx.Menu",
"argparse.ArgumentParser",
"wx.CheckBox",
"matplotlib.backends.backend_wxagg.FigureCanvasWxAgg",
"wx.ComboBox",
"wx.Panel",
"threading.Lock",
"matplotlib.figure.Figure",
"wx.TextCtrl",
"wx.Timer",
"ipaaca.OutputBuffer",
"wx.MenuBar",
"ipaaca.Message",
"wx.StaticLine",
"wx.BoxSizer",
"wx.StaticBox",
"wx.StaticText",
"matplotlib.use",
"wx.App",
"matplotlib.backends.backend_wxagg.NavigationToolbar2WxAgg",
"os.getcwd",
"wx.ColourPickerCtrl",
"time.time",
"wx.Button",
"os._exit",
"wx.Frame.__init__",
"wx.StaticBoxSizer",
"ipaaca.InputBuffer",
"pylab.setp"
] |
[((1216, 1239), 'matplotlib.use', 'matplotlib.use', (['"""WXAgg"""'], {}), "('WXAgg')\n", (1230, 1239), False, 'import matplotlib\n'), ((43138, 43231), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Plots timelines and distributions send via Ipaaca"""'}), "(description=\n 'Plots timelines and distributions send via Ipaaca')\n", (43161, 43231), False, 'import argparse\n'), ((43853, 43866), 'wx.App', 'wx.App', (['(False)'], {}), '(False)\n', (43859, 43866), False, 'import wx\n'), ((44032, 44043), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (44040, 44043), False, 'import os\n'), ((2195, 2211), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (2209, 2211), False, 'import threading\n'), ((4375, 4420), 'pylab.setp', 'pylab.setp', (['self.plot_data'], {'color': 'self.colour'}), '(self.plot_data, color=self.colour)\n', (4385, 4420), False, 'import pylab\n'), ((6992, 7025), 'wx.CheckBox', 'wx.CheckBox', (['parent', '(-1)', '"""Active"""'], {}), "(parent, -1, 'Active')\n", (7003, 7025), False, 'import wx\n'), ((7163, 7191), 'wx.StaticBox', 'wx.StaticBox', (['parent', '(-1)', '""""""'], {}), "(parent, -1, '')\n", (7175, 7191), False, 'import wx\n'), ((7208, 7243), 'wx.StaticBoxSizer', 'wx.StaticBoxSizer', (['box', 'wx.VERTICAL'], {}), '(box, wx.VERTICAL)\n', (7225, 7243), False, 'import wx\n'), ((7272, 7311), 'wx.StaticText', 'wx.StaticText', (['parent', '(-1)', '"""Category: """'], {}), "(parent, -1, 'Category: ')\n", (7285, 7311), False, 'import wx\n'), ((7378, 7432), 'wx.TextCtrl', 'wx.TextCtrl', (['parent', '(-1)', 'self.category'], {'size': '(100, -1)'}), '(parent, -1, self.category, size=(100, -1))\n', (7389, 7432), False, 'import wx\n'), ((7604, 7630), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (7615, 7630), False, 'import wx\n'), ((7795, 7831), 'wx.StaticText', 'wx.StaticText', (['parent', '(-1)', '"""X Key: """'], {}), "(parent, -1, 'X Key: ')\n", (7808, 7831), False, 'import wx\n'), ((7891, 
7941), 'wx.TextCtrl', 'wx.TextCtrl', (['parent', '(-1)', 'self.xKey'], {'size': '(100, -1)'}), '(parent, -1, self.xKey, size=(100, -1))\n', (7902, 7941), False, 'import wx\n'), ((8128, 8154), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (8139, 8154), False, 'import wx\n'), ((8314, 8350), 'wx.StaticText', 'wx.StaticText', (['parent', '(-1)', '"""Y Key: """'], {}), "(parent, -1, 'Y Key: ')\n", (8327, 8350), False, 'import wx\n'), ((8410, 8460), 'wx.TextCtrl', 'wx.TextCtrl', (['parent', '(-1)', 'self.yKey'], {'size': '(100, -1)'}), '(parent, -1, self.yKey, size=(100, -1))\n', (8421, 8460), False, 'import wx\n'), ((8647, 8673), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (8658, 8673), False, 'import wx\n'), ((8920, 8951), 'wx.ColourPickerCtrl', 'wx.ColourPickerCtrl', (['parent', '(-1)'], {}), '(parent, -1)\n', (8939, 8951), False, 'import wx\n'), ((9240, 9271), 'wx.Button', 'wx.Button', (['parent', '(-1)', '"""Remove"""'], {}), "(parent, -1, 'Remove')\n", (9249, 9271), False, 'import wx\n'), ((9379, 9424), 'wx.StaticText', 'wx.StaticText', (['parent', '(-1)', '"""Show in figure: """'], {}), "(parent, -1, 'Show in figure: ')\n", (9392, 9424), False, 'import wx\n'), ((9705, 9731), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (9716, 9731), False, 'import wx\n'), ((9985, 10080), 'wx.ComboBox', 'wx.ComboBox', (['parent'], {'value': 'self.style', 'size': '(60, 30)', 'choices': 'styles', 'style': 'wx.CB_READONLY'}), '(parent, value=self.style, size=(60, 30), choices=styles, style=\n wx.CB_READONLY)\n', (9996, 10080), False, 'import wx\n'), ((10316, 10342), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (10327, 10342), False, 'import wx\n'), ((10631, 10657), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (10642, 10657), False, 'import wx\n'), ((13953, 13986), 'wx.CheckBox', 'wx.CheckBox', (['parent', '(-1)', '"""Active"""'], 
{}), "(parent, -1, 'Active')\n", (13964, 13986), False, 'import wx\n'), ((14139, 14174), 'wx.CheckBox', 'wx.CheckBox', (['parent', '(-1)', '"""Use Time"""'], {}), "(parent, -1, 'Use Time')\n", (14150, 14174), False, 'import wx\n'), ((14352, 14380), 'wx.StaticBox', 'wx.StaticBox', (['parent', '(-1)', '""""""'], {}), "(parent, -1, '')\n", (14364, 14380), False, 'import wx\n'), ((14397, 14432), 'wx.StaticBoxSizer', 'wx.StaticBoxSizer', (['box', 'wx.VERTICAL'], {}), '(box, wx.VERTICAL)\n', (14414, 14432), False, 'import wx\n'), ((14461, 14500), 'wx.StaticText', 'wx.StaticText', (['parent', '(-1)', '"""Category: """'], {}), "(parent, -1, 'Category: ')\n", (14474, 14500), False, 'import wx\n'), ((14567, 14621), 'wx.TextCtrl', 'wx.TextCtrl', (['parent', '(-1)', 'self.category'], {'size': '(100, -1)'}), '(parent, -1, self.category, size=(100, -1))\n', (14578, 14621), False, 'import wx\n'), ((14793, 14819), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (14804, 14819), False, 'import wx\n'), ((14984, 15026), 'wx.StaticText', 'wx.StaticText', (['parent', '(-1)', '"""Payload Key: """'], {}), "(parent, -1, 'Payload Key: ')\n", (14997, 15026), False, 'import wx\n'), ((15083, 15132), 'wx.TextCtrl', 'wx.TextCtrl', (['parent', '(-1)', 'self.key'], {'size': '(100, -1)'}), '(parent, -1, self.key, size=(100, -1))\n', (15094, 15132), False, 'import wx\n'), ((15314, 15340), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (15325, 15340), False, 'import wx\n'), ((15496, 15527), 'wx.ColourPickerCtrl', 'wx.ColourPickerCtrl', (['parent', '(-1)'], {}), '(parent, -1)\n', (15515, 15527), False, 'import wx\n'), ((15815, 15845), 'wx.Button', 'wx.Button', (['parent', '(-1)', '"""Clear"""'], {}), "(parent, -1, 'Clear')\n", (15824, 15845), False, 'import wx\n'), ((15958, 15989), 'wx.Button', 'wx.Button', (['parent', '(-1)', '"""Remove"""'], {}), "(parent, -1, 'Remove')\n", (15967, 15989), False, 'import wx\n'), ((16097, 16142), 
'wx.StaticText', 'wx.StaticText', (['parent', '(-1)', '"""Show in figure: """'], {}), "(parent, -1, 'Show in figure: ')\n", (16110, 16142), False, 'import wx\n'), ((16423, 16449), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (16434, 16449), False, 'import wx\n'), ((16703, 16798), 'wx.ComboBox', 'wx.ComboBox', (['parent'], {'value': 'self.style', 'size': '(60, 30)', 'choices': 'styles', 'style': 'wx.CB_READONLY'}), '(parent, value=self.style, size=(60, 30), choices=styles, style=\n wx.CB_READONLY)\n', (16714, 16798), False, 'import wx\n'), ((16997, 17023), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (17008, 17023), False, 'import wx\n'), ((17303, 17329), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (17314, 17329), False, 'import wx\n'), ((17587, 17613), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (17598, 17613), False, 'import wx\n'), ((20646, 20678), 'matplotlib.figure.Figure', 'Figure', (['(3.0, 3.0)'], {'dpi': 'self.dpi'}), '((3.0, 3.0), dpi=self.dpi)\n', (20652, 20678), False, 'from matplotlib.figure import Figure\n'), ((21069, 21098), 'matplotlib.backends.backend_wxagg.FigureCanvasWxAgg', 'FigCanvas', (['self', '(-1)', 'self.fig'], {}), '(self, -1, self.fig)\n', (21078, 21098), True, 'from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigCanvas, NavigationToolbar2WxAgg as NavigationToolbar\n'), ((21315, 21345), 'matplotlib.backends.backend_wxagg.NavigationToolbar2WxAgg', 'NavigationToolbar', (['self.canvas'], {}), '(self.canvas)\n', (21332, 21345), True, 'from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigCanvas, NavigationToolbar2WxAgg as NavigationToolbar\n'), ((21749, 21777), 'wx.Button', 'wx.Button', (['self', '(-1)', '"""Pause"""'], {}), "(self, -1, 'Pause')\n", (21758, 21777), False, 'import wx\n'), ((21967, 22023), 'wx.CheckBox', 'wx.CheckBox', (['self', '(-1)', '"""Show Grid"""'], {'style': 
'wx.ALIGN_RIGHT'}), "(self, -1, 'Show Grid', style=wx.ALIGN_RIGHT)\n", (21978, 22023), False, 'import wx\n'), ((22177, 22237), 'wx.CheckBox', 'wx.CheckBox', (['self', '(-1)', '"""Show X labels"""'], {'style': 'wx.ALIGN_RIGHT'}), "(self, -1, 'Show X labels', style=wx.ALIGN_RIGHT)\n", (22188, 22237), False, 'import wx\n'), ((22382, 22443), 'wx.CheckBox', 'wx.CheckBox', (['self', '(-1)', '"""Fixed X window"""'], {'style': 'wx.ALIGN_RIGHT'}), "(self, -1, 'Fixed X window', style=wx.ALIGN_RIGHT)\n", (22393, 22443), False, 'import wx\n'), ((22614, 22675), 'wx.CheckBox', 'wx.CheckBox', (['self', '(-1)', '"""Fixed Y window"""'], {'style': 'wx.ALIGN_RIGHT'}), "(self, -1, 'Fixed Y window', style=wx.ALIGN_RIGHT)\n", (22625, 22675), False, 'import wx\n'), ((22838, 22864), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (22849, 22864), False, 'import wx\n'), ((23599, 23623), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (23610, 23623), False, 'import wx\n'), ((27301, 27346), 'wx.Frame.__init__', 'wx.Frame.__init__', (['self', 'None', '(-1)', 'self.title'], {}), '(self, None, -1, self.title)\n', (27318, 27346), False, 'import wx\n'), ((27375, 27409), 'ipaaca.OutputBuffer', 'ipaaca.OutputBuffer', (['"""Ipaaca_Plot"""'], {}), "('Ipaaca_Plot')\n", (27394, 27409), False, 'import ipaaca\n'), ((27437, 27470), 'ipaaca.InputBuffer', 'ipaaca.InputBuffer', (['"""Ipaaca_Plot"""'], {}), "('Ipaaca_Plot')\n", (27455, 27470), False, 'import ipaaca\n'), ((27819, 27833), 'wx.Timer', 'wx.Timer', (['self'], {}), '(self)\n', (27827, 27833), False, 'import wx\n'), ((29168, 29180), 'wx.MenuBar', 'wx.MenuBar', ([], {}), '()\n', (29178, 29180), False, 'import wx\n'), ((29210, 29219), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (29217, 29219), False, 'import wx\n'), ((30061, 30075), 'wx.Panel', 'wx.Panel', (['self'], {}), '(self)\n', (30069, 30075), False, 'import wx\n'), ((30505, 30545), 'wx.Button', 'wx.Button', (['self.panel', '(-1)', '"""Add 
Channel"""'], {}), "(self.panel, -1, 'Add Channel')\n", (30514, 30545), False, 'import wx\n'), ((30684, 30737), 'wx.Button', 'wx.Button', (['self.panel', '(-1)', '"""Add Distribution Channel"""'], {}), "(self.panel, -1, 'Add Distribution Channel')\n", (30693, 30737), False, 'import wx\n'), ((30870, 30908), 'wx.Button', 'wx.Button', (['self.panel', '(-1)', '"""Clear all"""'], {}), "(self.panel, -1, 'Clear all')\n", (30879, 30908), False, 'import wx\n'), ((31030, 31056), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (31041, 31056), False, 'import wx\n'), ((31417, 31443), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (31428, 31443), False, 'import wx\n'), ((31468, 31494), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (31479, 31494), False, 'import wx\n'), ((31523, 31566), 'wx.StaticText', 'wx.StaticText', (['self.panel', '(-1)', '"""Category: """'], {}), "(self.panel, -1, 'Category: ')\n", (31536, 31566), False, 'import wx\n'), ((31590, 31637), 'wx.TextCtrl', 'wx.TextCtrl', (['self.panel', '(-1)', '""""""'], {'size': '(100, -1)'}), "(self.panel, -1, '', size=(100, -1))\n", (31601, 31637), False, 'import wx\n'), ((31657, 31703), 'wx.StaticText', 'wx.StaticText', (['self.panel', '(-1)', '"""Payload Key: """'], {}), "(self.panel, -1, 'Payload Key: ')\n", (31670, 31703), False, 'import wx\n'), ((31727, 31774), 'wx.TextCtrl', 'wx.TextCtrl', (['self.panel', '(-1)', '""""""'], {'size': '(100, -1)'}), "(self.panel, -1, '', size=(100, -1))\n", (31738, 31774), False, 'import wx\n'), ((31806, 31854), 'wx.StaticText', 'wx.StaticText', (['self.panel', '(-1)', '"""Payload Value: """'], {}), "(self.panel, -1, 'Payload Value: ')\n", (31819, 31854), False, 'import wx\n'), ((31882, 31929), 'wx.TextCtrl', 'wx.TextCtrl', (['self.panel', '(-1)', '""""""'], {'size': '(200, -1)'}), "(self.panel, -1, '', size=(200, -1))\n", (31893, 31929), False, 'import wx\n'), ((31965, 31998), 'wx.Button', 
'wx.Button', (['self.panel', '(-1)', '"""Send"""'], {}), "(self.panel, -1, 'Send')\n", (31974, 31998), False, 'import wx\n'), ((32479, 32503), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (32490, 32503), False, 'import wx\n'), ((34526, 34558), 'matplotlib.figure.Figure', 'Figure', (['(3.0, 3.0)'], {'dpi': 'self.dpi'}), '((3.0, 3.0), dpi=self.dpi)\n', (34532, 34558), False, 'from matplotlib.figure import Figure\n'), ((35313, 35332), 'ipaaca.Message', 'ipaaca.Message', (['cat'], {}), '(cat)\n', (35327, 35332), False, 'import ipaaca\n'), ((36451, 36494), 'pylab.setp', 'pylab.setp', (['self.figurePanel.axes'], {'title': '""""""'}), "(self.figurePanel.axes, title='')\n", (36461, 36494), False, 'import pylab\n'), ((42800, 42814), 'wx.Timer', 'wx.Timer', (['self'], {}), '(self)\n', (42808, 42814), False, 'import wx\n'), ((3839, 3879), 'pylab.setp', 'pylab.setp', (['self.plot_data'], {'linestyle': '""""""'}), "(self.plot_data, linestyle='')\n", (3849, 3879), False, 'import pylab\n'), ((3893, 3938), 'pylab.setp', 'pylab.setp', (['self.plot_data'], {'marker': 'self.style'}), '(self.plot_data, marker=self.style)\n', (3903, 3938), False, 'import pylab\n'), ((3966, 4014), 'pylab.setp', 'pylab.setp', (['self.plot_data'], {'linestyle': 'self.style'}), '(self.plot_data, linestyle=self.style)\n', (3976, 4014), False, 'import pylab\n'), ((4028, 4065), 'pylab.setp', 'pylab.setp', (['self.plot_data'], {'marker': '""""""'}), "(self.plot_data, marker='')\n", (4038, 4065), False, 'import pylab\n'), ((11904, 11955), 'pylab.setp', 'pylab.setp', (['self.figurePanel.axes'], {'title': 'self.title'}), '(self.figurePanel.axes, title=self.title)\n', (11914, 11955), False, 'import pylab\n'), ((18316, 18367), 'pylab.setp', 'pylab.setp', (['self.figurePanel.axes'], {'title': 'self.title'}), '(self.figurePanel.axes, title=self.title)\n', (18326, 18367), False, 'import pylab\n'), ((19176, 19187), 'time.time', 'time.time', ([], {}), '()\n', (19185, 19187), False, 'import 
time\n'), ((28831, 28842), 'time.time', 'time.time', ([], {}), '()\n', (28840, 28842), False, 'import time\n'), ((32746, 32771), 'wx.StaticLine', 'wx.StaticLine', (['self.panel'], {}), '(self.panel)\n', (32759, 32771), False, 'import wx\n'), ((36902, 36913), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (36911, 36913), False, 'import os\n'), ((37998, 38009), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (38007, 38009), False, 'import os\n'), ((40516, 40527), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (40525, 40527), False, 'import os\n')]
|
import random
from collections import deque
import numpy as np
import torch
from flatland.envs.malfunction_generators import malfunction_from_params
from flatland.envs.observations import TreeObsForRailEnv
from flatland.envs.predictions import ShortestPathPredictorForRailEnv
from flatland.envs.rail_env import RailEnv
from flatland.envs.rail_generators import sparse_rail_generator
from flatland.envs.schedule_generators import sparse_schedule_generator
from flatland.utils.rendertools import RenderTool
from importlib_resources import path
import torch_training.Nets
from torch_training.dueling_double_dqn import Agent
from utils.observation_utils import normalize_observation
# Reproducibility: seed both Python's and NumPy's RNGs so rail generation,
# schedules and exploration are repeatable across runs.
random.seed(1)
np.random.seed(1)
"""
file_name = "./railway/complex_scene.pkl"
env = RailEnv(width=10,
        height=20,
        rail_generator=rail_from_file(file_name),
        obs_builder_object=TreeObsForRailEnv(max_depth=3, predictor=ShortestPathPredictorForRailEnv()))
x_dim = env.width
y_dim = env.height
"""
# Parameters for the Environment
x_dim = 25
y_dim = 25
n_agents = 10
# We are training an Agent using the Tree Observation with depth 2
observation_builder = TreeObsForRailEnv(max_depth=2)
# Use the malfunction generator to break agents from time to time
stochastic_data = {'malfunction_rate': 8000,  # Rate of malfunction occurrence of single agent
                   'min_duration': 15,  # Minimal duration of malfunction
                   'max_duration': 50  # Max duration of malfunction
                   }
# Custom observation builder
TreeObservation = TreeObsForRailEnv(max_depth=2, predictor=ShortestPathPredictorForRailEnv(30))
# Different agent types (trains) with different speeds.
speed_ration_map = {1.: 0.25,  # Fast passenger train
                    1. / 2.: 0.25,  # Fast freight train
                    1. / 3.: 0.25,  # Slow commuter train
                    1. / 4.: 0.25}  # Slow freight train
env = RailEnv(width=x_dim,
        height=y_dim,
        rail_generator=sparse_rail_generator(max_num_cities=3,
                                             # Number of cities in map (where train stations are)
                                             seed=1,  # Random seed
                                             grid_mode=False,
                                             max_rails_between_cities=2,
                                             max_rails_in_city=2),
        schedule_generator=sparse_schedule_generator(speed_ration_map),
        number_of_agents=n_agents,
        malfunction_generator_and_process_data=malfunction_from_params(stochastic_data),
        obs_builder_object=TreeObservation)
env.reset(True, True)
observation_helper = TreeObsForRailEnv(max_depth=3, predictor=ShortestPathPredictorForRailEnv())
env_renderer = RenderTool(env, gl="PILSVG", )
# State size = features per tree node * number of nodes in a depth-2 4-ary tree.
num_features_per_node = env.obs_builder.observation_dim
tree_depth = 2
nr_nodes = 0
for i in range(tree_depth + 1):
    nr_nodes += np.power(4, i)
state_size = num_features_per_node * nr_nodes
action_size = 5
# We set the number of episodes we would like to train on
# (at module level locals() is globals(), so an externally injected
# n_trials survives this guard).
if 'n_trials' not in locals():
    n_trials = 60000
max_steps = int(4 * 2 * (20 + env.height + env.width))
eps = 1.
eps_end = 0.005
eps_decay = 0.9995
action_dict = dict()
final_action_dict = dict()
scores_window = deque(maxlen=100)
done_window = deque(maxlen=100)
scores = []
dones_list = []
action_prob = [0] * action_size
agent_obs = [None] * env.get_num_agents()
agent_next_obs = [None] * env.get_num_agents()
# Load a pre-trained navigator checkpoint for inference.
agent = Agent(state_size, action_size)
with path(torch_training.Nets, "navigator_checkpoint1200.pth") as file_in:
    agent.qnetwork_local.load_state_dict(torch.load(file_in))
record_images = False
frame_step = 0
for trials in range(1, n_trials + 1):
    # Reset environment
    obs, info = env.reset(True, True)
    env_renderer.reset()
    # Build agent specific observations
    # (fix: the original duplicated the assignment, `agent_obs[a] = agent_obs[a] = ...`)
    for a in range(env.get_num_agents()):
        agent_obs[a] = normalize_observation(obs[a], tree_depth, observation_radius=10)
    # Reset score and done
    score = 0
    env_done = 0
    # Run episode
    for step in range(max_steps):
        # Action
        for a in range(env.get_num_agents()):
            if info['action_required'][a]:
                action = agent.act(agent_obs[a], eps=0.)
            else:
                action = 0
                # NOTE(review): only forced no-op actions are tallied here;
                # confirm whether policy-chosen actions should be counted too.
                action_prob[action] += 1
            action_dict.update({a: action})
        # Environment step
        obs, all_rewards, done, _ = env.step(action_dict)
        env_renderer.render_env(show=True, show_predictions=True, show_observations=False)
        # Build agent specific observations and normalize
        for a in range(env.get_num_agents()):
            if obs[a]:
                agent_obs[a] = normalize_observation(obs[a], tree_depth, observation_radius=10)
        if done['__all__']:
            break
|
[
"numpy.random.seed",
"flatland.envs.predictions.ShortestPathPredictorForRailEnv",
"numpy.power",
"torch.load",
"torch_training.dueling_double_dqn.Agent",
"utils.observation_utils.normalize_observation",
"flatland.envs.malfunction_generators.malfunction_from_params",
"importlib_resources.path",
"flatland.envs.rail_generators.sparse_rail_generator",
"random.seed",
"flatland.utils.rendertools.RenderTool",
"flatland.envs.observations.TreeObsForRailEnv",
"collections.deque",
"flatland.envs.schedule_generators.sparse_schedule_generator"
] |
[((682, 696), 'random.seed', 'random.seed', (['(1)'], {}), '(1)\n', (693, 696), False, 'import random\n'), ((697, 714), 'numpy.random.seed', 'np.random.seed', (['(1)'], {}), '(1)\n', (711, 714), True, 'import numpy as np\n'), ((1177, 1207), 'flatland.envs.observations.TreeObsForRailEnv', 'TreeObsForRailEnv', ([], {'max_depth': '(2)'}), '(max_depth=2)\n', (1194, 1207), False, 'from flatland.envs.observations import TreeObsForRailEnv\n'), ((2867, 2895), 'flatland.utils.rendertools.RenderTool', 'RenderTool', (['env'], {'gl': '"""PILSVG"""'}), "(env, gl='PILSVG')\n", (2877, 2895), False, 'from flatland.utils.rendertools import RenderTool\n'), ((3382, 3399), 'collections.deque', 'deque', ([], {'maxlen': '(100)'}), '(maxlen=100)\n', (3387, 3399), False, 'from collections import deque\n'), ((3414, 3431), 'collections.deque', 'deque', ([], {'maxlen': '(100)'}), '(maxlen=100)\n', (3419, 3431), False, 'from collections import deque\n'), ((3589, 3619), 'torch_training.dueling_double_dqn.Agent', 'Agent', (['state_size', 'action_size'], {}), '(state_size, action_size)\n', (3594, 3619), False, 'from torch_training.dueling_double_dqn import Agent\n'), ((3031, 3045), 'numpy.power', 'np.power', (['(4)', 'i'], {}), '(4, i)\n', (3039, 3045), True, 'import numpy as np\n'), ((3625, 3682), 'importlib_resources.path', 'path', (['torch_training.Nets', '"""navigator_checkpoint1200.pth"""'], {}), "(torch_training.Nets, 'navigator_checkpoint1200.pth')\n", (3629, 3682), False, 'from importlib_resources import path\n'), ((1625, 1660), 'flatland.envs.predictions.ShortestPathPredictorForRailEnv', 'ShortestPathPredictorForRailEnv', (['(30)'], {}), '(30)\n', (1656, 1660), False, 'from flatland.envs.predictions import ShortestPathPredictorForRailEnv\n'), ((2030, 2147), 'flatland.envs.rail_generators.sparse_rail_generator', 'sparse_rail_generator', ([], {'max_num_cities': '(3)', 'seed': '(1)', 'grid_mode': '(False)', 'max_rails_between_cities': '(2)', 'max_rails_in_city': '(2)'}), 
'(max_num_cities=3, seed=1, grid_mode=False,\n max_rails_between_cities=2, max_rails_in_city=2)\n', (2051, 2147), False, 'from flatland.envs.rail_generators import sparse_rail_generator\n'), ((2501, 2544), 'flatland.envs.schedule_generators.sparse_schedule_generator', 'sparse_schedule_generator', (['speed_ration_map'], {}), '(speed_ration_map)\n', (2526, 2544), False, 'from flatland.envs.schedule_generators import sparse_schedule_generator\n'), ((2640, 2680), 'flatland.envs.malfunction_generators.malfunction_from_params', 'malfunction_from_params', (['stochastic_data'], {}), '(stochastic_data)\n', (2663, 2680), False, 'from flatland.envs.malfunction_generators import malfunction_from_params\n'), ((2817, 2850), 'flatland.envs.predictions.ShortestPathPredictorForRailEnv', 'ShortestPathPredictorForRailEnv', ([], {}), '()\n', (2848, 2850), False, 'from flatland.envs.predictions import ShortestPathPredictorForRailEnv\n'), ((3736, 3755), 'torch.load', 'torch.load', (['file_in'], {}), '(file_in)\n', (3746, 3755), False, 'import torch\n'), ((4042, 4106), 'utils.observation_utils.normalize_observation', 'normalize_observation', (['obs[a]', 'tree_depth'], {'observation_radius': '(10)'}), '(obs[a], tree_depth, observation_radius=10)\n', (4063, 4106), False, 'from utils.observation_utils import normalize_observation\n'), ((4843, 4907), 'utils.observation_utils.normalize_observation', 'normalize_observation', (['obs[a]', 'tree_depth'], {'observation_radius': '(10)'}), '(obs[a], tree_depth, observation_radius=10)\n', (4864, 4907), False, 'from utils.observation_utils import normalize_observation\n')]
|
import os
import numpy as np
import esutil as eu
import fitsio
import meds
import piff
import pixmappy
import desmeds
import ngmix
import scipy
from .._pizza_cutter import _build_metadata
from .._constants import MAGZP_REF
from meds.maker import MEDS_FMT_VERSION
from ... import __version__
def test_pizza_cutter_build_metadata(monkeypatch):
    """Verify _build_metadata records every dependency version, the config,
    the reference zero point and the relevant environment variables, and
    that the tile-info JSON round-trips through the uint8 image."""
    monkeypatch.setenv('MEDS_DIR', 'BLAH')
    monkeypatch.setenv('PIFF_DATA_DIR', 'BLAHH')
    monkeypatch.setenv('DESDATA', 'BLAHHH')

    config = 'blah blah blah'
    json_info = "tile info"
    metadata, json_info_image = _build_metadata(config=config, json_info=json_info)

    # Table of metadata key -> expected value; each entry is checked with
    # np.all because the metadata fields are numpy string arrays.
    expected = {
        'numpy_version': np.__version__,
        'scipy_version': scipy.__version__,
        'esutil_version': eu.__version__,
        'ngmix_version': ngmix.__version__,
        'fitsio_version': fitsio.__version__,
        'meds_version': meds.__version__,
        'piff_version': piff.__version__,
        'pixmappy_version': pixmappy.__version__,
        'desmeds_version': desmeds.__version__,
        'pizza_cutter_version': __version__,
        'config': config,
        'magzp_ref': MAGZP_REF,
        'meds_fmt_version': MEDS_FMT_VERSION,
        'meds_dir': os.environ['MEDS_DIR'],
        'piff_data_dir': os.environ['PIFF_DATA_DIR'],
        'desdata': os.environ['DESDATA'],
    }
    for key, value in expected.items():
        assert np.all(metadata[key] == value)

    # The JSON tile info is stored as the raw ASCII bytes of the string.
    assert np.array_equal(
        json_info_image,
        np.frombuffer(json_info.encode("ascii"), dtype='u1'),
    )
|
[
"numpy.all"
] |
[((636, 687), 'numpy.all', 'np.all', (["(metadata['numpy_version'] == np.__version__)"], {}), "(metadata['numpy_version'] == np.__version__)\n", (642, 687), True, 'import numpy as np\n'), ((699, 753), 'numpy.all', 'np.all', (["(metadata['scipy_version'] == scipy.__version__)"], {}), "(metadata['scipy_version'] == scipy.__version__)\n", (705, 753), True, 'import numpy as np\n'), ((765, 817), 'numpy.all', 'np.all', (["(metadata['esutil_version'] == eu.__version__)"], {}), "(metadata['esutil_version'] == eu.__version__)\n", (771, 817), True, 'import numpy as np\n'), ((829, 883), 'numpy.all', 'np.all', (["(metadata['ngmix_version'] == ngmix.__version__)"], {}), "(metadata['ngmix_version'] == ngmix.__version__)\n", (835, 883), True, 'import numpy as np\n'), ((895, 951), 'numpy.all', 'np.all', (["(metadata['fitsio_version'] == fitsio.__version__)"], {}), "(metadata['fitsio_version'] == fitsio.__version__)\n", (901, 951), True, 'import numpy as np\n'), ((972, 1024), 'numpy.all', 'np.all', (["(metadata['meds_version'] == meds.__version__)"], {}), "(metadata['meds_version'] == meds.__version__)\n", (978, 1024), True, 'import numpy as np\n'), ((1036, 1088), 'numpy.all', 'np.all', (["(metadata['piff_version'] == piff.__version__)"], {}), "(metadata['piff_version'] == piff.__version__)\n", (1042, 1088), True, 'import numpy as np\n'), ((1100, 1160), 'numpy.all', 'np.all', (["(metadata['pixmappy_version'] == pixmappy.__version__)"], {}), "(metadata['pixmappy_version'] == pixmappy.__version__)\n", (1106, 1160), True, 'import numpy as np\n'), ((1181, 1239), 'numpy.all', 'np.all', (["(metadata['desmeds_version'] == desmeds.__version__)"], {}), "(metadata['desmeds_version'] == desmeds.__version__)\n", (1187, 1239), True, 'import numpy as np\n'), ((1260, 1315), 'numpy.all', 'np.all', (["(metadata['pizza_cutter_version'] == __version__)"], {}), "(metadata['pizza_cutter_version'] == __version__)\n", (1266, 1315), True, 'import numpy as np\n'), ((1336, 1372), 'numpy.all', 'np.all', 
(["(metadata['config'] == config)"], {}), "(metadata['config'] == config)\n", (1342, 1372), True, 'import numpy as np\n'), ((1384, 1426), 'numpy.all', 'np.all', (["(metadata['magzp_ref'] == MAGZP_REF)"], {}), "(metadata['magzp_ref'] == MAGZP_REF)\n", (1390, 1426), True, 'import numpy as np\n'), ((1438, 1494), 'numpy.all', 'np.all', (["(metadata['meds_fmt_version'] == MEDS_FMT_VERSION)"], {}), "(metadata['meds_fmt_version'] == MEDS_FMT_VERSION)\n", (1444, 1494), True, 'import numpy as np\n'), ((1515, 1569), 'numpy.all', 'np.all', (["(metadata['meds_dir'] == os.environ['MEDS_DIR'])"], {}), "(metadata['meds_dir'] == os.environ['MEDS_DIR'])\n", (1521, 1569), True, 'import numpy as np\n'), ((1590, 1654), 'numpy.all', 'np.all', (["(metadata['piff_data_dir'] == os.environ['PIFF_DATA_DIR'])"], {}), "(metadata['piff_data_dir'] == os.environ['PIFF_DATA_DIR'])\n", (1596, 1654), True, 'import numpy as np\n'), ((1683, 1735), 'numpy.all', 'np.all', (["(metadata['desdata'] == os.environ['DESDATA'])"], {}), "(metadata['desdata'] == os.environ['DESDATA'])\n", (1689, 1735), True, 'import numpy as np\n')]
|
import sys
import os
import json
sys.path.append("../../")
from account.models import User, UserProfile
from account.permission import IsAdminUserPermission
from django.core import serializers
from django.utils.timezone import now
from rest_framework.views import APIView
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from utils.api import get_user_and_token_by_jwt_request
from conf.project_conf import JUDGE_SERVER_TEST_CASE_URL
from ..models import ProblemTag, Problem
from ..serializers import (AdminAddProblemSerializer, AdminAddTagSerializer,
AdminGetProblemListSerializer, AdminUpdateProblemSerializer,
UploadFileSerializer, AdminGetProblemByIDSerializer,
ReturnAdminGetProbelmByIDSerializer, )
from ..pagination import (AdminGetProblemListPagination, )
from ..tasks import send_test_case_to_judge
class ProblemListAPI(APIView):
    """Admin endpoint returning a paginated list of all problems."""

    permission_classes = [IsAuthenticated, IsAdminUserPermission]

    def get(self, request):
        """Return one page of problems plus the total problem count."""
        problem_list = Problem.objects.all().order_by('id')
        page_data = AdminGetProblemListPagination().paginate_queryset(problem_list, request, self)
        data = AdminGetProblemListSerializer(page_data, many=True)
        data = data.data
        for k in data:
            # Best effort: replace the creator's username with the profile
            # nickname.  Only the two expected "missing record" errors are
            # swallowed (the original blanket `except Exception` hid real bugs);
            # on failure the raw username is kept.
            try:
                user = User.objects.get(username=k['created_by'])
                k['created_by'] = UserProfile.objects.get(user=user).nickname
            except (User.DoesNotExist, UserProfile.DoesNotExist):
                pass
        resp_data = {}
        resp_data['total'] = Problem.objects.all().count()
        resp_data['data'] = data
        return Response(data=resp_data)
class ProblemAPI(APIView):
    """Admin CRUD endpoints for a single problem.

    NOTE: the verb mapping is reversed from common REST convention and is
    kept for client compatibility: PUT creates a new problem, POST updates
    an existing one.
    """

    permission_classes = [IsAuthenticated, IsAdminUserPermission]

    def get(self, request):
        """Return one problem (looked up by ?id=...) with the creator's nickname."""
        resp_data = {'code': 0, 'msg': 'success', 'data': {}}
        serializer = AdminGetProblemByIDSerializer(data=request.GET)
        if not serializer.is_valid():
            resp_data['code'] = -1
            resp_data['msg'] = 'request data error'
            return Response(data=resp_data)
        data = serializer.data
        problem_id = data['id']  # renamed from `id` to avoid shadowing the builtin
        try:
            pro = Problem.objects.get(id=problem_id)
        except Problem.DoesNotExist:
            resp_data['code'] = -2
            resp_data['msg'] = f"{problem_id} problem does not exist"
            return Response(data=resp_data)
        data = ReturnAdminGetProbelmByIDSerializer(pro).data
        # Best effort: swap the stored username for the profile nickname.
        # Narrowed from a blanket `except Exception` so real bugs surface.
        try:
            user = User.objects.get(username=data['created_by'])
            data['created_by'] = UserProfile.objects.get(user=user).nickname
        except (User.DoesNotExist, UserProfile.DoesNotExist):
            pass
        resp_data['data'] = data
        return Response(data=resp_data)

    def put(self, request):
        """Create a new problem from the request body and attach its tags."""
        resp_data = {'code': 0, 'msg': 'success', 'data': {}}
        serializer = AdminAddProblemSerializer(data=request.data)
        if not serializer.is_valid():
            resp_data['code'] = -1
            resp_data['msg'] = 'request data error'
            return Response(data=resp_data)
        user, token = get_user_and_token_by_jwt_request(request)
        data = serializer.data
        data['created_by'] = user
        tags = data.pop('tags')  # many-to-many: must be added after create()
        try:
            pro = Problem.objects.create(**data)
        except Exception:
            # Any creation failure (bad field values, DB error) is reported
            # to the client as a request error instead of a 500.
            resp_data['code'] = -1
            resp_data['msg'] = 'request data error'
            return Response(data=resp_data)
        for item in tags:
            # Create missing tags on the fly before linking them.
            if not ProblemTag.objects.filter(name=item).exists():
                tag = ProblemTag.objects.create(name=item)
                tag.save()
            pro.tags.add(item)
        pro.last_update_time = now()
        pro.save()
        pro_data = AdminAddProblemSerializer(pro)
        resp_data['data'] = pro_data.data
        resp_data['data']['id'] = pro.pk
        resp_data['data']['created_by'] = UserProfile.objects.get(user=user).nickname
        return Response(data=resp_data)

    def post(self, request):
        """Update an existing problem (looked up by body `id`) and reset its tags."""
        resp_data = {'code': 0, 'msg': 'success', 'data': {}}
        serializer = AdminUpdateProblemSerializer(data=request.data)
        if not serializer.is_valid():
            resp_data['code'] = -1
            resp_data['msg'] = 'request data error'
            return Response(data=resp_data)
        user, token = get_user_and_token_by_jwt_request(request)
        data = serializer.data
        try:
            pro = Problem.objects.get(id=data['id'])
        except Problem.DoesNotExist:
            resp_data['code'] = -2
            resp_data['msg'] = f"{data['id']} problem does not exist"
            return Response(data=resp_data)
        tags = data.pop('tags')
        for k, v in data.items():
            setattr(pro, k, v)
        # Replace the tag set wholesale: clear, then re-add (creating new tags).
        pro.tags.clear()
        for item in tags:
            if not ProblemTag.objects.filter(name=item).exists():
                tag = ProblemTag.objects.create(name=item)
                tag.save()
            pro.tags.add(item)
        pro.last_update_time = now()
        pro.save()
        pro_data = AdminUpdateProblemSerializer(pro)
        resp_data['data'] = pro_data.data
        resp_data['data']['created_by'] = UserProfile.objects.get(user=user).nickname
        return Response(data=resp_data)
class ProblemTagAPI(APIView):
    """Admin endpoints for listing and creating problem tags."""

    permission_classes = [IsAuthenticated, IsAdminUserPermission]

    def get(self, request):
        """Return every tag, serialized as a Django JSON string."""
        tags = serializers.serialize('json', ProblemTag.objects.all())
        return Response(data={'code': 0, 'msg': 'success', 'data': {'tags': tags}})

    def post(self, request):
        """Create a new tag unless the payload is invalid or the tag exists."""
        resp = {'code': 0, 'msg': 'success', 'data': {}}
        form = AdminAddTagSerializer(data=request.data)
        if not form.is_valid():
            resp['code'], resp['msg'] = -1, 'request data error'
            return Response(data=resp)
        name = form.data['tag']
        # Reject duplicates by name before creating.
        if ProblemTag.objects.filter(name=name).exists():
            resp['code'], resp['msg'] = -2, f"{name} exist"
            return Response(data=resp)
        ProblemTag.objects.create(name=name).save()
        return Response(data=resp)
class ProblemTestCaseAPI(APIView):
    """Admin endpoint for uploading a problem's test-case zip archive.

    The archive is saved locally as `<problem_id>.zip` and then forwarded to
    the judge server.
    """

    permission_classes = [IsAuthenticated, IsAdminUserPermission]

    def post(self, request):
        """Validate, persist and forward an uploaded test-case archive."""
        resp_data = {'code': 0, 'msg': 'success', 'data': {}}
        serializer = UploadFileSerializer(request.POST, request.FILES)
        if not serializer.is_valid():
            resp_data['code'] = -1
            resp_data['msg'] = 'request data error'
            return Response(data=resp_data)
        test_case = request.FILES['file']
        # Only zip archives are accepted as test-case bundles.
        test_case_suf = os.path.splitext(test_case.name)[-1].lower()
        if test_case_suf != '.zip':
            resp_data['code'] = -2
            resp_data['msg'] = f"{test_case_suf} unsupport"
            return Response(data=resp_data)
        pro_id = serializer.data['id']
        test_case_name = str(pro_id) + '.zip'
        test_case_path = os.path.join("./data/test_case", test_case_name)
        # Persist the upload chunk by chunk to avoid loading it into memory.
        with open(test_case_path, "wb") as f:
            for chunk in test_case:
                f.write(chunk)
        # Keep the read handle open only for the duration of the judge upload;
        # the original code leaked this file descriptor on every request.
        with open(test_case_path, "rb") as fh:
            file = {'file': fh}
            data = {'problem_id': pro_id}
            pack = send_test_case_to_judge(JUDGE_SERVER_TEST_CASE_URL, data, file)
        resp_data = pack
        return Response(data=resp_data)
|
[
"sys.path.append",
"django.core.serializers.serialize",
"django.utils.timezone.now",
"account.models.User.objects.get",
"account.models.UserProfile.objects.get",
"rest_framework.response.Response",
"os.path.splitext",
"os.path.join",
"utils.api.get_user_and_token_by_jwt_request"
] |
[((33, 58), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (48, 58), False, 'import sys\n'), ((1709, 1733), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (1717, 1733), False, 'from rest_framework.response import Response\n'), ((2766, 2790), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (2774, 2790), False, 'from rest_framework.response import Response\n'), ((3140, 3182), 'utils.api.get_user_and_token_by_jwt_request', 'get_user_and_token_by_jwt_request', (['request'], {}), '(request)\n', (3173, 3182), False, 'from utils.api import get_user_and_token_by_jwt_request\n'), ((3745, 3750), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (3748, 3750), False, 'from django.utils.timezone import now\n'), ((4005, 4029), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (4013, 4029), False, 'from rest_framework.response import Response\n'), ((4383, 4425), 'utils.api.get_user_and_token_by_jwt_request', 'get_user_and_token_by_jwt_request', (['request'], {}), '(request)\n', (4416, 4425), False, 'from utils.api import get_user_and_token_by_jwt_request\n'), ((5082, 5087), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (5085, 5087), False, 'from django.utils.timezone import now\n'), ((5303, 5327), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (5311, 5327), False, 'from rest_framework.response import Response\n'), ((5598, 5638), 'django.core.serializers.serialize', 'serializers.serialize', (['"""json"""', 'query_set'], {}), "('json', query_set)\n", (5619, 5638), False, 'from django.core import serializers\n'), ((5654, 5678), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (5662, 5678), False, 'from rest_framework.response import Response\n'), ((6324, 6348), 
'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (6332, 6348), False, 'from rest_framework.response import Response\n'), ((7551, 7575), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (7559, 7575), False, 'from rest_framework.response import Response\n'), ((2134, 2158), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (2142, 2158), False, 'from rest_framework.response import Response\n'), ((2546, 2591), 'account.models.User.objects.get', 'User.objects.get', ([], {'username': "data['created_by']"}), "(username=data['created_by'])\n", (2562, 2591), False, 'from account.models import User, UserProfile\n'), ((3092, 3116), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (3100, 3116), False, 'from rest_framework.response import Response\n'), ((3946, 3980), 'account.models.UserProfile.objects.get', 'UserProfile.objects.get', ([], {'user': 'user'}), '(user=user)\n', (3969, 3980), False, 'from account.models import User, UserProfile\n'), ((4336, 4360), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (4344, 4360), False, 'from rest_framework.response import Response\n'), ((5244, 5278), 'account.models.UserProfile.objects.get', 'UserProfile.objects.get', ([], {'user': 'user'}), '(user=user)\n', (5267, 5278), False, 'from account.models import User, UserProfile\n'), ((5977, 6001), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (5985, 6001), False, 'from rest_framework.response import Response\n'), ((6197, 6221), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (6205, 6221), False, 'from rest_framework.response import Response\n'), ((6761, 6785), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), 
'(data=resp_data)\n', (6769, 6785), False, 'from rest_framework.response import Response\n'), ((7048, 7072), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (7056, 7072), False, 'from rest_framework.response import Response\n'), ((1402, 1444), 'account.models.User.objects.get', 'User.objects.get', ([], {'username': "k['created_by']"}), "(username=k['created_by'])\n", (1418, 1444), False, 'from account.models import User, UserProfile\n'), ((2427, 2451), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (2435, 2451), False, 'from rest_framework.response import Response\n'), ((2625, 2659), 'account.models.UserProfile.objects.get', 'UserProfile.objects.get', ([], {'user': 'user'}), '(user=user)\n', (2648, 2659), False, 'from account.models import User, UserProfile\n'), ((3479, 3503), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (3487, 3503), False, 'from rest_framework.response import Response\n'), ((4684, 4708), 'rest_framework.response.Response', 'Response', ([], {'data': 'resp_data'}), '(data=resp_data)\n', (4692, 4708), False, 'from rest_framework.response import Response\n'), ((7177, 7226), 'os.path.join', 'os.path.join', (['"""./data/test_case/"""', 'test_case_name'], {}), "('./data/test_case/', test_case_name)\n", (7189, 7226), False, 'import os\n'), ((7337, 7385), 'os.path.join', 'os.path.join', (['"""./data/test_case"""', 'test_case_name'], {}), "('./data/test_case', test_case_name)\n", (7349, 7385), False, 'import os\n'), ((1479, 1513), 'account.models.UserProfile.objects.get', 'UserProfile.objects.get', ([], {'user': 'user'}), '(user=user)\n', (1502, 1513), False, 'from account.models import User, UserProfile\n'), ((6853, 6885), 'os.path.splitext', 'os.path.splitext', (['test_case.name'], {}), '(test_case.name)\n', (6869, 6885), False, 'import os\n')]
|
from _collections import defaultdict
from _weakrefset import WeakSet
import itertools
from interactions.priority import Priority
from objects.object_enums import ResetReason
from reservation.reservation_handler_basic import ReservationHandlerBasic, ReservationHandlerAllParts
from reservation.reservation_handler_uselist import ReservationHandlerUseList
from services.reset_and_delete_service import ResetRecord
from sims4.callback_utils import CallableList
import gsi_handlers
import sims4.log
logger = sims4.log.Logger('ReservationHandler')
class ReservationMixin:
    """Mixin that gives a game object a reservation ("in use") list.

    Reservation handlers (one per reserving sim) are kept in a WeakSet so a
    dead handler never keeps this object alive.  The mixin also supports
    change callbacks on the use list, a "clobberer" registry, and reset
    propagation to the sims currently using the object.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Lazily promoted from an empty tuple to a WeakSet on first add
        # (see add_reservation_handler); the tuple avoids allocating a set
        # for the many objects that are never reserved.
        self._reservation_handlers = ()
        # CallableList of use-list change callbacks; created on demand in
        # register_on_use_list_changed.
        self._on_reservation_handlers_changed = None
        # Lazily created {reservation_holder: WeakSet(clobberers)} mapping.
        self._reservation_clobberers = None
    @property
    def in_use(self):
        """True when this object itself has at least one reservation."""
        if self._reservation_handlers:
            return True
        return False
    @property
    def self_or_part_in_use(self):
        """True when this object or any of its parts has a reservation."""
        if self._reservation_handlers:
            return True
        elif self.parts:
            return any(part.in_use for part in self.parts)
        return False
    def in_use_by(self, sim, owner=None):
        """Return True if sim reserves this object.

        If owner is given, only reservations whose reservation_interaction
        is that owner count.
        """
        for handler in self._reservation_handlers:
            if handler.sim is not sim:
                continue
            if owner is not None and handler.reservation_interaction is not owner:
                continue
            return True
        return False
    def get_users(self, sims_only=False):
        """Return a frozenset of users reserving this object and its parts."""
        users = set(handler.sim for handler in self._reservation_handlers if not sims_only or handler.sim.is_sim)
        if self.parts:
            for part in self.parts:
                users |= part.get_users(sims_only=sims_only)
        return frozenset(users)
    def get_reservation_handler(self, sim, **kwargs):
        """Create a reservation handler for sim.

        Objects with parts use the all-parts handler so the whole object is
        reserved at once.
        """
        reservation_type = ReservationHandlerBasic if not self.parts else ReservationHandlerAllParts
        return reservation_type(sim, self, **kwargs)
    def get_use_list_handler(self, sim, **kwargs):
        """Create a use-list reservation handler for sim."""
        return ReservationHandlerUseList(sim, self, **kwargs)
    def may_reserve(self, reserver, *_, reservation_handler=None, _from_reservation_call=False, **kwargs):
        """Check whether reserver may reserve this object.

        A temporary handler is built when none is supplied.  The result is
        archived to the GSI when reservation archiving is enabled.
        NOTE(review): the GSI label ('reserve' when _from_reservation_call
        is False, 'may_reserve' otherwise) looks inverted relative to the
        flag name -- confirm against the reserve() caller.
        """
        if reservation_handler is None:
            reservation_handler = self.get_reservation_handler(reserver)
        reserve_result = reservation_handler.may_reserve_internal(**kwargs)
        if gsi_handlers.sim_handlers_log.sim_reservation_archiver.enabled and reserver.is_sim:
            reserve_result_str = '{}: {}'.format('reserve' if not _from_reservation_call else 'may_reserve', reserve_result)
            gsi_handlers.sim_handlers_log.archive_sim_reservation(reservation_handler, reserve_result_str)
        return reserve_result
    def add_reservation_handler(self, reservation_handler):
        """Add a handler to the use list and fire change callbacks."""
        if isinstance(self._reservation_handlers, tuple):
            # First reservation: promote the placeholder tuple to a WeakSet.
            self._reservation_handlers = WeakSet()
        self._reservation_handlers.add(reservation_handler)
        if self._on_reservation_handlers_changed:
            self._on_reservation_handlers_changed(user=reservation_handler.sim, added=True)
    def get_reservation_handlers(self):
        """Return a snapshot tuple of the current reservation handlers."""
        return tuple(self._reservation_handlers)
    def remove_reservation_handler(self, reservation_handler):
        """Remove a handler from the use list and fire change callbacks."""
        if not self._reservation_handlers:
            return
        self._reservation_handlers.discard(reservation_handler)
        if self._on_reservation_handlers_changed:
            self._on_reservation_handlers_changed(user=reservation_handler.sim, added=False)
    def add_reservation_clobberer(self, reservation_holder, reservation_clobberer):
        """Register clobberer as allowed against holder's reservation."""
        if self._reservation_clobberers is None:
            self._reservation_clobberers = defaultdict(WeakSet)
        self._reservation_clobberers[reservation_holder].add(reservation_clobberer)
    def is_reservation_clobberer(self, reservation_holder, reservation_clobberer):
        """Return True if clobberer was registered against holder."""
        if self._reservation_clobberers is None:
            return False
        if reservation_holder not in self._reservation_clobberers:
            return False
        return reservation_clobberer in self._reservation_clobberers[reservation_holder]
    def remove_reservation_clobberer(self, reservation_holder, reservation_clobberer):
        """Unregister clobberer, pruning empty entries and the whole map."""
        if self._reservation_clobberers is None:
            return
        if reservation_holder not in self._reservation_clobberers:
            return
        self._reservation_clobberers[reservation_holder].discard(reservation_clobberer)
        if not self._reservation_clobberers[reservation_holder]:
            del self._reservation_clobberers[reservation_holder]
        if not self._reservation_clobberers:
            self._reservation_clobberers = None
    def on_reset_get_interdependent_reset_records(self, reset_reason, reset_records):
        """Collect reset records for sims currently using this object.

        When this object is being destroyed every user is reset; otherwise
        users transitioning to/in the object get reset together with it.
        """
        super().on_reset_get_interdependent_reset_records(reset_reason, reset_records)
        relevant_sims = self.get_users(sims_only=True)
        for sim in relevant_sims:
            if self.reset_reason() == ResetReason.BEING_DESTROYED:
                reset_records.append(ResetRecord(sim, ResetReason.RESET_EXPECTED, self, 'In use list of object being destroyed.'))
            else:
                # Resolve the sim's posture target to its owning object so
                # the comparison with `self` is part-agnostic.
                body_target_part_owner = sim.posture_state.body.target
                if body_target_part_owner is not None:
                    if body_target_part_owner.is_part:
                        body_target_part_owner = body_target_part_owner.part_owner
                transition_controller = sim.queue.transition_controller
                # NOTE(review): decompiled control flow -- when the guarded
                # branch below fires, the unconditional append after it adds a
                # second, identical record for the same sim; confirm this
                # matches the intended original logic.
                if not body_target_part_owner is self:
                    if not transition_controller is None:
                        if not transition_controller.will_derail_if_given_object_is_reset(self):
                            reset_records.append(ResetRecord(sim, ResetReason.RESET_EXPECTED, self, 'Transitioning To or In.'))
                reset_records.append(ResetRecord(sim, ResetReason.RESET_EXPECTED, self, 'Transitioning To or In.'))
    def usable_by_transition_controller(self, transition_controller):
        """Return True if the transition's interaction may use this object.

        Reservations held by sims outside the transition block use when the
        reserving interaction's priority is at least the transition's, or
        when the transition's priority is at most Priority.Low.
        """
        if transition_controller is None:
            return False
        required_sims = transition_controller.interaction.required_sims()
        # For a part, overlapping parts must be checked as well.
        targets = (self,) + tuple(self.get_overlapping_parts()) if self.is_part else (self,)
        for reservation_handler in itertools.chain.from_iterable(target.get_reservation_handlers() for target in targets):
            if reservation_handler.sim in required_sims:
                continue
            reservation_interaction = reservation_handler.reservation_interaction
            if reservation_interaction is None:
                continue
            if reservation_interaction.priority >= transition_controller.interaction.priority:
                return False
            if transition_controller.interaction.priority <= Priority.Low:
                return False
        return True
    def register_on_use_list_changed(self, callback):
        """Register a callback(user=..., added=...) for use-list changes."""
        if self._on_reservation_handlers_changed is None:
            self._on_reservation_handlers_changed = CallableList()
        self._on_reservation_handlers_changed.append(callback)
    def unregister_on_use_list_changed(self, callback):
        """Remove a previously registered callback, dropping the empty list.

        NOTE(review): assumes register_on_use_list_changed ran first; the
        membership test raises TypeError if the callback list is still None.
        """
        if callback in self._on_reservation_handlers_changed:
            self._on_reservation_handlers_changed.remove(callback)
        if not self._on_reservation_handlers_changed:
            self._on_reservation_handlers_changed = None
|
[
"reservation.reservation_handler_uselist.ReservationHandlerUseList",
"services.reset_and_delete_service.ResetRecord",
"_weakrefset.WeakSet",
"sims4.callback_utils.CallableList",
"gsi_handlers.sim_handlers_log.archive_sim_reservation",
"_collections.defaultdict"
] |
[((2024, 2070), 'reservation.reservation_handler_uselist.ReservationHandlerUseList', 'ReservationHandlerUseList', (['sim', 'self'], {}), '(sim, self, **kwargs)\n', (2049, 2070), False, 'from reservation.reservation_handler_uselist import ReservationHandlerUseList\n'), ((2600, 2698), 'gsi_handlers.sim_handlers_log.archive_sim_reservation', 'gsi_handlers.sim_handlers_log.archive_sim_reservation', (['reservation_handler', 'reserve_result_str'], {}), '(reservation_handler,\n reserve_result_str)\n', (2653, 2698), False, 'import gsi_handlers\n'), ((2885, 2894), '_weakrefset.WeakSet', 'WeakSet', ([], {}), '()\n', (2892, 2894), False, 'from _weakrefset import WeakSet\n'), ((3697, 3717), '_collections.defaultdict', 'defaultdict', (['WeakSet'], {}), '(WeakSet)\n', (3708, 3717), False, 'from _collections import defaultdict\n'), ((7041, 7055), 'sims4.callback_utils.CallableList', 'CallableList', ([], {}), '()\n', (7053, 7055), False, 'from sims4.callback_utils import CallableList\n'), ((5061, 5157), 'services.reset_and_delete_service.ResetRecord', 'ResetRecord', (['sim', 'ResetReason.RESET_EXPECTED', 'self', '"""In use list of object being destroyed."""'], {}), "(sim, ResetReason.RESET_EXPECTED, self,\n 'In use list of object being destroyed.')\n", (5072, 5157), False, 'from services.reset_and_delete_service import ResetRecord\n'), ((5884, 5961), 'services.reset_and_delete_service.ResetRecord', 'ResetRecord', (['sim', 'ResetReason.RESET_EXPECTED', 'self', '"""Transitioning To or In."""'], {}), "(sim, ResetReason.RESET_EXPECTED, self, 'Transitioning To or In.')\n", (5895, 5961), False, 'from services.reset_and_delete_service import ResetRecord\n'), ((5768, 5845), 'services.reset_and_delete_service.ResetRecord', 'ResetRecord', (['sim', 'ResetReason.RESET_EXPECTED', 'self', '"""Transitioning To or In."""'], {}), "(sim, ResetReason.RESET_EXPECTED, self, 'Transitioning To or In.')\n", (5779, 5845), False, 'from services.reset_and_delete_service import ResetRecord\n')]
|
#!/usr/bin/env python3
import argparse
from collections import Counter
import math
import os
import pprint
import re
import string
import sys
import threading
from urllib.parse import unquote
from urllib.request import urlretrieve
import zipfile
try: # use ujson if available
import ujson as json
except ModuleNotFoundError:
import json
# Constants
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
CPE_DICT_URL = "https://nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.zip"
# locally built CSV databases (one per supported CPE version), created by update()
CPE_DATA_FILES = {"2.2": os.path.join(SCRIPT_DIR, "cpe-search-dictionary_v2.2.csv"),
                  "2.3": os.path.join(SCRIPT_DIR, "cpe-search-dictionary_v2.3.csv")}
# captures (CPE 2.2 URI, English title, CPE 2.3 string) of every dictionary entry
CPE_DICT_ITEM_RE = re.compile(r"<cpe-item name=\"([^\"]*)\">.*?<title xml:lang=\"en-US\"[^>]*>([^<]*)</title>.*?<cpe-23:cpe23-item name=\"([^\"]*)\"", flags=re.DOTALL)
# token extractor used to build term-frequency vectors from CPE fields / queries
TEXT_TO_VECTOR_RE = re.compile(r"[\w+\.]+")
# grabs the first CSV column (the CPE identifier) of a database line
GET_ALL_CPES_RE = re.compile(r'(.*);.*;.*')
# protects lazy population of the CPE_TFS / TERMS / TERMS_MAP caches below
LOAD_CPE_TFS_MUTEX = threading.Lock()
# dotted version numbers like '1.2.3.4' (used for zero-extension of queries)
VERSION_MATCH_ZE_RE = re.compile(r'\b([\d]+\.?){1,4}\b')
# trailing version, optionally with a letter suffix (e.g. '1.3.3c'), at the end of a query
VERSION_MATCH_CPE_CREATION_RE = re.compile(r'\b([\d]+\.?){1,4}([a-z][\d]{0,3})?[^\w]*$')
# in-memory caches, filled lazily by _load_cpe_tfs()
CPE_TFS = []
TERMS = []
TERMS_MAP = {}
SILENT = True
ALT_QUERY_MAXSPLIT = 1
def parse_args():
    """Build the CLI parser, parse sys.argv and return the argument namespace.

    If neither --update nor at least one --query was supplied, the usage
    help is printed before the (then mostly empty) namespace is returned.
    """
    parser = argparse.ArgumentParser(description="Search for CPEs using software names and titles -- Created by <NAME> (ra1nb0rn)")
    option_table = (
        (("-u", "--update"), {"action": "store_true", "help": "Update the local CPE database"}),
        (("-c", "--count"), {"default": 3, "type": int, "help": "The number of CPEs to show in the similarity overview (default: 3)"}),
        (("-v", "--version"), {"default": "2.2", "choices": ["2.2", "2.3"], "help": "The CPE version to use: 2.2 or 2.3 (default: 2.2)"}),
        (("-q", "--query"), {"dest": "queries", "metavar": "QUERY", "action": "append", "help": "A query, i.e. textual software name / title like 'Apache 2.4.39' or 'Wordpress 5.7.2'"}),
    )
    for flags, options in option_table:
        parser.add_argument(*flags, **options)
    parsed = parser.parse_args()
    if not (parsed.update or parsed.queries):
        parser.print_help()
    return parsed
def set_silent(silent):
    """Set the module-wide SILENT flag that suppresses progress output when True."""
    global SILENT
    SILENT = silent
def update(cpe_version):
    """Rebuild the local CPE database for the given CPE version ('2.2' or '2.3').
    Downloads and unzips the official NVD CPE dictionary, extracts every
    entry's CPE identifiers and title, precomputes per-CPE term frequencies
    and vector norms for the cosine-similarity search, writes them to the
    matching CPE_DATA_FILES CSV and removes the downloaded artifacts.
    """
    # download dictionary
    if not SILENT:
        print("[+] Downloading NVD's official CPE dictionary (might take some time)")
    src = CPE_DICT_URL
    dst = os.path.join(SCRIPT_DIR, src.rsplit("/", 1)[1])
    urlretrieve(CPE_DICT_URL, dst)
    # unzip CPE dictionary
    if not SILENT:
        print("[+] Unzipping dictionary")
    with zipfile.ZipFile(dst,"r") as zip_ref:
        cpe_dict_name = zip_ref.namelist()[0]
        cpe_dict_filepath = os.path.join(SCRIPT_DIR, cpe_dict_name)
        zip_ref.extractall(SCRIPT_DIR)
    # build custom CPE database, additionally containing term frequencies and normalization factors
    if not SILENT:
        print("[+] Creating a custom CPE database for future invocations")
    cpe22_infos, cpe23_infos = [], []
    with open(cpe_dict_filepath) as fin:
        content = fin.read()
        cpe_items = CPE_DICT_ITEM_RE.findall(content)
        for cpe_item in cpe_items:
            # every dictionary entry yields (CPE 2.2 URI, title, CPE 2.3 string)
            cpe22, cpe_name, cpe23 = cpe_item[0].lower(), cpe_item[1].lower(), cpe_item[-1].lower()
            for i, cpe in enumerate((cpe22, cpe23)):
                if "%" in cpe:
                    cpe = unquote(cpe)
                # treat '_' like a field separator so tokenization sees both alike
                cpe_mod = cpe.replace("_", ":")
                if i == 0:
                    cpe_elems = cpe_mod[7:].split(":")  # skip the 7-char CPE 2.2 URI prefix
                else:
                    cpe_elems = cpe_mod[10:].split(":")  # skip the 10-char CPE 2.3 prefix
                cpe_name_elems = cpe_name.split()
                # term-frequency vector over CPE fields plus title words,
                # normalized by the number of distinct terms
                words = TEXT_TO_VECTOR_RE.findall(" ".join(cpe_elems + cpe_name_elems))
                cpe_tf = Counter(words)
                for term, tf in cpe_tf.items():
                    cpe_tf[term] = tf / len(cpe_tf)
                # Euclidean norm of the TF vector, cached as the cosine denominator
                cpe_abs = math.sqrt(sum([cnt**2 for cnt in cpe_tf.values()]))
                cpe_info = (cpe, cpe_tf, cpe_abs)
                if i == 0:
                    cpe22_infos.append(cpe_info)
                else:
                    cpe23_infos.append(cpe_info)
    # store customly built CPE database; CSV layout: <cpe>;<json TF dict>;<norm>
    if cpe_version == "2.2":
        with open(CPE_DATA_FILES["2.2"], "w") as fout:
            for cpe, cpe_tf, cpe_abs in cpe22_infos:
                fout.write('%s;%s;%f\n' % (cpe, json.dumps(cpe_tf), cpe_abs))
    else:
        with open(CPE_DATA_FILES["2.3"], "w") as fout:
            for cpe, cpe_tf, cpe_abs in cpe23_infos:
                fout.write('%s;%s;%f\n' % (cpe, json.dumps(cpe_tf), cpe_abs))
    # clean up
    if not SILENT:
        print("[+] Cleaning up")
    os.remove(dst)
    os.remove(os.path.join(SCRIPT_DIR, cpe_dict_name))
def _get_alternative_queries(init_queries, zero_extend_versions=False):
    """Derive spelling variants for every query to improve retrieval.
    Returns a dict mapping each original query to a list of alternatives:
    'httpd' -> 'http' replacement, version suffixes split by spaces
    (e.g. 'openssh 7.4p1' -> 'openssh 7.4 p1') and, optionally,
    zero-extended versions (e.g. '2.4' -> '2.4.0').
    """
    alt_queries_mapping = {}
    for query in init_queries:
        alt_queries_mapping[query] = []
        # replace 'httpd' with 'http' e.g. for Apache HTTP Server
        if 'httpd' in query:
            alt_query = query.replace('httpd', 'http')
            alt_queries_mapping[query].append(alt_query)
        # split certain version parts with space, e.g. 'openssh 7.4p1' --> 'openssh 7.4 p1'
        pot_alt_query = ''
        cur_char_class = string.ascii_letters
        did_split, seen_first_break = False, False
        # allow ALT_QUERY_MAXSPLIT extra splits beyond the existing spaces
        splits, maxsplit = 0, query.count(' ') + ALT_QUERY_MAXSPLIT
        for char in query:
            if char in (' ', '.', '-', '+'):
                seen_first_break = True
                pot_alt_query += char
                did_split = False
                continue
            # insert a space whenever the character class changes (letters vs.
            # digits vs. punctuation), but never before the first natural break
            if seen_first_break and splits < maxsplit and char not in cur_char_class and not did_split:
                pot_alt_query += ' '
                did_split = True
                splits += 1
            if char in string.ascii_letters:
                cur_char_class = string.ascii_letters
            else:
                try:
                    int(char)
                    cur_char_class = '0123456789'
                except ValueError:
                    cur_char_class = '!"#$%&\'()*+,-./:;<=>?@[\]^_`{|}~'
            pot_alt_query += char
        # zero extend versions, e.g. 'Apache httpd 2.4' --> 'Apache httpd 2.4.0'
        if zero_extend_versions:
            version_match = VERSION_MATCH_ZE_RE.search(query)
            if version_match:
                alt_query = query.replace(version_match.group(0), version_match.group(0) + '.0')
                alt_queries_mapping[query].append(alt_query)
        # strip trailing separator characters from every token of the split variant
        pot_alt_query_parts = pot_alt_query.split()
        for i in range(len(pot_alt_query_parts)):
            if pot_alt_query_parts[i][-1] in ('.', '-', '+'):
                pot_alt_query_parts[i] = pot_alt_query_parts[i][:-1]
        pot_alt_query = ' '.join(pot_alt_query_parts)
        if pot_alt_query != query.strip():
            alt_queries_mapping[query].append(pot_alt_query)
    return alt_queries_mapping
def _load_cpe_tfs(cpe_version="2.3"):
    """Lazily populate the module-level CPE_TFS / TERMS / TERMS_MAP caches
    from the on-disk CPE database of the given version. Thread-safe via
    LOAD_CPE_TFS_MUTEX; a no-op if CPE_TFS is already filled.
    """
    LOAD_CPE_TFS_MUTEX.acquire()
    if not CPE_TFS:
        # iterate over every CPE, for every query compute similarity scores and keep track of most similar CPEs
        with open(CPE_DATA_FILES[cpe_version], "r") as fout:
            for line in fout:
                cpe, cpe_tf, cpe_abs = line.rsplit(';', maxsplit=2)
                cpe_tf = json.loads(cpe_tf)
                # intern terms: store the TF dict keyed by an index into TERMS
                # instead of by the term string itself to save memory
                indirect_cpe_tf = {}
                for word, count in cpe_tf.items():
                    if word not in TERMS_MAP:
                        TERMS.append(word)
                        TERMS_MAP[word] = len(TERMS)-1
                        indirect_cpe_tf[len(TERMS)-1] = count
                    else:
                        indirect_cpe_tf[TERMS_MAP[word]] = count
                cpe_abs = float(cpe_abs)
                CPE_TFS.append((cpe, indirect_cpe_tf, cpe_abs))
    LOAD_CPE_TFS_MUTEX.release()
def _search_cpes(queries_raw, cpe_version, count, threshold, zero_extend_versions=False, keep_data_in_memory=False):
    """Facilitate CPE search as specified by the program arguments.
    For every raw query (plus automatically derived alternative spellings)
    the cosine similarity between the query's term-frequency vector and
    every CPE's precomputed TF vector is evaluated; up to `count` CPEs with
    scores above `threshold` are returned per raw query as (cpe, score)
    tuples, best match first.
    """
    # create term frequencies and normalization factors for all queries
    queries = [query.lower() for query in queries_raw]
    # add alternative queries to improve retrieval
    alt_queries_mapping = _get_alternative_queries(queries, zero_extend_versions)
    for alt_queries in alt_queries_mapping.values():
        queries += alt_queries
    query_infos = {}
    most_similar = {}
    for query in queries:
        query_tf = Counter(TEXT_TO_VECTOR_RE.findall(query))
        for term, tf in query_tf.items():
            query_tf[term] = tf / len(query_tf)
        query_abs = math.sqrt(sum([cnt**2 for cnt in query_tf.values()]))
        query_infos[query] = (query_tf, query_abs)
        # ("N/A", -1) is a sentinel meaning "no match found yet"
        most_similar[query] = [("N/A", -1)]
    if keep_data_in_memory:
        _load_cpe_tfs(cpe_version)
        for cpe, indirect_cpe_tf, cpe_abs in CPE_TFS:
            for query in queries:
                query_tf, query_abs = query_infos[query]
                # resolve the interned term indices back to term strings
                cpe_tf = {}
                for term_idx, term_count in indirect_cpe_tf.items():
                    cpe_tf[TERMS[term_idx]] = term_count
                intersecting_words = set(cpe_tf.keys()) & set(query_tf.keys())
                inner_product = sum([cpe_tf[w] * query_tf[w] for w in intersecting_words])
                normalization_factor = cpe_abs * query_abs
                if not normalization_factor:  # avoid division by 0
                    continue
                sim_score = float(inner_product)/float(normalization_factor)
                if threshold > 0 and sim_score < threshold:
                    continue
                # head of the list is always the current best; trailing
                # entries are appended unsorted until `count` is reached
                if sim_score > most_similar[query][0][1]:
                    most_similar[query] = [(cpe, sim_score)] + most_similar[query][:count-1]
                elif len(most_similar[query]) < count:
                    most_similar[query].append((cpe, sim_score))
    else:
        # iterate over every CPE, for every query compute similarity scores and keep track of most similar
        with open(CPE_DATA_FILES[cpe_version], "r") as fout:
            for line in fout:
                cpe, cpe_tf, cpe_abs = line.rsplit(';', maxsplit=2)
                cpe_tf = json.loads(cpe_tf)
                cpe_abs = float(cpe_abs)
                for query in queries:
                    query_tf, query_abs = query_infos[query]
                    intersecting_words = set(cpe_tf.keys()) & set(query_tf.keys())
                    inner_product = sum([cpe_tf[w] * query_tf[w] for w in intersecting_words])
                    normalization_factor = cpe_abs * query_abs
                    if not normalization_factor:  # avoid division by 0
                        continue
                    sim_score = float(inner_product)/float(normalization_factor)
                    if threshold > 0 and sim_score < threshold:
                        continue
                    if sim_score > most_similar[query][0][1]:
                        most_similar[query] = [(cpe, sim_score)] + most_similar[query][:count-1]
                    elif len(most_similar[query]) < count:
                        most_similar[query].append((cpe, sim_score))
    # create intermediate results (including any additional queries),
    # dropping queries and entries that never got past the sentinel
    intermediate_results = {}
    for query in queries:
        if most_similar[query] and len(most_similar[query]) == 1 and most_similar[query][0][1] == -1:
            continue
        intermediate_results[query] = most_similar[query]
        rm_idxs = []
        for i, result in enumerate(intermediate_results[query]):
            if result[1] == -1:
                rm_idxs.append(i)
        for i in rm_idxs:
            del intermediate_results[query][i]
    # create final results: for every raw query pick the best-scoring result
    # among the query itself and all of its alternative spellings
    results = {}
    for query_raw in queries_raw:
        query = query_raw.lower()
        if query not in intermediate_results and (query not in alt_queries_mapping or not alt_queries_mapping[query]):
            continue
        if query not in alt_queries_mapping or not alt_queries_mapping[query]:
            results[query_raw] = intermediate_results[query]
        else:
            most_similar = None
            if query in intermediate_results:
                most_similar = intermediate_results[query]
            for alt_query in alt_queries_mapping[query]:
                if alt_query not in intermediate_results:
                    continue
                if not most_similar or intermediate_results[alt_query][0][1] > most_similar[0][1]:
                    most_similar = intermediate_results[alt_query]
            results[query_raw] = most_similar
    return results
def is_cpe_equal(cpe1, cpe2):
    """Decide whether two CPE strings match, treating the wildcard
    characters '*' and '-' as interchangeable in any position."""
    if len(cpe1) != len(cpe2):
        return False
    wildcards = ('*', '-')
    return all(
        a == b or (a in wildcards and b in wildcards)
        for a, b in zip(cpe1, cpe2)
    )
def _match_cpe23_to_cpe23_from_dict_memory(cpe23_in):
    """
    Try to return a valid CPE 2.3 string that exists in the NVD's CPE
    dictionary based on the given, potentially badly formed, CPE string.

    Uses the in-memory CPE cache (loaded on demand). Returns the matching
    dictionary CPE, or '' if no match could be found.
    """
    _load_cpe_tfs('2.3')
    # if CPE is already in the NVD dictionary
    for (pot_cpe, _, _) in CPE_TFS:
        if cpe23_in == pot_cpe:
            return cpe23_in
    # if the given CPE is simply not a full CPE 2.3 string, extend it with
    # wildcard components and look the extended form up
    # (NOTE: a second, unused copy of this extension previously preceded the
    # dictionary scan above; that dead code has been removed)
    if cpe23_in.count(':') < 12:
        new_cpe = cpe23_in
        if new_cpe.endswith(':'):
            new_cpe += '*'
        while new_cpe.count(':') < 12:
            new_cpe += ':*'
        for (pot_cpe, _, _) in CPE_TFS:
            if new_cpe == pot_cpe:
                return pot_cpe
    # try to "fix" badly formed CPE strings like
    # "cpe:2.3:a:proftpd:proftpd:1.3.3c:..." vs. "cpe:2.3:a:proftpd:proftpd:1.3.3:c:..."
    pre_cpe_in = cpe23_in
    while pre_cpe_in.count(':') > 3:  # break if next cpe part would be vendor part
        pre_cpe_in = pre_cpe_in[:-1]
        if pre_cpe_in.endswith(':') or pre_cpe_in.count(':') > 9:  # skip rear parts in fixing process
            continue
        for (pot_cpe, _, _) in CPE_TFS:
            if pre_cpe_in in pot_cpe:
                # stitch together the found prefix and the remaining part of the original CPE
                if cpe23_in[len(pre_cpe_in)] == ':':
                    cpe_in_add_back = cpe23_in[len(pre_cpe_in)+1:]
                else:
                    cpe_in_add_back = cpe23_in[len(pre_cpe_in):]
                new_cpe = '%s:%s' % (pre_cpe_in, cpe_in_add_back)
                # get new_cpe to full CPE 2.3 length by adding or removing wildcards
                while new_cpe.count(':') < 12:
                    new_cpe += ':*'
                if new_cpe.count(':') > 12:
                    new_cpe = new_cpe[:new_cpe.rfind(':')]
                # if a matching CPE was found, return it
                if is_cpe_equal(new_cpe, pot_cpe):
                    return pot_cpe
    return ''
def _match_cpe23_to_cpe23_from_dict_file(cpe23_in):
    """
    Try to return a valid CPE 2.3 string that exists in the NVD's CPE
    dictionary based on the given, potentially badly formed, CPE string.
    Streams the on-disk CPE 2.3 database instead of using the in-memory
    cache; returns '' if no match could be found.
    """
    # if the given CPE is simply not a full CPE 2.3 string, prepare a
    # wildcard-extended candidate to compare against dictionary entries
    pot_new_cpe = ''
    if cpe23_in.count(':') < 12:
        pot_new_cpe = cpe23_in
        if pot_new_cpe.endswith(':'):
            pot_new_cpe += '*'
        while pot_new_cpe.count(':') < 12:
            pot_new_cpe += ':*'
    # successively strip characters from the rear to obtain candidate prefixes
    pre_cpe_in = cpe23_in
    while pre_cpe_in.count(':') > 3:  # break if next cpe part would be vendor part
        pre_cpe_in = pre_cpe_in[:-1]
        if pre_cpe_in.endswith(':') or pre_cpe_in.count(':') > 9:  # skip rear parts in fixing process
            continue
        with open(CPE_DATA_FILES['2.3'], "r") as fout:
            for line in fout:
                cpe = line.rsplit(';', maxsplit=2)[0].strip()
                # exact or wildcard-extended match short-circuits the search
                if cpe23_in == cpe:
                    return cpe23_in
                if pot_new_cpe and pot_new_cpe == cpe:
                    return pot_new_cpe
                if pre_cpe_in in cpe:
                    # stitch together the found prefix and the remaining part of the original CPE
                    if cpe23_in[len(pre_cpe_in)] == ':':
                        cpe_in_add_back = cpe23_in[len(pre_cpe_in)+1:]
                    else:
                        cpe_in_add_back = cpe23_in[len(pre_cpe_in):]
                    new_cpe = '%s:%s' % (pre_cpe_in, cpe_in_add_back)
                    # get new_cpe to full CPE 2.3 length by adding or removing wildcards
                    while new_cpe.count(':') < 12:
                        new_cpe += ':*'
                    if new_cpe.count(':') > 12:
                        new_cpe = new_cpe[:new_cpe.rfind(':')]
                    # if a matching CPE was found, return it
                    if is_cpe_equal(new_cpe, cpe):
                        return cpe
    return ''
def match_cpe23_to_cpe23_from_dict(cpe23_in, keep_data_in_memory=False):
    """
    Try to return a valid CPE 2.3 string that exists in the NVD's CPE
    dictionary based on the given, potentially badly formed, CPE string.

    Dispatches to the in-memory or file-streaming implementation depending
    on keep_data_in_memory.
    """
    matcher = (_match_cpe23_to_cpe23_from_dict_memory if keep_data_in_memory
               else _match_cpe23_to_cpe23_from_dict_file)
    return matcher(cpe23_in)
def create_cpe_from_base_cpe_and_query(cpe, query):
    """Inject the version number found in *query* into the first wildcard
    field of *cpe*; return the new CPE string, or None when the query has
    no version, the version already appears in *cpe*, or *cpe* contains no
    wildcard field."""
    match = VERSION_MATCH_CPE_CREATION_RE.search(query)
    if not match:
        return None
    version_str = match.group(0).strip()
    if version_str in cpe:
        return None
    parts = cpe.split(':')
    # find the earliest wildcard field and replace it with the version
    wildcard_positions = [i for i, part in enumerate(parts) if part in ('*', '-')]
    if not wildcard_positions:
        return None
    parts[wildcard_positions[0]] = version_str
    return ':'.join(parts)
def create_base_cpe_if_versionless_query(cpe, query):
    """If *query* carries no version number, reduce *cpe* to its base form
    (first five components kept, the remaining eight wildcarded);
    otherwise return None."""
    if VERSION_MATCH_CPE_CREATION_RE.search(query) is not None:
        return None
    prefix = cpe.split(':')[:5]
    return ':'.join(prefix + ['*'] * 8)
def get_all_cpes(version):
    """Return a list of all CPE identifier strings for the given version.
    Reads the data file directly unless the in-memory cache (CPE_TFS) is
    already populated, in which case the cache is used.
    """
    if not CPE_TFS:
        with open(CPE_DATA_FILES[version], "r") as f:
            # the first CSV column of every line is the CPE identifier
            cpes = GET_ALL_CPES_RE.findall(f.read())
    else:
        _load_cpe_tfs(version)
        cpes = [cpe_tf[0] for cpe_tf in CPE_TFS]
    return cpes
def search_cpes(queries, cpe_version="2.3", count=3, threshold=-1, zero_extend_versions=False, keep_data_in_memory=False):
    """Public entry point: look up the most similar CPEs for one query
    string or a list of query strings; returns {} for empty input and a
    mapping of raw query -> [(cpe, score), ...] otherwise."""
    if not queries:
        return {}
    query_list = [queries] if isinstance(queries, str) else queries
    return _search_cpes(query_list, cpe_version, count, threshold,
                         zero_extend_versions, keep_data_in_memory)
if __name__ == "__main__":
    # suppress progress output when stdout is piped/redirected
    SILENT = not sys.stdout.isatty()
    args = parse_args()
    if args.update:
        update(args.version)
    # first invocation with queries: build the database before searching
    if args.queries and not os.path.isfile(CPE_DATA_FILES[args.version]):
        if not SILENT:
            print("[+] Running initial setup (might take a couple of minutes)", file=sys.stderr)
        update(args.version)
    if args.queries:
        results = search_cpes(args.queries, args.version, args.count)
        for i, query in enumerate(results):
            if not SILENT and i > 0:
                print()
            # best match is always printed; the full ranking only interactively
            print(results[query][0][0])
            if not SILENT:
                pprint.pprint(results[query])
|
[
"urllib.parse.unquote",
"os.remove",
"zipfile.ZipFile",
"argparse.ArgumentParser",
"json.loads",
"os.path.realpath",
"json.dumps",
"threading.Lock",
"urllib.request.urlretrieve",
"os.path.isfile",
"sys.stdout.isatty",
"pprint.pprint",
"collections.Counter",
"os.path.join",
"re.compile"
] |
[((707, 872), 're.compile', 're.compile', (['"""<cpe-item name=\\\\"([^\\\\"]*)\\\\">.*?<title xml:lang=\\\\"en-US\\\\"[^>]*>([^<]*)</title>.*?<cpe-23:cpe23-item name=\\\\"([^\\\\"]*)\\\\\\""""'], {'flags': 're.DOTALL'}), '(\n \'<cpe-item name=\\\\"([^\\\\"]*)\\\\">.*?<title xml:lang=\\\\"en-US\\\\"[^>]*>([^<]*)</title>.*?<cpe-23:cpe23-item name=\\\\"([^\\\\"]*)\\\\"\'\n , flags=re.DOTALL)\n', (717, 872), False, 'import re\n'), ((876, 900), 're.compile', 're.compile', (['"""[\\\\w+\\\\.]+"""'], {}), "('[\\\\w+\\\\.]+')\n", (886, 900), False, 'import re\n'), ((918, 942), 're.compile', 're.compile', (['"""(.*);.*;.*"""'], {}), "('(.*);.*;.*')\n", (928, 942), False, 'import re\n'), ((965, 981), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (979, 981), False, 'import threading\n'), ((1004, 1041), 're.compile', 're.compile', (['"""\\\\b([\\\\d]+\\\\.?){1,4}\\\\b"""'], {}), "('\\\\b([\\\\d]+\\\\.?){1,4}\\\\b')\n", (1014, 1041), False, 'import re\n'), ((1071, 1131), 're.compile', 're.compile', (['"""\\\\b([\\\\d]+\\\\.?){1,4}([a-z][\\\\d]{0,3})?[^\\\\w]*$"""'], {}), "('\\\\b([\\\\d]+\\\\.?){1,4}([a-z][\\\\d]{0,3})?[^\\\\w]*$')\n", (1081, 1131), False, 'import re\n'), ((390, 416), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (406, 416), False, 'import os\n'), ((543, 601), 'os.path.join', 'os.path.join', (['SCRIPT_DIR', '"""cpe-search-dictionary_v2.2.csv"""'], {}), "(SCRIPT_DIR, 'cpe-search-dictionary_v2.2.csv')\n", (555, 601), False, 'import os\n'), ((628, 686), 'os.path.join', 'os.path.join', (['SCRIPT_DIR', '"""cpe-search-dictionary_v2.3.csv"""'], {}), "(SCRIPT_DIR, 'cpe-search-dictionary_v2.3.csv')\n", (640, 686), False, 'import os\n'), ((1276, 1404), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Search for CPEs using software names and titles -- Created by <NAME> (ra1nb0rn)"""'}), "(description=\n 'Search for CPEs using software names and titles -- Created by <NAME> (ra1nb0rn)'\n )\n", (1299, 
1404), False, 'import argparse\n'), ((2435, 2465), 'urllib.request.urlretrieve', 'urlretrieve', (['CPE_DICT_URL', 'dst'], {}), '(CPE_DICT_URL, dst)\n', (2446, 2465), False, 'from urllib.request import urlretrieve\n'), ((4658, 4672), 'os.remove', 'os.remove', (['dst'], {}), '(dst)\n', (4667, 4672), False, 'import os\n'), ((2564, 2589), 'zipfile.ZipFile', 'zipfile.ZipFile', (['dst', '"""r"""'], {}), "(dst, 'r')\n", (2579, 2589), False, 'import zipfile\n'), ((2675, 2714), 'os.path.join', 'os.path.join', (['SCRIPT_DIR', 'cpe_dict_name'], {}), '(SCRIPT_DIR, cpe_dict_name)\n', (2687, 2714), False, 'import os\n'), ((4687, 4726), 'os.path.join', 'os.path.join', (['SCRIPT_DIR', 'cpe_dict_name'], {}), '(SCRIPT_DIR, cpe_dict_name)\n', (4699, 4726), False, 'import os\n'), ((19373, 19392), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (19390, 19392), False, 'import sys\n'), ((19495, 19539), 'os.path.isfile', 'os.path.isfile', (['CPE_DATA_FILES[args.version]'], {}), '(CPE_DATA_FILES[args.version])\n', (19509, 19539), False, 'import os\n'), ((3743, 3757), 'collections.Counter', 'Counter', (['words'], {}), '(words)\n', (3750, 3757), False, 'from collections import Counter\n'), ((7412, 7430), 'json.loads', 'json.loads', (['cpe_tf'], {}), '(cpe_tf)\n', (7422, 7430), False, 'import json\n'), ((10312, 10330), 'json.loads', 'json.loads', (['cpe_tf'], {}), '(cpe_tf)\n', (10322, 10330), False, 'import json\n'), ((19972, 20001), 'pprint.pprint', 'pprint.pprint', (['results[query]'], {}), '(results[query])\n', (19985, 20001), False, 'import pprint\n'), ((3357, 3369), 'urllib.parse.unquote', 'unquote', (['cpe'], {}), '(cpe)\n', (3364, 3369), False, 'from urllib.parse import unquote\n'), ((4360, 4378), 'json.dumps', 'json.dumps', (['cpe_tf'], {}), '(cpe_tf)\n', (4370, 4378), False, 'import json\n'), ((4556, 4574), 'json.dumps', 'json.dumps', (['cpe_tf'], {}), '(cpe_tf)\n', (4566, 4574), False, 'import json\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides basic cryptographic utilities for the Python EnigmaBridge client, e.g.,
generating random numbers, encryption, decryption, padding, etc...
For now we use PyCrypto, later we may use pure python implementations to minimize dependency count.
"""
from past.builtins import basestring
from past.builtins import long
import logging
import os
import base64
import struct
from Crypto import Random
from Crypto.Cipher import AES
from Crypto.Util import Counter
from Crypto.Util.py3compat import *
from Crypto.Util.number import long_to_bytes, bytes_to_long, size, ceil_div
# Logging if needed
logger = logging.getLogger(__name__)
__author__ = 'dusanklinec'
#
# Utils
#
def to_bytes(x, blocksize=0):
    """
    Converts input to a byte string.
    Typically used in PyCrypto as an argument (e.g., key, iv)
    :param x: string (does nothing), bytearray, array with numbers, or integer
    :param blocksize: if > 0, left-pad the result with zero bytes to a
        multiple of this size
    :return: byte string representation of x
    """
    if isinstance(x, bytearray):
        return left_zero_pad(''.join([bchr(y) for y in x]), blocksize)
    elif isinstance(x, basestring):
        return left_zero_pad(x, blocksize)
    elif isinstance(x, (list, tuple)):
        return left_zero_pad(''.join([bchr(y) for y in bytearray(x)]), blocksize)
    elif isinstance(x, (int, long)):
        # integers are serialized big-endian via PyCrypto's long_to_bytes
        return long_to_bytes(x, blocksize)
    else:
        raise ValueError('Unknown input argument type')
def to_long(x):
    """
    Converts input to a long number (arbitrary precision python long)
    :param x: long, int, or any value accepted by to_bytes
    :return: long representation of x
    """
    if isinstance(x, long):
        result = x
    elif isinstance(x, int):
        result = long(x)
    else:
        # any byte-convertible input: interpret big-endian as an integer
        result = bytes_to_long(to_bytes(x))
    return result
def to_bytearray(x):
    """
    Converts input to byte array.
    An input that already is a bytearray is returned unchanged (no copy).
    :param x:
    :return: bytearray view of x
    """
    return x if isinstance(x, bytearray) else bytearray(x)
def to_hex(x):
    """
    Converts input to the hex string
    :param x: bytearray, string, or list/tuple of byte values
    :return: hex representation of x
    """
    if isinstance(x, bytearray):
        # NOTE(review): the 'hex' codec on bytearray is Python-2-only and
        # raises on Python 3 -- confirm the intended runtime
        return x.decode('hex')
    elif isinstance(x, basestring):
        return base64.b16encode(x)
    elif isinstance(x, (list, tuple)):
        return bytearray(x).decode('hex')
    else:
        raise ValueError('Unknown input argument type')
def from_hex(x):
    """
    Decode a hex-coded (base16) string into the raw byte string.
    Both lower- and upper-case hex digits are accepted.
    :param x: hex string
    :return: decoded bytes
    """
    return base64.b16decode(x, casefold=True)
def long_bit_size(x):
    """Return the number of bits needed to represent the number x."""
    return size(x)
def long_byte_size(x):
    """Return the number of whole bytes needed to represent the number x."""
    return ceil_div(long_bit_size(x), 8)
def get_zero_vector(numBytes):
    """
    Generates a zero vector of a given size
    :param numBytes: number of zero bytes the vector consists of
    :return: ASCII string of numBytes NUL ('\\x00') characters
    """
    # string repetition is equivalent to decoding a zero-filled bytearray
    # but avoids the intermediate allocation and the ASCII decode step
    return '\x00' * numBytes
def left_zero_pad(s, blocksize):
    """
    Left padding with zero bytes to a given block size
    :param s: byte string to pad
    :param blocksize: target block multiple; 0 disables padding
    :return: s left-padded with zero bytes to a multiple of blocksize
    """
    if blocksize > 0 and len(s) % blocksize:
        s = (blocksize - len(s) % blocksize) * b('\000') + s
    return s
def str_equals(a, b):
    """
    Constant time string equals method - no time leakage
    :param a:
    :param b:
    :return: True only if both strings have the same length and content
    """
    al = len(a)
    bl = len(b)
    # FIX: the comparison previously started from True, so two strings with
    # an equal common prefix but different lengths compared equal. A length
    # mismatch can never be equality; the length itself is not secret, so
    # deriving the initial value from it leaks no timing information.
    match = al == bl
    for i in range(0, min(al, bl)):
        match &= a[i] == b[i]
    return match
def bytes_replace(byte_str, start_idx, stop_idx, replacement):
    """
    Replaces given portion of the byte string with the replacement.
    The input is left unmodified; a new byte string is returned.
    :param byte_str:
    :param start_idx:
    :param stop_idx:
    :param replacement:
    :return: byte_str with [start_idx:stop_idx] substituted by replacement
    """
    head = byte_str[:start_idx]
    tail = byte_str[stop_idx:]
    return head + replacement + tail
def bytes_transform(byte_str, start_idx, stop_idx, fction):
    """
    Takes portion of the byte array and passes it to the function for
    transformation; the transformed slice is spliced back into a new byte
    string (the input is left unmodified).
    :param byte_str:
    :param start_idx:
    :param stop_idx:
    :param fction: callable applied to byte_str[start_idx:stop_idx]
    :return: new byte string with the transformed slice
    """
    transformed = fction(byte_str[start_idx:stop_idx])
    return byte_str[:start_idx] + transformed + byte_str[stop_idx:]
def bytes_to_short(byte, offset=0):
    """Read a big-endian unsigned 16-bit integer from *byte* at *offset*."""
    (value,) = struct.unpack('>H', byte[offset:offset + 2])
    return value
def short_to_bytes(short):
    """Serialize an integer as a big-endian unsigned 16-bit byte string."""
    value = int(short)
    return struct.pack('>H', value)
def bytes_to_byte(byte, offset=0):
    """Read a single unsigned byte value from *byte* at *offset*."""
    (value,) = struct.unpack('>B', byte[offset:offset + 1])
    return value
def byte_to_bytes(byte):
    """Serialize the low 8 bits of an integer as a single byte."""
    masked = int(byte) & 0xFF
    return struct.pack('>B', masked)
def dump_uint(n):
    """
    Serialize a non-negative integer into a list of byte values in
    little-endian order (least significant byte first).
    :param n: non-negative integer
    :return: list of byte values; 0 serializes to an empty list
    """
    out = []
    while n:
        n, low = divmod(n, 256)
        out.append(low)
    return out
#
# Randomness
#
def get_random_vector(numBytes):
    """Return *numBytes* cryptographically secure random bytes via os.urandom."""
    return os.urandom(numBytes)
def get_random_integer(N, randfunc=None):
    """getRandomInteger(N:int, randfunc:callable):long
    Return a random number with at most N bits.
    If randfunc is omitted, then Random.new().read is used.
    This function is for internal use only and may be renamed or removed in
    the future.
    """
    if randfunc is None:
        randfunc = Random.new().read
    # draw the whole bytes first ...
    S = randfunc(N>>3)
    odd_bits = N % 8
    if odd_bits != 0:
        # ... then one extra byte, keeping only its top odd_bits bits
        char = ord(randfunc(1)) >> (8-odd_bits)
        S = bchr(char) + S
    value = bytes_to_long(S)
    return value
def get_random_range(a, b, randfunc=None):
    """getRandomRange(a:int, b:int, randfunc:callable):long
    Return a random number n so that a <= n < b.
    If randfunc is omitted, then Random.new().read is used.
    This function is for internal use only and may be renamed or removed in
    the future.
    """
    range_ = b - a - 1
    bits = size(range_)
    # rejection sampling: redraw until the value falls inside the range,
    # which keeps the distribution uniform
    value = get_random_integer(bits, randfunc)
    while value > range_:
        value = get_random_integer(bits, randfunc)
    return a + value
#
# Padding
#
class Padding(object):
    """Abstract base class for the padding schemes below; subclasses
    implement the static pad()/unpad() pair."""

    @staticmethod
    def unpad(data, *args, **kwargs):  # pragma: no cover
        """Remove the scheme's padding from *data*.

        :returns: unpadded data
        :rtype: list
        """
        raise NotImplementedError()

    @staticmethod
    def pad(data, *args, **kwargs):  # pragma: no cover
        """Apply the scheme's padding to *data*.

        :returns: padded data
        :rtype: list
        """
        raise NotImplementedError()
class EmptyPadding(Padding):
    """No-op padding scheme: data passes through unchanged in both directions."""

    @staticmethod
    def pad(data, *args, **kwargs):
        return data

    @staticmethod
    def unpad(data, *args, **kwargs):
        return data
class PKCS7(Padding):
    """PKCS#7 padding: append k bytes of value k, where
    k = bs - len(data) % bs (so a full extra block is added when the
    input is already block-aligned)."""

    @staticmethod
    def pad(data, *args, **kwargs):
        bs = kwargs.get('bs', 16)
        pad_len = bs - len(data) % bs
        return data + pad_len * chr(pad_len)

    @staticmethod
    def unpad(data, *args, **kwargs):
        # the last byte encodes how many padding bytes to strip
        pad_len = ord(data[len(data) - 1:])
        return data[:-pad_len]
class PKCS15(Padding):
    """PKCS#1 v1.5 padding: EB = 00 || BT || PS || 00 || D, with block
    types 0 (zero PS), 1 (0xFF PS) and 2 (random non-zero PS)."""
    @staticmethod
    def unpad(data, *args, **kwargs):
        # bs = total block size, bt = block type (0, 1 or 2)
        bs = kwargs.get('bs', 256 if len(args) == 0 else args[0])
        bt = kwargs.get('bt', 2 if len(args) <= 1 else args[1])
        prefix = b("\x00") + bchr(bt)
        if data[0:2] != prefix:
            raise ValueError('Padding error')
        # Not needed in the client
        raise NotImplementedError()
    @staticmethod
    def pad(data, *args, **kwargs):
        bs = kwargs.get('bs', 256 if len(args) == 0 else args[0])
        bt = kwargs.get('bt', 2 if len(args) <= 1 else args[1])
        data = to_bytes(data)
        blb = len(data)
        # 3 = leading 00, the BT byte and the 00 separator before the data
        if blb+3 > bs:
            raise ValueError('Input data too long')
        ps_len = bs - 3 - blb
        padding_str = bchr(0x00) # tmp
        if bt == 0:
            padding_str = bchr(0x00) * ps_len
        elif bt == 1:
            padding_str = bchr(0xFF) * ps_len
        elif bt == 2:
            # block type 2: PS consists of random non-zero bytes
            arr = [int(get_random_range(1, 0x100)) for _ in range(ps_len)]
            padding_str = to_bytes(arr)
        else:
            raise ValueError('Unknown padding type')
        return b("\x00") + bchr(bt) + padding_str + b("\x00") + data
#
# Encryption
#
def aes_ecb(key):
    """
    Returns AES-ECB instance that can be used for [incremental] encryption/decryption in ProcessData.
    ECB mode has no IV.
    :param key: AES key
    :return: AES cipher object in ECB mode
    """
    return AES.new(key, AES.MODE_ECB)
def aes_cbc(key, iv=None):
    """
    Returns AES-CBC instance that can be used for [incremental] encryption/decryption in ProcessData.
    Uses zero IV when none is given.
    :param key: AES key
    :param iv: optional 16-byte IV; defaults to an all-zero IV
    :return: AES cipher object in CBC mode
    """
    return AES.new(key, AES.MODE_CBC, iv if iv is not None else get_zero_vector(16))
def aes_ctr(key, counter=None):
    """
    Returns AES-CTR instance that can be used for [incremental] encryption/decryption in ProcessData.
    A fresh 128-bit counter is created when none is given.
    :param key: AES key
    :param counter: optional Crypto.Util.Counter instance
    :return: AES cipher object in CTR mode
    """
    return AES.new(key, AES.MODE_CTR, counter=(counter if counter is not None else Counter.new(128)))
def aes(encrypt, key, data):
    """
    One-pass AES-CBC with a zero IV, as used in ProcessData (don't panic:
    an IV-like random nonce is included in the first plaintext block there).
    Does not apply padding (data has to be already padded).
    :param encrypt: True to encrypt, False to decrypt
    :param key: AES key
    :param data: padded input
    :return: ciphertext or plaintext
    """
    cipher = AES.new(key, AES.MODE_CBC, get_zero_vector(16))
    operation = cipher.encrypt if encrypt else cipher.decrypt
    return operation(data)
def aes_enc(key, data):
    """AES-CBC encrypt *data* (already padded) under *key* with a zero IV."""
    return aes(True, key, data)
def aes_dec(key, data):
    """AES-CBC decrypt *data* under *key* with a zero IV."""
    return aes(False, key, data)
def cbc_mac(key, data):
    """
    AES-CBC-MAC on the data used in ProcessData.
    Does not use padding (data has to be already padded).
    :param key: AES key
    :param data: input, already padded to the AES block size
    :return: the last 16-byte ciphertext block, i.e. the CBC-MAC tag
    """
    engine = AES.new(key, AES.MODE_CBC, get_zero_vector(16))
    return engine.encrypt(data)[-16:]
def rsa_enc(data, modulus, exponent):
    """
    Simple RAW RSA encryption method, returns byte string.
    Returns byte string of the same size as the modulus (left padded with 0)
    :param data: plaintext (integer, string or byte-convertible value)
    :param modulus: RSA modulus
    :param exponent: RSA public exponent
    :return: data^exponent mod modulus as a modulus-sized byte string
    """
    modulus = to_long(modulus)
    exponent = to_long(exponent)
    data = to_long(data)
    # textbook RSA: no OAEP / PKCS#1 padding is applied here
    return long_to_bytes(pow(data, exponent, modulus), long_byte_size(modulus))
|
[
"Crypto.Util.number.long_to_bytes",
"Crypto.Util.Counter.new",
"struct.unpack",
"Crypto.Util.number.bytes_to_long",
"Crypto.Cipher.AES.new",
"base64.b16decode",
"past.builtins.long",
"base64.b16encode",
"Crypto.Util.number.size",
"Crypto.Random.new",
"os.urandom",
"logging.getLogger"
] |
[((658, 685), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (675, 685), False, 'import logging\n'), ((2439, 2464), 'base64.b16decode', 'base64.b16decode', (['x', '(True)'], {}), '(x, True)\n', (2455, 2464), False, 'import base64\n'), ((2500, 2507), 'Crypto.Util.number.size', 'size', (['x'], {}), '(x)\n', (2504, 2507), False, 'from Crypto.Util.number import long_to_bytes, bytes_to_long, size, ceil_div\n'), ((4727, 4747), 'os.urandom', 'os.urandom', (['numBytes'], {}), '(numBytes)\n', (4737, 4747), False, 'import os\n'), ((5273, 5289), 'Crypto.Util.number.bytes_to_long', 'bytes_to_long', (['S'], {}), '(S)\n', (5286, 5289), False, 'from Crypto.Util.number import long_to_bytes, bytes_to_long, size, ceil_div\n'), ((5657, 5669), 'Crypto.Util.number.size', 'size', (['range_'], {}), '(range_)\n', (5661, 5669), False, 'from Crypto.Util.number import long_to_bytes, bytes_to_long, size, ceil_div\n'), ((8229, 8255), 'Crypto.Cipher.AES.new', 'AES.new', (['key', 'AES.MODE_ECB'], {}), '(key, AES.MODE_ECB)\n', (8236, 8255), False, 'from Crypto.Cipher import AES\n'), ((4096, 4140), 'struct.unpack', 'struct.unpack', (['""">H"""', 'byte[offset:offset + 2]'], {}), "('>H', byte[offset:offset + 2])\n", (4109, 4140), False, 'import struct\n'), ((4260, 4304), 'struct.unpack', 'struct.unpack', (['""">B"""', 'byte[offset:offset + 1]'], {}), "('>B', byte[offset:offset + 1])\n", (4273, 4304), False, 'import struct\n'), ((1627, 1634), 'past.builtins.long', 'long', (['x'], {}), '(x)\n', (1631, 1634), False, 'from past.builtins import long\n'), ((2134, 2153), 'base64.b16encode', 'base64.b16encode', (['x'], {}), '(x)\n', (2150, 2153), False, 'import base64\n'), ((5101, 5113), 'Crypto.Random.new', 'Random.new', ([], {}), '()\n', (5111, 5113), False, 'from Crypto import Random\n'), ((8834, 8850), 'Crypto.Util.Counter.new', 'Counter.new', (['(128)'], {}), '(128)\n', (8845, 8850), False, 'from Crypto.Util import Counter\n'), ((1313, 1340), 
'Crypto.Util.number.long_to_bytes', 'long_to_bytes', (['x', 'blocksize'], {}), '(x, blocksize)\n', (1326, 1340), False, 'from Crypto.Util.number import long_to_bytes, bytes_to_long, size, ceil_div\n')]
|
import math
import torch
import pytest
from pyro.distributions import Gamma, InverseGamma
from tests.common import assert_equal
@pytest.mark.parametrize('concentration', [3.3, 4.0])
@pytest.mark.parametrize('rate', [2.5, 3.0])
def test_sample(concentration, rate, n_samples=int(1e6)):
    # Draw a large sample and compare the empirical moments against the
    # analytic moments of InverseGamma(concentration=c, rate):
    #   mean = rate / (c - 1), std = rate / ((c - 1) * sqrt(c - 2))
    # (all parametrized c values are > 2, so both moments exist)
    samples = InverseGamma(concentration, rate).sample((n_samples,))
    mean, std = samples.mean().item(), samples.std().item()
    expected_mean = rate / (concentration - 1.0)
    expected_std = rate / ((concentration - 1.0) * math.sqrt(concentration - 2.0))
    assert_equal(mean, expected_mean, prec=1e-2)
    assert_equal(std, expected_std, prec=0.03)
@pytest.mark.parametrize('concentration', [2.5, 4.0])
@pytest.mark.parametrize('rate', [2.5, 3.0])
@pytest.mark.parametrize('value', [0.5, 1.7])
def test_log_prob(concentration, rate, value):
    # Check InverseGamma's log_prob against the change-of-variables identity
    # log p_IG(x) = log p_Gamma(1/x) - 2*log(x)  (Jacobian of x -> 1/x)
    value = torch.tensor(value)
    log_prob = InverseGamma(concentration, rate).log_prob(value)
    expected_log_prob = Gamma(concentration, rate).log_prob(1.0 / value) - 2.0 * value.log()
    assert_equal(log_prob, expected_log_prob, prec=1e-6)
|
[
"math.sqrt",
"tests.common.assert_equal",
"pyro.distributions.InverseGamma",
"pytest.mark.parametrize",
"pyro.distributions.Gamma",
"torch.tensor"
] |
[((133, 185), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""concentration"""', '[3.3, 4.0]'], {}), "('concentration', [3.3, 4.0])\n", (156, 185), False, 'import pytest\n'), ((187, 230), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""rate"""', '[2.5, 3.0]'], {}), "('rate', [2.5, 3.0])\n", (210, 230), False, 'import pytest\n'), ((649, 701), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""concentration"""', '[2.5, 4.0]'], {}), "('concentration', [2.5, 4.0])\n", (672, 701), False, 'import pytest\n'), ((703, 746), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""rate"""', '[2.5, 3.0]'], {}), "('rate', [2.5, 3.0])\n", (726, 746), False, 'import pytest\n'), ((748, 792), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""value"""', '[0.5, 1.7]'], {}), "('value', [0.5, 1.7])\n", (771, 792), False, 'import pytest\n'), ((554, 598), 'tests.common.assert_equal', 'assert_equal', (['mean', 'expected_mean'], {'prec': '(0.01)'}), '(mean, expected_mean, prec=0.01)\n', (566, 598), False, 'from tests.common import assert_equal\n'), ((603, 645), 'tests.common.assert_equal', 'assert_equal', (['std', 'expected_std'], {'prec': '(0.03)'}), '(std, expected_std, prec=0.03)\n', (615, 645), False, 'from tests.common import assert_equal\n'), ((852, 871), 'torch.tensor', 'torch.tensor', (['value'], {}), '(value)\n', (864, 871), False, 'import torch\n'), ((1034, 1087), 'tests.common.assert_equal', 'assert_equal', (['log_prob', 'expected_log_prob'], {'prec': '(1e-06)'}), '(log_prob, expected_log_prob, prec=1e-06)\n', (1046, 1087), False, 'from tests.common import assert_equal\n'), ((303, 336), 'pyro.distributions.InverseGamma', 'InverseGamma', (['concentration', 'rate'], {}), '(concentration, rate)\n', (315, 336), False, 'from pyro.distributions import Gamma, InverseGamma\n'), ((518, 548), 'math.sqrt', 'math.sqrt', (['(concentration - 2.0)'], {}), '(concentration - 2.0)\n', (527, 548), False, 'import math\n'), ((887, 920), 
'pyro.distributions.InverseGamma', 'InverseGamma', (['concentration', 'rate'], {}), '(concentration, rate)\n', (899, 920), False, 'from pyro.distributions import Gamma, InverseGamma\n'), ((961, 987), 'pyro.distributions.Gamma', 'Gamma', (['concentration', 'rate'], {}), '(concentration, rate)\n', (966, 987), False, 'from pyro.distributions import Gamma, InverseGamma\n')]
|
# coding=utf-8
import logging
import datetime as dt
from multiprocessing.util import Finalize
import sqlalchemy
from celery import current_app
from celery import schedules
from celery.beat import Scheduler, ScheduleEntry
from celery.five import values, items
from celery.utils.encoding import safe_str, safe_repr
from celery.utils.log import get_logger
from celery.utils.time import maybe_make_aware
from kombu.utils.json import dumps, loads
from .session import session_cleanup
from .session import SessionManager
from .models import (
PeriodicTask, PeriodicTaskChanged,
CrontabSchedule, IntervalSchedule,
SolarSchedule,
)
# This scheduler must wake up more frequently than the
# regular of 5 minutes because it needs to take external
# changes to the schedule into account.
DEFAULT_MAX_INTERVAL = 5  # seconds
# Fallback database URI when neither the constructor nor the app config
# provides `beat_dburi`.
DEFAULT_BEAT_DBURI = 'sqlite:///schedule.db'
# Log-message template used when an entry cannot be written to the database.
ADD_ENTRY_ERROR = """\
Cannot add entry %r to database schedule: %r. Contents: %r
"""
session_manager = SessionManager()
# session = session_manager()
logger = get_logger('celery_sqlalchemy_scheduler.schedulers')
class ModelEntry(ScheduleEntry):
    """Scheduler entry taken from database row.

    Wraps a ``PeriodicTask`` ORM row so celery-beat can treat it like a
    regular in-memory schedule entry, persisting run state back via save().
    """
    # Maps each celery schedule class to the ORM model that stores it and
    # the PeriodicTask relationship field that points at that model.
    model_schedules = (
        # (schedule_type, model_type, model_field)
        (schedules.crontab, CrontabSchedule, 'crontab'),
        (schedules.schedule, IntervalSchedule, 'interval'),
        (schedules.solar, SolarSchedule, 'solar'),
    )
    # Fields written back to the database row by save().
    save_fields = ['last_run_at', 'total_run_count', 'no_changes']
    def __init__(self, model, Session, app=None, **kw):
        """Initialize the model entry.

        :param model: a ``PeriodicTask`` ORM instance backing this entry.
        :param Session: session factory used for later persistence.
        :param app: optional Celery app; defaults to the current app.
        :param kw: may contain ``session``, an already-open session to reuse.
        """
        self.app = app or current_app._get_current_object()
        self.session = kw.get('session')
        self.Session = Session
        self.model = model
        self.name = model.name
        self.task = model.task
        try:
            self.schedule = model.schedule
            logger.debug('schedule: {}'.format(self.schedule))
        except Exception as e:
            # The related schedule row is gone; disable the task instead
            # of crashing the scheduler.
            logger.error(e)
            logger.error(
                'Disabling schedule %s that was removed from database',
                self.name,
            )
            self._disable(model)
        try:
            # args/kwargs are stored on the model as JSON strings.
            self.args = loads(model.args or '[]')
            self.kwargs = loads(model.kwargs or '{}')
        except ValueError as exc:
            logger.exception(
                'Removing schedule %s for argument deseralization error: %r',
                self.name, exc,
            )
            self._disable(model)
        # Copy only the routing/expiry options that are actually set on
        # the row; None means "use the task/app default".
        self.options = {}
        for option in ['queue', 'exchange', 'routing_key', 'expires',
                       'priority']:
            value = getattr(model, option)
            if value is None:
                continue
            self.options[option] = value
        self.total_run_count = model.total_run_count
        self.enabled = model.enabled
        if not model.last_run_at:
            model.last_run_at = self._default_now()
        self.last_run_at = model.last_run_at
        # last_run_at loaded from the database may be naive (no tzinfo),
        # so the app timezone must be attached here.
        self.last_run_at = self.last_run_at.astimezone(self.app.timezone)
        # The same reasoning would apply to self.options['expires']:
        # if 'expires' in self.options:
        #     expires = self.options['expires']
        #     self.options['expires'] = expires.replace(tzinfo=self.app.timezone)
    def _disable(self, model):
        """Persist ``enabled=False`` so the task stops being scheduled."""
        model.no_changes = True
        self.model.enabled = self.enabled = model.enabled = False
        if self.session:
            self.session.add(model)
            self.session.commit()
        else:
            session = self.Session()
            with session_cleanup(session):
                session.add(model)
                session.commit()
        # obj = session.query(PeriodicTask).get(model.id)
        # obj.enable = model.enabled
        # session.add(obj)
        # session.commit()
    def is_due(self):
        """Return a schedstate (is_due, next_check_delay) for this entry."""
        if not self.model.enabled:
            # 5 second delay for re-enable.
            return schedules.schedstate(False, 5.0)
        # START DATE: only run after the `start_time`, if one exists.
        if self.model.start_time is not None:
            now = maybe_make_aware(self._default_now())
            start_time = self.model.start_time.replace(
                tzinfo=self.app.timezone)
            if now < start_time:
                # The datetime is before the start date - don't run.
                _, delay = self.schedule.is_due(self.last_run_at)
                # use original delay for re-check
                return schedules.schedstate(False, delay)
        # ONE OFF TASK: Disable one off tasks after they've ran once
        if self.model.one_off and self.model.enabled \
                and self.model.total_run_count > 0:
            self.model.enabled = False # disable
            self.model.total_run_count = 0 # Reset
            self.model.no_changes = False # Mark the model entry as changed
            save_fields = ('enabled',) # the additional fields to save
            self.save(save_fields)
            return schedules.schedstate(False, None) # Don't recheck
        return self.schedule.is_due(self.last_run_at)
    def _default_now(self):
        """Return the current time in the app's timezone."""
        now = self.app.now()
        # The PyTZ datetime must be localised for the Django-Celery-Beat
        # scheduler to work. Keep in mind that timezone arithmatic
        # with a localized timezone may be inaccurate.
        # return now.tzinfo.localize(now.replace(tzinfo=None))
        return now.replace(tzinfo=self.app.timezone)
    def __next__(self):
        """Record one execution and return a fresh entry for the next run."""
        # should be use `self._default_now()` or `self.app.now()` ?
        self.model.last_run_at = self.app.now()
        self.model.total_run_count += 1
        self.model.no_changes = True
        return self.__class__(self.model, Session=self.Session)
    next = __next__ # for 2to3
    def save(self, fields=tuple()):
        """Persist the entry's run state back to its database row.

        :params fields: tuple, the additional fields to save
        """
        # TODO:
        session = self.Session()
        with session_cleanup(session):
            # Object may not be synchronized, so only
            # change the fields we care about.
            obj = session.query(PeriodicTask).get(self.model.id)
            for field in self.save_fields:
                setattr(obj, field, getattr(self.model, field))
            for field in fields:
                setattr(obj, field, getattr(self.model, field))
            session.add(obj)
            session.commit()
    @classmethod
    def to_model_schedule(cls, session, schedule):
        """Find or create the ORM schedule row matching *schedule*.

        Returns a ``(model_schedule, model_field)`` pair where
        ``model_field`` names the PeriodicTask relationship to populate.
        """
        for schedule_type, model_type, model_field in cls.model_schedules:
            # change to schedule
            schedule = schedules.maybe_schedule(schedule)
            if isinstance(schedule, schedule_type):
                # TODO:
                model_schedule = model_type.from_schedule(session, schedule)
                return model_schedule, model_field
        raise ValueError(
            'Cannot convert schedule type {0!r} to model'.format(schedule))
    @classmethod
    def from_entry(cls, name, Session, app=None, **entry):
        """Create or update the named PeriodicTask row from a beat entry.

        **entry sample:
            {'task': 'celery.backend_cleanup',
            'schedule': schedules.crontab('0', '4', '*'),
            'options': {'expires': 43200}}
        """
        session = Session()
        with session_cleanup(session):
            periodic_task = session.query(
                PeriodicTask).filter_by(name=name).first()
            if not periodic_task:
                periodic_task = PeriodicTask(name=name)
            temp = cls._unpack_fields(session, **entry)
            periodic_task.update(**temp)
            session.add(periodic_task)
            try:
                session.commit()
            except sqlalchemy.exc.IntegrityError as exc:
                logger.error(exc)
                session.rollback()
            except Exception as exc:
                logger.error(exc)
                session.rollback()
            res = cls(periodic_task, app=app, Session=Session, session=session)
            return res
    @classmethod
    def _unpack_fields(cls, session, schedule,
                       args=None, kwargs=None, relative=None, options=None,
                       **entry):
        """Flatten a beat entry dict into PeriodicTask column values.

        **entry sample:
            {'task': 'celery.backend_cleanup',
            'schedule': <crontab: 0 4 * * * (m/h/d/dM/MY)>,
            'options': {'expires': 43200}}
        """
        model_schedule, model_field = cls.to_model_schedule(session, schedule)
        entry.update(
            # the model_id which to relationship
            {model_field + '_id': model_schedule.id},
            args=dumps(args or []),
            kwargs=dumps(kwargs or {}),
            **cls._unpack_options(**options or {})
        )
        return entry
    @classmethod
    def _unpack_options(cls, queue=None, exchange=None, routing_key=None,
                        priority=None, one_off=None, expires=None, **kwargs):
        """Normalize the ``options`` mapping of a beat entry into columns."""
        data = {
            'queue': queue,
            'exchange': exchange,
            'routing_key': routing_key,
            'priority': priority,
            'one_off': one_off,
        }
        if expires:
            # An int is interpreted as "seconds from now"; a datetime is
            # taken as-is; anything else is rejected.
            if isinstance(expires, int):
                expires = dt.datetime.utcnow() + dt.timedelta(seconds=expires)
            elif isinstance(expires, dt.datetime):
                pass
            else:
                raise ValueError('expires value error')
            data['expires'] = expires
        return data
    def __repr__(self):
        return '<ModelEntry: {0} {1}(*{2}, **{3}) {4}>'.format(
            safe_str(self.name), self.task, safe_repr(self.args),
            safe_repr(self.kwargs), self.schedule,
        )
class DatabaseScheduler(Scheduler):
    """Celery beat scheduler that keeps its schedule in a SQL database.

    Reads enabled ``PeriodicTask`` rows as the schedule and detects external
    edits via the ``PeriodicTaskChanged`` row's last_update timestamp.
    """
    Entry = ModelEntry
    Model = PeriodicTask
    Changes = PeriodicTaskChanged
    _schedule = None          # cached {name: Entry} mapping
    _last_timestamp = None    # last seen Changes.last_update value
    _initial_read = True      # first access of .schedule still pending
    _heap_invalidated = False # signals Scheduler.tick to rebuild its heap
    def __init__(self, *args, **kwargs):
        """Initialize the database scheduler."""
        self.app = kwargs['app']
        # URI precedence: explicit kwarg > app config > built-in default.
        self.dburi = kwargs.get('dburi') or self.app.conf.get(
            'beat_dburi') or DEFAULT_BEAT_DBURI
        self.engine, self.Session = session_manager.create_session(self.dburi)
        session_manager.prepare_models(self.engine)
        self._dirty = set()
        Scheduler.__init__(self, *args, **kwargs)
        # Ensure pending changes are flushed to the database at shutdown.
        self._finalize = Finalize(self, self.sync, exitpriority=5)
        self.max_interval = (kwargs.get('max_interval') or
                             self.app.conf.beat_max_loop_interval or
                             DEFAULT_MAX_INTERVAL)
    def setup_schedule(self):
        """override"""
        logger.info('setup_schedule')
        self.install_default_entries(self.schedule)
        self.update_from_dict(self.app.conf.beat_schedule)
    def all_as_schedule(self):
        """Load every enabled PeriodicTask row as a {name: Entry} dict."""
        # TODO:
        session = self.Session()
        with session_cleanup(session):
            logger.debug('DatabaseScheduler: Fetching database schedule')
            # get all enabled PeriodicTask
            models = session.query(self.Model).filter_by(enabled=True).all()
            s = {}
            for model in models:
                try:
                    s[model.name] = self.Entry(model,
                                               app=self.app,
                                               Session=self.Session,
                                               session=session)
                except ValueError:
                    # Row could not be converted to an entry; skip it.
                    pass
            return s
    def schedule_changed(self):
        """Return True when the Changes row's timestamp moved forward."""
        session = self.Session()
        with session_cleanup(session):
            changes = session.query(self.Changes).get(1)
            if not changes:
                # First run: create the sentinel row and report "unchanged".
                changes = self.Changes(id=1)
                session.add(changes)
                session.commit()
                return False
            last, ts = self._last_timestamp, changes.last_update
            try:
                if ts and ts > (last if last else ts):
                    return True
            finally:
                # Always remember the latest timestamp we observed.
                self._last_timestamp = ts
            return False
    def reserve(self, entry):
        """override
        It will be called in parent class.
        """
        new_entry = next(entry)
        # Need to store entry by name, because the entry may change
        # in the mean time.
        self._dirty.add(new_entry.name)
        return new_entry
    def sync(self):
        """override"""
        logger.info('Writing entries...')
        _tried = set()
        _failed = set()
        try:
            while self._dirty:
                name = self._dirty.pop()
                try:
                    self.schedule[name].save() # save to database
                    logger.debug(
                        '{name} save to database'.format(name=name))
                    _tried.add(name)
                except (KeyError) as exc:
                    logger.error(exc)
                    _failed.add(name)
        except sqlalchemy.exc.IntegrityError as exc:
            logger.exception('Database error while sync: %r', exc)
        except Exception as exc:
            logger.exception(exc)
        finally:
            # retry later, only for the failed ones
            self._dirty |= _failed
    def update_from_dict(self, mapping):
        """Merge a beat_schedule-style dict into the database schedule."""
        s = {}
        for name, entry_fields in items(mapping):
            # {'task': 'celery.backend_cleanup',
            # 'schedule': schedules.crontab('0', '4', '*'),
            # 'options': {'expires': 43200}}
            try:
                entry = self.Entry.from_entry(
                    name, Session=self.Session, app=self.app,
                    **entry_fields)
                if entry.model.enabled:
                    s[name] = entry
            except Exception as exc:
                logger.error(ADD_ENTRY_ERROR, name, exc, entry_fields)
        # update self.schedule
        self.schedule.update(s)
    def install_default_entries(self, data):
        """Install celery's built-in backend-cleanup entry when applicable."""
        entries = {}
        if self.app.conf.result_expires:
            entries.setdefault(
                'celery.backend_cleanup', {
                    'task': 'celery.backend_cleanup',
                    'schedule': schedules.crontab('0', '4', '*'),
                    'options': {'expires': 12 * 3600},
                },
            )
        self.update_from_dict(entries)
    def schedules_equal(self, *args, **kwargs):
        """Report schedules as changed once after a heap invalidation."""
        if self._heap_invalidated:
            self._heap_invalidated = False
            return False
        return super(DatabaseScheduler, self).schedules_equal(*args, **kwargs)
    @property
    def schedule(self):
        """Current schedule, re-read from the database when it changed."""
        initial = update = False
        if self._initial_read:
            logger.debug('DatabaseScheduler: initial read')
            initial = update = True
            self._initial_read = False
        elif self.schedule_changed():
            # when you updated the `PeriodicTasks` model's `last_update` field
            logger.info('DatabaseScheduler: Schedule changed.')
            update = True
        if update:
            # Flush pending local state first, then reload everything.
            self.sync()
            self._schedule = self.all_as_schedule()
            # the schedule changed, invalidate the heap in Scheduler.tick
            if not initial:
                self._heap = []
                self._heap_invalidated = True
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('Current schedule:\n%s', '\n'.join(
                repr(entry) for entry in values(self._schedule)),
            )
        # logger.debug(self._schedule)
        return self._schedule
    @property
    def info(self):
        """override"""
        # return infomation about Schedule
        return ' . db -> {self.dburi}'.format(self=self)
|
[
"kombu.utils.json.dumps",
"multiprocessing.util.Finalize",
"celery.beat.Scheduler.__init__",
"kombu.utils.json.loads",
"celery.schedules.crontab",
"celery.utils.encoding.safe_repr",
"celery.five.values",
"celery.current_app._get_current_object",
"celery.schedules.schedstate",
"datetime.datetime.utcnow",
"celery.utils.log.get_logger",
"celery.utils.encoding.safe_str",
"datetime.timedelta",
"celery.five.items",
"celery.schedules.maybe_schedule"
] |
[((1039, 1091), 'celery.utils.log.get_logger', 'get_logger', (['"""celery_sqlalchemy_scheduler.schedulers"""'], {}), "('celery_sqlalchemy_scheduler.schedulers')\n", (1049, 1091), False, 'from celery.utils.log import get_logger\n'), ((10411, 10452), 'celery.beat.Scheduler.__init__', 'Scheduler.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (10429, 10452), False, 'from celery.beat import Scheduler, ScheduleEntry\n'), ((10478, 10519), 'multiprocessing.util.Finalize', 'Finalize', (['self', 'self.sync'], {'exitpriority': '(5)'}), '(self, self.sync, exitpriority=5)\n', (10486, 10519), False, 'from multiprocessing.util import Finalize\n'), ((13456, 13470), 'celery.five.items', 'items', (['mapping'], {}), '(mapping)\n', (13461, 13470), False, 'from celery.five import values, items\n'), ((1620, 1653), 'celery.current_app._get_current_object', 'current_app._get_current_object', ([], {}), '()\n', (1651, 1653), False, 'from celery import current_app\n'), ((2205, 2230), 'kombu.utils.json.loads', 'loads', (["(model.args or '[]')"], {}), "(model.args or '[]')\n", (2210, 2230), False, 'from kombu.utils.json import dumps, loads\n'), ((2257, 2284), 'kombu.utils.json.loads', 'loads', (["(model.kwargs or '{}')"], {}), "(model.kwargs or '{}')\n", (2262, 2284), False, 'from kombu.utils.json import dumps, loads\n'), ((4028, 4060), 'celery.schedules.schedstate', 'schedules.schedstate', (['(False)', '(5.0)'], {}), '(False, 5.0)\n', (4048, 4060), False, 'from celery import schedules\n'), ((5092, 5125), 'celery.schedules.schedstate', 'schedules.schedstate', (['(False)', 'None'], {}), '(False, None)\n', (5112, 5125), False, 'from celery import schedules\n'), ((6720, 6754), 'celery.schedules.maybe_schedule', 'schedules.maybe_schedule', (['schedule'], {}), '(schedule)\n', (6744, 6754), False, 'from celery import schedules\n'), ((9668, 9687), 'celery.utils.encoding.safe_str', 'safe_str', (['self.name'], {}), '(self.name)\n', (9676, 9687), False, 'from celery.utils.encoding 
import safe_str, safe_repr\n'), ((9700, 9720), 'celery.utils.encoding.safe_repr', 'safe_repr', (['self.args'], {}), '(self.args)\n', (9709, 9720), False, 'from celery.utils.encoding import safe_str, safe_repr\n'), ((9734, 9756), 'celery.utils.encoding.safe_repr', 'safe_repr', (['self.kwargs'], {}), '(self.kwargs)\n', (9743, 9756), False, 'from celery.utils.encoding import safe_str, safe_repr\n'), ((4573, 4607), 'celery.schedules.schedstate', 'schedules.schedstate', (['(False)', 'delay'], {}), '(False, delay)\n', (4593, 4607), False, 'from celery import schedules\n'), ((8717, 8734), 'kombu.utils.json.dumps', 'dumps', (['(args or [])'], {}), '(args or [])\n', (8722, 8734), False, 'from kombu.utils.json import dumps, loads\n'), ((8755, 8774), 'kombu.utils.json.dumps', 'dumps', (['(kwargs or {})'], {}), '(kwargs or {})\n', (8760, 8774), False, 'from kombu.utils.json import dumps, loads\n'), ((9310, 9330), 'datetime.datetime.utcnow', 'dt.datetime.utcnow', ([], {}), '()\n', (9328, 9330), True, 'import datetime as dt\n'), ((9333, 9362), 'datetime.timedelta', 'dt.timedelta', ([], {'seconds': 'expires'}), '(seconds=expires)\n', (9345, 9362), True, 'import datetime as dt\n'), ((14308, 14340), 'celery.schedules.crontab', 'schedules.crontab', (['"""0"""', '"""4"""', '"""*"""'], {}), "('0', '4', '*')\n", (14325, 14340), False, 'from celery import schedules\n'), ((15582, 15604), 'celery.five.values', 'values', (['self._schedule'], {}), '(self._schedule)\n', (15588, 15604), False, 'from celery.five import values, items\n')]
|
from django.urls import path
from items import views
# Routes for the items app: list, create, detail, edit and delete views.
# NOTE(review): presumably included under an app prefix by the project's
# root URLconf — confirm against the project urls module.
urlpatterns = [
    path('', views.list_items, name='list_items'),
    path('create/', views.create_item, name='create_item'),
    path('<int:item_id>/', views.get_item, name='get_item'),
    path('<int:item_id>/edit/', views.edit_item, name='edit_item'),
    path('<int:item_id>/delete/', views.delete_item, name='delete_item'),
]
|
[
"django.urls.path"
] |
[((75, 120), 'django.urls.path', 'path', (['""""""', 'views.list_items'], {'name': '"""list_items"""'}), "('', views.list_items, name='list_items')\n", (79, 120), False, 'from django.urls import path\n'), ((126, 180), 'django.urls.path', 'path', (['"""create/"""', 'views.create_item'], {'name': '"""create_item"""'}), "('create/', views.create_item, name='create_item')\n", (130, 180), False, 'from django.urls import path\n'), ((186, 241), 'django.urls.path', 'path', (['"""<int:item_id>/"""', 'views.get_item'], {'name': '"""get_item"""'}), "('<int:item_id>/', views.get_item, name='get_item')\n", (190, 241), False, 'from django.urls import path\n'), ((247, 309), 'django.urls.path', 'path', (['"""<int:item_id>/edit/"""', 'views.edit_item'], {'name': '"""edit_item"""'}), "('<int:item_id>/edit/', views.edit_item, name='edit_item')\n", (251, 309), False, 'from django.urls import path\n'), ((315, 383), 'django.urls.path', 'path', (['"""<int:item_id>/delete/"""', 'views.delete_item'], {'name': '"""delete_item"""'}), "('<int:item_id>/delete/', views.delete_item, name='delete_item')\n", (319, 383), False, 'from django.urls import path\n')]
|
import os
import shutil
from src.OrderData import OrderData
from src.logger import Logger
from src.shop import Shop
from src.pizzaIngredientsStagingTableBuilder import PizzaIngredientsStagingTableBuilder
from src.extraIngredientsStagingTableBuilder import ExtraIngredientsStagingTableBuilder
from src.pizzaCrustsStagingTableBuilder import PizzaCrustsStagingTableBuilder
from src.productsMigration import ProductsMigration
from src.zipcode import ZipCode
from src.muncipality import Muncipality
from src.otherProductsStagingTableBuilder import OtherProductsStagingTableBuilder
def main():
    """Run the full import pipeline when files are present in 'watch'.

    Builds the staging tables, migrates product data, then processes
    geo data (municipalities, zip codes), shops and order files.
    Does nothing when the 'watch' directory is empty.
    """
    # Loop through 'watch' directory
    files = os.listdir("watch")
    if files:
        # Build all staging tables, accumulating their run times (ms).
        staging_time = PizzaIngredientsStagingTableBuilder('pizza_ingredienten.xlsx', 'Extra Ingredienten.csv').process()
        staging_time += ExtraIngredientsStagingTableBuilder('Extra Ingredienten.csv').process()
        staging_time += PizzaCrustsStagingTableBuilder('pizzabodems.xlsx').process()
        staging_time += OtherProductsStagingTableBuilder('Overige Producten.xlsx').process()
        print('All staging tables built in ' + str(staging_time) + ' ms. \n')
        # BUG FIX: the migration duration was previously added onto the
        # staging total before printing, so the "migrated in" message
        # reported the cumulative time instead of the migration time alone.
        migration_time = ProductsMigration().migrate_product_data()
        print('Product data migrated in ' + str(migration_time) + ' milliseconds.\n')
        Muncipality("Postcode tabel.mdb").process()
        ZipCode("Postcode tabel.mdb").process()
        Shop("Winkels Mario.txt").process()
        # NOTE(review): OrderData appears to do its processing in the
        # constructor (return value unused) — confirm against src.OrderData.
        OrderData("MarioOrderData01_10000.csv")
        OrderData("MarioOrderData02_10000.csv")
        OrderData("MarioOrderData03_10000.csv")
        OrderData("MarioOrderData04_10000.csv")
        # # Move files to 'complete' directory
        # for file in files:
        #     move_file(file)
        Logger().commit_errors()
        # Logger().info("Import completed")
# Move file from watch to complete directory
def move_file(filename):
    """Move a processed file from the 'watch' directory to 'complete'.

    :param filename: bare file name (no directory component) of the file
        to move; both directories are resolved relative to the current
        working directory.
    """
    # os.path.join builds the paths portably instead of hand-concatenating
    # separator characters.
    dir_from = os.path.join(os.getcwd(), "watch", filename)
    dir_to = os.path.join(os.getcwd(), "complete", filename)
    shutil.move(dir_from, dir_to)
# Run the import only when executed directly as a script.
if __name__ == "__main__":
    main()
|
[
"src.muncipality.Muncipality",
"src.extraIngredientsStagingTableBuilder.ExtraIngredientsStagingTableBuilder",
"src.zipcode.ZipCode",
"src.pizzaIngredientsStagingTableBuilder.PizzaIngredientsStagingTableBuilder",
"os.getcwd",
"src.shop.Shop",
"src.otherProductsStagingTableBuilder.OtherProductsStagingTableBuilder",
"src.logger.Logger",
"shutil.move",
"src.OrderData.OrderData",
"src.productsMigration.ProductsMigration",
"src.pizzaCrustsStagingTableBuilder.PizzaCrustsStagingTableBuilder",
"os.listdir"
] |
[((640, 659), 'os.listdir', 'os.listdir', (['"""watch"""'], {}), "('watch')\n", (650, 659), False, 'import os\n'), ((2000, 2029), 'shutil.move', 'shutil.move', (['dir_from', 'dir_to'], {}), '(dir_from, dir_to)\n', (2011, 2029), False, 'import shutil\n'), ((1455, 1494), 'src.OrderData.OrderData', 'OrderData', (['"""MarioOrderData01_10000.csv"""'], {}), "('MarioOrderData01_10000.csv')\n", (1464, 1494), False, 'from src.OrderData import OrderData\n'), ((1503, 1542), 'src.OrderData.OrderData', 'OrderData', (['"""MarioOrderData02_10000.csv"""'], {}), "('MarioOrderData02_10000.csv')\n", (1512, 1542), False, 'from src.OrderData import OrderData\n'), ((1551, 1590), 'src.OrderData.OrderData', 'OrderData', (['"""MarioOrderData03_10000.csv"""'], {}), "('MarioOrderData03_10000.csv')\n", (1560, 1590), False, 'from src.OrderData import OrderData\n'), ((1599, 1638), 'src.OrderData.OrderData', 'OrderData', (['"""MarioOrderData04_10000.csv"""'], {}), "('MarioOrderData04_10000.csv')\n", (1608, 1638), False, 'from src.OrderData import OrderData\n'), ((1910, 1921), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1919, 1921), False, 'import os\n'), ((1958, 1969), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1967, 1969), False, 'import os\n'), ((721, 813), 'src.pizzaIngredientsStagingTableBuilder.PizzaIngredientsStagingTableBuilder', 'PizzaIngredientsStagingTableBuilder', (['"""pizza_ingredienten.xlsx"""', '"""Extra Ingredienten.csv"""'], {}), "('pizza_ingredienten.xlsx',\n 'Extra Ingredienten.csv')\n", (756, 813), False, 'from src.pizzaIngredientsStagingTableBuilder import PizzaIngredientsStagingTableBuilder\n'), ((840, 901), 'src.extraIngredientsStagingTableBuilder.ExtraIngredientsStagingTableBuilder', 'ExtraIngredientsStagingTableBuilder', (['"""Extra Ingredienten.csv"""'], {}), "('Extra Ingredienten.csv')\n", (875, 901), False, 'from src.extraIngredientsStagingTableBuilder import ExtraIngredientsStagingTableBuilder\n'), ((932, 982), 
'src.pizzaCrustsStagingTableBuilder.PizzaCrustsStagingTableBuilder', 'PizzaCrustsStagingTableBuilder', (['"""pizzabodems.xlsx"""'], {}), "('pizzabodems.xlsx')\n", (962, 982), False, 'from src.pizzaCrustsStagingTableBuilder import PizzaCrustsStagingTableBuilder\n'), ((1013, 1071), 'src.otherProductsStagingTableBuilder.OtherProductsStagingTableBuilder', 'OtherProductsStagingTableBuilder', (['"""Overige Producten.xlsx"""'], {}), "('Overige Producten.xlsx')\n", (1045, 1071), False, 'from src.otherProductsStagingTableBuilder import OtherProductsStagingTableBuilder\n'), ((1178, 1197), 'src.productsMigration.ProductsMigration', 'ProductsMigration', ([], {}), '()\n', (1195, 1197), False, 'from src.productsMigration import ProductsMigration\n'), ((1311, 1344), 'src.muncipality.Muncipality', 'Muncipality', (['"""Postcode tabel.mdb"""'], {}), "('Postcode tabel.mdb')\n", (1322, 1344), False, 'from src.muncipality import Muncipality\n'), ((1363, 1392), 'src.zipcode.ZipCode', 'ZipCode', (['"""Postcode tabel.mdb"""'], {}), "('Postcode tabel.mdb')\n", (1370, 1392), False, 'from src.zipcode import ZipCode\n'), ((1411, 1436), 'src.shop.Shop', 'Shop', (['"""Winkels Mario.txt"""'], {}), "('Winkels Mario.txt')\n", (1415, 1436), False, 'from src.shop import Shop\n'), ((1754, 1762), 'src.logger.Logger', 'Logger', ([], {}), '()\n', (1760, 1762), False, 'from src.logger import Logger\n')]
|
from __future__ import absolute_import
from __future__ import unicode_literals
import uuid
from datetime import timedelta, datetime
from django.test import TestCase
from casexml.apps.case.mock import CaseBlock
from casexml.apps.case.util import post_case_blocks
from corehq.apps.zapier.consts import EventTypes
from corehq.apps.zapier.models import ZapierSubscription
from corehq.apps.zapier.tests.test_utils import bootrap_domain_for_zapier
from corehq.form_processor.tests.utils import run_with_all_backends
from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records, delete_all_repeaters
from corehq.motech.repeaters.models import RepeatRecord
# Test fixtures: the domain all cases are created in, and the case type
# the Zapier subscription under test is registered for.
DOMAIN = 'zapier-case-forwarding-tests'
ZAPIER_CASE_TYPE = 'animal'
class TestZapierCaseForwarding(TestCase):
    """Verify that case create/update events enqueue repeat records for a
    Zapier subscription, and that events for other case types do not."""
    @classmethod
    def setUpClass(cls):
        super(TestZapierCaseForwarding, cls).setUpClass()
        cls.domain = DOMAIN
        cls.domain_object, cls.web_user, cls.api_key = bootrap_domain_for_zapier(cls.domain)
    @classmethod
    def tearDownClass(cls):
        cls.web_user.delete()
        cls.domain_object.delete()
        delete_all_repeaters()
        super(TestZapierCaseForwarding, cls).tearDownClass()
    def tearDown(self):
        # Reset per-test state: repeat records and subscriptions.
        delete_all_repeat_records()
        ZapierSubscription.objects.all().delete()
    @run_with_all_backends
    def test_create_case_forwarding(self):
        # NEW_CASE fires on create only: 1 record after create, still 1 after update.
        self._run_test(EventTypes.NEW_CASE, 1, 1)
    @run_with_all_backends
    def test_update_case_forwarding(self):
        # UPDATE_CASE fires on update only: 0 after create, 1 after update.
        self._run_test(EventTypes.UPDATE_CASE, 0, 1)
    @run_with_all_backends
    def test_change_case_forwarding(self):
        # CHANGED_CASE fires on both: 1 after create, 2 after update.
        self._run_test(EventTypes.CHANGED_CASE, 1, 2)
    @run_with_all_backends
    def test_case_forwarding_wrong_type(self):
        # A case of a different type must never trigger forwarding.
        self._run_test(EventTypes.NEW_CASE, 0, 0, 'plant')
    @run_with_all_backends
    def test_update_case_forwarding_wrong_type(self):
        self._run_test(EventTypes.UPDATE_CASE, 0, 0, 'plant')
    @run_with_all_backends
    def test_change_case_forwarding_wrong_type(self):
        self._run_test(EventTypes.CHANGED_CASE, 0, 0, 'plant')
    def _run_test(self, event_type, expected_records_after_create, expected_records_after_update,
                  case_type=ZAPIER_CASE_TYPE):
        """Create a subscription for *event_type*, then create and update a
        case of *case_type*, asserting the repeat-record count after each step.

        The subscription is always registered for ZAPIER_CASE_TYPE; passing a
        different *case_type* exercises the "wrong type" scenarios.
        """
        ZapierSubscription.objects.create(
            domain=self.domain,
            user_id=str(self.web_user._id),
            event_name=event_type,
            url='http://example.com/lets-make-some-cases/',
            case_type=ZAPIER_CASE_TYPE,
        )
        # create case and run checks
        case_id = uuid.uuid4().hex
        post_case_blocks(
            [
                CaseBlock(
                    create=True,
                    case_id=case_id,
                    case_type=case_type,
                ).as_xml()
            ], domain=self.domain
        )
        # Enqueued repeat records have next_check set 48 hours in the future.
        later = datetime.utcnow() + timedelta(hours=48 + 1)
        repeat_records = list(RepeatRecord.all(domain=self.domain, due_before=later))
        self.assertEqual(expected_records_after_create, len(repeat_records))
        for record in repeat_records:
            self.assertEqual(case_id, record.payload_id)
        # update case and run checks
        post_case_blocks(
            [
                CaseBlock(
                    create=False,
                    case_id=case_id,
                ).as_xml()
            ], domain=self.domain
        )
        repeat_records = list(RepeatRecord.all(domain=self.domain, due_before=later))
        self.assertEqual(expected_records_after_update, len(repeat_records))
        for record in repeat_records:
            self.assertEqual(case_id, record.payload_id)
|
[
"uuid.uuid4",
"corehq.apps.zapier.models.ZapierSubscription.objects.all",
"casexml.apps.case.mock.CaseBlock",
"datetime.datetime.utcnow",
"datetime.timedelta",
"corehq.motech.repeaters.dbaccessors.delete_all_repeat_records",
"corehq.apps.zapier.tests.test_utils.bootrap_domain_for_zapier",
"corehq.motech.repeaters.models.RepeatRecord.all",
"corehq.motech.repeaters.dbaccessors.delete_all_repeaters"
] |
[((961, 998), 'corehq.apps.zapier.tests.test_utils.bootrap_domain_for_zapier', 'bootrap_domain_for_zapier', (['cls.domain'], {}), '(cls.domain)\n', (986, 998), False, 'from corehq.apps.zapier.tests.test_utils import bootrap_domain_for_zapier\n'), ((1118, 1140), 'corehq.motech.repeaters.dbaccessors.delete_all_repeaters', 'delete_all_repeaters', ([], {}), '()\n', (1138, 1140), False, 'from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records, delete_all_repeaters\n'), ((1235, 1262), 'corehq.motech.repeaters.dbaccessors.delete_all_repeat_records', 'delete_all_repeat_records', ([], {}), '()\n', (1260, 1262), False, 'from corehq.motech.repeaters.dbaccessors import delete_all_repeat_records, delete_all_repeaters\n'), ((2572, 2584), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2582, 2584), False, 'import uuid\n'), ((2932, 2949), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2947, 2949), False, 'from datetime import timedelta, datetime\n'), ((2952, 2975), 'datetime.timedelta', 'timedelta', ([], {'hours': '(48 + 1)'}), '(hours=48 + 1)\n', (2961, 2975), False, 'from datetime import timedelta, datetime\n'), ((3006, 3060), 'corehq.motech.repeaters.models.RepeatRecord.all', 'RepeatRecord.all', ([], {'domain': 'self.domain', 'due_before': 'later'}), '(domain=self.domain, due_before=later)\n', (3022, 3060), False, 'from corehq.motech.repeaters.models import RepeatRecord\n'), ((3511, 3565), 'corehq.motech.repeaters.models.RepeatRecord.all', 'RepeatRecord.all', ([], {'domain': 'self.domain', 'due_before': 'later'}), '(domain=self.domain, due_before=later)\n', (3527, 3565), False, 'from corehq.motech.repeaters.models import RepeatRecord\n'), ((1271, 1303), 'corehq.apps.zapier.models.ZapierSubscription.objects.all', 'ZapierSubscription.objects.all', ([], {}), '()\n', (1301, 1303), False, 'from corehq.apps.zapier.models import ZapierSubscription\n'), ((2645, 2705), 'casexml.apps.case.mock.CaseBlock', 'CaseBlock', ([], {'create': '(True)', 
'case_id': 'case_id', 'case_type': 'case_type'}), '(create=True, case_id=case_id, case_type=case_type)\n', (2654, 2705), False, 'from casexml.apps.case.mock import CaseBlock\n'), ((3328, 3368), 'casexml.apps.case.mock.CaseBlock', 'CaseBlock', ([], {'create': '(False)', 'case_id': 'case_id'}), '(create=False, case_id=case_id)\n', (3337, 3368), False, 'from casexml.apps.case.mock import CaseBlock\n')]
|
from typing import List, Tuple
import requests
from bs4 import BeautifulSoup
from dagster import composite_solid, pipeline, solid
from food_ke.scripts.custom import Article
from food_ke.scripts.modes import dev, prod
from food_ke.scripts.ner import get_articles
from food_ke.scripts.utils import chunks, flatten
@solid
def doi_to_pmid(doi: str) -> str:
    """Resolve a DOI to a PubMed id via the NCBI esearch endpoint.

    Not a stable lookup: esearch only accepts one search term per request,
    so this handles a single DOI at a time.  The first 16 characters of the
    DOI are dropped before searching (presumably the ``https://doi.org/``
    prefix, which is exactly 16 characters — confirm against the callers).
    """
    query_url = (
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term="
        + doi[16:]
    )
    response = requests.get(query_url)
    parsed = BeautifulSoup(response.content, "lxml")
    return parsed.find("id").text
@solid
def find_similar_articles(
    context, articles: List[Article], num_similar: int
) -> List[Tuple[str, List[str]]]:
    """Look up similar PubMed articles for each input article.

    NCBI limits how many ids one elink request may carry, so the pmids are
    sent in batches of 200.
    """
    similar: List[Tuple[str, List[str]]] = []
    for batch in chunks(articles, 200):
        batch_pmids = [str(article.pmid) for article in batch]
        similar += _find_similar_by_pmid(batch_pmids, num_similar)
    context.log.info(str(similar))
    return similar
def _find_similar_by_pmid(
    pmids: List[str], num_articles: int
) -> List[Tuple[str, List[str]]]:
    """Fetch PubMed articles similar to each pmid via the NCBI elink endpoint.

    Returns one ``(source_pmid, similar_pmids)`` tuple per linkset in the
    response, with at most ``num_articles`` similar ids each.
    """
    url = (
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?id="
        + ",&id=".join(pmids)
    )
    page = requests.get(url)
    soup = BeautifulSoup(page.content, "lxml")
    # Each queried pmid gets its own <linkset> element in the response.
    retval = []
    for linkset in soup.find_all("linkset"):
        linksetdb = linkset.find("linksetdb")
        if linksetdb is None:
            # No neighbor links reported for this pmid; the original would
            # have crashed with AttributeError here.
            continue
        ids = linksetdb.find_all("id")
        if not ids:
            continue
        # The first <id> echoes the queried pmid; the rest are neighbors.
        # BUG FIX: the original only recorded a result once more than
        # num_articles neighbors had accumulated, so any pmid with
        # <= num_articles neighbors was silently dropped from the output.
        similar = [tag.text for tag in ids[1:1 + num_articles]]
        retval.append((ids[0].text, similar))
    return retval
@composite_solid
def similarity_search_composite_solid(articles: List[Article]):
    """Expand the seed articles with similar PubMed articles."""
    # NOTE(review): find_similar_articles declares a second input
    # (num_similar) that is not wired here — presumably it is supplied via
    # dagster run config at execution time; confirm before running.
    similar_pmids = find_similar_articles(articles)
    # TODO: download and deposit the articles into the database...
@pipeline(mode_defs=[dev, prod])
def similarity_search_pipeline():
    """Seed-corpus similarity search pipeline."""
    # TODO: filter to articles where from_foodmine is true, since only
    # those should be used as the seed corpus.
    seed_articles = get_articles()
    similarity_search_composite_solid(seed_articles)
|
[
"dagster.pipeline",
"requests.get",
"bs4.BeautifulSoup",
"food_ke.scripts.ner.get_articles",
"food_ke.scripts.utils.chunks"
] |
[((2156, 2187), 'dagster.pipeline', 'pipeline', ([], {'mode_defs': '[dev, prod]'}), '(mode_defs=[dev, prod])\n', (2164, 2187), False, 'from dagster import composite_solid, pipeline, solid\n'), ((586, 603), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (598, 603), False, 'import requests\n'), ((615, 650), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page.content', '"""lxml"""'], {}), "(page.content, 'lxml')\n", (628, 650), False, 'from bs4 import BeautifulSoup\n'), ((926, 947), 'food_ke.scripts.utils.chunks', 'chunks', (['articles', '(200)'], {}), '(articles, 200)\n', (932, 947), False, 'from food_ke.scripts.utils import chunks, flatten\n'), ((1355, 1372), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1367, 1372), False, 'import requests\n'), ((1384, 1419), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page.content', '"""lxml"""'], {}), "(page.content, 'lxml')\n", (1397, 1419), False, 'from bs4 import BeautifulSoup\n'), ((2247, 2261), 'food_ke.scripts.ner.get_articles', 'get_articles', ([], {}), '()\n', (2259, 2261), False, 'from food_ke.scripts.ner import get_articles\n')]
|
import multiprocessing
from Bio import SeqIO
import numpy as np
import timeit
from functools import partial
import pandas as pd
def translate(dna):
    """Translate a DNA sequence into the 16-residue padded paratope string.

    The sequence must be a whole number of codons and its protein must fit
    the scaffold  KFWATV + loop1 + FEVPVYAETLDEALQLAEWQY + loop2 + VTRVRP,
    where each loop is 6-8 residues long.  Loops shorter than 8 residues
    are padded in the middle with 'X' so the result is always 16 characters;
    stop codons translate to 'Z'.  Returns "FRAMESHIFT" whenever the
    sequence does not match this scaffold.
    """
    codon_table = {
        "TTT": "F", "TTC": "F", "TTA": "L", "TTG": "L",
        "CTT": "L", "CTC": "L", "CTA": "L", "CTG": "L",
        "ATT": "I", "ATC": "I", "ATA": "I", "ATG": "M",
        "GTT": "V", "GTC": "V", "GTA": "V", "GTG": "V",
        "TCT": "S", "TCC": "S", "TCA": "S", "TCG": "S",
        "CCT": "P", "CCC": "P", "CCA": "P", "CCG": "P",
        "ACT": "T", "ACC": "T", "ACA": "T", "ACG": "T",
        "GCT": "A", "GCC": "A", "GCA": "A", "GCG": "A",
        "TAT": "Y", "TAC": "Y", "TAA": "Z", "TAG": "Z",
        "CAT": "H", "CAC": "H", "CAA": "Q", "CAG": "Q",
        "AAT": "N", "AAC": "N", "AAA": "K", "AAG": "K",
        "GAT": "D", "GAC": "D", "GAA": "E", "GAG": "E",
        "TGT": "C", "TGC": "C", "TGA": "Z", "TGG": "W",
        "CGT": "R", "CGC": "R", "CGA": "R", "CGG": "R",
        "AGT": "S", "AGC": "S", "AGA": "R", "AGG": "R",
        "GGT": "G", "GGC": "G", "GGA": "G", "GGG": "G",
    }
    if len(dna) % 3 != 0:
        return "FRAMESHIFT"
    # str() so that Biopython Seq slices work as dict keys too.
    protein = "".join(codon_table[str(dna[i:i + 3])]
                      for i in range(0, len(dna), 3))
    linker = "FEVPVYAETLDEALQLAEWQY"
    scaffold_ok = (protein[0:6] == "KFWATV"
                   and protein[-6:] == "VTRVRP"
                   and linker in protein)
    if not scaffold_ok:
        return "FRAMESHIFT"
    # Strip the fixed flanks, then split the remainder around the linker.
    core = protein[6:-6]
    cut = core.find(linker)
    loop1, loop2 = core[:cut], core[cut + len(linker):]
    if not (6 <= len(loop1) <= 8 and 6 <= len(loop2) <= 8):
        return "FRAMESHIFT"

    def pad(loop):
        # Centre-pad a short loop to 8 residues with 'X'.
        if len(loop) == 6:
            return loop[:3] + "XX" + loop[3:]
        if len(loop) == 7:
            return loop[:4] + "X" + loop[4:]
        return loop

    return pad(loop1) + pad(loop2)
def main():
    """Merge zotu paratope annotations with sort scores and yield data.

    Reads the pickled intermediates from the working directory, joins them
    step by step, writes each stage back to disk, and finally saves a
    1000-row CSV sample for eyeballing.
    """
    # The multiprocessing translation step that originally built
    # name_type.pkl from zotus_match.fasta is disabled; this run starts
    # from the pickled intermediates already on disk.

    # Attach per-sort match scores to each named zotu.
    name_type = pd.read_pickle('./name_type.pkl')
    otu_table = pd.read_pickle('./match_scores.pkl')
    merged_otu = pd.merge(name_type, otu_table, how='left',
                          left_on='Zotu_name', right_on='#OTU ID')
    merged_otu.to_pickle('./name_scores.pkl')

    # Keep only the identifying columns plus per-replicate and summary scores.
    scored = pd.read_pickle('./name_scores.pkl')
    summary_cols = []
    for sort in range(1, 11):
        summary_cols += ['Sort%d_mean_count' % sort,
                         'Sort%d_mean_score' % sort,
                         'Sort%d_std_score' % sort]
    keep_cols = ["#OTU ID", "DNA", "Paratope", "Stop", "CC"]
    keep_cols += ['Sort%d_%d_score' % (sort, rep)
                  for sort in range(1, 11) for rep in range(1, 4)]
    keep_cols += summary_cols
    scored[keep_cols].to_pickle('./name_scores_cleaned.pkl')

    # Join the cleaned scores with the IQ/SH yield measurements.
    cleaned = pd.read_pickle('./name_scores_cleaned.pkl')
    iq = pd.read_csv('./IQ_final.csv', header=0)
    sh = pd.read_csv('./SH_final.csv', header=0)
    yields = pd.merge(iq, sh, how='outer', on=['DNA', 'CC', 'Stop', 'AA'])
    yields['Paratope'] = yields['AA']
    yields = yields[["DNA", "Position_x", "IQ_Average",
                     'IQ_Trial1_Adj', 'IQ_Trial2_Adj', 'IQ_Trial3_Adj',
                     "Position_y", "SH_Average",
                     'SH_Trial1_Adj', 'SH_Trial2_Adj', 'SH_Trial3_Adj',
                     "CC", 'Stop', 'Paratope']]
    combined = pd.merge(cleaned, yields, how='outer',
                        on=['DNA', 'CC', 'Stop', 'Paratope'])
    combined.to_pickle('./name_scores_yield.pkl')

    # Save the last 1000 rows as a quick-look sample.
    sample = pd.read_pickle('./name_scores_yield.pkl').iloc[-1000:]
    sample.to_csv('./name_scores_yield_sample.csv')
# Run the merge pipeline only when executed as a script.
if __name__ == '__main__':
    main()
|
[
"pandas.read_pickle",
"pandas.merge",
"pandas.read_csv"
] |
[((2957, 2990), 'pandas.read_pickle', 'pd.read_pickle', (['"""./name_type.pkl"""'], {}), "('./name_type.pkl')\n", (2971, 2990), True, 'import pandas as pd\n'), ((3005, 3041), 'pandas.read_pickle', 'pd.read_pickle', (['"""./match_scores.pkl"""'], {}), "('./match_scores.pkl')\n", (3019, 3041), True, 'import pandas as pd\n'), ((3057, 3145), 'pandas.merge', 'pd.merge', (['name_type', 'otu_table'], {'how': '"""left"""', 'left_on': '"""Zotu_name"""', 'right_on': '"""#OTU ID"""'}), "(name_type, otu_table, how='left', left_on='Zotu_name', right_on=\n '#OTU ID')\n", (3065, 3145), True, 'import pandas as pd\n'), ((3223, 3258), 'pandas.read_pickle', 'pd.read_pickle', (['"""./name_scores.pkl"""'], {}), "('./name_scores.pkl')\n", (3237, 3258), True, 'import pandas as pd\n'), ((3888, 3931), 'pandas.read_pickle', 'pd.read_pickle', (['"""./name_scores_cleaned.pkl"""'], {}), "('./name_scores_cleaned.pkl')\n", (3902, 3931), True, 'import pandas as pd\n'), ((3945, 3984), 'pandas.read_csv', 'pd.read_csv', (['"""./IQ_final.csv"""'], {'header': '(0)'}), "('./IQ_final.csv', header=0)\n", (3956, 3984), True, 'import pandas as pd\n'), ((3997, 4036), 'pandas.read_csv', 'pd.read_csv', (['"""./SH_final.csv"""'], {'header': '(0)'}), "('./SH_final.csv', header=0)\n", (4008, 4036), True, 'import pandas as pd\n'), ((4053, 4126), 'pandas.merge', 'pd.merge', (['IQ_table', 'SH_table'], {'how': '"""outer"""', 'on': "['DNA', 'CC', 'Stop', 'AA']"}), "(IQ_table, SH_table, how='outer', on=['DNA', 'CC', 'Stop', 'AA'])\n", (4061, 4126), True, 'import pandas as pd\n'), ((4396, 4484), 'pandas.merge', 'pd.merge', (['otu_table', 'yields_table'], {'how': '"""outer"""', 'on': "['DNA', 'CC', 'Stop', 'Paratope']"}), "(otu_table, yields_table, how='outer', on=['DNA', 'CC', 'Stop',\n 'Paratope'])\n", (4404, 4484), True, 'import pandas as pd\n'), ((4580, 4621), 'pandas.read_pickle', 'pd.read_pickle', (['"""./name_scores_yield.pkl"""'], {}), "('./name_scores_yield.pkl')\n", (4594, 4621), True, 'import pandas as pd\n')]
|
import time
class Timer():
    """Simple wall-clock stopwatch.

    ``result`` reports -1 until both ``start`` and ``end`` have been
    called, and again after ``reset``.
    """

    def __init__(self, enable, Log):
        # The enable flag and Log object are stored for the caller's use;
        # the timer itself does not consult them.
        self._enable = enable
        self._startTime = 0
        self._endTime = -1
        self._dif = -1
        self.Log = Log

    def start(self):
        """Record the current time as the interval start."""
        self._startTime = time.time()

    def end(self):
        """Record the interval end and compute the elapsed seconds."""
        now = time.time()
        self._endTime = now
        self._dif = now - self._startTime

    def result(self, n=2):
        """Return the elapsed seconds rounded to ``n`` digits (-1 if unmeasured)."""
        return round(self._dif, n)

    def reset(self):
        """Discard the last measurement."""
        self._startTime, self._endTime, self._dif = 0, -1, -1
|
[
"time.time"
] |
[((245, 256), 'time.time', 'time.time', ([], {}), '()\n', (254, 256), False, 'import time\n'), ((305, 316), 'time.time', 'time.time', ([], {}), '()\n', (314, 316), False, 'import time\n')]
|
from setuptools import setup, find_packages

# Minimal packaging metadata for the gdcv distribution; packages are
# discovered automatically from the source tree.
setup(
    name='gdcv',
    version='0.1.0',
    packages=find_packages(),
)
|
[
"setuptools.find_packages"
] |
[((102, 117), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (115, 117), False, 'from setuptools import setup, find_packages\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 8 17:29:59 2020
@author: nmei
"""
import os
import itertools
import numpy as np
import pandas as pd
from shutil import rmtree
# Cluster resources per submitted PBS job.
verbose = 1
batch_size = 16
node = 1
core = 16
mem = 2 * node * core    # requested in gb by the PBS template below
cput = 24 * node * core  # requested cpu-time hours
# Hyper-parameter grid to sweep.
units = [2, 5, 10, 20, 50, 100, 300]  # one unit hidden layer cannot learn
dropouts = [0, 0.25, 0.5, 0.75]
activations = ['elu', 'relu', 'selu', 'sigmoid', 'tanh', 'linear']
models = ['alexnet', 'vgg19', 'densenet',
          # 'inception',
          'mobilenet', 'resnet']
output_activations = ['softmax', 'sigmoid']
# One DataFrame row per grid configuration (Cartesian product).
grid = list(itertools.product(units, dropouts, models,
                              activations, output_activations))
df = pd.DataFrame(np.array(grid),
                  columns=['hidden_units', 'dropouts', 'model_names',
                           'hidden_activations', 'output_activations'])
df['hidden_units'] = df['hidden_units'].astype(int)
df['dropouts'] = df['dropouts'].astype(float)
#############
train_template = '1.1.train_many_models.py'
decode_template = '5.1.RSA_as_a_function_of_noise.py'
scripts_folder = 'stability_batch'
# Start from an empty scripts folder every run.
if os.path.exists(scripts_folder):
    rmtree(scripts_folder)
os.mkdir(scripts_folder)
os.mkdir(f'{scripts_folder}/outputs')
from shutil import copyfile
copyfile('utils_deep.py', f'{scripts_folder}/utils_deep.py')
# Shell commands to qsub, collected for qsub_jobs.py below.
collections = []
# Bookkeeping of which generated script is routed to which device.
first_GPU,second_GPU = [],[]
replace = False # change to second GPU
# One iteration per hyper-parameter configuration: generate a train and a
# decode script from the templates, then a PBS submission file for both.
for ii,row in df.iterrows():
    # Suffix identifying this configuration, e.g. _2_0.25_vgg19_elu_softmax
    src = '_{}_{}_{}_{}_{}'.format(*list(row.to_dict().values()))
    scripts = []
    for template in [train_template,decode_template]:
        new_scripts_name = os.path.join(scripts_folder,template.replace('.py',f'{src}.py').replace('1.1.train_many_models','1.1.train'))
        scripts.append(new_scripts_name)
        # Second half of the grid rewrites idx_GPU to -1 below — presumably
        # routing those jobs to a different device; confirm the convention.
        if ii > df.shape[0]/2 :
            replace = True
            second_GPU.append(new_scripts_name)
        else:
            first_GPU.append(new_scripts_name)
        # Copy the template line by line, rewriting the configuration
        # variables to this row's hyper-parameters (sed-style).
        with open(new_scripts_name,'w') as new_file:
            with open(template,'r') as old_file:
                for line in old_file:
                    if "../" in line:
                        # generated scripts live one directory deeper
                        line = line.replace("../","../../")
                    elif "print_train = True" in line:
                        line = line.replace('True','False')
                    elif "pretrain_model_name = " in line:
                        line = f"pretrain_model_name = '{row['model_names']}'\n"
                    elif "hidden_units = " in line:
                        line = f"hidden_units = {row['hidden_units']}\n"
                    elif "hidden_func_name = " in line:
                        line = f"hidden_func_name = '{row['hidden_activations']}'\n"
                    elif "hidden_dropout = " in line:
                        line = f"hidden_dropout = {float(row['dropouts'])}\n"
                    elif "output_activation = " in line:
                        line = f"output_activation = '{row['output_activations']}'\n"
                    elif "train_folder = " in line:
                        line = "train_folder = 'grayscaled'\n"
                    elif "#plt.switch_backend('agg')" in line:
                        # enable headless plotting on the cluster
                        line = "plt.switch_backend('agg')\n"
                    elif "True #" in line:
                        line = line.replace("True","False")
                    elif "idx_GPU = 0" in line:
                        if replace:
                            line = line.replace('0','-1')
                    new_file.write(line)
                old_file.close()
            new_file.close()
    # PBS submission script running the train + decode pair back to back.
    new_batch_script_name = os.path.join(scripts_folder,f'SIM{ii+1}')
    content = f"""#!/bin/bash
#PBS -q bcbl
#PBS -l nodes={node}:ppn={core}
#PBS -l mem={mem}gb
#PBS -l cput={cput}:00:00
#PBS -N SIM{ii+1}
#PBS -o outputs/out_{ii+1}.txt
#PBS -e outputs/err_{ii+1}.txt
cd $PBS_O_WORKDIR
export PATH="/scratch/ningmei/anaconda3/bin:/scratch/ningmei/anaconda3/condabin:$PATH"
source activate keras-2.1.6_tensorflow-2.0.0
pwd
echo {scripts[0].split('/')[-1]}
echo {scripts[1].split('/')[-1]}
python "{scripts[0].split('/')[-1]}"
python "{scripts[1].split('/')[-1]}"
"""
    with open(new_batch_script_name,'w') as f:
        f.write(content)
        f.close()
    collections.append(f"qsub SIM{ii+1}")
# Write a helper that qsubs every generated PBS script, sleeping 0.3 s
# between submissions to avoid hammering the scheduler.
with open(f'{scripts_folder}/qsub_jobs.py','w') as f:
    f.write("""import os\nimport time""")
with open(f'{scripts_folder}/qsub_jobs.py','a') as f:
    for ii,line in enumerate(collections):
        if ii == 0:
            f.write(f'\nos.system("{line}")\n')
        else:
            f.write(f'time.sleep(.3)\nos.system("{line}")\n')
    f.close()
from glob import glob
# NOTE(review): the scripts generated above are named '1.1.train*' /
# '5.1.RSA*', so this 'simulation*.py' glob presumably matches nothing
# and run_all.py ends up empty — confirm the intended pattern.
all_scripts = glob(os.path.join(scripts_folder,'simulation*.py'))
with open(os.path.join(scripts_folder,'run_all.py'),'w') as f:
    f.write('import os\n')
    for files in all_scripts:
        # NOTE(review): scripts_folder is 'stability_batch', which does not
        # contain 'bash/', so this split leaves the path unchanged — verify.
        file_name = files.split('bash/')[-1]
        f.write(f'os.system("python {file_name}")\n')
    f.close()
|
[
"pandas.DataFrame",
"os.mkdir",
"os.path.exists",
"itertools.product",
"shutil.copyfile",
"shutil.rmtree",
"os.path.join"
] |
[((794, 913), 'pandas.DataFrame', 'pd.DataFrame', (['temp'], {'columns': "['hidden_units', 'dropouts', 'model_names', 'hidden_activations',\n 'output_activations']"}), "(temp, columns=['hidden_units', 'dropouts', 'model_names',\n 'hidden_activations', 'output_activations'])\n", (806, 913), True, 'import pandas as pd\n'), ((1283, 1320), 'os.mkdir', 'os.mkdir', (['f"""{scripts_folder}/outputs"""'], {}), "(f'{scripts_folder}/outputs')\n", (1291, 1320), False, 'import os\n'), ((1350, 1410), 'shutil.copyfile', 'copyfile', (['"""utils_deep.py"""', 'f"""{scripts_folder}/utils_deep.py"""'], {}), "('utils_deep.py', f'{scripts_folder}/utils_deep.py')\n", (1358, 1410), False, 'from shutil import copyfile\n'), ((1160, 1190), 'os.path.exists', 'os.path.exists', (['scripts_folder'], {}), '(scripts_folder)\n', (1174, 1190), False, 'import os\n'), ((1196, 1220), 'os.mkdir', 'os.mkdir', (['scripts_folder'], {}), '(scripts_folder)\n', (1204, 1220), False, 'import os\n'), ((1231, 1253), 'shutil.rmtree', 'rmtree', (['scripts_folder'], {}), '(scripts_folder)\n', (1237, 1253), False, 'from shutil import rmtree\n'), ((1258, 1282), 'os.mkdir', 'os.mkdir', (['scripts_folder'], {}), '(scripts_folder)\n', (1266, 1282), False, 'import os\n'), ((3767, 3811), 'os.path.join', 'os.path.join', (['scripts_folder', 'f"""SIM{ii + 1}"""'], {}), "(scripts_folder, f'SIM{ii + 1}')\n", (3779, 3811), False, 'import os\n'), ((4839, 4885), 'os.path.join', 'os.path.join', (['scripts_folder', '"""simulation*.py"""'], {}), "(scripts_folder, 'simulation*.py')\n", (4851, 4885), False, 'import os\n'), ((712, 790), 'itertools.product', 'itertools.product', (['*[units, dropouts, models, activations, output_activations]'], {}), '(*[units, dropouts, models, activations, output_activations])\n', (729, 790), False, 'import itertools\n'), ((4897, 4939), 'os.path.join', 'os.path.join', (['scripts_folder', '"""run_all.py"""'], {}), "(scripts_folder, 'run_all.py')\n", (4909, 4939), False, 'import os\n')]
|
"""
pcapToRawTrafficItem.py
Tested with two back-2-back Ixia ports
- Connect to the API server
- Connect to chassis
- Configure license server IP
- Assign ports:
- If variable forceTakePortOwnership is True, take over the ports if they're owned by another user.
- If variable forceTakePortOwnership if False, abort test.
- Configure a Raw Traffic Item
- Read a PCAP file.
- Add Traffic Item packet headers using PCAP packet header values.
Supports IxNetwork API servers:
- Windows, Windows Connection Mgr and Linux
Requirements
- IxNetwork 8.50
- Python 2.7 and 3+
- tcp.pcap file (Included in the same local directory).
- pip install scapy
- pip install requests
- pip install -U --no-cache-dir ixnetwork_restpy
Script development API doc:
- The doc is located in your Python installation site-packages/ixnetwork_restpy/docs/index.html
- On a web browser:
- If installed in Windows: enter: file://c:/<path_to_ixnetwork_restpy>/docs/index.html
- If installed in Linux: enter: file:///<path_to_ixnetwork_restpy>/docs/index.html
Usage:
# Defaults to Windows
- Enter: python <script>
# Connect to Windows Connection Manager
- Enter: python <script> connection_manager <apiServerIp> 443
# Connect to Linux API server
- Enter: python <script> linux <apiServerIp> 443
"""
import sys, os, re, traceback
from scapy.all import *
# Import the RestPy module
from ixnetwork_restpy.testplatform.testplatform import TestPlatform
from ixnetwork_restpy.assistants.statistics.statviewassistant import StatViewAssistant
# If you got RestPy by doing a git clone instead of using pip, uncomment this line so
# your system knows where the RestPy modules are located.
#sys.path.append(os.path.dirname(os.path.abspath(__file__).replace('SampleScripts', '')))
# Defaulting to windows
# Options: windows|connection_manager|linux
osPlatform = 'windows'
apiServerIp = '192.168.70.3'
# API server REST port: windows:11009. linux:443. connection_manager:443
apiServerPort = 11009
# Credentials: for the Linux API server only
username = 'admin'
password = '<PASSWORD>'
# Allow passing in some params/values from the CLI to replace the defaults
if len(sys.argv) > 1:
    # Command line input:
    # osPlatform: windows, connection_manager or linux
    osPlatform = sys.argv[1]
    apiServerIp = sys.argv[2]
    # NOTE(review): argv values are strings, so apiServerPort becomes a str
    # here while the default above is an int — confirm both are accepted.
    apiServerPort = sys.argv[3]
# The IP address for your Ixia license server(s) in a list.
licenseServerIp = ['192.168.70.3']
# License mode: subscription, perpetual or mixed
licenseMode = 'subscription'
# For linux and connection_manager only. Set to False to leave the session alive for debugging.
deleteSessionWhenDone = True
# Forcefully take port ownership if the portList are owned by other users.
forceTakePortOwnership = True
# A list of chassis to use
ixChassisIpList = ['192.168.70.128']
# Each entry: [chassisIp, cardNumber, portNumber]
portList = [[ixChassisIpList[0], 1, 1], [ixChassisIpList[0], 1, 2]]
# Local pcap file whose packet headers seed the Raw Traffic Item below.
pcapFile = 'tcp.pcap'
try:
testPlatform = TestPlatform(apiServerIp, rest_port=apiServerPort, platform=platform, log_file_name='restpy.log')
# Console output verbosity: None|request|request_response
testPlatform.Trace = 'request_response'
if osPlatform == 'linux':
testPlatform.Authenticate(username, password)
session = testPlatform.Sessions.add()
ixNetwork = session.Ixnetwork
if osPlatform == 'windows':
ixNetwork.NewConfig()
ixNetwork.info('\nConfiguring license server')
ixNetwork.Globals.Licensing.LicensingServers = licenseServerIp
ixNetwork.Globals.Licensing.Mode = licenseMode
# Create vport for RAW Traffic Item source/dest endpoints
vport1 = ixNetwork.Vport.add(Name='Port1')
vport2 = ixNetwork.Vport.add(Name='Port2')
# Assign ports
testPorts = []
vportList = [vport.href for vport in ixNetwork.Vport.find()]
for port in portList:
testPorts.append(dict(Arg1=port[0], Arg2=port[1], Arg3=port[2]))
ixNetwork.AssignPorts(testPorts, [], vportList, forceTakePortOwnership)
# This will get the last packet header from the tcp.pcap file.
for index, packet in enumerate(PcapReader(pcapFile)):
ixNetwork.info('\nPacket: {}:\n'.format(index, packet.show()))
try:
ethSrcAddr = packet[Ether].src
ethDstAddr = packet[Ether].dst
ipSrcAddr = packet[IP].src
ipDstAddr = packet[IP].dst
tcpSrcPort = packet[TCP].sport
tcpDstPort = packet[TCP].dport
ixNetwork.info('ethSrc: {} ethDst: {}'.format(ethSrc, ethDst))
ixNetwork.info('ipSrc: {} ipDst: {}'.format(ipSrc, ipDst))
ixNetwork.info('tcpSrcPort: {} tcpDstPort: {}'.format(tcpSrcPort, tcpDstPort))
except:
pass
ixNetwork.info('Create Traffic Item')
trafficItem = ixNetwork.Traffic.TrafficItem.add(Name='RAW TCP',
BiDirectional=False,
TrafficType='raw',
TrafficItemType='l2L3'
)
ixNetwork.info('Add flow group')
trafficItem.EndpointSet.add(Sources=vport1.Protocols.find(), Destinations=vport2.Protocols.find())
# Note: A Traffic Item could have multiple EndpointSets (Flow groups).
# Therefore, ConfigElement is a list.
ixNetwork.info('\tConfiguring config elements')
configElement = trafficItem.ConfigElement.find()[0]
configElement.FrameRate.Rate = 28
configElement.FrameRate.Type = 'framesPerSecond'
configElement.TransmissionControl.FrameCount = 10000
configElement.TransmissionControl.Type = 'fixedFrameCount'
configElement.FrameRateDistribution.PortDistribution = 'splitRateEvenly'
configElement.FrameSize.FixedSize = 128
trafficItem.Tracking.find()[0].TrackBy = ['flowGroup0']
# Show a list of current configured packet headers in the first Traffic Item and first EndpointSet.
ethernetStackObj = configElement.Stack.find(DisplayName='Ethernet II')
# Uncomment this to show a list of all the available protocol templates (packet headers)
#for protocolHeader in ixNetwork.Traffic.ProtocolTemplate():
# ixNetwork.info('\n', protocolHeader.DisplayName)
# NOTE: If you are using virtual ports (IxVM), you must use the Destination MAC address of
# the IxVM port from your virtual host (ESX-i host or KVM)
ixNetwork.info('\nConfiguring Ethernet packet header')
ethernetDstField = ethernetStackObj.Field.find(DisplayName='Destination MAC Address')
ethernetDstField.ValueType = 'increment'
ethernetDstField.StartValue = ethDstAddr
ethernetDstField.StepValue = "00:00:00:00:00:00"
ethernetDstField.CountValue = 1
ethernetSrcField = ethernetStackObj.Field.find(DisplayName='Source MAC Address')
ethernetSrcField.ValueType = 'increment'
ethernetSrcField.StartValue = ethSrcAddr
ethernetSrcField.StepValue = "00:00:00:00:00:01"
ethernetSrcField.CountValue = 1
# Add IPv4 packet header after the Ethernet stack
# 1> Get the protocol template for IPv4
ipv4ProtocolTemplate = ixNetwork.Traffic.ProtocolTemplate.find(DisplayName='IPv4')
# 2> Append the IPv4 protocol header after the Ethernet stack.
ethernetStackObj.Append(Arg2=ipv4ProtocolTemplate)
# 3> Get the new IPv4 packet header stack to use it for appending any protocol after IP layer such as
# UDP/TCP.
# Look for the IPv4 packet header object.
ipv4StackObj = configElement.Stack.find(DisplayName='IPv4')
# 4> Configure the mpls packet header
ipv4SrcFieldObj = ipv4StackObj.Field.find(DisplayName='Source Address')
ipv4SrcFieldObj.ValueType = 'increment'
ipv4SrcFieldObj.StartValue = ipSrcAddr
ipv4SrcFieldObj.StepValue = "0.0.0.1"
ipv4SrcFieldObj.CountValue = 1
ipv4DstFieldObj = ipv4StackObj.Field.find(DisplayName='Destination Address')
ipv4DstFieldObj.ValueType = 'increment'
ipv4DstFieldObj.StartValue = ipDstAddr
ipv4DstFieldObj.StepValue = "0.0.0.1"
ipv4DstFieldObj.CountValue = 1
# Add TCP packet header after the IPv4 packet header stack
# 1> Get the protocol template for TCP
tcpProtocolTemplate = ixNetwork.Traffic.ProtocolTemplate.find(DisplayName='TCP')
# 2> Append the TCP protocol header after the IPv4 header.
ipv4StackObj.Append(Arg2=tcpProtocolTemplate)
tcpStackObj = configElement.Stack.find(DisplayName='TCP')
tcpSrcPortFieldObj = tcpStackObj.Field.find(DisplayName='TCP-Source-Port')
tcpSrcPortFieldObj.Auto = False
tcpSrcPortFieldObj.SingleValue = tcpSrcPort
tcpDstPortFieldObj = tcpStackObj.Field.find(DisplayName='TCP-Dest-Port')
tcpDstPortFieldObj.Auto = False
tcpDstPortFieldObj.SingleValue = tcpDstPort
# This sample script does not expect traffic.
# It is only demonstrating how to parse a pcap file and insert the values into a
# Traffic Item.
if deleteSessionWhenDone:
# For Linux and WindowsConnectionMgr only
if osPlatform in ['linux', 'windowsConnectionMgr']:
session.remove()
except Exception as errMsg:
ixNetwork.debug(traceback.format_exc())
if deleteSessionWhenDone and 'session' in locals():
if osPlatform in ['linux', 'windowsConnectionMgr']:
session.remove()
|
[
"ixnetwork_restpy.testplatform.testplatform.TestPlatform",
"traceback.format_exc"
] |
[((2972, 3073), 'ixnetwork_restpy.testplatform.testplatform.TestPlatform', 'TestPlatform', (['apiServerIp'], {'rest_port': 'apiServerPort', 'platform': 'platform', 'log_file_name': '"""restpy.log"""'}), "(apiServerIp, rest_port=apiServerPort, platform=platform,\n log_file_name='restpy.log')\n", (2984, 3073), False, 'from ixnetwork_restpy.testplatform.testplatform import TestPlatform\n'), ((9205, 9227), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (9225, 9227), False, 'import sys, os, re, traceback\n')]
|