code stringlengths 31 1.05M | apis list | extract_api stringlengths 97 1.91M |
|---|---|---|
""" An example highlighting the difference between DMD and streaming DMD
Streaming DMD is a modification of the "standard" DMD procedure that
produces *APPROXIMATIONS* of the DMD modes and eigenvalues. The benefit
of this procedure is that it can be applied to data sets with large
(in theory, infinite) numbers of snapshots provided the underlying
system is effectively low-rank.
Returns
-------
Outputs a plot comparing the streaming and standard eigenvalues
"""
import sys
sys.path.append('..')
import dmdtools
import numpy as np
import matplotlib.pyplot as plt
max_rank = 0 # maximum allowable rank of the DMD operator (0 = unlimited)
n_snaps = 501 # total number of snapshots to be processed
n_states = 4000 # number of states
noise_cov = 1.e-4 # measurement noise covariance
dt = 0.01 # timestep
np.random.seed(0)
def snapshots(n_states, n_snaps, noise_cov=0):
# Define the example system
v1 = np.random.randn(n_states)
v2 = np.random.randn(n_states)
v3 = np.random.randn(n_states)
v4 = np.random.randn(n_states)
# characteristic frequencies
f1 = 5.2
f2 = 1.0
for k in range(n_snaps):
x = (v1 * np.cos(2 * np.pi * f1 * dt * k) +
v2 * np.cos(2 * np.pi * f2 * dt * k) +
v3 * np.sin(2 * np.pi * f1 * dt * k) +
v4 * np.sin(2 * np.pi * f2 * dt * k))
yield x + np.sqrt(noise_cov) * np.random.randn(n_states)
def standard_dmd():
X = np.zeros((n_states, n_snaps-1))
Y = np.zeros((n_states, n_snaps-1))
snaps = snapshots(n_states, n_snaps, noise_cov)
x = snaps.next()
for k, y in enumerate(snaps):
X[:, k] = x
Y[:, k] = y
x = y
DMD = dmdtools.DMD()
DMD.fit(X, Y)
return DMD.modes, DMD.evals
def streaming_dmd():
sdmd = dmdtools.StreamingDMD(max_rank)
snaps = snapshots(n_states, n_snaps, noise_cov)
x = snaps.next()
for y in snaps:
sdmd.update(x, y)
x = y
return sdmd.compute_modes()
def main(streaming):
modes, evals = streaming_dmd() if streaming else standard_dmd()
fdmd = np.abs(np.angle(evals)) / (2 * np.pi * dt)
n_modes = len(fdmd)
ydmd = np.zeros(n_modes)
for i in range(n_modes):
ydmd[i] = np.linalg.norm(modes[:, i] * np.abs(evals[i]))
ydmd /= max(ydmd)
plt.stem(fdmd, ydmd)
plt.show()
def compare_methods():
np.random.seed(0)
modes, evals = standard_dmd()
np.random.seed(0)
modes2, evals2 = streaming_dmd()
evals.sort()
evals2.sort()
# print("standard:")
# print(evals)
# print("\nstreaming:")
# print(evals2)
plt.plot(evals.real, evals.imag, 'x')
plt.plot(evals2.real, evals2.imag, '+')
plt.legend(["DMD", "Streaming"])
plt.title("DMD Spectrum")
plt.xlabel(r"$\Re(\lambda)$")
plt.ylabel(r"$\Im(\lambda)$")
plt.show()
print(np.allclose(evals, evals2))
if __name__ == "__main__":
streaming = True
#main(streaming)
compare_methods()
| [
"dmdtools.DMD",
"matplotlib.pyplot.title",
"numpy.random.seed",
"numpy.abs",
"numpy.angle",
"numpy.allclose",
"matplotlib.pyplot.stem",
"numpy.sin",
"sys.path.append",
"numpy.random.randn",
"matplotlib.pyplot.show",
"matplotlib.pyplot.legend",
"numpy.cos",
"matplotlib.pyplot.ylabel",
"ma... | [((512, 533), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (527, 533), False, 'import sys\n'), ((914, 931), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (928, 931), True, 'import numpy as np\n'), ((1022, 1047), 'numpy.random.randn', 'np.random.randn', (['n_states'], {}), '(n_states)\n', (1037, 1047), True, 'import numpy as np\n'), ((1057, 1082), 'numpy.random.randn', 'np.random.randn', (['n_states'], {}), '(n_states)\n', (1072, 1082), True, 'import numpy as np\n'), ((1092, 1117), 'numpy.random.randn', 'np.random.randn', (['n_states'], {}), '(n_states)\n', (1107, 1117), True, 'import numpy as np\n'), ((1127, 1152), 'numpy.random.randn', 'np.random.randn', (['n_states'], {}), '(n_states)\n', (1142, 1152), True, 'import numpy as np\n'), ((1543, 1576), 'numpy.zeros', 'np.zeros', (['(n_states, n_snaps - 1)'], {}), '((n_states, n_snaps - 1))\n', (1551, 1576), True, 'import numpy as np\n'), ((1583, 1616), 'numpy.zeros', 'np.zeros', (['(n_states, n_snaps - 1)'], {}), '((n_states, n_snaps - 1))\n', (1591, 1616), True, 'import numpy as np\n'), ((1787, 1801), 'dmdtools.DMD', 'dmdtools.DMD', ([], {}), '()\n', (1799, 1801), False, 'import dmdtools\n'), ((1886, 1917), 'dmdtools.StreamingDMD', 'dmdtools.StreamingDMD', (['max_rank'], {}), '(max_rank)\n', (1907, 1917), False, 'import dmdtools\n'), ((2263, 2280), 'numpy.zeros', 'np.zeros', (['n_modes'], {}), '(n_modes)\n', (2271, 2280), True, 'import numpy as np\n'), ((2401, 2421), 'matplotlib.pyplot.stem', 'plt.stem', (['fdmd', 'ydmd'], {}), '(fdmd, ydmd)\n', (2409, 2421), True, 'import matplotlib.pyplot as plt\n'), ((2426, 2436), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2434, 2436), True, 'import matplotlib.pyplot as plt\n'), ((2466, 2483), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (2480, 2483), True, 'import numpy as np\n'), ((2523, 2540), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (2537, 2540), True, 'import numpy as 
np\n'), ((2710, 2747), 'matplotlib.pyplot.plot', 'plt.plot', (['evals.real', 'evals.imag', '"""x"""'], {}), "(evals.real, evals.imag, 'x')\n", (2718, 2747), True, 'import matplotlib.pyplot as plt\n'), ((2752, 2791), 'matplotlib.pyplot.plot', 'plt.plot', (['evals2.real', 'evals2.imag', '"""+"""'], {}), "(evals2.real, evals2.imag, '+')\n", (2760, 2791), True, 'import matplotlib.pyplot as plt\n'), ((2796, 2828), 'matplotlib.pyplot.legend', 'plt.legend', (["['DMD', 'Streaming']"], {}), "(['DMD', 'Streaming'])\n", (2806, 2828), True, 'import matplotlib.pyplot as plt\n'), ((2833, 2858), 'matplotlib.pyplot.title', 'plt.title', (['"""DMD Spectrum"""'], {}), "('DMD Spectrum')\n", (2842, 2858), True, 'import matplotlib.pyplot as plt\n'), ((2863, 2893), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\Re(\\\\lambda)$"""'], {}), "('$\\\\Re(\\\\lambda)$')\n", (2873, 2893), True, 'import matplotlib.pyplot as plt\n'), ((2897, 2927), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$\\\\Im(\\\\lambda)$"""'], {}), "('$\\\\Im(\\\\lambda)$')\n", (2907, 2927), True, 'import matplotlib.pyplot as plt\n'), ((2931, 2941), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2939, 2941), True, 'import matplotlib.pyplot as plt\n'), ((2953, 2979), 'numpy.allclose', 'np.allclose', (['evals', 'evals2'], {}), '(evals, evals2)\n', (2964, 2979), True, 'import numpy as np\n'), ((2192, 2207), 'numpy.angle', 'np.angle', (['evals'], {}), '(evals)\n', (2200, 2207), True, 'import numpy as np\n'), ((1415, 1446), 'numpy.sin', 'np.sin', (['(2 * np.pi * f2 * dt * k)'], {}), '(2 * np.pi * f2 * dt * k)\n', (1421, 1446), True, 'import numpy as np\n'), ((2357, 2373), 'numpy.abs', 'np.abs', (['evals[i]'], {}), '(evals[i])\n', (2363, 2373), True, 'import numpy as np\n'), ((1363, 1394), 'numpy.sin', 'np.sin', (['(2 * np.pi * f1 * dt * k)'], {}), '(2 * np.pi * f1 * dt * k)\n', (1369, 1394), True, 'import numpy as np\n'), ((1466, 1484), 'numpy.sqrt', 'np.sqrt', (['noise_cov'], {}), '(noise_cov)\n', (1473, 
1484), True, 'import numpy as np\n'), ((1487, 1512), 'numpy.random.randn', 'np.random.randn', (['n_states'], {}), '(n_states)\n', (1502, 1512), True, 'import numpy as np\n'), ((1259, 1290), 'numpy.cos', 'np.cos', (['(2 * np.pi * f1 * dt * k)'], {}), '(2 * np.pi * f1 * dt * k)\n', (1265, 1290), True, 'import numpy as np\n'), ((1311, 1342), 'numpy.cos', 'np.cos', (['(2 * np.pi * f2 * dt * k)'], {}), '(2 * np.pi * f2 * dt * k)\n', (1317, 1342), True, 'import numpy as np\n')] |
import discord
import os
import time
#from dotenv import load_dotenv
import numpy as np
import matplotlib.pyplot as plt
from scipy.special import binom
import io
import urllib, base64
from random import randint
import random
import asyncio
from boto.s3.connection import S3Connection
client = discord.Client()
#load_dotenv('.env')
reply_messages = [
"Hmm, interessant! ",
"Da habe ich mich selber übertroffen, dieser Zufall, so zufällig!",
"Klare Sache, ist doch offensichtlich.",
"lol 🤪💫",
"Naja, schaut komisch aus, aber bringt sicher Glück 🎩🐷",
"Yeah, da kann 2021 kommen! äh",
"😇"]
wait_messages = [
"Langsam tut sich was.. ",
"Meh schneller..! 💤",
"Shiny shiny .. 💥 ..",
"Seit wann macht man das eigentlich zu Silvester 🤔 ..",
"AU die Kerze is heiß 😣 ..",
"Es wird.. es wird..",
"Uuuuuuuuunnddd........."]
bleios_count = 0
@client.event
async def on_ready():
print('We have logged in as {0.user}'.format(client))
await client.change_presence(activity=discord.Game(name="!bleigießen", type=1))
#text_channel_list = []
#for guild in client.guilds:
# for channel in guild.text_channels:
# text_channel_list.append(channel)
# print(text_channel_list[0].id)
# channel = client.get_channel(text_channel_list[-1].id)
# await channel.send('hello')
@client.event
async def on_message(message):
global bleios_count
if message.author == client.user:
return
if message.content.startswith('$hello'):
await message.channel.send('Hello!')
if message.content.startswith('!blei'):
await message.add_reaction('🎉')
await message.channel.send(message.author.name+' 🔥 schmilzt das 🪨 Blei im 🥄 Löffel .. ')
bleio_filename = 'bleio_'+str(message.id)+'.png'
bleio(bleio_filename)
wartezeit = randint(7,10)
#print('Wartezeit 1: '+str(wartezeit)+'s.')
await asyncwait(wartezeit)
#await message.channel.send(' langsam tut sich was... 🤵')
await message.channel.send(random.choice(wait_messages))
wartezeit = randint(3,10)
#print('Wartezeit 2: '+str(wartezeit)+'s.')
await asyncwait(wartezeit)
await message.channel.send('Uuuund.. _splash_ 💨!')
await message.channel.send(file=discord.File(bleio_filename))
await message.channel.send(random.choice(reply_messages))
bleios_count += 1
print('Created Led Pouring #'+str(bleios_count)+' for '+message.author.name+'.')
os.remove(bleio_filename)
@client.event
async def on_reaction_add(reaction, user):
"""Event handler for when reactions are added on the help message."""
# ensure it was the relevant session message
#if reaction.message.id != self.message.id:
# return
# ensure it was the session author who reacted
#if user.id != reaction.message.author.id:
# return
#emoji = str(reaction.emoji)
#await reaction.message.channel.send(emoji)
async def asyncwait(time):
await asyncio.sleep(time)
bernstein = lambda n, k, t: binom(n,k)* t**k * (1.-t)**(n-k)
def bezier(points, num=200):
N = len(points)
t = np.linspace(0, 1, num=num)
curve = np.zeros((num, 2))
for i in range(N):
curve += np.outer(bernstein(N - 1, i, t), points[i])
return curve
class Segment():
def __init__(self, p1, p2, angle1, angle2, **kw):
self.p1 = p1; self.p2 = p2
self.angle1 = angle1; self.angle2 = angle2
self.numpoints = kw.get("numpoints", 100)
r = kw.get("r", 0.3)
d = np.sqrt(np.sum((self.p2-self.p1)**2))
self.r = r*d
self.p = np.zeros((4,2))
self.p[0,:] = self.p1[:]
self.p[3,:] = self.p2[:]
self.calc_intermediate_points(self.r)
def calc_intermediate_points(self,r):
self.p[1,:] = self.p1 + np.array([self.r*np.cos(self.angle1),
self.r*np.sin(self.angle1)])
self.p[2,:] = self.p2 + np.array([self.r*np.cos(self.angle2+np.pi),
self.r*np.sin(self.angle2+np.pi)])
self.curve = bezier(self.p,self.numpoints)
def get_curve(points, **kw):
segments = []
for i in range(len(points)-1):
seg = Segment(points[i,:2], points[i+1,:2], points[i,2],points[i+1,2],**kw)
segments.append(seg)
curve = np.concatenate([s.curve for s in segments])
return segments, curve
def ccw_sort(p):
d = p-np.mean(p,axis=0)
s = np.arctan2(d[:,0], d[:,1])
return p[np.argsort(s),:]
def get_bezier_curve(a, rad=0.2, edgy=0):
""" given an array of points *a*, create a curve through
those points.
*rad* is a number between 0 and 1 to steer the distance of
control points.
*edgy* is a parameter which controls how "edgy" the curve is,
edgy=0 is smoothest."""
p = np.arctan(edgy)/np.pi+.5
a = ccw_sort(a)
a = np.append(a, np.atleast_2d(a[0,:]), axis=0)
d = np.diff(a, axis=0)
ang = np.arctan2(d[:,1],d[:,0])
f = lambda ang : (ang>=0)*ang + (ang<0)*(ang+2*np.pi)
ang = f(ang)
ang1 = ang
ang2 = np.roll(ang,1)
ang = p*ang1 + (1-p)*ang2 + (np.abs(ang2-ang1) > np.pi )*np.pi
ang = np.append(ang, [ang[0]])
a = np.append(a, np.atleast_2d(ang).T, axis=1)
s, c = get_curve(a, r=rad, method="var")
x,y = c.T
return x,y, a
def get_random_points(n=5, scale=0.8, mindst=None, rec=0):
""" create n random points in the unit square, which are *mindst*
apart, then scale them."""
mindst = mindst or .7/n
a = np.random.rand(n,2)
d = np.sqrt(np.sum(np.diff(ccw_sort(a), axis=0), axis=1)**2)
if np.all(d >= mindst) or rec>=200:
return a*scale
else:
return get_random_points(n=n, scale=scale, mindst=mindst, rec=rec+1)
def bleio(filename):
fig, ax = plt.subplots()
ax.set_aspect("equal")
#ax.set_facecolor((1.0, 0.47, 0.42))
fig.patch.set_facecolor('#36393E')
fig.patch.set_alpha(0.7)
ax.patch.set_facecolor('#36393E')
ax.patch.set_alpha(0.5)
positions=np.array([[0,0], [0,0.5], [0,1], [1,0], [1,0.5], [1,1], [0.5,0], [0.5,0.5], [0.5,1]])
random_coords=np.random.choice([0, 1, 2, 3, 4, 5, 6, 7, 8],randint(1,4), replace=False)
#print(random_coords)
for c2 in random_coords:
#for c in np.array([[0,0], [0,0.5], [0,1], [1,0], [1,0.5], [1,1], [0.5,0], [0.5,0.5], [0.5,1]]):
#for c in np.array([[0,0]]):
random_rad = randint(2,7)/10
rad = random_rad
#rad = 0.2
random_edgy = randint(10,100)/100
edgy=random_edgy
#edgy = 0.05
c = positions[c2]
# random offset
c[0] = c[0] + randint(0,20)/100
c[1] = c[1] + randint(0,20)/100
a = get_random_points(n=randint(6,20), scale=0.4) + c
x,y, _ = get_bezier_curve(a,rad=rad, edgy=edgy)
color_theme=[randint(0,235)/235, randint(0,235)/235, randint(0,235)/235]
ax.fill(x,y, color=color_theme)
plt.plot(x,y, color=color_theme)
plt.axis('off')
plt.savefig(filename, bbox_inches='tight')
#plt.show()
return 1
client.run(os.environ['BOT_TOKEN']) | [
"os.remove",
"numpy.arctan2",
"numpy.sum",
"numpy.abs",
"numpy.argsort",
"numpy.mean",
"discord.Game",
"numpy.sin",
"numpy.atleast_2d",
"random.randint",
"discord.File",
"numpy.append",
"numpy.linspace",
"matplotlib.pyplot.subplots",
"discord.Client",
"scipy.special.binom",
"asyncio.... | [((302, 318), 'discord.Client', 'discord.Client', ([], {}), '()\n', (316, 318), False, 'import discord\n'), ((2971, 2997), 'numpy.linspace', 'np.linspace', (['(0)', '(1)'], {'num': 'num'}), '(0, 1, num=num)\n', (2982, 2997), True, 'import numpy as np\n'), ((3007, 3025), 'numpy.zeros', 'np.zeros', (['(num, 2)'], {}), '((num, 2))\n', (3015, 3025), True, 'import numpy as np\n'), ((3994, 4037), 'numpy.concatenate', 'np.concatenate', (['[s.curve for s in segments]'], {}), '([s.curve for s in segments])\n', (4008, 4037), True, 'import numpy as np\n'), ((4110, 4138), 'numpy.arctan2', 'np.arctan2', (['d[:, 0]', 'd[:, 1]'], {}), '(d[:, 0], d[:, 1])\n', (4120, 4138), True, 'import numpy as np\n'), ((4554, 4572), 'numpy.diff', 'np.diff', (['a'], {'axis': '(0)'}), '(a, axis=0)\n', (4561, 4572), True, 'import numpy as np\n'), ((4580, 4608), 'numpy.arctan2', 'np.arctan2', (['d[:, 1]', 'd[:, 0]'], {}), '(d[:, 1], d[:, 0])\n', (4590, 4608), True, 'import numpy as np\n'), ((4695, 4710), 'numpy.roll', 'np.roll', (['ang', '(1)'], {}), '(ang, 1)\n', (4702, 4710), True, 'import numpy as np\n'), ((4781, 4805), 'numpy.append', 'np.append', (['ang', '[ang[0]]'], {}), '(ang, [ang[0]])\n', (4790, 4805), True, 'import numpy as np\n'), ((5108, 5128), 'numpy.random.rand', 'np.random.rand', (['n', '(2)'], {}), '(n, 2)\n', (5122, 5128), True, 'import numpy as np\n'), ((5355, 5369), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (5367, 5369), True, 'import matplotlib.pyplot as plt\n'), ((5569, 5668), 'numpy.array', 'np.array', (['[[0, 0], [0, 0.5], [0, 1], [1, 0], [1, 0.5], [1, 1], [0.5, 0], [0.5, 0.5],\n [0.5, 1]]'], {}), '([[0, 0], [0, 0.5], [0, 1], [1, 0], [1, 0.5], [1, 1], [0.5, 0], [\n 0.5, 0.5], [0.5, 1]])\n', (5577, 5668), True, 'import numpy as np\n'), ((6423, 6438), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (6431, 6438), True, 'import matplotlib.pyplot as plt\n'), ((6440, 6482), 'matplotlib.pyplot.savefig', 'plt.savefig', 
(['filename'], {'bbox_inches': '"""tight"""'}), "(filename, bbox_inches='tight')\n", (6451, 6482), True, 'import matplotlib.pyplot as plt\n'), ((1735, 1749), 'random.randint', 'randint', (['(7)', '(10)'], {}), '(7, 10)\n', (1742, 1749), False, 'from random import randint\n'), ((1958, 1972), 'random.randint', 'randint', (['(3)', '(10)'], {}), '(3, 10)\n', (1965, 1972), False, 'from random import randint\n'), ((2335, 2360), 'os.remove', 'os.remove', (['bleio_filename'], {}), '(bleio_filename)\n', (2344, 2360), False, 'import os\n'), ((2830, 2849), 'asyncio.sleep', 'asyncio.sleep', (['time'], {}), '(time)\n', (2843, 2849), False, 'import asyncio\n'), ((3395, 3411), 'numpy.zeros', 'np.zeros', (['(4, 2)'], {}), '((4, 2))\n', (3403, 3411), True, 'import numpy as np\n'), ((4087, 4105), 'numpy.mean', 'np.mean', (['p'], {'axis': '(0)'}), '(p, axis=0)\n', (4094, 4105), True, 'import numpy as np\n'), ((4518, 4540), 'numpy.atleast_2d', 'np.atleast_2d', (['a[0, :]'], {}), '(a[0, :])\n', (4531, 4540), True, 'import numpy as np\n'), ((5194, 5213), 'numpy.all', 'np.all', (['(d >= mindst)'], {}), '(d >= mindst)\n', (5200, 5213), True, 'import numpy as np\n'), ((5715, 5728), 'random.randint', 'randint', (['(1)', '(4)'], {}), '(1, 4)\n', (5722, 5728), False, 'from random import randint\n'), ((6388, 6421), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], {'color': 'color_theme'}), '(x, y, color=color_theme)\n', (6396, 6421), True, 'import matplotlib.pyplot as plt\n'), ((2886, 2897), 'scipy.special.binom', 'binom', (['n', 'k'], {}), '(n, k)\n', (2891, 2897), False, 'from scipy.special import binom\n'), ((3339, 3371), 'numpy.sum', 'np.sum', (['((self.p2 - self.p1) ** 2)'], {}), '((self.p2 - self.p1) ** 2)\n', (3345, 3371), True, 'import numpy as np\n'), ((4147, 4160), 'numpy.argsort', 'np.argsort', (['s'], {}), '(s)\n', (4157, 4160), True, 'import numpy as np\n'), ((4458, 4473), 'numpy.arctan', 'np.arctan', (['edgy'], {}), '(edgy)\n', (4467, 4473), True, 'import numpy as np\n'), 
((4824, 4842), 'numpy.atleast_2d', 'np.atleast_2d', (['ang'], {}), '(ang)\n', (4837, 4842), True, 'import numpy as np\n'), ((5939, 5952), 'random.randint', 'randint', (['(2)', '(7)'], {}), '(2, 7)\n', (5946, 5952), False, 'from random import randint\n'), ((6004, 6020), 'random.randint', 'randint', (['(10)', '(100)'], {}), '(10, 100)\n', (6011, 6020), False, 'from random import randint\n'), ((986, 1026), 'discord.Game', 'discord.Game', ([], {'name': '"""!bleigießen"""', 'type': '(1)'}), "(name='!bleigießen', type=1)\n", (998, 1026), False, 'import discord\n'), ((1913, 1941), 'random.choice', 'random.choice', (['wait_messages'], {}), '(wait_messages)\n', (1926, 1941), False, 'import random\n'), ((2199, 2228), 'random.choice', 'random.choice', (['reply_messages'], {}), '(reply_messages)\n', (2212, 2228), False, 'import random\n'), ((4740, 4759), 'numpy.abs', 'np.abs', (['(ang2 - ang1)'], {}), '(ang2 - ang1)\n', (4746, 4759), True, 'import numpy as np\n'), ((6116, 6130), 'random.randint', 'randint', (['(0)', '(20)'], {}), '(0, 20)\n', (6123, 6130), False, 'from random import randint\n'), ((6150, 6164), 'random.randint', 'randint', (['(0)', '(20)'], {}), '(0, 20)\n', (6157, 6164), False, 'from random import randint\n'), ((6292, 6307), 'random.randint', 'randint', (['(0)', '(235)'], {}), '(0, 235)\n', (6299, 6307), False, 'from random import randint\n'), ((6312, 6327), 'random.randint', 'randint', (['(0)', '(235)'], {}), '(0, 235)\n', (6319, 6327), False, 'from random import randint\n'), ((6332, 6347), 'random.randint', 'randint', (['(0)', '(235)'], {}), '(0, 235)\n', (6339, 6347), False, 'from random import randint\n'), ((2137, 2165), 'discord.File', 'discord.File', (['bleio_filename'], {}), '(bleio_filename)\n', (2149, 2165), False, 'import discord\n'), ((6197, 6211), 'random.randint', 'randint', (['(6)', '(20)'], {}), '(6, 20)\n', (6204, 6211), False, 'from random import randint\n'), ((3588, 3607), 'numpy.cos', 'np.cos', (['self.angle1'], {}), '(self.angle1)\n', 
(3594, 3607), True, 'import numpy as np\n'), ((3625, 3644), 'numpy.sin', 'np.sin', (['self.angle1'], {}), '(self.angle1)\n', (3631, 3644), True, 'import numpy as np\n'), ((3690, 3717), 'numpy.cos', 'np.cos', (['(self.angle2 + np.pi)'], {}), '(self.angle2 + np.pi)\n', (3696, 3717), True, 'import numpy as np\n'), ((3733, 3760), 'numpy.sin', 'np.sin', (['(self.angle2 + np.pi)'], {}), '(self.angle2 + np.pi)\n', (3739, 3760), True, 'import numpy as np\n')] |
import os
import numpy as np
import matplotlib.pyplot as plt
def head0_der(u):
if (u >= 0.0):
if (u < 0.5):
return -8.0/3.0 * u
elif (u <= 1.5):
return 4.0/3.0 * u - 2.0
return 0.0
def head1_der(u):
if (u >= -1.0):
if (u < -0.5):
return 8.0/3.0 * (u+1.0)
elif (u < 0.5):
return -7.0/3.0 * u + 1.0/6.0
elif (u <= 1.5):
return (2.0*u - 3.0) * 0.5
return 0.0
def middle_der(u):
if (u >= -1.5):
if (u < -0.5):
return (3.0 + 2.0*u) * 0.5
elif (u < 0.5):
return -2.0 * u
elif (u < 1.5):
return (2.0*u - 3.0) * 0.5
return 0.0
def tail1_der(u):
if (u >= -1.5):
if (u < -0.5):
return (3.0 + 2.0*u) * 0.5
elif (u <= 0.5):
return -7.0/3.0 * u - 1.0/6.0
elif (u <= 1.0):
return -8.0/3.0 * (1.0-u)
return 0.0
def tail0_der(u):
if (u >= -1.5):
if (u < -0.5):
return 4.0/3.0 * u + 2.0
elif (u <= 0.0):
return -8.0/3.0 * u
return 0.0
def cal_func(func, x1, x2, u1, u2, num):
x = np.zeros(num+1)
y = np.zeros(num+1)
x_div = (x2 - x1) / num
u_div = (u2 - u1) / num
for i in range(num+1):
x[i] = x1 + x_div * i
y[i] = func(u1 + u_div * i)
return (x, y)
if __name__ == "__main__":
fig = plt.figure()
plot1 = fig.subplots(1, 1)
plot1.set_xlim([0.0, 5.0])
#plot1.set_ylim([0.0, 1.0])
plot1.plot([0.0, 5.0], [0.0, 0.0], "k")
# head0
x, y = cal_func(head0_der, 0.0, 1.5, 0.0, 1.5, 100)
plot1.plot(x, y)
# head1
x, y = cal_func(head1_der, 0.0, 2.5, -1.0, 1.5, 100)
plot1.plot(x, y)
# middle1
x, y = cal_func(middle_der, 0.5, 3.5, -1.5, 1.5, 100)
plot1.plot(x, y)
# middle2
x, y = cal_func(middle_der, 1.5, 4.5, -1.5, 1.5, 100)
plot1.plot(x, y)
# tail1
x, y = cal_func(tail1_der, 2.5, 5.0, -1.5, 1.0, 100)
plot1.plot(x, y)
# tail0
x, y = cal_func(tail0_der, 3.5, 5.0, -1.5, 0.0, 100)
plot1.plot(x, y)
plt.show()
| [
"matplotlib.pyplot.figure",
"numpy.zeros",
"matplotlib.pyplot.show"
] | [((1173, 1190), 'numpy.zeros', 'np.zeros', (['(num + 1)'], {}), '(num + 1)\n', (1181, 1190), True, 'import numpy as np\n'), ((1197, 1214), 'numpy.zeros', 'np.zeros', (['(num + 1)'], {}), '(num + 1)\n', (1205, 1214), True, 'import numpy as np\n'), ((1418, 1430), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1428, 1430), True, 'import matplotlib.pyplot as plt\n'), ((2118, 2128), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2126, 2128), True, 'import matplotlib.pyplot as plt\n')] |
"""
Make a tiny debuggable version of train_x_lpd_5_phr.npz
"""
import numpy as np
import sys
if __name__ == '__main__':
with np.load('train_x_lpd_5_phr.npz') as f:
data = np.zeros(f['shape'], np.bool_)
data[[x for x in f['nonzero']]] = True
data = data[:10000]
np.savez_compressed('train_x_lpd_5_phr_debug', data)
| [
"numpy.savez_compressed",
"numpy.zeros",
"numpy.load"
] | [((292, 344), 'numpy.savez_compressed', 'np.savez_compressed', (['"""train_x_lpd_5_phr_debug"""', 'data'], {}), "('train_x_lpd_5_phr_debug', data)\n", (311, 344), True, 'import numpy as np\n'), ((132, 164), 'numpy.load', 'np.load', (['"""train_x_lpd_5_phr.npz"""'], {}), "('train_x_lpd_5_phr.npz')\n", (139, 164), True, 'import numpy as np\n'), ((186, 216), 'numpy.zeros', 'np.zeros', (["f['shape']", 'np.bool_'], {}), "(f['shape'], np.bool_)\n", (194, 216), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 14 18:00:19 2018
@author: asus-task
This script is to demonstrate the process of decoding old (coded 0) and new (coded 1) and scramble (coded 2) images by the ERP (EEG) signals.
1. Stack the ERPs to form the dataset
2. Split the dataset into training (80%) and testing (20%) set
3. Split the training (64 X 61 X 1400 dimensional matrix) set with 50 ms window along the last dimension ==> (64 X 61 X 50 X 28)
4. Within each segment (along the last dimension where it is 28), a classification pipeline is trained
5. The classification pipeline contains:
a. vectorizer: https://martinos.org/mne/stable/generated/mne.decoding.Vectorizer.html?highlight=vectorizer
b. standardizer: http://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html#sklearn.preprocessing.StandardScaler
c. linear SVM: http://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html
6. These 28 classification pipelines are tested on the testing set within each segment, and performances are measure by ROC AUC
7. The training and testing set were rotated by 5-fold cross validation, thus, 28*5 = 140 ROC AUCs should be obtained
8. The order of the data is also shuffled/no shuffled to test if there is an effect of iteration order (1 no shuffle + 10 shuffle)
9. Since the testing process could happen in different time samples other than where the classification pipeline is trained, a temporal
generalization process is applied to obtained classification performances of a classification pipeline in which it is not trained on
10. A 5-fold cross validation is also nested with the temporal generalization
"""
############################### 3 classes ##########################
if __name__ == '__main__':
import os
os.chdir('D://Epochs')
import avr_reader
import mne
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
sns.set_style('white')
from glob import glob
import pickle
os.chdir('D:/Epochs')
from mne.decoding import LinearModel,get_coef,SlidingEstimator,cross_val_multiscore,GeneralizationAcrossTime
from sklearn.model_selection import StratifiedKFold,permutation_test_score,cross_val_score
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegressionCV,SGDClassifier
from sklearn.pipeline import Pipeline
from sklearn import metrics,utils
from tqdm import tqdm
from mne.decoding import Vectorizer
from scipy import stats
from sklearn.multiclass import OneVsOneClassifier
epochs = mne.read_epochs('D:/NING - spindle/VCRT_study/data/0.1-40 Hz/3 classes-epo.fif',preload=True)
def make_clf(vec=True):
clf = []
if vec:
clf.append(('vec',Vectorizer()))
clf.append(('std',StandardScaler()))
clf.append(('est',OneVsOneClassifier(SVC(max_iter=-1,random_state=12345,class_weight='balanced',
kernel='linear',probability=False))))
clf = Pipeline(clf)
return clf
results_ = []# for saving all the results
saving_dir = 'D:/NING - spindle/VCRT_study/results/'
if not os.path.exists(saving_dir):
os.mkdir(saving_dir)
################# first iteration: not shuffling the order of the subjects #################################
data = epochs.get_data() # 54 by 61 by 1400 matrix
labels = epochs.events[:,-1]# this is [0 0 0 0 ... 0 0 1 1 1 1 ... 1 1 1...2 2 2]
results={'scores_mean':[],'scores_std':[],'clf':[],'chance_mean':[],'pval':[],'activity':[],'chance_se':[]}
idx = np.arange(data.shape[-1]).reshape(-1,50) # 28 by 50 matrix
cv = StratifiedKFold(n_splits=5,shuffle=True,random_state=12345)# 5 fold cross validation
clfs = []
scores = []
# patterns = []
idx = np.arange(1400).reshape(-1,50)
for train,test in tqdm(cv.split(data,labels),desc='train-test,no shuffle'):# split the data into training set and testing set
X = data[train]
y = labels[train]
# fit a classifier at each of the 50 ms window with only the training data and record the trained classifier
clfs.append([make_clf(True).fit(X[:,:,ii],y) for ii in idx])
# get the decoding pattern learned by each trained classifier at each of the 50 ms window with only the training data
# temp_patterns = np.array([get_coef(c,attr='patterns_',inverse_transform=True) for c in clfs[-1]])
# patterns.append(temp_patterns)
X_ = data[test]
y_ = labels[test]
# compute the performance of each trained classifier at each of the 50 ms window with the testing data
scores_ = [metrics.f1_score(y_,clf.predict(X_[:,:,ii]),average='micro') for ii,clf in zip(idx,clfs[-1])]
scores.append(scores_)
scores = np.array(scores)
# patterns=np.array(patterns)
######################### chance estimation n_perm = 10000 #############
cv = StratifiedKFold(n_splits=5,shuffle=True,random_state=12345)# 5 fold cross validation
n_perm = 1000
counts = 0
chances = []
for n_perm_ in tqdm(range(int(1e5)),desc='permutation test'):# the most outer loop of the permutation test
try:# the stratified k fold cross validation might not work for some runs, but it doesn't matter, so I skip them
chances_ = []# second order temporal data storage
# during each permutation, we randomly shuffle the labels, so that there should not be any informative patterns
# that could be learned by the classifier. In other words, the feature data does not correlate to the labels
perm_labels = labels[np.random.choice(len(labels),size=labels.shape,replace=False)]
for train,test in cv.split(data,labels):# do the same procedure as a real cross validation
X = data[train]
y = perm_labels[train]
X_ = data[test]
y_ = perm_labels[test]
clfs_=[make_clf().fit(X[:,:,ii],y) for ii in idx]
scores_ = [metrics.f1_score(y_,clf.predict(X_[:,:,ii]),average='micro') for ii,clf in zip(idx,clfs[-1])]
chances_.append(scores_)
chances.append(chances_)
counts += 1
except:
print("something is wrong, but I don't care")
if counts > n_perm:
break
chances = np.array(chances)
np.save(saving_dir+"chance (3 class).npy", chances)
chances = np.load(saving_dir+"chance (3 class).npy")
# percentage of chance scores that exceed the observed score, and if it is less than 0.05,
# we claim the observed score statistically significant higher than chance level
pval = (np.array(chances.mean(1) > scores.mean(0)).sum(0)+1) / (n_perm +1)
results['scores_mean']=scores.mean(0)
results['scores_std']=scores.std(0)
results['chance_mean']=np.mean(chances,axis=1).mean(0)
results['chance_se']=np.std(chances.mean(1))/np.sqrt(n_perm)# standard error
results['clf']=clfs
results['pval']=pval
# average pattern learned by last dimension, which is the 50 ms window
# average pattern learned by the classifier over 5 folds
# results['activity']=patterns.mean(-1).mean(0)
pickle.dump(results,open(saving_dir+'temp_no_shuffle (3 classes).p','wb'))
results_.append(results)
for i_random in range(10):
data = epochs.get_data()
labels = epochs.events[:,-1]
results={'scores_mean':[],'scores_std':[],'clf':[],'chance_mean':[],'pval':[],'activity':[],'chance_se':[]}
for ii in range(100):
data,labels = utils.shuffle(data,labels)# only difference from above
idx = np.arange(data.shape[-1]).reshape(-1,50) # 28 by 50 matrix
# cv = StratifiedKFold(n_splits=5,shuffle=True,random_state=12345)# 5 fold cross validation
clfs = []
scores = []
# patterns = []
idx = np.arange(1400).reshape(-1,50)
for train,test in tqdm(cv.split(data,labels),desc='train-test, shuffle'):# split the data into training set and testing set
X = data[train]
y = labels[train]
# fit a classifier at each of the 50 ms window with only the training data and record the trained classifier
clfs.append([make_clf(True).fit(X[:,:,ii],y) for ii in idx])
# get the decoding pattern learned by each trained classifier at each of the 50 ms window with only the training data
# temp_patterns = np.array([get_coef(c,attr='patterns_',inverse_transform=True) for c in clfs[-1]])
# patterns.append(temp_patterns)
X_ = data[test]
y_ = labels[test]
# compute the performance of each trained classifier at each of the 50 ms window with the testing data
scores_ = [metrics.f1_score(y_,clf.predict(X_[:,:,ii]),average='micro') for ii,clf in zip(idx,clfs[-1])]
scores.append(scores_)
scores = np.array(scores)
# patterns=np.array(patterns)
pval = (np.array(chances.mean(1) > scores.mean(0)).sum(0)+1) / (n_perm +1)
results['scores_mean']=scores.mean(0)
results['scores_std']=scores.std(0)
results['chance_mean']=np.mean(chances,axis=1).mean(0)
results['chance_se']=np.std(chances.mean(1))/np.sqrt(n_perm)# standard error
results['clf']=clfs
results['pval']=pval
# average pattern learned by last dimension, which is the 50 ms window
# average pattern learned by the classifier over 5 folds
# results['activity']=patterns.mean(-1).mean(0)
pickle.dump(results,open(saving_dir+'temp_shuffle_%d (3 classes).p'%i_random,'wb'))
results_.append(results)
# pickle.dump(results_,open(saving_dir+'shuffle results (old vs new).p','wb'))
####################################################
cv = StratifiedKFold(n_splits=5,shuffle=True,random_state=12345)
clfs = []
for train,test in tqdm(cv.split(data,labels),desc='training'):
X = data[train]
y = labels[train]
clfs.append([make_clf().fit(X[:,:,ii],y) for ii in range(X.shape[-1])])
scores_within = []
for fold,(train,test) in tqdm(enumerate(cv.split(data,labels)),desc='test within'):
X = data[test]
y = labels[test]
scores_ = []
for clf in clfs[fold]:
scores_temp = [metrics.f1_score(y,clf.predict(X[:,:,ii]),average='micro') for ii in range(X.shape[-1])]
scores_.append(scores_temp)
scores_within.append(scores_)
scores_within = np.array(scores_within)
pickle.dump(scores_within,open(saving_dir+'temporal generalization(3 classes).p','wb'))
scores_within = pickle.load(open(saving_dir+'temporal generalization(3 classes).p','rb'))
font = {
'weight' : 'bold',
'size' : 20}
import matplotlib
matplotlib.rc('font', **font)
fig,ax = plt.subplots(figsize=(12,10))
im = ax.imshow(scores_within.mean(0),origin='lower',aspect='auto',extent=[0,1400,0,1400],cmap=plt.cm.RdBu_r,vmin=.33)
cbar=plt.colorbar(im)
cbar.set_label('F1 score (micro average)')
ax.set(xlabel='Test time',ylabel='Train time',
title='Old vs New vs Scramble Temporal Generalization\nLinear SVM, 5-fold CV')
fig.savefig(saving_dir+'Old vs New vs scr decoding generalization.png',dpi=500)
############### plot ######################################################################
from matplotlib import pyplot as plt
import pickle
import numpy as np
from glob import glob
working_dir = 'D:/NING - spindle/VCRT_study/results/'
shuffle_files = glob(working_dir+'*_shuffle*(3 classes).p')
results = [pickle.load(open(f,'rb')) for f in shuffle_files]
no_shuffle = results[0]
shuffle = results[1:]
import mne
epochs = mne.read_epochs('D:/NING - spindle/VCRT_study/data/0.1-40 Hz/3 classes-epo.fif',preload=False)
font = {
'weight' : 'bold',
'size' : 20}
import matplotlib
matplotlib.rc('font', **font)
fig,ax = plt.subplots(figsize=(16,8))
times = np.linspace(25,1375,28)
ax.plot(times,no_shuffle['scores_mean'],color='k',alpha=1.,label='Classifi.Score (F1 Mean)_no shuffle')
m,s = np.array(no_shuffle['scores_mean']),np.array(no_shuffle['scores_std'])/np.sqrt(5)
ax.fill_between(times,m+s,m-s,color='red',alpha=.3,label='Classifi.Score (SE)')
ax.plot(times,no_shuffle['chance_mean'],color='k',linestyle='--',alpha=1.,label='Chance level (Mean)')
mm,ss = np.array(no_shuffle['chance_mean']),np.array(no_shuffle['chance_se'])
ax.fill_between(times,m+s,m-s,color='red',alpha=.7,lw=0.5)
for ii, item in enumerate(shuffle):
if ii == 0:
ax.plot(times,item['scores_mean'],color='blue',alpha=.7,label='Classifi.Score (F1 Mean)_shuffle')
else:
ax.plot(times,item['scores_mean'],color='blue',alpha=1.)
m,s = np.array(item['scores_mean']),np.array(item['scores_std'])/np.sqrt(5)
ax.fill_between(times,m+s,m-s,color='red',alpha=.3)
ax.set(xlabel='Time (ms)',ylabel='Classifi.Score (F1)',title='Temporal Decoding\n Old vs New vs Scramble\nLinear SVM, 5-fold, n_permutation=1000',
xlim=(0,1400),xticks=times[::3])
pvals = np.vstack([item['pval'] for item in results[1:]])
pvals = np.vstack((no_shuffle['pval'],pvals),)
pval_set = np.sum(pvals < 0.05, axis=0)
pval_idx = np.where(pval_set> (11/2))[0]
for ii,idx in enumerate(pval_idx):
if ii == 0:
ax.axvspan(times[idx]-25,times[idx]+25,color='red',alpha=.2,label='pval < 0.05')
else:
ax.axvspan(times[idx]-25,times[idx]+25,color='red',alpha=.2)
ax.legend(fontsize='small')
fig.savefig('D:\\NING - spindle\\VCRT_study\\results\\'+'old vs new vs scr temporal decoding.png',dpi=500,bbox_inches = 'tight')
| [
"os.mkdir",
"matplotlib.rc",
"numpy.load",
"numpy.sum",
"sklearn.preprocessing.StandardScaler",
"numpy.mean",
"numpy.arange",
"glob.glob",
"sklearn.svm.SVC",
"os.chdir",
"mne.decoding.Vectorizer",
"os.path.exists",
"matplotlib.pyplot.colorbar",
"numpy.linspace",
"matplotlib.pyplot.subplo... | [((11780, 11825), 'glob.glob', 'glob', (["(working_dir + '*_shuffle*(3 classes).p')"], {}), "(working_dir + '*_shuffle*(3 classes).p')\n", (11784, 11825), False, 'from glob import glob\n'), ((11952, 12052), 'mne.read_epochs', 'mne.read_epochs', (['"""D:/NING - spindle/VCRT_study/data/0.1-40 Hz/3 classes-epo.fif"""'], {'preload': '(False)'}), "('D:/NING - spindle/VCRT_study/data/0.1-40 Hz/3 classes-epo.fif'\n , preload=False)\n", (11967, 12052), False, 'import mne\n'), ((12124, 12153), 'matplotlib.rc', 'matplotlib.rc', (['"""font"""'], {}), "('font', **font)\n", (12137, 12153), False, 'import matplotlib\n'), ((12163, 12192), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(16, 8)'}), '(figsize=(16, 8))\n', (12175, 12192), True, 'import matplotlib.pyplot as plt\n'), ((12200, 12225), 'numpy.linspace', 'np.linspace', (['(25)', '(1375)', '(28)'], {}), '(25, 1375, 28)\n', (12211, 12225), True, 'import numpy as np\n'), ((13300, 13349), 'numpy.vstack', 'np.vstack', (["[item['pval'] for item in results[1:]]"], {}), "([item['pval'] for item in results[1:]])\n", (13309, 13349), True, 'import numpy as np\n'), ((13362, 13400), 'numpy.vstack', 'np.vstack', (["(no_shuffle['pval'], pvals)"], {}), "((no_shuffle['pval'], pvals))\n", (13371, 13400), True, 'import numpy as np\n'), ((13412, 13440), 'numpy.sum', 'np.sum', (['(pvals < 0.05)'], {'axis': '(0)'}), '(pvals < 0.05, axis=0)\n', (13418, 13440), True, 'import numpy as np\n'), ((1799, 1821), 'os.chdir', 'os.chdir', (['"""D://Epochs"""'], {}), "('D://Epochs')\n", (1807, 1821), False, 'import os\n'), ((1972, 1994), 'seaborn.set_style', 'sns.set_style', (['"""white"""'], {}), "('white')\n", (1985, 1994), True, 'import seaborn as sns\n'), ((2043, 2064), 'os.chdir', 'os.chdir', (['"""D:/Epochs"""'], {}), "('D:/Epochs')\n", (2051, 2064), False, 'import os\n'), ((2672, 2771), 'mne.read_epochs', 'mne.read_epochs', (['"""D:/NING - spindle/VCRT_study/data/0.1-40 Hz/3 classes-epo.fif"""'], 
{'preload': '(True)'}), "('D:/NING - spindle/VCRT_study/data/0.1-40 Hz/3 classes-epo.fif'\n , preload=True)\n", (2687, 2771), False, 'import mne\n'), ((3771, 3832), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {'n_splits': '(5)', 'shuffle': '(True)', 'random_state': '(12345)'}), '(n_splits=5, shuffle=True, random_state=12345)\n', (3786, 3832), False, 'from sklearn.model_selection import StratifiedKFold, permutation_test_score, cross_val_score\n'), ((4903, 4919), 'numpy.array', 'np.array', (['scores'], {}), '(scores)\n', (4911, 4919), True, 'import numpy as np\n'), ((5044, 5105), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {'n_splits': '(5)', 'shuffle': '(True)', 'random_state': '(12345)'}), '(n_splits=5, shuffle=True, random_state=12345)\n', (5059, 5105), False, 'from sklearn.model_selection import StratifiedKFold, permutation_test_score, cross_val_score\n'), ((6482, 6499), 'numpy.array', 'np.array', (['chances'], {}), '(chances)\n', (6490, 6499), True, 'import numpy as np\n'), ((6506, 6559), 'numpy.save', 'np.save', (["(saving_dir + 'chance (3 class).npy')", 'chances'], {}), "(saving_dir + 'chance (3 class).npy', chances)\n", (6513, 6559), True, 'import numpy as np\n'), ((6572, 6616), 'numpy.load', 'np.load', (["(saving_dir + 'chance (3 class).npy')"], {}), "(saving_dir + 'chance (3 class).npy')\n", (6579, 6616), True, 'import numpy as np\n'), ((10002, 10063), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {'n_splits': '(5)', 'shuffle': '(True)', 'random_state': '(12345)'}), '(n_splits=5, shuffle=True, random_state=12345)\n', (10017, 10063), False, 'from sklearn.model_selection import StratifiedKFold, permutation_test_score, cross_val_score\n'), ((10710, 10733), 'numpy.array', 'np.array', (['scores_within'], {}), '(scores_within)\n', (10718, 10733), True, 'import numpy as np\n'), ((11032, 11061), 'matplotlib.rc', 'matplotlib.rc', (['"""font"""'], {}), "('font', **font)\n", (11045, 11061), False, 
'import matplotlib\n'), ((11075, 11105), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(12, 10)'}), '(figsize=(12, 10))\n', (11087, 11105), True, 'import matplotlib.pyplot as plt\n'), ((11236, 11252), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['im'], {}), '(im)\n', (11248, 11252), True, 'import matplotlib.pyplot as plt\n'), ((12334, 12369), 'numpy.array', 'np.array', (["no_shuffle['scores_mean']"], {}), "(no_shuffle['scores_mean'])\n", (12342, 12369), True, 'import numpy as np\n'), ((12607, 12642), 'numpy.array', 'np.array', (["no_shuffle['chance_mean']"], {}), "(no_shuffle['chance_mean'])\n", (12615, 12642), True, 'import numpy as np\n'), ((12643, 12676), 'numpy.array', 'np.array', (["no_shuffle['chance_se']"], {}), "(no_shuffle['chance_se'])\n", (12651, 12676), True, 'import numpy as np\n'), ((13452, 13479), 'numpy.where', 'np.where', (['(pval_set > 11 / 2)'], {}), '(pval_set > 11 / 2)\n', (13460, 13479), True, 'import numpy as np\n'), ((3116, 3129), 'sklearn.pipeline.Pipeline', 'Pipeline', (['clf'], {}), '(clf)\n', (3124, 3129), False, 'from sklearn.pipeline import Pipeline\n'), ((3264, 3290), 'os.path.exists', 'os.path.exists', (['saving_dir'], {}), '(saving_dir)\n', (3278, 3290), False, 'import os\n'), ((3300, 3320), 'os.mkdir', 'os.mkdir', (['saving_dir'], {}), '(saving_dir)\n', (3308, 3320), False, 'import os\n'), ((7071, 7086), 'numpy.sqrt', 'np.sqrt', (['n_perm'], {}), '(n_perm)\n', (7078, 7086), True, 'import numpy as np\n'), ((9074, 9090), 'numpy.array', 'np.array', (['scores'], {}), '(scores)\n', (9082, 9090), True, 'import numpy as np\n'), ((12370, 12404), 'numpy.array', 'np.array', (["no_shuffle['scores_std']"], {}), "(no_shuffle['scores_std'])\n", (12378, 12404), True, 'import numpy as np\n'), ((12405, 12415), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (12412, 12415), True, 'import numpy as np\n'), ((12979, 13008), 'numpy.array', 'np.array', (["item['scores_mean']"], {}), "(item['scores_mean'])\n", (12987, 13008), True, 
'import numpy as np\n'), ((3698, 3723), 'numpy.arange', 'np.arange', (['data.shape[-1]'], {}), '(data.shape[-1])\n', (3707, 3723), True, 'import numpy as np\n'), ((3915, 3930), 'numpy.arange', 'np.arange', (['(1400)'], {}), '(1400)\n', (3924, 3930), True, 'import numpy as np\n'), ((6990, 7014), 'numpy.mean', 'np.mean', (['chances'], {'axis': '(1)'}), '(chances, axis=1)\n', (6997, 7014), True, 'import numpy as np\n'), ((7730, 7757), 'sklearn.utils.shuffle', 'utils.shuffle', (['data', 'labels'], {}), '(data, labels)\n', (7743, 7757), False, 'from sklearn import metrics, utils\n'), ((9436, 9451), 'numpy.sqrt', 'np.sqrt', (['n_perm'], {}), '(n_perm)\n', (9443, 9451), True, 'import numpy as np\n'), ((13009, 13037), 'numpy.array', 'np.array', (["item['scores_std']"], {}), "(item['scores_std'])\n", (13017, 13037), True, 'import numpy as np\n'), ((13038, 13048), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (13045, 13048), True, 'import numpy as np\n'), ((2898, 2914), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (2912, 2914), False, 'from sklearn.preprocessing import StandardScaler\n'), ((7799, 7824), 'numpy.arange', 'np.arange', (['data.shape[-1]'], {}), '(data.shape[-1])\n', (7808, 7824), True, 'import numpy as np\n'), ((8032, 8047), 'numpy.arange', 'np.arange', (['(1400)'], {}), '(1400)\n', (8041, 8047), True, 'import numpy as np\n'), ((9351, 9375), 'numpy.mean', 'np.mean', (['chances'], {'axis': '(1)'}), '(chances, axis=1)\n', (9358, 9375), True, 'import numpy as np\n'), ((2857, 2869), 'mne.decoding.Vectorizer', 'Vectorizer', ([], {}), '()\n', (2867, 2869), False, 'from mne.decoding import Vectorizer\n'), ((2962, 3064), 'sklearn.svm.SVC', 'SVC', ([], {'max_iter': '(-1)', 'random_state': '(12345)', 'class_weight': '"""balanced"""', 'kernel': '"""linear"""', 'probability': '(False)'}), "(max_iter=-1, random_state=12345, class_weight='balanced', kernel=\n 'linear', probability=False)\n", (2965, 3064), False, 'from sklearn.svm import 
SVC\n')] |
import torch
import torch.nn as nn
import numpy as np
import time
import torch.nn.functional as F
import sentencepiece as spm
import model_pairing
import model_utils
import random
import os
from torch.nn.modules.distance import CosineSimilarity
from torch.nn.utils.rnn import pad_packed_sequence as unpack
from torch.nn.utils.rnn import pack_padded_sequence as pack
from evaluate_similarity import evaluate
from torch import optim
from model_utils import Example
from tqdm import tqdm
def load_model(data, load_file, force_cpu=False):
if not force_cpu:
model = torch.load(load_file)
else:
model = torch.load(load_file, map_location=torch.device('cpu'))
state_dict = model['state_dict']
model_args = model['args']
vocab = model['vocab']
vocab_fr = model['vocab_fr']
optimizer = model['optimizer']
epoch = model['epoch']
if force_cpu:
model_args.gpu = False
if model_args.model == "avg":
model = Averaging(data, model_args, vocab, vocab_fr)
elif args.model == "lstm":
model = LSTM(data, model_args, vocab, vocab_fr)
model.load_state_dict(state_dict)
model.optimizer.load_state_dict(optimizer)
return model, epoch
class ParaModel(nn.Module):
def __init__(self, data, args, vocab, vocab_fr):
super(ParaModel, self).__init__()
self.raw_data = data
self.args = args
self.gpu = args.gpu
self.vocab = vocab
self.vocab_fr = vocab_fr
self.ngrams = args.ngrams
self.seg_length = args.seg_length
self.delta = args.delta
self.pool = args.pool
self.dropout = args.dropout
self.share_encoder = args.share_encoder
self.share_vocab = args.share_vocab
self.zero_unk = args.zero_unk
self.batchsize = args.batchsize
self.max_megabatch_size = args.megabatch_size
self.curr_megabatch_size = 1
self.megabatch = []
self.megabatch_anneal = args.megabatch_anneal
self.increment = False
self.sim_loss = nn.MarginRankingLoss(margin=self.delta)
self.cosine = CosineSimilarity()
self.embedding = nn.Embedding(len(self.vocab), self.args.dim)
if self.vocab_fr is not None:
self.embedding_fr = nn.Embedding(len(self.vocab_fr), self.args.dim)
self.sp = None
if args.sp_model:
self.sp = spm.SentencePieceProcessor()
self.sp.Load(args.sp_model)
def save_params(self, epoch):
torch.save({'state_dict': self.state_dict(),
'vocab': self.vocab,
'vocab_fr': self.vocab_fr,
'args': self.args,
'optimizer': self.optimizer.state_dict(),
'epoch': epoch}, "{0}_{1}.pt".format(self.args.outfile, epoch))
return "{0}_{1}.pt".format(self.args.outfile, epoch)
def save_final_params(self):
print("Saving final model...")
torch.save({'state_dict': self.state_dict(),
'vocab': self.vocab,
'vocab_fr': self.vocab_fr,
'args': self.args,
'optimizer': self.optimizer.state_dict(),
'epoch': self.args.epochs}, "{0}".format(self.args.outfile)) #.pt is in input string
def torchify_batch(self, batch):
max_len = 0
for i in batch:
if len(i.embeddings) > max_len:
max_len = len(i.embeddings)
batch_len = len(batch)
np_sents = np.zeros((batch_len, max_len), dtype='int32')
np_lens = np.zeros((batch_len,), dtype='int32')
for i, ex in enumerate(batch):
np_sents[i, :len(ex.embeddings)] = ex.embeddings
np_lens[i] = len(ex.embeddings)
idxs, lengths = torch.from_numpy(np_sents).long(), \
torch.from_numpy(np_lens).float().long()
if self.gpu:
idxs = idxs.cuda()
lengths = lengths.cuda()
return idxs, lengths
def loss_function(self, g1, g2, p1, p2):
g1g2 = self.cosine(g1, g2)
g1p1 = self.cosine(g1, p1)
g2p2 = self.cosine(g2, p2)
ones = torch.ones(g1g2.size()[0])
if self.gpu:
ones = ones.cuda()
loss = self.sim_loss(g1g2, g1p1, ones) + self.sim_loss(g1g2, g2p2, ones)
return loss
def scoring_function(self, g_idxs1, g_lengths1, g_idxs2, g_lengths2, fr0=0, fr1=0):
g1 = self.encode(g_idxs1, g_lengths1, fr=fr0)
g2 = self.encode(g_idxs2, g_lengths2, fr=fr1)
return self.cosine(g1, g2)
def pair_up_data(self):
idx = random.randint(0, self.seg_length)
pairs = []
for i in self.raw_data:
sent = i.sentence
sent = sent.split()
idx = min(idx, len(sent) - 2)
splits = []
start = 0
while idx < len(sent):
seg1 = sent[start:idx]
splits.append(seg1)
start = idx
idx += self.seg_length
idx = min(idx, len(sent))
if idx > len(sent):
seg = sent[start:len(sent)]
splits.append(seg)
splits = [" ".join(i) for i in splits]
random.shuffle(splits)
mid = len(splits) // 2
pairs.append((Example(splits[0:mid]), Example(splits[mid:])))
return pairs
def train_epochs(self, start_epoch=1):
start_time = time.time()
self.megabatch = []
self.ep_loss = 0
self.curr_idx = 0
self.eval()
print(evaluate(self, self.args))
self.train()
pbar = None
try:
for ep in range(start_epoch, self.args.epochs + 1):
self.data = self.pair_up_data()
self.mb = model_utils.get_minibatches_idx(len(self.data), self.args.batchsize, shuffle=True)
self.curr_idx = 0
self.ep_loss = 0
self.megabatch = []
cost = 0
counter = 0
if pbar is None:
pbar = tqdm(total=len(self.mb))
else:
pbar.reset()
while (cost is not None):
cost = model_pairing.compute_loss_one_batch(self)
if cost is None:
continue
self.ep_loss += cost.item()
pbar.update(1)
counter += 1
self.optimizer.zero_grad()
cost.backward()
torch.nn.utils.clip_grad_norm_(self.parameters, self.args.grad_clip)
self.optimizer.step()
self.eval()
tqdm.write(evaluate(self, self.args))
self.train()
if self.args.save_every_epoch:
self.save_params(ep)
tqdm.write('Epoch {0}\tCost: {1}'.format(ep, self.ep_loss / counter))
self.save_final_params()
except KeyboardInterrupt:
print("Training Interrupted")
pbar.close()
end_time = time.time()
print("Total Time:", (end_time - start_time))
class Averaging(ParaModel):
def __init__(self, data, args, vocab, vocab_fr):
super(Averaging, self).__init__(data, args, vocab, vocab_fr)
self.parameters = self.parameters()
self.optimizer = optim.Adam(self.parameters, lr=self.args.lr)
if args.gpu:
self.cuda()
print(self)
def forward(self, curr_batch):
g_idxs1 = curr_batch.g1
g_lengths1 = curr_batch.g1_l
g_idxs2 = curr_batch.g2
g_lengths2 = curr_batch.g2_l
p_idxs1 = curr_batch.p1
p_lengths1 = curr_batch.p1_l
p_idxs2 = curr_batch.p2
p_lengths2 = curr_batch.p2_l
g1 = self.encode(g_idxs1, g_lengths1)
g2 = self.encode(g_idxs2, g_lengths2, fr=1)
p1 = self.encode(p_idxs1, p_lengths1, fr=1)
p2 = self.encode(p_idxs2, p_lengths2)
return g1, g2, p1, p2
def encode(self, idxs, lengths, fr=0):
if fr and not self.share_vocab:
word_embs = self.embedding_fr(idxs)
else:
word_embs = self.embedding(idxs)
if self.dropout > 0:
F.dropout(word_embs, training=self.training)
if self.pool == "max":
word_embs = model_utils.max_pool(word_embs, lengths, self.args.gpu)
elif self.pool == "mean":
word_embs = model_utils.mean_pool(word_embs, lengths, self.args.gpu)
return word_embs
class LSTM(ParaModel):
def __init__(self, data, args, vocab, vocab_fr):
super(LSTM, self).__init__(data, args, vocab, vocab_fr)
self.hidden_dim = self.args.hidden_dim
self.e_hidden_init = torch.zeros(2, 1, self.args.hidden_dim)
self.e_cell_init = torch.zeros(2, 1, self.args.hidden_dim)
if self.gpu:
self.e_hidden_init = self.e_hidden_init.cuda()
self.e_cell_init = self.e_cell_init.cuda()
self.lstm = nn.LSTM(self.args.dim, self.hidden_dim, num_layers=1, bidirectional=True, batch_first=True)
if not self.share_encoder:
self.lstm_fr = nn.LSTM(self.args.dim, self.hidden_dim, num_layers=1,
bidirectional=True, batch_first=True)
self.parameters = self.parameters()
self.optimizer = optim.Adam(filter(lambda p: p.requires_grad, self.parameters), self.args.lr)
if self.gpu:
self.cuda()
print(self)
def encode(self, inputs, lengths, fr=0):
bsz, max_len = inputs.size()
e_hidden_init = self.e_hidden_init.expand(2, bsz, self.hidden_dim).contiguous()
e_cell_init = self.e_cell_init.expand(2, bsz, self.hidden_dim).contiguous()
lens, indices = torch.sort(lengths, 0, True)
if fr and not self.share_vocab:
in_embs = self.embedding_fr(inputs)
else:
in_embs = self.embedding(inputs)
if fr and not self.share_encoder:
if self.dropout > 0:
F.dropout(in_embs, training=self.training)
all_hids, (enc_last_hid, _) = self.lstm_fr(pack(in_embs[indices],
lens.tolist(), batch_first=True), (e_hidden_init, e_cell_init))
else:
if self.dropout > 0:
F.dropout(in_embs, training=self.training)
all_hids, (enc_last_hid, _) = self.lstm(pack(in_embs[indices],
lens.tolist(), batch_first=True), (e_hidden_init, e_cell_init))
_, _indices = torch.sort(indices, 0)
all_hids = unpack(all_hids, batch_first=True)[0][_indices]
if self.pool == "max":
embs = model_utils.max_pool(all_hids, lengths, self.gpu)
elif self.pool == "mean":
embs = model_utils.mean_pool(all_hids, lengths, self.gpu)
return embs
def forward(self, curr_batch):
g_idxs1 = curr_batch.g1
g_lengths1 = curr_batch.g1_l
g_idxs2 = curr_batch.g2
g_lengths2 = curr_batch.g2_l
p_idxs1 = curr_batch.p1
p_lengths1 = curr_batch.p1_l
p_idxs2 = curr_batch.p2
p_lengths2 = curr_batch.p2_l
g1 = self.encode(g_idxs1, g_lengths1)
g2 = self.encode(g_idxs2, g_lengths2, fr=1)
p1 = self.encode(p_idxs1, p_lengths1, fr=1)
p2 = self.encode(p_idxs2, p_lengths2)
return g1, g2, p1, p2
| [
"model_utils.Example",
"sentencepiece.SentencePieceProcessor",
"random.shuffle",
"torch.nn.functional.dropout",
"model_utils.max_pool",
"torch.nn.utils.rnn.pad_packed_sequence",
"torch.device",
"model_utils.mean_pool",
"random.randint",
"torch.load",
"torch.zeros",
"torch.nn.LSTM",
"model_pa... | [((574, 595), 'torch.load', 'torch.load', (['load_file'], {}), '(load_file)\n', (584, 595), False, 'import torch\n'), ((2057, 2096), 'torch.nn.MarginRankingLoss', 'nn.MarginRankingLoss', ([], {'margin': 'self.delta'}), '(margin=self.delta)\n', (2077, 2096), True, 'import torch.nn as nn\n'), ((2119, 2137), 'torch.nn.modules.distance.CosineSimilarity', 'CosineSimilarity', ([], {}), '()\n', (2135, 2137), False, 'from torch.nn.modules.distance import CosineSimilarity\n'), ((3492, 3537), 'numpy.zeros', 'np.zeros', (['(batch_len, max_len)'], {'dtype': '"""int32"""'}), "((batch_len, max_len), dtype='int32')\n", (3500, 3537), True, 'import numpy as np\n'), ((3556, 3593), 'numpy.zeros', 'np.zeros', (['(batch_len,)'], {'dtype': '"""int32"""'}), "((batch_len,), dtype='int32')\n", (3564, 3593), True, 'import numpy as np\n'), ((4610, 4644), 'random.randint', 'random.randint', (['(0)', 'self.seg_length'], {}), '(0, self.seg_length)\n', (4624, 4644), False, 'import random\n'), ((5457, 5468), 'time.time', 'time.time', ([], {}), '()\n', (5466, 5468), False, 'import time\n'), ((7141, 7152), 'time.time', 'time.time', ([], {}), '()\n', (7150, 7152), False, 'import time\n'), ((7427, 7471), 'torch.optim.Adam', 'optim.Adam', (['self.parameters'], {'lr': 'self.args.lr'}), '(self.parameters, lr=self.args.lr)\n', (7437, 7471), False, 'from torch import optim\n'), ((8839, 8878), 'torch.zeros', 'torch.zeros', (['(2)', '(1)', 'self.args.hidden_dim'], {}), '(2, 1, self.args.hidden_dim)\n', (8850, 8878), False, 'import torch\n'), ((8906, 8945), 'torch.zeros', 'torch.zeros', (['(2)', '(1)', 'self.args.hidden_dim'], {}), '(2, 1, self.args.hidden_dim)\n', (8917, 8945), False, 'import torch\n'), ((9103, 9198), 'torch.nn.LSTM', 'nn.LSTM', (['self.args.dim', 'self.hidden_dim'], {'num_layers': '(1)', 'bidirectional': '(True)', 'batch_first': '(True)'}), '(self.args.dim, self.hidden_dim, num_layers=1, bidirectional=True,\n batch_first=True)\n', (9110, 9198), True, 'import torch.nn as 
nn\n'), ((9881, 9909), 'torch.sort', 'torch.sort', (['lengths', '(0)', '(True)'], {}), '(lengths, 0, True)\n', (9891, 9909), False, 'import torch\n'), ((10716, 10738), 'torch.sort', 'torch.sort', (['indices', '(0)'], {}), '(indices, 0)\n', (10726, 10738), False, 'import torch\n'), ((2399, 2427), 'sentencepiece.SentencePieceProcessor', 'spm.SentencePieceProcessor', ([], {}), '()\n', (2425, 2427), True, 'import sentencepiece as spm\n'), ((5239, 5261), 'random.shuffle', 'random.shuffle', (['splits'], {}), '(splits)\n', (5253, 5261), False, 'import random\n'), ((5583, 5608), 'evaluate_similarity.evaluate', 'evaluate', (['self', 'self.args'], {}), '(self, self.args)\n', (5591, 5608), False, 'from evaluate_similarity import evaluate\n'), ((8322, 8366), 'torch.nn.functional.dropout', 'F.dropout', (['word_embs'], {'training': 'self.training'}), '(word_embs, training=self.training)\n', (8331, 8366), True, 'import torch.nn.functional as F\n'), ((8423, 8478), 'model_utils.max_pool', 'model_utils.max_pool', (['word_embs', 'lengths', 'self.args.gpu'], {}), '(word_embs, lengths, self.args.gpu)\n', (8443, 8478), False, 'import model_utils\n'), ((9258, 9353), 'torch.nn.LSTM', 'nn.LSTM', (['self.args.dim', 'self.hidden_dim'], {'num_layers': '(1)', 'bidirectional': '(True)', 'batch_first': '(True)'}), '(self.args.dim, self.hidden_dim, num_layers=1, bidirectional=True,\n batch_first=True)\n', (9265, 9353), True, 'import torch.nn as nn\n'), ((10857, 10906), 'model_utils.max_pool', 'model_utils.max_pool', (['all_hids', 'lengths', 'self.gpu'], {}), '(all_hids, lengths, self.gpu)\n', (10877, 10906), False, 'import model_utils\n'), ((657, 676), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (669, 676), False, 'import torch\n'), ((8537, 8593), 'model_utils.mean_pool', 'model_utils.mean_pool', (['word_embs', 'lengths', 'self.args.gpu'], {}), '(word_embs, lengths, self.args.gpu)\n', (8558, 8593), False, 'import model_utils\n'), ((10150, 10192), 
'torch.nn.functional.dropout', 'F.dropout', (['in_embs'], {'training': 'self.training'}), '(in_embs, training=self.training)\n', (10159, 10192), True, 'import torch.nn.functional as F\n'), ((10454, 10496), 'torch.nn.functional.dropout', 'F.dropout', (['in_embs'], {'training': 'self.training'}), '(in_embs, training=self.training)\n', (10463, 10496), True, 'import torch.nn.functional as F\n'), ((10758, 10792), 'torch.nn.utils.rnn.pad_packed_sequence', 'unpack', (['all_hids'], {'batch_first': '(True)'}), '(all_hids, batch_first=True)\n', (10764, 10792), True, 'from torch.nn.utils.rnn import pad_packed_sequence as unpack\n'), ((10960, 11010), 'model_utils.mean_pool', 'model_utils.mean_pool', (['all_hids', 'lengths', 'self.gpu'], {}), '(all_hids, lengths, self.gpu)\n', (10981, 11010), False, 'import model_utils\n'), ((3764, 3790), 'torch.from_numpy', 'torch.from_numpy', (['np_sents'], {}), '(np_sents)\n', (3780, 3790), False, 'import torch\n'), ((5323, 5345), 'model_utils.Example', 'Example', (['splits[0:mid]'], {}), '(splits[0:mid])\n', (5330, 5345), False, 'from model_utils import Example\n'), ((5347, 5368), 'model_utils.Example', 'Example', (['splits[mid:]'], {}), '(splits[mid:])\n', (5354, 5368), False, 'from model_utils import Example\n'), ((6252, 6294), 'model_pairing.compute_loss_one_batch', 'model_pairing.compute_loss_one_batch', (['self'], {}), '(self)\n', (6288, 6294), False, 'import model_pairing\n'), ((6586, 6654), 'torch.nn.utils.clip_grad_norm_', 'torch.nn.utils.clip_grad_norm_', (['self.parameters', 'self.args.grad_clip'], {}), '(self.parameters, self.args.grad_clip)\n', (6616, 6654), False, 'import torch\n'), ((6753, 6778), 'evaluate_similarity.evaluate', 'evaluate', (['self', 'self.args'], {}), '(self, self.args)\n', (6761, 6778), False, 'from evaluate_similarity import evaluate\n'), ((3825, 3850), 'torch.from_numpy', 'torch.from_numpy', (['np_lens'], {}), '(np_lens)\n', (3841, 3850), False, 'import torch\n')] |
# -*- coding: utf-8 -*-
"""
Master Thesis <NAME>
Parameter File
"""
###############################################################################
## IMPORT PACKAGES & SCRIPTS ##
###############################################################################
#### PACKAGES ####
import gurobipy as gp
import numpy as np
###############################################################################
## GUROBI PARAMETERS ##
###############################################################################
# gp.setParam("NonConvex",-1) # enable non convex constraints, enable = 2
gp.setParam("OutputFlag",0) # solver output, enable = 1
# gp.setParam("DualReductions", 0) # check if feasible or unbounded: enable = 0
# gp.setParam("MIPGap",2e-4) # MIP gap, default = 1e-4
###############################################################################
## GENERAL ##
###############################################################################
### NETWORK ###
N_BUS = 18 # number of buses
N_PH = 3 # number of phases - {1,3}
S_BASE = 0.1 # base power [MVA]
### TIME HORIZON ###
TIME_HORZ = 1 # time horizon [h]
TIMESTEP = 0.25 # timestep [h]
T = int(TIME_HORZ/TIMESTEP) # number of timesteps [#]
### CONVERGENCE CRITERIA ###
ETA_BFS = 1e-5 # bfs standalone
ETA_BFSOPF = 5e-4 # bfs-opf voltage mismatch
ETA_MARG_V = 1e-4 # bus voltage uncertainty margin
# if jumping solutions for BFS-OPF: weighted average solution update BFS-OPF
BFSUPD = 0 # smooth solution update: enable = 1, disable = 0
A_BFS = 0.90 # factor
### ITERATION COUNTERS ###
M_MAX = 10 # maximum iterations outer CC loop
M_MIN = 4 # minimum iterations for outer CC loop
B_MAX = 10 # maximum iterations bfs-opf
K_MAX = 10 # maximum inner bfs iterations
### FORECAST ###
V_FCST = 1 # forecast version, for definition see forecast script header
PV_MAX = 8 # 8 kWp installations for data set to normalize
N_DAY = 2 # number of days for monte-carlo simulation
###############################################################################
## FLAGS: DISABLE = 0 , ENABLE = 1 ##
###############################################################################
### UNITS ###
FLGBAT = 1 # BESS
FLGSHED = 1 # load shedding
FLGSHIFT = 1 # load shifting
FLGCURT = 1 # active power curtailment
FLGOLTC = 1 # OLTC trafo
FLGLOAD = 1 # load profile: 0 = constant, 1 = time varying
FLGPF = 1 # power factor limit PV inverters
FLGPV = 1 # installed capacity PV from input file: 0 = input data, 1 = load dependent
FLGCC = 0 # chance constraints
FLGDRCC = 0 # distributionally robust or gaussian: 1 = DR, 0 = Gaussian
###############################################################################
## PARAMETER VARIATION: DISABLE = 0 , ENABLE = 1 ##
###############################################################################
FLGVAR_LOAD = 0 # load variation
FLGVAR_PV = 0 # PV variation
FCSTCASE = ['summer'] # seasonal forecast
if FLGVAR_LOAD == 0 and FLGVAR_PV == 0:
LOADCASE = [1] # single case with nominal load
PVCASE = [0.5] # single case with nominal PV
elif FLGVAR_LOAD == 1 and FLGVAR_PV == 0:
LOADCASE = [0.75,1,1.25] # load variation
PVCASE = [0.5] # single case with nominal PV
elif FLGVAR_LOAD == 1 and FLGVAR_PV == 1:
LOADCASE = [0.5,1,1.5] # load variation
PVCASE = [0.5,1,1.5] # single case with nominal PV
elif FLGVAR_LOAD == 0 and FLGVAR_PV == 1:
LOADCASE = [1] # load variation
PVCASE = [0.5,1,1.5] # single case with nominal PV
### UNBALANCED LOADING ###
# share of total load/PV to phase a,b,c
UNBALANCE = 'lightly' # degree of unbalance (symmetric,ligthly,heavily)
if N_PH == 3:
if UNBALANCE == 'symmetric':
LOADSHARE = [1/3,1/3,1/3]
PVSHARE = LOADSHARE
elif UNBALANCE == 'lightly':
LOADSHARE = [0.35,0.25,0.4]
PVSHARE = LOADSHARE
elif UNBALANCE == 'heavily':
LOADSHARE = [0.2,0.15,0.65]
PVSHARE = LOADSHARE
else:
LOADSHARE = [1]
PVSHARE = LOADSHARE
###############################################################################
## CHANCE-CONSTRAINTS ##
###############################################################################
# if jumping solutions: weighted average solution for uncertainty margin
MARGUPD = 1 # enable = 1, disable = 0
A_MARG = 0.95 # factor
### UNCERTAINTY MARGIN ###
# power ratio gamma
FLGCC_GMA = 0 # pre-defined gamma or from OPF: pre-defined = 0 - from OPF = 1
power_factor = 0.95 # pre-defined power factor
CC_GMA = np.sqrt((1-power_factor**2)/power_factor**2) # pre-defined power ratio
| [
"gurobipy.setParam",
"numpy.sqrt"
] | [((599, 627), 'gurobipy.setParam', 'gp.setParam', (['"""OutputFlag"""', '(0)'], {}), "('OutputFlag', 0)\n", (610, 627), True, 'import gurobipy as gp\n'), ((4593, 4645), 'numpy.sqrt', 'np.sqrt', (['((1 - power_factor ** 2) / power_factor ** 2)'], {}), '((1 - power_factor ** 2) / power_factor ** 2)\n', (4600, 4645), True, 'import numpy as np\n')] |
#!/usr/bin/env python
import pyrap.tables as pt
import numpy as np
import string
def read_corr(msname):
tt=pt.table(msname,readonly=False)
c=tt.getcol('DATA')
S=np.linalg.norm(c)
n=(np.random.normal(-1,1,c.shape)+1j*np.random.normal(-1,1,c.shape))
# mean should be zero
n=n-np.mean(n)
N=np.linalg.norm(n)
scalefac=0.05*(S/N)
tt.putcol('DATA',c+n*scalefac)
tt.close()
if __name__ == '__main__':
# addes noise to MS
#args MS
import sys
argc=len(sys.argv)
if argc==2:
read_corr(sys.argv[1])
exit()
| [
"pyrap.tables.table",
"numpy.mean",
"numpy.linalg.norm",
"numpy.random.normal"
] | [((111, 143), 'pyrap.tables.table', 'pt.table', (['msname'], {'readonly': '(False)'}), '(msname, readonly=False)\n', (119, 143), True, 'import pyrap.tables as pt\n'), ((169, 186), 'numpy.linalg.norm', 'np.linalg.norm', (['c'], {}), '(c)\n', (183, 186), True, 'import numpy as np\n'), ((303, 320), 'numpy.linalg.norm', 'np.linalg.norm', (['n'], {}), '(n)\n', (317, 320), True, 'import numpy as np\n'), ((192, 224), 'numpy.random.normal', 'np.random.normal', (['(-1)', '(1)', 'c.shape'], {}), '(-1, 1, c.shape)\n', (208, 224), True, 'import numpy as np\n'), ((288, 298), 'numpy.mean', 'np.mean', (['n'], {}), '(n)\n', (295, 298), True, 'import numpy as np\n'), ((226, 258), 'numpy.random.normal', 'np.random.normal', (['(-1)', '(1)', 'c.shape'], {}), '(-1, 1, c.shape)\n', (242, 258), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
import state
import commands
from coord import Coord, diff, UP, DOWN, LEFT, RIGHT, FORWARD, BACK
import sys, os
import math
from algorithm import *
import numpy as np
from math import floor, ceil, sqrt
import cProfile
def next_best_point(st, bot=None):
    """Return the next groundable MODEL voxel for `bot`, or None.

    Scans the model voxels in y-major order (lower layers first, since the
    matrix is transposed to (y, x, z) before np.where) and returns the first
    voxel that would be grounded if filled — first restricted to the bot's
    own region, then to a window widened by half the region extent.
    """
    region = bot.region
    x_lo, x_hi = region["minX"], region["maxX"]
    z_lo, z_hi = region["minZ"], region["maxZ"]

    def _scan(xmin, xmax, zmin, zmax):
        # Transposing to (y, x, z) makes np.where emit indices sorted by y first.
        model_voxels = np.transpose(
            np.where(np.transpose(st.matrix._ndarray, (1, 0, 2)) == state.Voxel.MODEL)
        )
        for y, x, z in model_voxels:
            if xmin <= x < xmax and zmin <= z < zmax:
                candidate = Coord(int(x), int(y), int(z))
                if st.matrix.would_be_grounded(candidate):
                    return candidate
        return None

    found = _scan(x_lo, x_hi, z_lo, z_hi)
    if found is not None:
        return found
    # Nothing groundable inside the region: widen the window by half the
    # region extent on each side in x and z and try again.
    half_x = (x_hi - x_lo) / 2
    half_z = (z_hi - z_lo) / 2
    return _scan(x_lo - half_x, x_hi + half_x, z_lo - half_z, z_hi + half_z)
def dig_mofo(st, bot, pt):
    """Last-resort routine for a stuck bot: move to a boundary point aligned
    with `pt`, tunnel straight toward it (voiding the voxel ahead at each
    step), fill at the end of the tunnel, then retreat along the same line.

    Queues actions on `bot`; the actual moves happen when the state steps.
    """
    print("dig dig dig")
    print(bot.pos)
    # Discard whatever the bot had queued; the dig sequence replaces it.
    bot.actions=[]
    print(pt)
    path = None
    # Try the four lateral entry points in turn (+x, -z, +z, -x faces of the
    # matrix at pt's height); the first one with a reachable path wins.
    # `dir` is the digging direction and `n` the number of steps from the
    # entry point toward `pt`.
    if path is None:
        start = Coord(st.R-1, pt.y, pt.z)
        path = shortest_path(st, bot, start)
        dir = RIGHT
        n = st.R-pt.x-2
    if path is None:
        start = Coord(pt.x, pt.y, 0)
        path = shortest_path(st, bot, start)
        dir = FORWARD
        n = pt.z-1
    if path is None:
        start = Coord(pt.x, pt.y, st.R-1)
        path = shortest_path(st, bot, start)
        dir = BACK
        n = st.R-pt.z-2
    if path is None:
        start = Coord(0, pt.y, pt.z)
        path = shortest_path(st, bot, start)
        dir = LEFT
        n = pt.x-1
    if path is not None:
        # print("got path")
        print(path)
        compress(st, bot, path)
    else:
        # NOTE(review): when no entry point is reachable we still fall
        # through below using the *last* attempted start/dir/n — confirm
        # this is intended rather than an early return.
        print("couldn't find path to pt: "+str(start))
    # Tunnel forward: each step moves one voxel and voids the voxel ahead.
    for i in range(n):
        bot.smove(dir)
        start += dir
        bot.void(dir)
    # Fill the voxel at the end of the tunnel (presumably the target pt —
    # TODO confirm against Coord/bot semantics).
    bot.fill(dir)
    # Retreat along the same line.
    for i in range(n):
        bot.smove(dir.mul(-1))
        start += dir.mul(-1)
    # Restore the voxel just ahead of the entry point if it belongs to the model.
    if st.matrix[start + dir].is_model():
        bot.fill(dir)
    print("finished digging")
def solve(st):
    """Main fill loop: repeatedly assign each idle bot its next model voxel
    and either fill it directly (when adjacent and not above the bot) or
    path-find to a free voxel next to it.  Escalates to climbing, then to
    `dig_mofo`, and finally raises when bots stay stuck too long.

    Raises
    ------
    ValueError
        When a bot stays stuck for more than 2*R checks, or when every bot
        is stuck simultaneously.
    """
    stuck_steps=0
    while not st.is_model_finished():
        stuck_bots=0
        for bot in st.bots:
            # Bots with queued actions are still busy executing them.
            if len(bot.actions) > 0:
                continue
            # print(bot)
            # n+=1
            # if n>1000:
            #     return
            # pt = next_best_point(st, bot)
            pt = st.matrix.fill_next(bot)
            # print(bot.pos)
            # print("pt")
            # print(pt)
            # print("")
            if pt is None:
                continue
            else:
                # Fill directly when the target is adjacent and not above the bot.
                if (pt - bot.pos).mlen() == 1 and pt.y <= bot.pos.y:
                    bot.fill(pt - bot.pos)
                    if st.matrix.nfull % 100 == 0:
                        # print every 100 fills
                        print(st)
                else:
                    # Otherwise try to route to any free voxel adjacent to pt.
                    found = False
                    for a in pt.adjacent(st.R):
                        if not st.matrix._ndarray[a.x,a.y,a.z] & (state.Voxel.BOT | state.Voxel.FULL):
                            # print("path")
                            path = shortest_path(st, bot, a)
                            # if len(path) > 10:
                            #     print(path)
                            #     print([b.pos for b in st.bots])
                            if path is not None:
                                # print("got path")
                                compress(st, bot, path)
                                found=True
                                break
                            elif bot.pos.y < st.R - 1:
                                # No path: climb one voxel and retry next round.
                                bot.smove(UP)
                            else:
                                # At the ceiling with no path: count as stuck
                                # and escalate to digging / hard failure.
                                stuck_steps += 1
                                print("bot at {} can't get to {} (no void adjacent)".format(bot.pos, pt))
                                if stuck_steps > st.R:
                                    dig_mofo(st, bot, pt)
                                if stuck_steps > st.R * 2:
                                    raise ValueError("stuck too long")
                    if not found:
                        stuck_bots += 1
        # Advance the simulation only when at least one bot has work queued.
        if any(len(bot.actions)>0 for bot in st.bots):
            # for bot in st.bots:
            #     print(bot.pos)
            #     if len(bot.actions)>0:
            #         print(bot.actions[0])
            # print("stepping")
            st.step()
        if stuck_bots == len(st.bots):
            raise ValueError( 'all bots stuck!' )
def shortest_path_algo(st):
    """Top-level strategy: partition the model's x/z bounding box into up to
    20 roughly-square regions (at least 6x6 each), fission one bot per
    region, route each bot to its region's corner, then run `solve`.
    """
    bot = st.bots[0]
    bot.smove(UP)
    minX, maxX, minY, maxY, minZ, maxZ = st.matrix.bounds
    print(st.matrix.bounds)
    # minarea bounds the smallest useful region; maxbots caps the fleet size.
    minarea, maxbots = 6 * 6, 20
    width, depth = maxX - minX, maxZ - minZ
    mostarea = width * depth / maxbots
    # Side length of a (square) region covering at least max(mostarea, minarea).
    rsize = ceil(sqrt(max(mostarea, minarea)))
    xbots, zbots = max(floor(width / rsize), 1), max(floor(depth / rsize), 1)
    nbots = xbots * zbots
    print("nbots: {}".format(nbots))
    # Build the region grid; the last region in each row/column is stretched
    # to the bounding-box edge so no strip narrower than rsize is left over.
    regions = []
    for x in range(xbots):
        rX = min([maxX, minX + (x+1) * rsize])
        if maxX - rX < rsize:
            rX = maxX
        for z in range(zbots):
            rZ = min([maxZ, minZ + (z+1) * rsize])
            if maxZ - rZ < rsize:
                rZ = maxZ
            region = {
                "minX": int(minX + x * rsize),
                "maxX": int(rX),
                "minZ": int(minZ + z * rsize),
                "maxZ": int(rZ)
            }
            print(region)
            regions.append(region)
    # print(convex_hull(st))
    # print(st.matrix.bounds)
    st.step_all()
    # Fission one new bot per remaining region, always from the bot that
    # still holds the most seeds, and send it to its region's corner.
    for i in range(1, nbots):
        # print(st.bots[0].seeds)
        sorted(st.bots, key=lambda bot: -len(bot.seeds))[0].fission(FORWARD, 0)
        st.step_all()
        b = st.bots[i]
        b.region = regions[nbots-i-1]
        path = shortest_path(st, b, Coord(b.region["minX"], 1, b.region["minZ"]))
        if path:
            compress(st, b, path)
            st.step_all()
    # The original bot takes the last region.
    b = st.bots[0]
    b.region = regions[nbots-1]
    path = shortest_path(st, b, Coord(b.region["minX"], 1, b.region["minZ"]))
    if path:
        compress(st, b, path)
        st.step_all()
    solve(st)
    print("finished solve")
    st.step_all()
def main(*args, **kwargs):
    """Run the full build under cProfile, then fuse all bots back into one,
    return to base and halt.

    Returns
    -------
    tuple
        (final state, success flag) — success is False when the profiled
        run raised an exception.
    """
    success = True
    st = state.State.create(*args, **kwargs)
    try:
        # Profile the whole strategy; failures are recorded, not fatal, so
        # the teardown (fusion/halt) below still produces a valid trace.
        cProfile.runctx('shortest_path_algo(st)', {}, {'st': st, 'shortest_path_algo': shortest_path_algo}, sort='cumulative')
    except Exception as e:
        print(e)
        success = False
    # Fuse every secondary bot back into the primary one, one at a time:
    # route the secondary next to the primary, then issue the fusion pair.
    bot = st.bots[0]
    for bot2 in st.bots[1:]:
        for a in bot.pos.adjacent(st.R):
            if st.matrix[a].is_void():
                path = shortest_path(st, bot2, a)
                if path is not None:
                    print("found path")
                    compress(st, bot2, path)
                    break
        st.step_all()
        bot.fusionp(bot2.pos - bot.pos)
        bot2.fusions(bot.pos - bot2.pos)
        st.step_all()
    # shortest_path_algo(st)
    back_to_base(st, bot)
    bot.halt()
    # Drain any remaining queued actions.
    while st.step():
        pass
    return st, success
if __name__ == '__main__':
    # CLI: the single argument is the problem number (e.g. 42 -> FA042.nbt).
    problem = int(sys.argv[1])
    st, success = main(problem=problem)
    # Tag the output file when the run raised an exception.
    suffix = '_failed' if not success else ''
    print( st )
    print( 'energy: {}, default: {}, score: {:0.3f}/{:0.3f}'.format( st.energy, st.default_energy, st.score, st.score_max ) )
    # Serialize the accumulated trace and write the submission file.
    data = commands.export_nbt( st.trace )
    with open("submission/FA"+str(problem).zfill(3)+suffix+".nbt", "wb") as file:
        file.write(data)
| [
"coord.Coord",
"state.State.create",
"math.floor",
"cProfile.runctx",
"numpy.transpose",
"commands.export_nbt"
] | [((6505, 6540), 'state.State.create', 'state.State.create', (['*args'], {}), '(*args, **kwargs)\n', (6523, 6540), False, 'import state\n'), ((7629, 7658), 'commands.export_nbt', 'commands.export_nbt', (['st.trace'], {}), '(st.trace)\n', (7648, 7658), False, 'import commands\n'), ((1307, 1334), 'coord.Coord', 'Coord', (['(st.R - 1)', 'pt.y', 'pt.z'], {}), '(st.R - 1, pt.y, pt.z)\n', (1312, 1334), False, 'from coord import Coord, diff, UP, DOWN, LEFT, RIGHT, FORWARD, BACK\n'), ((1464, 1484), 'coord.Coord', 'Coord', (['pt.x', 'pt.y', '(0)'], {}), '(pt.x, pt.y, 0)\n', (1469, 1484), False, 'from coord import Coord, diff, UP, DOWN, LEFT, RIGHT, FORWARD, BACK\n'), ((1609, 1636), 'coord.Coord', 'Coord', (['pt.x', 'pt.y', '(st.R - 1)'], {}), '(pt.x, pt.y, st.R - 1)\n', (1614, 1636), False, 'from coord import Coord, diff, UP, DOWN, LEFT, RIGHT, FORWARD, BACK\n'), ((1765, 1785), 'coord.Coord', 'Coord', (['(0)', 'pt.y', 'pt.z'], {}), '(0, pt.y, pt.z)\n', (1770, 1785), False, 'from coord import Coord, diff, UP, DOWN, LEFT, RIGHT, FORWARD, BACK\n'), ((6278, 6322), 'coord.Coord', 'Coord', (["b.region['minX']", '(1)', "b.region['minZ']"], {}), "(b.region['minX'], 1, b.region['minZ'])\n", (6283, 6322), False, 'from coord import Coord, diff, UP, DOWN, LEFT, RIGHT, FORWARD, BACK\n'), ((6558, 6680), 'cProfile.runctx', 'cProfile.runctx', (['"""shortest_path_algo(st)"""', '{}', "{'st': st, 'shortest_path_algo': shortest_path_algo}"], {'sort': '"""cumulative"""'}), "('shortest_path_algo(st)', {}, {'st': st,\n 'shortest_path_algo': shortest_path_algo}, sort='cumulative')\n", (6573, 6680), False, 'import cProfile\n'), ((5075, 5095), 'math.floor', 'floor', (['(width / rsize)'], {}), '(width / rsize)\n', (5080, 5095), False, 'from math import floor, ceil, sqrt\n'), ((5105, 5125), 'math.floor', 'floor', (['(depth / rsize)'], {}), '(depth / rsize)\n', (5110, 5125), False, 'from math import floor, ceil, sqrt\n'), ((6076, 6120), 'coord.Coord', 'Coord', (["b.region['minX']", '(1)', 
"b.region['minZ']"], {}), "(b.region['minX'], 1, b.region['minZ'])\n", (6081, 6120), False, 'from coord import Coord, diff, UP, DOWN, LEFT, RIGHT, FORWARD, BACK\n'), ((463, 506), 'numpy.transpose', 'np.transpose', (['st.matrix._ndarray', '(1, 0, 2)'], {}), '(st.matrix._ndarray, (1, 0, 2))\n', (475, 506), True, 'import numpy as np\n'), ((784, 827), 'numpy.transpose', 'np.transpose', (['st.matrix._ndarray', '(1, 0, 2)'], {}), '(st.matrix._ndarray, (1, 0, 2))\n', (796, 827), True, 'import numpy as np\n')] |
"""
Wraps geometric procedures
"""
import copy
import json
from typing import Any, Dict, List
import numpy as np
from ..extras import find_module
from ..interface.models import TorsionDriveRecord
from .service_util import BaseService, TaskManager
__all__ = ["TorsionDriveService"]
__td_api = find_module("torsiondrive")
def _check_td():
    """Raise if the optional ``torsiondrive`` package is not importable.

    ``__td_api`` is resolved once at module import time via ``find_module``
    and is ``None`` when the package is missing.

    Raises
    ------
    ModuleNotFoundError
        If the ``torsiondrive`` package is not installed.
    """
    if __td_api is None:
        # Fixed message: the missing dependency is the torsiondrive package,
        # not TorsionDriveRecord (which is imported unconditionally above).
        raise ModuleNotFoundError(
            "Unable to find the torsiondrive package, which must be installed to use the TorsionDriveService"
        )
class TorsionDriveService(BaseService):
    """Service wrapping the external ``torsiondrive.td_api`` state machine.

    Each iteration harvests finished constrained-optimization tasks, feeds
    their geometries/energies back into the torsiondrive state, and either
    submits the next batch of optimizations or finalizes the associated
    :class:`TorsionDriveRecord`.
    """
    # Index info
    service: str = "torsiondrive"
    program: str = "torsiondrive"
    procedure: str = "torsiondrive"
    # Program info
    optimization_program: str
    # Output
    output: TorsionDriveRecord = None  # default added so the field may start unset
    # Temporaries
    torsiondrive_state: Dict[str, Any]
    optimization_history: Dict[str, List[str]] = {}
    # Task helpers
    task_map: Dict[str, List[str]] = {}
    task_manager: TaskManager = TaskManager()
    # Templates (stored as JSON strings, re-parsed per submitted task)
    dihedral_template: str
    optimization_template: str
    molecule_template: str
    @classmethod
    def initialize_from_api(cls, storage_socket, logger, service_input, tag=None, priority=None):
        """Build a new service instance (and its output record) from a service input."""
        _check_td()
        import torsiondrive
        from torsiondrive import td_api
        # Build the record
        output = TorsionDriveRecord(
            **service_input.dict(exclude={"initial_molecule"}),
            initial_molecule=[x.id for x in service_input.initial_molecule],
            provenance={
                "creator": "torsiondrive",
                "version": torsiondrive.__version__,
                "routine": "torsiondrive.td_api",
            },
            final_energy_dict={},
            minimum_positions={},
            optimization_history={},
        )
        meta = {"output": output}
        # Remove identity info from molecule template
        molecule_template = copy.deepcopy(service_input.initial_molecule[0].dict(encoding="json"))
        molecule_template.pop("id", None)
        molecule_template.pop("identifiers", None)
        meta["molecule_template"] = json.dumps(molecule_template)
        # Initiate torsiondrive meta
        meta["torsiondrive_state"] = td_api.create_initial_state(
            dihedrals=output.keywords.dihedrals,
            grid_spacing=output.keywords.grid_spacing,
            elements=molecule_template["symbols"],
            init_coords=[x.geometry for x in service_input.initial_molecule],
            dihedral_ranges=output.keywords.dihedral_ranges,
            energy_decrease_thresh=output.keywords.energy_decrease_thresh,
            energy_upper_limit=output.keywords.energy_upper_limit,
        )
        # Build dihedral template (one constraint spec per scanned dihedral)
        dihedral_template = []
        for idx in output.keywords.dihedrals:
            tmp = {"type": "dihedral", "indices": idx}
            dihedral_template.append(tmp)
        meta["dihedral_template"] = json.dumps(dihedral_template)
        # Build optimization template
        opt_template = {
            "meta": {"procedure": "optimization", "qc_spec": output.qc_spec.dict(), "tag": meta.pop("tag", None)}
        }
        opt_template["meta"].update(output.optimization_spec.dict())
        meta["optimization_template"] = json.dumps(opt_template)
        # Move around geometric data
        meta["optimization_program"] = output.optimization_spec.program
        meta["hash_index"] = output.get_hash_index()
        meta["task_tag"] = tag
        meta["task_priority"] = priority
        return cls(**meta, storage_socket=storage_socket, logger=logger)
    def iterate(self):
        """Advance the service one step.

        Returns True when the torsiondrive scan is complete (output
        finalized), False when more optimization tasks remain in flight.
        """
        _check_td()
        from torsiondrive import td_api
        self.status = "RUNNING"
        # Check if tasks are done
        if self.task_manager.done() is False:
            return False
        complete_tasks = self.task_manager.get_tasks()
        # Populate task results: per grid key, a list of
        # (initial geometry, final geometry, final energy) tuples.
        task_results = {}
        for key, task_ids in self.task_map.items():
            task_results[key] = []
            for task_id in task_ids:
                # Cycle through all tasks for this entry
                ret = complete_tasks[task_id]
                # Lookup molecules
                mol_keys = self.storage_socket.get_molecules(id=[ret["initial_molecule"], ret["final_molecule"]])[
                    "data"
                ]
                task_results[key].append((mol_keys[0].geometry, mol_keys[1].geometry, ret["energies"][-1]))
        td_api.update_state(self.torsiondrive_state, task_results)
        # Create new tasks from the current state
        next_tasks = td_api.next_jobs_from_state(self.torsiondrive_state, verbose=True)
        # All done
        if len(next_tasks) == 0:
            self.status = "COMPLETE"
            self.update_output()
            return True
        self.submit_optimization_tasks(next_tasks)
        return False
    def submit_optimization_tasks(self, task_dict):
        """Queue one constrained optimization per geometry in ``task_dict``.

        ``task_dict`` maps grid-point keys to lists of starting geometries;
        each geometry becomes a task keyed "<grid key>-<index>".
        """
        _check_td()
        from torsiondrive import td_api
        new_tasks = {}
        task_map = {}
        for key, geoms in task_dict.items():
            task_map[key] = []
            for num, geom in enumerate(geoms):
                # Update molecule
                packet = json.loads(self.optimization_template)
                # Construct constraints: pin each scanned dihedral to this grid point.
                constraints = json.loads(self.dihedral_template)
                grid_id = td_api.grid_id_from_string(key)
                for con_num, k in enumerate(grid_id):
                    constraints[con_num]["value"] = k
                # update existing constraints to support the "extra constraints" feature
                packet["meta"]["keywords"].setdefault("constraints", {})
                packet["meta"]["keywords"]["constraints"].setdefault("set", [])
                packet["meta"]["keywords"]["constraints"]["set"].extend(constraints)
                # Build new molecule from the stored template plus this geometry
                mol = json.loads(self.molecule_template)
                mol["geometry"] = geom
                packet["data"] = [mol]
                task_key = "{}-{}".format(key, num)
                new_tasks[task_key] = packet
                task_map[key].append(task_key)
        self.task_manager.submit_tasks("optimization", new_tasks)
        self.task_map = task_map
        # Update history: record every submitted task id per grid key.
        for key, task_ids in self.task_map.items():
            if key not in self.optimization_history:
                self.optimization_history[key] = []
            for task_id in task_ids:
                self.optimization_history[key].append(self.task_manager.required_tasks[task_id])
        self.update_output()
    def update_output(self):
        """
        Finishes adding data to the TorsionDriveRecord object: for each grid
        point, records the index and energy of its lowest-energy result and
        the per-grid-point optimization history, then copies them onto
        ``self.output``.
        """
        _check_td()
        from torsiondrive import td_api
        # # Get lowest energies and positions
        min_positions = {}
        final_energy = {}
        for k, v in self.torsiondrive_state["grid_status"].items():
            # Each entry of v is a tuple whose third element is the energy.
            idx = int(np.argmin([x[2] for x in v]))
            key = json.dumps(td_api.grid_id_from_string(k))
            min_positions[key] = idx
            final_energy[key] = v[idx][2]
        history = {json.dumps(td_api.grid_id_from_string(k)): v for k, v in self.optimization_history.items()}
        self.output = self.output.copy(
            update={
                "status": self.status,
                "minimum_positions": min_positions,
                "final_energy_dict": final_energy,
                "optimization_history": history,
            }
        )
        return True
| [
"json.loads",
"torsiondrive.td_api.create_initial_state",
"json.dumps",
"numpy.argmin",
"torsiondrive.td_api.grid_id_from_string",
"torsiondrive.td_api.next_jobs_from_state",
"torsiondrive.td_api.update_state"
] | [((2147, 2176), 'json.dumps', 'json.dumps', (['molecule_template'], {}), '(molecule_template)\n', (2157, 2176), False, 'import json\n'), ((2252, 2653), 'torsiondrive.td_api.create_initial_state', 'td_api.create_initial_state', ([], {'dihedrals': 'output.keywords.dihedrals', 'grid_spacing': 'output.keywords.grid_spacing', 'elements': "molecule_template['symbols']", 'init_coords': '[x.geometry for x in service_input.initial_molecule]', 'dihedral_ranges': 'output.keywords.dihedral_ranges', 'energy_decrease_thresh': 'output.keywords.energy_decrease_thresh', 'energy_upper_limit': 'output.keywords.energy_upper_limit'}), "(dihedrals=output.keywords.dihedrals,\n grid_spacing=output.keywords.grid_spacing, elements=molecule_template[\n 'symbols'], init_coords=[x.geometry for x in service_input.\n initial_molecule], dihedral_ranges=output.keywords.dihedral_ranges,\n energy_decrease_thresh=output.keywords.energy_decrease_thresh,\n energy_upper_limit=output.keywords.energy_upper_limit)\n", (2279, 2653), False, 'from torsiondrive import td_api\n'), ((2973, 3002), 'json.dumps', 'json.dumps', (['dihedral_template'], {}), '(dihedral_template)\n', (2983, 3002), False, 'import json\n'), ((3300, 3324), 'json.dumps', 'json.dumps', (['opt_template'], {}), '(opt_template)\n', (3310, 3324), False, 'import json\n'), ((4515, 4573), 'torsiondrive.td_api.update_state', 'td_api.update_state', (['self.torsiondrive_state', 'task_results'], {}), '(self.torsiondrive_state, task_results)\n', (4534, 4573), False, 'from torsiondrive import td_api\n'), ((4646, 4712), 'torsiondrive.td_api.next_jobs_from_state', 'td_api.next_jobs_from_state', (['self.torsiondrive_state'], {'verbose': '(True)'}), '(self.torsiondrive_state, verbose=True)\n', (4673, 4712), False, 'from torsiondrive import td_api\n'), ((5277, 5315), 'json.loads', 'json.loads', (['self.optimization_template'], {}), '(self.optimization_template)\n', (5287, 5315), False, 'import json\n'), ((5387, 5421), 'json.loads', 'json.loads', 
(['self.dihedral_template'], {}), '(self.dihedral_template)\n', (5397, 5421), False, 'import json\n'), ((5448, 5479), 'torsiondrive.td_api.grid_id_from_string', 'td_api.grid_id_from_string', (['key'], {}), '(key)\n', (5474, 5479), False, 'from torsiondrive import td_api\n'), ((5975, 6009), 'json.loads', 'json.loads', (['self.molecule_template'], {}), '(self.molecule_template)\n', (5985, 6009), False, 'import json\n'), ((7048, 7076), 'numpy.argmin', 'np.argmin', (['[x[2] for x in v]'], {}), '([x[2] for x in v])\n', (7057, 7076), True, 'import numpy as np\n'), ((7107, 7136), 'torsiondrive.td_api.grid_id_from_string', 'td_api.grid_id_from_string', (['k'], {}), '(k)\n', (7133, 7136), False, 'from torsiondrive import td_api\n'), ((7248, 7277), 'torsiondrive.td_api.grid_id_from_string', 'td_api.grid_id_from_string', (['k'], {}), '(k)\n', (7274, 7277), False, 'from torsiondrive import td_api\n')] |
import numpy as np
from keras import backend as K
import os
import sys
def main():
    """Train the VGG16+LSTM video classifier on UCF-101 and plot its history."""
    K.set_image_dim_ordering('tf')
    here = os.path.dirname(__file__)
    # Make the keras_video_classifier package importable from the parent dir.
    sys.path.append(os.path.join(here, '..'))
    from keras_video_classifier.library.utility.plot_utils import plot_and_save_history, plot_history_2win
    from keras_video_classifier.library.recurrent_networks import VGG16LSTMVideoClassifier
    from keras_video_classifier.library.utility.ucf.UCF101_loader import load_ucf
    print("PASS LOAD UCF")
    data_set_name = 'UCF-101'
    input_dir_path = os.path.join(here, 'very_large_data')
    output_dir_path = os.path.join(here, 'models', data_set_name)
    report_dir_path = os.path.join(here, 'reports', data_set_name)
    np.random.seed(42)  # reproducible shuffling / initialization
    # Downloads the UCF-101 videos into very_large_data when not present.
    load_ucf(input_dir_path)
    classifier = VGG16LSTMVideoClassifier()
    history = classifier.fit(data_dir_path=input_dir_path,
                             model_dir_path=output_dir_path,
                             vgg16_include_top=False,
                             data_set_name=data_set_name)
    print("history = classifier.fit")
    model_name = VGG16LSTMVideoClassifier.model_name
    plot_and_save_history(history, model_name,
                          report_dir_path + '/' + model_name + '-hi-dim-history.png')
    plot_history_2win(history, model_name,
                      report_dir_path + '/' + model_name + '-hi-dim-history2win.png')
# Script entry point: kick off training when run directly.
if __name__ == '__main__':
    main()
| [
"numpy.random.seed",
"keras_video_classifier.library.utility.plot_utils.plot_history_2win",
"os.path.dirname",
"keras_video_classifier.library.utility.plot_utils.plot_and_save_history",
"keras_video_classifier.library.recurrent_networks.VGG16LSTMVideoClassifier",
"keras.backend.set_image_dim_ordering",
... | [((89, 119), 'keras.backend.set_image_dim_ordering', 'K.set_image_dim_ordering', (['"""tf"""'], {}), "('tf')\n", (113, 119), True, 'from keras import backend as K\n'), ((788, 806), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (802, 806), True, 'import numpy as np\n'), ((929, 953), 'keras_video_classifier.library.utility.ucf.UCF101_loader.load_ucf', 'load_ucf', (['input_dir_path'], {}), '(input_dir_path)\n', (937, 953), False, 'from keras_video_classifier.library.utility.ucf.UCF101_loader import load_ucf\n'), ((972, 998), 'keras_video_classifier.library.recurrent_networks.VGG16LSTMVideoClassifier', 'VGG16LSTMVideoClassifier', ([], {}), '()\n', (996, 998), False, 'from keras_video_classifier.library.recurrent_networks import VGG16LSTMVideoClassifier\n'), ((1216, 1377), 'keras_video_classifier.library.utility.plot_utils.plot_and_save_history', 'plot_and_save_history', (['history', 'VGG16LSTMVideoClassifier.model_name', "(report_dir_path + '/' + VGG16LSTMVideoClassifier.model_name +\n '-hi-dim-history.png')"], {}), "(history, VGG16LSTMVideoClassifier.model_name, \n report_dir_path + '/' + VGG16LSTMVideoClassifier.model_name +\n '-hi-dim-history.png')\n", (1237, 1377), False, 'from keras_video_classifier.library.utility.plot_utils import plot_and_save_history, plot_history_2win\n'), ((1372, 1533), 'keras_video_classifier.library.utility.plot_utils.plot_history_2win', 'plot_history_2win', (['history', 'VGG16LSTMVideoClassifier.model_name', "(report_dir_path + '/' + VGG16LSTMVideoClassifier.model_name +\n '-hi-dim-history2win.png')"], {}), "(history, VGG16LSTMVideoClassifier.model_name, \n report_dir_path + '/' + VGG16LSTMVideoClassifier.model_name +\n '-hi-dim-history2win.png')\n", (1389, 1533), False, 'from keras_video_classifier.library.utility.plot_utils import plot_and_save_history, plot_history_2win\n'), ((562, 587), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (577, 587), False, 'import os\n'), ((643, 668), 
'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (658, 668), False, 'import os\n'), ((730, 755), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (745, 755), False, 'import os\n'), ((153, 178), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (168, 178), False, 'import os\n')] |
"""
Time Series Statistics
----------------------
"""
import math
from typing import List, Optional, Tuple, Union
import matplotlib.pyplot as plt
import numpy as np
from scipy.signal import argrelmax
from scipy.stats import norm
from statsmodels.tsa.seasonal import STL, seasonal_decompose
from statsmodels.tsa.stattools import acf, adfuller, grangercausalitytests, kpss, pacf
from darts import TimeSeries
from darts.logging import get_logger, raise_if, raise_if_not, raise_log
from .missing_values import fill_missing_values
from .utils import ModelMode, SeasonalityMode
logger = get_logger(__name__)
def check_seasonality(
    ts: TimeSeries, m: Optional[int] = None, max_lag: int = 24, alpha: float = 0.05
):
    """
    Checks whether the TimeSeries `ts` is seasonal with period `m` or not.

    When `m` is None, a unique seasonality period is assumed and inferred
    from the local maxima of the Auto-correlation Function (ACF).

    Parameters
    ----------
    ts
        The time series to check for seasonality.
    m
        The seasonality period to check.
    max_lag
        The maximal lag allowed in the ACF.
    alpha
        The desired confidence level (default 5%).

    Returns
    -------
    Tuple[bool, int]
        A tuple `(season, m)`: `season` indicates whether the series is
        seasonal, and `m` is the detected period (0 when none is found, or
        the rejected user-supplied period).
    """
    ts._assert_univariate()
    if m is not None and (m < 2 or not isinstance(m, int)):
        raise_log(ValueError("m must be an integer greater than 1."), logger)
    if m is not None and m > max_lag:
        raise_log(ValueError("max_lag must be greater than or equal to m."), logger)
    # A constant series cannot be seasonal.
    if np.unique(ts.values()).shape[0] == 1:
        return False, 0
    # ACF up to max_lag, in case the user checks for seasonality beyond 24 steps.
    autocorr = acf(ts.values(), nlags=max_lag, fft=False)
    # Candidate periods are the local maxima of the ACF.
    local_maxima = argrelmax(autocorr)[0]
    if len(local_maxima) == 0:
        return False, 0
    if m is None:
        candidates = local_maxima
    elif m in local_maxima:
        # A user-supplied period must itself be a local maximum.
        candidates = [m]
    else:
        return False, m
    # Drop the lag-0 auto-correlation, which would bias the significance band.
    autocorr = autocorr[1:]
    # Non-adjusted upper limit of the significance interval.
    band_upper = autocorr.mean() + norm.ppf(1 - alpha / 2) * autocorr.var()
    # Significance test; stops at the first admissible candidate.  The two
    # '- 1' compensate for having dropped the lag-0 entry above.
    for candidate in candidates:
        threshold = _bartlett_formula(autocorr, candidate - 1, len(ts))
        if autocorr[candidate - 1] > threshold * band_upper:
            return True, candidate
    return False, 0
def _bartlett_formula(r: np.ndarray, m: int, length: int) -> float:
    """
    Computes the standard error of `r` at order `m` with respect to `length`
    according to Bartlett's formula.

    Parameters
    ----------
    r
        The array whose standard error is to be computed.
    m
        The order of the standard error.
    length
        The size of the underlying sample to be used.

    Returns
    -------
    float
        The standard error of `r` with order `m`.
    """
    # For m == 1 the slice below is empty, so this reduces to sqrt(1 / length),
    # which is exactly Bartlett's base case.
    sum_of_squares = sum(x ** 2 for x in r[: m - 1])
    return math.sqrt((1 + 2 * sum_of_squares) / length)
def extract_trend_and_seasonality(
    ts: TimeSeries,
    freq: int = None,
    model: Union[SeasonalityMode, ModelMode] = ModelMode.MULTIPLICATIVE,
    method: str = "naive",
    **kwargs,
) -> Tuple[TimeSeries, TimeSeries]:
    """
    Extracts trend and seasonality from a TimeSeries instance using `statsmodels.tsa`.

    Parameters
    ----------
    ts
        The series to decompose.
    freq
        The seasonality period to use.
    model
        The type of decomposition to use; a ``ModelMode`` or
        ``SeasonalityMode`` Enum member (``MULTIPLICATIVE`` or ``ADDITIVE``).
        Defaults to ``ModelMode.MULTIPLICATIVE``.
    method
        The decomposition method:
        - "naive" : Seasonal decomposition using moving averages [1]_.
        - "STL" : Season-Trend decomposition using LOESS [2]_. Only compatible with ``ADDITIVE`` model type.
    kwargs
        Other keyword arguments are passed down to the decomposition method.

    Returns
    -------
    Tuple[TimeSeries, TimeSeries]
        A tuple of (trend, seasonal) time series.

    References
    -------
    .. [1] https://www.statsmodels.org/devel/generated/statsmodels.tsa.seasonal.seasonal_decompose.html
    .. [2] https://www.statsmodels.org/devel/generated/statsmodels.tsa.seasonal.STL.html
    """
    ts._assert_univariate()
    raise_if_not(
        model in ModelMode or model in SeasonalityMode,
        f"Unknown value for model_mode: {model}.",
        logger,
    )
    raise_if_not(
        model is not SeasonalityMode.NONE,
        "The model must be either MULTIPLICATIVE or ADDITIVE.",
    )
    if method == "naive":
        decomposition = seasonal_decompose(
            ts.pd_series(), period=freq, model=model.value, extrapolate_trend="freq"
        )
    elif method == "STL":
        raise_if_not(
            model in [SeasonalityMode.ADDITIVE, ModelMode.ADDITIVE],
            f"Only ADDITIVE model is compatible with the STL method. Current model is {model}.",
            logger,
        )
        decomposition = STL(endog=ts.pd_series(), period=freq, **kwargs).fit()
    else:
        raise_log(ValueError(f"Unknown value for method: {method}"), logger)

    def _wrap(values):
        # Re-attach the original time axis and metadata to a decomposed component.
        return TimeSeries.from_times_and_values(
            ts.time_index,
            values,
            static_covariates=ts.static_covariates,
            hierarchy=ts.hierarchy,
        )

    return _wrap(decomposition.trend), _wrap(decomposition.seasonal)
def remove_from_series(
    ts: TimeSeries, other: TimeSeries, model: Union[SeasonalityMode, ModelMode]
) -> TimeSeries:
    """
    Removes the TimeSeries `other` from the TimeSeries `ts` as specified by `model`.
    Use e.g. to remove an additive or multiplicative trend from a series.

    Parameters
    ----------
    ts
        The TimeSeries to be modified.
    other
        The TimeSeries to remove.
    model
        The type of model considered.
        Must be `from darts import ModelMode, SeasonalityMode` Enums member.
        Either MULTIPLICATIVE or ADDITIVE.

    Returns
    -------
    TimeSeries
        A TimeSeries defined by removing `other` from `ts`.
    """
    ts._assert_univariate()
    raise_if_not(
        model in ModelMode or model in SeasonalityMode,
        f"Unknown value for model_mode: {model}.",
        logger,
    )
    if model.value == "multiplicative":
        new_ts = ts / other
    elif model.value == "additive":
        new_ts = ts - other
    else:
        # Consistency fix: pass the module logger like every other
        # raise_log/raise_if_not call in this module does.
        raise_log(
            ValueError(
                "Invalid parameter; must be either ADDITIVE or MULTIPLICATIVE. Was: {}".format(
                    model
                )
            ),
            logger,
        )
    return new_ts
def remove_seasonality(
    ts: TimeSeries,
    freq: int = None,
    model: SeasonalityMode = SeasonalityMode.MULTIPLICATIVE,
    method: str = "naive",
    **kwargs,
) -> TimeSeries:
    """
    Adjusts the TimeSeries `ts` for a seasonality of order `freq` using the
    `model` decomposition.

    Parameters
    ----------
    ts
        The TimeSeries to adjust.
    freq
        The seasonality period to use.
    model
        The type of decomposition to use; a ``SeasonalityMode`` Enum member
        (``MULTIPLICATIVE`` or ``ADDITIVE``).
        Defaults to ``SeasonalityMode.MULTIPLICATIVE``.
    method
        The decomposition method:
        - "naive" : Seasonal decomposition using moving averages [1]_.
        - "STL" : Season-Trend decomposition using LOESS [2]_. Only compatible with ``ADDITIVE`` model type.
        Defaults to "naive".
    kwargs
        Other keyword arguments are passed down to the decomposition method.

    Returns
    -------
    TimeSeries
        A new TimeSeries instance that corresponds to the seasonality-adjusted 'ts'.

    References
    -------
    .. [1] https://www.statsmodels.org/devel/generated/statsmodels.tsa.seasonal.seasonal_decompose.html
    .. [2] https://www.statsmodels.org/devel/generated/statsmodels.tsa.seasonal.STL.html
    """
    ts._assert_univariate()
    raise_if_not(
        model is not SeasonalityMode.NONE,
        "The model must be either MULTIPLICATIVE or ADDITIVE.",
    )
    raise_if(
        model not in [SeasonalityMode.ADDITIVE, ModelMode.ADDITIVE] and method == "STL",
        f"Only ADDITIVE seasonality is compatible with the STL method. Current model is {model}.",
        logger,
    )
    # Decompose, keep only the seasonal component, and strip it from the series.
    _, seasonal_component = extract_trend_and_seasonality(ts, freq, model, method, **kwargs)
    return remove_from_series(ts, seasonal_component, model)
def remove_trend(
    ts: TimeSeries,
    model: ModelMode = ModelMode.MULTIPLICATIVE,
    method: str = "naive",
    **kwargs,
) -> TimeSeries:
    """
    Adjusts the TimeSeries `ts` for a trend using the `model` decomposition.

    Parameters
    ----------
    ts
        The TimeSeries to adjust.
    model
        The type of decomposition to use; a ``ModelMode`` Enum member
        (``MULTIPLICATIVE`` or ``ADDITIVE``).
        Defaults to ``ModelMode.MULTIPLICATIVE``.
    method
        The decomposition method:
        - "naive" : Seasonal decomposition using moving averages [1]_.
        - "STL" : Season-Trend decomposition using LOESS [2]_. Only compatible with ``ADDITIVE`` model type.
        Defaults to "naive".
    kwargs
        Other keyword arguments are passed down to the decomposition method.

    Returns
    -------
    TimeSeries
        A new TimeSeries instance that corresponds to the trend-adjusted 'ts'.
    """
    ts._assert_univariate()
    raise_if(
        model not in [SeasonalityMode.ADDITIVE, ModelMode.ADDITIVE] and method == "STL",
        f"Only ADDITIVE seasonality is compatible with the STL method. Current model is {model}.",
        logger,
    )
    # Decompose, keep only the trend component, and strip it from the series.
    trend_component, _ = extract_trend_and_seasonality(ts, model=model, method=method, **kwargs)
    return remove_from_series(ts, trend_component, model)
def stationarity_tests(
    ts: TimeSeries,
    p_value_threshold_adfuller: float = 0.05,
    p_value_threshold_kpss: float = 0.05,
) -> bool:
    """
    Double test on stationarity combining the Kwiatkowski-Phillips-Schmidt-Shin
    and Augmented Dickey-Fuller statistical tests.

    WARNING
    Augmented Dickey-Fuller tests the null hypothesis that `ts` IS NOT
    stationary while Kwiatkowski-Phillips-Schmidt-Shin tests that `ts` IS
    stationary, so the two p_value thresholds cannot in general be chosen
    identically; keeping both at 0.05 seems reasonable.  If other thresholds
    are tested, they have to go in opposite directions (for example,
    p_value_threshold_adfuller = 0.01 and p_value_threshold_kpss = 0.1).

    Parameters
    ----------
    ts
        The TimeSeries to test.
    p_value_threshold_adfuller
        p_value threshold to reject stationarity for the Augmented Dickey-Fuller test.
    p_value_threshold_kpss
        p_value threshold to reject non-stationarity for the Kwiatkowski-Phillips-Schmidt-Shin test.

    Returns
    -------
    bool
        Whether `ts` is stationary.
    """
    # Index 1 of each result tuple holds the test's p-value.
    adf_p_value = stationarity_test_adf(ts)[1]
    kpss_p_value = stationarity_test_kpss(ts)[1]
    # ADF must reject its null (non-stationary) AND KPSS must fail to reject
    # its null (stationary).
    return adf_p_value < p_value_threshold_adfuller and kpss_p_value > p_value_threshold_kpss
def stationarity_test_kpss(
    ts: TimeSeries, regression: str = "c", nlags: Union[str, int] = "auto"
) -> set:
    """
    Kwiatkowski-Phillips-Schmidt-Shin stationarity test for a time series,
    delegating to :func:`statsmodels.tsa.stattools.kpss`. See [1]_.

    Parameters
    ----------
    ts
        The time series to test.
    regression
        The null hypothesis for the KPSS test.
        'c' : The data is stationary around a constant (default).
        'ct' : The data is stationary around a trend.
    nlags
        Number of lags to use. If 'auto' (default), lags is calculated using the data-dependent method
        of Hobijn et al. (1998). See also Andrews (1991), Newey & West (1994), and Schwert (1989). If set to 'legacy',
        uses int(12 * (n / 100)**(1 / 4)) , as outlined in Schwert (1989).

    Returns
    -------
    set
        | kpss_stat: The test statistic.
        | pvalue: The p-value of the test, interpolated from Table 1 in [2]_;
        | a boundary point is returned if the test statistic is outside the table of critical values,
        | that is, if the p-value is outside the interval (0.01, 0.1).
        | lags: The truncation lag parameter.
        | crit: The critical values at 10%, 5%, 2.5% and 1%. Based on [2]_.

    References
    ----------
    .. [1] https://www.statsmodels.org/dev/generated/statsmodels.tsa.stattools.kpss.html
    .. [2] Kwiatkowski et al. (1992)
    """
    ts._assert_univariate()
    ts._assert_deterministic()
    # copy=False: the values are only read by the statsmodels test.
    series_values = ts.values(copy=False)
    return kpss(series_values, regression, nlags)
def stationarity_test_adf(
    ts: TimeSeries,
    maxlag: Union[None, int] = None,
    regression: str = "c",
    autolag: Union[None, str] = "AIC",
) -> tuple:
    """
    Provides the Augmented Dickey-Fuller unit root test for a time series,
    using :func:`statsmodels.tsa.stattools.adfuller`. See [1]_.

    Parameters
    ----------
    ts
        The time series to test.
    maxlag
        Maximum lag which is included in test, default value of 12*(nobs/100)^{1/4} is used when None.
    regression
        Constant and trend order to include in regression.
        "c" : constant only (default).
        "ct" : constant and trend.
        "ctt" : constant, and linear and quadratic trend.
        "n" : no constant, no trend.
    autolag
        Method to use when automatically determining the lag length among the values 0, 1, …, maxlag.
        If "AIC" (default) or "BIC", then the number of lags is chosen to minimize the corresponding
        information criterion. "t-stat" based choice of maxlag. Starts with maxlag and drops a lag
        until the t-statistic on the last lag length is significant using a 5%-sized test.
        If None, then the number of included lags is set to maxlag.

    Returns
    -------
    tuple
        | adf: The test statistic.
        | pvalue: MacKinnon's approximate p-value based on [2]_.
        | usedlag: The number of lags used.
        | nobs: The number of observations used for the ADF regression and calculation of the critical values.
        | critical: Critical values for the test statistic at the 1 %, 5 %, and 10 % levels. Based on [2]_.
        | icbest: The maximized information criterion if autolag is not None.

    References
    ----------
    .. [1] https://www.statsmodels.org/dev/generated/statsmodels.tsa.stattools.adfuller.html
    .. [2] MacKinnon (1994, 2010)
    """
    # The test only makes sense for a single deterministic component.
    ts._assert_univariate()
    ts._assert_deterministic()
    # NOTE: statsmodels' adfuller returns a tuple, not a set — the return annotation
    # previously said `set`, which was incorrect.
    return adfuller(ts.values(copy=False), maxlag, regression, autolag)
def granger_causality_tests(
    ts_cause: TimeSeries,
    ts_effect: TimeSeries,
    maxlag: int,
    addconst: bool = True,
    verbose: bool = True,
) -> dict:
    """
    Provides four tests for granger non causality of 2 time series using
    :func:`statsmodels.tsa.stattools.grangercausalitytests`.
    See [1]_.

    Parameters
    ----------
    ts_cause
        A univariate deterministic time series. The statistical test determines if this time series
        'Granger causes' the time series ts_effect (second parameter). Missing values are not supported.
        if H_0 (non causality) is rejected (p near 0), then there is a 'granger causality'.
    ts_effect
        Univariate time series 'Granger caused' by ts_cause.
    maxlag
        If an integer, computes the test for all lags up to maxlag.
        If an iterable, computes the tests only for the lags in maxlag.
    addconst
        Include a constant in the model.
    verbose
        Print results.

    Returns
    -------
    dict
        All test results, dictionary keys are the number of lags. For each lag the values are a tuple,
        with the first element a dictionary with test statistic, pvalues, degrees of freedom, the second element are
        the OLS estimation results for the restricted model, the unrestricted model and the restriction (contrast)
        matrix for the parameter f_test.

    References
    ----------
    .. [1] https://www.statsmodels.org/dev/generated/statsmodels.tsa.stattools.grangercausalitytests.html
    """
    # The statsmodels test requires two univariate, deterministic series.
    ts_cause._assert_univariate()
    ts_effect._assert_univariate()
    ts_cause._assert_deterministic()
    ts_effect._assert_deterministic()
    raise_if_not(
        ts_cause.freq == ts_effect.freq,
        "ts_cause and ts_effect must have the same frequency.",
    )

    # Align the two series on their common time span before testing.
    if not ts_cause.has_same_time_as(ts_effect):
        logger.warning(
            "ts_cause and ts_effect time series have different time index. "
            "We will slice-intersect ts_cause with ts_effect."
        )
        ts_cause = ts_cause.slice_intersect(ts_effect)
        ts_effect = ts_effect.slice_intersect(ts_cause)

    # The Granger test assumes stationary inputs — warn (but do not fail) otherwise.
    if not stationarity_tests(ts_cause):
        logger.warning(
            "ts_cause doesn't seem to be stationary. Please review granger causality validity in your problem context."
        )
    if not stationarity_tests(ts_effect):
        logger.warning(
            "ts_effect doesn't seem to be stationary. Please review granger causality validity in your problem context."
        )

    # grangercausalitytests expects a 2-column array: [effect, cause].
    # NOTE: the return annotation previously said `None` although the function
    # returns the statsmodels result dict (as the docstring already stated).
    return grangercausalitytests(
        np.concatenate(
            (ts_effect.values(copy=False), ts_cause.values(copy=False)), axis=1
        ),
        maxlag,
        addconst,
        verbose,
    )
def plot_acf(
    ts: TimeSeries,
    m: Optional[int] = None,
    max_lag: int = 24,
    alpha: float = 0.05,
    bartlett_confint: bool = True,
    fig_size: Tuple[int, int] = (10, 5),
    axis: Optional[plt.axis] = None,
) -> None:
    """
    Plots the ACF of `ts`, highlighting it at lag `m`, with the corresponding significance
    interval. Uses :func:`statsmodels.tsa.stattools.acf` [1]_

    Parameters
    ----------
    ts
        The TimeSeries whose ACF should be plotted.
    m
        Optionally, a time lag to highlight on the plot.
    max_lag
        The maximal lag order to consider.
    alpha
        The confidence interval to display.
    bartlett_confint
        The boolean value indicating whether the confidence interval should be
        calculated using Bartlett's formula. If set to True, the confidence interval
        can be used in the model identification stage for fitting ARIMA models.
        If set to False, the confidence interval can be used to test for randomness
        (i.e. there is no time dependence in the data) of the data.
    fig_size
        The size of the figure to be displayed.
    axis
        Optionally, an axis object to plot the ACF on.

    References
    ----------
    .. [1] https://www.statsmodels.org/dev/generated/statsmodels.tsa.stattools.acf.html
    """
    ts._assert_univariate()
    # Validate arguments before any computation.
    raise_if(
        max_lag is None or not (1 <= max_lag < len(ts)),
        "max_lag must be greater than or equal to 1 and less than len(ts).",
    )
    raise_if(
        m is not None and not (0 <= m <= max_lag),
        "m must be greater than or equal to 0 and less than or equal to max_lag.",
    )
    raise_if(
        alpha is None or not (0 < alpha < 1),
        "alpha must be greater than 0 and less than 1.",
    )

    acf_values, confint = acf(
        ts.values(),
        nlags=max_lag,
        fft=False,
        alpha=alpha,
        bartlett_confint=bartlett_confint,
    )

    if axis is None:
        plt.figure(figsize=fig_size)
        axis = plt

    # One vertical bar per lag; the highlighted lag `m` is drawn thicker and colored.
    for lag, value in enumerate(acf_values):
        is_highlighted = m is not None and lag == m
        axis.plot(
            (lag, lag),
            (0, value),
            color="#b512b8" if is_highlighted else "black",
            lw=1 if is_highlighted else 0.5,
        )

    # Re-center the upper band of the confidence interval on the x axis.
    upp_band = [confint[lag][1] - acf_values[lag] for lag in range(1, max_lag + 1)]
    low_band = [-x for x in upp_band]
    axis.fill_between(
        np.arange(1, max_lag + 1),
        upp_band,
        low_band,
        color="#003DFD",
        alpha=0.25,
    )
    axis.plot((0, max_lag + 1), (0, 0), color="black")
def plot_pacf(
    ts: TimeSeries,
    m: Optional[int] = None,
    max_lag: int = 24,
    method: str = "ywadjusted",
    alpha: float = 0.05,
    fig_size: Tuple[int, int] = (10, 5),
    axis: Optional[plt.axis] = None,
) -> None:
    """
    Plots the Partial ACF of `ts`, highlighting it at lag `m`, with the corresponding
    significance interval. Uses :func:`statsmodels.tsa.stattools.pacf` [1]_

    Parameters
    ----------
    ts
        The TimeSeries whose ACF should be plotted.
    m
        Optionally, a time lag to highlight on the plot.
    max_lag
        The maximal lag order to consider.
    method
        The method to be used for the PACF calculation.
        - | "yw" or "ywadjusted" : Yule-Walker with sample-size adjustment in
          | denominator for acovf. Default.
        - "ywm" or "ywmle" : Yule-Walker without adjustment.
        - "ols" : regression of time series on lags of it and on constant.
        - "ols-inefficient" : regression of time series on lags using a single
          common sample to estimate all pacf coefficients.
        - "ols-adjusted" : regression of time series on lags with a bias
          adjustment.
        - "ld" or "ldadjusted" : Levinson-Durbin recursion with bias
          correction.
        - "ldb" or "ldbiased" : Levinson-Durbin recursion without bias
          correction.
    alpha
        The confidence interval to display.
    fig_size
        The size of the figure to be displayed.
    axis
        Optionally, an axis object to plot the ACF on.

    References
    ----------
    .. [1] https://www.statsmodels.org/dev/generated/statsmodels.tsa.stattools.pacf.html
    """
    ts._assert_univariate()
    # Validate arguments before any computation. Note the stricter len(ts)//2
    # bound compared to plot_acf — pacf estimation needs more data per lag.
    raise_if(
        max_lag is None or not (1 <= max_lag < len(ts) // 2),
        "max_lag must be greater than or equal to 1 and less than len(ts)//2.",
    )
    raise_if(
        m is not None and not (0 <= m <= max_lag),
        "m must be greater than or equal to 0 and less than or equal to max_lag.",
    )
    raise_if(
        alpha is None or not (0 < alpha < 1),
        "alpha must be greater than 0 and less than 1.",
    )

    pacf_values, confint = pacf(ts.values(), nlags=max_lag, method=method, alpha=alpha)

    if axis is None:
        plt.figure(figsize=fig_size)
        axis = plt

    # One vertical bar per lag; the highlighted lag `m` is drawn thicker and colored.
    for lag, value in enumerate(pacf_values):
        is_highlighted = m is not None and lag == m
        axis.plot(
            (lag, lag),
            (0, value),
            color="#b512b8" if is_highlighted else "black",
            lw=1 if is_highlighted else 0.5,
        )

    # Re-center the upper band of the confidence interval on the x axis.
    upp_band = [confint[lag][1] - pacf_values[lag] for lag in range(1, max_lag + 1)]
    low_band = [-x for x in upp_band]
    axis.fill_between(
        np.arange(1, max_lag + 1),
        upp_band,
        low_band,
        color="#003DFD",
        alpha=0.25,
    )
    axis.plot((0, max_lag + 1), (0, 0), color="black")
def plot_hist(
    data: Union[TimeSeries, List[float], np.ndarray],
    bins: Optional[Union[int, np.ndarray, List[float]]] = None,
    density: bool = False,
    title: Optional[str] = None,
    fig_size: Optional[Tuple[int, int]] = None,
    ax: Optional[plt.axis] = None,
) -> None:
    """Plots the histogram of values in a TimeSeries instance or an array-like.

    All types of TimeSeries are supported (uni-, multivariate, deterministic, stochastic).
    Depending on the number of components in `data`, up to four histograms can be plotted on
    one figure. All stochastic samples will be displayed with the corresponding component.
    If `data` is an array-like, ALL values will be displayed in the same histogram.

    Parameters
    ----------
    data
        TimeSeries instance or an array-like from which to plot the histogram.
    bins
        Optionally, either an integer value for the number of bins to be displayed
        or an array-like of floats determining the position of bins.
    density
        bool, if `density` is set to True, the bin counts will be converted to probability density
    title
        The title of the figure to be displayed
    fig_size
        The size of the figure to be displayed.
    ax
        Optionally, an axis object to plot the histogram on.
    """
    n_plots_max = 4
    if isinstance(data, TimeSeries):
        # At most four components fit on a single figure.
        if len(data.components) > n_plots_max:
            logger.warning(
                "TimeSeries contains more than 4 components. Only the first 4 components will be displayed."
            )
        components = list(data.components[:n_plots_max])
        values = data[components].all_values(copy=False).flatten(order="F")
    else:
        values = data if isinstance(data, np.ndarray) else np.array(data)
        if len(values.shape) > 1:
            logger.warning(
                "Input array-like data with `dim>1d` will be flattened and displayed in one histogram."
            )
        components = ["Data"]
        values = values.flatten(order="F")

    # Grid layout: a single column for one component, two columns otherwise.
    n_components = len(components)
    n_cols = 1 if n_components == 1 else 2
    n_rows = math.ceil(n_components / n_cols)
    title = "Histogram" if title is None else title

    if ax is not None:
        # A user-supplied axis can hold only one histogram.
        if n_components > 1:
            logger.warning(
                "Only the first component is plotted when calling plot_hist() with a given `ax`"
            )
        ax.set_title(title)
        ax_all = [ax]
    else:
        fig = plt.figure(constrained_layout=True, figsize=fig_size)
        gs = fig.add_gridspec(n_rows, n_cols)
        fig.suptitle(title)
        ax_all = [fig.add_subplot(gs[i]) for i in range(n_components)]

    # `values` is laid out component-by-component (Fortran-order flatten above),
    # so each component owns a contiguous, equally-sized slice.
    n_entries = len(values) // n_components
    for idx, (label, ax_i) in enumerate(zip(components, ax_all)):
        ax_i.hist(
            values[idx * n_entries : (idx + 1) * n_entries],
            bins=bins,
            density=density,
            label=label,
        )
        ax_i.set_xlabel("value")
        ax_i.set_ylabel("count" if not density else "probability density")
        ax_i.legend()
def plot_residuals_analysis(
    residuals: TimeSeries, num_bins: int = 20, fill_nan: bool = True
) -> None:
    """Plots data relevant to residuals.

    This function takes a univariate TimeSeries instance of residuals and plots their values,
    their distribution and their ACF.
    Please note that if the residual TimeSeries instance contains NaN values, the plots
    might be displayed incorrectly. If `fill_nan` is set to True, the missing values will
    be interpolated.

    Parameters
    ----------
    residuals
        Univariate TimeSeries instance representing residuals.
    num_bins
        Optionally, an integer value determining the number of bins in the histogram.
    fill_nan
        A boolean value indicating whether NaN values should be filled in the residuals.
    """
    residuals._assert_univariate()
    fig = plt.figure(constrained_layout=True, figsize=(8, 6))
    gs = fig.add_gridspec(2, 2)

    if fill_nan:
        residuals = fill_missing_values(residuals)

    # Top row: residual values over time.
    values_ax = fig.add_subplot(gs[:1, :])
    residuals.plot(ax=values_ax)
    values_ax.set_ylabel("value")
    values_ax.set_title("Residual values")

    # Bottom right: histogram with a fitted normal density overlaid.
    sample = residuals.univariate_values()
    res_mean = np.mean(sample)
    res_std = np.std(sample)
    res_min = min(sample)
    res_max = max(sample)
    grid = np.linspace(res_min, res_max, 100)
    hist_ax = fig.add_subplot(gs[1:, 1:])
    plot_hist(residuals, bins=num_bins, ax=hist_ax)
    # Scale the pdf so its area matches the histogram's absolute counts.
    count_scale = len(residuals) * (res_max - res_min) / num_bins
    hist_ax.plot(grid, norm(res_mean, res_std).pdf(grid) * count_scale)
    hist_ax.yaxis.set_major_locator(plt.MaxNLocator(integer=True))
    hist_ax.set_title("Distribution")
    hist_ax.set_ylabel("count")
    hist_ax.set_xlabel("value")

    # Bottom left: autocorrelation of the residuals.
    acf_ax = fig.add_subplot(gs[1:, :1])
    plot_acf(residuals, axis=acf_ax)
    acf_ax.set_ylabel("ACF value")
    acf_ax.set_xlabel("lag")
    acf_ax.set_title("ACF")
| [
"scipy.stats.norm.ppf",
"darts.TimeSeries.from_times_and_values",
"scipy.stats.norm",
"darts.logging.raise_if",
"darts.logging.raise_if_not",
"math.sqrt",
"math.ceil",
"matplotlib.pyplot.figure",
"scipy.signal.argrelmax",
"numpy.arange",
"numpy.array",
"numpy.linspace",
"matplotlib.pyplot.Ma... | [((586, 606), 'darts.logging.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (596, 606), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((4899, 5014), 'darts.logging.raise_if_not', 'raise_if_not', (['(model in ModelMode or model in SeasonalityMode)', 'f"""Unknown value for model_mode: {model}."""', 'logger'], {}), "(model in ModelMode or model in SeasonalityMode,\n f'Unknown value for model_mode: {model}.', logger)\n", (4911, 5014), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((5046, 5153), 'darts.logging.raise_if_not', 'raise_if_not', (['(model is not SeasonalityMode.NONE)', '"""The model must be either MULTIPLICATIVE or ADDITIVE."""'], {}), "(model is not SeasonalityMode.NONE,\n 'The model must be either MULTIPLICATIVE or ADDITIVE.')\n", (5058, 5153), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((5800, 5932), 'darts.TimeSeries.from_times_and_values', 'TimeSeries.from_times_and_values', (['ts.time_index', 'decomp.seasonal'], {'static_covariates': 'ts.static_covariates', 'hierarchy': 'ts.hierarchy'}), '(ts.time_index, decomp.seasonal,\n static_covariates=ts.static_covariates, hierarchy=ts.hierarchy)\n', (5832, 5932), False, 'from darts import TimeSeries\n'), ((5980, 6109), 'darts.TimeSeries.from_times_and_values', 'TimeSeries.from_times_and_values', (['ts.time_index', 'decomp.trend'], {'static_covariates': 'ts.static_covariates', 'hierarchy': 'ts.hierarchy'}), '(ts.time_index, decomp.trend,\n static_covariates=ts.static_covariates, hierarchy=ts.hierarchy)\n', (6012, 6109), False, 'from darts import TimeSeries\n'), ((6890, 7005), 'darts.logging.raise_if_not', 'raise_if_not', (['(model in ModelMode or model in SeasonalityMode)', 'f"""Unknown value for model_mode: {model}."""', 'logger'], {}), "(model in ModelMode or model in SeasonalityMode,\n f'Unknown value for model_mode: {model}.', logger)\n", (6902, 
7005), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((8810, 8917), 'darts.logging.raise_if_not', 'raise_if_not', (['(model is not SeasonalityMode.NONE)', '"""The model must be either MULTIPLICATIVE or ADDITIVE."""'], {}), "(model is not SeasonalityMode.NONE,\n 'The model must be either MULTIPLICATIVE or ADDITIVE.')\n", (8822, 8917), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((8941, 9143), 'darts.logging.raise_if', 'raise_if', (["(model not in [SeasonalityMode.ADDITIVE, ModelMode.ADDITIVE] and method ==\n 'STL')", 'f"""Only ADDITIVE seasonality is compatible with the STL method. Current model is {model}."""', 'logger'], {}), "(model not in [SeasonalityMode.ADDITIVE, ModelMode.ADDITIVE] and \n method == 'STL',\n f'Only ADDITIVE seasonality is compatible with the STL method. Current model is {model}.'\n , logger)\n", (8949, 9143), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((10367, 10569), 'darts.logging.raise_if', 'raise_if', (["(model not in [SeasonalityMode.ADDITIVE, ModelMode.ADDITIVE] and method ==\n 'STL')", 'f"""Only ADDITIVE seasonality is compatible with the STL method. Current model is {model}."""', 'logger'], {}), "(model not in [SeasonalityMode.ADDITIVE, ModelMode.ADDITIVE] and \n method == 'STL',\n f'Only ADDITIVE seasonality is compatible with the STL method. 
Current model is {model}.'\n , logger)\n", (10375, 10569), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((17331, 17436), 'darts.logging.raise_if_not', 'raise_if_not', (['(ts_cause.freq == ts_effect.freq)', '"""ts_cause and ts_effect must have the same frequency."""'], {}), "(ts_cause.freq == ts_effect.freq,\n 'ts_cause and ts_effect must have the same frequency.')\n", (17343, 17436), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((19896, 20024), 'darts.logging.raise_if', 'raise_if', (['(m is not None and not 0 <= m <= max_lag)', '"""m must be greater than or equal to 0 and less than or equal to max_lag."""'], {}), "(m is not None and not 0 <= m <= max_lag,\n 'm must be greater than or equal to 0 and less than or equal to max_lag.')\n", (19904, 20024), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((20050, 20147), 'darts.logging.raise_if', 'raise_if', (['(alpha is None or not 0 < alpha < 1)', '"""alpha must be greater than 0 and less than 1."""'], {}), "(alpha is None or not 0 < alpha < 1,\n 'alpha must be greater than 0 and less than 1.')\n", (20058, 20147), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((22861, 22989), 'darts.logging.raise_if', 'raise_if', (['(m is not None and not 0 <= m <= max_lag)', '"""m must be greater than or equal to 0 and less than or equal to max_lag."""'], {}), "(m is not None and not 0 <= m <= max_lag,\n 'm must be greater than or equal to 0 and less than or equal to max_lag.')\n", (22869, 22989), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((23015, 23112), 'darts.logging.raise_if', 'raise_if', (['(alpha is None or not 0 < alpha < 1)', '"""alpha must be greater than 0 and less than 1."""'], {}), "(alpha is None or not 0 < alpha < 1,\n 'alpha must be greater than 0 and less than 1.')\n", (23023, 23112), False, 'from darts.logging import 
get_logger, raise_if, raise_if_not, raise_log\n'), ((26138, 26170), 'math.ceil', 'math.ceil', (['(n_components / n_cols)'], {}), '(n_components / n_cols)\n', (26147, 26170), False, 'import math\n'), ((27942, 27993), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'constrained_layout': '(True)', 'figsize': '(8, 6)'}), '(constrained_layout=True, figsize=(8, 6))\n', (27952, 27993), True, 'import matplotlib.pyplot as plt\n'), ((28514, 28548), 'numpy.linspace', 'np.linspace', (['res_min', 'res_max', '(100)'], {}), '(res_min, res_max, 100)\n', (28525, 28548), True, 'import numpy as np\n'), ((2049, 2061), 'scipy.signal.argrelmax', 'argrelmax', (['r'], {}), '(r)\n', (2058, 2061), False, 'from scipy.signal import argrelmax\n'), ((3411, 3432), 'math.sqrt', 'math.sqrt', (['(1 / length)'], {}), '(1 / length)\n', (3420, 3432), False, 'import math\n'), ((20355, 20383), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'fig_size'}), '(figsize=fig_size)\n', (20365, 20383), True, 'import matplotlib.pyplot as plt\n'), ((20825, 20850), 'numpy.arange', 'np.arange', (['(1)', '(max_lag + 1)'], {}), '(1, max_lag + 1)\n', (20834, 20850), True, 'import numpy as np\n'), ((23243, 23271), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'fig_size'}), '(figsize=fig_size)\n', (23253, 23271), True, 'import matplotlib.pyplot as plt\n'), ((23713, 23738), 'numpy.arange', 'np.arange', (['(1)', '(max_lag + 1)'], {}), '(1, max_lag + 1)\n', (23722, 23738), True, 'import numpy as np\n'), ((26257, 26310), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'constrained_layout': '(True)', 'figsize': 'fig_size'}), '(constrained_layout=True, figsize=fig_size)\n', (26267, 26310), True, 'import matplotlib.pyplot as plt\n'), ((28812, 28841), 'matplotlib.pyplot.MaxNLocator', 'plt.MaxNLocator', ([], {'integer': '(True)'}), '(integer=True)\n', (28827, 28841), True, 'import matplotlib.pyplot as plt\n'), ((2494, 2517), 'scipy.stats.norm.ppf', 'norm.ppf', (['(1 - alpha / 2)'], {}), '(1 - alpha / 
2)\n', (2502, 2517), False, 'from scipy.stats import norm\n'), ((5368, 5539), 'darts.logging.raise_if_not', 'raise_if_not', (['(model in [SeasonalityMode.ADDITIVE, ModelMode.ADDITIVE])', 'f"""Only ADDITIVE model is compatible with the STL method. Current model is {model}."""', 'logger'], {}), "(model in [SeasonalityMode.ADDITIVE, ModelMode.ADDITIVE],\n f'Only ADDITIVE model is compatible with the STL method. Current model is {model}.'\n , logger)\n", (5380, 5539), False, 'from darts.logging import get_logger, raise_if, raise_if_not, raise_log\n'), ((25686, 25700), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (25694, 25700), True, 'import numpy as np\n'), ((28668, 28691), 'scipy.stats.norm', 'norm', (['res_mean', 'res_std'], {}), '(res_mean, res_std)\n', (28672, 28691), False, 'from scipy.stats import norm\n')] |
# coding=utf-8
# Copyright 2022 The Deeplab2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for data_utils."""
import io
import numpy as np
from PIL import Image
import tensorflow as tf
from deeplab2.data import data_utils
def _encode_png_image(image):
    """Helper method returning the PNG-encoded bytes of the input image array."""
    with io.BytesIO() as encoded:
        Image.fromarray(image).save(encoded, format='png')
        return encoded.getvalue()
class DataUtilsTest(tf.test.TestCase):
    """Round-trip tests for tf.Example encoding/decoding in data_utils."""

    def _create_test_image(self, height, width):
        """Returns a deterministic pseudo-random uint8 RGB image of the given size."""
        # Fixed seed keeps the test reproducible across runs.
        rng = np.random.RandomState(319281498)
        return rng.randint(0, 255, size=(height, width, 3), dtype=np.uint8)

    def test_encode_and_decode(self):
        """Checks decode created tf.Example for semantic segmentation."""
        test_image_height = 20
        test_image_width = 15
        filename = 'dummy'
        image = self._create_test_image(test_image_height, test_image_width)
        # Take the first channel as dummy label.
        label = image[..., 0]
        example = data_utils.create_tfexample(
            image_data=_encode_png_image(image),
            image_format='png', filename=filename,
            label_data=_encode_png_image(label), label_format='png')

        # Parse created example, expect getting identical results.
        parser = data_utils.SegmentationDecoder(is_panoptic_dataset=False)
        parsed_tensors = parser(example.SerializeToString())
        self.assertIn('image', parsed_tensors)
        self.assertIn('image_name', parsed_tensors)
        self.assertIn('label', parsed_tensors)
        self.assertEqual(filename, parsed_tensors['image_name'])
        np.testing.assert_array_equal(image, parsed_tensors['image'].numpy())
        # Decoded label is a 3-D array with last dimension of 1.
        decoded_label = parsed_tensors['label'].numpy()
        np.testing.assert_array_equal(label, decoded_label[..., 0])

    def test_encode_and_decode_panoptic(self):
        """Checks decode created tf.Example for panoptic segmentation."""
        test_image_height = 31
        test_image_width = 17
        filename = 'dummy'
        image = self._create_test_image(test_image_height, test_image_width)
        # Create dummy panoptic label in np.int32 dtype.
        label = np.dot(image.astype(np.int32), [1, 256, 256 * 256]).astype(np.int32)
        example = data_utils.create_tfexample(
            image_data=_encode_png_image(image),
            image_format='png', filename=filename,
            # `ndarray.tostring` was deprecated and later removed from NumPy;
            # `tobytes` is the supported equivalent producing the same raw buffer.
            label_data=label.tobytes(), label_format='raw')

        parser = data_utils.SegmentationDecoder(is_panoptic_dataset=True)
        parsed_tensors = parser(example.SerializeToString())
        self.assertIn('image', parsed_tensors)
        self.assertIn('image_name', parsed_tensors)
        self.assertIn('label', parsed_tensors)
        self.assertEqual(filename, parsed_tensors['image_name'])
        np.testing.assert_array_equal(image, parsed_tensors['image'].numpy())
        # Decoded label is a 3-D array with last dimension of 1.
        decoded_label = parsed_tensors['label'].numpy()
        np.testing.assert_array_equal(label, decoded_label[..., 0])
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    tf.test.main()
| [
"tensorflow.test.main",
"deeplab2.data.data_utils.SegmentationDecoder",
"io.BytesIO",
"numpy.testing.assert_array_equal",
"numpy.random.RandomState",
"PIL.Image.fromarray"
] | [((845, 857), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (855, 857), False, 'import io\n'), ((3436, 3450), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (3448, 3450), True, 'import tensorflow as tf\n'), ((1036, 1068), 'numpy.random.RandomState', 'np.random.RandomState', (['(319281498)'], {}), '(319281498)\n', (1057, 1068), True, 'import numpy as np\n'), ((1746, 1803), 'deeplab2.data.data_utils.SegmentationDecoder', 'data_utils.SegmentationDecoder', ([], {'is_panoptic_dataset': '(False)'}), '(is_panoptic_dataset=False)\n', (1776, 1803), False, 'from deeplab2.data import data_utils\n'), ((2248, 2307), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['label', 'decoded_label[..., 0]'], {}), '(label, decoded_label[..., 0])\n', (2277, 2307), True, 'import numpy as np\n'), ((2844, 2900), 'deeplab2.data.data_utils.SegmentationDecoder', 'data_utils.SegmentationDecoder', ([], {'is_panoptic_dataset': '(True)'}), '(is_panoptic_dataset=True)\n', (2874, 2900), False, 'from deeplab2.data import data_utils\n'), ((3345, 3404), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['label', 'decoded_label[..., 0]'], {}), '(label, decoded_label[..., 0])\n', (3374, 3404), True, 'import numpy as np\n'), ((860, 882), 'PIL.Image.fromarray', 'Image.fromarray', (['image'], {}), '(image)\n', (875, 882), False, 'from PIL import Image\n')] |
import os
import shutil
import subprocess
from matplotlib import image
from numpy import testing as np
# Resolve the test directory, the repository root, and the directory holding
# the checked-in baseline images relative to this file's location.
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
PYDV_DIR = os.path.dirname(TEST_DIR)
BASELINE_DIR = os.path.join(TEST_DIR, 'baseline')
# ------------------------ #
# --- Prepare the data --- #
# ------------------------ #
# The output directory will store the generated images to compare against the baseline
output_dir = os.path.join(TEST_DIR, 'output')
# Start from a clean output directory so images left over from a previous run
# cannot mask failures in this one.
if os.path.exists(output_dir):
    shutil.rmtree(output_dir)
os.makedirs(output_dir)
# Generate a list of commands for PyDV to process. Between each command, we will
# place an "image" statement, which will cause PyDV to save the current state of
# the plot.
# Each entry is one PyDV interaction to snapshot; multi-line entries issue
# several commands before the snapshot is taken. The order here determines the
# test_image_NN numbering used by the assertions below.
commands = [
    f"""rd {os.path.join(TEST_DIR, "testData.txt")}
cur 1 2""",
    "legend off",
    "erase",
    """cur 1 2
L1 a b""",
    "L2 a b 3.0 5.5",
    "del c d",
    "color a blue",
    "color a red",
    "add a b",
    "annot FOO 3 7",
    "convolve a b",
    """del d
copy a""",
    "cos a",
    """del d
dashstyle b [2, 2, 4, 2]""",
    "dataid off",
    """dataid on
delannot 1""",
    "derivative a",
    """del d
dy b 2.5
dx b 3""",
    """dx b -3
divide c a""",
    """del d
divx c 2
divy c 2""",
    "dom 0 10",
    "dom de",
    "exp a",
    "log a",
    "grid off",
    """grid on
integrate a""",
    """del d
linespoints a on
marker a . 20""",
    "lnwidth b 10",
    """lnwidth b 3
makecurve (1 2 3) (5 2 3)""",
    """del d
mx c 2""",
    "my a 3",
    "recip a",
    "scatter b on",
    """scatter b off
cos b""",
    "acos b",
    "cosh b",
    "acosh b",
    "sin c",
    "asin c",
    "sinh c",
    "asinh c",
    "sqr b",
    "sqrt b",
    "sqrx b",
    "sqrtx b",
    "tan a",
    "atan a",
    "tanh a",
    "atanh a",
    "a - b",
    """del d
b ** 2""",
    "c / b",
    "smooth d",
    """dy d -3
abs d""",
    """erase
legend on
gaussian 1 1 5""",
    "exp A",
    "log A",
    "expx A",
    "logx A",
    """exp A
sin A
log A"""
]
# Write the PyDV command script: after every command block, an `image`
# statement saves the current plot state as a numbered PNG.
commands_file = os.path.join(output_dir, 'pydv_commands')
with open(commands_file, 'w') as fp:
    for i, command in enumerate(commands):
        image_file = os.path.join(output_dir, f"test_image_{i+1:02d}")
        fp.write(command)
        fp.write(f"\nimage {image_file} png\n\n")
    fp.write("\nquit")

# Execute PyDV non-interactively on the generated command script.
# Build argv as a list rather than splitting a formatted string, so paths that
# contain spaces are still passed to the subprocess as single arguments.
exec_command = [os.path.join(PYDV_DIR, 'pydv', 'pdv'), '-i', commands_file]
process = subprocess.Popen(exec_command, stdout=subprocess.PIPE)
output, error = process.communicate()
# ----------------- #
# --- Run tests --- #
# ----------------- #
def _assert_image_matches_baseline(name):
    """Compare one generated PNG against its checked-in baseline, pixel for pixel."""
    baseline = image.imread(os.path.join(BASELINE_DIR, name))
    output = image.imread(os.path.join(output_dir, name))
    np.assert_equal(baseline, output)


def test_image_01():
    _assert_image_matches_baseline('test_image_01.png')


def test_image_02():
    _assert_image_matches_baseline('test_image_02.png')


def test_image_03():
    _assert_image_matches_baseline('test_image_03.png')


def test_image_04():
    _assert_image_matches_baseline('test_image_04.png')


def test_image_05():
    _assert_image_matches_baseline('test_image_05.png')


def test_image_06():
    _assert_image_matches_baseline('test_image_06.png')


def test_image_07():
    _assert_image_matches_baseline('test_image_07.png')


def test_image_08():
    _assert_image_matches_baseline('test_image_08.png')


def test_image_09():
    _assert_image_matches_baseline('test_image_09.png')


def test_image_10():
    _assert_image_matches_baseline('test_image_10.png')


def test_image_11():
    _assert_image_matches_baseline('test_image_11.png')


def test_image_12():
    _assert_image_matches_baseline('test_image_12.png')


def test_image_13():
    _assert_image_matches_baseline('test_image_13.png')


def test_image_14():
    _assert_image_matches_baseline('test_image_14.png')


def test_image_15():
    _assert_image_matches_baseline('test_image_15.png')


def test_image_16():
    _assert_image_matches_baseline('test_image_16.png')


def test_image_17():
    _assert_image_matches_baseline('test_image_17.png')


def test_image_18():
    _assert_image_matches_baseline('test_image_18.png')


def test_image_19():
    _assert_image_matches_baseline('test_image_19.png')
def test_image_20():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_20.png'))
output = image.imread(os.path.join(output_dir, 'test_image_20.png'))
np.assert_equal(baseline, output)
def test_image_21():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_21.png'))
output = image.imread(os.path.join(output_dir, 'test_image_21.png'))
np.assert_equal(baseline, output)
def test_image_22():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_22.png'))
output = image.imread(os.path.join(output_dir, 'test_image_22.png'))
np.assert_equal(baseline, output)
def test_image_23():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_23.png'))
output = image.imread(os.path.join(output_dir, 'test_image_23.png'))
np.assert_equal(baseline, output)
def test_image_24():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_24.png'))
output = image.imread(os.path.join(output_dir, 'test_image_24.png'))
np.assert_equal(baseline, output)
def test_image_25():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_25.png'))
output = image.imread(os.path.join(output_dir, 'test_image_25.png'))
np.assert_equal(baseline, output)
def test_image_26():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_26.png'))
output = image.imread(os.path.join(output_dir, 'test_image_26.png'))
np.assert_equal(baseline, output)
def test_image_27():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_27.png'))
output = image.imread(os.path.join(output_dir, 'test_image_27.png'))
np.assert_equal(baseline, output)
def test_image_28():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_28.png'))
output = image.imread(os.path.join(output_dir, 'test_image_28.png'))
np.assert_equal(baseline, output)
def test_image_29():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_29.png'))
output = image.imread(os.path.join(output_dir, 'test_image_29.png'))
np.assert_equal(baseline, output)
def test_image_30():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_30.png'))
output = image.imread(os.path.join(output_dir, 'test_image_30.png'))
np.assert_equal(baseline, output)
def test_image_31():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_31.png'))
output = image.imread(os.path.join(output_dir, 'test_image_31.png'))
np.assert_equal(baseline, output)
def test_image_32():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_32.png'))
output = image.imread(os.path.join(output_dir, 'test_image_32.png'))
np.assert_equal(baseline, output)
def test_image_33():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_33.png'))
output = image.imread(os.path.join(output_dir, 'test_image_33.png'))
np.assert_equal(baseline, output)
def test_image_34():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_34.png'))
output = image.imread(os.path.join(output_dir, 'test_image_34.png'))
np.assert_equal(baseline, output)
def test_image_35():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_35.png'))
output = image.imread(os.path.join(output_dir, 'test_image_35.png'))
np.assert_equal(baseline, output)
def test_image_36():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_36.png'))
output = image.imread(os.path.join(output_dir, 'test_image_36.png'))
np.assert_equal(baseline, output)
def test_image_37():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_37.png'))
output = image.imread(os.path.join(output_dir, 'test_image_37.png'))
np.assert_equal(baseline, output)
def test_image_38():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_38.png'))
output = image.imread(os.path.join(output_dir, 'test_image_38.png'))
np.assert_equal(baseline, output)
def test_image_39():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_39.png'))
output = image.imread(os.path.join(output_dir, 'test_image_39.png'))
np.assert_equal(baseline, output)
def test_image_40():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_40.png'))
output = image.imread(os.path.join(output_dir, 'test_image_40.png'))
np.assert_equal(baseline, output)
def test_image_41():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_41.png'))
output = image.imread(os.path.join(output_dir, 'test_image_41.png'))
np.assert_equal(baseline, output)
def test_image_42():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_42.png'))
output = image.imread(os.path.join(output_dir, 'test_image_42.png'))
np.assert_equal(baseline, output)
def test_image_43():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_43.png'))
output = image.imread(os.path.join(output_dir, 'test_image_43.png'))
np.assert_equal(baseline, output)
def test_image_44():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_44.png'))
output = image.imread(os.path.join(output_dir, 'test_image_44.png'))
np.assert_equal(baseline, output)
def test_image_45():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_45.png'))
output = image.imread(os.path.join(output_dir, 'test_image_45.png'))
np.assert_equal(baseline, output)
def test_image_46():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_46.png'))
output = image.imread(os.path.join(output_dir, 'test_image_46.png'))
np.assert_equal(baseline, output)
def test_image_47():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_47.png'))
output = image.imread(os.path.join(output_dir, 'test_image_47.png'))
np.assert_equal(baseline, output)
def test_image_48():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_48.png'))
output = image.imread(os.path.join(output_dir, 'test_image_48.png'))
np.assert_equal(baseline, output)
def test_image_49():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_49.png'))
output = image.imread(os.path.join(output_dir, 'test_image_49.png'))
np.assert_equal(baseline, output)
def test_image_50():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_50.png'))
output = image.imread(os.path.join(output_dir, 'test_image_50.png'))
np.assert_equal(baseline, output)
def test_image_51():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_51.png'))
output = image.imread(os.path.join(output_dir, 'test_image_51.png'))
np.assert_equal(baseline, output)
def test_image_52():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_52.png'))
output = image.imread(os.path.join(output_dir, 'test_image_52.png'))
np.assert_equal(baseline, output)
def test_image_53():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_53.png'))
output = image.imread(os.path.join(output_dir, 'test_image_53.png'))
np.assert_equal(baseline, output)
def test_image_54():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_54.png'))
output = image.imread(os.path.join(output_dir, 'test_image_54.png'))
np.assert_equal(baseline, output)
def test_image_55():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_55.png'))
output = image.imread(os.path.join(output_dir, 'test_image_55.png'))
np.assert_equal(baseline, output)
def test_image_56():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_56.png'))
output = image.imread(os.path.join(output_dir, 'test_image_56.png'))
np.assert_equal(baseline, output)
def test_image_57():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_57.png'))
output = image.imread(os.path.join(output_dir, 'test_image_57.png'))
np.assert_equal(baseline, output)
def test_image_58():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_58.png'))
output = image.imread(os.path.join(output_dir, 'test_image_58.png'))
np.assert_equal(baseline, output)
def test_image_59():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_59.png'))
output = image.imread(os.path.join(output_dir, 'test_image_59.png'))
np.assert_equal(baseline, output)
def test_image_60():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_60.png'))
output = image.imread(os.path.join(output_dir, 'test_image_60.png'))
np.assert_equal(baseline, output)
| [
"os.path.abspath",
"os.makedirs",
"os.path.dirname",
"os.path.exists",
"numpy.testing.assert_equal",
"shutil.rmtree",
"os.path.join"
] | [((171, 196), 'os.path.dirname', 'os.path.dirname', (['TEST_DIR'], {}), '(TEST_DIR)\n', (186, 196), False, 'import os\n'), ((212, 246), 'os.path.join', 'os.path.join', (['TEST_DIR', '"""baseline"""'], {}), "(TEST_DIR, 'baseline')\n", (224, 246), False, 'import os\n'), ((436, 468), 'os.path.join', 'os.path.join', (['TEST_DIR', '"""output"""'], {}), "(TEST_DIR, 'output')\n", (448, 468), False, 'import os\n'), ((472, 498), 'os.path.exists', 'os.path.exists', (['output_dir'], {}), '(output_dir)\n', (486, 498), False, 'import os\n'), ((530, 553), 'os.makedirs', 'os.makedirs', (['output_dir'], {}), '(output_dir)\n', (541, 553), False, 'import os\n'), ((1771, 1812), 'os.path.join', 'os.path.join', (['output_dir', '"""pydv_commands"""'], {}), "(output_dir, 'pydv_commands')\n", (1783, 1812), False, 'import os\n'), ((133, 158), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (148, 158), False, 'import os\n'), ((504, 529), 'shutil.rmtree', 'shutil.rmtree', (['output_dir'], {}), '(output_dir)\n', (517, 529), False, 'import shutil\n'), ((2980, 3013), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (2995, 3013), True, 'from numpy import testing as np\n'), ((3190, 3223), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (3205, 3223), True, 'from numpy import testing as np\n'), ((3400, 3433), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (3415, 3433), True, 'from numpy import testing as np\n'), ((3610, 3643), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (3625, 3643), True, 'from numpy import testing as np\n'), ((3820, 3853), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (3835, 3853), True, 'from numpy import testing as np\n'), ((4030, 4063), 'numpy.testing.assert_equal', 
'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (4045, 4063), True, 'from numpy import testing as np\n'), ((4240, 4273), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (4255, 4273), True, 'from numpy import testing as np\n'), ((4450, 4483), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (4465, 4483), True, 'from numpy import testing as np\n'), ((4660, 4693), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (4675, 4693), True, 'from numpy import testing as np\n'), ((4870, 4903), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (4885, 4903), True, 'from numpy import testing as np\n'), ((5080, 5113), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (5095, 5113), True, 'from numpy import testing as np\n'), ((5290, 5323), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (5305, 5323), True, 'from numpy import testing as np\n'), ((5500, 5533), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (5515, 5533), True, 'from numpy import testing as np\n'), ((5710, 5743), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (5725, 5743), True, 'from numpy import testing as np\n'), ((5920, 5953), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (5935, 5953), True, 'from numpy import testing as np\n'), ((6130, 6163), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (6145, 6163), True, 'from numpy import testing as np\n'), ((6340, 6373), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, 
output)\n', (6355, 6373), True, 'from numpy import testing as np\n'), ((6550, 6583), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (6565, 6583), True, 'from numpy import testing as np\n'), ((6760, 6793), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (6775, 6793), True, 'from numpy import testing as np\n'), ((6970, 7003), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (6985, 7003), True, 'from numpy import testing as np\n'), ((7180, 7213), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (7195, 7213), True, 'from numpy import testing as np\n'), ((7390, 7423), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (7405, 7423), True, 'from numpy import testing as np\n'), ((7600, 7633), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (7615, 7633), True, 'from numpy import testing as np\n'), ((7810, 7843), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (7825, 7843), True, 'from numpy import testing as np\n'), ((8020, 8053), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (8035, 8053), True, 'from numpy import testing as np\n'), ((8230, 8263), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (8245, 8263), True, 'from numpy import testing as np\n'), ((8440, 8473), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (8455, 8473), True, 'from numpy import testing as np\n'), ((8650, 8683), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (8665, 8683), True, 'from numpy import testing as 
np\n'), ((8860, 8893), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (8875, 8893), True, 'from numpy import testing as np\n'), ((9070, 9103), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (9085, 9103), True, 'from numpy import testing as np\n'), ((9280, 9313), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (9295, 9313), True, 'from numpy import testing as np\n'), ((9490, 9523), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (9505, 9523), True, 'from numpy import testing as np\n'), ((9700, 9733), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (9715, 9733), True, 'from numpy import testing as np\n'), ((9910, 9943), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (9925, 9943), True, 'from numpy import testing as np\n'), ((10120, 10153), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (10135, 10153), True, 'from numpy import testing as np\n'), ((10330, 10363), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (10345, 10363), True, 'from numpy import testing as np\n'), ((10540, 10573), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (10555, 10573), True, 'from numpy import testing as np\n'), ((10750, 10783), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (10765, 10783), True, 'from numpy import testing as np\n'), ((10960, 10993), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (10975, 10993), True, 'from numpy import testing as np\n'), ((11170, 11203), 
'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (11185, 11203), True, 'from numpy import testing as np\n'), ((11380, 11413), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (11395, 11413), True, 'from numpy import testing as np\n'), ((11590, 11623), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (11605, 11623), True, 'from numpy import testing as np\n'), ((11800, 11833), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (11815, 11833), True, 'from numpy import testing as np\n'), ((12010, 12043), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (12025, 12043), True, 'from numpy import testing as np\n'), ((12220, 12253), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (12235, 12253), True, 'from numpy import testing as np\n'), ((12430, 12463), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (12445, 12463), True, 'from numpy import testing as np\n'), ((12640, 12673), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (12655, 12673), True, 'from numpy import testing as np\n'), ((12850, 12883), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (12865, 12883), True, 'from numpy import testing as np\n'), ((13060, 13093), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (13075, 13093), True, 'from numpy import testing as np\n'), ((13270, 13303), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (13285, 13303), True, 'from numpy import testing as np\n'), ((13480, 13513), 
'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (13495, 13513), True, 'from numpy import testing as np\n'), ((13690, 13723), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (13705, 13723), True, 'from numpy import testing as np\n'), ((13900, 13933), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (13915, 13933), True, 'from numpy import testing as np\n'), ((14110, 14143), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (14125, 14143), True, 'from numpy import testing as np\n'), ((14320, 14353), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (14335, 14353), True, 'from numpy import testing as np\n'), ((14530, 14563), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (14545, 14563), True, 'from numpy import testing as np\n'), ((14740, 14773), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (14755, 14773), True, 'from numpy import testing as np\n'), ((14950, 14983), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (14965, 14983), True, 'from numpy import testing as np\n'), ((15160, 15193), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (15175, 15193), True, 'from numpy import testing as np\n'), ((15370, 15403), 'numpy.testing.assert_equal', 'np.assert_equal', (['baseline', 'output'], {}), '(baseline, output)\n', (15385, 15403), True, 'from numpy import testing as np\n'), ((1914, 1965), 'os.path.join', 'os.path.join', (['output_dir', 'f"""test_image_{i + 1:02d}"""'], {}), "(output_dir, f'test_image_{i + 1:02d}')\n", (1926, 1965), False, 'import os\n'), ((2097, 2134), 'os.path.join', 
'os.path.join', (['PYDV_DIR', '"""pydv"""', '"""pdv"""'], {}), "(PYDV_DIR, 'pydv', 'pdv')\n", (2109, 2134), False, 'import os\n'), ((2854, 2901), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_01.png"""'], {}), "(BASELINE_DIR, 'test_image_01.png')\n", (2866, 2901), False, 'import os\n'), ((2929, 2974), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_01.png"""'], {}), "(output_dir, 'test_image_01.png')\n", (2941, 2974), False, 'import os\n'), ((3064, 3111), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_02.png"""'], {}), "(BASELINE_DIR, 'test_image_02.png')\n", (3076, 3111), False, 'import os\n'), ((3139, 3184), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_02.png"""'], {}), "(output_dir, 'test_image_02.png')\n", (3151, 3184), False, 'import os\n'), ((3274, 3321), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_03.png"""'], {}), "(BASELINE_DIR, 'test_image_03.png')\n", (3286, 3321), False, 'import os\n'), ((3349, 3394), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_03.png"""'], {}), "(output_dir, 'test_image_03.png')\n", (3361, 3394), False, 'import os\n'), ((3484, 3531), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_04.png"""'], {}), "(BASELINE_DIR, 'test_image_04.png')\n", (3496, 3531), False, 'import os\n'), ((3559, 3604), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_04.png"""'], {}), "(output_dir, 'test_image_04.png')\n", (3571, 3604), False, 'import os\n'), ((3694, 3741), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_05.png"""'], {}), "(BASELINE_DIR, 'test_image_05.png')\n", (3706, 3741), False, 'import os\n'), ((3769, 3814), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_05.png"""'], {}), "(output_dir, 'test_image_05.png')\n", (3781, 3814), False, 'import os\n'), ((3904, 3951), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_06.png"""'], {}), "(BASELINE_DIR, 
'test_image_06.png')\n", (3916, 3951), False, 'import os\n'), ((3979, 4024), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_06.png"""'], {}), "(output_dir, 'test_image_06.png')\n", (3991, 4024), False, 'import os\n'), ((4114, 4161), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_07.png"""'], {}), "(BASELINE_DIR, 'test_image_07.png')\n", (4126, 4161), False, 'import os\n'), ((4189, 4234), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_07.png"""'], {}), "(output_dir, 'test_image_07.png')\n", (4201, 4234), False, 'import os\n'), ((4324, 4371), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_08.png"""'], {}), "(BASELINE_DIR, 'test_image_08.png')\n", (4336, 4371), False, 'import os\n'), ((4399, 4444), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_08.png"""'], {}), "(output_dir, 'test_image_08.png')\n", (4411, 4444), False, 'import os\n'), ((4534, 4581), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_09.png"""'], {}), "(BASELINE_DIR, 'test_image_09.png')\n", (4546, 4581), False, 'import os\n'), ((4609, 4654), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_09.png"""'], {}), "(output_dir, 'test_image_09.png')\n", (4621, 4654), False, 'import os\n'), ((4744, 4791), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_10.png"""'], {}), "(BASELINE_DIR, 'test_image_10.png')\n", (4756, 4791), False, 'import os\n'), ((4819, 4864), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_10.png"""'], {}), "(output_dir, 'test_image_10.png')\n", (4831, 4864), False, 'import os\n'), ((4954, 5001), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_11.png"""'], {}), "(BASELINE_DIR, 'test_image_11.png')\n", (4966, 5001), False, 'import os\n'), ((5029, 5074), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_11.png"""'], {}), "(output_dir, 'test_image_11.png')\n", (5041, 5074), False, 'import os\n'), ((5164, 5211), 
'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_12.png"""'], {}), "(BASELINE_DIR, 'test_image_12.png')\n", (5176, 5211), False, 'import os\n'), ((5239, 5284), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_12.png"""'], {}), "(output_dir, 'test_image_12.png')\n", (5251, 5284), False, 'import os\n'), ((5374, 5421), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_13.png"""'], {}), "(BASELINE_DIR, 'test_image_13.png')\n", (5386, 5421), False, 'import os\n'), ((5449, 5494), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_13.png"""'], {}), "(output_dir, 'test_image_13.png')\n", (5461, 5494), False, 'import os\n'), ((5584, 5631), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_14.png"""'], {}), "(BASELINE_DIR, 'test_image_14.png')\n", (5596, 5631), False, 'import os\n'), ((5659, 5704), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_14.png"""'], {}), "(output_dir, 'test_image_14.png')\n", (5671, 5704), False, 'import os\n'), ((5794, 5841), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_15.png"""'], {}), "(BASELINE_DIR, 'test_image_15.png')\n", (5806, 5841), False, 'import os\n'), ((5869, 5914), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_15.png"""'], {}), "(output_dir, 'test_image_15.png')\n", (5881, 5914), False, 'import os\n'), ((6004, 6051), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_16.png"""'], {}), "(BASELINE_DIR, 'test_image_16.png')\n", (6016, 6051), False, 'import os\n'), ((6079, 6124), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_16.png"""'], {}), "(output_dir, 'test_image_16.png')\n", (6091, 6124), False, 'import os\n'), ((6214, 6261), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_17.png"""'], {}), "(BASELINE_DIR, 'test_image_17.png')\n", (6226, 6261), False, 'import os\n'), ((6289, 6334), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_17.png"""'], {}), 
"(output_dir, 'test_image_17.png')\n", (6301, 6334), False, 'import os\n'), ((6424, 6471), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_18.png"""'], {}), "(BASELINE_DIR, 'test_image_18.png')\n", (6436, 6471), False, 'import os\n'), ((6499, 6544), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_18.png"""'], {}), "(output_dir, 'test_image_18.png')\n", (6511, 6544), False, 'import os\n'), ((6634, 6681), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_19.png"""'], {}), "(BASELINE_DIR, 'test_image_19.png')\n", (6646, 6681), False, 'import os\n'), ((6709, 6754), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_19.png"""'], {}), "(output_dir, 'test_image_19.png')\n", (6721, 6754), False, 'import os\n'), ((6844, 6891), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_20.png"""'], {}), "(BASELINE_DIR, 'test_image_20.png')\n", (6856, 6891), False, 'import os\n'), ((6919, 6964), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_20.png"""'], {}), "(output_dir, 'test_image_20.png')\n", (6931, 6964), False, 'import os\n'), ((7054, 7101), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_21.png"""'], {}), "(BASELINE_DIR, 'test_image_21.png')\n", (7066, 7101), False, 'import os\n'), ((7129, 7174), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_21.png"""'], {}), "(output_dir, 'test_image_21.png')\n", (7141, 7174), False, 'import os\n'), ((7264, 7311), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_22.png"""'], {}), "(BASELINE_DIR, 'test_image_22.png')\n", (7276, 7311), False, 'import os\n'), ((7339, 7384), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_22.png"""'], {}), "(output_dir, 'test_image_22.png')\n", (7351, 7384), False, 'import os\n'), ((7474, 7521), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_23.png"""'], {}), "(BASELINE_DIR, 'test_image_23.png')\n", (7486, 7521), False, 'import os\n'), ((7549, 
7594), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_23.png"""'], {}), "(output_dir, 'test_image_23.png')\n", (7561, 7594), False, 'import os\n'), ((7684, 7731), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_24.png"""'], {}), "(BASELINE_DIR, 'test_image_24.png')\n", (7696, 7731), False, 'import os\n'), ((7759, 7804), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_24.png"""'], {}), "(output_dir, 'test_image_24.png')\n", (7771, 7804), False, 'import os\n'), ((7894, 7941), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_25.png"""'], {}), "(BASELINE_DIR, 'test_image_25.png')\n", (7906, 7941), False, 'import os\n'), ((7969, 8014), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_25.png"""'], {}), "(output_dir, 'test_image_25.png')\n", (7981, 8014), False, 'import os\n'), ((8104, 8151), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_26.png"""'], {}), "(BASELINE_DIR, 'test_image_26.png')\n", (8116, 8151), False, 'import os\n'), ((8179, 8224), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_26.png"""'], {}), "(output_dir, 'test_image_26.png')\n", (8191, 8224), False, 'import os\n'), ((8314, 8361), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_27.png"""'], {}), "(BASELINE_DIR, 'test_image_27.png')\n", (8326, 8361), False, 'import os\n'), ((8389, 8434), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_27.png"""'], {}), "(output_dir, 'test_image_27.png')\n", (8401, 8434), False, 'import os\n'), ((8524, 8571), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_28.png"""'], {}), "(BASELINE_DIR, 'test_image_28.png')\n", (8536, 8571), False, 'import os\n'), ((8599, 8644), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_28.png"""'], {}), "(output_dir, 'test_image_28.png')\n", (8611, 8644), False, 'import os\n'), ((8734, 8781), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_29.png"""'], 
{}), "(BASELINE_DIR, 'test_image_29.png')\n", (8746, 8781), False, 'import os\n'), ((8809, 8854), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_29.png"""'], {}), "(output_dir, 'test_image_29.png')\n", (8821, 8854), False, 'import os\n'), ((8944, 8991), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_30.png"""'], {}), "(BASELINE_DIR, 'test_image_30.png')\n", (8956, 8991), False, 'import os\n'), ((9019, 9064), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_30.png"""'], {}), "(output_dir, 'test_image_30.png')\n", (9031, 9064), False, 'import os\n'), ((9154, 9201), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_31.png"""'], {}), "(BASELINE_DIR, 'test_image_31.png')\n", (9166, 9201), False, 'import os\n'), ((9229, 9274), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_31.png"""'], {}), "(output_dir, 'test_image_31.png')\n", (9241, 9274), False, 'import os\n'), ((9364, 9411), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_32.png"""'], {}), "(BASELINE_DIR, 'test_image_32.png')\n", (9376, 9411), False, 'import os\n'), ((9439, 9484), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_32.png"""'], {}), "(output_dir, 'test_image_32.png')\n", (9451, 9484), False, 'import os\n'), ((9574, 9621), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_33.png"""'], {}), "(BASELINE_DIR, 'test_image_33.png')\n", (9586, 9621), False, 'import os\n'), ((9649, 9694), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_33.png"""'], {}), "(output_dir, 'test_image_33.png')\n", (9661, 9694), False, 'import os\n'), ((9784, 9831), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_34.png"""'], {}), "(BASELINE_DIR, 'test_image_34.png')\n", (9796, 9831), False, 'import os\n'), ((9859, 9904), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_34.png"""'], {}), "(output_dir, 'test_image_34.png')\n", (9871, 9904), False, 'import os\n'), 
((9994, 10041), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_35.png"""'], {}), "(BASELINE_DIR, 'test_image_35.png')\n", (10006, 10041), False, 'import os\n'), ((10069, 10114), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_35.png"""'], {}), "(output_dir, 'test_image_35.png')\n", (10081, 10114), False, 'import os\n'), ((10204, 10251), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_36.png"""'], {}), "(BASELINE_DIR, 'test_image_36.png')\n", (10216, 10251), False, 'import os\n'), ((10279, 10324), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_36.png"""'], {}), "(output_dir, 'test_image_36.png')\n", (10291, 10324), False, 'import os\n'), ((10414, 10461), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_37.png"""'], {}), "(BASELINE_DIR, 'test_image_37.png')\n", (10426, 10461), False, 'import os\n'), ((10489, 10534), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_37.png"""'], {}), "(output_dir, 'test_image_37.png')\n", (10501, 10534), False, 'import os\n'), ((10624, 10671), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_38.png"""'], {}), "(BASELINE_DIR, 'test_image_38.png')\n", (10636, 10671), False, 'import os\n'), ((10699, 10744), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_38.png"""'], {}), "(output_dir, 'test_image_38.png')\n", (10711, 10744), False, 'import os\n'), ((10834, 10881), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_39.png"""'], {}), "(BASELINE_DIR, 'test_image_39.png')\n", (10846, 10881), False, 'import os\n'), ((10909, 10954), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_39.png"""'], {}), "(output_dir, 'test_image_39.png')\n", (10921, 10954), False, 'import os\n'), ((11044, 11091), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_40.png"""'], {}), "(BASELINE_DIR, 'test_image_40.png')\n", (11056, 11091), False, 'import os\n'), ((11119, 11164), 'os.path.join', 
'os.path.join', (['output_dir', '"""test_image_40.png"""'], {}), "(output_dir, 'test_image_40.png')\n", (11131, 11164), False, 'import os\n'), ((11254, 11301), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_41.png"""'], {}), "(BASELINE_DIR, 'test_image_41.png')\n", (11266, 11301), False, 'import os\n'), ((11329, 11374), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_41.png"""'], {}), "(output_dir, 'test_image_41.png')\n", (11341, 11374), False, 'import os\n'), ((11464, 11511), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_42.png"""'], {}), "(BASELINE_DIR, 'test_image_42.png')\n", (11476, 11511), False, 'import os\n'), ((11539, 11584), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_42.png"""'], {}), "(output_dir, 'test_image_42.png')\n", (11551, 11584), False, 'import os\n'), ((11674, 11721), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_43.png"""'], {}), "(BASELINE_DIR, 'test_image_43.png')\n", (11686, 11721), False, 'import os\n'), ((11749, 11794), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_43.png"""'], {}), "(output_dir, 'test_image_43.png')\n", (11761, 11794), False, 'import os\n'), ((11884, 11931), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_44.png"""'], {}), "(BASELINE_DIR, 'test_image_44.png')\n", (11896, 11931), False, 'import os\n'), ((11959, 12004), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_44.png"""'], {}), "(output_dir, 'test_image_44.png')\n", (11971, 12004), False, 'import os\n'), ((12094, 12141), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_45.png"""'], {}), "(BASELINE_DIR, 'test_image_45.png')\n", (12106, 12141), False, 'import os\n'), ((12169, 12214), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_45.png"""'], {}), "(output_dir, 'test_image_45.png')\n", (12181, 12214), False, 'import os\n'), ((12304, 12351), 'os.path.join', 'os.path.join', (['BASELINE_DIR', 
'"""test_image_46.png"""'], {}), "(BASELINE_DIR, 'test_image_46.png')\n", (12316, 12351), False, 'import os\n'), ((12379, 12424), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_46.png"""'], {}), "(output_dir, 'test_image_46.png')\n", (12391, 12424), False, 'import os\n'), ((12514, 12561), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_47.png"""'], {}), "(BASELINE_DIR, 'test_image_47.png')\n", (12526, 12561), False, 'import os\n'), ((12589, 12634), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_47.png"""'], {}), "(output_dir, 'test_image_47.png')\n", (12601, 12634), False, 'import os\n'), ((12724, 12771), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_48.png"""'], {}), "(BASELINE_DIR, 'test_image_48.png')\n", (12736, 12771), False, 'import os\n'), ((12799, 12844), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_48.png"""'], {}), "(output_dir, 'test_image_48.png')\n", (12811, 12844), False, 'import os\n'), ((12934, 12981), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_49.png"""'], {}), "(BASELINE_DIR, 'test_image_49.png')\n", (12946, 12981), False, 'import os\n'), ((13009, 13054), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_49.png"""'], {}), "(output_dir, 'test_image_49.png')\n", (13021, 13054), False, 'import os\n'), ((13144, 13191), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_50.png"""'], {}), "(BASELINE_DIR, 'test_image_50.png')\n", (13156, 13191), False, 'import os\n'), ((13219, 13264), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_50.png"""'], {}), "(output_dir, 'test_image_50.png')\n", (13231, 13264), False, 'import os\n'), ((13354, 13401), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_51.png"""'], {}), "(BASELINE_DIR, 'test_image_51.png')\n", (13366, 13401), False, 'import os\n'), ((13429, 13474), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_51.png"""'], {}), 
"(output_dir, 'test_image_51.png')\n", (13441, 13474), False, 'import os\n'), ((13564, 13611), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_52.png"""'], {}), "(BASELINE_DIR, 'test_image_52.png')\n", (13576, 13611), False, 'import os\n'), ((13639, 13684), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_52.png"""'], {}), "(output_dir, 'test_image_52.png')\n", (13651, 13684), False, 'import os\n'), ((13774, 13821), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_53.png"""'], {}), "(BASELINE_DIR, 'test_image_53.png')\n", (13786, 13821), False, 'import os\n'), ((13849, 13894), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_53.png"""'], {}), "(output_dir, 'test_image_53.png')\n", (13861, 13894), False, 'import os\n'), ((13984, 14031), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_54.png"""'], {}), "(BASELINE_DIR, 'test_image_54.png')\n", (13996, 14031), False, 'import os\n'), ((14059, 14104), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_54.png"""'], {}), "(output_dir, 'test_image_54.png')\n", (14071, 14104), False, 'import os\n'), ((14194, 14241), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_55.png"""'], {}), "(BASELINE_DIR, 'test_image_55.png')\n", (14206, 14241), False, 'import os\n'), ((14269, 14314), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_55.png"""'], {}), "(output_dir, 'test_image_55.png')\n", (14281, 14314), False, 'import os\n'), ((14404, 14451), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_56.png"""'], {}), "(BASELINE_DIR, 'test_image_56.png')\n", (14416, 14451), False, 'import os\n'), ((14479, 14524), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_56.png"""'], {}), "(output_dir, 'test_image_56.png')\n", (14491, 14524), False, 'import os\n'), ((14614, 14661), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_57.png"""'], {}), "(BASELINE_DIR, 'test_image_57.png')\n", 
(14626, 14661), False, 'import os\n'), ((14689, 14734), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_57.png"""'], {}), "(output_dir, 'test_image_57.png')\n", (14701, 14734), False, 'import os\n'), ((14824, 14871), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_58.png"""'], {}), "(BASELINE_DIR, 'test_image_58.png')\n", (14836, 14871), False, 'import os\n'), ((14899, 14944), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_58.png"""'], {}), "(output_dir, 'test_image_58.png')\n", (14911, 14944), False, 'import os\n'), ((15034, 15081), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_59.png"""'], {}), "(BASELINE_DIR, 'test_image_59.png')\n", (15046, 15081), False, 'import os\n'), ((15109, 15154), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_59.png"""'], {}), "(output_dir, 'test_image_59.png')\n", (15121, 15154), False, 'import os\n'), ((15244, 15291), 'os.path.join', 'os.path.join', (['BASELINE_DIR', '"""test_image_60.png"""'], {}), "(BASELINE_DIR, 'test_image_60.png')\n", (15256, 15291), False, 'import os\n'), ((15319, 15364), 'os.path.join', 'os.path.join', (['output_dir', '"""test_image_60.png"""'], {}), "(output_dir, 'test_image_60.png')\n", (15331, 15364), False, 'import os\n'), ((750, 788), 'os.path.join', 'os.path.join', (['TEST_DIR', '"""testData.txt"""'], {}), "(TEST_DIR, 'testData.txt')\n", (762, 788), False, 'import os\n')] |
import numpy as np
def get_phases(t, P, t0):
    """
    Compute the orbital phase at each input time.

    Given input times, a period (or a posterior distribution of periods)
    and a time of transit center (or its posterior), returns the phase
    at each time ``t``, wrapped to the interval [-0.5, 0.5).

    Parameters
    ----------
    t : float or array_like
        Time(s) at which to evaluate the phase.
    P : float or array_like
        Period; a posterior sample array is reduced with its median.
    t0 : float or array_like
        Time of transit center; a posterior sample array is reduced
        with its median.

    Returns
    -------
    float or numpy.ndarray
        Phase value(s) in [-0.5, 0.5).
    """
    # Reduce posterior distributions (if given) to point estimates and wrap.
    phase = ((t - np.median(t0)) / np.median(P)) % 1
    if np.ndim(phase) > 0:
        # Vectorized wrap: phases >= 0.5 are mapped to [-0.5, 0).
        ii = np.where(phase >= 0.5)[0]
        phase[ii] = phase[ii] - 1.0
    else:
        # Scalar wrap. Branching on the dimensionality of the result (rather
        # than `type(t) is not float`) also handles int and numpy-scalar
        # inputs, which the previous check misrouted to the array branch.
        if phase >= 0.5:
            phase = phase - 1.0
    return phase
def function_quantiles(X, alpha=0.68, method='median'):
    """
    Compute the credibility band of a model from posterior samples.

    Given an N x M matrix ``X`` holding N evaluations of a model at M
    index points (e.g. times), return per-index median and credibility
    bounds across the N samples.

    Parameters
    ----------
    X : numpy.array
        Array with N model evaluations (rows) at M index points (columns).
    alpha : float
        Credibility band percentage.
    method : string
        Band-generation method; only `median` is supported for now.

    Returns
    -------
    median_model : numpy.array
        Length-M array with the median model.
    upper_band : numpy.array
        Length-M array with the upper ``alpha``*100 credibility band.
    lower_band : numpy.array
        Length-M array with the lower ``alpha``*100 credibility band.
    """
    n_points = X.shape[1]
    median_model = np.zeros(n_points)
    upper_band = np.zeros(n_points)
    lower_band = np.zeros(n_points)
    # Reduce each column (one index point) independently.
    for idx in range(n_points):
        med, up, low = get_quantiles(X[:, idx], alpha=alpha)
        median_model[idx] = med
        upper_band[idx] = up
        lower_band[idx] = low
    return median_model, upper_band, lower_band
def get_quantiles(dist, alpha=0.68, method='median'):
    """
    Return the median of a distribution and the ``alpha`` credibility
    bounds around it.

    Assumes a non-ordered distribution of parameter samples.

    Parameters
    ----------
    dist : numpy.array
        (Unordered) posterior samples of the parameter.
    alpha : float
        Fraction of the samples enclosed by the credibility band.
    method : string
        Only 'median' is supported; any other value returns None.

    Returns
    -------
    tuple
        (median, upper credibility bound, lower credibility bound)
    """
    ordered_dist = dist[np.argsort(dist)]
    # Number of samples from the posterior.
    nsamples = len(dist)
    # Samples on each side of the median covered by the band.
    nsamples_at_each_side = int(nsamples * (alpha / 2.) + 1)
    if method == 'median':
        if nsamples % 2 == 0:  # Number of points is even
            # BUGFIX: the median of an even-length sorted array averages the
            # two central elements, at 0-based indices n/2 - 1 and n/2; the
            # previous code used n/2 and n/2 + 1, shifting everything up by one.
            med_idx_up = int(nsamples / 2.)
            med_idx_down = med_idx_up - 1
            param = (ordered_dist[med_idx_up] + ordered_dist[med_idx_down]) / 2.
            return param, ordered_dist[med_idx_up + nsamples_at_each_side],\
                   ordered_dist[med_idx_down - nsamples_at_each_side]
        else:
            med_idx = int(nsamples / 2.)
            param = ordered_dist[med_idx]
            return param, ordered_dist[med_idx + nsamples_at_each_side],\
                   ordered_dist[med_idx - nsamples_at_each_side]
| [
"numpy.argsort",
"numpy.where",
"numpy.zeros",
"numpy.median"
] | [((1444, 1464), 'numpy.zeros', 'np.zeros', (['X.shape[1]'], {}), '(X.shape[1])\n', (1452, 1464), True, 'import numpy as np\n'), ((1466, 1486), 'numpy.zeros', 'np.zeros', (['X.shape[1]'], {}), '(X.shape[1])\n', (1474, 1486), True, 'import numpy as np\n'), ((1488, 1508), 'numpy.zeros', 'np.zeros', (['X.shape[1]'], {}), '(X.shape[1])\n', (1496, 1508), True, 'import numpy as np\n'), ((2112, 2128), 'numpy.argsort', 'np.argsort', (['dist'], {}), '(dist)\n', (2122, 2128), True, 'import numpy as np\n'), ((306, 328), 'numpy.where', 'np.where', (['(phase >= 0.5)'], {}), '(phase >= 0.5)\n', (314, 328), True, 'import numpy as np\n'), ((275, 287), 'numpy.median', 'np.median', (['P'], {}), '(P)\n', (284, 287), True, 'import numpy as np\n'), ((411, 423), 'numpy.median', 'np.median', (['P'], {}), '(P)\n', (420, 423), True, 'import numpy as np\n'), ((260, 273), 'numpy.median', 'np.median', (['t0'], {}), '(t0)\n', (269, 273), True, 'import numpy as np\n'), ((396, 409), 'numpy.median', 'np.median', (['t0'], {}), '(t0)\n', (405, 409), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
# (C) Copyright 2020 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#
import numpy as np
import pandas as pd
import xarray as xr
from climetlab.core.metadata import annotate, annotation
class Owner:
    # Empty helper class; instances (or the class itself) act as the
    # "owner" object handed to annotate() in the tests below.
    pass
def test_pandas_annotations():
    """Annotations attached to a DataFrame survive derived frames and
    release their owner once it is deleted."""
    payload = dict(a=["foo", "bar"], b=[1, 2])
    frame = pd.DataFrame(payload, columns=["a", "b"])
    owner = Owner()
    # annotate() must hand back the very frame it was given.
    assert annotate(frame, owner, foo=42) is frame
    assert "climetlab-0" in frame._metadata
    note = annotation(frame)
    assert note.get("foo") == 42
    assert note.owner is owner
    # A frame derived by filtering carries the same annotation object.
    derived = frame[frame.b == 42]
    derived_note = annotation(derived)
    assert derived_note.get("foo") == 42
    assert derived_note.owner is owner
    assert note is derived_note
    # Deleting the owner must clear it from the annotation.
    del owner
    assert derived_note.owner is None
    other_owner = Owner
    other_frame = pd.DataFrame(payload, columns=["a", "b"])
    annotate(other_frame, other_owner, bar=42)
    other_note = annotation(other_frame)
    assert note is not other_note
    assert "climetlab-0" in other_frame._metadata
def test_xarray_annotations():
    """Annotations can be attached to both DataArray and Dataset objects."""
    # --- DataArray (example adapted from the xarray documentation) ---
    values = np.random.rand(4, 3)
    locations = ["IA", "IL", "IN"]
    timestamps = pd.date_range("2000-01-01", periods=4)
    arr = xr.DataArray(values, coords=[timestamps, locations], dims=["time", "space"])
    owner = Owner()
    assert annotate(arr, owner, foo=42) is arr
    note = annotation(arr)
    assert note.get("foo") == 42
    # --- Dataset ---
    temp = 15 + 8 * np.random.randn(2, 2, 3)
    precip = 10 * np.random.rand(2, 2, 3)
    lon = [[-99.83, -99.32], [-99.79, -99.23]]
    lat = [[42.25, 42.21], [42.63, 42.59]]
    ds = xr.Dataset(
        {
            "temperature": (["x", "y", "time"], temp),
            "precipitation": (["x", "y", "time"], precip),
        },
        coords={
            "lon": (["x", "y"], lon),
            "lat": (["x", "y"], lat),
            "time": pd.date_range("2014-09-06", periods=3),
            "reference_time": pd.Timestamp("2014-09-05"),
        },
    )
    annotate(ds, owner, bar=42)
    ds_note = annotation(ds)
    assert ds_note.get("bar") == 42
    # Dataset built from a DataArray: annotation should be preserved
    # (currently disabled).
    # xr3 = xr1.to_dataset(name="test")
    # a3 = annotation(xr3)
    # assert a3.get("foo") == 42
if __name__ == "__main__":
    # Minimal test runner: execute every test_* callable in name order.
    for name in sorted(n for n in globals() if n.startswith("test_")):
        candidate = globals()[name]
        if callable(candidate):
            print(name)
            candidate()
| [
"pandas.DataFrame",
"climetlab.core.metadata.annotate",
"pandas.date_range",
"pandas.Timestamp",
"numpy.random.randn",
"xarray.DataArray",
"numpy.random.rand",
"climetlab.core.metadata.annotation"
] | [((633, 671), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {'columns': "['a', 'b']"}), "(data, columns=['a', 'b'])\n", (645, 671), True, 'import pandas as pd\n'), ((790, 805), 'climetlab.core.metadata.annotation', 'annotation', (['df1'], {}), '(df1)\n', (800, 805), False, 'from climetlab.core.metadata import annotate, annotation\n'), ((903, 918), 'climetlab.core.metadata.annotation', 'annotation', (['df2'], {}), '(df2)\n', (913, 918), False, 'from climetlab.core.metadata import annotate, annotation\n'), ((1070, 1108), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {'columns': "['a', 'b']"}), "(data, columns=['a', 'b'])\n", (1082, 1108), True, 'import pandas as pd\n'), ((1113, 1140), 'climetlab.core.metadata.annotate', 'annotate', (['df3', 'obj3'], {'bar': '(42)'}), '(df3, obj3, bar=42)\n', (1121, 1140), False, 'from climetlab.core.metadata import annotate, annotation\n'), ((1151, 1166), 'climetlab.core.metadata.annotation', 'annotation', (['df3'], {}), '(df3)\n', (1161, 1166), False, 'from climetlab.core.metadata import annotate, annotation\n'), ((1356, 1376), 'numpy.random.rand', 'np.random.rand', (['(4)', '(3)'], {}), '(4, 3)\n', (1370, 1376), True, 'import numpy as np\n'), ((1419, 1457), 'pandas.date_range', 'pd.date_range', (['"""2000-01-01"""'], {'periods': '(4)'}), "('2000-01-01', periods=4)\n", (1432, 1457), True, 'import pandas as pd\n'), ((1468, 1532), 'xarray.DataArray', 'xr.DataArray', (['data'], {'coords': '[times, locs]', 'dims': "['time', 'space']"}), "(data, coords=[times, locs], dims=['time', 'space'])\n", (1480, 1532), True, 'import xarray as xr\n'), ((1608, 1623), 'climetlab.core.metadata.annotation', 'annotation', (['xr1'], {}), '(xr1)\n', (1618, 1623), False, 'from climetlab.core.metadata import annotate, annotation\n'), ((2253, 2280), 'climetlab.core.metadata.annotate', 'annotate', (['xr2', 'obj1'], {'bar': '(42)'}), '(xr2, obj1, bar=42)\n', (2261, 2280), False, 'from climetlab.core.metadata import annotate, annotation\n'), ((2290, 2305), 
'climetlab.core.metadata.annotation', 'annotation', (['xr2'], {}), '(xr2)\n', (2300, 2305), False, 'from climetlab.core.metadata import annotate, annotation\n'), ((703, 730), 'climetlab.core.metadata.annotate', 'annotate', (['df1', 'obj1'], {'foo': '(42)'}), '(df1, obj1, foo=42)\n', (711, 730), False, 'from climetlab.core.metadata import annotate, annotation\n'), ((1563, 1590), 'climetlab.core.metadata.annotate', 'annotate', (['xr1', 'obj1'], {'foo': '(42)'}), '(xr1, obj1, foo=42)\n', (1571, 1590), False, 'from climetlab.core.metadata import annotate, annotation\n'), ((1748, 1771), 'numpy.random.rand', 'np.random.rand', (['(2)', '(2)', '(3)'], {}), '(2, 2, 3)\n', (1762, 1771), True, 'import numpy as np\n'), ((1705, 1729), 'numpy.random.randn', 'np.random.randn', (['(2)', '(2)', '(3)'], {}), '(2, 2, 3)\n', (1720, 1729), True, 'import numpy as np\n'), ((2133, 2171), 'pandas.date_range', 'pd.date_range', (['"""2014-09-06"""'], {'periods': '(3)'}), "('2014-09-06', periods=3)\n", (2146, 2171), True, 'import pandas as pd\n'), ((2203, 2229), 'pandas.Timestamp', 'pd.Timestamp', (['"""2014-09-05"""'], {}), "('2014-09-05')\n", (2215, 2229), True, 'import pandas as pd\n')] |
#!/usr/bin/python
# pip install lxml
import sys
import os
import json
import xml.etree.ElementTree as ET
from pycocotools.coco import COCO
from pycocotools import mask
import glob
import numpy as np
from skimage import measure
from PIL import Image
# ID assigned to the first COCO annotation; subsequent ones increment from here.
START_BOUNDING_BOX_ID = 1
# Optional fixed category-name -> id mapping; None means derive it from the XMLs.
PRE_DEFINE_CATEGORIES = None
# If necessary, pre-define category and its id
# PRE_DEFINE_CATEGORIES = {"aeroplane": 1, "bicycle": 2, "bird": 3, "boat": 4,
# "bottle":5, "bus": 6, "car": 7, "cat": 8, "chair": 9,
# "cow": 10, "diningtable": 11, "dog": 12, "horse": 13,
# "motorbike": 14, "person": 15, "pottedplant": 16,
# "sheep": 17, "sofa": 18, "train": 19, "tvmonitor": 20}
def get(root, name):
    """Return every sub-element of ``root`` matched by tag/path ``name``."""
    return root.findall(name)
def get_and_check(root, name, length):
    """Find sub-elements ``name`` under ``root`` and validate their count.

    Raises ValueError when no match exists, or when ``length`` > 0 and the
    number of matches differs from it.  With ``length == 1`` the single
    matched element itself (not a list) is returned.
    """
    matches = root.findall(name)
    if not matches:
        raise ValueError("Can not find %s in %s." % (name, root.tag))
    if 0 < length != len(matches):
        raise ValueError(
            "The size of %s is supposed to be %d, but is %d."
            % (name, length, len(matches))
        )
    return matches[0] if length == 1 else matches
def close_contour(contour):
    """Return ``contour`` with its first point appended at the end,
    unless the polyline is already closed."""
    first, last = contour[0], contour[-1]
    if np.array_equal(first, last):
        return contour
    return np.vstack((contour, first))
def binary_mask_to_polygon(binary_mask, tolerance=0):
    """Converts a binary mask to COCO polygon representation

    Args:
        binary_mask: a 2D binary numpy array where '1's represent the object
        tolerance: Maximum distance from original points of polygon to
            approximated polygonal chain. If tolerance is 0, the original
            coordinate array is returned.
    """
    # Pad with a zero border so contours touching the image edge are closed,
    # then shift coordinates back by 1 to undo the padding offset.
    padded = np.pad(binary_mask, pad_width=1, mode='constant', constant_values=0)
    shifted_contours = np.subtract(measure.find_contours(padded, 0.5), 1)
    polygons = []
    for raw in shifted_contours:
        simplified = measure.approximate_polygon(close_contour(raw), tolerance)
        if len(simplified) < 3:
            # Fewer than three vertices cannot form a polygon.
            continue
        # find_contours yields (row, col); COCO polygons expect (x, y).
        flat = np.flip(simplified, axis=1).ravel().tolist()
        # Padding/shifting can leave -0.5 coordinates; clamp them to 0.
        polygons.append([coord if coord >= 0 else 0 for coord in flat])
    return polygons
def get_filename_as_int(filename):
    """Return the numeric stem of ``filename`` as an int.

    Backslashes are normalized to forward slashes first, so Windows-style
    paths work as well.

    Raises
    ------
    ValueError
        If the basename (without extension) is not an integer.
    """
    normalized = filename.replace("\\", "/")
    stem = os.path.splitext(os.path.basename(normalized))[0]
    try:
        return int(stem)
    except ValueError as exc:
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) and chained for easier debugging.
        raise ValueError("Filename %s is supposed to be an integer." % (stem)) from exc
def get_categories(xml_files):
    """Generate category name to id mapping from a list of xml files.

    Arguments:
        xml_files {list} -- A list of xml file paths.

    Returns:
        dict -- category name to id mapping (ids start at 1, names sorted).
    """
    names = set()
    for path in xml_files:
        # The first child of each <object> element holds the category name.
        for obj in ET.parse(path).getroot().findall("object"):
            names.add(obj[0].text)
    return {name: idx + 1 for idx, name in enumerate(sorted(names))}
def convert(xml_files, json_file, mask_dir, train_files, val=False, inst_dir=None):
    """Convert Pascal-VOC style XML annotations plus segmentation masks into
    a single COCO-format instance-segmentation JSON file.

    Parameters
    ----------
    xml_files : list of str
        Paths to the VOC XML annotation files.
    json_file : str
        Output path of the COCO JSON file.
    mask_dir : str
        Directory holding per-image class masks (PNG, same stem as image).
    train_files : container of str
        Image stems to keep; XMLs whose image stem is not listed are skipped.
    val : bool
        If True, instance masks from ``inst_dir`` yield one annotation per
        ground-truth instance; otherwise one annotation per XML <object> is
        built from the class mask restricted to the object's bounding box.
    inst_dir : str or None
        Directory with instance-id masks (used only when ``val`` is True).
    """
    json_dict = {"images": [], "type": "instances", "annotations": [], "categories": []}
    if PRE_DEFINE_CATEGORIES is not None:
        categories = PRE_DEFINE_CATEGORIES
    else:
        categories = get_categories(xml_files)
    bnd_id = START_BOUNDING_BOX_ID
    now=0  # number of images actually converted
    for xml_file in xml_files:
        tree = ET.parse(xml_file)
        root = tree.getroot()
        # Image file name: prefer the <path> element, fall back to <filename>.
        path = get(root, "path")
        if len(path) == 1:
            filename = os.path.basename(path[0].text)
        elif len(path) == 0:
            filename = get_and_check(root, "filename", 1).text
        else:
            raise ValueError("%d paths found in %s" % (len(path), xml_file))
        mask_path = os.path.join(mask_dir, filename[:-4] + '.png')
        if not (filename[:-4] in train_files):
            print ('skip this %s'%filename)
            continue
        now+=1
        #The filename must be a number
        image_id = get_filename_as_int(filename)
        size = get_and_check(root, "size", 1)
        width = int(get_and_check(size, "width", 1).text)
        height = int(get_and_check(size, "height", 1).text)
        image = {
            "file_name": filename,
            "height": height,
            "width": width,
            "id": image_id,
        }
        json_dict["images"].append(image)
        # Per-pixel class-id mask of this image.
        mask_cls = np.asarray(Image.open(mask_path), dtype=np.int32)
        if val:
            # Validation path: one annotation per instance id in the
            # instance mask.
            inst_path = os.path.join(inst_dir, filename[:-4] + '.png')
            inst_mask = np.asarray(Image.open(inst_path), dtype=np.int32)
            this_mask = inst_mask.copy()
            inst_id_list = np.unique(inst_mask)
            sum_pix = 0  # NOTE(review): unused
            max_id = 0   # NOTE(review): unused
            for inst in inst_id_list:
                if inst==0 or inst==255:
                    # 0 and 255 label values are skipped (background/border).
                    continue
                # Binary mask of the current instance.
                this_mask = this_mask*0.0
                this_mask[inst_mask==inst]=1
                # Class id taken from the class mask under this instance.
                category_id = np.unique(mask_cls[this_mask==1])[0]
                this_mask = np.array(this_mask).astype(np.uint8)
                segmentation = binary_mask_to_polygon(this_mask, tolerance=2)
                binary_mask_encoded = mask.encode(np.asfortranarray(this_mask.astype(np.uint8)))
                area = mask.area(binary_mask_encoded)
                bounding_box = mask.toBbox(binary_mask_encoded)
                if segmentation ==[]:
                    # Degenerate polygon: fall back to a filled bounding box.
                    this_mask = inst_mask.copy()
                    this_mask = this_mask*0.0
                    xmin = int(bounding_box[0])
                    xmax = int(bounding_box[0]+bounding_box[2])
                    ymin = int(bounding_box[1])
                    ymax = int(bounding_box[1]+bounding_box[3])
                    this_mask[ymin:ymax,xmin:xmax]=1
                    this_mask = np.array(this_mask).astype(np.uint8)
                    segmentation = binary_mask_to_polygon(this_mask, tolerance=2)
                    if segmentation==[]:
                        continue
                ann = {
                    "area": area.tolist(),#o_width * o_height,
                    "iscrowd": 0,
                    "image_id": image_id,
                    "bbox": bounding_box.tolist(),
                    "category_id": int(category_id),
                    "id": bnd_id,
                    "ignore": 0,
                    "segmentation": segmentation,
                }
                json_dict["annotations"].append(ann)
                bnd_id = bnd_id + 1
        else:
            # Training path: one annotation per XML <object>, mask taken from
            # the class mask restricted to the object's bounding box.
            for obj in get(root, "object"):
                category = get_and_check(obj, "name", 1).text
                if category not in categories:
                    # NOTE(review): ids from get_categories start at 1, so
                    # len(categories) may collide with an existing id — verify.
                    new_id = len(categories)
                    categories[category] = new_id
                category_id = categories[category]
                bndbox = get_and_check(obj, "bndbox", 1)
                # VOC boxes are 1-based inclusive; shift mins to 0-based.
                xmin = int(get_and_check(bndbox, "xmin", 1).text) - 1
                ymin = int(get_and_check(bndbox, "ymin", 1).text) - 1
                xmax = int(get_and_check(bndbox, "xmax", 1).text)
                ymax = int(get_and_check(bndbox, "ymax", 1).text)
                assert xmax > xmin
                assert ymax > ymin
                o_width = abs(xmax - xmin)
                o_height = abs(ymax - ymin)
                # Pixels inside the bbox whose class matches this object.
                this_mask = mask_cls.copy()
                this_mask = this_mask*0.0
                this_mask[ymin:ymax,xmin:xmax][mask_cls[ymin:ymax,xmin:xmax]==category_id]=1
                this_mask = np.array(this_mask).astype(np.uint8)
                segmentation = binary_mask_to_polygon(this_mask, tolerance=2)
                binary_mask_encoded = mask.encode(np.asfortranarray(this_mask.astype(np.uint8)))
                area = mask.area(binary_mask_encoded)
                if segmentation ==[]:
                    # Degenerate polygon: fall back to a filled bounding box.
                    this_mask = mask_cls.copy()
                    this_mask = this_mask*0.0
                    this_mask[ymin:ymax,xmin:xmax]=1
                    this_mask = np.array(this_mask).astype(np.uint8)
                    segmentation = binary_mask_to_polygon(this_mask, tolerance=2)
                    if segmentation==[]:
                        continue
                ann = {
                    "area": o_width * o_height,
                    "iscrowd": 0,
                    "image_id": image_id,
                    "bbox": [xmin, ymin, o_width, o_height],
                    "category_id": category_id,
                    "id": bnd_id,
                    "ignore": 0,
                    "segmentation": segmentation,
                }
                json_dict["annotations"].append(ann)
                bnd_id = bnd_id + 1
    # Emit the category table and write the JSON file.
    for cate, cid in categories.items():
        cat = {"supercategory": "none", "id": cid, "name": cate}
        json_dict["categories"].append(cat)
    os.makedirs(os.path.dirname(json_file), exist_ok=True)
    json_fp = open(json_file, "w")
    json_str = json.dumps(json_dict)
    json_fp.write(json_str)
    json_fp.close()
    print ('-->%d'%now)
if __name__ == "__main__":
    # Script configuration: edit these paths/flags for your own dataset.
    source_dir = './data/VOCdevkit/VOC2012/' #Edit this to your own dataset path.
    xml_dir = source_dir + 'Annotations' #Path of xml data directory.
    train_or_val= 'trainaug' #trainaug|val
    if train_or_val=='val':
        val = True #With GT objects and Masks.
        inst_dir = source_dir +'SegmentationObject'
        mask_dir = source_dir +'SegmentationClass'
    else:
        val = False
        inst_dir = None
        mask_dir = './data/gen_labels/FR_95/mask' #Path of generated pseudo label directory.
    train_list = os.path.join(source_dir+'ImageSets/Segmentation', train_or_val + ".txt") #Path of data list directory.
    # One image stem per line in the split file.
    train_files = [i.strip() for i in open(train_list) if not i.strip() == ' ']
    json_file = './voc_inst_%s.json'%train_or_val #Save to current dir.
    xml_files = glob.glob(os.path.join(xml_dir, "*.xml"))
    # If you want to do train/test split, you can pass a subset of xml files to convert function.
    print("Number of xml files: {}".format(len(xml_files)))
    convert(xml_files, json_file, mask_dir, train_files, val=val, inst_dir=inst_dir)
    print("Success: {}".format(json_file))
| [
"numpy.pad",
"xml.etree.ElementTree.parse",
"numpy.flip",
"numpy.subtract",
"os.path.basename",
"os.path.dirname",
"numpy.unique",
"pycocotools.mask.area",
"pycocotools.mask.toBbox",
"json.dumps",
"PIL.Image.open",
"numpy.array",
"skimage.measure.find_contours",
"numpy.array_equal",
"os.... | [((1787, 1855), 'numpy.pad', 'np.pad', (['binary_mask'], {'pad_width': '(1)', 'mode': '"""constant"""', 'constant_values': '(0)'}), "(binary_mask, pad_width=1, mode='constant', constant_values=0)\n", (1793, 1855), True, 'import numpy as np\n'), ((1871, 1917), 'skimage.measure.find_contours', 'measure.find_contours', (['padded_binary_mask', '(0.5)'], {}), '(padded_binary_mask, 0.5)\n', (1892, 1917), False, 'from skimage import measure\n'), ((1933, 1957), 'numpy.subtract', 'np.subtract', (['contours', '(1)'], {}), '(contours, 1)\n', (1944, 1957), True, 'import numpy as np\n'), ((9237, 9258), 'json.dumps', 'json.dumps', (['json_dict'], {}), '(json_dict)\n', (9247, 9258), False, 'import json\n'), ((9897, 9971), 'os.path.join', 'os.path.join', (["(source_dir + 'ImageSets/Segmentation')", "(train_or_val + '.txt')"], {}), "(source_dir + 'ImageSets/Segmentation', train_or_val + '.txt')\n", (9909, 9971), False, 'import os\n'), ((1172, 1211), 'numpy.array_equal', 'np.array_equal', (['contour[0]', 'contour[-1]'], {}), '(contour[0], contour[-1])\n', (1186, 1211), True, 'import numpy as np\n'), ((1231, 1263), 'numpy.vstack', 'np.vstack', (['(contour, contour[0])'], {}), '((contour, contour[0]))\n', (1240, 1263), True, 'import numpy as np\n'), ((2046, 2093), 'skimage.measure.approximate_polygon', 'measure.approximate_polygon', (['contour', 'tolerance'], {}), '(contour, tolerance)\n', (2073, 2093), False, 'from skimage import measure\n'), ((2162, 2186), 'numpy.flip', 'np.flip', (['contour'], {'axis': '(1)'}), '(contour, axis=1)\n', (2169, 2186), True, 'import numpy as np\n'), ((3046, 3064), 'xml.etree.ElementTree.parse', 'ET.parse', (['xml_file'], {}), '(xml_file)\n', (3054, 3064), True, 'import xml.etree.ElementTree as ET\n'), ((3733, 3751), 'xml.etree.ElementTree.parse', 'ET.parse', (['xml_file'], {}), '(xml_file)\n', (3741, 3751), True, 'import xml.etree.ElementTree as ET\n'), ((4099, 4145), 'os.path.join', 'os.path.join', (['mask_dir', "(filename[:-4] + '.png')"], 
{}), "(mask_dir, filename[:-4] + '.png')\n", (4111, 4145), False, 'import os\n'), ((9144, 9170), 'os.path.dirname', 'os.path.dirname', (['json_file'], {}), '(json_file)\n', (9159, 9170), False, 'import os\n'), ((10178, 10208), 'os.path.join', 'os.path.join', (['xml_dir', '"""*.xml"""'], {}), "(xml_dir, '*.xml')\n", (10190, 10208), False, 'import os\n'), ((3865, 3895), 'os.path.basename', 'os.path.basename', (['path[0].text'], {}), '(path[0].text)\n', (3881, 3895), False, 'import os\n'), ((4747, 4768), 'PIL.Image.open', 'Image.open', (['mask_path'], {}), '(mask_path)\n', (4757, 4768), False, 'from PIL import Image\n'), ((4826, 4872), 'os.path.join', 'os.path.join', (['inst_dir', "(filename[:-4] + '.png')"], {}), "(inst_dir, filename[:-4] + '.png')\n", (4838, 4872), False, 'import os\n'), ((5015, 5035), 'numpy.unique', 'np.unique', (['inst_mask'], {}), '(inst_mask)\n', (5024, 5035), True, 'import numpy as np\n'), ((2574, 2600), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (2590, 2600), False, 'import os\n'), ((4908, 4929), 'PIL.Image.open', 'Image.open', (['inst_path'], {}), '(inst_path)\n', (4918, 4929), False, 'from PIL import Image\n'), ((5608, 5638), 'pycocotools.mask.area', 'mask.area', (['binary_mask_encoded'], {}), '(binary_mask_encoded)\n', (5617, 5638), False, 'from pycocotools import mask\n'), ((5670, 5702), 'pycocotools.mask.toBbox', 'mask.toBbox', (['binary_mask_encoded'], {}), '(binary_mask_encoded)\n', (5681, 5702), False, 'from pycocotools import mask\n'), ((8063, 8093), 'pycocotools.mask.area', 'mask.area', (['binary_mask_encoded'], {}), '(binary_mask_encoded)\n', (8072, 8093), False, 'from pycocotools import mask\n'), ((5308, 5343), 'numpy.unique', 'np.unique', (['mask_cls[this_mask == 1]'], {}), '(mask_cls[this_mask == 1])\n', (5317, 5343), True, 'import numpy as np\n'), ((5373, 5392), 'numpy.array', 'np.array', (['this_mask'], {}), '(this_mask)\n', (5381, 5392), True, 'import numpy as np\n'), ((7828, 7847), 
'numpy.array', 'np.array', (['this_mask'], {}), '(this_mask)\n', (7836, 7847), True, 'import numpy as np\n'), ((6145, 6164), 'numpy.array', 'np.array', (['this_mask'], {}), '(this_mask)\n', (6153, 6164), True, 'import numpy as np\n'), ((8311, 8330), 'numpy.array', 'np.array', (['this_mask'], {}), '(this_mask)\n', (8319, 8330), True, 'import numpy as np\n')] |
import pytest
import torch
import torch.nn as nn
from torch import sin, cos
import numpy as np
from neurodiffeq import diff
from neurodiffeq.generators import GeneratorSpherical
from neurodiffeq.function_basis import ZonalSphericalHarmonics
from neurodiffeq.networks import FCNN
from neurodiffeq.operators import spherical_curl
from neurodiffeq.operators import spherical_grad
from neurodiffeq.operators import spherical_div
from neurodiffeq.operators import spherical_laplacian
from neurodiffeq.operators import spherical_vector_laplacian
from neurodiffeq.operators import spherical_to_cartesian, cartesian_to_spherical
@pytest.fixture(autouse=True)
def magic():
    """Seed both RNGs before every test so results are reproducible."""
    seed = 42
    torch.manual_seed(seed)
    np.random.seed(seed)
class HarmonicsNN(nn.Module):
    """Field of the form ``sum_l R_l(r) * Y_l(theta, phi)``.

    A fully-connected network produces one radial coefficient per degree;
    ``harmonics_fn`` supplies the corresponding angular basis values.
    """

    def __init__(self, degrees, harmonics_fn):
        super().__init__()
        self.net_r = FCNN(1, n_output_units=len(degrees))
        self.harmonics_fn = harmonics_fn

    def forward(self, r, theta, phi):
        radial = self.net_r(r)
        angular = self.harmonics_fn(theta, phi)
        return (radial * angular).sum(dim=1, keepdim=True)
# Numerical tolerance; NOTE(review): not referenced anywhere in this chunk --
# confirm it is used elsewhere before removing.
EPS = 1e-4
# Zonal spherical-harmonic degrees 0..9 shared by HarmonicsNN and the fixtures.
degrees = list(range(10))
@pytest.fixture
def x():
    """Sample spherical coordinates (r, theta, phi), each of shape (n, 1)."""
    n_points, r_min, r_max = 1024, 1.0, 10.0
    generator = GeneratorSpherical(n_points, r_min=r_min, r_max=r_max)
    return [coord.reshape(-1, 1) for coord in generator.get_examples()]
@pytest.fixture
def U(x):
    """Vector field: three independent harmonics networks evaluated at x."""
    # Each network gets its own basis instance, matching construction order.
    networks = [
        HarmonicsNN(degrees, ZonalSphericalHarmonics(degrees=degrees))
        for _ in range(3)
    ]
    return tuple(net(*x) for net in networks)
@pytest.fixture
def u(x):
    """Scalar field: a single harmonics network evaluated at x."""
    net = HarmonicsNN(degrees, ZonalSphericalHarmonics(degrees=degrees))
    return net(*x)
def test_cartesian_to_spherical():
    """Spherical coords recovered from random cartesian points round-trip."""
    x = torch.rand(1000, 1, requires_grad=True)
    y = torch.rand(1000, 1, requires_grad=True)
    z = torch.rand(1000, 1, requires_grad=True)
    r, theta, phi = cartesian_to_spherical(x, y, z)
    sin_theta = sin(theta)
    assert torch.allclose(r * sin_theta * cos(phi), x)
    assert torch.allclose(r * sin_theta * sin(phi), y)
    assert torch.allclose(r * cos(theta), z)
def test_spherical_to_cartesian():
    """Cartesian coords produced from random spherical points round-trip."""
    r = torch.rand(1000, 1, requires_grad=True)
    theta = torch.rand(1000, 1, requires_grad=True) * np.pi
    phi = torch.rand(1000, 1, requires_grad=True) * np.pi * 2
    x, y, z = spherical_to_cartesian(r, theta, phi)
    sin_theta = sin(theta)
    assert torch.allclose(r * sin_theta * cos(phi), x)
    assert torch.allclose(r * sin_theta * sin(phi), y)
    assert torch.allclose(r * cos(theta), z)
def test_spherical_div(U, x):
    """spherical_div must match the analytic divergence in spherical coords."""
    divergence = spherical_div(*U, *x)
    ur, utheta, uphi = U
    r, theta, phi = x
    radial_term = diff(r ** 2 * ur, r) / r ** 2
    polar_term = diff(utheta * sin(theta), theta) / (r * sin(theta))
    azimuthal_term = diff(uphi, phi) / (r * sin(theta))
    expected = radial_term + polar_term + azimuthal_term
    assert torch.allclose(divergence, expected)
def test_spherical_grad(u, x):
    """spherical_grad components: du/dr, du/dtheta / r, du/dphi / (r sin θ)."""
    grad_r, grad_theta, grad_phi = spherical_grad(u, *x)
    r, theta, phi = x
    assert torch.allclose(grad_r, diff(u, r))
    assert torch.allclose(grad_theta, diff(u, theta) / r)
    assert torch.allclose(grad_phi, diff(u, phi) / (r * sin(theta)))
def test_spherical_curl(U, x):
    """spherical_curl must match the analytic curl in spherical coordinates."""
    curl_r, curl_theta, curl_phi = spherical_curl(*U, *x)
    ur, utheta, uphi = U
    r, theta, phi = x
    expected_r = (diff(uphi * sin(theta), theta) - diff(utheta, phi)) / (r * sin(theta))
    expected_theta = (diff(ur, phi) / sin(theta) - diff(r * uphi, r)) / r
    expected_phi = (diff(r * utheta, r) - diff(ur, theta)) / r
    assert torch.allclose(curl_r, expected_r)
    assert torch.allclose(curl_theta, expected_theta)
    assert torch.allclose(curl_phi, expected_phi)
def test_spherical_laplacian(u, x):
    """spherical_laplacian must match the analytic scalar Laplacian."""
    lap = spherical_laplacian(u, *x)
    r, theta, phi = x
    radial = diff(r ** 2 * diff(u, r), r) / r ** 2
    polar = diff(sin(theta) * diff(u, theta), theta) / (r ** 2 * sin(theta))
    azimuthal = diff(u, phi, order=2) / (r ** 2 * sin(theta) ** 2)
    assert torch.allclose(lap, radial + polar + azimuthal)
def test_spherical_vector_laplacian(U, x):
    """Check spherical_vector_laplacian against the component-wise analytic
    formula for the vector Laplacian in spherical coordinates.
    """
    out_r, out_theta, out_phi = spherical_vector_laplacian(*U, *x)
    ur, utheta, uphi = U
    r, theta, phi = x
    # Scalar Laplacian in spherical coordinates, applied to each component
    # below before the curvature coupling terms are added.
    def scalar_lap(u):
        return diff(r ** 2 * diff(u, r), r) / r ** 2 \
               + diff(sin(theta) * diff(u, theta), theta) / (r ** 2 * sin(theta)) \
               + diff(u, phi, order=2) / (r ** 2 * sin(theta) ** 2)
    # r-component.
    assert torch.allclose(
        out_r,
        scalar_lap(ur)
        - 2 * ur / r ** 2
        - 2 / (r ** 2 * sin(theta)) * diff(utheta * sin(theta), theta)
        - 2 / (r ** 2 * sin(theta)) * diff(uphi, phi)
    )
    # theta-component.
    assert torch.allclose(
        out_theta,
        scalar_lap(utheta)
        - utheta / (r ** 2 * sin(theta) ** 2)
        + 2 / r ** 2 * diff(ur, theta)
        - 2 * cos(theta) / (r ** 2 * sin(theta) ** 2) * diff(uphi, phi)
    )
    # phi-component.
    assert torch.allclose(
        out_phi,
        scalar_lap(uphi)
        - uphi / (r ** 2 * sin(theta) ** 2)
        + 2 / (r ** 2 * sin(theta)) * diff(ur, phi)
        + 2 * cos(theta) / (r ** 2 * sin(theta) ** 2) * diff(utheta, phi)
    )
| [
"neurodiffeq.diff",
"numpy.random.seed",
"neurodiffeq.operators.cartesian_to_spherical",
"torch.manual_seed",
"neurodiffeq.operators.spherical_to_cartesian",
"neurodiffeq.operators.spherical_curl",
"pytest.fixture",
"neurodiffeq.function_basis.ZonalSphericalHarmonics",
"neurodiffeq.operators.spheric... | [((624, 652), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (638, 652), False, 'import pytest\n'), ((670, 691), 'torch.manual_seed', 'torch.manual_seed', (['(42)'], {}), '(42)\n', (687, 691), False, 'import torch\n'), ((696, 714), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (710, 714), True, 'import numpy as np\n'), ((1211, 1265), 'neurodiffeq.generators.GeneratorSpherical', 'GeneratorSpherical', (['n_points'], {'r_min': 'r_min', 'r_max': 'r_max'}), '(n_points, r_min=r_min, r_max=r_max)\n', (1229, 1265), False, 'from neurodiffeq.generators import GeneratorSpherical\n'), ((1627, 1666), 'torch.rand', 'torch.rand', (['(1000)', '(1)'], {'requires_grad': '(True)'}), '(1000, 1, requires_grad=True)\n', (1637, 1666), False, 'import torch\n'), ((1675, 1714), 'torch.rand', 'torch.rand', (['(1000)', '(1)'], {'requires_grad': '(True)'}), '(1000, 1, requires_grad=True)\n', (1685, 1714), False, 'import torch\n'), ((1723, 1762), 'torch.rand', 'torch.rand', (['(1000)', '(1)'], {'requires_grad': '(True)'}), '(1000, 1, requires_grad=True)\n', (1733, 1762), False, 'import torch\n'), ((1783, 1814), 'neurodiffeq.operators.cartesian_to_spherical', 'cartesian_to_spherical', (['x', 'y', 'z'], {}), '(x, y, z)\n', (1805, 1814), False, 'from neurodiffeq.operators import spherical_to_cartesian, cartesian_to_spherical\n'), ((2035, 2074), 'torch.rand', 'torch.rand', (['(1000)', '(1)'], {'requires_grad': '(True)'}), '(1000, 1, requires_grad=True)\n', (2045, 2074), False, 'import torch\n'), ((2211, 2248), 'neurodiffeq.operators.spherical_to_cartesian', 'spherical_to_cartesian', (['r', 'theta', 'phi'], {}), '(r, theta, phi)\n', (2233, 2248), False, 'from neurodiffeq.operators import spherical_to_cartesian, cartesian_to_spherical\n'), ((2466, 2487), 'neurodiffeq.operators.spherical_div', 'spherical_div', (['*U', '*x'], {}), '(*U, *x)\n', (2479, 2487), False, 'from neurodiffeq.operators import 
spherical_div\n'), ((2702, 2726), 'torch.allclose', 'torch.allclose', (['out', 'ans'], {}), '(out, ans)\n', (2716, 2726), False, 'import torch\n'), ((2792, 2813), 'neurodiffeq.operators.spherical_grad', 'spherical_grad', (['u', '*x'], {}), '(u, *x)\n', (2806, 2813), False, 'from neurodiffeq.operators import spherical_grad\n'), ((3071, 3093), 'neurodiffeq.operators.spherical_curl', 'spherical_curl', (['*U', '*x'], {}), '(*U, *x)\n', (3085, 3093), False, 'from neurodiffeq.operators import spherical_curl\n'), ((3466, 3492), 'neurodiffeq.operators.spherical_laplacian', 'spherical_laplacian', (['u', '*x'], {}), '(u, *x)\n', (3485, 3492), False, 'from neurodiffeq.operators import spherical_laplacian\n'), ((3820, 3854), 'neurodiffeq.operators.spherical_vector_laplacian', 'spherical_vector_laplacian', (['*U', '*x'], {}), '(*U, *x)\n', (3846, 3854), False, 'from neurodiffeq.operators import spherical_vector_laplacian\n'), ((2087, 2126), 'torch.rand', 'torch.rand', (['(1000)', '(1)'], {'requires_grad': '(True)'}), '(1000, 1, requires_grad=True)\n', (2097, 2126), False, 'import torch\n'), ((2869, 2879), 'neurodiffeq.diff', 'diff', (['u', 'r'], {}), '(u, r)\n', (2873, 2879), False, 'from neurodiffeq import diff\n'), ((1380, 1420), 'neurodiffeq.function_basis.ZonalSphericalHarmonics', 'ZonalSphericalHarmonics', ([], {'degrees': 'degrees'}), '(degrees=degrees)\n', (1403, 1420), False, 'from neurodiffeq.function_basis import ZonalSphericalHarmonics\n'), ((1536, 1576), 'neurodiffeq.function_basis.ZonalSphericalHarmonics', 'ZonalSphericalHarmonics', ([], {'degrees': 'degrees'}), '(degrees=degrees)\n', (1559, 1576), False, 'from neurodiffeq.function_basis import ZonalSphericalHarmonics\n'), ((1864, 1872), 'torch.cos', 'cos', (['phi'], {}), '(phi)\n', (1867, 1872), False, 'from torch import sin, cos\n'), ((1926, 1934), 'torch.sin', 'sin', (['phi'], {}), '(phi)\n', (1929, 1934), False, 'from torch import sin, cos\n'), ((1969, 1985), 'torch.cos', 'torch.cos', (['theta'], {}), 
'(theta)\n', (1978, 1985), False, 'import torch\n'), ((2145, 2184), 'torch.rand', 'torch.rand', (['(1000)', '(1)'], {'requires_grad': '(True)'}), '(1000, 1, requires_grad=True)\n', (2155, 2184), False, 'import torch\n'), ((2298, 2306), 'torch.cos', 'cos', (['phi'], {}), '(phi)\n', (2301, 2306), False, 'from torch import sin, cos\n'), ((2360, 2368), 'torch.sin', 'sin', (['phi'], {}), '(phi)\n', (2363, 2368), False, 'from torch import sin, cos\n'), ((2403, 2419), 'torch.cos', 'torch.cos', (['theta'], {}), '(theta)\n', (2412, 2419), False, 'import torch\n'), ((2655, 2670), 'neurodiffeq.diff', 'diff', (['uphi', 'phi'], {}), '(uphi, phi)\n', (2659, 2670), False, 'from neurodiffeq import diff\n'), ((2918, 2932), 'neurodiffeq.diff', 'diff', (['u', 'theta'], {}), '(u, theta)\n', (2922, 2932), False, 'from neurodiffeq import diff\n'), ((2973, 2985), 'neurodiffeq.diff', 'diff', (['u', 'phi'], {}), '(u, phi)\n', (2977, 2985), False, 'from neurodiffeq import diff\n'), ((1845, 1861), 'torch.sin', 'torch.sin', (['theta'], {}), '(theta)\n', (1854, 1861), False, 'import torch\n'), ((1907, 1923), 'torch.sin', 'torch.sin', (['theta'], {}), '(theta)\n', (1916, 1923), False, 'import torch\n'), ((2279, 2295), 'torch.sin', 'torch.sin', (['theta'], {}), '(theta)\n', (2288, 2295), False, 'import torch\n'), ((2341, 2357), 'torch.sin', 'torch.sin', (['theta'], {}), '(theta)\n', (2350, 2357), False, 'import torch\n'), ((2545, 2565), 'neurodiffeq.diff', 'diff', (['(r ** 2 * ur)', 'r'], {}), '(r ** 2 * ur, r)\n', (2549, 2565), False, 'from neurodiffeq import diff\n'), ((2678, 2688), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (2681, 2688), False, 'from torch import sin, cos\n'), ((2993, 3003), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (2996, 3003), False, 'from torch import sin, cos\n'), ((3208, 3225), 'neurodiffeq.diff', 'diff', (['utheta', 'phi'], {}), '(utheta, phi)\n', (3212, 3225), False, 'from neurodiffeq import diff\n'), ((3234, 3244), 'torch.sin', 'sin', (['theta'], {}), 
'(theta)\n', (3237, 3244), False, 'from torch import sin, cos\n'), ((3314, 3331), 'neurodiffeq.diff', 'diff', (['(r * uphi)', 'r'], {}), '(r * uphi, r)\n', (3318, 3331), False, 'from neurodiffeq import diff\n'), ((3374, 3393), 'neurodiffeq.diff', 'diff', (['(r * utheta)', 'r'], {}), '(r * utheta, r)\n', (3378, 3393), False, 'from neurodiffeq import diff\n'), ((3396, 3411), 'neurodiffeq.diff', 'diff', (['ur', 'theta'], {}), '(ur, theta)\n', (3400, 3411), False, 'from neurodiffeq import diff\n'), ((3686, 3707), 'neurodiffeq.diff', 'diff', (['u', 'phi'], {'order': '(2)'}), '(u, phi, order=2)\n', (3690, 3707), False, 'from neurodiffeq import diff\n'), ((4082, 4103), 'neurodiffeq.diff', 'diff', (['u', 'phi'], {'order': '(2)'}), '(u, phi, order=2)\n', (4086, 4103), False, 'from neurodiffeq import diff\n'), ((4334, 4349), 'neurodiffeq.diff', 'diff', (['uphi', 'phi'], {}), '(uphi, phi)\n', (4338, 4349), False, 'from neurodiffeq import diff\n'), ((4570, 4585), 'neurodiffeq.diff', 'diff', (['uphi', 'phi'], {}), '(uphi, phi)\n', (4574, 4585), False, 'from neurodiffeq import diff\n'), ((4813, 4830), 'neurodiffeq.diff', 'diff', (['utheta', 'phi'], {}), '(utheta, phi)\n', (4817, 4830), False, 'from neurodiffeq import diff\n'), ((2629, 2639), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (2632, 2639), False, 'from torch import sin, cos\n'), ((3285, 3298), 'neurodiffeq.diff', 'diff', (['ur', 'phi'], {}), '(ur, phi)\n', (3289, 3298), False, 'from neurodiffeq import diff\n'), ((3301, 3311), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (3304, 3311), False, 'from torch import sin, cos\n'), ((4498, 4513), 'neurodiffeq.diff', 'diff', (['ur', 'theta'], {}), '(ur, theta)\n', (4502, 4513), False, 'from neurodiffeq import diff\n'), ((4743, 4756), 'neurodiffeq.diff', 'diff', (['ur', 'phi'], {}), '(ur, phi)\n', (4747, 4756), False, 'from neurodiffeq import diff\n'), ((2603, 2613), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (2606, 2613), False, 'from torch import sin, cos\n'), 
((3187, 3197), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (3190, 3197), False, 'from torch import sin, cos\n'), ((3664, 3674), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (3667, 3674), False, 'from torch import sin, cos\n'), ((3720, 3730), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (3723, 3730), False, 'from torch import sin, cos\n'), ((4051, 4061), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (4054, 4061), False, 'from torch import sin, cos\n'), ((4116, 4126), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (4119, 4126), False, 'from torch import sin, cos\n'), ((4320, 4330), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (4323, 4330), False, 'from torch import sin, cos\n'), ((4528, 4538), 'torch.cos', 'cos', (['theta'], {}), '(theta)\n', (4531, 4538), False, 'from torch import sin, cos\n'), ((4771, 4781), 'torch.cos', 'cos', (['theta'], {}), '(theta)\n', (4774, 4781), False, 'from torch import sin, cos\n'), ((3577, 3587), 'neurodiffeq.diff', 'diff', (['u', 'r'], {}), '(u, r)\n', (3581, 3587), False, 'from neurodiffeq import diff\n'), ((3616, 3626), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (3619, 3626), False, 'from torch import sin, cos\n'), ((3629, 3643), 'neurodiffeq.diff', 'diff', (['u', 'theta'], {}), '(u, theta)\n', (3633, 3643), False, 'from neurodiffeq import diff\n'), ((3955, 3965), 'neurodiffeq.diff', 'diff', (['u', 'r'], {}), '(u, r)\n', (3959, 3965), False, 'from neurodiffeq import diff\n'), ((4003, 4013), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (4006, 4013), False, 'from torch import sin, cos\n'), ((4016, 4030), 'neurodiffeq.diff', 'diff', (['u', 'theta'], {}), '(u, theta)\n', (4020, 4030), False, 'from neurodiffeq import diff\n'), ((4249, 4259), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (4252, 4259), False, 'from torch import sin, cos\n'), ((4277, 4287), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (4280, 4287), False, 'from torch import sin, cos\n'), ((4551, 4561), 'torch.sin', 'sin', 
(['theta'], {}), '(theta)\n', (4554, 4561), False, 'from torch import sin, cos\n'), ((4729, 4739), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (4732, 4739), False, 'from torch import sin, cos\n'), ((4794, 4804), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (4797, 4804), False, 'from torch import sin, cos\n'), ((4458, 4468), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (4461, 4468), False, 'from torch import sin, cos\n'), ((4688, 4698), 'torch.sin', 'sin', (['theta'], {}), '(theta)\n', (4691, 4698), False, 'from torch import sin, cos\n')] |
#!/usr/bin/env python3
# coding: utf-8
"""
PanelResolver class
Copyright 2017 MicaSense, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in the
Software without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import math
import numpy as np
import cv2
import re
import pyzbar.pyzbar as pyzbar
from skimage import measure
import matplotlib.pyplot as plt
import micasense.image as image
class Panel(object):
    """Locate and measure a MicaSense reflectance calibration panel.

    The panel region comes from one of three places, in priority order:

    1. ``panel_corners`` supplied by the caller,
    2. the camera's auto-detected panel metadata (``image.panel_region``), or
    3. a search that decodes the panel's QR code and projects a reference
       panel geometry through the QR code's perspective transform.
    """

    def __init__(
        self, img: image.Image, panel_corners=None, ignore_autocalibration=False
    ):
        """
        :param img: micasense image expected to contain a panel
        :param panel_corners: optional pre-computed panel corner points
        :param ignore_autocalibration: when True, ignore the camera's
            auto-detected panel metadata and fall back to the QR-code search
        :raises IOError: if ``img`` is None
        """
        # if we have panel images with QR metadata, panel detection is not called,
        # so this can be forced here
        if img is None:
            raise IOError("Must provide an image")
        self.image = img
        # Stretch radiance into an 8-bit image used for QR decoding/plotting.
        bias = img.radiance().min()
        scale = img.radiance().max() - bias
        self.gray8b = np.zeros(img.radiance().shape, dtype="uint8")
        # NOTE(review): beta of -1.0 * scale * bias looks dimensionally odd
        # for an 8-bit mapping (expected roughly -256 * bias / scale); kept
        # as-is pending confirmation.
        cv2.convertScaleAbs(
            img.undistorted(img.radiance()),
            self.gray8b,
            256.0 / scale,
            -1.0 * scale * bias,
        )
        # BUGFIX: was `~ignore_autocalibration`. Bitwise NOT of a bool yields
        # -1 or -2 (both truthy), so the flag previously had no effect.
        if self.image.auto_calibration_image and not ignore_autocalibration:
            self.__panel_type = "auto"  # panels the camera found we call auto
            if panel_corners is not None:
                self.__panel_bounds = np.array(panel_corners)
            else:
                self.__panel_bounds = np.array(self.image.panel_region)
            self.panel_albedo = self.image.panel_albedo
            self.serial = self.image.panel_serial
            self.qr_area = None
            self.qr_bounds = None
            self.panel_std = None
            self.saturated_panel_pixels_pct = None
            self.panel_pixels_mean = None
            self.panel_version = None
            # Serial format: RPvv-nnnnnnn-cc; vv encodes the panel version.
            if re.search(r"RP\d{2}-(\d{7})-\D{2}", self.image.panel_serial):
                self.serial = self.image.panel_serial
                self.panel_version = int(self.image.panel_serial[2:4])
        else:
            self.__panel_type = "search"  # panels we search for we call search
            self.serial = None
            self.qr_area = None
            self.qr_bounds = None
            self.panel_std = None
            self.saturated_panel_pixels_pct = None
            self.panel_pixels_mean = None
            self.panel_version = None
            if panel_corners is not None:
                self.__panel_bounds = np.array(panel_corners)
            else:
                self.__panel_bounds = None

    def __expect_panel(self):
        """Panels are expected in every band except the thermal (LWIR) band."""
        return self.image.band_name.upper() != "LWIR"

    def __find_qr(self):
        """Decode the panel QR code and cache its serial, version and bounds."""
        decoded = pyzbar.decode(self.gray8b, symbols=[pyzbar.ZBarSymbol.QRCODE])
        for symbol in decoded:
            serial_str = symbol.data.decode("UTF-8")
            m = re.search(r"RP\d{2}-(\d{7})-\D{2}", serial_str)
            if m:
                self.serial = serial_str
                self.panel_version = int(self.serial[2:4])
                self.qr_bounds = []
                for point in symbol.polygon:
                    self.qr_bounds.append([point.x, point.y])
                self.qr_bounds = np.asarray(self.qr_bounds, np.int32)
                self.qr_area = cv2.contourArea(self.qr_bounds)
                break

    def __pt_in_image_bounds(self, pt):
        """Return True if (x, y) point lies within the image extents."""
        width, height = self.image.size()
        if pt[0] >= width or pt[0] < 0:
            return False
        if pt[1] >= height or pt[1] < 0:
            return False
        return True

    def reflectance_from_panel_serial(self):
        """Return the panel reflectance interpolated at this band's wavelength.

        Version >= 4 serials embed two (wavelength, reflectance) endpoints;
        a linear fit between them is evaluated at the band center wavelength.
        Returns None for older panel versions without embedded calibration.
        :raises ValueError: if no panel serial can be found in the image
        """
        if self.__panel_type == "auto":
            return self.panel_albedo
        if self.serial is None:
            self.__find_qr()
            if self.serial is None:
                raise ValueError("Panel serial number not found")
        if self.panel_version >= 4:
            # Serial suffix encodes wavelength/reflectance endpoints; the
            # reflectance digits are stored in thousandths.
            min_wl = float(self.serial[-14:-10])
            min_rf = float(self.serial[-10:-7]) / 1000.0
            max_wl = float(self.serial[-7:-3])
            max_rf = float(self.serial[-3:]) / 1000.0
            c = np.polyfit([min_wl, max_wl], [min_rf, max_rf], 1)
            p = np.poly1d(c)
            return p(self.image.center_wavelength)
        else:
            return None

    def qr_corners(self):
        """Return the QR code corner points, or None for auto-detected panels."""
        if self.__panel_type == "auto":
            return None
        if self.qr_bounds is None:
            self.__find_qr()
        return self.qr_bounds

    def panel_detected(self):
        """Return True if a panel region is known or a QR code was found."""
        if self.__expect_panel() is False:
            return False
        if self.__panel_type == "auto":
            return True
        if self.serial is None:
            self.__find_qr()
        return self.qr_bounds is not None

    def panel_corners(self):
        """get the corners of a panel region based on the qr code location
        Our algorithm to do this uses a 'reference' qr code location and
        it's associate panel region. We find the affine transform
        between the reference qr and our qr, and apply that same transform to the
        reference panel region to find our panel region. Because of a limitation
        of the pyzbar library, the rotation of the absolute QR code isn't known,
        so we then try all 4 rotations and test against a cost function which is the
        minimum of the standard devation divided by the mean value for the panel region"""
        if self.__panel_bounds is not None:
            return self.__panel_bounds
        if self.serial is None:
            self.__find_qr()
            if self.serial is None:  # didn't find a panel in this image
                return None
        # Physical reference geometry in mm: `p` is the QR half-size, `s` the
        # panel sampling square size, `t_off` the panel offset from the QR.
        if self.panel_version < 3:
            # the panel is 154.4 x 152.4 mm, vs. 84 x 84 mm for the QR code;
            # it is left 143.20 mm from the QR code.
            # use the inner 50% square of the panel
            s = 76.2
            p = 42
            t_off = np.array([-143.2, 0])
        elif (self.panel_version >= 3) and (self.panel_version < 6):
            s = 50
            p = 45
            t_off = np.array([-145.8, 0])
        elif self.panel_version >= 6:
            # the panel is 100 x 100 mm, vs. 91 x 91 mm for the QR code;
            # it is down 125.94 mm from the QR code.
            # use the inner 50% square of the panel
            p = 41
            s = 50
            t_off = np.array([0, -130.84])
        reference_panel_pts = (
            np.asarray([[-s, s], [s, s], [s, -s], [-s, -s]], dtype=np.float32) * 0.5
            + t_off
        )
        reference_qr_pts = np.asarray(
            [[-p, p], [p, p], [p, -p], [-p, -p]], dtype=np.float32
        )
        bounds = []
        costs = []
        # pyzbar doesn't report QR orientation: try all 4 rotations and keep
        # the candidate whose panel region has the lowest std/mean.
        for rotation in range(0, 4):
            qr_points = np.roll(reference_qr_pts, rotation, axis=0)
            src = np.asarray([tuple(row) for row in qr_points[:]], np.float32)
            dst = np.asarray([tuple(row) for row in self.qr_corners()[:]], np.float32)
            # we determine the homography from the 4 corner points
            warp_matrix = cv2.getPerspectiveTransform(src, dst)
            pts = np.asarray([reference_panel_pts], "float32")
            panel_bounds = cv2.convexHull(
                cv2.perspectiveTransform(pts, warp_matrix), clockwise=False
            )
            panel_bounds = np.squeeze(panel_bounds)  # remove nested lists
            bounds_in_image = True
            for i, point in enumerate(panel_bounds):
                if not self.__pt_in_image_bounds(point):
                    bounds_in_image = False
            if bounds_in_image:
                mean, std, _, _ = self.region_stats(
                    self.image.raw(), panel_bounds, sat_threshold=65000
                )
                bounds.append(panel_bounds.astype(np.int32))
                costs.append(std / mean)
        # NOTE(review): if no rotation produces in-image bounds, `costs` is
        # empty and min() raises ValueError -- confirm desired behavior.
        idx = costs.index(min(costs))
        self.__panel_bounds = bounds[idx]
        return self.__panel_bounds

    def ordered_panel_coordinates(self):
        """
        Return panel region coordinates in a predictable order. Panel region
        coordinates that are automatically detected by the camera are ordered
        differently than coordinates detected by Panel.panel_corners().
        :return: [ (ur), (ul), (ll), (lr) ] to mirror Image.panel_region
        attribute order
        """
        pc = self.panel_corners()
        pc = sorted(pc, key=lambda x: x[0])
        # get the coordinates on the "left" and "right" side of the bounding box
        left_coords = pc[:2]
        right_coords = pc[2:]
        # sort y values ascending for correct order
        # BUGFIX: previously sorted on index 0 (the x coordinate), which
        # contradicted the comment above and left upper/lower order arbitrary.
        left_coords = sorted(left_coords, key=lambda pt: pt[1])
        right_coords = sorted(right_coords, key=lambda pt: pt[1])
        return [
            tuple(right_coords[1]),
            tuple(left_coords[1]),
            tuple(left_coords[0]),
            tuple(right_coords[0]),
        ]

    def region_stats(self, img, region, sat_threshold=None):
        """Provide regional statistics for a image over a region
        Inputs: img is any image ndarray, region is a skimage shape
        Outputs: mean, std, count, and saturated count tuple for the region"""
        rev_panel_pts = np.fliplr(region)  # skimage and opencv coords are reversed
        w, h = img.shape
        mask = measure.grid_points_in_poly((w, h), rev_panel_pts)
        num_pixels = mask.sum()
        panel_pixels = img[mask]
        stdev = panel_pixels.std()
        mean_value = panel_pixels.mean()
        saturated_count = 0
        if sat_threshold is not None:
            saturated_count = (panel_pixels > sat_threshold).sum()
            # set saturated pixels here
            if num_pixels > 0:
                self.saturated_panel_pixels_pct = (100.0 * saturated_count) / num_pixels
        return mean_value, stdev, num_pixels, saturated_count

    def raw(self):
        """Panel-region statistics over the undistorted raw image."""
        raw_img = self.image.undistorted(self.image.raw())
        return self.region_stats(raw_img, self.panel_corners(), sat_threshold=65000)

    def intensity(self):
        """Panel-region statistics over the undistorted intensity image."""
        intensity_img = self.image.undistorted(self.image.intensity())
        return self.region_stats(intensity_img, self.panel_corners(), sat_threshold=65000)

    def radiance(self):
        """Panel-region statistics over the undistorted radiance image."""
        radiance_img = self.image.undistorted(self.image.radiance())
        return self.region_stats(radiance_img, self.panel_corners())

    def reflectance_mean(self):
        """Mean reflectance over the panel region.

        Requires the caller to have computed the image's reflectance first
        (via ``image.reflectance(irradiance)``); otherwise a hint is printed
        and region_stats is invoked on None.
        """
        reflectance_image = self.image.reflectance()
        if reflectance_image is None:
            print(
                "First calculate the reflectance image by providing a\n"
                " band specific irradiance to the calling image.reflectance(irradiance)"
            )
        mean, _, _, _ = self.region_stats(reflectance_image, self.panel_corners())
        return mean

    def irradiance_mean(self, reflectance):
        """Estimate irradiance from panel radiance assuming a Lambertian panel:
        irradiance = pi * radiance / reflectance."""
        radiance_mean, _, _, _ = self.radiance()
        return radiance_mean * math.pi / reflectance

    def plot_image(self):
        """Return an RGB visualization with QR and panel regions outlined."""
        display_img = cv2.cvtColor(self.gray8b, cv2.COLOR_GRAY2RGB)
        if self.panel_detected():
            if self.qr_corners() is not None:
                cv2.drawContours(display_img, [self.qr_corners()], 0, (255, 0, 0), 3)
            cv2.drawContours(display_img, [self.panel_corners()], 0, (0, 0, 255), 3)
        font = cv2.FONT_HERSHEY_DUPLEX
        if self.panel_detected():
            # Anchor the serial label near the QR code when available,
            # otherwise near the panel region itself.
            if self.qr_corners() is not None:
                xloc = self.qr_corners()[0][0] - 100
                yloc = self.qr_corners()[0][1] + 100
            else:
                xloc = self.panel_corners()[0][0] - 100
                yloc = self.panel_corners()[0][1] + 100
            cv2.putText(
                display_img,
                str(self.serial).split("_")[0],
                (xloc, yloc),
                font,
                1,
                255,
                2,
            )
        return display_img

    def plot(self, figsize=(14, 14)):
        """Show the annotated panel image with matplotlib; returns (fig, ax)."""
        display_img = self.plot_image()
        fig, ax = plt.subplots(figsize=figsize)
        ax.imshow(display_img)
        plt.tight_layout()
        plt.show()
        return fig, ax
| [
"skimage.measure.grid_points_in_poly",
"matplotlib.pyplot.tight_layout",
"numpy.poly1d",
"cv2.contourArea",
"matplotlib.pyplot.show",
"numpy.polyfit",
"cv2.cvtColor",
"pyzbar.pyzbar.decode",
"numpy.asarray",
"numpy.roll",
"cv2.getPerspectiveTransform",
"numpy.fliplr",
"numpy.array",
"re.se... | [((3507, 3569), 'pyzbar.pyzbar.decode', 'pyzbar.decode', (['self.gray8b'], {'symbols': '[pyzbar.ZBarSymbol.QRCODE]'}), '(self.gray8b, symbols=[pyzbar.ZBarSymbol.QRCODE])\n', (3520, 3569), True, 'import pyzbar.pyzbar as pyzbar\n'), ((8203, 8269), 'numpy.asarray', 'np.asarray', (['[[-p, p], [p, p], [p, -p], [-p, -p]]'], {'dtype': 'np.float32'}), '([[-p, p], [p, p], [p, -p], [-p, -p]], dtype=np.float32)\n', (8213, 8269), True, 'import numpy as np\n'), ((10909, 10926), 'numpy.fliplr', 'np.fliplr', (['region'], {}), '(region)\n', (10918, 10926), True, 'import numpy as np\n'), ((11009, 11059), 'skimage.measure.grid_points_in_poly', 'measure.grid_points_in_poly', (['(w, h)', 'rev_panel_pts'], {}), '((w, h), rev_panel_pts)\n', (11036, 11059), False, 'from skimage import measure\n'), ((12689, 12734), 'cv2.cvtColor', 'cv2.cvtColor', (['self.gray8b', 'cv2.COLOR_GRAY2RGB'], {}), '(self.gray8b, cv2.COLOR_GRAY2RGB)\n', (12701, 12734), False, 'import cv2\n'), ((13693, 13722), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (13705, 13722), True, 'import matplotlib.pyplot as plt\n'), ((13762, 13780), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (13778, 13780), True, 'import matplotlib.pyplot as plt\n'), ((13789, 13799), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (13797, 13799), True, 'import matplotlib.pyplot as plt\n'), ((2670, 2732), 're.search', 're.search', (['"""RP\\\\d{2}-(\\\\d{7})-\\\\D{2}"""', 'self.image.panel_serial'], {}), "('RP\\\\d{2}-(\\\\d{7})-\\\\D{2}', self.image.panel_serial)\n", (2679, 2732), False, 'import re\n'), ((3670, 3719), 're.search', 're.search', (['"""RP\\\\d{2}-(\\\\d{7})-\\\\D{2}"""', 'serial_str'], {}), "('RP\\\\d{2}-(\\\\d{7})-\\\\D{2}', serial_str)\n", (3679, 3719), False, 'import re\n'), ((4988, 5037), 'numpy.polyfit', 'np.polyfit', (['[min_wl, max_wl]', '[min_rf, max_rf]', '(1)'], {}), '([min_wl, max_wl], [min_rf, max_rf], 1)\n', (4998, 5037), 
True, 'import numpy as np\n'), ((5054, 5066), 'numpy.poly1d', 'np.poly1d', (['c'], {}), '(c)\n', (5063, 5066), True, 'import numpy as np\n'), ((7193, 7214), 'numpy.array', 'np.array', (['[-143.2, 0]'], {}), '([-143.2, 0])\n', (7201, 7214), True, 'import numpy as np\n'), ((8392, 8435), 'numpy.roll', 'np.roll', (['reference_qr_pts', 'rotation'], {'axis': '(0)'}), '(reference_qr_pts, rotation, axis=0)\n', (8399, 8435), True, 'import numpy as np\n'), ((8697, 8734), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['src', 'dst'], {}), '(src, dst)\n', (8724, 8734), False, 'import cv2\n'), ((8816, 8860), 'numpy.asarray', 'np.asarray', (['[reference_panel_pts]', '"""float32"""'], {}), "([reference_panel_pts], 'float32')\n", (8826, 8860), True, 'import numpy as np\n'), ((9021, 9045), 'numpy.squeeze', 'np.squeeze', (['panel_bounds'], {}), '(panel_bounds)\n', (9031, 9045), True, 'import numpy as np\n'), ((2204, 2227), 'numpy.array', 'np.array', (['panel_corners'], {}), '(panel_corners)\n', (2212, 2227), True, 'import numpy as np\n'), ((2284, 2317), 'numpy.array', 'np.array', (['self.image.panel_region'], {}), '(self.image.panel_region)\n', (2292, 2317), True, 'import numpy as np\n'), ((3293, 3316), 'numpy.array', 'np.array', (['panel_corners'], {}), '(panel_corners)\n', (3301, 3316), True, 'import numpy as np\n'), ((4012, 4048), 'numpy.asarray', 'np.asarray', (['self.qr_bounds', 'np.int32'], {}), '(self.qr_bounds, np.int32)\n', (4022, 4048), True, 'import numpy as np\n'), ((4080, 4111), 'cv2.contourArea', 'cv2.contourArea', (['self.qr_bounds'], {}), '(self.qr_bounds)\n', (4095, 4111), False, 'import cv2\n'), ((7343, 7364), 'numpy.array', 'np.array', (['[-145.8, 0]'], {}), '([-145.8, 0])\n', (7351, 7364), True, 'import numpy as np\n'), ((8073, 8139), 'numpy.asarray', 'np.asarray', (['[[-s, s], [s, s], [s, -s], [-s, -s]]'], {'dtype': 'np.float32'}), '([[-s, s], [s, s], [s, -s], [-s, -s]], dtype=np.float32)\n', (8083, 8139), True, 'import numpy as np\n'), ((8920, 
8962), 'cv2.perspectiveTransform', 'cv2.perspectiveTransform', (['pts', 'warp_matrix'], {}), '(pts, warp_matrix)\n', (8944, 8962), False, 'import cv2\n'), ((8005, 8027), 'numpy.array', 'np.array', (['[0, -130.84]'], {}), '([0, -130.84])\n', (8013, 8027), True, 'import numpy as np\n')] |
# Copyright (c) Microsoft Corporation and contributors.
# Licensed under the MIT License.
import warnings
import numpy as np
from scipy import stats
from ..embed import select_dimension, AdjacencySpectralEmbed
from ..utils import import_graph, fit_plug_in_variance_estimator
from ..align import SignFlips
from ..align import SeedlessProcrustes
from .base import BaseInference
from sklearn.utils import check_array
from sklearn.metrics import pairwise_distances
from sklearn.metrics.pairwise import pairwise_kernels
from sklearn.metrics.pairwise import PAIRED_DISTANCES
from sklearn.metrics.pairwise import PAIRWISE_KERNEL_FUNCTIONS
from hyppo.ksample import KSample
from hyppo._utils import gaussian
# Metric names accepted below: any sklearn paired distance, any sklearn
# pairwise kernel, or "gaussian" (hyppo's median-heuristic gaussian kernel).
_VALID_DISTANCES = list(PAIRED_DISTANCES.keys())
_VALID_KERNELS = list(PAIRWISE_KERNEL_FUNCTIONS.keys())
_VALID_KERNELS.append("gaussian")  # can use hyppo's median gaussian kernel too
_VALID_METRICS = _VALID_DISTANCES + _VALID_KERNELS
# Backend two-sample tests supported via hyppo's KSample.
_VALID_TESTS = ["cca", "dcorr", "hhg", "rv", "hsic", "mgc"]
class LatentDistributionTest(BaseInference):
    """Two-sample hypothesis test for the problem of determining whether two random
    dot product graphs have the same distributions of latent positions.

    This test can operate on two graphs where there is no known matching
    between the vertices of the two graphs, or even when the number of vertices
    is different. Currently, testing is only supported for undirected graphs.

    Read more in the :ref:`tutorials <inference_tutorials>`

    Parameters
    ----------
    test : str (default="dcorr")
        Backend hypothesis test to use, one of ["cca", "dcorr", "hhg", "rv", "hsic", "mgc"].
        These tests are typically used for independence testing, but here they
        are used for a two-sample hypothesis test on the latent positions of
        two graphs. See :class:`hyppo.ksample.KSample` for more information.
    metric : str or function (default="euclidean")
        Distance or a kernel metric to use, either a callable or a valid string.
        If a callable, then it should behave similarly to either
        :func:`sklearn.metrics.pairwise_distances` or to
        :func:`sklearn.metrics.pairwise.pairwise_kernels`.
        If a string, then it should be either one of the keys in either
        `sklearn.metrics.pairwise.PAIRED_DISTANCES` or in
        `sklearn.metrics.pairwise.PAIRWISE_KERNEL_FUNCTIONS`, or "gaussian",
        which will use a gaussian kernel with an adaptively selected bandwidth.
        It is recommended to use kernels (e.g. "gaussian") with kernel-based
        hsic test and distances (e.g. "euclidean") with all other tests.
    n_components : int or None (default=None)
        Number of embedding dimensions. If None, the optimal embedding
        dimensions are found by the Zhu and Ghodsi algorithm.
        See :func:`~graspologic.embed.selectSVD` for more information.
        This argument is ignored if `input_graph` is False.
    n_bootstraps : int (default=200)
        Number of bootstrap iterations for the backend hypothesis test.
        See :class:`hyppo.ksample.KSample` for more information.
    workers : int (default=1)
        Number of workers to use. If more than 1, parallelizes the code.
        Supply -1 to use all cores available to the Process.
    size_correction : bool (default=True)
        Ignored when the two graphs have the same number of vertices. The test
        degrades in validity as the number of vertices of the two graphs
        diverge from each other, unless a correction is performed.
        - True
            Whenever the two graphs have different numbers of vertices,
            estimates the plug-in estimator for the variance and uses it to
            correct the embedding of the larger graph.
        - False
            Does not perform any modifications (not recommended).
    pooled : bool (default=False)
        Ignored whenever the two graphs have the same number of vertices or
        `size_correction` is set to False. In order to correct the adjacency
        spectral embedding used in the test, it is needed to estimate the
        variance for each of the latent position estimates in the larger graph,
        which requires to compute different sample moments. These moments can
        be computed either over the larger graph (False), or over both graphs
        (True). Setting it to True should not affect the behavior of the test
        under the null hypothesis, but it is not clear whether it has more
        power or less power under which alternatives. Generally not recommended,
        as it is untested and included for experimental purposes.
    align_type : str, {'sign_flips' (default), 'seedless_procrustes'} or None
        Random dot product graphs have an inherent non-identifiability,
        associated with their latent positions. Thus, two embeddings of
        different graphs may not be orthogonally aligned. Without this accounted
        for, two embeddings of different graphs may appear different, even
        if the distributions of the true latent positions are the same.
        There are several options in terms of how this can be addressed:
        - 'sign_flips'
            A simple heuristic that flips the signs of one of the embeddings,
            if the medians of the two embeddings in that dimension differ from
            each other. See :class:`~graspologic.align.SignFlips` for more
            information on this procedure. In the limit, this is guaranteed to
            lead to a valid test, as long as matrix :math:`X^T X`, where
            :math:`X` is the latent positions does not have repeated non-zero
            eigenvalues. This may, however, result in an invalid test in the
            finite sample case if the some eigenvalues are same or close.
        - 'seedless_procrustes'
            An algorithm that learns an orthogonal alignment matrix. This
            procedure is slower than sign flips, but is guaranteed to yield a
            valid test in the limit, and also makes the test more valid in some
            finite sample cases, in which the eigenvalues are very close to
            each other. See `~graspologic.align.SeedlessProcrustes` for more
            information on the procedure.
        - None
            Do not use any alignment technique. This is strongly not
            recommended, as it may often result in a test that is not valid.
    align_kws : dict
        Keyword arguments for the aligner of choice, either
        `~graspologic.align.SignFlips` or
        `~graspologic.align.SeedlessProcrustes`, depending on the align_type.
        See respective classes for more information.
    input_graph : bool (default=True)
        Flag whether to expect two full graphs, or the embeddings.
        - True
            .fit and .fit_predict() expect graphs, either as NetworkX graph objects
            or as adjacency matrices, provided as ndarrays of size (n, n) and
            (m, m). They will be embedded using adjacency spectral embeddings.
        - False
            .fit() and .fit_predict() expect adjacency spectral embeddings of
            the graphs, they must be ndarrays of size (n, d) and (m, d), where
            d must be same. n_components attribute is ignored in this case.

    Attributes
    ----------
    metric_func_ : callable
        A callable associated with the specified metric. See `metric`.
    null_distribution_ : ndarray, shape (n_bootstraps, )
        The distribution of T statistics generated under the null.
    sample_T_statistic_ : float
        The observed difference between the embedded latent positions of the
        two input graphs.
    p_value_ : float
        The overall p value from the test.

    References
    ----------
    .. [1] <NAME>., <NAME>., <NAME>., <NAME>., & <NAME>. (2017).
       "A nonparametric two-sample hypothesis testing problem for random graphs."
       Bernoulli, 23(3), 1599-1630.
    .. [2] <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., & <NAME>. (2019).
       "hyppo: A Comprehensive Multivariate Hypothesis Testing Python Package."
       arXiv:1907.02088.
    .. [3] <NAME>., <NAME>., <NAME>., <NAME>. (2020).
       "Correcting a Nonparametric Two-sample Graph Hypothesis Test for Graphs with Different Numbers of Vertices"
       arXiv:2008.09434
    """

    def __init__(
        self,
        test="dcorr",
        metric="euclidean",
        n_components=None,
        n_bootstraps=200,
        workers=1,
        size_correction=True,
        pooled=False,
        align_type="sign_flips",
        align_kws={},
        # NOTE(review): ``align_kws={}`` is a mutable default argument. It is
        # only unpacked (never mutated) within this class, so it is safe here,
        # but ``align_kws=None`` with an in-body default would be more robust.
        input_graph=True,
    ):
        # check test argument
        if not isinstance(test, str):
            msg = "test must be a str, not {}".format(type(test))
            raise TypeError(msg)
        elif test not in _VALID_TESTS:
            msg = "Unknown test {}. Valid tests are {}".format(test, _VALID_TESTS)
            raise ValueError(msg)
        # metric argument is checked when metric_func_ is instantiated
        # check n_components argument
        if n_components is not None:
            if not isinstance(n_components, int):
                msg = "n_components must be an int, not {}.".format(type(n_components))
                raise TypeError(msg)
        # check n_bootstraps argument
        if not isinstance(n_bootstraps, int):
            msg = "n_bootstraps must be an int, not {}".format(type(n_bootstraps))
            raise TypeError(msg)
        elif n_bootstraps < 0:
            msg = "{} is invalid number of bootstraps, must be non-negative"
            raise ValueError(msg.format(n_bootstraps))
        # check workers argument
        if not isinstance(workers, int):
            msg = "workers must be an int, not {}".format(type(workers))
            raise TypeError(msg)
        # check size_correction argument
        if not isinstance(size_correction, bool):
            msg = "size_correction must be a bool, not {}".format(type(size_correction))
            raise TypeError(msg)
        # check pooled argument
        if not isinstance(pooled, bool):
            msg = "pooled must be a bool, not {}".format(type(pooled))
            raise TypeError(msg)
        # check align_type argument
        if (not isinstance(align_type, str)) and (align_type is not None):
            msg = "align_type must be a string or None, not {}".format(type(align_type))
            raise TypeError(msg)
        align_types_supported = ["sign_flips", "seedless_procrustes", None]
        if align_type not in align_types_supported:
            msg = "supported align types are {}".format(align_types_supported)
            raise ValueError(msg)
        # check align_kws argument
        if not isinstance(align_kws, dict):
            msg = "align_kws must be a dictionary of keyword arguments, not {}".format(
                type(align_kws)
            )
            raise TypeError(msg)
        # check input_graph argument
        if not isinstance(input_graph, bool):
            msg = "input_graph must be a bool, not {}".format(type(input_graph))
            raise TypeError(msg)
        super().__init__(n_components=n_components)
        self.test = test
        self.metric = metric
        # Validates ``metric`` and resolves it to a callable right away so a
        # bad metric fails at construction time rather than inside fit().
        self.metric_func_ = self._instantiate_metric_func(metric, test)
        self.n_bootstraps = n_bootstraps
        self.workers = workers
        self.size_correction = size_correction
        self.pooled = pooled
        self.input_graph = input_graph
        self.align_type = align_type
        self.align_kws = align_kws

    def _instantiate_metric_func(self, metric, test):
        """Validate ``metric`` and return a pairwise-distance or
        pairwise-kernel callable.

        Emits a UserWarning when the metric family does not match the test
        family (kernels pair with "hsic"; distances pair with the rest).
        """
        # check metric argument
        if not isinstance(metric, str) and not callable(metric):
            msg = "Metric must be str or callable, not {}".format(type(metric))
            raise TypeError(msg)
        elif metric not in _VALID_METRICS and not callable(metric):
            msg = "Unknown metric {}. Valid metrics are {}, or a callable".format(
                metric, _VALID_METRICS
            )
            raise ValueError(msg)
        if callable(metric):
            metric_func = metric
        else:
            if metric in _VALID_DISTANCES:
                if test == "hsic":
                    msg = (
                        f"{test} is a kernel-based test, but {metric} "
                        "is a distance. results may not be optimal. it is "
                        "recomended to use either a different test or one of "
                        f"the kernels: {_VALID_KERNELS} as a metric."
                    )
                    warnings.warn(msg, UserWarning)

                # Distance metric: wrap sklearn's pairwise_distances.
                def metric_func(X, Y=None, metric=metric, workers=None):
                    return pairwise_distances(X, Y, metric=metric, n_jobs=workers)

            elif metric == "gaussian":
                if test != "hsic":
                    msg = (
                        f"{test} is a distance-based test, but {metric} "
                        "is a kernel. results may not be optimal. it is "
                        "recomended to use either a hisc as a test or one of "
                        f"the distances: {_VALID_DISTANCES} as a metric."
                    )
                    warnings.warn(msg, UserWarning)
                # hyppo's gaussian kernel selects its bandwidth adaptively.
                metric_func = gaussian
            else:
                if test != "hsic":
                    msg = (
                        f"{test} is a distance-based test, but {metric} "
                        "is a kernel. results may not be optimal. it is "
                        "recomended to use either a hisc as a test or one of "
                        f"the distances: {_VALID_DISTANCES} as a metric."
                    )
                    warnings.warn(msg, UserWarning)

                # Kernel metric: wrap sklearn's pairwise_kernels.
                def metric_func(X, Y=None, metric=metric, workers=None):
                    return pairwise_kernels(X, Y, metric=metric, n_jobs=workers)

        return metric_func

    def _embed(self, A1, A2):
        """Adjacency-spectral-embed both graphs into a common dimension.

        If ``n_components`` is None, the larger of the two elbow-selected
        dimensions is used for both graphs. Directed graphs yield tuple
        embeddings which are concatenated; mixing directedness raises.
        """
        if self.n_components is None:
            num_dims1 = select_dimension(A1)[0][-1]
            num_dims2 = select_dimension(A2)[0][-1]
            self.n_components = max(num_dims1, num_dims2)
        ase = AdjacencySpectralEmbed(n_components=self.n_components)
        X1_hat = ase.fit_transform(A1)
        X2_hat = ase.fit_transform(A2)
        if isinstance(X1_hat, tuple) and isinstance(X2_hat, tuple):
            # Directed graphs: stack the out- and in-latent positions.
            X1_hat = np.concatenate(X1_hat, axis=-1)
            X2_hat = np.concatenate(X2_hat, axis=-1)
        elif isinstance(X1_hat, tuple) ^ isinstance(X2_hat, tuple):
            msg = (
                "input graphs do not have same directedness. "
                "consider symmetrizing the directed graph."
            )
            raise ValueError(msg)
        return X1_hat, X2_hat

    def _sample_modified_ase(self, X, Y, pooled=False):
        """Size-correct the embedding of the larger graph (reference [3]).

        Inflates the variance of the larger graph's estimated latent
        positions by sampling from the plug-in asymptotic distribution,
        then returns the (possibly corrected) embeddings in the original
        argument order.
        """
        N, M = len(X), len(Y)
        # return if graphs are of the same order, otherwise ensure X is the larger graph.
        if N == M:
            return X, Y
        elif M > N:
            reverse_order = True
            X, Y = Y, X
            N, M = M, N
        else:
            reverse_order = False
        # estimate the central limit theorem variance
        if pooled:
            two_samples = np.concatenate([X, Y], axis=0)
            get_sigma = fit_plug_in_variance_estimator(two_samples)
        else:
            get_sigma = fit_plug_in_variance_estimator(X)
        X_sigmas = get_sigma(X) * (N - M) / (N * M)
        # increase the variance of X by sampling from the asy dist
        X_sampled = np.zeros(X.shape)
        # TODO may be parallelized, but requires keeping track of random state
        for i in range(N):
            X_sampled[i, :] = X[i, :] + stats.multivariate_normal.rvs(cov=X_sigmas[i])
        # return the embeddings in the appropriate order
        return (Y, X_sampled) if reverse_order else (X_sampled, Y)

    def fit(self, A1, A2):
        """
        Fits the test to the two input graphs

        Parameters
        ----------
        A1, A2 : variable (see description)
            The two graphs, or their embeddings to run a hypothesis test on.
            Expected variable type and shape depends on input_graph attribute:
            - input_graph=True
                expects two unembedded graphs either as NetworkX graph objects, or as
                two np.ndarrays, representing the adjacency matrices. In this
                case will be embedded using adjacency spectral embedding.
            - input_graph=False
                expects two already embedded graphs. In this case they must be
                arrays of shape (n, d) and (m, d), where d, the number of
                components, must be shared.
            Note that regardless of how the graphs are passed, they need not
            have the same number of vertices.

        Returns
        -------
        self
        """
        if self.input_graph:
            A1 = import_graph(A1)
            A2 = import_graph(A2)
            X1_hat, X2_hat = self._embed(A1, A2)
        else:
            # check for nx objects, since they are castable to arrays,
            # but we don't want that
            if not isinstance(A1, np.ndarray):
                msg = (
                    f"Embedding of the first graph is of type {type(A1)}, not "
                    "np.ndarray. If input_graph is False, the inputs need to be "
                    "adjacency spectral embeddings, with shapes (n, d) and "
                    "(m, d), passed as np.ndarrays."
                )
                raise TypeError(msg)
            if not isinstance(A2, np.ndarray):
                msg = (
                    f"Embedding of the second graph is of type {type(A2)}, not an "
                    "array. If input_graph is False, the inputs need to be "
                    "adjacency spectral embeddings, with shapes (n, d) and "
                    "(m, d), passed as np.ndarrays."
                )
                raise TypeError(msg)
            if A1.ndim != 2:
                msg = (
                    "Embedding array of the first graph does not have two dimensions. "
                    "If input_graph is False, the inputs need to be adjacency "
                    "spectral embeddings, with shapes (n, d) and (m, d)"
                )
                raise ValueError(msg)
            if A2.ndim != 2:
                msg = (
                    "Embedding array of the second graph does not have two dimensions. "
                    "If input_graph is False, the inputs need to be adjacency "
                    "spectral embeddings, with shapes (n, d) and (m, d)"
                )
                raise ValueError(msg)
            if A1.shape[1] != A2.shape[1]:
                msg = (
                    "Two input embeddings have different number of components. "
                    "If input_graph is False, the inputs need to be adjacency "
                    "spectral embeddings, with shapes (n, d) and (m, d)"
                )
                raise ValueError(msg)
            # checking for inf values
            X1_hat = check_array(A1)
            X2_hat = check_array(A2)
        # Resolve the non-identifiability between the two embeddings before
        # comparing their distributions.
        if self.align_type == "sign_flips":
            aligner = SignFlips(**self.align_kws)
            X1_hat = aligner.fit_transform(X1_hat, X2_hat)
        elif self.align_type == "seedless_procrustes":
            aligner = SeedlessProcrustes(**self.align_kws)
            X1_hat = aligner.fit_transform(X1_hat, X2_hat)
        if self.size_correction:
            X1_hat, X2_hat = self._sample_modified_ase(
                X1_hat, X2_hat, pooled=self.pooled
            )
        self.metric_func_ = self._instantiate_metric_func(self.metric, self.test)
        test_obj = KSample(self.test, compute_distance=self.metric_func_)
        data = test_obj.test(
            X1_hat, X2_hat, reps=self.n_bootstraps, workers=self.workers, auto=False
        )
        self.null_distribution_ = test_obj.indep_test.null_dist
        self.sample_T_statistic_ = data[0]
        self.p_value_ = data[1]
        return self

    def fit_predict(self, A1, A2):
        """
        Fits the test to the two input graphs and returns the p-value

        Parameters
        ----------
        A1, A2 : variable (see description)
            The two graphs, or their embeddings to run a hypothesis test on.
            Expected variable type and shape depends on input_graph attribute:
            - input_graph=True
                expects two unembedded graphs either as NetworkX graph objects, or as
                two np.ndarrays, representing the adjacency matrices. In this
                case will be embedded using adjacency spectral embedding.
            - input_graph=False
                expects two already embedded graphs. In this case they must be
                arrays of shape (n, d) and (m, d), where d, the number of
                components, must be shared.
            Note that regardless of how the graphs are passed, they need not to
            have the same number of vertices.

        Returns
        -------
        p_value_ : float
            The overall p value from the test
        """
        # abstract method overwritten in order to have a custom doc string
        self.fit(A1, A2)
        return self.p_value_
| [
"sklearn.metrics.pairwise.PAIRWISE_KERNEL_FUNCTIONS.keys",
"scipy.stats.multivariate_normal.rvs",
"sklearn.metrics.pairwise.PAIRED_DISTANCES.keys",
"sklearn.utils.check_array",
"sklearn.metrics.pairwise_distances",
"numpy.zeros",
"hyppo.ksample.KSample",
"sklearn.metrics.pairwise.pairwise_kernels",
... | [((728, 751), 'sklearn.metrics.pairwise.PAIRED_DISTANCES.keys', 'PAIRED_DISTANCES.keys', ([], {}), '()\n', (749, 751), False, 'from sklearn.metrics.pairwise import PAIRED_DISTANCES\n'), ((775, 807), 'sklearn.metrics.pairwise.PAIRWISE_KERNEL_FUNCTIONS.keys', 'PAIRWISE_KERNEL_FUNCTIONS.keys', ([], {}), '()\n', (805, 807), False, 'from sklearn.metrics.pairwise import PAIRWISE_KERNEL_FUNCTIONS\n'), ((15564, 15581), 'numpy.zeros', 'np.zeros', (['X.shape'], {}), '(X.shape)\n', (15572, 15581), True, 'import numpy as np\n'), ((19767, 19821), 'hyppo.ksample.KSample', 'KSample', (['self.test'], {'compute_distance': 'self.metric_func_'}), '(self.test, compute_distance=self.metric_func_)\n', (19774, 19821), False, 'from hyppo.ksample import KSample\n'), ((14418, 14449), 'numpy.concatenate', 'np.concatenate', (['X1_hat'], {'axis': '(-1)'}), '(X1_hat, axis=-1)\n', (14432, 14449), True, 'import numpy as np\n'), ((14471, 14502), 'numpy.concatenate', 'np.concatenate', (['X2_hat'], {'axis': '(-1)'}), '(X2_hat, axis=-1)\n', (14485, 14502), True, 'import numpy as np\n'), ((15253, 15283), 'numpy.concatenate', 'np.concatenate', (['[X, Y]'], {'axis': '(0)'}), '([X, Y], axis=0)\n', (15267, 15283), True, 'import numpy as np\n'), ((19130, 19145), 'sklearn.utils.check_array', 'check_array', (['A1'], {}), '(A1)\n', (19141, 19145), False, 'from sklearn.utils import check_array\n'), ((19167, 19182), 'sklearn.utils.check_array', 'check_array', (['A2'], {}), '(A2)\n', (19178, 19182), False, 'from sklearn.utils import check_array\n'), ((15728, 15774), 'scipy.stats.multivariate_normal.rvs', 'stats.multivariate_normal.rvs', ([], {'cov': 'X_sigmas[i]'}), '(cov=X_sigmas[i])\n', (15757, 15774), False, 'from scipy import stats\n'), ((12604, 12635), 'warnings.warn', 'warnings.warn', (['msg', 'UserWarning'], {}), '(msg, UserWarning)\n', (12617, 12635), False, 'import warnings\n'), ((12737, 12792), 'sklearn.metrics.pairwise_distances', 'pairwise_distances', (['X', 'Y'], {'metric': 'metric', 'n_jobs': 
'workers'}), '(X, Y, metric=metric, n_jobs=workers)\n', (12755, 12792), False, 'from sklearn.metrics import pairwise_distances\n'), ((13239, 13270), 'warnings.warn', 'warnings.warn', (['msg', 'UserWarning'], {}), '(msg, UserWarning)\n', (13252, 13270), False, 'import warnings\n'), ((13734, 13765), 'warnings.warn', 'warnings.warn', (['msg', 'UserWarning'], {}), '(msg, UserWarning)\n', (13747, 13765), False, 'import warnings\n'), ((13867, 13920), 'sklearn.metrics.pairwise.pairwise_kernels', 'pairwise_kernels', (['X', 'Y'], {'metric': 'metric', 'n_jobs': 'workers'}), '(X, Y, metric=metric, n_jobs=workers)\n', (13883, 13920), False, 'from sklearn.metrics.pairwise import pairwise_kernels\n')] |
"""
@brief test tree node (time=2s)
"""
import unittest
import numpy
from pyquickhelper.pycode import ExtTestCase
from mlprodict.testing import check_is_almost_equal
class TestTesting(ExtTestCase):
    """Unit tests for :func:`mlprodict.testing.check_is_almost_equal`."""

    def test_check_is_almost_equal(self):
        """Equal arrays pass; a type mismatch and unequal values raise."""
        # Identical arrays must compare as almost equal without raising.
        check_is_almost_equal(numpy.array([1, 2]), numpy.array([1, 2]))
        # A plain scalar compared against an array is a type error.
        scalar = 3
        expected = numpy.array([1, 2])
        self.assertRaise(
            lambda: check_is_almost_equal(scalar, expected), TypeError)
        # Arrays with differing values must fail the comparison.
        got = numpy.array([1, 3])
        self.assertRaise(
            lambda: check_is_almost_equal(got, expected), AssertionError)
# Allow running this test module directly with the unittest runner.
if __name__ == "__main__":
    unittest.main()
| [
"unittest.main",
"numpy.array",
"mlprodict.testing.check_is_almost_equal"
] | [((654, 669), 'unittest.main', 'unittest.main', ([], {}), '()\n', (667, 669), False, 'import unittest\n'), ((261, 280), 'numpy.array', 'numpy.array', (['[1, 2]'], {}), '([1, 2])\n', (272, 280), False, 'import numpy\n'), ((294, 313), 'numpy.array', 'numpy.array', (['[1, 2]'], {}), '([1, 2])\n', (305, 313), False, 'import numpy\n'), ((322, 351), 'mlprodict.testing.check_is_almost_equal', 'check_is_almost_equal', (['l1', 'l2'], {}), '(l1, l2)\n', (343, 351), False, 'from mlprodict.testing import check_is_almost_equal\n'), ((380, 399), 'numpy.array', 'numpy.array', (['[1, 2]'], {}), '([1, 2])\n', (391, 399), False, 'import numpy\n'), ((488, 507), 'numpy.array', 'numpy.array', (['[1, 3]'], {}), '([1, 3])\n', (499, 507), False, 'import numpy\n'), ((521, 540), 'numpy.array', 'numpy.array', (['[1, 2]'], {}), '([1, 2])\n', (532, 540), False, 'import numpy\n'), ((433, 462), 'mlprodict.testing.check_is_almost_equal', 'check_is_almost_equal', (['l1', 'l2'], {}), '(l1, l2)\n', (454, 462), False, 'from mlprodict.testing import check_is_almost_equal\n'), ((574, 603), 'mlprodict.testing.check_is_almost_equal', 'check_is_almost_equal', (['l1', 'l2'], {}), '(l1, l2)\n', (595, 603), False, 'from mlprodict.testing import check_is_almost_equal\n')] |
# code to get tflite running a model on raspberry pi source from
#https://github.com/EdjeElectronics/TensorFlow-Lite-Object-Detection-on-Android-and-Raspberry-Pi/blob/master/TFLite_detection_stream.py
#
#
import os
import argparse
import cv2
import numpy as np
import sys
import time
from threading import Thread
import importlib.util
import GarageServo
import GarageServoController
import time
from datetime import datetime
class VideoStream:
    """Threaded wrapper around ``cv2.VideoCapture`` that continuously
    grabs frames in the background so ``read()`` always returns the
    latest frame without blocking the caller."""

    def __init__(self, resolution=(640, 480), framerate=30):
        # Open the default camera and configure exposure, codec and size.
        self.stream = cv2.VideoCapture(0)
        self.stream.set(cv2.CAP_PROP_EXPOSURE, -1)
        self.stream.set(cv2.CAP_PROP_FOURCC, cv2.VideoWriter_fourcc(*'MJPG'))
        self.stream.set(3, resolution[0])  # property 3: frame width
        self.stream.set(4, resolution[1])  # property 4: frame height
        # Prime the first frame so read() never sees an unset attribute.
        self.grabbed, self.frame = self.stream.read()
        self.stopped = False

    def start(self):
        """Spawn the background capture thread; returns self for chaining."""
        worker = Thread(target=self.update, args=())
        worker.start()
        return self

    def update(self):
        """Capture loop: keep overwriting the newest frame until stopped."""
        while not self.stopped:
            self.grabbed, self.frame = self.stream.read()
        # stop() was called: hand the camera back to the OS.
        self.stream.release()

    def read(self):
        """Return the most recently captured frame."""
        return self.frame

    def stop(self):
        """Signal the capture loop to terminate and release the camera."""
        self.stopped = True
# Define and parse input arguments.
# Bug fix: the original registered only --modeldir, yet the code below reads
# args.graph, args.labels, args.threshold, args.resolution and args.edgetpu,
# which raised AttributeError at startup. Register every argument that is
# read, with the defaults used by the upstream EdjeElectronics script this
# file is derived from.
parser = argparse.ArgumentParser()
parser.add_argument('--modeldir', help='Folder the .tflite file is located in',
                    required=True)
parser.add_argument('--graph', help='Name of the .tflite file, if different than detect.tflite',
                    default='detect.tflite')
parser.add_argument('--labels', help='Name of the labelmap file, if different than labelmap.txt',
                    default='labelmap.txt')
parser.add_argument('--threshold', help='Minimum confidence threshold for displaying detected objects',
                    default=0.5)
parser.add_argument('--resolution', help='Desired webcam resolution in WxH. If the webcam does not '
                    'support the resolution entered, errors may occur.',
                    default='1280x720')
parser.add_argument('--edgetpu', help='Use Coral Edge TPU Accelerator to speed up detection',
                    action='store_true')
args = parser.parse_args()

MODEL_NAME = args.modeldir
GRAPH_NAME = args.graph
LABELMAP_NAME = args.labels
min_conf_threshold = float(args.threshold)
resW, resH = args.resolution.split('x')  # e.g. '1280x720' -> ('1280', '720')
imW, imH = int(resW), int(resH)
use_TPU = args.edgetpu  # NOTE(review): parsed but not used later in this script
from tflite_runtime.interpreter import Interpreter

# Resolve the model and labelmap paths relative to the working directory.
CWD_PATH = os.getcwd()
PATH_TO_CKPT = os.path.join(CWD_PATH, MODEL_NAME, GRAPH_NAME)
PATH_TO_LABELS = os.path.join(CWD_PATH, MODEL_NAME, LABELMAP_NAME)

# Load the class labels, one per line.
with open(PATH_TO_LABELS, 'r') as f:
    labels = [line.strip() for line in f.readlines()]
# COCO-style labelmaps start with a '???' placeholder row; drop it so that
# label indices line up with the model's class indices.
if labels[0] == '???':
    del (labels[0])

# Load the TFLite model and allocate its input/output tensors.
interpreter = Interpreter(model_path=PATH_TO_CKPT)
interpreter.allocate_tensors()

# Get model details
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
height = input_details[0]['shape'][1]
width = input_details[0]['shape'][2]

# A float32 input tensor means the model is non-quantized and expects
# normalized pixel values.
floating_model = (input_details[0]['dtype'] == np.float32)

input_mean = 127.5
input_std = 127.5
frame_rate_calc = 1
freq = cv2.getTickFrequency()

# Start the threaded camera stream and give it a moment to warm up.
videostream = VideoStream(resolution=(imW, imH), framerate=30).start()
time.sleep(1)

# Two servos on channels 10 and 11; the last argument looks like a
# per-servo calibration offset -- TODO confirm against GarageServo.
servos = []
servo1 = GarageServo.GarageServo(0, 10, 0, 7.9)
servo2 = GarageServo.GarageServo(0, 11, 1, 7.95)
servos.append(servo1)
servos.append(servo2)
servo_controller = GarageServoController.GarageServoController(servos)
# Detection/deployment state: count consecutive "tlr" sightings before
# deploying, and remember when a "tlr" was last seen.
tlr_cnt = 0
deployed = False
last_tlr_seen = datetime.now()
# Main loop: grab a frame, run the detector, drive the servos, draw overlays.
while True:

    # Start timer (for calculating frame rate)
    t1 = cv2.getTickCount()

    # Grab frame from video stream
    frame1 = videostream.read()

    # Acquire frame and resize to expected shape [1xHxWx3]
    frame = frame1.copy()
    frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    frame_resized = cv2.resize(frame_rgb, (width, height))
    input_data = np.expand_dims(frame_resized, axis=0)

    # Normalize pixel values if using a floating model (i.e. if model is non-quantized)
    if floating_model:
        input_data = (np.float32(input_data) - input_mean) / input_std

    # Perform the actual detection by running the model with the image as input
    interpreter.set_tensor(input_details[0]['index'], input_data)
    interpreter.invoke()

    # Retrieve detection results
    boxes = interpreter.get_tensor(output_details[0]['index'])[0]  # Bounding box coordinates of detected objects
    classes = interpreter.get_tensor(output_details[1]['index'])[0]  # Class index of detected objects
    scores = interpreter.get_tensor(output_details[2]['index'])[0]  # Confidence of detected objects
    # num = interpreter.get_tensor(output_details[3]['index'])[0]  # Total number of detected objects (inaccurate and not needed)

    # Loop over all detections and draw detection box if confidence is above minimum threshold
    for i in range(len(scores)):
        if ((scores[i] > min_conf_threshold) and (scores[i] <= 1.0)):
            # Get bounding box coordinates and draw box
            # Interpreter can return coordinates that are outside of image dimensions, need to force them to be within image using max() and min()
            ymin = int(max(1, (boxes[i][0] * imH)))
            xmin = int(max(1, (boxes[i][1] * imW)))
            ymax = int(min(imH, (boxes[i][2] * imH)))
            xmax = int(min(imW, (boxes[i][3] * imW)))

            cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), (10, 255, 0), 2)

            # Draw label
            object_name = labels[int(classes[i])]  # Look up object name from "labels" array using class index
            label = '%s: %d%%' % (object_name, int(scores[i] * 100))  # Example: 'person: 72%'
            labelSize, baseLine = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.7, 2)  # Get font size
            label_ymin = max(ymin, labelSize[1] + 10)  # Make sure not to draw label too close to top of window
            cv2.rectangle(frame, (xmin, label_ymin - labelSize[1] - 10),
                          (xmin + labelSize[0], label_ymin + baseLine - 10), (255, 255, 255),
                          cv2.FILLED)  # Draw white box to put label text in
            cv2.putText(frame, label, (xmin, label_ymin - 7), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 0),
                        2)  # Draw label text

            # Deploy the servos after 10 consecutive frames containing a
            # confident "tlr" detection.
            if object_name == "tlr" and not deployed:
                tlr_cnt = tlr_cnt + 1
                if tlr_cnt == 10:
                    tlr_cnt = 0
                    servo_controller.mov_to_park_pos()
                    deployed = True
            if deployed and object_name == "tlr":
                last_tlr_seen = datetime.now()

    # Retract once no "tlr" has been seen for more than 15 seconds.
    duration = datetime.now() - last_tlr_seen
    duration_in_sec = duration.total_seconds()
    # NOTE(review): ``classes`` holds numeric class indices, so
    # ``'tlr' not in classes`` is always True; the retraction is effectively
    # gated only by the 15-second timeout. Verify whether a label-based
    # check was intended here.
    if deployed and duration_in_sec > 15 and 'tlr' not in classes:
        servo_controller.move_to_home()
        deployed = False

    # Draw framerate in corner of frame
    cv2.putText(frame, 'FPS: {0:.2f}'.format(frame_rate_calc), (30, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2,
                cv2.LINE_AA)
    cv2.putText(frame, 'Deployed: {0:.2f}'.format(deployed), (30, 75), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2,
                cv2.LINE_AA)
    cv2.putText(frame, 'tlr_cnt: {0:.2f}'.format(tlr_cnt), (30, 100), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 0), 2,
                cv2.LINE_AA)

    # All the results have been drawn on the frame, so it's time to display it.
    cv2.imshow('Object detector', frame)

    # Calculate framerate
    t2 = cv2.getTickCount()
    time1 = (t2 - t1) / freq
    frame_rate_calc = 1 / time1

    # Press 'q' to quit
    if cv2.waitKey(1) == ord('q'):
        break
    # Press 'c' to run the park-position calibration routine.
    if cv2.waitKey(1) == ord('c'):
        servo_controller.calibrate_park_pos()

# Clean up
cv2.destroyAllWindows()
videostream.stop()
"argparse.ArgumentParser",
"cv2.VideoWriter_fourcc",
"cv2.getTickCount",
"cv2.rectangle",
"cv2.imshow",
"os.path.join",
"cv2.getTickFrequency",
"cv2.cvtColor",
"GarageServo.GarageServo",
"tflite_runtime.interpreter.Interpreter",
"cv2.destroyAllWindows",
"datetime.datetime.now",
"cv2.resize",... | [((1368, 1393), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1391, 1393), False, 'import argparse\n'), ((1819, 1830), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1828, 1830), False, 'import os\n'), ((1847, 1893), 'os.path.join', 'os.path.join', (['CWD_PATH', 'MODEL_NAME', 'GRAPH_NAME'], {}), '(CWD_PATH, MODEL_NAME, GRAPH_NAME)\n', (1859, 1893), False, 'import os\n'), ((1912, 1961), 'os.path.join', 'os.path.join', (['CWD_PATH', 'MODEL_NAME', 'LABELMAP_NAME'], {}), '(CWD_PATH, MODEL_NAME, LABELMAP_NAME)\n', (1924, 1961), False, 'import os\n'), ((2113, 2149), 'tflite_runtime.interpreter.Interpreter', 'Interpreter', ([], {'model_path': 'PATH_TO_CKPT'}), '(model_path=PATH_TO_CKPT)\n', (2124, 2149), False, 'from tflite_runtime.interpreter import Interpreter\n'), ((2502, 2524), 'cv2.getTickFrequency', 'cv2.getTickFrequency', ([], {}), '()\n', (2522, 2524), False, 'import cv2\n'), ((2597, 2610), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2607, 2610), False, 'import time\n'), ((2633, 2671), 'GarageServo.GarageServo', 'GarageServo.GarageServo', (['(0)', '(10)', '(0)', '(7.9)'], {}), '(0, 10, 0, 7.9)\n', (2656, 2671), False, 'import GarageServo\n'), ((2681, 2720), 'GarageServo.GarageServo', 'GarageServo.GarageServo', (['(0)', '(11)', '(1)', '(7.95)'], {}), '(0, 11, 1, 7.95)\n', (2704, 2720), False, 'import GarageServo\n'), ((2786, 2837), 'GarageServoController.GarageServoController', 'GarageServoController.GarageServoController', (['servos'], {}), '(servos)\n', (2829, 2837), False, 'import GarageServoController\n'), ((2885, 2899), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2897, 2899), False, 'from datetime import datetime\n'), ((7135, 7158), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (7156, 7158), False, 'import cv2\n'), ((2969, 2987), 'cv2.getTickCount', 'cv2.getTickCount', ([], {}), '()\n', (2985, 2987), False, 'import cv2\n'), ((3159, 3197), 'cv2.cvtColor', 'cv2.cvtColor', 
(['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (3171, 3197), False, 'import cv2\n'), ((3218, 3256), 'cv2.resize', 'cv2.resize', (['frame_rgb', '(width, height)'], {}), '(frame_rgb, (width, height))\n', (3228, 3256), False, 'import cv2\n'), ((3274, 3311), 'numpy.expand_dims', 'np.expand_dims', (['frame_resized'], {'axis': '(0)'}), '(frame_resized, axis=0)\n', (3288, 3311), True, 'import numpy as np\n'), ((6815, 6851), 'cv2.imshow', 'cv2.imshow', (['"""Object detector"""', 'frame'], {}), "('Object detector', frame)\n", (6825, 6851), False, 'import cv2\n'), ((6888, 6906), 'cv2.getTickCount', 'cv2.getTickCount', ([], {}), '()\n', (6904, 6906), False, 'import cv2\n'), ((601, 620), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (617, 620), False, 'import cv2\n'), ((6037, 6051), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6049, 6051), False, 'from datetime import datetime\n'), ((7000, 7014), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (7011, 7014), False, 'import cv2\n'), ((7049, 7063), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (7060, 7063), False, 'import cv2\n'), ((723, 754), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'MJPG'"], {}), "(*'MJPG')\n", (745, 754), False, 'import cv2\n'), ((4776, 4841), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(xmin, ymin)', '(xmax, ymax)', '(10, 255, 0)', '(2)'], {}), '(frame, (xmin, ymin), (xmax, ymax), (10, 255, 0), 2)\n', (4789, 4841), False, 'import cv2\n'), ((5108, 5164), 'cv2.getTextSize', 'cv2.getTextSize', (['label', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.7)', '(2)'], {}), '(label, cv2.FONT_HERSHEY_SIMPLEX, 0.7, 2)\n', (5123, 5164), False, 'import cv2\n'), ((5306, 5450), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(xmin, label_ymin - labelSize[1] - 10)', '(xmin + labelSize[0], label_ymin + baseLine - 10)', '(255, 255, 255)', 'cv2.FILLED'], {}), '(frame, (xmin, label_ymin - labelSize[1] - 10), (xmin +\n labelSize[0], label_ymin + baseLine - 10), 
(255, 255, 255), cv2.FILLED)\n', (5319, 5450), False, 'import cv2\n'), ((5550, 5648), 'cv2.putText', 'cv2.putText', (['frame', 'label', '(xmin, label_ymin - 7)', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.7)', '(0, 0, 0)', '(2)'], {}), '(frame, label, (xmin, label_ymin - 7), cv2.FONT_HERSHEY_SIMPLEX,\n 0.7, (0, 0, 0), 2)\n', (5561, 5648), False, 'import cv2\n'), ((969, 1004), 'threading.Thread', 'Thread', ([], {'target': 'self.update', 'args': '()'}), '(target=self.update, args=())\n', (975, 1004), False, 'from threading import Thread\n'), ((3446, 3468), 'numpy.float32', 'np.float32', (['input_data'], {}), '(input_data)\n', (3456, 3468), True, 'import numpy as np\n'), ((6006, 6020), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6018, 6020), False, 'from datetime import datetime\n')] |
# -*- coding: utf-8 -*-
'''
This module defines :class:`EpochArray`, an array of epochs. Introduced for
performance reasons.
:class:`EpochArray` derives from :class:`BaseNeo`, from
:module:`neo.core.baseneo`.
'''
# needed for python 3 compatibility
from __future__ import absolute_import, division, print_function
import sys
import numpy as np
import quantities as pq
from neo.core.baseneo import BaseNeo, merge_annotations
# Major version of the running Python interpreter; EpochArray.__repr__ below
# decodes byte-string labels to unicode only under Python 3.
PY_VER = sys.version_info[0]
class EpochArray(BaseNeo):
    '''
    Array of epochs, grouped into one object for performance.

    A single :class:`EpochArray` is preferred over a list of
    :class:`Epoch` objects.

    *Usage*::

        >>> from neo.core import EpochArray
        >>> from quantities import s, ms
        >>> import numpy as np
        >>>
        >>> epcarr = EpochArray(times=np.arange(0, 30, 10)*s,
        ...                     durations=[10, 5, 7]*ms,
        ...                     labels=np.array(['btn0', 'btn1', 'btn2'],
        ...                                     dtype='S'))

    *Required attributes/properties*:
        :times: (quantity array 1D) Start times of the time periods.
        :durations: (quantity array 1D) Lengths of the time periods.
        :labels: (numpy.array 1D dtype='S') Names or labels for the
            time periods.

    *Recommended attributes/properties*:
        :name: (str) A label for the dataset.
        :description: (str) Text description.
        :file_origin: (str) Filesystem path or URL of the original data file.

    Note: Any other additional arguments are assumed to be user-specific
    metadata and stored in :attr:`annotations`.
    '''

    _single_parent_objects = ('Segment',)

    _necessary_attrs = (('times', pq.Quantity, 1),
                        ('durations', pq.Quantity, 1),
                        ('labels', np.ndarray, 1, np.dtype('S')))

    def __init__(self, times=None, durations=None, labels=None,
                 name=None, description=None, file_origin=None, **annotations):
        '''
        Create a new :class:`EpochArray`, defaulting every array to empty.
        '''
        BaseNeo.__init__(self, name=name, file_origin=file_origin,
                         description=description, **annotations)

        # Empty defaults keep the attributes array-like even when nothing
        # was supplied by the caller.
        self.times = np.array([]) * pq.s if times is None else times
        self.durations = (np.array([]) * pq.s
                          if durations is None else durations)
        self.labels = np.array([], dtype='S') if labels is None else labels
        self.segment = None

    def __repr__(self):
        '''
        Return a string representation of this :class:`EpochArray`.
        '''
        # Byte-string labels need decoding under Python 3, otherwise the
        # repr would show b'...' prefixes.
        if PY_VER == 3:
            labels = self.labels.astype('U')
        else:
            labels = self.labels
        objs = []
        for label, time, dur in zip(labels, self.times, self.durations):
            objs.append('%s@%s for %s' % (label, time, dur))
        return '<EpochArray: %s>' % ', '.join(objs)

    def merge(self, other):
        '''
        Merge another :class:`EpochArray` into this one.

        The arrays are concatenated horizontally (:func:`np.hstack`) after
        rescaling the other array's units to match this one's.  If the two
        objects' metadata fields differ, they are combined into
        "merge(a, b)" strings; annotations are merged as well.
        '''
        other_times = other.times.rescale(self.times.units)
        other_durations = other.durations.rescale(self.durations.units)
        times = np.hstack([self.times, other_times]) * self.times.units
        durations = (np.hstack([self.durations, other_durations])
                     * self.durations.units)
        labels = np.hstack([self.labels, other.labels])

        kwargs = {}
        for attr in ("name", "description", "file_origin"):
            mine = getattr(self, attr)
            theirs = getattr(other, attr)
            kwargs[attr] = (mine if mine == theirs
                            else "merge(%s, %s)" % (mine, theirs))
        kwargs.update(merge_annotations(self.annotations,
                                        other.annotations))
        return EpochArray(times=times, durations=durations, labels=labels,
                          **kwargs)
| [
"neo.core.baseneo.BaseNeo.__init__",
"numpy.dtype",
"numpy.hstack",
"numpy.array",
"neo.core.baseneo.merge_annotations"
] | [((2330, 2433), 'neo.core.baseneo.BaseNeo.__init__', 'BaseNeo.__init__', (['self'], {'name': 'name', 'file_origin': 'file_origin', 'description': 'description'}), '(self, name=name, file_origin=file_origin, description=\n description, **annotations)\n', (2346, 2433), False, 'from neo.core.baseneo import BaseNeo, merge_annotations\n'), ((3955, 3993), 'numpy.hstack', 'np.hstack', (['[self.labels, other.labels]'], {}), '([self.labels, other.labels])\n', (3964, 3993), True, 'import numpy as np\n'), ((4366, 4420), 'neo.core.baseneo.merge_annotations', 'merge_annotations', (['self.annotations', 'other.annotations'], {}), '(self.annotations, other.annotations)\n', (4383, 4420), False, 'from neo.core.baseneo import BaseNeo, merge_annotations\n'), ((2082, 2095), 'numpy.dtype', 'np.dtype', (['"""S"""'], {}), "('S')\n", (2090, 2095), True, 'import numpy as np\n'), ((2643, 2666), 'numpy.array', 'np.array', (['[]'], {'dtype': '"""S"""'}), "([], dtype='S')\n", (2651, 2666), True, 'import numpy as np\n'), ((3765, 3800), 'numpy.hstack', 'np.hstack', (['[self.times, othertimes]'], {}), '([self.times, othertimes])\n', (3774, 3800), True, 'import numpy as np\n'), ((3840, 3883), 'numpy.hstack', 'np.hstack', (['[self.durations, otherdurations]'], {}), '([self.durations, otherdurations])\n', (3849, 3883), True, 'import numpy as np\n'), ((2501, 2513), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (2509, 2513), True, 'import numpy as np\n'), ((2575, 2587), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (2583, 2587), True, 'import numpy as np\n')] |
#!/usr/bin/python3
import pandas as pd
import torch
from torch.utils.data import Dataset
from torch.autograd import Variable
import numpy as np
import time
class CollisionDataset(Dataset):
    """Torch dataset wrapping a CSV file of collision-detection samples.

    Each CSV row holds the feature values followed by an integer class
    label in the last column.

    Args:
        csv_path: (string) path to the CSV file with the dataset.
    """

    def __init__(self, csv_path):
        # Load the whole table once and keep it as a raw numpy matrix.
        self._data = pd.read_csv(csv_path).values

    def __len__(self):
        # One sample per CSV row.
        return self._data.shape[0]

    def __getitem__(self, idx):
        row = self._data[idx]
        n_features = self._data.shape[1] - 1
        # All columns but the last are the input features; the last column
        # holds the integer class label.
        features = torch.from_numpy(row[0:n_features]).float()
        label = torch.from_numpy(np.asarray(row[n_features], dtype=int))
        return features, label

    @property
    def input_dim_(self):
        # Number of input features, derived from the first sample.
        return len(self[0][0])
| [
"pandas.read_csv",
"numpy.asarray",
"torch.from_numpy"
] | [((353, 374), 'pandas.read_csv', 'pd.read_csv', (['csv_path'], {}), '(csv_path)\n', (364, 374), True, 'import pandas as pd\n'), ((859, 908), 'numpy.asarray', 'np.asarray', (['self._data[idx, input_num]'], {'dtype': 'int'}), '(self._data[idx, input_num], dtype=int)\n', (869, 908), True, 'import numpy as np\n'), ((771, 817), 'torch.from_numpy', 'torch.from_numpy', (['self._data[idx, 0:input_num]'], {}), '(self._data[idx, 0:input_num])\n', (787, 817), False, 'import torch\n')] |
import numpy as np

# Read the matrix dimensions, then the N rows of integers from stdin.
rows, _cols = map(int, input().split())
matrix = np.array([[int(tok) for tok in input().split()] for _ in range(rows)],
                 dtype=np.int32)

# Keep the pre-1.14 numpy print format so the output matches exactly.
np.set_printoptions(legacy='1.13')
print(np.mean(matrix, axis=1))  # mean of each row
print(np.var(matrix, axis=0))   # variance of each column
print(np.std(matrix))           # standard deviation over all elements
"numpy.mean",
"numpy.set_printoptions",
"numpy.var",
"numpy.std"
] | [((138, 172), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'legacy': '"""1.13"""'}), "(legacy='1.13')\n", (157, 172), True, 'import numpy as np\n'), ((179, 197), 'numpy.mean', 'np.mean', (['b'], {'axis': '(1)'}), '(b, axis=1)\n', (186, 197), True, 'import numpy as np\n'), ((207, 224), 'numpy.var', 'np.var', (['b'], {'axis': '(0)'}), '(b, axis=0)\n', (213, 224), True, 'import numpy as np\n'), ((234, 243), 'numpy.std', 'np.std', (['b'], {}), '(b)\n', (240, 243), True, 'import numpy as np\n')] |
import torch
import numpy as np
def compute_cmvn_epoch(opt, train_loader, enhance_model, feat_model):
    """Estimate CMVN statistics of the enhancement front-end output.

    Runs the (frozen) enhancement model over training batches until
    ``feat_model.compute_cmvn`` yields statistics, saves them to
    ``<opt.exp_path>/enhance_cmvn.npy`` and returns them as a FloatTensor.
    Both models are restored to training mode before returning.

    Args:
        opt: options object; only ``opt.exp_path`` is used here.
        train_loader: iterable of training batches (10-element tuples).
        enhance_model: speech-enhancement network (callable).
        feat_model: feature extractor providing ``compute_cmvn``.

    Returns:
        torch.FloatTensor with the computed CMVN statistics.

    Raises:
        RuntimeError: if no batch produced CMVN statistics.
    """
    # Bug fix: the original called os.path.join without importing os.
    import os

    # Freeze both models and disable autograd while estimating statistics.
    enhance_model.eval()
    feat_model.eval()
    torch.set_grad_enabled(False)

    enhance_cmvn_file = os.path.join(opt.exp_path, 'enhance_cmvn.npy')
    enhance_cmvn = None
    for data in train_loader:
        (utt_ids, spk_ids, clean_inputs, clean_log_inputs, mix_inputs,
         mix_log_inputs, cos_angles, targets, input_sizes,
         target_sizes) = data
        enhance_out = enhance_model(mix_inputs, mix_log_inputs, input_sizes)
        enhance_cmvn = feat_model.compute_cmvn(enhance_out, input_sizes)
        if enhance_cmvn is not None:
            np.save(enhance_cmvn_file, enhance_cmvn)
            print('save enhance_cmvn to {}'.format(enhance_cmvn_file))
            break
    if enhance_cmvn is None:
        # Bug fix: previously this fell through to a NameError on the
        # next line; fail with an explicit message instead.
        raise RuntimeError('could not compute CMVN from any training batch')
    enhance_cmvn = torch.FloatTensor(enhance_cmvn)

    # Restore training mode and gradients for the caller.
    enhance_model.train()
    feat_model.train()
    torch.set_grad_enabled(True)
    return enhance_cmvn
"numpy.save",
"torch.FloatTensor",
"torch.set_grad_enabled"
] | [((153, 182), 'torch.set_grad_enabled', 'torch.set_grad_enabled', (['(False)'], {}), '(False)\n', (175, 182), False, 'import torch\n'), ((837, 868), 'torch.FloatTensor', 'torch.FloatTensor', (['enhance_cmvn'], {}), '(enhance_cmvn)\n', (854, 868), False, 'import torch\n'), ((922, 950), 'torch.set_grad_enabled', 'torch.set_grad_enabled', (['(True)'], {}), '(True)\n', (944, 950), False, 'import torch\n'), ((688, 728), 'numpy.save', 'np.save', (['enhance_cmvn_file', 'enhance_cmvn'], {}), '(enhance_cmvn_file, enhance_cmvn)\n', (695, 728), True, 'import numpy as np\n')] |
import os
import pickle
import json
import numpy as np
from kopt import CompileFN, test_fn
from hyperopt import fmin, tpe, hp, Trials
import keras.optimizers as opt
from . import io
from .network import AE_types
def hyper(args):
    """Hyper-parameter search for the autoencoder using kopt/hyperopt TPE.

    Builds a search space over data-normalization switches and model
    hyper-parameters, evaluates candidates with ``fmin``/``tpe.suggest``,
    and writes the trials history (``trials.pickle``) and the best
    configuration (``best.json``) to ``<args.outputdir>/hyperopt_results``.

    Parameters
    ----------
    args : argparse.Namespace-like
        Must provide ``input``, ``transpose``, ``hyperepoch``, ``hypern``,
        ``outputdir`` and ``debug``.
    """
    adata = io.read_dataset(args.input,
                            transpose=args.transpose,
                            test_split=False)

    # Search space: "data" keys feed data_fn, "model" keys feed model_fn,
    # "fit" is passed to the training call by kopt.
    hyper_params = {
        "data": {
            "norm_input_log": hp.choice('d_norm_log', (True, False)),
            "norm_input_zeromean": hp.choice('d_norm_zeromean', (True, False)),
            "norm_input_sf": hp.choice('d_norm_sf', (True, False)),
            },
        "model": {
            # Log-uniform priors for scale-free parameters (lr, penalties).
            "lr": hp.loguniform("m_lr", np.log(1e-3), np.log(1e-2)),
            "ridge": hp.loguniform("m_ridge", np.log(1e-7), np.log(1e-1)),
            "l1_enc_coef": hp.loguniform("m_l1_enc_coef", np.log(1e-7), np.log(1e-1)),
            "hidden_size": hp.choice("m_hiddensize", ((64,32,64), (32,16,32),
                                                       (64,64), (32,32), (16,16),
                                                       (16,), (32,), (64,), (128,))),
            "activation": hp.choice("m_activation", ('relu', 'selu', 'elu',
                                                       'PReLU', 'linear', 'LeakyReLU')),
            "aetype": hp.choice("m_aetype", ('zinb', 'zinb-conddisp')),
            "batchnorm": hp.choice("m_batchnorm", (True, False)),
            "dropout": hp.uniform("m_do", 0, 0.7),
            "input_dropout": hp.uniform("m_input_do", 0, 0.8),
            },
        "fit": {
            "epochs": args.hyperepoch
            }
    }

    # Build (x, y) training data for one sampled normalization setting.
    def data_fn(norm_input_log, norm_input_zeromean, norm_input_sf):
        ad = adata.copy()
        ad = io.normalize(ad,
                          size_factors=norm_input_sf,
                          logtrans_input=norm_input_log,
                          normalize_input=norm_input_zeromean)
        x_train = {'count': ad.X, 'size_factors': ad.obs.size_factors}
        y_train = ad.raw.X
        # NOTE: kopt expects a tuple of datasets, hence the trailing comma.
        return (x_train, y_train),

    # Build and compile a model for one sampled hyper-parameter setting.
    def model_fn(train_data, lr, hidden_size, activation, aetype, batchnorm,
                 dropout, input_dropout, ridge, l1_enc_coef):
        net = AE_types[aetype](train_data[1].shape[1],
                               hidden_size=hidden_size,
                               l2_coef=0.0,
                               l1_coef=0.0,
                               l2_enc_coef=0.0,
                               l1_enc_coef=l1_enc_coef,
                               ridge=ridge,
                               hidden_dropout=dropout,
                               input_dropout=input_dropout,
                               batchnorm=batchnorm,
                               activation=activation,
                               init='glorot_uniform',
                               debug=args.debug)
        net.build()
        net.model.summary()

        optimizer = opt.__dict__['RMSprop'](lr=lr, clipvalue=5.0)
        net.model.compile(loss=net.loss, optimizer=optimizer)

        return net.model

    output_dir = os.path.join(args.outputdir, 'hyperopt_results')
    objective = CompileFN('autoencoder_hyperpar_db', 'myexp1',
                          data_fn=data_fn,
                          model_fn=model_fn,
                          loss_metric='loss',
                          loss_metric_mode='min',
                          valid_split=.2,
                          save_model=None,
                          save_results=True,
                          use_tensorboard=False,
                          save_dir=output_dir)
    # Smoke-test the objective once before launching the full search.
    test_fn(objective, hyper_params, save_model=None)

    trials = Trials()
    best = fmin(objective,
                hyper_params,
                trials=trials,
                algo=tpe.suggest,
                max_evals=args.hypern,
                catch_eval_exceptions=True)

    with open(os.path.join(output_dir, 'trials.pickle'), 'wb') as f:
        pickle.dump(trials, f)

    #TODO: map indices in "best" back to choice-based hyperpars before saving
    with open(os.path.join(output_dir, 'best.json'), 'wt') as f:
        json.dump(best, f, sort_keys=True, indent=4)

    print(best)

    #TODO: not just save the best conf but also train the model with these params
| [
"json.dump",
"pickle.dump",
"hyperopt.hp.uniform",
"numpy.log",
"hyperopt.Trials",
"hyperopt.hp.choice",
"hyperopt.fmin",
"kopt.CompileFN",
"kopt.test_fn",
"os.path.join"
] | [((3022, 3070), 'os.path.join', 'os.path.join', (['args.outputdir', '"""hyperopt_results"""'], {}), "(args.outputdir, 'hyperopt_results')\n", (3034, 3070), False, 'import os\n'), ((3087, 3324), 'kopt.CompileFN', 'CompileFN', (['"""autoencoder_hyperpar_db"""', '"""myexp1"""'], {'data_fn': 'data_fn', 'model_fn': 'model_fn', 'loss_metric': '"""loss"""', 'loss_metric_mode': '"""min"""', 'valid_split': '(0.2)', 'save_model': 'None', 'save_results': '(True)', 'use_tensorboard': '(False)', 'save_dir': 'output_dir'}), "('autoencoder_hyperpar_db', 'myexp1', data_fn=data_fn, model_fn=\n model_fn, loss_metric='loss', loss_metric_mode='min', valid_split=0.2,\n save_model=None, save_results=True, use_tensorboard=False, save_dir=\n output_dir)\n", (3096, 3324), False, 'from kopt import CompileFN, test_fn\n'), ((3549, 3598), 'kopt.test_fn', 'test_fn', (['objective', 'hyper_params'], {'save_model': 'None'}), '(objective, hyper_params, save_model=None)\n', (3556, 3598), False, 'from kopt import CompileFN, test_fn\n'), ((3613, 3621), 'hyperopt.Trials', 'Trials', ([], {}), '()\n', (3619, 3621), False, 'from hyperopt import fmin, tpe, hp, Trials\n'), ((3633, 3751), 'hyperopt.fmin', 'fmin', (['objective', 'hyper_params'], {'trials': 'trials', 'algo': 'tpe.suggest', 'max_evals': 'args.hypern', 'catch_eval_exceptions': '(True)'}), '(objective, hyper_params, trials=trials, algo=tpe.suggest, max_evals=\n args.hypern, catch_eval_exceptions=True)\n', (3637, 3751), False, 'from hyperopt import fmin, tpe, hp, Trials\n'), ((3905, 3927), 'pickle.dump', 'pickle.dump', (['trials', 'f'], {}), '(trials, f)\n', (3916, 3927), False, 'import pickle\n'), ((4080, 4124), 'json.dump', 'json.dump', (['best', 'f'], {'sort_keys': '(True)', 'indent': '(4)'}), '(best, f, sort_keys=True, indent=4)\n', (4089, 4124), False, 'import json\n'), ((451, 489), 'hyperopt.hp.choice', 'hp.choice', (['"""d_norm_log"""', '(True, False)'], {}), "('d_norm_log', (True, False))\n", (460, 489), False, 'from hyperopt import 
fmin, tpe, hp, Trials\n'), ((530, 573), 'hyperopt.hp.choice', 'hp.choice', (['"""d_norm_zeromean"""', '(True, False)'], {}), "('d_norm_zeromean', (True, False))\n", (539, 573), False, 'from hyperopt import fmin, tpe, hp, Trials\n'), ((608, 645), 'hyperopt.hp.choice', 'hp.choice', (['"""d_norm_sf"""', '(True, False)'], {}), "('d_norm_sf', (True, False))\n", (617, 645), False, 'from hyperopt import fmin, tpe, hp, Trials\n'), ((963, 1081), 'hyperopt.hp.choice', 'hp.choice', (['"""m_hiddensize"""', '((64, 32, 64), (32, 16, 32), (64, 64), (32, 32), (16, 16), (16,), (32,), (\n 64,), (128,))'], {}), "('m_hiddensize', ((64, 32, 64), (32, 16, 32), (64, 64), (32, 32),\n (16, 16), (16,), (32,), (64,), (128,)))\n", (972, 1081), False, 'from hyperopt import fmin, tpe, hp, Trials\n'), ((1218, 1304), 'hyperopt.hp.choice', 'hp.choice', (['"""m_activation"""', "('relu', 'selu', 'elu', 'PReLU', 'linear', 'LeakyReLU')"], {}), "('m_activation', ('relu', 'selu', 'elu', 'PReLU', 'linear',\n 'LeakyReLU'))\n", (1227, 1304), False, 'from hyperopt import fmin, tpe, hp, Trials\n'), ((1385, 1433), 'hyperopt.hp.choice', 'hp.choice', (['"""m_aetype"""', "('zinb', 'zinb-conddisp')"], {}), "('m_aetype', ('zinb', 'zinb-conddisp'))\n", (1394, 1433), False, 'from hyperopt import fmin, tpe, hp, Trials\n'), ((1464, 1503), 'hyperopt.hp.choice', 'hp.choice', (['"""m_batchnorm"""', '(True, False)'], {}), "('m_batchnorm', (True, False))\n", (1473, 1503), False, 'from hyperopt import fmin, tpe, hp, Trials\n'), ((1532, 1558), 'hyperopt.hp.uniform', 'hp.uniform', (['"""m_do"""', '(0)', '(0.7)'], {}), "('m_do', 0, 0.7)\n", (1542, 1558), False, 'from hyperopt import fmin, tpe, hp, Trials\n'), ((1593, 1625), 'hyperopt.hp.uniform', 'hp.uniform', (['"""m_input_do"""', '(0)', '(0.8)'], {}), "('m_input_do', 0, 0.8)\n", (1603, 1625), False, 'from hyperopt import fmin, tpe, hp, Trials\n'), ((3842, 3883), 'os.path.join', 'os.path.join', (['output_dir', '"""trials.pickle"""'], {}), "(output_dir, 'trials.pickle')\n", 
(3854, 3883), False, 'import os\n'), ((4021, 4058), 'os.path.join', 'os.path.join', (['output_dir', '"""best.json"""'], {}), "(output_dir, 'best.json')\n", (4033, 4058), False, 'import os\n'), ((733, 746), 'numpy.log', 'np.log', (['(0.001)'], {}), '(0.001)\n', (739, 746), True, 'import numpy as np\n'), ((747, 759), 'numpy.log', 'np.log', (['(0.01)'], {}), '(0.01)\n', (753, 759), True, 'import numpy as np\n'), ((812, 825), 'numpy.log', 'np.log', (['(1e-07)'], {}), '(1e-07)\n', (818, 825), True, 'import numpy as np\n'), ((826, 837), 'numpy.log', 'np.log', (['(0.1)'], {}), '(0.1)\n', (832, 837), True, 'import numpy as np\n'), ((903, 916), 'numpy.log', 'np.log', (['(1e-07)'], {}), '(1e-07)\n', (909, 916), True, 'import numpy as np\n'), ((917, 928), 'numpy.log', 'np.log', (['(0.1)'], {}), '(0.1)\n', (923, 928), True, 'import numpy as np\n')] |
#%%
from xmlrpc.client import boolean
import numpy as np
from numpy.linalg import matrix_power
from typing import Callable, List
#%%
class graph_data:
    """A graph together with per-node feature vectors.

    Args:
        graph: (n, n) adjacency/propagation matrix.
        features: (n, f) matrix of node features; row i belongs to node i.

    Raises:
        ValueError: if ``graph`` is not square or the number of feature
            rows does not match the number of nodes.
    """

    def __init__(self, graph: np.array, features: np.array):
        n_rows, n_cols = np.shape(graph)
        if (n_rows != n_cols):
            raise ValueError("graph must be a square matrix")
        n_feat_rows, _ = np.shape(features)
        if (n_rows != n_feat_rows):
            raise ValueError("the number of rows of features does not match the number of nodes in the graph")
        self.graph = graph
        self.features = features

    def propagate(self, depth: int, attention: np.array) -> np.array:
        """Propagate the attended nodes' features ``depth`` steps.

        Features of nodes outside ``attention`` are zeroed first, then
        ``f <- graph @ f`` is applied ``depth`` times.
        """
        f = np.zeros(shape=self.features.shape)
        f[attention, :] = self.features[attention, :]
        for _ in range(depth):
            f = self.graph @ f
        return f

    def get_feature_vector(self, depths: List[int],
                           attensions: List[np.array]) -> np.array:
        """Concatenate propagated features for every (depth, attention)
        pair, column-wise, in row-major (depth outer) order."""
        blocks = [self.propagate(depth, attention)
                  for depth in depths
                  for attention in attensions]
        return np.concatenate(blocks, axis=1)

    def get_index(self, index: int, sizes: List[int]) -> List[int]:
        """Unravel a flat ``index`` into mixed-radix digits over ``sizes``
        (row-major order, last size varies fastest)."""
        digits = []
        for size in reversed(sizes):
            digit = index % size
            index = (index - digit) // size
            digits.insert(0, digit)
        return digits

    def get_number_of_nodes(self):
        """Number of nodes in the graph."""
        return np.shape(self.graph)[0]

    def get_number_of_features(self):
        """Number of feature columns per node."""
        return np.shape(self.features)[1]

    def get_single_feature(self, index_in_feature_vector: int,
                           depths: List[int],
                           attensions: List[np.array],
                           threshold: np.generic = 0):
        """Return ``(column, attention)`` for one column of the full
        feature vector produced by :meth:`get_feature_vector`.

        Note: ``threshold`` is currently unused; it is kept for interface
        compatibility with existing callers.
        """
        depth_index, attention_index, col_index = self.get_index(
            index_in_feature_vector,
            [len(depths), len(attensions), self.features.shape[1]])
        depth = depths[depth_index]
        attention = attensions[attention_index]
        col = self.propagate(depth, attention)[:, col_index]
        return (col, attention)

    def get_attentions(self, index_in_feature_vector: int,
                       threshold: np.generic,
                       depths: List[int],
                       attensions: List[np.array]) -> List[List[int]]:
        """Split the attended nodes by whether their propagated value for
        the selected column exceeds ``threshold``.

        Returns ``[above, at_or_below]`` lists of node indices.
        """
        depth_index, attention_index, col_index = self.get_index(
            index_in_feature_vector,
            [len(depths), len(attensions), self.features.shape[1]])
        depth = depths[depth_index]
        attention = attensions[attention_index]
        col = self.propagate(depth, attention)[:, col_index]
        # Bug fix: the original also called the undefined name
        # ``agg.get_attention`` here, raising NameError on every call.
        gt_attention = [i for i in attention if (col[i] > threshold)]
        lte_attention = [i for i in attention if (col[i] <= threshold)]
        return [gt_attention, lte_attention]
| [
"numpy.shape",
"numpy.zeros",
"numpy.concatenate"
] | [((232, 247), 'numpy.shape', 'np.shape', (['graph'], {}), '(graph)\n', (240, 247), True, 'import numpy as np\n'), ((350, 368), 'numpy.shape', 'np.shape', (['features'], {}), '(features)\n', (358, 368), True, 'import numpy as np\n'), ((647, 682), 'numpy.zeros', 'np.zeros', ([], {'shape': 'self.features.shape'}), '(shape=self.features.shape)\n', (655, 682), True, 'import numpy as np\n'), ((1146, 1182), 'numpy.concatenate', 'np.concatenate', (['feature_list'], {'axis': '(1)'}), '(feature_list, axis=1)\n', (1160, 1182), True, 'import numpy as np\n'), ((1575, 1595), 'numpy.shape', 'np.shape', (['self.graph'], {}), '(self.graph)\n', (1583, 1595), True, 'import numpy as np\n'), ((1654, 1677), 'numpy.shape', 'np.shape', (['self.features'], {}), '(self.features)\n', (1662, 1677), True, 'import numpy as np\n')] |
import numpy as np
from skimage._shared.testing import assert_equal
from skimage import data
from skimage import transform as tf
from skimage.color import rgb2gray
from skimage.feature import (BRIEF, match_descriptors,
corner_peaks, corner_harris)
from skimage._shared import testing
def test_binary_descriptors_unequal_descriptor_sizes_error():
    """match_descriptors must reject descriptor sets of different widths."""
    four_bit = np.array([[True, True, False, True],
                         [False, True, False, True]])
    five_bit = np.array([[True, False, False, True, False],
                         [False, True, True, True, False]])
    with testing.raises(ValueError):
        match_descriptors(four_bit, five_bit)
def test_binary_descriptors():
    """Hamming matching of two small binary descriptor sets."""
    query = np.array([[True, True, False, True, True],
                      [False, True, False, True, True]])
    train = np.array([[True, False, False, True, False],
                      [False, False, True, True, True]])
    pairs = match_descriptors(query, train)
    assert_equal(pairs, [[0, 0], [1, 1]])
def test_binary_descriptors_rotation_crosscheck_false():
    """Verify matched keypoints and their corresponding masks results between
    image and its rotated version with the expected keypoint pairs with
    cross_check disabled."""
    img = data.astronaut()
    img = rgb2gray(img)
    # Rotate the image slightly (0.15 rad); BRIEF descriptors are extracted
    # from both the original and the rotated version.
    tform = tf.SimilarityTransform(scale=1, rotation=0.15, translation=(0, 0))
    rotated_img = tf.warp(img, tform, clip=False)

    extractor = BRIEF(descriptor_size=512)

    keypoints1 = corner_peaks(corner_harris(img), min_distance=5,
                              threshold_abs=0, threshold_rel=0.1)
    extractor.extract(img, keypoints1)
    descriptors1 = extractor.descriptors

    keypoints2 = corner_peaks(corner_harris(rotated_img), min_distance=5,
                              threshold_abs=0, threshold_rel=0.1)
    extractor.extract(rotated_img, keypoints2)
    descriptors2 = extractor.descriptors

    matches = match_descriptors(descriptors1, descriptors2, cross_check=False)

    # With cross_check=False every descriptor in descriptors1 gets matched,
    # so the first column is simply 0..46; the second column holds the
    # regression-pinned nearest neighbours in descriptors2.
    exp_matches1 = np.array([ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11,
                             12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
                             24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
                             36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46])
    exp_matches2 = np.array([ 0, 31,  2,  3,  1,  4,  6,  4, 38,  5, 27,  7,
                             13, 10,  9, 27,  7, 11, 15,  8, 23, 14, 12, 16,
                             10, 25, 18, 19, 21, 20, 41, 24, 25, 26, 28, 27,
                             22, 23, 29, 30, 31, 32, 35, 33, 34, 30, 36])
    assert_equal(matches[:, 0], exp_matches1)
    assert_equal(matches[:, 1], exp_matches2)

    # minkowski takes a different code path, therefore we test it explicitly
    matches = match_descriptors(descriptors1, descriptors2,
                                metric='minkowski', cross_check=False)
    assert_equal(matches[:, 0], exp_matches1)
    assert_equal(matches[:, 1], exp_matches2)

    # it also has an extra parameter
    matches = match_descriptors(descriptors1, descriptors2,
                                metric='minkowski', p=4, cross_check=False)
    assert_equal(matches[:, 0], exp_matches1)
    assert_equal(matches[:, 1], exp_matches2)
def test_binary_descriptors_rotation_crosscheck_true():
    """Verify matched keypoints and their corresponding masks results between
    image and its rotated version with the expected keypoint pairs with
    cross_check enabled."""
    img = data.astronaut()
    img = rgb2gray(img)
    # Same setup as the cross_check=False variant: extract BRIEF descriptors
    # from the image and a slightly rotated copy.
    tform = tf.SimilarityTransform(scale=1, rotation=0.15, translation=(0, 0))
    rotated_img = tf.warp(img, tform, clip=False)

    extractor = BRIEF(descriptor_size=512)

    keypoints1 = corner_peaks(corner_harris(img), min_distance=5,
                              threshold_abs=0, threshold_rel=0.1)
    extractor.extract(img, keypoints1)
    descriptors1 = extractor.descriptors

    keypoints2 = corner_peaks(corner_harris(rotated_img), min_distance=5,
                              threshold_abs=0, threshold_rel=0.1)
    extractor.extract(rotated_img, keypoints2)
    descriptors2 = extractor.descriptors

    matches = match_descriptors(descriptors1, descriptors2, cross_check=True)

    # cross_check=True keeps only mutual nearest neighbours, so fewer pairs
    # survive than in the cross_check=False test; values are regression-pinned.
    exp_matches1 = np.array([ 0,  2,  3,  4,  5,  6,  9, 11, 12, 13, 14, 17,
                             18, 19, 21, 22, 23, 26, 27, 28, 29, 31, 32, 33,
                             34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46])
    exp_matches2 = np.array([ 0,  2,  3,  1,  4,  6,  5,  7, 13, 10,  9, 11,
                             15,  8, 14, 12, 16, 18, 19, 21, 20, 24, 25, 26,
                             28, 27, 22, 23, 29, 30, 31, 32, 35, 33, 34, 36])
    assert_equal(matches[:, 0], exp_matches1)
    assert_equal(matches[:, 1], exp_matches2)
def test_max_distance():
    """`max_distance` filters out matches beyond the given descriptor distance."""
    query = np.zeros((10, 128))
    train = np.zeros((15, 128))
    # Make the first query descriptor lie at distance sqrt(128) from every
    # (all-zero) train descriptor; the other nine are at distance 0.
    query[0, :] = 1

    matches = match_descriptors(query, train, metric='euclidean',
                                max_distance=0.1, cross_check=False)
    assert len(matches) == 9

    matches = match_descriptors(query, train, metric='euclidean',
                                max_distance=np.sqrt(128.1),
                                cross_check=False)
    assert len(matches) == 10

    matches = match_descriptors(query, train, metric='euclidean',
                                max_distance=0.1,
                                cross_check=True)
    assert_equal(matches, [[1, 0]])

    matches = match_descriptors(query, train, metric='euclidean',
                                max_distance=np.sqrt(128.1),
                                cross_check=True)
    assert_equal(matches, [[1, 0]])
def test_max_ratio():
    """Lowe's ratio test: `max_ratio` prunes ambiguous nearest-neighbour matches."""
    def count_matches(d1, d2, ratio):
        # Helper: number of matches surviving the given ratio threshold.
        return len(match_descriptors(d1, d2, metric='euclidean',
                                     max_ratio=ratio, cross_check=False))

    descs1 = 10 * np.arange(10)[:, None].astype(np.float32)
    descs2 = 10 * np.arange(15)[:, None].astype(np.float32)

    # Move train descriptor 0 to 5.0 so query 0 has two neighbours (5 and 10)
    # whose distance ratio is exactly 0.5.
    descs2[0] = 5.0

    assert_equal(count_matches(descs1, descs2, 1.0), 10)
    assert_equal(count_matches(descs1, descs2, 0.6), 10)
    assert_equal(count_matches(descs1, descs2, 0.5), 9)

    descs1[0] = 7.5
    assert_equal(count_matches(descs1, descs2, 0.5), 9)

    # With a single train descriptor the ratio test cannot reject anything.
    descs2 = 10 * np.arange(1)[:, None].astype(np.float32)
    assert_equal(count_matches(descs1, descs2, 1.0), 10)
    assert_equal(count_matches(descs1, descs2, 0.5), 10)

    descs1 = 10 * np.arange(1)[:, None].astype(np.float32)
    assert_equal(count_matches(descs1, descs2, 1.0), 1)
    assert_equal(count_matches(descs1, descs2, 0.5), 1)
| [
"skimage._shared.testing.raises",
"skimage.color.rgb2gray",
"skimage._shared.testing.assert_equal",
"skimage.data.astronaut",
"numpy.zeros",
"skimage.feature.match_descriptors",
"skimage.transform.SimilarityTransform",
"skimage.feature.corner_harris",
"skimage.feature.BRIEF",
"numpy.array",
"num... | [((465, 530), 'numpy.array', 'np.array', (['[[True, True, False, True], [False, True, False, True]]'], {}), '([[True, True, False, True], [False, True, False, True]])\n', (473, 530), True, 'import numpy as np\n'), ((567, 646), 'numpy.array', 'np.array', (['[[True, False, False, True, False], [False, True, True, True, False]]'], {}), '([[True, False, False, True, False], [False, True, True, True, False]])\n', (575, 646), True, 'import numpy as np\n'), ((795, 872), 'numpy.array', 'np.array', (['[[True, True, False, True, True], [False, True, False, True, True]]'], {}), '([[True, True, False, True, True], [False, True, False, True, True]])\n', (803, 872), True, 'import numpy as np\n'), ((909, 988), 'numpy.array', 'np.array', (['[[True, False, False, True, False], [False, False, True, True, True]]'], {}), '([[True, False, False, True, False], [False, False, True, True, True]])\n', (917, 988), True, 'import numpy as np\n'), ((1026, 1059), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {}), '(descs1, descs2)\n', (1043, 1059), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((1064, 1103), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches', '[[0, 0], [1, 1]]'], {}), '(matches, [[0, 0], [1, 1]])\n', (1076, 1103), False, 'from skimage._shared.testing import assert_equal\n'), ((1352, 1368), 'skimage.data.astronaut', 'data.astronaut', ([], {}), '()\n', (1366, 1368), False, 'from skimage import data\n'), ((1379, 1392), 'skimage.color.rgb2gray', 'rgb2gray', (['img'], {}), '(img)\n', (1387, 1392), False, 'from skimage.color import rgb2gray\n'), ((1405, 1471), 'skimage.transform.SimilarityTransform', 'tf.SimilarityTransform', ([], {'scale': '(1)', 'rotation': '(0.15)', 'translation': '(0, 0)'}), '(scale=1, rotation=0.15, translation=(0, 0))\n', (1427, 1471), True, 'from skimage import transform as tf\n'), ((1490, 1521), 'skimage.transform.warp', 'tf.warp', (['img', 'tform'], 
{'clip': '(False)'}), '(img, tform, clip=False)\n', (1497, 1521), True, 'from skimage import transform as tf\n'), ((1539, 1565), 'skimage.feature.BRIEF', 'BRIEF', ([], {'descriptor_size': '(512)'}), '(descriptor_size=512)\n', (1544, 1565), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((2023, 2087), 'skimage.feature.match_descriptors', 'match_descriptors', (['descriptors1', 'descriptors2'], {'cross_check': '(False)'}), '(descriptors1, descriptors2, cross_check=False)\n', (2040, 2087), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((2108, 2304), 'numpy.array', 'np.array', (['[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, \n 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,\n 39, 40, 41, 42, 43, 44, 45, 46]'], {}), '([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,\n 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,\n 37, 38, 39, 40, 41, 42, 43, 44, 45, 46])\n', (2116, 2304), True, 'import numpy as np\n'), ((2413, 2609), 'numpy.array', 'np.array', (['[0, 31, 2, 3, 1, 4, 6, 4, 38, 5, 27, 7, 13, 10, 9, 27, 7, 11, 15, 8, 23, 14,\n 12, 16, 10, 25, 18, 19, 21, 20, 41, 24, 25, 26, 28, 27, 22, 23, 29, 30,\n 31, 32, 35, 33, 34, 30, 36]'], {}), '([0, 31, 2, 3, 1, 4, 6, 4, 38, 5, 27, 7, 13, 10, 9, 27, 7, 11, 15, \n 8, 23, 14, 12, 16, 10, 25, 18, 19, 21, 20, 41, 24, 25, 26, 28, 27, 22, \n 23, 29, 30, 31, 32, 35, 33, 34, 30, 36])\n', (2421, 2609), True, 'import numpy as np\n'), ((2703, 2744), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches[:, 0]', 'exp_matches1'], {}), '(matches[:, 0], exp_matches1)\n', (2715, 2744), False, 'from skimage._shared.testing import assert_equal\n'), ((2749, 2790), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches[:, 1]', 'exp_matches2'], {}), '(matches[:, 1], exp_matches2)\n', (2761, 2790), False, 'from skimage._shared.testing 
import assert_equal\n'), ((2883, 2971), 'skimage.feature.match_descriptors', 'match_descriptors', (['descriptors1', 'descriptors2'], {'metric': '"""minkowski"""', 'cross_check': '(False)'}), "(descriptors1, descriptors2, metric='minkowski',\n cross_check=False)\n", (2900, 2971), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((3004, 3045), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches[:, 0]', 'exp_matches1'], {}), '(matches[:, 0], exp_matches1)\n', (3016, 3045), False, 'from skimage._shared.testing import assert_equal\n'), ((3050, 3091), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches[:, 1]', 'exp_matches2'], {}), '(matches[:, 1], exp_matches2)\n', (3062, 3091), False, 'from skimage._shared.testing import assert_equal\n'), ((3144, 3237), 'skimage.feature.match_descriptors', 'match_descriptors', (['descriptors1', 'descriptors2'], {'metric': '"""minkowski"""', 'p': '(4)', 'cross_check': '(False)'}), "(descriptors1, descriptors2, metric='minkowski', p=4,\n cross_check=False)\n", (3161, 3237), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((3270, 3311), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches[:, 0]', 'exp_matches1'], {}), '(matches[:, 0], exp_matches1)\n', (3282, 3311), False, 'from skimage._shared.testing import assert_equal\n'), ((3316, 3357), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches[:, 1]', 'exp_matches2'], {}), '(matches[:, 1], exp_matches2)\n', (3328, 3357), False, 'from skimage._shared.testing import assert_equal\n'), ((3604, 3620), 'skimage.data.astronaut', 'data.astronaut', ([], {}), '()\n', (3618, 3620), False, 'from skimage import data\n'), ((3631, 3644), 'skimage.color.rgb2gray', 'rgb2gray', (['img'], {}), '(img)\n', (3639, 3644), False, 'from skimage.color import rgb2gray\n'), ((3657, 3723), 'skimage.transform.SimilarityTransform', 'tf.SimilarityTransform', ([], 
{'scale': '(1)', 'rotation': '(0.15)', 'translation': '(0, 0)'}), '(scale=1, rotation=0.15, translation=(0, 0))\n', (3679, 3723), True, 'from skimage import transform as tf\n'), ((3742, 3773), 'skimage.transform.warp', 'tf.warp', (['img', 'tform'], {'clip': '(False)'}), '(img, tform, clip=False)\n', (3749, 3773), True, 'from skimage import transform as tf\n'), ((3791, 3817), 'skimage.feature.BRIEF', 'BRIEF', ([], {'descriptor_size': '(512)'}), '(descriptor_size=512)\n', (3796, 3817), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((4275, 4338), 'skimage.feature.match_descriptors', 'match_descriptors', (['descriptors1', 'descriptors2'], {'cross_check': '(True)'}), '(descriptors1, descriptors2, cross_check=True)\n', (4292, 4338), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((4359, 4511), 'numpy.array', 'np.array', (['[0, 2, 3, 4, 5, 6, 9, 11, 12, 13, 14, 17, 18, 19, 21, 22, 23, 26, 27, 28, \n 29, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46]'], {}), '([0, 2, 3, 4, 5, 6, 9, 11, 12, 13, 14, 17, 18, 19, 21, 22, 23, 26, \n 27, 28, 29, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46])\n', (4367, 4511), True, 'import numpy as np\n'), ((4591, 4739), 'numpy.array', 'np.array', (['[0, 2, 3, 1, 4, 6, 5, 7, 13, 10, 9, 11, 15, 8, 14, 12, 16, 18, 19, 21, 20, \n 24, 25, 26, 28, 27, 22, 23, 29, 30, 31, 32, 35, 33, 34, 36]'], {}), '([0, 2, 3, 1, 4, 6, 5, 7, 13, 10, 9, 11, 15, 8, 14, 12, 16, 18, 19,\n 21, 20, 24, 25, 26, 28, 27, 22, 23, 29, 30, 31, 32, 35, 33, 34, 36])\n', (4599, 4739), True, 'import numpy as np\n'), ((4808, 4849), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches[:, 0]', 'exp_matches1'], {}), '(matches[:, 0], exp_matches1)\n', (4820, 4849), False, 'from skimage._shared.testing import assert_equal\n'), ((4854, 4895), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches[:, 1]', 'exp_matches2'], {}), '(matches[:, 1], 
exp_matches2)\n', (4866, 4895), False, 'from skimage._shared.testing import assert_equal\n'), ((4936, 4955), 'numpy.zeros', 'np.zeros', (['(10, 128)'], {}), '((10, 128))\n', (4944, 4955), True, 'import numpy as np\n'), ((4969, 4988), 'numpy.zeros', 'np.zeros', (['(15, 128)'], {}), '((15, 128))\n', (4977, 4988), True, 'import numpy as np\n'), ((5027, 5121), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {'metric': '"""euclidean"""', 'max_distance': '(0.1)', 'cross_check': '(False)'}), "(descs1, descs2, metric='euclidean', max_distance=0.1,\n cross_check=False)\n", (5044, 5121), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((5410, 5503), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {'metric': '"""euclidean"""', 'max_distance': '(0.1)', 'cross_check': '(True)'}), "(descs1, descs2, metric='euclidean', max_distance=0.1,\n cross_check=True)\n", (5427, 5503), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((5570, 5601), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches', '[[1, 0]]'], {}), '(matches, [[1, 0]])\n', (5582, 5601), False, 'from skimage._shared.testing import assert_equal\n'), ((5789, 5820), 'skimage._shared.testing.assert_equal', 'assert_equal', (['matches', '[[1, 0]]'], {}), '(matches, [[1, 0]])\n', (5801, 5820), False, 'from skimage._shared.testing import assert_equal\n'), ((6001, 6092), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {'metric': '"""euclidean"""', 'max_ratio': '(1.0)', 'cross_check': '(False)'}), "(descs1, descs2, metric='euclidean', max_ratio=1.0,\n cross_check=False)\n", (6018, 6092), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((6171, 6262), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {'metric': '"""euclidean"""', 'max_ratio': '(0.6)', 
'cross_check': '(False)'}), "(descs1, descs2, metric='euclidean', max_ratio=0.6,\n cross_check=False)\n", (6188, 6262), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((6341, 6432), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {'metric': '"""euclidean"""', 'max_ratio': '(0.5)', 'cross_check': '(False)'}), "(descs1, descs2, metric='euclidean', max_ratio=0.5,\n cross_check=False)\n", (6358, 6432), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((6531, 6622), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {'metric': '"""euclidean"""', 'max_ratio': '(0.5)', 'cross_check': '(False)'}), "(descs1, descs2, metric='euclidean', max_ratio=0.5,\n cross_check=False)\n", (6548, 6622), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((6760, 6851), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {'metric': '"""euclidean"""', 'max_ratio': '(1.0)', 'cross_check': '(False)'}), "(descs1, descs2, metric='euclidean', max_ratio=1.0,\n cross_check=False)\n", (6777, 6851), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((6930, 7021), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {'metric': '"""euclidean"""', 'max_ratio': '(0.5)', 'cross_check': '(False)'}), "(descs1, descs2, metric='euclidean', max_ratio=0.5,\n cross_check=False)\n", (6947, 7021), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((7160, 7251), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {'metric': '"""euclidean"""', 'max_ratio': '(1.0)', 'cross_check': '(False)'}), "(descs1, descs2, metric='euclidean', max_ratio=1.0,\n cross_check=False)\n", (7177, 7251), False, 'from skimage.feature import BRIEF, match_descriptors, 
corner_peaks, corner_harris\n'), ((7329, 7420), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {'metric': '"""euclidean"""', 'max_ratio': '(0.5)', 'cross_check': '(False)'}), "(descs1, descs2, metric='euclidean', max_ratio=0.5,\n cross_check=False)\n", (7346, 7420), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((679, 705), 'skimage._shared.testing.raises', 'testing.raises', (['ValueError'], {}), '(ValueError)\n', (693, 705), False, 'from skimage._shared import testing\n'), ((715, 748), 'skimage.feature.match_descriptors', 'match_descriptors', (['descs1', 'descs2'], {}), '(descs1, descs2)\n', (732, 748), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((1597, 1615), 'skimage.feature.corner_harris', 'corner_harris', (['img'], {}), '(img)\n', (1610, 1615), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((1810, 1836), 'skimage.feature.corner_harris', 'corner_harris', (['rotated_img'], {}), '(rotated_img)\n', (1823, 1836), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((3849, 3867), 'skimage.feature.corner_harris', 'corner_harris', (['img'], {}), '(img)\n', (3862, 3867), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((4062, 4088), 'skimage.feature.corner_harris', 'corner_harris', (['rotated_img'], {}), '(rotated_img)\n', (4075, 4088), False, 'from skimage.feature import BRIEF, match_descriptors, corner_peaks, corner_harris\n'), ((5296, 5310), 'numpy.sqrt', 'np.sqrt', (['(128.1)'], {}), '(128.1)\n', (5303, 5310), True, 'import numpy as np\n'), ((5718, 5732), 'numpy.sqrt', 'np.sqrt', (['(128.1)'], {}), '(128.1)\n', (5725, 5732), True, 'import numpy as np\n'), ((5863, 5876), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (5872, 5876), True, 'import numpy as np\n'), ((5923, 5936), 
'numpy.arange', 'np.arange', (['(15)'], {}), '(15)\n', (5932, 5936), True, 'import numpy as np\n'), ((6704, 6716), 'numpy.arange', 'np.arange', (['(1)'], {}), '(1)\n', (6713, 6716), True, 'import numpy as np\n'), ((7104, 7116), 'numpy.arange', 'np.arange', (['(1)'], {}), '(1)\n', (7113, 7116), True, 'import numpy as np\n')] |
import numpy as np
import pytest
import torch
from scipy.spatial.distance import pdist, squareform
from finetuner.tuner.pytorch.losses import get_distance
N_BATCH = 10
N_DIM = 128
@pytest.mark.parametrize('distance', ['cosine', 'euclidean', 'sqeuclidean'])
def test_dist(distance):
embeddings = np.random.rand(N_BATCH, N_DIM)
real_dists = squareform(pdist(embeddings, metric=distance))
dists = get_distance(torch.tensor(embeddings), distance)
np.testing.assert_almost_equal(real_dists, dists.numpy())
| [
"numpy.random.rand",
"pytest.mark.parametrize",
"scipy.spatial.distance.pdist",
"torch.tensor"
] | [((185, 260), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""distance"""', "['cosine', 'euclidean', 'sqeuclidean']"], {}), "('distance', ['cosine', 'euclidean', 'sqeuclidean'])\n", (208, 260), False, 'import pytest\n'), ((304, 334), 'numpy.random.rand', 'np.random.rand', (['N_BATCH', 'N_DIM'], {}), '(N_BATCH, N_DIM)\n', (318, 334), True, 'import numpy as np\n'), ((364, 398), 'scipy.spatial.distance.pdist', 'pdist', (['embeddings'], {'metric': 'distance'}), '(embeddings, metric=distance)\n', (369, 398), False, 'from scipy.spatial.distance import pdist, squareform\n'), ((425, 449), 'torch.tensor', 'torch.tensor', (['embeddings'], {}), '(embeddings)\n', (437, 449), False, 'import torch\n')] |
import unittest
import numpy as np
import tensorflow as tf
from elasticdl_preprocessing.layers.concatenate_with_offset import (
ConcatenateWithOffset,
)
from elasticdl_preprocessing.tests.test_utils import (
ragged_tensor_equal,
sparse_tensor_equal,
)
class ConcatenateWithOffsetTest(unittest.TestCase):
def test_concatenate_with_offset(self):
tensor_1 = tf.constant([[1], [1], [1]])
tensor_2 = tf.constant([[2], [2], [2]])
offsets = [0, 10]
concat_layer = ConcatenateWithOffset(offsets=offsets, axis=1)
output = concat_layer([tensor_1, tensor_2])
expected_out = np.array([[1, 12], [1, 12], [1, 12]])
self.assertTrue(np.array_equal(output.numpy(), expected_out))
ragged_tensor_1 = tf.ragged.constant([[1], [], [1]], dtype=tf.int64)
ragged_tensor_2 = tf.ragged.constant([[2], [2], []], dtype=tf.int64)
output = concat_layer([ragged_tensor_1, ragged_tensor_2])
expected_out = tf.ragged.constant([[1, 12], [12], [1]], dtype=tf.int64)
self.assertTrue(ragged_tensor_equal(output, expected_out))
sparse_tensor_1 = ragged_tensor_1.to_sparse()
sparse_tensor_2 = ragged_tensor_2.to_sparse()
output = concat_layer([sparse_tensor_1, sparse_tensor_2])
expected_out = tf.SparseTensor(
indices=np.array([[0, 0], [0, 1], [1, 1], [2, 0]]),
values=np.array([1, 12, 12, 1]),
dense_shape=(3, 2),
)
self.assertTrue(sparse_tensor_equal(output, expected_out))
| [
"elasticdl_preprocessing.tests.test_utils.sparse_tensor_equal",
"elasticdl_preprocessing.tests.test_utils.ragged_tensor_equal",
"tensorflow.constant",
"elasticdl_preprocessing.layers.concatenate_with_offset.ConcatenateWithOffset",
"numpy.array",
"tensorflow.ragged.constant"
] | [((383, 411), 'tensorflow.constant', 'tf.constant', (['[[1], [1], [1]]'], {}), '([[1], [1], [1]])\n', (394, 411), True, 'import tensorflow as tf\n'), ((431, 459), 'tensorflow.constant', 'tf.constant', (['[[2], [2], [2]]'], {}), '([[2], [2], [2]])\n', (442, 459), True, 'import tensorflow as tf\n'), ((509, 555), 'elasticdl_preprocessing.layers.concatenate_with_offset.ConcatenateWithOffset', 'ConcatenateWithOffset', ([], {'offsets': 'offsets', 'axis': '(1)'}), '(offsets=offsets, axis=1)\n', (530, 555), False, 'from elasticdl_preprocessing.layers.concatenate_with_offset import ConcatenateWithOffset\n'), ((632, 669), 'numpy.array', 'np.array', (['[[1, 12], [1, 12], [1, 12]]'], {}), '([[1, 12], [1, 12], [1, 12]])\n', (640, 669), True, 'import numpy as np\n'), ((767, 817), 'tensorflow.ragged.constant', 'tf.ragged.constant', (['[[1], [], [1]]'], {'dtype': 'tf.int64'}), '([[1], [], [1]], dtype=tf.int64)\n', (785, 817), True, 'import tensorflow as tf\n'), ((844, 894), 'tensorflow.ragged.constant', 'tf.ragged.constant', (['[[2], [2], []]'], {'dtype': 'tf.int64'}), '([[2], [2], []], dtype=tf.int64)\n', (862, 894), True, 'import tensorflow as tf\n'), ((984, 1040), 'tensorflow.ragged.constant', 'tf.ragged.constant', (['[[1, 12], [12], [1]]'], {'dtype': 'tf.int64'}), '([[1, 12], [12], [1]], dtype=tf.int64)\n', (1002, 1040), True, 'import tensorflow as tf\n'), ((1065, 1106), 'elasticdl_preprocessing.tests.test_utils.ragged_tensor_equal', 'ragged_tensor_equal', (['output', 'expected_out'], {}), '(output, expected_out)\n', (1084, 1106), False, 'from elasticdl_preprocessing.tests.test_utils import ragged_tensor_equal, sparse_tensor_equal\n'), ((1498, 1539), 'elasticdl_preprocessing.tests.test_utils.sparse_tensor_equal', 'sparse_tensor_equal', (['output', 'expected_out'], {}), '(output, expected_out)\n', (1517, 1539), False, 'from elasticdl_preprocessing.tests.test_utils import ragged_tensor_equal, sparse_tensor_equal\n'), ((1343, 1385), 'numpy.array', 'np.array', (['[[0, 0], [0, 
1], [1, 1], [2, 0]]'], {}), '([[0, 0], [0, 1], [1, 1], [2, 0]])\n', (1351, 1385), True, 'import numpy as np\n'), ((1406, 1430), 'numpy.array', 'np.array', (['[1, 12, 12, 1]'], {}), '([1, 12, 12, 1])\n', (1414, 1430), True, 'import numpy as np\n')] |
import numpy as np
def print_results(iter, FO_evaluations, gbest, pworst,
error_fnc, error_x, swarm_size, n_variables,
intVar, print_freq):
"""
Auxiliary function to print PSO results
:param iter: numer of iteration
:param FO_evaluations:
:param gbest: global best particle
:param pworst: worst particle
:param error_fnc: normalized error of the obj function ||pworst_fitness - gbest_fitness||
:param error_x: normalized error of the obj function ||pworst_position - gbest_position||
:param swarm_size: number of particles
:param n_variables: number of dimmesions
:param intVar: array or list containing the indexes for the variables that must be integers
:param print_freq: frequency with the number of iterations that prints
:return:
"""
intVar = np.array(intVar)
if iter == 1:
print(' \n')
print('# STANDARD PARTICLE SWARM OPTIMIZATION ALGORITHM - gbest version ### \n')
print(' * Swarm size ................. {}\n'.format(swarm_size))
print(' * # continuous variables ..... {}\n'.format(n_variables - intVar.size))
print(' * # integer variables ....... {}\n'.format(intVar.size))
print(' \n')
if (iter == 1) or (iter/(print_freq) == round(iter/print_freq)):
if (iter == 1) or (iter/(print_freq*20) == round(iter/(print_freq))):
print(' --------------------------------------------------------------------------------------------\n')
print(' Iteration \t FO_evals \t gBest Fitness \t pWorst Fitness\t error_FO \t error_x\n')
print(' --------------------------------------------------------------------------------------------\n')
print('{:8.0f} \t {:5.0f} \t {:15.3e} \t {:11.3e} \t {:11.3e} \t {:6.3e} \n'.format(
iter, FO_evaluations, gbest, pworst, error_fnc, error_x))
| [
"numpy.array"
] | [((861, 877), 'numpy.array', 'np.array', (['intVar'], {}), '(intVar)\n', (869, 877), True, 'import numpy as np\n')] |
# Copyright 2021 The ByT5 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add Tasks to registry."""
import functools
import random
from byt5.tasks import DEFAULT_BYTE_OUTPUT_FEATURES
from byt5.tasks import DEFAULT_MT5_OUTPUT_FEATURES
from byt5.tasks import DEFAULT_PREPROCESSORS
import numpy
import seqio
import t5.data
from t5.data import preprocessors
# Place downloaded data from https://sigmorphon.github.io/sharedtasks/2020 in
# the following directory.
SIGMORPHON_DIR = None
FEATURE_MAP = {
'byt5': DEFAULT_BYTE_OUTPUT_FEATURES,
'mt5': DEFAULT_MT5_OUTPUT_FEATURES
}
# ====================== SIGMORPHON-2020 TASK-1 ====================
# Task 1: Multilingual Grapheme-to-Phoneme Conversion
# Please see website https://sigmorphon.github.io/sharedtasks/2020/task1/
# for details.
def get_2020_task1_preprocessor(language):
return [
functools.partial(
preprocessors.preprocess_tsv,
inputs_format=f' {language} ' + '{0}',
targets_format='{1}',
num_fields=2),
]
def metrics_task1_2020(targets, predictions):
"""Computes word error rate and edit distance metrics."""
def edit_distance(x, y) -> int:
# Implementation from
# https://github.com/sigmorphon/2020/blob/master/task1/evaluation/evallib.py
idim = len(x) + 1
jdim = len(y) + 1
table = numpy.zeros((idim, jdim), dtype=numpy.uint8)
table[1:, 0] = 1
table[0, 1:] = 1
for i in range(1, idim):
for j in range(1, jdim):
if x[i - 1] == y[j - 1]:
table[i][j] = table[i - 1][j - 1]
else:
c1 = table[i - 1][j]
c2 = table[i][j - 1]
c3 = table[i - 1][j - 1]
table[i][j] = min(c1, c2, c3) + 1
return int(table[-1][-1])
# Word-level measures.
correct = 0
incorrect = 0
# Label-level measures.
total_edits = 0
total_length = 0
for gold, hypo in zip(targets, predictions):
edits = edit_distance(gold, hypo)
length = len(gold)
if edits == 0:
correct += 1
else:
incorrect += 1
total_edits += edits
total_length += length
wer = incorrect / (correct + incorrect)
ler = 100 * total_edits / total_length
return {'wer': wer, 'ler': ler}
langs = [
'arm', 'bul', 'fre', 'geo', 'hin', 'hun', 'ice', 'kor', 'lit', 'gre', 'ady',
'dut', 'jpn', 'rum', 'vie'
]
year = '2020'
task = 'task1'
data_dir = f'{SIGMORPHON_DIR}/{year}/{task}/data/'
for lang in langs:
for prefix, output_features in FEATURE_MAP.items():
seqio.TaskRegistry.add(
f'{prefix}_sigmorphon_{year}_{task}.{lang}',
source=seqio.TextLineDataSource(
split_to_filepattern={
'train': f'{data_dir}/train/{lang}_train.tsv',
'validation': f'{data_dir}/dev/{lang}_dev.tsv',
'test': f'{data_dir}/test/{lang}_test.tsv'}),
preprocessors=get_2020_task1_preprocessor(lang) + DEFAULT_PREPROCESSORS,
output_features=output_features,
metric_fns=[metrics_task1_2020])
for prefix in ['mt5', 'byt5']:
t5.data.MixtureRegistry.add(
f'{prefix}_sigmorphon_{year}_{task}',
[f'{prefix}_sigmorphon_{year}_{task}.{lang}' for lang in langs],
default_rate=1.)
# ====================== SIGMORPHON-2020 TASK-0 ====================
# Task 0: Typologically Diverse Morphological Inflection
# Please see website https://sigmorphon.github.io/sharedtasks/2020/task0/
# for details.
def get_2020_task0_preprocessor(language):
return [
functools.partial(
preprocessors.preprocess_tsv,
inputs_format=f'{language}' + ' {0} ' + 'form={2}',
targets_format='{1}',
num_fields=3),
]
def metrics_task0_2020(targets, predictions):
"""Calculates exact match and edit distance based metrics."""
def distance(str1, str2):
"""Levenshtein distance."""
# Implementation from
# https://github.com/sigmorphon2020/task0-data/blob/master/evaluate.py
m = numpy.zeros([len(str2) + 1, len(str1) + 1])
for x in range(1, len(str2) + 1):
m[x][0] = m[x - 1][0] + 1
for y in range(1, len(str1) + 1):
m[0][y] = m[0][y - 1] + 1
for x in range(1, len(str2) + 1):
for y in range(1, len(str1) + 1):
if str1[y - 1] == str2[x - 1]:
dg = 0
else:
dg = 1
m[x][y] = min(m[x - 1][y] + 1, m[x][y - 1] + 1, m[x - 1][y - 1] + dg)
return int(m[len(str2)][len(str1)])
correct, dist, total = 0., 0., 0.
for target, prediction in zip(targets, predictions):
if target == prediction:
correct += 1
dist += distance(target, prediction)
total += 1
return {
'accuracy': round(correct / total * 100, 2),
'distance': round(dist / total, 2)
}
surprise_lang_path_prefix = [
'SURPRISE-LANGUAGES/Afro-Asiatic/mlt', 'SURPRISE-LANGUAGES/Germanic/gsw',
'SURPRISE-LANGUAGES/Nilo-Sahan/dje', 'SURPRISE-LANGUAGES/Romance/frm',
'SURPRISE-LANGUAGES/Indo-Aryan/urd', 'SURPRISE-LANGUAGES/Uralic/kpv',
'SURPRISE-LANGUAGES/Sino-Tibetan/bod', 'SURPRISE-LANGUAGES/Germanic/nno',
'SURPRISE-LANGUAGES/Uralic/olo', 'SURPRISE-LANGUAGES/Romance/fur',
'SURPRISE-LANGUAGES/Romance/cat', 'SURPRISE-LANGUAGES/Afro-Asiatic/syc',
'SURPRISE-LANGUAGES/Algic/cre', 'SURPRISE-LANGUAGES/Turkic/kir',
'SURPRISE-LANGUAGES/Uralic/lud', 'SURPRISE-LANGUAGES/Uralic/udm',
'SURPRISE-LANGUAGES/Iranian/pus', 'SURPRISE-LANGUAGES/Romance/ast',
'SURPRISE-LANGUAGES/Germanic/gml', 'SURPRISE-LANGUAGES/Turkic/bak',
'SURPRISE-LANGUAGES/Indo-Aryan/hin', 'SURPRISE-LANGUAGES/Iranian/fas',
'SURPRISE-LANGUAGES/Niger-Congo/sna', 'SURPRISE-LANGUAGES/Romance/xno',
'SURPRISE-LANGUAGES/Romance/vec', 'SURPRISE-LANGUAGES/Dravidian/kan',
'SURPRISE-LANGUAGES/Afro-Asiatic/orm', 'SURPRISE-LANGUAGES/Turkic/uzb',
'SURPRISE-LANGUAGES/Uto-Aztecan/ood', 'SURPRISE-LANGUAGES/Turkic/tuk',
'SURPRISE-LANGUAGES/Iranian/tgk', 'SURPRISE-LANGUAGES/Romance/lld',
'SURPRISE-LANGUAGES/Turkic/kaz', 'SURPRISE-LANGUAGES/Indo-Aryan/ben',
'SURPRISE-LANGUAGES/Siouan/dak', 'SURPRISE-LANGUAGES/Romance/glg',
'SURPRISE-LANGUAGES/Turkic/kjh', 'SURPRISE-LANGUAGES/Turkic/crh',
'SURPRISE-LANGUAGES/Indo-Aryan/san', 'SURPRISE-LANGUAGES/Dravidian/tel',
'SURPRISE-LANGUAGES/Tungusic/evn', 'SURPRISE-LANGUAGES/Turkic/aze',
'SURPRISE-LANGUAGES/Uralic/vro', 'SURPRISE-LANGUAGES/Turkic/uig',
'SURPRISE-LANGUAGES/Australian/mwf'
]
development_lang_path_prefix = [
'DEVELOPMENT-LANGUAGES/germanic/swe', 'DEVELOPMENT-LANGUAGES/germanic/ang',
'DEVELOPMENT-LANGUAGES/oto-manguean/azg',
'DEVELOPMENT-LANGUAGES/uralic/vep', 'DEVELOPMENT-LANGUAGES/niger-congo/lin',
'DEVELOPMENT-LANGUAGES/niger-congo/nya',
'DEVELOPMENT-LANGUAGES/germanic/frr', 'DEVELOPMENT-LANGUAGES/uralic/vot',
'DEVELOPMENT-LANGUAGES/austronesian/mlg',
'DEVELOPMENT-LANGUAGES/oto-manguean/ctp',
'DEVELOPMENT-LANGUAGES/oto-manguean/otm',
'DEVELOPMENT-LANGUAGES/oto-manguean/ote',
'DEVELOPMENT-LANGUAGES/uralic/fin',
'DEVELOPMENT-LANGUAGES/oto-manguean/cpa',
'DEVELOPMENT-LANGUAGES/austronesian/mao',
'DEVELOPMENT-LANGUAGES/uralic/mdf', 'DEVELOPMENT-LANGUAGES/germanic/dan',
'DEVELOPMENT-LANGUAGES/niger-congo/gaa',
'DEVELOPMENT-LANGUAGES/oto-manguean/cly',
'DEVELOPMENT-LANGUAGES/uralic/mhr', 'DEVELOPMENT-LANGUAGES/niger-congo/zul',
'DEVELOPMENT-LANGUAGES/uralic/krl', 'DEVELOPMENT-LANGUAGES/niger-congo/kon',
'DEVELOPMENT-LANGUAGES/oto-manguean/czn',
'DEVELOPMENT-LANGUAGES/germanic/gmh', 'DEVELOPMENT-LANGUAGES/uralic/izh',
'DEVELOPMENT-LANGUAGES/austronesian/ceb',
'DEVELOPMENT-LANGUAGES/germanic/nob',
'DEVELOPMENT-LANGUAGES/austronesian/tgl',
'DEVELOPMENT-LANGUAGES/austronesian/hil',
'DEVELOPMENT-LANGUAGES/niger-congo/lug',
'DEVELOPMENT-LANGUAGES/niger-congo/sot',
'DEVELOPMENT-LANGUAGES/niger-congo/swa',
'DEVELOPMENT-LANGUAGES/germanic/isl',
'DEVELOPMENT-LANGUAGES/oto-manguean/pei',
'DEVELOPMENT-LANGUAGES/uralic/sme', 'DEVELOPMENT-LANGUAGES/germanic/nld',
'DEVELOPMENT-LANGUAGES/niger-congo/aka',
'DEVELOPMENT-LANGUAGES/germanic/eng',
'DEVELOPMENT-LANGUAGES/oto-manguean/zpv',
'DEVELOPMENT-LANGUAGES/uralic/est', 'DEVELOPMENT-LANGUAGES/uralic/liv',
'DEVELOPMENT-LANGUAGES/oto-manguean/xty',
'DEVELOPMENT-LANGUAGES/germanic/deu', 'DEVELOPMENT-LANGUAGES/uralic/myv'
]
year = '2020'
task = 'task0'
data_dir = f'{SIGMORPHON_DIR}/{year}/task0-data/'
langs = [
path_prefix.split('/')[-1]
for path_prefix in surprise_lang_path_prefix + development_lang_path_prefix
]
random.shuffle(langs)
path_prefixes = surprise_lang_path_prefix + development_lang_path_prefix
for prefix, output_features in FEATURE_MAP.items():
for path_prefix in path_prefixes:
lang = path_prefix.split('/')[-1]
split_to_filepattern = {
'train': f'{data_dir}/{path_prefix}.trn',
'validation': f'{data_dir}/{path_prefix}.dev',
'test': f'{data_dir}/GOLD-TEST/{lang}.tst',
}
seqio.TaskRegistry.add(
f'{prefix}_sigmorphon_{year}_{task}.{lang}',
source=seqio.TextLineDataSource(
split_to_filepattern=split_to_filepattern),
preprocessors=get_2020_task0_preprocessor(lang) + DEFAULT_PREPROCESSORS,
output_features=output_features,
metric_fns=[metrics_task0_2020])
seqio.TaskRegistry.add(
f'{prefix}_sigmorphon_{year}_{task}.all',
source=seqio.TextLineDataSource(
split_to_filepattern={
'test': f'{data_dir}/test.tsv',
'validation': f'{data_dir}/validation.tsv',}),
preprocessors=[preprocessors.preprocess_tsv,
*DEFAULT_PREPROCESSORS,],
output_features=output_features,
metric_fns=[metrics_task0_2020])
for prefix in ['mt5', 'byt5']:
t5.data.MixtureRegistry.add(
f'{prefix}_sigmorphon_{year}_{task}',
[f'{prefix}_sigmorphon_{year}_{task}.{lang}' for lang in langs],
default_rate=1.)
| [
"numpy.zeros",
"random.shuffle",
"functools.partial",
"seqio.TextLineDataSource"
] | [((9063, 9084), 'random.shuffle', 'random.shuffle', (['langs'], {}), '(langs)\n', (9077, 9084), False, 'import random\n'), ((1369, 1496), 'functools.partial', 'functools.partial', (['preprocessors.preprocess_tsv'], {'inputs_format': "(f' {language} ' + '{0}')", 'targets_format': '"""{1}"""', 'num_fields': '(2)'}), "(preprocessors.preprocess_tsv, inputs_format=\n f' {language} ' + '{0}', targets_format='{1}', num_fields=2)\n", (1386, 1496), False, 'import functools\n'), ((1844, 1888), 'numpy.zeros', 'numpy.zeros', (['(idim, jdim)'], {'dtype': 'numpy.uint8'}), '((idim, jdim), dtype=numpy.uint8)\n', (1855, 1888), False, 'import numpy\n'), ((3986, 4125), 'functools.partial', 'functools.partial', (['preprocessors.preprocess_tsv'], {'inputs_format': "(f'{language}' + ' {0} ' + 'form={2}')", 'targets_format': '"""{1}"""', 'num_fields': '(3)'}), "(preprocessors.preprocess_tsv, inputs_format=f'{language}' +\n ' {0} ' + 'form={2}', targets_format='{1}', num_fields=3)\n", (4003, 4125), False, 'import functools\n'), ((9906, 10033), 'seqio.TextLineDataSource', 'seqio.TextLineDataSource', ([], {'split_to_filepattern': "{'test': f'{data_dir}/test.tsv', 'validation': f'{data_dir}/validation.tsv'}"}), "(split_to_filepattern={'test':\n f'{data_dir}/test.tsv', 'validation': f'{data_dir}/validation.tsv'})\n", (9930, 10033), False, 'import seqio\n'), ((3094, 3292), 'seqio.TextLineDataSource', 'seqio.TextLineDataSource', ([], {'split_to_filepattern': "{'train': f'{data_dir}/train/{lang}_train.tsv', 'validation':\n f'{data_dir}/dev/{lang}_dev.tsv', 'test':\n f'{data_dir}/test/{lang}_test.tsv'}"}), "(split_to_filepattern={'train':\n f'{data_dir}/train/{lang}_train.tsv', 'validation':\n f'{data_dir}/dev/{lang}_dev.tsv', 'test':\n f'{data_dir}/test/{lang}_test.tsv'})\n", (3118, 3292), False, 'import seqio\n'), ((9573, 9640), 'seqio.TextLineDataSource', 'seqio.TextLineDataSource', ([], {'split_to_filepattern': 'split_to_filepattern'}), '(split_to_filepattern=split_to_filepattern)\n', 
(9597, 9640), False, 'import seqio\n')] |
# -*- coding: utf-8 -*-
import numpy as np
from ..io import edf
from ..io import xiaedf
class LazyFunction(object):
def __init__(self, samemerge=False):
self.samemerge = samemerge
def __str__(self):
return self._func.__class__.__name__
def __eq__(self, other):
return str(self) == str(other)
def __ne__(self, other):
return not self.__eq__(other)
def merge(self, other):
if self == other:
return self.samemerge
else:
return False
class lazy_transmission(LazyFunction):
def __call__(self, fluxt, flux0):
with np.errstate(divide="ignore", invalid="ignore"):
return np.divide(fluxt, flux0)
def __str__(self):
return "transmission"
transmission_func = lazy_transmission()
class lazy_absorbance(LazyFunction):
def __call__(self, transmission):
with np.errstate(divide="ignore", invalid="ignore"):
return -np.log(np.clip(transmission, 0, 1))
def __str__(self):
return "absorbance"
absorbance_func = lazy_absorbance()
class lazy_xrfnorm(LazyFunction):
def __call__(self, xrf, flux, fluxref, xiaimage, detnr):
if fluxref:
norm = fluxref.to("Hz").magnitude / xiaedf.normalizer(flux)
else:
norm = 1
if xiaimage:
xiaimage.onlyicrocr(True)
xiaimage.exclude_detectors = []
xiaimage.include_detectors = [detnr]
stats = xiaimage.stats
dtcor = xiaedf.deadtimecorrector(stats[..., 0, 0], stats[..., 1, 0])
dtcor = dtcor.reshape(xrf.shape)
else:
dtcor = 1
return xrf * norm * dtcor
def __str__(self):
return "xrfnorm"
xrfnorm_func = lazy_xrfnorm()
class lazy_nanmean(LazyFunction):
def __init__(self):
super(lazy_nanmean, self).__init__(samemerge=True)
def __call__(self, x):
return np.nanmean(list(x), axis=0)
def __str__(self):
return "nanmean"
nanmean_func = lazy_nanmean()
class lazy_nansum(LazyFunction):
def __init__(self):
super(lazy_nansum, self).__init__(samemerge=True)
def __call__(self, x):
return np.nansum(list(x), axis=0)
def __str__(self):
return "nansum"
nansum_func = lazy_nansum()
class lazy_nanmax(LazyFunction):
def __init__(self):
super(lazy_nanmax, self).__init__(samemerge=True)
def __call__(self, x):
return np.nanmax(list(x), axis=0)
def __str__(self):
return "nanmax"
nanmax_func = lazy_nanmax()
class lazy_sum(LazyFunction):
def __init__(self):
super(lazy_sum, self).__init__(samemerge=True)
def __call__(self, x):
return sum(x)
def __str__(self):
return "sum"
sum_func = lazy_sum()
class lazy_readedf(LazyFunction):
def __init__(self):
super(lazy_readedf, self).__init__(samemerge=True)
def __call__(self, x):
return x
def __str__(self):
return "readedf"
readedf_func = lazy_readedf()
class LazyArgument(object):
def __init__(self, arg):
self._arg = arg
def data(self, *args):
return self._arg
def __repr__(self):
return self._arg
def __str__(self):
return self.__repr__()
class LazyArgumentEdf(LazyArgument):
def __init__(self, filename):
self._filename = filename
def data(self, *args):
return edf.edfimage(self._filename).data
def __repr__(self):
return self._filename
class LazyArgumentH5Dataset(LazyArgument):
    """Lazy argument that reads one slice of an HDF5 dataset on demand."""
    def __init__(self, path):
        # `path` presumably is a dataset-handle object exposing open() as a
        # context manager -- TODO confirm against the h5 wrapper in use.
        self._path = path
    def data(self, islice, stackdim):
        # Open on demand and read only the requested slice along the
        # stacking dimension (0, 1, or anything else meaning the last axis).
        with self._path.open(mode="r") as dset:
            if stackdim == 0:
                data = dset[islice, ...]
            elif stackdim == 1:
                data = dset[:, islice, :]
            else:
                data = dset[..., islice]
        return data
    def __repr__(self):
        return self._path.__repr__()
    def __str__(self):
        return self.__repr__()
class LazyStackSlice(LazyArgument):
    """Deferred computation over a list of lazy arguments.

    Arguments are resolved (via ``LazyArgument.data``) only when ``data()``
    is called, and the resolved values are fed to ``self._func``.
    """
    def __init__(self, func=None, unpackargs=True):
        # Default to the pass-through EDF reader when no function is given.
        self._func = readedf_func if func is None else func
        self._args = []
        self._unpackargs = unpackargs
    def data(self, *info):
        """Resolve all arguments and apply the wrapped function."""
        resolved = self._arggen(*info)
        if self._unpackargs:
            # Materialize and spread the resolved values as positional args.
            return self._func(*list(resolved))
        # Hand the generator itself to the function (e.g. for reductions).
        return self._func(resolved)
    def _arggen(self, *info):
        # Resolve lazy arguments in order; plain values pass through as-is.
        for arg in self._args:
            yield arg.data(*info) if isinstance(arg, LazyArgument) else arg
    def appendarg(self, arg):
        """Append an argument, merging compatible slices of the same kind."""
        if isinstance(arg, self.__class__) and self._func.merge(arg._func):
            # Same mergeable function: absorb the other slice's arguments.
            self._args.extend(arg._args)
        else:
            self._args.append(arg)
    def appendarg_edf(self, filename):
        """Append an EDF file as a lazily-read argument."""
        self.appendarg(LazyArgumentEdf(filename))
    def appendarg_h5dataset(self, path):
        """Append an HDF5 dataset as a lazily-sliced argument."""
        self.appendarg(LazyArgumentH5Dataset(path))
    def __repr__(self):
        return "{}({})".format(self._func, ",".join(map(str, self._args)))
| [
"numpy.divide",
"numpy.errstate",
"numpy.clip"
] | [((622, 668), 'numpy.errstate', 'np.errstate', ([], {'divide': '"""ignore"""', 'invalid': '"""ignore"""'}), "(divide='ignore', invalid='ignore')\n", (633, 668), True, 'import numpy as np\n'), ((689, 712), 'numpy.divide', 'np.divide', (['fluxt', 'flux0'], {}), '(fluxt, flux0)\n', (698, 712), True, 'import numpy as np\n'), ((899, 945), 'numpy.errstate', 'np.errstate', ([], {'divide': '"""ignore"""', 'invalid': '"""ignore"""'}), "(divide='ignore', invalid='ignore')\n", (910, 945), True, 'import numpy as np\n'), ((974, 1001), 'numpy.clip', 'np.clip', (['transmission', '(0)', '(1)'], {}), '(transmission, 0, 1)\n', (981, 1001), True, 'import numpy as np\n')] |
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tf_agents.environments.random_tf_environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.environments import random_tf_environment
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import time_step as ts
from tf_agents.utils import test_utils
class RandomTFEnvironmentTest(test_utils.TestCase):

  def setUp(self):
    self.observation_spec = tensor_spec.TensorSpec((2, 3), tf.float32)
    self.time_step_spec = ts.time_step_spec(self.observation_spec)
    self.action_spec = tensor_spec.TensorSpec((2,), tf.float32)
    self.random_env = random_tf_environment.RandomTFEnvironment(
        self.time_step_spec, self.action_spec)

  def _sample_action(self, batch_size=1):
    """Draw a random action batch matching the action spec."""
    return self.evaluate(
        tensor_spec.sample_spec_nest(
            self.action_spec, outer_dims=(batch_size,)))

  def _assert_time_steps_equal(self, expected, actual):
    """Assert that two time steps agree field by field."""
    np.testing.assert_almost_equal(expected.step_type, actual.step_type)
    np.testing.assert_almost_equal(expected.observation, actual.observation)
    np.testing.assert_almost_equal(expected.discount, actual.discount)
    np.testing.assert_almost_equal(expected.reward, actual.reward)

  def test_state_saved_after_reset(self):
    initial_time_step = self.evaluate(self.random_env.reset())
    current_time_step = self.evaluate(self.random_env.current_time_step())
    self._assert_time_steps_equal(initial_time_step, current_time_step)

  def test_state_saved_after_step(self):
    self.evaluate(self.random_env.reset())
    random_action = self._sample_action()
    expected_time_step = self.evaluate(self.random_env.step(random_action))
    current_time_step = self.evaluate(self.random_env.current_time_step())
    self._assert_time_steps_equal(expected_time_step, current_time_step)

  def test_auto_reset(self):
    time_step = self.evaluate(self.random_env.reset())
    random_action = self._sample_action()
    attempts = 0
    # With a 1/10 chance of resetting on each step, the probability of failure
    # after 500 attempts should be 0.9^500, roughly 1e-23. If we miss more than
    # 500 attempts, we can safely assume the test is broken.
    while not time_step.is_last() and attempts < 500:
      time_step = self.evaluate(self.random_env.step(random_action))
      attempts += 1
    self.assertLess(attempts, 500)
    self.assertTrue(time_step.is_last())
    current_time_step = self.evaluate(self.random_env.current_time_step())
    self.assertTrue(current_time_step.is_last())
    # Stepping a terminal environment must auto-reset to a FIRST time step.
    first_time_step = self.evaluate(self.random_env.step(random_action))
    self.assertTrue(first_time_step.is_first())

  def test_step_batched_action(self):
    self.evaluate(self.random_env.reset())
    random_action = self._sample_action(batch_size=5)
    self.evaluate(self.random_env.step(random_action))
# Standard TensorFlow test entry point: discovers and runs the TestCase above.
if __name__ == '__main__':
  tf.test.main()
| [
"tensorflow.test.main",
"tf_agents.specs.tensor_spec.TensorSpec",
"numpy.testing.assert_almost_equal",
"tf_agents.specs.tensor_spec.sample_spec_nest",
"tf_agents.trajectories.time_step.time_step_spec",
"tf_agents.environments.random_tf_environment.RandomTFEnvironment"
] | [((4175, 4189), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (4187, 4189), True, 'import tensorflow as tf\n'), ((1165, 1207), 'tf_agents.specs.tensor_spec.TensorSpec', 'tensor_spec.TensorSpec', (['(2, 3)', 'tf.float32'], {}), '((2, 3), tf.float32)\n', (1187, 1207), False, 'from tf_agents.specs import tensor_spec\n'), ((1234, 1274), 'tf_agents.trajectories.time_step.time_step_spec', 'ts.time_step_spec', (['self.observation_spec'], {}), '(self.observation_spec)\n', (1251, 1274), True, 'from tf_agents.trajectories import time_step as ts\n'), ((1298, 1338), 'tf_agents.specs.tensor_spec.TensorSpec', 'tensor_spec.TensorSpec', (['(2,)', 'tf.float32'], {}), '((2,), tf.float32)\n', (1320, 1338), False, 'from tf_agents.specs import tensor_spec\n'), ((1361, 1446), 'tf_agents.environments.random_tf_environment.RandomTFEnvironment', 'random_tf_environment.RandomTFEnvironment', (['self.time_step_spec', 'self.action_spec'], {}), '(self.time_step_spec, self.action_spec\n )\n', (1402, 1446), False, 'from tf_agents.environments import random_tf_environment\n'), ((1637, 1729), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['initial_time_step.step_type', 'current_time_step.step_type'], {}), '(initial_time_step.step_type,\n current_time_step.step_type)\n', (1667, 1729), True, 'import numpy as np\n'), ((1765, 1861), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['initial_time_step.observation', 'current_time_step.observation'], {}), '(initial_time_step.observation,\n current_time_step.observation)\n', (1795, 1861), True, 'import numpy as np\n'), ((1897, 1987), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['initial_time_step.discount', 'current_time_step.discount'], {}), '(initial_time_step.discount,\n current_time_step.discount)\n', (1927, 1987), True, 'import numpy as np\n'), ((2023, 2110), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['initial_time_step.reward', 
'current_time_step.reward'], {}), '(initial_time_step.reward, current_time_step.\n reward)\n', (2053, 2110), True, 'import numpy as np\n'), ((2491, 2584), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['expected_time_step.step_type', 'current_time_step.step_type'], {}), '(expected_time_step.step_type,\n current_time_step.step_type)\n', (2521, 2584), True, 'import numpy as np\n'), ((2620, 2717), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['expected_time_step.observation', 'current_time_step.observation'], {}), '(expected_time_step.observation,\n current_time_step.observation)\n', (2650, 2717), True, 'import numpy as np\n'), ((2753, 2844), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['expected_time_step.discount', 'current_time_step.discount'], {}), '(expected_time_step.discount,\n current_time_step.discount)\n', (2783, 2844), True, 'import numpy as np\n'), ((2880, 2968), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['expected_time_step.reward', 'current_time_step.reward'], {}), '(expected_time_step.reward, current_time_step\n .reward)\n', (2910, 2968), True, 'import numpy as np\n'), ((2269, 2332), 'tf_agents.specs.tensor_spec.sample_spec_nest', 'tensor_spec.sample_spec_nest', (['self.action_spec'], {'outer_dims': '(1,)'}), '(self.action_spec, outer_dims=(1,))\n', (2297, 2332), False, 'from tf_agents.specs import tensor_spec\n'), ((3127, 3190), 'tf_agents.specs.tensor_spec.sample_spec_nest', 'tensor_spec.sample_spec_nest', (['self.action_spec'], {'outer_dims': '(1,)'}), '(self.action_spec, outer_dims=(1,))\n', (3155, 3190), False, 'from tf_agents.specs import tensor_spec\n'), ((4023, 4086), 'tf_agents.specs.tensor_spec.sample_spec_nest', 'tensor_spec.sample_spec_nest', (['self.action_spec'], {'outer_dims': '(5,)'}), '(self.action_spec, outer_dims=(5,))\n', (4051, 4086), False, 'from tf_agents.specs import tensor_spec\n')] |
import numpy as np
from data_loader import DataLoader
import random
class ReccurentNetwork:
    """A character-level vanilla (Elman) RNN trained with per-example Adagrad.

    The public names (including ``backdrop`` -- a historical typo for
    "backprop" -- and the class name itself) are kept unchanged for
    backward compatibility with existing callers.
    """

    def __init__(self, data, size):
        """Create the network with uniformly initialized weights.

        Parameters
        ----------
        data : list
            Training sequences; each element is an iterable of (x, d) pairs
            of one-hot column vectors (input and target).
        size : int
            Dimension of the one-hot input/output vectors.
        """
        self.data = data
        self.input_size = size
        self.output_size = size
        self.hidden_size = 100
        # Initialize weights with the usual 1/sqrt(fan-in) uniform scaling.
        self.W_input = np.random.uniform(
            -np.sqrt(1./self.input_size), np.sqrt(1./self.input_size),
            (self.hidden_size, self.input_size))
        self.W_output = np.random.uniform(
            -np.sqrt(1./self.hidden_size), np.sqrt(1./self.hidden_size),
            (self.output_size, self.hidden_size))
        self.W_hidden = np.random.uniform(
            -np.sqrt(1./self.hidden_size), np.sqrt(1./self.hidden_size),
            (self.hidden_size, self.hidden_size))
        self.b_input = np.zeros((self.hidden_size, 1))
        self.b_output = np.zeros((self.output_size, 1))

    def update_batch(self, loader):
        """Run one epoch of Adagrad training over shuffled mini-batches.

        ``loader`` is only used for the qualitative sampling in ``test``.
        """
        random.shuffle(self.data)
        mini_batch_size = 100
        data_size = len(self.data)
        batches = [
            self.data[k:k+mini_batch_size] for k
            in range(0, data_size, mini_batch_size)
        ]
        # Adagrad accumulators (running sums of squared gradients).
        mem_dW_output = np.zeros_like(self.W_output)
        mem_dW_hidden = np.zeros_like(self.W_hidden)
        mem_dW_input = np.zeros_like(self.W_input)
        mem_db_output = np.zeros_like(self.b_output)
        mem_db_input = np.zeros_like(self.b_input)
        loss_acc = 0.0
        for batch in batches:
            loss = 0.0
            eta = 0.1
            for x, d in batch:
                z_hidden, u_hidden, y = self.forward(x)
                loss += self.loss(y, d)
                dW_input, dW_hidden, dW_output, db_input, db_output = self.backdrop(
                    x, u_hidden, z_hidden, y, d
                )
                for param, dparam, mem in zip(
                    [self.W_input, self.W_hidden, self.W_output, self.b_input, self.b_output],
                    [dW_input, dW_hidden, dW_output, db_input, db_output],
                    [mem_dW_input, mem_dW_hidden, mem_dW_output, mem_db_input, mem_db_output],
                ):
                    mem += dparam * dparam
                    # Adagrad step: per-parameter adaptive learning rate.
                    param += -eta * dparam / np.sqrt(mem + 1e-8)
            # Bug fix: the last batch may be shorter than mini_batch_size, so
            # normalize the reported loss by the actual batch length.
            print('Batch', loss/len(batch), len(batch))
            self.test(loader)
            loss_acc += loss/len(batch)
        print('Batch Avg', loss_acc/len(batches))

    def loss(self, y, d):
        """Total cross-entropy of softmax outputs ``y`` against one-hot targets ``d``."""
        loss = 0.0
        for t in range(len(y)):
            target_idx = np.argmax(d[t])
            # NOTE: no clipping here -- a prediction of exactly 0 for the
            # target would give inf; the softmax makes that unlikely.
            loss += -np.log(y[t][target_idx, 0])
        return loss

    def forward(self, x):
        """Run the RNN over a sequence of one-hot column vectors.

        Returns
        -------
        (z_hidden, u_hidden, y) : lists over time of hidden activations,
        hidden pre-activations and softmax output columns.
        """
        t_max = len(x)
        z_hidden = [np.zeros((self.hidden_size, 1)) for t in range(t_max)]
        u_hidden = [np.zeros((self.hidden_size, 1)) for t in range(t_max)]
        y = [np.zeros((self.output_size, 1)) for t in range(t_max)]
        for t in range(t_max):
            # Hidden layer; the initial hidden state (t == 0) is implicitly zero.
            u_hidden[t] = self.W_input.dot(x[t]) + self.b_input
            if t >= 1:
                u_hidden[t] += self.W_hidden.dot(z_hidden[t-1])
            z_hidden[t] = np.tanh(u_hidden[t])
            # Output layer
            y[t] = self.softmax(self.W_output.dot(z_hidden[t]) + self.b_output)
        return (z_hidden, u_hidden, y)

    def backdrop(self, x, u_hidden, z_hidden, y, d):
        """Back-propagation through time; returns gradients clipped to [-5, 5].

        Returns (dW_input, dW_hidden, dW_output, db_input, db_output).
        """
        t_max = len(y)
        # Bug fix: the deltas live in the hidden layer, so they need
        # hidden_size rows (the old output_size init was overwritten anyway).
        delta_hidden = [np.zeros((self.hidden_size, 1)) for t in range(t_max)]
        dW_output = np.zeros_like(self.W_output)
        dW_hidden = np.zeros_like(self.W_hidden)
        dW_input = np.zeros_like(self.W_input)
        db_output = np.zeros_like(self.b_output)
        db_input = np.zeros_like(self.b_input)
        for t in reversed(range(t_max)):
            # Softmax + cross-entropy gradient: y - one_hot(target).
            delta_output = y[t].copy()
            delta_output[np.argmax(d[t])] -= 1.0
            delta_hidden[t] = self.W_output.T.dot(delta_output)
            if t <= t_max - 2:
                delta_hidden[t] += self.W_hidden.T.dot(delta_hidden[t+1])
            delta_hidden[t] *= 1 - z_hidden[t]**2  # tanh'(u) = 1 - tanh(u)^2
            dW_input += delta_hidden[t].dot(x[t].T)
            db_output += delta_output
            db_input += delta_hidden[t]
            if t > 0:
                # Bug fix: at t == 0 the previous hidden state is the zero
                # initial state and contributes nothing to dW_hidden; the old
                # code wrapped around to z_hidden[-1] (the *last* timestep).
                dW_hidden += delta_hidden[t].dot(z_hidden[t-1].T)
            dW_output += delta_output.dot(z_hidden[t].T)
        # Clip element-wise to mitigate exploding gradients.
        for dparam in [dW_input, dW_hidden, dW_output, db_input, db_output]:
            np.clip(dparam, -5, 5, out=dparam)
        return (dW_input, dW_hidden, dW_output, db_input, db_output)

    def test(self, loader):
        """Sample a few sequences from the model (qualitative progress check)."""
        for attempt in range(10):
            init_c = '('
            print(init_c, end='')
            c_idx = loader.char_to_idx[init_c]
            # Bug fix: feed the growing prefix so the recurrent state is
            # carried across sampled characters; the old code called forward
            # on a single character, reducing sampling to a bigram model.
            seq = [loader._one_hot_vec(len(loader.chars)+1, c_idx)]
            for t in range(100):
                z_hidden, u_hidden, y = self.forward(seq)
                c_idx = np.random.choice(range(len(loader.chars)+1), p=y[-1].ravel())
                if c_idx >= len(loader.chars):
                    # An index past the alphabet acts as end-of-sequence.
                    break
                print(loader.idx_to_char[c_idx], end='')
                seq.append(loader._one_hot_vec(len(loader.chars)+1, c_idx))
            print()

    ### Misc functions
    def tanh_d(self, x):
        """Derivative of tanh."""
        return 1.0 - np.tanh(x)**2

    def softmax(self, x):
        """Numerically stable softmax over a column vector."""
        de = np.exp(x - np.max(x))
        return de/np.sum(de)

    def softmax_d(self, x):
        """Element-wise (diagonal) derivative of the softmax."""
        soft_max_v = self.softmax(x)
        return soft_max_v*(1 - soft_max_v)
# Script entry point: train indefinitely, printing progress every epoch.
if __name__ == '__main__':
    loader = DataLoader()
    # One extra output class beyond the alphabet serves as an end token.
    rnn = ReccurentNetwork(loader.char_vecs, len(loader.chars)+1)
    epoch = 1
    while True:
        print('Epoch', epoch)
        rnn.update_batch(loader)
        epoch+=1
| [
"numpy.zeros_like",
"numpy.sum",
"numpy.tanh",
"numpy.log",
"numpy.argmax",
"random.shuffle",
"numpy.zeros",
"numpy.clip",
"data_loader.DataLoader",
"numpy.max",
"numpy.sqrt"
] | [((5584, 5596), 'data_loader.DataLoader', 'DataLoader', ([], {}), '()\n', (5594, 5596), False, 'from data_loader import DataLoader\n'), ((807, 838), 'numpy.zeros', 'np.zeros', (['(self.hidden_size, 1)'], {}), '((self.hidden_size, 1))\n', (815, 838), True, 'import numpy as np\n'), ((863, 894), 'numpy.zeros', 'np.zeros', (['(self.output_size, 1)'], {}), '((self.output_size, 1))\n', (871, 894), True, 'import numpy as np\n'), ((948, 973), 'random.shuffle', 'random.shuffle', (['self.data'], {}), '(self.data)\n', (962, 973), False, 'import random\n'), ((1197, 1225), 'numpy.zeros_like', 'np.zeros_like', (['self.W_output'], {}), '(self.W_output)\n', (1210, 1225), True, 'import numpy as np\n'), ((1250, 1278), 'numpy.zeros_like', 'np.zeros_like', (['self.W_hidden'], {}), '(self.W_hidden)\n', (1263, 1278), True, 'import numpy as np\n'), ((1302, 1329), 'numpy.zeros_like', 'np.zeros_like', (['self.W_input'], {}), '(self.W_input)\n', (1315, 1329), True, 'import numpy as np\n'), ((1355, 1383), 'numpy.zeros_like', 'np.zeros_like', (['self.b_output'], {}), '(self.b_output)\n', (1368, 1383), True, 'import numpy as np\n'), ((1407, 1434), 'numpy.zeros_like', 'np.zeros_like', (['self.b_input'], {}), '(self.b_input)\n', (1420, 1434), True, 'import numpy as np\n'), ((3515, 3543), 'numpy.zeros_like', 'np.zeros_like', (['self.W_output'], {}), '(self.W_output)\n', (3528, 3543), True, 'import numpy as np\n'), ((3564, 3592), 'numpy.zeros_like', 'np.zeros_like', (['self.W_hidden'], {}), '(self.W_hidden)\n', (3577, 3592), True, 'import numpy as np\n'), ((3612, 3639), 'numpy.zeros_like', 'np.zeros_like', (['self.W_input'], {}), '(self.W_input)\n', (3625, 3639), True, 'import numpy as np\n'), ((3661, 3689), 'numpy.zeros_like', 'np.zeros_like', (['self.b_output'], {}), '(self.b_output)\n', (3674, 3689), True, 'import numpy as np\n'), ((3709, 3736), 'numpy.zeros_like', 'np.zeros_like', (['self.b_input'], {}), '(self.b_input)\n', (3722, 3736), True, 'import numpy as np\n'), ((374, 404), 
'numpy.sqrt', 'np.sqrt', (['(1.0 / self.input_size)'], {}), '(1.0 / self.input_size)\n', (381, 404), True, 'import numpy as np\n'), ((538, 569), 'numpy.sqrt', 'np.sqrt', (['(1.0 / self.hidden_size)'], {}), '(1.0 / self.hidden_size)\n', (545, 569), True, 'import numpy as np\n'), ((704, 735), 'numpy.sqrt', 'np.sqrt', (['(1.0 / self.hidden_size)'], {}), '(1.0 / self.hidden_size)\n', (711, 735), True, 'import numpy as np\n'), ((2558, 2573), 'numpy.argmax', 'np.argmax', (['d[t]'], {}), '(d[t])\n', (2567, 2573), True, 'import numpy as np\n'), ((2724, 2755), 'numpy.zeros', 'np.zeros', (['(self.hidden_size, 1)'], {}), '((self.hidden_size, 1))\n', (2732, 2755), True, 'import numpy as np\n'), ((2798, 2829), 'numpy.zeros', 'np.zeros', (['(self.hidden_size, 1)'], {}), '((self.hidden_size, 1))\n', (2806, 2829), True, 'import numpy as np\n'), ((2865, 2896), 'numpy.zeros', 'np.zeros', (['(self.output_size, 1)'], {}), '((self.output_size, 1))\n', (2873, 2896), True, 'import numpy as np\n'), ((3154, 3174), 'numpy.tanh', 'np.tanh', (['u_hidden[t]'], {}), '(u_hidden[t])\n', (3161, 3174), True, 'import numpy as np\n'), ((3440, 3471), 'numpy.zeros', 'np.zeros', (['(self.output_size, 1)'], {}), '((self.output_size, 1))\n', (3448, 3471), True, 'import numpy as np\n'), ((4474, 4508), 'numpy.clip', 'np.clip', (['dparam', '(-5)', '(5)'], {'out': 'dparam'}), '(dparam, -5, 5, out=dparam)\n', (4481, 4508), True, 'import numpy as np\n'), ((5423, 5433), 'numpy.sum', 'np.sum', (['de'], {}), '(de)\n', (5429, 5433), True, 'import numpy as np\n'), ((345, 375), 'numpy.sqrt', 'np.sqrt', (['(1.0 / self.input_size)'], {}), '(1.0 / self.input_size)\n', (352, 375), True, 'import numpy as np\n'), ((508, 539), 'numpy.sqrt', 'np.sqrt', (['(1.0 / self.hidden_size)'], {}), '(1.0 / self.hidden_size)\n', (515, 539), True, 'import numpy as np\n'), ((674, 705), 'numpy.sqrt', 'np.sqrt', (['(1.0 / self.hidden_size)'], {}), '(1.0 / self.hidden_size)\n', (681, 705), True, 'import numpy as np\n'), ((2595, 2622), 
'numpy.log', 'np.log', (['y[t][target_idx, 0]'], {}), '(y[t][target_idx, 0])\n', (2601, 2622), True, 'import numpy as np\n'), ((3851, 3866), 'numpy.argmax', 'np.argmax', (['d[t]'], {}), '(d[t])\n', (3860, 3866), True, 'import numpy as np\n'), ((5329, 5339), 'numpy.tanh', 'np.tanh', (['x'], {}), '(x)\n', (5336, 5339), True, 'import numpy as np\n'), ((5394, 5403), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (5400, 5403), True, 'import numpy as np\n'), ((2243, 2263), 'numpy.sqrt', 'np.sqrt', (['(mem + 1e-08)'], {}), '(mem + 1e-08)\n', (2250, 2263), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# __init__.py
"""A module to simulate optical transfer functions and point spread functions.
If this file is run as a script (python -m pyotf.otf) it will compare
the HanserPSF to the SheppardPSF in a plot.
https://en.wikipedia.org/wiki/Optical_transfer_function
https://en.wikipedia.org/wiki/Point_spread_function
Copyright (c) 2020, <NAME>
"""
import copy
import logging
from functools import cached_property
import numpy as np
from numpy.fft import fftfreq, fftshift, ifftn
from numpy.linalg import norm
from .utils import NumericProperty, cart2pol, easy_fft, easy_ifft, psqrt, slice_maker
from .display import psf_plot, otf_plot
from .zernike import name2noll, zernike, noll2degrees
logger = logging.getLogger(__name__)
class BasePSF(object):
    """A base class for objects that can calculate OTF's and PSF's.
    It is not intended to be used alone
    To fully describe a PSF or OTF of an objective lens, assuming no
    aberration, we generally need a few parameters:
    - The wavelength of operation (assume monochromatic light)
    - the numerical aperture of the objective
    - the index of refraction of the medium
    For numerical calculations we'll also want to know the x/y resolution
    and number of points. Note that it is assumed that z is the optical
    axis of the objective lens
    """
    # Define all the numeric properties of the base class
    wl = NumericProperty(attr="_wl", vartype=(float, int), doc="Wavelength of emission, in nm")
    na = NumericProperty(attr="_na", vartype=(float, int), doc="Numerical Aperature")
    ni = NumericProperty(attr="_ni", vartype=(float, int), doc="Refractive index")
    size = NumericProperty(attr="_size", vartype=int, doc="x/y size")
    zsize = NumericProperty(attr="_zsize", vartype=int, doc="z size")
    def __init__(
        self, wl, na, ni, res, size, zres=None, zsize=None, vec_corr="none", condition="sine"
    ):
        """Generate a PSF object.
        Parameters
        ----------
        wl : numeric
            Emission wavelength of the simulation
        na : numeric
            Numerical aperture of the simulation
        ni : numeric
            index of refraction for the media
        res : numeric
            x/y resolution of the simulation, must have same units as wl
        size : int
            x/y size of the simulation
        Optional Parameters
        -------------------
        zres : numeric
            z resolution of simulation, must have same units as wl
        zsize : int
            z size of simulation
        vec_corr : str
            keyword to indicate whether to include vectorial effects
            Valid options are: "none", "x", "y", "z", "total"
            Default is: "none"
        condition : str
            keyword to indicate whether to model the sine or herschel
            conditions
            **Herschel's Condition** invariance of axial magnification
            **Abbe's Sine Condition** invariance of lateral magnification
            Valid options are: "none", "sine", "herschel"
            Default is: "sine"
            Note: "none" is not a physical solution
        """
        self.wl = wl
        self.na = na
        self.ni = ni
        self.res = res
        self.size = size
        # if zres is not passed, set it to res
        if zres is None:
            zres = res
        self.zres = zres
        # if zsize isn't passed set it to size
        if zsize is None:
            zsize = size
        self.zsize = zsize
        self.vec_corr = vec_corr
        self.condition = condition
    def __repr__(self):
        """Return representation of PSF object."""
        return (
            f"{self.__class__.__name__}(wl={self.wl}, na={self.na}, ni={self.ni},"
            + f" res={self.res}, size={self.size}, zres={self.zres}, zsize={self.zsize},"
            + f" vec_corr='{self.vec_corr}', condition='{self.condition}')"
        )
    def _attribute_changed(self):
        """Attribute has changed.
        Deletes the cached_property values so that when the user asks
        for them they are recalculated with the new parameters.
        """
        for attr in ("PSFa", "OTFa", "PSFi", "OTFi"):
            try:
                delattr(self, attr)
            except AttributeError:
                # cached_property not computed yet, nothing to invalidate
                logger.debug(f"{attr} wasn't available to delete")
    @property
    def zres(self):
        """Z resolution."""
        return self._zres
    @zres.setter
    def zres(self, value):
        # make sure z res is positive
        if not value > 0:
            raise ValueError("zres must be positive")
        self._zres = value
        self._attribute_changed()
    @property
    def res(self):
        """X/Y resolution."""
        return self._res
    @res.setter
    def res(self, value):
        # max_val is the abbe limit, but for an accurate simulation
        # the pixel size must be smaller than half this number
        # thinking in terms of the convolution that is implicitly
        # performed when generating the OTFi we also don't want
        # any wrapping effects. However, allowing the number
        # to be the Abbe limit can allow phase retrieval for
        # larger pixels
        abbe_limit = 1 / (2 * self.na / self.wl)
        if value >= abbe_limit:
            raise ValueError(
                f"{value} is larger than the Abbe Limit, try a number smaller than {abbe_limit}"
            )
        if value >= abbe_limit / 2:
            # undersampled but still usable -- warn, don't fail
            logger.info(
                f"res has been set to {value} which is greater than the Nyquist limit of {abbe_limit / 2}"
            )
        self._res = value
        self._attribute_changed()
    @property
    def vec_corr(self):
        """Take into account the vectorial nature of light.
        Valid values are: "none", "x", "y", "z", "total"
        """
        return self._vec_corr
    @vec_corr.setter
    def vec_corr(self, value):
        valid_values = {"none", "x", "y", "z", "total"}
        if value not in valid_values:
            raise ValueError("Vector correction must be one of {}".format(", ".join(valid_values)))
        self._vec_corr = value
        self._attribute_changed()
    @property
    def condition(self):
        """Imaging condition to simulate."""
        return self._condition
    @condition.setter
    def condition(self, value):
        valid_values = {"none", "sine", "herschel"}
        if value not in valid_values:
            raise ValueError(("Condition must be one of {}").format(", ".join(valid_values)))
        self._condition = value
        self._attribute_changed()
    @cached_property
    def OTFa(self):
        """Amplitude OTF (coherent transfer function), complex array."""
        raise NotImplementedError
    @cached_property
    def PSFa(self):
        """Amplitude PSF, complex array."""
        raise NotImplementedError
    @cached_property
    def PSFi(self):
        """Intensity PSF, real array."""
        # incoherent PSF: |amplitude PSF|^2 summed over the leading
        # (field-component) axis
        return (abs(self.PSFa) ** 2).sum(axis=0)
    @cached_property
    def OTFi(self):
        """Intensity OTF, complex array."""
        return easy_fft(self.PSFi)
    def _validate_zrange(self):
        """Check zrange for uniform step size."""
        try:
            # make sure there's only one size of z-step and that there's more than one
            zsteps = np.diff(self.zrange)
            if len(zsteps) < 2 or not np.allclose(zsteps, zsteps.mean()):
                raise RuntimeError(f"{self} doesn't have uniform z-steps ---> {zsteps}")
        except AttributeError:
            # subclass has no zrange attribute; nothing to validate
            pass
    def plot_psf(self, **kwargs):
        """Plot the intensity PSF.
        See `pyotf.display.psf_plot` for details and possible kwargs
        """
        self._validate_zrange()
        # smart cropping around the PSF maximum; the factors 32 (lateral)
        # and 16 (axial) resolution units are presumably empirical margins
        # nice lateral extent
        lateral_extent = self.wl / 2 / self.na / self.res * 32
        axial_extent = self.wl / (self.ni - np.sqrt(self.ni**2 - self.na**2)) / self.zres * 16
        max_loc = np.unravel_index(self.PSFi.argmax(), self.PSFi.shape)
        crop = slice_maker(max_loc, (axial_extent, lateral_extent, lateral_extent))
        return psf_plot(self.PSFi[crop], zres=self.zres, res=self.res, **kwargs)
    def plot_otf(self, **kwargs):
        """Plot the intensity OTF.
        See `pyotf.display.otf_plot` for details and possible kwargs
        """
        self._validate_zrange()
        # normalize OTF and make sure it's real (floor at eps so log plots work)
        otf = abs(self.OTFi)
        otf = np.fmax(otf / otf.max(), np.finfo(float).eps)
        # nice default plotting kwargs
        dkwargs = dict(vmin=1e-4)
        dkwargs.update(kwargs)
        return otf_plot(
            otf,
            na=self.na,
            ni=self.ni,
            wl=self.wl,
            zres=self.zres,
            res=self.res,
            **dkwargs,
        )
class HanserPSF(BasePSF):
    """A class defining the pupil function and its closely related methods.
    Based on the following work
    [(1) <NAME>.; <NAME>.; <NAME>.; <NAME>.
    Phase-Retrieved Pupil Functions in Wide-Field Fluorescence Microscopy.
    Journal of Microscopy 2004, 216 (1), 32–48.](dx.doi.org/10.1111/j.0022-2720.2004.01393.x)
    [(2) <NAME>.; <NAME>.; <NAME>.; <NAME>.
    Phase Retrieval for High-Numerical-Aperture Optical Systems.
    Optics Letters 2003, 28 (10), 801.](dx.doi.org/10.1364/OL.28.000801)
    """
    def __init__(self, *args, zrange=None, **kwargs):  # noqa: D205,D208,D400,D403
        """zrange : array-like
            An alternate way to specify the z range for the calculation
            must be expressed in the same units as wavelength
        """
        super().__init__(*args, **kwargs)
        if zrange is None:
            self._gen_zrange()
        else:
            self.zrange = zrange
    # include parent documentation
    __init__.__doc__ = BasePSF.__init__.__doc__ + __init__.__doc__
    def __repr__(self):
        """Represent HanserPSF."""
        # splice the extra kwarg inside the parent repr's closing paren
        return super().__repr__()[:-1] + f", zrange={self.zrange!r})"
    def _gen_zrange(self):
        """Generate the zrange from zsize and zres (centered near z = 0)."""
        self.zrange = (np.arange(self.zsize) - (self.zsize + 1) // 2) * self.zres
    @BasePSF.zsize.setter
    def zsize(self, value):
        """Set zsize."""
        # we need override this setter so that the zrange is recalculated
        BasePSF.zsize.fset(self, value)
        # try and except is necessary for initialization
        try:
            self._gen_zrange()
        except AttributeError:
            pass
    @BasePSF.zres.setter
    def zres(self, value):
        """Set zres."""
        # same as for zsize
        BasePSF.zres.fset(self, value)
        try:
            self._gen_zrange()
        except AttributeError:
            pass
    @property
    def zrange(self):
        """Return range over which to calculate the psf."""
        return self._zrange
    @zrange.setter
    def zrange(self, value):
        self._zrange = np.asarray(value)
        # check if passed value is scalar
        if not self._zrange.shape:
            # convert to array for later multiplications
            self._zrange.shape = (1,)
        self._attribute_changed()
    def _gen_kr(self):
        """Generate coordinate system and other internal parameters."""
        k = self._k = fftfreq(self.size, self.res)
        kxx, kyy = np.meshgrid(k, k)
        self._kr, self._phi = cart2pol(kyy, kxx)
        # kmag is the radius of the spherical shell of the OTF
        self._kmag = self.ni / self.wl
        # because the OTF only exists on a spherical shell we can calculate
        # a kz value for any pair of kx and ky values
        # (psqrt presumably clamps negative arguments, i.e. points outside
        # the shell, to zero -- see pyotf.utils)
        self._kz = psqrt(self._kmag**2 - self._kr**2)
    def _gen_pupil(self):
        """Generate an ideal pupil."""
        kr = self._kr
        # define the diffraction limit
        # remember we're working with _coherent_ data _not_ intensity,
        # so drop the factor of 2
        diff_limit = self._na / self._wl
        # return a circle of intensity 1 over the ideal passband of the
        # objective make sure data is complex
        return (kr < diff_limit).astype(complex)
    def _calc_defocus(self):
        """Calculate the defocus to apply to the base pupil.
        Returns a (len(zrange), size, size) stack of phase factors.
        """
        kz = self._kz
        return np.exp(2 * np.pi * 1j * kz * self.zrange[:, np.newaxis, np.newaxis])
    def _gen_psf(self, pupil_base=None):
        """Generate the PSF.
        kwargs
        ------
        pupil_base : ndarray
            provided so that phase retrieval algorithms can hook into this
            method.
        NOTE: that the internal state is created with fftfreq, which creates
        _unshifted_ frequencies
        """
        # clear internal state
        self._attribute_changed()
        # generate internal coordinates
        self._gen_kr()
        # generate the pupil
        if pupil_base is None:
            pupil_base = self._gen_pupil()
        else:
            assert pupil_base.ndim == 2, f"`pupil_base` is wrong shape: {pupil_base.shape}"
        # Maybe we should do ifftshift here so user doesn't have to
        # pull relevant internal state variables
        kr = self._kr
        phi = self._phi
        kmag = self._kmag
        # apply the defocus to the base_pupil
        pupil = pupil_base * self._calc_defocus()
        # calculate theta, this is possible because we know that the
        # OTF is only non-zero on a spherical shell
        theta = np.arcsin((kr < kmag) * kr / kmag)
        # The authors claim that the following code is unnecessary as the
        # sine condition is already taken into account in the definition
        # of the pupil, but I call bullshit
        if self.condition != "none":
            if self.condition == "sine":
                a = 1.0 / np.sqrt(np.cos(theta))
            elif self.condition == "herschel":
                a = 1.0 / np.cos(theta)
            else:
                raise RuntimeError("You should never see this")
            pupil *= a
        # apply the vectorial corrections, if requested
        if self.vec_corr != "none":
            plist = []
            if self.vec_corr == "z" or self.vec_corr == "total":
                plist.append(np.sin(theta) * np.cos(phi))  # Pzx
                plist.append(np.sin(theta) * np.sin(phi))  # Pzy
            if self.vec_corr == "y" or self.vec_corr == "total":
                plist.append((np.cos(theta) - 1) * np.sin(phi) * np.cos(phi))  # Pyx
                plist.append(np.cos(theta) * np.sin(phi) ** 2 + np.cos(phi) ** 2)  # Pyy
            if self.vec_corr == "x" or self.vec_corr == "total":
                plist.append(np.cos(theta) * np.cos(phi) ** 2 + np.sin(phi) ** 2)  # Pxx
                plist.append((np.cos(theta) - 1) * np.sin(phi) * np.cos(phi))  # Pxy
            # apply the corrections to the base pupil; result has shape
            # (n_components, len(zrange), size, size)
            pupils = pupil * np.array(plist)[:, np.newaxis]
        else:
            # if no correction we still need one more axis for the following
            # code to work generally
            pupils = pupil[np.newaxis]
        # save the pupil for inspection, not necessary
        # self._pupils = pupils
        # because the internal state is created with fftfreq, no initial shift
        # is necessary; transform over the two spatial axes only.
        PSFa = fftshift(ifftn(pupils, axes=(2, 3)), axes=(2, 3))
        # save the PSF internally
        return PSFa
    def apply_pupil(self, pupil):
        """Apply a pupil function to the model."""
        self._attribute_changed()
        self.PSFa = self._gen_psf(pupil)
    @cached_property
    def OTFa(self):
        """Amplitude OTF."""
        return easy_fft(self.PSFa, axes=(1, 2, 3))
    @cached_property
    def PSFa(self):
        """Amplitude PSF."""
        return self._gen_psf()
class SheppardPSF(BasePSF):
"""A class defining the 3D pupil function and its closely related methods.
Based on the following work:
[(1) <NAME>.; <NAME>. A 3D Vectorial Optical Transfer
Function Suitable for Arbitrary Pupil Functions. Optics Communications
2002, 211 (1–6), 53–63.](dx.doi.org/10.1016/S0030-4018(02)01857-6)
"""
dual = NumericProperty(attr="_dual", vartype=bool, doc="Simulate dual objectives")
    def __init__(self, *args, dual=False, **kwargs):  # noqa: D205,D208,D400,D403
        """dual : bool
            Simulate dual objectives
        """
        super().__init__(*args, **kwargs)
        self.dual = dual
    # include parent documentation
    __init__.__doc__ = BasePSF.__init__.__doc__ + __init__.__doc__
def __repr__(self):
"""Represent SheppardPSF."""
return super().__repr__()[:-1] + f", dual={self.dual})"
@property
def dual(self):
"""Simulate opposing objectives."""
return self._dual
@dual.setter
def dual(self, value):
if not isinstance(value, bool):
raise TypeError("`dual` must be a boolean")
self._dual = value
self._attribute_changed()
@BasePSF.zres.setter
def zres(self, value):
"""Set zres."""
# this checks the nyquist limit for z
# remember that because we create a spherical shell for
# The amplitude OTF not nyquist for the final intensity OTF ...
max_val = self.wl / 2 / self.ni
if value >= max_val:
# this will cause a fftconvolution error when calculating the
# intensity OTF
raise ValueError(f"{value} is too large try a number smaller than {max_val}")
BasePSF.zres.fset(self, value)
def _gen_kr(self):
"""Generate internal state."""
# generate internal kspace coordinates
k = fftfreq(self.size, self.res)
kz = fftfreq(self.zsize, self.zres)
k_tot = np.meshgrid(kz, k, k, indexing="ij")
# calculate r
kr = norm(k_tot, axis=0)
# calculate the radius of the spherical shell in k-space
self.kmag = kmag = self.ni / self.wl
# determine k-space pixel size
dk, dkz = k[1] - k[0], kz[1] - kz[0]
# save output for user
self.dk, self.dkz = dk, dkz
# determine the min value for kz given the NA and wavelength
kz_min = np.sqrt(kmag**2 - (self.na / self.wl) ** 2)
# make sure we're not crazy
assert kz_min >= 0, "Something went horribly wrong"
# if the user gave us different z and x/y res we need to calculate
# the positional "error" in k-space to draw the spherical shell
if dk != dkz:
with np.errstate(invalid="ignore"):
dd = np.array((dkz, dk, dk)).reshape(3, 1, 1, 1)
dkr = norm(np.array(k_tot) * dd, axis=0) / kr
# we know the origin is zero so replace it
dkr[0, 0, 0] = 0.0
else:
dkr = dk
if self.dual:
# if we want dual objectives we need two spherical shells
kzz = abs(k_tot[0])
else:
kzz = k_tot[0]
# calculate the points on the spherical shell, save them and the
# corresponding kz, ky and kx coordinates
self.valid_points = np.logical_and(abs(kr - kmag) < dkr, kzz > kz_min + dkr)
self.kzz, self.kyy, self.kxx = [k[self.valid_points] for k in k_tot]
def _gen_otf(self):
"""Generate the OTFs."""
# clear internal state
self._attribute_changed()
# generate coordinate space
self._gen_kr()
kxx, kyy, kzz = self.kxx, self.kyy, self.kzz
# generate direction cosines
m, n, s = np.array((kxx, kyy, kzz)) / norm((kxx, kyy, kzz), axis=0)
# apply a given imaging condition
if self.condition == "sine":
a = 1.0 / np.sqrt(s)
elif self.condition == "herschel":
a = 1.0 / s
elif self.condition == "none":
a = 1.0
else:
raise RuntimeError("You should never see this")
# apply the vectorial corrections if requested
if self.vec_corr != "none":
plist = []
if self.vec_corr == "z" or self.vec_corr == "total":
plist.append(-m) # Pzx
plist.append(-n) # Pzy
if self.vec_corr == "y" or self.vec_corr == "total":
plist.append(-n * m / (1 + s)) # Pyx
plist.append(1 - n**2 / (1 + s)) # Pyy
if self.vec_corr == "x" or self.vec_corr == "total":
plist.append(1 - m**2 / (1 + s)) # Pxx
plist.append(-m * n / (1 + s)) # Pxy
# generate empty otf
otf = np.zeros((len(plist), self.zsize, self.size, self.size), dtype="D")
# fill in the valid poins
for o, p in zip(otf, plist):
o[self.valid_points] = p * a
else:
# TODO: we can actually do a LOT better here.
# if the vectorial correction is None then we can
# calculate a 2D (kz, kr) OTF and interpolate it out to
# the full 3D size.
# otf_sub = self._gen_radsym_otf()
# otf = otf_sub[np.newaxis]
otf_sub = np.zeros((self.zsize, self.size, self.size), dtype="D")
otf_sub[self.valid_points] = 1.0
otf = otf_sub[np.newaxis]
# we're already calculating the OTF, so we just need to shift it into
# the right place.
return fftshift(otf, axes=(1, 2, 3))
@cached_property
def OTFa(self):
"""Amplitude OTF."""
return self._gen_otf()
@cached_property
def PSFa(self):
"""Amplitude PSF."""
return easy_ifft(self.OTFa, axes=(1, 2, 3))
def apply_aberration(model, mcoefs, pcoefs):
    """Return a copy of *model* with Noll-ordered Zernike aberrations applied.
    Parameters
    ----------
    model : HanserPSF
        The model PSF to which to apply the aberrations
    mcoefs : ndarray (n, )
        Magnitude coefficients, Noll ordered
    pcoefs : ndarray (n, )
        Phase coefficients, Noll ordered
    """
    # only HanserPSF exposes the pupil machinery used below
    assert isinstance(model, HanserPSF), "Model must be a HanserPSF"
    # operate on a shallow copy so the caller's model is left untouched
    model = copy.copy(model)
    if mcoefs is None and pcoefs is None:
        logger.warning("No abberation applied")
        return model
    # a missing coefficient set defaults to all zeros
    if mcoefs is None:
        mcoefs = np.zeros_like(pcoefs)
    if pcoefs is None:
        pcoefs = np.zeros_like(mcoefs)
    assert len(mcoefs) == len(pcoefs), "Coefficient lengths don't match"
    # build the pupil-plane coordinates
    model._gen_kr()
    # normalize kr so that r == 1 at the diffraction limit
    rho = model._kr * model.wl / model.na
    azimuth = model._phi
    # Noll indices are 1-based
    noll_indices = np.arange(len(mcoefs)) + 1
    zerns = zernike(rho, azimuth, *noll2degrees(noll_indices))
    # weighted sums over the Zernike stack give the phase and magnitude surfaces
    phase_surface = (zerns * pcoefs[:, None, None]).sum(0)
    # aberrate the unaberrated pupil magnitude (its unaberrated phase is 0)
    mag_surface = (zerns * mcoefs[:, None, None]).sum(0) + abs(model._gen_pupil())
    # combine into a complex pupil and push it through the model
    model.apply_pupil(mag_surface * np.exp(1j * phase_surface))
    return model
def apply_named_aberration(model, aberration, magnitude):
    """Apply one named aberration to the PSF; only the phase is affected."""
    # translate the name into Noll-ordered phase coefficients, then delegate
    phase_coefficients = named_aberration_to_pcoefs(aberration, magnitude)
    return apply_aberration(model, None, phase_coefficients)
def named_aberration_to_pcoefs(aberration, magnitude):
    """Convert a named aberration into phase coefficients.
    Parameters
    ----------
    aberration: str
        Name of aberration (a key of ``name2noll``)
    magnitude: float
        Magnitude of aberration
    Returns
    -------
    np.ndarray
        Phase coefficient vector with ``magnitude`` at the aberration's
        Noll index and zeros elsewhere
    Raises
    ------
    KeyError
        If ``aberration`` is not a recognized name.
    """
    try:
        noll = name2noll[aberration]
    except KeyError as e:
        # chain the original lookup error (B904) so tracebacks show the
        # root cause explicitly instead of the implicit "During handling..."
        raise KeyError(
            f"Aberration '{aberration}' unknown, choose from: '"
            + "', '".join(name2noll.keys())
            + "'"
        ) from e
    # one slot per possible Noll index; only the requested one is non-zero
    pcoefs = np.zeros(max(name2noll.values()))
    pcoefs[noll - 1] = magnitude
    return pcoefs
def apply_named_aberrations(model, aberrations):
    """Apply several named aberrations at once; only the phase is affected.
    Parameters
    ----------
    model: PSF
        PSF model onto which aberration will be applied
    aberrations: dict()
        Dictionary of aberration-magnitude pairs
    Returns
    -------
    PSF
        Aberrated model
    """
    # accumulate the phase coefficients contributed by each named aberration
    pcoefs = np.zeros(len(name2noll))
    for name, mag in aberrations.items():
        pcoefs = pcoefs + named_aberration_to_pcoefs(name, mag)
    return apply_aberration(model, None, pcoefs)
if __name__ == "__main__":
    # import plotting
    from matplotlib import pyplot as plt
    # generate a comparison of the two PSF models with identical parameters
    kwargs = dict(
        wl=520e-3,
        na=1.27,
        ni=1.33,
        res=90e-3,
        size=256,
        zres=190e-3,
        zsize=128,
        vec_corr="none",
        condition="none",
    )
    psfs = HanserPSF(**kwargs), SheppardPSF(**kwargs)
    with plt.style.context("dark_background"):
        fig, axs = plt.subplots(2, 2, figsize=(9, 6), gridspec_kw=dict(width_ratios=(1, 2)))
        for psf, ax_sub in zip(psfs, axs):
            psf.plot_otf()
            psf.plot_psf(interpolation="bicubic")
            # make coordinates
            ax_yx, ax_zx = ax_sub
            # get magnitude
            otf = abs(psf.OTFi)
            # normalize and take the log for display (eps avoids log(0))
            otf /= otf.max()
            otf /= otf.mean()
            otf = np.log(otf + np.finfo(float).eps)
            # plot the two central cross-sections
            style = dict(vmin=-3, vmax=5, cmap="inferno", interpolation="bicubic")
            ax_yx.matshow(otf[otf.shape[0] // 2], **style)
            ax_yx.set_title("{} $k_y k_x$ plane".format(psf.__class__.__name__))
            ax_zx.matshow(otf[..., otf.shape[1] // 2], **style)
            ax_zx.set_title("{} $k_z k_x$ plane".format(psf.__class__.__name__))
            for ax in ax_sub:
                ax.xaxis.set_major_locator(plt.NullLocator())
                ax.yaxis.set_major_locator(plt.NullLocator())
        fig.tight_layout()
    # NOTE: the results are _very_ close on a qualitative scale, but they do not match exactly
    # as theory says they should (they're mathematically identical to one another)
    # second demo: show each named aberration applied to a 2D HanserPSF
    model_kwargs = dict(
        wl=525,
        na=1.27,
        ni=1.33,
        res=70,
        size=256,
        zrange=[0],
        vec_corr="none",
        condition="none",
    )
    model = HanserPSF(**model_kwargs)
    with plt.style.context("dark_background"):
        fig, axs = plt.subplots(3, 5, figsize=(12, 8))
        # fill out plot: one panel per named aberration, unit magnitude
        for ax, name in zip(axs.ravel(), name2noll.keys()):
            model2 = apply_named_aberration(model, name, 1)
            # crop the central region of the intensity PSF for display
            ax.imshow(
                model2.PSFi.squeeze()[104:-104, 104:-104], cmap="inferno", interpolation="bicubic"
            )
            ax.set_xlabel(name.replace(" ", "\n", 1).title())
            ax.xaxis.set_major_locator(plt.NullLocator())
            ax.yaxis.set_major_locator(plt.NullLocator())
        # fig.tight_layout()
    plt.show()
| [
"numpy.sin",
"numpy.linalg.norm",
"numpy.exp",
"numpy.arange",
"numpy.meshgrid",
"numpy.zeros_like",
"numpy.arcsin",
"numpy.finfo",
"numpy.fft.fftfreq",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show",
"numpy.asarray",
"matplotlib.pyplot.style.context",
"numpy.fft.fftshift",
"nump... | [((749, 776), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (766, 776), False, 'import logging\n'), ((22125, 22141), 'copy.copy', 'copy.copy', (['model'], {}), '(model)\n', (22134, 22141), False, 'import copy\n'), ((27160, 27170), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (27168, 27170), True, 'from matplotlib import pyplot as plt\n'), ((10984, 11001), 'numpy.asarray', 'np.asarray', (['value'], {}), '(value)\n', (10994, 11001), True, 'import numpy as np\n'), ((11326, 11354), 'numpy.fft.fftfreq', 'fftfreq', (['self.size', 'self.res'], {}), '(self.size, self.res)\n', (11333, 11354), False, 'from numpy.fft import fftfreq, fftshift, ifftn\n'), ((11374, 11391), 'numpy.meshgrid', 'np.meshgrid', (['k', 'k'], {}), '(k, k)\n', (11385, 11391), True, 'import numpy as np\n'), ((12298, 12368), 'numpy.exp', 'np.exp', (['(2 * np.pi * 1.0j * kz * self.zrange[:, np.newaxis, np.newaxis])'], {}), '(2 * np.pi * 1.0j * kz * self.zrange[:, np.newaxis, np.newaxis])\n', (12304, 12368), True, 'import numpy as np\n'), ((13478, 13512), 'numpy.arcsin', 'np.arcsin', (['((kr < kmag) * kr / kmag)'], {}), '((kr < kmag) * kr / kmag)\n', (13487, 13512), True, 'import numpy as np\n'), ((17664, 17692), 'numpy.fft.fftfreq', 'fftfreq', (['self.size', 'self.res'], {}), '(self.size, self.res)\n', (17671, 17692), False, 'from numpy.fft import fftfreq, fftshift, ifftn\n'), ((17706, 17736), 'numpy.fft.fftfreq', 'fftfreq', (['self.zsize', 'self.zres'], {}), '(self.zsize, self.zres)\n', (17713, 17736), False, 'from numpy.fft import fftfreq, fftshift, ifftn\n'), ((17753, 17789), 'numpy.meshgrid', 'np.meshgrid', (['kz', 'k', 'k'], {'indexing': '"""ij"""'}), "(kz, k, k, indexing='ij')\n", (17764, 17789), True, 'import numpy as np\n'), ((17825, 17844), 'numpy.linalg.norm', 'norm', (['k_tot'], {'axis': '(0)'}), '(k_tot, axis=0)\n', (17829, 17844), False, 'from numpy.linalg import norm\n'), ((18192, 18237), 'numpy.sqrt', 'np.sqrt', (['(kmag ** 2 - (self.na / 
self.wl) ** 2)'], {}), '(kmag ** 2 - (self.na / self.wl) ** 2)\n', (18199, 18237), True, 'import numpy as np\n'), ((21361, 21390), 'numpy.fft.fftshift', 'fftshift', (['otf'], {'axes': '(1, 2, 3)'}), '(otf, axes=(1, 2, 3))\n', (21369, 21390), False, 'from numpy.fft import fftfreq, fftshift, ifftn\n'), ((22295, 22316), 'numpy.zeros_like', 'np.zeros_like', (['pcoefs'], {}), '(pcoefs)\n', (22308, 22316), True, 'import numpy as np\n'), ((22358, 22379), 'numpy.zeros_like', 'np.zeros_like', (['mcoefs'], {}), '(mcoefs)\n', (22371, 22379), True, 'import numpy as np\n'), ((23045, 23071), 'numpy.exp', 'np.exp', (['(1.0j * pupil_phase)'], {}), '(1.0j * pupil_phase)\n', (23051, 23071), True, 'import numpy as np\n'), ((25078, 25114), 'matplotlib.pyplot.style.context', 'plt.style.context', (['"""dark_background"""'], {}), "('dark_background')\n", (25095, 25114), True, 'from matplotlib import pyplot as plt\n'), ((26575, 26611), 'matplotlib.pyplot.style.context', 'plt.style.context', (['"""dark_background"""'], {}), "('dark_background')\n", (26592, 26611), True, 'from matplotlib import pyplot as plt\n'), ((26632, 26667), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(5)'], {'figsize': '(12, 8)'}), '(3, 5, figsize=(12, 8))\n', (26644, 26667), True, 'from matplotlib import pyplot as plt\n'), ((7374, 7394), 'numpy.diff', 'np.diff', (['self.zrange'], {}), '(self.zrange)\n', (7381, 7394), True, 'import numpy as np\n'), ((15305, 15331), 'numpy.fft.ifftn', 'ifftn', (['pupils'], {'axes': '(2, 3)'}), '(pupils, axes=(2, 3))\n', (15310, 15331), False, 'from numpy.fft import fftfreq, fftshift, ifftn\n'), ((19537, 19562), 'numpy.array', 'np.array', (['(kxx, kyy, kzz)'], {}), '((kxx, kyy, kzz))\n', (19545, 19562), True, 'import numpy as np\n'), ((19565, 19594), 'numpy.linalg.norm', 'norm', (['(kxx, kyy, kzz)'], {'axis': '(0)'}), '((kxx, kyy, kzz), axis=0)\n', (19569, 19594), False, 'from numpy.linalg import norm\n'), ((21102, 21157), 'numpy.zeros', 'np.zeros', (['(self.zsize, 
self.size, self.size)'], {'dtype': '"""D"""'}), "((self.zsize, self.size, self.size), dtype='D')\n", (21110, 21157), True, 'import numpy as np\n'), ((8542, 8557), 'numpy.finfo', 'np.finfo', (['float'], {}), '(float)\n', (8550, 8557), True, 'import numpy as np\n'), ((10150, 10171), 'numpy.arange', 'np.arange', (['self.zsize'], {}), '(self.zsize)\n', (10159, 10171), True, 'import numpy as np\n'), ((18518, 18547), 'numpy.errstate', 'np.errstate', ([], {'invalid': '"""ignore"""'}), "(invalid='ignore')\n", (18529, 18547), True, 'import numpy as np\n'), ((19696, 19706), 'numpy.sqrt', 'np.sqrt', (['s'], {}), '(s)\n', (19703, 19706), True, 'import numpy as np\n'), ((27049, 27066), 'matplotlib.pyplot.NullLocator', 'plt.NullLocator', ([], {}), '()\n', (27064, 27066), True, 'from matplotlib import pyplot as plt\n'), ((27107, 27124), 'matplotlib.pyplot.NullLocator', 'plt.NullLocator', ([], {}), '()\n', (27122, 27124), True, 'from matplotlib import pyplot as plt\n'), ((14893, 14908), 'numpy.array', 'np.array', (['plist'], {}), '(plist)\n', (14901, 14908), True, 'import numpy as np\n'), ((26053, 26070), 'matplotlib.pyplot.NullLocator', 'plt.NullLocator', ([], {}), '()\n', (26068, 26070), True, 'from matplotlib import pyplot as plt\n'), ((26115, 26132), 'matplotlib.pyplot.NullLocator', 'plt.NullLocator', ([], {}), '()\n', (26130, 26132), True, 'from matplotlib import pyplot as plt\n'), ((7952, 7988), 'numpy.sqrt', 'np.sqrt', (['(self.ni ** 2 - self.na ** 2)'], {}), '(self.ni ** 2 - self.na ** 2)\n', (7959, 7988), True, 'import numpy as np\n'), ((13815, 13828), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (13821, 13828), True, 'import numpy as np\n'), ((13903, 13916), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (13909, 13916), True, 'import numpy as np\n'), ((14231, 14244), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (14237, 14244), True, 'import numpy as np\n'), ((14247, 14258), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (14253, 14258), True, 
'import numpy as np\n'), ((14296, 14309), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (14302, 14309), True, 'import numpy as np\n'), ((14312, 14323), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (14318, 14323), True, 'import numpy as np\n'), ((14462, 14473), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (14468, 14473), True, 'import numpy as np\n'), ((14790, 14801), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (14796, 14801), True, 'import numpy as np\n'), ((18570, 18593), 'numpy.array', 'np.array', (['(dkz, dk, dk)'], {}), '((dkz, dk, dk))\n', (18578, 18593), True, 'import numpy as np\n'), ((25570, 25585), 'numpy.finfo', 'np.finfo', (['float'], {}), '(float)\n', (25578, 25585), True, 'import numpy as np\n'), ((14448, 14459), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (14454, 14459), True, 'import numpy as np\n'), ((14511, 14524), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (14517, 14524), True, 'import numpy as np\n'), ((14546, 14557), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (14552, 14557), True, 'import numpy as np\n'), ((14665, 14678), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (14671, 14678), True, 'import numpy as np\n'), ((14700, 14711), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (14706, 14711), True, 'import numpy as np\n'), ((14776, 14787), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (14782, 14787), True, 'import numpy as np\n'), ((18641, 18656), 'numpy.array', 'np.array', (['k_tot'], {}), '(k_tot)\n', (18649, 18656), True, 'import numpy as np\n'), ((14427, 14440), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (14433, 14440), True, 'import numpy as np\n'), ((14527, 14538), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (14533, 14538), True, 'import numpy as np\n'), ((14681, 14692), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (14687, 14692), True, 'import numpy as np\n'), ((14755, 14768), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (14761, 14768), True, 'import numpy as 
np\n')] |
"""Three-dimensional dam break over a dry bed. (14 hours)
The case is described as a SPHERIC benchmark
https://wiki.manchester.ac.uk/spheric/index.php/Test2
By default the simulation runs for 6 seconds of simulation time.
"""
import numpy as np
from pysph.base.kernels import WendlandQuintic
from pysph.examples._db_geometry import DamBreak3DGeometry
from pysph.solver.application import Application
from pysph.sph.integrator import EPECIntegrator
from pysph.sph.scheme import WCSPHScheme
# problem dimensionality and integration parameters
dim = 3
dt = 1e-5  # default timestep (configure_scheme recomputes it from h0 and co)
tf = 6.0  # final simulation time in seconds (see module docstring)
# parameter to change the resolution
dx = 0.02  # default particle spacing (overridable via --dx)
nboundary_layers = 1
hdx = 1.3  # smoothing length factor: h = hdx * dx (see --hdx help text)
ro = 1000.0  # reference fluid density
h0 = dx * hdx  # default smoothing length
gamma = 7.0  # WCSPH equation-of-state exponent
alpha = 0.25  # artificial viscosity parameter
beta = 0.0  # artificial viscosity parameter
# sound speed: 10x an estimate of the max fluid speed, sqrt(2 * g * H) with H = 0.55
c0 = 10.0 * np.sqrt(2.0 * 9.81 * 0.55)
class DamBreak3D(Application):
    """SPHERIC benchmark: 3D dam break over a dry bed, solved with WCSPH."""
    def add_user_options(self, group):
        """Register command-line options for resolution control."""
        group.add_argument(
            '--dx', action='store', type=float, dest='dx', default=dx,
            help='Particle spacing.'
        )
        group.add_argument(
            '--hdx', action='store', type=float, dest='hdx', default=hdx,
            help='Specify the hdx factor where h = hdx * dx.'
        )
    def consume_user_options(self):
        """Build the geometry from the parsed options and derive the sound speed."""
        dx = self.options.dx
        self.dx = dx
        self.hdx = self.options.hdx
        self.geom = DamBreak3DGeometry(
            dx=dx, nboundary_layers=nboundary_layers, hdx=self.hdx, rho0=ro
        )
        # sound speed: 10x the geometry's estimate of the maximum fluid speed
        self.co = 10.0 * self.geom.get_max_speed(g=9.81)
    def create_scheme(self):
        """Create the WCSPH scheme with module-level default parameters."""
        s = WCSPHScheme(
            ['fluid'], ['boundary', 'obstacle'], dim=dim, rho0=ro, c0=c0,
            h0=h0, hdx=hdx, gz=-9.81, alpha=alpha, beta=beta, gamma=gamma,
            hg_correction=True, tensile_correction=False
        )
        return s
    def configure_scheme(self):
        """Re-configure the scheme with the user-selected resolution and solver settings."""
        s = self.scheme
        hdx = self.hdx
        kernel = WendlandQuintic(dim=dim)
        h0 = self.dx * hdx
        s.configure(h0=h0, hdx=hdx)
        # CFL-style timestep based on smoothing length and sound speed
        dt = 0.25*h0/(1.1 * self.co)
        s.configure_solver(
            kernel=kernel, integrator_cls=EPECIntegrator, tf=tf, dt=dt,
            adaptive_timestep=True, n_damp=50,
            output_at_times=[0.4, 0.6, 1.0]
        )
    def create_particles(self):
        """Delegate particle creation to the dam-break geometry."""
        return self.geom.create_particles()
    def customize_output(self):
        """Configure the Mayavi viewer defaults for saved output."""
        self._mayavi_config('''
        viewer.scalar = 'u'
        b = particle_arrays['boundary']
        b.plot.actor.mapper.scalar_visibility = False
        b.plot.actor.property.opacity = 0.1
        ''')
if __name__ == '__main__':
    # entry point: parse command-line options and run the simulation
    app = DamBreak3D()
    app.run()
| [
"pysph.base.kernels.WendlandQuintic",
"pysph.examples._db_geometry.DamBreak3DGeometry",
"pysph.sph.scheme.WCSPHScheme",
"numpy.sqrt"
] | [((676, 702), 'numpy.sqrt', 'np.sqrt', (['(2.0 * 9.81 * 0.55)'], {}), '(2.0 * 9.81 * 0.55)\n', (683, 702), True, 'import numpy as np\n'), ((1240, 1327), 'pysph.examples._db_geometry.DamBreak3DGeometry', 'DamBreak3DGeometry', ([], {'dx': 'dx', 'nboundary_layers': 'nboundary_layers', 'hdx': 'self.hdx', 'rho0': 'ro'}), '(dx=dx, nboundary_layers=nboundary_layers, hdx=self.hdx,\n rho0=ro)\n', (1258, 1327), False, 'from pysph.examples._db_geometry import DamBreak3DGeometry\n'), ((1445, 1635), 'pysph.sph.scheme.WCSPHScheme', 'WCSPHScheme', (["['fluid']", "['boundary', 'obstacle']"], {'dim': 'dim', 'rho0': 'ro', 'c0': 'c0', 'h0': 'h0', 'hdx': 'hdx', 'gz': '(-9.81)', 'alpha': 'alpha', 'beta': 'beta', 'gamma': 'gamma', 'hg_correction': '(True)', 'tensile_correction': '(False)'}), "(['fluid'], ['boundary', 'obstacle'], dim=dim, rho0=ro, c0=c0,\n h0=h0, hdx=hdx, gz=-9.81, alpha=alpha, beta=beta, gamma=gamma,\n hg_correction=True, tensile_correction=False)\n", (1456, 1635), False, 'from pysph.sph.scheme import WCSPHScheme\n'), ((1788, 1812), 'pysph.base.kernels.WendlandQuintic', 'WendlandQuintic', ([], {'dim': 'dim'}), '(dim=dim)\n', (1803, 1812), False, 'from pysph.base.kernels import WendlandQuintic\n')] |
import torch
import argparse
import onnx
import onnxruntime
from resnets_3d import resnet50_3d
import torch.autograd.profiler as profiler
import tvm.relay.op
from tqdm import tqdm
from tvm import relay
import tvm
from tvm import te
import numpy as np
import tvm.contrib.graph_executor as runtime
from tvm.relay import testing
from torchvision.models import resnet
torch.backends.cudnn.benchmark = True
NAME = 'resnet50_3d'
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--iterations", help="How many iterations to average for timing", type=int, default=500)
    parser.add_argument("--discard_iter", help="How many iterations to not time during warm up", type=int, default=100)
    args = parser.parse_args()
    # build the model and a fixed random input on the GPU
    model = resnet50_3d().cuda()
    model.eval()
    inputs = torch.randn(1, 64, 3, 56, 56).cuda()
    from torch2trt import torch2trt
    import time
    # --- TensorRT benchmark (via torch2trt conversion) ---
    model_trt = torch2trt(model, [inputs])
    times = []
    for i in tqdm(range(args.discard_iter + args.iterations)):
        # synchronize around the call so wall-clock times cover the GPU work
        torch.cuda.current_stream().synchronize()
        t0 = time.time()
        model_trt(inputs)
        torch.cuda.current_stream().synchronize()
        t1 = time.time()
        times.append(1000.0 * (t1 - t0))
    # average only the post-warm-up iterations
    total = 0
    for i in range(args.discard_iter, len(times)):
        total += times[i]
    avg = total / (args.iterations)
    print("TensorRT: Average inference time of the last " + str(args.iterations) + " iterations: " + str(avg) + " ms")
    print(model(inputs).size())
    # --- PyTorch benchmark (CUDA events for GPU timing) ---
    times = []
    with torch.no_grad():
        for i in tqdm(range(args.discard_iter + args.iterations)):
            start = torch.cuda.Event(enable_timing=True)
            end = torch.cuda.Event(enable_timing=True)
            start.record()
            model(inputs)
            end.record()
            # Waits for everything to finish running
            torch.cuda.synchronize()
            times.append(start.elapsed_time(end))
    total = 0
    for i in range(args.discard_iter, len(times)):
        total += times[i]
    avg = total / (args.iterations)
    print("Average inference time of the last " + str(args.iterations) + " iterations: " + str(avg) + " ms")
    # --- export the model: TorchScript trace, then Relay and ONNX ---
    input_shape = [1, 64, 3, 56, 56]
    input_data = torch.randn(input_shape)
    scripted_model = torch.jit.trace(model.cpu(), input_data).eval()
    torch.jit.save(scripted_model, f'models/{NAME}.pth')
    input_name = "input0"
    shape_list = [(input_name, input_shape)]
    mod, params = relay.frontend.from_pytorch(scripted_model, shape_list)
    #print("Relay module function:\n", mod.astext(show_meta_data=True))
    with open(f"models/{NAME}.txt", "w") as text_file:
        text_file.write(mod.astext(show_meta_data=True))
    input_names = [ "input0" ]
    output_names = [ "output0" ]
    model.eval()
    # reference output from the PyTorch model (on CPU)
    with torch.no_grad():
        out_torch = model(inputs.cpu()).cpu().detach().numpy()
    torch.onnx.export(scripted_model, input_data,
                      f"models/{NAME}.onnx", verbose=False,
                      export_params=True,
                      do_constant_folding=False,
                      input_names=input_names, output_names=output_names,
                      training = torch.onnx.TrainingMode.TRAINING,
                      example_outputs=torch.rand((1, 2048, 1, 7, 7)),
                      opset_version=12)
    # reference output from onnxruntime
    onnx_model = onnx.load(f"models/{NAME}.onnx")
    sess = onnxruntime.InferenceSession(f"models/{NAME}.onnx")
    out_onnx = sess.run(["output0"], {"input0": inputs.cpu().numpy()})[0]
    input_name = "input0"
    shape_dict = {input_name: input_shape}
    mod2, params2 = relay.frontend.from_onnx(onnx_model, shape_dict, freeze_params=True)
    with open(f"models/{NAME}_onnx.txt", "w") as text_file:
        text_file.write(mod2.astext(show_meta_data=True))
    # Build the TVM runtime modules for both relay graphs (CUDA target)
    ctx = tvm.device("cuda", 0)
    with tvm.transform.PassContext(opt_level=3):
        lib = relay.build(mod, target="cuda", target_host="llvm", params=params)
    with tvm.transform.PassContext(opt_level=3):
        lib2 = relay.build(mod2, target="cuda", target_host="llvm", params=params2)
    m = runtime.GraphModule(lib["default"](ctx))
    # Set inputs
    m.set_input(input_name, tvm.nd.array(inputs.cpu().numpy().astype(np.float32)))
    m2 = runtime.GraphModule(lib2["default"](ctx))
    # Set inputs
    m2.set_input(input_name, tvm.nd.array(inputs.cpu().numpy().astype(np.float32)))
    # Measure performance
    ftimer = m.module.time_evaluator("run", ctx, number=100, repeat=3)
    prof_res = np.array(ftimer().results) * 1000  # convert to millisecond
    perf = np.mean(prof_res)
    print("%.5f ms" % (perf))
    ftimer = m2.module.time_evaluator("run", ctx, number=100, repeat=3)
    prof_res = np.array(ftimer().results) * 1000  # convert to millisecond
    perf = np.mean(prof_res)
    print("%.5f ms" % (perf))
    # --- cross-check the outputs of all four backends ---
    m.run()
    out = m.get_output(0)
    out_tvm = out.asnumpy()
    m2.run()
    out = m2.get_output(0)
    out_tvm2 = out.asnumpy()
    print(out_tvm[0,:10,0,0])
    print(out_tvm2[0,:10,0,0])
    print(out_torch[0,:10,0,0])
    print(out_onnx[0,:10,0,0])
    # loose tolerance: backends differ in kernel implementations/precision
    TOL = 1e-01
    assert np.allclose(out_onnx, out_torch, rtol=TOL, atol=TOL)
    assert np.allclose(out_onnx, out_tvm, rtol=TOL, atol=TOL)
    assert np.allclose(out_torch, out_tvm, rtol=TOL, atol=TOL)
    assert np.allclose(out_onnx, out_tvm2, rtol=TOL, atol=TOL)
    assert np.allclose(out_torch, out_tvm2, rtol=TOL, atol=TOL)
    print(np.abs((out_torch - out_tvm)).max())
| [
"torch.cuda.synchronize",
"numpy.abs",
"argparse.ArgumentParser",
"numpy.allclose",
"tvm.relay.frontend.from_onnx",
"torch.randn",
"onnxruntime.InferenceSession",
"numpy.mean",
"torch.no_grad",
"torch2trt.torch2trt",
"resnets_3d.resnet50_3d",
"onnx.load",
"torch.cuda.Event",
"tvm.relay.fro... | [((467, 492), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (490, 492), False, 'import argparse\n'), ((927, 953), 'torch2trt.torch2trt', 'torch2trt', (['model', '[inputs]'], {}), '(model, [inputs])\n', (936, 953), False, 'from torch2trt import torch2trt\n'), ((2278, 2302), 'torch.randn', 'torch.randn', (['input_shape'], {}), '(input_shape)\n', (2289, 2302), False, 'import torch\n'), ((2377, 2429), 'torch.jit.save', 'torch.jit.save', (['scripted_model', 'f"""models/{NAME}.pth"""'], {}), "(scripted_model, f'models/{NAME}.pth')\n", (2391, 2429), False, 'import torch\n'), ((2520, 2575), 'tvm.relay.frontend.from_pytorch', 'relay.frontend.from_pytorch', (['scripted_model', 'shape_list'], {}), '(scripted_model, shape_list)\n', (2547, 2575), False, 'from tvm import relay\n'), ((3394, 3426), 'onnx.load', 'onnx.load', (['f"""models/{NAME}.onnx"""'], {}), "(f'models/{NAME}.onnx')\n", (3403, 3426), False, 'import onnx\n'), ((3439, 3490), 'onnxruntime.InferenceSession', 'onnxruntime.InferenceSession', (['f"""models/{NAME}.onnx"""'], {}), "(f'models/{NAME}.onnx')\n", (3467, 3490), False, 'import onnxruntime\n'), ((3655, 3723), 'tvm.relay.frontend.from_onnx', 'relay.frontend.from_onnx', (['onnx_model', 'shape_dict'], {'freeze_params': '(True)'}), '(onnx_model, shape_dict, freeze_params=True)\n', (3679, 3723), False, 'from tvm import relay\n'), ((3879, 3900), 'tvm.device', 'tvm.device', (['"""cuda"""', '(0)'], {}), "('cuda', 0)\n", (3889, 3900), False, 'import tvm\n'), ((4653, 4670), 'numpy.mean', 'np.mean', (['prof_res'], {}), '(prof_res)\n', (4660, 4670), True, 'import numpy as np\n'), ((4860, 4877), 'numpy.mean', 'np.mean', (['prof_res'], {}), '(prof_res)\n', (4867, 4877), True, 'import numpy as np\n'), ((5197, 5249), 'numpy.allclose', 'np.allclose', (['out_onnx', 'out_torch'], {'rtol': 'TOL', 'atol': 'TOL'}), '(out_onnx, out_torch, rtol=TOL, atol=TOL)\n', (5208, 5249), True, 'import numpy as np\n'), ((5261, 5311), 'numpy.allclose', 
'np.allclose', (['out_onnx', 'out_tvm'], {'rtol': 'TOL', 'atol': 'TOL'}), '(out_onnx, out_tvm, rtol=TOL, atol=TOL)\n', (5272, 5311), True, 'import numpy as np\n'), ((5323, 5374), 'numpy.allclose', 'np.allclose', (['out_torch', 'out_tvm'], {'rtol': 'TOL', 'atol': 'TOL'}), '(out_torch, out_tvm, rtol=TOL, atol=TOL)\n', (5334, 5374), True, 'import numpy as np\n'), ((5386, 5437), 'numpy.allclose', 'np.allclose', (['out_onnx', 'out_tvm2'], {'rtol': 'TOL', 'atol': 'TOL'}), '(out_onnx, out_tvm2, rtol=TOL, atol=TOL)\n', (5397, 5437), True, 'import numpy as np\n'), ((5449, 5501), 'numpy.allclose', 'np.allclose', (['out_torch', 'out_tvm2'], {'rtol': 'TOL', 'atol': 'TOL'}), '(out_torch, out_tvm2, rtol=TOL, atol=TOL)\n', (5460, 5501), True, 'import numpy as np\n'), ((1097, 1108), 'time.time', 'time.time', ([], {}), '()\n', (1106, 1108), False, 'import time\n'), ((1198, 1209), 'time.time', 'time.time', ([], {}), '()\n', (1207, 1209), False, 'import time\n'), ((1556, 1571), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1569, 1571), False, 'import torch\n'), ((2855, 2870), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2868, 2870), False, 'import torch\n'), ((3911, 3949), 'tvm.transform.PassContext', 'tvm.transform.PassContext', ([], {'opt_level': '(3)'}), '(opt_level=3)\n', (3936, 3949), False, 'import tvm\n'), ((3965, 4031), 'tvm.relay.build', 'relay.build', (['mod'], {'target': '"""cuda"""', 'target_host': '"""llvm"""', 'params': 'params'}), "(mod, target='cuda', target_host='llvm', params=params)\n", (3976, 4031), False, 'from tvm import relay\n'), ((4042, 4080), 'tvm.transform.PassContext', 'tvm.transform.PassContext', ([], {'opt_level': '(3)'}), '(opt_level=3)\n', (4067, 4080), False, 'import tvm\n'), ((4097, 4165), 'tvm.relay.build', 'relay.build', (['mod2'], {'target': '"""cuda"""', 'target_host': '"""llvm"""', 'params': 'params2'}), "(mod2, target='cuda', target_host='llvm', params=params2)\n", (4108, 4165), False, 'from tvm import relay\n'), ((770, 783), 
'resnets_3d.resnet50_3d', 'resnet50_3d', ([], {}), '()\n', (781, 783), False, 'from resnets_3d import resnet50_3d\n'), ((821, 850), 'torch.randn', 'torch.randn', (['(1)', '(64)', '(3)', '(56)', '(56)'], {}), '(1, 64, 3, 56, 56)\n', (832, 850), False, 'import torch\n'), ((1673, 1709), 'torch.cuda.Event', 'torch.cuda.Event', ([], {'enable_timing': '(True)'}), '(enable_timing=True)\n', (1689, 1709), False, 'import torch\n'), ((1728, 1764), 'torch.cuda.Event', 'torch.cuda.Event', ([], {'enable_timing': '(True)'}), '(enable_timing=True)\n', (1744, 1764), False, 'import torch\n'), ((1910, 1934), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (1932, 1934), False, 'import torch\n'), ((3307, 3337), 'torch.rand', 'torch.rand', (['(1, 2048, 1, 7, 7)'], {}), '((1, 2048, 1, 7, 7))\n', (3317, 3337), False, 'import torch\n'), ((1042, 1069), 'torch.cuda.current_stream', 'torch.cuda.current_stream', ([], {}), '()\n', (1067, 1069), False, 'import torch\n'), ((1143, 1170), 'torch.cuda.current_stream', 'torch.cuda.current_stream', ([], {}), '()\n', (1168, 1170), False, 'import torch\n'), ((5513, 5540), 'numpy.abs', 'np.abs', (['(out_torch - out_tvm)'], {}), '(out_torch - out_tvm)\n', (5519, 5540), True, 'import numpy as np\n')] |
# -------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# -------------------------------------------------------------------------------------------
import shutil
from pathlib import Path
from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import torch
from ruamel.yaml import YAML
from torchmetrics.classification.confusion_matrix import ConfusionMatrix
from torchmetrics.metric import Metric
from health_azure.utils import replace_directory
from histopathology.datasets.base_dataset import SlidesDataset
from histopathology.utils.metrics_utils import (plot_attention_tiles, plot_heatmap_overlay,
plot_normalized_confusion_matrix, plot_scores_hist, plot_slide,
select_k_tiles)
from histopathology.utils.naming import MetricsKey, ResultsKey, SlideKey
from histopathology.utils.viz_utils import load_image_dict
# results of a single batch: one value per ResultsKey
BatchResultsType = Dict[ResultsKey, Any]
# per-batch results accumulated over an epoch
EpochResultsType = List[BatchResultsType]
# epoch results collated so each key maps to one flat list over all batches
ResultsType = Dict[ResultsKey, List[Any]]
def validate_class_names(class_names: Optional[Sequence[str]], n_classes: int) -> Tuple[str, ...]:
    """Return valid names for the specified number of classes.
    :param class_names: List of class names. If `None`, will return `('0', '1', ...)`.
    :param n_classes: Number of classes. If `1` (binary), expects `len(class_names) == 2`.
    :return: Validated class names tuple with length `2` for binary classes (`n_classes == 1`), otherwise `n_classes`.
    :raises ValueError: If `class_names` is given and its length does not match the effective number of classes.
    """
    # Binary classification (n_classes == 1) still needs two names (negative/positive class).
    effective_n_classes = n_classes if n_classes > 1 else 2
    if class_names is None:
        class_names = [str(i) for i in range(effective_n_classes)]
    if len(class_names) != effective_n_classes:
        # Bug fix: the adjacent f-string fragments previously joined as "numberof classes"
        # (missing space at the fragment boundary).
        raise ValueError(f"Mismatch in number of class names ({class_names}) and number "
                         f"of classes ({effective_n_classes})")
    return tuple(class_names)
def save_figure(fig: plt.figure, figpath: Path) -> None:
    """Write *fig* to *figpath* (tight bounding box), then close it to free memory."""
    fig.savefig(figpath, bbox_inches='tight')
    # Close explicitly: matplotlib keeps figures alive otherwise.
    plt.close(fig)
def normalize_dict_for_df(dict_old: Dict[ResultsKey, Any]) -> Dict[str, Any]:
    """Flatten a slide-level results dictionary into uniform-length arrays.

    Tensors are squeezed (leading batch dim of 1), moved to CPU and converted to numpy;
    scalar tensor values are broadcast to the bag size; class probabilities are expanded
    into one column per class. The result can be fed to ``pd.DataFrame.from_dict``.
    """
    bag_size = len(dict_old[ResultsKey.SLIDE_ID])
    dict_new: Dict[str, Any] = {}
    for key, value in dict_old.items():
        if key == ResultsKey.PROB:
            # Dropped: superseded by the per-class probability columns below.
            continue
        if key == ResultsKey.CLASS_PROBS:
            if isinstance(value, torch.Tensor):
                value = value.squeeze(0).cpu().numpy()
            # One column per class, repeated to match the bag size.
            for class_idx in range(len(value)):
                dict_new[key + str(class_idx)] = np.repeat(value[class_idx], bag_size)
        else:
            if isinstance(value, torch.Tensor):
                value = value.squeeze(0).cpu().numpy()
                if value.ndim == 0:
                    # Per-slide scalar: broadcast so all columns share the bag length.
                    value = np.full(bag_size, fill_value=value)
            dict_new[key] = value
    return dict_new
def collate_results(epoch_results: EpochResultsType) -> ResultsType:
    """Concatenate per-batch result lists across the epoch, keyed like the first batch."""
    # Keys are taken from the first batch; all batches are assumed to share them.
    return {key: [item for batch in epoch_results for item in batch[key]]
            for key in epoch_results[0].keys()}
def save_outputs_and_features(results: ResultsType, outputs_dir: Path) -> None:
    """Write slide-level outputs as `test_output.csv` inside *outputs_dir*.

    Image tensors and losses are excluded; every remaining key becomes a CSV column
    (via :func:`normalize_dict_for_df`), one row group per slide.
    """
    print("Saving outputs ...")
    assert outputs_dir.is_dir(), f"No such dir: {outputs_dir}"
    print(f"Metrics results will be output to {outputs_dir}")
    excluded_keys = (ResultsKey.IMAGE, ResultsKey.LOSS)
    # Any column can be used for the slide count; the first dimension is the number of slides.
    n_slides = len(results[ResultsKey.SLIDE_ID])
    frames = []
    for slide_idx in range(n_slides):
        slide_dict = {key: values[slide_idx] for key, values in results.items()
                      if key not in excluded_keys}
        frames.append(pd.DataFrame.from_dict(normalize_dict_for_df(slide_dict)))
    df = pd.concat(frames, ignore_index=True)
    df.to_csv(outputs_dir / 'test_output.csv', mode='w+', header=True)
def save_features(results: ResultsType, outputs_dir: Path) -> None:
    """Pickle the encoded per-slide features (squeezed, on CPU) for offline analysis."""
    cpu_features = [feat.squeeze(0).cpu() for feat in results[ResultsKey.IMAGE]]
    torch.save(cpu_features, outputs_dir / 'test_encoded_features.pickle')
def save_top_and_bottom_tiles(results: ResultsType, n_classes: int, figures_dir: Path) \
        -> Dict[str, List[str]]:
    """Plot highest- and lowest-attention tiles for the most/least confident slides per case.

    Cases are TN/FP for class 0 and TP_i/FN_i for every other class (binary models still
    report class 1). For each case, 10 slides are selected and their 10 top/bottom
    attention tiles are plotted into ``figures_dir/<case>/``.

    :return: Mapping from case name (e.g. 'TN', 'TP_1') to the list of plotted slide IDs.
    """
    print("Selecting tiles ...")

    def _select(label: int, select: Tuple[str, str]) -> List[Tuple[Any, Any, List, List]]:
        # Fixed budget: 10 slides per case, 10 tiles per slide.
        return select_k_tiles(results, n_slides=10, label=label, n_tiles=10, select=select)

    # Class 0: true negatives (highest predictions) and false positives (lowest predictions).
    report_cases = {
        'TN': [_select(0, ('highest_pred', 'highest_att')), _select(0, ('highest_pred', 'lowest_att'))],
        'FP': [_select(0, ('lowest_pred', 'highest_att')), _select(0, ('lowest_pred', 'lowest_att'))],
    }
    # Classes 1..n-1: false negatives and true positives (selection order kept as before).
    for label in range(1, n_classes if n_classes > 1 else 2):
        fn_pair = [_select(label, ('lowest_pred', 'highest_att')), _select(label, ('lowest_pred', 'lowest_att'))]
        tp_pair = [_select(label, ('highest_pred', 'highest_att')), _select(label, ('highest_pred', 'lowest_att'))]
        report_cases['TP_' + str(label)] = tp_pair
        report_cases['FN_' + str(label)] = fn_pair

    selected_slide_ids: Dict[str, List[str]] = {}
    for case, (top_entries, bottom_entries) in report_cases.items():
        print(f"Plotting {case} (tiles, thumbnails, attention heatmaps)...")
        case_dir = figures_dir / case
        case_dir.mkdir(parents=True, exist_ok=True)
        selected_slide_ids[case] = []
        for i in range(len(top_entries)):
            slide_id, score, paths, top_attn = top_entries[i]
            fig = plot_attention_tiles(slide_id, score, paths, top_attn, case + '_top', ncols=4)
            save_figure(fig=fig, figpath=case_dir / f'{slide_id}_top.png')
            # NOTE(review): slide_id/score deliberately come from the top-attention entry;
            # only paths and attention values are taken from the bottom-attention entry.
            _, _, paths, bottom_attn = bottom_entries[i]
            fig = plot_attention_tiles(slide_id, score, paths, bottom_attn, case + '_bottom', ncols=4)
            save_figure(fig=fig, figpath=case_dir / f'{slide_id}_bottom.png')
            selected_slide_ids[case].append(slide_id)
    return selected_slide_ids
def save_slide_thumbnails_and_heatmaps(results: ResultsType, selected_slide_ids: Dict[str, List[str]], tile_size: int,
                                       level: int, slides_dataset: SlidesDataset, figures_dir: Path) -> None:
    """Plot thumbnail and attention-heatmap figures for every selected slide, grouped by case."""
    for case, slide_ids in selected_slide_ids.items():
        print(f"Plotting {case} (tiles, thumbnails, attention heatmaps)...")
        case_dir = figures_dir / case
        case_dir.mkdir(parents=True, exist_ok=True)
        for slide_id in slide_ids:
            save_slide_thumbnail_and_heatmap(results, slide_id=slide_id, tile_size=tile_size, level=level,
                                             slides_dataset=slides_dataset, key_dir=case_dir)
def save_slide_thumbnail_and_heatmap(results: ResultsType, slide_id: str, tile_size: int, level: int,
                                     slides_dataset: SlidesDataset, key_dir: Path) -> None:
    """Render the thumbnail and the attention-heatmap overlay for a single slide into *key_dir*."""
    slide_index = slides_dataset.dataset_df.index.get_loc(slide_id)
    # get_loc returns a slice/array for duplicated index entries - require uniqueness.
    assert isinstance(slide_index, int), f"Got non-unique slide ID: {slide_id}"
    loaded = load_image_dict(slides_dataset[slide_index], level=level, margin=0)
    slide_image = loaded[SlideKey.IMAGE]
    thumbnail_fig = plot_slide(slide_image=slide_image, scale=1.0)
    save_figure(fig=thumbnail_fig, figpath=key_dir / f'{slide_id}_thumbnail.png')
    heatmap_fig = plot_heatmap_overlay(slide=slide_id, slide_image=slide_image, results=results,
                                       location_bbox=loaded[SlideKey.LOCATION], tile_size=tile_size, level=level)
    save_figure(fig=heatmap_fig, figpath=key_dir / f'{slide_id}_heatmap.png')
def save_scores_histogram(results: ResultsType, figures_dir: Path) -> None:
    """Plot the histogram of predicted scores and save it as `hist_scores.png`."""
    print("Plotting histogram ...")
    save_figure(fig=plot_scores_hist(results), figpath=figures_dir / 'hist_scores.png')
def save_confusion_matrix(conf_matrix_metric: ConfusionMatrix, class_names: Sequence[str], figures_dir: Path) -> None:
    """Compute the confusion matrix, print it, and save its row-normalised plot."""
    print("Computing and saving confusion matrix...")
    cf_matrix = conf_matrix_metric.compute().cpu().numpy()
    # Tensors can't be logged through the usual channels - print to console instead.
    print('test/confusion matrix:')
    print(cf_matrix)
    # Row-normalise so each true-class row sums to 1 before plotting.
    row_sums = cf_matrix.sum(axis=1, keepdims=True)
    fig = plot_normalized_confusion_matrix(cm=cf_matrix / row_sums, class_names=class_names)
    save_figure(fig=fig, figpath=figures_dir / 'normalized_confusion_matrix.png')
class OutputsPolicy:
    """Utility class that defines when to save validation epoch outputs."""
    # Keys of the on-disk recovery file.
    _BEST_EPOCH_KEY = 'best_epoch'
    _BEST_VALUE_KEY = 'best_value'
    _PRIMARY_METRIC_KEY = 'primary_metric'
    def __init__(self, outputs_root: Path, primary_val_metric: MetricsKey, maximise: bool) -> None:
        """
        :param outputs_root: Root directory where to save a recovery file with best epoch and metric value.
        :param primary_val_metric: Name of the validation metric to track for saving best epoch outputs.
        :param maximise: Whether higher is better for `primary_val_metric`.
        """
        self.outputs_root = outputs_root
        self.primary_val_metric = primary_val_metric
        self.maximise = maximise
        self._init_best_metric()
    @property
    def best_metric_file_path(self) -> Path:
        # Recovery file holding the best epoch/value seen so far.
        return self.outputs_root / "best_val_metric.yml"
    def _init_best_metric(self) -> None:
        """Initialise running best metric epoch and value (recovered from disk if available).
        :raises ValueError: If the primary metric name does not match the one saved on disk.
        """
        recovery_file = self.best_metric_file_path
        if not recovery_file.exists():
            # Fresh start: worst possible value, so the first observed epoch always wins.
            self._best_metric_epoch = 0
            self._best_metric_value = float('-inf') if self.maximise else float('inf')
            return
        contents = YAML().load(recovery_file)
        self._best_metric_epoch = contents[self._BEST_EPOCH_KEY]
        self._best_metric_value = contents[self._BEST_VALUE_KEY]
        # Guard against resuming with a different metric than the one that produced the file.
        if contents[self._PRIMARY_METRIC_KEY] != self.primary_val_metric:
            raise ValueError(f"Expected primary metric '{self.primary_val_metric}', but found "
                             f"'{contents[self._PRIMARY_METRIC_KEY]}' in {self.best_metric_file_path}")
    def _save_best_metric(self) -> None:
        """Save best metric epoch, value, and name to disk, to allow recovery (e.g. in case of pre-emption)."""
        YAML().dump({self._BEST_EPOCH_KEY: self._best_metric_epoch,
                     self._BEST_VALUE_KEY: self._best_metric_value,
                     self._PRIMARY_METRIC_KEY: self.primary_val_metric.value},
                    self.best_metric_file_path)
    def should_save_validation_outputs(self, metrics_dict: Mapping[MetricsKey, Metric], epoch: int) -> bool:
        """Determine whether validation outputs should be saved given the current epoch's metrics.
        :param metrics_dict: Current epoch's metrics dictionary from
            :py:class:`~histopathology.models.deepmil.DeepMILModule`.
        :param epoch: Current epoch number.
        :return: Whether this is the best validation epoch so far.
        """
        metric_value = float(metrics_dict[self.primary_val_metric].compute())
        # Strictly better than the running best (ties do not trigger a re-save).
        is_best = (metric_value > self._best_metric_value) if self.maximise \
            else (metric_value < self._best_metric_value)
        if is_best:
            self._best_metric_value = metric_value
            self._best_metric_epoch = epoch
            self._save_best_metric()
        return is_best
class DeepMILOutputsHandler:
    """Class that manages writing validation and test outputs for DeepMIL models."""
    def __init__(self, outputs_root: Path, n_classes: int, tile_size: int, level: int,
                 slides_dataset: Optional[SlidesDataset], class_names: Optional[Sequence[str]],
                 primary_val_metric: MetricsKey, maximise: bool) -> None:
        """
        :param outputs_root: Root directory where to save all produced outputs.
        :param n_classes: Number of MIL classes (set `n_classes=1` for binary).
        :param tile_size: The size of each tile.
        :param level: The downsampling level (e.g. 0, 1, 2) of the tiles if available (default=1).
        :param slides_dataset: Optional slides dataset from which to plot thumbnails and heatmaps.
        :param class_names: List of class names. For binary (`n_classes == 1`), expects `len(class_names) == 2`.
            If `None`, will return `('0', '1', ...)`.
        :param primary_val_metric: Name of the validation metric to track for saving best epoch outputs.
        :param maximise: Whether higher is better for `primary_val_metric`.
        """
        self.outputs_root = outputs_root
        self.n_classes = n_classes
        self.tile_size = tile_size
        self.level = level
        self.slides_dataset = slides_dataset
        # Validated up-front so an inconsistent class_names list fails at construction time.
        self.class_names = validate_class_names(class_names, self.n_classes)
        # The policy tracks the running best validation metric and decides when to (re)write outputs.
        self.outputs_policy = OutputsPolicy(outputs_root=outputs_root,
                                            primary_val_metric=primary_val_metric,
                                            maximise=maximise)
    @property
    def validation_outputs_dir(self) -> Path:
        # Outputs of the best validation epoch seen so far.
        return self.outputs_root / "val"
    @property
    def previous_validation_outputs_dir(self) -> Path:
        # Temporary back-up location used while a new best epoch is being written.
        return self.validation_outputs_dir.with_name("val_old")
    @property
    def test_outputs_dir(self) -> Path:
        # Outputs of the (single) test run.
        return self.outputs_root / "test"
    def _save_outputs(self, epoch_results: EpochResultsType, metrics_dict: Mapping[MetricsKey, Metric],
                      outputs_dir: Path) -> None:
        """Trigger the rendering and saving of DeepMIL outputs and figures.
        :param epoch_results: Aggregated results from all epoch batches.
        :param metrics_dict: Current epoch's validation metrics dictionary from
            :py:class:`~histopathology.models.deepmil.DeepMILModule`.
        :param outputs_dir: Specific directory into which outputs should be saved (different for validation and test).
        """
        # outputs object consists of a list of dictionaries (of metadata and results, including encoded features)
        # It can be indexed as outputs[batch_idx][batch_key][bag_idx][tile_idx]
        # example of batch_key ResultsKey.SLIDE_ID_COL
        # for batch keys that contains multiple values for slides e.g. ResultsKey.BAG_ATTN_COL
        # outputs[batch_idx][batch_key][bag_idx][tile_idx]
        # contains the tile value
        # TODO: Synchronise this with checkpoint saving (e.g. on_save_checkpoint())
        results = collate_results(epoch_results)
        figures_dir = outputs_dir / "fig"
        outputs_dir.mkdir(exist_ok=True, parents=True)
        figures_dir.mkdir(exist_ok=True, parents=True)
        save_outputs_and_features(results, outputs_dir)
        # NOTE(review): "Selecting tiles ..." is also printed inside save_top_and_bottom_tiles,
        # so this message appears twice per call.
        print("Selecting tiles ...")
        selected_slide_ids = save_top_and_bottom_tiles(results, n_classes=self.n_classes, figures_dir=figures_dir)
        # Thumbnails/heatmaps need access to the raw slides, which may not be available.
        if self.slides_dataset is not None:
            save_slide_thumbnails_and_heatmaps(results, selected_slide_ids, tile_size=self.tile_size, level=self.level,
                                               slides_dataset=self.slides_dataset, figures_dir=figures_dir)
        save_scores_histogram(results, figures_dir=figures_dir)
        conf_matrix: ConfusionMatrix = metrics_dict[MetricsKey.CONF_MATRIX]  # type: ignore
        save_confusion_matrix(conf_matrix, class_names=self.class_names, figures_dir=figures_dir)
    def save_validation_outputs(self, epoch_results: EpochResultsType, metrics_dict: Mapping[MetricsKey, Metric],
                                epoch: int) -> None:
        """Render and save validation epoch outputs, according to the configured :py:class:`OutputsPolicy`.
        :param epoch_results: Aggregated results from all epoch batches, as passed to :py:meth:`validation_epoch_end()`.
        :param metrics_dict: Current epoch's validation metrics dictionary from
            :py:class:`~histopathology.models.deepmil.DeepMILModule`.
        :param epoch: Current epoch number.
        """
        # Only write outputs when this is the best epoch so far per the configured policy.
        if self.outputs_policy.should_save_validation_outputs(metrics_dict, epoch):
            # First move existing outputs to a temporary directory, to avoid mixing
            # outputs of different epochs in case writing fails halfway through
            if self.validation_outputs_dir.exists():
                replace_directory(source=self.validation_outputs_dir,
                                  target=self.previous_validation_outputs_dir)
            self._save_outputs(epoch_results, metrics_dict, self.validation_outputs_dir)
            # Writing completed successfully; delete temporary back-up
            if self.previous_validation_outputs_dir.exists():
                shutil.rmtree(self.previous_validation_outputs_dir)
    def save_test_outputs(self, epoch_results: EpochResultsType, metrics_dict: Mapping[MetricsKey, Metric]) -> None:
        """Render and save test epoch outputs.
        :param epoch_results: Aggregated results from all epoch batches, as passed to :py:meth:`test_epoch_end()`.
        :param metrics_dict: Test metrics dictionary from :py:class:`~histopathology.models.deepmil.DeepMILModule`.
        """
        # No best-epoch policy for testing: the test run is written unconditionally.
        self._save_outputs(epoch_results, metrics_dict, self.test_outputs_dir)
| [
"numpy.full",
"histopathology.utils.metrics_utils.plot_heatmap_overlay",
"pandas.DataFrame.from_dict",
"histopathology.utils.metrics_utils.plot_normalized_confusion_matrix",
"matplotlib.pyplot.close",
"ruamel.yaml.YAML",
"torch.save",
"histopathology.utils.metrics_utils.plot_scores_hist",
"health_az... | [((2287, 2301), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (2296, 2301), True, 'import matplotlib.pyplot as plt\n'), ((4501, 4538), 'pandas.concat', 'pd.concat', (['df_list'], {'ignore_index': '(True)'}), '(df_list, ignore_index=True)\n', (4510, 4538), True, 'import pandas as pd\n'), ((4801, 4872), 'torch.save', 'torch.save', (['features_list', "(outputs_dir / 'test_encoded_features.pickle')"], {}), "(features_list, outputs_dir / 'test_encoded_features.pickle')\n", (4811, 4872), False, 'import torch\n'), ((8480, 8530), 'histopathology.utils.viz_utils.load_image_dict', 'load_image_dict', (['slide_dict'], {'level': 'level', 'margin': '(0)'}), '(slide_dict, level=level, margin=0)\n', (8495, 8530), False, 'from histopathology.utils.viz_utils import load_image_dict\n'), ((8637, 8683), 'histopathology.utils.metrics_utils.plot_slide', 'plot_slide', ([], {'slide_image': 'slide_image', 'scale': '(1.0)'}), '(slide_image=slide_image, scale=1.0)\n', (8647, 8683), False, 'from histopathology.utils.metrics_utils import plot_attention_tiles, plot_heatmap_overlay, plot_normalized_confusion_matrix, plot_scores_hist, plot_slide, select_k_tiles\n'), ((8767, 8913), 'histopathology.utils.metrics_utils.plot_heatmap_overlay', 'plot_heatmap_overlay', ([], {'slide': 'slide_id', 'slide_image': 'slide_image', 'results': 'results', 'location_bbox': 'location_bbox', 'tile_size': 'tile_size', 'level': 'level'}), '(slide=slide_id, slide_image=slide_image, results=\n results, location_bbox=location_bbox, tile_size=tile_size, level=level)\n', (8787, 8913), False, 'from histopathology.utils.metrics_utils import plot_attention_tiles, plot_heatmap_overlay, plot_normalized_confusion_matrix, plot_scores_hist, plot_slide, select_k_tiles\n'), ((9134, 9159), 'histopathology.utils.metrics_utils.plot_scores_hist', 'plot_scores_hist', (['results'], {}), '(results)\n', (9150, 9159), False, 'from histopathology.utils.metrics_utils import plot_attention_tiles, 
plot_heatmap_overlay, plot_normalized_confusion_matrix, plot_scores_hist, plot_slide, select_k_tiles\n'), ((9734, 9807), 'histopathology.utils.metrics_utils.plot_normalized_confusion_matrix', 'plot_normalized_confusion_matrix', ([], {'cm': 'cf_matrix_n', 'class_names': 'class_names'}), '(cm=cf_matrix_n, class_names=class_names)\n', (9766, 9807), False, 'from histopathology.utils.metrics_utils import plot_attention_tiles, plot_heatmap_overlay, plot_normalized_confusion_matrix, plot_scores_hist, plot_slide, select_k_tiles\n'), ((5171, 5247), 'histopathology.utils.metrics_utils.select_k_tiles', 'select_k_tiles', (['results'], {'n_slides': '(10)', 'label': 'label', 'n_tiles': '(10)', 'select': 'select'}), '(results, n_slides=10, label=label, n_tiles=10, select=select)\n', (5185, 5247), False, 'from histopathology.utils.metrics_utils import plot_attention_tiles, plot_heatmap_overlay, plot_normalized_confusion_matrix, plot_scores_hist, plot_slide, select_k_tiles\n'), ((4456, 4490), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['slide_dict'], {}), '(slide_dict)\n', (4478, 4490), True, 'import pandas as pd\n'), ((6916, 6993), 'histopathology.utils.metrics_utils.plot_attention_tiles', 'plot_attention_tiles', (['slide_id', 'score', 'paths', 'top_attn', "(key + '_top')"], {'ncols': '(4)'}), "(slide_id, score, paths, top_attn, key + '_top', ncols=4)\n", (6936, 6993), False, 'from histopathology.utils.metrics_utils import plot_attention_tiles, plot_heatmap_overlay, plot_normalized_confusion_matrix, plot_scores_hist, plot_slide, select_k_tiles\n'), ((7150, 7237), 'histopathology.utils.metrics_utils.plot_attention_tiles', 'plot_attention_tiles', (['slide_id', 'score', 'paths', 'bottom_attn', "(key + '_bottom')"], {'ncols': '(4)'}), "(slide_id, score, paths, bottom_attn, key + '_bottom',\n ncols=4)\n", (7170, 7237), False, 'from histopathology.utils.metrics_utils import plot_attention_tiles, plot_heatmap_overlay, plot_normalized_confusion_matrix, plot_scores_hist, 
plot_slide, select_k_tiles\n'), ((12088, 12094), 'ruamel.yaml.YAML', 'YAML', ([], {}), '()\n', (12092, 12094), False, 'from ruamel.yaml import YAML\n'), ((17962, 18065), 'health_azure.utils.replace_directory', 'replace_directory', ([], {'source': 'self.validation_outputs_dir', 'target': 'self.previous_validation_outputs_dir'}), '(source=self.validation_outputs_dir, target=self.\n previous_validation_outputs_dir)\n', (17979, 18065), False, 'from health_azure.utils import replace_directory\n'), ((18335, 18386), 'shutil.rmtree', 'shutil.rmtree', (['self.previous_validation_outputs_dir'], {}), '(self.previous_validation_outputs_dir)\n', (18348, 18386), False, 'import shutil\n'), ((2916, 2951), 'numpy.full', 'np.full', (['bag_size'], {'fill_value': 'value'}), '(bag_size, fill_value=value)\n', (2923, 2951), True, 'import numpy as np\n'), ((11110, 11116), 'ruamel.yaml.YAML', 'YAML', ([], {}), '()\n', (11114, 11116), False, 'from ruamel.yaml import YAML\n'), ((3222, 3251), 'numpy.repeat', 'np.repeat', (['value[i]', 'bag_size'], {}), '(value[i], bag_size)\n', (3231, 3251), True, 'import numpy as np\n')] |
from numpy import array
def scigrid_2011_01_07_12():
ppc = {"version": '2'}
ppc["baseMVA"] = 100.0
ppc["bus"] = array([
[586, 3, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[589, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[590, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[593, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[594, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[595, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[597, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[598, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[599, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[600, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[601, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[602, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[603, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[607, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[608, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[609, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[610, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[612, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[613, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[614, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[616, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[617, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[618, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[619, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[621, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[623, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[624, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[628, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[629, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[631, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[632, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[637, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[638, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[639, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[640, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[641, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[642, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[643, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[646, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[647, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[650, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[652, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[655, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[657, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[658, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[661, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[662, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[663, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[666, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[668, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[670, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[672, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[675, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[676, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[678, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[679, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[681, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[683, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[687, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[689, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[691, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[693, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[694, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[695, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[696, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[697, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[698, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[701, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[702, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[704, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[705, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[707, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[708, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[711, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[713, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[714, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[716, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[717, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[719, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[722, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[723, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[724, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[725, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[727, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[728, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[730, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[731, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[732, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[733, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[735, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[737, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[738, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[739, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[741, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[742, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[743, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[745, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[746, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[747, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[748, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[749, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[750, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[753, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[758, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[760, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[761, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[762, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[763, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[765, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[767, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[769, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[771, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[772, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[774, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[776, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[777, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[778, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[781, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[784, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[785, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[787, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[788, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[789, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[790, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[791, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[792, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[795, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[798, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[800, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[801, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[802, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[805, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[806, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[808, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[809, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[810, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[811, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[814, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[815, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[816, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[817, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[818, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[821, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[822, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[825, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[826, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[829, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[830, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[833, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[834, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[835, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[836, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[837, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[839, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[840, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[841, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[842, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[843, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[844, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[845, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[847, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[848, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[849, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[850, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[851, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[852, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[853, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[854, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[855, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[856, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[857, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[858, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[859, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[860, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[862, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[863, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[864, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[865, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[867, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[869, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[870, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[872, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[873, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[874, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[875, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[877, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[881, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[882, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[883, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[886, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[889, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[890, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[893, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[894, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[895, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[896, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[898, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[900, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[902, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[903, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[905, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[907, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[909, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[911, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[913, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[914, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[915, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[916, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[917, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[918, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[919, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[920, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[921, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[922, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[923, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[925, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[928, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[931, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[934, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[935, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[936, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[937, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[939, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[940, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[942, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[943, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[944, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[945, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[946, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[948, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[950, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[951, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[952, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[956, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[957, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[958, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[959, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[960, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[963, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[965, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[966, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[967, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[968, 2, 0, 0, 0, 0, 0, 0.99951, 0, 220.0, 0, 1.1, 0.9 ],
[969, 2, 0, 0, 0, 0, 0, 0.99951, 0, 220.0, 0, 1.1, 0.9 ],
[971, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[973, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[976, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[977, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[978, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[980, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[981, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[982, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[983, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[984, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[985, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[986, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[987, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[988, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[990, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[993, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[994, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[995, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[996, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[997, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[998, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[999, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1000, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1002, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1003, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1006, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1007, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1008, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1010, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1011, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1012, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1014, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1018, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1019, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1023, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1025, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1026, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1028, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1029, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1030, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1031, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1032, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1033, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1034, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1035, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1036, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1037, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1038, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1039, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1041, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1042, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1044, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1046, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1047, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1048, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1049, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1050, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1051, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1052, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1053, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1054, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1055, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1056, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1057, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1058, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1059, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1060, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1061, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1062, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1063, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1064, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1065, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1066, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1067, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1068, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1069, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1070, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1071, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1072, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1073, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1074, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1075, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1077, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1078, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1079, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1080, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1081, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1082, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1083, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1084, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1085, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1086, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1087, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1088, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1089, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1090, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1091, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1092, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1093, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1094, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1095, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1096, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1097, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1098, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1099, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1100, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1101, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1102, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1103, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1104, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1105, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1106, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1107, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1108, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1109, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1110, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1111, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1112, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1113, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1114, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1115, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1116, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1117, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1118, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1119, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1120, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1121, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1122, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1123, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1124, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1125, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1126, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1127, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1128, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1129, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1130, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1131, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1132, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1133, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1134, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1135, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1136, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1137, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1138, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1139, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1140, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1141, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1142, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1143, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1144, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1145, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1146, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1147, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1148, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1149, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1150, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1151, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1152, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1153, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1154, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1155, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1156, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1157, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1158, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1159, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1160, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1161, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1162, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1164, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1166, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1167, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1168, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1169, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1170, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1171, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1172, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1173, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1174, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1175, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1176, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1177, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1178, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1179, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1180, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1181, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1182, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1183, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1184, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1185, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1186, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1187, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1188, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1189, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1190, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1191, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1192, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1193, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1194, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1195, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1196, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1197, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1198, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1199, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1200, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1201, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1202, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1203, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1204, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1205, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1206, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1207, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1208, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1209, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1210, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1211, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1212, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1213, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1214, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1215, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1216, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1217, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1218, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1219, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1220, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1221, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1222, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1223, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1224, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1225, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1226, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1227, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1228, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1229, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1230, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1231, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1232, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1233, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1234, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1235, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1236, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1237, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1238, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1239, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1240, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1241, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1242, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1243, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1244, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1245, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1246, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1247, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1248, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1249, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1250, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1251, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1252, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1253, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1254, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1255, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1256, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1257, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1258, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1259, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1260, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1261, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1262, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1263, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1264, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1265, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1266, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1267, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1270, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1271, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1272, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1273, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1274, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1275, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1276, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1277, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1278, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1279, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1280, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1282, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1283, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1284, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1285, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1286, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1287, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1288, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1289, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1290, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1291, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1292, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1293, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1294, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1295, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1296, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1297, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1300, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1301, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1302, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1303, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1304, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1305, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1306, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1307, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1308, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1309, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1310, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1311, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1312, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1313, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1314, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1315, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1316, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1317, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1318, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1319, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1320, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1321, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1322, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1323, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1324, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1325, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1326, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1327, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1328, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1329, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1330, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1331, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1332, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1333, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1334, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1336, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1337, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1338, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1339, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1340, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1341, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1342, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1343, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1344, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1345, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1346, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1348, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1349, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1350, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1351, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1352, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1355, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1356, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1357, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1358, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1359, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1360, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1361, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1362, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1363, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1364, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1365, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1366, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1367, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1368, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1369, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1370, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1371, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1372, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1373, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1374, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1375, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1376, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1377, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1378, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1379, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1380, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1381, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1382, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1383, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1384, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1385, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1386, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1387, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1388, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1389, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1390, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1391, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1392, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1393, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1394, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1395, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1396, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1397, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1398, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1399, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1400, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1401, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1402, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1403, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1404, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1405, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1406, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1407, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1408, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1409, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1410, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1411, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1412, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1413, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1414, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1415, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1416, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1417, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1418, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1419, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1421, 2, 0, 0, 0, 0, 0, 0.99951, 0, 220.0, 0, 1.1, 0.9 ],
[1422, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1423, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1424, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1425, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1426, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1427, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1428, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1431, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1432, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1433, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1434, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1435, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1436, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1437, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1438, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1439, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1440, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1441, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1442, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1443, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1444, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1445, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1446, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1447, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1448, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1449, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1450, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1451, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1452, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1453, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1454, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1455, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1456, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1457, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1458, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1459, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1460, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1461, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1462, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1463, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1464, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1465, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1466, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1467, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1468, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1469, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1470, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1471, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1472, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1473, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1474, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1475, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1476, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1477, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1479, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1480, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1481, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1482, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1483, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1484, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1485, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1486, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1487, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1488, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1489, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1490, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1491, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1492, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1493, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1494, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1495, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1497, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1498, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1500, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1501, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1502, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1503, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1504, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1505, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1506, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1507, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1508, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1510, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1511, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1512, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1513, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1514, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1516, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1517, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1518, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1519, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1520, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1521, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1522, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1523, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1524, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1525, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1526, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1527, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1528, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1529, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1530, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1531, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1532, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1534, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1535, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1536, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1537, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1538, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1539, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1540, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1541, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1542, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1543, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1544, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1545, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1546, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1547, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1548, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1549, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1550, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1551, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1552, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1553, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1554, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1555, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1556, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1557, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1558, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1559, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1560, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1561, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1562, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1563, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1564, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1565, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1566, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1567, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1568, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1569, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1570, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1571, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1572, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1573, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1574, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1575, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1576, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1577, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1578, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1579, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1580, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1581, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1582, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1583, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1584, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1585, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1586, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1587, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1588, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1589, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1590, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1591, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1592, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1593, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1594, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1595, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1596, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1597, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1598, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1599, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1600, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1601, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1602, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1603, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1604, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1605, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1606, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1607, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1608, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1609, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1610, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1611, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1612, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1613, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1614, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1615, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1616, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1617, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1618, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1619, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1620, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1621, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1622, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1623, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1624, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1625, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1626, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1627, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1628, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1629, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1630, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1631, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1632, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1633, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1634, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1635, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1636, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1637, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1638, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1639, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1640, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1641, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1642, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1643, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1644, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1645, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1646, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1647, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1648, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1649, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1650, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1651, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1652, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1653, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1654, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1655, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1656, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1657, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1658, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1659, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1660, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1661, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1662, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1663, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1664, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1665, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1666, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1667, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1668, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1669, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1670, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1671, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1672, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1673, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1674, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1675, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1676, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1677, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1678, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1679, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1680, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1681, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1682, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1683, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1684, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1685, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1686, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1687, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1688, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1689, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1690, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1691, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1692, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1693, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1694, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1695, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1696, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1697, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1698, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1699, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1700, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1701, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1702, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1703, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1704, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1705, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1706, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1707, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1708, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1709, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1710, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1711, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1712, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1713, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1714, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1715, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1716, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1717, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1718, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1719, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1720, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1721, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1722, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1723, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1724, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1725, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1726, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1727, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1728, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1729, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1730, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1731, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1732, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1733, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1734, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1735, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1736, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1737, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1738, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1739, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1740, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1741, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1742, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1743, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1744, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1745, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1746, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1747, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1748, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1749, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1750, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1751, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1752, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1753, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1754, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1755, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1756, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1757, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1758, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1759, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1760, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1761, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1762, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1763, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1764, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1765, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1766, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1767, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1768, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1769, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1770, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1771, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1772, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1773, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1774, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1775, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1776, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1777, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1778, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1779, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1780, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1781, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1782, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1783, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1784, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1785, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1786, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1787, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1788, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1789, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1790, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1791, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1792, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1793, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1794, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1795, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1796, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1797, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1798, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1799, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1800, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1801, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1802, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1803, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1804, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1805, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1806, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1807, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1808, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1809, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1810, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1811, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1812, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1813, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1814, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1815, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1816, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1817, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1818, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1819, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1820, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1821, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1822, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1823, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1824, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1825, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1826, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1827, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1828, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1829, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1830, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1831, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1832, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1833, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1834, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1836, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1837, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1838, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1839, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1840, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1841, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1842, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1843, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1844, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1845, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1846, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1847, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1848, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1849, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1850, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1851, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1852, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1853, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1854, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1855, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1856, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1857, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1858, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1860, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1861, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1862, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1863, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1864, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1865, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1866, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1867, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1868, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1869, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1870, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1871, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1872, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1873, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1874, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1875, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1876, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1877, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1878, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1879, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1880, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1881, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1882, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1883, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1884, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1885, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1886, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1887, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1888, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1889, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1890, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1891, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1892, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1893, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1894, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1895, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1896, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1897, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1898, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1899, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1900, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1901, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1902, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1903, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1904, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1905, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1906, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1907, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1908, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1909, 2, 0, 0, 0, 0, 0, 0.99951, 0, 220.0, 0, 1.1, 0.9 ],
[1910, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1911, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1912, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[1913, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1914, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1915, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1916, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1917, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1918, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1919, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1920, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1921, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1922, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1923, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1924, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1925, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1926, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1927, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1928, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1929, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1930, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1931, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1932, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1933, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1934, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1935, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1936, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1937, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1938, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1939, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1940, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1941, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1942, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1943, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1944, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1945, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1946, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1947, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1948, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1949, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1950, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1951, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1952, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1953, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1954, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1955, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1956, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1957, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1958, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1959, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1960, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1961, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1962, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1963, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1964, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1965, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1966, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1967, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1968, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1969, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1970, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1971, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1972, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1973, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1974, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1975, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1976, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1977, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1978, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1979, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1980, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1981, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1982, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1983, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1984, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1985, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1986, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1987, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1988, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1989, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1990, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1991, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1992, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1993, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1994, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1995, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1996, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1997, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1998, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1999, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2000, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2001, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2002, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2003, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2004, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2005, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2006, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2007, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2008, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[1, 1, 325.748587, 65.149717, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[2, 1, 0, 0, 0, 0, 0, 1.000012, 0, 380.0, 0, 1.1, 0.9 ],
[3, 1, 57.094965, 11.418993, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[4, 1, 93.894564, 18.778913, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[5, 1, 0, 0, 0, 0, 0, 1.00026, 0, 380.0, 0, 1.1, 0.9 ],
[6, 1, 275.713362, 55.142672, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[7, 1, 207.784304, 41.556861, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[8, 1, 173.85906, 34.771812, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[9, 1, 117.578165, 23.515633, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[10, 1, 0, 0, 0, 0, 0, 1.000518, 0, 380.0, 0, 1.1, 0.9 ],
[11, 1, 103.018516, 20.603703, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[12, 1, 0, 0, 0, 0, 0, 1.00057, 0, 380.0, 0, 1.1, 0.9 ],
[13, 1, 0, 0, 0, 0, 0, 1.000425, 0, 380.0, 0, 1.1, 0.9 ],
[14, 1, 246.382498, 49.2765, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[15, 1, 0, 0, 0, 0, 0, 1.000581, 0, 380.0, 0, 1.1, 0.9 ],
[16, 1, 420.196361, 84.039272, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[17, 1, 98.967281, 19.793456, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[18, 1, 0, 0, 0, 0, 0, 1.002692, 0, 380.0, 0, 1.1, 0.9 ],
[19, 1, 244.510845, 48.902169, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[20, 1, 0, 0, 0, 0, 0, 0.998777, 0, 380.0, 0, 1.1, 0.9 ],
[21, 1, 1051.434139, 210.286828, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[22, 1, 0, 0, 0, 0, 0, 1.000461, 0, 380.0, 0, 1.1, 0.9 ],
[23, 1, 137.668379, 27.533676, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[24, 1, 0, 0, 0, 0, 0, 0.999996, 0, 380.0, 0, 1.1, 0.9 ],
[25, 1, 65.847745, 13.169549, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[26, 1, 0, 0, 0, 0, 0, 1.000752, 0, 380.0, 0, 1.1, 0.9 ],
[27, 1, 80.82993, 16.165986, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[28, 1, 238.828227, 47.765645, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[29, 1, 87.72658, 17.545316, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[30, 1, 0, 0, 0, 0, 0, 0.99974, 0, 380.0, 0, 1.1, 0.9 ],
[31, 1, 172.643645, 34.528729, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[32, 1, 0, 0, 0, 0, 0, 0.999876, 0, 380.0, 0, 1.1, 0.9 ],
[33, 1, 216.462687, 43.292537, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[34, 1, 42.945181, 8.589036, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[35, 1, 2.843198, 0.56864, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[36, 1, 9.41342, 1.882684, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[37, 1, 0, 0, 0, 0, 0, 1.003518, 0, 380.0, 0, 1.1, 0.9 ],
[38, 1, 226.790299, 45.35806, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[39, 1, 74.262139, 14.852428, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[40, 1, 77.569126, 15.513825, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[41, 1, 83.36923, 16.673846, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[42, 1, 0, 0, 0, 0, 0, 1.001382, 0, 380.0, 0, 1.1, 0.9 ],
[43, 1, 127.850472, 25.570094, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[44, 1, 163.565722, 32.713144, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[45, 1, 86.824343, 17.364869, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[46, 1, 0, 0, 0, 0, 0, 1.000154, 0, 380.0, 0, 1.1, 0.9 ],
[47, 1, 377.519214, 75.503843, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[48, 1, 259.494186, 51.898837, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[49, 1, 65.638937, 13.127787, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[50, 1, 95.579153, 19.115831, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[51, 1, 123.864343, 24.772869, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[52, 1, 0, 0, 0, 0, 0, 1.000109, 0, 380.0, 0, 1.1, 0.9 ],
[53, 1, 187.944302, 37.58886, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[54, 1, 95.486648, 19.09733, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[55, 1, 93.644497, 18.728899, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[56, 1, 0, 0, 0, 0, 0, 0.999658, 0, 380.0, 0, 1.1, 0.9 ],
[57, 1, 111.782276, 22.356455, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[58, 1, 256.054306, 51.210861, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[59, 1, 73.130675, 14.626135, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[60, 1, 38.556521, 7.711304, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[61, 1, 0, 0, 0, 0, 0, 0.999552, 0, 380.0, 0, 1.1, 0.9 ],
[62, 1, 293.946406, 58.789281, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[63, 1, 173.514047, 34.702809, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[64, 1, 1841.335671, 368.267134, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[65, 1, 6.135361, 1.227072, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[66, 1, 194.668019, 38.933604, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[67, 1, 417.595693, 83.519139, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[68, 1, 0, 0, 0, 0, 0, 0.998236, 0, 380.0, 0, 1.1, 0.9 ],
[69, 1, 0, 0, 0, 0, 0, 0.999783, 0, 380.0, 0, 1.1, 0.9 ],
[70, 1, 789.995804, 157.999161, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[71, 1, 183.584849, 36.71697, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[72, 1, 300.686791, 60.137358, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[73, 1, 96.261172, 19.252234, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[74, 1, 0, 0, 0, 0, 0, 1.001507, 0, 380.0, 0, 1.1, 0.9 ],
[75, 1, 119.975301, 23.99506, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[76, 1, 115.802488, 23.160498, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[77, 1, 112.162624, 22.432525, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[78, 1, 0, 0, 0, 0, 0, 1.000176, 0, 380.0, 0, 1.1, 0.9 ],
[79, 1, 115.816553, 23.163311, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[80, 1, 123.01505, 24.60301, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[81, 1, 138.867238, 27.773448, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[82, 1, 4.621583, 0.924317, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[83, 1, 309.217998, 61.8436, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[84, 1, 30.440604, 6.088121, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[85, 1, 105.562105, 21.112421, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[86, 1, 0, 0, 0, 0, 0, 1.00001, 0, 380.0, 0, 1.1, 0.9 ],
[87, 1, 0, 0, 0, 0, 0, 1.000289, 0, 380.0, 0, 1.1, 0.9 ],
[88, 1, 85.202609, 17.040522, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[89, 1, 105.706878, 21.141376, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[90, 1, 122.086777, 24.417355, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[91, 1, 42.406867, 8.481373, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[92, 1, 46.280769, 9.256154, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[93, 1, 45.392163, 9.078433, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[94, 1, 0, 0, 0, 0, 0, 1.00115, 0, 380.0, 0, 1.1, 0.9 ],
[95, 1, 0, 0, 0, 0, 0, 1.0007, 0, 380.0, 0, 1.1, 0.9 ],
[96, 1, 0, 0, 0, 0, 0, 0.999998, 0, 380.0, 0, 1.1, 0.9 ],
[97, 1, 6.384069, 1.276814, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[98, 1, 117.377345, 23.475469, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[99, 1, 0, 0, 0, 0, 0, 1.000519, 0, 380.0, 0, 1.1, 0.9 ],
[100, 1, 0, 0, 0, 0, 0, 1.002126, 0, 380.0, 0, 1.1, 0.9 ],
[101, 1, 83.11513, 16.623026, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[102, 1, 160.873209, 32.174642, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[103, 1, 188.09191, 37.618382, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[104, 1, 0, 0, 0, 0, 0, 1.000066, 0, 380.0, 0, 1.1, 0.9 ],
[105, 1, 0, 0, 0, 0, 0, 1.000146, 0, 380.0, 0, 1.1, 0.9 ],
[106, 1, 0, 0, 0, 0, 0, 0.999963, 0, 380.0, 0, 1.1, 0.9 ],
[107, 1, 0, 0, 0, 0, 0, 1.000005, 0, 380.0, 0, 1.1, 0.9 ],
[108, 1, 132.675911, 26.535182, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[109, 1, 53.718212, 10.743642, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[110, 1, 69.728393, 13.945679, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[111, 1, 122.880269, 24.576054, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[112, 1, 62.192906, 12.438581, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[113, 1, 98.03855, 19.60771, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[114, 1, 144.38681, 28.877362, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[115, 1, 93.077688, 18.615538, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[116, 1, 155.75271, 31.150542, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[117, 1, 0, 0, 0, 0, 0, 1.000162, 0, 380.0, 0, 1.1, 0.9 ],
[118, 1, 241.160786, 48.232157, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[119, 1, 46.746863, 9.349373, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[120, 1, 0, 0, 0, 0, 0, 1.00083, 0, 380.0, 0, 1.1, 0.9 ],
[121, 1, 63.482261, 12.696452, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[122, 1, 55.578075, 11.115615, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[123, 1, 0, 0, 0, 0, 0, 1.000079, 0, 380.0, 0, 1.1, 0.9 ],
[124, 1, 0, 0, 0, 0, 0, 1.000003, 0, 380.0, 0, 1.1, 0.9 ],
[125, 1, 0, 0, 0, 0, 0, 0.999463, 0, 380.0, 0, 1.1, 0.9 ],
[126, 1, 291.397229, 58.279446, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[127, 1, 225.280714, 45.056143, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[128, 1, 0, 0, 0, 0, 0, 1.000968, 0, 380.0, 0, 1.1, 0.9 ],
[129, 1, 0, 0, 0, 0, 0, 0.999994, 0, 380.0, 0, 1.1, 0.9 ],
[130, 1, 310.621123, 62.124225, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[131, 1, 68.584875, 13.716975, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[132, 1, 178.584646, 35.716929, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[133, 1, 59.81886, 11.963772, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[134, 1, 59.573903, 11.914781, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[135, 1, 59.652888, 11.930578, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[136, 1, 57.787513, 11.557503, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[137, 1, 46.224691, 9.244938, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[138, 1, 0, 0, 0, 0, 0, 1.000239, 0, 380.0, 0, 1.1, 0.9 ],
[139, 1, 90.549485, 18.109897, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[140, 1, 62.618846, 12.523769, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[141, 1, 74.19228, 14.838456, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[142, 1, 81.637993, 16.327599, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[143, 1, 0, 0, 0, 0, 0, 0.999985, 0, 380.0, 0, 1.1, 0.9 ],
[144, 1, 74.363771, 14.872754, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[145, 1, 216.326177, 43.265235, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[146, 1, 278.885136, 55.777027, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[147, 1, 170.940166, 34.188033, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[148, 1, 241.227956, 48.245591, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[149, 1, 155.517918, 31.103584, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[150, 1, 203.044789, 40.608958, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[151, 1, 47.847194, 9.569439, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[152, 1, 99.325814, 19.865163, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[153, 1, 177.213406, 35.442681, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[154, 1, 182.033335, 36.406667, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[155, 1, 189.603806, 37.920761, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[156, 1, 0, 0, 0, 0, 0, 0.999987, 0, 380.0, 0, 1.1, 0.9 ],
[157, 1, 0, 0, 0, 0, 0, 1.001031, 0, 380.0, 0, 1.1, 0.9 ],
[158, 1, 49.954288, 9.990858, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[159, 1, 0, 0, 0, 0, 0, 1.001191, 0, 380.0, 0, 1.1, 0.9 ],
[160, 1, 0, 0, 0, 0, 0, 1.000005, 0, 380.0, 0, 1.1, 0.9 ],
[161, 1, 155.079459, 31.015892, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[162, 1, 231.797832, 46.359566, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[163, 1, 46.357377, 9.271475, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[164, 1, 46.543808, 9.308762, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[165, 1, 0, 0, 0, 0, 0, 1.000008, 0, 380.0, 0, 1.1, 0.9 ],
[166, 1, 54.417242, 10.883448, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[167, 1, 76.551361, 15.310272, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[168, 1, 52.245327, 10.449065, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[169, 1, 178.850819, 35.770164, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[170, 1, 134.391309, 26.878262, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[171, 1, 114.702931, 22.940586, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[172, 1, 56.293074, 11.258615, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[173, 1, 53.776547, 10.755309, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[174, 1, 80.699328, 16.139866, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[175, 1, 53.741302, 10.74826, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[176, 1, 187.268482, 37.453696, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[177, 1, 30.536855, 6.107371, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[178, 1, 161.730672, 32.346134, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[179, 1, 59.592171, 11.918434, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[180, 1, 52.383043, 10.476609, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[181, 1, 39.537212, 7.907442, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[182, 1, 1.791054, 0.358211, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[183, 1, 536.118855, 107.223771, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[184, 1, 0, 0, 0, 0, 0, 0.999412, 0, 380.0, 0, 1.1, 0.9 ],
[185, 1, 114.645917, 22.929183, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[186, 1, 61.736231, 12.347246, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[187, 1, 36.109408, 7.221882, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[188, 1, 53.741302, 10.74826, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[189, 1, 197.196893, 39.439379, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[190, 1, 260.829785, 52.165957, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[191, 1, 0, 0, 0, 0, 0, 1.000009, 0, 380.0, 0, 1.1, 0.9 ],
[192, 1, 62.815713, 12.563143, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[193, 1, 53.654613, 10.730923, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[194, 1, 37.038638, 7.407728, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[195, 1, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[196, 1, 51.963051, 10.39261, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[197, 1, 82.328556, 16.465711, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[198, 1, 48.717631, 9.743526, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[199, 1, 62.722328, 12.544466, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[200, 1, 53.742549, 10.74851, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[201, 1, 0, 0, 0, 0, 0, 1.000603, 0, 380.0, 0, 1.1, 0.9 ],
[202, 1, 55.070857, 11.014171, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[203, 1, 7.256079, 1.451216, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[204, 1, 212.674227, 42.534845, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[205, 1, 106.346688, 21.269338, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[206, 1, 51.038978, 10.207796, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[207, 1, 151.767938, 30.353588, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[208, 1, 44.689673, 8.937935, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[209, 1, 62.103028, 12.420606, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[210, 1, 71.344757, 14.268951, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[211, 1, 250.721465, 50.144293, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[212, 1, 62.839799, 12.56796, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[213, 1, 294.578929, 58.915786, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[214, 1, 198.21428, 39.642856, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[215, 1, 419.133986, 83.826797, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[216, 1, 141.326419, 28.265284, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[217, 1, 45.286003, 9.057201, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[218, 1, 137.965387, 27.593077, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[219, 1, 221.727192, 44.345438, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[220, 1, 0, 0, 0, 0, 0, 0.9995, 0, 380.0, 0, 1.1, 0.9 ],
[221, 1, 126.484966, 25.296993, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[222, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[223, 1, 125.354431, 25.070886, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[224, 1, 145.769935, 29.153987, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[225, 1, 261.73828, 52.347656, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[226, 1, 91.433269, 18.286654, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[227, 1, 113.907309, 22.781462, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[228, 1, 111.682638, 22.336528, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[229, 1, 247.134629, 49.426926, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[230, 1, 59.276997, 11.855399, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[231, 1, 0, 0, 0, 0, 0, 1.0008, 0, 380.0, 0, 1.1, 0.9 ],
[232, 1, 0, 0, 0, 0, 0, 0.999985, 0, 380.0, 0, 1.1, 0.9 ],
[233, 1, 0, 0, 0, 0, 0, 0.999572, 0, 380.0, 0, 1.1, 0.9 ],
[234, 1, 211.151257, 42.230251, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[235, 1, 68.663575, 13.732715, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[236, 1, 0, 0, 0, 0, 0, 0.999972, 0, 380.0, 0, 1.1, 0.9 ],
[237, 1, 0.568269, 0.113654, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[238, 1, 77.694084, 15.538817, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[239, 1, 107.344119, 21.468824, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[240, 1, 677.106115, 135.421223, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[241, 1, 501.035004, 100.207001, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[242, 1, 182.435912, 36.487182, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[243, 1, 147.189401, 29.43788, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[244, 1, 175.365238, 35.073048, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[245, 1, 0, 0, 0, 0, 0, 1.001868, 0, 380.0, 0, 1.1, 0.9 ],
[246, 1, 0, 0, 0, 0, 0, 1.000314, 0, 380.0, 0, 1.1, 0.9 ],
[247, 1, 34.80024, 6.960048, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[248, 1, 0, 0, 0, 0, 0, 1.000002, 0, 380.0, 0, 1.1, 0.9 ],
[249, 1, 0, 0, 0, 0, 0, 1.000002, 0, 380.0, 0, 1.1, 0.9 ],
[250, 1, 0, 0, 0, 0, 0, 1.000003, 0, 380.0, 0, 1.1, 0.9 ],
[251, 1, 86.366303, 17.273261, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[252, 1, 221.490058, 44.298012, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[253, 1, 97.242587, 19.448517, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[254, 1, 31.047944, 6.209589, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[255, 1, 152.691204, 30.538241, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[256, 1, 175.110241, 35.022048, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[257, 1, 84.512076, 16.902415, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[258, 1, 275.414649, 55.08293, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[259, 1, 0, 0, 0, 0, 0, 0.999267, 0, 380.0, 0, 1.1, 0.9 ],
[260, 1, 171.407259, 34.281452, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[261, 1, 0, 0, 0, 0, 0, 1.001914, 0, 380.0, 0, 1.1, 0.9 ],
[262, 1, 0, 0, 0, 0, 0, 1.000151, 0, 380.0, 0, 1.1, 0.9 ],
[263, 1, 245.883489, 49.176698, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[264, 1, 318.309439, 63.661888, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[265, 1, 0, 0, 0, 0, 0, 1.000004, 0, 380.0, 0, 1.1, 0.9 ],
[266, 1, 153.403945, 30.680789, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[267, 1, 194.022708, 38.804542, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[268, 1, 67.469917, 13.493983, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[269, 1, 54.180873, 10.836175, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[270, 1, 0, 0, 0, 0, 0, 1.000003, 0, 380.0, 0, 1.1, 0.9 ],
[271, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[272, 1, 1.105489, 0.221098, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[273, 1, 151.176192, 30.235238, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[274, 1, 293.866602, 58.77332, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[275, 1, 55.013432, 11.002686, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[276, 1, 214.456344, 42.891269, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[277, 1, 0, 0, 0, 0, 0, 0.999517, 0, 380.0, 0, 1.1, 0.9 ],
[278, 1, 167.418237, 33.483647, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[279, 1, 0, 0, 0, 0, 0, 0.999817, 0, 380.0, 0, 1.1, 0.9 ],
[280, 1, 0, 0, 0, 0, 0, 0.999266, 0, 380.0, 0, 1.1, 0.9 ],
[281, 1, 221.13944, 44.227888, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[282, 1, 312.725416, 62.545083, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[283, 1, 125.353926, 25.070785, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[284, 1, 190.167711, 38.033542, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[285, 1, 84.808128, 16.961626, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[286, 1, 177.744137, 35.548827, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[287, 1, 109.245452, 21.84909, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[288, 1, 70.265914, 14.053183, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[289, 1, 110.507903, 22.101581, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[290, 1, 0, 0, 0, 0, 0, 1.004495, 0, 380.0, 0, 1.1, 0.9 ],
[291, 1, 72.723946, 14.544789, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[292, 1, 143.371926, 28.674385, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[293, 1, 126.359101, 25.27182, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[294, 1, 33.672791, 6.734558, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[295, 1, 70.455207, 14.091041, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[296, 1, 200.022498, 40.0045, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[297, 1, 210.22589, 42.045178, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[298, 1, 111.003448, 22.20069, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[299, 1, 107.506102, 21.50122, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[300, 1, 292.875731, 58.575146, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[301, 1, 0, 0, 0, 0, 0, 0.999437, 0, 380.0, 0, 1.1, 0.9 ],
[302, 1, 246.711976, 49.342395, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[303, 1, 126.718426, 25.343685, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[304, 1, 108.813201, 21.76264, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[305, 1, 0, 0, 0, 0, 0, 0.99961, 0, 380.0, 0, 1.1, 0.9 ],
[306, 1, 0, 0, 0, 0, 0, 1.001597, 0, 380.0, 0, 1.1, 0.9 ],
[307, 1, 129.062569, 25.812514, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[308, 1, 159.116952, 31.82339, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[309, 1, 260.337709, 52.067542, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[310, 1, 0, 0, 0, 0, 0, 0.999901, 0, 380.0, 0, 1.1, 0.9 ],
[311, 1, 221.133187, 44.226637, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[312, 1, 99.449747, 19.889949, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[313, 1, 0, 0, 0, 0, 0, 1.000862, 0, 380.0, 0, 1.1, 0.9 ],
[314, 1, 308.032014, 61.606403, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[315, 1, 0, 0, 0, 0, 0, 1.00159, 0, 380.0, 0, 1.1, 0.9 ],
[316, 1, 120.690947, 24.138189, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[317, 1, 162.50594, 32.501188, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[318, 1, 267.057251, 53.41145, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[319, 1, 9.567058, 1.913412, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[320, 1, 0, 0, 0, 0, 0, 0.999996, 0, 380.0, 0, 1.1, 0.9 ],
[321, 1, 226.312454, 45.262491, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[322, 1, 28.811032, 5.762206, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[323, 1, 2.997543, 0.599509, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[324, 1, 529.89302, 105.978604, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[325, 1, 172.614935, 34.522987, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[326, 1, 13.995083, 2.799017, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[327, 1, 120.437246, 24.087449, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[328, 1, 205.243578, 41.048716, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[329, 1, 308.704638, 61.740928, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[330, 1, 0, 0, 0, 0, 0, 1.002351, 0, 380.0, 0, 1.1, 0.9 ],
[331, 1, 24.510098, 4.90202, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[332, 1, 0, 0, 0, 0, 0, 1.00029, 0, 380.0, 0, 1.1, 0.9 ],
[333, 1, 257.534094, 51.506819, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[334, 1, 0, 0, 0, 0, 0, 1.000078, 0, 380.0, 0, 1.1, 0.9 ],
[335, 1, 262.832973, 52.566595, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[336, 1, 0, 0, 0, 0, 0, 0.998883, 0, 380.0, 0, 1.1, 0.9 ],
[337, 1, 104.54725, 20.90945, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[338, 1, 283.756092, 56.751218, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[339, 1, 175.499218, 35.099844, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[340, 1, 148.381042, 29.676208, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[341, 1, 134.139426, 26.827885, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[342, 1, 232.687766, 46.537553, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[343, 1, 127.655901, 25.53118, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[344, 1, 320.06392, 64.012784, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[345, 1, 349.977293, 69.995459, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[346, 1, 347.438228, 69.487646, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[347, 1, 121.505179, 24.301036, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[348, 1, 317.622541, 63.524508, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[349, 1, 0, 0, 0, 0, 0, 1.002227, 0, 380.0, 0, 1.1, 0.9 ],
[350, 1, 166.629421, 33.325884, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[351, 1, 0, 0, 0, 0, 0, 1.002311, 0, 380.0, 0, 1.1, 0.9 ],
[352, 1, 1102.969172, 220.593834, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[353, 1, 3.315894, 0.663179, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[354, 1, 22.527896, 4.505579, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[355, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[356, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[357, 1, 0.05647, 0.011294, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[358, 1, 0, 0, 0, 0, 0, 1.001145, 0, 380.0, 0, 1.1, 0.9 ],
[359, 1, 3.297102, 0.65942, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[360, 1, 0, 0, 0, 0, 0, 1.000743, 0, 380.0, 0, 1.1, 0.9 ],
[361, 1, 84.386359, 16.877272, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[362, 1, 240.544798, 48.10896, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[363, 1, 354.159899, 70.83198, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[364, 1, 83.559152, 16.71183, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[365, 1, 74.998776, 14.999755, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[366, 1, 148.647335, 29.729467, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[367, 1, 71.849947, 14.369989, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[368, 1, 35.380095, 7.076019, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[369, 1, 29.073011, 5.814602, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[370, 1, 85.591776, 17.118355, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[371, 1, 430.66013, 86.132026, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[372, 1, 249.745997, 49.949199, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[373, 1, 168.52878, 33.705756, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[374, 1, 86.418705, 17.283741, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[375, 1, 283.483358, 56.696672, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[376, 1, 310.927852, 62.18557, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[377, 1, 222.495169, 44.499034, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[378, 1, 222.066912, 44.413382, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[379, 1, 76.536953, 15.307391, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[380, 1, 0, 0, 0, 0, 0, 1.001552, 0, 380.0, 0, 1.1, 0.9 ],
[381, 1, 255.944236, 51.188847, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[382, 1, 0, 0, 0, 0, 0, 1.000904, 0, 380.0, 0, 1.1, 0.9 ],
[383, 1, 0, 0, 0, 0, 0, 0.999115, 0, 380.0, 0, 1.1, 0.9 ],
[384, 1, 90.316363, 18.063273, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[385, 1, 113.996976, 22.799395, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[386, 1, 91.593152, 18.31863, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[387, 1, 186.533196, 37.306639, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[388, 1, 1001.680535, 200.336107, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[389, 1, 0, 0, 0, 0, 0, 0.999916, 0, 380.0, 0, 1.1, 0.9 ],
[390, 1, 82.706419, 16.541284, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[391, 1, 94.209664, 18.841933, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[392, 1, 180.787399, 36.15748, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[393, 1, 225.769637, 45.153927, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[394, 1, 81.202848, 16.24057, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[395, 1, 112.54213, 22.508426, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[396, 1, 79.712439, 15.942488, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[397, 1, 639.205952, 127.84119, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[398, 1, 276.853905, 55.370781, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[399, 1, 117.959928, 23.591986, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[400, 1, 62.847073, 12.569415, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[401, 1, 0, 0, 0, 0, 0, 1.000689, 0, 380.0, 0, 1.1, 0.9 ],
[402, 1, 0, 0, 0, 0, 0, 1.000468, 0, 380.0, 0, 1.1, 0.9 ],
[403, 1, 31.205033, 6.241007, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[404, 1, 109.937263, 21.987453, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[405, 1, 828.818277, 165.763655, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[406, 1, 62.797316, 12.559463, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[407, 1, 124.308664, 24.861733, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[408, 1, 359.430945, 71.886189, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[409, 1, 0, 0, 0, 0, 0, 0.999942, 0, 380.0, 0, 1.1, 0.9 ],
[410, 1, 46.535489, 9.307098, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[411, 1, 44.001211, 8.800242, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[412, 1, 3.090603, 0.618121, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[413, 1, 154.2885, 30.8577, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[414, 1, 13.100763, 2.620153, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[415, 1, 0, 0, 0, 0, 0, 1.000239, 0, 380.0, 0, 1.1, 0.9 ],
[416, 1, 186.568647, 37.313729, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[417, 1, 7.300075, 1.460015, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[418, 1, 152.129169, 30.425834, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[419, 1, 81.311959, 16.262392, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[420, 1, 81.864619, 16.372924, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[421, 1, 117.923897, 23.584779, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[422, 1, 86.394999, 17.279, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[423, 1, 181.448589, 36.289718, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[424, 1, 13.081976, 2.616395, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[425, 1, 107.436029, 21.487206, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[426, 1, 8.901406, 1.780281, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[427, 1, 74.807559, 14.961512, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[428, 1, 33.541388, 6.708278, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[429, 1, 378.506604, 75.701321, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[430, 1, 201.617449, 40.32349, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[431, 1, 134.824684, 26.964937, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[432, 1, 157.601785, 31.520357, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[433, 1, 80.561831, 16.112366, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[434, 1, 41.928301, 8.38566, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[435, 1, 167.686807, 33.537361, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[436, 1, 89.525173, 17.905035, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[437, 1, 20.388419, 4.077684, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[438, 1, 54.716933, 10.943387, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[439, 1, 101.875856, 20.375171, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[440, 1, 86.095509, 17.219102, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[441, 1, 66.003743, 13.200749, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[442, 1, 87.345295, 17.469059, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[443, 1, 189.372821, 37.874564, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[444, 1, 0, 0, 0, 0, 0, 0.999997, 0, 380.0, 0, 1.1, 0.9 ],
[445, 1, 86.048822, 17.209764, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[446, 1, 39.900067, 7.980013, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[447, 1, 75.857823, 15.171565, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[448, 1, 55.747797, 11.149559, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[449, 1, 281.099266, 56.219853, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[450, 1, 172.019337, 34.403867, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[451, 1, 73.504711, 14.700942, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[452, 1, 0, 0, 0, 0, 0, 0.999998, 0, 380.0, 0, 1.1, 0.9 ],
[453, 1, 49.262417, 9.852483, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[454, 1, 34.368712, 6.873742, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[455, 1, 56.035293, 11.207059, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[456, 1, 56.035293, 11.207059, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[457, 1, 171.846191, 34.369238, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[458, 1, 163.447396, 32.689479, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[459, 1, 198.921561, 39.784312, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[460, 1, 261.423915, 52.284783, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[461, 1, 271.93756, 54.387512, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[462, 1, 83.187109, 16.637422, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[463, 1, 42.625596, 8.525119, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[464, 1, 42.67712, 8.535424, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[465, 1, 68.935213, 13.787043, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[466, 1, 55.966672, 11.193334, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[467, 1, 51.647972, 10.329594, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[468, 1, 84.682258, 16.936452, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[469, 1, 52.475899, 10.49518, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[470, 1, 133.635974, 26.727195, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[471, 1, 131.576667, 26.315333, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[472, 1, 46.021552, 9.20431, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[473, 1, 84.506543, 16.901309, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[474, 1, 43.646746, 8.729349, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[475, 1, 42.832665, 8.566533, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[476, 1, 48.407958, 9.681592, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[477, 1, 78.119975, 15.623995, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[478, 1, 98.132926, 19.626585, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[479, 1, 177.838657, 35.567731, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[480, 1, 77.949906, 15.589981, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[481, 1, 67.695306, 13.539061, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[482, 1, 76.865108, 15.373022, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[483, 1, 65.368141, 13.073628, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[484, 1, 51.245443, 10.249089, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[485, 1, 76.547129, 15.309426, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[486, 1, 704.196192, 140.839238, 0, 0, 0, 0.99951, 0, 220.0, 0, 1.1, 0.9 ],
[487, 1, 178.44006, 35.688012, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[488, 1, 514.1666, 102.83332, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[489, 1, 135.327186, 27.065437, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9 ],
[490, 1, 42.108774, 8.421755, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[491, 1, 57.900104, 11.580021, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[492, 1, 90.290026, 18.058005, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[493, 1, 116.373036, 23.274607, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[494, 1, 159.050014, 31.810003, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[495, 1, 125.200788, 25.040158, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[496, 1, 8.868181, 1.773636, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[497, 1, 1108.963227, 221.792645, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[498, 1, 52.009376, 10.401875, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[499, 1, 72.596567, 14.519313, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[500, 1, 39.745767, 7.949153, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[501, 1, 67.242984, 13.448597, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[502, 1, 265.394132, 53.078826, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[503, 1, 81.27987, 16.255974, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[504, 1, 53.225877, 10.645175, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[505, 1, 377.519214, 75.503843, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[506, 1, 118.498636, 23.699727, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[507, 1, 112.71728, 22.543456, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[508, 1, 163.866255, 32.773251, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[509, 1, 215.943222, 43.188644, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[510, 1, 136.424234, 27.284847, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[511, 1, 119.003612, 23.800722, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[512, 1, 78.609233, 15.721847, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[513, 1, 43.305299, 8.66106, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[514, 1, 107.782698, 21.55654, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[515, 1, 96.14857, 19.229714, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[516, 1, 107.567625, 21.513525, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[517, 1, 50.527088, 10.105418, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[518, 1, 284.571762, 56.914352, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[519, 1, 28.007071, 5.601414, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[520, 1, 113.075388, 22.615078, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[521, 1, 102.145474, 20.429095, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[522, 1, 87.457782, 17.491556, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[523, 1, 47.077529, 9.415506, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[524, 1, 136.642116, 27.328423, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[525, 1, 162.787043, 32.557409, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[526, 1, 49.35397, 9.870794, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[527, 1, 54.18719, 10.837438, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[528, 1, 118.26861, 23.653722, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[529, 1, 151.602845, 30.320569, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[530, 1, 64.243093, 12.848619, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[531, 1, 65.318252, 13.06365, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[532, 1, 62.694136, 12.538827, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[533, 1, 56.181511, 11.236302, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[534, 1, 154.980048, 30.99601, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[535, 1, 194.025074, 38.805015, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[536, 1, 152.933571, 30.586714, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[537, 1, 50.874697, 10.174939, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[538, 1, 38.030453, 7.606091, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[539, 1, 40.352648, 8.07053, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[540, 1, 36.335787, 7.267157, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[541, 1, 93.858474, 18.771695, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[542, 1, 128.932532, 25.786506, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[543, 1, 70.422315, 14.084463, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[544, 1, 131.162551, 26.23251, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[545, 1, 282.414482, 56.482896, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[546, 1, 141.550404, 28.310081, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[547, 1, 182.963197, 36.592639, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[548, 1, 59.225944, 11.845189, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[549, 1, 50.643246, 10.128649, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[550, 1, 41.78929, 8.357858, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[551, 1, 40.283868, 8.056774, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[552, 1, 200.04515, 40.00903, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[553, 1, 1.384003, 0.276801, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[554, 1, 202.666621, 40.533324, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[555, 1, 77.218226, 15.443645, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[556, 1, 119.459166, 23.891833, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[557, 1, 253.807751, 50.76155, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[558, 1, 149.659946, 29.931989, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[559, 1, 80.096562, 16.019312, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[560, 1, 125.129779, 25.025956, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[561, 1, 68.617518, 13.723504, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[562, 1, 187.457919, 37.491584, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[563, 1, 131.798194, 26.359639, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[564, 1, 260.235901, 52.04718, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[565, 1, 196.360882, 39.272176, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[566, 1, 0.315398, 0.06308, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[567, 1, 319.193421, 63.838684, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[568, 1, 295.176685, 59.035337, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[569, 1, 207.688389, 41.537678, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[570, 1, 324.238974, 64.847795, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[571, 1, 238.729406, 47.745881, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[572, 1, 421.078814, 84.215763, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[573, 1, 122.570522, 24.514104, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[574, 1, 233.543651, 46.70873, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[575, 1, 4.388704, 0.877741, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[576, 1, 283.987513, 56.797503, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[577, 1, 313.066628, 62.613326, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[578, 1, 298.905533, 59.781107, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[579, 1, 109.048896, 21.809779, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[580, 1, 22.702358, 4.540472, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[581, 1, 0.13045, 0.02609, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[582, 1, 82.137246, 16.427449, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[583, 1, 94.208402, 18.84168, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[584, 1, 54.052269, 10.810454, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ],
[585, 1, 93.84139, 18.768278, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9 ]
])
ppc["gen"] = array([
[586, 272.0, 0, 9999, -9999, 1.0, 100, 1, 272.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[589, 63.1, 0, 9999, -9999, 1.0, 100, 1, 63.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[590, 38.0, 0, 9999, -9999, 1.0, 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[593, 11.1, 0, 9999, -9999, 1.0, 100, 1, 11.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[594, 19.0, 0, 9999, -9999, 1.0, 100, 1, 19.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[595, 1115.083703, 0, 9999, -9999, 1.0, 100, 1, 4730.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[597, 95.0, 0, 9999, -9999, 1.0, 100, 1, 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[598, 12.0, 0, 9999, -9999, 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[599, 9.3, 0, 9999, -9999, 1.0, 100, 1, 9.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[600, 16.9, 0, 9999, -9999, 1.0, 100, 1, 16.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[601, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[602, 24.6, 0, 9999, -9999, 1.0, 100, 1, 24.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[603, 837.82977, 0, 9999, -9999, 1.0, 100, 1, 3455.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[607, 1800.0, 0, 9999, -9999, 1.0, 100, 1, 1800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[608, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[609, 36.4, 0, 9999, -9999, 1.0, 100, 1, 36.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[610, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[612, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[613, 85.0, 0, 9999, -9999, 1.0, 100, 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[614, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[616, 29.0, 0, 9999, -9999, 1.0, 100, 1, 29.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[617, 137.0, 0, 9999, -9999, 1.0, 100, 1, 137.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[618, 33.4, 0, 9999, -9999, 1.0, 100, 1, 33.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[619, 118.0, 0, 9999, -9999, 1.0, 100, 1, 118.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[621, 765.0, 0, 9999, -9999, 1.0, 100, 1, 765.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[623, 760.0, 0, 9999, -9999, 1.0, 100, 1, 760.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[624, 27.0, 0, 9999, -9999, 1.0, 100, 1, 27.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[628, 449.0, 0, 9999, -9999, 1.0, 100, 1, 449.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[629, 75.3, 0, 9999, -9999, 1.0, 100, 1, 75.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[631, 79.8, 0, 9999, -9999, 1.0, 100, 1, 79.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[632, 45.1, 0, 9999, -9999, 1.0, 100, 1, 45.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[637, 53.7, 0, 9999, -9999, 1.0, 100, 1, 53.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[638, 128.7, 0, 9999, -9999, 1.0, 100, 1, 128.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[639, 15.8, 0, 9999, -9999, 1.0, 100, 1, 15.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[640, 12.0, 0, 9999, -9999, 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[641, 12.6, 0, 9999, -9999, 1.0, 100, 1, 12.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[642, 28.9, 0, 9999, -9999, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[643, 857.0, 0, 9999, -9999, 1.0, 100, 1, 857.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[646, 103.0, 0, 9999, -9999, 1.0, 100, 1, 103.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[647, 14.0, 0, 9999, -9999, 1.0, 100, 1, 14.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[650, 1324.5, 0, 9999, -9999, 1.0, 100, 1, 1324.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[652, 46.9, 0, 9999, -9999, 1.0, 100, 1, 46.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[655, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[657, 38.0, 0, 9999, -9999, 1.0, 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[658, 95.0, 0, 9999, -9999, 1.0, 100, 1, 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[661, 32.7, 0, 9999, -9999, 1.0, 100, 1, 32.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[662, 9.2, 0, 9999, -9999, 1.0, 100, 1, 9.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[663, 15.0, 0, 9999, -9999, 1.0, 100, 1, 15.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[666, 28.9, 0, 9999, -9999, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[668, 766.0, 0, 9999, -9999, 1.0, 100, 1, 766.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[670, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[672, 33.1, 0, 9999, -9999, 1.0, 100, 1, 33.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[675, 10.6, 0, 9999, -9999, 1.0, 100, 1, 10.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[676, 370.0, 0, 9999, -9999, 1.0, 100, 1, 370.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[678, 1017.0, 0, 9999, -9999, 1.0, 100, 1, 1017.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[679, 547.278885, 0, 9999, -9999, 1.0, 100, 1, 695.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[681, 40.1, 0, 9999, -9999, 1.0, 100, 1, 40.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[683, 27.5, 0, 9999, -9999, 1.0, 100, 1, 27.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[687, 1329.0, 0, 9999, -9999, 1.0, 100, 1, 1329.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[689, 310.0, 0, 9999, -9999, 1.0, 100, 1, 310.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[691, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[693, 194.0, 0, 9999, -9999, 1.0, 100, 1, 194.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[694, 16.4, 0, 9999, -9999, 1.0, 100, 1, 16.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[695, 14.7, 0, 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[696, 721.0, 0, 9999, -9999, 1.0, 100, 1, 721.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[697, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[698, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[701, 47.2, 0, 9999, -9999, 1.0, 100, 1, 47.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[702, 73.4, 0, 9999, -9999, 1.0, 100, 1, 73.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[704, 508.0, 0, 9999, -9999, 1.0, 100, 1, 508.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[705, 17.0, 0, 9999, -9999, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[707, 34.0, 0, 9999, -9999, 1.0, 100, 1, 34.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[708, 7.8, 0, 9999, -9999, 1.0, 100, 1, 7.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[711, 102.08865, 0, 9999, -9999, 1.0, 100, 1, 176.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[713, 13.4, 0, 9999, -9999, 1.0, 100, 1, 13.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[714, 15.0, 0, 9999, -9999, 1.0, 100, 1, 15.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[716, 0.1, 0, 9999, -9999, 1.0, 100, 1, 0.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[717, 11.0, 0, 9999, -9999, 1.0, 100, 1, 11.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[719, 1347.602507, 0, 9999, -9999, 1.0, 100, 1, 1958.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[722, 20.7, 0, 9999, -9999, 1.0, 100, 1, 20.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[723, 19.7, 0, 9999, -9999, 1.0, 100, 1, 19.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[724, 12.1, 0, 9999, -9999, 1.0, 100, 1, 12.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[725, 800.0, 0, 9999, -9999, 1.0, 100, 1, 800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[727, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[728, 510.0, 0, 9999, -9999, 1.0, 100, 1, 510.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[730, 633.2, 0, 9999, -9999, 1.0, 100, 1, 633.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[731, 774.368631, 0, 9999, -9999, 1.0, 100, 1, 895.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[732, 14.6, 0, 9999, -9999, 1.0, 100, 1, 14.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[733, 396.6, 0, 9999, -9999, 1.0, 100, 1, 396.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[735, 84.8, 0, 9999, -9999, 1.0, 100, 1, 84.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[737, 28.0, 0, 9999, -9999, 1.0, 100, 1, 28.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[738, 138.5, 0, 9999, -9999, 1.0, 100, 1, 138.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[739, 59.9, 0, 9999, -9999, 1.0, 100, 1, 59.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[741, 214.0, 0, 9999, -9999, 1.0, 100, 1, 214.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[742, 9.0, 0, 9999, -9999, 1.0, 100, 1, 9.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[743, 1410.0, 0, 9999, -9999, 1.0, 100, 1, 1410.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[745, 42.0, 0, 9999, -9999, 1.0, 100, 1, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[746, 100.0, 0, 9999, -9999, 1.0, 100, 1, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[747, 12.5, 0, 9999, -9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[748, 110.0, 0, 9999, -9999, 1.0, 100, 1, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[749, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[750, 90.8, 0, 9999, -9999, 1.0, 100, 1, 90.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[753, 297.43075, 0, 9999, -9999, 1.0, 100, 1, 311.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[758, 18.5, 0, 9999, -9999, 1.0, 100, 1, 18.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[760, 342.451659, 0, 9999, -9999, 1.0, 100, 1, 794.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[761, 15.7, 0, 9999, -9999, 1.0, 100, 1, 15.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[762, 1105.0, 0, 9999, -9999, 1.0, 100, 1, 1105.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[763, 20.3, 0, 9999, -9999, 1.0, 100, 1, 20.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[765, 59.0, 0, 9999, -9999, 1.0, 100, 1, 59.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[767, 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[769, 43.3, 0, 9999, -9999, 1.0, 100, 1, 43.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[771, 690.0, 0, 9999, -9999, 1.0, 100, 1, 690.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[772, 18.8, 0, 9999, -9999, 1.0, 100, 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[774, 33.5, 0, 9999, -9999, 1.0, 100, 1, 33.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[776, 56.0, 0, 9999, -9999, 1.0, 100, 1, 56.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[777, 79.0, 0, 9999, -9999, 1.0, 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[778, 14.7, 0, 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[781, 981.561684, 0, 9999, -9999, 1.0, 100, 1, 1310.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[784, 967.134125, 0, 9999, -9999, 1.0, 100, 1, 1275.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[785, 3.0, 0, 9999, -9999, 1.0, 100, 1, 3.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[787, 778.0, 0, 9999, -9999, 1.0, 100, 1, 778.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[788, 875.0, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[789, 77.4, 0, 9999, -9999, 1.0, 100, 1, 77.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[790, 75.8, 0, 9999, -9999, 1.0, 100, 1, 75.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[791, 10.0, 0, 9999, -9999, 1.0, 100, 1, 10.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[792, 62.7, 0, 9999, -9999, 1.0, 100, 1, 62.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[795, 13.6, 0, 9999, -9999, 1.0, 100, 1, 13.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[798, 116.273516, 0, 9999, -9999, 1.0, 100, 1, 319.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[800, 36.5, 0, 9999, -9999, 1.0, 100, 1, 36.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[801, 50.0, 0, 9999, -9999, 1.0, 100, 1, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[802, 500.0, 0, 9999, -9999, 1.0, 100, 1, 500.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[805, 661.169352, 0, 9999, -9999, 1.0, 100, 1, 1410.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[806, 35.8, 0, 9999, -9999, 1.0, 100, 1, 35.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[808, 217.5, 0, 9999, -9999, 1.0, 100, 1, 217.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[809, 12.5, 0, 9999, -9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[810, 97.9, 0, 9999, -9999, 1.0, 100, 1, 97.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[811, 25.2, 0, 9999, -9999, 1.0, 100, 1, 25.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[814, 89.0, 0, 9999, -9999, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[815, 13.4, 0, 9999, -9999, 1.0, 100, 1, 13.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[816, 80.1, 0, 9999, -9999, 1.0, 100, 1, 80.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[817, 54.0, 0, 9999, -9999, 1.0, 100, 1, 54.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[818, 757.0, 0, 9999, -9999, 1.0, 100, 1, 757.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[821, 82.5, 0, 9999, -9999, 1.0, 100, 1, 82.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[822, 134.0, 0, 9999, -9999, 1.0, 100, 1, 134.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[825, 42.7, 0, 9999, -9999, 1.0, 100, 1, 42.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[826, 58.0, 0, 9999, -9999, 1.0, 100, 1, 58.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[829, 211.0, 0, 9999, -9999, 1.0, 100, 1, 211.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[830, 89.0, 0, 9999, -9999, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[833, 18.6, 0, 9999, -9999, 1.0, 100, 1, 18.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[834, 23.3, 0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[835, 63.7, 0, 9999, -9999, 1.0, 100, 1, 63.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[836, 25.5, 0, 9999, -9999, 1.0, 100, 1, 25.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[837, 472.0, 0, 9999, -9999, 1.0, 100, 1, 472.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[839, 73.3, 0, 9999, -9999, 1.0, 100, 1, 73.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[840, 1158.147571, 0, 9999, -9999, 1.0, 100, 1, 1391.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[841, 23.3, 0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[842, 540.5, 0, 9999, -9999, 1.0, 100, 1, 540.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[843, 333.0, 0, 9999, -9999, 1.0, 100, 1, 333.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[844, 40.0, 0, 9999, -9999, 1.0, 100, 1, 40.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[845, 318.0, 0, 9999, -9999, 1.0, 100, 1, 318.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[847, 124.467036, 0, 9999, -9999, 1.0, 100, 1, 280.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[848, 42.0, 0, 9999, -9999, 1.0, 100, 1, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[849, 779.0, 0, 9999, -9999, 1.0, 100, 1, 779.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[850, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[851, 79.5, 0, 9999, -9999, 1.0, 100, 1, 79.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[852, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[853, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[854, 81.8, 0, 9999, -9999, 1.0, 100, 1, 81.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[855, 688.0, 0, 9999, -9999, 1.0, 100, 1, 688.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[856, 36.0, 0, 9999, -9999, 1.0, 100, 1, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[857, 1402.0, 0, 9999, -9999, 1.0, 100, 1, 1402.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[858, 56.8, 0, 9999, -9999, 1.0, 100, 1, 56.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[859, 85.0, 0, 9999, -9999, 1.0, 100, 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[860, 25.0, 0, 9999, -9999, 1.0, 100, 1, 25.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[862, 725.0, 0, 9999, -9999, 1.0, 100, 1, 725.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[863, 0.6, 0, 9999, -9999, 1.0, 100, 1, 0.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[864, 875.0, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[865, 11.0, 0, 9999, -9999, 1.0, 100, 1, 11.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[867, 769.0, 0, 9999, -9999, 1.0, 100, 1, 769.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[869, 1360.0, 0, 9999, -9999, 1.0, 100, 1, 1360.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[870, 58.4, 0, 9999, -9999, 1.0, 100, 1, 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[872, 22.5, 0, 9999, -9999, 1.0, 100, 1, 22.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[873, 122.0, 0, 9999, -9999, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[874, 20.7, 0, 9999, -9999, 1.0, 100, 1, 20.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[875, 24.4, 0, 9999, -9999, 1.0, 100, 1, 24.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[877, 24.8, 0, 9999, -9999, 1.0, 100, 1, 24.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[881, 1001.3, 0, 9999, -9999, 1.0, 100, 1, 1001.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[882, 17.4, 0, 9999, -9999, 1.0, 100, 1, 17.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[883, 18.0, 0, 9999, -9999, 1.0, 100, 1, 18.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[886, 2572.0, 0, 9999, -9999, 1.0, 100, 1, 2572.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[889, 9.5, 0, 9999, -9999, 1.0, 100, 1, 9.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[890, 48.0, 0, 9999, -9999, 1.0, 100, 1, 48.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[893, 60.0, 0, 9999, -9999, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[894, 158.0, 0, 9999, -9999, 1.0, 100, 1, 158.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[895, 19.0, 0, 9999, -9999, 1.0, 100, 1, 19.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[896, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[898, 84.6, 0, 9999, -9999, 1.0, 100, 1, 84.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[900, 112.6, 0, 9999, -9999, 1.0, 100, 1, 112.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[902, 19.5, 0, 9999, -9999, 1.0, 100, 1, 19.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[903, 20.1, 0, 9999, -9999, 1.0, 100, 1, 20.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[905, 121.080178, 0, 9999, -9999, 1.0, 100, 1, 137.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[907, 67.3, 0, 9999, -9999, 1.0, 100, 1, 67.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[909, 36.8, 0, 9999, -9999, 1.0, 100, 1, 36.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[911, 288.5, 0, 9999, -9999, 1.0, 100, 1, 288.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[913, 33.01098, 0, 9999, -9999, 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[914, 112.1, 0, 9999, -9999, 1.0, 100, 1, 112.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[915, 12.0, 0, 9999, -9999, 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[916, 196.0, 0, 9999, -9999, 1.0, 100, 1, 196.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[917, 17.0, 0, 9999, -9999, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[918, 38.5, 0, 9999, -9999, 1.0, 100, 1, 38.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[919, 15.6, 0, 9999, -9999, 1.0, 100, 1, 15.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[920, 12.8, 0, 9999, -9999, 1.0, 100, 1, 12.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[921, 124.0, 0, 9999, -9999, 1.0, 100, 1, 124.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[922, 164.0, 0, 9999, -9999, 1.0, 100, 1, 164.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[923, 146.0, 0, 9999, -9999, 1.0, 100, 1, 146.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[925, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[928, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[931, 217.1, 0, 9999, -9999, 1.0, 100, 1, 217.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[934, 296.0, 0, 9999, -9999, 1.0, 100, 1, 296.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[935, 23.1, 0, 9999, -9999, 1.0, 100, 1, 23.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[936, 104.4, 0, 9999, -9999, 1.0, 100, 1, 104.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[937, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[939, 0.1, 0, 9999, -9999, 1.0, 100, 1, 0.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[940, 29.6, 0, 9999, -9999, 1.0, 100, 1, 29.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[942, 51.9, 0, 9999, -9999, 1.0, 100, 1, 51.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[943, 66.3, 0, 9999, -9999, 1.0, 100, 1, 66.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[944, 25.4, 0, 9999, -9999, 1.0, 100, 1, 25.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[945, 35.0, 0, 9999, -9999, 1.0, 100, 1, 35.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[946, 80.0, 0, 9999, -9999, 1.0, 100, 1, 80.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[948, 79.0, 0, 9999, -9999, 1.0, 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[950, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[951, 393.739186, 0, 9999, -9999, 1.0, 100, 1, 444.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[952, 31.7, 0, 9999, -9999, 1.0, 100, 1, 31.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[956, 65.0, 0, 9999, -9999, 1.0, 100, 1, 65.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[957, 6.0, 0, 9999, -9999, 1.0, 100, 1, 6.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[958, 66.7, 0, 9999, -9999, 1.0, 100, 1, 66.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[959, 45.5, 0, 9999, -9999, 1.0, 100, 1, 45.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[960, 26.5, 0, 9999, -9999, 1.0, 100, 1, 26.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[963, 559.823432, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[965, 352.0, 0, 9999, -9999, 1.0, 100, 1, 352.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[966, 66.0, 0, 9999, -9999, 1.0, 100, 1, 66.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[967, 37.5, 0, 9999, -9999, 1.0, 100, 1, 37.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[968, 54.0, 0, 9999, -9999, 0.99951, 100, 1, 54.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[969, 56.9, 0, 9999, -9999, 0.99951, 100, 1, 56.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[971, 20.0, 0, 9999, -9999, 1.0, 100, 1, 20.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[973, 1347.0, 0, 9999, -9999, 1.0, 100, 1, 1347.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[976, 26.9, 0, 9999, -9999, 1.0, 100, 1, 26.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[977, 324.0, 0, 9999, -9999, 1.0, 100, 1, 324.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[978, 4.6, 0, 9999, -9999, 1.0, 100, 1, 4.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[980, 309.665775, 0, 9999, -9999, 1.0, 100, 1, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[981, 119.0, 0, 9999, -9999, 1.0, 100, 1, 119.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[982, 9.9, 0, 9999, -9999, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[983, 44.0, 0, 9999, -9999, 1.0, 100, 1, 44.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[984, 465.0, 0, 9999, -9999, 1.0, 100, 1, 465.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[985, 22.0, 0, 9999, -9999, 1.0, 100, 1, 22.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[986, 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[987, 164.5, 0, 9999, -9999, 1.0, 100, 1, 164.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[988, 5.1, 0, 9999, -9999, 1.0, 100, 1, 5.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[990, 300.0, 0, 9999, -9999, 1.0, 100, 1, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[993, 392.0, 0, 9999, -9999, 1.0, 100, 1, 392.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[994, 33.0, 0, 9999, -9999, 1.0, 100, 1, 33.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[995, 4.2, 0, 9999, -9999, 1.0, 100, 1, 4.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[996, 11.5, 0, 9999, -9999, 1.0, 100, 1, 11.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[997, 18.8, 0, 9999, -9999, 1.0, 100, 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[998, 423.0, 0, 9999, -9999, 1.0, 100, 1, 423.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[999, 15.6, 0, 9999, -9999, 1.0, 100, 1, 15.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1000, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1002, 9.9, 0, 9999, -9999, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1003, 900.0, 0, 9999, -9999, 1.0, 100, 1, 900.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1006, 122.0, 0, 9999, -9999, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1007, 23.3, 0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1008, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1010, 750.0, 0, 9999, -9999, 1.0, 100, 1, 750.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1011, 18.7, 0, 9999, -9999, 1.0, 100, 1, 18.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1012, 2835.0, 0, 9999, -9999, 1.0, 100, 1, 2835.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1014, 750.0, 0, 9999, -9999, 1.0, 100, 1, 750.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1018, 175.9, 0, 9999, -9999, 1.0, 100, 1, 175.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1019, 120.0, 0, 9999, -9999, 1.0, 100, 1, 120.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1023, 0.2, 0, 9999, -9999, 1.0, 100, 1, 0.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1025, 113.6, 0, 9999, -9999, 1.0, 100, 1, 113.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1026, 655.6, 0, 9999, -9999, 1.0, 100, 1, 655.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1028, 193.856792, 0, 9999, -9999, 1.0, 100, 1, 400.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1029, 47.945063, 0, 9999, -9999, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1030, 512.154762, 0, 9999, -9999, 1.0, 100, 1, 1018.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1031, 465.297424, 0, 9999, -9999, 1.0, 100, 1, 1447.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1032, 38.015413, 0, 9999, -9999, 1.0, 100, 1, 153.510391, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1033, 2.188896, 0, 9999, -9999, 1.0, 100, 1, 50.164506, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1034, 28.459011, 0, 9999, -9999, 1.0, 100, 1, 84.262779, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1035, 13.483148, 0, 9999, -9999, 1.0, 100, 1, 49.886469, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1036, 10.668878, 0, 9999, -9999, 1.0, 100, 1, 67.223077, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1037, 6.453908, 0, 9999, -9999, 1.0, 100, 1, 94.684044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1038, 9.267765, 0, 9999, -9999, 1.0, 100, 1, 85.798525, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1039, 0.034961, 0, 9999, -9999, 1.0, 100, 1, 132.724114, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1041, 45.839958, 0, 9999, -9999, 1.0, 100, 1, 204.187624, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1042, 0.015112, 0, 9999, -9999, 1.0, 100, 1, 52.70053, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1044, 2.20729, 0, 9999, -9999, 1.0, 100, 1, 36.163532, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1046, 4.510177, 0, 9999, -9999, 1.0, 100, 1, 106.787063, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1047, 1.291195, 0, 9999, -9999, 1.0, 100, 1, 13.029581, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1048, 3.439348, 0, 9999, -9999, 1.0, 100, 1, 71.656883, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1049, 90.190989, 0, 9999, -9999, 1.0, 100, 1, 293.755375, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1050, 2.855489, 0, 9999, -9999, 1.0, 100, 1, 52.781606, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1051, 25.039476, 0, 9999, -9999, 1.0, 100, 1, 304.42978, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1052, 4.408997, 0, 9999, -9999, 1.0, 100, 1, 20.66869, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1053, 3.393402, 0, 9999, -9999, 1.0, 100, 1, 16.368087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1054, 64.159181, 0, 9999, -9999, 1.0, 100, 1, 273.855776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1055, 0.001443, 0, 9999, -9999, 1.0, 100, 1, 2.856069, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1056, 46.702863, 0, 9999, -9999, 1.0, 100, 1, 603.943953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1057, 1.149455, 0, 9999, -9999, 1.0, 100, 1, 426.979979, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1058, 7.224099, 0, 9999, -9999, 1.0, 100, 1, 1055.735174, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1059, 17.207922, 0, 9999, -9999, 1.0, 100, 1, 414.871332, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1060, 0.105899, 0, 9999, -9999, 1.0, 100, 1, 10.351632, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1061, 5.978684, 0, 9999, -9999, 1.0, 100, 1, 161.862597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1062, 0.003268, 0, 9999, -9999, 1.0, 100, 1, 2.878561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1063, 0.002721, 0, 9999, -9999, 1.0, 100, 1, 8.670916, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1064, 43.352939, 0, 9999, -9999, 1.0, 100, 1, 209.786524, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1065, 64.557076, 0, 9999, -9999, 1.0, 100, 1, 339.421643, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1066, 6.593551, 0, 9999, -9999, 1.0, 100, 1, 134.399019, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1067, 8.448185, 0, 9999, -9999, 1.0, 100, 1, 32.653526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1068, 0.145539, 0, 9999, -9999, 1.0, 100, 1, 5.009022, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1069, 0.157038, 0, 9999, -9999, 1.0, 100, 1, 3.190759, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1070, 0.029105, 0, 9999, -9999, 1.0, 100, 1, 0.788599, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1071, 0.624346, 0, 9999, -9999, 1.0, 100, 1, 4.328696, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1072, 40.600927, 0, 9999, -9999, 1.0, 100, 1, 112.606433, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1073, 17.346842, 0, 9999, -9999, 1.0, 100, 1, 77.81765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1074, 48.373759, 0, 9999, -9999, 1.0, 100, 1, 153.592986, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1075, 2.832969, 0, 9999, -9999, 1.0, 100, 1, 15.783448, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1077, 0.795164, 0, 9999, -9999, 1.0, 100, 1, 26.120041, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1078, 1.572306, 0, 9999, -9999, 1.0, 100, 1, 34.413246, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1079, 23.483715, 0, 9999, -9999, 1.0, 100, 1, 72.327992, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1080, 12.71579, 0, 9999, -9999, 1.0, 100, 1, 132.149983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1081, 41.337281, 0, 9999, -9999, 1.0, 100, 1, 405.642115, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1082, 17.035693, 0, 9999, -9999, 1.0, 100, 1, 510.054159, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1083, 30.072335, 0, 9999, -9999, 1.0, 100, 1, 633.681488, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1084, 27.557337, 0, 9999, -9999, 1.0, 100, 1, 602.719371, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1085, 6.690069, 0, 9999, -9999, 1.0, 100, 1, 113.714399, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1086, 9.340055, 0, 9999, -9999, 1.0, 100, 1, 225.59917, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1087, 2.219279, 0, 9999, -9999, 1.0, 100, 1, 116.66597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1088, 0.225948, 0, 9999, -9999, 1.0, 100, 1, 36.782492, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1089, 0.685877, 0, 9999, -9999, 1.0, 100, 1, 384.449592, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1090, 18.3652, 0, 9999, -9999, 1.0, 100, 1, 89.140897, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1091, 0.069841, 0, 9999, -9999, 1.0, 100, 1, 45.7939, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1092, 0.000886, 0, 9999, -9999, 1.0, 100, 1, 54.002032, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1093, 19.472331, 0, 9999, -9999, 1.0, 100, 1, 155.605298, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1094, 0.324922, 0, 9999, -9999, 1.0, 100, 1, 3.759038, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1095, 0.017632, 0, 9999, -9999, 1.0, 100, 1, 0.204951, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1096, 5.653431, 0, 9999, -9999, 1.0, 100, 1, 84.50612, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1097, 0.849404, 0, 9999, -9999, 1.0, 100, 1, 4.601122, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1098, 22.024295, 0, 9999, -9999, 1.0, 100, 1, 71.025499, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1099, 111.287059, 0, 9999, -9999, 1.0, 100, 1, 290.937198, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1100, 0.000469, 0, 9999, -9999, 1.0, 100, 1, 0.026696, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1101, 0.059355, 0, 9999, -9999, 1.0, 100, 1, 83.930665, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1102, 0.348019, 0, 9999, -9999, 1.0, 100, 1, 350.979988, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1103, 4.374488, 0, 9999, -9999, 1.0, 100, 1, 245.381701, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1104, 0.020088, 0, 9999, -9999, 1.0, 100, 1, 0.206918, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1105, 0.140469, 0, 9999, -9999, 1.0, 100, 1, 2.178593, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1106, 0.24489, 0, 9999, -9999, 1.0, 100, 1, 2.289793, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1107, 4.365112, 0, 9999, -9999, 1.0, 100, 1, 76.221615, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1108, 15.005714, 0, 9999, -9999, 1.0, 100, 1, 320.422751, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1109, 0.032298, 0, 9999, -9999, 1.0, 100, 1, 0.77821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1110, 0.109011, 0, 9999, -9999, 1.0, 100, 1, 1.654557, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1111, 3.982839, 0, 9999, -9999, 1.0, 100, 1, 89.637993, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1112, 13.347732, 0, 9999, -9999, 1.0, 100, 1, 69.53429, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1113, 0.690017, 0, 9999, -9999, 1.0, 100, 1, 3.536361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1114, 3.236521, 0, 9999, -9999, 1.0, 100, 1, 13.446889, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1115, 12.945936, 0, 9999, -9999, 1.0, 100, 1, 50.575278, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1116, 7.186063, 0, 9999, -9999, 1.0, 100, 1, 32.601142, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1117, 21.735816, 0, 9999, -9999, 1.0, 100, 1, 90.792541, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1118, 1.167272, 0, 9999, -9999, 1.0, 100, 1, 8.725012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1119, 9.731188, 0, 9999, -9999, 1.0, 100, 1, 43.254023, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1120, 0.454554, 0, 9999, -9999, 1.0, 100, 1, 2.416001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1121, 0.129799, 0, 9999, -9999, 1.0, 100, 1, 0.540589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1122, 0.277958, 0, 9999, -9999, 1.0, 100, 1, 1.462883, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1123, 0.327821, 0, 9999, -9999, 1.0, 100, 1, 1.464336, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1124, 0.319573, 0, 9999, -9999, 1.0, 100, 1, 1.288283, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1125, 1.853524, 0, 9999, -9999, 1.0, 100, 1, 25.818899, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1126, 2.010115, 0, 9999, -9999, 1.0, 100, 1, 29.154893, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1127, 6.767523, 0, 9999, -9999, 1.0, 100, 1, 105.296621, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1128, 0.159146, 0, 9999, -9999, 1.0, 100, 1, 3.06139, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1129, 0.240204, 0, 9999, -9999, 1.0, 100, 1, 4.738747, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1130, 0.112767, 0, 9999, -9999, 1.0, 100, 1, 1.025754, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1131, 0.151265, 0, 9999, -9999, 1.0, 100, 1, 2.897078, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1132, 0.043874, 0, 9999, -9999, 1.0, 100, 1, 0.359497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1133, 0.17278, 0, 9999, -9999, 1.0, 100, 1, 0.719597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1134, 0.12208, 0, 9999, -9999, 1.0, 100, 1, 0.508453, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1135, 0.70461, 0, 9999, -9999, 1.0, 100, 1, 8.117819, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1136, 0.085367, 0, 9999, -9999, 1.0, 100, 1, 0.4027, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1137, 0.542436, 0, 9999, -9999, 1.0, 100, 1, 3.669012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1138, 0.28633, 0, 9999, -9999, 1.0, 100, 1, 1.254278, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1139, 3.827873, 0, 9999, -9999, 1.0, 100, 1, 19.822769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1140, 8.531552, 0, 9999, -9999, 1.0, 100, 1, 28.389457, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1141, 14.71591, 0, 9999, -9999, 1.0, 100, 1, 119.46456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1142, 0.282411, 0, 9999, -9999, 1.0, 100, 1, 1.215733, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1143, 2.17636, 0, 9999, -9999, 1.0, 100, 1, 25.239356, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1144, 14.468173, 0, 9999, -9999, 1.0, 100, 1, 52.527382, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1145, 41.721366, 0, 9999, -9999, 1.0, 100, 1, 175.889627, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1146, 0.206808, 0, 9999, -9999, 1.0, 100, 1, 0.861317, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1147, 10.482934, 0, 9999, -9999, 1.0, 100, 1, 45.703707, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1148, 1.12205, 0, 9999, -9999, 1.0, 100, 1, 17.645529, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1149, 0.384525, 0, 9999, -9999, 1.0, 100, 1, 8.556784, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1150, 0.21385, 0, 9999, -9999, 1.0, 100, 1, 3.62256, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1151, 0.761655, 0, 9999, -9999, 1.0, 100, 1, 13.036113, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1152, 0.007549, 0, 9999, -9999, 1.0, 100, 1, 0.116518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1153, 0.005643, 0, 9999, -9999, 1.0, 100, 1, 0.068788, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1154, 0.013178, 0, 9999, -9999, 1.0, 100, 1, 0.160625, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1155, 0.036293, 0, 9999, -9999, 1.0, 100, 1, 0.609451, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1156, 2.725518, 0, 9999, -9999, 1.0, 100, 1, 16.022334, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1157, 0.254864, 0, 9999, -9999, 1.0, 100, 1, 4.354147, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1158, 0.090066, 0, 9999, -9999, 1.0, 100, 1, 1.04304, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1159, 1.846823, 0, 9999, -9999, 1.0, 100, 1, 13.498087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1160, 4.449778, 0, 9999, -9999, 1.0, 100, 1, 238.377761, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1161, 0.968938, 0, 9999, -9999, 1.0, 100, 1, 25.263391, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1162, 0.004399, 0, 9999, -9999, 1.0, 100, 1, 502.409178, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1164, 0.681555, 0, 9999, -9999, 1.0, 100, 1, 285.625412, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1166, 19.037928, 0, 9999, -9999, 1.0, 100, 1, 83.277163, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1167, 0.436847, 0, 9999, -9999, 1.0, 100, 1, 5.05378, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1168, 0.092048, 0, 9999, -9999, 1.0, 100, 1, 1.345774, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1169, 0.15256, 0, 9999, -9999, 1.0, 100, 1, 2.721845, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1170, 0.022911, 0, 9999, -9999, 1.0, 100, 1, 0.26599, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1171, 1.218434, 0, 9999, -9999, 1.0, 100, 1, 9.029885, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1172, 0.184488, 0, 9999, -9999, 1.0, 100, 1, 3.584043, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1173, 0.074867, 0, 9999, -9999, 1.0, 100, 1, 254.253327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1174, 0.108899, 0, 9999, -9999, 1.0, 100, 1, 1.260082, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1175, 0.04558, 0, 9999, -9999, 1.0, 100, 1, 0.855454, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1176, 0.013921, 0, 9999, -9999, 1.0, 100, 1, 0.23222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1177, 1.759222, 0, 9999, -9999, 1.0, 100, 1, 27.87401, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1178, 0.209645, 0, 9999, -9999, 1.0, 100, 1, 3.167999, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1179, 0.051465, 0, 9999, -9999, 1.0, 100, 1, 1.306293, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1180, 0.059365, 0, 9999, -9999, 1.0, 100, 1, 0.688545, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1181, 23.821689, 0, 9999, -9999, 1.0, 100, 1, 85.739557, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1182, 24.612874, 0, 9999, -9999, 1.0, 100, 1, 99.319579, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1183, 3.24107, 0, 9999, -9999, 1.0, 100, 1, 38.222575, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1184, 0.358312, 0, 9999, -9999, 1.0, 100, 1, 4.219005, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1185, 2.182901, 0, 9999, -9999, 1.0, 100, 1, 11.343971, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1186, 2.184012, 0, 9999, -9999, 1.0, 100, 1, 38.916368, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1187, 0.459888, 0, 9999, -9999, 1.0, 100, 1, 9.814574, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1188, 53.562608, 0, 9999, -9999, 1.0, 100, 1, 179.712741, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1189, 1.204921, 0, 9999, -9999, 1.0, 100, 1, 20.261805, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1190, 32.667547, 0, 9999, -9999, 1.0, 100, 1, 220.533673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1191, 17.953145, 0, 9999, -9999, 1.0, 100, 1, 73.079413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1192, 2.590747, 0, 9999, -9999, 1.0, 100, 1, 21.454569, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1193, 0.222396, 0, 9999, -9999, 1.0, 100, 1, 2.399953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1194, 0.77085, 0, 9999, -9999, 1.0, 100, 1, 8.986036, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1195, 0.015425, 0, 9999, -9999, 1.0, 100, 1, 0.202359, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1196, 0.029284, 0, 9999, -9999, 1.0, 100, 1, 160.697956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1197, 0.11597, 0, 9999, -9999, 1.0, 100, 1, 90.592266, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1198, 4.134805, 0, 9999, -9999, 1.0, 100, 1, 39.819157, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1199, 61.376881, 0, 9999, -9999, 1.0, 100, 1, 201.421956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1200, 21.487973, 0, 9999, -9999, 1.0, 100, 1, 56.012408, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1201, 0.691822, 0, 9999, -9999, 1.0, 100, 1, 25.166667, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1202, 3.586635, 0, 9999, -9999, 1.0, 100, 1, 49.89238, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1203, 12.725115, 0, 9999, -9999, 1.0, 100, 1, 182.623256, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1204, 2.799582, 0, 9999, -9999, 1.0, 100, 1, 47.541821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1205, 0.000146, 0, 9999, -9999, 1.0, 100, 1, 0.548843, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1206, 0.411467, 0, 9999, -9999, 1.0, 100, 1, 3.806894, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1207, 0.331325, 0, 9999, -9999, 1.0, 100, 1, 3.575453, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1208, 0.105374, 0, 9999, -9999, 1.0, 100, 1, 2.242031, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1209, 0.00265, 0, 9999, -9999, 1.0, 100, 1, 1.268261, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1210, 0.69402, 0, 9999, -9999, 1.0, 100, 1, 9.02599, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1211, 5.750967, 0, 9999, -9999, 1.0, 100, 1, 18.005229, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1212, 26.199295, 0, 9999, -9999, 1.0, 100, 1, 91.171888, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1213, 21.1062, 0, 9999, -9999, 1.0, 100, 1, 57.342704, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1214, 0.541037, 0, 9999, -9999, 1.0, 100, 1, 4.505907, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1215, 0.15338, 0, 9999, -9999, 1.0, 100, 1, 2.252965, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1216, 3.319201, 0, 9999, -9999, 1.0, 100, 1, 67.754469, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1217, 2.664727, 0, 9999, -9999, 1.0, 100, 1, 35.871617, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1218, 0.10866, 0, 9999, -9999, 1.0, 100, 1, 0.980482, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1219, 0.83454, 0, 9999, -9999, 1.0, 100, 1, 12.33953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1220, 1.729113, 0, 9999, -9999, 1.0, 100, 1, 30.597849, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1221, 43.354712, 0, 9999, -9999, 1.0, 100, 1, 593.230436, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1222, 54.25302, 0, 9999, -9999, 1.0, 100, 1, 211.057769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1223, 0.828555, 0, 9999, -9999, 1.0, 100, 1, 3.806101, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1224, 15.875443, 0, 9999, -9999, 1.0, 100, 1, 160.523778, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1225, 1.071926, 0, 9999, -9999, 1.0, 100, 1, 34.931481, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1226, 0.118196, 0, 9999, -9999, 1.0, 100, 1, 3.982858, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1227, 3.258837, 0, 9999, -9999, 1.0, 100, 1, 17.482807, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1228, 0.156042, 0, 9999, -9999, 1.0, 100, 1, 3.021367, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1229, 7.933585, 0, 9999, -9999, 1.0, 100, 1, 51.244222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1230, 0.045286, 0, 9999, -9999, 1.0, 100, 1, 1.681276, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1231, 1.223909, 0, 9999, -9999, 1.0, 100, 1, 33.55478, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1232, 2.573754, 0, 9999, -9999, 1.0, 100, 1, 75.075088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1233, 173.598538, 0, 9999, -9999, 1.0, 100, 1, 575.36828, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1234, 33.990216, 0, 9999, -9999, 1.0, 100, 1, 101.1394, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1235, 0.001519, 0, 9999, -9999, 1.0, 100, 1, 9.03734, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1236, 0.010199, 0, 9999, -9999, 1.0, 100, 1, 82.225035, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1237, 3.462839, 0, 9999, -9999, 1.0, 100, 1, 14.605409, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1238, 12.106922, 0, 9999, -9999, 1.0, 100, 1, 188.691049, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1239, 0.483742, 0, 9999, -9999, 1.0, 100, 1, 2.267706, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1240, 63.552975, 0, 9999, -9999, 1.0, 100, 1, 339.51051, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1241, 9.744883, 0, 9999, -9999, 1.0, 100, 1, 385.361595, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1242, 1.158061, 0, 9999, -9999, 1.0, 100, 1, 27.074038, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1243, 4.669682, 0, 9999, -9999, 1.0, 100, 1, 83.079842, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1244, 115.794463, 0, 9999, -9999, 1.0, 100, 1, 323.472536, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1245, 0.241619, 0, 9999, -9999, 1.0, 100, 1, 8.080896, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1246, 18.525152, 0, 9999, -9999, 1.0, 100, 1, 57.127825, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1247, 5.100639, 0, 9999, -9999, 1.0, 100, 1, 21.833396, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1248, 13.259573, 0, 9999, -9999, 1.0, 100, 1, 91.958275, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1249, 1.47167, 0, 9999, -9999, 1.0, 100, 1, 76.135177, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1250, 0.772338, 0, 9999, -9999, 1.0, 100, 1, 30.830519, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1251, 2.007729, 0, 9999, -9999, 1.0, 100, 1, 23.404345, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1252, 1.728628, 0, 9999, -9999, 1.0, 100, 1, 14.887727, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1253, 17.018216, 0, 9999, -9999, 1.0, 100, 1, 64.502694, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1254, 26.927476, 0, 9999, -9999, 1.0, 100, 1, 82.278695, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1255, 0.726767, 0, 9999, -9999, 1.0, 100, 1, 3.818419, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1256, 3.218337, 0, 9999, -9999, 1.0, 100, 1, 15.091842, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1257, 19.556961, 0, 9999, -9999, 1.0, 100, 1, 88.95288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1258, 75.724888, 0, 9999, -9999, 1.0, 100, 1, 235.487329, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1259, 26.547394, 0, 9999, -9999, 1.0, 100, 1, 109.288719, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1260, 0.629507, 0, 9999, -9999, 1.0, 100, 1, 20.168717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1261, 10.592114, 0, 9999, -9999, 1.0, 100, 1, 201.699555, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1262, 0.066859, 0, 9999, -9999, 1.0, 100, 1, 0.524108, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1263, 0.05282, 0, 9999, -9999, 1.0, 100, 1, 0.352421, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1264, 8.646042, 0, 9999, -9999, 1.0, 100, 1, 82.035361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1265, 0.87289, 0, 9999, -9999, 1.0, 100, 1, 6.654727, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1266, 19.839091, 0, 9999, -9999, 1.0, 100, 1, 119.710849, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1267, 1.42905, 0, 9999, -9999, 1.0, 100, 1, 39.469006, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1270, 2.867892, 0, 9999, -9999, 1.0, 100, 1, 38.950511, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1271, 2.180592, 0, 9999, -9999, 1.0, 100, 1, 47.371792, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1272, 0.12233, 0, 9999, -9999, 1.0, 100, 1, 1.23166, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1273, 0.402412, 0, 9999, -9999, 1.0, 100, 1, 2.169201, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1274, 4.613569, 0, 9999, -9999, 1.0, 100, 1, 53.095629, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1275, 5.039854, 0, 9999, -9999, 1.0, 100, 1, 99.0753, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1276, 0.577089, 0, 9999, -9999, 1.0, 100, 1, 25.655641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1277, 1.713473, 0, 9999, -9999, 1.0, 100, 1, 65.611252, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1278, 7.145337, 0, 9999, -9999, 1.0, 100, 1, 170.437781, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1279, 2e-05, 0, 9999, -9999, 1.0, 100, 1, 0.004344, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1280, 0.008871, 0, 9999, -9999, 1.0, 100, 1, 0.626494, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1282, 0.164926, 0, 9999, -9999, 1.0, 100, 1, 4.363037, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1283, 24.042404, 0, 9999, -9999, 1.0, 100, 1, 1297.764428, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1284, 2.961479, 0, 9999, -9999, 1.0, 100, 1, 28.426322, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1285, 0.002761, 0, 9999, -9999, 1.0, 100, 1, 2.937048, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1286, 2.24876, 0, 9999, -9999, 1.0, 100, 1, 17.872201, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1287, 4.55563, 0, 9999, -9999, 1.0, 100, 1, 93.199628, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1288, 3.72473, 0, 9999, -9999, 1.0, 100, 1, 148.402692, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1289, 7.121503, 0, 9999, -9999, 1.0, 100, 1, 184.149235, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1290, 0.310739, 0, 9999, -9999, 1.0, 100, 1, 4.901974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1291, 5.174079, 0, 9999, -9999, 1.0, 100, 1, 98.293351, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1292, 3.680955, 0, 9999, -9999, 1.0, 100, 1, 41.682074, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1293, 0.037266, 0, 9999, -9999, 1.0, 100, 1, 2.402107, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1294, 0.017452, 0, 9999, -9999, 1.0, 100, 1, 5.39743, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1295, 0.038533, 0, 9999, -9999, 1.0, 100, 1, 5.873666, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1296, 0.669408, 0, 9999, -9999, 1.0, 100, 1, 27.356489, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1297, 11.612135, 0, 9999, -9999, 1.0, 100, 1, 177.778742, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1300, 11.138034, 0, 9999, -9999, 1.0, 100, 1, 23.74405, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1301, 27.94748, 0, 9999, -9999, 1.0, 100, 1, 60.863304, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1302, 1.775766, 0, 9999, -9999, 1.0, 100, 1, 4.877299, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1303, 1.506596, 0, 9999, -9999, 1.0, 100, 1, 4.335516, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1304, 2.218171, 0, 9999, -9999, 1.0, 100, 1, 9.594319, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1305, 0.000322, 0, 9999, -9999, 1.0, 100, 1, 0.004567, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1306, 0.093112, 0, 9999, -9999, 1.0, 100, 1, 1.827014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1307, 0.071688, 0, 9999, -9999, 1.0, 100, 1, 0.29894, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1308, 0.05088, 0, 9999, -9999, 1.0, 100, 1, 3.278321, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1309, 0.089478, 0, 9999, -9999, 1.0, 100, 1, 3.34909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1310, 0.043944, 0, 9999, -9999, 1.0, 100, 1, 1.64589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1311, 1.283616, 0, 9999, -9999, 1.0, 100, 1, 11.854004, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1312, 32.144668, 0, 9999, -9999, 1.0, 100, 1, 262.264924, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1313, 7.034633, 0, 9999, -9999, 1.0, 100, 1, 30.836748, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1314, 2.705834, 0, 9999, -9999, 1.0, 100, 1, 12.003987, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1315, 1.715196, 0, 9999, -9999, 1.0, 100, 1, 7.879027, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1316, 0.001198, 0, 9999, -9999, 1.0, 100, 1, 2.757497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1317, 1.374919, 0, 9999, -9999, 1.0, 100, 1, 23.958574, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1318, 0.053995, 0, 9999, -9999, 1.0, 100, 1, 1.956332, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1319, 2.412989, 0, 9999, -9999, 1.0, 100, 1, 17.708276, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1320, 2.01785, 0, 9999, -9999, 1.0, 100, 1, 20.75859, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1321, 0.017436, 0, 9999, -9999, 1.0, 100, 1, 0.161123, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1322, 0.131102, 0, 9999, -9999, 1.0, 100, 1, 0.929763, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1323, 68.564796, 0, 9999, -9999, 1.0, 100, 1, 199.111909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1324, 1.440474, 0, 9999, -9999, 1.0, 100, 1, 13.063258, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1325, 4.968484, 0, 9999, -9999, 1.0, 100, 1, 90.497559, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1326, 2.423617, 0, 9999, -9999, 1.0, 100, 1, 56.928865, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1327, 3.105262, 0, 9999, -9999, 1.0, 100, 1, 50.796895, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1328, 1.651998, 0, 9999, -9999, 1.0, 100, 1, 16.063343, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1329, 17.013592, 0, 9999, -9999, 1.0, 100, 1, 218.675424, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1330, 6.13151, 0, 9999, -9999, 1.0, 100, 1, 30.131028, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1331, 0.035299, 0, 9999, -9999, 1.0, 100, 1, 0.289238, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1332, 0.021045, 0, 9999, -9999, 1.0, 100, 1, 26.293088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1333, 5.410888, 0, 9999, -9999, 1.0, 100, 1, 45.650254, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1334, 0.000137, 0, 9999, -9999, 1.0, 100, 1, 1.215341, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1336, 3.321284, 0, 9999, -9999, 1.0, 100, 1, 29.773035, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1337, 1.111612, 0, 9999, -9999, 1.0, 100, 1, 121.31241, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1338, 0.06346, 0, 9999, -9999, 1.0, 100, 1, 0.832524, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1339, 0.579758, 0, 9999, -9999, 1.0, 100, 1, 10.086482, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1340, 0.035501, 0, 9999, -9999, 1.0, 100, 1, 70.098327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1341, 6.581426, 0, 9999, -9999, 1.0, 100, 1, 205.513321, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1342, 0.031756, 0, 9999, -9999, 1.0, 100, 1, 0.734589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1343, 0.005344, 0, 9999, -9999, 1.0, 100, 1, 1.102108, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1344, 0.017248, 0, 9999, -9999, 1.0, 100, 1, 0.226057, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1345, 0.124928, 0, 9999, -9999, 1.0, 100, 1, 3.971188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1346, 12.149372, 0, 9999, -9999, 1.0, 100, 1, 214.719215, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1348, 2.617463, 0, 9999, -9999, 1.0, 100, 1, 22.707927, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1349, 2.716996, 0, 9999, -9999, 1.0, 100, 1, 42.352342, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1350, 0.016036, 0, 9999, -9999, 1.0, 100, 1, 0.094971, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1351, 5.3e-05, 0, 9999, -9999, 1.0, 100, 1, 0.015958, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1352, 0.007111, 0, 9999, -9999, 1.0, 100, 1, 0.83726, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1355, 0.046937, 0, 9999, -9999, 1.0, 100, 1, 1.688324, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1356, 12.885707, 0, 9999, -9999, 1.0, 100, 1, 73.486231, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1357, 6.737632, 0, 9999, -9999, 1.0, 100, 1, 56.459913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1358, 0.006907, 0, 9999, -9999, 1.0, 100, 1, 0.247293, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1359, 0.897683, 0, 9999, -9999, 1.0, 100, 1, 70.633589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1360, 3.153322, 0, 9999, -9999, 1.0, 100, 1, 17.135983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1361, 8.263279, 0, 9999, -9999, 1.0, 100, 1, 63.207173, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1362, 12.630815, 0, 9999, -9999, 1.0, 100, 1, 79.107216, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1363, 0.006147, 0, 9999, -9999, 1.0, 100, 1, 0.036158, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1364, 0.007668, 0, 9999, -9999, 1.0, 100, 1, 0.061068, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1365, 9.7e-05, 0, 9999, -9999, 1.0, 100, 1, 0.000456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1366, 0.005584, 0, 9999, -9999, 1.0, 100, 1, 1.229992, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1367, 6.250932, 0, 9999, -9999, 1.0, 100, 1, 43.863891, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1368, 0.096174, 0, 9999, -9999, 1.0, 100, 1, 3.298243, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1369, 1.432042, 0, 9999, -9999, 1.0, 100, 1, 7.968859, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1370, 0.012611, 0, 9999, -9999, 1.0, 100, 1, 0.343308, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1371, 1.656353, 0, 9999, -9999, 1.0, 100, 1, 81.767208, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1372, 0.996171, 0, 9999, -9999, 1.0, 100, 1, 192.966588, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1373, 1.384774, 0, 9999, -9999, 1.0, 100, 1, 35.200257, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1374, 45.514504, 0, 9999, -9999, 1.0, 100, 1, 108.220146, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1375, 25.096659, 0, 9999, -9999, 1.0, 100, 1, 61.223816, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1376, 21.592139, 0, 9999, -9999, 1.0, 100, 1, 176.213655, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1377, 1.308187, 0, 9999, -9999, 1.0, 100, 1, 234.376272, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1378, 0.068137, 0, 9999, -9999, 1.0, 100, 1, 246.029906, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1379, 0.067837, 0, 9999, -9999, 1.0, 100, 1, 0.805984, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1380, 0.148081, 0, 9999, -9999, 1.0, 100, 1, 1.213356, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1381, 0.079283, 0, 9999, -9999, 1.0, 100, 1, 1.01257, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1382, 75.120774, 0, 9999, -9999, 1.0, 100, 1, 138.839906, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1383, 57.921895, 0, 9999, -9999, 1.0, 100, 1, 109.821439, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1384, 0.898474, 0, 9999, -9999, 1.0, 100, 1, 4.669135, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1385, 0.010214, 0, 9999, -9999, 1.0, 100, 1, 0.124455, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1386, 0.058117, 0, 9999, -9999, 1.0, 100, 1, 0.673858, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1387, 0.177086, 0, 9999, -9999, 1.0, 100, 1, 3.493561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1388, 0.113278, 0, 9999, -9999, 1.0, 100, 1, 0.928188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1389, 0.02606, 0, 9999, -9999, 1.0, 100, 1, 0.213536, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1390, 0.189214, 0, 9999, -9999, 1.0, 100, 1, 3.732816, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1391, 0.022705, 0, 9999, -9999, 1.0, 100, 1, 0.521719, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1392, 1.653278, 0, 9999, -9999, 1.0, 100, 1, 19.306386, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1393, 0.304577, 0, 9999, -9999, 1.0, 100, 1, 1.376509, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1394, 0.242243, 0, 9999, -9999, 1.0, 100, 1, 1.077886, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1395, 0.016054, 0, 9999, -9999, 1.0, 100, 1, 0.073776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1396, 0.005171, 0, 9999, -9999, 1.0, 100, 1, 0.026112, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1397, 1.529697, 0, 9999, -9999, 1.0, 100, 1, 25.084545, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1398, 0.154931, 0, 9999, -9999, 1.0, 100, 1, 2.779641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1399, 1.184332, 0, 9999, -9999, 1.0, 100, 1, 17.868157, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1400, 0.28671, 0, 9999, -9999, 1.0, 100, 1, 1.297197, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1401, 5.131858, 0, 9999, -9999, 1.0, 100, 1, 89.339497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1402, 1.568442, 0, 9999, -9999, 1.0, 100, 1, 26.328902, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1403, 48.266806, 0, 9999, -9999, 1.0, 100, 1, 119.651672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1404, 51.082464, 0, 9999, -9999, 1.0, 100, 1, 134.800518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1405, 1.986189, 0, 9999, -9999, 1.0, 100, 1, 29.550802, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1406, 1.132197, 0, 9999, -9999, 1.0, 100, 1, 10.763987, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1407, 0.012144, 0, 9999, -9999, 1.0, 100, 1, 0.211614, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1408, 3.606729, 0, 9999, -9999, 1.0, 100, 1, 41.078698, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1409, 0.595096, 0, 9999, -9999, 1.0, 100, 1, 12.019786, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1410, 1.341977, 0, 9999, -9999, 1.0, 100, 1, 37.466518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1411, 6.631827, 0, 9999, -9999, 1.0, 100, 1, 39.395367, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1412, 0.149883, 0, 9999, -9999, 1.0, 100, 1, 5.987601, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1413, 0.108024, 0, 9999, -9999, 1.0, 100, 1, 5.679791, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1414, 0.018773, 0, 9999, -9999, 1.0, 100, 1, 25.992489, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1415, 0.000673, 0, 9999, -9999, 1.0, 100, 1, 7.454501, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1416, 0.000128, 0, 9999, -9999, 1.0, 100, 1, 7.958002, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1417, 2.2e-05, 0, 9999, -9999, 1.0, 100, 1, 0.001311, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1418, 3.131184, 0, 9999, -9999, 1.0, 100, 1, 88.264613, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1419, 0.892644, 0, 9999, -9999, 1.0, 100, 1, 33.260903, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1421, 0.846121, 0, 9999, -9999, 0.99951, 100, 1, 6.972369, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1422, 0.569459, 0, 9999, -9999, 1.0, 100, 1, 4.730495, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1423, 0.239313, 0, 9999, -9999, 1.0, 100, 1, 1.931017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1424, 0.085377, 0, 9999, -9999, 1.0, 100, 1, 219.092115, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1425, 7.009151, 0, 9999, -9999, 1.0, 100, 1, 21.366402, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1426, 16.98374, 0, 9999, -9999, 1.0, 100, 1, 68.762602, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1427, 2.554959, 0, 9999, -9999, 1.0, 100, 1, 480.698671, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1428, 0.012327, 0, 9999, -9999, 1.0, 100, 1, 334.885743, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1431, 5.108838, 0, 9999, -9999, 1.0, 100, 1, 227.662022, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1432, 0.587459, 0, 9999, -9999, 1.0, 100, 1, 12.058931, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1433, 118.811298, 0, 9999, -9999, 1.0, 100, 1, 1289.241188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1434, 0.031591, 0, 9999, -9999, 1.0, 100, 1, 99.440014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1435, 4.644217, 0, 9999, -9999, 1.0, 100, 1, 86.713217, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1436, 14.975035, 0, 9999, -9999, 1.0, 100, 1, 98.434116, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1437, 12.49617, 0, 9999, -9999, 1.0, 100, 1, 238.321958, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1438, 64.510912, 0, 9999, -9999, 1.0, 100, 1, 392.815158, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1439, 0.058606, 0, 9999, -9999, 1.0, 100, 1, 99.103164, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1440, 0.000863, 0, 9999, -9999, 1.0, 100, 1, 0.833609, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1441, 0.00601, 0, 9999, -9999, 1.0, 100, 1, 0.171578, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1442, 0.057526, 0, 9999, -9999, 1.0, 100, 1, 0.715522, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1443, 24.032003, 0, 9999, -9999, 1.0, 100, 1, 103.005076, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1444, 1.205148, 0, 9999, -9999, 1.0, 100, 1, 8.981696, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1445, 2.394259, 0, 9999, -9999, 1.0, 100, 1, 25.036799, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1446, 20.59301, 0, 9999, -9999, 1.0, 100, 1, 758.547933, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1447, 8.109674, 0, 9999, -9999, 1.0, 100, 1, 89.477411, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1448, 1.364062, 0, 9999, -9999, 1.0, 100, 1, 7.523578, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1449, 6.727523, 0, 9999, -9999, 1.0, 100, 1, 95.437673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1450, 10.232409, 0, 9999, -9999, 1.0, 100, 1, 59.256809, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1451, 13.044952, 0, 9999, -9999, 1.0, 100, 1, 68.198838, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1452, 4.020652, 0, 9999, -9999, 1.0, 100, 1, 24.068921, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1453, 12.794164, 0, 9999, -9999, 1.0, 100, 1, 64.93775, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1454, 71.645573, 0, 9999, -9999, 1.0, 100, 1, 155.126607, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1455, 0.038966, 0, 9999, -9999, 1.0, 100, 1, 0.654438, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1456, 3.746818, 0, 9999, -9999, 1.0, 100, 1, 50.054822, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1457, 0.244411, 0, 9999, -9999, 1.0, 100, 1, 2.002672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1458, 0.030047, 0, 9999, -9999, 1.0, 100, 1, 0.246199, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1459, 1.173315, 0, 9999, -9999, 1.0, 100, 1, 5.309059, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1460, 5.043479, 0, 9999, -9999, 1.0, 100, 1, 101.498473, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1461, 3.497456, 0, 9999, -9999, 1.0, 100, 1, 17.951737, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1462, 0.462345, 0, 9999, -9999, 1.0, 100, 1, 2.402686, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1463, 0.170398, 0, 9999, -9999, 1.0, 100, 1, 0.711207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1464, 24.648093, 0, 9999, -9999, 1.0, 100, 1, 218.884211, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1465, 0.600752, 0, 9999, -9999, 1.0, 100, 1, 5.299939, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1466, 0.332156, 0, 9999, -9999, 1.0, 100, 1, 5.685017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1467, 0.100837, 0, 9999, -9999, 1.0, 100, 1, 2.096155, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1468, 6.628756, 0, 9999, -9999, 1.0, 100, 1, 23.789171, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1469, 3.982867, 0, 9999, -9999, 1.0, 100, 1, 65.007467, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1470, 19.817875, 0, 9999, -9999, 1.0, 100, 1, 78.965265, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1471, 25.471799, 0, 9999, -9999, 1.0, 100, 1, 159.165074, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1472, 0.789769, 0, 9999, -9999, 1.0, 100, 1, 11.980182, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1473, 0.721082, 0, 9999, -9999, 1.0, 100, 1, 8.362608, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1474, 0.081557, 0, 9999, -9999, 1.0, 100, 1, 1.398948, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1475, 0.020827, 0, 9999, -9999, 1.0, 100, 1, 0.39088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1476, 81.826956, 0, 9999, -9999, 1.0, 100, 1, 250.480113, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1477, 0.580029, 0, 9999, -9999, 1.0, 100, 1, 12.122974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1479, 0.004362, 0, 9999, -9999, 1.0, 100, 1, 5.592606, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1480, 0.04074, 0, 9999, -9999, 1.0, 100, 1, 18.681964, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1481, 0.004051, 0, 9999, -9999, 1.0, 100, 1, 0.053146, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1482, 0.788081, 0, 9999, -9999, 1.0, 100, 1, 17.51083, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1483, 0.141817, 0, 9999, -9999, 1.0, 100, 1, 3.599649, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1484, 0.002023, 0, 9999, -9999, 1.0, 100, 1, 0.02991, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1485, 0.038114, 0, 9999, -9999, 1.0, 100, 1, 0.563547, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1486, 0.196086, 0, 9999, -9999, 1.0, 100, 1, 2.89934, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1487, 0.083872, 0, 9999, -9999, 1.0, 100, 1, 1.142917, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1488, 0.007448, 0, 9999, -9999, 1.0, 100, 1, 5.569856, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1489, 0.028558, 0, 9999, -9999, 1.0, 100, 1, 0.118938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1490, 15.603052, 0, 9999, -9999, 1.0, 100, 1, 782.463701, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1491, 5.539285, 0, 9999, -9999, 1.0, 100, 1, 84.622838, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1492, 3.975544, 0, 9999, -9999, 1.0, 100, 1, 229.927503, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1493, 3.904134, 0, 9999, -9999, 1.0, 100, 1, 83.557175, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1494, 56.119552, 0, 9999, -9999, 1.0, 100, 1, 404.486733, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1495, 1.179889, 0, 9999, -9999, 1.0, 100, 1, 66.920717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1497, 12.800197, 0, 9999, -9999, 1.0, 100, 1, 89.070006, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1498, 22.315881, 0, 9999, -9999, 1.0, 100, 1, 105.800802, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1500, 0.040223, 0, 9999, -9999, 1.0, 100, 1, 0.154817, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1501, 1.659338, 0, 9999, -9999, 1.0, 100, 1, 8.165333, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1502, 0.015933, 0, 9999, -9999, 1.0, 100, 1, 0.938928, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1503, 3.644376, 0, 9999, -9999, 1.0, 100, 1, 45.972187, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1504, 15.995903, 0, 9999, -9999, 1.0, 100, 1, 188.822836, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1505, 0.973825, 0, 9999, -9999, 1.0, 100, 1, 26.765913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1506, 1.68035, 0, 9999, -9999, 1.0, 100, 1, 56.406717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1507, 0.198063, 0, 9999, -9999, 1.0, 100, 1, 15.438042, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1508, 0.014206, 0, 9999, -9999, 1.0, 100, 1, 0.065259, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1510, 7.904758, 0, 9999, -9999, 1.0, 100, 1, 107.008141, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1511, 34.313644, 0, 9999, -9999, 1.0, 100, 1, 155.22192, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1512, 5.508085, 0, 9999, -9999, 1.0, 100, 1, 64.130052, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1513, 2.253286, 0, 9999, -9999, 1.0, 100, 1, 23.051786, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1514, 0.00068, 0, 9999, -9999, 1.0, 100, 1, 0.027711, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1516, 0.000622, 0, 9999, -9999, 1.0, 100, 1, 0.02881, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1517, 0.14151, 0, 9999, -9999, 1.0, 100, 1, 1.286804, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1518, 0.056948, 0, 9999, -9999, 1.0, 100, 1, 0.670542, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1519, 0.003953, 0, 9999, -9999, 1.0, 100, 1, 0.04654, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1520, 1.320701, 0, 9999, -9999, 1.0, 100, 1, 79.674256, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1521, 0.488031, 0, 9999, -9999, 1.0, 100, 1, 31.179116, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1522, 0.667681, 0, 9999, -9999, 1.0, 100, 1, 40.212666, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1523, 0.358897, 0, 9999, -9999, 1.0, 100, 1, 20.304521, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1524, 0.421411, 0, 9999, -9999, 1.0, 100, 1, 26.159251, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1525, 8.369013, 0, 9999, -9999, 1.0, 100, 1, 68.425403, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1526, 13.439194, 0, 9999, -9999, 1.0, 100, 1, 44.478558, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1527, 47.41109, 0, 9999, -9999, 1.0, 100, 1, 103.998682, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1528, 19.05121, 0, 9999, -9999, 1.0, 100, 1, 41.386726, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1529, 4.347441, 0, 9999, -9999, 1.0, 100, 1, 84.378012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1530, 36.879435, 0, 9999, -9999, 1.0, 100, 1, 79.055155, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1531, 98.758267, 0, 9999, -9999, 1.0, 100, 1, 183.821409, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1532, 3.146672, 0, 9999, -9999, 1.0, 100, 1, 37.379033, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1534, 16.179525, 0, 9999, -9999, 1.0, 100, 1, 29.516607, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1535, 2.910988, 0, 9999, -9999, 1.0, 100, 1, 8.931779, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1536, 13.30894, 0, 9999, -9999, 1.0, 100, 1, 39.26145, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1537, 5.590481, 0, 9999, -9999, 1.0, 100, 1, 99.740166, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1538, 3.755931, 0, 9999, -9999, 1.0, 100, 1, 130.774402, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1539, 6.565652, 0, 9999, -9999, 1.0, 100, 1, 201.766963, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1540, 0.089836, 0, 9999, -9999, 1.0, 100, 1, 4.160189, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1541, 0.293356, 0, 9999, -9999, 1.0, 100, 1, 3.429917, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1542, 1.778872, 0, 9999, -9999, 1.0, 100, 1, 50.287947, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1543, 7.196474, 0, 9999, -9999, 1.0, 100, 1, 14.788669, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1544, 15.520031, 0, 9999, -9999, 1.0, 100, 1, 121.437126, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1545, 64.930835, 0, 9999, -9999, 1.0, 100, 1, 185.545128, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1546, 55.458703, 0, 9999, -9999, 1.0, 100, 1, 255.44343, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1547, 71.747708, 0, 9999, -9999, 1.0, 100, 1, 362.597919, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1548, 9.874324, 0, 9999, -9999, 1.0, 100, 1, 21.273779, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1549, 26.315546, 0, 9999, -9999, 1.0, 100, 1, 77.017486, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1550, 2.578653, 0, 9999, -9999, 1.0, 100, 1, 5.214715, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1551, 4.679853, 0, 9999, -9999, 1.0, 100, 1, 9.576491, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1552, 1.571054, 0, 9999, -9999, 1.0, 100, 1, 54.035471, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1553, 1.205813, 0, 9999, -9999, 1.0, 100, 1, 92.480282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1554, 4.550451, 0, 9999, -9999, 1.0, 100, 1, 155.333413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1555, 2.8799, 0, 9999, -9999, 1.0, 100, 1, 103.865774, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1556, 1.072108, 0, 9999, -9999, 1.0, 100, 1, 40.376346, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1557, 0.628445, 0, 9999, -9999, 1.0, 100, 1, 25.990242, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1558, 0.94404, 0, 9999, -9999, 1.0, 100, 1, 24.622373, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1559, 4.593798, 0, 9999, -9999, 1.0, 100, 1, 112.609207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1560, 1.15871, 0, 9999, -9999, 1.0, 100, 1, 86.395942, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1561, 0.554621, 0, 9999, -9999, 1.0, 100, 1, 19.127379, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1562, 1.20192, 0, 9999, -9999, 1.0, 100, 1, 61.888351, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1563, 3.188963, 0, 9999, -9999, 1.0, 100, 1, 106.233907, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1564, 26.839461, 0, 9999, -9999, 1.0, 100, 1, 58.27282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1565, 0.825577, 0, 9999, -9999, 1.0, 100, 1, 12.83938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1566, 9.367373, 0, 9999, -9999, 1.0, 100, 1, 358.676351, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1567, 0.521067, 0, 9999, -9999, 1.0, 100, 1, 29.531771, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1568, 2.721294, 0, 9999, -9999, 1.0, 100, 1, 89.300597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1569, 7.514268, 0, 9999, -9999, 1.0, 100, 1, 328.718571, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1570, 6.439178, 0, 9999, -9999, 1.0, 100, 1, 243.241909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1571, 10.260218, 0, 9999, -9999, 1.0, 100, 1, 203.443403, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1572, 6.054092, 0, 9999, -9999, 1.0, 100, 1, 232.127956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1573, 2.410514, 0, 9999, -9999, 1.0, 100, 1, 80.403772, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1574, 3.788724, 0, 9999, -9999, 1.0, 100, 1, 144.715972, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1575, 10.428356, 0, 9999, -9999, 1.0, 100, 1, 153.606376, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1576, 2.443, 0, 9999, -9999, 1.0, 100, 1, 34.262017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1577, 15.38133, 0, 9999, -9999, 1.0, 100, 1, 217.054488, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1578, 0.821275, 0, 9999, -9999, 1.0, 100, 1, 16.348222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1579, 14.528543, 0, 9999, -9999, 1.0, 100, 1, 35.164333, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1580, 12.79112, 0, 9999, -9999, 1.0, 100, 1, 21.892492, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1581, 2.068277, 0, 9999, -9999, 1.0, 100, 1, 156.277964, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1582, 0.165737, 0, 9999, -9999, 1.0, 100, 1, 8.151092, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1583, 0.043758, 0, 9999, -9999, 1.0, 100, 1, 1.791968, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1584, 1.216571, 0, 9999, -9999, 1.0, 100, 1, 81.24993, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1585, 0.048815, 0, 9999, -9999, 1.0, 100, 1, 3.685182, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1586, 0.843323, 0, 9999, -9999, 1.0, 100, 1, 61.31549, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1587, 2.519864, 0, 9999, -9999, 1.0, 100, 1, 191.635296, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1588, 3.852362, 0, 9999, -9999, 1.0, 100, 1, 59.424343, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1589, 19.154329, 0, 9999, -9999, 1.0, 100, 1, 48.538268, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1590, 20.947358, 0, 9999, -9999, 1.0, 100, 1, 119.077525, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1591, 23.168103, 0, 9999, -9999, 1.0, 100, 1, 142.8447, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1592, 0.253241, 0, 9999, -9999, 1.0, 100, 1, 9.842361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1593, 0.15675, 0, 9999, -9999, 1.0, 100, 1, 7.183183, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1594, 0.292231, 0, 9999, -9999, 1.0, 100, 1, 9.56089, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1595, 2.231011, 0, 9999, -9999, 1.0, 100, 1, 54.79001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1596, 4.880936, 0, 9999, -9999, 1.0, 100, 1, 138.730049, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1597, 0.08322, 0, 9999, -9999, 1.0, 100, 1, 2.858987, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1598, 0.112467, 0, 9999, -9999, 1.0, 100, 1, 4.795494, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1599, 3.84912, 0, 9999, -9999, 1.0, 100, 1, 86.703571, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1600, 2.069032, 0, 9999, -9999, 1.0, 100, 1, 25.356501, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1601, 0.561492, 0, 9999, -9999, 1.0, 100, 1, 7.643653, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1602, 2.906505, 0, 9999, -9999, 1.0, 100, 1, 45.658169, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1603, 1.783351, 0, 9999, -9999, 1.0, 100, 1, 26.209248, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1604, 1.098497, 0, 9999, -9999, 1.0, 100, 1, 16.363032, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1605, 2.754133, 0, 9999, -9999, 1.0, 100, 1, 43.477178, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1606, 2.112869, 0, 9999, -9999, 1.0, 100, 1, 42.024907, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1607, 1.261272, 0, 9999, -9999, 1.0, 100, 1, 19.395236, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1608, 1.278121, 0, 9999, -9999, 1.0, 100, 1, 19.491249, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1609, 0.483623, 0, 9999, -9999, 1.0, 100, 1, 6.052272, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1610, 1.005066, 0, 9999, -9999, 1.0, 100, 1, 18.571656, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1611, 0.46381, 0, 9999, -9999, 1.0, 100, 1, 6.420554, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1612, 0.857392, 0, 9999, -9999, 1.0, 100, 1, 10.811203, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1613, 1.011747, 0, 9999, -9999, 1.0, 100, 1, 27.976217, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1614, 1.022581, 0, 9999, -9999, 1.0, 100, 1, 28.183827, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1615, 2.737635, 0, 9999, -9999, 1.0, 100, 1, 193.234776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1616, 0.13616, 0, 9999, -9999, 1.0, 100, 1, 6.865586, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1617, 0.214465, 0, 9999, -9999, 1.0, 100, 1, 10.63107, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1618, 0.137271, 0, 9999, -9999, 1.0, 100, 1, 4.920368, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1619, 0.137714, 0, 9999, -9999, 1.0, 100, 1, 6.689637, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1620, 0.054616, 0, 9999, -9999, 1.0, 100, 1, 1.912024, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1621, 0.643767, 0, 9999, -9999, 1.0, 100, 1, 8.056388, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1622, 0.454891, 0, 9999, -9999, 1.0, 100, 1, 5.693597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1623, 0.781413, 0, 9999, -9999, 1.0, 100, 1, 20.717111, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1624, 0.43014, 0, 9999, -9999, 1.0, 100, 1, 8.938454, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1625, 4.394301, 0, 9999, -9999, 1.0, 100, 1, 65.182465, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1626, 0.907896, 0, 9999, -9999, 1.0, 100, 1, 11.878862, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1627, 0.828216, 0, 9999, -9999, 1.0, 100, 1, 10.196496, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1628, 3.64562, 0, 9999, -9999, 1.0, 100, 1, 66.613993, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1629, 3.996364, 0, 9999, -9999, 1.0, 100, 1, 121.671047, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1630, 0.97886, 0, 9999, -9999, 1.0, 100, 1, 12.452584, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1631, 1.229738, 0, 9999, -9999, 1.0, 100, 1, 32.486249, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1632, 1.735442, 0, 9999, -9999, 1.0, 100, 1, 25.874893, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1633, 1.043532, 0, 9999, -9999, 1.0, 100, 1, 67.433329, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1634, 0.770553, 0, 9999, -9999, 1.0, 100, 1, 9.643044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1635, 1.42036, 0, 9999, -9999, 1.0, 100, 1, 19.166135, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1636, 0.484297, 0, 9999, -9999, 1.0, 100, 1, 25.181406, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1637, 0.890327, 0, 9999, -9999, 1.0, 100, 1, 29.114828, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1638, 0.393448, 0, 9999, -9999, 1.0, 100, 1, 12.162188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1639, 0.529161, 0, 9999, -9999, 1.0, 100, 1, 29.183593, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1640, 0.055855, 0, 9999, -9999, 1.0, 100, 1, 2.237652, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1641, 0.128633, 0, 9999, -9999, 1.0, 100, 1, 5.023705, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1642, 0.300365, 0, 9999, -9999, 1.0, 100, 1, 11.730623, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1643, 0.0778, 0, 9999, -9999, 1.0, 100, 1, 3.417684, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1644, 0.519067, 0, 9999, -9999, 1.0, 100, 1, 11.76596, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1645, 0.212854, 0, 9999, -9999, 1.0, 100, 1, 11.144882, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1646, 0.08389, 0, 9999, -9999, 1.0, 100, 1, 3.73271, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1647, 0.49549, 0, 9999, -9999, 1.0, 100, 1, 17.434827, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1648, 51.620123, 0, 9999, -9999, 1.0, 100, 1, 109.345623, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1649, 1.143986, 0, 9999, -9999, 1.0, 100, 1, 23.481556, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1650, 68.504496, 0, 9999, -9999, 1.0, 100, 1, 176.928964, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1651, 25.884619, 0, 9999, -9999, 1.0, 100, 1, 161.276649, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1652, 22.304037, 0, 9999, -9999, 1.0, 100, 1, 84.070562, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1653, 5.825901, 0, 9999, -9999, 1.0, 100, 1, 18.431241, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1654, 5.458977, 0, 9999, -9999, 1.0, 100, 1, 47.53021, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1655, 0.218497, 0, 9999, -9999, 1.0, 100, 1, 10.79071, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1656, 0.047498, 0, 9999, -9999, 1.0, 100, 1, 2.680105, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1657, 0.095463, 0, 9999, -9999, 1.0, 100, 1, 5.6313, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1658, 0.045291, 0, 9999, -9999, 1.0, 100, 1, 1.879381, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1659, 17.538243, 0, 9999, -9999, 1.0, 100, 1, 91.77667, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1660, 12.937488, 0, 9999, -9999, 1.0, 100, 1, 186.942171, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1661, 31.605385, 0, 9999, -9999, 1.0, 100, 1, 138.604087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1662, 0.063493, 0, 9999, -9999, 1.0, 100, 1, 3.040325, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1663, 0.024501, 0, 9999, -9999, 1.0, 100, 1, 1.600649, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1664, 0.036775, 0, 9999, -9999, 1.0, 100, 1, 1.578207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1665, 0.738544, 0, 9999, -9999, 1.0, 100, 1, 48.659717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1666, 0.10553, 0, 9999, -9999, 1.0, 100, 1, 2.877877, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1667, 0.158158, 0, 9999, -9999, 1.0, 100, 1, 5.227282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1668, 0.093074, 0, 9999, -9999, 1.0, 100, 1, 3.927043, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1669, 0.940983, 0, 9999, -9999, 1.0, 100, 1, 72.677935, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1670, 1.496978, 0, 9999, -9999, 1.0, 100, 1, 111.043025, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1671, 2.781499, 0, 9999, -9999, 1.0, 100, 1, 62.404971, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1672, 0.388881, 0, 9999, -9999, 1.0, 100, 1, 10.579925, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1673, 0.334706, 0, 9999, -9999, 1.0, 100, 1, 4.091034, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1674, 1.005445, 0, 9999, -9999, 1.0, 100, 1, 47.970381, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1675, 0.90703, 0, 9999, -9999, 1.0, 100, 1, 31.233663, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1676, 1.387516, 0, 9999, -9999, 1.0, 100, 1, 83.173368, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1677, 0.214899, 0, 9999, -9999, 1.0, 100, 1, 13.887293, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1678, 1.315679, 0, 9999, -9999, 1.0, 100, 1, 226.804108, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1679, 0.418866, 0, 9999, -9999, 1.0, 100, 1, 71.380413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1680, 1.040782, 0, 9999, -9999, 1.0, 100, 1, 52.148102, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1681, 0.272268, 0, 9999, -9999, 1.0, 100, 1, 17.30062, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1682, 0.618993, 0, 9999, -9999, 1.0, 100, 1, 39.892468, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1683, 0.37783, 0, 9999, -9999, 1.0, 100, 1, 9.189765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1684, 16.720062, 0, 9999, -9999, 1.0, 100, 1, 40.575646, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1685, 38.280956, 0, 9999, -9999, 1.0, 100, 1, 74.922434, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1686, 1.592396, 0, 9999, -9999, 1.0, 100, 1, 81.035483, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1687, 1.448229, 0, 9999, -9999, 1.0, 100, 1, 112.01808, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1688, 0.25044, 0, 9999, -9999, 1.0, 100, 1, 18.158729, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1689, 2.728973, 0, 9999, -9999, 1.0, 100, 1, 116.696894, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1690, 1.881404, 0, 9999, -9999, 1.0, 100, 1, 116.477465, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1691, 1.937312, 0, 9999, -9999, 1.0, 100, 1, 228.38653, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1692, 0.360216, 0, 9999, -9999, 1.0, 100, 1, 26.501573, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1693, 6.045706, 0, 9999, -9999, 1.0, 100, 1, 86.236575, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1694, 0.838517, 0, 9999, -9999, 1.0, 100, 1, 53.656832, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1695, 0.366512, 0, 9999, -9999, 1.0, 100, 1, 23.132774, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1696, 0.676037, 0, 9999, -9999, 1.0, 100, 1, 53.34209, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1697, 73.968329, 0, 9999, -9999, 1.0, 100, 1, 136.821485, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1698, 7.947772, 0, 9999, -9999, 1.0, 100, 1, 25.60631, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1699, 0.032287, 0, 9999, -9999, 1.0, 100, 1, 5.356106, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1700, 0.345167, 0, 9999, -9999, 1.0, 100, 1, 55.825815, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1701, 0.33727, 0, 9999, -9999, 1.0, 100, 1, 37.297196, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1702, 1.288316, 0, 9999, -9999, 1.0, 100, 1, 25.149806, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1703, 2.47381, 0, 9999, -9999, 1.0, 100, 1, 48.587768, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1704, 5.787415, 0, 9999, -9999, 1.0, 100, 1, 127.647586, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1705, 2.86247, 0, 9999, -9999, 1.0, 100, 1, 52.051788, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1706, 0.421435, 0, 9999, -9999, 1.0, 100, 1, 6.76178, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1707, 0.423471, 0, 9999, -9999, 1.0, 100, 1, 11.7078, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1708, 1.09922, 0, 9999, -9999, 1.0, 100, 1, 26.288692, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1709, 4.063842, 0, 9999, -9999, 1.0, 100, 1, 226.257418, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1710, 3.872336, 0, 9999, -9999, 1.0, 100, 1, 183.631947, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1711, 0.031912, 0, 9999, -9999, 1.0, 100, 1, 7.213854, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1712, 1.519606, 0, 9999, -9999, 1.0, 100, 1, 75.638853, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1713, 1.926968, 0, 9999, -9999, 1.0, 100, 1, 90.775073, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1714, 0.691647, 0, 9999, -9999, 1.0, 100, 1, 42.312538, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1715, 4.380165, 0, 9999, -9999, 1.0, 100, 1, 155.279397, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1716, 99.103248, 0, 9999, -9999, 1.0, 100, 1, 156.979012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1717, 1.370715, 0, 9999, -9999, 1.0, 100, 1, 82.928251, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1718, 189.035332, 0, 9999, -9999, 1.0, 100, 1, 301.614349, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1719, 0.996406, 0, 9999, -9999, 1.0, 100, 1, 19.488967, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1720, 2.459531, 0, 9999, -9999, 1.0, 100, 1, 54.067169, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1721, 1.395162, 0, 9999, -9999, 1.0, 100, 1, 82.151947, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1722, 0.307342, 0, 9999, -9999, 1.0, 100, 1, 21.329566, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1723, 1.879056, 0, 9999, -9999, 1.0, 100, 1, 2.855273, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1724, 23.913688, 0, 9999, -9999, 1.0, 100, 1, 36.268783, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1725, 3.302072, 0, 9999, -9999, 1.0, 100, 1, 55.750844, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1726, 4.692439, 0, 9999, -9999, 1.0, 100, 1, 84.308501, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1727, 0.009857, 0, 9999, -9999, 1.0, 100, 1, 0.456443, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1728, 1.500178, 0, 9999, -9999, 1.0, 100, 1, 65.283314, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1729, 9.626622, 0, 9999, -9999, 1.0, 100, 1, 220.758669, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1730, 2.579093, 0, 9999, -9999, 1.0, 100, 1, 51.367164, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1731, 5.370488, 0, 9999, -9999, 1.0, 100, 1, 151.90213, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1732, 4.730721, 0, 9999, -9999, 1.0, 100, 1, 383.858473, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1733, 1.601396, 0, 9999, -9999, 1.0, 100, 1, 60.655652, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1734, 0.994327, 0, 9999, -9999, 1.0, 100, 1, 77.375277, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1735, 5.493087, 0, 9999, -9999, 1.0, 100, 1, 153.887449, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1736, 1.217485, 0, 9999, -9999, 1.0, 100, 1, 89.439426, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1737, 13.67404, 0, 9999, -9999, 1.0, 100, 1, 194.473407, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1738, 6.79528, 0, 9999, -9999, 1.0, 100, 1, 116.049526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1739, 1.628928, 0, 9999, -9999, 1.0, 100, 1, 33.525947, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1740, 3.170471, 0, 9999, -9999, 1.0, 100, 1, 66.638954, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1741, 0.703631, 0, 9999, -9999, 1.0, 100, 1, 35.869318, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1742, 0.41138, 0, 9999, -9999, 1.0, 100, 1, 25.619162, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1743, 0.014153, 0, 9999, -9999, 1.0, 100, 1, 0.986841, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1744, 0.06008, 0, 9999, -9999, 1.0, 100, 1, 3.775325, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1745, 0.52858, 0, 9999, -9999, 1.0, 100, 1, 31.215591, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1746, 2.317817, 0, 9999, -9999, 1.0, 100, 1, 172.123236, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1747, 0.45041, 0, 9999, -9999, 1.0, 100, 1, 25.963706, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1748, 1.875782, 0, 9999, -9999, 1.0, 100, 1, 67.219313, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1749, 5.661322, 0, 9999, -9999, 1.0, 100, 1, 218.703564, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1750, 0.722982, 0, 9999, -9999, 1.0, 100, 1, 22.191848, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1751, 0.570436, 0, 9999, -9999, 1.0, 100, 1, 18.416283, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1752, 2.485541, 0, 9999, -9999, 1.0, 100, 1, 136.190504, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1753, 2.307659, 0, 9999, -9999, 1.0, 100, 1, 79.270006, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1754, 9.096135, 0, 9999, -9999, 1.0, 100, 1, 408.37422, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1755, 1.808269, 0, 9999, -9999, 1.0, 100, 1, 46.277001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1756, 1.755721, 0, 9999, -9999, 1.0, 100, 1, 93.807787, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1757, 13.59206, 0, 9999, -9999, 1.0, 100, 1, 197.08743, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1758, 4.309907, 0, 9999, -9999, 1.0, 100, 1, 311.473267, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1759, 4.837918, 0, 9999, -9999, 1.0, 100, 1, 156.546089, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1760, 2.229657, 0, 9999, -9999, 1.0, 100, 1, 114.687411, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1761, 1.4435, 0, 9999, -9999, 1.0, 100, 1, 48.443946, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1762, 4.898546, 0, 9999, -9999, 1.0, 100, 1, 107.077622, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1763, 5.490835, 0, 9999, -9999, 1.0, 100, 1, 90.136674, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1764, 1.223566, 0, 9999, -9999, 1.0, 100, 1, 21.994769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1765, 7.971301, 0, 9999, -9999, 1.0, 100, 1, 112.249863, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1766, 9.468566, 0, 9999, -9999, 1.0, 100, 1, 99.811208, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1767, 48.00237, 0, 9999, -9999, 1.0, 100, 1, 95.5909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1768, 55.735285, 0, 9999, -9999, 1.0, 100, 1, 159.818572, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1769, 21.168997, 0, 9999, -9999, 1.0, 100, 1, 235.581664, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1770, 252.472611, 0, 9999, -9999, 1.0, 100, 1, 479.248156, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1771, 171.272253, 0, 9999, -9999, 1.0, 100, 1, 276.640075, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1772, 5.981185, 0, 9999, -9999, 1.0, 100, 1, 272.215345, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1773, 31.853074, 0, 9999, -9999, 1.0, 100, 1, 533.823159, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1774, 1.38998, 0, 9999, -9999, 1.0, 100, 1, 88.57714, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1775, 3.602189, 0, 9999, -9999, 1.0, 100, 1, 197.787397, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1776, 3.86406, 0, 9999, -9999, 1.0, 100, 1, 111.203656, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1777, 4.186652, 0, 9999, -9999, 1.0, 100, 1, 199.457983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1778, 2.885068, 0, 9999, -9999, 1.0, 100, 1, 80.070627, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1779, 6.121667, 0, 9999, -9999, 1.0, 100, 1, 78.485044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1780, 4.042606, 0, 9999, -9999, 1.0, 100, 1, 97.872974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1781, 3.124553, 0, 9999, -9999, 1.0, 100, 1, 7.067063, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1782, 4.836581, 0, 9999, -9999, 1.0, 100, 1, 9.94901, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1783, 5.154731, 0, 9999, -9999, 1.0, 100, 1, 10.739092, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1784, 2.922371, 0, 9999, -9999, 1.0, 100, 1, 240.920274, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1785, 3.064711, 0, 9999, -9999, 1.0, 100, 1, 275.41262, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1786, 15.899962, 0, 9999, -9999, 1.0, 100, 1, 195.868213, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1787, 65.367372, 0, 9999, -9999, 1.0, 100, 1, 123.060646, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1788, 0.117389, 0, 9999, -9999, 1.0, 100, 1, 9.486282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1789, 0.289917, 0, 9999, -9999, 1.0, 100, 1, 24.05804, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1790, 0.010999, 0, 9999, -9999, 1.0, 100, 1, 1.412167, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1791, 0.007829, 0, 9999, -9999, 1.0, 100, 1, 1.171034, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1792, 0.044079, 0, 9999, -9999, 1.0, 100, 1, 8.914306, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1793, 0.236603, 0, 9999, -9999, 1.0, 100, 1, 41.722817, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1794, 0.20779, 0, 9999, -9999, 1.0, 100, 1, 6.617641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1795, 0.266407, 0, 9999, -9999, 1.0, 100, 1, 3.33586, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1796, 4.643687, 0, 9999, -9999, 1.0, 100, 1, 10.434523, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1797, 1.892799, 0, 9999, -9999, 1.0, 100, 1, 63.411765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1798, 0.404733, 0, 9999, -9999, 1.0, 100, 1, 14.835758, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1799, 6.065791, 0, 9999, -9999, 1.0, 100, 1, 51.10225, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1800, 12.893851, 0, 9999, -9999, 1.0, 100, 1, 79.286766, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1801, 0.096655, 0, 9999, -9999, 1.0, 100, 1, 21.006749, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1802, 0.050346, 0, 9999, -9999, 1.0, 100, 1, 11.305192, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1803, 0.067486, 0, 9999, -9999, 1.0, 100, 1, 15.182571, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1804, 8.857977, 0, 9999, -9999, 1.0, 100, 1, 399.133201, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1805, 0.372681, 0, 9999, -9999, 1.0, 100, 1, 23.20491, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1806, 0.645338, 0, 9999, -9999, 1.0, 100, 1, 21.469357, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1807, 0.476964, 0, 9999, -9999, 1.0, 100, 1, 28.156483, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1808, 2.263578, 0, 9999, -9999, 1.0, 100, 1, 118.262712, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1809, 0.706651, 0, 9999, -9999, 1.0, 100, 1, 33.031228, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1810, 1.838324, 0, 9999, -9999, 1.0, 100, 1, 74.139408, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1811, 0.934047, 0, 9999, -9999, 1.0, 100, 1, 53.408299, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1812, 0.847076, 0, 9999, -9999, 1.0, 100, 1, 47.34526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1813, 5.040034, 0, 9999, -9999, 1.0, 100, 1, 180.894957, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1814, 1.305803, 0, 9999, -9999, 1.0, 100, 1, 62.572642, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1815, 1.125706, 0, 9999, -9999, 1.0, 100, 1, 61.953143, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1816, 0.526674, 0, 9999, -9999, 1.0, 100, 1, 30.445169, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1817, 6.103422, 0, 9999, -9999, 1.0, 100, 1, 280.614897, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1818, 2.278102, 0, 9999, -9999, 1.0, 100, 1, 173.515675, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1819, 0.043942, 0, 9999, -9999, 1.0, 100, 1, 1.538348, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1820, 1.0414, 0, 9999, -9999, 1.0, 100, 1, 79.71358, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1821, 2.208855, 0, 9999, -9999, 1.0, 100, 1, 196.67938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1822, 98.239685, 0, 9999, -9999, 1.0, 100, 1, 170.831584, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1823, 4.830701, 0, 9999, -9999, 1.0, 100, 1, 131.456153, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1824, 2.976789, 0, 9999, -9999, 1.0, 100, 1, 56.565054, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1825, 49.61097, 0, 9999, -9999, 1.0, 100, 1, 81.59195, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1826, 2.40722, 0, 9999, -9999, 1.0, 100, 1, 74.101252, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1827, 0.690669, 0, 9999, -9999, 1.0, 100, 1, 30.303552, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1828, 27.146571, 0, 9999, -9999, 1.0, 100, 1, 43.298921, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1829, 37.866018, 0, 9999, -9999, 1.0, 100, 1, 69.263255, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1830, 2.915109, 0, 9999, -9999, 1.0, 100, 1, 27.724768, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1831, 39.925327, 0, 9999, -9999, 1.0, 100, 1, 69.89001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1832, 0.828831, 0, 9999, -9999, 1.0, 100, 1, 26.560625, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1833, 1.109798, 0, 9999, -9999, 1.0, 100, 1, 81.361962, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1834, 2.554402, 0, 9999, -9999, 1.0, 100, 1, 102.529569, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1836, 0.477418, 0, 9999, -9999, 1.0, 100, 1, 6.417969, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1837, 4.200009, 0, 9999, -9999, 1.0, 100, 1, 12.629331, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1838, 1.443062, 0, 9999, -9999, 1.0, 100, 1, 25.580913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1839, 28.228214, 0, 9999, -9999, 1.0, 100, 1, 183.749133, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1840, 10.988953, 0, 9999, -9999, 1.0, 100, 1, 132.975197, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1841, 0.340284, 0, 9999, -9999, 1.0, 100, 1, 22.982632, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1842, 1.417646, 0, 9999, -9999, 1.0, 100, 1, 7.468633, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1843, 0.588474, 0, 9999, -9999, 1.0, 100, 1, 19.264686, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1844, 0.345625, 0, 9999, -9999, 1.0, 100, 1, 32.384294, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1845, 0.373692, 0, 9999, -9999, 1.0, 100, 1, 31.436002, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1846, 0.117694, 0, 9999, -9999, 1.0, 100, 1, 3.74984, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1847, 6.98851, 0, 9999, -9999, 1.0, 100, 1, 120.215574, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1848, 0.671868, 0, 9999, -9999, 1.0, 100, 1, 9.514696, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1849, 1.591079, 0, 9999, -9999, 1.0, 100, 1, 37.619097, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1850, 3.459291, 0, 9999, -9999, 1.0, 100, 1, 48.54058, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1851, 5.355057, 0, 9999, -9999, 1.0, 100, 1, 7.956444, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1852, 26.334441, 0, 9999, -9999, 1.0, 100, 1, 37.606916, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1853, 21.05905, 0, 9999, -9999, 1.0, 100, 1, 30.116711, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1854, 1.087784, 0, 9999, -9999, 1.0, 100, 1, 2.241167, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1855, 4.821441, 0, 9999, -9999, 1.0, 100, 1, 121.687485, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1856, 16.158296, 0, 9999, -9999, 1.0, 100, 1, 63.654358, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1857, 1.392575, 0, 9999, -9999, 1.0, 100, 1, 41.229597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1858, 0.962874, 0, 9999, -9999, 1.0, 100, 1, 27.374415, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1860, 5.321111, 0, 9999, -9999, 1.0, 100, 1, 84.163604, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1861, 1.232397, 0, 9999, -9999, 1.0, 100, 1, 26.861144, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1862, 0.420971, 0, 9999, -9999, 1.0, 100, 1, 32.512826, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1863, 0.38232, 0, 9999, -9999, 1.0, 100, 1, 30.063729, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1864, 1.848854, 0, 9999, -9999, 1.0, 100, 1, 138.236316, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1865, 26.719416, 0, 9999, -9999, 1.0, 100, 1, 68.097772, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1866, 37.73908, 0, 9999, -9999, 1.0, 100, 1, 98.289141, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1867, 0.07468, 0, 9999, -9999, 1.0, 100, 1, 2.041288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1868, 0.184336, 0, 9999, -9999, 1.0, 100, 1, 6.453374, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1869, 0.097593, 0, 9999, -9999, 1.0, 100, 1, 2.759448, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1870, 0.859649, 0, 9999, -9999, 1.0, 100, 1, 54.564665, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1871, 1.592185, 0, 9999, -9999, 1.0, 100, 1, 52.648444, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1872, 0.137763, 0, 9999, -9999, 1.0, 100, 1, 1.683854, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1873, 0.231084, 0, 9999, -9999, 1.0, 100, 1, 9.025283, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1874, 0.083646, 0, 9999, -9999, 1.0, 100, 1, 3.554415, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1875, 0.158111, 0, 9999, -9999, 1.0, 100, 1, 7.837576, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1876, 0.141013, 0, 9999, -9999, 1.0, 100, 1, 4.936672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1877, 0.032441, 0, 9999, -9999, 1.0, 100, 1, 1.135717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1878, 0.168939, 0, 9999, -9999, 1.0, 100, 1, 8.374329, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1879, 0.048728, 0, 9999, -9999, 1.0, 100, 1, 1.752881, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1880, 0.763602, 0, 9999, -9999, 1.0, 100, 1, 38.46747, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1881, 0.30875, 0, 9999, -9999, 1.0, 100, 1, 4.535799, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1882, 0.374878, 0, 9999, -9999, 1.0, 100, 1, 5.120641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1883, 0.501411, 0, 9999, -9999, 1.0, 100, 1, 6.940957, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1884, 0.420718, 0, 9999, -9999, 1.0, 100, 1, 5.865468, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1885, 0.774015, 0, 9999, -9999, 1.0, 100, 1, 47.510175, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1886, 0.082618, 0, 9999, -9999, 1.0, 100, 1, 5.255398, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1887, 0.584546, 0, 9999, -9999, 1.0, 100, 1, 16.937671, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1888, 0.279655, 0, 9999, -9999, 1.0, 100, 1, 4.141211, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1889, 2.215842, 0, 9999, -9999, 1.0, 100, 1, 91.335184, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1890, 0.651391, 0, 9999, -9999, 1.0, 100, 1, 24.842697, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1891, 0.495423, 0, 9999, -9999, 1.0, 100, 1, 30.836318, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1892, 0.592029, 0, 9999, -9999, 1.0, 100, 1, 38.14699, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1893, 0.992301, 0, 9999, -9999, 1.0, 100, 1, 46.5682, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1894, 0.671605, 0, 9999, -9999, 1.0, 100, 1, 31.347572, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1895, 0.005762, 0, 9999, -9999, 1.0, 100, 1, 0.140628, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1896, 0.578794, 0, 9999, -9999, 1.0, 100, 1, 45.257234, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1897, 0.22732, 0, 9999, -9999, 1.0, 100, 1, 14.824595, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1898, 0.253484, 0, 9999, -9999, 1.0, 100, 1, 18.270499, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1899, 0.15769, 0, 9999, -9999, 1.0, 100, 1, 12.000496, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1900, 49.440108, 0, 9999, -9999, 1.0, 100, 1, 78.114509, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1901, 85.852576, 0, 9999, -9999, 1.0, 100, 1, 133.539659, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1902, 144.692709, 0, 9999, -9999, 1.0, 100, 1, 281.819662, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1903, 38.213684, 0, 9999, -9999, 1.0, 100, 1, 135.492385, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1904, 49.601, 0, 9999, -9999, 1.0, 100, 1, 79.184428, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1905, 0.245402, 0, 9999, -9999, 1.0, 100, 1, 9.160607, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1906, 1.441792, 0, 9999, -9999, 1.0, 100, 1, 72.356523, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1907, 0.557731, 0, 9999, -9999, 1.0, 100, 1, 28.893637, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1908, 0.972014, 0, 9999, -9999, 1.0, 100, 1, 50.477866, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1909, 2.006953, 0, 9999, -9999, 0.99951, 100, 1, 32.874676, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1910, 1.289808, 0, 9999, -9999, 1.0, 100, 1, 20.259486, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1911, 0.514865, 0, 9999, -9999, 1.0, 100, 1, 8.189799, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1912, 69.733436, 0, 9999, -9999, 1.0, 100, 1, 101.236915, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1913, 0.109472, 0, 9999, -9999, 1.0, 100, 1, 6.782522, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1914, 0.280751, 0, 9999, -9999, 1.0, 100, 1, 15.944561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1915, 57.319413, 0, 9999, -9999, 1.0, 100, 1, 159.570248, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1916, 99.107497, 0, 9999, -9999, 1.0, 100, 1, 277.793548, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1917, 42.116008, 0, 9999, -9999, 1.0, 100, 1, 186.387377, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1918, 58.749074, 0, 9999, -9999, 1.0, 100, 1, 120.486097, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1919, 28.497622, 0, 9999, -9999, 1.0, 100, 1, 61.1613, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1920, 1.811743, 0, 9999, -9999, 1.0, 100, 1, 9.95472, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1921, 145.712044, 0, 9999, -9999, 1.0, 100, 1, 230.400935, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1922, 45.36466, 0, 9999, -9999, 1.0, 100, 1, 66.116137, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1923, 9.238607, 0, 9999, -9999, 1.0, 100, 1, 21.836163, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1924, 5.019655, 0, 9999, -9999, 1.0, 100, 1, 36.518326, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1925, 5.170419, 0, 9999, -9999, 1.0, 100, 1, 135.324361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1926, 3.340663, 0, 9999, -9999, 1.0, 100, 1, 96.610178, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1927, 23.399289, 0, 9999, -9999, 1.0, 100, 1, 65.668809, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1928, 0.747036, 0, 9999, -9999, 1.0, 100, 1, 1.509884, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1929, 0.180301, 0, 9999, -9999, 1.0, 100, 1, 4.804832, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1930, 0.214601, 0, 9999, -9999, 1.0, 100, 1, 11.004973, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1931, 0.663788, 0, 9999, -9999, 1.0, 100, 1, 38.07556, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1932, 1.83202, 0, 9999, -9999, 1.0, 100, 1, 46.722379, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1933, 0.735851, 0, 9999, -9999, 1.0, 100, 1, 44.239188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1934, 47.829223, 0, 9999, -9999, 1.0, 100, 1, 383.418198, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1935, 3.280962, 0, 9999, -9999, 1.0, 100, 1, 62.335643, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1936, 0.079477, 0, 9999, -9999, 1.0, 100, 1, 6.00797, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1937, 2.133855, 0, 9999, -9999, 1.0, 100, 1, 134.605733, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1938, 1.44698, 0, 9999, -9999, 1.0, 100, 1, 89.425619, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1939, 1.447635, 0, 9999, -9999, 1.0, 100, 1, 103.003683, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1940, 0.249661, 0, 9999, -9999, 1.0, 100, 1, 18.980829, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1941, 0.521998, 0, 9999, -9999, 1.0, 100, 1, 104.495097, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1942, 0.789037, 0, 9999, -9999, 1.0, 100, 1, 70.75487, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1943, 0.083093, 0, 9999, -9999, 1.0, 100, 1, 3.652558, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1944, 1.445543, 0, 9999, -9999, 1.0, 100, 1, 93.133765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1945, 0.304251, 0, 9999, -9999, 1.0, 100, 1, 10.651443, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1946, 0.037403, 0, 9999, -9999, 1.0, 100, 1, 1.309439, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1947, 1.219744, 0, 9999, -9999, 1.0, 100, 1, 17.996246, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1948, 4.586959, 0, 9999, -9999, 1.0, 100, 1, 83.075413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1949, 0.82436, 0, 9999, -9999, 1.0, 100, 1, 10.193229, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1950, 0.070892, 0, 9999, -9999, 1.0, 100, 1, 0.866493, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1951, 0.63205, 0, 9999, -9999, 1.0, 100, 1, 7.917597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1952, 3.277791, 0, 9999, -9999, 1.0, 100, 1, 67.723951, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1953, 0.21067, 0, 9999, -9999, 1.0, 100, 1, 8.928556, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1954, 0.230766, 0, 9999, -9999, 1.0, 100, 1, 12.726892, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1955, 0.181558, 0, 9999, -9999, 1.0, 100, 1, 6.625255, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1956, 2.572929, 0, 9999, -9999, 1.0, 100, 1, 38.724888, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1957, 3.910752, 0, 9999, -9999, 1.0, 100, 1, 131.682322, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1958, 0.89549, 0, 9999, -9999, 1.0, 100, 1, 59.791759, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1959, 3.736043, 0, 9999, -9999, 1.0, 100, 1, 35.986928, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1960, 0.47403, 0, 9999, -9999, 1.0, 100, 1, 13.579895, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1961, 0.360769, 0, 9999, -9999, 1.0, 100, 1, 17.841481, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1962, 0.056937, 0, 9999, -9999, 1.0, 100, 1, 3.150179, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1963, 0.011195, 0, 9999, -9999, 1.0, 100, 1, 0.73138, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1964, 1.912109, 0, 9999, -9999, 1.0, 100, 1, 66.594121, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1965, 0.412755, 0, 9999, -9999, 1.0, 100, 1, 18.785491, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1966, 0.856742, 0, 9999, -9999, 1.0, 100, 1, 2.674199, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1967, 4.700675, 0, 9999, -9999, 1.0, 100, 1, 99.074235, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1968, 20.406765, 0, 9999, -9999, 1.0, 100, 1, 201.733891, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1969, 0.416455, 0, 9999, -9999, 1.0, 100, 1, 15.048118, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1970, 145.974713, 0, 9999, -9999, 1.0, 100, 1, 236.871781, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1971, 0.435823, 0, 9999, -9999, 1.0, 100, 1, 14.404409, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1972, 0.001026, 0, 9999, -9999, 1.0, 100, 1, 0.028378, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1973, 0.01934, 0, 9999, -9999, 1.0, 100, 1, 0.534696, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1974, 0.0995, 0, 9999, -9999, 1.0, 100, 1, 2.750907, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1975, 3.231276, 0, 9999, -9999, 1.0, 100, 1, 81.92918, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1976, 1.378981, 0, 9999, -9999, 1.0, 100, 1, 2.17499, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1977, 65.42762, 0, 9999, -9999, 1.0, 100, 1, 226.383637, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1978, 0.106404, 0, 9999, -9999, 1.0, 100, 1, 1.331592, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1979, 133.220566, 0, 9999, -9999, 1.0, 100, 1, 189.722792, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1980, 6.868705, 0, 9999, -9999, 1.0, 100, 1, 100.61941, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1981, 7.688742, 0, 9999, -9999, 1.0, 100, 1, 144.682717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1982, 5.752632, 0, 9999, -9999, 1.0, 100, 1, 134.93778, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1983, 3.530567, 0, 9999, -9999, 1.0, 100, 1, 155.990147, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1984, 1.936985, 0, 9999, -9999, 1.0, 100, 1, 94.470611, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1985, 1.330237, 0, 9999, -9999, 1.0, 100, 1, 41.975835, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1986, 5.765495, 0, 9999, -9999, 1.0, 100, 1, 298.346979, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1987, 5.389422, 0, 9999, -9999, 1.0, 100, 1, 393.914067, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1988, 33.80903, 0, 9999, -9999, 1.0, 100, 1, 251.944939, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1989, 6.748426, 0, 9999, -9999, 1.0, 100, 1, 10.378288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1990, 1.381387, 0, 9999, -9999, 1.0, 100, 1, 50.351426, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1991, 47.912587, 0, 9999, -9999, 1.0, 100, 1, 849.576944, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1992, 6.27345, 0, 9999, -9999, 1.0, 100, 1, 233.477991, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1993, 9.719656, 0, 9999, -9999, 1.0, 100, 1, 242.698643, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1994, 5.08751, 0, 9999, -9999, 1.0, 100, 1, 255.834576, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1995, 4.092824, 0, 9999, -9999, 1.0, 100, 1, 262.446698, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1996, 1.534479, 0, 9999, -9999, 1.0, 100, 1, 91.306832, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1997, 0.151788, 0, 9999, -9999, 1.0, 100, 1, 26.592561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1998, 7.104695, 0, 9999, -9999, 1.0, 100, 1, 12.126511, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1999, 4.534769, 0, 9999, -9999, 1.0, 100, 1, 199.184531, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2000, 7.544127, 0, 9999, -9999, 1.0, 100, 1, 579.835051, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2001, 3.950905, 0, 9999, -9999, 1.0, 100, 1, 122.315703, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2002, 1.721932, 0, 9999, -9999, 1.0, 100, 1, 30.606436, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2003, 14.962198, 0, 9999, -9999, 1.0, 100, 1, 23.645071, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2004, 10.900896, 0, 9999, -9999, 1.0, 100, 1, 17.73338, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2005, 2.306607, 0, 9999, -9999, 1.0, 100, 1, 72.071456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2006, 1.851369, 0, 9999, -9999, 1.0, 100, 1, 59.660888, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2007, 0.061806, 0, 9999, -9999, 1.0, 100, 1, 1.681507, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2008, 0.00429, 0, 9999, -9999, 1.0, 100, 1, 0.116706, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
])
ppc["branch"] = array([
[586, 1, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[589, 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[590, 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[593, 112, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[594, 114, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[595, 115, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[597, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[598, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[599, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[600, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[601, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[602, 121, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[603, 526, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[607, 127, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[608, 127, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[609, 529, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[610, 530, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[612, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[613, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[614, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[616, 132, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[617, 133, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[618, 133, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[619, 134, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[621, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[623, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[624, 14, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[628, 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[629, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[631, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[632, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[637, 148, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[638, 149, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[639, 150, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[640, 153, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[641, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[642, 533, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[643, 534, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[646, 536, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[647, 536, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[650, 166, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[652, 167, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[655, 170, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[657, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[658, 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[661, 177, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[662, 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[663, 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[666, 180, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[668, 183, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[670, 183, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[672, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[675, 19, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[676, 19, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[678, 194, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[679, 196, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[681, 197, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[683, 200, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[687, 202, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[689, 204, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[691, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[693, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[694, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[695, 210, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[696, 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[697, 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[698, 212, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[701, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[702, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[704, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[705, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[707, 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[708, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[711, 224, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[713, 225, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[714, 225, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[716, 226, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[717, 227, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[719, 229, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[722, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[723, 235, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[724, 238, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[725, 239, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[727, 243, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[728, 244, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[730, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[731, 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[732, 247, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[733, 549, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[735, 253, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[737, 256, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[738, 258, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[739, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[741, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[742, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[743, 500, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[745, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[746, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[747, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[748, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[749, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[750, 557, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[753, 28, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[758, 286, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[760, 287, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[761, 288, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[762, 289, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[763, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[765, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[767, 292, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[769, 293, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[771, 297, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[772, 3, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[774, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[776, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[777, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[778, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[781, 303, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[784, 563, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[785, 501, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[787, 308, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[788, 311, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[789, 565, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[790, 314, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[791, 314, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[792, 316, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[795, 319, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[798, 324, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[800, 326, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[801, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[802, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[805, 328, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[806, 328, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[808, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[809, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[810, 568, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[811, 568, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[814, 570, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[815, 335, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[816, 335, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[817, 571, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[818, 34, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[821, 338, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[822, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[825, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[826, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[829, 345, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[830, 345, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[833, 348, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[834, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[835, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[836, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[837, 350, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[839, 350, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[840, 573, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[841, 573, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[842, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[843, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[844, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[845, 356, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[847, 36, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[848, 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[849, 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[850, 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[851, 575, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[852, 361, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[853, 362, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[854, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[855, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[856, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[857, 365, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[858, 368, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[859, 368, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[860, 371, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[862, 372, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[863, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[864, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[865, 375, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[867, 376, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[869, 503, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[870, 503, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[872, 378, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[873, 576, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[874, 576, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[875, 381, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[877, 578, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[881, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[882, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[883, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[886, 394, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[889, 397, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[890, 40, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[893, 400, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[894, 400, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[895, 580, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[896, 581, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[898, 403, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[900, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[902, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[903, 406, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[905, 413, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[907, 583, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[909, 417, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[911, 419, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[913, 422, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[914, 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[915, 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[916, 43, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[917, 43, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[918, 424, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[919, 427, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[920, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[921, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[922, 429, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[923, 432, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[925, 44, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[928, 435, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[931, 439, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[934, 45, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[935, 45, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[936, 445, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[937, 447, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[939, 450, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[940, 451, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[942, 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[943, 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[944, 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[945, 459, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[946, 459, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[948, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[950, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[951, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[952, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[956, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[957, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[958, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[959, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[960, 479, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[963, 481, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[965, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[966, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[967, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[968, 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[969, 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[971, 51, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[973, 506, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[976, 58, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[977, 59, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[978, 491, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[980, 508, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[981, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[982, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[983, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[984, 63, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[985, 63, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[986, 64, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[987, 65, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[988, 66, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[990, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[993, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[994, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[995, 509, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[996, 510, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[997, 510, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[998, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[999, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1000, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1002, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1003, 72, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1006, 511, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1007, 511, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1008, 75, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1010, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1011, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1012, 81, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1014, 83, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1018, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1019, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1023, 515, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1025, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1026, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1028, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1029, 268, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1030, 269, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1031, 498, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1032, 1, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1033, 3, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1034, 4, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1035, 6, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1036, 7, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1037, 8, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1038, 9, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1039, 11, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1041, 16, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1042, 17, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1044, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1046, 25, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1047, 27, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1048, 28, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1049, 29, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1050, 31, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1051, 33, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1052, 34, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1053, 35, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1054, 36, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1055, 38, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1056, 39, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1057, 40, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1058, 41, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1059, 43, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1060, 44, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1061, 45, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1062, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1063, 48, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1064, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1065, 50, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1066, 51, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1067, 53, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1068, 54, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1069, 55, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1070, 57, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1071, 58, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1072, 59, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1073, 60, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1074, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1075, 63, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1077, 65, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1078, 66, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1079, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1080, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1081, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1082, 72, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1083, 73, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1084, 75, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1085, 76, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1086, 77, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1087, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1088, 80, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1089, 81, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1090, 82, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1091, 83, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1092, 84, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1093, 85, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1094, 88, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1095, 89, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1096, 90, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1097, 91, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1098, 92, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1099, 93, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1100, 97, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1101, 98, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1102, 101, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1103, 102, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1104, 103, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1105, 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1106, 109, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1107, 110, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1108, 111, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1109, 112, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1110, 113, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1111, 114, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1112, 115, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1113, 116, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1114, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1115, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1116, 121, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1117, 122, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1118, 126, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1119, 127, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1120, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1121, 131, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1122, 132, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1123, 133, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1124, 134, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1125, 135, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1126, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1127, 137, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1128, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1129, 140, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1130, 141, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1131, 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1132, 144, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1133, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1134, 146, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1135, 147, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1136, 148, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1137, 149, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1138, 150, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1139, 151, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1140, 152, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1141, 153, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1142, 154, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1143, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1144, 158, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1145, 161, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1146, 162, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1147, 163, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1148, 164, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1149, 166, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1150, 167, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1151, 168, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1152, 169, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1153, 170, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1154, 171, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1155, 172, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1156, 173, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1157, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1158, 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1159, 176, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1160, 177, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1161, 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1162, 179, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1164, 181, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1166, 183, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1167, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1168, 186, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1169, 187, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1170, 188, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1171, 189, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1172, 190, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1173, 192, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1174, 193, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1175, 194, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1176, 196, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1177, 197, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1178, 198, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1179, 199, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1180, 200, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1181, 202, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1182, 203, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1183, 204, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1184, 205, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1185, 206, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1186, 207, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1187, 208, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1188, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1189, 210, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1190, 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1191, 212, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1192, 213, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1193, 214, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1194, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1195, 216, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1196, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1197, 218, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1198, 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1199, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1200, 222, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1201, 223, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1202, 224, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1203, 225, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1204, 226, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1205, 227, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1206, 228, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1207, 229, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1208, 230, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1209, 234, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1210, 235, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1211, 237, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1212, 238, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1213, 239, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1214, 240, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1215, 241, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1216, 242, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1217, 243, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1218, 244, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1219, 247, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1220, 251, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1221, 252, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1222, 253, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1223, 254, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1224, 255, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1225, 256, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1226, 257, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1227, 258, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1228, 260, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1229, 263, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1230, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1231, 266, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1232, 267, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1233, 268, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1234, 269, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1235, 271, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1236, 272, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1237, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1238, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1239, 275, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1240, 276, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1241, 278, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1242, 281, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1243, 282, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1244, 283, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1245, 284, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1246, 285, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1247, 286, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1248, 287, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1249, 288, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1250, 289, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1251, 291, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1252, 292, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1253, 293, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1254, 294, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1255, 295, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1256, 296, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1257, 297, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1258, 298, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1259, 299, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1260, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1261, 302, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1262, 303, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1263, 304, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1264, 307, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1265, 308, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1266, 309, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1267, 311, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1270, 316, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1271, 317, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1272, 318, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1273, 319, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1274, 321, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1275, 322, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1276, 323, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1277, 324, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1278, 325, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1279, 326, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1280, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1282, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1283, 331, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1284, 333, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1285, 335, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1286, 337, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1287, 338, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1288, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1289, 340, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1290, 341, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1291, 342, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1292, 343, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1293, 344, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1294, 345, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1295, 346, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1296, 347, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1297, 348, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1300, 353, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1301, 354, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1302, 355, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1303, 356, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1304, 357, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1305, 359, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1306, 361, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1307, 362, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1308, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1309, 364, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1310, 365, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1311, 366, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1312, 367, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1313, 368, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1314, 369, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1315, 370, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1316, 371, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1317, 372, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1318, 373, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1319, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1320, 375, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1321, 376, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1322, 377, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1323, 378, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1324, 379, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1325, 381, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1326, 384, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1327, 385, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1328, 386, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1329, 387, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1330, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1331, 390, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1332, 391, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1333, 392, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1334, 393, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1336, 395, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1337, 396, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1338, 397, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1339, 398, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1340, 399, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1341, 400, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1342, 403, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1343, 404, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1344, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1345, 406, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1346, 407, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1348, 410, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1349, 411, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1350, 412, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1351, 413, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1352, 414, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1355, 418, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1356, 419, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1357, 420, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1358, 421, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1359, 422, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1360, 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1361, 424, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1362, 425, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1363, 426, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1364, 427, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1365, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1366, 429, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1367, 430, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1368, 431, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1369, 432, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1370, 433, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1371, 434, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1372, 435, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1373, 436, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1374, 437, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1375, 438, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1376, 439, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1377, 440, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1378, 441, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1379, 442, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1380, 443, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1381, 445, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1382, 446, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1383, 447, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1384, 448, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1385, 449, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1386, 450, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1387, 451, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1388, 453, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1389, 454, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1390, 455, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1391, 456, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1392, 457, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1393, 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1394, 459, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1395, 460, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1396, 461, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1397, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1398, 463, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1399, 464, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1400, 465, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1401, 466, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1402, 467, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1403, 468, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1404, 469, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1405, 470, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1406, 471, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1407, 472, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1408, 473, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1409, 474, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1410, 475, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1411, 476, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1412, 477, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1413, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1414, 479, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1415, 480, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1416, 481, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1417, 482, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1418, 483, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1419, 484, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1421, 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1422, 487, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1423, 488, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1424, 489, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1425, 490, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1426, 491, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1427, 492, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1428, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1431, 496, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1432, 497, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1433, 498, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1434, 499, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1435, 500, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1436, 501, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1437, 502, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1438, 503, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1439, 504, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1440, 505, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1441, 506, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1442, 507, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1443, 508, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1444, 509, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1445, 510, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1446, 511, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1447, 512, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1448, 513, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1449, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1450, 515, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1451, 516, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1452, 517, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1453, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1454, 519, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1455, 520, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1456, 521, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1457, 522, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1458, 523, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1459, 524, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1460, 525, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1461, 526, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1462, 527, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1463, 528, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1464, 529, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1465, 530, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1466, 531, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1467, 532, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1468, 533, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1469, 534, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1470, 535, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1471, 536, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1472, 537, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1473, 538, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1474, 539, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1475, 540, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1476, 541, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1477, 542, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1479, 544, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1480, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1481, 546, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1482, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1483, 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1484, 549, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1485, 550, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1486, 551, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1487, 552, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1488, 554, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1489, 555, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1490, 556, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1491, 557, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1492, 558, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1493, 559, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1494, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1495, 561, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1497, 563, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1498, 564, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1500, 566, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1501, 567, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1502, 568, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1503, 569, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1504, 570, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1505, 571, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1506, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1507, 573, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1508, 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1510, 576, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1511, 577, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1512, 578, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1513, 579, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1514, 580, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1516, 582, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1517, 583, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1518, 584, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1519, 585, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1520, 1, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1521, 3, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1522, 4, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1523, 6, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1524, 7, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1525, 8, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1526, 9, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1527, 11, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1528, 14, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1529, 16, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1530, 17, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1531, 19, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1532, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1534, 25, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1535, 27, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1536, 28, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1537, 29, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1538, 31, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1539, 33, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1540, 34, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1541, 35, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1542, 36, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1543, 38, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1544, 39, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1545, 40, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1546, 41, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1547, 43, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1548, 44, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1549, 45, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1550, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1551, 48, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1552, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1553, 50, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1554, 51, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1555, 53, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1556, 54, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1557, 55, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1558, 57, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1559, 58, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1560, 59, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1561, 60, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1562, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1563, 63, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1564, 64, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1565, 65, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1566, 66, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1567, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1568, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1569, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1570, 72, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1571, 73, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1572, 75, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1573, 76, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1574, 77, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1575, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1576, 80, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1577, 81, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1578, 82, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1579, 83, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1580, 84, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1581, 85, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1582, 88, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1583, 89, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1584, 90, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1585, 91, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1586, 92, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1587, 93, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1588, 97, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1589, 98, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1590, 101, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1591, 102, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1592, 103, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1593, 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1594, 109, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1595, 110, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1596, 111, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1597, 112, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1598, 113, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1599, 114, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1600, 115, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1601, 116, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1602, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1603, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1604, 121, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1605, 122, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1606, 126, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1607, 127, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1608, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1609, 131, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1610, 132, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1611, 133, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1612, 134, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1613, 135, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1614, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1615, 137, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1616, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1617, 140, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1618, 141, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1619, 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1620, 144, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1621, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1622, 146, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1623, 147, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1624, 148, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1625, 149, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1626, 150, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1627, 151, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1628, 152, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1629, 153, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1630, 154, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1631, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1632, 158, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1633, 161, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1634, 162, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1635, 163, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1636, 164, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1637, 166, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1638, 167, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1639, 168, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1640, 169, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1641, 170, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1642, 171, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1643, 172, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1644, 173, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1645, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1646, 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1647, 176, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1648, 177, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1649, 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1650, 179, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1651, 180, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1652, 181, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1653, 182, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1654, 183, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1655, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1656, 186, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1657, 187, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1658, 188, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1659, 189, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1660, 190, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1661, 192, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1662, 193, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1663, 194, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1664, 196, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1665, 197, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1666, 198, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1667, 199, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1668, 200, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1669, 202, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1670, 203, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1671, 204, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1672, 205, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1673, 206, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1674, 207, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1675, 208, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1676, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1677, 210, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1678, 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1679, 212, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1680, 213, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1681, 214, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1682, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1683, 216, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1684, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1685, 218, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1686, 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1687, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1688, 222, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1689, 223, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1690, 224, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1691, 225, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1692, 226, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1693, 227, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1694, 228, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1695, 229, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1696, 230, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1697, 234, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1698, 235, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1699, 237, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1700, 238, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1701, 239, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1702, 240, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1703, 241, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1704, 242, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1705, 243, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1706, 244, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1707, 247, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1708, 251, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1709, 252, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1710, 253, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1711, 254, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1712, 255, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1713, 256, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1714, 257, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1715, 258, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1716, 260, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1717, 263, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1718, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1719, 266, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1720, 267, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1721, 268, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1722, 269, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1723, 271, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1724, 272, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1725, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1726, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1727, 275, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1728, 276, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1729, 278, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1730, 281, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1731, 282, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1732, 283, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1733, 284, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1734, 285, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1735, 286, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1736, 287, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1737, 288, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1738, 289, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1739, 291, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1740, 292, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1741, 293, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1742, 294, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1743, 295, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1744, 296, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1745, 297, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1746, 298, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1747, 299, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1748, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1749, 302, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1750, 303, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1751, 304, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1752, 307, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1753, 308, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1754, 309, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1755, 311, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1756, 312, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1757, 314, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1758, 316, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1759, 317, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1760, 318, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1761, 319, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1762, 321, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1763, 322, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1764, 323, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1765, 324, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1766, 325, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1767, 326, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1768, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1769, 328, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1770, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1771, 331, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1772, 333, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1773, 335, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1774, 337, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1775, 338, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1776, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1777, 340, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1778, 341, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1779, 342, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1780, 343, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1781, 344, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1782, 345, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1783, 346, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1784, 347, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1785, 348, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1786, 350, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1787, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1788, 353, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1789, 354, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1790, 355, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1791, 356, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1792, 357, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1793, 359, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1794, 361, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1795, 362, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1796, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1797, 364, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1798, 365, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1799, 366, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1800, 367, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1801, 368, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1802, 369, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1803, 370, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1804, 371, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1805, 372, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1806, 373, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1807, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1808, 375, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1809, 376, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1810, 377, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1811, 378, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1812, 379, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1813, 381, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1814, 384, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1815, 385, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1816, 386, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1817, 387, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1818, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1819, 390, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1820, 391, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1821, 392, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1822, 393, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1823, 394, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1824, 395, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1825, 396, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1826, 397, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1827, 398, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1828, 399, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1829, 400, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1830, 403, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1831, 404, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1832, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1833, 406, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1834, 407, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1836, 410, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1837, 411, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1838, 412, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1839, 413, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1840, 414, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1841, 416, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1842, 417, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1843, 418, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1844, 419, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1845, 420, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1846, 421, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1847, 422, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1848, 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1849, 424, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1850, 425, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1851, 426, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1852, 427, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1853, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1854, 429, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1855, 430, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1856, 431, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1857, 432, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1858, 433, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1860, 435, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1861, 436, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1862, 437, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1863, 438, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1864, 439, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1865, 440, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1866, 441, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1867, 442, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1868, 443, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1869, 445, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1870, 446, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1871, 447, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1872, 448, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1873, 449, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1874, 450, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1875, 451, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1876, 453, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1877, 454, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1878, 455, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1879, 456, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1880, 457, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1881, 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1882, 459, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1883, 460, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1884, 461, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1885, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1886, 463, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1887, 464, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1888, 465, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1889, 466, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1890, 467, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1891, 468, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1892, 469, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1893, 470, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1894, 471, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1895, 472, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1896, 473, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1897, 474, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1898, 475, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1899, 476, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1900, 477, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1901, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1902, 479, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1903, 480, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1904, 481, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1905, 482, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1906, 483, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1907, 484, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1908, 485, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1909, 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1910, 487, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1911, 488, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1912, 489, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1913, 490, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1914, 491, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1915, 492, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1916, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1917, 494, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1918, 495, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1919, 496, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1920, 497, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1921, 498, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1922, 499, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1923, 500, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1924, 501, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1925, 502, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1926, 503, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1927, 504, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1928, 505, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1929, 506, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1930, 507, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1931, 508, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1932, 509, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1933, 510, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1934, 511, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1935, 512, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1936, 513, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1937, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1938, 515, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1939, 516, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1940, 517, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1941, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1942, 519, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1943, 520, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1944, 521, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1945, 522, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1946, 523, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1947, 524, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1948, 525, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1949, 526, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1950, 527, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1951, 528, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1952, 529, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1953, 530, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1954, 531, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1955, 532, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1956, 533, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1957, 534, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1958, 535, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1959, 536, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1960, 537, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1961, 538, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1962, 539, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1963, 540, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1964, 541, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1965, 542, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1966, 543, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1967, 544, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1968, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1969, 546, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1970, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1971, 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1972, 549, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1973, 550, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1974, 551, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1975, 552, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1976, 553, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1977, 554, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1978, 555, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1979, 556, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1980, 557, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1981, 558, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1982, 559, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1983, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1984, 561, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1985, 562, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1986, 563, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1987, 564, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1988, 565, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1989, 566, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1990, 567, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1991, 568, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1992, 569, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1993, 570, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1994, 571, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1995, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1996, 573, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1997, 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1998, 575, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1999, 576, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[2000, 577, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[2001, 578, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[2002, 579, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[2003, 580, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[2004, 581, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[2005, 582, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[2006, 583, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[2007, 584, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[2008, 585, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360 ],
[1, 490, 0, 0.01433884297520661, 0.151691958358336, 991.0, 991.0, 991.0, 0, 2, 1, -360, 43.375 ],
[3, 4, 0, 0.006291637811634348, 0.903417549506624, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 72.681 ],
[491, 6, 0, 0.011200661157024791, 0.118492839955776, 991.0, 991.0, 991.0, 0, 2, 1, -360, 33.882 ],
[7, 5, 0, 0.005794840720221606, 0.20802058859584005, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 33.471 ],
[8, 9, 0, 0.0024379328254847646, 0.350063268897336, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 28.163 ],
[492, 11, 0, 0.018224793388429753, 0.0482004476327704, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.565 ],
[11, 493, 0, 0.030286942148760328, 0.08010209706571599, 495.0, 495.0, 495.0, 0, 1, 1, -360, 45.809 ],
[492, 493, 0, 0.04521652892561983, 0.11958747011094399, 495.0, 495.0, 495.0, 0, 1, 1, -360, 68.39 ],
[494, 14, 0, 0.012990743801652892, 0.137430291356512, 991.0, 991.0, 991.0, 0, 2, 1, -360, 39.297 ],
[13, 15, 0, 0.007681959833795014, 0.27576354266704156, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 44.371 ],
[16, 5, 0, 0.006275623268698061, 0.22527950450957998, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 36.248000000000005 ],
[17, 18, 0, 0.04623522622347646, 0.9335989000302801, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 200.291 ],
[17, 12, 0, 0.0056020313942728535, 0.113118303398186, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.268 ],
[14, 495, 0, 0.0017957024793388433, 0.018996904156819597, 991.0, 991.0, 991.0, 0, 1, 1, -360, 5.432 ],
[494, 19, 0, 0.010246611570247935, 0.10839986031771602, 991.0, 991.0, 991.0, 0, 1, 1, -360, 30.996 ],
[20, 21, 0, 0.005415685595567867, 0.19440984828307922, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 31.281 ],
[20, 22, 0, 0.0049706544321329645, 0.713737278110032, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 57.42100000000001 ],
[497, 23, 0, 0.002190413223140496, 0.005793146490362, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.313 ],
[23, 499, 0, 0.020799669421487598, 0.22004164444829602, 991.0, 991.0, 991.0, 0, 1, 1, -360, 62.919 ],
[25, 26, 0, 0.00141845567867036, 0.050919084651523595, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 8.193 ],
[25, 22, 0, 0.0035578254847645433, 0.0319293051869808, 856.0, 856.0, 856.0, 0, 1, 1, -360, 10.275 ],
[23, 27, 0, 0.027738181818181818, 0.073361203699828, 495.0, 495.0, 495.0, 0, 1, 1, -360, 41.95399999999999 ],
[28, 23, 0, 0.012841652892561981, 0.0339632611780132, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.423 ],
[8, 21, 0, 0.004948753462603878, 0.17764812836304802, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 28.584 ],
[9, 29, 0, 0.002212863573407202, 0.31774552934092004, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 25.563000000000002 ],
[30, 25, 0, 0.019958795013850415, 0.17911796401827998, 856.0, 856.0, 856.0, 0, 1, 1, -360, 57.641000000000005 ],
[31, 32, 0, 0.0299776084949446, 0.605319030583196, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 129.863 ],
[32, 33, 0, 0.016762234533725762, 0.33846927983213604, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 72.61399999999999 ],
[34, 35, 0, 0.001931900826446281, 0.020437759184893597, 991.0, 991.0, 991.0, 0, 2, 1, -360, 5.843999999999999 ],
[35, 36, 0, 0.0008730578512396695, 0.0092361605077588, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.641 ],
[490, 6, 0, 0.049352066115702475, 0.130525028606764, 495.0, 495.0, 495.0, 0, 1, 1, -360, 74.645 ],
[37, 10, 0, 0.02404639889196676, 0.485553838251812, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 104.169 ],
[10, 38, 0, 0.006848799630657894, 0.13829351176534158, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 29.669 ],
[37, 38, 0, 0.01437834718372576, 1.1613317560186958, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 124.574 ],
[39, 40, 0, 0.04521629732222991, 0.913024308337812, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 195.877 ],
[39, 41, 0, 0.017466989843005543, 0.35269996139852006, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 75.667 ],
[42, 41, 0, 0.031145429362880884, 0.6289001042979919, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 134.922 ],
[18, 42, 0, 0.03439750692520776, 0.6945672650962679, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 149.01 ],
[492, 43, 0, 0.01819173553719008, 0.192452068436848, 991.0, 991.0, 991.0, 0, 2, 1, -360, 55.03 ],
[44, 45, 0, 0.02562314049586777, 0.067767398802972, 495.0, 495.0, 495.0, 0, 1, 1, -360, 38.755 ],
[44, 505, 0, 0.006061487603305785, 0.0160312607980052, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.168 ],
[46, 12, 0, 0.0014741170360110802, 0.2116687641962416, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.029 ],
[47, 48, 0, 0.005344182825484765, 0.01199019212302604, 428.0, 428.0, 428.0, 0, 1, 1, -360, 7.7170000000000005 ],
[49, 50, 0, 0.0019151662049861494, 0.0171874439892256, 856.0, 856.0, 856.0, 0, 1, 1, -360, 5.531000000000001 ],
[31, 33, 0, 0.013475992613088641, 0.27211225959163604, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 58.378 ],
[31, 51, 0, 0.003518611495844875, 0.5052381383693519, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 40.647 ],
[52, 53, 0, 0.010464421745152355, 1.5025884408875438, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 120.885 ],
[52, 54, 0, 0.0076126500461911354, 0.1537174637168, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 32.978 ],
[506, 55, 0, 0.012634380165289257, 0.133660287181212, 991.0, 991.0, 991.0, 0, 1, 1, -360, 38.219 ],
[506, 507, 0, 0.044157355371900825, 0.11678619613628, 495.0, 495.0, 495.0, 0, 1, 1, -360, 66.788 ],
[57, 506, 0, 0.004687272727272727, 0.049587095736244, 991.0, 991.0, 991.0, 0, 1, 1, -360, 14.179 ],
[57, 58, 0, 0.014436363636363634, 0.0381809096340232, 495.0, 495.0, 495.0, 0, 1, 1, -360, 21.835 ],
[58, 506, 0, 0.019797685950413223, 0.052360391943288, 495.0, 495.0, 495.0, 0, 1, 1, -360, 29.944000000000003 ],
[59, 60, 0, 0.019407548476454296, 0.174170863885556, 856.0, 856.0, 856.0, 0, 1, 1, -360, 56.049 ],
[508, 62, 0, 0.051111404958677685, 0.03379452026753001, 248.0, 248.0, 248.0, 0, 1, 1, -360, 38.653 ],
[30, 61, 0, 0.03143698060941828, 0.28212765137935203, 856.0, 856.0, 856.0, 0, 1, 1, -360, 90.79 ],
[63, 506, 0, 0.027457190082644623, 0.072618044249872, 495.0, 495.0, 495.0, 0, 1, 1, -360, 41.528999999999996 ],
[13, 64, 0, 0.0014816481994459833, 0.2127501654814608, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.116 ],
[65, 66, 0, 0.03778185595567867, 0.7629053006222161, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 163.671 ],
[59, 67, 0, 0.0051880193905817175, 0.046559297286324804, 856.0, 856.0, 856.0, 0, 1, 1, -360, 14.982999999999999 ],
[61, 67, 0, 0.012931440443213295, 0.1160517597580644, 856.0, 856.0, 856.0, 0, 1, 1, -360, 37.346 ],
[68, 69, 0, 0.011149584487534626, 0.4002427745096039, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 64.4 ],
[70, 69, 0, 0.009625346260387812, 0.345526355460808, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 55.596000000000004 ],
[71, 72, 0, 0.008878635734072021, 0.318721276477736, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.283 ],
[73, 74, 0, 0.012529547553116345, 0.253001288604392, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 54.278 ],
[37, 75, 0, 0.027459141274238225, 0.5544652029066119, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 118.95299999999999 ],
[72, 75, 0, 0.006688711911357341, 0.240108375006292, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 38.634 ],
[37, 72, 0, 0.036222068328739615, 0.7314094881920841, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 156.914 ],
[76, 77, 0, 0.004683777700831025, 0.6725445900750401, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 54.107 ],
[77, 51, 0, 0.00363183864265928, 0.5214964473447999, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 41.955 ],
[73, 72, 0, 0.025475069252077563, 0.514402082018968, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 110.35799999999999 ],
[18, 40, 0, 0.01302770083102493, 0.26306018504072, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 56.43600000000001 ],
[492, 45, 0, 0.0308703030303719, 0.18370114733484796, 743.0, 743.0, 743.0, 0, 1, 1, -360, 70.03699999999999 ],
[10, 74, 0, 0.030167359187465374, 0.609150547206812, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 130.685 ],
[45, 511, 0, 0.08203371900826446, 0.05424014819960001, 248.0, 248.0, 248.0, 0, 1, 1, -360, 62.038000000000004 ],
[78, 32, 0, 0.013458795013850415, 0.48313777647302397, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 77.738 ],
[79, 80, 0, 0.0038086911357340715, 0.1367226831743568, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 21.999000000000002 ],
[81, 79, 0, 0.010767832409972299, 0.3865388099484561, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 62.195 ],
[34, 82, 0, 0.0015497520661157025, 0.00409874294399768, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.344 ],
[83, 84, 0, 0.00902611570247934, 0.0238720301499152, 495.0, 495.0, 495.0, 0, 1, 1, -360, 13.652000000000001 ],
[83, 499, 0, 0.04179570247933885, 0.0276350398834796, 248.0, 248.0, 248.0, 0, 1, 1, -360, 31.608 ],
[85, 86, 0, 0.00802354570637119, 0.28802563884886, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 46.343999999999994 ],
[87, 86, 0, 0.01904968836565097, 0.683837154069184, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 110.031 ],
[88, 89, 0, 0.00380297520661157, 0.010058007429140002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.752000000000001 ],
[90, 86, 0, 0.012097818559556786, 0.434282055192244, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 69.877 ],
[91, 86, 0, 9.26246537396122e-05, 0.013299992817559201, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 1.07 ],
[86, 92, 0, 0.0001852493074792244, 0.0066499964087796005, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.07 ],
[86, 93, 0, 0.008152181440443215, 0.292643346635492, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 47.086999999999996 ],
[94, 86, 0, 0.012883829639889197, 0.46249792780547194, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 74.417 ],
[86, 95, 0, 0.010421052631578947, 0.37409026526870803, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 60.192 ],
[513, 517, 0, 0.0008733884297520661, 0.0023099144321748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.321 ],
[97, 66, 0, 0.03812777008310249, 0.34217338998058805, 856.0, 856.0, 856.0, 0, 1, 1, -360, 110.113 ],
[42, 98, 0, 0.003091759002770083, 0.44394630230884, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 35.716 ],
[99, 100, 0, 0.016371537396121884, 0.587698093837988, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 94.56200000000001 ],
[42, 101, 0, 0.008165339335180054, 0.29311568282888, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 47.163000000000004 ],
[102, 42, 0, 0.012403047091412742, 0.44523901189173193, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 71.64 ],
[103, 87, 0, 0.007073060941828254, 0.25390556381756, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 40.854 ],
[104, 103, 0, 0.0028852146814404432, 0.1035721403291428, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.665 ],
[105, 87, 0, 0.006406682825484765, 0.22998422159488002, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 37.005 ],
[106, 107, 0, 0.005714219759923823, 0.11538365264216799, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.754 ],
[108, 107, 0, 0.0025427631578947367, 0.09127896939786201, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.687000000000001 ],
[109, 106, 0, 0.003030470914127424, 0.10878648330773438, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 17.504 ],
[110, 111, 0, 0.019821849030470913, 0.7115558306889919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 114.491 ],
[87, 112, 0, 0.006135907202216068, 0.220264039928212, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.441 ],
[113, 87, 0, 0.003981648199445983, 0.14293141813921081, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 22.998 ],
[87, 85, 0, 0.011046225761772853, 0.3965324494097, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 63.803000000000004 ],
[110, 114, 0, 0.011665339335180056, 0.418757110306188, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 67.37899999999999 ],
[115, 116, 0, 0.007048925619834712, 0.07457124214588401, 991.0, 991.0, 991.0, 0, 1, 1, -360, 21.323 ],
[117, 118, 0, 0.005987534626038782, 0.21493782785077598, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.584 ],
[117, 119, 0, 0.0038738746537396117, 0.5562504472696961, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 44.751000000000005 ],
[117, 120, 0, 0.005886686288088643, 0.8452704781039522, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 68.003 ],
[121, 122, 0, 0.0021170360110803325, 0.0759964075574972, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 12.228 ],
[123, 124, 0, 0.0018386426592797783, 0.0660027680945204, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 10.62 ],
[125, 126, 0, 0.004941135734072022, 0.17737467056702802, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 28.54 ],
[127, 119, 0, 0.0029027008310249305, 0.1041998502705648, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.766 ],
[118, 128, 0, 0.007397160664819945, 0.265539950057812, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 42.726000000000006 ],
[121, 119, 0, 0.002552458448753463, 0.0916270065931116, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.743 ],
[530, 527, 0, 0.022726611570247933, 0.060106736329903994, 495.0, 495.0, 495.0, 0, 1, 1, -360, 34.374 ],
[125, 130, 0, 0.002931440443213297, 0.105231531956442, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.932000000000002 ],
[125, 123, 0, 0.0019078081717451524, 0.2739425623421336, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 22.039 ],
[131, 132, 0, 0.0035744459833795014, 0.12831385593973843, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.646 ],
[133, 123, 0, 0.003864439058171745, 0.13872389704704202, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 22.320999999999998 ],
[524, 134, 0, 0.008092231404958678, 0.08560847143881999, 991.0, 991.0, 991.0, 0, 1, 1, -360, 24.479 ],
[135, 136, 0, 0.005242901662049862, 0.1882073282678, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.283 ],
[123, 131, 0, 0.003138331024930748, 0.1126583971045252, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 18.127 ],
[117, 128, 0, 0.010800034626038782, 0.38769479063117196, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 62.381 ],
[137, 521, 0, 0.013832396694214875, 0.14633421587532003, 991.0, 991.0, 991.0, 0, 2, 1, -360, 41.843 ],
[531, 514, 0, 0.0059504132231404955, 0.035409362037522, 743.0, 743.0, 743.0, 0, 1, 1, -360, 13.5 ],
[139, 521, 0, 0.021257520661157023, 0.05622132386323199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.152 ],
[140, 514, 0, 0.018527603305785127, 0.04900131122836401, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.023000000000003 ],
[522, 141, 0, 0.012168595041322314, 0.032183175718526795, 495.0, 495.0, 495.0, 0, 1, 1, -360, 18.405 ],
[142, 523, 0, 0.007060165289256198, 0.0746901476577608, 991.0, 991.0, 991.0, 0, 2, 1, -360, 21.357 ],
[530, 526, 0, 0.020281652892561983, 0.053640374808152, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.676 ],
[140, 532, 0, 0.004669090909090909, 0.0123486871461184, 495.0, 495.0, 495.0, 0, 1, 1, -360, 7.062 ],
[142, 144, 0, 0.006678126721756199, 0.0397397958689204, 743.0, 743.0, 743.0, 0, 1, 1, -360, 15.151 ],
[140, 522, 0, 0.020450247933884298, 0.05408627047793199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.930999999999997 ],
[145, 146, 0, 0.028527603305785125, 0.07544904460236, 495.0, 495.0, 495.0, 0, 1, 1, -360, 43.148 ],
[147, 523, 0, 0.02461289256198347, 0.0650955220034416, 495.0, 495.0, 495.0, 0, 2, 1, -360, 37.227 ],
[144, 523, 0, 0.008479338842975206, 0.0224259292904064, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.825 ],
[139, 523, 0, 0.029245619834710742, 0.0193370088934308, 248.0, 248.0, 248.0, 0, 1, 1, -360, 22.116999999999997 ],
[140, 141, 0, 0.008362975206611572, 0.022118173847506, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.649000000000001 ],
[528, 526, 0, 0.015389090909090908, 0.0407006573227188, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.276 ],
[528, 148, 0, 0.014306115702479338, 0.0378364333712244, 495.0, 495.0, 495.0, 0, 1, 1, -360, 21.638 ],
[149, 150, 0, 0.013604628099173552, 0.035981157661543604, 495.0, 495.0, 495.0, 0, 1, 1, -360, 20.576999999999998 ],
[145, 528, 0, 0.00320595041322314, 0.0084790121737992, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.849 ],
[530, 151, 0, 0.013144462809917355, 0.0347641247737036, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.881 ],
[524, 152, 0, 0.014598347107438016, 0.03860931919944, 495.0, 495.0, 495.0, 0, 1, 1, -360, 22.08 ],
[149, 525, 0, 0.016897190082644627, 0.17875695122823998, 991.0, 991.0, 991.0, 0, 2, 1, -360, 51.114 ],
[139, 514, 0, 0.007824132231404959, 0.020693056313687997, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.834000000000001 ],
[126, 120, 0, 0.012780297783933518, 0.458781387757004, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 73.819 ],
[530, 153, 0, 0.02254545454545455, 0.059627617060924, 495.0, 495.0, 495.0, 0, 1, 1, -360, 34.1 ],
[528, 147, 0, 0.15786710743801652, 0.104380679149868, 248.0, 248.0, 248.0, 0, 1, 1, -360, 119.387 ],
[528, 154, 0, 0.006528264462809917, 0.017265779790547203, 495.0, 495.0, 495.0, 0, 2, 1, -360, 9.874 ],
[130, 120, 0, 0.01450502077562327, 0.5206947188067639, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 83.781 ],
[528, 155, 0, 0.16064132231404957, 0.1062149715341, 248.0, 248.0, 248.0, 0, 1, 1, -360, 121.485 ],
[524, 533, 0, 0.004432727272727273, 0.0468942356109744, 991.0, 991.0, 991.0, 0, 1, 1, -360, 13.409 ],
[524, 149, 0, 0.0056413223140495865, 0.05968007537478799, 991.0, 991.0, 991.0, 0, 2, 1, -360, 17.065 ],
[154, 150, 0, 0.007539173553719007, 0.0199394052006688, 495.0, 495.0, 495.0, 0, 2, 1, -360, 11.402999999999999 ],
[157, 110, 0, 0.009962084487534625, 0.357614433044424, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 57.541000000000004 ],
[119, 158, 0, 0.0002490189289012004, 0.08045252664623159, 5134.0, 5134.0, 5134.0, 0, 3, 1, -360, 4.315 ],
[159, 60, 0, 0.010967451523545706, 0.0984261617997728, 856.0, 856.0, 856.0, 0, 1, 1, -360, 31.674 ],
[536, 161, 0, 0.021314380165289255, 0.056371704363524, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.238 ],
[115, 151, 0, 0.00379404958677686, 0.0401376047510724, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.477 ],
[162, 134, 0, 0.0015910743801652895, 0.016832124393744, 991.0, 991.0, 991.0, 0, 2, 1, -360, 4.813 ],
[115, 526, 0, 0.0037884297520661154, 0.010019537998747198, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.73 ],
[138, 87, 0, 0.0011838642659279777, 0.16999131006813442, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 13.675999999999998 ],
[123, 163, 0, 0.0022778739612188364, 0.08177009602828919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.157 ],
[112, 164, 0, 0.0008672957063711912, 0.12453516639176802, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 10.019 ],
[112, 165, 0, 0.005989439058171744, 0.21500619230086396, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.595 ],
[166, 165, 0, 0.002632790858725762, 0.09451074335350361, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 15.207 ],
[167, 537, 0, 0.00832595041322314, 0.08808100664460242, 991.0, 991.0, 991.0, 0, 2, 1, -360, 25.186 ],
[168, 104, 0, 0.002552458448753463, 0.0916270065931116, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.743 ],
[531, 520, 0, 0.016156694214876033, 0.042730794079516396, 495.0, 495.0, 495.0, 0, 1, 1, -360, 24.436999999999998 ],
[139, 520, 0, 0.010682314049586776, 0.0282522993797748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.157 ],
[520, 169, 0, 0.0011328925619834712, 0.0119849761681232, 991.0, 991.0, 991.0, 0, 2, 1, -360, 3.427 ],
[168, 105, 0, 0.007340893351800554, 0.26352009133553606, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 42.401 ],
[520, 170, 0, 0.005842644628099174, 0.015452470732151198, 495.0, 495.0, 495.0, 0, 2, 1, -360, 8.837 ],
[171, 89, 0, 0.005505454545454546, 0.058242717567848004, 991.0, 991.0, 991.0, 0, 1, 1, -360, 16.654 ],
[521, 172, 0, 0.006304793388429752, 0.06669899780522001, 991.0, 991.0, 991.0, 0, 1, 1, -360, 19.072 ],
[123, 173, 0, 0.005247403047091413, 0.18836891696656402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.309 ],
[521, 174, 0, 0.013300495867768597, 0.035176796844864404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 20.117 ],
[37, 39, 0, 0.004338873499549862, 0.35044859579205606, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 37.592 ],
[530, 175, 0, 0.013128595041322313, 0.0347221581224188, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.857 ],
[530, 176, 0, 0.005685289256198347, 0.01503630144005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.599 ],
[88, 530, 0, 0.006015867768595041, 0.0159106066755372, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.099 ],
[177, 496, 0, 0.018632066115702478, 0.19711036673178398, 991.0, 991.0, 991.0, 0, 2, 1, -360, 56.361999999999995 ],
[178, 525, 0, 0.03106842975206612, 0.08216895464241199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 46.99100000000001 ],
[179, 493, 0, 0.057079669421487594, 0.15096278779194802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.333 ],
[180, 181, 0, 0.041027438016528923, 0.10850827416682, 495.0, 495.0, 495.0, 0, 1, 1, -360, 62.053999999999995 ],
[182, 180, 0, 0.00866314049586777, 0.09164817200545601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 26.206 ],
[179, 181, 0, 0.01957223140495868, 0.051764115772731996, 495.0, 495.0, 495.0, 0, 1, 1, -360, 29.603 ],
[180, 493, 0, 0.06676561983471074, 0.17657993119175203, 495.0, 495.0, 495.0, 0, 1, 1, -360, 100.98299999999999 ],
[183, 30, 0, 0.0024804362880886427, 0.356166349712776, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 28.654 ],
[183, 21, 0, 0.0025647506925207757, 0.36827307214930394, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 29.628 ],
[538, 185, 0, 0.018631404958677687, 0.0123189607681008, 248.0, 248.0, 248.0, 0, 1, 1, -360, 14.09 ],
[538, 89, 0, 0.014509752066115702, 0.038375005396288, 495.0, 495.0, 495.0, 0, 1, 1, -360, 21.945999999999998 ],
[184, 186, 0, 0.0016554709141274237, 0.059427351084826, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 9.562000000000001 ],
[184, 187, 0, 0.002698753462603878, 0.09687863927102919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 15.588 ],
[520, 172, 0, 0.0034188429752066113, 0.0361682589818792, 991.0, 991.0, 991.0, 0, 2, 1, -360, 10.342 ],
[89, 175, 0, 0.0037309090909090903, 0.0098674088877672, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.643 ],
[185, 89, 0, 0.005812892561983471, 0.0153737832609196, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.792 ],
[89, 188, 0, 0.003108760330578513, 0.008221966434607202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.702 ],
[189, 190, 0, 0.008599492151454294, 0.17364414688031998, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 37.253 ],
[539, 172, 0, 0.0021570247933884296, 0.022819366646419197, 991.0, 991.0, 991.0, 0, 2, 1, -360, 6.525 ],
[504, 192, 0, 0.0003084297520661157, 0.00326290713886456, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.9329999999999999 ],
[105, 186, 0, 0.003273372576177285, 0.1175060580379876, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 18.907 ],
[105, 187, 0, 0.0021712257617728533, 0.0779416868808324, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 12.540999999999999 ],
[539, 193, 0, 0.005608595041322314, 0.01483346262541, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.482999999999999 ],
[187, 194, 0, 4.8649584487534626e-05, 0.0069856037041576, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.562 ],
[539, 540, 0, 0.004394710743801653, 0.0116230138006708, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.647 ],
[539, 196, 0, 0.00332297520661157, 0.008788516227194, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.026 ],
[197, 540, 0, 0.004737190082644629, 0.012528794024621601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 7.165 ],
[110, 198, 0, 0.00018724030470914128, 0.02688587333118328, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 2.1630000000000003 ],
[197, 539, 0, 0.009172231404958677, 0.024258473063998802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 13.873 ],
[199, 537, 0, 0.03612826446280991, 0.0238877676441712, 248.0, 248.0, 248.0, 0, 1, 1, -360, 27.322 ],
[134, 526, 0, 0.007771239669421488, 0.020553167475975197, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.754000000000001 ],
[200, 193, 0, 0.0009322314049586776, 0.009862163056380801, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.82 ],
[4, 201, 0, 0.013726108033240996, 0.49273365914097605, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 79.282 ],
[202, 86, 0, 0.00013365650969529087, 0.00479794133417816, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.772 ],
[85, 203, 0, 0.0019011426592797783, 0.2729854600553416, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 21.962 ],
[147, 204, 0, 0.0073874380165289254, 0.0781523963903056, 991.0, 991.0, 991.0, 0, 2, 1, -360, 22.346999999999998 ],
[147, 205, 0, 0.005959669421487603, 0.00394049369636956, 248.0, 248.0, 248.0, 0, 1, 1, -360, 4.507 ],
[123, 206, 0, 0.0005753116343490305, 0.0826091142668064, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 6.646 ],
[537, 207, 0, 0.018456198347107437, 0.048812461297776, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.915 ],
[165, 208, 0, 0.00414612188365651, 0.14883562055771601, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 23.948 ],
[4, 94, 0, 0.013687673130193905, 0.49135394025941603, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 79.06 ],
[4, 2, 0, 5.2054478301015697e-05, 0.016817654469309, 5134.0, 5134.0, 5134.0, 0, 3, 1, -360, 0.902 ],
[209, 4, 0, 0.0022369286703601107, 0.32120104149338397, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 25.840999999999998 ],
[119, 163, 0, 0.003535145429362881, 0.12690306230914922, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.419 ],
[210, 3, 0, 0.0003150969529085873, 0.011311208844832242, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.82 ],
[99, 211, 0, 0.0035045013850415513, 0.1258030161741948, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.242 ],
[99, 69, 0, 0.021717970914127423, 0.7796219621557, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 125.443 ],
[212, 99, 0, 0.008453774238227147, 0.30346978938770003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 48.82899999999999 ],
[213, 214, 0, 0.01490115702479339, 0.15764073118032798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 45.076 ],
[510, 215, 0, 0.002174710743801653, 0.09202587186721281, 1981.0, 1981.0, 1981.0, 0, 4, 1, -360, 13.157 ],
[128, 69, 0, 0.010711651662049862, 1.538088234801848, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 123.741 ],
[216, 69, 0, 0.009628462603878117, 1.3825528982351443, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 111.228 ],
[217, 98, 0, 0.0012787396121883656, 0.045903620070299994, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 7.386 ],
[504, 218, 0, 0.027480991735537193, 0.072680994226412, 495.0, 495.0, 495.0, 0, 1, 1, -360, 41.565 ],
[177, 504, 0, 0.07054809917355372, 0.18658373169634002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 106.704 ],
[219, 209, 0, 0.003938798476454294, 0.5655728721401839, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 45.501000000000005 ],
[219, 220, 0, 0.0013026315789473684, 0.1870451326342096, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 15.048 ],
[94, 95, 0, 0.01070740997229917, 0.38436979242743197, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 61.846000000000004 ],
[159, 221, 0, 0.009937153739612188, 0.356719480257712, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 57.397 ],
[34, 161, 0, 0.010965289256198347, 0.116002818645824, 991.0, 991.0, 991.0, 0, 2, 1, -360, 33.17 ],
[222, 221, 0, 0.0046457756232686975, 0.16677196601221997, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 26.834 ],
[211, 52, 0, 0.05267313019390582, 0.472709090515552, 856.0, 856.0, 856.0, 0, 1, 1, -360, 152.12 ],
[215, 223, 0, 0.04873190082644628, 0.128884831985184, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.707 ],
[224, 215, 0, 0.019086280991735535, 0.050478887076288004, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.868000000000002 ],
[225, 224, 0, 0.04200925619834711, 0.11110496071615601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 63.538999999999994 ],
[224, 223, 0, 0.031061818181818183, 0.082151468537468, 495.0, 495.0, 495.0, 0, 1, 1, -360, 46.981 ],
[226, 6, 0, 0.06420099173553719, 0.0424492677936932, 248.0, 248.0, 248.0, 0, 1, 1, -360, 48.552 ],
[7, 3, 0, 0.009332929362880887, 0.335029305054692, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 53.907 ],
[216, 227, 0, 0.01989941135734072, 0.7143401282507, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 114.939 ],
[228, 229, 0, 0.010545454545454545, 0.027890337012274, 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.95 ],
[227, 230, 0, 0.003993074792243767, 0.573366419334696, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 46.128 ],
[231, 53, 0, 0.007193213296398893, 1.0328749562310842, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 83.096 ],
[544, 545, 0, 0.013061818181818181, 0.034545548464856, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.756 ],
[234, 235, 0, 0.04608859504132231, 0.121893887321888, 495.0, 495.0, 495.0, 0, 1, 1, -360, 69.709 ],
[546, 214, 0, 0.057025454545454546, 0.15081940173295602, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.251 ],
[233, 227, 0, 0.0029001038781163438, 0.1041066260218888, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.750999999999998 ],
[237, 238, 0, 0.026324628099173554, 0.06962267451304, 495.0, 495.0, 495.0, 0, 1, 1, -360, 39.816 ],
[212, 100, 0, 0.007955505540166205, 0.285583163531816, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 45.951 ],
[519, 239, 0, 0.01740429752066116, 0.046030422038308406, 495.0, 495.0, 495.0, 0, 1, 1, -360, 26.324 ],
[238, 519, 0, 0.015166280991735538, 0.040111375593995205, 495.0, 495.0, 495.0, 0, 1, 1, -360, 22.939 ],
[213, 240, 0, 0.01665388429752066, 0.04404574915373599, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 25.189 ],
[241, 242, 0, 0.009862015235457064, 0.3540221919932281, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 56.963 ],
[70, 241, 0, 0.003819858033240997, 0.5484941897752321, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 44.126999999999995 ],
[509, 213, 0, 0.011363636363636364, 0.120216969880216, 991.0, 991.0, 991.0, 0, 2, 1, -360, 34.375 ],
[68, 243, 0, 0.003611668975069252, 0.1296500701715312, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.861 ],
[243, 244, 0, 0.0007699099722991691, 0.027637882270859202, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 4.447 ],
[68, 244, 0, 0.004104051246537396, 0.147325387728876, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 23.705 ],
[544, 547, 0, 0.02418776859504132, 0.255884661882476, 991.0, 991.0, 991.0, 0, 1, 1, -360, 73.168 ],
[245, 227, 0, 0.012676419667590028, 0.45505241780707606, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 73.219 ],
[246, 208, 0, 0.0010155817174515235, 0.0364568961999408, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.8660000000000005 ],
[112, 208, 0, 0.0017927631578947367, 0.0643558063672372, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 10.355 ],
[165, 247, 0, 0.0002113919667590028, 0.0075884538459086, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.2209999999999999 ],
[537, 549, 0, 0.00032066115702479337, 0.00084807607842936, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.485 ],
[537, 550, 0, 0.00032198347107438016, 0.0008515732993697601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.48700000000000004 ],
[537, 551, 0, 0.0002651239669421488, 0.0007011927988648, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.401 ],
[110, 251, 0, 0.00023857340720221602, 0.008564200982522441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.3780000000000001 ],
[510, 252, 0, 0.08467702479338843, 0.055987884365424005, 248.0, 248.0, 248.0, 0, 1, 1, -360, 64.03699999999999 ],
[529, 253, 0, 0.04859504132231405, 0.12852286961777998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.5 ],
[237, 239, 0, 0.03309421487603306, 0.08752669712542799, 495.0, 495.0, 495.0, 0, 1, 1, -360, 50.055 ],
[254, 238, 0, 0.07815008264462811, 0.05167231372274401, 248.0, 248.0, 248.0, 0, 1, 1, -360, 59.101000000000006 ],
[69, 255, 0, 0.0009369806094182826, 0.134541235754472, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 10.824000000000002 ],
[510, 225, 0, 0.021953719008264466, 0.232250442756508, 991.0, 991.0, 991.0, 0, 1, 1, -360, 66.41 ],
[256, 257, 0, 0.010125619834710746, 0.0267799693631888, 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.315 ],
[258, 190, 0, 0.011717451523545707, 0.10515695255750121, 856.0, 856.0, 856.0, 0, 1, 1, -360, 33.84 ],
[258, 259, 0, 0.015782548476454293, 0.1416387085570408, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.58 ],
[260, 261, 0, 0.006791031855955679, 0.9751256416231477, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 78.45 ],
[554, 553, 0, 0.17583338842975205, 0.11625986438453201, 248.0, 248.0, 248.0, 0, 1, 1, -360, 132.974 ],
[515, 263, 0, 0.006987107438016529, 0.0739172618295936, 991.0, 991.0, 991.0, 0, 2, 1, -360, 21.136 ],
[14, 264, 0, 0.01700694214876033, 0.17991802858084, 991.0, 991.0, 991.0, 0, 1, 1, -360, 51.446000000000005 ],
[116, 555, 0, 0.0009768595041322315, 0.0103342878835768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.955 ],
[151, 116, 0, 0.007244958677685951, 0.0191612735410668, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.958 ],
[111, 114, 0, 0.008806613573407202, 0.3161358573133961, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.867 ],
[77, 111, 0, 0.00288452216066482, 0.41418912211817605, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 33.321999999999996 ],
[266, 525, 0, 0.01042909090909091, 0.027582581569373602, 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.774000000000001 ],
[267, 120, 0, 0.013136945983379503, 0.471584184581432, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 75.87899999999999 ],
[268, 269, 0, 0.0010327272727272726, 0.0027313295556817604, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.5619999999999998 ],
[556, 271, 0, 0.052289586776859506, 0.0345735262323792, 248.0, 248.0, 248.0, 0, 1, 1, -360, 39.544000000000004 ],
[556, 272, 0, 0.04685355371900827, 0.030979257409249603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 35.433 ],
[529, 273, 0, 0.0034604958677685953, 0.009152227205140799, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.234 ],
[128, 274, 0, 0.0029350761772853184, 0.1053620459045884, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.953 ],
[34, 275, 0, 0.0008290909090909092, 0.00054818938265696, 248.0, 248.0, 248.0, 0, 1, 1, -360, 0.627 ],
[503, 276, 0, 0.006707438016528925, 0.07095861291266, 991.0, 991.0, 991.0, 0, 2, 1, -360, 20.29 ],
[503, 504, 0, 0.06432727272727272, 0.680524223098808, 991.0, 991.0, 991.0, 0, 2, 1, -360, 194.59 ],
[177, 218, 0, 0.04330380165289256, 0.114528740018308, 495.0, 495.0, 495.0, 0, 1, 1, -360, 65.497 ],
[277, 278, 0, 0.007191135734072023, 1.032576638635032, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 83.072 ],
[557, 558, 0, 0.04341289256198347, 0.258338836678648, 743.0, 743.0, 743.0, 0, 1, 1, -360, 98.493 ],
[557, 559, 0, 0.03415867768595042, 0.09034195998366001, 495.0, 495.0, 495.0, 0, 1, 1, -360, 51.665 ],
[559, 558, 0, 0.04474314049586777, 0.11833546501370001, 495.0, 495.0, 495.0, 0, 1, 1, -360, 67.67399999999999 ],
[277, 78, 0, 0.03585768698060942, 0.32180078416049196, 856.0, 856.0, 856.0, 0, 1, 1, -360, 103.557 ],
[277, 279, 0, 0.021390927977839334, 0.191970480441328, 856.0, 856.0, 856.0, 0, 1, 1, -360, 61.777 ],
[78, 279, 0, 0.015811980609418283, 0.1419028439283376, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.665 ],
[281, 282, 0, 0.0023178670360110803, 0.08320574945862161, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.388 ],
[283, 161, 0, 0.036741157024793386, 0.09717203248350399, 495.0, 495.0, 495.0, 0, 2, 1, -360, 55.571000000000005 ],
[268, 161, 0, 0.018883636363636366, 0.199771751868832, 991.0, 991.0, 991.0, 0, 2, 1, -360, 57.123000000000005 ],
[256, 284, 0, 0.010755371900826446, 0.113782083346976, 991.0, 991.0, 991.0, 0, 2, 1, -360, 32.535 ],
[515, 516, 0, 0.04071140495867769, 0.107672438361532, 495.0, 495.0, 495.0, 0, 1, 1, -360, 61.576 ],
[263, 516, 0, 0.0030355371900826445, 0.128452925198488, 1981.0, 1981.0, 1981.0, 0, 2, 1, -360, 18.365 ],
[516, 285, 0, 0.006908429752066116, 0.018271230811372, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.449000000000002 ],
[63, 286, 0, 0.019088925619834708, 0.050485881518556, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.872 ],
[287, 516, 0, 0.01732892561983471, 0.011457770111127998, 248.0, 248.0, 248.0, 0, 1, 1, -360, 13.105 ],
[8, 102, 0, 0.015100069252077563, 0.542055501663692, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 87.21799999999999 ],
[8, 101, 0, 0.019246883656509697, 0.69091598202144, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 111.17 ],
[80, 288, 0, 0.007984072022160666, 0.2866086302684072, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 46.11600000000001 ],
[80, 289, 0, 0.0003782317636201524, 0.122198345223416, 5134.0, 5134.0, 5134.0, 0, 4, 1, -360, 6.553999999999999 ],
[276, 560, 0, 0.01778314049586777, 0.047032375838192794, 495.0, 495.0, 495.0, 0, 2, 1, -360, 26.897 ],
[37, 290, 0, 0.005629501385041551, 0.4546919507138321, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 48.773999999999994 ],
[290, 74, 0, 0.02071595106187673, 1.673216783321968, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 179.483 ],
[512, 291, 0, 0.0053299173553719, 0.056385693247479204, 991.0, 991.0, 991.0, 0, 2, 1, -360, 16.123 ],
[78, 292, 0, 0.0058149815327908595, 0.469673087481408, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 50.381 ],
[199, 548, 0, 0.0015530578512396695, 0.00410748599634868, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.349 ],
[491, 293, 0, 0.014176528925619833, 0.009373426429729999, 248.0, 248.0, 248.0, 0, 1, 1, -360, 10.720999999999998 ],
[4, 294, 0, 9.669321329639889e-05, 0.013884198109531681, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 1.117 ],
[490, 541, 0, 0.050580495867768596, 0.133773946861896, 495.0, 495.0, 495.0, 0, 1, 1, -360, 76.503 ],
[491, 295, 0, 0.010613553719008264, 0.028070443890777202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.053 ],
[491, 296, 0, 0.004400661157024794, 0.0116387512948784, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.656000000000001 ],
[295, 297, 0, 0.020297520661157024, 0.053682341459340005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.7 ],
[508, 161, 0, 0.023239669421487603, 0.061463658055360006, 495.0, 495.0, 495.0, 0, 1, 1, -360, 35.15 ],
[117, 123, 0, 0.005876211911357341, 0.21094161505628, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 33.941 ],
[133, 117, 0, 0.004469182825484764, 0.0401081792747688, 856.0, 856.0, 856.0, 0, 1, 1, -360, 12.907 ],
[71, 74, 0, 0.03904524469065097, 0.7884161162841721, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 169.144 ],
[74, 278, 0, 0.0077122576177285325, 1.10740463560792, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 89.09200000000001 ],
[298, 515, 0, 0.021701157024793388, 0.05739464148919599, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.823 ],
[5, 299, 0, 0.0016232686980609415, 0.058271370400665996, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 9.376 ],
[32, 292, 0, 0.009679362880886427, 0.34746541983297996, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 55.908 ],
[5, 29, 0, 0.00743395083102493, 1.0674425076571843, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 85.87700000000001 ],
[503, 560, 0, 0.015140495867768593, 0.160172719142436, 991.0, 991.0, 991.0, 0, 1, 1, -360, 45.8 ],
[300, 301, 0, 0.004892053324099723, 0.7024509290644521, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 56.513000000000005 ],
[51, 300, 0, 0.002573493767313019, 0.3695284920307039, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 29.729 ],
[244, 302, 0, 0.007714508310249307, 1.107727813004004, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 89.118 ],
[31, 302, 0, 0.004369113573407203, 0.6273619041941161, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 50.472 ],
[51, 282, 0, 0.006288434903047093, 0.9029576432132521, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 72.64399999999999 ],
[303, 304, 0, 8.795013850415512e-05, 0.000789298639172312, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.254 ],
[305, 304, 0, 0.003881117266849031, 0.0783689646873844, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 16.813 ],
[305, 259, 0, 0.0025625, 0.36794989475177603, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 29.601999999999997 ],
[306, 307, 0, 0.03223268698060942, 0.289268628831688, 856.0, 856.0, 856.0, 0, 1, 1, -360, 93.088 ],
[305, 308, 0, 0.0024272853185595567, 0.0217833994511184, 856.0, 856.0, 856.0, 0, 1, 1, -360, 7.01 ],
[305, 309, 0, 0.011014773776523545, 0.22241441259921202, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 47.716 ],
[310, 309, 0, 0.009565962603878117, 0.343394627639832, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 55.253 ],
[306, 309, 0, 0.035333795013850415, 0.31709917455019604, 856.0, 856.0, 856.0, 0, 1, 1, -360, 102.044 ],
[311, 280, 0, 0.003433691135734072, 0.1232611016590444, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 19.833 ],
[280, 278, 0, 0.009749769159764544, 0.7874838737974121, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 84.47200000000001 ],
[311, 32, 0, 0.01205909510619806, 0.9740069506375919, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 104.48 ],
[13, 312, 0, 0.0043324965373961214, 0.622104056565324, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 50.049 ],
[313, 314, 0, 0.006092624653739613, 0.218710302449316, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.191 ],
[312, 313, 0, 0.00893957756232687, 0.32090893884734, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.635 ],
[547, 566, 0, 0.027035702479338848, 0.286013220297816, 991.0, 991.0, 991.0, 0, 1, 1, -360, 81.783 ],
[245, 315, 0, 0.014162569252077564, 0.508401547875772, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 81.803 ],
[312, 316, 0, 8.803670360110802e-05, 0.01264120812658816, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 1.0170000000000001 ],
[312, 314, 0, 0.005339854570637119, 0.191687700220296, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.843000000000004 ],
[554, 546, 0, 0.08174743801652892, 0.21620344446439202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 123.64299999999999 ],
[262, 216, 0, 0.042641966759002774, 0.38268554099981195, 856.0, 856.0, 856.0, 0, 1, 1, -360, 123.15 ],
[317, 233, 0, 0.005647276084951523, 0.114031901035644, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.464000000000002 ],
[318, 317, 0, 0.008311634349030471, 0.16783161497270002, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 36.006 ],
[231, 52, 0, 0.035263677285318554, 1.2658796434850879, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 203.683 ],
[319, 567, 0, 0.006089586776859504, 0.0644223069721, 991.0, 991.0, 991.0, 0, 1, 1, -360, 18.421 ],
[557, 321, 0, 0.010004628099173555, 0.10583989458750401, 991.0, 991.0, 991.0, 0, 2, 1, -360, 30.264 ],
[277, 65, 0, 0.009430170821779778, 0.7616700793261759, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 81.703 ],
[322, 288, 0, 0.006545013850415513, 0.528637424797136, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 56.706 ],
[322, 323, 0, 0.0018503000923372577, 0.14944779312484, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 16.031 ],
[277, 324, 0, 0.019719529085872576, 0.39818407235049996, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 85.425 ],
[324, 325, 0, 0.01103508771932133, 0.22282459929396403, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 47.803999999999995 ],
[277, 325, 0, 0.008665743305609418, 0.174981914850048, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 37.54 ],
[326, 327, 0, 0.007654214876033058, 0.0202436634226288, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.577 ],
[328, 326, 0, 0.10300958677685952, 0.068109252150368, 248.0, 248.0, 248.0, 0, 1, 1, -360, 77.90100000000001 ],
[328, 327, 0, 0.09827173553719008, 0.064976616491468, 248.0, 248.0, 248.0, 0, 1, 1, -360, 74.318 ],
[326, 329, 0, 0.028062148760330575, 0.07421802283046801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.443999999999996 ],
[568, 329, 0, 0.05699900826446282, 0.15074945731414802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.211 ],
[568, 326, 0, 0.03218644628099173, 0.08512585494846397, 495.0, 495.0, 495.0, 0, 1, 1, -360, 48.681999999999995 ],
[332, 78, 0, 0.006471029547541551, 0.522661750455416, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 56.065 ],
[333, 306, 0, 0.008580159279778392, 0.308006702824228, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 49.559 ],
[332, 333, 0, 0.007504674515235457, 0.26939943395502003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 43.347 ],
[332, 334, 0, 0.017124653739612188, 0.15368328149175597, 856.0, 856.0, 856.0, 0, 1, 1, -360, 49.456 ],
[66, 334, 0, 0.030625, 0.27484062260471603, 856.0, 856.0, 856.0, 0, 1, 1, -360, 88.445 ],
[330, 335, 0, 0.00550536703601108, 0.790516769355108, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 63.598 ],
[336, 66, 0, 0.015054362880886425, 0.1351036887216764, 856.0, 856.0, 856.0, 0, 1, 1, -360, 43.477 ],
[330, 336, 0, 0.039036357340720224, 0.350327404269788, 856.0, 856.0, 856.0, 0, 1, 1, -360, 112.73700000000001 ],
[68, 70, 0, 0.016314058171745152, 0.14640868261713597, 856.0, 856.0, 856.0, 0, 1, 1, -360, 47.115 ],
[509, 337, 0, 0.03494082644628099, 0.09241056617056001, 495.0, 495.0, 495.0, 0, 1, 1, -360, 52.848 ],
[324, 288, 0, 0.012627423822714683, 0.11332339674541761, 856.0, 856.0, 856.0, 0, 1, 1, -360, 36.468 ],
[338, 559, 0, 0.009228099173553718, 0.097624922595552, 991.0, 991.0, 991.0, 0, 2, 1, -360, 27.915 ],
[339, 559, 0, 0.03560595041322315, 0.023542417076125203, 248.0, 248.0, 248.0, 0, 1, 1, -360, 26.927 ],
[339, 340, 0, 0.08711537190082644, 0.23040041287850396, 495.0, 495.0, 495.0, 0, 1, 1, -360, 131.762 ],
[559, 340, 0, 0.20983272727272728, 0.138740000599684, 248.0, 248.0, 248.0, 0, 1, 1, -360, 158.686 ],
[341, 292, 0, 0.0009329409048961218, 0.07535316024134399, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 8.083 ],
[557, 342, 0, 0.006019834710743802, 0.0636843933534336, 991.0, 991.0, 991.0, 0, 2, 1, -360, 18.21 ],
[558, 343, 0, 0.010650247933884296, 0.11266996708783199, 991.0, 991.0, 991.0, 0, 1, 1, -360, 32.217 ],
[502, 340, 0, 0.021737520661157025, 0.22996326026071198, 991.0, 991.0, 991.0, 0, 2, 1, -360, 65.756 ],
[72, 32, 0, 0.00675502077562327, 0.969954803293024, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 78.03399999999999 ],
[344, 345, 0, 0.0005762927054480609, 0.04654686738645321, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 4.993 ],
[346, 47, 0, 0.0011340027700831024, 0.04070792194158799, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 6.55 ],
[46, 47, 0, 0.0008975069252077563, 0.0322183003580208, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.184 ],
[346, 345, 0, 0.0007217797783933517, 0.025910126194627202, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 4.169 ],
[347, 328, 0, 0.029905454545454544, 0.07909314882361201, 495.0, 495.0, 495.0, 0, 1, 1, -360, 45.232 ],
[347, 348, 0, 0.04883438016528925, 0.129155866607944, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.862 ],
[571, 348, 0, 0.041548429752066116, 0.10988617921762801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 62.842 ],
[347, 572, 0, 0.016052231404958678, 0.04245451362512801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 24.279 ],
[571, 570, 0, 0.17379041322314048, 0.11490906279551602, 248.0, 248.0, 248.0, 0, 1, 1, -360, 131.429 ],
[14, 350, 0, 0.02166743801652892, 0.05730546235524, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.772 ],
[350, 573, 0, 0.026277685950413226, 0.06949852316919598, 495.0, 495.0, 495.0, 0, 1, 1, -360, 39.745 ],
[15, 351, 0, 0.02639265927977839, 0.236857956201204, 856.0, 856.0, 856.0, 0, 1, 1, -360, 76.222 ],
[352, 15, 0, 0.0015260560941828254, 0.219126704094076, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.629 ],
[15, 335, 0, 0.0035338758079432133, 1.1417173740880242, 5134.0, 5134.0, 5134.0, 0, 1, 1, -360, 61.235 ],
[232, 227, 0, 5.5747922437673134e-05, 0.000500303468136644, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 0.161 ],
[565, 544, 0, 0.0394803305785124, 0.10441652566461601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 59.714 ],
[235, 567, 0, 0.02391404958677686, 0.25298896294275997, 991.0, 991.0, 991.0, 0, 1, 1, -360, 72.34 ],
[567, 286, 0, 0.008068760330578512, 0.34144067500694797, 1981.0, 1981.0, 1981.0, 0, 1, 1, -360, 48.816 ],
[353, 519, 0, 0.007621818181818182, 0.080631926038356, 991.0, 991.0, 991.0, 0, 1, 1, -360, 23.055999999999997 ],
[354, 353, 0, 0.0008436363636363636, 0.00892490784392768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.552 ],
[355, 354, 0, 0.0068502479338842966, 0.0181173530898976, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.360999999999999 ],
[354, 356, 0, 0.01855404958677686, 0.049071255647172, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.063000000000002 ],
[357, 358, 0, 0.0034823407202216067, 0.5000300103406239, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 40.228 ],
[574, 359, 0, 0.013352066115702478, 0.0353131884615884, 495.0, 495.0, 495.0, 0, 1, 1, -360, 20.195 ],
[235, 575, 0, 0.007459504132231404, 0.0789147905557, 991.0, 991.0, 991.0, 0, 1, 1, -360, 22.565 ],
[167, 361, 0, 0.000616198347107438, 0.0065188198358579995, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.864 ],
[528, 362, 0, 0.0011960330578512398, 0.012652945368078402, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.6180000000000003 ],
[363, 344, 0, 0.0002662742382271468, 0.009558592968871479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.538 ],
[259, 364, 0, 0.013069713758102496, 0.26390852570525997, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 56.618 ],
[54, 56, 0, 0.007723337950138504, 0.0693122289241068, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.305 ],
[365, 364, 0, 0.0049974607571537395, 0.10091058802821559, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 21.649 ],
[231, 366, 0, 0.0013273891966759002, 0.0476500209962672, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 7.667000000000001 ],
[30, 367, 0, 0.01126108033240997, 0.1010613005635992, 856.0, 856.0, 856.0, 0, 1, 1, -360, 32.522 ],
[61, 367, 0, 0.020337603878116343, 0.18251754162067196, 856.0, 856.0, 856.0, 0, 1, 1, -360, 58.735 ],
[254, 368, 0, 0.0004297520661157025, 0.00454638722456732, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.3 ],
[254, 369, 0, 0.00015999999999999999, 0.00169265493591832, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.484 ],
[254, 370, 0, 0.0003669421487603306, 0.0038819152455960805, 991.0, 991.0, 991.0, 0, 2, 1, -360, 1.11 ],
[99, 358, 0, 0.0020184383656509696, 0.28982797432374396, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 23.316999999999997 ],
[354, 519, 0, 0.006762644628099174, 0.07154264880985199, 991.0, 991.0, 991.0, 0, 1, 1, -360, 20.457 ],
[571, 371, 0, 0.023726942148760328, 0.06275238397221199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 35.887 ],
[207, 372, 0, 0.002329256198347108, 0.006160354689297601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.523 ],
[57, 373, 0, 0.0017725619834710745, 0.0046880246727212796, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.681 ],
[209, 374, 0, 0.0010122922437673131, 0.0363388121515216, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.847 ],
[375, 376, 0, 0.0045364727608518006, 0.0916021467933684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 19.652 ],
[376, 377, 0, 0.0030886426592797783, 0.062367022394423606, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 13.38 ],
[16, 49, 0, 0.002266101108033241, 0.32538991773524, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 26.178 ],
[318, 377, 0, 0.004755078485685596, 0.0960163149704152, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 20.599 ],
[378, 297, 0, 0.01753917355371901, 0.046387138574374404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 26.528000000000002 ],
[562, 379, 0, 0.01802314049586777, 0.047667121439141605, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.26 ],
[576, 563, 0, 0.001808264462809917, 0.004782449638150801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.735 ],
[576, 381, 0, 0.0034320661157024794, 0.009077036954898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.191 ],
[577, 576, 0, 0.06004495867768594, 0.15880530575430396, 495.0, 495.0, 495.0, 0, 1, 1, -360, 90.818 ],
[244, 383, 0, 0.006845567867036011, 0.1382282547912684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 29.655 ],
[244, 306, 0, 0.02679108956599723, 0.5409756541164079, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 116.059 ],
[383, 306, 0, 0.0300685595567867, 0.269846910348376, 856.0, 856.0, 856.0, 0, 1, 1, -360, 86.838 ],
[380, 306, 0, 0.00025605955678670365, 0.03676764369572, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 2.958 ],
[252, 225, 0, 0.062094545454545444, 0.041056499553586, 248.0, 248.0, 248.0, 0, 1, 1, -360, 46.958999999999996 ],
[220, 76, 0, 0.002772074099722992, 0.398042682239984, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 32.023 ],
[542, 384, 0, 0.007939834710743802, 0.020999063146094, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.009 ],
[385, 384, 0, 0.053734876033057856, 0.035529141854791196, 248.0, 248.0, 248.0, 0, 1, 1, -360, 40.637 ],
[542, 385, 0, 0.011306115702479337, 0.119608453436296, 991.0, 991.0, 991.0, 0, 2, 1, -360, 34.201 ],
[386, 385, 0, 0.003668760330578512, 0.0388121580140316, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.097999999999999 ],
[387, 578, 0, 0.015444628099173553, 0.16339016240905604, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.72 ],
[332, 388, 0, 0.014036184210526315, 0.5038646344377999, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 81.07300000000001 ],
[382, 332, 0, 0.017764369806094183, 0.637697365901468, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 102.60700000000001 ],
[382, 388, 0, 0.00476159972299169, 0.17092976750548, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 27.503 ],
[579, 578, 0, 0.01911074380165289, 0.050543585664, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.905 ],
[577, 387, 0, 0.07597818181818182, 0.20094506949431204, 495.0, 495.0, 495.0, 0, 1, 1, -360, 114.917 ],
[144, 390, 0, 0.0004277685950413223, 0.0011313509747276, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.647 ],
[37, 49, 0, 0.008441481994459835, 0.303028527944352, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 48.758 ],
[391, 233, 0, 0.014211218836565096, 0.1275369872004348, 856.0, 856.0, 856.0, 0, 1, 1, -360, 41.042 ],
[392, 310, 0, 0.007035318559556785, 0.06313767618386361, 856.0, 856.0, 856.0, 0, 1, 1, -360, 20.317999999999998 ],
[260, 393, 0, 0.006341412742382271, 0.0569102963692744, 856.0, 856.0, 856.0, 0, 1, 1, -360, 18.314 ],
[394, 230, 0, 0.0007590027700831025, 0.00681158510656168, 856.0, 856.0, 856.0, 0, 1, 1, -360, 2.1919999999999997 ],
[395, 282, 0, 0.008762984764542936, 0.314569689934484, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.615 ],
[395, 244, 0, 0.0034046052631578946, 0.12221699007344, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 19.665 ],
[25, 396, 0, 0.008809037396121884, 0.316222866612064, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.881 ],
[81, 74, 0, 0.0075207756232686974, 0.26997742429652244, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 43.44 ],
[278, 80, 0, 0.016286011080332407, 0.5846279085788, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 94.068 ],
[81, 278, 0, 0.021054016620498613, 0.755787629231688, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 121.60799999999999 ],
[569, 570, 0, 0.03253950413223141, 0.08605961294018, 495.0, 495.0, 495.0, 0, 1, 1, -360, 49.216 ],
[397, 552, 0, 0.006289586776859504, 0.0166345314104904, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 9.513 ],
[542, 398, 0, 0.0005580165289256199, 0.0059033089500572, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.6880000000000002 ],
[398, 385, 0, 0.021893553719008262, 0.05790348713648401, 495.0, 495.0, 495.0, 0, 1, 1, -360, 33.114000000000004 ],
[399, 499, 0, 0.03266380165289256, 0.021597087927192803, 248.0, 248.0, 248.0, 0, 1, 1, -360, 24.701999999999998 ],
[83, 399, 0, 0.025700495867768593, 0.016992996557050798, 248.0, 248.0, 248.0, 0, 1, 1, -360, 19.436 ],
[498, 400, 0, 0.012134214876033058, 0.032092247974028, 495.0, 495.0, 495.0, 0, 1, 1, -360, 18.352999999999998 ],
[518, 239, 0, 0.04685289256198347, 0.123915281026504, 495.0, 495.0, 495.0, 0, 1, 1, -360, 70.865 ],
[575, 543, 0, 0.0030307438016528923, 0.032062521596058796, 991.0, 991.0, 991.0, 0, 1, 1, -360, 9.168 ],
[401, 360, 0, 0.007957063711911357, 0.071409774520472, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.98 ],
[580, 581, 0, 0.007134545454545454, 0.018869255592422397, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.790999999999999 ],
[401, 402, 0, 0.0033434903047091418, 0.030005778188384805, 856.0, 856.0, 856.0, 0, 1, 1, -360, 9.656 ],
[403, 231, 0, 0.009592105263157893, 0.08608327126915, 856.0, 856.0, 856.0, 0, 1, 1, -360, 27.701999999999998 ],
[189, 360, 0, 0.028456024930747923, 0.255375399471348, 856.0, 856.0, 856.0, 0, 1, 1, -360, 82.181 ],
[234, 404, 0, 0.008092561983471074, 0.0214029921648796, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.24 ],
[235, 404, 0, 0.05107504132231405, 0.13508190749437998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 77.251 ],
[235, 580, 0, 0.000580495867768595, 0.00153527999352772, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.878 ],
[216, 259, 0, 0.0022115650969529088, 0.079389770210892, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 12.774000000000001 ],
[405, 259, 0, 0.0052832409972299165, 0.1896554115982928, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 30.516 ],
[405, 318, 0, 0.0066348684210526315, 0.23817552558268398, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 38.323 ],
[406, 230, 0, 8.098164819944598e-05, 0.046512685161986804, 6845.0, 6845.0, 6845.0, 0, 1, 1, -360, 1.871 ],
[542, 407, 0, 0.025569586776859506, 0.067625761355152, 495.0, 495.0, 495.0, 0, 1, 1, -360, 38.674 ],
[23, 408, 0, 0.03224528925619835, 0.08528148128033601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 48.771 ],
[577, 348, 0, 0.012999008264462809, 0.13751772188026398, 991.0, 991.0, 991.0, 0, 2, 1, -360, 39.321999999999996 ],
[562, 564, 0, 0.06921520661157024, 0.18305853298686803, 495.0, 495.0, 495.0, 0, 1, 1, -360, 104.68799999999999 ],
[582, 507, 0, 0.006357685950413223, 0.016814638289042002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.616 ],
[27, 410, 0, 0.0030042975206611565, 0.007945685980170399, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.544 ],
[501, 27, 0, 0.003811570247933884, 0.040322957460962, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.53 ],
[27, 411, 0, 0.004648595041322314, 0.012294480221518, 495.0, 495.0, 495.0, 0, 1, 1, -360, 7.031000000000001 ],
[411, 410, 0, 0.002054214876033058, 0.0054329327333556, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.1069999999999998 ],
[403, 360, 0, 0.008191481994459833, 0.07351353506655639, 856.0, 856.0, 856.0, 0, 1, 1, -360, 23.656999999999996 ],
[412, 360, 0, 0.016761772853185596, 0.15042664773666, 856.0, 856.0, 856.0, 0, 1, 1, -360, 48.408 ],
[326, 413, 0, 0.012077024793388432, 0.12776397267356798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 36.533 ],
[414, 413, 0, 0.008093223140495867, 0.08561896310149601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 24.482 ],
[6, 297, 0, 0.019472396694214876, 0.0128750188978664, 248.0, 248.0, 248.0, 0, 1, 1, -360, 14.725999999999999 ],
[554, 580, 0, 0.07435371900826447, 0.196648733567264, 495.0, 495.0, 495.0, 0, 1, 1, -360, 112.46 ],
[262, 401, 0, 0.03931232686980609, 0.35280406181043206, 856.0, 856.0, 856.0, 0, 1, 1, -360, 113.53399999999999 ],
[499, 556, 0, 0.04185586776859504, 0.11069928308639199, 495.0, 495.0, 495.0, 0, 2, 1, -360, 63.306999999999995 ],
[224, 229, 0, 0.004135206611570248, 0.0437467367631624, 991.0, 991.0, 991.0, 0, 1, 1, -360, 12.509 ],
[583, 507, 0, 0.024632727272727268, 0.065147980317596, 495.0, 495.0, 495.0, 0, 1, 1, -360, 37.257 ],
[415, 307, 0, 0.015675554016620498, 0.1406784987952448, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.271 ],
[416, 507, 0, 0.0010555371900826446, 0.011166626467730801, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.193 ],
[284, 561, 0, 0.015221487603305786, 0.16102953827307598, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.045 ],
[543, 417, 0, 0.0006614876033057851, 0.027991756419545603, 1981.0, 1981.0, 1981.0, 0, 4, 1, -360, 4.002 ],
[418, 506, 0, 0.0009395041322314049, 0.009939101917118, 991.0, 991.0, 991.0, 0, 1, 1, -360, 2.842 ],
[220, 157, 0, 0.004599549861495845, 0.165112574384632, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 26.566999999999997 ],
[295, 419, 0, 0.0012023140495867769, 0.012719392565946, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.637 ],
[295, 420, 0, 0.0008003305785123967, 0.008466771900532, 991.0, 991.0, 991.0, 0, 1, 1, -360, 2.421 ],
[541, 62, 0, 0.05133355371900827, 0.0339414035471236, 248.0, 248.0, 248.0, 0, 1, 1, -360, 38.821 ],
[52, 421, 0, 0.00013885041551246538, 0.004984389831631239, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.802 ],
[60, 160, 0, 6.128808864265928e-05, 0.000550023067454096, 856.0, 856.0, 856.0, 0, 2, 1, -360, 0.177 ],
[535, 161, 0, 3.735537190082645e-05, 0.00039518596644331203, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.113 ],
[267, 282, 0, 0.0065652700831024926, 0.235677115717012, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 37.921 ],
[52, 365, 0, 0.007655586334279779, 0.15458444922992, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 33.164 ],
[28, 27, 0, 0.015726942148760328, 0.041594197273402404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.787 ],
[30, 201, 0, 0.009128289473684211, 0.327683234253536, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 52.725 ],
[422, 81, 0, 0.0004226685133887349, 0.13655487952674, 5134.0, 5134.0, 5134.0, 0, 6, 1, -360, 7.324 ],
[119, 425, 0, 0.003579120498614958, 0.1284816595874996, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.673000000000002 ],
[423, 425, 0, 0.0006518351800554017, 0.0233992864289392, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 3.765 ],
[424, 425, 0, 0.005922957063711911, 0.21261965153389198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.211 ],
[426, 428, 0, 0.013948429752066116, 0.14756174042535197, 991.0, 991.0, 991.0, 0, 2, 1, -360, 42.193999999999996 ],
[427, 428, 0, 0.0002664462809917355, 0.0028187600792304794, 991.0, 991.0, 991.0, 0, 2, 1, -360, 0.8059999999999999 ],
[19, 428, 0, 0.023607603305785128, 0.24974703912892798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 71.413 ],
[45, 429, 0, 0.02562314049586777, 0.067767398802972, 495.0, 495.0, 495.0, 0, 1, 1, -360, 38.755 ],
[44, 429, 0, 5.289256198347107e-05, 0.00013988883767892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.08 ],
[505, 429, 0, 0.006012561983471073, 0.015901863623161996, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.094 ],
[231, 431, 0, 0.011677285318559558, 0.4191859418495199, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 67.44800000000001 ],
[190, 431, 0, 0.009600761772853185, 0.34464383257266795, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 55.45399999999999 ],
[430, 431, 0, 0.0028100761772853187, 0.1008748520662472, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.230999999999998 ],
[286, 433, 0, 0.01568694214876033, 0.16595362535967603, 991.0, 991.0, 991.0, 0, 1, 1, -360, 47.453 ],
[432, 433, 0, 0.00010049586776859504, 0.00106315516636076, 991.0, 991.0, 991.0, 0, 1, 1, -360, 0.304 ],
[506, 433, 0, 0.0065904132231404955, 0.06972059669946801, 991.0, 991.0, 991.0, 0, 1, 1, -360, 19.936 ],
[23, 434, 0, 0.02613685950413223, 0.069126069139116, 495.0, 495.0, 495.0, 0, 2, 1, -360, 39.532 ],
[400, 434, 0, 0.008155371900826446, 0.021569110159669603, 495.0, 495.0, 495.0, 0, 2, 1, -360, 12.335 ],
[500, 434, 0, 0.006338512396694216, 0.0167639285853336, 495.0, 495.0, 495.0, 0, 2, 1, -360, 9.587 ],
[32, 436, 0, 0.0044813019390581715, 0.16086776359270402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 25.884 ],
[435, 436, 0, 0.0006634349030470914, 0.023815688073266, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 3.832 ],
[78, 436, 0, 0.00897680055401662, 0.32224515307884394, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.85 ],
[86, 438, 0, 0.014693213296398892, 0.52745036936438, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 84.868 ],
[437, 438, 0, 1.0387811634349031e-05, 0.0003728969948845, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.06 ],
[221, 438, 0, 0.002280124653739612, 0.081850890377238, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.17 ],
[207, 439, 0, 0.055703801652892564, 0.0368309823503996, 248.0, 248.0, 248.0, 0, 1, 1, -360, 42.126000000000005 ],
[516, 439, 0, 0.05448462809917355, 0.03602487292327441, 248.0, 248.0, 248.0, 0, 1, 1, -360, 41.20399999999999 ],
[513, 439, 0, 0.046726611570247926, 0.0308953241066316, 248.0, 248.0, 248.0, 0, 1, 1, -360, 35.336999999999996 ],
[181, 441, 0, 0.040805289256198356, 0.10792074104825197, 495.0, 495.0, 495.0, 0, 1, 1, -360, 61.718 ],
[440, 441, 0, 0.0001322314049586777, 0.000349722094197784, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.2 ],
[504, 441, 0, 0.05916099173553719, 0.156467413554364, 495.0, 495.0, 495.0, 0, 1, 1, -360, 89.48100000000001 ],
[135, 442, 0, 0.004956890581717451, 0.177940231009092, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 28.631 ],
[109, 442, 0, 0.0015380886426592797, 0.055213615042649204, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 8.884 ],
[112, 442, 0, 0.0027304362880886425, 0.09801597510545401, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 15.770999999999999 ],
[113, 443, 0, 0.0019885734072022164, 0.07138491472072879, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 11.485999999999999 ],
[132, 443, 0, 0.006788434903047091, 0.24368818615747198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 39.21 ],
[107, 443, 0, 2.2333795013850418e-05, 0.000801728539002036, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.129 ],
[444, 445, 0, 7.877423822714682e-05, 0.00282780221121528, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.455 ],
[112, 445, 0, 0.002816135734072022, 0.101092375313206, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.266 ],
[109, 445, 0, 0.0014354224376731304, 0.0515281497432104, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 8.291 ],
[119, 447, 0, 0.005212690443213296, 0.74849127803204, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 60.217 ],
[100, 447, 0, 0.0050695117728531865, 0.7279322237145921, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 58.563 ],
[446, 447, 0, 2.9518698060941832e-05, 0.00423859584186224, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.341 ],
[124, 448, 0, 6.509695290858726e-05, 0.00233682116794768, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.376 ],
[125, 448, 0, 0.00615148891966759, 0.22082338542026803, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.531 ],
[131, 448, 0, 3.912742382271468e-05, 0.0014045786807313759, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.226 ],
[449, 450, 0, 0.0023614958448753462, 0.08477191683710039, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.64 ],
[173, 450, 0, 0.002862361495844876, 0.10275176694050518, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.533 ],
[184, 450, 0, 0.004022853185595568, 0.14441057621844403, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 23.236 ],
[144, 451, 0, 0.007672727272727273, 0.020292624515794402, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.605 ],
[140, 451, 0, 0.006991074380165291, 0.018489807120219602, 495.0, 495.0, 495.0, 0, 1, 1, -360, 10.574000000000002 ],
[514, 451, 0, 0.01149289256198347, 0.030396095817207994, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.383 ],
[537, 585, 0, 0.05072595041322314, 0.134158641165824, 495.0, 495.0, 495.0, 0, 1, 1, -360, 76.723 ],
[141, 585, 0, 0.007994710743801653, 0.0211441978151932, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.092 ],
[584, 585, 0, 9.256198347107438e-05, 0.000244805465938352, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.14 ],
[522, 454, 0, 0.0035008264462809916, 0.0092588924438956, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.295 ],
[144, 454, 0, 0.00452892561983471, 0.011977981726290799, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.85 ],
[453, 454, 0, 0.001114710743801653, 0.0029481572540882, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.686 ],
[199, 456, 0, 0.013063140495867768, 0.0086372614214612, 248.0, 248.0, 248.0, 0, 1, 1, -360, 9.879 ],
[140, 456, 0, 0.005061818181818182, 0.013387361765852802, 495.0, 495.0, 495.0, 0, 2, 1, -360, 7.656000000000001 ],
[455, 456, 0, 0.0011365289256198346, 0.00300586139962416, 495.0, 495.0, 495.0, 0, 2, 1, -360, 1.719 ],
[537, 456, 0, 0.039058512396694216, 0.025825228046024003, 248.0, 248.0, 248.0, 0, 1, 1, -360, 29.538 ],
[538, 457, 0, 0.027927272727272728, 0.0184653265736368, 248.0, 248.0, 248.0, 0, 1, 1, -360, 21.12 ],
[153, 457, 0, 0.030093223140495867, 0.019897438549384, 248.0, 248.0, 248.0, 0, 1, 1, -360, 22.758000000000003 ],
[176, 457, 0, 0.004579173553719009, 0.0030277190305137603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 3.463 ],
[524, 459, 0, 0.004318677685950414, 0.011421923596476799, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.532 ],
[458, 459, 0, 0.001993388429752066, 0.0052720605700488, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.015 ],
[134, 459, 0, 0.011813553719008265, 0.031244171895617998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.868 ],
[460, 461, 0, 6.611570247933885e-05, 0.000174861047098892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.1 ],
[150, 461, 0, 0.008018512396694214, 0.021207147792120403, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.128 ],
[149, 461, 0, 0.005586115702479339, 0.0147740098693748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.449 ],
[521, 463, 0, 0.014348429752066114, 0.009487086110365599, 248.0, 248.0, 248.0, 0, 1, 1, -360, 10.850999999999999 ],
[462, 463, 0, 0.007197355371900825, 0.0047588433967958406, 248.0, 248.0, 248.0, 0, 1, 1, -360, 5.443 ],
[538, 463, 0, 0.012211570247933883, 0.0080742088497664, 248.0, 248.0, 248.0, 0, 1, 1, -360, 9.235 ],
[110, 464, 0, 0.0025753116343490306, 0.0924473799817492, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.875 ],
[90, 464, 0, 0.007328947368421053, 0.26309125979076, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 42.332 ],
[165, 464, 0, 0.002152527700831025, 0.0772704722900764, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 12.433 ],
[458, 465, 0, 0.002003305785123967, 0.0052982897270776, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.03 ],
[134, 465, 0, 0.011838677685950413, 0.031310619093534, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.906 ],
[524, 465, 0, 0.004293553719008264, 0.0113554763986092, 495.0, 495.0, 495.0, 0, 1, 1, -360, 6.494 ],
[466, 467, 0, 0.0023509349030470914, 0.084392804892244, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.579 ],
[110, 467, 0, 0.0025337603878116343, 0.09095579200221118, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.635 ],
[165, 467, 0, 0.0022891274238227145, 0.08217406777274441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.222000000000001 ],
[468, 469, 0, 0.0005269421487603305, 0.0013936425453786, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.797 ],
[541, 469, 0, 0.022390743801652895, 0.05921844221026801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 33.866 ],
[490, 469, 0, 0.028243305785123966, 0.07469714209944801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.718 ],
[263, 471, 0, 0.0371900826446281, 0.0245898347482832, 248.0, 248.0, 248.0, 0, 1, 1, -360, 28.125 ],
[470, 471, 0, 0.001570909090909091, 0.0010386746197682802, 248.0, 248.0, 248.0, 0, 1, 1, -360, 1.188 ],
[534, 471, 0, 0.024497190082644622, 0.0161973787927468, 248.0, 248.0, 248.0, 0, 1, 1, -360, 18.526 ],
[136, 472, 0, 0.0007079293628808865, 0.025412930201351602, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 4.0889999999999995 ],
[110, 472, 0, 0.00019511772853185596, 0.0070042485539216805, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.127 ],
[251, 472, 0, 4.207063711911357e-05, 0.00151023282928764, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.243 ],
[226, 474, 0, 0.017639669421487602, 0.011663231841509601, 248.0, 248.0, 248.0, 0, 1, 1, -360, 13.34 ],
[473, 474, 0, 0.003467107438016529, 0.00916971330986216, 495.0, 495.0, 495.0, 0, 2, 1, -360, 5.244 ],
[257, 474, 0, 0.020264462809917356, 0.053594910935781594, 495.0, 495.0, 495.0, 0, 2, 1, -360, 30.65 ],
[6, 474, 0, 0.08066247933884299, 0.05333349367016, 248.0, 248.0, 248.0, 0, 1, 1, -360, 61.001000000000005 ],
[299, 475, 0, 0.013238227146814403, 0.47521993028123993, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 76.464 ],
[3, 475, 0, 0.0002794321329639889, 0.010030929162389441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.614 ],
[210, 475, 0, 0.0001481994459833795, 0.00531999712702368, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.856 ],
[297, 476, 0, 0.0193500826446281, 0.05117658265464801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 29.267 ],
[296, 476, 0, 0.005596694214876033, 0.014801987636898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.465 ],
[295, 476, 0, 0.0009474380165289256, 0.00250575880492432, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.433 ],
[313, 478, 0, 0.008696849030470914, 0.31219557906752804, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.233000000000004 ],
[477, 478, 0, 1.5235457063711912e-05, 0.0005469155924977479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.08800000000000001 ],
[245, 478, 0, 0.005264542936288089, 0.188984197007248, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.408 ],
[479, 481, 0, 0.028420495867768597, 0.07516576970575199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.986000000000004 ],
[565, 481, 0, 0.024842314049586776, 0.065702289836964, 495.0, 495.0, 495.0, 0, 1, 1, -360, 37.574 ],
[480, 481, 0, 7.735537190082645e-05, 0.000204587425105844, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.11699999999999999 ],
[415, 482, 0, 0.011021814404432133, 0.0989140353680364, 856.0, 856.0, 856.0, 0, 1, 1, -360, 31.831 ],
[56, 482, 0, 0.002630886426592798, 0.0236105947261788, 856.0, 856.0, 856.0, 0, 1, 1, -360, 7.598 ],
[409, 482, 0, 0.0007635041551246537, 0.0068519822810072005, 856.0, 856.0, 856.0, 0, 1, 1, -360, 2.205 ],
[483, 484, 0, 9.037396121883656e-05, 0.000811050963873968, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.261 ],
[3, 484, 0, 0.010022160664819944, 0.08994275516621358, 856.0, 856.0, 856.0, 0, 1, 1, -360, 28.944000000000003 ],
[301, 484, 0, 0.00966516620498615, 0.08673894848517479, 856.0, 856.0, 856.0, 0, 1, 1, -360, 27.913 ],
[233, 485, 0, 0.01410180055401662, 0.1265550251138996, 856.0, 856.0, 856.0, 0, 1, 1, -360, 40.726 ],
[392, 485, 0, 0.00914819944598338, 0.0820994883738036, 856.0, 856.0, 856.0, 0, 1, 1, -360, 26.42 ],
[391, 485, 0, 8.518005540166207e-05, 0.000764438839512864, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.24600000000000002 ],
[579, 488, 0, 0.004636473829194215, 0.11036180126571601, 1486.0, 1486.0, 1486.0, 0, 1, 1, -360, 21.038 ],
[486, 488, 0, 0.00016969696969690082, 0.00403929018798184, 1486.0, 1486.0, 1486.0, 0, 1, 1, -360, 0.77 ],
[487, 488, 0, 0.00014567493112954544, 0.00346749456396992, 1486.0, 1486.0, 1486.0, 0, 1, 1, -360, 0.6609999999999999 ],
[270, 489, 0, 0.0001745152354570637, 0.0062646695140596, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.008 ],
[331, 489, 0, 0.003002943213296399, 0.10779830627119119, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 17.345 ],
[396, 489, 0, 0.01124792243767313, 0.40377286606072005, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 64.968 ],
[519, 253, 0, 0.013353485337561985, 0.141267767926912, 991.0, 991.0, 991.0, 0, 1, 1, -360, 40.394293146100004 ],
[382, 349, 0, 0.009091647380263157, 1.30547149138788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 105.02671053600001 ],
[349, 351, 0, 0.0005858117819605263, 0.0841168325920224, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 6.76729770521 ],
[459, 465, 0, 1.578788789911157e-05, 0.00016702153987596, 991.0, 991.0, 991.0, 0, 1, 1, -360, 0.047758360894800005 ],
[549, 550, 0, 3.680432518409091e-05, 0.000389356391787088, 991.0, 991.0, 991.0, 0, 1, 1, -360, 0.111333083682 ],
[550, 551, 0, 5.755645674710744e-05, 0.0006088951287918401, 991.0, 991.0, 991.0, 0, 1, 1, -360, 0.17410828165999997 ],
[194, 195, 0, 1.7560672583171745e-05, 0.00252154053805592, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.202860889681 ],
[247, 248, 0, 2.1755213937811637e-05, 0.0031238355819477198, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.25131623141 ],
[2, 294, 0, 2.3531392658518004e-05, 0.003378877444715, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.271834647991 ],
[549, 551, 0, 9.265809538429751e-05, 0.0009802386406577602, 991.0, 991.0, 991.0, 0, 1, 1, -360, 0.28029073853799996 ],
[54, 365, 0, 2.573045189134349e-05, 0.00369464080598484, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.297238180249 ],
[131, 265, 0, 2.7616389041343487e-05, 0.00396544290388756, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.319024526206 ],
[91, 92, 0, 2.8945628197853184e-05, 0.0041563086239824396, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.33437989694200004 ],
[247, 249, 0, 3.098840072160664e-05, 0.00444963074500788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.357978005136 ],
[186, 191, 0, 3.1591661821191135e-05, 0.00453625312865552, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.36494687735799997 ],
[129, 173, 0, 3.202671277479225e-05, 0.00459872218332188, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.369972585975 ],
[96, 202, 0, 3.5971247867797784e-05, 0.00516511877739804, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.415539855369 ],
[53, 320, 0, 3.784209581142659e-05, 0.00543375421308236, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.437151890814 ],
[24, 396, 0, 4.144748602818559e-05, 0.005951452925597279, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.47880135859800005 ],
[133, 156, 0, 4.431754564044322e-05, 0.0063635653674415605, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.511956287238 ],
[442, 452, 0, 4.483572190450138e-05, 0.006437970402313801, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.517942259441 ],
[445, 452, 0, 4.490753296371191e-05, 0.0064482817668697215, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.518771820797 ],
[247, 250, 0, 4.594910768732687e-05, 0.00659784169268824, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.530804092004 ],
[187, 195, 0, 4.755760376239612e-05, 0.006828805970367921, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.549385438663 ],
[216, 236, 0, 5.03353075283241e-05, 0.00722765701751724, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.581473472567 ],
[244, 389, 0, 5.1633313019736845e-05, 0.007414037889302401, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.596468032004 ],
[394, 406, 0, 5.6346419007686985e-05, 0.008090793734075721, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.650913832377 ],
[442, 445, 0, 6.388070648310249e-05, 0.00917264360085512, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.737949921293 ],
[442, 444, 0, 6.584378362735456e-05, 0.00945452224616264, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.760627388463 ],
[198, 472, 0, 8.37554210498615e-05, 0.0120264578966664, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.967542623967 ],
[464, 467, 0, 8.460287496468144e-05, 0.01214814397621276, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.977332411594 ],
[198, 251, 0, 8.83613182396122e-05, 0.012687819608389479, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 1.0207499483 ],
[112, 143, 0, 9.049653833033241e-05, 0.012994416294241841, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 1.04541601079 ],
[2, 490, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[5, 491, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[10, 492, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[12, 493, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[13, 494, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[15, 495, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[18, 496, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[20, 497, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[22, 498, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[24, 499, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[26, 500, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[30, 501, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[32, 502, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[37, 503, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[42, 504, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[46, 505, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[52, 506, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[56, 507, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[61, 508, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[68, 509, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[69, 510, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[74, 511, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[78, 512, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[86, 513, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[87, 514, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[94, 515, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[95, 516, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[96, 517, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[99, 518, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[100, 519, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[104, 520, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[105, 521, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[106, 522, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[107, 523, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[117, 524, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[120, 525, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[123, 526, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[124, 527, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[125, 528, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[128, 529, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[129, 530, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[138, 531, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[143, 532, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[156, 533, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[157, 534, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[159, 535, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[160, 536, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[165, 537, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[184, 538, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[191, 539, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[195, 540, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[201, 541, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[220, 542, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[231, 543, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[232, 544, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[233, 545, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[236, 546, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[245, 547, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[246, 548, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[248, 549, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[249, 550, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[250, 551, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[259, 552, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[261, 553, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[262, 554, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[265, 555, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[270, 556, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[277, 557, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[279, 558, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[280, 559, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[290, 560, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[301, 561, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[305, 562, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[306, 563, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[310, 564, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[313, 565, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[315, 566, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[320, 567, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[330, 568, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[332, 569, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[334, 570, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[336, 571, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[349, 572, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[351, 573, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[358, 574, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[360, 575, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[380, 576, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[382, 577, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[383, 578, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[389, 579, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[401, 580, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[402, 581, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[409, 582, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[415, 583, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[444, 584, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ],
[452, 585, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, 1, -360, 360 ]
])
ppc["gen_control"] = array([
[586, 1, 0.08658028904199107, 4.329014452099554, 0, 0, 0],
[589, 1, 0.010042676909098597, 0.5021338454549299, 0, 0, 0],
[590, 1, 0.012095775674984046, 0.6047887837492023, 0, 0, 0],
[593, 1, 0.0017666198683200384, 0.08833099341600192, 0, 0, 0],
[594, 1, 0.006047887837492023, 0.30239439187460115, 0, 0, 0],
[595, 1, 1.50560576164933, 75.2802880824665, 0, 0, 0],
[597, 1, 0.030239439187460113, 1.5119719593730057, 0, 0, 0],
[598, 1, 0.0038197186342054878, 0.1909859317102744, 0, 0, 0],
[599, 1, 0.0029602819415092537, 0.1480140970754627, 0, 0, 0],
[600, 1, 0.005379437076506062, 0.26897185382530314, 0, 0, 0],
[601, 1, 0.019576058000303126, 0.9788029000151565, 0, 0, 0],
[602, 1, 0.007830423200121252, 0.39152116000606263, 0, 0, 0],
[603, 1, 1.0997606567649967, 54.98803283824984, 0, 0, 0],
[607, 1, 0.5729577951308232, 28.64788975654116, 0, 0, 0],
[608, 1, 0.0076394372684109755, 0.3819718634205488, 0, 0, 0],
[609, 1, 0.0057932399285449895, 0.2896619964272495, 0, 0, 0],
[610, 1, 0.019576058000303126, 0.9788029000151565, 0, 0, 0],
[612, 1, 0.00954929658551372, 0.477464829275686, 0, 0, 0],
[613, 1, 0.027056340325622208, 1.3528170162811104, 0, 0, 0],
[614, 1, 0.00954929658551372, 0.477464829275686, 0, 0, 0],
[616, 1, 0.0046154933496649645, 0.23077466748324824, 0, 0, 0],
[617, 1, 0.04360845440717932, 2.1804227203589663, 0, 0, 0],
[618, 1, 0.010631550198538607, 0.5315775099269304, 0, 0, 0],
[619, 1, 0.037560566569687294, 1.8780283284843649, 0, 0, 0],
[621, 1, 0.24350706293059987, 12.175353146529993, 0, 0, 0],
[623, 1, 0.2419155134996809, 12.095775674984045, 0, 0, 0],
[624, 1, 0.004297183463481174, 0.21485917317405873, 0, 0, 0],
[628, 1, 0.14292113889652203, 7.1460569448261015, 0, 0, 0],
[629, 1, 0.023968734429639437, 1.198436721481972, 0, 0, 0],
[631, 1, 0.025401128917466494, 1.2700564458733248, 0, 0, 0],
[632, 1, 0.01435577586688896, 0.717788793344448, 0, 0, 0],
[637, 1, 0.017093240888069558, 0.854662044403478, 0, 0, 0],
[638, 1, 0.02048324117592693, 1.0241620587963465, 0, 0, 0],
[639, 1, 0.005029296201703893, 0.25146481008519467, 0, 0, 0],
[640, 1, 0.0038197186342054878, 0.1909859317102744, 0, 0, 0],
[641, 1, 0.0040107045659157625, 0.20053522829578813, 0, 0, 0],
[642, 1, 0.00919915571071155, 0.4599577855355775, 0, 0, 0],
[643, 1, 0.27279157245950864, 13.639578622975431, 0, 0, 0],
[646, 1, 0.03278591827693044, 1.6392959138465222, 0, 0, 0],
[647, 1, 0.00445633840657307, 0.2228169203286535, 0, 0, 0],
[650, 1, 0.4216014442504307, 21.080072212521536, 0, 0, 0],
[652, 1, 0.00746436683100989, 0.37321834155049455, 0, 0, 0],
[655, 1, 0.019576058000303126, 0.9788029000151565, 0, 0, 0],
[657, 1, 0.012095775674984046, 0.6047887837492023, 0, 0, 0],
[658, 1, 0.030239439187460113, 1.5119719593730057, 0, 0, 0],
[661, 1, 0.010408733278209955, 0.5204366639104978, 0, 0, 0],
[662, 1, 0.002928450952890874, 0.1464225476445437, 0, 0, 0],
[663, 1, 0.00238732414637843, 0.1193662073189215, 0, 0, 0],
[666, 1, 0.00919915571071155, 0.4599577855355775, 0, 0, 0],
[668, 1, 0.24382537281678363, 12.191268640839182, 0, 0, 0],
[670, 1, 0.0076394372684109755, 0.3819718634205488, 0, 0, 0],
[672, 1, 0.010536057232683471, 0.5268028616341736, 0, 0, 0],
[675, 1, 0.0033740847935481814, 0.16870423967740908, 0, 0, 0],
[676, 1, 0.11777465788800255, 5.888732894400127, 0, 0, 0],
[678, 1, 0.3237211542489151, 16.186057712445756, 0, 0, 0],
[679, 1, 0.2212253708977345, 11.061268544886726, 0, 0, 0],
[681, 1, 0.0063821132179850025, 0.31910566089925013, 0, 0, 0],
[683, 1, 0.008753521870054244, 0.4376760935027122, 0, 0, 0],
[687, 1, 0.42303383873825773, 21.151691936912886, 0, 0, 0],
[689, 1, 0.09867606471697511, 4.933803235848756, 0, 0, 0],
[691, 1, 0.008276057040778557, 0.4138028520389279, 0, 0, 0],
[693, 1, 0.06175211791965539, 3.0876058959827692, 0, 0, 0],
[694, 1, 0.005220282133414166, 0.2610141066707083, 0, 0, 0],
[695, 1, 0.004679155326901723, 0.23395776634508614, 0, 0, 0],
[696, 1, 0.22950142793851305, 11.475071396925653, 0, 0, 0],
[697, 1, 0.0036923946797319715, 0.1846197339865986, 0, 0, 0],
[698, 1, 0.0038197186342054878, 0.1909859317102744, 0, 0, 0],
[701, 1, 0.015024226627874922, 0.7512113313937461, 0, 0, 0],
[702, 1, 0.023363945645890238, 1.168197282294512, 0, 0, 0],
[704, 1, 0.16170142218136566, 8.085071109068283, 0, 0, 0],
[705, 1, 0.005411268065124442, 0.27056340325622213, 0, 0, 0],
[707, 1, 0.010822536130248884, 0.5411268065124443, 0, 0, 0],
[708, 1, 0.0024828171122335675, 0.12414085561167837, 0, 0, 0],
[711, 1, 0.056054370956965534, 2.802718547848277, 0, 0, 0],
[713, 1, 0.004265352474862795, 0.21326762374313976, 0, 0, 0],
[714, 1, 0.00477464829275686, 0.238732414637843, 0, 0, 0],
[716, 1, 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0],
[717, 1, 0.0017507043740108488, 0.08753521870054244, 0, 0, 0],
[719, 1, 0.623250757147862, 31.162537857393104, 0, 0, 0],
[722, 1, 0.006589014644004467, 0.3294507322002233, 0, 0, 0],
[723, 1, 0.006270704757820675, 0.31353523789103377, 0, 0, 0],
[724, 1, 0.0019257748114119334, 0.09628874057059668, 0, 0, 0],
[725, 1, 0.25464790894703254, 12.732395447351628, 0, 0, 0],
[727, 1, 0.019576058000303126, 0.9788029000151565, 0, 0, 0],
[728, 1, 0.16233804195373325, 8.116902097686662, 0, 0, 0],
[730, 1, 0.10077690996578814, 5.038845498289407, 0, 0, 0],
[731, 1, 0.2848873481344926, 14.244367406724633, 0, 0, 0],
[732, 1, 0.004647324338283344, 0.2323662169141672, 0, 0, 0],
[733, 1, 0.12624170086049138, 6.312085043024569, 0, 0, 0],
[735, 1, 0.013496339174192726, 0.6748169587096363, 0, 0, 0],
[737, 1, 0.00891267681314614, 0.445633840657307, 0, 0, 0],
[738, 1, 0.04408591923645501, 2.2042959618227504, 0, 0, 0],
[739, 1, 0.01906676218240906, 0.9533381091204531, 0, 0, 0],
[741, 1, 0.0340591578216656, 1.7029578910832803, 0, 0, 0],
[742, 1, 0.0028647889756541157, 0.14323944878270578, 0, 0, 0],
[743, 1, 0.44881693951914486, 22.440846975957243, 0, 0, 0],
[745, 1, 0.013369015219719208, 0.6684507609859605, 0, 0, 0],
[746, 1, 0.03183098861837907, 1.5915494309189535, 0, 0, 0],
[747, 1, 0.0039788735772973835, 0.1989436788648692, 0, 0, 0],
[748, 1, 0.03501408748021698, 1.7507043740108488, 0, 0, 0],
[749, 1, 0.0025464790894703256, 0.12732395447351627, 0, 0, 0],
[750, 1, 0.028902537665488188, 1.4451268832744095, 0, 0, 0],
[753, 1, 0.049624511256052974, 2.4812255628026487, 0, 0, 0],
[758, 1, 0.0058887328944001276, 0.2944366447200064, 0, 0, 0],
[760, 1, 0.2527380496299298, 12.636902481496492, 0, 0, 0],
[761, 1, 0.004997465213085514, 0.2498732606542757, 0, 0, 0],
[762, 1, 0.3517324242330887, 17.586621211654435, 0, 0, 0],
[763, 1, 0.006461690689530951, 0.32308453447654756, 0, 0, 0],
[765, 1, 0.018780283284843647, 0.9390141642421824, 0, 0, 0],
[767, 1, 0.0035650707252584553, 0.17825353626292276, 0, 0, 0],
[769, 1, 0.013782818071758136, 0.6891409035879068, 0, 0, 0],
[771, 1, 0.21963382146681557, 10.981691073340778, 0, 0, 0],
[772, 1, 0.002992112930127632, 0.1496056465063816, 0, 0, 0],
[774, 1, 0.010663381187156987, 0.5331690593578494, 0, 0, 0],
[776, 1, 0.01782535362629228, 0.891267681314614, 0, 0, 0],
[777, 1, 0.012573240504259732, 0.6286620252129866, 0, 0, 0],
[778, 1, 0.004679155326901723, 0.23395776634508614, 0, 0, 0],
[781, 1, 0.4169859509007658, 20.84929754503829, 0, 0, 0],
[784, 1, 0.4058451048843331, 20.292255244216655, 0, 0, 0],
[785, 1, 0.00047746482927568597, 0.0238732414637843, 0, 0, 0],
[787, 1, 0.24764509145098912, 12.382254572549456, 0, 0, 0],
[788, 1, 0.2785211504108168, 13.926057520540843, 0, 0, 0],
[789, 1, 0.0123185925953127, 0.615929629765635, 0, 0, 0],
[790, 1, 0.02412788937273133, 1.2063944686365666, 0, 0, 0],
[791, 1, 0.0031830988618379067, 0.15915494309189535, 0, 0, 0],
[792, 1, 0.009979014931861837, 0.49895074659309185, 0, 0, 0],
[795, 1, 0.004329014452099553, 0.2164507226049777, 0, 0, 0],
[798, 1, 0.10179550160157626, 5.089775080078813, 0, 0, 0],
[800, 1, 0.0058091554228541795, 0.290457771142709, 0, 0, 0],
[801, 1, 0.007957747154594767, 0.3978873577297384, 0, 0, 0],
[802, 1, 0.07957747154594767, 3.9788735772973833, 0, 0, 0],
[805, 1, 0.44881693951914486, 22.440846975957243, 0, 0, 0],
[806, 1, 0.005697746962689853, 0.2848873481344927, 0, 0, 0],
[808, 1, 0.034616200122487235, 1.7308100061243619, 0, 0, 0],
[809, 1, 0.0039788735772973835, 0.1989436788648692, 0, 0, 0],
[810, 1, 0.03116253785739311, 1.5581268928696554, 0, 0, 0],
[811, 1, 0.0040107045659157625, 0.20053522829578813, 0, 0, 0],
[814, 1, 0.014164789935178685, 0.7082394967589343, 0, 0, 0],
[815, 1, 0.004265352474862795, 0.21326762374313976, 0, 0, 0],
[816, 1, 0.012748310941660816, 0.6374155470830408, 0, 0, 0],
[817, 1, 0.017188733853924696, 0.8594366926962349, 0, 0, 0],
[818, 1, 0.24096058384112953, 12.048029192056477, 0, 0, 0],
[821, 1, 0.013130282805081364, 0.6565141402540683, 0, 0, 0],
[822, 1, 0.04265352474862795, 2.1326762374313977, 0, 0, 0],
[825, 1, 0.013591832140047864, 0.6795916070023932, 0, 0, 0],
[826, 1, 0.018461973398659858, 0.9230986699329929, 0, 0, 0],
[829, 1, 0.06716338598477982, 3.3581692992389915, 0, 0, 0],
[830, 1, 0.02832957987035737, 1.4164789935178685, 0, 0, 0],
[833, 1, 0.0059205638830185075, 0.2960281941509254, 0, 0, 0],
[834, 1, 0.007416620348082323, 0.37083101740411617, 0, 0, 0],
[835, 1, 0.010138169874953733, 0.5069084937476867, 0, 0, 0],
[836, 1, 0.008116902097686661, 0.4058451048843331, 0, 0, 0],
[837, 1, 0.15024226627874918, 7.512113313937459, 0, 0, 0],
[839, 1, 0.011666057328635928, 0.5833028664317964, 0, 0, 0],
[840, 1, 0.4427690516816528, 22.138452584082643, 0, 0, 0],
[841, 1, 0.0037083101740411615, 0.18541550870205808, 0, 0, 0],
[842, 1, 0.17204649348233886, 8.602324674116945, 0, 0, 0],
[843, 1, 0.10599719209920229, 5.2998596049601145, 0, 0, 0],
[844, 1, 0.012732395447351627, 0.6366197723675814, 0, 0, 0],
[845, 1, 0.10122254380644544, 5.061127190322272, 0, 0, 0],
[847, 1, 0.08912676813146139, 4.45633840657307, 0, 0, 0],
[848, 1, 0.013369015219719208, 0.6684507609859605, 0, 0, 0],
[849, 1, 0.24796340133717296, 12.398170066858649, 0, 0, 0],
[850, 1, 0.005092958178940651, 0.25464790894703254, 0, 0, 0],
[851, 1, 0.01265281797580568, 0.632640898790284, 0, 0, 0],
[852, 1, 0.005092958178940651, 0.25464790894703254, 0, 0, 0],
[853, 1, 0.0036923946797319715, 0.1846197339865986, 0, 0, 0],
[854, 1, 0.026037748689834075, 1.3018874344917037, 0, 0, 0],
[855, 1, 0.21899720169444797, 10.949860084722399, 0, 0, 0],
[856, 1, 0.011459155902616463, 0.5729577951308231, 0, 0, 0],
[857, 1, 0.4462704604296745, 22.313523021483725, 0, 0, 0],
[858, 1, 0.01808000153523931, 0.9040000767619655, 0, 0, 0],
[859, 1, 0.027056340325622208, 1.3528170162811104, 0, 0, 0],
[860, 1, 0.0039788735772973835, 0.1989436788648692, 0, 0, 0],
[862, 1, 0.23077466748324824, 11.538733374162412, 0, 0, 0],
[863, 1, 0.0001909859317102744, 0.00954929658551372, 0, 0, 0],
[864, 1, 0.2785211504108168, 13.926057520540843, 0, 0, 0],
[865, 1, 0.0035014087480216977, 0.17507043740108488, 0, 0, 0],
[867, 1, 0.24478030247533505, 12.239015123766753, 0, 0, 0],
[869, 1, 0.4329014452099553, 21.645072260497766, 0, 0, 0],
[870, 1, 0.018589297353133374, 0.9294648676566688, 0, 0, 0],
[872, 1, 0.00716197243913529, 0.3580986219567645, 0, 0, 0],
[873, 1, 0.038833806114422456, 1.941690305721123, 0, 0, 0],
[874, 1, 0.006589014644004467, 0.3294507322002233, 0, 0, 0],
[875, 1, 0.007766761222884492, 0.38833806114422464, 0, 0, 0],
[877, 1, 0.007894085177358009, 0.39470425886790045, 0, 0, 0],
[881, 1, 0.3187236890358296, 15.93618445179148, 0, 0, 0],
[882, 1, 0.005538592019597957, 0.2769296009798979, 0, 0, 0],
[883, 1, 0.005729577951308231, 0.28647889756541156, 0, 0, 0],
[886, 1, 0.8186930272647096, 40.93465136323548, 0, 0, 0],
[889, 1, 0.0030239439187460114, 0.15119719593730058, 0, 0, 0],
[890, 1, 0.0076394372684109755, 0.3819718634205488, 0, 0, 0],
[893, 1, 0.00954929658551372, 0.477464829275686, 0, 0, 0],
[894, 1, 0.025146481008519465, 1.2573240504259733, 0, 0, 0],
[895, 1, 0.0030239439187460114, 0.15119719593730058, 0, 0, 0],
[896, 1, 0.0038197186342054878, 0.1909859317102744, 0, 0, 0],
[898, 1, 0.013464508185574344, 0.6732254092787172, 0, 0, 0],
[900, 1, 0.03584169318429482, 1.7920846592147412, 0, 0, 0],
[902, 1, 0.006207042780583919, 0.31035213902919595, 0, 0, 0],
[903, 1, 0.0031990143561470966, 0.15995071780735484, 0, 0, 0],
[905, 1, 0.021851973686517232, 1.0925986843258617, 0, 0, 0],
[907, 1, 0.02142225534016911, 1.0711127670084555, 0, 0, 0],
[909, 1, 0.005856901905781748, 0.2928450952890874, 0, 0, 0],
[911, 1, 0.09183240216402361, 4.59162010820118, 0, 0, 0],
[913, 1, 0.02355493157760051, 1.1777465788800257, 0, 0, 0],
[914, 1, 0.03568253824120294, 1.7841269120601468, 0, 0, 0],
[915, 1, 0.0038197186342054878, 0.1909859317102744, 0, 0, 0],
[916, 1, 0.06238873769202297, 3.119436884601149, 0, 0, 0],
[917, 1, 0.005411268065124442, 0.27056340325622213, 0, 0, 0],
[918, 1, 0.012254930618075942, 0.612746530903797, 0, 0, 0],
[919, 1, 0.004965634224467135, 0.24828171122335674, 0, 0, 0],
[920, 1, 0.0020371832715762603, 0.10185916357881303, 0, 0, 0],
[921, 1, 0.019735212943395024, 0.9867606471697512, 0, 0, 0],
[922, 1, 0.05220282133414166, 2.6101410667070835, 0, 0, 0],
[923, 1, 0.023236621691416718, 1.161831084570836, 0, 0, 0],
[925, 1, 0.008276057040778557, 0.4138028520389279, 0, 0, 0],
[928, 1, 0.019576058000303126, 0.9788029000151565, 0, 0, 0],
[931, 1, 0.03455253814525047, 1.7276269072625237, 0, 0, 0],
[934, 1, 0.09421972631040204, 4.710986315520103, 0, 0, 0],
[935, 1, 0.007352958370845565, 0.36764791854227824, 0, 0, 0],
[936, 1, 0.016615776058793875, 0.8307888029396938, 0, 0, 0],
[937, 1, 0.00477464829275686, 0.238732414637843, 0, 0, 0],
[939, 1, 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0],
[940, 1, 0.009421972631040205, 0.47109863155201026, 0, 0, 0],
[942, 1, 0.016520283092938737, 0.8260141546469368, 0, 0, 0],
[943, 1, 0.021103945453985317, 1.055197272699266, 0, 0, 0],
[944, 1, 0.004042535554534142, 0.2021267777267071, 0, 0, 0],
[945, 1, 0.011140846016432674, 0.5570423008216338, 0, 0, 0],
[946, 1, 0.025464790894703253, 1.2732395447351628, 0, 0, 0],
[948, 1, 0.025146481008519465, 1.2573240504259733, 0, 0, 0],
[950, 1, 0.005092958178940651, 0.25464790894703254, 0, 0, 0],
[951, 1, 0.14132958946560306, 7.066479473280154, 0, 0, 0],
[952, 1, 0.005045211696013082, 0.2522605848006541, 0, 0, 0],
[956, 1, 0.020690142601946394, 1.0345071300973196, 0, 0, 0],
[957, 1, 0.0019098593171027439, 0.0954929658551372, 0, 0, 0],
[958, 1, 0.010615634704229418, 0.530781735211471, 0, 0, 0],
[959, 1, 0.007241549910681238, 0.3620774955340619, 0, 0, 0],
[960, 1, 0.004217605991935227, 0.21088029959676136, 0, 0, 0],
[963, 1, 0.2785211504108168, 13.926057520540843, 0, 0, 0],
[965, 1, 0.11204507993669433, 5.602253996834716, 0, 0, 0],
[966, 1, 0.021008452488130186, 1.0504226244065094, 0, 0, 0],
[967, 1, 0.01193662073189215, 0.5968310365946076, 0, 0, 0],
[968, 1, 0.017188733853924696, 0.8594366926962349, 0, 0, 0],
[969, 1, 0.018111832523857688, 0.9055916261928845, 0, 0, 0],
[971, 1, 0.0031830988618379067, 0.15915494309189535, 0, 0, 0],
[973, 1, 0.4287634166895661, 21.438170834478306, 0, 0, 0],
[976, 1, 0.008562535938343968, 0.4281267969171984, 0, 0, 0],
[977, 1, 0.1031324031235482, 5.15662015617741, 0, 0, 0],
[978, 1, 0.0007321127382227185, 0.03660563691113593, 0, 0, 0],
[980, 1, 0.11140846016432673, 5.570423008216337, 0, 0, 0],
[981, 1, 0.03787887645587108, 1.8939438227935543, 0, 0, 0],
[982, 1, 0.0015756339366097638, 0.07878169683048819, 0, 0, 0],
[983, 1, 0.01400563499208679, 0.7002817496043395, 0, 0, 0],
[984, 1, 0.14801409707546268, 7.400704853773133, 0, 0, 0],
[985, 1, 0.0035014087480216977, 0.17507043740108488, 0, 0, 0],
[986, 1, 0.0017825353626292277, 0.08912676813146138, 0, 0, 0],
[987, 1, 0.02618098813861678, 1.3090494069308392, 0, 0, 0],
[988, 1, 0.0008116902097686662, 0.04058451048843331, 0, 0, 0],
[990, 1, 0.0954929658551372, 4.7746482927568605, 0, 0, 0],
[993, 1, 0.06238873769202297, 3.119436884601149, 0, 0, 0],
[994, 1, 0.010504226244065093, 0.5252113122032547, 0, 0, 0],
[995, 1, 0.0006684507609859605, 0.033422538049298026, 0, 0, 0],
[996, 1, 0.003660563691113593, 0.18302818455567965, 0, 0, 0],
[997, 1, 0.005984225860255264, 0.2992112930127632, 0, 0, 0],
[998, 1, 0.13464508185574348, 6.732254092787174, 0, 0, 0],
[999, 1, 0.004965634224467135, 0.24828171122335674, 0, 0, 0],
[1000, 1, 0.015597184423005743, 0.7798592211502873, 0, 0, 0],
[1002, 1, 0.0031512678732195276, 0.15756339366097638, 0, 0, 0],
[1003, 1, 0.2864788975654116, 14.32394487827058, 0, 0, 0],
[1006, 1, 0.038833806114422456, 1.941690305721123, 0, 0, 0],
[1007, 1, 0.007416620348082323, 0.37083101740411617, 0, 0, 0],
[1008, 1, 0.015597184423005743, 0.7798592211502873, 0, 0, 0],
[1010, 1, 0.238732414637843, 11.93662073189215, 0, 0, 0],
[1011, 1, 0.005952394871636886, 0.2976197435818443, 0, 0, 0],
[1012, 1, 0.9024085273310466, 45.12042636655233, 0, 0, 0],
[1014, 1, 0.238732414637843, 11.93662073189215, 0, 0, 0],
[1018, 1, 0.05599070897972878, 2.7995354489864392, 0, 0, 0],
[1019, 1, 0.03819718634205488, 1.909859317102744, 0, 0, 0],
[1023, 1, 6.366197723675813e-05, 0.003183098861837907, 0, 0, 0],
[1025, 1, 0.03616000307047862, 1.808000153523931, 0, 0, 0],
[1026, 1, 0.20868396138209316, 10.434198069104658, 0, 0, 0],
[1028, 2, 0.025464790894703257, 1.273239544735163, 0, 0, 0],
[1029, 2, 0.003819718634205488, 0.19098593171027442, 0, 0, 0],
[1030, 2, 0.06480789282701978, 3.2403946413509894, 0, 0, 0],
[1031, 2, 0.0921316134570364, 4.60658067285182, 0, 0, 0],
[1032, 2, 0.009772775025341927, 0.4886387512670964, 0, 0, 0],
[1033, 2, 0.0015543376717485793, 0.07771688358742897, 0, 0, 0],
[1034, 2, 0.005364335122251813, 0.26821675611259066, 0, 0, 0],
[1035, 3, 0.00317587127473044, 0.158793563736522, 2.22, 61.69, 0.004502],
[1036, 2, 0.003538471088451239, 0.17692355442256197, 0, 0, 0],
[1037, 2, 0.0032845967867616726, 0.16422983933808363, 0, 0, 0],
[1038, 2, 0.0035759833548530246, 0.17879916774265123, 0, 0, 0],
[1039, 2, 0.0033678813297702355, 0.1683940664885118, 0, 0, 0],
[1041, 2, 0.012998987840239671, 0.6499493920119837, 0, 0, 0],
[1042, 2, 0.0013374224133557281, 0.0668711206677864, 0, 0, 0],
[1044, 3, 0.0012140138945870601, 0.060700694729353, 2.22, 61.69, 0.004502],
[1046, 2, 0.0032875263364469907, 0.16437631682234954, 0, 0, 0],
[1047, 3, 0.0005212006415679155, 0.026060032078395773, 2.22, 61.69, 0.004502],
[1048, 2, 0.0022653377413018724, 0.11326688706509364, 0, 0, 0],
[1049, 2, 0.01870104799381521, 0.9350523996907605, 0, 0, 0],
[1050, 2, 0.0017161801534011875, 0.08580900767005938, 0, 0, 0],
[1051, 2, 0.011268551438979963, 0.5634275719489983, 0, 0, 0],
[1052, 3, 0.001315809692296204, 0.06579048461481019, 2.22, 61.69, 0.004502],
[1053, 3, 0.001042024786453249, 0.05210123932266245, 2.22, 61.69, 0.004502],
[1054, 2, 0.017434200209443074, 0.8717100104721537, 0, 0, 0],
[1055, 3, 7.255367011902793e-05, 0.0036276835059513967, 2.22, 61.69, 0.004502],
[1056, 2, 0.02185427247219657, 1.0927136236098287, 0, 0, 0],
[1057, 2, 0.010956497647839606, 0.5478248823919804, 0, 0, 0],
[1058, 2, 0.02761344248663413, 1.3806721243317066, 0, 0, 0],
[1059, 2, 0.01272767318121002, 0.636383659060501, 0, 0, 0],
[1060, 3, 0.0002750105502899529, 0.013750527514497644, 2.22, 61.69, 0.004502],
[1061, 2, 0.004862954432750976, 0.2431477216375488, 0, 0, 0],
[1062, 3, 7.333747745020713e-05, 0.0036668738725103567, 2.22, 61.69, 0.004502],
[1063, 3, 0.00022007597509710681, 0.011003798754855342, 2.22, 61.69, 0.004502],
[1064, 2, 0.013355424896304362, 0.667771244815218, 0, 0, 0],
[1065, 2, 0.020654478247623165, 1.0327239123811582, 0, 0, 0],
[1066, 2, 0.004269679264204669, 0.21348396321023344, 0, 0, 0],
[1067, 3, 0.002078788013715776, 0.1039394006857888, 2.22, 61.69, 0.004502],
[1068, 3, 0.00014512554313847776, 0.007256277156923888, 2.22, 61.69, 0.004502],
[1069, 3, 0.00010143951295915809, 0.005071975647957905, 2.22, 61.69, 0.004502],
[1070, 3, 2.3689278981581715e-05, 0.001184463949079086, 2.22, 61.69, 0.004502],
[1071, 3, 0.00021315991932608, 0.010657995966304002, 2.22, 61.69, 0.004502],
[1072, 2, 0.007168748144119091, 0.3584374072059546, 0, 0, 0],
[1073, 2, 0.004954025493475761, 0.24770127467378808, 0, 0, 0],
[1074, 2, 0.009778033156939965, 0.48890165784699824, 0, 0, 0],
[1075, 3, 0.0009142432329184414, 0.04571216164592208, 2.22, 61.69, 0.004502],
[1077, 3, 0.000761621711582911, 0.038081085579145545, 2.22, 61.69, 0.004502],
[1078, 3, 0.0010764248660874562, 0.05382124330437281, 2.22, 61.69, 0.004502],
[1079, 2, 0.004604543003215469, 0.23022715016077344, 0, 0, 0],
[1080, 2, 0.005216654256351391, 0.2608327128175696, 0, 0, 0],
[1081, 2, 0.01643746145779033, 0.8218730728895166, 0, 0, 0],
[1082, 2, 0.015076341350664345, 0.7538170675332174, 0, 0, 0],
[1083, 2, 0.019983163198675734, 0.9991581599337868, 0, 0, 0],
[1084, 2, 0.018855524406049307, 0.9427762203024654, 0, 0, 0],
[1085, 2, 0.0037788529320756745, 0.1889426466037837, 0, 0, 0],
[1086, 2, 0.006918625580223116, 0.34593127901115583, 0, 0, 0],
[1087, 2, 0.0032275229191801595, 0.16137614595900798, 0, 0, 0],
[1088, 3, 0.0009589741139576335, 0.04794870569788167, 2.22, 61.69, 0.004502],
[1089, 2, 0.009823983504007974, 0.49119917520039863, 0, 0, 0],
[1090, 2, 0.005674885746854652, 0.2837442873427326, 0, 0, 0],
[1091, 3, 0.001168793996530651, 0.05843969982653256, 2.22, 61.69, 0.004502],
[1092, 2, 0.0013687465331790676, 0.06843732665895338, 0, 0, 0],
[1093, 2, 0.007017509546711356, 0.3508754773355678, 0, 0, 0],
[1094, 3, 0.00014185080981113786, 0.0070925404905568925, 2.22, 61.69, 0.004502],
[1095, 3, 7.71951382648268e-06, 0.000385975691324134, 2.22, 61.69, 0.004502],
[1096, 2, 0.0029145237970444643, 0.14572618985222321, 0, 0, 0],
[1097, 3, 0.0002728726471928731, 0.013643632359643654, 2.22, 61.69, 0.004502],
[1098, 2, 0.004521623727146264, 0.22608118635731317, 0, 0, 0],
[1099, 2, 0.018521637260932335, 0.9260818630466169, 0, 0, 0],
[1100, 3, 7.335549646801683e-07, 3.667774823400842e-05, 2.22, 61.69, 0.004502],
[1101, 2, 0.0021341020267997028, 0.10670510133998513, 0, 0, 0],
[1102, 2, 0.008936050319297435, 0.44680251596487175, 0, 0, 0],
[1103, 2, 0.006751135880742038, 0.33755679403710187, 0, 0, 0],
[1104, 3, 8.200597012001097e-06, 0.0004100298506000548, 2.22, 61.69, 0.004502],
[1105, 3, 7.430370821118754e-05, 0.003715185410559377, 2.22, 61.69, 0.004502],
[1106, 3, 9.496706349756433e-05, 0.004748353174878216, 2.22, 61.69, 0.004502],
[1107, 2, 0.002514754747681537, 0.12573773738407681, 0, 0, 0],
[1108, 2, 0.010075472977677913, 0.5037736488838956, 0, 0, 0],
[1109, 3, 2.3877174563372565e-05, 0.0011938587281686282, 2.22, 61.69, 0.004502],
[1110, 3, 5.6797921539226925e-05, 0.0028398960769613463, 2.22, 61.69, 0.004502],
[1111, 2, 0.0027876433772406257, 0.13938216886203128, 0, 0, 0],
[1112, 2, 0.004265767031264296, 0.2132883515632148, 0, 0, 0],
[1113, 3, 0.00022012925719619891, 0.011006462859809947, 2.22, 61.69, 0.004502],
[1114, 3, 0.0008560555102861403, 0.042802775514307015, 2.22, 61.69, 0.004502],
[1115, 2, 0.0032197222090973076, 0.16098611045486538, 0, 0, 0],
[1116, 3, 0.002075453185310181, 0.10377265926550905, 2.22, 61.69, 0.004502],
[1117, 2, 0.005780032679669937, 0.2890016339834969, 0, 0, 0],
[1118, 3, 0.0004094636121064103, 0.02047318060532052, 2.22, 61.69, 0.004502],
[1119, 3, 0.0027536366373517632, 0.13768183186758817, 2.22, 61.69, 0.004502],
[1120, 3, 0.00014563422679717648, 0.007281711339858825, 2.22, 61.69, 0.004502],
[1121, 3, 3.4414977793908876e-05, 0.0017207488896954439, 2.22, 61.69, 0.004502],
[1122, 3, 8.894132329422267e-05, 0.004447066164711133, 2.22, 61.69, 0.004502],
[1123, 3, 9.32225252447514e-05, 0.00466112626223757, 2.22, 61.69, 0.004502],
[1124, 3, 8.201464578534214e-05, 0.004100732289267108, 2.22, 61.69, 0.004502],
[1125, 3, 0.0009107448109473576, 0.04553724054736788, 2.22, 61.69, 0.004502],
[1126, 3, 0.0010150413250921298, 0.050752066254606494, 2.22, 61.69, 0.004502],
[1127, 2, 0.003587869493403156, 0.17939347467015782, 0, 0, 0],
[1128, 3, 9.85754616930036e-05, 0.004928773084650179, 2.22, 61.69, 0.004502],
[1129, 3, 0.00015167785485332866, 0.0075838927426664345, 2.22, 61.69, 0.004502],
[1130, 3, 4.313144137237104e-05, 0.0021565720686185525, 2.22, 61.69, 0.004502],
[1131, 3, 9.338261111863579e-05, 0.00466913055593179, 2.22, 61.69, 0.004502],
[1132, 3, 1.598304249187116e-05, 0.0007991521245935579, 2.22, 61.69, 0.004502],
[1133, 3, 4.5810964480308454e-05, 0.002290548224015423, 2.22, 61.69, 0.004502],
[1134, 3, 3.236913111220881e-05, 0.0016184565556104404, 2.22, 61.69, 0.004502],
[1135, 3, 0.00030684246506199216, 0.01534212325309961, 2.22, 61.69, 0.004502],
[1136, 3, 2.5636662405410735e-05, 0.0012818331202705368, 2.22, 61.69, 0.004502],
[1137, 3, 0.00018370212263491662, 0.00918510613174583, 2.22, 61.69, 0.004502],
[1138, 3, 7.98498118498449e-05, 0.003992490592492246, 2.22, 61.69, 0.004502],
[1139, 3, 0.0012225149594472903, 0.06112574797236452, 2.22, 61.69, 0.004502],
[1140, 3, 0.0018073289497007397, 0.09036644748503699, 2.22, 61.69, 0.004502],
[1141, 2, 0.005339291711123932, 0.2669645855561966, 0, 0, 0],
[1142, 3, 7.73959943559724e-05, 0.00386979971779862, 2.22, 61.69, 0.004502],
[1143, 3, 0.0009515158509821171, 0.04757579254910586, 2.22, 61.69, 0.004502],
[1144, 2, 0.00334399697192306, 0.16719984859615303, 0, 0, 0],
[1145, 2, 0.011197481443497569, 0.5598740721748785, 0, 0, 0],
[1146, 3, 5.4833151376821656e-05, 0.002741657568841083, 2.22, 61.69, 0.004502],
[1147, 3, 0.002909588342312674, 0.14547941711563372, 2.22, 61.69, 0.004502],
[1148, 3, 0.0005993650905551883, 0.029968254527759416, 2.22, 61.69, 0.004502],
[1149, 3, 0.00026672685204354104, 0.013336342602177052, 2.22, 61.69, 0.004502],
[1150, 3, 0.0001204929064021154, 0.00602464532010577, 2.22, 61.69, 0.004502],
[1151, 3, 0.00043239573730817076, 0.021619786865408542, 2.22, 61.69, 0.004502],
[1152, 3, 3.9796369738190234e-06, 0.0001989818486909512, 2.22, 61.69, 0.004502],
[1153, 3, 2.543747302116541e-06, 0.00012718736510582707, 2.22, 61.69, 0.004502],
[1154, 3, 5.939787701451754e-06, 0.00029698938507258764, 2.22, 61.69, 0.004502],
[1155, 3, 2.0319819845729137e-05, 0.001015990992286457, 2.22, 61.69, 0.004502],
[1156, 3, 0.0008888342953225629, 0.044441714766128154, 2.22, 61.69, 0.004502],
[1157, 3, 0.00014449421139309436, 0.007224710569654718, 2.22, 61.69, 0.004502],
[1158, 3, 3.9344224255474475e-05, 0.001967211212773724, 2.22, 61.69, 0.004502],
[1159, 3, 0.0006423837433282069, 0.032119187166410344, 2.22, 61.69, 0.004502],
[1160, 2, 0.006583846414473584, 0.3291923207236792, 0, 0, 0],
[1161, 3, 0.0007639741440540192, 0.038198707202700966, 2.22, 61.69, 0.004502],
[1162, 2, 0.012733717176428691, 0.6366858588214346, 0, 0, 0],
[1164, 2, 0.007318959323231913, 0.3659479661615957, 0, 0, 0],
[1166, 2, 0.005301588846150501, 0.26507944230752506, 0, 0, 0],
[1167, 3, 0.0001907109190583028, 0.00953554595291514, 2.22, 61.69, 0.004502],
[1168, 3, 4.6735632418379986e-05, 0.0023367816209189994, 2.22, 61.69, 0.004502],
[1169, 3, 8.929850730838101e-05, 0.004464925365419051, 2.22, 61.69, 0.004502],
[1170, 3, 1.00233247146895e-05, 0.0005011662357344751, 2.22, 61.69, 0.004502],
[1171, 3, 0.0004260194354054759, 0.021300971770273798, 2.22, 61.69, 0.004502],
[1172, 3, 0.00011513389518096898, 0.005756694759048449, 2.22, 61.69, 0.004502],
[1173, 2, 0.006452614026547609, 0.32263070132738053, 0, 0, 0],
[1174, 3, 4.754703790085141e-05, 0.00237735189504257, 2.22, 61.69, 0.004502],
[1175, 3, 2.7710161030475335e-05, 0.001385508051523767, 2.22, 61.69, 0.004502],
[1176, 3, 7.75663051366249e-06, 0.0003878315256831245, 2.22, 61.69, 0.004502],
[1177, 3, 0.0009447268553453907, 0.04723634276726953, 2.22, 61.69, 0.004502],
[1178, 3, 0.0001088973020076013, 0.005444865100380065, 2.22, 61.69, 0.004502],
[1179, 3, 3.969316682855094e-05, 0.001984658341427547, 2.22, 61.69, 0.004502],
[1180, 3, 2.5956634148895864e-05, 0.0012978317074447932, 2.22, 61.69, 0.004502],
[1181, 2, 0.00545834972439398, 0.272917486219699, 0, 0, 0],
[1182, 2, 0.006322880792722177, 0.3161440396361089, 0, 0, 0],
[1183, 3, 0.0014314935186861295, 0.07157467593430648, 2.22, 61.69, 0.004502],
[1184, 3, 0.00015810533075432708, 0.007905266537716353, 2.22, 61.69, 0.004502],
[1185, 3, 0.0006974320121398697, 0.034871600606993486, 2.22, 61.69, 0.004502],
[1186, 3, 0.0012771847490467955, 0.06385923745233978, 2.22, 61.69, 0.004502],
[1187, 3, 0.0003086504024546428, 0.01543252012273214, 2.22, 61.69, 0.004502],
[1188, 2, 0.011440868435801076, 0.5720434217900537, 0, 0, 0],
[1189, 3, 0.0006752949613083114, 0.03376474806541557, 2.22, 61.69, 0.004502],
[1190, 2, 0.011056408319218359, 0.552820415960918, 0, 0, 0],
[1191, 2, 0.004652379906159672, 0.23261899530798363, 0, 0, 0],
[1192, 3, 0.0009482218539415114, 0.04741109269707557, 2.22, 61.69, 0.004502],
[1193, 3, 9.320005102883975e-05, 0.0046600025514419875, 2.22, 61.69, 0.004502],
[1194, 3, 0.00033807612872480814, 0.016903806436240405, 2.22, 61.69, 0.004502],
[1195, 3, 7.285440296486341e-06, 0.0003642720148243171, 2.22, 61.69, 0.004502],
[1196, 2, 0.0040761948650300354, 0.20380974325150175, 0, 0, 0],
[1197, 2, 0.0023095720666282643, 0.11547860333141323, 0, 0, 0],
[1198, 3, 0.0016279886826880022, 0.08139943413440012, 2.22, 61.69, 0.004502],
[1199, 2, 0.012822920004466005, 0.6411460002233003, 0, 0, 0],
[1200, 2, 0.0035658606694853635, 0.1782930334742682, 0, 0, 0],
[1201, 3, 0.0007239107895971019, 0.03619553947985509, 2.22, 61.69, 0.004502],
[1202, 3, 0.00176071556288929, 0.0880357781444645, 2.22, 61.69, 0.004502],
[1203, 2, 0.0063796286094078974, 0.31898143047039484, 0, 0, 0],
[1204, 3, 0.0015802630524518553, 0.07901315262259277, 2.22, 61.69, 0.004502],
[1205, 3, 1.3927092046315124e-05, 0.0006963546023157563, 2.22, 61.69, 0.004502],
[1206, 3, 0.00015871592092437352, 0.007935796046218677, 2.22, 61.69, 0.004502],
[1207, 3, 0.00013884952267018553, 0.006942476133509278, 2.22, 61.69, 0.004502],
[1208, 3, 7.055386967979429e-05, 0.0035276934839897148, 2.22, 61.69, 0.004502],
[1209, 3, 3.2453994235092736e-05, 0.001622699711754637, 2.22, 61.69, 0.004502],
[1210, 3, 0.0003259549620621221, 0.016297748103106108, 2.22, 61.69, 0.004502],
[1211, 3, 0.0011462484513341364, 0.057312422566706815, 2.22, 61.69, 0.004502],
[1212, 2, 0.005804182676892941, 0.290209133844647, 0, 0, 0],
[1213, 2, 0.0036505499187602444, 0.18252749593801224, 0, 0, 0],
[1214, 3, 0.00019852168003620192, 0.009926084001810095, 2.22, 61.69, 0.004502],
[1215, 3, 7.81255594160887e-05, 0.003906277970804435, 2.22, 61.69, 0.004502],
[1216, 2, 0.0021517677385590084, 0.10758838692795043, 0, 0, 0],
[1217, 3, 0.001279974509378072, 0.0639987254689036, 2.22, 61.69, 0.004502],
[1218, 3, 4.139664610366431e-05, 0.0020698323051832157, 2.22, 61.69, 0.004502],
[1219, 3, 0.00042701347071105576, 0.02135067353555279, 2.22, 61.69, 0.004502],
[1220, 3, 0.0010059882305525484, 0.050299411527627416, 2.22, 61.69, 0.004502],
[1221, 2, 0.02105078881494917, 1.0525394407474586, 0, 0, 0],
[1222, 2, 0.013436354905899806, 0.6718177452949904, 0, 0, 0],
[1223, 3, 0.00024230393037435297, 0.01211519651871765, 2.22, 61.69, 0.004502],
[1224, 2, 0.006415271247382745, 0.3207635623691373, 0, 0, 0],
[1225, 3, 0.0010196947606849961, 0.05098473803424981, 2.22, 61.69, 0.004502],
[1226, 3, 0.00011572498554223855, 0.005786249277111928, 2.22, 61.69, 0.004502],
[1227, 3, 0.0010454325410475286, 0.05227162705237644, 2.22, 61.69, 0.004502],
[1228, 3, 9.713499706791583e-05, 0.004856749853395792, 2.22, 61.69, 0.004502],
[1229, 2, 0.0026494957954367885, 0.13247478977183944, 0, 0, 0],
[1230, 3, 4.8238032843230984e-05, 0.002411901642161549, 2.22, 61.69, 0.004502],
[1231, 3, 0.0010059686019705035, 0.05029843009852517, 2.22, 61.69, 0.004502],
[1232, 2, 0.002228131222721375, 0.11140656113606878, 0, 0, 0],
[1233, 2, 0.03662908231521014, 1.831454115760507, 0, 0, 0],
[1234, 2, 0.0064387341725816285, 0.32193670862908147, 0, 0, 0],
[1235, 3, 0.0002292223612393676, 0.01146111806196838, 2.22, 61.69, 0.004502],
[1236, 2, 0.0020851258089392244, 0.10425629044696123, 0, 0, 0],
[1237, 3, 0.0009298092078685558, 0.04649046039342779, 2.22, 61.69, 0.004502],
[1238, 2, 0.00642623738699833, 0.3213118693499165, 0, 0, 0],
[1239, 3, 0.0001443666373276477, 0.007218331866382386, 2.22, 61.69, 0.004502],
[1240, 2, 0.02037573875130283, 1.0187869375651415, 0, 0, 0],
[1241, 2, 0.010972960615224547, 0.5486480307612274, 0, 0, 0],
[1242, 3, 0.0008355662499393597, 0.041778312496967986, 2.22, 61.69, 0.004502],
[1243, 2, 0.0027276591752610937, 0.1363829587630547, 0, 0, 0],
[1244, 2, 0.020592901244747865, 1.0296450622373932, 0, 0, 0],
[1245, 3, 0.00023503888700973188, 0.011751944350486595, 2.22, 61.69, 0.004502],
[1246, 2, 0.003636870278584459, 0.18184351392922293, 0, 0, 0],
[1247, 3, 0.0013899571448864774, 0.06949785724432388, 2.22, 61.69, 0.004502],
[1248, 2, 0.004527446475069785, 0.22637232375348926, 0, 0, 0],
[1249, 2, 0.0021092345113500805, 0.10546172556750404, 0, 0, 0],
[1250, 3, 0.000876926339333997, 0.04384631696669984, 2.22, 61.69, 0.004502],
[1251, 3, 0.0008805328097855692, 0.044026640489278464, 2.22, 61.69, 0.004502],
[1252, 3, 0.0006440660331426705, 0.032203301657133525, 2.22, 61.69, 0.004502],
[1253, 2, 0.004106369053307717, 0.20531845266538587, 0, 0, 0],
[1254, 2, 0.005238024431161238, 0.2619012215580619, 0, 0, 0],
[1255, 3, 0.00023250233000853782, 0.01162511650042689, 2.22, 61.69, 0.004502],
[1256, 3, 0.0009607764830526361, 0.048038824152631804, 2.22, 61.69, 0.004502],
[1257, 2, 0.005662916214121937, 0.28314581070609685, 0, 0, 0],
[1258, 2, 0.014991588973313675, 0.7495794486656838, 0, 0, 0],
[1259, 2, 0.00695753592752513, 0.34787679637625657, 0, 0, 0],
[1260, 3, 0.000590177310330468, 0.0295088655165234, 2.22, 61.69, 0.004502],
[1261, 2, 0.0065104902868619585, 0.3255245143430979, 0, 0, 0],
[1262, 3, 2.3902123196900468e-05, 0.0011951061598450233, 2.22, 61.69, 0.004502],
[1263, 3, 1.7811428520856433e-05, 0.0008905714260428216, 2.22, 61.69, 0.004502],
[1264, 2, 0.0033780757704728456, 0.1689037885236423, 0, 0, 0],
[1265, 3, 0.0003085654478954214, 0.015428272394771068, 2.22, 61.69, 0.004502],
[1266, 2, 0.006508243779623651, 0.3254121889811826, 0, 0, 0],
[1267, 3, 0.0011818165946297665, 0.05909082973148832, 2.22, 61.69, 0.004502],
[1270, 3, 0.0013856435479358959, 0.06928217739679479, 2.22, 61.69, 0.004502],
[1271, 3, 0.0014840987910167424, 0.07420493955083712, 2.22, 61.69, 0.004502],
[1272, 3, 4.931888796058019e-05, 0.00246594439802901, 2.22, 61.69, 0.004502],
[1273, 3, 0.00012918225610620136, 0.006459112805310069, 2.22, 61.69, 0.004502],
[1274, 2, 0.002007808497835817, 0.10039042489179087, 0, 0, 0],
[1275, 2, 0.003173827843694794, 0.1586913921847397, 0, 0, 0],
[1276, 3, 0.0007211910038712903, 0.036059550193564514, 2.22, 61.69, 0.004502],
[1277, 2, 0.00187538099082149, 0.09376904954107451, 0, 0, 0],
[1278, 2, 0.0052395364566005164, 0.2619768228300258, 0, 0, 0],
[1279, 3, 1.1251600278965072e-07, 5.625800139482535e-06, 2.22, 61.69, 0.004502],
[1280, 3, 1.694789540680769e-05, 0.0008473947703403845, 2.22, 61.69, 0.004502],
[1282, 3, 0.00013160445621004433, 0.006580222810502218, 2.22, 61.69, 0.004502],
[1283, 2, 0.03582020109680739, 1.7910100548403696, 0, 0, 0],
[1284, 3, 0.001164025604385567, 0.058201280219278353, 2.22, 61.69, 0.004502],
[1285, 3, 7.476034074798499e-05, 0.0037380170373992492, 2.22, 61.69, 0.004502],
[1286, 3, 0.0008085504689103687, 0.04042752344551843, 2.22, 61.69, 0.004502],
[1287, 2, 0.0029583869971778567, 0.14791934985889282, 0, 0, 0],
[1288, 2, 0.004222012491839328, 0.2111006245919664, 0, 0, 0],
[1289, 2, 0.005576926941677767, 0.2788463470838884, 0, 0, 0],
[1290, 3, 0.00016635371363986156, 0.008317685681993078, 2.22, 61.69, 0.004502],
[1291, 2, 0.0031745529736635094, 0.1587276486831755, 0, 0, 0],
[1292, 3, 0.0015865361520825533, 0.07932680760412766, 2.22, 61.69, 0.004502],
[1293, 3, 6.53883586637161e-05, 0.003269417933185805, 2.22, 61.69, 0.004502],
[1294, 3, 0.00013884615253373605, 0.006942307626686803, 2.22, 61.69, 0.004502],
[1295, 3, 0.00015342985152912175, 0.007671492576456088, 2.22, 61.69, 0.004502],
[1296, 3, 0.0007760328429390742, 0.03880164214695372, 2.22, 61.69, 0.004502],
[1297, 2, 0.006086894248154212, 0.3043447124077106, 0, 0, 0],
[1300, 3, 0.001511593201166196, 0.07557966005830981, 2.22, 61.69, 0.004502],
[1301, 2, 0.0038746782543149596, 0.193733912715748, 0, 0, 0],
[1302, 3, 0.0003104985267932093, 0.015524926339660468, 2.22, 61.69, 0.004502],
[1303, 3, 0.00027600750632746427, 0.013800375316373212, 2.22, 61.69, 0.004502],
[1304, 3, 0.000610793340517708, 0.030539667025885397, 2.22, 61.69, 0.004502],
[1305, 3, 1.6012209452329225e-07, 8.006104726164614e-06, 2.22, 61.69, 0.004502],
[1306, 3, 5.855304532138158e-05, 0.0029276522660690793, 2.22, 61.69, 0.004502],
[1307, 3, 1.9031130574577255e-05, 0.0009515565287288628, 2.22, 61.69, 0.004502],
[1308, 3, 8.924254018516687e-05, 0.004462127009258345, 2.22, 61.69, 0.004502],
[1309, 3, 9.599337069530822e-05, 0.004799668534765412, 2.22, 61.69, 0.004502],
[1310, 3, 4.717144911466962e-05, 0.002358572455733481, 2.22, 61.69, 0.004502],
[1311, 3, 0.000494670556881473, 0.024733527844073653, 2.22, 61.69, 0.004502],
[1312, 2, 0.011688306978695986, 0.5844153489347994, 0, 0, 0],
[1313, 3, 0.0019631283227609974, 0.09815641613804986, 2.22, 61.69, 0.004502],
[1314, 3, 0.0007641975650906521, 0.038209878254532606, 2.22, 61.69, 0.004502],
[1315, 3, 0.0005015944131679134, 0.02507972065839567, 2.22, 61.69, 0.004502],
[1316, 3, 7.002675793369909e-05, 0.0035013378966849544, 2.22, 61.69, 0.004502],
[1317, 3, 0.0007908894216365961, 0.039544471081829805, 2.22, 61.69, 0.004502],
[1318, 3, 5.6301925294159776e-05, 0.002815096264707989, 2.22, 61.69, 0.004502],
[1319, 3, 0.0008405877558306301, 0.04202938779153151, 2.22, 61.69, 0.004502],
[1320, 3, 0.0008231691710158349, 0.04115845855079175, 2.22, 61.69, 0.004502],
[1321, 3, 6.721511097913718e-06, 0.0003360755548956859, 2.22, 61.69, 0.004502],
[1322, 3, 4.510903550142661e-05, 0.0022554517750713312, 2.22, 61.69, 0.004502],
[1323, 2, 0.012675857799799822, 0.6337928899899912, 0, 0, 0],
[1324, 3, 0.0005501358559855778, 0.027506792799278885, 2.22, 61.69, 0.004502],
[1325, 2, 0.0029533893249704176, 0.14766946624852087, 0, 0, 0],
[1326, 2, 0.0017553273040833693, 0.08776636520416847, 0, 0, 0],
[1327, 2, 0.0017060005041489908, 0.08530002520744955, 0, 0, 0],
[1328, 3, 0.0006537346009359085, 0.032686730046795426, 2.22, 61.69, 0.004502],
[1329, 2, 0.00793023382909983, 0.3965116914549916, 0, 0, 0],
[1330, 3, 0.0019182008434651947, 0.09591004217325974, 2.22, 61.69, 0.004502],
[1331, 3, 1.2859395030416278e-05, 0.0006429697515208139, 2.22, 61.69, 0.004502],
[1332, 3, 0.0006688404111922736, 0.03344202055961368, 2.22, 61.69, 0.004502],
[1333, 3, 0.0019970167397866546, 0.09985083698933273, 2.22, 61.69, 0.004502],
[1334, 3, 3.081793473501891e-05, 0.001540896736750946, 2.22, 61.69, 0.004502],
[1336, 3, 0.0012612757957991489, 0.06306378978995744, 2.22, 61.69, 0.004502],
[1337, 2, 0.003207094686766897, 0.16035473433834485, 0, 0, 0],
[1338, 3, 2.9972992477731713e-05, 0.0014986496238865857, 2.22, 61.69, 0.004502],
[1339, 3, 0.00033310206544168424, 0.016655103272084214, 2.22, 61.69, 0.004502],
[1340, 2, 0.0017807406464817902, 0.08903703232408952, 0, 0, 0],
[1341, 2, 0.0060362713117726305, 0.3018135655886316, 0, 0, 0],
[1342, 3, 2.2718668528089703e-05, 0.0011359334264044853, 2.22, 61.69, 0.004502],
[1343, 3, 2.8562833512248258e-05, 0.001428141675612413, 2.22, 61.69, 0.004502],
[1344, 3, 8.141338105296074e-06, 0.0004070669052648037, 2.22, 61.69, 0.004502],
[1345, 3, 0.00011633701914020801, 0.005816850957010401, 2.22, 61.69, 0.004502],
[1346, 2, 0.007061813430091215, 0.35309067150456075, 0, 0, 0],
[1348, 3, 0.000978567012051048, 0.048928350602552406, 2.22, 61.69, 0.004502],
[1349, 3, 0.0014423210644570928, 0.07211605322285465, 2.22, 61.69, 0.004502],
[1350, 3, 5.238023081568273e-06, 0.0002619011540784137, 2.22, 61.69, 0.004502],
[1351, 3, 4.1064133941603613e-07, 2.0532066970801804e-05, 2.22, 61.69, 0.004502],
[1352, 3, 2.2066211271763273e-05, 0.0011033105635881637, 2.22, 61.69, 0.004502],
[1355, 3, 4.8633739445049876e-05, 0.0024316869722524944, 2.22, 61.69, 0.004502],
[1356, 2, 0.004176219204509461, 0.20881096022547305, 0, 0, 0],
[1357, 2, 0.0024790764561485362, 0.12395382280742683, 0, 0, 0],
[1358, 3, 7.127776476894326e-06, 0.00035638882384471626, 2.22, 61.69, 0.004502],
[1359, 2, 0.0018980577612326096, 0.0949028880616305, 0, 0, 0],
[1360, 3, 0.00101350119837844, 0.050675059918922, 2.22, 61.69, 0.004502],
[1361, 2, 0.0029249133090325724, 0.14624566545162862, 0, 0, 0],
[1362, 2, 0.004182445633969954, 0.2091222816984977, 0, 0, 0],
[1363, 3, 2.004955475366426e-06, 0.0001002477737683213, 2.22, 61.69, 0.004502],
[1364, 3, 2.7595075243285495e-06, 0.00013797537621642746, 2.22, 61.69, 0.004502],
[1365, 3, 2.8999446623259055e-08, 1.449972331162953e-06, 2.22, 61.69, 0.004502],
[1366, 3, 3.1831901356432676e-05, 0.001591595067821634, 2.22, 61.69, 0.004502],
[1367, 3, 0.0021429014821967973, 0.10714507410983987, 2.22, 61.69, 0.004502],
[1368, 3, 9.560516623724435e-05, 0.004780258311862218, 2.22, 61.69, 0.004502],
[1369, 3, 0.00046204655219542516, 0.023102327609771257, 2.22, 61.69, 0.004502],
[1370, 3, 1.0304608838582957e-05, 0.0005152304419291479, 2.22, 61.69, 0.004502],
[1371, 2, 0.0022749567929977086, 0.11374783964988543, 0, 0, 0],
[1372, 2, 0.0050082619833296356, 0.2504130991664818, 0, 0, 0],
[1373, 3, 0.0010693151538022578, 0.05346575769011289, 2.22, 61.69, 0.004502],
[1374, 2, 0.006889508467327262, 0.3444754233663631, 0, 0, 0],
[1375, 2, 0.003897629175102736, 0.1948814587551368, 0, 0, 0],
[1376, 2, 0.007852128522530815, 0.39260642612654084, 0, 0, 0],
[1377, 2, 0.006094764129655812, 0.30473820648279065, 0, 0, 0],
[1378, 2, 0.0062434108523654235, 0.3121705426182712, 0, 0, 0],
[1379, 3, 3.0098190435426792e-05, 0.0015049095217713397, 2.22, 61.69, 0.004502],
[1380, 3, 5.394520401513898e-05, 0.002697260200756949, 2.22, 61.69, 0.004502],
[1381, 3, 3.680472218048895e-05, 0.001840236109024447, 2.22, 61.69, 0.004502],
[1382, 2, 0.008838822964419164, 0.4419411482209583, 0, 0, 0],
[1383, 2, 0.006991449967869686, 0.34957249839348425, 0, 0, 0],
[1384, 3, 0.0002870603107466644, 0.01435301553733322, 2.22, 61.69, 0.004502],
[1385, 3, 4.602918986308876e-06, 0.00023014594931544384, 2.22, 61.69, 0.004502],
[1386, 3, 2.5406083498023173e-05, 0.0012703041749011585, 2.22, 61.69, 0.004502],
[1387, 3, 0.00011182192406483717, 0.0055910962032418585, 2.22, 61.69, 0.004502],
[1388, 3, 4.1266752095987256e-05, 0.0020633376047993627, 2.22, 61.69, 0.004502],
[1389, 3, 9.493711173340556e-06, 0.00047468555866702787, 2.22, 61.69, 0.004502],
[1390, 3, 0.00011948001087807657, 0.005974000543903829, 2.22, 61.69, 0.004502],
[1391, 3, 1.6156815754111043e-05, 0.0008078407877055523, 2.22, 61.69, 0.004502],
[1392, 3, 0.0007258528797202384, 0.03629264398601192, 2.22, 61.69, 0.004502],
[1393, 3, 8.763130962106806e-05, 0.004381565481053403, 2.22, 61.69, 0.004502],
[1394, 3, 6.862035771367977e-05, 0.003431017885683988, 2.22, 61.69, 0.004502],
[1395, 3, 4.696755105006889e-06, 0.00023483775525034447, 2.22, 61.69, 0.004502],
[1396, 3, 1.6473931389884785e-06, 8.236965694942393e-05, 2.22, 61.69, 0.004502],
[1397, 3, 0.000841878959456196, 0.042093947972809805, 2.22, 61.69, 0.004502],
[1398, 3, 9.106352752461475e-05, 0.0045531763762307375, 2.22, 61.69, 0.004502],
[1399, 3, 0.000614501928895323, 0.03072509644476615, 2.22, 61.69, 0.004502],
[1400, 3, 8.258214886247176e-05, 0.004129107443123589, 2.22, 61.69, 0.004502],
[1401, 2, 0.0029499050537279323, 0.14749525268639663, 0, 0, 0],
[1402, 3, 0.0008779203509557502, 0.04389601754778751, 2.22, 61.69, 0.004502],
[1403, 2, 0.007617262031172502, 0.38086310155862513, 0, 0, 0],
[1404, 2, 0.008581667499251882, 0.42908337496259413, 0, 0, 0],
[1405, 3, 0.0010206451561773305, 0.051032257808866534, 2.22, 61.69, 0.004502],
[1406, 3, 0.00044281345416550866, 0.02214067270827543, 2.22, 61.69, 0.004502],
[1407, 3, 6.985519985723439e-06, 0.00034927599928617195, 2.22, 61.69, 0.004502],
[1408, 3, 0.0015599034807669107, 0.07799517403834554, 2.22, 61.69, 0.004502],
[1409, 3, 0.0003826451438968471, 0.019132257194842357, 2.22, 61.69, 0.004502],
[1410, 3, 0.001119849138434054, 0.0559924569217027, 2.22, 61.69, 0.004502],
[1411, 3, 0.0021677332100863795, 0.10838666050431899, 2.22, 61.69, 0.004502],
[1412, 3, 0.0001702932115988861, 0.008514660579944306, 2.22, 61.69, 0.004502],
[1413, 3, 0.00015712687360754934, 0.007856343680377468, 2.22, 61.69, 0.004502],
[1414, 3, 0.0006609559456239092, 0.033047797281195467, 2.22, 61.69, 0.004502],
[1415, 3, 0.0001890075811839285, 0.009450379059196426, 2.22, 61.69, 0.004502],
[1416, 3, 0.0002017048354821146, 0.010085241774105731, 2.22, 61.69, 0.004502],
[1417, 3, 3.587634624733768e-08, 1.7938173123668838e-06, 2.22, 61.69, 0.004502],
[1418, 2, 0.002634005451573638, 0.13170027257868192, 0, 0, 0],
[1419, 3, 0.0009538705167746413, 0.04769352583873206, 2.22, 61.69, 0.004502],
[1421, 3, 0.00030900630459512675, 0.015450315229756338, 2.22, 61.69, 0.004502],
[1422, 3, 0.0002087121412723534, 0.010435607063617671, 2.22, 61.69, 0.004502],
[1423, 3, 8.660213976572599e-05, 0.0043301069882863, 2.22, 61.69, 0.004502],
[1424, 2, 0.005562707763624093, 0.27813538818120465, 0, 0, 0],
[1425, 3, 0.0013602274146640447, 0.06801137073320224, 2.22, 61.69, 0.004502],
[1426, 2, 0.004377563184547638, 0.2188781592273819, 0, 0, 0],
[1427, 2, 0.012484847220837852, 0.6242423610418927, 0, 0, 0],
[1428, 2, 0.008488880122374441, 0.4244440061187221, 0, 0, 0],
[1431, 2, 0.006398108618200077, 0.31990543091000384, 0, 0, 0],
[1432, 3, 0.00038249012070950037, 0.019124506035475018, 2.22, 61.69, 0.004502],
[1433, 2, 0.0499489397816605, 2.4974469890830253, 0, 0, 0],
[1434, 2, 0.002523926322700656, 0.12619631613503277, 0, 0, 0],
[1435, 2, 0.00281243262144019, 0.1406216310720095, 0, 0, 0],
[1436, 2, 0.005026791926267322, 0.2513395963133661, 0, 0, 0],
[1437, 2, 0.007689748714359815, 0.38448743571799077, 0, 0, 0],
[1438, 2, 0.021209120082186957, 1.060456004109348, 0, 0, 0],
[1439, 2, 0.0025185488172777457, 0.12592744086388727, 0, 0, 0],
[1440, 3, 2.1228241611109457e-05, 0.001061412080555473, 2.22, 61.69, 0.004502],
[1441, 3, 5.1097125443354235e-06, 0.0002554856272167712, 2.22, 61.69, 0.004502],
[1442, 3, 2.626011287317575e-05, 0.0013130056436587876, 2.22, 61.69, 0.004502],
[1443, 2, 0.006557506818224797, 0.3278753409112398, 0, 0, 0],
[1444, 3, 0.00042227456865251087, 0.021113728432625545, 2.22, 61.69, 0.004502],
[1445, 3, 0.0009856395478638393, 0.04928197739319196, 2.22, 61.69, 0.004502],
[1446, 2, 0.02178507310152743, 1.0892536550763714, 0, 0, 0],
[1447, 2, 0.003442397713820559, 0.17211988569102793, 0, 0, 0],
[1448, 3, 0.000439455069088402, 0.0219727534544201, 2.22, 61.69, 0.004502],
[1449, 2, 0.003346435866528816, 0.16732179332644082, 0, 0, 0],
[1450, 2, 0.0033264151601212124, 0.1663207580060606, 0, 0, 0],
[1451, 2, 0.004170743873351868, 0.2085371936675934, 0, 0, 0],
[1452, 3, 0.0013165328240904745, 0.06582664120452372, 2.22, 61.69, 0.004502],
[1453, 2, 0.004077756743774734, 0.20388783718873668, 0, 0, 0],
[1454, 2, 0.009875666531734596, 0.49378332658672985, 0, 0, 0],
[1455, 3, 2.1818849454345026e-05, 0.001090942472717251, 2.22, 61.69, 0.004502],
[1456, 2, 0.0017907486519991621, 0.08953743259995812, 0, 0, 0],
[1457, 3, 8.903780729597746e-05, 0.004451890364798873, 2.22, 61.69, 0.004502],
[1458, 3, 1.0945897203271481e-05, 0.0005472948601635741, 2.22, 61.69, 0.004502],
[1459, 3, 0.00033798517072819835, 0.01689925853640992, 2.22, 61.69, 0.004502],
[1460, 2, 0.003233851084262461, 0.16169255421312306, 0, 0, 0],
[1461, 3, 0.0011159317192975062, 0.05579658596487532, 2.22, 61.69, 0.004502],
[1462, 3, 0.00014771811478685875, 0.0073859057393429375, 2.22, 61.69, 0.004502],
[1463, 3, 4.5276834778775515e-05, 0.002263841738938776, 2.22, 61.69, 0.004502],
[1464, 2, 0.009317735345896607, 0.4658867672948304, 0, 0, 0],
[1465, 3, 0.0002263874562139475, 0.011319372810697375, 2.22, 61.69, 0.004502],
[1466, 3, 0.00018856670442025825, 0.009428335221012914, 2.22, 61.69, 0.004502],
[1467, 3, 6.63001698920047e-05, 0.0033150084946002357, 2.22, 61.69, 0.004502],
[1468, 3, 0.0015144656821575462, 0.0757232841078773, 2.22, 61.69, 0.004502],
[1469, 2, 0.0021846358435379763, 0.10923179217689882, 0, 0, 0],
[1470, 2, 0.005027084884666319, 0.2513542442333159, 0, 0, 0],
[1471, 2, 0.008429379144717497, 0.42146895723587485, 0, 0, 0],
[1472, 3, 0.000411329166889909, 0.020566458344495452, 2.22, 61.69, 0.004502],
[1473, 3, 0.0003152649698806797, 0.01576324849403399, 2.22, 61.69, 0.004502],
[1474, 3, 4.6374430095522104e-05, 0.0023187215047761056, 2.22, 61.69, 0.004502],
[1475, 3, 1.2661518354387543e-05, 0.0006330759177193771, 2.22, 61.69, 0.004502],
[1476, 2, 0.015946059282369706, 0.7973029641184852, 0, 0, 0],
[1477, 3, 0.0003829836649997916, 0.01914918324998958, 2.22, 61.69, 0.004502],
[1479, 3, 0.00014225067121410135, 0.007112533560705067, 2.22, 61.69, 0.004502],
[1480, 3, 0.0004782600316322042, 0.023913001581610215, 2.22, 61.69, 0.004502],
[1481, 3, 1.9134115446378896e-06, 9.567057723189448e-05, 2.22, 61.69, 0.004502],
[1482, 3, 0.0005460062457677878, 0.02730031228838939, 2.22, 61.69, 0.004502],
[1483, 3, 0.00010937933305696306, 0.005468966652848153, 2.22, 61.69, 0.004502],
[1484, 3, 1.0350331428991598e-06, 5.175165714495798e-05, 2.22, 61.69, 0.004502],
[1485, 3, 1.9501739896369628e-05, 0.0009750869948184814, 2.22, 61.69, 0.004502],
[1486, 3, 0.00010033262049505883, 0.005016631024752942, 2.22, 61.69, 0.004502],
[1487, 3, 4.061288205771431e-05, 0.0020306441028857154, 2.22, 61.69, 0.004502],
[1488, 3, 0.0001420359709113183, 0.007101798545565915, 2.22, 61.69, 0.004502],
[1489, 3, 7.571817467557017e-06, 0.00037859087337785094, 2.22, 61.69, 0.004502],
[1490, 2, 0.02173832998960063, 1.0869164994800316, 0, 0, 0],
[1491, 2, 0.002899243829618353, 0.14496219148091766, 0, 0, 0],
[1492, 2, 0.006310327387189529, 0.31551636935947647, 0, 0, 0],
[1493, 2, 0.0026261050067275696, 0.1313052503363785, 0, 0, 0],
[1494, 2, 0.01942091372606376, 0.971045686303188, 0, 0, 0],
[1495, 2, 0.001839513558783269, 0.09197567793916346, 0, 0, 0],
[1497, 2, 0.004375527360649893, 0.2187763680324947, 0, 0, 0],
[1498, 2, 0.006735488235440387, 0.3367744117720194, 0, 0, 0],
[1500, 3, 9.85597782087346e-06, 0.000492798891043673, 2.22, 61.69, 0.004502],
[1501, 3, 0.0005198212383651805, 0.02599106191825903, 2.22, 61.69, 0.004502],
[1502, 3, 2.5730645753187908e-05, 0.0012865322876593954, 2.22, 61.69, 0.004502],
[1503, 3, 0.0016785036591113812, 0.08392518295556907, 2.22, 61.69, 0.004502],
[1504, 2, 0.0070690698718853685, 0.3534534935942685, 0, 0, 0],
[1505, 3, 0.0008020995657820899, 0.0401049782891045, 2.22, 61.69, 0.004502],
[1506, 2, 0.0016397994496200178, 0.08198997248100089, 0, 0, 0],
[1507, 3, 0.00041507959569883954, 0.020753979784941975, 2.22, 61.69, 0.004502],
[1508, 3, 4.154538017488063e-06, 0.00020772690087440316, 2.22, 61.69, 0.004502],
[1510, 2, 0.0038109932532764228, 0.19054966266382115, 0, 0, 0],
[1511, 2, 0.00988173435818505, 0.4940867179092525, 0, 0, 0],
[1512, 2, 0.0024139057115332764, 0.12069528557666383, 0, 0, 0],
[1513, 3, 0.0009163944605813735, 0.04581972302906867, 2.22, 61.69, 0.004502],
[1514, 3, 7.863212274868215e-07, 3.931606137434107e-05, 2.22, 61.69, 0.004502],
[1516, 3, 8.064530491522743e-07, 4.032265245761371e-05, 2.22, 61.69, 0.004502],
[1517, 3, 5.411679453042277e-05, 0.0027058397265211386, 2.22, 61.69, 0.004502],
[1518, 3, 2.5128262984133043e-05, 0.0012564131492066523, 2.22, 61.69, 0.004502],
[1519, 3, 1.7440471969906603e-06, 8.720235984953302e-05, 2.22, 61.69, 0.004502],
[1520, 2, 0.002179468836492435, 0.10897344182462178, 0, 0, 0],
[1521, 3, 0.0008492761068800811, 0.042463805344004055, 2.22, 61.69, 0.004502],
[1522, 3, 0.001100146404858253, 0.055007320242912654, 2.22, 61.69, 0.004502],
[1523, 3, 0.0005582443262487387, 0.027912216312436934, 2.22, 61.69, 0.004502],
[1524, 3, 0.000714042943349428, 0.0357021471674714, 2.22, 61.69, 0.004502],
[1525, 2, 0.0030458928986021308, 0.15229464493010655, 0, 0, 0],
[1526, 3, 0.0028315929319783603, 0.14157964659891803, 2.22, 61.69, 0.004502],
[1527, 2, 0.006620761748036568, 0.3310380874018284, 0, 0, 0],
[1528, 3, 0.0026347607821089578, 0.13173803910544787, 2.22, 61.69, 0.004502],
[1529, 2, 0.002711166418718582, 0.1355583209359291, 0, 0, 0],
[1530, 2, 0.005032807482107288, 0.25164037410536444, 0, 0, 0],
[1531, 2, 0.01170243432457441, 0.5851217162287206, 0, 0, 0],
[1532, 3, 0.0013959626805160842, 0.06979813402580422, 2.22, 61.69, 0.004502],
[1534, 3, 0.0018790855823381403, 0.09395427911690701, 2.22, 61.69, 0.004502],
[1535, 3, 0.0005686146984208124, 0.028430734921040625, 2.22, 61.69, 0.004502],
[1536, 3, 0.0024994615604055, 0.124973078020275, 2.22, 61.69, 0.004502],
[1537, 2, 0.0032722848050199577, 0.16361424025099788, 0, 0, 0],
[1538, 2, 0.0037830688364752845, 0.18915344182376426, 0, 0, 0],
[1539, 2, 0.005940345649432395, 0.2970172824716198, 0, 0, 0],
[1540, 3, 0.00011646135769917789, 0.005823067884958895, 2.22, 61.69, 0.004502],
[1541, 3, 0.00012889056523503453, 0.006444528261751726, 2.22, 61.69, 0.004502],
[1542, 2, 0.0015000008003063865, 0.07500004001531933, 0, 0, 0],
[1543, 3, 0.0009414759018296965, 0.04707379509148483, 2.22, 61.69, 0.004502],
[1544, 2, 0.0055441839759994335, 0.2772091987999717, 0, 0, 0],
[1545, 2, 0.011812169709970757, 0.5906084854985378, 0, 0, 0],
[1546, 2, 0.01626203379888308, 0.8131016899441541, 0, 0, 0],
[1547, 2, 0.02285851188035466, 1.142925594017733, 0, 0, 0],
[1548, 3, 0.0013543308279443016, 0.06771654139721509, 2.22, 61.69, 0.004502],
[1549, 2, 0.0049030854262021965, 0.2451542713101098, 0, 0, 0],
[1550, 3, 0.00033197905453791535, 0.016598952726895766, 2.22, 61.69, 0.004502],
[1551, 3, 0.0006096583500745879, 0.030482917503729397, 2.22, 61.69, 0.004502],
[1552, 2, 0.0015656981738750837, 0.0782849086937542, 0, 0, 0],
[1553, 2, 0.0024888943599414575, 0.12444471799707287, 0, 0, 0],
[1554, 2, 0.004505411665481134, 0.22527058327405666, 0, 0, 0],
[1555, 2, 0.002990934193624122, 0.14954670968120612, 0, 0, 0],
[1556, 3, 0.0011564128320789798, 0.057820641603948994, 2.22, 61.69, 0.004502],
[1557, 3, 0.0007362927807377101, 0.036814639036885505, 2.22, 61.69, 0.004502],
[1558, 3, 0.0007445458899189016, 0.03722729449594508, 2.22, 61.69, 0.004502],
[1559, 2, 0.003443835108227301, 0.17219175541136506, 0, 0, 0],
[1560, 2, 0.002329145997663478, 0.11645729988317388, 0, 0, 0],
[1561, 3, 0.0005540231602239543, 0.027701158011197716, 2.22, 61.69, 0.004502],
[1562, 2, 0.0017152625197382394, 0.08576312598691198, 0, 0, 0],
[1563, 2, 0.0030915759312768417, 0.1545787965638421, 0, 0, 0],
[1564, 2, 0.0037097629455119584, 0.18548814727559793, 0, 0, 0],
[1565, 3, 0.0004375471497403783, 0.021877357487018915, 2.22, 61.69, 0.004502],
[1566, 2, 0.010252171892683539, 0.512608594634177, 0, 0, 0],
[1567, 3, 0.0008118171037128424, 0.04059085518564212, 2.22, 61.69, 0.004502],
[1568, 2, 0.002604241793178731, 0.13021208965893655, 0, 0, 0],
[1569, 2, 0.009255990694371212, 0.46279953471856067, 0, 0, 0],
[1570, 2, 0.0069640706150360665, 0.3482035307518033, 0, 0, 0],
[1571, 2, 0.0065041313813353095, 0.32520656906676554, 0, 0, 0],
[1572, 2, 0.006633904979541033, 0.33169524897705166, 0, 0, 0],
[1573, 2, 0.0023394661316732436, 0.11697330658366219, 0, 0, 0],
[1574, 2, 0.004137684975217191, 0.20688424876085953, 0, 0, 0],
[1575, 2, 0.005321935603588621, 0.266096780179431, 0, 0, 0],
[1576, 3, 0.0012058684964594748, 0.06029342482297374, 2.22, 61.69, 0.004502],
[1577, 2, 0.007623891664161928, 0.38119458320809646, 0, 0, 0],
[1578, 3, 0.0005221838250086942, 0.026109191250434708, 2.22, 61.69, 0.004502],
[1579, 3, 0.002238630940686654, 0.11193154703433271, 2.22, 61.69, 0.004502],
[1580, 3, 0.001393719346464869, 0.06968596732324346, 2.22, 61.69, 0.004502],
[1581, 2, 0.004209660542722961, 0.21048302713614803, 0, 0, 0],
[1582, 3, 0.00022686224095152467, 0.011343112047576234, 2.22, 61.69, 0.004502],
[1583, 3, 5.082160364336507e-05, 0.002541080182168254, 2.22, 61.69, 0.004502],
[1584, 2, 0.0022062235268679067, 0.11031117634339535, 0, 0, 0],
[1585, 3, 9.927313465409417e-05, 0.004963656732704709, 2.22, 61.69, 0.004502],
[1586, 2, 0.0016556098644012565, 0.08278049322006283, 0, 0, 0],
[1587, 2, 0.0051600530588915, 0.25800265294457503, 0, 0, 0],
[1588, 2, 0.0020300209546731105, 0.10150104773365555, 0, 0, 0],
[1589, 3, 0.003090042091003551, 0.15450210455017754, 2.22, 61.69, 0.004502],
[1590, 2, 0.00678480159716298, 0.33924007985814897, 0, 0, 0],
[1591, 2, 0.007640573237260637, 0.3820286618630319, 0, 0, 0],
[1592, 3, 0.0002808269093051203, 0.014041345465256016, 2.22, 61.69, 0.004502],
[1593, 3, 0.00020129856047632, 0.010064928023816, 2.22, 61.69, 0.004502],
[1594, 3, 0.0002789388372524298, 0.01394694186262149, 2.22, 61.69, 0.004502],
[1595, 2, 0.0016750204459843893, 0.08375102229921946, 0, 0, 0],
[1596, 2, 0.004134439238739313, 0.20672196193696565, 0, 0, 0],
[1597, 3, 8.285309045665851e-05, 0.004142654522832926, 2.22, 61.69, 0.004502],
[1598, 3, 0.00013540004754729773, 0.0067700023773648865, 2.22, 61.69, 0.004502],
[1599, 2, 0.0026959085186091525, 0.13479542593045762, 0, 0, 0],
[1600, 3, 0.0009357608497023268, 0.04678804248511634, 2.22, 61.69, 0.004502],
[1601, 3, 0.00027170543018973547, 0.013585271509486775, 2.22, 61.69, 0.004502],
[1602, 3, 0.0015513668512933244, 0.07756834256466623, 2.22, 61.69, 0.004502],
[1603, 3, 0.0009086996263346224, 0.04543498131673112, 2.22, 61.69, 0.004502],
[1604, 3, 0.0005649494759739373, 0.02824747379869687, 2.22, 61.69, 0.004502],
[1605, 3, 0.0014751450593580586, 0.07375725296790293, 2.22, 61.69, 0.004502],
[1606, 3, 0.0013425796771799677, 0.06712898385899839, 2.22, 61.69, 0.004502],
[1607, 3, 0.0006631858002546182, 0.03315929001273091, 2.22, 61.69, 0.004502],
[1608, 3, 0.000668140823101588, 0.0334070411550794, 2.22, 61.69, 0.004502],
[1609, 3, 0.00022162254349097636, 0.011081127174548818, 2.22, 61.69, 0.004502],
[1610, 3, 0.0006039031650447518, 0.030195158252237588, 2.22, 61.69, 0.004502],
[1611, 3, 0.00022694944446959337, 0.011347472223479668, 2.22, 61.69, 0.004502],
[1612, 3, 0.0003947897752379102, 0.019739488761895515, 2.22, 61.69, 0.004502],
[1613, 3, 0.0008375258341098956, 0.04187629170549478, 2.22, 61.69, 0.004502],
[1614, 3, 0.0008441996938739789, 0.042209984693698945, 2.22, 61.69, 0.004502],
[1615, 2, 0.005227574288460156, 0.26137871442300786, 0, 0, 0],
[1616, 3, 0.00019064354714925193, 0.009532177357462597, 2.22, 61.69, 0.004502],
[1617, 3, 0.00029566775950504534, 0.014783387975252268, 2.22, 61.69, 0.004502],
[1618, 3, 0.00014179949030894114, 0.007089974515447057, 2.22, 61.69, 0.004502],
[1619, 3, 0.00018640385871827544, 0.009320192935913772, 2.22, 61.69, 0.004502],
[1620, 3, 5.5271626586484114e-05, 0.0027635813293242053, 2.22, 61.69, 0.004502],
[1621, 3, 0.0002950094150485152, 0.014750470752425757, 2.22, 61.69, 0.004502],
[1622, 3, 0.00020847655089586544, 0.010423827544793273, 2.22, 61.69, 0.004502],
[1623, 3, 0.0006246630015592596, 0.031233150077962978, 2.22, 61.69, 0.004502],
[1624, 3, 0.00028274003590258393, 0.014137001795129197, 2.22, 61.69, 0.004502],
[1625, 2, 0.0022534174910895347, 0.11267087455447673, 0, 0, 0],
[1626, 3, 0.0004280693443394328, 0.02140346721697164, 2.22, 61.69, 0.004502],
[1627, 3, 0.000375648911560075, 0.01878244557800375, 2.22, 61.69, 0.004502],
[1628, 2, 0.002172204242957195, 0.10861021214785976, 0, 0, 0],
[1629, 2, 0.003587225381224193, 0.17936126906120967, 0, 0, 0],
[1630, 3, 0.00045326643232520994, 0.0226633216162605, 2.22, 61.69, 0.004502],
[1631, 3, 0.0009801395432241038, 0.04900697716120519, 2.22, 61.69, 0.004502],
[1632, 3, 0.0008930991123686864, 0.044654955618434314, 2.22, 61.69, 0.004502],
[1633, 2, 0.001835290275730487, 0.09176451378652435, 0, 0, 0],
[1634, 3, 0.00035310969975077067, 0.017655484987538533, 2.22, 61.69, 0.004502],
[1635, 3, 0.0006833295628236428, 0.03416647814118214, 2.22, 61.69, 0.004502],
[1636, 3, 0.0006973081800050544, 0.03486540900025272, 2.22, 61.69, 0.004502],
[1637, 3, 0.000849481774844417, 0.042474088742220854, 2.22, 61.69, 0.004502],
[1638, 3, 0.0003577601952454168, 0.01788800976227084, 2.22, 61.69, 0.004502],
[1639, 3, 0.0008040502325112668, 0.04020251162556334, 2.22, 61.69, 0.004502],
[1640, 3, 6.362024595159042e-05, 0.0031810122975795213, 2.22, 61.69, 0.004502],
[1641, 3, 0.00014325661737729948, 0.007162830868864973, 2.22, 61.69, 0.004502],
[1642, 3, 0.00033451195931950633, 0.01672559796597532, 2.22, 61.69, 0.004502],
[1643, 3, 9.619219687560661e-05, 0.0048096098437803315, 2.22, 61.69, 0.004502],
[1644, 3, 0.0003653755557936511, 0.018268777789682555, 2.22, 61.69, 0.004502],
[1645, 3, 0.00030842754735325555, 0.015421377367662779, 2.22, 61.69, 0.004502],
[1646, 3, 0.0001049187322986075, 0.005245936614930375, 2.22, 61.69, 0.004502],
[1647, 3, 0.000503659392774143, 0.025182969638707146, 2.22, 61.69, 0.004502],
[1648, 2, 0.006961158588339223, 0.34805792941696123, 0, 0, 0],
[1649, 3, 0.000744807327898371, 0.03724036639491855, 2.22, 61.69, 0.004502],
[1650, 2, 0.011263647688495146, 0.5631823844247573, 0, 0, 0],
[1651, 2, 0.008559494225984409, 0.4279747112992205, 0, 0, 0],
[1652, 2, 0.005352098184679378, 0.2676049092339689, 0, 0, 0],
[1653, 3, 0.0011733692302176245, 0.058668461510881224, 2.22, 61.69, 0.004502],
[1654, 3, 0.0020443508774251108, 0.10221754387125553, 2.22, 61.69, 0.004502],
[1655, 3, 0.0003002115401188504, 0.01501057700594252, 2.22, 61.69, 0.004502],
[1656, 3, 7.370159725959526e-05, 0.003685079862979763, 2.22, 61.69, 0.004502],
[1657, 3, 0.00015430974585088452, 0.007715487292544226, 2.22, 61.69, 0.004502],
[1658, 3, 5.322222256050306e-05, 0.0026611111280251533, 2.22, 61.69, 0.004502],
[1659, 2, 0.005607978495065647, 0.2803989247532824, 0, 0, 0],
[1660, 2, 0.006516269957589729, 0.32581349787948644, 0, 0, 0],
[1661, 2, 0.008823810212990009, 0.4411905106495005, 0, 0, 0],
[1662, 3, 8.483345715007819e-05, 0.00424167285750391, 2.22, 61.69, 0.004502],
[1663, 3, 4.3530191699128595e-05, 0.0021765095849564297, 2.22, 61.69, 0.004502],
[1664, 3, 4.452953003965536e-05, 0.002226476501982768, 2.22, 61.69, 0.004502],
[1665, 3, 0.0013225288693347707, 0.06612644346673854, 2.22, 61.69, 0.004502],
[1666, 3, 8.635567359373938e-05, 0.0043177836796869686, 2.22, 61.69, 0.004502],
[1667, 3, 0.0001522890012790897, 0.007614450063954485, 2.22, 61.69, 0.004502],
[1668, 3, 0.00011100625173614089, 0.005550312586807045, 2.22, 61.69, 0.004502],
[1669, 2, 0.0019551374257545055, 0.09775687128772527, 0, 0, 0],
[1670, 2, 0.002994563514151705, 0.1497281757075853, 0, 0, 0],
[1671, 2, 0.00194197125660994, 0.097098562830497, 0, 0, 0],
[1672, 3, 0.00031759653323842224, 0.01587982666192111, 2.22, 61.69, 0.004502],
[1673, 3, 0.00015112697948666895, 0.007556348974333448, 2.22, 61.69, 0.004502],
[1674, 3, 0.001338975669244281, 0.06694878346221406, 2.22, 61.69, 0.004502],
[1675, 3, 0.0009048640187272772, 0.04524320093636386, 2.22, 61.69, 0.004502],
[1676, 2, 0.002276296569919192, 0.11381482849595959, 0, 0, 0],
[1677, 3, 0.0003779607501536475, 0.018898037507682378, 2.22, 61.69, 0.004502],
[1678, 2, 0.005903817693380342, 0.2951908846690171, 0, 0, 0],
[1679, 2, 0.0018586402973926343, 0.09293201486963171, 0, 0, 0],
[1680, 2, 0.0014488887108239739, 0.0724444355411987, 0, 0, 0],
[1681, 3, 0.0004714294646830218, 0.023571473234151093, 2.22, 61.69, 0.004502],
[1682, 3, 0.001085935652974641, 0.05429678264873205, 2.22, 61.69, 0.004502],
[1683, 3, 0.00028145757533810527, 0.014072878766905264, 2.22, 61.69, 0.004502],
[1684, 3, 0.0025831258538967852, 0.12915629269483925, 2.22, 61.69, 0.004502],
[1685, 2, 0.0047697103139446575, 0.23848551569723286, 0, 0, 0],
[1686, 2, 0.0022483118876134227, 0.11241559438067113, 0, 0, 0],
[1687, 2, 0.0030131816049814983, 0.15065908024907493, 0, 0, 0],
[1688, 3, 0.0004903983387759389, 0.024519916938796946, 2.22, 61.69, 0.004502],
[1689, 2, 0.0032938946161484794, 0.16469473080742397, 0, 0, 0],
[1690, 2, 0.00317999955372553, 0.15899997768627652, 0, 0, 0],
[1691, 2, 0.006018881738424175, 0.30094408692120883, 0, 0, 0],
[1692, 3, 0.0007150498191215078, 0.03575249095607538, 2.22, 61.69, 0.004502],
[1693, 2, 0.0030184481369320087, 0.15092240684660044, 0, 0, 0],
[1694, 2, 0.001461369242868097, 0.07306846214340486, 0, 0, 0],
[1695, 3, 0.0006306603001410114, 0.03153301500705057, 2.22, 61.69, 0.004502],
[1696, 2, 0.0014331689037382152, 0.07165844518691075, 0, 0, 0],
[1697, 2, 0.008710326279612261, 0.43551631398061313, 0, 0, 0],
[1698, 3, 0.0016301483386422185, 0.08150741693211093, 2.22, 61.69, 0.004502],
[1699, 3, 0.00013956784357760127, 0.006978392178880064, 2.22, 61.69, 0.004502],
[1700, 2, 0.001455730736331227, 0.07278653681656136, 0, 0, 0],
[1701, 3, 0.000985466392749056, 0.04927331963745281, 2.22, 61.69, 0.004502],
[1702, 3, 0.0008069862705159137, 0.04034931352579569, 2.22, 61.69, 0.004502],
[1703, 3, 0.0015568099066940577, 0.07784049533470289, 2.22, 61.69, 0.004502],
[1704, 2, 0.0039863070632047415, 0.1993153531602371, 0, 0, 0],
[1705, 2, 0.0016994219326201241, 0.0849710966310062, 0, 0, 0],
[1706, 3, 0.00022834587513481845, 0.011417293756740922, 2.22, 61.69, 0.004502],
[1707, 3, 0.00035050593877745283, 0.017525296938872642, 2.22, 61.69, 0.004502],
[1708, 3, 0.0008077480562281571, 0.04038740281140786, 2.22, 61.69, 0.004502],
[1709, 2, 0.006228812219006413, 0.31144061095032066, 0, 0, 0],
[1710, 2, 0.005128653226179494, 0.2564326613089747, 0, 0, 0],
[1711, 3, 0.0001865928228376505, 0.009329641141882526, 2.22, 61.69, 0.004502],
[1712, 2, 0.002102837121501151, 0.10514185607505754, 0, 0, 0],
[1713, 2, 0.0025368957405395645, 0.12684478702697824, 0, 0, 0],
[1714, 3, 0.0011562226654331135, 0.05781113327165568, 2.22, 61.69, 0.004502],
[1715, 2, 0.004481367157274824, 0.22406835786374124, 0, 0, 0],
[1716, 2, 0.009993594261663767, 0.4996797130831883, 0, 0, 0],
[1717, 2, 0.002267986548968579, 0.11339932744842897, 0, 0, 0],
[1718, 2, 0.01920136583254073, 0.9600682916270364, 0, 0, 0],
[1719, 3, 0.0006250608555912478, 0.03125304277956239, 2.22, 61.69, 0.004502],
[1720, 2, 0.00168964057950739, 0.08448202897536951, 0, 0, 0],
[1721, 2, 0.0022514556432754154, 0.11257278216377076, 0, 0, 0],
[1722, 3, 0.0005776709769605844, 0.02888354884802922, 2.22, 61.69, 0.004502],
[1723, 3, 0.00018177235502873834, 0.009088617751436916, 2.22, 61.69, 0.004502],
[1724, 3, 0.002308942454207542, 0.1154471227103771, 2.22, 61.69, 0.004502],
[1725, 2, 0.0018560503299213332, 0.09280251649606665, 0, 0, 0],
[1726, 2, 0.002761006390807373, 0.13805031954036864, 0, 0, 0],
[1727, 3, 1.2777785942774298e-05, 0.0006388892971387149, 2.22, 61.69, 0.004502],
[1728, 2, 0.0018392523086213346, 0.09196261543106675, 0, 0, 0],
[1729, 2, 0.006839303534284608, 0.3419651767142304, 0, 0, 0],
[1730, 2, 0.0016405280887646968, 0.08202640443823485, 0, 0, 0],
[1731, 2, 0.004530580326268455, 0.2265290163134228, 0, 0, 0],
[1732, 2, 0.010296734416249178, 0.5148367208124589, 0, 0, 0],
[1733, 2, 0.0017360181799001156, 0.08680090899500578, 0, 0, 0],
[1734, 2, 0.002080576836187494, 0.1040288418093747, 0, 0, 0],
[1735, 2, 0.004596997723122095, 0.2298498861561048, 0, 0, 0],
[1736, 2, 0.002413425654250592, 0.12067128271252962, 0, 0, 0],
[1737, 2, 0.006813443685203153, 0.34067218426015766, 0, 0, 0],
[1738, 2, 0.0038515318581644853, 0.1925765929082243, 0, 0, 0],
[1739, 3, 0.0010627604171624583, 0.053138020858122914, 2.22, 61.69, 0.004502],
[1740, 2, 0.0021026257427105457, 0.10513128713552729, 0, 0, 0],
[1741, 3, 0.0009950302298943022, 0.049751511494715114, 2.22, 61.69, 0.004502],
[1742, 3, 0.0006991333883527254, 0.03495666941763627, 2.22, 61.69, 0.004502],
[1743, 3, 2.6718441567986027e-05, 0.0013359220783993014, 2.22, 61.69, 0.004502],
[1744, 3, 0.00010295853025504874, 0.0051479265127524374, 2.22, 61.69, 0.004502],
[1745, 3, 0.0008552992639033185, 0.04276496319516592, 2.22, 61.69, 0.004502],
[1746, 2, 0.004641428723601485, 0.23207143618007425, 0, 0, 0],
[1747, 3, 0.0007127580911748647, 0.03563790455874324, 2.22, 61.69, 0.004502],
[1748, 2, 0.0019372469660483122, 0.09686234830241562, 0, 0, 0],
[1749, 2, 0.006244643211840332, 0.3122321605920166, 0, 0, 0],
[1750, 3, 0.000653478119652876, 0.0326739059826438, 2.22, 61.69, 0.004502],
[1751, 3, 0.0005383084342515337, 0.026915421712576687, 2.22, 61.69, 0.004502],
[1752, 2, 0.0037542906982168446, 0.18771453491084222, 0, 0, 0],
[1753, 2, 0.002297268499533676, 0.11486342497668381, 0, 0, 0],
[1754, 2, 0.011467968203347287, 0.5733984101673645, 0, 0, 0],
[1755, 3, 0.0014040905423340156, 0.07020452711670079, 2.22, 61.69, 0.004502],
[1756, 2, 0.0025915006544054604, 0.12957503272027304, 0, 0, 0],
[1757, 2, 0.006862277688448091, 0.34311388442240454, 0, 0, 0],
[1758, 2, 0.008413471513428292, 0.42067357567141467, 0, 0, 0],
[1759, 2, 0.004574362398582669, 0.22871811992913343, 0, 0, 0],
[1760, 2, 0.0031789097473471192, 0.15894548736735598, 0, 0, 0],
[1761, 3, 0.0014083619528329524, 0.07041809764164762, 2.22, 61.69, 0.004502],
[1762, 2, 0.0033502257085727175, 0.1675112854286359, 0, 0, 0],
[1763, 2, 0.0030242326674567712, 0.15121163337283858, 0, 0, 0],
[1764, 3, 0.0007202102426608419, 0.0360105121330421, 2.22, 61.69, 0.004502],
[1765, 2, 0.003945424551590993, 0.19727122757954962, 0, 0, 0],
[1766, 2, 0.003915515453890014, 0.1957757726945007, 0, 0, 0],
[1767, 2, 0.006085505697192886, 0.30427528485964433, 0, 0, 0],
[1768, 2, 0.010174366269247585, 0.5087183134623792, 0, 0, 0],
[1769, 2, 0.009031054425598138, 0.451552721279907, 0, 0, 0],
[1770, 2, 0.030509885187144117, 1.525494259357206, 0, 0, 0],
[1771, 2, 0.017611454160671825, 0.8805727080335912, 0, 0, 0],
[1772, 2, 0.007633737706924312, 0.3816868853462156, 0, 0, 0],
[1773, 2, 0.01780807424723992, 0.890403712361996, 0, 0, 0],
[1774, 2, 0.002413161491111794, 0.1206580745555897, 0, 0, 0],
[1775, 2, 0.005451344168542172, 0.2725672084271086, 0, 0, 0],
[1776, 2, 0.0033074583919163653, 0.16537291959581826, 0, 0, 0],
[1777, 2, 0.005568161613558242, 0.2784080806779121, 0, 0, 0],
[1778, 2, 0.002395611780191415, 0.11978058900957077, 0, 0, 0],
[1779, 2, 0.0028488054525953985, 0.14244027262976997, 0, 0, 0],
[1780, 2, 0.0030002134377383463, 0.1500106718869173, 0, 0, 0],
[1781, 3, 0.0004499032173986467, 0.022495160869932335, 2.22, 61.69, 0.004502],
[1782, 3, 0.0006333736554700433, 0.03166868277350216, 2.22, 61.69, 0.004502],
[1783, 3, 0.0006836718573255382, 0.03418359286627692, 2.22, 61.69, 0.004502],
[1784, 2, 0.006456743545235233, 0.32283717726176164, 0, 0, 0],
[1785, 2, 0.007347157943155048, 0.36735789715775236, 0, 0, 0],
[1786, 2, 0.007214359186119591, 0.36071795930597955, 0, 0, 0],
[1787, 2, 0.007834284018991623, 0.39171420094958115, 0, 0, 0],
[1788, 3, 0.0002545220592081115, 0.012726102960405576, 2.22, 61.69, 0.004502],
[1789, 3, 0.0006445279945604626, 0.03222639972802314, 2.22, 61.69, 0.004502],
[1790, 3, 3.7097412529855566e-05, 0.0018548706264927782, 2.22, 61.69, 0.004502],
[1791, 3, 3.060700921589692e-05, 0.001530350460794846, 2.22, 61.69, 0.004502],
[1792, 3, 0.00023113047197876308, 0.011556523598938153, 2.22, 61.69, 0.004502],
[1793, 3, 0.0010854139444152772, 0.054270697220763865, 2.22, 61.69, 0.004502],
[1794, 3, 0.000193812719045554, 0.009690635952277699, 2.22, 61.69, 0.004502],
[1795, 3, 0.00012212686390123214, 0.006106343195061608, 2.22, 61.69, 0.004502],
[1796, 3, 0.0006642823349345957, 0.033214116746729784, 2.22, 61.69, 0.004502],
[1797, 2, 0.0018439478449351068, 0.09219739224675534, 0, 0, 0],
[1798, 3, 0.00042633568546037186, 0.021316784273018592, 2.22, 61.69, 0.004502],
[1799, 2, 0.002237269697339197, 0.11186348486695984, 0, 0, 0],
[1800, 2, 0.0042493921881998535, 0.2124696094099927, 0, 0, 0],
[1801, 3, 0.0005438025657211798, 0.02719012828605899, 2.22, 61.69, 0.004502],
[1802, 3, 0.00029245884668739017, 0.01462294233436951, 2.22, 61.69, 0.004502],
[1803, 3, 0.0003927492716827882, 0.01963746358413941, 2.22, 61.69, 0.004502],
[1804, 2, 0.01120428237244892, 0.5602141186224461, 0, 0, 0],
[1805, 3, 0.0006332582976482522, 0.03166291488241261, 2.22, 61.69, 0.004502],
[1806, 3, 0.0006249082238639684, 0.03124541119319842, 2.22, 61.69, 0.004502],
[1807, 3, 0.0007715037279579743, 0.03857518639789872, 2.22, 61.69, 0.004502],
[1808, 2, 0.003273470708969163, 0.16367353544845814, 0, 0, 0],
[1809, 3, 0.0009238292096633647, 0.04619146048316824, 2.22, 61.69, 0.004502],
[1810, 2, 0.002106300089692593, 0.10531500448462965, 0, 0, 0],
[1811, 2, 0.0014671228267872148, 0.07335614133936073, 0, 0, 0],
[1812, 3, 0.0013029854518401976, 0.0651492725920099, 2.22, 61.69, 0.004502],
[1813, 2, 0.005212306067684381, 0.26061530338421907, 0, 0, 0],
[1814, 2, 0.0017458294165536873, 0.08729147082768438, 0, 0, 0],
[1815, 2, 0.0017071985603054247, 0.08535992801527123, 0, 0, 0],
[1816, 3, 0.0008355966484335978, 0.04177983242167989, 2.22, 61.69, 0.004502],
[1817, 2, 0.00786124232779237, 0.39306211638961847, 0, 0, 0],
[1818, 2, 0.00467172216419726, 0.23358610820986297, 0, 0, 0],
[1819, 3, 4.446961087725697e-05, 0.0022234805438628488, 2.22, 61.69, 0.004502],
[1820, 2, 0.0021455616092900765, 0.10727808046450382, 0, 0, 0],
[1821, 2, 0.0052492883399868, 0.26246441699934, 0, 0, 0],
[1822, 2, 0.010875476397094096, 0.5437738198547047, 0, 0, 0],
[1823, 2, 0.003945992802078176, 0.19729964010390882, 0, 0, 0],
[1824, 2, 0.0018267545792273764, 0.09133772896136881, 0, 0, 0],
[1825, 2, 0.00519430489419229, 0.25971524470961443, 0, 0, 0],
[1826, 2, 0.0021811060524790952, 0.10905530262395477, 0, 0, 0],
[1827, 3, 0.0008530157012054359, 0.0426507850602718, 2.22, 61.69, 0.004502],
[1828, 3, 0.002756494944812388, 0.1378247472406194, 2.22, 61.69, 0.004502],
[1829, 2, 0.004409435763064647, 0.22047178815323237, 0, 0, 0],
[1830, 3, 0.0011403474572496454, 0.05701737286248228, 2.22, 61.69, 0.004502],
[1831, 2, 0.004449336207686825, 0.2224668103843413, 0, 0, 0],
[1832, 3, 0.0007771931121615173, 0.038859655608075874, 2.22, 61.69, 0.004502],
[1833, 2, 0.00219574579139257, 0.10978728956962851, 0, 0, 0],
[1834, 2, 0.0029144516945575063, 0.14572258472787536, 0, 0, 0],
[1836, 3, 0.0002291147948951537, 0.011455739744757684, 2.22, 61.69, 0.004502],
[1837, 3, 0.0008040081530028336, 0.040200407650141684, 2.22, 61.69, 0.004502],
[1838, 3, 0.0008406582811366919, 0.042032914056834604, 2.22, 61.69, 0.004502],
[1839, 2, 0.009448279703012192, 0.47241398515060967, 0, 0, 0],
[1840, 2, 0.004930931936026686, 0.2465465968013343, 0, 0, 0],
[1841, 3, 0.0006235800258089248, 0.03117900129044624, 2.22, 61.69, 0.004502],
[1842, 3, 0.000453678034330045, 0.022683901716502253, 2.22, 61.69, 0.004502],
[1843, 3, 0.0005619991314477211, 0.02809995657238605, 2.22, 61.69, 0.004502],
[1844, 3, 0.0008621042105392081, 0.043105210526960404, 2.22, 61.69, 0.004502],
[1845, 3, 0.000841554397088342, 0.0420777198544171, 2.22, 61.69, 0.004502],
[1846, 3, 0.00010981600382526249, 0.005490800191263125, 2.22, 61.69, 0.004502],
[1847, 2, 0.003982054075289823, 0.19910270376449113, 0, 0, 0],
[1848, 3, 0.00033381245647581777, 0.01669062282379089, 2.22, 61.69, 0.004502],
[1849, 3, 0.001158450269038491, 0.057922513451924555, 2.22, 61.69, 0.004502],
[1850, 3, 0.001708114521061397, 0.08540572605306987, 2.22, 61.69, 0.004502],
[1851, 3, 0.0005065229873089011, 0.025326149365445055, 2.22, 61.69, 0.004502],
[1852, 3, 0.0023941306142429277, 0.11970653071214639, 2.22, 61.69, 0.004502],
[1853, 3, 0.001917289339589373, 0.09586446697946867, 2.22, 61.69, 0.004502],
[1854, 3, 0.00014267713764539732, 0.007133856882269866, 2.22, 61.69, 0.004502],
[1855, 2, 0.003701425783106976, 0.18507128915534882, 0, 0, 0],
[1856, 2, 0.004052362315850483, 0.20261811579252417, 0, 0, 0],
[1857, 3, 0.0012207911958070376, 0.06103955979035188, 2.22, 61.69, 0.004502],
[1858, 3, 0.0008157807822408823, 0.04078903911204411, 2.22, 61.69, 0.004502],
[1860, 2, 0.0028539824090186706, 0.14269912045093353, 0, 0, 0],
[1861, 3, 0.0008409403758531892, 0.04204701879265946, 2.22, 61.69, 0.004502],
[1862, 3, 0.0008746423721642757, 0.04373211860821378, 2.22, 61.69, 0.004502],
[1863, 3, 0.0008078987718104445, 0.04039493859052222, 2.22, 61.69, 0.004502],
[1864, 2, 0.0037260737853256434, 0.1863036892662822, 0, 0, 0],
[1865, 2, 0.0043352387888536065, 0.21676193944268035, 0, 0, 0],
[1866, 2, 0.006257281052932708, 0.31286405264663536, 0, 0, 0],
[1867, 3, 6.12285505372934e-05, 0.00306142752686467, 2.22, 61.69, 0.004502],
[1868, 3, 0.00018655016239655994, 0.009327508119827998, 2.22, 61.69, 0.004502],
[1869, 3, 8.230686306328308e-05, 0.004115343153164154, 2.22, 61.69, 0.004502],
[1870, 2, 0.0014869657686431364, 0.07434828843215682, 0, 0, 0],
[1871, 2, 0.0015337314104040772, 0.07668657052020388, 0, 0, 0],
[1872, 3, 6.220327851111738e-05, 0.003110163925555869, 2.22, 61.69, 0.004502],
[1873, 3, 0.0002573648025375113, 0.012868240126875569, 2.22, 61.69, 0.004502],
[1874, 3, 0.00010039547173203763, 0.0050197735866018825, 2.22, 61.69, 0.004502],
[1875, 3, 0.0002179760373318144, 0.010898801866590722, 2.22, 61.69, 0.004502],
[1876, 3, 0.00014270627844755376, 0.00713531392237769, 2.22, 61.69, 0.004502],
[1877, 3, 3.283059900250418e-05, 0.001641529950125209, 2.22, 61.69, 0.004502],
[1878, 3, 0.00023290405284479777, 0.011645202642239888, 2.22, 61.69, 0.004502],
[1879, 3, 5.049284201103439e-05, 0.0025246421005517194, 2.22, 61.69, 0.004502],
[1880, 3, 0.001068255049908474, 0.05341275249542371, 2.22, 61.69, 0.004502],
[1881, 3, 0.00015727984940835908, 0.007863992470417953, 2.22, 61.69, 0.004502],
[1882, 3, 0.0001818121283940816, 0.00909060641970408, 2.22, 61.69, 0.004502],
[1883, 3, 0.0002453456224830875, 0.012267281124154376, 2.22, 61.69, 0.004502],
[1884, 3, 0.00020684198110963, 0.010342099055481502, 2.22, 61.69, 0.004502],
[1885, 3, 0.00129792588119142, 0.06489629405957101, 2.22, 61.69, 0.004502],
[1886, 3, 0.00014319470844547947, 0.007159735422273974, 2.22, 61.69, 0.004502],
[1887, 3, 0.0005032189871086648, 0.025160949355433244, 2.22, 61.69, 0.004502],
[1888, 3, 0.00014324092549305482, 0.0071620462746527416, 2.22, 61.69, 0.004502],
[1889, 2, 0.0025884474041454283, 0.12942237020727143, 0, 0, 0],
[1890, 3, 0.0007104281028062201, 0.035521405140311005, 2.22, 61.69, 0.004502],
[1891, 3, 0.0008415405866706834, 0.042077029333534174, 2.22, 61.69, 0.004502],
[1892, 3, 0.0010384360084148645, 0.05192180042074322, 2.22, 61.69, 0.004502],
[1893, 3, 0.001301927182997355, 0.06509635914986775, 2.22, 61.69, 0.004502],
[1894, 3, 0.0008768655006630459, 0.0438432750331523, 2.22, 61.69, 0.004502],
[1895, 3, 4.304267639620148e-06, 0.00021521338198100739, 2.22, 61.69, 0.004502],
[1896, 3, 0.0012165952308203119, 0.060829761541015596, 2.22, 61.69, 0.004502],
[1897, 3, 0.0004032096848351131, 0.020160484241755657, 2.22, 61.69, 0.004502],
[1898, 3, 0.0004936037088332394, 0.024680185441661975, 2.22, 61.69, 0.004502],
[1899, 3, 0.0003231170726398226, 0.016155853631991127, 2.22, 61.69, 0.004502],
[1900, 2, 0.004972924117850934, 0.2486462058925467, 0, 0, 0],
[1901, 2, 0.00850139874298526, 0.42506993714926306, 0, 0, 0],
[1902, 2, 0.017941196935571776, 0.8970598467785887, 0, 0, 0],
[1903, 2, 0.008625713146876468, 0.4312856573438233, 0, 0, 0],
[1904, 2, 0.005041037225995458, 0.2520518612997729, 0, 0, 0],
[1905, 3, 0.0002626527775456755, 0.013132638877283775, 2.22, 61.69, 0.004502],
[1906, 2, 0.002010065672184408, 0.10050328360922042, 0, 0, 0],
[1907, 3, 0.0008003650424765439, 0.040018252123827196, 2.22, 61.69, 0.004502],
[1908, 2, 0.0013979563523032034, 0.06989781761516019, 0, 0, 0],
[1909, 3, 0.0011036689330580832, 0.05518344665290417, 2.22, 61.69, 0.004502],
[1910, 3, 0.0006883943546285288, 0.03441971773142644, 2.22, 61.69, 0.004502],
[1911, 3, 0.0002772595538987581, 0.013862977694937906, 2.22, 61.69, 0.004502],
[1912, 2, 0.006444942182323984, 0.3222471091161993, 0, 0, 0],
[1913, 3, 0.0001851619920160923, 0.009258099600804617, 2.22, 61.69, 0.004502],
[1914, 3, 0.00043823655905455975, 0.02191182795272799, 2.22, 61.69, 0.004502],
[1915, 2, 0.010158557501696754, 0.5079278750848377, 0, 0, 0],
[1916, 2, 0.017684886510895965, 0.8842443255447983, 0, 0, 0],
[1917, 2, 0.01186578896955475, 0.5932894484777375, 0, 0, 0],
[1918, 2, 0.007670383184040397, 0.3835191592020199, 0, 0, 0],
[1919, 2, 0.0038936492873901407, 0.19468246436950706, 0, 0, 0],
[1920, 3, 0.0005833186660407878, 0.029165933302039395, 2.22, 61.69, 0.004502],
[1921, 2, 0.014667779068156944, 0.7333889534078474, 0, 0, 0],
[1922, 2, 0.00420908399548562, 0.21045419977428104, 0, 0, 0],
[1923, 3, 0.001390133293413998, 0.0695066646706999, 2.22, 61.69, 0.004502],
[1924, 3, 0.001743020791378585, 0.08715103956892926, 2.22, 61.69, 0.004502],
[1925, 2, 0.004089510330471294, 0.20447551652356472, 0, 0, 0],
[1926, 2, 0.00287118105637557, 0.1435590528187785, 0, 0, 0],
[1927, 2, 0.0041806062493278656, 0.20903031246639325, 0, 0, 0],
[1928, 3, 9.612221268309282e-05, 0.004806110634154641, 2.22, 61.69, 0.004502],
[1929, 3, 0.000144746604528514, 0.0072373302264257, 2.22, 61.69, 0.004502],
[1930, 3, 0.00030511943453295244, 0.015255971726647622, 2.22, 61.69, 0.004502],
[1931, 3, 0.0010456667798853683, 0.05228333899426842, 2.22, 61.69, 0.004502],
[1932, 3, 0.0014184910249342812, 0.07092455124671407, 2.22, 61.69, 0.004502],
[1933, 3, 0.0012104704776866732, 0.060523523884333665, 2.22, 61.69, 0.004502],
[1934, 2, 0.017260023459133387, 0.8630011729566692, 0, 0, 0],
[1935, 2, 0.0020131873177782612, 0.10065936588891305, 0, 0, 0],
[1936, 3, 0.00016183222128449105, 0.008091611064224553, 2.22, 61.69, 0.004502],
[1937, 2, 0.0036698553451389514, 0.18349276725694758, 0, 0, 0],
[1938, 2, 0.0024417642388014174, 0.12208821194007087, 0, 0, 0],
[1939, 2, 0.002785103211444589, 0.13925516057222947, 0, 0, 0],
[1940, 3, 0.0005110953936246092, 0.025554769681230462, 2.22, 61.69, 0.004502],
[1941, 2, 0.002709985093250103, 0.13549925466250515, 0, 0, 0],
[1942, 2, 0.0018877299747687521, 0.0943864987384376, 0, 0, 0],
[1943, 3, 0.00010279589286423787, 0.005139794643211894, 2.22, 61.69, 0.004502],
[1944, 2, 0.0025353013507918823, 0.1267650675395941, 0, 0, 0],
[1945, 3, 0.0003079053590355567, 0.015395267951777833, 2.22, 61.69, 0.004502],
[1946, 3, 3.785246414633451e-05, 0.0018926232073167254, 2.22, 61.69, 0.004502],
[1947, 3, 0.0006231855866823692, 0.03115927933411846, 2.22, 61.69, 0.004502],
[1948, 2, 0.002715072413449747, 0.13575362067248736, 0, 0, 0],
[1949, 3, 0.0003749199035037024, 0.01874599517518512, 2.22, 61.69, 0.004502],
[1950, 3, 3.2009130803650874e-05, 0.0016004565401825438, 2.22, 61.69, 0.004502],
[1951, 3, 0.00028982139778890414, 0.014491069889445209, 2.22, 61.69, 0.004502],
[1952, 2, 0.0021449687785486293, 0.10724843892743147, 0, 0, 0],
[1953, 3, 0.0002522618160854708, 0.012613090804273537, 2.22, 61.69, 0.004502],
[1954, 3, 0.0003506443043975968, 0.017532215219879844, 2.22, 61.69, 0.004502],
[1955, 3, 0.00019049808752063204, 0.009524904376031602, 2.22, 61.69, 0.004502],
[1956, 3, 0.0013327624870031016, 0.06663812435015508, 2.22, 61.69, 0.004502],
[1957, 2, 0.0038265233479846173, 0.1913261673992309, 0, 0, 0],
[1958, 2, 0.001623585117719857, 0.08117925588599285, 0, 0, 0],
[1959, 3, 0.0014711543728682193, 0.07355771864341097, 2.22, 61.69, 0.004502],
[1960, 3, 0.00040419410791183997, 0.020209705395591998, 2.22, 61.69, 0.004502],
[1961, 3, 0.0004963095835166648, 0.02481547917583324, 2.22, 61.69, 0.004502],
[1962, 3, 8.676879300628758e-05, 0.00433843965031438, 2.22, 61.69, 0.004502],
[1963, 3, 1.98901161405436e-05, 0.0009945058070271802, 2.22, 61.69, 0.004502],
[1964, 2, 0.001926379139961268, 0.0963189569980634, 0, 0, 0],
[1965, 3, 0.0005268011695933483, 0.026340058479667413, 2.22, 61.69, 0.004502],
[1966, 3, 0.00017024481693603925, 0.008512240846801963, 2.22, 61.69, 0.004502],
[1967, 2, 0.003124156872402211, 0.15620784362011056, 0, 0, 0],
[1968, 2, 0.008146530594916731, 0.4073265297458366, 0, 0, 0],
[1969, 3, 0.0004332236280372991, 0.021661181401864953, 2.22, 61.69, 0.004502],
[1970, 2, 0.015079725927314894, 0.7539862963657448, 0, 0, 0],
[1971, 3, 0.00041965080447621257, 0.020982540223810627, 2.22, 61.69, 0.004502],
[1972, 3, 8.495873978254917e-07, 4.247936989127459e-05, 2.22, 61.69, 0.004502],
[1973, 3, 1.600763469777576e-05, 0.0008003817348887879, 2.22, 61.69, 0.004502],
[1974, 3, 8.235613569316079e-05, 0.00411780678465804, 2.22, 61.69, 0.004502],
[1975, 2, 0.0024899950060986455, 0.12449975030493228, 0, 0, 0],
[1976, 3, 0.00013846418760463496, 0.006923209380231748, 2.22, 61.69, 0.004502],
[1977, 2, 0.01441202991758457, 0.7206014958792286, 0, 0, 0],
[1978, 3, 4.876032337019254e-05, 0.002438016168509627, 2.22, 61.69, 0.004502],
[1979, 2, 0.01207812804630862, 0.603906402315431, 0, 0, 0],
[1980, 2, 0.0034921293990410386, 0.17460646995205195, 0, 0, 0],
[1981, 2, 0.004683612493623978, 0.23418062468119888, 0, 0, 0],
[1982, 2, 0.004161761211985465, 0.20808806059927326, 0, 0, 0],
[1983, 2, 0.0043877697353720034, 0.21938848676860015, 0, 0, 0],
[1984, 2, 0.002631382568955209, 0.13156912844776045, 0, 0, 0],
[1985, 3, 0.0012310071496282526, 0.061550357481412625, 2.22, 61.69, 0.004502],
[1986, 2, 0.008265161826349031, 0.4132580913174515, 0, 0, 0],
[1987, 2, 0.010632736546116827, 0.5316368273058414, 0, 0, 0],
[1988, 2, 0.011845953811604956, 0.5922976905802478, 0, 0, 0],
[1989, 3, 0.0006607023412943799, 0.033035117064719, 2.22, 61.69, 0.004502],
[1990, 2, 0.0014479772099362613, 0.07239886049681307, 0, 0, 0],
[1991, 2, 0.02791736843845849, 1.3958684219229245, 0, 0, 0],
[1992, 2, 0.00669676694709918, 0.33483834735495904, 0, 0, 0],
[1993, 2, 0.007396801680359065, 0.36984008401795326, 0, 0, 0],
[1994, 2, 0.007105771430148137, 0.35528857150740684, 0, 0, 0],
[1995, 2, 0.007146789481908194, 0.35733947409540967, 0, 0, 0],
[1996, 2, 0.002500315814796374, 0.1250157907398187, 0, 0, 0],
[1997, 3, 0.0006919203107214647, 0.03459601553607324, 2.22, 61.69, 0.004502],
[1998, 3, 0.0007719976652252124, 0.038599883261260626, 2.22, 61.69, 0.004502],
[1999, 2, 0.005606206317377037, 0.28031031586885186, 0, 0, 0],
[2000, 2, 0.015602932071110567, 0.7801466035555285, 0, 0, 0],
[2001, 2, 0.003597196019504588, 0.1798598009752294, 0, 0, 0],
[2002, 3, 0.0010051105154040628, 0.05025552577020314, 2.22, 61.69, 0.004502],
[2003, 3, 0.0015052919810963758, 0.07526459905481879, 2.22, 61.69, 0.004502],
[2004, 3, 0.0011289420570764744, 0.05644710285382372, 2.22, 61.69, 0.004502],
[2005, 2, 0.0021166659006517613, 0.10583329503258805, 0, 0, 0],
[2006, 2, 0.0017443470806312704, 0.08721735403156351, 0, 0, 0],
[2007, 3, 5.04767876707769e-05, 0.002523839383538845, 2.22, 61.69, 0.004502],
[2008, 3, 3.5033818336598355e-06, 0.0001751690916829918, 2.22, 61.69, 0.004502]
])
ppc["branch_switch"] = array([
[586, 1, 0 ],
[589, 108, 0 ],
[590, 108, 0 ],
[593, 112, 0 ],
[594, 114, 0 ],
[595, 115, 0 ],
[597, 118, 0 ],
[598, 118, 0 ],
[599, 119, 0 ],
[600, 119, 0 ],
[601, 119, 0 ],
[602, 121, 0 ],
[603, 526, 0 ],
[607, 127, 0 ],
[608, 127, 0 ],
[609, 529, 0 ],
[610, 530, 0 ],
[612, 493, 0 ],
[613, 130, 0 ],
[614, 130, 0 ],
[616, 132, 0 ],
[617, 133, 0 ],
[618, 133, 0 ],
[619, 134, 0 ],
[621, 136, 0 ],
[623, 139, 0 ],
[624, 14, 0 ],
[628, 142, 0 ],
[629, 145, 0 ],
[631, 145, 0 ],
[632, 145, 0 ],
[637, 148, 0 ],
[638, 149, 0 ],
[639, 150, 0 ],
[640, 153, 0 ],
[641, 155, 0 ],
[642, 533, 0 ],
[643, 534, 0 ],
[646, 536, 0 ],
[647, 536, 0 ],
[650, 166, 0 ],
[652, 167, 0 ],
[655, 170, 0 ],
[657, 174, 0 ],
[658, 175, 0 ],
[661, 177, 0 ],
[662, 178, 0 ],
[663, 178, 0 ],
[666, 180, 0 ],
[668, 183, 0 ],
[670, 183, 0 ],
[672, 185, 0 ],
[675, 19, 0 ],
[676, 19, 0 ],
[678, 194, 0 ],
[679, 196, 0 ],
[681, 197, 0 ],
[683, 200, 0 ],
[687, 202, 0 ],
[689, 204, 0 ],
[691, 209, 0 ],
[693, 21, 0 ],
[694, 21, 0 ],
[695, 210, 0 ],
[696, 211, 0 ],
[697, 211, 0 ],
[698, 212, 0 ],
[701, 215, 0 ],
[702, 215, 0 ],
[704, 217, 0 ],
[705, 217, 0 ],
[707, 219, 0 ],
[708, 221, 0 ],
[711, 224, 0 ],
[713, 225, 0 ],
[714, 225, 0 ],
[716, 226, 0 ],
[717, 227, 0 ],
[719, 229, 0 ],
[722, 545, 0 ],
[723, 235, 0 ],
[724, 238, 0 ],
[725, 239, 0 ],
[727, 243, 0 ],
[728, 244, 0 ],
[730, 547, 0 ],
[731, 548, 0 ],
[732, 247, 0 ],
[733, 549, 0 ],
[735, 253, 0 ],
[737, 256, 0 ],
[738, 258, 0 ],
[739, 264, 0 ],
[741, 264, 0 ],
[742, 264, 0 ],
[743, 500, 0 ],
[745, 273, 0 ],
[746, 273, 0 ],
[747, 273, 0 ],
[748, 274, 0 ],
[749, 274, 0 ],
[750, 557, 0 ],
[753, 28, 0 ],
[758, 286, 0 ],
[760, 287, 0 ],
[761, 288, 0 ],
[762, 289, 0 ],
[763, 560, 0 ],
[765, 560, 0 ],
[767, 292, 0 ],
[769, 293, 0 ],
[771, 297, 0 ],
[772, 3, 0 ],
[774, 300, 0 ],
[776, 300, 0 ],
[777, 300, 0 ],
[778, 300, 0 ],
[781, 303, 0 ],
[784, 563, 0 ],
[785, 501, 0 ],
[787, 308, 0 ],
[788, 311, 0 ],
[789, 565, 0 ],
[790, 314, 0 ],
[791, 314, 0 ],
[792, 316, 0 ],
[795, 319, 0 ],
[798, 324, 0 ],
[800, 326, 0 ],
[801, 327, 0 ],
[802, 327, 0 ],
[805, 328, 0 ],
[806, 328, 0 ],
[808, 329, 0 ],
[809, 329, 0 ],
[810, 568, 0 ],
[811, 568, 0 ],
[814, 570, 0 ],
[815, 335, 0 ],
[816, 335, 0 ],
[817, 571, 0 ],
[818, 34, 0 ],
[821, 338, 0 ],
[822, 339, 0 ],
[825, 339, 0 ],
[826, 339, 0 ],
[829, 345, 0 ],
[830, 345, 0 ],
[833, 348, 0 ],
[834, 572, 0 ],
[835, 572, 0 ],
[836, 572, 0 ],
[837, 350, 0 ],
[839, 350, 0 ],
[840, 573, 0 ],
[841, 573, 0 ],
[842, 352, 0 ],
[843, 352, 0 ],
[844, 352, 0 ],
[845, 356, 0 ],
[847, 36, 0 ],
[848, 574, 0 ],
[849, 574, 0 ],
[850, 574, 0 ],
[851, 575, 0 ],
[852, 361, 0 ],
[853, 362, 0 ],
[854, 363, 0 ],
[855, 363, 0 ],
[856, 363, 0 ],
[857, 365, 0 ],
[858, 368, 0 ],
[859, 368, 0 ],
[860, 371, 0 ],
[862, 372, 0 ],
[863, 374, 0 ],
[864, 374, 0 ],
[865, 375, 0 ],
[867, 376, 0 ],
[869, 503, 0 ],
[870, 503, 0 ],
[872, 378, 0 ],
[873, 576, 0 ],
[874, 576, 0 ],
[875, 381, 0 ],
[877, 578, 0 ],
[881, 388, 0 ],
[882, 388, 0 ],
[883, 388, 0 ],
[886, 394, 0 ],
[889, 397, 0 ],
[890, 40, 0 ],
[893, 400, 0 ],
[894, 400, 0 ],
[895, 580, 0 ],
[896, 581, 0 ],
[898, 403, 0 ],
[900, 405, 0 ],
[902, 405, 0 ],
[903, 406, 0 ],
[905, 413, 0 ],
[907, 583, 0 ],
[909, 417, 0 ],
[911, 419, 0 ],
[913, 422, 0 ],
[914, 423, 0 ],
[915, 423, 0 ],
[916, 43, 0 ],
[917, 43, 0 ],
[918, 424, 0 ],
[919, 427, 0 ],
[920, 428, 0 ],
[921, 428, 0 ],
[922, 429, 0 ],
[923, 432, 0 ],
[925, 44, 0 ],
[928, 435, 0 ],
[931, 439, 0 ],
[934, 45, 0 ],
[935, 45, 0 ],
[936, 445, 0 ],
[937, 447, 0 ],
[939, 450, 0 ],
[940, 451, 0 ],
[942, 458, 0 ],
[943, 458, 0 ],
[944, 458, 0 ],
[945, 459, 0 ],
[946, 459, 0 ],
[948, 462, 0 ],
[950, 462, 0 ],
[951, 47, 0 ],
[952, 47, 0 ],
[956, 478, 0 ],
[957, 478, 0 ],
[958, 478, 0 ],
[959, 478, 0 ],
[960, 479, 0 ],
[963, 481, 0 ],
[965, 49, 0 ],
[966, 49, 0 ],
[967, 49, 0 ],
[968, 486, 0 ],
[969, 486, 0 ],
[971, 51, 0 ],
[973, 506, 0 ],
[976, 58, 0 ],
[977, 59, 0 ],
[978, 491, 0 ],
[980, 508, 0 ],
[981, 62, 0 ],
[982, 62, 0 ],
[983, 62, 0 ],
[984, 63, 0 ],
[985, 63, 0 ],
[986, 64, 0 ],
[987, 65, 0 ],
[988, 66, 0 ],
[990, 67, 0 ],
[993, 67, 0 ],
[994, 67, 0 ],
[995, 509, 0 ],
[996, 510, 0 ],
[997, 510, 0 ],
[998, 70, 0 ],
[999, 70, 0 ],
[1000, 71, 0 ],
[1002, 71, 0 ],
[1003, 72, 0 ],
[1006, 511, 0 ],
[1007, 511, 0 ],
[1008, 75, 0 ],
[1010, 79, 0 ],
[1011, 79, 0 ],
[1012, 81, 0 ],
[1014, 83, 0 ],
[1018, 514, 0 ],
[1019, 514, 0 ],
[1023, 515, 0 ],
[1025, 518, 0 ],
[1026, 518, 0 ],
[1028, 221, 0 ],
[1029, 268, 0 ],
[1030, 269, 0 ],
[1031, 498, 0 ],
[1032, 1, 0 ],
[1033, 3, 0 ],
[1034, 4, 0 ],
[1035, 6, 0 ],
[1036, 7, 0 ],
[1037, 8, 0 ],
[1038, 9, 0 ],
[1039, 11, 0 ],
[1041, 16, 0 ],
[1042, 17, 0 ],
[1044, 21, 0 ],
[1046, 25, 0 ],
[1047, 27, 0 ],
[1048, 28, 0 ],
[1049, 29, 0 ],
[1050, 31, 0 ],
[1051, 33, 0 ],
[1052, 34, 0 ],
[1053, 35, 0 ],
[1054, 36, 0 ],
[1055, 38, 0 ],
[1056, 39, 0 ],
[1057, 40, 0 ],
[1058, 41, 0 ],
[1059, 43, 0 ],
[1060, 44, 0 ],
[1061, 45, 0 ],
[1062, 47, 0 ],
[1063, 48, 0 ],
[1064, 49, 0 ],
[1065, 50, 0 ],
[1066, 51, 0 ],
[1067, 53, 0 ],
[1068, 54, 0 ],
[1069, 55, 0 ],
[1070, 57, 0 ],
[1071, 58, 0 ],
[1072, 59, 0 ],
[1073, 60, 0 ],
[1074, 62, 0 ],
[1075, 63, 0 ],
[1077, 65, 0 ],
[1078, 66, 0 ],
[1079, 67, 0 ],
[1080, 70, 0 ],
[1081, 71, 0 ],
[1082, 72, 0 ],
[1083, 73, 0 ],
[1084, 75, 0 ],
[1085, 76, 0 ],
[1086, 77, 0 ],
[1087, 79, 0 ],
[1088, 80, 0 ],
[1089, 81, 0 ],
[1090, 82, 0 ],
[1091, 83, 0 ],
[1092, 84, 0 ],
[1093, 85, 0 ],
[1094, 88, 0 ],
[1095, 89, 0 ],
[1096, 90, 0 ],
[1097, 91, 0 ],
[1098, 92, 0 ],
[1099, 93, 0 ],
[1100, 97, 0 ],
[1101, 98, 0 ],
[1102, 101, 0 ],
[1103, 102, 0 ],
[1104, 103, 0 ],
[1105, 108, 0 ],
[1106, 109, 0 ],
[1107, 110, 0 ],
[1108, 111, 0 ],
[1109, 112, 0 ],
[1110, 113, 0 ],
[1111, 114, 0 ],
[1112, 115, 0 ],
[1113, 116, 0 ],
[1114, 118, 0 ],
[1115, 119, 0 ],
[1116, 121, 0 ],
[1117, 122, 0 ],
[1118, 126, 0 ],
[1119, 127, 0 ],
[1120, 130, 0 ],
[1121, 131, 0 ],
[1122, 132, 0 ],
[1123, 133, 0 ],
[1124, 134, 0 ],
[1125, 135, 0 ],
[1126, 136, 0 ],
[1127, 137, 0 ],
[1128, 139, 0 ],
[1129, 140, 0 ],
[1130, 141, 0 ],
[1131, 142, 0 ],
[1132, 144, 0 ],
[1133, 145, 0 ],
[1134, 146, 0 ],
[1135, 147, 0 ],
[1136, 148, 0 ],
[1137, 149, 0 ],
[1138, 150, 0 ],
[1139, 151, 0 ],
[1140, 152, 0 ],
[1141, 153, 0 ],
[1142, 154, 0 ],
[1143, 155, 0 ],
[1144, 158, 0 ],
[1145, 161, 0 ],
[1146, 162, 0 ],
[1147, 163, 0 ],
[1148, 164, 0 ],
[1149, 166, 0 ],
[1150, 167, 0 ],
[1151, 168, 0 ],
[1152, 169, 0 ],
[1153, 170, 0 ],
[1154, 171, 0 ],
[1155, 172, 0 ],
[1156, 173, 0 ],
[1157, 174, 0 ],
[1158, 175, 0 ],
[1159, 176, 0 ],
[1160, 177, 0 ],
[1161, 178, 0 ],
[1162, 179, 0 ],
[1164, 181, 0 ],
[1166, 183, 0 ],
[1167, 185, 0 ],
[1168, 186, 0 ],
[1169, 187, 0 ],
[1170, 188, 0 ],
[1171, 189, 0 ],
[1172, 190, 0 ],
[1173, 192, 0 ],
[1174, 193, 0 ],
[1175, 194, 0 ],
[1176, 196, 0 ],
[1177, 197, 0 ],
[1178, 198, 0 ],
[1179, 199, 0 ],
[1180, 200, 0 ],
[1181, 202, 0 ],
[1182, 203, 0 ],
[1183, 204, 0 ],
[1184, 205, 0 ],
[1185, 206, 0 ],
[1186, 207, 0 ],
[1187, 208, 0 ],
[1188, 209, 0 ],
[1189, 210, 0 ],
[1190, 211, 0 ],
[1191, 212, 0 ],
[1192, 213, 0 ],
[1193, 214, 0 ],
[1194, 215, 0 ],
[1195, 216, 0 ],
[1196, 217, 0 ],
[1197, 218, 0 ],
[1198, 219, 0 ],
[1199, 221, 0 ],
[1200, 222, 0 ],
[1201, 223, 0 ],
[1202, 224, 0 ],
[1203, 225, 0 ],
[1204, 226, 0 ],
[1205, 227, 0 ],
[1206, 228, 0 ],
[1207, 229, 0 ],
[1208, 230, 0 ],
[1209, 234, 0 ],
[1210, 235, 0 ],
[1211, 237, 0 ],
[1212, 238, 0 ],
[1213, 239, 0 ],
[1214, 240, 0 ],
[1215, 241, 0 ],
[1216, 242, 0 ],
[1217, 243, 0 ],
[1218, 244, 0 ],
[1219, 247, 0 ],
[1220, 251, 0 ],
[1221, 252, 0 ],
[1222, 253, 0 ],
[1223, 254, 0 ],
[1224, 255, 0 ],
[1225, 256, 0 ],
[1226, 257, 0 ],
[1227, 258, 0 ],
[1228, 260, 0 ],
[1229, 263, 0 ],
[1230, 264, 0 ],
[1231, 266, 0 ],
[1232, 267, 0 ],
[1233, 268, 0 ],
[1234, 269, 0 ],
[1235, 271, 0 ],
[1236, 272, 0 ],
[1237, 273, 0 ],
[1238, 274, 0 ],
[1239, 275, 0 ],
[1240, 276, 0 ],
[1241, 278, 0 ],
[1242, 281, 0 ],
[1243, 282, 0 ],
[1244, 283, 0 ],
[1245, 284, 0 ],
[1246, 285, 0 ],
[1247, 286, 0 ],
[1248, 287, 0 ],
[1249, 288, 0 ],
[1250, 289, 0 ],
[1251, 291, 0 ],
[1252, 292, 0 ],
[1253, 293, 0 ],
[1254, 294, 0 ],
[1255, 295, 0 ],
[1256, 296, 0 ],
[1257, 297, 0 ],
[1258, 298, 0 ],
[1259, 299, 0 ],
[1260, 300, 0 ],
[1261, 302, 0 ],
[1262, 303, 0 ],
[1263, 304, 0 ],
[1264, 307, 0 ],
[1265, 308, 0 ],
[1266, 309, 0 ],
[1267, 311, 0 ],
[1270, 316, 0 ],
[1271, 317, 0 ],
[1272, 318, 0 ],
[1273, 319, 0 ],
[1274, 321, 0 ],
[1275, 322, 0 ],
[1276, 323, 0 ],
[1277, 324, 0 ],
[1278, 325, 0 ],
[1279, 326, 0 ],
[1280, 327, 0 ],
[1282, 329, 0 ],
[1283, 331, 0 ],
[1284, 333, 0 ],
[1285, 335, 0 ],
[1286, 337, 0 ],
[1287, 338, 0 ],
[1288, 339, 0 ],
[1289, 340, 0 ],
[1290, 341, 0 ],
[1291, 342, 0 ],
[1292, 343, 0 ],
[1293, 344, 0 ],
[1294, 345, 0 ],
[1295, 346, 0 ],
[1296, 347, 0 ],
[1297, 348, 0 ],
[1300, 353, 0 ],
[1301, 354, 0 ],
[1302, 355, 0 ],
[1303, 356, 0 ],
[1304, 357, 0 ],
[1305, 359, 0 ],
[1306, 361, 0 ],
[1307, 362, 0 ],
[1308, 363, 0 ],
[1309, 364, 0 ],
[1310, 365, 0 ],
[1311, 366, 0 ],
[1312, 367, 0 ],
[1313, 368, 0 ],
[1314, 369, 0 ],
[1315, 370, 0 ],
[1316, 371, 0 ],
[1317, 372, 0 ],
[1318, 373, 0 ],
[1319, 374, 0 ],
[1320, 375, 0 ],
[1321, 376, 0 ],
[1322, 377, 0 ],
[1323, 378, 0 ],
[1324, 379, 0 ],
[1325, 381, 0 ],
[1326, 384, 0 ],
[1327, 385, 0 ],
[1328, 386, 0 ],
[1329, 387, 0 ],
[1330, 388, 0 ],
[1331, 390, 0 ],
[1332, 391, 0 ],
[1333, 392, 0 ],
[1334, 393, 0 ],
[1336, 395, 0 ],
[1337, 396, 0 ],
[1338, 397, 0 ],
[1339, 398, 0 ],
[1340, 399, 0 ],
[1341, 400, 0 ],
[1342, 403, 0 ],
[1343, 404, 0 ],
[1344, 405, 0 ],
[1345, 406, 0 ],
[1346, 407, 0 ],
[1348, 410, 0 ],
[1349, 411, 0 ],
[1350, 412, 0 ],
[1351, 413, 0 ],
[1352, 414, 0 ],
[1355, 418, 0 ],
[1356, 419, 0 ],
[1357, 420, 0 ],
[1358, 421, 0 ],
[1359, 422, 0 ],
[1360, 423, 0 ],
[1361, 424, 0 ],
[1362, 425, 0 ],
[1363, 426, 0 ],
[1364, 427, 0 ],
[1365, 428, 0 ],
[1366, 429, 0 ],
[1367, 430, 0 ],
[1368, 431, 0 ],
[1369, 432, 0 ],
[1370, 433, 0 ],
[1371, 434, 0 ],
[1372, 435, 0 ],
[1373, 436, 0 ],
[1374, 437, 0 ],
[1375, 438, 0 ],
[1376, 439, 0 ],
[1377, 440, 0 ],
[1378, 441, 0 ],
[1379, 442, 0 ],
[1380, 443, 0 ],
[1381, 445, 0 ],
[1382, 446, 0 ],
[1383, 447, 0 ],
[1384, 448, 0 ],
[1385, 449, 0 ],
[1386, 450, 0 ],
[1387, 451, 0 ],
[1388, 453, 0 ],
[1389, 454, 0 ],
[1390, 455, 0 ],
[1391, 456, 0 ],
[1392, 457, 0 ],
[1393, 458, 0 ],
[1394, 459, 0 ],
[1395, 460, 0 ],
[1396, 461, 0 ],
[1397, 462, 0 ],
[1398, 463, 0 ],
[1399, 464, 0 ],
[1400, 465, 0 ],
[1401, 466, 0 ],
[1402, 467, 0 ],
[1403, 468, 0 ],
[1404, 469, 0 ],
[1405, 470, 0 ],
[1406, 471, 0 ],
[1407, 472, 0 ],
[1408, 473, 0 ],
[1409, 474, 0 ],
[1410, 475, 0 ],
[1411, 476, 0 ],
[1412, 477, 0 ],
[1413, 478, 0 ],
[1414, 479, 0 ],
[1415, 480, 0 ],
[1416, 481, 0 ],
[1417, 482, 0 ],
[1418, 483, 0 ],
[1419, 484, 0 ],
[1421, 486, 0 ],
[1422, 487, 0 ],
[1423, 488, 0 ],
[1424, 489, 0 ],
[1425, 490, 0 ],
[1426, 491, 0 ],
[1427, 492, 0 ],
[1428, 493, 0 ],
[1431, 496, 0 ],
[1432, 497, 0 ],
[1433, 498, 0 ],
[1434, 499, 0 ],
[1435, 500, 0 ],
[1436, 501, 0 ],
[1437, 502, 0 ],
[1438, 503, 0 ],
[1439, 504, 0 ],
[1440, 505, 0 ],
[1441, 506, 0 ],
[1442, 507, 0 ],
[1443, 508, 0 ],
[1444, 509, 0 ],
[1445, 510, 0 ],
[1446, 511, 0 ],
[1447, 512, 0 ],
[1448, 513, 0 ],
[1449, 514, 0 ],
[1450, 515, 0 ],
[1451, 516, 0 ],
[1452, 517, 0 ],
[1453, 518, 0 ],
[1454, 519, 0 ],
[1455, 520, 0 ],
[1456, 521, 0 ],
[1457, 522, 0 ],
[1458, 523, 0 ],
[1459, 524, 0 ],
[1460, 525, 0 ],
[1461, 526, 0 ],
[1462, 527, 0 ],
[1463, 528, 0 ],
[1464, 529, 0 ],
[1465, 530, 0 ],
[1466, 531, 0 ],
[1467, 532, 0 ],
[1468, 533, 0 ],
[1469, 534, 0 ],
[1470, 535, 0 ],
[1471, 536, 0 ],
[1472, 537, 0 ],
[1473, 538, 0 ],
[1474, 539, 0 ],
[1475, 540, 0 ],
[1476, 541, 0 ],
[1477, 542, 0 ],
[1479, 544, 0 ],
[1480, 545, 0 ],
[1481, 546, 0 ],
[1482, 547, 0 ],
[1483, 548, 0 ],
[1484, 549, 0 ],
[1485, 550, 0 ],
[1486, 551, 0 ],
[1487, 552, 0 ],
[1488, 554, 0 ],
[1489, 555, 0 ],
[1490, 556, 0 ],
[1491, 557, 0 ],
[1492, 558, 0 ],
[1493, 559, 0 ],
[1494, 560, 0 ],
[1495, 561, 0 ],
[1497, 563, 0 ],
[1498, 564, 0 ],
[1500, 566, 0 ],
[1501, 567, 0 ],
[1502, 568, 0 ],
[1503, 569, 0 ],
[1504, 570, 0 ],
[1505, 571, 0 ],
[1506, 572, 0 ],
[1507, 573, 0 ],
[1508, 574, 0 ],
[1510, 576, 0 ],
[1511, 577, 0 ],
[1512, 578, 0 ],
[1513, 579, 0 ],
[1514, 580, 0 ],
[1516, 582, 0 ],
[1517, 583, 0 ],
[1518, 584, 0 ],
[1519, 585, 0 ],
[1520, 1, 0 ],
[1521, 3, 0 ],
[1522, 4, 0 ],
[1523, 6, 0 ],
[1524, 7, 0 ],
[1525, 8, 0 ],
[1526, 9, 0 ],
[1527, 11, 0 ],
[1528, 14, 0 ],
[1529, 16, 0 ],
[1530, 17, 0 ],
[1531, 19, 0 ],
[1532, 21, 0 ],
[1534, 25, 0 ],
[1535, 27, 0 ],
[1536, 28, 0 ],
[1537, 29, 0 ],
[1538, 31, 0 ],
[1539, 33, 0 ],
[1540, 34, 0 ],
[1541, 35, 0 ],
[1542, 36, 0 ],
[1543, 38, 0 ],
[1544, 39, 0 ],
[1545, 40, 0 ],
[1546, 41, 0 ],
[1547, 43, 0 ],
[1548, 44, 0 ],
[1549, 45, 0 ],
[1550, 47, 0 ],
[1551, 48, 0 ],
[1552, 49, 0 ],
[1553, 50, 0 ],
[1554, 51, 0 ],
[1555, 53, 0 ],
[1556, 54, 0 ],
[1557, 55, 0 ],
[1558, 57, 0 ],
[1559, 58, 0 ],
[1560, 59, 0 ],
[1561, 60, 0 ],
[1562, 62, 0 ],
[1563, 63, 0 ],
[1564, 64, 0 ],
[1565, 65, 0 ],
[1566, 66, 0 ],
[1567, 67, 0 ],
[1568, 70, 0 ],
[1569, 71, 0 ],
[1570, 72, 0 ],
[1571, 73, 0 ],
[1572, 75, 0 ],
[1573, 76, 0 ],
[1574, 77, 0 ],
[1575, 79, 0 ],
[1576, 80, 0 ],
[1577, 81, 0 ],
[1578, 82, 0 ],
[1579, 83, 0 ],
[1580, 84, 0 ],
[1581, 85, 0 ],
[1582, 88, 0 ],
[1583, 89, 0 ],
[1584, 90, 0 ],
[1585, 91, 0 ],
[1586, 92, 0 ],
[1587, 93, 0 ],
[1588, 97, 0 ],
[1589, 98, 0 ],
[1590, 101, 0 ],
[1591, 102, 0 ],
[1592, 103, 0 ],
[1593, 108, 0 ],
[1594, 109, 0 ],
[1595, 110, 0 ],
[1596, 111, 0 ],
[1597, 112, 0 ],
[1598, 113, 0 ],
[1599, 114, 0 ],
[1600, 115, 0 ],
[1601, 116, 0 ],
[1602, 118, 0 ],
[1603, 119, 0 ],
[1604, 121, 0 ],
[1605, 122, 0 ],
[1606, 126, 0 ],
[1607, 127, 0 ],
[1608, 130, 0 ],
[1609, 131, 0 ],
[1610, 132, 0 ],
[1611, 133, 0 ],
[1612, 134, 0 ],
[1613, 135, 0 ],
[1614, 136, 0 ],
[1615, 137, 0 ],
[1616, 139, 0 ],
[1617, 140, 0 ],
[1618, 141, 0 ],
[1619, 142, 0 ],
[1620, 144, 0 ],
[1621, 145, 0 ],
[1622, 146, 0 ],
[1623, 147, 0 ],
[1624, 148, 0 ],
[1625, 149, 0 ],
[1626, 150, 0 ],
[1627, 151, 0 ],
[1628, 152, 0 ],
[1629, 153, 0 ],
[1630, 154, 0 ],
[1631, 155, 0 ],
[1632, 158, 0 ],
[1633, 161, 0 ],
[1634, 162, 0 ],
[1635, 163, 0 ],
[1636, 164, 0 ],
[1637, 166, 0 ],
[1638, 167, 0 ],
[1639, 168, 0 ],
[1640, 169, 0 ],
[1641, 170, 0 ],
[1642, 171, 0 ],
[1643, 172, 0 ],
[1644, 173, 0 ],
[1645, 174, 0 ],
[1646, 175, 0 ],
[1647, 176, 0 ],
[1648, 177, 0 ],
[1649, 178, 0 ],
[1650, 179, 0 ],
[1651, 180, 0 ],
[1652, 181, 0 ],
[1653, 182, 0 ],
[1654, 183, 0 ],
[1655, 185, 0 ],
[1656, 186, 0 ],
[1657, 187, 0 ],
[1658, 188, 0 ],
[1659, 189, 0 ],
[1660, 190, 0 ],
[1661, 192, 0 ],
[1662, 193, 0 ],
[1663, 194, 0 ],
[1664, 196, 0 ],
[1665, 197, 0 ],
[1666, 198, 0 ],
[1667, 199, 0 ],
[1668, 200, 0 ],
[1669, 202, 0 ],
[1670, 203, 0 ],
[1671, 204, 0 ],
[1672, 205, 0 ],
[1673, 206, 0 ],
[1674, 207, 0 ],
[1675, 208, 0 ],
[1676, 209, 0 ],
[1677, 210, 0 ],
[1678, 211, 0 ],
[1679, 212, 0 ],
[1680, 213, 0 ],
[1681, 214, 0 ],
[1682, 215, 0 ],
[1683, 216, 0 ],
[1684, 217, 0 ],
[1685, 218, 0 ],
[1686, 219, 0 ],
[1687, 221, 0 ],
[1688, 222, 0 ],
[1689, 223, 0 ],
[1690, 224, 0 ],
[1691, 225, 0 ],
[1692, 226, 0 ],
[1693, 227, 0 ],
[1694, 228, 0 ],
[1695, 229, 0 ],
[1696, 230, 0 ],
[1697, 234, 0 ],
[1698, 235, 0 ],
[1699, 237, 0 ],
[1700, 238, 0 ],
[1701, 239, 0 ],
[1702, 240, 0 ],
[1703, 241, 0 ],
[1704, 242, 0 ],
[1705, 243, 0 ],
[1706, 244, 0 ],
[1707, 247, 0 ],
[1708, 251, 0 ],
[1709, 252, 0 ],
[1710, 253, 0 ],
[1711, 254, 0 ],
[1712, 255, 0 ],
[1713, 256, 0 ],
[1714, 257, 0 ],
[1715, 258, 0 ],
[1716, 260, 0 ],
[1717, 263, 0 ],
[1718, 264, 0 ],
[1719, 266, 0 ],
[1720, 267, 0 ],
[1721, 268, 0 ],
[1722, 269, 0 ],
[1723, 271, 0 ],
[1724, 272, 0 ],
[1725, 273, 0 ],
[1726, 274, 0 ],
[1727, 275, 0 ],
[1728, 276, 0 ],
[1729, 278, 0 ],
[1730, 281, 0 ],
[1731, 282, 0 ],
[1732, 283, 0 ],
[1733, 284, 0 ],
[1734, 285, 0 ],
[1735, 286, 0 ],
[1736, 287, 0 ],
[1737, 288, 0 ],
[1738, 289, 0 ],
[1739, 291, 0 ],
[1740, 292, 0 ],
[1741, 293, 0 ],
[1742, 294, 0 ],
[1743, 295, 0 ],
[1744, 296, 0 ],
[1745, 297, 0 ],
[1746, 298, 0 ],
[1747, 299, 0 ],
[1748, 300, 0 ],
[1749, 302, 0 ],
[1750, 303, 0 ],
[1751, 304, 0 ],
[1752, 307, 0 ],
[1753, 308, 0 ],
[1754, 309, 0 ],
[1755, 311, 0 ],
[1756, 312, 0 ],
[1757, 314, 0 ],
[1758, 316, 0 ],
[1759, 317, 0 ],
[1760, 318, 0 ],
[1761, 319, 0 ],
[1762, 321, 0 ],
[1763, 322, 0 ],
[1764, 323, 0 ],
[1765, 324, 0 ],
[1766, 325, 0 ],
[1767, 326, 0 ],
[1768, 327, 0 ],
[1769, 328, 0 ],
[1770, 329, 0 ],
[1771, 331, 0 ],
[1772, 333, 0 ],
[1773, 335, 0 ],
[1774, 337, 0 ],
[1775, 338, 0 ],
[1776, 339, 0 ],
[1777, 340, 0 ],
[1778, 341, 0 ],
[1779, 342, 0 ],
[1780, 343, 0 ],
[1781, 344, 0 ],
[1782, 345, 0 ],
[1783, 346, 0 ],
[1784, 347, 0 ],
[1785, 348, 0 ],
[1786, 350, 0 ],
[1787, 352, 0 ],
[1788, 353, 0 ],
[1789, 354, 0 ],
[1790, 355, 0 ],
[1791, 356, 0 ],
[1792, 357, 0 ],
[1793, 359, 0 ],
[1794, 361, 0 ],
[1795, 362, 0 ],
[1796, 363, 0 ],
[1797, 364, 0 ],
[1798, 365, 0 ],
[1799, 366, 0 ],
[1800, 367, 0 ],
[1801, 368, 0 ],
[1802, 369, 0 ],
[1803, 370, 0 ],
[1804, 371, 0 ],
[1805, 372, 0 ],
[1806, 373, 0 ],
[1807, 374, 0 ],
[1808, 375, 0 ],
[1809, 376, 0 ],
[1810, 377, 0 ],
[1811, 378, 0 ],
[1812, 379, 0 ],
[1813, 381, 0 ],
[1814, 384, 0 ],
[1815, 385, 0 ],
[1816, 386, 0 ],
[1817, 387, 0 ],
[1818, 388, 0 ],
[1819, 390, 0 ],
[1820, 391, 0 ],
[1821, 392, 0 ],
[1822, 393, 0 ],
[1823, 394, 0 ],
[1824, 395, 0 ],
[1825, 396, 0 ],
[1826, 397, 0 ],
[1827, 398, 0 ],
[1828, 399, 0 ],
[1829, 400, 0 ],
[1830, 403, 0 ],
[1831, 404, 0 ],
[1832, 405, 0 ],
[1833, 406, 0 ],
[1834, 407, 0 ],
[1836, 410, 0 ],
[1837, 411, 0 ],
[1838, 412, 0 ],
[1839, 413, 0 ],
[1840, 414, 0 ],
[1841, 416, 0 ],
[1842, 417, 0 ],
[1843, 418, 0 ],
[1844, 419, 0 ],
[1845, 420, 0 ],
[1846, 421, 0 ],
[1847, 422, 0 ],
[1848, 423, 0 ],
[1849, 424, 0 ],
[1850, 425, 0 ],
[1851, 426, 0 ],
[1852, 427, 0 ],
[1853, 428, 0 ],
[1854, 429, 0 ],
[1855, 430, 0 ],
[1856, 431, 0 ],
[1857, 432, 0 ],
[1858, 433, 0 ],
[1860, 435, 0 ],
[1861, 436, 0 ],
[1862, 437, 0 ],
[1863, 438, 0 ],
[1864, 439, 0 ],
[1865, 440, 0 ],
[1866, 441, 0 ],
[1867, 442, 0 ],
[1868, 443, 0 ],
[1869, 445, 0 ],
[1870, 446, 0 ],
[1871, 447, 0 ],
[1872, 448, 0 ],
[1873, 449, 0 ],
[1874, 450, 0 ],
[1875, 451, 0 ],
[1876, 453, 0 ],
[1877, 454, 0 ],
[1878, 455, 0 ],
[1879, 456, 0 ],
[1880, 457, 0 ],
[1881, 458, 0 ],
[1882, 459, 0 ],
[1883, 460, 0 ],
[1884, 461, 0 ],
[1885, 462, 0 ],
[1886, 463, 0 ],
[1887, 464, 0 ],
[1888, 465, 0 ],
[1889, 466, 0 ],
[1890, 467, 0 ],
[1891, 468, 0 ],
[1892, 469, 0 ],
[1893, 470, 0 ],
[1894, 471, 0 ],
[1895, 472, 0 ],
[1896, 473, 0 ],
[1897, 474, 0 ],
[1898, 475, 0 ],
[1899, 476, 0 ],
[1900, 477, 0 ],
[1901, 478, 0 ],
[1902, 479, 0 ],
[1903, 480, 0 ],
[1904, 481, 0 ],
[1905, 482, 0 ],
[1906, 483, 0 ],
[1907, 484, 0 ],
[1908, 485, 0 ],
[1909, 486, 0 ],
[1910, 487, 0 ],
[1911, 488, 0 ],
[1912, 489, 0 ],
[1913, 490, 0 ],
[1914, 491, 0 ],
[1915, 492, 0 ],
[1916, 493, 0 ],
[1917, 494, 0 ],
[1918, 495, 0 ],
[1919, 496, 0 ],
[1920, 497, 0 ],
[1921, 498, 0 ],
[1922, 499, 0 ],
[1923, 500, 0 ],
[1924, 501, 0 ],
[1925, 502, 0 ],
[1926, 503, 0 ],
[1927, 504, 0 ],
[1928, 505, 0 ],
[1929, 506, 0 ],
[1930, 507, 0 ],
[1931, 508, 0 ],
[1932, 509, 0 ],
[1933, 510, 0 ],
[1934, 511, 0 ],
[1935, 512, 0 ],
[1936, 513, 0 ],
[1937, 514, 0 ],
[1938, 515, 0 ],
[1939, 516, 0 ],
[1940, 517, 0 ],
[1941, 518, 0 ],
[1942, 519, 0 ],
[1943, 520, 0 ],
[1944, 521, 0 ],
[1945, 522, 0 ],
[1946, 523, 0 ],
[1947, 524, 0 ],
[1948, 525, 0 ],
[1949, 526, 0 ],
[1950, 527, 0 ],
[1951, 528, 0 ],
[1952, 529, 0 ],
[1953, 530, 0 ],
[1954, 531, 0 ],
[1955, 532, 0 ],
[1956, 533, 0 ],
[1957, 534, 0 ],
[1958, 535, 0 ],
[1959, 536, 0 ],
[1960, 537, 0 ],
[1961, 538, 0 ],
[1962, 539, 0 ],
[1963, 540, 0 ],
[1964, 541, 0 ],
[1965, 542, 0 ],
[1966, 543, 0 ],
[1967, 544, 0 ],
[1968, 545, 0 ],
[1969, 546, 0 ],
[1970, 547, 0 ],
[1971, 548, 0 ],
[1972, 549, 0 ],
[1973, 550, 0 ],
[1974, 551, 0 ],
[1975, 552, 0 ],
[1976, 553, 0 ],
[1977, 554, 0 ],
[1978, 555, 0 ],
[1979, 556, 0 ],
[1980, 557, 0 ],
[1981, 558, 0 ],
[1982, 559, 0 ],
[1983, 560, 0 ],
[1984, 561, 0 ],
[1985, 562, 0 ],
[1986, 563, 0 ],
[1987, 564, 0 ],
[1988, 565, 0 ],
[1989, 566, 0 ],
[1990, 567, 0 ],
[1991, 568, 0 ],
[1992, 569, 0 ],
[1993, 570, 0 ],
[1994, 571, 0 ],
[1995, 572, 0 ],
[1996, 573, 0 ],
[1997, 574, 0 ],
[1998, 575, 0 ],
[1999, 576, 0 ],
[2000, 577, 0 ],
[2001, 578, 0 ],
[2002, 579, 0 ],
[2003, 580, 0 ],
[2004, 581, 0 ],
[2005, 582, 0 ],
[2006, 583, 0 ],
[2007, 584, 0 ],
[2008, 585, 0 ],
[1, 490, 0 ],
[3, 4, 1 ],
[491, 6, 0 ],
[7, 5, 0 ],
[8, 9, 0 ],
[492, 11, 0 ],
[11, 493, 0 ],
[492, 493, 1 ],
[494, 14, 0 ],
[13, 15, 0 ],
[16, 5, 0 ],
[17, 18, 1 ],
[17, 12, 0 ],
[14, 495, 0 ],
[494, 19, 0 ],
[20, 21, 0 ],
[20, 22, 1 ],
[497, 23, 0 ],
[23, 499, 1 ],
[25, 26, 0 ],
[25, 22, 0 ],
[23, 27, 0 ],
[28, 23, 0 ],
[8, 21, 0 ],
[9, 29, 0 ],
[30, 25, 1 ],
[31, 32, 1 ],
[32, 33, 1 ],
[34, 35, 0 ],
[35, 36, 0 ],
[490, 6, 1 ],
[37, 10, 1 ],
[10, 38, 0 ],
[37, 38, 1 ],
[39, 40, 1 ],
[39, 41, 1 ],
[42, 41, 1 ],
[18, 42, 1 ],
[492, 43, 1 ],
[44, 45, 0 ],
[44, 505, 0 ],
[46, 12, 0 ],
[47, 48, 0 ],
[49, 50, 0 ],
[31, 33, 1 ],
[31, 51, 0 ],
[52, 53, 1 ],
[52, 54, 0 ],
[506, 55, 0 ],
[506, 507, 1 ],
[57, 506, 0 ],
[57, 58, 0 ],
[58, 506, 0 ],
[59, 60, 1 ],
[508, 62, 0 ],
[30, 61, 1 ],
[63, 506, 0 ],
[13, 64, 0 ],
[65, 66, 1 ],
[59, 67, 0 ],
[61, 67, 0 ],
[68, 69, 1 ],
[70, 69, 1 ],
[71, 72, 1 ],
[73, 74, 1 ],
[37, 75, 1 ],
[72, 75, 0 ],
[37, 72, 1 ],
[76, 77, 1 ],
[77, 51, 0 ],
[73, 72, 1 ],
[18, 40, 1 ],
[492, 45, 1 ],
[10, 74, 1 ],
[45, 511, 1 ],
[78, 32, 1 ],
[79, 80, 0 ],
[81, 79, 1 ],
[34, 82, 0 ],
[83, 84, 0 ],
[83, 499, 0 ],
[85, 86, 0 ],
[87, 86, 1 ],
[88, 89, 0 ],
[90, 86, 1 ],
[91, 86, 0 ],
[86, 92, 0 ],
[86, 93, 0 ],
[94, 86, 1 ],
[86, 95, 1 ],
[513, 517, 0 ],
[97, 66, 1 ],
[42, 98, 0 ],
[99, 100, 1 ],
[42, 101, 0 ],
[102, 42, 1 ],
[103, 87, 0 ],
[104, 103, 0 ],
[105, 87, 0 ],
[106, 107, 0 ],
[108, 107, 0 ],
[109, 106, 0 ],
[110, 111, 1 ],
[87, 112, 0 ],
[113, 87, 0 ],
[87, 85, 1 ],
[110, 114, 1 ],
[115, 116, 0 ],
[117, 118, 0 ],
[117, 119, 0 ],
[117, 120, 1 ],
[121, 122, 0 ],
[123, 124, 0 ],
[125, 126, 0 ],
[127, 119, 0 ],
[118, 128, 0 ],
[121, 119, 0 ],
[530, 527, 0 ],
[125, 130, 0 ],
[125, 123, 0 ],
[131, 132, 0 ],
[133, 123, 0 ],
[524, 134, 0 ],
[135, 136, 0 ],
[123, 131, 0 ],
[117, 128, 1 ],
[137, 521, 0 ],
[531, 514, 0 ],
[139, 521, 0 ],
[140, 514, 0 ],
[522, 141, 0 ],
[142, 523, 0 ],
[530, 526, 0 ],
[140, 532, 0 ],
[142, 144, 0 ],
[140, 522, 0 ],
[145, 146, 0 ],
[147, 523, 0 ],
[144, 523, 0 ],
[139, 523, 0 ],
[140, 141, 0 ],
[528, 526, 0 ],
[528, 148, 0 ],
[149, 150, 0 ],
[145, 528, 0 ],
[530, 151, 0 ],
[524, 152, 0 ],
[149, 525, 1 ],
[139, 514, 0 ],
[126, 120, 1 ],
[530, 153, 0 ],
[528, 147, 1 ],
[528, 154, 0 ],
[130, 120, 1 ],
[528, 155, 1 ],
[524, 533, 0 ],
[524, 149, 0 ],
[154, 150, 0 ],
[157, 110, 1 ],
[119, 158, 0 ],
[159, 60, 0 ],
[536, 161, 0 ],
[115, 151, 0 ],
[162, 134, 0 ],
[115, 526, 0 ],
[138, 87, 0 ],
[123, 163, 0 ],
[112, 164, 0 ],
[112, 165, 0 ],
[166, 165, 0 ],
[167, 537, 0 ],
[168, 104, 0 ],
[531, 520, 0 ],
[139, 520, 0 ],
[520, 169, 0 ],
[168, 105, 0 ],
[520, 170, 0 ],
[171, 89, 0 ],
[521, 172, 0 ],
[123, 173, 0 ],
[521, 174, 0 ],
[37, 39, 0 ],
[530, 175, 0 ],
[530, 176, 0 ],
[88, 530, 0 ],
[177, 496, 1 ],
[178, 525, 0 ],
[179, 493, 1 ],
[180, 181, 1 ],
[182, 180, 0 ],
[179, 181, 0 ],
[180, 493, 1 ],
[183, 30, 0 ],
[183, 21, 0 ],
[538, 185, 0 ],
[538, 89, 0 ],
[184, 186, 0 ],
[184, 187, 0 ],
[520, 172, 0 ],
[89, 175, 0 ],
[185, 89, 0 ],
[89, 188, 0 ],
[189, 190, 0 ],
[539, 172, 0 ],
[504, 192, 0 ],
[105, 186, 0 ],
[105, 187, 0 ],
[539, 193, 0 ],
[187, 194, 0 ],
[539, 540, 0 ],
[539, 196, 0 ],
[197, 540, 0 ],
[110, 198, 0 ],
[197, 539, 0 ],
[199, 537, 0 ],
[134, 526, 0 ],
[200, 193, 0 ],
[4, 201, 1 ],
[202, 86, 0 ],
[85, 203, 0 ],
[147, 204, 0 ],
[147, 205, 0 ],
[123, 206, 0 ],
[537, 207, 0 ],
[165, 208, 0 ],
[4, 94, 1 ],
[4, 2, 0 ],
[209, 4, 0 ],
[119, 163, 0 ],
[210, 3, 0 ],
[99, 211, 0 ],
[99, 69, 1 ],
[212, 99, 0 ],
[213, 214, 0 ],
[510, 215, 0 ],
[128, 69, 1 ],
[216, 69, 1 ],
[217, 98, 0 ],
[504, 218, 0 ],
[177, 504, 1 ],
[219, 209, 0 ],
[219, 220, 0 ],
[94, 95, 1 ],
[159, 221, 1 ],
[34, 161, 0 ],
[222, 221, 0 ],
[211, 52, 1 ],
[215, 223, 1 ],
[224, 215, 0 ],
[225, 224, 1 ],
[224, 223, 0 ],
[226, 6, 0 ],
[7, 3, 1 ],
[216, 227, 1 ],
[228, 229, 0 ],
[227, 230, 0 ],
[231, 53, 1 ],
[544, 545, 0 ],
[234, 235, 1 ],
[546, 214, 1 ],
[233, 227, 0 ],
[237, 238, 0 ],
[212, 100, 0 ],
[519, 239, 0 ],
[238, 519, 0 ],
[213, 240, 0 ],
[241, 242, 1 ],
[70, 241, 0 ],
[509, 213, 0 ],
[68, 243, 0 ],
[243, 244, 0 ],
[68, 244, 0 ],
[544, 547, 1 ],
[245, 227, 1 ],
[246, 208, 0 ],
[112, 208, 0 ],
[165, 247, 0 ],
[537, 549, 0 ],
[537, 550, 0 ],
[537, 551, 0 ],
[110, 251, 0 ],
[510, 252, 1 ],
[529, 253, 1 ],
[237, 239, 1 ],
[254, 238, 1 ],
[69, 255, 0 ],
[510, 225, 1 ],
[256, 257, 0 ],
[258, 190, 0 ],
[258, 259, 0 ],
[260, 261, 1 ],
[554, 553, 1 ],
[515, 263, 0 ],
[14, 264, 1 ],
[116, 555, 0 ],
[151, 116, 0 ],
[111, 114, 1 ],
[77, 111, 0 ],
[266, 525, 0 ],
[267, 120, 1 ],
[268, 269, 0 ],
[556, 271, 0 ],
[556, 272, 0 ],
[529, 273, 0 ],
[128, 274, 0 ],
[34, 275, 0 ],
[503, 276, 0 ],
[503, 504, 1 ],
[177, 218, 1 ],
[277, 278, 1 ],
[557, 558, 1 ],
[557, 559, 1 ],
[559, 558, 1 ],
[277, 78, 1 ],
[277, 279, 1 ],
[78, 279, 0 ],
[281, 282, 0 ],
[283, 161, 1 ],
[268, 161, 1 ],
[256, 284, 0 ],
[515, 516, 1 ],
[263, 516, 0 ],
[516, 285, 0 ],
[63, 286, 0 ],
[287, 516, 0 ],
[8, 102, 1 ],
[8, 101, 1 ],
[80, 288, 0 ],
[80, 289, 0 ],
[276, 560, 0 ],
[37, 290, 0 ],
[290, 74, 1 ],
[512, 291, 0 ],
[78, 292, 1 ],
[199, 548, 0 ],
[491, 293, 0 ],
[4, 294, 0 ],
[490, 541, 1 ],
[491, 295, 0 ],
[491, 296, 0 ],
[295, 297, 0 ],
[508, 161, 0 ],
[117, 123, 0 ],
[133, 117, 0 ],
[71, 74, 1 ],
[74, 278, 1 ],
[298, 515, 0 ],
[5, 299, 0 ],
[32, 292, 1 ],
[5, 29, 1 ],
[503, 560, 0 ],
[300, 301, 1 ],
[51, 300, 0 ],
[244, 302, 1 ],
[31, 302, 1 ],
[51, 282, 1 ],
[303, 304, 0 ],
[305, 304, 0 ],
[305, 259, 0 ],
[306, 307, 1 ],
[305, 308, 0 ],
[305, 309, 0 ],
[310, 309, 1 ],
[306, 309, 1 ],
[311, 280, 0 ],
[280, 278, 1 ],
[311, 32, 1 ],
[13, 312, 1 ],
[313, 314, 0 ],
[312, 313, 1 ],
[547, 566, 1 ],
[245, 315, 1 ],
[312, 316, 0 ],
[312, 314, 0 ],
[554, 546, 1 ],
[262, 216, 1 ],
[317, 233, 0 ],
[318, 317, 0 ],
[231, 52, 1 ],
[319, 567, 0 ],
[557, 321, 0 ],
[277, 65, 1 ],
[322, 288, 1 ],
[322, 323, 0 ],
[277, 324, 1 ],
[324, 325, 0 ],
[277, 325, 0 ],
[326, 327, 0 ],
[328, 326, 1 ],
[328, 327, 1 ],
[326, 329, 0 ],
[568, 329, 1 ],
[568, 326, 0 ],
[332, 78, 1 ],
[333, 306, 0 ],
[332, 333, 0 ],
[332, 334, 0 ],
[66, 334, 1 ],
[330, 335, 1 ],
[336, 66, 0 ],
[330, 336, 1 ],
[68, 70, 0 ],
[509, 337, 1 ],
[324, 288, 0 ],
[338, 559, 0 ],
[339, 559, 0 ],
[339, 340, 1 ],
[559, 340, 1 ],
[341, 292, 0 ],
[557, 342, 0 ],
[558, 343, 0 ],
[502, 340, 1 ],
[72, 32, 1 ],
[344, 345, 0 ],
[346, 47, 0 ],
[46, 47, 0 ],
[346, 345, 0 ],
[347, 328, 0 ],
[347, 348, 1 ],
[571, 348, 1 ],
[347, 572, 0 ],
[571, 570, 1 ],
[14, 350, 0 ],
[350, 573, 0 ],
[15, 351, 1 ],
[352, 15, 0 ],
[15, 335, 1 ],
[232, 227, 0 ],
[565, 544, 1 ],
[235, 567, 1 ],
[567, 286, 0 ],
[353, 519, 0 ],
[354, 353, 0 ],
[355, 354, 0 ],
[354, 356, 0 ],
[357, 358, 0 ],
[574, 359, 0 ],
[235, 575, 0 ],
[167, 361, 0 ],
[528, 362, 0 ],
[363, 344, 0 ],
[259, 364, 1 ],
[54, 56, 0 ],
[365, 364, 0 ],
[231, 366, 0 ],
[30, 367, 0 ],
[61, 367, 1 ],
[254, 368, 0 ],
[254, 369, 0 ],
[254, 370, 0 ],
[99, 358, 0 ],
[354, 519, 0 ],
[571, 371, 0 ],
[207, 372, 0 ],
[57, 373, 0 ],
[209, 374, 0 ],
[375, 376, 0 ],
[376, 377, 0 ],
[16, 49, 0 ],
[318, 377, 0 ],
[378, 297, 0 ],
[562, 379, 0 ],
[576, 563, 0 ],
[576, 381, 0 ],
[577, 576, 1 ],
[244, 383, 0 ],
[244, 306, 1 ],
[383, 306, 1 ],
[380, 306, 0 ],
[252, 225, 0 ],
[220, 76, 0 ],
[542, 384, 0 ],
[385, 384, 0 ],
[542, 385, 0 ],
[386, 385, 0 ],
[387, 578, 0 ],
[332, 388, 1 ],
[382, 332, 1 ],
[382, 388, 0 ],
[579, 578, 0 ],
[577, 387, 1 ],
[144, 390, 0 ],
[37, 49, 0 ],
[391, 233, 0 ],
[392, 310, 0 ],
[260, 393, 0 ],
[394, 230, 0 ],
[395, 282, 1 ],
[395, 244, 0 ],
[25, 396, 1 ],
[81, 74, 0 ],
[278, 80, 1 ],
[81, 278, 1 ],
[569, 570, 0 ],
[397, 552, 0 ],
[542, 398, 0 ],
[398, 385, 0 ],
[399, 499, 0 ],
[83, 399, 0 ],
[498, 400, 0 ],
[518, 239, 1 ],
[575, 543, 0 ],
[401, 360, 0 ],
[580, 581, 0 ],
[401, 402, 0 ],
[403, 231, 0 ],
[189, 360, 1 ],
[234, 404, 0 ],
[235, 404, 1 ],
[235, 580, 0 ],
[216, 259, 0 ],
[405, 259, 0 ],
[405, 318, 0 ],
[406, 230, 0 ],
[542, 407, 0 ],
[23, 408, 0 ],
[577, 348, 0 ],
[562, 564, 1 ],
[582, 507, 0 ],
[27, 410, 0 ],
[501, 27, 0 ],
[27, 411, 0 ],
[411, 410, 0 ],
[403, 360, 0 ],
[412, 360, 0 ],
[326, 413, 0 ],
[414, 413, 0 ],
[6, 297, 0 ],
[554, 580, 1 ],
[262, 401, 1 ],
[499, 556, 1 ],
[224, 229, 0 ],
[583, 507, 0 ],
[415, 307, 0 ],
[416, 507, 0 ],
[284, 561, 0 ],
[543, 417, 0 ],
[418, 506, 0 ],
[220, 157, 0 ],
[295, 419, 0 ],
[295, 420, 0 ],
[541, 62, 0 ],
[52, 421, 0 ],
[60, 160, 0 ],
[535, 161, 0 ],
[267, 282, 0 ],
[52, 365, 0 ],
[28, 27, 0 ],
[30, 201, 1 ],
[422, 81, 0 ],
[119, 425, 0 ],
[423, 425, 0 ],
[424, 425, 0 ],
[426, 428, 0 ],
[427, 428, 0 ],
[19, 428, 1 ],
[45, 429, 0 ],
[44, 429, 0 ],
[505, 429, 0 ],
[231, 431, 1 ],
[190, 431, 1 ],
[430, 431, 0 ],
[286, 433, 0 ],
[432, 433, 0 ],
[506, 433, 0 ],
[23, 434, 0 ],
[400, 434, 0 ],
[500, 434, 0 ],
[32, 436, 0 ],
[435, 436, 0 ],
[78, 436, 1 ],
[86, 438, 1 ],
[437, 438, 0 ],
[221, 438, 0 ],
[207, 439, 0 ],
[516, 439, 0 ],
[513, 439, 0 ],
[181, 441, 1 ],
[440, 441, 0 ],
[504, 441, 1 ],
[135, 442, 0 ],
[109, 442, 0 ],
[112, 442, 0 ],
[113, 443, 0 ],
[132, 443, 0 ],
[107, 443, 0 ],
[444, 445, 0 ],
[112, 445, 0 ],
[109, 445, 0 ],
[119, 447, 1 ],
[100, 447, 1 ],
[446, 447, 0 ],
[124, 448, 0 ],
[125, 448, 0 ],
[131, 448, 0 ],
[449, 450, 0 ],
[173, 450, 0 ],
[184, 450, 0 ],
[144, 451, 0 ],
[140, 451, 0 ],
[514, 451, 0 ],
[537, 585, 1 ],
[141, 585, 0 ],
[584, 585, 0 ],
[522, 454, 0 ],
[144, 454, 0 ],
[453, 454, 0 ],
[199, 456, 0 ],
[140, 456, 0 ],
[455, 456, 0 ],
[537, 456, 0 ],
[538, 457, 0 ],
[153, 457, 0 ],
[176, 457, 0 ],
[524, 459, 0 ],
[458, 459, 0 ],
[134, 459, 0 ],
[460, 461, 0 ],
[150, 461, 0 ],
[149, 461, 0 ],
[521, 463, 0 ],
[462, 463, 0 ],
[538, 463, 0 ],
[110, 464, 0 ],
[90, 464, 0 ],
[165, 464, 0 ],
[458, 465, 0 ],
[134, 465, 0 ],
[524, 465, 0 ],
[466, 467, 0 ],
[110, 467, 0 ],
[165, 467, 0 ],
[468, 469, 0 ],
[541, 469, 0 ],
[490, 469, 0 ],
[263, 471, 0 ],
[470, 471, 0 ],
[534, 471, 0 ],
[136, 472, 0 ],
[110, 472, 0 ],
[251, 472, 0 ],
[226, 474, 0 ],
[473, 474, 0 ],
[257, 474, 0 ],
[6, 474, 1 ],
[299, 475, 1 ],
[3, 475, 0 ],
[210, 475, 0 ],
[297, 476, 0 ],
[296, 476, 0 ],
[295, 476, 0 ],
[313, 478, 1 ],
[477, 478, 0 ],
[245, 478, 0 ],
[479, 481, 0 ],
[565, 481, 0 ],
[480, 481, 0 ],
[415, 482, 0 ],
[56, 482, 0 ],
[409, 482, 0 ],
[483, 484, 0 ],
[3, 484, 0 ],
[301, 484, 0 ],
[233, 485, 0 ],
[392, 485, 0 ],
[391, 485, 0 ],
[579, 488, 0 ],
[486, 488, 0 ],
[487, 488, 0 ],
[270, 489, 0 ],
[331, 489, 0 ],
[396, 489, 1 ],
[519, 253, 0 ],
[382, 349, 1 ],
[349, 351, 0 ],
[459, 465, 0 ],
[549, 550, 0 ],
[550, 551, 0 ],
[194, 195, 0 ],
[247, 248, 0 ],
[2, 294, 0 ],
[549, 551, 0 ],
[54, 365, 0 ],
[131, 265, 0 ],
[91, 92, 0 ],
[247, 249, 0 ],
[186, 191, 0 ],
[129, 173, 0 ],
[96, 202, 0 ],
[53, 320, 0 ],
[24, 396, 0 ],
[133, 156, 0 ],
[442, 452, 0 ],
[445, 452, 0 ],
[247, 250, 0 ],
[187, 195, 0 ],
[216, 236, 0 ],
[244, 389, 0 ],
[394, 406, 0 ],
[442, 445, 0 ],
[442, 444, 0 ],
[198, 472, 0 ],
[464, 467, 0 ],
[198, 251, 0 ],
[112, 143, 0 ],
[2, 490, 0 ],
[5, 491, 0 ],
[10, 492, 0 ],
[12, 493, 0 ],
[13, 494, 0 ],
[15, 495, 0 ],
[18, 496, 0 ],
[20, 497, 0 ],
[22, 498, 0 ],
[24, 499, 0 ],
[26, 500, 0 ],
[30, 501, 0 ],
[32, 502, 0 ],
[37, 503, 0 ],
[42, 504, 0 ],
[46, 505, 0 ],
[52, 506, 0 ],
[56, 507, 0 ],
[61, 508, 0 ],
[68, 509, 0 ],
[69, 510, 0 ],
[74, 511, 0 ],
[78, 512, 0 ],
[86, 513, 0 ],
[87, 514, 0 ],
[94, 515, 0 ],
[95, 516, 0 ],
[96, 517, 0 ],
[99, 518, 0 ],
[100, 519, 0 ],
[104, 520, 0 ],
[105, 521, 0 ],
[106, 522, 0 ],
[107, 523, 0 ],
[117, 524, 0 ],
[120, 525, 0 ],
[123, 526, 0 ],
[124, 527, 0 ],
[125, 528, 0 ],
[128, 529, 0 ],
[129, 530, 0 ],
[138, 531, 0 ],
[143, 532, 0 ],
[156, 533, 0 ],
[157, 534, 0 ],
[159, 535, 0 ],
[160, 536, 0 ],
[165, 537, 0 ],
[184, 538, 0 ],
[191, 539, 0 ],
[195, 540, 0 ],
[201, 541, 0 ],
[220, 542, 0 ],
[231, 543, 0 ],
[232, 544, 0 ],
[233, 545, 0 ],
[236, 546, 0 ],
[245, 547, 0 ],
[246, 548, 0 ],
[248, 549, 0 ],
[249, 550, 0 ],
[250, 551, 0 ],
[259, 552, 0 ],
[261, 553, 0 ],
[262, 554, 0 ],
[265, 555, 0 ],
[270, 556, 0 ],
[277, 557, 0 ],
[279, 558, 0 ],
[280, 559, 0 ],
[290, 560, 0 ],
[301, 561, 0 ],
[305, 562, 0 ],
[306, 563, 0 ],
[310, 564, 0 ],
[313, 565, 0 ],
[315, 566, 0 ],
[320, 567, 0 ],
[330, 568, 0 ],
[332, 569, 0 ],
[334, 570, 0 ],
[336, 571, 0 ],
[349, 572, 0 ],
[351, 573, 0 ],
[358, 574, 0 ],
[360, 575, 0 ],
[380, 576, 0 ],
[382, 577, 0 ],
[383, 578, 0 ],
[389, 579, 0 ],
[401, 580, 0 ],
[402, 581, 0 ],
[409, 582, 0 ],
[415, 583, 0 ],
[444, 584, 0 ],
[452, 585, 0 ]
])
ppc["parameters"] = {
"x_trans_sg": 0.003,
"x_trans_fm": 0.001,
"x_trans_fl": 0.001,
"d_l": 1e-3,
"d_l_perturb": 1e-5,
"w_1_ij": 1,
"w_2_ij": 1,
"w_3_ij": 1,
"w_4_ij": 1,
"b_r": 238,
"b_c": 248 }
return ppc | [
"numpy.array"
] | [((115, 112642), 'numpy.array', 'array', (['[[586, 3, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [589, 2, 0, 0, 0, 0, \n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [590, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, \n 0, 1.1, 0.9], [593, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [594,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [595, 2, 0, 0, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [597, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, \n 1.1, 0.9], [598, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [599, 2,\n 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [600, 2, 0, 0, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [601, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [602, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [603, 2, 0, \n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [607, 2, 0, 0, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [608, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [609, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [610, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [612, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [613, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [614, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [616, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [617, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [618, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [619, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [621, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [623, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [624, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [628, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [629, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [631, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [632, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [637, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [638, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [639, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [640, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [641, 2, 0, 0, 0, 0, 
0, 1.0, 0, 220.0, 0, 1.1, 0.9], [642, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [643, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [646, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [647, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [650, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [652, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [655, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [657, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [658, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [661, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [662, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [663, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [666, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [668, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [670, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [672, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [675, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [676, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [678, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [679, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [681, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [683, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [687, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [689, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [691, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [693, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [694, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [695, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [696, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [697, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [698, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [701, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [702, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [704, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [705, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [707, 2, 0, 0, 0, 0, 0, 1.0, 0, 
380.0, 0, 1.1, 0.9], [708, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [711, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [713, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [714, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [716, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [717, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [719, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [722, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [723, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [724, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [725, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [727, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [728, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [730, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [731, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [732, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [733, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [735, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [737, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [738, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [739, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [741, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [742, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [743, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [745, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [746, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [747, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [748, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [749, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [750, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [753, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [758, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [760, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [761, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [762, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [763, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 
1.1, 0.9], [765, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [767, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [769, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [771, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [772, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [774, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [776, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [777, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [778, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [781, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [784, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [785, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [787, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [788, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [789, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [790, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [791, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [792, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [795, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [798, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [800, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [801, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [802, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [805, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [806, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [808, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [809, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [810, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [811, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [814, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [815, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [816, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [817, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [818, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [821, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [822, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], 
[825, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [826, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [829, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [830, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [833, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [834, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [835, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [836, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [837, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [839, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [840, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [841, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [842, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [843, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [844, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [845, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [847, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [848, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [849, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [850, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [851, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [852, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [853, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [854, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [855, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [856, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [857, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [858, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [859, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [860, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [862, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [863, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [864, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [865, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [867, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [869, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [870, 2, 
0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [872, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [873, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [874, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [875, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [877, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [881, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [882, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [883, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [886, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [889, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [890, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [893, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [894, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [895, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [896, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [898, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [900, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [902, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [903, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [905, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [907, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [909, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [911, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [913, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [914, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [915, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [916, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [917, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [918, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [919, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [920, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [921, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [922, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [923, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [925, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [928, 2, 0, 0, \n 
0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [931, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [934, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [935, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [936, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [937, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [939, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [940, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [942, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [943, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [944, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [945, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [946, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [948, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [950, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [951, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [952, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [956, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [957, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [958, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [959, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [960, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [963, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [965, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [966, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [967, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [968, 2, 0, 0, 0, 0, 0, 0.99951, 0, 220.0, 0, 1.1,\n 0.9], [969, 2, 0, 0, 0, 0, 0, 0.99951, 0, 220.0, 0, 1.1, 0.9], [971, 2,\n 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [973, 2, 0, 0, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [976, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [977, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [978, 2, 0, \n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [980, 2, 0, 0, 0, 0, 0, 1.0, 0,\n 220.0, 0, 1.1, 0.9], [981, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [982, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [983, 2, 0, 0, \n 0, 0, 
0, 1.0, 0, 220.0, 0, 1.1, 0.9], [984, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [985, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [986, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [987, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [988, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [990, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [993, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [994, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [995, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [996, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [997, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [998, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [999, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [1000, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [1002, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1003, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1006, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1007, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1008, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1010, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1011, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1012, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1014, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1018, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1019, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1023, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1025, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1026, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1028, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1029, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1030, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1031, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1032, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1033, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1034, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1035, 2, 0,\n 0, 0, 
0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1036, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1037, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1038, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1039, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1041, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1042, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1044, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1046, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1047, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1048, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1049, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1050, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1051, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1052, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1053, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1054, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1055, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1056, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1057, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1058, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1059, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1060, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1061, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1062, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1063, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1064, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1065, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1066, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1067, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1068, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1069, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1070, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1071, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1072, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1073, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1074, 2, 
0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1075, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1077, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1078, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1079, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1080, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1081, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1082, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1083, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1084, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1085, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1086, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1087, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1088, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1089, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1090, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1091, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1092, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1093, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1094, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1095, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1096, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1097, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1098, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1099, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1100, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1101, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1102, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1103, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1104, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1105, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1106, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1107, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1108, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1109, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1110, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], 
[1111, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1112, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1113, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1114, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1115, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1116, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1117, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1118, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1119, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1120, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1121, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1122, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1123, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1124, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1125, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1126, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1127, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1128, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1129, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1130, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1131, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1132, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1133, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1134, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1135, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1136, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1137, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1138, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1139, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1140, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1141, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1142, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1143, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1144, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1145, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1146, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 
1.1, 0.9], [1147, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1148, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1149, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1150, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1151, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1152, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1153, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1154, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1155, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1156, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1157, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1158, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1159, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1160, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1161, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1162, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1164, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1166, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1167, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1168, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1169, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1170, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1171, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1172, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1173, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1174, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1175, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1176, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1177, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1178, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1179, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1180, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1181, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1182, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1183, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1184, 2, 0, 0, 0, 0, 0, 1.0, 0, 
220.0, 0, 1.1, 0.9], [1185, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1186, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1187, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1188, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1189, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1190, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1191, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1192, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1193, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1194, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1195, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1196, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1197, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1198, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1199, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1200, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1201, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1202, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1203, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1204, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1205, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1206, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1207, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1208, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1209, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1210, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1211, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1212, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1213, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1214, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1215, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1216, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1217, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1218, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1219, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1220, 2, 0, 0, 0, 0, 0, 
1.0, 0, 380.0, 0, 1.1, 0.9], [1221, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1222, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1223, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1224, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1225, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1226, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1227, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1228, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1229, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1230, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1231, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1232, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1233, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1234, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1235, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1236, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1237, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1238, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1239, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1240, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1241, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1242, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1243, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1244, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1245, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1246, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1247, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1248, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1249, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1250, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1251, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1252, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1253, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1254, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1255, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1256, 2, 0, 0, 
0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1257, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1258, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1259, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1260, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1261, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1262, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1263, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1264, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1265, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1266, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1267, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1270, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1271, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1272, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1273, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1274, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1275, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1276, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1277, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1278, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1279, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1280, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1282, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1283, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1284, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1285, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1286, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1287, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1288, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1289, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1290, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1291, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1292, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1293, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1294, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1295, 
2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1296, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1297, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1300, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1301, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1302, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1303, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1304, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1305, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1306, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1307, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1308, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1309, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1310, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1311, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1312, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1313, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1314, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1315, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1316, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1317, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1318, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1319, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1320, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1321, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1322, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1323, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1324, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1325, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1326, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1327, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1328, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1329, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1330, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1331, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1332, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], 
[1333, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1334, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1336, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1337, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1338, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1339, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1340, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1341, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1342, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1343, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1344, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1345, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1346, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1348, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1349, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1350, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1351, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1352, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1355, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1356, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1357, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1358, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1359, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1360, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1361, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1362, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1363, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1364, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1365, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1366, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1367, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1368, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1369, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1370, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1371, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1372, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 
1.1,\n 0.9], [1373, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1374, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1375, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1376, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1377, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1378, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1379, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1380, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1381, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1382, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1383, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1384, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1385, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1386, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1387, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1388, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1389, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1390, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1391, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1392, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1393, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1394, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1395, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1396, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1397, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1398, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1399, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1400, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1401, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1402, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1403, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1404, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1405, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1406, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1407, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1408, 2, 0, 0, 0, 0, 0, 1.0, 0, 
220.0, 0, 1.1,\n 0.9], [1409, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1410, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1411, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1412, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1413, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1414, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1415, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1416, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1417, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1418, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1419, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1421, 2, 0, 0, 0, 0, 0, 0.99951, 0, 220.0, 0, \n 1.1, 0.9], [1422, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1423,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1424, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1425, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1426, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1427,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1428, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1431, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1432, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1433,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1434, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1435, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1436, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1437,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1438, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1439, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1440, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1441,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1442, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1443, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1444, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1445,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1446, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1447, 2, 0, 0, 0, 0, 0, 
1.0, 0, 220.0, 0,\n 1.1, 0.9], [1448, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1449,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1450, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1451, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1452, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1453,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1454, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1455, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1456, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1457,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1458, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1459, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1460, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1461,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1462, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1463, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1464, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1465,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1466, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1467, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1468, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1469,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1470, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1471, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1472, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1473,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1474, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1475, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1476, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1477,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1479, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1480, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1481, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1482,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1483, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1484, 2, 0, 0, 0, 0, 0, 
1.0, 0, 220.0, 0,\n 1.1, 0.9], [1485, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1486,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1487, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1488, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1489, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1490,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1491, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1492, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1493, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1494,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1495, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1497, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1498, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1500,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1501, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1502, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1503, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1504,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1505, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1506, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1507, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1508,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1510, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1511, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1512, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1513,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1514, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1516, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1517, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1518,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1519, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1520, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1521, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1522,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1523, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1524, 2, 0, 0, 0, 0, 0, 
1.0, 0, 380.0, 0,\n 1.1, 0.9], [1525, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1526,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1527, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1528, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1529, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1530,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1531, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1532, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1534, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1535,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1536, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1537, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1538, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1539,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1540, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1541, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1542, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1543,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1544, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1545, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1546, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1547,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1548, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1549, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1550, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1551,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1552, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1553, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1554, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1555,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1556, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1557, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1558, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1559,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1560, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1561, 2, 0, 0, 0, 0, 0, 
1.0, 0, 380.0, 0,\n 1.1, 0.9], [1562, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1563,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1564, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1565, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1566, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1567,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1568, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1569, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1570, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1571,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1572, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1573, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1574, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1575,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1576, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1577, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1578, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1579,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1580, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1581, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1582, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1583,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1584, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1585, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1586, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1587,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1588, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1589, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1590, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1591,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1592, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1593, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1594, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1595,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1596, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1597, 2, 0, 0, 0, 0, 0, 
1.0, 0, 380.0, 0,\n 1.1, 0.9], [1598, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1599,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1600, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1601, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1602, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1603,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1604, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1605, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1606, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1607,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1608, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1609, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1610, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1611,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1612, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1613, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1614, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1615,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1616, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1617, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1618, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1619,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1620, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1621, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1622, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1623,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1624, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1625, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1626, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1627,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1628, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1629, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1630, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1631,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1632, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1633, 2, 0, 0, 0, 0, 0, 
1.0, 0, 220.0, 0,\n 1.1, 0.9], [1634, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1635,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1636, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1637, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1638, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1639,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1640, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1641, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1642, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1643,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1644, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1645, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1646, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1647,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1648, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1649, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1650, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1651,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1652, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1653, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1654, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1655,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1656, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1657, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1658, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1659,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1660, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1661, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1662, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1663,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1664, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1665, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1666, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1667,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1668, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1669, 2, 0, 0, 0, 0, 0, 
1.0, 0, 380.0, 0,\n 1.1, 0.9], [1670, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1671,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1672, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1673, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1674, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1675,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1676, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1677, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1678, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1679,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1680, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1681, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1682, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1683,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1684, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1685, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1686, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1687,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1688, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1689, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1690, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1691,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1692, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1693, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1694, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1695,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1696, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1697, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1698, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1699,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1700, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1701, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1702, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1703,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1704, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1705, 2, 0, 0, 0, 0, 0, 
1.0, 0, 380.0, 0,\n 1.1, 0.9], [1706, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1707,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1708, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1709, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1710, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1711,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1712, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1713, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1714, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1715,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1716, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1717, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1718, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1719,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1720, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1721, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1722, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1723,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1724, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1725, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1726, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1727,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1728, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1729, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1730, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1731,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1732, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1733, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1734, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1735,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1736, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1737, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1738, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1739,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1740, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1741, 2, 0, 0, 0, 0, 0, 
1.0, 0, 220.0, 0,\n 1.1, 0.9], [1742, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1743,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1744, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1745, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1746, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1747,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1748, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1749, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1750, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1751,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1752, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1753, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1754, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1755,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1756, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1757, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1758, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1759,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1760, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1761, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1762, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1763,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1764, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1765, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1766, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1767,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1768, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1769, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1770, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1771,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1772, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1773, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1774, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1775,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1776, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1777, 2, 0, 0, 0, 0, 0, 
1.0, 0, 220.0, 0,\n 1.1, 0.9], [1778, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1779,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1780, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1781, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1782, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1783,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1784, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1785, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1786, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1787,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1788, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1789, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1790, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1791,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1792, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1793, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1794, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1795,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1796, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1797, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1798, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1799,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1800, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1801, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1802, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1803,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1804, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1805, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1806, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1807,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1808, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1809, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1810, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1811,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1812, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1813, 2, 0, 0, 0, 0, 0, 
1.0, 0, 220.0, 0,\n 1.1, 0.9], [1814, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1815,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1816, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1817, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1818, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1819,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1820, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1821, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1822, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1823,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1824, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1825, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1826, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1827,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1828, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1829, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1830, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1831,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1832, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1833, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1834, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1836,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1837, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1838, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1839, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1840,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1841, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1842, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1843, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1844,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1845, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1846, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1847, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1848,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1849, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1850, 2, 0, 0, 0, 0, 0, 
1.0, 0, 380.0, 0,\n 1.1, 0.9], [1851, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1852,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1853, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1854, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1855, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1856,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1857, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1858, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1860, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1861,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1862, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1863, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1864, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1865,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1866, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1867, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1868, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1869,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1870, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1871, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1872, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1873,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1874, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1875, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1876, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1877,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1878, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1879, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1880, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1881,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1882, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1883, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1884, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1885,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1886, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1887, 2, 0, 0, 0, 0, 0, 
1.0, 0, 380.0, 0,\n 1.1, 0.9], [1888, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1889,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1890, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1891, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1892, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1893,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1894, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1895, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1896, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1897,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1898, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1899, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1900, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1901,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1902, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1903, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1904, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1905,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1906, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1907, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [1908, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1909,\n 2, 0, 0, 0, 0, 0, 0.99951, 0, 220.0, 0, 1.1, 0.9], [1910, 2, 0, 0, 0, 0,\n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1911, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0,\n 0, 1.1, 0.9], [1912, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 1913, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1914, 2, 0, 0, 0,\n 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1915, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [1916, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [1917, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1918, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1919, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1920, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1921, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1922, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1923, 2, 0, 0, 0, 
0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1924, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1925, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1926, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1927, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1928, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1929, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1930, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1931, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1932, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1933, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1934, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1935, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1936, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1937, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1938, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1939, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1940, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1941, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1942, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1943, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1944, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1945, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1946, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1947, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1948, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1949, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1950, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1951, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1952, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1953, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1954, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1955, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1956, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1957, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1958, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1959, 2, 
0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1960, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1961, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1962, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1963, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1964, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1965, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1966, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1967, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1968, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1969, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1970, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1971, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1972, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1973, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1974, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1975, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1976, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1977, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1978, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1979, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1980, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1981, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1982, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1983, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1984, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1985, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1986, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1987, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1988, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1989, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1990, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1991, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1992, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1993, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1994, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], 
[1995, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1996, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1997, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1998, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1999, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [2000, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [2001, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [2002, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [2003, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [2004, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [2005, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [2006, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [2007, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [2008, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1, 1, 325.748587, 65.149717, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [2, 1, 0, 0, 0, 0, 0, 1.000012, 0, 380.0, 0, 1.1, 0.9], [3, 1, \n 57.094965, 11.418993, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [4, 1, \n 93.894564, 18.778913, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [5, 1, 0, 0,\n 0, 0, 0, 1.00026, 0, 380.0, 0, 1.1, 0.9], [6, 1, 275.713362, 55.142672,\n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [7, 1, 207.784304, 41.556861, 0, \n 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [8, 1, 173.85906, 34.771812, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [9, 1, 117.578165, 23.515633, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [10, 1, 0, 0, 0, 0, 0, 1.000518, 0, 380.0, 0, \n 1.1, 0.9], [11, 1, 103.018516, 20.603703, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [12, 1, 0, 0, 0, 0, 0, 1.00057, 0, 380.0, 0, 1.1, 0.9], [13,\n 1, 0, 0, 0, 0, 0, 1.000425, 0, 380.0, 0, 1.1, 0.9], [14, 1, 246.382498,\n 49.2765, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [15, 1, 0, 0, 0, 0, 0, \n 1.000581, 0, 380.0, 0, 1.1, 0.9], [16, 1, 420.196361, 84.039272, 0, 0, \n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [17, 1, 98.967281, 19.793456, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [18, 1, 0, 0, 0, 0, 0, 1.002692, 0, 380.0,\n 0, 1.1, 0.9], [19, 1, 244.510845, 48.902169, 0, 
0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [20, 1, 0, 0, 0, 0, 0, 0.998777, 0, 380.0, 0, 1.1, 0.9], [21,\n 1, 1051.434139, 210.286828, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [22, \n 1, 0, 0, 0, 0, 0, 1.000461, 0, 380.0, 0, 1.1, 0.9], [23, 1, 137.668379,\n 27.533676, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [24, 1, 0, 0, 0, 0, 0,\n 0.999996, 0, 380.0, 0, 1.1, 0.9], [25, 1, 65.847745, 13.169549, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [26, 1, 0, 0, 0, 0, 0, 1.000752, 0, 380.0,\n 0, 1.1, 0.9], [27, 1, 80.82993, 16.165986, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [28, 1, 238.828227, 47.765645, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [29, 1, 87.72658, 17.545316, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [30, 1, 0, 0, 0, 0, 0, 0.99974, 0, 380.0, 0, 1.1, 0.9], [31, 1, \n 172.643645, 34.528729, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [32, 1, 0,\n 0, 0, 0, 0, 0.999876, 0, 380.0, 0, 1.1, 0.9], [33, 1, 216.462687, \n 43.292537, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [34, 1, 42.945181, \n 8.589036, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [35, 1, 2.843198, \n 0.56864, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [36, 1, 9.41342, \n 1.882684, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [37, 1, 0, 0, 0, 0, 0, \n 1.003518, 0, 380.0, 0, 1.1, 0.9], [38, 1, 226.790299, 45.35806, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [39, 1, 74.262139, 14.852428, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [40, 1, 77.569126, 15.513825, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [41, 1, 83.36923, 16.673846, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [42, 1, 0, 0, 0, 0, 0, 1.001382, 0, 380.0, 0, 1.1,\n 0.9], [43, 1, 127.850472, 25.570094, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [44, 1, 163.565722, 32.713144, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [45, 1, 86.824343, 17.364869, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [46, 1, 0, 0, 0, 0, 0, 1.000154, 0, 380.0, 0, 1.1, 0.9], [47, 1, \n 377.519214, 75.503843, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [48, 1, \n 259.494186, 51.898837, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [49, 1, \n 65.638937, 
13.127787, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [50, 1, \n 95.579153, 19.115831, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [51, 1, \n 123.864343, 24.772869, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [52, 1, 0,\n 0, 0, 0, 0, 1.000109, 0, 380.0, 0, 1.1, 0.9], [53, 1, 187.944302, \n 37.58886, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [54, 1, 95.486648, \n 19.09733, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [55, 1, 93.644497, \n 18.728899, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [56, 1, 0, 0, 0, 0, 0,\n 0.999658, 0, 380.0, 0, 1.1, 0.9], [57, 1, 111.782276, 22.356455, 0, 0, \n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [58, 1, 256.054306, 51.210861, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [59, 1, 73.130675, 14.626135, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [60, 1, 38.556521, 7.711304, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [61, 1, 0, 0, 0, 0, 0, 0.999552, 0, 380.0, 0, 1.1,\n 0.9], [62, 1, 293.946406, 58.789281, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [63, 1, 173.514047, 34.702809, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [64, 1, 1841.335671, 368.267134, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [65, 1, 6.135361, 1.227072, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],\n [66, 1, 194.668019, 38.933604, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 67, 1, 417.595693, 83.519139, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [68,\n 1, 0, 0, 0, 0, 0, 0.998236, 0, 380.0, 0, 1.1, 0.9], [69, 1, 0, 0, 0, 0,\n 0, 0.999783, 0, 380.0, 0, 1.1, 0.9], [70, 1, 789.995804, 157.999161, 0,\n 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [71, 1, 183.584849, 36.71697, 0, 0, \n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [72, 1, 300.686791, 60.137358, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [73, 1, 96.261172, 19.252234, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [74, 1, 0, 0, 0, 0, 0, 1.001507, 0, 380.0, 0, \n 1.1, 0.9], [75, 1, 119.975301, 23.99506, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [76, 1, 115.802488, 23.160498, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [77, 1, 112.162624, 22.432525, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [78, 1, 0, 0, 0, 0, 0, 1.000176, 
0, 380.0, 0, 1.1, 0.9], [79, 1, \n 115.816553, 23.163311, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [80, 1, \n 123.01505, 24.60301, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [81, 1, \n 138.867238, 27.773448, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [82, 1, \n 4.621583, 0.924317, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [83, 1, \n 309.217998, 61.8436, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [84, 1, \n 30.440604, 6.088121, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [85, 1, \n 105.562105, 21.112421, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [86, 1, 0,\n 0, 0, 0, 0, 1.00001, 0, 380.0, 0, 1.1, 0.9], [87, 1, 0, 0, 0, 0, 0, \n 1.000289, 0, 380.0, 0, 1.1, 0.9], [88, 1, 85.202609, 17.040522, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [89, 1, 105.706878, 21.141376, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [90, 1, 122.086777, 24.417355, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [91, 1, 42.406867, 8.481373, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [92, 1, 46.280769, 9.256154, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [93, 1, 45.392163, 9.078433, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [94, 1, 0, 0, 0, 0, 0, 1.00115, 0, 380.0, 0, 1.1, \n 0.9], [95, 1, 0, 0, 0, 0, 0, 1.0007, 0, 380.0, 0, 1.1, 0.9], [96, 1, 0,\n 0, 0, 0, 0, 0.999998, 0, 380.0, 0, 1.1, 0.9], [97, 1, 6.384069, \n 1.276814, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [98, 1, 117.377345, \n 23.475469, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [99, 1, 0, 0, 0, 0, 0,\n 1.000519, 0, 380.0, 0, 1.1, 0.9], [100, 1, 0, 0, 0, 0, 0, 1.002126, 0, \n 380.0, 0, 1.1, 0.9], [101, 1, 83.11513, 16.623026, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [102, 1, 160.873209, 32.174642, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [103, 1, 188.09191, 37.618382, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [104, 1, 0, 0, 0, 0, 0, 1.000066, 0, 380.0, 0, 1.1,\n 0.9], [105, 1, 0, 0, 0, 0, 0, 1.000146, 0, 380.0, 0, 1.1, 0.9], [106, 1,\n 0, 0, 0, 0, 0, 0.999963, 0, 380.0, 0, 1.1, 0.9], [107, 1, 0, 0, 0, 0, 0,\n 1.000005, 0, 380.0, 0, 1.1, 0.9], [108, 1, 132.675911, 26.535182, 0, 0,\n 0, 1.0, 0, 
380.0, 0, 1.1, 0.9], [109, 1, 53.718212, 10.743642, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [110, 1, 69.728393, 13.945679, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [111, 1, 122.880269, 24.576054, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [112, 1, 62.192906, 12.438581, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [113, 1, 98.03855, 19.60771, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [114, 1, 144.38681, 28.877362, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [115, 1, 93.077688, 18.615538, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [116, 1, 155.75271, 31.150542, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [117, 1, 0, 0, 0, 0, 0, 1.000162, 0, 380.0, 0, 1.1,\n 0.9], [118, 1, 241.160786, 48.232157, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [119, 1, 46.746863, 9.349373, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [120, 1, 0, 0, 0, 0, 0, 1.00083, 0, 380.0, 0, 1.1, 0.9], [121, 1, \n 63.482261, 12.696452, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [122, 1, \n 55.578075, 11.115615, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [123, 1, 0,\n 0, 0, 0, 0, 1.000079, 0, 380.0, 0, 1.1, 0.9], [124, 1, 0, 0, 0, 0, 0, \n 1.000003, 0, 380.0, 0, 1.1, 0.9], [125, 1, 0, 0, 0, 0, 0, 0.999463, 0, \n 380.0, 0, 1.1, 0.9], [126, 1, 291.397229, 58.279446, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [127, 1, 225.280714, 45.056143, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [128, 1, 0, 0, 0, 0, 0, 1.000968, 0, 380.0, 0, 1.1,\n 0.9], [129, 1, 0, 0, 0, 0, 0, 0.999994, 0, 380.0, 0, 1.1, 0.9], [130, 1,\n 310.621123, 62.124225, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [131, 1, \n 68.584875, 13.716975, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [132, 1, \n 178.584646, 35.716929, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [133, 1, \n 59.81886, 11.963772, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [134, 1, \n 59.573903, 11.914781, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [135, 1, \n 59.652888, 11.930578, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [136, 1, \n 57.787513, 11.557503, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [137, 1, \n 46.224691, 9.244938, 0, 0, 0, 1.0, 0, 
220.0, 0, 1.1, 0.9], [138, 1, 0, \n 0, 0, 0, 0, 1.000239, 0, 380.0, 0, 1.1, 0.9], [139, 1, 90.549485, \n 18.109897, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [140, 1, 62.618846, \n 12.523769, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [141, 1, 74.19228, \n 14.838456, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [142, 1, 81.637993, \n 16.327599, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [143, 1, 0, 0, 0, 0, 0,\n 0.999985, 0, 380.0, 0, 1.1, 0.9], [144, 1, 74.363771, 14.872754, 0, 0, \n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [145, 1, 216.326177, 43.265235, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [146, 1, 278.885136, 55.777027, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [147, 1, 170.940166, 34.188033, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [148, 1, 241.227956, 48.245591, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [149, 1, 155.517918, 31.103584, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [150, 1, 203.044789, 40.608958, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [151, 1, 47.847194, 9.569439, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [152, 1, 99.325814, 19.865163, 0, 0, 0, 1.0, 0,\n 220.0, 0, 1.1, 0.9], [153, 1, 177.213406, 35.442681, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [154, 1, 182.033335, 36.406667, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [155, 1, 189.603806, 37.920761, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [156, 1, 0, 0, 0, 0, 0, 0.999987, 0, 380.0, 0, 1.1,\n 0.9], [157, 1, 0, 0, 0, 0, 0, 1.001031, 0, 380.0, 0, 1.1, 0.9], [158, 1,\n 49.954288, 9.990858, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [159, 1, 0, \n 0, 0, 0, 0, 1.001191, 0, 380.0, 0, 1.1, 0.9], [160, 1, 0, 0, 0, 0, 0, \n 1.000005, 0, 380.0, 0, 1.1, 0.9], [161, 1, 155.079459, 31.015892, 0, 0,\n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [162, 1, 231.797832, 46.359566, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [163, 1, 46.357377, 9.271475, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [164, 1, 46.543808, 9.308762, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [165, 1, 0, 0, 0, 0, 0, 1.000008, 0, 380.0, 0, 1.1,\n 0.9], [166, 1, 54.417242, 10.883448, 0, 0, 0, 1.0, 
0, 380.0, 0, 1.1, \n 0.9], [167, 1, 76.551361, 15.310272, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [168, 1, 52.245327, 10.449065, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [169, 1, 178.850819, 35.770164, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [170, 1, 134.391309, 26.878262, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [171, 1, 114.702931, 22.940586, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [172, 1, 56.293074, 11.258615, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [173, 1, 53.776547, 10.755309, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [174, 1, 80.699328, 16.139866, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [175, 1, 53.741302, 10.74826, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [176, 1, 187.268482, 37.453696, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],\n [177, 1, 30.536855, 6.107371, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 178, 1, 161.730672, 32.346134, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 179, 1, 59.592171, 11.918434, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 180, 1, 52.383043, 10.476609, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 181, 1, 39.537212, 7.907442, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [182,\n 1, 1.791054, 0.358211, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [183, 1, \n 536.118855, 107.223771, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [184, 1, \n 0, 0, 0, 0, 0, 0.999412, 0, 380.0, 0, 1.1, 0.9], [185, 1, 114.645917, \n 22.929183, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [186, 1, 61.736231, \n 12.347246, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [187, 1, 36.109408, \n 7.221882, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [188, 1, 53.741302, \n 10.74826, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [189, 1, 197.196893, \n 39.439379, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [190, 1, 260.829785, \n 52.165957, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [191, 1, 0, 0, 0, 0, 0,\n 1.000009, 0, 380.0, 0, 1.1, 0.9], [192, 1, 62.815713, 12.563143, 0, 0, \n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [193, 1, 53.654613, 10.730923, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [194, 1, 37.038638, 7.407728, 0, 0, 0, 1.0,\n 0, 
380.0, 0, 1.1, 0.9], [195, 1, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [196, 1, 51.963051, 10.39261, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [197, 1, 82.328556, 16.465711, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],\n [198, 1, 48.717631, 9.743526, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 199, 1, 62.722328, 12.544466, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 200, 1, 53.742549, 10.74851, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [201,\n 1, 0, 0, 0, 0, 0, 1.000603, 0, 380.0, 0, 1.1, 0.9], [202, 1, 55.070857,\n 11.014171, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [203, 1, 7.256079, \n 1.451216, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [204, 1, 212.674227, \n 42.534845, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [205, 1, 106.346688, \n 21.269338, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [206, 1, 51.038978, \n 10.207796, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [207, 1, 151.767938, \n 30.353588, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [208, 1, 44.689673, \n 8.937935, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [209, 1, 62.103028, \n 12.420606, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [210, 1, 71.344757, \n 14.268951, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [211, 1, 250.721465, \n 50.144293, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [212, 1, 62.839799, \n 12.56796, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [213, 1, 294.578929, \n 58.915786, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [214, 1, 198.21428, \n 39.642856, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [215, 1, 419.133986, \n 83.826797, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [216, 1, 141.326419, \n 28.265284, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [217, 1, 45.286003, \n 9.057201, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [218, 1, 137.965387, \n 27.593077, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [219, 1, 221.727192, \n 44.345438, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [220, 1, 0, 0, 0, 0, 0,\n 0.9995, 0, 380.0, 0, 1.1, 0.9], [221, 1, 126.484966, 25.296993, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [222, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 380.0,\n 0, 1.1, 0.9], [223, 1, 125.354431, 
25.070886, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [224, 1, 145.769935, 29.153987, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [225, 1, 261.73828, 52.347656, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [226, 1, 91.433269, 18.286654, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [227, 1, 113.907309, 22.781462, 0, 0, 0, 1.0, 0, 380.0, 0, \n 1.1, 0.9], [228, 1, 111.682638, 22.336528, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [229, 1, 247.134629, 49.426926, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [230, 1, 59.276997, 11.855399, 0, 0, 0, 1.0, 0, 380.0, 0, \n 1.1, 0.9], [231, 1, 0, 0, 0, 0, 0, 1.0008, 0, 380.0, 0, 1.1, 0.9], [232,\n 1, 0, 0, 0, 0, 0, 0.999985, 0, 380.0, 0, 1.1, 0.9], [233, 1, 0, 0, 0, 0,\n 0, 0.999572, 0, 380.0, 0, 1.1, 0.9], [234, 1, 211.151257, 42.230251, 0,\n 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [235, 1, 68.663575, 13.732715, 0, 0,\n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [236, 1, 0, 0, 0, 0, 0, 0.999972, 0, \n 380.0, 0, 1.1, 0.9], [237, 1, 0.568269, 0.113654, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [238, 1, 77.694084, 15.538817, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [239, 1, 107.344119, 21.468824, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [240, 1, 677.106115, 135.421223, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [241, 1, 501.035004, 100.207001, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [242, 1, 182.435912, 36.487182, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [243, 1, 147.189401, 29.43788, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [244, 1, 175.365238, 35.073048, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [245, 1, 0, 0, 0, 0, 0, 1.001868, 0, 380.0, 0, 1.1,\n 0.9], [246, 1, 0, 0, 0, 0, 0, 1.000314, 0, 380.0, 0, 1.1, 0.9], [247, 1,\n 34.80024, 6.960048, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [248, 1, 0, 0,\n 0, 0, 0, 1.000002, 0, 380.0, 0, 1.1, 0.9], [249, 1, 0, 0, 0, 0, 0, \n 1.000002, 0, 380.0, 0, 1.1, 0.9], [250, 1, 0, 0, 0, 0, 0, 1.000003, 0, \n 380.0, 0, 1.1, 0.9], [251, 1, 86.366303, 17.273261, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [252, 1, 221.490058, 44.298012, 0, 0, 
0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [253, 1, 97.242587, 19.448517, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [254, 1, 31.047944, 6.209589, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [255, 1, 152.691204, 30.538241, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [256, 1, 175.110241, 35.022048, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [257, 1, 84.512076, 16.902415, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [258, 1, 275.414649, 55.08293, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [259, 1, 0, 0, 0, 0, 0, 0.999267, 0, 380.0, 0, 1.1,\n 0.9], [260, 1, 171.407259, 34.281452, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [261, 1, 0, 0, 0, 0, 0, 1.001914, 0, 380.0, 0, 1.1, 0.9], [262, 1,\n 0, 0, 0, 0, 0, 1.000151, 0, 380.0, 0, 1.1, 0.9], [263, 1, 245.883489, \n 49.176698, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [264, 1, 318.309439, \n 63.661888, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [265, 1, 0, 0, 0, 0, 0,\n 1.000004, 0, 380.0, 0, 1.1, 0.9], [266, 1, 153.403945, 30.680789, 0, 0,\n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [267, 1, 194.022708, 38.804542, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [268, 1, 67.469917, 13.493983, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [269, 1, 54.180873, 10.836175, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [270, 1, 0, 0, 0, 0, 0, 1.000003, 0, 380.0,\n 0, 1.1, 0.9], [271, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],\n [272, 1, 1.105489, 0.221098, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [273,\n 1, 151.176192, 30.235238, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [274, 1,\n 293.866602, 58.77332, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [275, 1, \n 55.013432, 11.002686, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [276, 1, \n 214.456344, 42.891269, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [277, 1, 0,\n 0, 0, 0, 0, 0.999517, 0, 380.0, 0, 1.1, 0.9], [278, 1, 167.418237, \n 33.483647, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [279, 1, 0, 0, 0, 0, 0,\n 0.999817, 0, 380.0, 0, 1.1, 0.9], [280, 1, 0, 0, 0, 0, 0, 0.999266, 0, \n 380.0, 0, 1.1, 0.9], [281, 1, 221.13944, 44.227888, 0, 0, 0, 1.0, 0, \n 380.0, 
0, 1.1, 0.9], [282, 1, 312.725416, 62.545083, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [283, 1, 125.353926, 25.070785, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [284, 1, 190.167711, 38.033542, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [285, 1, 84.808128, 16.961626, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [286, 1, 177.744137, 35.548827, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [287, 1, 109.245452, 21.84909, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [288, 1, 70.265914, 14.053183, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [289, 1, 110.507903, 22.101581, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [290, 1, 0, 0, 0, 0, 0, 1.004495, 0, 380.0, 0, 1.1,\n 0.9], [291, 1, 72.723946, 14.544789, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [292, 1, 143.371926, 28.674385, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [293, 1, 126.359101, 25.27182, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [294, 1, 33.672791, 6.734558, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [295, 1, 70.455207, 14.091041, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],\n [296, 1, 200.022498, 40.0045, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 297, 1, 210.22589, 42.045178, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 298, 1, 111.003448, 22.20069, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 299, 1, 107.506102, 21.50122, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 300, 1, 292.875731, 58.575146, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 301, 1, 0, 0, 0, 0, 0, 0.999437, 0, 380.0, 0, 1.1, 0.9], [302, 1, \n 246.711976, 49.342395, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [303, 1, \n 126.718426, 25.343685, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [304, 1, \n 108.813201, 21.76264, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [305, 1, 0,\n 0, 0, 0, 0, 0.99961, 0, 380.0, 0, 1.1, 0.9], [306, 1, 0, 0, 0, 0, 0, \n 1.001597, 0, 380.0, 0, 1.1, 0.9], [307, 1, 129.062569, 25.812514, 0, 0,\n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [308, 1, 159.116952, 31.82339, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [309, 1, 260.337709, 52.067542, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [310, 1, 0, 
0, 0, 0, 0, 0.999901, 0, 380.0,\n 0, 1.1, 0.9], [311, 1, 221.133187, 44.226637, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [312, 1, 99.449747, 19.889949, 0, 0, 0, 1.0, 0, 380.0, 0, \n 1.1, 0.9], [313, 1, 0, 0, 0, 0, 0, 1.000862, 0, 380.0, 0, 1.1, 0.9], [\n 314, 1, 308.032014, 61.606403, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 315, 1, 0, 0, 0, 0, 0, 1.00159, 0, 380.0, 0, 1.1, 0.9], [316, 1, \n 120.690947, 24.138189, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [317, 1, \n 162.50594, 32.501188, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [318, 1, \n 267.057251, 53.41145, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [319, 1, \n 9.567058, 1.913412, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [320, 1, 0, 0,\n 0, 0, 0, 0.999996, 0, 380.0, 0, 1.1, 0.9], [321, 1, 226.312454, \n 45.262491, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [322, 1, 28.811032, \n 5.762206, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [323, 1, 2.997543, \n 0.599509, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [324, 1, 529.89302, \n 105.978604, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [325, 1, 172.614935, \n 34.522987, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [326, 1, 13.995083, \n 2.799017, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [327, 1, 120.437246, \n 24.087449, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [328, 1, 205.243578, \n 41.048716, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [329, 1, 308.704638, \n 61.740928, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [330, 1, 0, 0, 0, 0, 0,\n 1.002351, 0, 380.0, 0, 1.1, 0.9], [331, 1, 24.510098, 4.90202, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [332, 1, 0, 0, 0, 0, 0, 1.00029, 0, 380.0,\n 0, 1.1, 0.9], [333, 1, 257.534094, 51.506819, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [334, 1, 0, 0, 0, 0, 0, 1.000078, 0, 380.0, 0, 1.1, 0.9], [\n 335, 1, 262.832973, 52.566595, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 336, 1, 0, 0, 0, 0, 0, 0.998883, 0, 380.0, 0, 1.1, 0.9], [337, 1, \n 104.54725, 20.90945, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [338, 1, \n 283.756092, 56.751218, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [339, 1, \n 175.499218, 
35.099844, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [340, 1, \n 148.381042, 29.676208, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [341, 1, \n 134.139426, 26.827885, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [342, 1, \n 232.687766, 46.537553, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [343, 1, \n 127.655901, 25.53118, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [344, 1, \n 320.06392, 64.012784, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [345, 1, \n 349.977293, 69.995459, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [346, 1, \n 347.438228, 69.487646, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [347, 1, \n 121.505179, 24.301036, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [348, 1, \n 317.622541, 63.524508, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [349, 1, 0,\n 0, 0, 0, 0, 1.002227, 0, 380.0, 0, 1.1, 0.9], [350, 1, 166.629421, \n 33.325884, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [351, 1, 0, 0, 0, 0, 0,\n 1.002311, 0, 380.0, 0, 1.1, 0.9], [352, 1, 1102.969172, 220.593834, 0, \n 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [353, 1, 3.315894, 0.663179, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [354, 1, 22.527896, 4.505579, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [355, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [356, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 357, 1, 0.05647, 0.011294, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [358, \n 1, 0, 0, 0, 0, 0, 1.001145, 0, 380.0, 0, 1.1, 0.9], [359, 1, 3.297102, \n 0.65942, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [360, 1, 0, 0, 0, 0, 0, \n 1.000743, 0, 380.0, 0, 1.1, 0.9], [361, 1, 84.386359, 16.877272, 0, 0, \n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [362, 1, 240.544798, 48.10896, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [363, 1, 354.159899, 70.83198, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [364, 1, 83.559152, 16.71183, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [365, 1, 74.998776, 14.999755, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [366, 1, 148.647335, 29.729467, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [367, 1, 71.849947, 14.369989, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [368, 1, 
35.380095, 7.076019, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [369, 1, 29.073011, 5.814602, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [370, 1, 85.591776, 17.118355, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [371, 1, 430.66013, 86.132026, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [372, 1, 249.745997, 49.949199, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [373, 1, 168.52878, 33.705756, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [374, 1, 86.418705, 17.283741, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [375, 1, 283.483358, 56.696672, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [376, 1, 310.927852, 62.18557, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [377, 1, 222.495169, 44.499034, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [378, 1, 222.066912, 44.413382, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [379, 1, 76.536953, 15.307391, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [380, 1, 0, 0, 0, 0, 0, 1.001552, 0, 380.0, 0, 1.1,\n 0.9], [381, 1, 255.944236, 51.188847, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [382, 1, 0, 0, 0, 0, 0, 1.000904, 0, 380.0, 0, 1.1, 0.9], [383, 1,\n 0, 0, 0, 0, 0, 0.999115, 0, 380.0, 0, 1.1, 0.9], [384, 1, 90.316363, \n 18.063273, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [385, 1, 113.996976, \n 22.799395, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [386, 1, 91.593152, \n 18.31863, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [387, 1, 186.533196, \n 37.306639, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [388, 1, 1001.680535, \n 200.336107, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [389, 1, 0, 0, 0, 0, \n 0, 0.999916, 0, 380.0, 0, 1.1, 0.9], [390, 1, 82.706419, 16.541284, 0, \n 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [391, 1, 94.209664, 18.841933, 0, 0,\n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [392, 1, 180.787399, 36.15748, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [393, 1, 225.769637, 45.153927, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [394, 1, 81.202848, 16.24057, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [395, 1, 112.54213, 22.508426, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [396, 1, 79.712439, 15.942488, 0, 0, 
0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [397, 1, 639.205952, 127.84119, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [398, 1, 276.853905, 55.370781, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [399, 1, 117.959928, 23.591986, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [400, 1, 62.847073, 12.569415, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [401, 1, 0, 0, 0, 0, 0, 1.000689, 0, 380.0, 0, 1.1,\n 0.9], [402, 1, 0, 0, 0, 0, 0, 1.000468, 0, 380.0, 0, 1.1, 0.9], [403, 1,\n 31.205033, 6.241007, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [404, 1, \n 109.937263, 21.987453, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [405, 1, \n 828.818277, 165.763655, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [406, 1, \n 62.797316, 12.559463, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [407, 1, \n 124.308664, 24.861733, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [408, 1, \n 359.430945, 71.886189, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [409, 1, 0,\n 0, 0, 0, 0, 0.999942, 0, 380.0, 0, 1.1, 0.9], [410, 1, 46.535489, \n 9.307098, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [411, 1, 44.001211, \n 8.800242, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [412, 1, 3.090603, \n 0.618121, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [413, 1, 154.2885, \n 30.8577, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [414, 1, 13.100763, \n 2.620153, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [415, 1, 0, 0, 0, 0, 0,\n 1.000239, 0, 380.0, 0, 1.1, 0.9], [416, 1, 186.568647, 37.313729, 0, 0,\n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [417, 1, 7.300075, 1.460015, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [418, 1, 152.129169, 30.425834, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [419, 1, 81.311959, 16.262392, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [420, 1, 81.864619, 16.372924, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [421, 1, 117.923897, 23.584779, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [422, 1, 86.394999, 17.279, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [423, 1, 181.448589, 36.289718, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [424, 1, 13.081976, 2.616395, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], 
[425, 1, 107.436029, 21.487206, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [426, 1, 8.901406, 1.780281, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [427, 1, 74.807559, 14.961512, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [428, 1, 33.541388, 6.708278, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [429, 1, 378.506604, 75.701321, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [430, 1, 201.617449, 40.32349, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [431, 1, 134.824684, 26.964937, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [432, 1, 157.601785, 31.520357, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [433, 1, 80.561831, 16.112366, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [434, 1, 41.928301, 8.38566, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [435, 1, 167.686807, 33.537361, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [436, 1, 89.525173, 17.905035, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [437, 1, 20.388419, 4.077684, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [438, 1, 54.716933, 10.943387, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [439, 1, 101.875856, 20.375171, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [440, 1, 86.095509, 17.219102, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [441, 1, 66.003743, 13.200749, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [442, 1, 87.345295, 17.469059, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [443, 1, 189.372821, 37.874564, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [444, 1, 0, 0, 0, 0, 0, 0.999997, 0, 380.0, 0, 1.1,\n 0.9], [445, 1, 86.048822, 17.209764, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [446, 1, 39.900067, 7.980013, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [447, 1, 75.857823, 15.171565, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],\n [448, 1, 55.747797, 11.149559, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 449, 1, 281.099266, 56.219853, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 450, 1, 172.019337, 34.403867, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 451, 1, 73.504711, 14.700942, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 452, 1, 0, 0, 0, 0, 0, 0.999998, 0, 380.0, 0, 1.1, 0.9], [453, 1, 
\n 49.262417, 9.852483, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [454, 1, \n 34.368712, 6.873742, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [455, 1, \n 56.035293, 11.207059, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [456, 1, \n 56.035293, 11.207059, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [457, 1, \n 171.846191, 34.369238, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [458, 1, \n 163.447396, 32.689479, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [459, 1, \n 198.921561, 39.784312, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [460, 1, \n 261.423915, 52.284783, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [461, 1, \n 271.93756, 54.387512, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [462, 1, \n 83.187109, 16.637422, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [463, 1, \n 42.625596, 8.525119, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [464, 1, \n 42.67712, 8.535424, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [465, 1, \n 68.935213, 13.787043, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [466, 1, \n 55.966672, 11.193334, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [467, 1, \n 51.647972, 10.329594, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [468, 1, \n 84.682258, 16.936452, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [469, 1, \n 52.475899, 10.49518, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [470, 1, \n 133.635974, 26.727195, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [471, 1, \n 131.576667, 26.315333, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [472, 1, \n 46.021552, 9.20431, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [473, 1, \n 84.506543, 16.901309, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [474, 1, \n 43.646746, 8.729349, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [475, 1, \n 42.832665, 8.566533, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [476, 1, \n 48.407958, 9.681592, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [477, 1, \n 78.119975, 15.623995, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [478, 1, \n 98.132926, 19.626585, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [479, 1, \n 177.838657, 35.567731, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [480, 1, \n 77.949906, 15.589981, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 
0.9], [481, 1, \n 67.695306, 13.539061, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [482, 1, \n 76.865108, 15.373022, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [483, 1, \n 65.368141, 13.073628, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [484, 1, \n 51.245443, 10.249089, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [485, 1, \n 76.547129, 15.309426, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [486, 1, \n 704.196192, 140.839238, 0, 0, 0, 0.99951, 0, 220.0, 0, 1.1, 0.9], [487,\n 1, 178.44006, 35.688012, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [488, 1,\n 514.1666, 102.83332, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [489, 1, \n 135.327186, 27.065437, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [490, 1, \n 42.108774, 8.421755, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [491, 1, \n 57.900104, 11.580021, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [492, 1, \n 90.290026, 18.058005, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [493, 1, \n 116.373036, 23.274607, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [494, 1, \n 159.050014, 31.810003, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [495, 1, \n 125.200788, 25.040158, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [496, 1, \n 8.868181, 1.773636, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [497, 1, \n 1108.963227, 221.792645, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [498, 1,\n 52.009376, 10.401875, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [499, 1, \n 72.596567, 14.519313, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [500, 1, \n 39.745767, 7.949153, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [501, 1, \n 67.242984, 13.448597, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [502, 1, \n 265.394132, 53.078826, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [503, 1, \n 81.27987, 16.255974, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [504, 1, \n 53.225877, 10.645175, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [505, 1, \n 377.519214, 75.503843, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [506, 1, \n 118.498636, 23.699727, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [507, 1, \n 112.71728, 22.543456, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [508, 1, \n 163.866255, 32.773251, 0, 0, 
0, 1.0, 0, 220.0, 0, 1.1, 0.9], [509, 1, \n 215.943222, 43.188644, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [510, 1, \n 136.424234, 27.284847, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [511, 1, \n 119.003612, 23.800722, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [512, 1, \n 78.609233, 15.721847, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [513, 1, \n 43.305299, 8.66106, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [514, 1, \n 107.782698, 21.55654, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [515, 1, \n 96.14857, 19.229714, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [516, 1, \n 107.567625, 21.513525, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [517, 1, \n 50.527088, 10.105418, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [518, 1, \n 284.571762, 56.914352, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [519, 1, \n 28.007071, 5.601414, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [520, 1, \n 113.075388, 22.615078, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [521, 1, \n 102.145474, 20.429095, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [522, 1, \n 87.457782, 17.491556, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [523, 1, \n 47.077529, 9.415506, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [524, 1, \n 136.642116, 27.328423, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [525, 1, \n 162.787043, 32.557409, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [526, 1, \n 49.35397, 9.870794, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [527, 1, \n 54.18719, 10.837438, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [528, 1, \n 118.26861, 23.653722, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [529, 1, \n 151.602845, 30.320569, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [530, 1, \n 64.243093, 12.848619, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [531, 1, \n 65.318252, 13.06365, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [532, 1, \n 62.694136, 12.538827, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [533, 1, \n 56.181511, 11.236302, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [534, 1, \n 154.980048, 30.99601, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [535, 1, \n 194.025074, 38.805015, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [536, 1, \n 
152.933571, 30.586714, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [537, 1, \n 50.874697, 10.174939, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [538, 1, \n 38.030453, 7.606091, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [539, 1, \n 40.352648, 8.07053, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [540, 1, \n 36.335787, 7.267157, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [541, 1, \n 93.858474, 18.771695, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [542, 1, \n 128.932532, 25.786506, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [543, 1, \n 70.422315, 14.084463, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [544, 1, \n 131.162551, 26.23251, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [545, 1, \n 282.414482, 56.482896, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [546, 1, \n 141.550404, 28.310081, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [547, 1, \n 182.963197, 36.592639, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [548, 1, \n 59.225944, 11.845189, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [549, 1, \n 50.643246, 10.128649, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [550, 1, \n 41.78929, 8.357858, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [551, 1, \n 40.283868, 8.056774, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [552, 1, \n 200.04515, 40.00903, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [553, 1, \n 1.384003, 0.276801, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [554, 1, \n 202.666621, 40.533324, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [555, 1, \n 77.218226, 15.443645, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [556, 1, \n 119.459166, 23.891833, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [557, 1, \n 253.807751, 50.76155, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [558, 1, \n 149.659946, 29.931989, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [559, 1, \n 80.096562, 16.019312, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [560, 1, \n 125.129779, 25.025956, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [561, 1, \n 68.617518, 13.723504, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [562, 1, \n 187.457919, 37.491584, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [563, 1, \n 131.798194, 26.359639, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 
0.9], [564, 1, \n 260.235901, 52.04718, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [565, 1, \n 196.360882, 39.272176, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [566, 1, \n 0.315398, 0.06308, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [567, 1, \n 319.193421, 63.838684, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [568, 1, \n 295.176685, 59.035337, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [569, 1, \n 207.688389, 41.537678, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [570, 1, \n 324.238974, 64.847795, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [571, 1, \n 238.729406, 47.745881, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [572, 1, \n 421.078814, 84.215763, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [573, 1, \n 122.570522, 24.514104, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [574, 1, \n 233.543651, 46.70873, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [575, 1, \n 4.388704, 0.877741, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [576, 1, \n 283.987513, 56.797503, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [577, 1, \n 313.066628, 62.613326, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [578, 1, \n 298.905533, 59.781107, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [579, 1, \n 109.048896, 21.809779, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [580, 1, \n 22.702358, 4.540472, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [581, 1, \n 0.13045, 0.02609, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [582, 1, \n 82.137246, 16.427449, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [583, 1, \n 94.208402, 18.84168, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [584, 1, \n 54.052269, 10.810454, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [585, 1, \n 93.84139, 18.768278, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9]]'], {}), '([[586, 3, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [589, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [590, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [593, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [594, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [595, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [597, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [598, 2, 0, 
0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [599, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [600, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [601, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [602, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [603, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [607, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [608, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [609, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [610, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [612, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [613, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [614, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [616, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [617, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [618, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [619, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [621, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [623, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [624, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [628, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [629, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [631, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [632, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [637, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [638, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [639, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [640, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [641, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [642, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [643, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [646, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [647, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [650, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [652, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [655, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [657, 2, 0, 0, 0, 0, 
0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [658, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [661, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [662, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [663, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [666, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [668, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [670, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [672, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [675, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [676, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [678, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [679, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [681, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [683, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [687, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [689, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [691, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [693, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [694, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [695, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [696, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [697, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [698, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [701, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [702, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [704, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [705, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [707, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [708, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [711, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [713, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [714, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [716, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [717, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [719, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [722, 2, 0, 0, 0, 0, 0, 1.0, 0, 
220.0, 0, 1.1, 0.9\n ], [723, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [724, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [725, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [727, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [728, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [730, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [731, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [732, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [733, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [735, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [737, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [738, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [739, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [741, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [742, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [743, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [745, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [746, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [747, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [748, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [749, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [750, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [753, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [758, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [760, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [761, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [762, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [763, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [765, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [767, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [769, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [771, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [772, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [774, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [776, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [777, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 
1.1, 0.9\n ], [778, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [781, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [784, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [785, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [787, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [788, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [789, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [790, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [791, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [792, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [795, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [798, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [800, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [801, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [802, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [805, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [806, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [808, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [809, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [810, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [811, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [814, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [815, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [816, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [817, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [818, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [821, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [822, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [825, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [826, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [829, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [830, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [833, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [834, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [835, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [836, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n 
], [837, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [839, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [840, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [841, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [842, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [843, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [844, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [845, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [847, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [848, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [849, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [850, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [851, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [852, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [853, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [854, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [855, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [856, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [857, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [858, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [859, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [860, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [862, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [863, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [864, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [865, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [867, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [869, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [870, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [872, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [873, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [874, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [875, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [877, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [881, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [882, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [883, 
2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [886, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [889, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [890, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [893, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [894, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [895, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [896, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [898, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [900, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [902, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [903, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [905, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [907, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [909, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [911, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [913, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [914, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [915, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [916, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [917, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [918, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [919, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [920, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [921, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [922, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [923, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [925, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [928, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [931, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [934, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [935, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [936, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [937, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [939, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [940, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [942, 2, 0, 0, 
0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [943, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [944, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [945, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [946, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [948, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [950, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [951, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [952, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [956, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [957, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [958, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [959, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [960, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [963, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [965, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [966, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [967, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [968, 2, 0, 0, 0, 0, 0, 0.99951, \n 0, 220.0, 0, 1.1, 0.9], [969, 2, 0, 0, 0, 0, 0, 0.99951, 0, 220.0, 0, \n 1.1, 0.9], [971, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [973, 2,\n 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [976, 2, 0, 0, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [977, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [978, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [980, 2, 0, \n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [981, 2, 0, 0, 0, 0, 0, 1.0, 0,\n 220.0, 0, 1.1, 0.9], [982, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [983, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [984, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [985, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [986, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [987, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [988, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [990, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [993, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [994, 2, 0, 0, 0, 0, 
0, 1.0, 0, 380.0, 0, 1.1, 0.9], [995, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [996, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [997, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [998, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [999, 2, 0, 0, \n 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1000, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [1002, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [1003, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1006, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1007, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1008, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1010, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1011, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1012, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1014, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1018, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1019, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1023, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1025, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1026, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1028, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1029, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1030, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1031, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1032, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1033, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1034, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1035, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1036, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1037, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1038, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1039, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1041, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1042, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1044, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1046, 2, 0, 0, 
0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1047, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1048, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1049, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1050, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1051, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1052, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1053, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1054, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1055, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1056, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1057, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1058, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1059, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1060, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1061, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1062, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1063, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1064, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1065, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1066, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1067, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1068, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1069, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1070, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1071, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1072, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1073, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1074, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1075, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1077, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1078, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1079, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1080, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1081, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1082, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1083, 
2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1084, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1085, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1086, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1087, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1088, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1089, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1090, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1091, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1092, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1093, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1094, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1095, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1096, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1097, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1098, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1099, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1100, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1101, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1102, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1103, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1104, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1105, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1106, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1107, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1108, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1109, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1110, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1111, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1112, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1113, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1114, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1115, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1116, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1117, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1118, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], 
[1119, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1120, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1121, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1122, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1123, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1124, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1125, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1126, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1127, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1128, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1129, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1130, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1131, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1132, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1133, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1134, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1135, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1136, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1137, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1138, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1139, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1140, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1141, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1142, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1143, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1144, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1145, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1146, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1147, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1148, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1149, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1150, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1151, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1152, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1153, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1154, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 
1.1,\n 0.9], [1155, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1156, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1157, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1158, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1159, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1160, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1161, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1162, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1164, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1166, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1167, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1168, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1169, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1170, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1171, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1172, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1173, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1174, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1175, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1176, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1177, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1178, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1179, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1180, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1181, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1182, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1183, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1184, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1185, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1186, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1187, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1188, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1189, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1190, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1191, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1192, 2, 0, 0, 0, 0, 0, 1.0, 0, 
220.0, 0, 1.1,\n 0.9], [1193, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1194, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1195, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1196, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1197, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1198, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1199, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1200, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1201, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1202, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1203, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1204, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1205, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1206, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1207, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1208, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1209, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1210, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1211, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1212, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1213, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1214, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1215, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1216, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1217, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1218, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1219, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1220, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1221, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1222, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1223, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1224, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1225, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1226, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1227, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1228, 2, 0, 0, 0, 0, 0, 
1.0, 0, 380.0, 0, 1.1,\n 0.9], [1229, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1230, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1231, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1232, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1233, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1234, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1235, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1236, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1237, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1238, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1239, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1240, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1241, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1242, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1243, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1244, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1245, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1246, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1247, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1248, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1249, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1250, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1251, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1252, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1253, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1254, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1255, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1256, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1257, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1258, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1259, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1260, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1261, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1262, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1263, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1264, 2, 0, 0, 
0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1265, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1266, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1267, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1270, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1271, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1272, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1273, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1274, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1275, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1276, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1277, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1278, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1279, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1280, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1282, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1283, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1284, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1285, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1286, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1287, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1288, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1289, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1290, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1291, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1292, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1293, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1294, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1295, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1296, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1297, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1300, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1301, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1302, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1303, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1304, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1305, 
2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1306, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1307, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1308, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1309, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1310, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1311, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1312, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1313, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1314, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1315, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1316, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1317, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1318, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1319, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1320, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1321, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1322, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1323, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1324, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1325, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1326, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1327, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1328, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1329, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1330, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1331, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1332, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1333, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1334, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1336, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1337, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1338, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1339, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1340, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1341, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], 
[1342, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1343, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1344, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1345, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1346, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1348, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1349, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1350, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1351, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1352, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1355, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1356, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1357, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1358, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1359, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1360, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1361, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1362, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1363, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1364, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1365, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1366, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1367, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1368, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1369, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1370, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1371, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1372, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1373, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1374, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1375, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1376, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1377, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1378, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1379, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1380, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 
1.1, 0.9], [1381, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1382, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1383, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1384, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1385, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1386, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1387, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1388, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1389, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1390, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1391, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1392, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1393, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1394, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1395, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1396, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1397, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1398, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1399, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1400, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1401, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1402, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1403, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1404, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1405, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1406, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1407, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1408, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1409, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1410, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1411, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1412, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1413, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1414, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1415, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1416, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 
220.0, 0, 1.1, 0.9], [1417, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1418, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1419, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1421, 2, 0, 0, 0, 0, 0, \n 0.99951, 0, 220.0, 0, 1.1, 0.9], [1422, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0,\n 0, 1.1, 0.9], [1423, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 1424, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1425, 2, 0, 0, 0,\n 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1426, 2, 0, 0, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [1427, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [1428, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1431, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1432, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1433, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1434, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1435, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1436, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1437, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1438, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1439, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1440, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1441, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1442, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1443, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1444, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1445, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1446, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1447, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1448, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1449, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1450, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1451, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1452, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1453, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1454, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1455, 2, 0,\n 0, 0, 
0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1456, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1457, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1458, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1459, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1460, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1461, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1462, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1463, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1464, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1465, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1466, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1467, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1468, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1469, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1470, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1471, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1472, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1473, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1474, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1475, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1476, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1477, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1479, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1480, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1481, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1482, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1483, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1484, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1485, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1486, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1487, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1488, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1489, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1490, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1491, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1492, 2, 
0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1493, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1494, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1495, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1497, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1498, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1500, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1501, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1502, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1503, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1504, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1505, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1506, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1507, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1508, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1510, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1511, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1512, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1513, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1514, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1516, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1517, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1518, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1519, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1520, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1521, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1522, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1523, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1524, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1525, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1526, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1527, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1528, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1529, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1530, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1531, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], 
[1532, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1534, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1535, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1536, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1537, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1538, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1539, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1540, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1541, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1542, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1543, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1544, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1545, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1546, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1547, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1548, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1549, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1550, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1551, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1552, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1553, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1554, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1555, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1556, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1557, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1558, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1559, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1560, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1561, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1562, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1563, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1564, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1565, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1566, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1567, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1568, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 
1.1, 0.9], [1569, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1570, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1571, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1572, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1573, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1574, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1575, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1576, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1577, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1578, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1579, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1580, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1581, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1582, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1583, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1584, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1585, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1586, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1587, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1588, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1589, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1590, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1591, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1592, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1593, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1594, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1595, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1596, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1597, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1598, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1599, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1600, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1601, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1602, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1603, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1604, 2, 0, 0, 0, 0, 0, 1.0, 0, 
380.0, 0, 1.1, 0.9], [1605, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1606, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1607, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1608, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1609, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1610, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1611, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1612, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1613, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1614, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1615, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1616, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1617, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1618, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1619, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1620, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1621, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1622, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1623, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1624, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1625, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1626, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1627, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1628, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1629, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1630, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1631, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1632, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1633, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1634, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1635, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1636, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1637, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1638, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1639, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1640, 2, 0, 0, 0, 0, 0, 
1.0, 0, 220.0, 0, 1.1, 0.9], [1641, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1642, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1643, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1644, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1645, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1646, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1647, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1648, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1649, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1650, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1651, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1652, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1653, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1654, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1655, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1656, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1657, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1658, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1659, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1660, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1661, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1662, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1663, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1664, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1665, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1666, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1667, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1668, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1669, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1670, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1671, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1672, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1673, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1674, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1675, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1676, 2, 0, 0, 
0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1677, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1678, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1679, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1680, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1681, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1682, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1683, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1684, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1685, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1686, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1687, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1688, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1689, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1690, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1691, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1692, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1693, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1694, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1695, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1696, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1697, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1698, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1699, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1700, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1701, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1702, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1703, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1704, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1705, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1706, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1707, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1708, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1709, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1710, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1711, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1712, 
2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1713, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1714, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1715, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1716, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1717, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1718, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1719, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1720, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1721, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1722, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1723, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1724, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1725, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1726, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1727, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1728, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1729, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1730, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1731, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1732, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1733, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1734, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1735, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1736, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1737, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1738, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1739, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1740, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1741, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1742, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1743, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1744, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1745, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1746, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1747, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], 
[1748, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1749, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1750, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1751, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1752, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1753, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1754, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1755, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1756, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1757, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1758, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1759, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1760, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1761, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1762, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1763, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1764, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1765, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1766, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1767, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1768, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1769, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1770, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1771, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1772, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1773, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1774, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1775, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1776, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1777, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1778, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1779, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1780, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1781, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1782, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1783, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 
1.1,\n 0.9], [1784, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1785, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1786, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1787, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1788, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1789, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1790, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1791, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1792, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1793, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1794, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1795, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1796, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1797, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1798, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1799, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1800, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1801, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1802, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1803, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1804, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1805, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1806, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1807, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1808, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1809, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1810, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1811, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1812, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1813, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1814, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1815, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1816, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1817, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1818, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1819, 2, 0, 0, 0, 0, 0, 1.0, 0, 
220.0, 0, 1.1,\n 0.9], [1820, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1821, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1822, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1823, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1824, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1825, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1826, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1827, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1828, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1829, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1830, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1831, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1832, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1833, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1834, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1836, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1837, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1838, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1839, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1840, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1841, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1842, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1843, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1844, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1845, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1846, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1847, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1848, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1849, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1850, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1851, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1852, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1853, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1854, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1855, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1856, 2, 0, 0, 0, 0, 0, 
1.0, 0, 380.0, 0, 1.1,\n 0.9], [1857, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1858, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1860, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1861, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1862, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1863, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1864, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1865, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1866, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1867, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1868, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1869, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1870, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1871, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1872, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1873, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1874, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1875, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1876, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1877, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1878, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1879, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1880, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1881, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1882, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1883, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1884, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1885, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1886, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1887, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1888, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1889, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1890, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1891, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1892, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1893, 2, 0, 0, 
0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1894, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1895, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1896, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1897, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1,\n 0.9], [1898, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1899, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1900, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1901, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1902, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1903, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1904, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [1905, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [1906, 2, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1907, 2, 0,\n 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [1908, 2, 0, 0, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [1909, 2, 0, 0, 0, 0, 0, 0.99951, 0, 220.0, 0, \n 1.1, 0.9], [1910, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1911,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1912, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [1913, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1914, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1915,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1916, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1917, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1918, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1919,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1920, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1921, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1922, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1923,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1924, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1925, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1926, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1927,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1928, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1929, 
2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1930, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1931,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1932, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1933, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1934, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1935,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1936, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1937, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1938, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1939,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1940, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1941, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1942, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1943,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1944, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1945, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1946, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1947,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1948, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1949, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1950, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1951,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1952, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1953, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1954, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1955,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1956, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1957, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1958, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1959,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1960, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1961, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1962, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1963,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1964, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1965, 
2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1966, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1967,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1968, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1969, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1970, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1971,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1972, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1973, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1974, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1975,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1976, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1977, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1978, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1979,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1980, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1981, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1982, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1983,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1984, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1985, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1986, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1987,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1988, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1989, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1990, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1991,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1992, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1993, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1994, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1995,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1996, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1997, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [1998, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [1999,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [2000, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [2001, 
2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [2002, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [2003,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [2004, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [2005, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [2006, 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [2007,\n 2, 0, 0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [2008, 2, 0, 0, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [1, 1, 325.748587, 65.149717, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [2, 1, 0, 0, 0, 0, 0, 1.000012, 0, 380.0, 0, \n 1.1, 0.9], [3, 1, 57.094965, 11.418993, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1,\n 0.9], [4, 1, 93.894564, 18.778913, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],\n [5, 1, 0, 0, 0, 0, 0, 1.00026, 0, 380.0, 0, 1.1, 0.9], [6, 1, \n 275.713362, 55.142672, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [7, 1, \n 207.784304, 41.556861, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [8, 1, \n 173.85906, 34.771812, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [9, 1, \n 117.578165, 23.515633, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [10, 1, 0,\n 0, 0, 0, 0, 1.000518, 0, 380.0, 0, 1.1, 0.9], [11, 1, 103.018516, \n 20.603703, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [12, 1, 0, 0, 0, 0, 0,\n 1.00057, 0, 380.0, 0, 1.1, 0.9], [13, 1, 0, 0, 0, 0, 0, 1.000425, 0, \n 380.0, 0, 1.1, 0.9], [14, 1, 246.382498, 49.2765, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [15, 1, 0, 0, 0, 0, 0, 1.000581, 0, 380.0, 0, 1.1,\n 0.9], [16, 1, 420.196361, 84.039272, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [17, 1, 98.967281, 19.793456, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [18, 1, 0, 0, 0, 0, 0, 1.002692, 0, 380.0, 0, 1.1, 0.9], [19, 1, \n 244.510845, 48.902169, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [20, 1, 0,\n 0, 0, 0, 0, 0.998777, 0, 380.0, 0, 1.1, 0.9], [21, 1, 1051.434139, \n 210.286828, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [22, 1, 0, 0, 0, 0, 0,\n 1.000461, 0, 380.0, 0, 1.1, 0.9], [23, 1, 137.668379, 27.533676, 0, 0, \n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [24, 1, 0, 0, 0, 0, 0, 0.999996, 0, \n 
380.0, 0, 1.1, 0.9], [25, 1, 65.847745, 13.169549, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [26, 1, 0, 0, 0, 0, 0, 1.000752, 0, 380.0, 0, 1.1,\n 0.9], [27, 1, 80.82993, 16.165986, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],\n [28, 1, 238.828227, 47.765645, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 29, 1, 87.72658, 17.545316, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [30, \n 1, 0, 0, 0, 0, 0, 0.99974, 0, 380.0, 0, 1.1, 0.9], [31, 1, 172.643645, \n 34.528729, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [32, 1, 0, 0, 0, 0, 0,\n 0.999876, 0, 380.0, 0, 1.1, 0.9], [33, 1, 216.462687, 43.292537, 0, 0, \n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [34, 1, 42.945181, 8.589036, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [35, 1, 2.843198, 0.56864, 0, 0, 0, 1.0, 0,\n 220.0, 0, 1.1, 0.9], [36, 1, 9.41342, 1.882684, 0, 0, 0, 1.0, 0, 220.0,\n 0, 1.1, 0.9], [37, 1, 0, 0, 0, 0, 0, 1.003518, 0, 380.0, 0, 1.1, 0.9],\n [38, 1, 226.790299, 45.35806, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [39,\n 1, 74.262139, 14.852428, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [40, 1, \n 77.569126, 15.513825, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [41, 1, \n 83.36923, 16.673846, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [42, 1, 0, 0,\n 0, 0, 0, 1.001382, 0, 380.0, 0, 1.1, 0.9], [43, 1, 127.850472, \n 25.570094, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [44, 1, 163.565722, \n 32.713144, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [45, 1, 86.824343, \n 17.364869, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [46, 1, 0, 0, 0, 0, 0,\n 1.000154, 0, 380.0, 0, 1.1, 0.9], [47, 1, 377.519214, 75.503843, 0, 0, \n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [48, 1, 259.494186, 51.898837, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [49, 1, 65.638937, 13.127787, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [50, 1, 95.579153, 19.115831, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [51, 1, 123.864343, 24.772869, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [52, 1, 0, 0, 0, 0, 0, 1.000109, 0, 380.0, 0, 1.1,\n 0.9], [53, 1, 187.944302, 37.58886, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [54, 1, 
95.486648, 19.09733, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 55, 1, 93.644497, 18.728899, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [56,\n 1, 0, 0, 0, 0, 0, 0.999658, 0, 380.0, 0, 1.1, 0.9], [57, 1, 111.782276,\n 22.356455, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [58, 1, 256.054306, \n 51.210861, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [59, 1, 73.130675, \n 14.626135, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [60, 1, 38.556521, \n 7.711304, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [61, 1, 0, 0, 0, 0, 0, \n 0.999552, 0, 380.0, 0, 1.1, 0.9], [62, 1, 293.946406, 58.789281, 0, 0, \n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [63, 1, 173.514047, 34.702809, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [64, 1, 1841.335671, 368.267134, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [65, 1, 6.135361, 1.227072, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [66, 1, 194.668019, 38.933604, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [67, 1, 417.595693, 83.519139, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [68, 1, 0, 0, 0, 0, 0, 0.998236, 0, 380.0, 0, 1.1,\n 0.9], [69, 1, 0, 0, 0, 0, 0, 0.999783, 0, 380.0, 0, 1.1, 0.9], [70, 1, \n 789.995804, 157.999161, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [71, 1, \n 183.584849, 36.71697, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [72, 1, \n 300.686791, 60.137358, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [73, 1, \n 96.261172, 19.252234, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [74, 1, 0, \n 0, 0, 0, 0, 1.001507, 0, 380.0, 0, 1.1, 0.9], [75, 1, 119.975301, \n 23.99506, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [76, 1, 115.802488, \n 23.160498, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [77, 1, 112.162624, \n 22.432525, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [78, 1, 0, 0, 0, 0, 0,\n 1.000176, 0, 380.0, 0, 1.1, 0.9], [79, 1, 115.816553, 23.163311, 0, 0, \n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [80, 1, 123.01505, 24.60301, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [81, 1, 138.867238, 27.773448, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [82, 1, 4.621583, 0.924317, 0, 0, 0, 1.0, \n 0, 220.0, 0, 1.1, 0.9], [83, 1, 
309.217998, 61.8436, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [84, 1, 30.440604, 6.088121, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [85, 1, 105.562105, 21.112421, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [86, 1, 0, 0, 0, 0, 0, 1.00001, 0, 380.0, 0, 1.1, \n 0.9], [87, 1, 0, 0, 0, 0, 0, 1.000289, 0, 380.0, 0, 1.1, 0.9], [88, 1, \n 85.202609, 17.040522, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [89, 1, \n 105.706878, 21.141376, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [90, 1, \n 122.086777, 24.417355, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [91, 1, \n 42.406867, 8.481373, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [92, 1, \n 46.280769, 9.256154, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [93, 1, \n 45.392163, 9.078433, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [94, 1, 0, 0,\n 0, 0, 0, 1.00115, 0, 380.0, 0, 1.1, 0.9], [95, 1, 0, 0, 0, 0, 0, 1.0007,\n 0, 380.0, 0, 1.1, 0.9], [96, 1, 0, 0, 0, 0, 0, 0.999998, 0, 380.0, 0, \n 1.1, 0.9], [97, 1, 6.384069, 1.276814, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [98, 1, 117.377345, 23.475469, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [99, 1, 0, 0, 0, 0, 0, 1.000519, 0, 380.0, 0, 1.1, 0.9], [100, 1,\n 0, 0, 0, 0, 0, 1.002126, 0, 380.0, 0, 1.1, 0.9], [101, 1, 83.11513, \n 16.623026, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [102, 1, 160.873209, \n 32.174642, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [103, 1, 188.09191, \n 37.618382, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [104, 1, 0, 0, 0, 0, 0,\n 1.000066, 0, 380.0, 0, 1.1, 0.9], [105, 1, 0, 0, 0, 0, 0, 1.000146, 0, \n 380.0, 0, 1.1, 0.9], [106, 1, 0, 0, 0, 0, 0, 0.999963, 0, 380.0, 0, 1.1,\n 0.9], [107, 1, 0, 0, 0, 0, 0, 1.000005, 0, 380.0, 0, 1.1, 0.9], [108, 1,\n 132.675911, 26.535182, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [109, 1, \n 53.718212, 10.743642, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [110, 1, \n 69.728393, 13.945679, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [111, 1, \n 122.880269, 24.576054, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [112, 1, \n 62.192906, 12.438581, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [113, 
1, \n 98.03855, 19.60771, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [114, 1, \n 144.38681, 28.877362, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [115, 1, \n 93.077688, 18.615538, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [116, 1, \n 155.75271, 31.150542, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [117, 1, 0,\n 0, 0, 0, 0, 1.000162, 0, 380.0, 0, 1.1, 0.9], [118, 1, 241.160786, \n 48.232157, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [119, 1, 46.746863, \n 9.349373, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [120, 1, 0, 0, 0, 0, 0,\n 1.00083, 0, 380.0, 0, 1.1, 0.9], [121, 1, 63.482261, 12.696452, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [122, 1, 55.578075, 11.115615, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [123, 1, 0, 0, 0, 0, 0, 1.000079, 0, 380.0,\n 0, 1.1, 0.9], [124, 1, 0, 0, 0, 0, 0, 1.000003, 0, 380.0, 0, 1.1, 0.9],\n [125, 1, 0, 0, 0, 0, 0, 0.999463, 0, 380.0, 0, 1.1, 0.9], [126, 1, \n 291.397229, 58.279446, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [127, 1, \n 225.280714, 45.056143, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [128, 1, 0,\n 0, 0, 0, 0, 1.000968, 0, 380.0, 0, 1.1, 0.9], [129, 1, 0, 0, 0, 0, 0, \n 0.999994, 0, 380.0, 0, 1.1, 0.9], [130, 1, 310.621123, 62.124225, 0, 0,\n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [131, 1, 68.584875, 13.716975, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [132, 1, 178.584646, 35.716929, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [133, 1, 59.81886, 11.963772, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [134, 1, 59.573903, 11.914781, 0, 0, 0, 1.0, 0,\n 220.0, 0, 1.1, 0.9], [135, 1, 59.652888, 11.930578, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [136, 1, 57.787513, 11.557503, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [137, 1, 46.224691, 9.244938, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [138, 1, 0, 0, 0, 0, 0, 1.000239, 0, 380.0, 0, 1.1,\n 0.9], [139, 1, 90.549485, 18.109897, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [140, 1, 62.618846, 12.523769, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [141, 1, 74.19228, 14.838456, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9\n ], [142, 1, 81.637993, 
16.327599, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],\n [143, 1, 0, 0, 0, 0, 0, 0.999985, 0, 380.0, 0, 1.1, 0.9], [144, 1, \n 74.363771, 14.872754, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [145, 1, \n 216.326177, 43.265235, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [146, 1, \n 278.885136, 55.777027, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [147, 1, \n 170.940166, 34.188033, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [148, 1, \n 241.227956, 48.245591, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [149, 1, \n 155.517918, 31.103584, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [150, 1, \n 203.044789, 40.608958, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [151, 1, \n 47.847194, 9.569439, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [152, 1, \n 99.325814, 19.865163, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [153, 1, \n 177.213406, 35.442681, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [154, 1, \n 182.033335, 36.406667, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [155, 1, \n 189.603806, 37.920761, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [156, 1, 0,\n 0, 0, 0, 0, 0.999987, 0, 380.0, 0, 1.1, 0.9], [157, 1, 0, 0, 0, 0, 0, \n 1.001031, 0, 380.0, 0, 1.1, 0.9], [158, 1, 49.954288, 9.990858, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [159, 1, 0, 0, 0, 0, 0, 1.001191, 0, 380.0,\n 0, 1.1, 0.9], [160, 1, 0, 0, 0, 0, 0, 1.000005, 0, 380.0, 0, 1.1, 0.9],\n [161, 1, 155.079459, 31.015892, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 162, 1, 231.797832, 46.359566, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 163, 1, 46.357377, 9.271475, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [164,\n 1, 46.543808, 9.308762, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [165, 1, \n 0, 0, 0, 0, 0, 1.000008, 0, 380.0, 0, 1.1, 0.9], [166, 1, 54.417242, \n 10.883448, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [167, 1, 76.551361, \n 15.310272, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [168, 1, 52.245327, \n 10.449065, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [169, 1, 178.850819, \n 35.770164, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [170, 1, 134.391309, \n 26.878262, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], 
[171, 1, 114.702931, \n 22.940586, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [172, 1, 56.293074, \n 11.258615, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [173, 1, 53.776547, \n 10.755309, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [174, 1, 80.699328, \n 16.139866, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [175, 1, 53.741302, \n 10.74826, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [176, 1, 187.268482, \n 37.453696, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [177, 1, 30.536855, \n 6.107371, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [178, 1, 161.730672, \n 32.346134, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [179, 1, 59.592171, \n 11.918434, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [180, 1, 52.383043, \n 10.476609, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [181, 1, 39.537212, \n 7.907442, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [182, 1, 1.791054, \n 0.358211, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [183, 1, 536.118855, \n 107.223771, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [184, 1, 0, 0, 0, 0, \n 0, 0.999412, 0, 380.0, 0, 1.1, 0.9], [185, 1, 114.645917, 22.929183, 0,\n 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [186, 1, 61.736231, 12.347246, 0, 0,\n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [187, 1, 36.109408, 7.221882, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [188, 1, 53.741302, 10.74826, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [189, 1, 197.196893, 39.439379, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [190, 1, 260.829785, 52.165957, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [191, 1, 0, 0, 0, 0, 0, 1.000009, 0, 380.0, 0, 1.1,\n 0.9], [192, 1, 62.815713, 12.563143, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [193, 1, 53.654613, 10.730923, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [194, 1, 37.038638, 7.407728, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [195, 1, 0, 0, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [196, 1, \n 51.963051, 10.39261, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [197, 1, \n 82.328556, 16.465711, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [198, 1, \n 48.717631, 9.743526, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [199, 1, \n 62.722328, 
12.544466, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [200, 1, \n 53.742549, 10.74851, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [201, 1, 0, \n 0, 0, 0, 0, 1.000603, 0, 380.0, 0, 1.1, 0.9], [202, 1, 55.070857, \n 11.014171, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [203, 1, 7.256079, \n 1.451216, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [204, 1, 212.674227, \n 42.534845, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [205, 1, 106.346688, \n 21.269338, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [206, 1, 51.038978, \n 10.207796, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [207, 1, 151.767938, \n 30.353588, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [208, 1, 44.689673, \n 8.937935, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [209, 1, 62.103028, \n 12.420606, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [210, 1, 71.344757, \n 14.268951, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [211, 1, 250.721465, \n 50.144293, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [212, 1, 62.839799, \n 12.56796, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [213, 1, 294.578929, \n 58.915786, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [214, 1, 198.21428, \n 39.642856, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [215, 1, 419.133986, \n 83.826797, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [216, 1, 141.326419, \n 28.265284, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [217, 1, 45.286003, \n 9.057201, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [218, 1, 137.965387, \n 27.593077, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [219, 1, 221.727192, \n 44.345438, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [220, 1, 0, 0, 0, 0, 0,\n 0.9995, 0, 380.0, 0, 1.1, 0.9], [221, 1, 126.484966, 25.296993, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [222, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 380.0,\n 0, 1.1, 0.9], [223, 1, 125.354431, 25.070886, 0, 0, 0, 1.0, 0, 220.0, 0,\n 1.1, 0.9], [224, 1, 145.769935, 29.153987, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [225, 1, 261.73828, 52.347656, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [226, 1, 91.433269, 18.286654, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [227, 1, 113.907309, 22.781462, 0, 0, 0, 
1.0, 0, 380.0, 0, \n 1.1, 0.9], [228, 1, 111.682638, 22.336528, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [229, 1, 247.134629, 49.426926, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [230, 1, 59.276997, 11.855399, 0, 0, 0, 1.0, 0, 380.0, 0, \n 1.1, 0.9], [231, 1, 0, 0, 0, 0, 0, 1.0008, 0, 380.0, 0, 1.1, 0.9], [232,\n 1, 0, 0, 0, 0, 0, 0.999985, 0, 380.0, 0, 1.1, 0.9], [233, 1, 0, 0, 0, 0,\n 0, 0.999572, 0, 380.0, 0, 1.1, 0.9], [234, 1, 211.151257, 42.230251, 0,\n 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [235, 1, 68.663575, 13.732715, 0, 0,\n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [236, 1, 0, 0, 0, 0, 0, 0.999972, 0, \n 380.0, 0, 1.1, 0.9], [237, 1, 0.568269, 0.113654, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [238, 1, 77.694084, 15.538817, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [239, 1, 107.344119, 21.468824, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [240, 1, 677.106115, 135.421223, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [241, 1, 501.035004, 100.207001, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [242, 1, 182.435912, 36.487182, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [243, 1, 147.189401, 29.43788, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [244, 1, 175.365238, 35.073048, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [245, 1, 0, 0, 0, 0, 0, 1.001868, 0, 380.0, 0, 1.1,\n 0.9], [246, 1, 0, 0, 0, 0, 0, 1.000314, 0, 380.0, 0, 1.1, 0.9], [247, 1,\n 34.80024, 6.960048, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [248, 1, 0, 0,\n 0, 0, 0, 1.000002, 0, 380.0, 0, 1.1, 0.9], [249, 1, 0, 0, 0, 0, 0, \n 1.000002, 0, 380.0, 0, 1.1, 0.9], [250, 1, 0, 0, 0, 0, 0, 1.000003, 0, \n 380.0, 0, 1.1, 0.9], [251, 1, 86.366303, 17.273261, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [252, 1, 221.490058, 44.298012, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [253, 1, 97.242587, 19.448517, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [254, 1, 31.047944, 6.209589, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [255, 1, 152.691204, 30.538241, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [256, 1, 175.110241, 35.022048, 0, 0, 0, 1.0, 0, \n 220.0, 
0, 1.1, 0.9], [257, 1, 84.512076, 16.902415, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [258, 1, 275.414649, 55.08293, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [259, 1, 0, 0, 0, 0, 0, 0.999267, 0, 380.0, 0, 1.1,\n 0.9], [260, 1, 171.407259, 34.281452, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [261, 1, 0, 0, 0, 0, 0, 1.001914, 0, 380.0, 0, 1.1, 0.9], [262, 1,\n 0, 0, 0, 0, 0, 1.000151, 0, 380.0, 0, 1.1, 0.9], [263, 1, 245.883489, \n 49.176698, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [264, 1, 318.309439, \n 63.661888, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [265, 1, 0, 0, 0, 0, 0,\n 1.000004, 0, 380.0, 0, 1.1, 0.9], [266, 1, 153.403945, 30.680789, 0, 0,\n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [267, 1, 194.022708, 38.804542, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [268, 1, 67.469917, 13.493983, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [269, 1, 54.180873, 10.836175, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [270, 1, 0, 0, 0, 0, 0, 1.000003, 0, 380.0,\n 0, 1.1, 0.9], [271, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],\n [272, 1, 1.105489, 0.221098, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [273,\n 1, 151.176192, 30.235238, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [274, 1,\n 293.866602, 58.77332, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [275, 1, \n 55.013432, 11.002686, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [276, 1, \n 214.456344, 42.891269, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [277, 1, 0,\n 0, 0, 0, 0, 0.999517, 0, 380.0, 0, 1.1, 0.9], [278, 1, 167.418237, \n 33.483647, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [279, 1, 0, 0, 0, 0, 0,\n 0.999817, 0, 380.0, 0, 1.1, 0.9], [280, 1, 0, 0, 0, 0, 0, 0.999266, 0, \n 380.0, 0, 1.1, 0.9], [281, 1, 221.13944, 44.227888, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [282, 1, 312.725416, 62.545083, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [283, 1, 125.353926, 25.070785, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [284, 1, 190.167711, 38.033542, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [285, 1, 84.808128, 16.961626, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [286, 
1, 177.744137, 35.548827, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [287, 1, 109.245452, 21.84909, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [288, 1, 70.265914, 14.053183, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [289, 1, 110.507903, 22.101581, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [290, 1, 0, 0, 0, 0, 0, 1.004495, 0, 380.0, 0, 1.1,\n 0.9], [291, 1, 72.723946, 14.544789, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [292, 1, 143.371926, 28.674385, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [293, 1, 126.359101, 25.27182, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [294, 1, 33.672791, 6.734558, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [295, 1, 70.455207, 14.091041, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9],\n [296, 1, 200.022498, 40.0045, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 297, 1, 210.22589, 42.045178, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 298, 1, 111.003448, 22.20069, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 299, 1, 107.506102, 21.50122, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 300, 1, 292.875731, 58.575146, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 301, 1, 0, 0, 0, 0, 0, 0.999437, 0, 380.0, 0, 1.1, 0.9], [302, 1, \n 246.711976, 49.342395, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [303, 1, \n 126.718426, 25.343685, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [304, 1, \n 108.813201, 21.76264, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [305, 1, 0,\n 0, 0, 0, 0, 0.99961, 0, 380.0, 0, 1.1, 0.9], [306, 1, 0, 0, 0, 0, 0, \n 1.001597, 0, 380.0, 0, 1.1, 0.9], [307, 1, 129.062569, 25.812514, 0, 0,\n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [308, 1, 159.116952, 31.82339, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [309, 1, 260.337709, 52.067542, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [310, 1, 0, 0, 0, 0, 0, 0.999901, 0, 380.0,\n 0, 1.1, 0.9], [311, 1, 221.133187, 44.226637, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [312, 1, 99.449747, 19.889949, 0, 0, 0, 1.0, 0, 380.0, 0, \n 1.1, 0.9], [313, 1, 0, 0, 0, 0, 0, 1.000862, 0, 380.0, 0, 1.1, 0.9], [\n 314, 1, 308.032014, 61.606403, 0, 0, 0, 1.0, 0, 380.0, 0, 
1.1, 0.9], [\n 315, 1, 0, 0, 0, 0, 0, 1.00159, 0, 380.0, 0, 1.1, 0.9], [316, 1, \n 120.690947, 24.138189, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [317, 1, \n 162.50594, 32.501188, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [318, 1, \n 267.057251, 53.41145, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [319, 1, \n 9.567058, 1.913412, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [320, 1, 0, 0,\n 0, 0, 0, 0.999996, 0, 380.0, 0, 1.1, 0.9], [321, 1, 226.312454, \n 45.262491, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [322, 1, 28.811032, \n 5.762206, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [323, 1, 2.997543, \n 0.599509, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [324, 1, 529.89302, \n 105.978604, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [325, 1, 172.614935, \n 34.522987, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [326, 1, 13.995083, \n 2.799017, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [327, 1, 120.437246, \n 24.087449, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [328, 1, 205.243578, \n 41.048716, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [329, 1, 308.704638, \n 61.740928, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [330, 1, 0, 0, 0, 0, 0,\n 1.002351, 0, 380.0, 0, 1.1, 0.9], [331, 1, 24.510098, 4.90202, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [332, 1, 0, 0, 0, 0, 0, 1.00029, 0, 380.0,\n 0, 1.1, 0.9], [333, 1, 257.534094, 51.506819, 0, 0, 0, 1.0, 0, 380.0, 0,\n 1.1, 0.9], [334, 1, 0, 0, 0, 0, 0, 1.000078, 0, 380.0, 0, 1.1, 0.9], [\n 335, 1, 262.832973, 52.566595, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 336, 1, 0, 0, 0, 0, 0, 0.998883, 0, 380.0, 0, 1.1, 0.9], [337, 1, \n 104.54725, 20.90945, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [338, 1, \n 283.756092, 56.751218, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [339, 1, \n 175.499218, 35.099844, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [340, 1, \n 148.381042, 29.676208, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [341, 1, \n 134.139426, 26.827885, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [342, 1, \n 232.687766, 46.537553, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [343, 1, \n 127.655901, 25.53118, 0, 0, 0, 
1.0, 0, 220.0, 0, 1.1, 0.9], [344, 1, \n 320.06392, 64.012784, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [345, 1, \n 349.977293, 69.995459, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [346, 1, \n 347.438228, 69.487646, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [347, 1, \n 121.505179, 24.301036, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [348, 1, \n 317.622541, 63.524508, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [349, 1, 0,\n 0, 0, 0, 0, 1.002227, 0, 380.0, 0, 1.1, 0.9], [350, 1, 166.629421, \n 33.325884, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [351, 1, 0, 0, 0, 0, 0,\n 1.002311, 0, 380.0, 0, 1.1, 0.9], [352, 1, 1102.969172, 220.593834, 0, \n 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [353, 1, 3.315894, 0.663179, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [354, 1, 22.527896, 4.505579, 0, 0, 0, 1.0,\n 0, 220.0, 0, 1.1, 0.9], [355, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, \n 1.1, 0.9], [356, 1, 0.0, 0.0, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 357, 1, 0.05647, 0.011294, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [358, \n 1, 0, 0, 0, 0, 0, 1.001145, 0, 380.0, 0, 1.1, 0.9], [359, 1, 3.297102, \n 0.65942, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [360, 1, 0, 0, 0, 0, 0, \n 1.000743, 0, 380.0, 0, 1.1, 0.9], [361, 1, 84.386359, 16.877272, 0, 0, \n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [362, 1, 240.544798, 48.10896, 0, 0, 0,\n 1.0, 0, 220.0, 0, 1.1, 0.9], [363, 1, 354.159899, 70.83198, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [364, 1, 83.559152, 16.71183, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [365, 1, 74.998776, 14.999755, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [366, 1, 148.647335, 29.729467, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [367, 1, 71.849947, 14.369989, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [368, 1, 35.380095, 7.076019, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [369, 1, 29.073011, 5.814602, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [370, 1, 85.591776, 17.118355, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [371, 1, 430.66013, 86.132026, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [372, 1, 249.745997, 49.949199, 0, 
0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [373, 1, 168.52878, 33.705756, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [374, 1, 86.418705, 17.283741, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [375, 1, 283.483358, 56.696672, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [376, 1, 310.927852, 62.18557, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [377, 1, 222.495169, 44.499034, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [378, 1, 222.066912, 44.413382, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [379, 1, 76.536953, 15.307391, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [380, 1, 0, 0, 0, 0, 0, 1.001552, 0, 380.0, 0, 1.1,\n 0.9], [381, 1, 255.944236, 51.188847, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, \n 0.9], [382, 1, 0, 0, 0, 0, 0, 1.000904, 0, 380.0, 0, 1.1, 0.9], [383, 1,\n 0, 0, 0, 0, 0, 0.999115, 0, 380.0, 0, 1.1, 0.9], [384, 1, 90.316363, \n 18.063273, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [385, 1, 113.996976, \n 22.799395, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [386, 1, 91.593152, \n 18.31863, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [387, 1, 186.533196, \n 37.306639, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [388, 1, 1001.680535, \n 200.336107, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [389, 1, 0, 0, 0, 0, \n 0, 0.999916, 0, 380.0, 0, 1.1, 0.9], [390, 1, 82.706419, 16.541284, 0, \n 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [391, 1, 94.209664, 18.841933, 0, 0,\n 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [392, 1, 180.787399, 36.15748, 0, 0, 0,\n 1.0, 0, 380.0, 0, 1.1, 0.9], [393, 1, 225.769637, 45.153927, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [394, 1, 81.202848, 16.24057, 0, 0, 0, 1.0,\n 0, 380.0, 0, 1.1, 0.9], [395, 1, 112.54213, 22.508426, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [396, 1, 79.712439, 15.942488, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [397, 1, 639.205952, 127.84119, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [398, 1, 276.853905, 55.370781, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [399, 1, 117.959928, 23.591986, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [400, 1, 62.847073, 12.569415, 0, 0, 0, 1.0, 0, \n 220.0, 
0, 1.1, 0.9], [401, 1, 0, 0, 0, 0, 0, 1.000689, 0, 380.0, 0, 1.1,\n 0.9], [402, 1, 0, 0, 0, 0, 0, 1.000468, 0, 380.0, 0, 1.1, 0.9], [403, 1,\n 31.205033, 6.241007, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [404, 1, \n 109.937263, 21.987453, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [405, 1, \n 828.818277, 165.763655, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [406, 1, \n 62.797316, 12.559463, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [407, 1, \n 124.308664, 24.861733, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [408, 1, \n 359.430945, 71.886189, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [409, 1, 0,\n 0, 0, 0, 0, 0.999942, 0, 380.0, 0, 1.1, 0.9], [410, 1, 46.535489, \n 9.307098, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [411, 1, 44.001211, \n 8.800242, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [412, 1, 3.090603, \n 0.618121, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [413, 1, 154.2885, \n 30.8577, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [414, 1, 13.100763, \n 2.620153, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [415, 1, 0, 0, 0, 0, 0,\n 1.000239, 0, 380.0, 0, 1.1, 0.9], [416, 1, 186.568647, 37.313729, 0, 0,\n 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [417, 1, 7.300075, 1.460015, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [418, 1, 152.129169, 30.425834, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [419, 1, 81.311959, 16.262392, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [420, 1, 81.864619, 16.372924, 0, 0, 0, \n 1.0, 0, 220.0, 0, 1.1, 0.9], [421, 1, 117.923897, 23.584779, 0, 0, 0, \n 1.0, 0, 380.0, 0, 1.1, 0.9], [422, 1, 86.394999, 17.279, 0, 0, 0, 1.0, \n 0, 380.0, 0, 1.1, 0.9], [423, 1, 181.448589, 36.289718, 0, 0, 0, 1.0, 0,\n 380.0, 0, 1.1, 0.9], [424, 1, 13.081976, 2.616395, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [425, 1, 107.436029, 21.487206, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [426, 1, 8.901406, 1.780281, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [427, 1, 74.807559, 14.961512, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [428, 1, 33.541388, 6.708278, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [429, 1, 378.506604, 
75.701321, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [430, 1, 201.617449, 40.32349, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [431, 1, 134.824684, 26.964937, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [432, 1, 157.601785, 31.520357, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [433, 1, 80.561831, 16.112366, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [434, 1, 41.928301, 8.38566, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [435, 1, 167.686807, 33.537361, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [436, 1, 89.525173, 17.905035, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [437, 1, 20.388419, 4.077684, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [438, 1, 54.716933, 10.943387, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [439, 1, 101.875856, 20.375171, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [440, 1, 86.095509, 17.219102, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [441, 1, 66.003743, 13.200749, 0, 0, 0, 1.0, 0, \n 220.0, 0, 1.1, 0.9], [442, 1, 87.345295, 17.469059, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [443, 1, 189.372821, 37.874564, 0, 0, 0, 1.0, 0, \n 380.0, 0, 1.1, 0.9], [444, 1, 0, 0, 0, 0, 0, 0.999997, 0, 380.0, 0, 1.1,\n 0.9], [445, 1, 86.048822, 17.209764, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, \n 0.9], [446, 1, 39.900067, 7.980013, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9\n ], [447, 1, 75.857823, 15.171565, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9],\n [448, 1, 55.747797, 11.149559, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 449, 1, 281.099266, 56.219853, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 450, 1, 172.019337, 34.403867, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [\n 451, 1, 73.504711, 14.700942, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [\n 452, 1, 0, 0, 0, 0, 0, 0.999998, 0, 380.0, 0, 1.1, 0.9], [453, 1, \n 49.262417, 9.852483, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [454, 1, \n 34.368712, 6.873742, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [455, 1, \n 56.035293, 11.207059, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [456, 1, \n 56.035293, 11.207059, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [457, 1, \n 171.846191, 
34.369238, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [458, 1, \n 163.447396, 32.689479, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [459, 1, \n 198.921561, 39.784312, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [460, 1, \n 261.423915, 52.284783, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [461, 1, \n 271.93756, 54.387512, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [462, 1, \n 83.187109, 16.637422, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [463, 1, \n 42.625596, 8.525119, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [464, 1, \n 42.67712, 8.535424, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [465, 1, \n 68.935213, 13.787043, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [466, 1, \n 55.966672, 11.193334, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [467, 1, \n 51.647972, 10.329594, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [468, 1, \n 84.682258, 16.936452, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [469, 1, \n 52.475899, 10.49518, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [470, 1, \n 133.635974, 26.727195, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [471, 1, \n 131.576667, 26.315333, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [472, 1, \n 46.021552, 9.20431, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [473, 1, \n 84.506543, 16.901309, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [474, 1, \n 43.646746, 8.729349, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [475, 1, \n 42.832665, 8.566533, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [476, 1, \n 48.407958, 9.681592, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [477, 1, \n 78.119975, 15.623995, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [478, 1, \n 98.132926, 19.626585, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [479, 1, \n 177.838657, 35.567731, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [480, 1, \n 77.949906, 15.589981, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [481, 1, \n 67.695306, 13.539061, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [482, 1, \n 76.865108, 15.373022, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [483, 1, \n 65.368141, 13.073628, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [484, 1, \n 51.245443, 10.249089, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [485, 1, 
\n 76.547129, 15.309426, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [486, 1, \n 704.196192, 140.839238, 0, 0, 0, 0.99951, 0, 220.0, 0, 1.1, 0.9], [487,\n 1, 178.44006, 35.688012, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [488, 1,\n 514.1666, 102.83332, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [489, 1, \n 135.327186, 27.065437, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9], [490, 1, \n 42.108774, 8.421755, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [491, 1, \n 57.900104, 11.580021, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [492, 1, \n 90.290026, 18.058005, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [493, 1, \n 116.373036, 23.274607, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [494, 1, \n 159.050014, 31.810003, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [495, 1, \n 125.200788, 25.040158, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [496, 1, \n 8.868181, 1.773636, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [497, 1, \n 1108.963227, 221.792645, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [498, 1,\n 52.009376, 10.401875, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [499, 1, \n 72.596567, 14.519313, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [500, 1, \n 39.745767, 7.949153, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [501, 1, \n 67.242984, 13.448597, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [502, 1, \n 265.394132, 53.078826, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [503, 1, \n 81.27987, 16.255974, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [504, 1, \n 53.225877, 10.645175, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [505, 1, \n 377.519214, 75.503843, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [506, 1, \n 118.498636, 23.699727, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [507, 1, \n 112.71728, 22.543456, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [508, 1, \n 163.866255, 32.773251, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [509, 1, \n 215.943222, 43.188644, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [510, 1, \n 136.424234, 27.284847, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [511, 1, \n 119.003612, 23.800722, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [512, 1, \n 78.609233, 15.721847, 0, 0, 0, 1.0, 0, 
220.0, 0, 1.1, 0.9], [513, 1, \n 43.305299, 8.66106, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [514, 1, \n 107.782698, 21.55654, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [515, 1, \n 96.14857, 19.229714, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [516, 1, \n 107.567625, 21.513525, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [517, 1, \n 50.527088, 10.105418, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [518, 1, \n 284.571762, 56.914352, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [519, 1, \n 28.007071, 5.601414, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [520, 1, \n 113.075388, 22.615078, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [521, 1, \n 102.145474, 20.429095, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [522, 1, \n 87.457782, 17.491556, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [523, 1, \n 47.077529, 9.415506, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [524, 1, \n 136.642116, 27.328423, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [525, 1, \n 162.787043, 32.557409, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [526, 1, \n 49.35397, 9.870794, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [527, 1, \n 54.18719, 10.837438, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [528, 1, \n 118.26861, 23.653722, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [529, 1, \n 151.602845, 30.320569, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [530, 1, \n 64.243093, 12.848619, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [531, 1, \n 65.318252, 13.06365, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [532, 1, \n 62.694136, 12.538827, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [533, 1, \n 56.181511, 11.236302, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [534, 1, \n 154.980048, 30.99601, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [535, 1, \n 194.025074, 38.805015, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [536, 1, \n 152.933571, 30.586714, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [537, 1, \n 50.874697, 10.174939, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [538, 1, \n 38.030453, 7.606091, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [539, 1, \n 40.352648, 8.07053, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [540, 1, \n 36.335787, 7.267157, 0, 
0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [541, 1, \n 93.858474, 18.771695, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [542, 1, \n 128.932532, 25.786506, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [543, 1, \n 70.422315, 14.084463, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [544, 1, \n 131.162551, 26.23251, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [545, 1, \n 282.414482, 56.482896, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [546, 1, \n 141.550404, 28.310081, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [547, 1, \n 182.963197, 36.592639, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [548, 1, \n 59.225944, 11.845189, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [549, 1, \n 50.643246, 10.128649, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [550, 1, \n 41.78929, 8.357858, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [551, 1, \n 40.283868, 8.056774, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [552, 1, \n 200.04515, 40.00903, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [553, 1, \n 1.384003, 0.276801, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [554, 1, \n 202.666621, 40.533324, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [555, 1, \n 77.218226, 15.443645, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [556, 1, \n 119.459166, 23.891833, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [557, 1, \n 253.807751, 50.76155, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [558, 1, \n 149.659946, 29.931989, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [559, 1, \n 80.096562, 16.019312, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [560, 1, \n 125.129779, 25.025956, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [561, 1, \n 68.617518, 13.723504, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [562, 1, \n 187.457919, 37.491584, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [563, 1, \n 131.798194, 26.359639, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [564, 1, \n 260.235901, 52.04718, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [565, 1, \n 196.360882, 39.272176, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [566, 1, \n 0.315398, 0.06308, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [567, 1, \n 319.193421, 63.838684, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [568, 1, \n 
295.176685, 59.035337, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [569, 1, \n 207.688389, 41.537678, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [570, 1, \n 324.238974, 64.847795, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [571, 1, \n 238.729406, 47.745881, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [572, 1, \n 421.078814, 84.215763, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [573, 1, \n 122.570522, 24.514104, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [574, 1, \n 233.543651, 46.70873, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [575, 1, \n 4.388704, 0.877741, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [576, 1, \n 283.987513, 56.797503, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [577, 1, \n 313.066628, 62.613326, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [578, 1, \n 298.905533, 59.781107, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [579, 1, \n 109.048896, 21.809779, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [580, 1, \n 22.702358, 4.540472, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [581, 1, \n 0.13045, 0.02609, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [582, 1, \n 82.137246, 16.427449, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [583, 1, \n 94.208402, 18.84168, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [584, 1, \n 54.052269, 10.810454, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9], [585, 1, \n 93.84139, 18.768278, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9]])\n', (120, 112642), False, 'from numpy import array\n'), ((134977, 258381), 'numpy.array', 'array', (['[[586, 272.0, 0, 9999, -9999, 1.0, 100, 1, 272.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [589, 63.1, 0, 9999, -9999, 1.0, 100, 1, 63.1, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [590, 38.0, 0, 9999, -9999, 1.0, 100, 1, \n 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [593, 11.1, 0, 9999, -9999,\n 1.0, 100, 1, 11.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [594, 19.0, 0,\n 9999, -9999, 1.0, 100, 1, 19.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [595, 1115.083703, 0, 9999, -9999, 1.0, 100, 1, 4730.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [597, 95.0, 0, 9999, -9999, 1.0, 100, 1, 95.0, \n 0.0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0], [598, 12.0, 0, 9999, -9999, 1.0,\n 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [599, 9.3, 0, 9999,\n -9999, 1.0, 100, 1, 9.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [600, \n 16.9, 0, 9999, -9999, 1.0, 100, 1, 16.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [601, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [602, 24.6, 0, 9999, -9999, 1.0, 100, 1, 24.6, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [603, 837.82977, 0, 9999, -9999,\n 1.0, 100, 1, 3455.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [607, \n 1800.0, 0, 9999, -9999, 1.0, 100, 1, 1800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [608, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [609, 36.4, 0, 9999, -9999, 1.0, 100, 1, \n 36.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [610, 61.5, 0, 9999, -9999,\n 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [612, 30.0, 0,\n 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [613, 85.0, 0, 9999, -9999, 1.0, 100, 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [614, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [616, 29.0, 0, 9999, -9999, 1.0, 100, 1, \n 29.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [617, 137.0, 0, 9999, -\n 9999, 1.0, 100, 1, 137.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [618, \n 33.4, 0, 9999, -9999, 1.0, 100, 1, 33.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [619, 118.0, 0, 9999, -9999, 1.0, 100, 1, 118.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [621, 765.0, 0, 9999, -9999, 1.0, 100, 1, 765.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [623, 760.0, 0, 9999, -9999, 1.0,\n 100, 1, 760.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [624, 27.0, 0, \n 9999, -9999, 1.0, 100, 1, 27.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [628, 449.0, 0, 9999, -9999, 1.0, 100, 1, 449.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [629, 75.3, 0, 9999, -9999, 1.0, 100, 1, 75.3, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0], [631, 79.8, 0, 9999, -9999, 1.0, 100, 1,\n 79.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [632, 45.1, 0, 9999, -9999,\n 1.0, 100, 1, 45.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [637, 53.7, 0,\n 9999, -9999, 1.0, 100, 1, 53.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [638, 128.7, 0, 9999, -9999, 1.0, 100, 1, 128.7, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [639, 15.8, 0, 9999, -9999, 1.0, 100, 1, 15.8, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [640, 12.0, 0, 9999, -9999, 1.0, 100, 1,\n 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [641, 12.6, 0, 9999, -9999,\n 1.0, 100, 1, 12.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [642, 28.9, 0,\n 9999, -9999, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [643, 857.0, 0, 9999, -9999, 1.0, 100, 1, 857.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [646, 103.0, 0, 9999, -9999, 1.0, 100, 1, 103.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [647, 14.0, 0, 9999, -9999, 1.0, 100, 1,\n 14.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [650, 1324.5, 0, 9999, -\n 9999, 1.0, 100, 1, 1324.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [652,\n 46.9, 0, 9999, -9999, 1.0, 100, 1, 46.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [655, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [657, 38.0, 0, 9999, -9999, 1.0, 100, 1, 38.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [658, 95.0, 0, 9999, -9999, 1.0,\n 100, 1, 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [661, 32.7, 0, \n 9999, -9999, 1.0, 100, 1, 32.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [662, 9.2, 0, 9999, -9999, 1.0, 100, 1, 9.2, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [663, 15.0, 0, 9999, -9999, 1.0, 100, 1, 15.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [666, 28.9, 0, 9999, -9999, 1.0, 100, 1, \n 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [668, 766.0, 0, 9999, -\n 9999, 1.0, 100, 1, 766.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [670, \n 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [672, 33.1, 0, 9999, -9999, 
1.0, 100, 1, 33.1, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [675, 10.6, 0, 9999, -9999, 1.0, 100, 1, 10.6, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [676, 370.0, 0, 9999, -9999, 1.0,\n 100, 1, 370.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [678, 1017.0, 0, \n 9999, -9999, 1.0, 100, 1, 1017.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [679, 547.278885, 0, 9999, -9999, 1.0, 100, 1, 695.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [681, 40.1, 0, 9999, -9999, 1.0, 100, 1, 40.1, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [683, 27.5, 0, 9999, -9999, 1.0,\n 100, 1, 27.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [687, 1329.0, 0, \n 9999, -9999, 1.0, 100, 1, 1329.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [689, 310.0, 0, 9999, -9999, 1.0, 100, 1, 310.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [691, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [693, 194.0, 0, 9999, -9999, 1.0, 100, 1,\n 194.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [694, 16.4, 0, 9999, -\n 9999, 1.0, 100, 1, 16.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [695, \n 14.7, 0, 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [696, 721.0, 0, 9999, -9999, 1.0, 100, 1, 721.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [697, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [698, 24.0, 0, 9999, -9999, 1.0,\n 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [701, 47.2, 0, \n 9999, -9999, 1.0, 100, 1, 47.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [702, 73.4, 0, 9999, -9999, 1.0, 100, 1, 73.4, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [704, 508.0, 0, 9999, -9999, 1.0, 100, 1, 508.0, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [705, 17.0, 0, 9999, -9999, 1.0, 100, 1, \n 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [707, 34.0, 0, 9999, -9999,\n 1.0, 100, 1, 34.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [708, 7.8, 0,\n 9999, -9999, 1.0, 100, 1, 7.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 711, 102.08865, 0, 9999, -9999, 1.0, 100, 1, 
176.1, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [713, 13.4, 0, 9999, -9999, 1.0, 100, 1, 13.4, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [714, 15.0, 0, 9999, -9999, 1.0, 100,\n 1, 15.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [716, 0.1, 0, 9999, -\n 9999, 1.0, 100, 1, 0.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [717, \n 11.0, 0, 9999, -9999, 1.0, 100, 1, 11.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [719, 1347.602507, 0, 9999, -9999, 1.0, 100, 1, 1958.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [722, 20.7, 0, 9999, -9999, 1.0, 100, 1,\n 20.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [723, 19.7, 0, 9999, -9999,\n 1.0, 100, 1, 19.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [724, 12.1, 0,\n 9999, -9999, 1.0, 100, 1, 12.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [725, 800.0, 0, 9999, -9999, 1.0, 100, 1, 800.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [727, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [728, 510.0, 0, 9999, -9999, 1.0, 100, 1,\n 510.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [730, 633.2, 0, 9999, -\n 9999, 1.0, 100, 1, 633.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [731, \n 774.368631, 0, 9999, -9999, 1.0, 100, 1, 895.0, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [732, 14.6, 0, 9999, -9999, 1.0, 100, 1, 14.6, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [733, 396.6, 0, 9999, -9999, 1.0, 100, 1,\n 396.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [735, 84.8, 0, 9999, -\n 9999, 1.0, 100, 1, 84.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [737, \n 28.0, 0, 9999, -9999, 1.0, 100, 1, 28.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [738, 138.5, 0, 9999, -9999, 1.0, 100, 1, 138.5, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [739, 59.9, 0, 9999, -9999, 1.0, 100, 1, 59.9, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [741, 214.0, 0, 9999, -9999, 1.0,\n 100, 1, 214.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [742, 9.0, 0, \n 9999, -9999, 1.0, 100, 1, 9.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 743, 1410.0, 0, 9999, -9999, 1.0, 100, 1, 1410.0, 0.0, 
0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [745, 42.0, 0, 9999, -9999, 1.0, 100, 1, 42.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [746, 100.0, 0, 9999, -9999, 1.0, 100, 1,\n 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [747, 12.5, 0, 9999, -\n 9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [748, \n 110.0, 0, 9999, -9999, 1.0, 100, 1, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [749, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [750, 90.8, 0, 9999, -9999, 1.0, 100, 1, 90.8,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [753, 297.43075, 0, 9999, -9999,\n 1.0, 100, 1, 311.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [758, 18.5, \n 0, 9999, -9999, 1.0, 100, 1, 18.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [760, 342.451659, 0, 9999, -9999, 1.0, 100, 1, 794.0, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [761, 15.7, 0, 9999, -9999, 1.0, 100, 1, 15.7,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [762, 1105.0, 0, 9999, -9999, \n 1.0, 100, 1, 1105.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [763, 20.3,\n 0, 9999, -9999, 1.0, 100, 1, 20.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [765, 59.0, 0, 9999, -9999, 1.0, 100, 1, 59.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [767, 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [769, 43.3, 0, 9999, -9999, 1.0, 100, 1,\n 43.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [771, 690.0, 0, 9999, -\n 9999, 1.0, 100, 1, 690.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [772, \n 18.8, 0, 9999, -9999, 1.0, 100, 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [774, 33.5, 0, 9999, -9999, 1.0, 100, 1, 33.5, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [776, 56.0, 0, 9999, -9999, 1.0, 100, 1, 56.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [777, 79.0, 0, 9999, -9999, 1.0,\n 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [778, 14.7, 0, \n 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [781, 981.561684, 0, 9999, -9999, 1.0, 100, 1, 1310.0, 0.0, 0, 0, 
0, 0,\n 0, 0, 0, 0, 0, 0, 0], [784, 967.134125, 0, 9999, -9999, 1.0, 100, 1, \n 1275.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [785, 3.0, 0, 9999, -\n 9999, 1.0, 100, 1, 3.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [787, \n 778.0, 0, 9999, -9999, 1.0, 100, 1, 778.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [788, 875.0, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [789, 77.4, 0, 9999, -9999, 1.0, 100, 1, 77.4,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [790, 75.8, 0, 9999, -9999, 1.0,\n 100, 1, 75.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [791, 10.0, 0, \n 9999, -9999, 1.0, 100, 1, 10.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [792, 62.7, 0, 9999, -9999, 1.0, 100, 1, 62.7, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [795, 13.6, 0, 9999, -9999, 1.0, 100, 1, 13.6, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [798, 116.273516, 0, 9999, -9999, 1.0, 100,\n 1, 319.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [800, 36.5, 0, 9999, -\n 9999, 1.0, 100, 1, 36.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [801, \n 50.0, 0, 9999, -9999, 1.0, 100, 1, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [802, 500.0, 0, 9999, -9999, 1.0, 100, 1, 500.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [805, 661.169352, 0, 9999, -9999, 1.0, 100, 1, \n 1410.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [806, 35.8, 0, 9999, -\n 9999, 1.0, 100, 1, 35.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [808, \n 217.5, 0, 9999, -9999, 1.0, 100, 1, 217.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [809, 12.5, 0, 9999, -9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [810, 97.9, 0, 9999, -9999, 1.0, 100, 1, 97.9,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [811, 25.2, 0, 9999, -9999, 1.0,\n 100, 1, 25.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [814, 89.0, 0, \n 9999, -9999, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [815, 13.4, 0, 9999, -9999, 1.0, 100, 1, 13.4, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [816, 80.1, 0, 9999, -9999, 1.0, 100, 1, 80.1, 0.0, 0, 0, \n 0, 0, 0, 
0, 0, 0, 0, 0, 0], [817, 54.0, 0, 9999, -9999, 1.0, 100, 1, \n 54.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [818, 757.0, 0, 9999, -\n 9999, 1.0, 100, 1, 757.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [821, \n 82.5, 0, 9999, -9999, 1.0, 100, 1, 82.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [822, 134.0, 0, 9999, -9999, 1.0, 100, 1, 134.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [825, 42.7, 0, 9999, -9999, 1.0, 100, 1, 42.7, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [826, 58.0, 0, 9999, -9999, 1.0,\n 100, 1, 58.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [829, 211.0, 0, \n 9999, -9999, 1.0, 100, 1, 211.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [830, 89.0, 0, 9999, -9999, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [833, 18.6, 0, 9999, -9999, 1.0, 100, 1, 18.6, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [834, 23.3, 0, 9999, -9999, 1.0, 100, 1, \n 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [835, 63.7, 0, 9999, -9999,\n 1.0, 100, 1, 63.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [836, 25.5, 0,\n 9999, -9999, 1.0, 100, 1, 25.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [837, 472.0, 0, 9999, -9999, 1.0, 100, 1, 472.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [839, 73.3, 0, 9999, -9999, 1.0, 100, 1, 73.3, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [840, 1158.147571, 0, 9999, -9999, 1.0, \n 100, 1, 1391.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [841, 23.3, 0, \n 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [842, 540.5, 0, 9999, -9999, 1.0, 100, 1, 540.5, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [843, 333.0, 0, 9999, -9999, 1.0, 100, 1, 333.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [844, 40.0, 0, 9999, -9999, 1.0, 100, 1,\n 40.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [845, 318.0, 0, 9999, -\n 9999, 1.0, 100, 1, 318.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [847, \n 124.467036, 0, 9999, -9999, 1.0, 100, 1, 280.0, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [848, 42.0, 0, 9999, -9999, 1.0, 100, 1, 42.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0], [849, 779.0, 0, 9999, -9999, 1.0, 100, 1,\n 779.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [850, 16.0, 0, 9999, -\n 9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [851, \n 79.5, 0, 9999, -9999, 1.0, 100, 1, 79.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [852, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [853, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [854, 81.8, 0, 9999, -9999, 1.0,\n 100, 1, 81.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [855, 688.0, 0, \n 9999, -9999, 1.0, 100, 1, 688.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [856, 36.0, 0, 9999, -9999, 1.0, 100, 1, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [857, 1402.0, 0, 9999, -9999, 1.0, 100, 1, 1402.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [858, 56.8, 0, 9999, -9999, 1.0, 100, 1,\n 56.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [859, 85.0, 0, 9999, -9999,\n 1.0, 100, 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [860, 25.0, 0,\n 9999, -9999, 1.0, 100, 1, 25.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [862, 725.0, 0, 9999, -9999, 1.0, 100, 1, 725.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [863, 0.6, 0, 9999, -9999, 1.0, 100, 1, 0.6, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [864, 875.0, 0, 9999, -9999, 1.0, 100, 1, \n 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [865, 11.0, 0, 9999, -\n 9999, 1.0, 100, 1, 11.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [867, \n 769.0, 0, 9999, -9999, 1.0, 100, 1, 769.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [869, 1360.0, 0, 9999, -9999, 1.0, 100, 1, 1360.0, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [870, 58.4, 0, 9999, -9999, 1.0, 100, 1, \n 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [872, 22.5, 0, 9999, -9999,\n 1.0, 100, 1, 22.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [873, 122.0, \n 0, 9999, -9999, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [874, 20.7, 0, 9999, -9999, 1.0, 100, 1, 20.7, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [875, 24.4, 
0, 9999, -9999, 1.0, 100, 1, 24.4, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [877, 24.8, 0, 9999, -9999, 1.0, 100,\n 1, 24.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [881, 1001.3, 0, 9999, \n -9999, 1.0, 100, 1, 1001.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [882,\n 17.4, 0, 9999, -9999, 1.0, 100, 1, 17.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [883, 18.0, 0, 9999, -9999, 1.0, 100, 1, 18.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [886, 2572.0, 0, 9999, -9999, 1.0, 100, 1, 2572.0,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [889, 9.5, 0, 9999, -9999, 1.0, \n 100, 1, 9.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [890, 48.0, 0, 9999,\n -9999, 1.0, 100, 1, 48.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [893, \n 60.0, 0, 9999, -9999, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [894, 158.0, 0, 9999, -9999, 1.0, 100, 1, 158.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [895, 19.0, 0, 9999, -9999, 1.0, 100, 1, 19.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [896, 24.0, 0, 9999, -9999, 1.0,\n 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [898, 84.6, 0, \n 9999, -9999, 1.0, 100, 1, 84.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [900, 112.6, 0, 9999, -9999, 1.0, 100, 1, 112.6, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [902, 19.5, 0, 9999, -9999, 1.0, 100, 1, 19.5, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [903, 20.1, 0, 9999, -9999, 1.0, 100, 1,\n 20.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [905, 121.080178, 0, 9999,\n -9999, 1.0, 100, 1, 137.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [907,\n 67.3, 0, 9999, -9999, 1.0, 100, 1, 67.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [909, 36.8, 0, 9999, -9999, 1.0, 100, 1, 36.8, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [911, 288.5, 0, 9999, -9999, 1.0, 100, 1, 288.5, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [913, 33.01098, 0, 9999, -9999, \n 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [914, 112.1, \n 0, 9999, -9999, 1.0, 100, 1, 112.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [915, 12.0, 0, 9999, -9999, 
1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [916, 196.0, 0, 9999, -9999, 1.0, 100, 1, 196.0, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [917, 17.0, 0, 9999, -9999, 1.0, 100,\n 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [918, 38.5, 0, 9999, -\n 9999, 1.0, 100, 1, 38.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [919, \n 15.6, 0, 9999, -9999, 1.0, 100, 1, 15.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [920, 12.8, 0, 9999, -9999, 1.0, 100, 1, 12.8, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [921, 124.0, 0, 9999, -9999, 1.0, 100, 1, 124.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [922, 164.0, 0, 9999, -9999, 1.0,\n 100, 1, 164.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [923, 146.0, 0, \n 9999, -9999, 1.0, 100, 1, 146.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [925, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [928, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [931, 217.1, 0, 9999, -9999, 1.0, 100, 1, \n 217.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [934, 296.0, 0, 9999, -\n 9999, 1.0, 100, 1, 296.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [935, \n 23.1, 0, 9999, -9999, 1.0, 100, 1, 23.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [936, 104.4, 0, 9999, -9999, 1.0, 100, 1, 104.4, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [937, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [939, 0.1, 0, 9999, -9999, 1.0, \n 100, 1, 0.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [940, 29.6, 0, 9999,\n -9999, 1.0, 100, 1, 29.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [942, \n 51.9, 0, 9999, -9999, 1.0, 100, 1, 51.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [943, 66.3, 0, 9999, -9999, 1.0, 100, 1, 66.3, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [944, 25.4, 0, 9999, -9999, 1.0, 100, 1, 25.4, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [945, 35.0, 0, 9999, -9999, 1.0,\n 100, 1, 35.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [946, 80.0, 0, \n 9999, -9999, 1.0, 100, 1, 80.0, 0.0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [948, 79.0, 0, 9999, -9999, 1.0, 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [950, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [951, 393.739186, 0, 9999, -9999, 1.0, 100,\n 1, 444.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [952, 31.7, 0, 9999, -\n 9999, 1.0, 100, 1, 31.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [956, \n 65.0, 0, 9999, -9999, 1.0, 100, 1, 65.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [957, 6.0, 0, 9999, -9999, 1.0, 100, 1, 6.0, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [958, 66.7, 0, 9999, -9999, 1.0, 100, 1, 66.7, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [959, 45.5, 0, 9999, -9999, 1.0, 100,\n 1, 45.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [960, 26.5, 0, 9999, -\n 9999, 1.0, 100, 1, 26.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [963, \n 559.823432, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [965, 352.0, 0, 9999, -9999, 1.0, 100, 1, 352.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [966, 66.0, 0, 9999, -9999, 1.0, 100, 1,\n 66.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [967, 37.5, 0, 9999, -9999,\n 1.0, 100, 1, 37.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [968, 54.0, 0,\n 9999, -9999, 0.99951, 100, 1, 54.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [969, 56.9, 0, 9999, -9999, 0.99951, 100, 1, 56.9, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [971, 20.0, 0, 9999, -9999, 1.0, 100, 1, 20.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [973, 1347.0, 0, 9999, -9999, \n 1.0, 100, 1, 1347.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [976, 26.9,\n 0, 9999, -9999, 1.0, 100, 1, 26.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [977, 324.0, 0, 9999, -9999, 1.0, 100, 1, 324.0, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [978, 4.6, 0, 9999, -9999, 1.0, 100, 1, 4.6, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [980, 309.665775, 0, 9999, -9999, 1.0, \n 100, 1, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [981, 119.0, 0, \n 9999, -9999, 1.0, 100, 1, 119.0, 0.0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0],\n [982, 9.9, 0, 9999, -9999, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [983, 44.0, 0, 9999, -9999, 1.0, 100, 1, 44.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [984, 465.0, 0, 9999, -9999, 1.0, 100, 1, \n 465.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [985, 22.0, 0, 9999, -\n 9999, 1.0, 100, 1, 22.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [986, \n 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [987, 164.5, 0, 9999, -9999, 1.0, 100, 1, 164.5, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [988, 5.1, 0, 9999, -9999, 1.0, 100, 1, 5.1, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [990, 300.0, 0, 9999, -9999, 1.0, 100,\n 1, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [993, 392.0, 0, 9999, \n -9999, 1.0, 100, 1, 392.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [994,\n 33.0, 0, 9999, -9999, 1.0, 100, 1, 33.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [995, 4.2, 0, 9999, -9999, 1.0, 100, 1, 4.2, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [996, 11.5, 0, 9999, -9999, 1.0, 100, 1, 11.5, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [997, 18.8, 0, 9999, -9999, 1.0, 100,\n 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [998, 423.0, 0, 9999, -\n 9999, 1.0, 100, 1, 423.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [999, \n 15.6, 0, 9999, -9999, 1.0, 100, 1, 15.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1000, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1002, 9.9, 0, 9999, -9999, 1.0, 100, 1, 9.9, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1003, 900.0, 0, 9999, -9999, 1.0, \n 100, 1, 900.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1006, 122.0, 0, \n 9999, -9999, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1007, 23.3, 0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1008, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1010, 750.0, 0, 9999, -9999, 1.0, 100, \n 1, 750.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0], [1011, 18.7, 0, 9999, \n -9999, 1.0, 100, 1, 18.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1012,\n 2835.0, 0, 9999, -9999, 1.0, 100, 1, 2835.0, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [1014, 750.0, 0, 9999, -9999, 1.0, 100, 1, 750.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1018, 175.9, 0, 9999, -9999, 1.0, 100, \n 1, 175.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1019, 120.0, 0, 9999,\n -9999, 1.0, 100, 1, 120.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1023,\n 0.2, 0, 9999, -9999, 1.0, 100, 1, 0.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1025, 113.6, 0, 9999, -9999, 1.0, 100, 1, 113.6, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [1026, 655.6, 0, 9999, -9999, 1.0, 100, 1, \n 655.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1028, 193.856792, 0, \n 9999, -9999, 1.0, 100, 1, 400.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1029, 47.945063, 0, 9999, -9999, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1030, 512.154762, 0, 9999, -9999, 1.0, 100, 1, \n 1018.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1031, 465.297424, 0, \n 9999, -9999, 1.0, 100, 1, 1447.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1032, 38.015413, 0, 9999, -9999, 1.0, 100, 1, 153.510391, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1033, 2.188896, 0, 9999, -9999, 1.0, 100, 1, \n 50.164506, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1034, 28.459011, 0, \n 9999, -9999, 1.0, 100, 1, 84.262779, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1035, 13.483148, 0, 9999, -9999, 1.0, 100, 1, 49.886469, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1036, 10.668878, 0, 9999, -9999, 1.0, 100,\n 1, 67.223077, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1037, 6.453908, 0,\n 9999, -9999, 1.0, 100, 1, 94.684044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1038, 9.267765, 0, 9999, -9999, 1.0, 100, 1, 85.798525, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1039, 0.034961, 0, 9999, -9999, 1.0, 100, \n 1, 132.724114, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1041, 45.839958,\n 0, 9999, -9999, 1.0, 100, 1, 204.187624, 0.0, 0, 0, 
0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1042, 0.015112, 0, 9999, -9999, 1.0, 100, 1, 52.70053, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1044, 2.20729, 0, 9999, -9999, 1.0, 100,\n 1, 36.163532, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1046, 4.510177, 0,\n 9999, -9999, 1.0, 100, 1, 106.787063, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1047, 1.291195, 0, 9999, -9999, 1.0, 100, 1, 13.029581, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1048, 3.439348, 0, 9999, -9999, 1.0, 100, \n 1, 71.656883, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1049, 90.190989, \n 0, 9999, -9999, 1.0, 100, 1, 293.755375, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1050, 2.855489, 0, 9999, -9999, 1.0, 100, 1, 52.781606, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1051, 25.039476, 0, 9999, -9999, 1.0, \n 100, 1, 304.42978, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1052, \n 4.408997, 0, 9999, -9999, 1.0, 100, 1, 20.66869, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1053, 3.393402, 0, 9999, -9999, 1.0, 100, 1, 16.368087,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1054, 64.159181, 0, 9999, -9999,\n 1.0, 100, 1, 273.855776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1055, \n 0.001443, 0, 9999, -9999, 1.0, 100, 1, 2.856069, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1056, 46.702863, 0, 9999, -9999, 1.0, 100, 1, \n 603.943953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1057, 1.149455, 0, \n 9999, -9999, 1.0, 100, 1, 426.979979, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1058, 7.224099, 0, 9999, -9999, 1.0, 100, 1, 1055.735174, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1059, 17.207922, 0, 9999, -9999, 1.0, \n 100, 1, 414.871332, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1060, \n 0.105899, 0, 9999, -9999, 1.0, 100, 1, 10.351632, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1061, 5.978684, 0, 9999, -9999, 1.0, 100, 1, \n 161.862597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1062, 0.003268, 0, \n 9999, -9999, 1.0, 100, 1, 2.878561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1063, 0.002721, 0, 9999, -9999, 1.0, 100, 1, 8.670916, 0.0, 0, 0, \n 
0, 0, 0, 0, 0, 0, 0, 0, 0], [1064, 43.352939, 0, 9999, -9999, 1.0, 100,\n 1, 209.786524, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1065, 64.557076,\n 0, 9999, -9999, 1.0, 100, 1, 339.421643, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1066, 6.593551, 0, 9999, -9999, 1.0, 100, 1, 134.399019, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1067, 8.448185, 0, 9999, -9999, 1.0, \n 100, 1, 32.653526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1068, \n 0.145539, 0, 9999, -9999, 1.0, 100, 1, 5.009022, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1069, 0.157038, 0, 9999, -9999, 1.0, 100, 1, 3.190759,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1070, 0.029105, 0, 9999, -9999,\n 1.0, 100, 1, 0.788599, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1071, \n 0.624346, 0, 9999, -9999, 1.0, 100, 1, 4.328696, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1072, 40.600927, 0, 9999, -9999, 1.0, 100, 1, \n 112.606433, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1073, 17.346842, 0,\n 9999, -9999, 1.0, 100, 1, 77.81765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1074, 48.373759, 0, 9999, -9999, 1.0, 100, 1, 153.592986, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1075, 2.832969, 0, 9999, -9999, 1.0, \n 100, 1, 15.783448, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1077, \n 0.795164, 0, 9999, -9999, 1.0, 100, 1, 26.120041, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1078, 1.572306, 0, 9999, -9999, 1.0, 100, 1, 34.413246,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1079, 23.483715, 0, 9999, -9999,\n 1.0, 100, 1, 72.327992, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1080, \n 12.71579, 0, 9999, -9999, 1.0, 100, 1, 132.149983, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1081, 41.337281, 0, 9999, -9999, 1.0, 100, 1, \n 405.642115, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1082, 17.035693, 0,\n 9999, -9999, 1.0, 100, 1, 510.054159, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1083, 30.072335, 0, 9999, -9999, 1.0, 100, 1, 633.681488, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1084, 27.557337, 0, 9999, -9999, 1.0, \n 100, 1, 602.719371, 0.0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1085, \n 6.690069, 0, 9999, -9999, 1.0, 100, 1, 113.714399, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1086, 9.340055, 0, 9999, -9999, 1.0, 100, 1, \n 225.59917, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1087, 2.219279, 0, \n 9999, -9999, 1.0, 100, 1, 116.66597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1088, 0.225948, 0, 9999, -9999, 1.0, 100, 1, 36.782492, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1089, 0.685877, 0, 9999, -9999, 1.0, 100, \n 1, 384.449592, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1090, 18.3652, 0,\n 9999, -9999, 1.0, 100, 1, 89.140897, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1091, 0.069841, 0, 9999, -9999, 1.0, 100, 1, 45.7939, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1092, 0.000886, 0, 9999, -9999, 1.0, 100, 1, \n 54.002032, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1093, 19.472331, 0, \n 9999, -9999, 1.0, 100, 1, 155.605298, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1094, 0.324922, 0, 9999, -9999, 1.0, 100, 1, 3.759038, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1095, 0.017632, 0, 9999, -9999, 1.0, 100, \n 1, 0.204951, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1096, 5.653431, 0,\n 9999, -9999, 1.0, 100, 1, 84.50612, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1097, 0.849404, 0, 9999, -9999, 1.0, 100, 1, 4.601122, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1098, 22.024295, 0, 9999, -9999, 1.0, 100,\n 1, 71.025499, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1099, 111.287059,\n 0, 9999, -9999, 1.0, 100, 1, 290.937198, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1100, 0.000469, 0, 9999, -9999, 1.0, 100, 1, 0.026696, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1101, 0.059355, 0, 9999, -9999, 1.0, \n 100, 1, 83.930665, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1102, \n 0.348019, 0, 9999, -9999, 1.0, 100, 1, 350.979988, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1103, 4.374488, 0, 9999, -9999, 1.0, 100, 1, \n 245.381701, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1104, 0.020088, 0, \n 9999, -9999, 1.0, 100, 1, 0.206918, 0.0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, \n 0], [1105, 0.140469, 0, 9999, -9999, 1.0, 100, 1, 2.178593, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1106, 0.24489, 0, 9999, -9999, 1.0, 100, 1,\n 2.289793, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1107, 4.365112, 0, \n 9999, -9999, 1.0, 100, 1, 76.221615, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1108, 15.005714, 0, 9999, -9999, 1.0, 100, 1, 320.422751, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1109, 0.032298, 0, 9999, -9999, 1.0, \n 100, 1, 0.77821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1110, 0.109011,\n 0, 9999, -9999, 1.0, 100, 1, 1.654557, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1111, 3.982839, 0, 9999, -9999, 1.0, 100, 1, 89.637993, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1112, 13.347732, 0, 9999, -9999, 1.0, \n 100, 1, 69.53429, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1113, \n 0.690017, 0, 9999, -9999, 1.0, 100, 1, 3.536361, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1114, 3.236521, 0, 9999, -9999, 1.0, 100, 1, 13.446889,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1115, 12.945936, 0, 9999, -9999,\n 1.0, 100, 1, 50.575278, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1116, \n 7.186063, 0, 9999, -9999, 1.0, 100, 1, 32.601142, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1117, 21.735816, 0, 9999, -9999, 1.0, 100, 1, \n 90.792541, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1118, 1.167272, 0, \n 9999, -9999, 1.0, 100, 1, 8.725012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1119, 9.731188, 0, 9999, -9999, 1.0, 100, 1, 43.254023, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1120, 0.454554, 0, 9999, -9999, 1.0, 100, \n 1, 2.416001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1121, 0.129799, 0,\n 9999, -9999, 1.0, 100, 1, 0.540589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1122, 0.277958, 0, 9999, -9999, 1.0, 100, 1, 1.462883, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1123, 0.327821, 0, 9999, -9999, 1.0, 100, \n 1, 1.464336, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1124, 0.319573, 0,\n 9999, -9999, 1.0, 100, 1, 1.288283, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, \n 0], [1125, 1.853524, 0, 9999, -9999, 1.0, 100, 1, 25.818899, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1126, 2.010115, 0, 9999, -9999, 1.0, 100, \n 1, 29.154893, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1127, 6.767523, 0,\n 9999, -9999, 1.0, 100, 1, 105.296621, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1128, 0.159146, 0, 9999, -9999, 1.0, 100, 1, 3.06139, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1129, 0.240204, 0, 9999, -9999, 1.0, 100, 1, \n 4.738747, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1130, 0.112767, 0, \n 9999, -9999, 1.0, 100, 1, 1.025754, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1131, 0.151265, 0, 9999, -9999, 1.0, 100, 1, 2.897078, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1132, 0.043874, 0, 9999, -9999, 1.0, 100, \n 1, 0.359497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1133, 0.17278, 0, \n 9999, -9999, 1.0, 100, 1, 0.719597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1134, 0.12208, 0, 9999, -9999, 1.0, 100, 1, 0.508453, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1135, 0.70461, 0, 9999, -9999, 1.0, 100, 1, \n 8.117819, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1136, 0.085367, 0, \n 9999, -9999, 1.0, 100, 1, 0.4027, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1137, 0.542436, 0, 9999, -9999, 1.0, 100, 1, 3.669012, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1138, 0.28633, 0, 9999, -9999, 1.0, 100, 1, \n 1.254278, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1139, 3.827873, 0, \n 9999, -9999, 1.0, 100, 1, 19.822769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1140, 8.531552, 0, 9999, -9999, 1.0, 100, 1, 28.389457, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1141, 14.71591, 0, 9999, -9999, 1.0, 100, \n 1, 119.46456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1142, 0.282411, 0,\n 9999, -9999, 1.0, 100, 1, 1.215733, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1143, 2.17636, 0, 9999, -9999, 1.0, 100, 1, 25.239356, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1144, 14.468173, 0, 9999, -9999, 1.0, 100,\n 1, 52.527382, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1145, 41.721366, \n 
0, 9999, -9999, 1.0, 100, 1, 175.889627, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1146, 0.206808, 0, 9999, -9999, 1.0, 100, 1, 0.861317, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1147, 10.482934, 0, 9999, -9999, 1.0, \n 100, 1, 45.703707, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1148, \n 1.12205, 0, 9999, -9999, 1.0, 100, 1, 17.645529, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1149, 0.384525, 0, 9999, -9999, 1.0, 100, 1, 8.556784,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1150, 0.21385, 0, 9999, -9999, \n 1.0, 100, 1, 3.62256, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1151, \n 0.761655, 0, 9999, -9999, 1.0, 100, 1, 13.036113, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1152, 0.007549, 0, 9999, -9999, 1.0, 100, 1, 0.116518,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1153, 0.005643, 0, 9999, -9999,\n 1.0, 100, 1, 0.068788, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1154, \n 0.013178, 0, 9999, -9999, 1.0, 100, 1, 0.160625, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1155, 0.036293, 0, 9999, -9999, 1.0, 100, 1, 0.609451,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1156, 2.725518, 0, 9999, -9999,\n 1.0, 100, 1, 16.022334, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1157, \n 0.254864, 0, 9999, -9999, 1.0, 100, 1, 4.354147, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1158, 0.090066, 0, 9999, -9999, 1.0, 100, 1, 1.04304, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1159, 1.846823, 0, 9999, -9999,\n 1.0, 100, 1, 13.498087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1160, \n 4.449778, 0, 9999, -9999, 1.0, 100, 1, 238.377761, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1161, 0.968938, 0, 9999, -9999, 1.0, 100, 1, \n 25.263391, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1162, 0.004399, 0, \n 9999, -9999, 1.0, 100, 1, 502.409178, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1164, 0.681555, 0, 9999, -9999, 1.0, 100, 1, 285.625412, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1166, 19.037928, 0, 9999, -9999, 1.0, 100,\n 1, 83.277163, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1167, 0.436847, 0,\n 9999, -9999, 1.0, 
100, 1, 5.05378, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1168, 0.092048, 0, 9999, -9999, 1.0, 100, 1, 1.345774, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1169, 0.15256, 0, 9999, -9999, 1.0, 100, 1, \n 2.721845, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1170, 0.022911, 0, \n 9999, -9999, 1.0, 100, 1, 0.26599, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1171, 1.218434, 0, 9999, -9999, 1.0, 100, 1, 9.029885, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1172, 0.184488, 0, 9999, -9999, 1.0, 100, 1, \n 3.584043, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1173, 0.074867, 0, \n 9999, -9999, 1.0, 100, 1, 254.253327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1174, 0.108899, 0, 9999, -9999, 1.0, 100, 1, 1.260082, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1175, 0.04558, 0, 9999, -9999, 1.0, 100, 1,\n 0.855454, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1176, 0.013921, 0, \n 9999, -9999, 1.0, 100, 1, 0.23222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1177, 1.759222, 0, 9999, -9999, 1.0, 100, 1, 27.87401, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1178, 0.209645, 0, 9999, -9999, 1.0, 100, 1, \n 3.167999, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1179, 0.051465, 0, \n 9999, -9999, 1.0, 100, 1, 1.306293, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1180, 0.059365, 0, 9999, -9999, 1.0, 100, 1, 0.688545, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1181, 23.821689, 0, 9999, -9999, 1.0, 100,\n 1, 85.739557, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1182, 24.612874, \n 0, 9999, -9999, 1.0, 100, 1, 99.319579, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1183, 3.24107, 0, 9999, -9999, 1.0, 100, 1, 38.222575, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1184, 0.358312, 0, 9999, -9999, 1.0, \n 100, 1, 4.219005, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1185, \n 2.182901, 0, 9999, -9999, 1.0, 100, 1, 11.343971, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1186, 2.184012, 0, 9999, -9999, 1.0, 100, 1, 38.916368,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1187, 0.459888, 0, 9999, -9999,\n 1.0, 100, 1, 9.814574, 0.0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1188, \n 53.562608, 0, 9999, -9999, 1.0, 100, 1, 179.712741, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1189, 1.204921, 0, 9999, -9999, 1.0, 100, 1, \n 20.261805, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1190, 32.667547, 0, \n 9999, -9999, 1.0, 100, 1, 220.533673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1191, 17.953145, 0, 9999, -9999, 1.0, 100, 1, 73.079413, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1192, 2.590747, 0, 9999, -9999, 1.0, 100, \n 1, 21.454569, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1193, 0.222396, 0,\n 9999, -9999, 1.0, 100, 1, 2.399953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1194, 0.77085, 0, 9999, -9999, 1.0, 100, 1, 8.986036, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1195, 0.015425, 0, 9999, -9999, 1.0, 100, 1, \n 0.202359, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1196, 0.029284, 0, \n 9999, -9999, 1.0, 100, 1, 160.697956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1197, 0.11597, 0, 9999, -9999, 1.0, 100, 1, 90.592266, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1198, 4.134805, 0, 9999, -9999, 1.0, 100, \n 1, 39.819157, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1199, 61.376881, \n 0, 9999, -9999, 1.0, 100, 1, 201.421956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1200, 21.487973, 0, 9999, -9999, 1.0, 100, 1, 56.012408, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1201, 0.691822, 0, 9999, -9999, 1.0, \n 100, 1, 25.166667, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1202, \n 3.586635, 0, 9999, -9999, 1.0, 100, 1, 49.89238, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1203, 12.725115, 0, 9999, -9999, 1.0, 100, 1, \n 182.623256, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1204, 2.799582, 0, \n 9999, -9999, 1.0, 100, 1, 47.541821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1205, 0.000146, 0, 9999, -9999, 1.0, 100, 1, 0.548843, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1206, 0.411467, 0, 9999, -9999, 1.0, 100, \n 1, 3.806894, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1207, 0.331325, 0,\n 9999, -9999, 1.0, 100, 1, 3.575453, 0.0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, \n 0], [1208, 0.105374, 0, 9999, -9999, 1.0, 100, 1, 2.242031, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1209, 0.00265, 0, 9999, -9999, 1.0, 100, 1,\n 1.268261, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1210, 0.69402, 0, \n 9999, -9999, 1.0, 100, 1, 9.02599, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1211, 5.750967, 0, 9999, -9999, 1.0, 100, 1, 18.005229, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1212, 26.199295, 0, 9999, -9999, 1.0, 100,\n 1, 91.171888, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1213, 21.1062, 0,\n 9999, -9999, 1.0, 100, 1, 57.342704, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1214, 0.541037, 0, 9999, -9999, 1.0, 100, 1, 4.505907, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1215, 0.15338, 0, 9999, -9999, 1.0, 100, 1,\n 2.252965, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1216, 3.319201, 0, \n 9999, -9999, 1.0, 100, 1, 67.754469, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1217, 2.664727, 0, 9999, -9999, 1.0, 100, 1, 35.871617, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1218, 0.10866, 0, 9999, -9999, 1.0, 100, 1,\n 0.980482, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1219, 0.83454, 0, \n 9999, -9999, 1.0, 100, 1, 12.33953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1220, 1.729113, 0, 9999, -9999, 1.0, 100, 1, 30.597849, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1221, 43.354712, 0, 9999, -9999, 1.0, 100,\n 1, 593.230436, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1222, 54.25302, \n 0, 9999, -9999, 1.0, 100, 1, 211.057769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1223, 0.828555, 0, 9999, -9999, 1.0, 100, 1, 3.806101, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1224, 15.875443, 0, 9999, -9999, 1.0, \n 100, 1, 160.523778, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1225, \n 1.071926, 0, 9999, -9999, 1.0, 100, 1, 34.931481, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1226, 0.118196, 0, 9999, -9999, 1.0, 100, 1, 3.982858,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1227, 3.258837, 0, 9999, -9999,\n 1.0, 100, 1, 17.482807, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0], [1228, \n 0.156042, 0, 9999, -9999, 1.0, 100, 1, 3.021367, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1229, 7.933585, 0, 9999, -9999, 1.0, 100, 1, 51.244222,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1230, 0.045286, 0, 9999, -9999,\n 1.0, 100, 1, 1.681276, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1231, \n 1.223909, 0, 9999, -9999, 1.0, 100, 1, 33.55478, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1232, 2.573754, 0, 9999, -9999, 1.0, 100, 1, 75.075088,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1233, 173.598538, 0, 9999, -\n 9999, 1.0, 100, 1, 575.36828, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 1234, 33.990216, 0, 9999, -9999, 1.0, 100, 1, 101.1394, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1235, 0.001519, 0, 9999, -9999, 1.0, 100, 1, \n 9.03734, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1236, 0.010199, 0, \n 9999, -9999, 1.0, 100, 1, 82.225035, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1237, 3.462839, 0, 9999, -9999, 1.0, 100, 1, 14.605409, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1238, 12.106922, 0, 9999, -9999, 1.0, 100,\n 1, 188.691049, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1239, 0.483742, \n 0, 9999, -9999, 1.0, 100, 1, 2.267706, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1240, 63.552975, 0, 9999, -9999, 1.0, 100, 1, 339.51051, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1241, 9.744883, 0, 9999, -9999, 1.0, \n 100, 1, 385.361595, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1242, \n 1.158061, 0, 9999, -9999, 1.0, 100, 1, 27.074038, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1243, 4.669682, 0, 9999, -9999, 1.0, 100, 1, 83.079842,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1244, 115.794463, 0, 9999, -\n 9999, 1.0, 100, 1, 323.472536, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 1245, 0.241619, 0, 9999, -9999, 1.0, 100, 1, 8.080896, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1246, 18.525152, 0, 9999, -9999, 1.0, 100, 1, \n 57.127825, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1247, 5.100639, 0, \n 9999, -9999, 1.0, 100, 1, 21.833396, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], 
[1248, 13.259573, 0, 9999, -9999, 1.0, 100, 1, 91.958275, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1249, 1.47167, 0, 9999, -9999, 1.0, 100, 1,\n 76.135177, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1250, 0.772338, 0, \n 9999, -9999, 1.0, 100, 1, 30.830519, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1251, 2.007729, 0, 9999, -9999, 1.0, 100, 1, 23.404345, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1252, 1.728628, 0, 9999, -9999, 1.0, 100, \n 1, 14.887727, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1253, 17.018216, \n 0, 9999, -9999, 1.0, 100, 1, 64.502694, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1254, 26.927476, 0, 9999, -9999, 1.0, 100, 1, 82.278695, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1255, 0.726767, 0, 9999, -9999, 1.0, \n 100, 1, 3.818419, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1256, \n 3.218337, 0, 9999, -9999, 1.0, 100, 1, 15.091842, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1257, 19.556961, 0, 9999, -9999, 1.0, 100, 1, 88.95288,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1258, 75.724888, 0, 9999, -9999,\n 1.0, 100, 1, 235.487329, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1259, \n 26.547394, 0, 9999, -9999, 1.0, 100, 1, 109.288719, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1260, 0.629507, 0, 9999, -9999, 1.0, 100, 1, \n 20.168717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1261, 10.592114, 0, \n 9999, -9999, 1.0, 100, 1, 201.699555, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1262, 0.066859, 0, 9999, -9999, 1.0, 100, 1, 0.524108, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1263, 0.05282, 0, 9999, -9999, 1.0, 100, 1,\n 0.352421, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1264, 8.646042, 0, \n 9999, -9999, 1.0, 100, 1, 82.035361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1265, 0.87289, 0, 9999, -9999, 1.0, 100, 1, 6.654727, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1266, 19.839091, 0, 9999, -9999, 1.0, 100, 1,\n 119.710849, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1267, 1.42905, 0, \n 9999, -9999, 1.0, 100, 1, 39.469006, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1270, 
2.867892, 0, 9999, -9999, 1.0, 100, 1, 38.950511, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1271, 2.180592, 0, 9999, -9999, 1.0, 100, \n 1, 47.371792, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1272, 0.12233, 0,\n 9999, -9999, 1.0, 100, 1, 1.23166, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1273, 0.402412, 0, 9999, -9999, 1.0, 100, 1, 2.169201, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1274, 4.613569, 0, 9999, -9999, 1.0, 100, 1, \n 53.095629, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1275, 5.039854, 0, \n 9999, -9999, 1.0, 100, 1, 99.0753, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1276, 0.577089, 0, 9999, -9999, 1.0, 100, 1, 25.655641, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1277, 1.713473, 0, 9999, -9999, 1.0, 100, \n 1, 65.611252, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1278, 7.145337, 0,\n 9999, -9999, 1.0, 100, 1, 170.437781, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1279, 2e-05, 0, 9999, -9999, 1.0, 100, 1, 0.004344, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [1280, 0.008871, 0, 9999, -9999, 1.0, 100, 1, \n 0.626494, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1282, 0.164926, 0, \n 9999, -9999, 1.0, 100, 1, 4.363037, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1283, 24.042404, 0, 9999, -9999, 1.0, 100, 1, 1297.764428, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1284, 2.961479, 0, 9999, -9999, 1.0, \n 100, 1, 28.426322, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1285, \n 0.002761, 0, 9999, -9999, 1.0, 100, 1, 2.937048, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1286, 2.24876, 0, 9999, -9999, 1.0, 100, 1, 17.872201,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1287, 4.55563, 0, 9999, -9999, \n 1.0, 100, 1, 93.199628, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1288, \n 3.72473, 0, 9999, -9999, 1.0, 100, 1, 148.402692, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1289, 7.121503, 0, 9999, -9999, 1.0, 100, 1, \n 184.149235, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1290, 0.310739, 0, \n 9999, -9999, 1.0, 100, 1, 4.901974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1291, 5.174079, 0, 9999, 
-9999, 1.0, 100, 1, 98.293351, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1292, 3.680955, 0, 9999, -9999, 1.0, 100, \n 1, 41.682074, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1293, 0.037266, 0,\n 9999, -9999, 1.0, 100, 1, 2.402107, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1294, 0.017452, 0, 9999, -9999, 1.0, 100, 1, 5.39743, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1295, 0.038533, 0, 9999, -9999, 1.0, 100, 1, \n 5.873666, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1296, 0.669408, 0, \n 9999, -9999, 1.0, 100, 1, 27.356489, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1297, 11.612135, 0, 9999, -9999, 1.0, 100, 1, 177.778742, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1300, 11.138034, 0, 9999, -9999, 1.0, \n 100, 1, 23.74405, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1301, \n 27.94748, 0, 9999, -9999, 1.0, 100, 1, 60.863304, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1302, 1.775766, 0, 9999, -9999, 1.0, 100, 1, 4.877299,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1303, 1.506596, 0, 9999, -9999,\n 1.0, 100, 1, 4.335516, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1304, \n 2.218171, 0, 9999, -9999, 1.0, 100, 1, 9.594319, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1305, 0.000322, 0, 9999, -9999, 1.0, 100, 1, 0.004567,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1306, 0.093112, 0, 9999, -9999,\n 1.0, 100, 1, 1.827014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1307, \n 0.071688, 0, 9999, -9999, 1.0, 100, 1, 0.29894, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1308, 0.05088, 0, 9999, -9999, 1.0, 100, 1, 3.278321, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1309, 0.089478, 0, 9999, -9999,\n 1.0, 100, 1, 3.34909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1310, \n 0.043944, 0, 9999, -9999, 1.0, 100, 1, 1.64589, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1311, 1.283616, 0, 9999, -9999, 1.0, 100, 1, 11.854004,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1312, 32.144668, 0, 9999, -9999,\n 1.0, 100, 1, 262.264924, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1313, \n 7.034633, 0, 9999, -9999, 1.0, 100, 1, 
30.836748, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1314, 2.705834, 0, 9999, -9999, 1.0, 100, 1, 12.003987,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1315, 1.715196, 0, 9999, -9999,\n 1.0, 100, 1, 7.879027, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1316, \n 0.001198, 0, 9999, -9999, 1.0, 100, 1, 2.757497, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1317, 1.374919, 0, 9999, -9999, 1.0, 100, 1, 23.958574,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1318, 0.053995, 0, 9999, -9999,\n 1.0, 100, 1, 1.956332, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1319, \n 2.412989, 0, 9999, -9999, 1.0, 100, 1, 17.708276, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1320, 2.01785, 0, 9999, -9999, 1.0, 100, 1, 20.75859, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1321, 0.017436, 0, 9999, -9999,\n 1.0, 100, 1, 0.161123, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1322, \n 0.131102, 0, 9999, -9999, 1.0, 100, 1, 0.929763, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1323, 68.564796, 0, 9999, -9999, 1.0, 100, 1, \n 199.111909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1324, 1.440474, 0, \n 9999, -9999, 1.0, 100, 1, 13.063258, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1325, 4.968484, 0, 9999, -9999, 1.0, 100, 1, 90.497559, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1326, 2.423617, 0, 9999, -9999, 1.0, 100, \n 1, 56.928865, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1327, 3.105262, 0,\n 9999, -9999, 1.0, 100, 1, 50.796895, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1328, 1.651998, 0, 9999, -9999, 1.0, 100, 1, 16.063343, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1329, 17.013592, 0, 9999, -9999, 1.0, 100,\n 1, 218.675424, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1330, 6.13151, 0,\n 9999, -9999, 1.0, 100, 1, 30.131028, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1331, 0.035299, 0, 9999, -9999, 1.0, 100, 1, 0.289238, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1332, 0.021045, 0, 9999, -9999, 1.0, 100, \n 1, 26.293088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1333, 5.410888, 0,\n 9999, -9999, 1.0, 100, 1, 45.650254, 0.0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1334, 0.000137, 0, 9999, -9999, 1.0, 100, 1, 1.215341, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1336, 3.321284, 0, 9999, -9999, 1.0, 100, \n 1, 29.773035, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1337, 1.111612, 0,\n 9999, -9999, 1.0, 100, 1, 121.31241, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1338, 0.06346, 0, 9999, -9999, 1.0, 100, 1, 0.832524, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1339, 0.579758, 0, 9999, -9999, 1.0, 100, 1, \n 10.086482, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1340, 0.035501, 0, \n 9999, -9999, 1.0, 100, 1, 70.098327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1341, 6.581426, 0, 9999, -9999, 1.0, 100, 1, 205.513321, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1342, 0.031756, 0, 9999, -9999, 1.0, 100, \n 1, 0.734589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1343, 0.005344, 0,\n 9999, -9999, 1.0, 100, 1, 1.102108, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1344, 0.017248, 0, 9999, -9999, 1.0, 100, 1, 0.226057, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1345, 0.124928, 0, 9999, -9999, 1.0, 100, \n 1, 3.971188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1346, 12.149372, 0,\n 9999, -9999, 1.0, 100, 1, 214.719215, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1348, 2.617463, 0, 9999, -9999, 1.0, 100, 1, 22.707927, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1349, 2.716996, 0, 9999, -9999, 1.0, 100, \n 1, 42.352342, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1350, 0.016036, 0,\n 9999, -9999, 1.0, 100, 1, 0.094971, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1351, 5.3e-05, 0, 9999, -9999, 1.0, 100, 1, 0.015958, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1352, 0.007111, 0, 9999, -9999, 1.0, 100, 1, \n 0.83726, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1355, 0.046937, 0, \n 9999, -9999, 1.0, 100, 1, 1.688324, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1356, 12.885707, 0, 9999, -9999, 1.0, 100, 1, 73.486231, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1357, 6.737632, 0, 9999, -9999, 1.0, 100, \n 1, 56.459913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0], [1358, 0.006907, 0,\n 9999, -9999, 1.0, 100, 1, 0.247293, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1359, 0.897683, 0, 9999, -9999, 1.0, 100, 1, 70.633589, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1360, 3.153322, 0, 9999, -9999, 1.0, 100, \n 1, 17.135983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1361, 8.263279, 0,\n 9999, -9999, 1.0, 100, 1, 63.207173, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1362, 12.630815, 0, 9999, -9999, 1.0, 100, 1, 79.107216, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1363, 0.006147, 0, 9999, -9999, 1.0, 100, \n 1, 0.036158, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1364, 0.007668, 0,\n 9999, -9999, 1.0, 100, 1, 0.061068, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1365, 9.7e-05, 0, 9999, -9999, 1.0, 100, 1, 0.000456, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1366, 0.005584, 0, 9999, -9999, 1.0, 100, 1, \n 1.229992, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1367, 6.250932, 0, \n 9999, -9999, 1.0, 100, 1, 43.863891, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1368, 0.096174, 0, 9999, -9999, 1.0, 100, 1, 3.298243, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1369, 1.432042, 0, 9999, -9999, 1.0, 100, \n 1, 7.968859, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1370, 0.012611, 0,\n 9999, -9999, 1.0, 100, 1, 0.343308, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1371, 1.656353, 0, 9999, -9999, 1.0, 100, 1, 81.767208, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1372, 0.996171, 0, 9999, -9999, 1.0, 100, \n 1, 192.966588, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1373, 1.384774, \n 0, 9999, -9999, 1.0, 100, 1, 35.200257, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1374, 45.514504, 0, 9999, -9999, 1.0, 100, 1, 108.220146, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1375, 25.096659, 0, 9999, -9999, 1.0,\n 100, 1, 61.223816, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1376, \n 21.592139, 0, 9999, -9999, 1.0, 100, 1, 176.213655, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1377, 1.308187, 0, 9999, -9999, 1.0, 100, 1, \n 234.376272, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 
[1378, 0.068137, 0, \n 9999, -9999, 1.0, 100, 1, 246.029906, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1379, 0.067837, 0, 9999, -9999, 1.0, 100, 1, 0.805984, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1380, 0.148081, 0, 9999, -9999, 1.0, 100, \n 1, 1.213356, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1381, 0.079283, 0,\n 9999, -9999, 1.0, 100, 1, 1.01257, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1382, 75.120774, 0, 9999, -9999, 1.0, 100, 1, 138.839906, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1383, 57.921895, 0, 9999, -9999, 1.0, 100,\n 1, 109.821439, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1384, 0.898474, \n 0, 9999, -9999, 1.0, 100, 1, 4.669135, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1385, 0.010214, 0, 9999, -9999, 1.0, 100, 1, 0.124455, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1386, 0.058117, 0, 9999, -9999, 1.0, \n 100, 1, 0.673858, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1387, \n 0.177086, 0, 9999, -9999, 1.0, 100, 1, 3.493561, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1388, 0.113278, 0, 9999, -9999, 1.0, 100, 1, 0.928188,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1389, 0.02606, 0, 9999, -9999, \n 1.0, 100, 1, 0.213536, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1390, \n 0.189214, 0, 9999, -9999, 1.0, 100, 1, 3.732816, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1391, 0.022705, 0, 9999, -9999, 1.0, 100, 1, 0.521719,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1392, 1.653278, 0, 9999, -9999,\n 1.0, 100, 1, 19.306386, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1393, \n 0.304577, 0, 9999, -9999, 1.0, 100, 1, 1.376509, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1394, 0.242243, 0, 9999, -9999, 1.0, 100, 1, 1.077886,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1395, 0.016054, 0, 9999, -9999,\n 1.0, 100, 1, 0.073776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1396, \n 0.005171, 0, 9999, -9999, 1.0, 100, 1, 0.026112, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1397, 1.529697, 0, 9999, -9999, 1.0, 100, 1, 25.084545,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1398, 0.154931, 0, 
9999, -9999,\n 1.0, 100, 1, 2.779641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1399, \n 1.184332, 0, 9999, -9999, 1.0, 100, 1, 17.868157, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1400, 0.28671, 0, 9999, -9999, 1.0, 100, 1, 1.297197, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1401, 5.131858, 0, 9999, -9999,\n 1.0, 100, 1, 89.339497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1402, \n 1.568442, 0, 9999, -9999, 1.0, 100, 1, 26.328902, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1403, 48.266806, 0, 9999, -9999, 1.0, 100, 1, \n 119.651672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1404, 51.082464, 0,\n 9999, -9999, 1.0, 100, 1, 134.800518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1405, 1.986189, 0, 9999, -9999, 1.0, 100, 1, 29.550802, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1406, 1.132197, 0, 9999, -9999, 1.0, 100, \n 1, 10.763987, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1407, 0.012144, 0,\n 9999, -9999, 1.0, 100, 1, 0.211614, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1408, 3.606729, 0, 9999, -9999, 1.0, 100, 1, 41.078698, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1409, 0.595096, 0, 9999, -9999, 1.0, 100, \n 1, 12.019786, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1410, 1.341977, 0,\n 9999, -9999, 1.0, 100, 1, 37.466518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1411, 6.631827, 0, 9999, -9999, 1.0, 100, 1, 39.395367, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1412, 0.149883, 0, 9999, -9999, 1.0, 100, \n 1, 5.987601, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1413, 0.108024, 0,\n 9999, -9999, 1.0, 100, 1, 5.679791, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1414, 0.018773, 0, 9999, -9999, 1.0, 100, 1, 25.992489, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1415, 0.000673, 0, 9999, -9999, 1.0, 100, \n 1, 7.454501, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1416, 0.000128, 0,\n 9999, -9999, 1.0, 100, 1, 7.958002, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1417, 2.2e-05, 0, 9999, -9999, 1.0, 100, 1, 0.001311, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1418, 3.131184, 0, 9999, -9999, 1.0, 
100, 1, \n 88.264613, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1419, 0.892644, 0, \n 9999, -9999, 1.0, 100, 1, 33.260903, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1421, 0.846121, 0, 9999, -9999, 0.99951, 100, 1, 6.972369, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1422, 0.569459, 0, 9999, -9999, 1.0, \n 100, 1, 4.730495, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1423, \n 0.239313, 0, 9999, -9999, 1.0, 100, 1, 1.931017, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1424, 0.085377, 0, 9999, -9999, 1.0, 100, 1, \n 219.092115, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1425, 7.009151, 0, \n 9999, -9999, 1.0, 100, 1, 21.366402, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1426, 16.98374, 0, 9999, -9999, 1.0, 100, 1, 68.762602, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1427, 2.554959, 0, 9999, -9999, 1.0, 100, \n 1, 480.698671, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1428, 0.012327, \n 0, 9999, -9999, 1.0, 100, 1, 334.885743, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1431, 5.108838, 0, 9999, -9999, 1.0, 100, 1, 227.662022, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1432, 0.587459, 0, 9999, -9999, 1.0, \n 100, 1, 12.058931, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1433, \n 118.811298, 0, 9999, -9999, 1.0, 100, 1, 1289.241188, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [1434, 0.031591, 0, 9999, -9999, 1.0, 100, 1, \n 99.440014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1435, 4.644217, 0, \n 9999, -9999, 1.0, 100, 1, 86.713217, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1436, 14.975035, 0, 9999, -9999, 1.0, 100, 1, 98.434116, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1437, 12.49617, 0, 9999, -9999, 1.0, 100, \n 1, 238.321958, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1438, 64.510912,\n 0, 9999, -9999, 1.0, 100, 1, 392.815158, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1439, 0.058606, 0, 9999, -9999, 1.0, 100, 1, 99.103164, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1440, 0.000863, 0, 9999, -9999, 1.0, \n 100, 1, 0.833609, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1441, 0.00601,\n 0, 9999, -9999, 
1.0, 100, 1, 0.171578, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1442, 0.057526, 0, 9999, -9999, 1.0, 100, 1, 0.715522, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1443, 24.032003, 0, 9999, -9999, 1.0, \n 100, 1, 103.005076, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1444, \n 1.205148, 0, 9999, -9999, 1.0, 100, 1, 8.981696, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1445, 2.394259, 0, 9999, -9999, 1.0, 100, 1, 25.036799,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1446, 20.59301, 0, 9999, -9999,\n 1.0, 100, 1, 758.547933, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1447, \n 8.109674, 0, 9999, -9999, 1.0, 100, 1, 89.477411, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1448, 1.364062, 0, 9999, -9999, 1.0, 100, 1, 7.523578,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1449, 6.727523, 0, 9999, -9999,\n 1.0, 100, 1, 95.437673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1450, \n 10.232409, 0, 9999, -9999, 1.0, 100, 1, 59.256809, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1451, 13.044952, 0, 9999, -9999, 1.0, 100, 1, \n 68.198838, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1452, 4.020652, 0, \n 9999, -9999, 1.0, 100, 1, 24.068921, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1453, 12.794164, 0, 9999, -9999, 1.0, 100, 1, 64.93775, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1454, 71.645573, 0, 9999, -9999, 1.0, 100,\n 1, 155.126607, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1455, 0.038966, \n 0, 9999, -9999, 1.0, 100, 1, 0.654438, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1456, 3.746818, 0, 9999, -9999, 1.0, 100, 1, 50.054822, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1457, 0.244411, 0, 9999, -9999, 1.0, \n 100, 1, 2.002672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1458, \n 0.030047, 0, 9999, -9999, 1.0, 100, 1, 0.246199, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1459, 1.173315, 0, 9999, -9999, 1.0, 100, 1, 5.309059,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1460, 5.043479, 0, 9999, -9999,\n 1.0, 100, 1, 101.498473, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1461, \n 3.497456, 0, 9999, -9999, 1.0, 100, 
1, 17.951737, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1462, 0.462345, 0, 9999, -9999, 1.0, 100, 1, 2.402686,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1463, 0.170398, 0, 9999, -9999,\n 1.0, 100, 1, 0.711207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1464, \n 24.648093, 0, 9999, -9999, 1.0, 100, 1, 218.884211, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1465, 0.600752, 0, 9999, -9999, 1.0, 100, 1, \n 5.299939, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1466, 0.332156, 0, \n 9999, -9999, 1.0, 100, 1, 5.685017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1467, 0.100837, 0, 9999, -9999, 1.0, 100, 1, 2.096155, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1468, 6.628756, 0, 9999, -9999, 1.0, 100, \n 1, 23.789171, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1469, 3.982867, 0,\n 9999, -9999, 1.0, 100, 1, 65.007467, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1470, 19.817875, 0, 9999, -9999, 1.0, 100, 1, 78.965265, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1471, 25.471799, 0, 9999, -9999, 1.0, 100,\n 1, 159.165074, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1472, 0.789769, \n 0, 9999, -9999, 1.0, 100, 1, 11.980182, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1473, 0.721082, 0, 9999, -9999, 1.0, 100, 1, 8.362608, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1474, 0.081557, 0, 9999, -9999, 1.0, \n 100, 1, 1.398948, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1475, \n 0.020827, 0, 9999, -9999, 1.0, 100, 1, 0.39088, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1476, 81.826956, 0, 9999, -9999, 1.0, 100, 1, \n 250.480113, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1477, 0.580029, 0, \n 9999, -9999, 1.0, 100, 1, 12.122974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1479, 0.004362, 0, 9999, -9999, 1.0, 100, 1, 5.592606, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1480, 0.04074, 0, 9999, -9999, 1.0, 100, 1,\n 18.681964, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1481, 0.004051, 0, \n 9999, -9999, 1.0, 100, 1, 0.053146, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1482, 0.788081, 0, 9999, -9999, 1.0, 100, 1, 17.51083, 
0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1483, 0.141817, 0, 9999, -9999, 1.0, 100, \n 1, 3.599649, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1484, 0.002023, 0,\n 9999, -9999, 1.0, 100, 1, 0.02991, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1485, 0.038114, 0, 9999, -9999, 1.0, 100, 1, 0.563547, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1486, 0.196086, 0, 9999, -9999, 1.0, 100, 1, \n 2.89934, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1487, 0.083872, 0, \n 9999, -9999, 1.0, 100, 1, 1.142917, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1488, 0.007448, 0, 9999, -9999, 1.0, 100, 1, 5.569856, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1489, 0.028558, 0, 9999, -9999, 1.0, 100, \n 1, 0.118938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1490, 15.603052, 0,\n 9999, -9999, 1.0, 100, 1, 782.463701, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1491, 5.539285, 0, 9999, -9999, 1.0, 100, 1, 84.622838, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1492, 3.975544, 0, 9999, -9999, 1.0, 100, \n 1, 229.927503, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1493, 3.904134, \n 0, 9999, -9999, 1.0, 100, 1, 83.557175, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1494, 56.119552, 0, 9999, -9999, 1.0, 100, 1, 404.486733, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1495, 1.179889, 0, 9999, -9999, 1.0,\n 100, 1, 66.920717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1497, \n 12.800197, 0, 9999, -9999, 1.0, 100, 1, 89.070006, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1498, 22.315881, 0, 9999, -9999, 1.0, 100, 1, \n 105.800802, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1500, 0.040223, 0, \n 9999, -9999, 1.0, 100, 1, 0.154817, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1501, 1.659338, 0, 9999, -9999, 1.0, 100, 1, 8.165333, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1502, 0.015933, 0, 9999, -9999, 1.0, 100, \n 1, 0.938928, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1503, 3.644376, 0,\n 9999, -9999, 1.0, 100, 1, 45.972187, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1504, 15.995903, 0, 9999, -9999, 1.0, 100, 1, 188.822836, 0.0, 0, 
\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1505, 0.973825, 0, 9999, -9999, 1.0, \n 100, 1, 26.765913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1506, \n 1.68035, 0, 9999, -9999, 1.0, 100, 1, 56.406717, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1507, 0.198063, 0, 9999, -9999, 1.0, 100, 1, 15.438042,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1508, 0.014206, 0, 9999, -9999,\n 1.0, 100, 1, 0.065259, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1510, \n 7.904758, 0, 9999, -9999, 1.0, 100, 1, 107.008141, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1511, 34.313644, 0, 9999, -9999, 1.0, 100, 1, \n 155.22192, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1512, 5.508085, 0, \n 9999, -9999, 1.0, 100, 1, 64.130052, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1513, 2.253286, 0, 9999, -9999, 1.0, 100, 1, 23.051786, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1514, 0.00068, 0, 9999, -9999, 1.0, 100, 1,\n 0.027711, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1516, 0.000622, 0, \n 9999, -9999, 1.0, 100, 1, 0.02881, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1517, 0.14151, 0, 9999, -9999, 1.0, 100, 1, 1.286804, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1518, 0.056948, 0, 9999, -9999, 1.0, 100, 1, \n 0.670542, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1519, 0.003953, 0, \n 9999, -9999, 1.0, 100, 1, 0.04654, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1520, 1.320701, 0, 9999, -9999, 1.0, 100, 1, 79.674256, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1521, 0.488031, 0, 9999, -9999, 1.0, 100, \n 1, 31.179116, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1522, 0.667681, 0,\n 9999, -9999, 1.0, 100, 1, 40.212666, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1523, 0.358897, 0, 9999, -9999, 1.0, 100, 1, 20.304521, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1524, 0.421411, 0, 9999, -9999, 1.0, 100, \n 1, 26.159251, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1525, 8.369013, 0,\n 9999, -9999, 1.0, 100, 1, 68.425403, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1526, 13.439194, 0, 9999, -9999, 1.0, 100, 1, 44.478558, 0.0, 0, 0,\n 0, 0, 0, 0, 
0, 0, 0, 0, 0], [1527, 47.41109, 0, 9999, -9999, 1.0, 100, \n 1, 103.998682, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1528, 19.05121, \n 0, 9999, -9999, 1.0, 100, 1, 41.386726, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1529, 4.347441, 0, 9999, -9999, 1.0, 100, 1, 84.378012, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1530, 36.879435, 0, 9999, -9999, 1.0, \n 100, 1, 79.055155, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1531, \n 98.758267, 0, 9999, -9999, 1.0, 100, 1, 183.821409, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1532, 3.146672, 0, 9999, -9999, 1.0, 100, 1, \n 37.379033, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1534, 16.179525, 0, \n 9999, -9999, 1.0, 100, 1, 29.516607, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1535, 2.910988, 0, 9999, -9999, 1.0, 100, 1, 8.931779, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1536, 13.30894, 0, 9999, -9999, 1.0, 100, \n 1, 39.26145, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1537, 5.590481, 0,\n 9999, -9999, 1.0, 100, 1, 99.740166, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1538, 3.755931, 0, 9999, -9999, 1.0, 100, 1, 130.774402, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1539, 6.565652, 0, 9999, -9999, 1.0, 100, \n 1, 201.766963, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1540, 0.089836, \n 0, 9999, -9999, 1.0, 100, 1, 4.160189, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1541, 0.293356, 0, 9999, -9999, 1.0, 100, 1, 3.429917, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1542, 1.778872, 0, 9999, -9999, 1.0, \n 100, 1, 50.287947, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1543, \n 7.196474, 0, 9999, -9999, 1.0, 100, 1, 14.788669, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1544, 15.520031, 0, 9999, -9999, 1.0, 100, 1, \n 121.437126, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1545, 64.930835, 0,\n 9999, -9999, 1.0, 100, 1, 185.545128, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1546, 55.458703, 0, 9999, -9999, 1.0, 100, 1, 255.44343, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1547, 71.747708, 0, 9999, -9999, 1.0, 100,\n 1, 362.597919, 0.0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0], [1548, 9.874324, \n 0, 9999, -9999, 1.0, 100, 1, 21.273779, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1549, 26.315546, 0, 9999, -9999, 1.0, 100, 1, 77.017486, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1550, 2.578653, 0, 9999, -9999, 1.0, \n 100, 1, 5.214715, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1551, \n 4.679853, 0, 9999, -9999, 1.0, 100, 1, 9.576491, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1552, 1.571054, 0, 9999, -9999, 1.0, 100, 1, 54.035471,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1553, 1.205813, 0, 9999, -9999,\n 1.0, 100, 1, 92.480282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1554, \n 4.550451, 0, 9999, -9999, 1.0, 100, 1, 155.333413, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1555, 2.8799, 0, 9999, -9999, 1.0, 100, 1, \n 103.865774, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1556, 1.072108, 0, \n 9999, -9999, 1.0, 100, 1, 40.376346, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1557, 0.628445, 0, 9999, -9999, 1.0, 100, 1, 25.990242, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1558, 0.94404, 0, 9999, -9999, 1.0, 100, 1,\n 24.622373, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1559, 4.593798, 0, \n 9999, -9999, 1.0, 100, 1, 112.609207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1560, 1.15871, 0, 9999, -9999, 1.0, 100, 1, 86.395942, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1561, 0.554621, 0, 9999, -9999, 1.0, 100, \n 1, 19.127379, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1562, 1.20192, 0,\n 9999, -9999, 1.0, 100, 1, 61.888351, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1563, 3.188963, 0, 9999, -9999, 1.0, 100, 1, 106.233907, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1564, 26.839461, 0, 9999, -9999, 1.0, 100,\n 1, 58.27282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1565, 0.825577, 0,\n 9999, -9999, 1.0, 100, 1, 12.83938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1566, 9.367373, 0, 9999, -9999, 1.0, 100, 1, 358.676351, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1567, 0.521067, 0, 9999, -9999, 1.0, 100, \n 1, 29.531771, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 
[1568, 2.721294, 0,\n 9999, -9999, 1.0, 100, 1, 89.300597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1569, 7.514268, 0, 9999, -9999, 1.0, 100, 1, 328.718571, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1570, 6.439178, 0, 9999, -9999, 1.0, 100, \n 1, 243.241909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1571, 10.260218,\n 0, 9999, -9999, 1.0, 100, 1, 203.443403, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1572, 6.054092, 0, 9999, -9999, 1.0, 100, 1, 232.127956, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1573, 2.410514, 0, 9999, -9999, 1.0, \n 100, 1, 80.403772, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1574, \n 3.788724, 0, 9999, -9999, 1.0, 100, 1, 144.715972, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1575, 10.428356, 0, 9999, -9999, 1.0, 100, 1, \n 153.606376, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1576, 2.443, 0, \n 9999, -9999, 1.0, 100, 1, 34.262017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1577, 15.38133, 0, 9999, -9999, 1.0, 100, 1, 217.054488, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1578, 0.821275, 0, 9999, -9999, 1.0, 100, \n 1, 16.348222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1579, 14.528543, \n 0, 9999, -9999, 1.0, 100, 1, 35.164333, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1580, 12.79112, 0, 9999, -9999, 1.0, 100, 1, 21.892492, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1581, 2.068277, 0, 9999, -9999, 1.0, \n 100, 1, 156.277964, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1582, \n 0.165737, 0, 9999, -9999, 1.0, 100, 1, 8.151092, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1583, 0.043758, 0, 9999, -9999, 1.0, 100, 1, 1.791968,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1584, 1.216571, 0, 9999, -9999,\n 1.0, 100, 1, 81.24993, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1585, \n 0.048815, 0, 9999, -9999, 1.0, 100, 1, 3.685182, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1586, 0.843323, 0, 9999, -9999, 1.0, 100, 1, 61.31549,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1587, 2.519864, 0, 9999, -9999,\n 1.0, 100, 1, 191.635296, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1588, \n 
3.852362, 0, 9999, -9999, 1.0, 100, 1, 59.424343, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1589, 19.154329, 0, 9999, -9999, 1.0, 100, 1, \n 48.538268, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1590, 20.947358, 0, \n 9999, -9999, 1.0, 100, 1, 119.077525, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1591, 23.168103, 0, 9999, -9999, 1.0, 100, 1, 142.8447, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1592, 0.253241, 0, 9999, -9999, 1.0, 100, \n 1, 9.842361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1593, 0.15675, 0, \n 9999, -9999, 1.0, 100, 1, 7.183183, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1594, 0.292231, 0, 9999, -9999, 1.0, 100, 1, 9.56089, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1595, 2.231011, 0, 9999, -9999, 1.0, 100, 1, \n 54.79001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1596, 4.880936, 0, \n 9999, -9999, 1.0, 100, 1, 138.730049, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1597, 0.08322, 0, 9999, -9999, 1.0, 100, 1, 2.858987, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1598, 0.112467, 0, 9999, -9999, 1.0, 100, 1, \n 4.795494, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1599, 3.84912, 0, \n 9999, -9999, 1.0, 100, 1, 86.703571, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1600, 2.069032, 0, 9999, -9999, 1.0, 100, 1, 25.356501, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1601, 0.561492, 0, 9999, -9999, 1.0, 100, \n 1, 7.643653, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1602, 2.906505, 0,\n 9999, -9999, 1.0, 100, 1, 45.658169, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1603, 1.783351, 0, 9999, -9999, 1.0, 100, 1, 26.209248, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1604, 1.098497, 0, 9999, -9999, 1.0, 100, \n 1, 16.363032, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1605, 2.754133, 0,\n 9999, -9999, 1.0, 100, 1, 43.477178, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1606, 2.112869, 0, 9999, -9999, 1.0, 100, 1, 42.024907, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1607, 1.261272, 0, 9999, -9999, 1.0, 100, \n 1, 19.395236, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1608, 1.278121, 0,\n 9999, 
-9999, 1.0, 100, 1, 19.491249, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1609, 0.483623, 0, 9999, -9999, 1.0, 100, 1, 6.052272, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1610, 1.005066, 0, 9999, -9999, 1.0, 100, \n 1, 18.571656, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1611, 0.46381, 0,\n 9999, -9999, 1.0, 100, 1, 6.420554, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1612, 0.857392, 0, 9999, -9999, 1.0, 100, 1, 10.811203, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1613, 1.011747, 0, 9999, -9999, 1.0, 100, \n 1, 27.976217, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1614, 1.022581, 0,\n 9999, -9999, 1.0, 100, 1, 28.183827, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1615, 2.737635, 0, 9999, -9999, 1.0, 100, 1, 193.234776, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1616, 0.13616, 0, 9999, -9999, 1.0, 100, 1,\n 6.865586, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1617, 0.214465, 0, \n 9999, -9999, 1.0, 100, 1, 10.63107, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1618, 0.137271, 0, 9999, -9999, 1.0, 100, 1, 4.920368, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1619, 0.137714, 0, 9999, -9999, 1.0, 100, \n 1, 6.689637, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1620, 0.054616, 0,\n 9999, -9999, 1.0, 100, 1, 1.912024, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1621, 0.643767, 0, 9999, -9999, 1.0, 100, 1, 8.056388, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1622, 0.454891, 0, 9999, -9999, 1.0, 100, \n 1, 5.693597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1623, 0.781413, 0,\n 9999, -9999, 1.0, 100, 1, 20.717111, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1624, 0.43014, 0, 9999, -9999, 1.0, 100, 1, 8.938454, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1625, 4.394301, 0, 9999, -9999, 1.0, 100, 1, \n 65.182465, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1626, 0.907896, 0, \n 9999, -9999, 1.0, 100, 1, 11.878862, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1627, 0.828216, 0, 9999, -9999, 1.0, 100, 1, 10.196496, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1628, 3.64562, 0, 9999, -9999, 1.0, 100, 1,\n 
66.613993, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1629, 3.996364, 0, \n 9999, -9999, 1.0, 100, 1, 121.671047, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1630, 0.97886, 0, 9999, -9999, 1.0, 100, 1, 12.452584, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1631, 1.229738, 0, 9999, -9999, 1.0, 100, \n 1, 32.486249, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1632, 1.735442, 0,\n 9999, -9999, 1.0, 100, 1, 25.874893, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1633, 1.043532, 0, 9999, -9999, 1.0, 100, 1, 67.433329, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1634, 0.770553, 0, 9999, -9999, 1.0, 100, \n 1, 9.643044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1635, 1.42036, 0, \n 9999, -9999, 1.0, 100, 1, 19.166135, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1636, 0.484297, 0, 9999, -9999, 1.0, 100, 1, 25.181406, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1637, 0.890327, 0, 9999, -9999, 1.0, 100, \n 1, 29.114828, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1638, 0.393448, 0,\n 9999, -9999, 1.0, 100, 1, 12.162188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1639, 0.529161, 0, 9999, -9999, 1.0, 100, 1, 29.183593, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1640, 0.055855, 0, 9999, -9999, 1.0, 100, \n 1, 2.237652, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1641, 0.128633, 0,\n 9999, -9999, 1.0, 100, 1, 5.023705, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1642, 0.300365, 0, 9999, -9999, 1.0, 100, 1, 11.730623, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1643, 0.0778, 0, 9999, -9999, 1.0, 100, 1,\n 3.417684, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1644, 0.519067, 0, \n 9999, -9999, 1.0, 100, 1, 11.76596, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1645, 0.212854, 0, 9999, -9999, 1.0, 100, 1, 11.144882, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1646, 0.08389, 0, 9999, -9999, 1.0, 100, 1,\n 3.73271, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1647, 0.49549, 0, 9999,\n -9999, 1.0, 100, 1, 17.434827, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 1648, 51.620123, 0, 9999, -9999, 1.0, 100, 1, 109.345623, 0.0, 0, 0, 0,\n 
0, 0, 0, 0, 0, 0, 0, 0], [1649, 1.143986, 0, 9999, -9999, 1.0, 100, 1, \n 23.481556, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1650, 68.504496, 0, \n 9999, -9999, 1.0, 100, 1, 176.928964, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1651, 25.884619, 0, 9999, -9999, 1.0, 100, 1, 161.276649, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1652, 22.304037, 0, 9999, -9999, 1.0, \n 100, 1, 84.070562, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1653, \n 5.825901, 0, 9999, -9999, 1.0, 100, 1, 18.431241, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1654, 5.458977, 0, 9999, -9999, 1.0, 100, 1, 47.53021,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1655, 0.218497, 0, 9999, -9999,\n 1.0, 100, 1, 10.79071, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1656, \n 0.047498, 0, 9999, -9999, 1.0, 100, 1, 2.680105, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1657, 0.095463, 0, 9999, -9999, 1.0, 100, 1, 5.6313, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1658, 0.045291, 0, 9999, -9999,\n 1.0, 100, 1, 1.879381, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1659, \n 17.538243, 0, 9999, -9999, 1.0, 100, 1, 91.77667, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1660, 12.937488, 0, 9999, -9999, 1.0, 100, 1, \n 186.942171, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1661, 31.605385, 0,\n 9999, -9999, 1.0, 100, 1, 138.604087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1662, 0.063493, 0, 9999, -9999, 1.0, 100, 1, 3.040325, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1663, 0.024501, 0, 9999, -9999, 1.0, 100, \n 1, 1.600649, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1664, 0.036775, 0,\n 9999, -9999, 1.0, 100, 1, 1.578207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1665, 0.738544, 0, 9999, -9999, 1.0, 100, 1, 48.659717, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1666, 0.10553, 0, 9999, -9999, 1.0, 100, 1,\n 2.877877, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1667, 0.158158, 0, \n 9999, -9999, 1.0, 100, 1, 5.227282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1668, 0.093074, 0, 9999, -9999, 1.0, 100, 1, 3.927043, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 
0, 0, 0], [1669, 0.940983, 0, 9999, -9999, 1.0, 100, \n 1, 72.677935, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1670, 1.496978, 0,\n 9999, -9999, 1.0, 100, 1, 111.043025, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1671, 2.781499, 0, 9999, -9999, 1.0, 100, 1, 62.404971, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1672, 0.388881, 0, 9999, -9999, 1.0, 100, \n 1, 10.579925, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1673, 0.334706, 0,\n 9999, -9999, 1.0, 100, 1, 4.091034, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1674, 1.005445, 0, 9999, -9999, 1.0, 100, 1, 47.970381, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1675, 0.90703, 0, 9999, -9999, 1.0, 100, 1,\n 31.233663, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1676, 1.387516, 0, \n 9999, -9999, 1.0, 100, 1, 83.173368, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1677, 0.214899, 0, 9999, -9999, 1.0, 100, 1, 13.887293, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1678, 1.315679, 0, 9999, -9999, 1.0, 100, \n 1, 226.804108, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1679, 0.418866, \n 0, 9999, -9999, 1.0, 100, 1, 71.380413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1680, 1.040782, 0, 9999, -9999, 1.0, 100, 1, 52.148102, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1681, 0.272268, 0, 9999, -9999, 1.0, \n 100, 1, 17.30062, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1682, \n 0.618993, 0, 9999, -9999, 1.0, 100, 1, 39.892468, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1683, 0.37783, 0, 9999, -9999, 1.0, 100, 1, 9.189765, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1684, 16.720062, 0, 9999, -9999,\n 1.0, 100, 1, 40.575646, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1685, \n 38.280956, 0, 9999, -9999, 1.0, 100, 1, 74.922434, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1686, 1.592396, 0, 9999, -9999, 1.0, 100, 1, \n 81.035483, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1687, 1.448229, 0, \n 9999, -9999, 1.0, 100, 1, 112.01808, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1688, 0.25044, 0, 9999, -9999, 1.0, 100, 1, 18.158729, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], 
[1689, 2.728973, 0, 9999, -9999, 1.0, 100, \n 1, 116.696894, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1690, 1.881404, \n 0, 9999, -9999, 1.0, 100, 1, 116.477465, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1691, 1.937312, 0, 9999, -9999, 1.0, 100, 1, 228.38653, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1692, 0.360216, 0, 9999, -9999, 1.0, \n 100, 1, 26.501573, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1693, \n 6.045706, 0, 9999, -9999, 1.0, 100, 1, 86.236575, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1694, 0.838517, 0, 9999, -9999, 1.0, 100, 1, 53.656832,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1695, 0.366512, 0, 9999, -9999,\n 1.0, 100, 1, 23.132774, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1696, \n 0.676037, 0, 9999, -9999, 1.0, 100, 1, 53.34209, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1697, 73.968329, 0, 9999, -9999, 1.0, 100, 1, \n 136.821485, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1698, 7.947772, 0, \n 9999, -9999, 1.0, 100, 1, 25.60631, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1699, 0.032287, 0, 9999, -9999, 1.0, 100, 1, 5.356106, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1700, 0.345167, 0, 9999, -9999, 1.0, 100, \n 1, 55.825815, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1701, 0.33727, 0,\n 9999, -9999, 1.0, 100, 1, 37.297196, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1702, 1.288316, 0, 9999, -9999, 1.0, 100, 1, 25.149806, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1703, 2.47381, 0, 9999, -9999, 1.0, 100, 1,\n 48.587768, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1704, 5.787415, 0, \n 9999, -9999, 1.0, 100, 1, 127.647586, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1705, 2.86247, 0, 9999, -9999, 1.0, 100, 1, 52.051788, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1706, 0.421435, 0, 9999, -9999, 1.0, 100, \n 1, 6.76178, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1707, 0.423471, 0, \n 9999, -9999, 1.0, 100, 1, 11.7078, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1708, 1.09922, 0, 9999, -9999, 1.0, 100, 1, 26.288692, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1709, 4.063842, 
0, 9999, -9999, 1.0, 100, 1, \n 226.257418, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1710, 3.872336, 0, \n 9999, -9999, 1.0, 100, 1, 183.631947, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1711, 0.031912, 0, 9999, -9999, 1.0, 100, 1, 7.213854, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1712, 1.519606, 0, 9999, -9999, 1.0, 100, \n 1, 75.638853, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1713, 1.926968, 0,\n 9999, -9999, 1.0, 100, 1, 90.775073, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1714, 0.691647, 0, 9999, -9999, 1.0, 100, 1, 42.312538, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1715, 4.380165, 0, 9999, -9999, 1.0, 100, \n 1, 155.279397, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1716, 99.103248,\n 0, 9999, -9999, 1.0, 100, 1, 156.979012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1717, 1.370715, 0, 9999, -9999, 1.0, 100, 1, 82.928251, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1718, 189.035332, 0, 9999, -9999, 1.0, \n 100, 1, 301.614349, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1719, \n 0.996406, 0, 9999, -9999, 1.0, 100, 1, 19.488967, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1720, 2.459531, 0, 9999, -9999, 1.0, 100, 1, 54.067169,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1721, 1.395162, 0, 9999, -9999,\n 1.0, 100, 1, 82.151947, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1722, \n 0.307342, 0, 9999, -9999, 1.0, 100, 1, 21.329566, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1723, 1.879056, 0, 9999, -9999, 1.0, 100, 1, 2.855273,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1724, 23.913688, 0, 9999, -9999,\n 1.0, 100, 1, 36.268783, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1725, \n 3.302072, 0, 9999, -9999, 1.0, 100, 1, 55.750844, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1726, 4.692439, 0, 9999, -9999, 1.0, 100, 1, 84.308501,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1727, 0.009857, 0, 9999, -9999,\n 1.0, 100, 1, 0.456443, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1728, \n 1.500178, 0, 9999, -9999, 1.0, 100, 1, 65.283314, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1729, 9.626622, 0, 9999, 
-9999, 1.0, 100, 1, \n 220.758669, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1730, 2.579093, 0, \n 9999, -9999, 1.0, 100, 1, 51.367164, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1731, 5.370488, 0, 9999, -9999, 1.0, 100, 1, 151.90213, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1732, 4.730721, 0, 9999, -9999, 1.0, 100, \n 1, 383.858473, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1733, 1.601396, \n 0, 9999, -9999, 1.0, 100, 1, 60.655652, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1734, 0.994327, 0, 9999, -9999, 1.0, 100, 1, 77.375277, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1735, 5.493087, 0, 9999, -9999, 1.0, \n 100, 1, 153.887449, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1736, \n 1.217485, 0, 9999, -9999, 1.0, 100, 1, 89.439426, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1737, 13.67404, 0, 9999, -9999, 1.0, 100, 1, \n 194.473407, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1738, 6.79528, 0, \n 9999, -9999, 1.0, 100, 1, 116.049526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1739, 1.628928, 0, 9999, -9999, 1.0, 100, 1, 33.525947, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1740, 3.170471, 0, 9999, -9999, 1.0, 100, \n 1, 66.638954, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1741, 0.703631, 0,\n 9999, -9999, 1.0, 100, 1, 35.869318, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1742, 0.41138, 0, 9999, -9999, 1.0, 100, 1, 25.619162, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1743, 0.014153, 0, 9999, -9999, 1.0, 100, \n 1, 0.986841, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1744, 0.06008, 0, \n 9999, -9999, 1.0, 100, 1, 3.775325, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1745, 0.52858, 0, 9999, -9999, 1.0, 100, 1, 31.215591, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1746, 2.317817, 0, 9999, -9999, 1.0, 100, \n 1, 172.123236, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1747, 0.45041, 0,\n 9999, -9999, 1.0, 100, 1, 25.963706, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1748, 1.875782, 0, 9999, -9999, 1.0, 100, 1, 67.219313, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1749, 5.661322, 0, 9999, -9999, 1.0, 
100, \n 1, 218.703564, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1750, 0.722982, \n 0, 9999, -9999, 1.0, 100, 1, 22.191848, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1751, 0.570436, 0, 9999, -9999, 1.0, 100, 1, 18.416283, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1752, 2.485541, 0, 9999, -9999, 1.0, \n 100, 1, 136.190504, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1753, \n 2.307659, 0, 9999, -9999, 1.0, 100, 1, 79.270006, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1754, 9.096135, 0, 9999, -9999, 1.0, 100, 1, 408.37422,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1755, 1.808269, 0, 9999, -9999,\n 1.0, 100, 1, 46.277001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1756, \n 1.755721, 0, 9999, -9999, 1.0, 100, 1, 93.807787, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1757, 13.59206, 0, 9999, -9999, 1.0, 100, 1, 197.08743,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1758, 4.309907, 0, 9999, -9999,\n 1.0, 100, 1, 311.473267, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1759, \n 4.837918, 0, 9999, -9999, 1.0, 100, 1, 156.546089, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1760, 2.229657, 0, 9999, -9999, 1.0, 100, 1, \n 114.687411, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1761, 1.4435, 0, \n 9999, -9999, 1.0, 100, 1, 48.443946, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1762, 4.898546, 0, 9999, -9999, 1.0, 100, 1, 107.077622, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1763, 5.490835, 0, 9999, -9999, 1.0, 100, \n 1, 90.136674, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1764, 1.223566, 0,\n 9999, -9999, 1.0, 100, 1, 21.994769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1765, 7.971301, 0, 9999, -9999, 1.0, 100, 1, 112.249863, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1766, 9.468566, 0, 9999, -9999, 1.0, 100, \n 1, 99.811208, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1767, 48.00237, 0,\n 9999, -9999, 1.0, 100, 1, 95.5909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1768, 55.735285, 0, 9999, -9999, 1.0, 100, 1, 159.818572, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1769, 21.168997, 0, 9999, -9999, 1.0, 100,\n 
1, 235.581664, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1770, 252.472611,\n 0, 9999, -9999, 1.0, 100, 1, 479.248156, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1771, 171.272253, 0, 9999, -9999, 1.0, 100, 1, 276.640075, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1772, 5.981185, 0, 9999, -9999, 1.0,\n 100, 1, 272.215345, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1773, \n 31.853074, 0, 9999, -9999, 1.0, 100, 1, 533.823159, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1774, 1.38998, 0, 9999, -9999, 1.0, 100, 1, \n 88.57714, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1775, 3.602189, 0, \n 9999, -9999, 1.0, 100, 1, 197.787397, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1776, 3.86406, 0, 9999, -9999, 1.0, 100, 1, 111.203656, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1777, 4.186652, 0, 9999, -9999, 1.0, 100, \n 1, 199.457983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1778, 2.885068, \n 0, 9999, -9999, 1.0, 100, 1, 80.070627, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1779, 6.121667, 0, 9999, -9999, 1.0, 100, 1, 78.485044, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1780, 4.042606, 0, 9999, -9999, 1.0, \n 100, 1, 97.872974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1781, \n 3.124553, 0, 9999, -9999, 1.0, 100, 1, 7.067063, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1782, 4.836581, 0, 9999, -9999, 1.0, 100, 1, 9.94901, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1783, 5.154731, 0, 9999, -9999,\n 1.0, 100, 1, 10.739092, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1784, \n 2.922371, 0, 9999, -9999, 1.0, 100, 1, 240.920274, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1785, 3.064711, 0, 9999, -9999, 1.0, 100, 1, \n 275.41262, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1786, 15.899962, 0, \n 9999, -9999, 1.0, 100, 1, 195.868213, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1787, 65.367372, 0, 9999, -9999, 1.0, 100, 1, 123.060646, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1788, 0.117389, 0, 9999, -9999, 1.0, \n 100, 1, 9.486282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1789, \n 0.289917, 0, 9999, -9999, 1.0, 100, 
1, 24.05804, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1790, 0.010999, 0, 9999, -9999, 1.0, 100, 1, 1.412167,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1791, 0.007829, 0, 9999, -9999,\n 1.0, 100, 1, 1.171034, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1792, \n 0.044079, 0, 9999, -9999, 1.0, 100, 1, 8.914306, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1793, 0.236603, 0, 9999, -9999, 1.0, 100, 1, 41.722817,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1794, 0.20779, 0, 9999, -9999, \n 1.0, 100, 1, 6.617641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1795, \n 0.266407, 0, 9999, -9999, 1.0, 100, 1, 3.33586, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1796, 4.643687, 0, 9999, -9999, 1.0, 100, 1, 10.434523,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1797, 1.892799, 0, 9999, -9999,\n 1.0, 100, 1, 63.411765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1798, \n 0.404733, 0, 9999, -9999, 1.0, 100, 1, 14.835758, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1799, 6.065791, 0, 9999, -9999, 1.0, 100, 1, 51.10225,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1800, 12.893851, 0, 9999, -9999,\n 1.0, 100, 1, 79.286766, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1801, \n 0.096655, 0, 9999, -9999, 1.0, 100, 1, 21.006749, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1802, 0.050346, 0, 9999, -9999, 1.0, 100, 1, 11.305192,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1803, 0.067486, 0, 9999, -9999,\n 1.0, 100, 1, 15.182571, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1804, \n 8.857977, 0, 9999, -9999, 1.0, 100, 1, 399.133201, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1805, 0.372681, 0, 9999, -9999, 1.0, 100, 1, \n 23.20491, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1806, 0.645338, 0, \n 9999, -9999, 1.0, 100, 1, 21.469357, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1807, 0.476964, 0, 9999, -9999, 1.0, 100, 1, 28.156483, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1808, 2.263578, 0, 9999, -9999, 1.0, 100, \n 1, 118.262712, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1809, 0.706651, \n 0, 9999, -9999, 1.0, 100, 1, 33.031228, 0.0, 
0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1810, 1.838324, 0, 9999, -9999, 1.0, 100, 1, 74.139408, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1811, 0.934047, 0, 9999, -9999, 1.0, \n 100, 1, 53.408299, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1812, \n 0.847076, 0, 9999, -9999, 1.0, 100, 1, 47.34526, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1813, 5.040034, 0, 9999, -9999, 1.0, 100, 1, \n 180.894957, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1814, 1.305803, 0, \n 9999, -9999, 1.0, 100, 1, 62.572642, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1815, 1.125706, 0, 9999, -9999, 1.0, 100, 1, 61.953143, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1816, 0.526674, 0, 9999, -9999, 1.0, 100, \n 1, 30.445169, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1817, 6.103422, 0,\n 9999, -9999, 1.0, 100, 1, 280.614897, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1818, 2.278102, 0, 9999, -9999, 1.0, 100, 1, 173.515675, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1819, 0.043942, 0, 9999, -9999, 1.0, 100, \n 1, 1.538348, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1820, 1.0414, 0, \n 9999, -9999, 1.0, 100, 1, 79.71358, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1821, 2.208855, 0, 9999, -9999, 1.0, 100, 1, 196.67938, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1822, 98.239685, 0, 9999, -9999, 1.0, 100,\n 1, 170.831584, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1823, 4.830701, \n 0, 9999, -9999, 1.0, 100, 1, 131.456153, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1824, 2.976789, 0, 9999, -9999, 1.0, 100, 1, 56.565054, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1825, 49.61097, 0, 9999, -9999, 1.0, \n 100, 1, 81.59195, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1826, 2.40722,\n 0, 9999, -9999, 1.0, 100, 1, 74.101252, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1827, 0.690669, 0, 9999, -9999, 1.0, 100, 1, 30.303552, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1828, 27.146571, 0, 9999, -9999, 1.0, \n 100, 1, 43.298921, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1829, \n 37.866018, 0, 9999, -9999, 1.0, 100, 1, 69.263255, 0.0, 0, 0, 0, 0, 
0, \n 0, 0, 0, 0, 0, 0], [1830, 2.915109, 0, 9999, -9999, 1.0, 100, 1, \n 27.724768, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1831, 39.925327, 0, \n 9999, -9999, 1.0, 100, 1, 69.89001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1832, 0.828831, 0, 9999, -9999, 1.0, 100, 1, 26.560625, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1833, 1.109798, 0, 9999, -9999, 1.0, 100, \n 1, 81.361962, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1834, 2.554402, 0,\n 9999, -9999, 1.0, 100, 1, 102.529569, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1836, 0.477418, 0, 9999, -9999, 1.0, 100, 1, 6.417969, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1837, 4.200009, 0, 9999, -9999, 1.0, 100, \n 1, 12.629331, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1838, 1.443062, 0,\n 9999, -9999, 1.0, 100, 1, 25.580913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1839, 28.228214, 0, 9999, -9999, 1.0, 100, 1, 183.749133, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1840, 10.988953, 0, 9999, -9999, 1.0, \n 100, 1, 132.975197, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1841, \n 0.340284, 0, 9999, -9999, 1.0, 100, 1, 22.982632, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1842, 1.417646, 0, 9999, -9999, 1.0, 100, 1, 7.468633,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1843, 0.588474, 0, 9999, -9999,\n 1.0, 100, 1, 19.264686, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1844, \n 0.345625, 0, 9999, -9999, 1.0, 100, 1, 32.384294, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1845, 0.373692, 0, 9999, -9999, 1.0, 100, 1, 31.436002,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1846, 0.117694, 0, 9999, -9999,\n 1.0, 100, 1, 3.74984, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1847, \n 6.98851, 0, 9999, -9999, 1.0, 100, 1, 120.215574, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1848, 0.671868, 0, 9999, -9999, 1.0, 100, 1, 9.514696,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1849, 1.591079, 0, 9999, -9999,\n 1.0, 100, 1, 37.619097, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1850, \n 3.459291, 0, 9999, -9999, 1.0, 100, 1, 48.54058, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 
0, 0, 0], [1851, 5.355057, 0, 9999, -9999, 1.0, 100, 1, 7.956444,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1852, 26.334441, 0, 9999, -9999,\n 1.0, 100, 1, 37.606916, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1853, \n 21.05905, 0, 9999, -9999, 1.0, 100, 1, 30.116711, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1854, 1.087784, 0, 9999, -9999, 1.0, 100, 1, 2.241167,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1855, 4.821441, 0, 9999, -9999,\n 1.0, 100, 1, 121.687485, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1856, \n 16.158296, 0, 9999, -9999, 1.0, 100, 1, 63.654358, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1857, 1.392575, 0, 9999, -9999, 1.0, 100, 1, \n 41.229597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1858, 0.962874, 0, \n 9999, -9999, 1.0, 100, 1, 27.374415, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1860, 5.321111, 0, 9999, -9999, 1.0, 100, 1, 84.163604, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1861, 1.232397, 0, 9999, -9999, 1.0, 100, \n 1, 26.861144, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1862, 0.420971, 0,\n 9999, -9999, 1.0, 100, 1, 32.512826, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1863, 0.38232, 0, 9999, -9999, 1.0, 100, 1, 30.063729, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1864, 1.848854, 0, 9999, -9999, 1.0, 100, \n 1, 138.236316, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1865, 26.719416,\n 0, 9999, -9999, 1.0, 100, 1, 68.097772, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1866, 37.73908, 0, 9999, -9999, 1.0, 100, 1, 98.289141, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1867, 0.07468, 0, 9999, -9999, 1.0, 100,\n 1, 2.041288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1868, 0.184336, 0,\n 9999, -9999, 1.0, 100, 1, 6.453374, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1869, 0.097593, 0, 9999, -9999, 1.0, 100, 1, 2.759448, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1870, 0.859649, 0, 9999, -9999, 1.0, 100, \n 1, 54.564665, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1871, 1.592185, 0,\n 9999, -9999, 1.0, 100, 1, 52.648444, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1872, 
0.137763, 0, 9999, -9999, 1.0, 100, 1, 1.683854, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1873, 0.231084, 0, 9999, -9999, 1.0, 100, \n 1, 9.025283, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1874, 0.083646, 0,\n 9999, -9999, 1.0, 100, 1, 3.554415, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1875, 0.158111, 0, 9999, -9999, 1.0, 100, 1, 7.837576, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1876, 0.141013, 0, 9999, -9999, 1.0, 100, \n 1, 4.936672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1877, 0.032441, 0,\n 9999, -9999, 1.0, 100, 1, 1.135717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1878, 0.168939, 0, 9999, -9999, 1.0, 100, 1, 8.374329, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1879, 0.048728, 0, 9999, -9999, 1.0, 100, \n 1, 1.752881, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1880, 0.763602, 0,\n 9999, -9999, 1.0, 100, 1, 38.46747, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1881, 0.30875, 0, 9999, -9999, 1.0, 100, 1, 4.535799, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1882, 0.374878, 0, 9999, -9999, 1.0, 100, 1, \n 5.120641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1883, 0.501411, 0, \n 9999, -9999, 1.0, 100, 1, 6.940957, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1884, 0.420718, 0, 9999, -9999, 1.0, 100, 1, 5.865468, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1885, 0.774015, 0, 9999, -9999, 1.0, 100, \n 1, 47.510175, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1886, 0.082618, 0,\n 9999, -9999, 1.0, 100, 1, 5.255398, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1887, 0.584546, 0, 9999, -9999, 1.0, 100, 1, 16.937671, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1888, 0.279655, 0, 9999, -9999, 1.0, 100, \n 1, 4.141211, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1889, 2.215842, 0,\n 9999, -9999, 1.0, 100, 1, 91.335184, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1890, 0.651391, 0, 9999, -9999, 1.0, 100, 1, 24.842697, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1891, 0.495423, 0, 9999, -9999, 1.0, 100, \n 1, 30.836318, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1892, 0.592029, 0,\n 9999, 
-9999, 1.0, 100, 1, 38.14699, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1893, 0.992301, 0, 9999, -9999, 1.0, 100, 1, 46.5682, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1894, 0.671605, 0, 9999, -9999, 1.0, 100, 1, \n 31.347572, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1895, 0.005762, 0, \n 9999, -9999, 1.0, 100, 1, 0.140628, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1896, 0.578794, 0, 9999, -9999, 1.0, 100, 1, 45.257234, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1897, 0.22732, 0, 9999, -9999, 1.0, 100, 1,\n 14.824595, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1898, 0.253484, 0, \n 9999, -9999, 1.0, 100, 1, 18.270499, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1899, 0.15769, 0, 9999, -9999, 1.0, 100, 1, 12.000496, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1900, 49.440108, 0, 9999, -9999, 1.0, 100,\n 1, 78.114509, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1901, 85.852576, \n 0, 9999, -9999, 1.0, 100, 1, 133.539659, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1902, 144.692709, 0, 9999, -9999, 1.0, 100, 1, 281.819662, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1903, 38.213684, 0, 9999, -9999, 1.0,\n 100, 1, 135.492385, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1904, \n 49.601, 0, 9999, -9999, 1.0, 100, 1, 79.184428, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1905, 0.245402, 0, 9999, -9999, 1.0, 100, 1, 9.160607,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1906, 1.441792, 0, 9999, -9999,\n 1.0, 100, 1, 72.356523, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1907, \n 0.557731, 0, 9999, -9999, 1.0, 100, 1, 28.893637, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1908, 0.972014, 0, 9999, -9999, 1.0, 100, 1, 50.477866,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1909, 2.006953, 0, 9999, -9999,\n 0.99951, 100, 1, 32.874676, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 1910, 1.289808, 0, 9999, -9999, 1.0, 100, 1, 20.259486, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1911, 0.514865, 0, 9999, -9999, 1.0, 100, 1, \n 8.189799, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1912, 69.733436, 0, \n 9999, -9999, 
1.0, 100, 1, 101.236915, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1913, 0.109472, 0, 9999, -9999, 1.0, 100, 1, 6.782522, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1914, 0.280751, 0, 9999, -9999, 1.0, 100, \n 1, 15.944561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1915, 57.319413, \n 0, 9999, -9999, 1.0, 100, 1, 159.570248, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1916, 99.107497, 0, 9999, -9999, 1.0, 100, 1, 277.793548, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1917, 42.116008, 0, 9999, -9999, 1.0,\n 100, 1, 186.387377, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1918, \n 58.749074, 0, 9999, -9999, 1.0, 100, 1, 120.486097, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1919, 28.497622, 0, 9999, -9999, 1.0, 100, 1, \n 61.1613, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1920, 1.811743, 0, \n 9999, -9999, 1.0, 100, 1, 9.95472, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1921, 145.712044, 0, 9999, -9999, 1.0, 100, 1, 230.400935, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1922, 45.36466, 0, 9999, -9999, 1.0, \n 100, 1, 66.116137, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1923, \n 9.238607, 0, 9999, -9999, 1.0, 100, 1, 21.836163, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1924, 5.019655, 0, 9999, -9999, 1.0, 100, 1, 36.518326,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1925, 5.170419, 0, 9999, -9999,\n 1.0, 100, 1, 135.324361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1926, \n 3.340663, 0, 9999, -9999, 1.0, 100, 1, 96.610178, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1927, 23.399289, 0, 9999, -9999, 1.0, 100, 1, \n 65.668809, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1928, 0.747036, 0, \n 9999, -9999, 1.0, 100, 1, 1.509884, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1929, 0.180301, 0, 9999, -9999, 1.0, 100, 1, 4.804832, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1930, 0.214601, 0, 9999, -9999, 1.0, 100, \n 1, 11.004973, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1931, 0.663788, 0,\n 9999, -9999, 1.0, 100, 1, 38.07556, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1932, 1.83202, 0, 9999, -9999, 
1.0, 100, 1, 46.722379, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1933, 0.735851, 0, 9999, -9999, 1.0, 100, \n 1, 44.239188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1934, 47.829223, \n 0, 9999, -9999, 1.0, 100, 1, 383.418198, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1935, 3.280962, 0, 9999, -9999, 1.0, 100, 1, 62.335643, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1936, 0.079477, 0, 9999, -9999, 1.0, \n 100, 1, 6.00797, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1937, 2.133855,\n 0, 9999, -9999, 1.0, 100, 1, 134.605733, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1938, 1.44698, 0, 9999, -9999, 1.0, 100, 1, 89.425619, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1939, 1.447635, 0, 9999, -9999, 1.0, \n 100, 1, 103.003683, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1940, \n 0.249661, 0, 9999, -9999, 1.0, 100, 1, 18.980829, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1941, 0.521998, 0, 9999, -9999, 1.0, 100, 1, \n 104.495097, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1942, 0.789037, 0, \n 9999, -9999, 1.0, 100, 1, 70.75487, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1943, 0.083093, 0, 9999, -9999, 1.0, 100, 1, 3.652558, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1944, 1.445543, 0, 9999, -9999, 1.0, 100, \n 1, 93.133765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1945, 0.304251, 0,\n 9999, -9999, 1.0, 100, 1, 10.651443, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1946, 0.037403, 0, 9999, -9999, 1.0, 100, 1, 1.309439, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1947, 1.219744, 0, 9999, -9999, 1.0, 100, \n 1, 17.996246, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1948, 4.586959, 0,\n 9999, -9999, 1.0, 100, 1, 83.075413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1949, 0.82436, 0, 9999, -9999, 1.0, 100, 1, 10.193229, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1950, 0.070892, 0, 9999, -9999, 1.0, 100, \n 1, 0.866493, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1951, 0.63205, 0, \n 9999, -9999, 1.0, 100, 1, 7.917597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1952, 3.277791, 0, 9999, -9999, 1.0, 100, 1, 
67.723951, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1953, 0.21067, 0, 9999, -9999, 1.0, 100, 1,\n 8.928556, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1954, 0.230766, 0, \n 9999, -9999, 1.0, 100, 1, 12.726892, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1955, 0.181558, 0, 9999, -9999, 1.0, 100, 1, 6.625255, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1956, 2.572929, 0, 9999, -9999, 1.0, 100, \n 1, 38.724888, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1957, 3.910752, 0,\n 9999, -9999, 1.0, 100, 1, 131.682322, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1958, 0.89549, 0, 9999, -9999, 1.0, 100, 1, 59.791759, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1959, 3.736043, 0, 9999, -9999, 1.0, 100, \n 1, 35.986928, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1960, 0.47403, 0,\n 9999, -9999, 1.0, 100, 1, 13.579895, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1961, 0.360769, 0, 9999, -9999, 1.0, 100, 1, 17.841481, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1962, 0.056937, 0, 9999, -9999, 1.0, 100, \n 1, 3.150179, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1963, 0.011195, 0,\n 9999, -9999, 1.0, 100, 1, 0.73138, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1964, 1.912109, 0, 9999, -9999, 1.0, 100, 1, 66.594121, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1965, 0.412755, 0, 9999, -9999, 1.0, 100, \n 1, 18.785491, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1966, 0.856742, 0,\n 9999, -9999, 1.0, 100, 1, 2.674199, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1967, 4.700675, 0, 9999, -9999, 1.0, 100, 1, 99.074235, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1968, 20.406765, 0, 9999, -9999, 1.0, 100,\n 1, 201.733891, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1969, 0.416455, \n 0, 9999, -9999, 1.0, 100, 1, 15.048118, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1970, 145.974713, 0, 9999, -9999, 1.0, 100, 1, 236.871781, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1971, 0.435823, 0, 9999, -9999, 1.0,\n 100, 1, 14.404409, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1972, \n 0.001026, 0, 9999, -9999, 1.0, 100, 1, 0.028378, 0.0, 
0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1973, 0.01934, 0, 9999, -9999, 1.0, 100, 1, 0.534696, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1974, 0.0995, 0, 9999, -9999, \n 1.0, 100, 1, 2.750907, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1975, \n 3.231276, 0, 9999, -9999, 1.0, 100, 1, 81.92918, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1976, 1.378981, 0, 9999, -9999, 1.0, 100, 1, 2.17499, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1977, 65.42762, 0, 9999, -9999,\n 1.0, 100, 1, 226.383637, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1978, \n 0.106404, 0, 9999, -9999, 1.0, 100, 1, 1.331592, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1979, 133.220566, 0, 9999, -9999, 1.0, 100, 1, \n 189.722792, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1980, 6.868705, 0, \n 9999, -9999, 1.0, 100, 1, 100.61941, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1981, 7.688742, 0, 9999, -9999, 1.0, 100, 1, 144.682717, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1982, 5.752632, 0, 9999, -9999, 1.0, 100, \n 1, 134.93778, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1983, 3.530567, 0,\n 9999, -9999, 1.0, 100, 1, 155.990147, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1984, 1.936985, 0, 9999, -9999, 1.0, 100, 1, 94.470611, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1985, 1.330237, 0, 9999, -9999, 1.0, 100, \n 1, 41.975835, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1986, 5.765495, 0,\n 9999, -9999, 1.0, 100, 1, 298.346979, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1987, 5.389422, 0, 9999, -9999, 1.0, 100, 1, 393.914067, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1988, 33.80903, 0, 9999, -9999, 1.0, 100, \n 1, 251.944939, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1989, 6.748426, \n 0, 9999, -9999, 1.0, 100, 1, 10.378288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1990, 1.381387, 0, 9999, -9999, 1.0, 100, 1, 50.351426, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1991, 47.912587, 0, 9999, -9999, 1.0, \n 100, 1, 849.576944, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1992, \n 6.27345, 0, 9999, -9999, 1.0, 100, 1, 233.477991, 0.0, 0, 0, 0, 
0, 0, 0,\n 0, 0, 0, 0, 0], [1993, 9.719656, 0, 9999, -9999, 1.0, 100, 1, \n 242.698643, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1994, 5.08751, 0, \n 9999, -9999, 1.0, 100, 1, 255.834576, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1995, 4.092824, 0, 9999, -9999, 1.0, 100, 1, 262.446698, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1996, 1.534479, 0, 9999, -9999, 1.0, 100, \n 1, 91.306832, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1997, 0.151788, 0,\n 9999, -9999, 1.0, 100, 1, 26.592561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1998, 7.104695, 0, 9999, -9999, 1.0, 100, 1, 12.126511, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1999, 4.534769, 0, 9999, -9999, 1.0, 100, \n 1, 199.184531, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2000, 7.544127, \n 0, 9999, -9999, 1.0, 100, 1, 579.835051, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [2001, 3.950905, 0, 9999, -9999, 1.0, 100, 1, 122.315703, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2002, 1.721932, 0, 9999, -9999, 1.0, \n 100, 1, 30.606436, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2003, \n 14.962198, 0, 9999, -9999, 1.0, 100, 1, 23.645071, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [2004, 10.900896, 0, 9999, -9999, 1.0, 100, 1, \n 17.73338, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2005, 2.306607, 0, \n 9999, -9999, 1.0, 100, 1, 72.071456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [2006, 1.851369, 0, 9999, -9999, 1.0, 100, 1, 59.660888, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [2007, 0.061806, 0, 9999, -9999, 1.0, 100, \n 1, 1.681507, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2008, 0.00429, 0, \n 9999, -9999, 1.0, 100, 1, 0.116706, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]'], {}), '([[586, 272.0, 0, 9999, -9999, 1.0, 100, 1, 272.0, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [589, 63.1, 0, 9999, -9999, 1.0, 100, 1, 63.1, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [590, 38.0, 0, 9999, -9999, 1.0, 100,\n 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [593, 11.1, 0, 9999, -\n 9999, 1.0, 100, 1, 11.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [594, \n 
19.0, 0, 9999, -9999, 1.0, 100, 1, 19.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [595, 1115.083703, 0, 9999, -9999, 1.0, 100, 1, 4730.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [597, 95.0, 0, 9999, -9999, 1.0, 100, 1,\n 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [598, 12.0, 0, 9999, -9999,\n 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [599, 9.3, 0,\n 9999, -9999, 1.0, 100, 1, 9.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 600, 16.9, 0, 9999, -9999, 1.0, 100, 1, 16.9, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [601, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [602, 24.6, 0, 9999, -9999, 1.0, 100, 1, \n 24.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [603, 837.82977, 0, 9999, \n -9999, 1.0, 100, 1, 3455.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [607,\n 1800.0, 0, 9999, -9999, 1.0, 100, 1, 1800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [608, 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [609, 36.4, 0, 9999, -9999, 1.0, 100, 1, \n 36.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [610, 61.5, 0, 9999, -9999,\n 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [612, 30.0, 0,\n 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [613, 85.0, 0, 9999, -9999, 1.0, 100, 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [614, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [616, 29.0, 0, 9999, -9999, 1.0, 100, 1, \n 29.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [617, 137.0, 0, 9999, -\n 9999, 1.0, 100, 1, 137.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [618, \n 33.4, 0, 9999, -9999, 1.0, 100, 1, 33.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [619, 118.0, 0, 9999, -9999, 1.0, 100, 1, 118.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [621, 765.0, 0, 9999, -9999, 1.0, 100, 1, 765.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [623, 760.0, 0, 9999, -9999, 1.0,\n 100, 1, 760.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [624, 27.0, 0, \n 
9999, -9999, 1.0, 100, 1, 27.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [628, 449.0, 0, 9999, -9999, 1.0, 100, 1, 449.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [629, 75.3, 0, 9999, -9999, 1.0, 100, 1, 75.3, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [631, 79.8, 0, 9999, -9999, 1.0, 100, 1,\n 79.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [632, 45.1, 0, 9999, -9999,\n 1.0, 100, 1, 45.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [637, 53.7, 0,\n 9999, -9999, 1.0, 100, 1, 53.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [638, 128.7, 0, 9999, -9999, 1.0, 100, 1, 128.7, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [639, 15.8, 0, 9999, -9999, 1.0, 100, 1, 15.8, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [640, 12.0, 0, 9999, -9999, 1.0, 100, 1,\n 12.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [641, 12.6, 0, 9999, -9999,\n 1.0, 100, 1, 12.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [642, 28.9, 0,\n 9999, -9999, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [643, 857.0, 0, 9999, -9999, 1.0, 100, 1, 857.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [646, 103.0, 0, 9999, -9999, 1.0, 100, 1, 103.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [647, 14.0, 0, 9999, -9999, 1.0, 100, 1,\n 14.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [650, 1324.5, 0, 9999, -\n 9999, 1.0, 100, 1, 1324.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [652,\n 46.9, 0, 9999, -9999, 1.0, 100, 1, 46.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [655, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [657, 38.0, 0, 9999, -9999, 1.0, 100, 1, 38.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [658, 95.0, 0, 9999, -9999, 1.0,\n 100, 1, 95.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [661, 32.7, 0, \n 9999, -9999, 1.0, 100, 1, 32.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [662, 9.2, 0, 9999, -9999, 1.0, 100, 1, 9.2, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [663, 15.0, 0, 9999, -9999, 1.0, 100, 1, 15.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [666, 28.9, 0, 9999, -9999, 1.0, 100, 1, \n 28.9, 
0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [668, 766.0, 0, 9999, -\n 9999, 1.0, 100, 1, 766.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [670, \n 24.0, 0, 9999, -9999, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [672, 33.1, 0, 9999, -9999, 1.0, 100, 1, 33.1, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [675, 10.6, 0, 9999, -9999, 1.0, 100, 1, 10.6, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [676, 370.0, 0, 9999, -9999, 1.0,\n 100, 1, 370.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [678, 1017.0, 0, \n 9999, -9999, 1.0, 100, 1, 1017.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [679, 547.278885, 0, 9999, -9999, 1.0, 100, 1, 695.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [681, 40.1, 0, 9999, -9999, 1.0, 100, 1, 40.1, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [683, 27.5, 0, 9999, -9999, 1.0,\n 100, 1, 27.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [687, 1329.0, 0, \n 9999, -9999, 1.0, 100, 1, 1329.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [689, 310.0, 0, 9999, -9999, 1.0, 100, 1, 310.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [691, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [693, 194.0, 0, 9999, -9999, 1.0, 100, 1,\n 194.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [694, 16.4, 0, 9999, -\n 9999, 1.0, 100, 1, 16.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [695, \n 14.7, 0, 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [696, 721.0, 0, 9999, -9999, 1.0, 100, 1, 721.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [697, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [698, 24.0, 0, 9999, -9999, 1.0,\n 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [701, 47.2, 0, \n 9999, -9999, 1.0, 100, 1, 47.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [702, 73.4, 0, 9999, -9999, 1.0, 100, 1, 73.4, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [704, 508.0, 0, 9999, -9999, 1.0, 100, 1, 508.0, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [705, 17.0, 0, 9999, -9999, 1.0, 100, 1, \n 17.0, 0.0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0], [707, 34.0, 0, 9999, -9999,\n 1.0, 100, 1, 34.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [708, 7.8, 0,\n 9999, -9999, 1.0, 100, 1, 7.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 711, 102.08865, 0, 9999, -9999, 1.0, 100, 1, 176.1, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [713, 13.4, 0, 9999, -9999, 1.0, 100, 1, 13.4, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [714, 15.0, 0, 9999, -9999, 1.0, 100,\n 1, 15.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [716, 0.1, 0, 9999, -\n 9999, 1.0, 100, 1, 0.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [717, \n 11.0, 0, 9999, -9999, 1.0, 100, 1, 11.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [719, 1347.602507, 0, 9999, -9999, 1.0, 100, 1, 1958.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [722, 20.7, 0, 9999, -9999, 1.0, 100, 1,\n 20.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [723, 19.7, 0, 9999, -9999,\n 1.0, 100, 1, 19.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [724, 12.1, 0,\n 9999, -9999, 1.0, 100, 1, 12.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [725, 800.0, 0, 9999, -9999, 1.0, 100, 1, 800.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [727, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [728, 510.0, 0, 9999, -9999, 1.0, 100, 1,\n 510.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [730, 633.2, 0, 9999, -\n 9999, 1.0, 100, 1, 633.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [731, \n 774.368631, 0, 9999, -9999, 1.0, 100, 1, 895.0, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [732, 14.6, 0, 9999, -9999, 1.0, 100, 1, 14.6, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [733, 396.6, 0, 9999, -9999, 1.0, 100, 1,\n 396.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [735, 84.8, 0, 9999, -\n 9999, 1.0, 100, 1, 84.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [737, \n 28.0, 0, 9999, -9999, 1.0, 100, 1, 28.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [738, 138.5, 0, 9999, -9999, 1.0, 100, 1, 138.5, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [739, 59.9, 0, 9999, -9999, 1.0, 100, 1, 59.9, \n 0.0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0], [741, 214.0, 0, 9999, -9999, 1.0,\n 100, 1, 214.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [742, 9.0, 0, \n 9999, -9999, 1.0, 100, 1, 9.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 743, 1410.0, 0, 9999, -9999, 1.0, 100, 1, 1410.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [745, 42.0, 0, 9999, -9999, 1.0, 100, 1, 42.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [746, 100.0, 0, 9999, -9999, 1.0, 100, 1,\n 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [747, 12.5, 0, 9999, -\n 9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [748, \n 110.0, 0, 9999, -9999, 1.0, 100, 1, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [749, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [750, 90.8, 0, 9999, -9999, 1.0, 100, 1, 90.8,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [753, 297.43075, 0, 9999, -9999,\n 1.0, 100, 1, 311.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [758, 18.5, \n 0, 9999, -9999, 1.0, 100, 1, 18.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [760, 342.451659, 0, 9999, -9999, 1.0, 100, 1, 794.0, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [761, 15.7, 0, 9999, -9999, 1.0, 100, 1, 15.7,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [762, 1105.0, 0, 9999, -9999, \n 1.0, 100, 1, 1105.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [763, 20.3,\n 0, 9999, -9999, 1.0, 100, 1, 20.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [765, 59.0, 0, 9999, -9999, 1.0, 100, 1, 59.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [767, 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [769, 43.3, 0, 9999, -9999, 1.0, 100, 1,\n 43.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [771, 690.0, 0, 9999, -\n 9999, 1.0, 100, 1, 690.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [772, \n 18.8, 0, 9999, -9999, 1.0, 100, 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [774, 33.5, 0, 9999, -9999, 1.0, 100, 1, 33.5, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [776, 56.0, 0, 9999, -9999, 1.0, 100, 1, 56.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0], [777, 79.0, 0, 9999, -9999, 1.0,\n 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [778, 14.7, 0, \n 9999, -9999, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [781, 981.561684, 0, 9999, -9999, 1.0, 100, 1, 1310.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [784, 967.134125, 0, 9999, -9999, 1.0, 100, 1, \n 1275.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [785, 3.0, 0, 9999, -\n 9999, 1.0, 100, 1, 3.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [787, \n 778.0, 0, 9999, -9999, 1.0, 100, 1, 778.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [788, 875.0, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [789, 77.4, 0, 9999, -9999, 1.0, 100, 1, 77.4,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [790, 75.8, 0, 9999, -9999, 1.0,\n 100, 1, 75.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [791, 10.0, 0, \n 9999, -9999, 1.0, 100, 1, 10.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [792, 62.7, 0, 9999, -9999, 1.0, 100, 1, 62.7, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [795, 13.6, 0, 9999, -9999, 1.0, 100, 1, 13.6, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [798, 116.273516, 0, 9999, -9999, 1.0, 100,\n 1, 319.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [800, 36.5, 0, 9999, -\n 9999, 1.0, 100, 1, 36.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [801, \n 50.0, 0, 9999, -9999, 1.0, 100, 1, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [802, 500.0, 0, 9999, -9999, 1.0, 100, 1, 500.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [805, 661.169352, 0, 9999, -9999, 1.0, 100, 1, \n 1410.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [806, 35.8, 0, 9999, -\n 9999, 1.0, 100, 1, 35.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [808, \n 217.5, 0, 9999, -9999, 1.0, 100, 1, 217.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [809, 12.5, 0, 9999, -9999, 1.0, 100, 1, 12.5, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [810, 97.9, 0, 9999, -9999, 1.0, 100, 1, 97.9,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [811, 25.2, 0, 9999, -9999, 1.0,\n 100, 1, 25.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0], [814, 89.0, 0, \n 9999, -9999, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [815, 13.4, 0, 9999, -9999, 1.0, 100, 1, 13.4, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [816, 80.1, 0, 9999, -9999, 1.0, 100, 1, 80.1, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [817, 54.0, 0, 9999, -9999, 1.0, 100, 1, \n 54.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [818, 757.0, 0, 9999, -\n 9999, 1.0, 100, 1, 757.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [821, \n 82.5, 0, 9999, -9999, 1.0, 100, 1, 82.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [822, 134.0, 0, 9999, -9999, 1.0, 100, 1, 134.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [825, 42.7, 0, 9999, -9999, 1.0, 100, 1, 42.7, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [826, 58.0, 0, 9999, -9999, 1.0,\n 100, 1, 58.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [829, 211.0, 0, \n 9999, -9999, 1.0, 100, 1, 211.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [830, 89.0, 0, 9999, -9999, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [833, 18.6, 0, 9999, -9999, 1.0, 100, 1, 18.6, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [834, 23.3, 0, 9999, -9999, 1.0, 100, 1, \n 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [835, 63.7, 0, 9999, -9999,\n 1.0, 100, 1, 63.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [836, 25.5, 0,\n 9999, -9999, 1.0, 100, 1, 25.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [837, 472.0, 0, 9999, -9999, 1.0, 100, 1, 472.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [839, 73.3, 0, 9999, -9999, 1.0, 100, 1, 73.3, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [840, 1158.147571, 0, 9999, -9999, 1.0, \n 100, 1, 1391.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [841, 23.3, 0, \n 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [842, 540.5, 0, 9999, -9999, 1.0, 100, 1, 540.5, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [843, 333.0, 0, 9999, -9999, 1.0, 100, 1, 333.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [844, 40.0, 0, 9999, -9999, 1.0, 100, 1,\n 40.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [845, 318.0, 0, 
9999, -\n 9999, 1.0, 100, 1, 318.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [847, \n 124.467036, 0, 9999, -9999, 1.0, 100, 1, 280.0, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [848, 42.0, 0, 9999, -9999, 1.0, 100, 1, 42.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [849, 779.0, 0, 9999, -9999, 1.0, 100, 1,\n 779.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [850, 16.0, 0, 9999, -\n 9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [851, \n 79.5, 0, 9999, -9999, 1.0, 100, 1, 79.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [852, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [853, 11.6, 0, 9999, -9999, 1.0, 100, 1, 11.6, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [854, 81.8, 0, 9999, -9999, 1.0,\n 100, 1, 81.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [855, 688.0, 0, \n 9999, -9999, 1.0, 100, 1, 688.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [856, 36.0, 0, 9999, -9999, 1.0, 100, 1, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [857, 1402.0, 0, 9999, -9999, 1.0, 100, 1, 1402.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [858, 56.8, 0, 9999, -9999, 1.0, 100, 1,\n 56.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [859, 85.0, 0, 9999, -9999,\n 1.0, 100, 1, 85.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [860, 25.0, 0,\n 9999, -9999, 1.0, 100, 1, 25.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [862, 725.0, 0, 9999, -9999, 1.0, 100, 1, 725.0, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [863, 0.6, 0, 9999, -9999, 1.0, 100, 1, 0.6, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [864, 875.0, 0, 9999, -9999, 1.0, 100, 1, \n 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [865, 11.0, 0, 9999, -\n 9999, 1.0, 100, 1, 11.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [867, \n 769.0, 0, 9999, -9999, 1.0, 100, 1, 769.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0], [869, 1360.0, 0, 9999, -9999, 1.0, 100, 1, 1360.0, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [870, 58.4, 0, 9999, -9999, 1.0, 100, 1, \n 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [872, 22.5, 0, 9999, -9999,\n 
1.0, 100, 1, 22.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [873, 122.0, \n 0, 9999, -9999, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [874, 20.7, 0, 9999, -9999, 1.0, 100, 1, 20.7, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [875, 24.4, 0, 9999, -9999, 1.0, 100, 1, 24.4, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [877, 24.8, 0, 9999, -9999, 1.0, 100,\n 1, 24.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [881, 1001.3, 0, 9999, \n -9999, 1.0, 100, 1, 1001.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [882,\n 17.4, 0, 9999, -9999, 1.0, 100, 1, 17.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [883, 18.0, 0, 9999, -9999, 1.0, 100, 1, 18.0, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [886, 2572.0, 0, 9999, -9999, 1.0, 100, 1, 2572.0,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [889, 9.5, 0, 9999, -9999, 1.0, \n 100, 1, 9.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [890, 48.0, 0, 9999,\n -9999, 1.0, 100, 1, 48.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [893, \n 60.0, 0, 9999, -9999, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [894, 158.0, 0, 9999, -9999, 1.0, 100, 1, 158.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [895, 19.0, 0, 9999, -9999, 1.0, 100, 1, 19.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [896, 24.0, 0, 9999, -9999, 1.0,\n 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [898, 84.6, 0, \n 9999, -9999, 1.0, 100, 1, 84.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [900, 112.6, 0, 9999, -9999, 1.0, 100, 1, 112.6, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [902, 19.5, 0, 9999, -9999, 1.0, 100, 1, 19.5, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [903, 20.1, 0, 9999, -9999, 1.0, 100, 1,\n 20.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [905, 121.080178, 0, 9999,\n -9999, 1.0, 100, 1, 137.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [907,\n 67.3, 0, 9999, -9999, 1.0, 100, 1, 67.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [909, 36.8, 0, 9999, -9999, 1.0, 100, 1, 36.8, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [911, 288.5, 0, 9999, -9999, 1.0, 100, 1, 288.5, \n 
0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [913, 33.01098, 0, 9999, -9999, \n 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [914, 112.1, \n 0, 9999, -9999, 1.0, 100, 1, 112.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [915, 12.0, 0, 9999, -9999, 1.0, 100, 1, 12.0, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [916, 196.0, 0, 9999, -9999, 1.0, 100, 1, 196.0, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [917, 17.0, 0, 9999, -9999, 1.0, 100,\n 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [918, 38.5, 0, 9999, -\n 9999, 1.0, 100, 1, 38.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [919, \n 15.6, 0, 9999, -9999, 1.0, 100, 1, 15.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [920, 12.8, 0, 9999, -9999, 1.0, 100, 1, 12.8, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [921, 124.0, 0, 9999, -9999, 1.0, 100, 1, 124.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [922, 164.0, 0, 9999, -9999, 1.0,\n 100, 1, 164.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [923, 146.0, 0, \n 9999, -9999, 1.0, 100, 1, 146.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [925, 26.0, 0, 9999, -9999, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [928, 61.5, 0, 9999, -9999, 1.0, 100, 1, 61.5, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [931, 217.1, 0, 9999, -9999, 1.0, 100, 1, \n 217.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [934, 296.0, 0, 9999, -\n 9999, 1.0, 100, 1, 296.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [935, \n 23.1, 0, 9999, -9999, 1.0, 100, 1, 23.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [936, 104.4, 0, 9999, -9999, 1.0, 100, 1, 104.4, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [937, 30.0, 0, 9999, -9999, 1.0, 100, 1, 30.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [939, 0.1, 0, 9999, -9999, 1.0, \n 100, 1, 0.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [940, 29.6, 0, 9999,\n -9999, 1.0, 100, 1, 29.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [942, \n 51.9, 0, 9999, -9999, 1.0, 100, 1, 51.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [943, 66.3, 0, 9999, -9999, 1.0, 100, 1, 66.3, 0.0, 0, 0, 0, 0, \n 
0, 0, 0, 0, 0, 0, 0], [944, 25.4, 0, 9999, -9999, 1.0, 100, 1, 25.4, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [945, 35.0, 0, 9999, -9999, 1.0,\n 100, 1, 35.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [946, 80.0, 0, \n 9999, -9999, 1.0, 100, 1, 80.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [948, 79.0, 0, 9999, -9999, 1.0, 100, 1, 79.0, 0.0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0], [950, 16.0, 0, 9999, -9999, 1.0, 100, 1, 16.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [951, 393.739186, 0, 9999, -9999, 1.0, 100,\n 1, 444.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [952, 31.7, 0, 9999, -\n 9999, 1.0, 100, 1, 31.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [956, \n 65.0, 0, 9999, -9999, 1.0, 100, 1, 65.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [957, 6.0, 0, 9999, -9999, 1.0, 100, 1, 6.0, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [958, 66.7, 0, 9999, -9999, 1.0, 100, 1, 66.7, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [959, 45.5, 0, 9999, -9999, 1.0, 100,\n 1, 45.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [960, 26.5, 0, 9999, -\n 9999, 1.0, 100, 1, 26.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [963, \n 559.823432, 0, 9999, -9999, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [965, 352.0, 0, 9999, -9999, 1.0, 100, 1, 352.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [966, 66.0, 0, 9999, -9999, 1.0, 100, 1,\n 66.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [967, 37.5, 0, 9999, -9999,\n 1.0, 100, 1, 37.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [968, 54.0, 0,\n 9999, -9999, 0.99951, 100, 1, 54.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [969, 56.9, 0, 9999, -9999, 0.99951, 100, 1, 56.9, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [971, 20.0, 0, 9999, -9999, 1.0, 100, 1, 20.0, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [973, 1347.0, 0, 9999, -9999, \n 1.0, 100, 1, 1347.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [976, 26.9,\n 0, 9999, -9999, 1.0, 100, 1, 26.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [977, 324.0, 0, 9999, -9999, 1.0, 100, 1, 324.0, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 
0, 0, 0], [978, 4.6, 0, 9999, -9999, 1.0, 100, 1, 4.6, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [980, 309.665775, 0, 9999, -9999, 1.0, \n 100, 1, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [981, 119.0, 0, \n 9999, -9999, 1.0, 100, 1, 119.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [982, 9.9, 0, 9999, -9999, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [983, 44.0, 0, 9999, -9999, 1.0, 100, 1, 44.0, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [984, 465.0, 0, 9999, -9999, 1.0, 100, 1, \n 465.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [985, 22.0, 0, 9999, -\n 9999, 1.0, 100, 1, 22.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [986, \n 11.2, 0, 9999, -9999, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [987, 164.5, 0, 9999, -9999, 1.0, 100, 1, 164.5, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [988, 5.1, 0, 9999, -9999, 1.0, 100, 1, 5.1, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [990, 300.0, 0, 9999, -9999, 1.0, 100,\n 1, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [993, 392.0, 0, 9999, \n -9999, 1.0, 100, 1, 392.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [994,\n 33.0, 0, 9999, -9999, 1.0, 100, 1, 33.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [995, 4.2, 0, 9999, -9999, 1.0, 100, 1, 4.2, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [996, 11.5, 0, 9999, -9999, 1.0, 100, 1, 11.5, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [997, 18.8, 0, 9999, -9999, 1.0, 100,\n 1, 18.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [998, 423.0, 0, 9999, -\n 9999, 1.0, 100, 1, 423.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [999, \n 15.6, 0, 9999, -9999, 1.0, 100, 1, 15.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1000, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1002, 9.9, 0, 9999, -9999, 1.0, 100, 1, 9.9, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1003, 900.0, 0, 9999, -9999, 1.0, \n 100, 1, 900.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1006, 122.0, 0, \n 9999, -9999, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1007, 23.3, 
0, 9999, -9999, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1008, 49.0, 0, 9999, -9999, 1.0, 100, 1, 49.0, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1010, 750.0, 0, 9999, -9999, 1.0, 100, \n 1, 750.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1011, 18.7, 0, 9999, \n -9999, 1.0, 100, 1, 18.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1012,\n 2835.0, 0, 9999, -9999, 1.0, 100, 1, 2835.0, 0.0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0], [1014, 750.0, 0, 9999, -9999, 1.0, 100, 1, 750.0, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1018, 175.9, 0, 9999, -9999, 1.0, 100, \n 1, 175.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1019, 120.0, 0, 9999,\n -9999, 1.0, 100, 1, 120.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1023,\n 0.2, 0, 9999, -9999, 1.0, 100, 1, 0.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1025, 113.6, 0, 9999, -9999, 1.0, 100, 1, 113.6, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [1026, 655.6, 0, 9999, -9999, 1.0, 100, 1, \n 655.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1028, 193.856792, 0, \n 9999, -9999, 1.0, 100, 1, 400.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1029, 47.945063, 0, 9999, -9999, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1030, 512.154762, 0, 9999, -9999, 1.0, 100, 1, \n 1018.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1031, 465.297424, 0, \n 9999, -9999, 1.0, 100, 1, 1447.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1032, 38.015413, 0, 9999, -9999, 1.0, 100, 1, 153.510391, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1033, 2.188896, 0, 9999, -9999, 1.0, 100, 1, \n 50.164506, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1034, 28.459011, 0, \n 9999, -9999, 1.0, 100, 1, 84.262779, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1035, 13.483148, 0, 9999, -9999, 1.0, 100, 1, 49.886469, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1036, 10.668878, 0, 9999, -9999, 1.0, 100,\n 1, 67.223077, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1037, 6.453908, 0,\n 9999, -9999, 1.0, 100, 1, 94.684044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1038, 9.267765, 
0, 9999, -9999, 1.0, 100, 1, 85.798525, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1039, 0.034961, 0, 9999, -9999, 1.0, 100, \n 1, 132.724114, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1041, 45.839958,\n 0, 9999, -9999, 1.0, 100, 1, 204.187624, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1042, 0.015112, 0, 9999, -9999, 1.0, 100, 1, 52.70053, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1044, 2.20729, 0, 9999, -9999, 1.0, 100,\n 1, 36.163532, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1046, 4.510177, 0,\n 9999, -9999, 1.0, 100, 1, 106.787063, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1047, 1.291195, 0, 9999, -9999, 1.0, 100, 1, 13.029581, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1048, 3.439348, 0, 9999, -9999, 1.0, 100, \n 1, 71.656883, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1049, 90.190989, \n 0, 9999, -9999, 1.0, 100, 1, 293.755375, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1050, 2.855489, 0, 9999, -9999, 1.0, 100, 1, 52.781606, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1051, 25.039476, 0, 9999, -9999, 1.0, \n 100, 1, 304.42978, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1052, \n 4.408997, 0, 9999, -9999, 1.0, 100, 1, 20.66869, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1053, 3.393402, 0, 9999, -9999, 1.0, 100, 1, 16.368087,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1054, 64.159181, 0, 9999, -9999,\n 1.0, 100, 1, 273.855776, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1055, \n 0.001443, 0, 9999, -9999, 1.0, 100, 1, 2.856069, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1056, 46.702863, 0, 9999, -9999, 1.0, 100, 1, \n 603.943953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1057, 1.149455, 0, \n 9999, -9999, 1.0, 100, 1, 426.979979, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1058, 7.224099, 0, 9999, -9999, 1.0, 100, 1, 1055.735174, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1059, 17.207922, 0, 9999, -9999, 1.0, \n 100, 1, 414.871332, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1060, \n 0.105899, 0, 9999, -9999, 1.0, 100, 1, 10.351632, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1061, 5.978684, 
0, 9999, -9999, 1.0, 100, 1, \n 161.862597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1062, 0.003268, 0, \n 9999, -9999, 1.0, 100, 1, 2.878561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1063, 0.002721, 0, 9999, -9999, 1.0, 100, 1, 8.670916, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1064, 43.352939, 0, 9999, -9999, 1.0, 100,\n 1, 209.786524, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1065, 64.557076,\n 0, 9999, -9999, 1.0, 100, 1, 339.421643, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1066, 6.593551, 0, 9999, -9999, 1.0, 100, 1, 134.399019, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1067, 8.448185, 0, 9999, -9999, 1.0, \n 100, 1, 32.653526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1068, \n 0.145539, 0, 9999, -9999, 1.0, 100, 1, 5.009022, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1069, 0.157038, 0, 9999, -9999, 1.0, 100, 1, 3.190759,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1070, 0.029105, 0, 9999, -9999,\n 1.0, 100, 1, 0.788599, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1071, \n 0.624346, 0, 9999, -9999, 1.0, 100, 1, 4.328696, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1072, 40.600927, 0, 9999, -9999, 1.0, 100, 1, \n 112.606433, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1073, 17.346842, 0,\n 9999, -9999, 1.0, 100, 1, 77.81765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1074, 48.373759, 0, 9999, -9999, 1.0, 100, 1, 153.592986, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1075, 2.832969, 0, 9999, -9999, 1.0, \n 100, 1, 15.783448, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1077, \n 0.795164, 0, 9999, -9999, 1.0, 100, 1, 26.120041, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1078, 1.572306, 0, 9999, -9999, 1.0, 100, 1, 34.413246,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1079, 23.483715, 0, 9999, -9999,\n 1.0, 100, 1, 72.327992, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1080, \n 12.71579, 0, 9999, -9999, 1.0, 100, 1, 132.149983, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1081, 41.337281, 0, 9999, -9999, 1.0, 100, 1, \n 405.642115, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1082, 17.035693, 
0,\n 9999, -9999, 1.0, 100, 1, 510.054159, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1083, 30.072335, 0, 9999, -9999, 1.0, 100, 1, 633.681488, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1084, 27.557337, 0, 9999, -9999, 1.0, \n 100, 1, 602.719371, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1085, \n 6.690069, 0, 9999, -9999, 1.0, 100, 1, 113.714399, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1086, 9.340055, 0, 9999, -9999, 1.0, 100, 1, \n 225.59917, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1087, 2.219279, 0, \n 9999, -9999, 1.0, 100, 1, 116.66597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1088, 0.225948, 0, 9999, -9999, 1.0, 100, 1, 36.782492, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1089, 0.685877, 0, 9999, -9999, 1.0, 100, \n 1, 384.449592, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1090, 18.3652, 0,\n 9999, -9999, 1.0, 100, 1, 89.140897, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1091, 0.069841, 0, 9999, -9999, 1.0, 100, 1, 45.7939, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1092, 0.000886, 0, 9999, -9999, 1.0, 100, 1, \n 54.002032, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1093, 19.472331, 0, \n 9999, -9999, 1.0, 100, 1, 155.605298, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1094, 0.324922, 0, 9999, -9999, 1.0, 100, 1, 3.759038, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1095, 0.017632, 0, 9999, -9999, 1.0, 100, \n 1, 0.204951, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1096, 5.653431, 0,\n 9999, -9999, 1.0, 100, 1, 84.50612, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1097, 0.849404, 0, 9999, -9999, 1.0, 100, 1, 4.601122, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1098, 22.024295, 0, 9999, -9999, 1.0, 100,\n 1, 71.025499, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1099, 111.287059,\n 0, 9999, -9999, 1.0, 100, 1, 290.937198, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1100, 0.000469, 0, 9999, -9999, 1.0, 100, 1, 0.026696, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1101, 0.059355, 0, 9999, -9999, 1.0, \n 100, 1, 83.930665, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1102, \n 0.348019, 0, 
9999, -9999, 1.0, 100, 1, 350.979988, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1103, 4.374488, 0, 9999, -9999, 1.0, 100, 1, \n 245.381701, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1104, 0.020088, 0, \n 9999, -9999, 1.0, 100, 1, 0.206918, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1105, 0.140469, 0, 9999, -9999, 1.0, 100, 1, 2.178593, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1106, 0.24489, 0, 9999, -9999, 1.0, 100, 1,\n 2.289793, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1107, 4.365112, 0, \n 9999, -9999, 1.0, 100, 1, 76.221615, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1108, 15.005714, 0, 9999, -9999, 1.0, 100, 1, 320.422751, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1109, 0.032298, 0, 9999, -9999, 1.0, \n 100, 1, 0.77821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1110, 0.109011,\n 0, 9999, -9999, 1.0, 100, 1, 1.654557, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1111, 3.982839, 0, 9999, -9999, 1.0, 100, 1, 89.637993, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1112, 13.347732, 0, 9999, -9999, 1.0, \n 100, 1, 69.53429, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1113, \n 0.690017, 0, 9999, -9999, 1.0, 100, 1, 3.536361, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1114, 3.236521, 0, 9999, -9999, 1.0, 100, 1, 13.446889,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1115, 12.945936, 0, 9999, -9999,\n 1.0, 100, 1, 50.575278, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1116, \n 7.186063, 0, 9999, -9999, 1.0, 100, 1, 32.601142, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1117, 21.735816, 0, 9999, -9999, 1.0, 100, 1, \n 90.792541, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1118, 1.167272, 0, \n 9999, -9999, 1.0, 100, 1, 8.725012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1119, 9.731188, 0, 9999, -9999, 1.0, 100, 1, 43.254023, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1120, 0.454554, 0, 9999, -9999, 1.0, 100, \n 1, 2.416001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1121, 0.129799, 0,\n 9999, -9999, 1.0, 100, 1, 0.540589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1122, 0.277958, 0, 9999, -9999, 
1.0, 100, 1, 1.462883, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1123, 0.327821, 0, 9999, -9999, 1.0, 100, \n 1, 1.464336, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1124, 0.319573, 0,\n 9999, -9999, 1.0, 100, 1, 1.288283, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1125, 1.853524, 0, 9999, -9999, 1.0, 100, 1, 25.818899, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1126, 2.010115, 0, 9999, -9999, 1.0, 100, \n 1, 29.154893, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1127, 6.767523, 0,\n 9999, -9999, 1.0, 100, 1, 105.296621, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1128, 0.159146, 0, 9999, -9999, 1.0, 100, 1, 3.06139, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1129, 0.240204, 0, 9999, -9999, 1.0, 100, 1, \n 4.738747, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1130, 0.112767, 0, \n 9999, -9999, 1.0, 100, 1, 1.025754, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1131, 0.151265, 0, 9999, -9999, 1.0, 100, 1, 2.897078, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1132, 0.043874, 0, 9999, -9999, 1.0, 100, \n 1, 0.359497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1133, 0.17278, 0, \n 9999, -9999, 1.0, 100, 1, 0.719597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1134, 0.12208, 0, 9999, -9999, 1.0, 100, 1, 0.508453, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1135, 0.70461, 0, 9999, -9999, 1.0, 100, 1, \n 8.117819, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1136, 0.085367, 0, \n 9999, -9999, 1.0, 100, 1, 0.4027, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n [1137, 0.542436, 0, 9999, -9999, 1.0, 100, 1, 3.669012, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1138, 0.28633, 0, 9999, -9999, 1.0, 100, 1, \n 1.254278, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1139, 3.827873, 0, \n 9999, -9999, 1.0, 100, 1, 19.822769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1140, 8.531552, 0, 9999, -9999, 1.0, 100, 1, 28.389457, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1141, 14.71591, 0, 9999, -9999, 1.0, 100, \n 1, 119.46456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1142, 0.282411, 0,\n 9999, -9999, 1.0, 100, 1, 1.215733, 0.0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1143, 2.17636, 0, 9999, -9999, 1.0, 100, 1, 25.239356, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1144, 14.468173, 0, 9999, -9999, 1.0, 100,\n 1, 52.527382, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1145, 41.721366, \n 0, 9999, -9999, 1.0, 100, 1, 175.889627, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1146, 0.206808, 0, 9999, -9999, 1.0, 100, 1, 0.861317, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1147, 10.482934, 0, 9999, -9999, 1.0, \n 100, 1, 45.703707, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1148, \n 1.12205, 0, 9999, -9999, 1.0, 100, 1, 17.645529, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1149, 0.384525, 0, 9999, -9999, 1.0, 100, 1, 8.556784,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1150, 0.21385, 0, 9999, -9999, \n 1.0, 100, 1, 3.62256, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1151, \n 0.761655, 0, 9999, -9999, 1.0, 100, 1, 13.036113, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1152, 0.007549, 0, 9999, -9999, 1.0, 100, 1, 0.116518,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1153, 0.005643, 0, 9999, -9999,\n 1.0, 100, 1, 0.068788, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1154, \n 0.013178, 0, 9999, -9999, 1.0, 100, 1, 0.160625, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1155, 0.036293, 0, 9999, -9999, 1.0, 100, 1, 0.609451,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1156, 2.725518, 0, 9999, -9999,\n 1.0, 100, 1, 16.022334, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1157, \n 0.254864, 0, 9999, -9999, 1.0, 100, 1, 4.354147, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1158, 0.090066, 0, 9999, -9999, 1.0, 100, 1, 1.04304, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1159, 1.846823, 0, 9999, -9999,\n 1.0, 100, 1, 13.498087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1160, \n 4.449778, 0, 9999, -9999, 1.0, 100, 1, 238.377761, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1161, 0.968938, 0, 9999, -9999, 1.0, 100, 1, \n 25.263391, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1162, 0.004399, 0, \n 9999, -9999, 1.0, 100, 1, 502.409178, 0.0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0,\n 0], [1164, 0.681555, 0, 9999, -9999, 1.0, 100, 1, 285.625412, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1166, 19.037928, 0, 9999, -9999, 1.0, 100,\n 1, 83.277163, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1167, 0.436847, 0,\n 9999, -9999, 1.0, 100, 1, 5.05378, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1168, 0.092048, 0, 9999, -9999, 1.0, 100, 1, 1.345774, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1169, 0.15256, 0, 9999, -9999, 1.0, 100, 1, \n 2.721845, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1170, 0.022911, 0, \n 9999, -9999, 1.0, 100, 1, 0.26599, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1171, 1.218434, 0, 9999, -9999, 1.0, 100, 1, 9.029885, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1172, 0.184488, 0, 9999, -9999, 1.0, 100, 1, \n 3.584043, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1173, 0.074867, 0, \n 9999, -9999, 1.0, 100, 1, 254.253327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1174, 0.108899, 0, 9999, -9999, 1.0, 100, 1, 1.260082, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1175, 0.04558, 0, 9999, -9999, 1.0, 100, 1,\n 0.855454, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1176, 0.013921, 0, \n 9999, -9999, 1.0, 100, 1, 0.23222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1177, 1.759222, 0, 9999, -9999, 1.0, 100, 1, 27.87401, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1178, 0.209645, 0, 9999, -9999, 1.0, 100, 1, \n 3.167999, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1179, 0.051465, 0, \n 9999, -9999, 1.0, 100, 1, 1.306293, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1180, 0.059365, 0, 9999, -9999, 1.0, 100, 1, 0.688545, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1181, 23.821689, 0, 9999, -9999, 1.0, 100,\n 1, 85.739557, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1182, 24.612874, \n 0, 9999, -9999, 1.0, 100, 1, 99.319579, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1183, 3.24107, 0, 9999, -9999, 1.0, 100, 1, 38.222575, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1184, 0.358312, 0, 9999, -9999, 1.0, \n 100, 1, 4.219005, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1185, 
\n 2.182901, 0, 9999, -9999, 1.0, 100, 1, 11.343971, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1186, 2.184012, 0, 9999, -9999, 1.0, 100, 1, 38.916368,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1187, 0.459888, 0, 9999, -9999,\n 1.0, 100, 1, 9.814574, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1188, \n 53.562608, 0, 9999, -9999, 1.0, 100, 1, 179.712741, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1189, 1.204921, 0, 9999, -9999, 1.0, 100, 1, \n 20.261805, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1190, 32.667547, 0, \n 9999, -9999, 1.0, 100, 1, 220.533673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1191, 17.953145, 0, 9999, -9999, 1.0, 100, 1, 73.079413, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1192, 2.590747, 0, 9999, -9999, 1.0, 100, \n 1, 21.454569, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1193, 0.222396, 0,\n 9999, -9999, 1.0, 100, 1, 2.399953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1194, 0.77085, 0, 9999, -9999, 1.0, 100, 1, 8.986036, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1195, 0.015425, 0, 9999, -9999, 1.0, 100, 1, \n 0.202359, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1196, 0.029284, 0, \n 9999, -9999, 1.0, 100, 1, 160.697956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1197, 0.11597, 0, 9999, -9999, 1.0, 100, 1, 90.592266, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1198, 4.134805, 0, 9999, -9999, 1.0, 100, \n 1, 39.819157, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1199, 61.376881, \n 0, 9999, -9999, 1.0, 100, 1, 201.421956, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1200, 21.487973, 0, 9999, -9999, 1.0, 100, 1, 56.012408, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1201, 0.691822, 0, 9999, -9999, 1.0, \n 100, 1, 25.166667, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1202, \n 3.586635, 0, 9999, -9999, 1.0, 100, 1, 49.89238, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1203, 12.725115, 0, 9999, -9999, 1.0, 100, 1, \n 182.623256, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1204, 2.799582, 0, \n 9999, -9999, 1.0, 100, 1, 47.541821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1205, 
0.000146, 0, 9999, -9999, 1.0, 100, 1, 0.548843, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1206, 0.411467, 0, 9999, -9999, 1.0, 100, \n 1, 3.806894, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1207, 0.331325, 0,\n 9999, -9999, 1.0, 100, 1, 3.575453, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1208, 0.105374, 0, 9999, -9999, 1.0, 100, 1, 2.242031, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1209, 0.00265, 0, 9999, -9999, 1.0, 100, 1,\n 1.268261, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1210, 0.69402, 0, \n 9999, -9999, 1.0, 100, 1, 9.02599, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1211, 5.750967, 0, 9999, -9999, 1.0, 100, 1, 18.005229, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1212, 26.199295, 0, 9999, -9999, 1.0, 100,\n 1, 91.171888, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1213, 21.1062, 0,\n 9999, -9999, 1.0, 100, 1, 57.342704, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1214, 0.541037, 0, 9999, -9999, 1.0, 100, 1, 4.505907, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1215, 0.15338, 0, 9999, -9999, 1.0, 100, 1,\n 2.252965, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1216, 3.319201, 0, \n 9999, -9999, 1.0, 100, 1, 67.754469, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1217, 2.664727, 0, 9999, -9999, 1.0, 100, 1, 35.871617, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1218, 0.10866, 0, 9999, -9999, 1.0, 100, 1,\n 0.980482, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1219, 0.83454, 0, \n 9999, -9999, 1.0, 100, 1, 12.33953, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1220, 1.729113, 0, 9999, -9999, 1.0, 100, 1, 30.597849, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1221, 43.354712, 0, 9999, -9999, 1.0, 100,\n 1, 593.230436, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1222, 54.25302, \n 0, 9999, -9999, 1.0, 100, 1, 211.057769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1223, 0.828555, 0, 9999, -9999, 1.0, 100, 1, 3.806101, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1224, 15.875443, 0, 9999, -9999, 1.0, \n 100, 1, 160.523778, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1225, \n 1.071926, 0, 9999, 
-9999, 1.0, 100, 1, 34.931481, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1226, 0.118196, 0, 9999, -9999, 1.0, 100, 1, 3.982858,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1227, 3.258837, 0, 9999, -9999,\n 1.0, 100, 1, 17.482807, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1228, \n 0.156042, 0, 9999, -9999, 1.0, 100, 1, 3.021367, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1229, 7.933585, 0, 9999, -9999, 1.0, 100, 1, 51.244222,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1230, 0.045286, 0, 9999, -9999,\n 1.0, 100, 1, 1.681276, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1231, \n 1.223909, 0, 9999, -9999, 1.0, 100, 1, 33.55478, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1232, 2.573754, 0, 9999, -9999, 1.0, 100, 1, 75.075088,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1233, 173.598538, 0, 9999, -\n 9999, 1.0, 100, 1, 575.36828, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 1234, 33.990216, 0, 9999, -9999, 1.0, 100, 1, 101.1394, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1235, 0.001519, 0, 9999, -9999, 1.0, 100, 1, \n 9.03734, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1236, 0.010199, 0, \n 9999, -9999, 1.0, 100, 1, 82.225035, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1237, 3.462839, 0, 9999, -9999, 1.0, 100, 1, 14.605409, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1238, 12.106922, 0, 9999, -9999, 1.0, 100,\n 1, 188.691049, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1239, 0.483742, \n 0, 9999, -9999, 1.0, 100, 1, 2.267706, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1240, 63.552975, 0, 9999, -9999, 1.0, 100, 1, 339.51051, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1241, 9.744883, 0, 9999, -9999, 1.0, \n 100, 1, 385.361595, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1242, \n 1.158061, 0, 9999, -9999, 1.0, 100, 1, 27.074038, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1243, 4.669682, 0, 9999, -9999, 1.0, 100, 1, 83.079842,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1244, 115.794463, 0, 9999, -\n 9999, 1.0, 100, 1, 323.472536, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 1245, 0.241619, 0, 9999, -9999, 1.0, 
100, 1, 8.080896, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1246, 18.525152, 0, 9999, -9999, 1.0, 100, 1, \n 57.127825, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1247, 5.100639, 0, \n 9999, -9999, 1.0, 100, 1, 21.833396, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1248, 13.259573, 0, 9999, -9999, 1.0, 100, 1, 91.958275, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1249, 1.47167, 0, 9999, -9999, 1.0, 100, 1,\n 76.135177, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1250, 0.772338, 0, \n 9999, -9999, 1.0, 100, 1, 30.830519, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1251, 2.007729, 0, 9999, -9999, 1.0, 100, 1, 23.404345, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1252, 1.728628, 0, 9999, -9999, 1.0, 100, \n 1, 14.887727, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1253, 17.018216, \n 0, 9999, -9999, 1.0, 100, 1, 64.502694, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1254, 26.927476, 0, 9999, -9999, 1.0, 100, 1, 82.278695, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1255, 0.726767, 0, 9999, -9999, 1.0, \n 100, 1, 3.818419, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1256, \n 3.218337, 0, 9999, -9999, 1.0, 100, 1, 15.091842, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1257, 19.556961, 0, 9999, -9999, 1.0, 100, 1, 88.95288,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1258, 75.724888, 0, 9999, -9999,\n 1.0, 100, 1, 235.487329, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1259, \n 26.547394, 0, 9999, -9999, 1.0, 100, 1, 109.288719, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1260, 0.629507, 0, 9999, -9999, 1.0, 100, 1, \n 20.168717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1261, 10.592114, 0, \n 9999, -9999, 1.0, 100, 1, 201.699555, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1262, 0.066859, 0, 9999, -9999, 1.0, 100, 1, 0.524108, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1263, 0.05282, 0, 9999, -9999, 1.0, 100, 1,\n 0.352421, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1264, 8.646042, 0, \n 9999, -9999, 1.0, 100, 1, 82.035361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1265, 0.87289, 0, 9999, -9999, 1.0, 100, 1, 
6.654727, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1266, 19.839091, 0, 9999, -9999, 1.0, 100, 1,\n 119.710849, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1267, 1.42905, 0, \n 9999, -9999, 1.0, 100, 1, 39.469006, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1270, 2.867892, 0, 9999, -9999, 1.0, 100, 1, 38.950511, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1271, 2.180592, 0, 9999, -9999, 1.0, 100, \n 1, 47.371792, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1272, 0.12233, 0,\n 9999, -9999, 1.0, 100, 1, 1.23166, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1273, 0.402412, 0, 9999, -9999, 1.0, 100, 1, 2.169201, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1274, 4.613569, 0, 9999, -9999, 1.0, 100, 1, \n 53.095629, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1275, 5.039854, 0, \n 9999, -9999, 1.0, 100, 1, 99.0753, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1276, 0.577089, 0, 9999, -9999, 1.0, 100, 1, 25.655641, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1277, 1.713473, 0, 9999, -9999, 1.0, 100, \n 1, 65.611252, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1278, 7.145337, 0,\n 9999, -9999, 1.0, 100, 1, 170.437781, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1279, 2e-05, 0, 9999, -9999, 1.0, 100, 1, 0.004344, 0.0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0], [1280, 0.008871, 0, 9999, -9999, 1.0, 100, 1, \n 0.626494, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1282, 0.164926, 0, \n 9999, -9999, 1.0, 100, 1, 4.363037, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1283, 24.042404, 0, 9999, -9999, 1.0, 100, 1, 1297.764428, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1284, 2.961479, 0, 9999, -9999, 1.0, \n 100, 1, 28.426322, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1285, \n 0.002761, 0, 9999, -9999, 1.0, 100, 1, 2.937048, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1286, 2.24876, 0, 9999, -9999, 1.0, 100, 1, 17.872201,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1287, 4.55563, 0, 9999, -9999, \n 1.0, 100, 1, 93.199628, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1288, \n 3.72473, 0, 9999, -9999, 1.0, 100, 1, 148.402692, 0.0, 0, 
0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1289, 7.121503, 0, 9999, -9999, 1.0, 100, 1, \n 184.149235, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1290, 0.310739, 0, \n 9999, -9999, 1.0, 100, 1, 4.901974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1291, 5.174079, 0, 9999, -9999, 1.0, 100, 1, 98.293351, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1292, 3.680955, 0, 9999, -9999, 1.0, 100, \n 1, 41.682074, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1293, 0.037266, 0,\n 9999, -9999, 1.0, 100, 1, 2.402107, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1294, 0.017452, 0, 9999, -9999, 1.0, 100, 1, 5.39743, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1295, 0.038533, 0, 9999, -9999, 1.0, 100, 1, \n 5.873666, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1296, 0.669408, 0, \n 9999, -9999, 1.0, 100, 1, 27.356489, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1297, 11.612135, 0, 9999, -9999, 1.0, 100, 1, 177.778742, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1300, 11.138034, 0, 9999, -9999, 1.0, \n 100, 1, 23.74405, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1301, \n 27.94748, 0, 9999, -9999, 1.0, 100, 1, 60.863304, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1302, 1.775766, 0, 9999, -9999, 1.0, 100, 1, 4.877299,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1303, 1.506596, 0, 9999, -9999,\n 1.0, 100, 1, 4.335516, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1304, \n 2.218171, 0, 9999, -9999, 1.0, 100, 1, 9.594319, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1305, 0.000322, 0, 9999, -9999, 1.0, 100, 1, 0.004567,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1306, 0.093112, 0, 9999, -9999,\n 1.0, 100, 1, 1.827014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1307, \n 0.071688, 0, 9999, -9999, 1.0, 100, 1, 0.29894, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1308, 0.05088, 0, 9999, -9999, 1.0, 100, 1, 3.278321, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1309, 0.089478, 0, 9999, -9999,\n 1.0, 100, 1, 3.34909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1310, \n 0.043944, 0, 9999, -9999, 1.0, 100, 1, 1.64589, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 
0, 0, 0], [1311, 1.283616, 0, 9999, -9999, 1.0, 100, 1, 11.854004,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1312, 32.144668, 0, 9999, -9999,\n 1.0, 100, 1, 262.264924, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1313, \n 7.034633, 0, 9999, -9999, 1.0, 100, 1, 30.836748, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1314, 2.705834, 0, 9999, -9999, 1.0, 100, 1, 12.003987,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1315, 1.715196, 0, 9999, -9999,\n 1.0, 100, 1, 7.879027, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1316, \n 0.001198, 0, 9999, -9999, 1.0, 100, 1, 2.757497, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1317, 1.374919, 0, 9999, -9999, 1.0, 100, 1, 23.958574,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1318, 0.053995, 0, 9999, -9999,\n 1.0, 100, 1, 1.956332, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1319, \n 2.412989, 0, 9999, -9999, 1.0, 100, 1, 17.708276, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1320, 2.01785, 0, 9999, -9999, 1.0, 100, 1, 20.75859, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1321, 0.017436, 0, 9999, -9999,\n 1.0, 100, 1, 0.161123, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1322, \n 0.131102, 0, 9999, -9999, 1.0, 100, 1, 0.929763, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1323, 68.564796, 0, 9999, -9999, 1.0, 100, 1, \n 199.111909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1324, 1.440474, 0, \n 9999, -9999, 1.0, 100, 1, 13.063258, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1325, 4.968484, 0, 9999, -9999, 1.0, 100, 1, 90.497559, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1326, 2.423617, 0, 9999, -9999, 1.0, 100, \n 1, 56.928865, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1327, 3.105262, 0,\n 9999, -9999, 1.0, 100, 1, 50.796895, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1328, 1.651998, 0, 9999, -9999, 1.0, 100, 1, 16.063343, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1329, 17.013592, 0, 9999, -9999, 1.0, 100,\n 1, 218.675424, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1330, 6.13151, 0,\n 9999, -9999, 1.0, 100, 1, 30.131028, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1331, 
0.035299, 0, 9999, -9999, 1.0, 100, 1, 0.289238, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1332, 0.021045, 0, 9999, -9999, 1.0, 100, \n 1, 26.293088, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1333, 5.410888, 0,\n 9999, -9999, 1.0, 100, 1, 45.650254, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1334, 0.000137, 0, 9999, -9999, 1.0, 100, 1, 1.215341, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1336, 3.321284, 0, 9999, -9999, 1.0, 100, \n 1, 29.773035, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1337, 1.111612, 0,\n 9999, -9999, 1.0, 100, 1, 121.31241, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1338, 0.06346, 0, 9999, -9999, 1.0, 100, 1, 0.832524, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1339, 0.579758, 0, 9999, -9999, 1.0, 100, 1, \n 10.086482, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1340, 0.035501, 0, \n 9999, -9999, 1.0, 100, 1, 70.098327, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1341, 6.581426, 0, 9999, -9999, 1.0, 100, 1, 205.513321, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1342, 0.031756, 0, 9999, -9999, 1.0, 100, \n 1, 0.734589, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1343, 0.005344, 0,\n 9999, -9999, 1.0, 100, 1, 1.102108, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1344, 0.017248, 0, 9999, -9999, 1.0, 100, 1, 0.226057, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1345, 0.124928, 0, 9999, -9999, 1.0, 100, \n 1, 3.971188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1346, 12.149372, 0,\n 9999, -9999, 1.0, 100, 1, 214.719215, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1348, 2.617463, 0, 9999, -9999, 1.0, 100, 1, 22.707927, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1349, 2.716996, 0, 9999, -9999, 1.0, 100, \n 1, 42.352342, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1350, 0.016036, 0,\n 9999, -9999, 1.0, 100, 1, 0.094971, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1351, 5.3e-05, 0, 9999, -9999, 1.0, 100, 1, 0.015958, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1352, 0.007111, 0, 9999, -9999, 1.0, 100, 1, \n 0.83726, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1355, 0.046937, 0, \n 9999, 
-9999, 1.0, 100, 1, 1.688324, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1356, 12.885707, 0, 9999, -9999, 1.0, 100, 1, 73.486231, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1357, 6.737632, 0, 9999, -9999, 1.0, 100, \n 1, 56.459913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1358, 0.006907, 0,\n 9999, -9999, 1.0, 100, 1, 0.247293, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1359, 0.897683, 0, 9999, -9999, 1.0, 100, 1, 70.633589, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1360, 3.153322, 0, 9999, -9999, 1.0, 100, \n 1, 17.135983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1361, 8.263279, 0,\n 9999, -9999, 1.0, 100, 1, 63.207173, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1362, 12.630815, 0, 9999, -9999, 1.0, 100, 1, 79.107216, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1363, 0.006147, 0, 9999, -9999, 1.0, 100, \n 1, 0.036158, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1364, 0.007668, 0,\n 9999, -9999, 1.0, 100, 1, 0.061068, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1365, 9.7e-05, 0, 9999, -9999, 1.0, 100, 1, 0.000456, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1366, 0.005584, 0, 9999, -9999, 1.0, 100, 1, \n 1.229992, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1367, 6.250932, 0, \n 9999, -9999, 1.0, 100, 1, 43.863891, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1368, 0.096174, 0, 9999, -9999, 1.0, 100, 1, 3.298243, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1369, 1.432042, 0, 9999, -9999, 1.0, 100, \n 1, 7.968859, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1370, 0.012611, 0,\n 9999, -9999, 1.0, 100, 1, 0.343308, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1371, 1.656353, 0, 9999, -9999, 1.0, 100, 1, 81.767208, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1372, 0.996171, 0, 9999, -9999, 1.0, 100, \n 1, 192.966588, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1373, 1.384774, \n 0, 9999, -9999, 1.0, 100, 1, 35.200257, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1374, 45.514504, 0, 9999, -9999, 1.0, 100, 1, 108.220146, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1375, 25.096659, 0, 9999, -9999, 1.0,\n 
100, 1, 61.223816, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1376, \n 21.592139, 0, 9999, -9999, 1.0, 100, 1, 176.213655, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1377, 1.308187, 0, 9999, -9999, 1.0, 100, 1, \n 234.376272, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1378, 0.068137, 0, \n 9999, -9999, 1.0, 100, 1, 246.029906, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1379, 0.067837, 0, 9999, -9999, 1.0, 100, 1, 0.805984, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1380, 0.148081, 0, 9999, -9999, 1.0, 100, \n 1, 1.213356, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1381, 0.079283, 0,\n 9999, -9999, 1.0, 100, 1, 1.01257, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1382, 75.120774, 0, 9999, -9999, 1.0, 100, 1, 138.839906, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1383, 57.921895, 0, 9999, -9999, 1.0, 100,\n 1, 109.821439, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1384, 0.898474, \n 0, 9999, -9999, 1.0, 100, 1, 4.669135, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1385, 0.010214, 0, 9999, -9999, 1.0, 100, 1, 0.124455, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1386, 0.058117, 0, 9999, -9999, 1.0, \n 100, 1, 0.673858, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1387, \n 0.177086, 0, 9999, -9999, 1.0, 100, 1, 3.493561, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1388, 0.113278, 0, 9999, -9999, 1.0, 100, 1, 0.928188,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1389, 0.02606, 0, 9999, -9999, \n 1.0, 100, 1, 0.213536, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1390, \n 0.189214, 0, 9999, -9999, 1.0, 100, 1, 3.732816, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1391, 0.022705, 0, 9999, -9999, 1.0, 100, 1, 0.521719,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1392, 1.653278, 0, 9999, -9999,\n 1.0, 100, 1, 19.306386, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1393, \n 0.304577, 0, 9999, -9999, 1.0, 100, 1, 1.376509, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1394, 0.242243, 0, 9999, -9999, 1.0, 100, 1, 1.077886,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1395, 0.016054, 0, 9999, -9999,\n 1.0, 100, 1, 0.073776, 
0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1396, \n 0.005171, 0, 9999, -9999, 1.0, 100, 1, 0.026112, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1397, 1.529697, 0, 9999, -9999, 1.0, 100, 1, 25.084545,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1398, 0.154931, 0, 9999, -9999,\n 1.0, 100, 1, 2.779641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1399, \n 1.184332, 0, 9999, -9999, 1.0, 100, 1, 17.868157, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1400, 0.28671, 0, 9999, -9999, 1.0, 100, 1, 1.297197, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1401, 5.131858, 0, 9999, -9999,\n 1.0, 100, 1, 89.339497, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1402, \n 1.568442, 0, 9999, -9999, 1.0, 100, 1, 26.328902, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1403, 48.266806, 0, 9999, -9999, 1.0, 100, 1, \n 119.651672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1404, 51.082464, 0,\n 9999, -9999, 1.0, 100, 1, 134.800518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1405, 1.986189, 0, 9999, -9999, 1.0, 100, 1, 29.550802, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1406, 1.132197, 0, 9999, -9999, 1.0, 100, \n 1, 10.763987, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1407, 0.012144, 0,\n 9999, -9999, 1.0, 100, 1, 0.211614, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1408, 3.606729, 0, 9999, -9999, 1.0, 100, 1, 41.078698, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1409, 0.595096, 0, 9999, -9999, 1.0, 100, \n 1, 12.019786, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1410, 1.341977, 0,\n 9999, -9999, 1.0, 100, 1, 37.466518, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1411, 6.631827, 0, 9999, -9999, 1.0, 100, 1, 39.395367, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1412, 0.149883, 0, 9999, -9999, 1.0, 100, \n 1, 5.987601, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1413, 0.108024, 0,\n 9999, -9999, 1.0, 100, 1, 5.679791, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1414, 0.018773, 0, 9999, -9999, 1.0, 100, 1, 25.992489, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1415, 0.000673, 0, 9999, -9999, 1.0, 100, \n 1, 7.454501, 0.0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0], [1416, 0.000128, 0,\n 9999, -9999, 1.0, 100, 1, 7.958002, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1417, 2.2e-05, 0, 9999, -9999, 1.0, 100, 1, 0.001311, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1418, 3.131184, 0, 9999, -9999, 1.0, 100, 1, \n 88.264613, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1419, 0.892644, 0, \n 9999, -9999, 1.0, 100, 1, 33.260903, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1421, 0.846121, 0, 9999, -9999, 0.99951, 100, 1, 6.972369, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1422, 0.569459, 0, 9999, -9999, 1.0, \n 100, 1, 4.730495, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1423, \n 0.239313, 0, 9999, -9999, 1.0, 100, 1, 1.931017, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1424, 0.085377, 0, 9999, -9999, 1.0, 100, 1, \n 219.092115, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1425, 7.009151, 0, \n 9999, -9999, 1.0, 100, 1, 21.366402, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1426, 16.98374, 0, 9999, -9999, 1.0, 100, 1, 68.762602, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1427, 2.554959, 0, 9999, -9999, 1.0, 100, \n 1, 480.698671, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1428, 0.012327, \n 0, 9999, -9999, 1.0, 100, 1, 334.885743, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1431, 5.108838, 0, 9999, -9999, 1.0, 100, 1, 227.662022, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1432, 0.587459, 0, 9999, -9999, 1.0, \n 100, 1, 12.058931, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1433, \n 118.811298, 0, 9999, -9999, 1.0, 100, 1, 1289.241188, 0.0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0], [1434, 0.031591, 0, 9999, -9999, 1.0, 100, 1, \n 99.440014, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1435, 4.644217, 0, \n 9999, -9999, 1.0, 100, 1, 86.713217, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1436, 14.975035, 0, 9999, -9999, 1.0, 100, 1, 98.434116, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1437, 12.49617, 0, 9999, -9999, 1.0, 100, \n 1, 238.321958, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1438, 64.510912,\n 0, 9999, -9999, 1.0, 100, 1, 392.815158, 0.0, 0, 0, 0, 0, 
0, 0, 0, 0, 0,\n 0, 0], [1439, 0.058606, 0, 9999, -9999, 1.0, 100, 1, 99.103164, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1440, 0.000863, 0, 9999, -9999, 1.0, \n 100, 1, 0.833609, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1441, 0.00601,\n 0, 9999, -9999, 1.0, 100, 1, 0.171578, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1442, 0.057526, 0, 9999, -9999, 1.0, 100, 1, 0.715522, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1443, 24.032003, 0, 9999, -9999, 1.0, \n 100, 1, 103.005076, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1444, \n 1.205148, 0, 9999, -9999, 1.0, 100, 1, 8.981696, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1445, 2.394259, 0, 9999, -9999, 1.0, 100, 1, 25.036799,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1446, 20.59301, 0, 9999, -9999,\n 1.0, 100, 1, 758.547933, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1447, \n 8.109674, 0, 9999, -9999, 1.0, 100, 1, 89.477411, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1448, 1.364062, 0, 9999, -9999, 1.0, 100, 1, 7.523578,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1449, 6.727523, 0, 9999, -9999,\n 1.0, 100, 1, 95.437673, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1450, \n 10.232409, 0, 9999, -9999, 1.0, 100, 1, 59.256809, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1451, 13.044952, 0, 9999, -9999, 1.0, 100, 1, \n 68.198838, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1452, 4.020652, 0, \n 9999, -9999, 1.0, 100, 1, 24.068921, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1453, 12.794164, 0, 9999, -9999, 1.0, 100, 1, 64.93775, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1454, 71.645573, 0, 9999, -9999, 1.0, 100,\n 1, 155.126607, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1455, 0.038966, \n 0, 9999, -9999, 1.0, 100, 1, 0.654438, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1456, 3.746818, 0, 9999, -9999, 1.0, 100, 1, 50.054822, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1457, 0.244411, 0, 9999, -9999, 1.0, \n 100, 1, 2.002672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1458, \n 0.030047, 0, 9999, -9999, 1.0, 100, 1, 0.246199, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 
0, 0, 0], [1459, 1.173315, 0, 9999, -9999, 1.0, 100, 1, 5.309059,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1460, 5.043479, 0, 9999, -9999,\n 1.0, 100, 1, 101.498473, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1461, \n 3.497456, 0, 9999, -9999, 1.0, 100, 1, 17.951737, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1462, 0.462345, 0, 9999, -9999, 1.0, 100, 1, 2.402686,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1463, 0.170398, 0, 9999, -9999,\n 1.0, 100, 1, 0.711207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1464, \n 24.648093, 0, 9999, -9999, 1.0, 100, 1, 218.884211, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1465, 0.600752, 0, 9999, -9999, 1.0, 100, 1, \n 5.299939, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1466, 0.332156, 0, \n 9999, -9999, 1.0, 100, 1, 5.685017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1467, 0.100837, 0, 9999, -9999, 1.0, 100, 1, 2.096155, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1468, 6.628756, 0, 9999, -9999, 1.0, 100, \n 1, 23.789171, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1469, 3.982867, 0,\n 9999, -9999, 1.0, 100, 1, 65.007467, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1470, 19.817875, 0, 9999, -9999, 1.0, 100, 1, 78.965265, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1471, 25.471799, 0, 9999, -9999, 1.0, 100,\n 1, 159.165074, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1472, 0.789769, \n 0, 9999, -9999, 1.0, 100, 1, 11.980182, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1473, 0.721082, 0, 9999, -9999, 1.0, 100, 1, 8.362608, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1474, 0.081557, 0, 9999, -9999, 1.0, \n 100, 1, 1.398948, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1475, \n 0.020827, 0, 9999, -9999, 1.0, 100, 1, 0.39088, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1476, 81.826956, 0, 9999, -9999, 1.0, 100, 1, \n 250.480113, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1477, 0.580029, 0, \n 9999, -9999, 1.0, 100, 1, 12.122974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1479, 0.004362, 0, 9999, -9999, 1.0, 100, 1, 5.592606, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], 
[1480, 0.04074, 0, 9999, -9999, 1.0, 100, 1,\n 18.681964, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1481, 0.004051, 0, \n 9999, -9999, 1.0, 100, 1, 0.053146, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1482, 0.788081, 0, 9999, -9999, 1.0, 100, 1, 17.51083, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1483, 0.141817, 0, 9999, -9999, 1.0, 100, \n 1, 3.599649, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1484, 0.002023, 0,\n 9999, -9999, 1.0, 100, 1, 0.02991, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1485, 0.038114, 0, 9999, -9999, 1.0, 100, 1, 0.563547, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1486, 0.196086, 0, 9999, -9999, 1.0, 100, 1, \n 2.89934, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1487, 0.083872, 0, \n 9999, -9999, 1.0, 100, 1, 1.142917, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1488, 0.007448, 0, 9999, -9999, 1.0, 100, 1, 5.569856, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1489, 0.028558, 0, 9999, -9999, 1.0, 100, \n 1, 0.118938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1490, 15.603052, 0,\n 9999, -9999, 1.0, 100, 1, 782.463701, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1491, 5.539285, 0, 9999, -9999, 1.0, 100, 1, 84.622838, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1492, 3.975544, 0, 9999, -9999, 1.0, 100, \n 1, 229.927503, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1493, 3.904134, \n 0, 9999, -9999, 1.0, 100, 1, 83.557175, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1494, 56.119552, 0, 9999, -9999, 1.0, 100, 1, 404.486733, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1495, 1.179889, 0, 9999, -9999, 1.0,\n 100, 1, 66.920717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1497, \n 12.800197, 0, 9999, -9999, 1.0, 100, 1, 89.070006, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1498, 22.315881, 0, 9999, -9999, 1.0, 100, 1, \n 105.800802, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1500, 0.040223, 0, \n 9999, -9999, 1.0, 100, 1, 0.154817, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1501, 1.659338, 0, 9999, -9999, 1.0, 100, 1, 8.165333, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1502, 
0.015933, 0, 9999, -9999, 1.0, 100, \n 1, 0.938928, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1503, 3.644376, 0,\n 9999, -9999, 1.0, 100, 1, 45.972187, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1504, 15.995903, 0, 9999, -9999, 1.0, 100, 1, 188.822836, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1505, 0.973825, 0, 9999, -9999, 1.0, \n 100, 1, 26.765913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1506, \n 1.68035, 0, 9999, -9999, 1.0, 100, 1, 56.406717, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1507, 0.198063, 0, 9999, -9999, 1.0, 100, 1, 15.438042,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1508, 0.014206, 0, 9999, -9999,\n 1.0, 100, 1, 0.065259, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1510, \n 7.904758, 0, 9999, -9999, 1.0, 100, 1, 107.008141, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1511, 34.313644, 0, 9999, -9999, 1.0, 100, 1, \n 155.22192, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1512, 5.508085, 0, \n 9999, -9999, 1.0, 100, 1, 64.130052, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1513, 2.253286, 0, 9999, -9999, 1.0, 100, 1, 23.051786, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1514, 0.00068, 0, 9999, -9999, 1.0, 100, 1,\n 0.027711, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1516, 0.000622, 0, \n 9999, -9999, 1.0, 100, 1, 0.02881, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1517, 0.14151, 0, 9999, -9999, 1.0, 100, 1, 1.286804, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1518, 0.056948, 0, 9999, -9999, 1.0, 100, 1, \n 0.670542, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1519, 0.003953, 0, \n 9999, -9999, 1.0, 100, 1, 0.04654, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1520, 1.320701, 0, 9999, -9999, 1.0, 100, 1, 79.674256, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1521, 0.488031, 0, 9999, -9999, 1.0, 100, \n 1, 31.179116, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1522, 0.667681, 0,\n 9999, -9999, 1.0, 100, 1, 40.212666, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1523, 0.358897, 0, 9999, -9999, 1.0, 100, 1, 20.304521, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1524, 0.421411, 0, 9999, 
-9999, 1.0, 100, \n 1, 26.159251, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1525, 8.369013, 0,\n 9999, -9999, 1.0, 100, 1, 68.425403, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1526, 13.439194, 0, 9999, -9999, 1.0, 100, 1, 44.478558, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1527, 47.41109, 0, 9999, -9999, 1.0, 100, \n 1, 103.998682, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1528, 19.05121, \n 0, 9999, -9999, 1.0, 100, 1, 41.386726, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1529, 4.347441, 0, 9999, -9999, 1.0, 100, 1, 84.378012, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1530, 36.879435, 0, 9999, -9999, 1.0, \n 100, 1, 79.055155, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1531, \n 98.758267, 0, 9999, -9999, 1.0, 100, 1, 183.821409, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1532, 3.146672, 0, 9999, -9999, 1.0, 100, 1, \n 37.379033, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1534, 16.179525, 0, \n 9999, -9999, 1.0, 100, 1, 29.516607, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1535, 2.910988, 0, 9999, -9999, 1.0, 100, 1, 8.931779, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1536, 13.30894, 0, 9999, -9999, 1.0, 100, \n 1, 39.26145, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1537, 5.590481, 0,\n 9999, -9999, 1.0, 100, 1, 99.740166, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1538, 3.755931, 0, 9999, -9999, 1.0, 100, 1, 130.774402, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1539, 6.565652, 0, 9999, -9999, 1.0, 100, \n 1, 201.766963, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1540, 0.089836, \n 0, 9999, -9999, 1.0, 100, 1, 4.160189, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 0], [1541, 0.293356, 0, 9999, -9999, 1.0, 100, 1, 3.429917, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1542, 1.778872, 0, 9999, -9999, 1.0, \n 100, 1, 50.287947, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1543, \n 7.196474, 0, 9999, -9999, 1.0, 100, 1, 14.788669, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1544, 15.520031, 0, 9999, -9999, 1.0, 100, 1, \n 121.437126, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1545, 64.930835, 0,\n 9999, 
-9999, 1.0, 100, 1, 185.545128, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1546, 55.458703, 0, 9999, -9999, 1.0, 100, 1, 255.44343, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1547, 71.747708, 0, 9999, -9999, 1.0, 100,\n 1, 362.597919, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1548, 9.874324, \n 0, 9999, -9999, 1.0, 100, 1, 21.273779, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1549, 26.315546, 0, 9999, -9999, 1.0, 100, 1, 77.017486, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1550, 2.578653, 0, 9999, -9999, 1.0, \n 100, 1, 5.214715, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1551, \n 4.679853, 0, 9999, -9999, 1.0, 100, 1, 9.576491, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1552, 1.571054, 0, 9999, -9999, 1.0, 100, 1, 54.035471,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1553, 1.205813, 0, 9999, -9999,\n 1.0, 100, 1, 92.480282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1554, \n 4.550451, 0, 9999, -9999, 1.0, 100, 1, 155.333413, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1555, 2.8799, 0, 9999, -9999, 1.0, 100, 1, \n 103.865774, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1556, 1.072108, 0, \n 9999, -9999, 1.0, 100, 1, 40.376346, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1557, 0.628445, 0, 9999, -9999, 1.0, 100, 1, 25.990242, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1558, 0.94404, 0, 9999, -9999, 1.0, 100, 1,\n 24.622373, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1559, 4.593798, 0, \n 9999, -9999, 1.0, 100, 1, 112.609207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1560, 1.15871, 0, 9999, -9999, 1.0, 100, 1, 86.395942, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1561, 0.554621, 0, 9999, -9999, 1.0, 100, \n 1, 19.127379, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1562, 1.20192, 0,\n 9999, -9999, 1.0, 100, 1, 61.888351, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1563, 3.188963, 0, 9999, -9999, 1.0, 100, 1, 106.233907, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1564, 26.839461, 0, 9999, -9999, 1.0, 100,\n 1, 58.27282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1565, 0.825577, 0,\n 9999, -9999, 1.0, 
100, 1, 12.83938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1566, 9.367373, 0, 9999, -9999, 1.0, 100, 1, 358.676351, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1567, 0.521067, 0, 9999, -9999, 1.0, 100, \n 1, 29.531771, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1568, 2.721294, 0,\n 9999, -9999, 1.0, 100, 1, 89.300597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1569, 7.514268, 0, 9999, -9999, 1.0, 100, 1, 328.718571, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1570, 6.439178, 0, 9999, -9999, 1.0, 100, \n 1, 243.241909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1571, 10.260218,\n 0, 9999, -9999, 1.0, 100, 1, 203.443403, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1572, 6.054092, 0, 9999, -9999, 1.0, 100, 1, 232.127956, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1573, 2.410514, 0, 9999, -9999, 1.0, \n 100, 1, 80.403772, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1574, \n 3.788724, 0, 9999, -9999, 1.0, 100, 1, 144.715972, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1575, 10.428356, 0, 9999, -9999, 1.0, 100, 1, \n 153.606376, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1576, 2.443, 0, \n 9999, -9999, 1.0, 100, 1, 34.262017, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1577, 15.38133, 0, 9999, -9999, 1.0, 100, 1, 217.054488, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1578, 0.821275, 0, 9999, -9999, 1.0, 100, \n 1, 16.348222, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1579, 14.528543, \n 0, 9999, -9999, 1.0, 100, 1, 35.164333, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1580, 12.79112, 0, 9999, -9999, 1.0, 100, 1, 21.892492, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1581, 2.068277, 0, 9999, -9999, 1.0, \n 100, 1, 156.277964, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1582, \n 0.165737, 0, 9999, -9999, 1.0, 100, 1, 8.151092, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1583, 0.043758, 0, 9999, -9999, 1.0, 100, 1, 1.791968,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1584, 1.216571, 0, 9999, -9999,\n 1.0, 100, 1, 81.24993, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1585, \n 0.048815, 0, 9999, -9999, 1.0, 100, 1, 
3.685182, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1586, 0.843323, 0, 9999, -9999, 1.0, 100, 1, 61.31549,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1587, 2.519864, 0, 9999, -9999,\n 1.0, 100, 1, 191.635296, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1588, \n 3.852362, 0, 9999, -9999, 1.0, 100, 1, 59.424343, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1589, 19.154329, 0, 9999, -9999, 1.0, 100, 1, \n 48.538268, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1590, 20.947358, 0, \n 9999, -9999, 1.0, 100, 1, 119.077525, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1591, 23.168103, 0, 9999, -9999, 1.0, 100, 1, 142.8447, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1592, 0.253241, 0, 9999, -9999, 1.0, 100, \n 1, 9.842361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1593, 0.15675, 0, \n 9999, -9999, 1.0, 100, 1, 7.183183, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1594, 0.292231, 0, 9999, -9999, 1.0, 100, 1, 9.56089, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1595, 2.231011, 0, 9999, -9999, 1.0, 100, 1, \n 54.79001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1596, 4.880936, 0, \n 9999, -9999, 1.0, 100, 1, 138.730049, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1597, 0.08322, 0, 9999, -9999, 1.0, 100, 1, 2.858987, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1598, 0.112467, 0, 9999, -9999, 1.0, 100, 1, \n 4.795494, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1599, 3.84912, 0, \n 9999, -9999, 1.0, 100, 1, 86.703571, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1600, 2.069032, 0, 9999, -9999, 1.0, 100, 1, 25.356501, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1601, 0.561492, 0, 9999, -9999, 1.0, 100, \n 1, 7.643653, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1602, 2.906505, 0,\n 9999, -9999, 1.0, 100, 1, 45.658169, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1603, 1.783351, 0, 9999, -9999, 1.0, 100, 1, 26.209248, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1604, 1.098497, 0, 9999, -9999, 1.0, 100, \n 1, 16.363032, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1605, 2.754133, 0,\n 9999, -9999, 1.0, 100, 1, 43.477178, 0.0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1606, 2.112869, 0, 9999, -9999, 1.0, 100, 1, 42.024907, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1607, 1.261272, 0, 9999, -9999, 1.0, 100, \n 1, 19.395236, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1608, 1.278121, 0,\n 9999, -9999, 1.0, 100, 1, 19.491249, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1609, 0.483623, 0, 9999, -9999, 1.0, 100, 1, 6.052272, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1610, 1.005066, 0, 9999, -9999, 1.0, 100, \n 1, 18.571656, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1611, 0.46381, 0,\n 9999, -9999, 1.0, 100, 1, 6.420554, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1612, 0.857392, 0, 9999, -9999, 1.0, 100, 1, 10.811203, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1613, 1.011747, 0, 9999, -9999, 1.0, 100, \n 1, 27.976217, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1614, 1.022581, 0,\n 9999, -9999, 1.0, 100, 1, 28.183827, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1615, 2.737635, 0, 9999, -9999, 1.0, 100, 1, 193.234776, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1616, 0.13616, 0, 9999, -9999, 1.0, 100, 1,\n 6.865586, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1617, 0.214465, 0, \n 9999, -9999, 1.0, 100, 1, 10.63107, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1618, 0.137271, 0, 9999, -9999, 1.0, 100, 1, 4.920368, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1619, 0.137714, 0, 9999, -9999, 1.0, 100, \n 1, 6.689637, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1620, 0.054616, 0,\n 9999, -9999, 1.0, 100, 1, 1.912024, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1621, 0.643767, 0, 9999, -9999, 1.0, 100, 1, 8.056388, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1622, 0.454891, 0, 9999, -9999, 1.0, 100, \n 1, 5.693597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1623, 0.781413, 0,\n 9999, -9999, 1.0, 100, 1, 20.717111, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1624, 0.43014, 0, 9999, -9999, 1.0, 100, 1, 8.938454, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1625, 4.394301, 0, 9999, -9999, 1.0, 100, 1, \n 65.182465, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0], [1626, 0.907896, 0, \n 9999, -9999, 1.0, 100, 1, 11.878862, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1627, 0.828216, 0, 9999, -9999, 1.0, 100, 1, 10.196496, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1628, 3.64562, 0, 9999, -9999, 1.0, 100, 1,\n 66.613993, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1629, 3.996364, 0, \n 9999, -9999, 1.0, 100, 1, 121.671047, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1630, 0.97886, 0, 9999, -9999, 1.0, 100, 1, 12.452584, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1631, 1.229738, 0, 9999, -9999, 1.0, 100, \n 1, 32.486249, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1632, 1.735442, 0,\n 9999, -9999, 1.0, 100, 1, 25.874893, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1633, 1.043532, 0, 9999, -9999, 1.0, 100, 1, 67.433329, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1634, 0.770553, 0, 9999, -9999, 1.0, 100, \n 1, 9.643044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1635, 1.42036, 0, \n 9999, -9999, 1.0, 100, 1, 19.166135, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1636, 0.484297, 0, 9999, -9999, 1.0, 100, 1, 25.181406, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1637, 0.890327, 0, 9999, -9999, 1.0, 100, \n 1, 29.114828, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1638, 0.393448, 0,\n 9999, -9999, 1.0, 100, 1, 12.162188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1639, 0.529161, 0, 9999, -9999, 1.0, 100, 1, 29.183593, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1640, 0.055855, 0, 9999, -9999, 1.0, 100, \n 1, 2.237652, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1641, 0.128633, 0,\n 9999, -9999, 1.0, 100, 1, 5.023705, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1642, 0.300365, 0, 9999, -9999, 1.0, 100, 1, 11.730623, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1643, 0.0778, 0, 9999, -9999, 1.0, 100, 1,\n 3.417684, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1644, 0.519067, 0, \n 9999, -9999, 1.0, 100, 1, 11.76596, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1645, 0.212854, 0, 9999, -9999, 1.0, 100, 1, 11.144882, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1646, 0.08389, 0, 
9999, -9999, 1.0, 100, 1,\n 3.73271, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1647, 0.49549, 0, 9999,\n -9999, 1.0, 100, 1, 17.434827, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [\n 1648, 51.620123, 0, 9999, -9999, 1.0, 100, 1, 109.345623, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1649, 1.143986, 0, 9999, -9999, 1.0, 100, 1, \n 23.481556, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1650, 68.504496, 0, \n 9999, -9999, 1.0, 100, 1, 176.928964, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1651, 25.884619, 0, 9999, -9999, 1.0, 100, 1, 161.276649, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1652, 22.304037, 0, 9999, -9999, 1.0, \n 100, 1, 84.070562, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1653, \n 5.825901, 0, 9999, -9999, 1.0, 100, 1, 18.431241, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1654, 5.458977, 0, 9999, -9999, 1.0, 100, 1, 47.53021,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1655, 0.218497, 0, 9999, -9999,\n 1.0, 100, 1, 10.79071, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1656, \n 0.047498, 0, 9999, -9999, 1.0, 100, 1, 2.680105, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1657, 0.095463, 0, 9999, -9999, 1.0, 100, 1, 5.6313, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1658, 0.045291, 0, 9999, -9999,\n 1.0, 100, 1, 1.879381, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1659, \n 17.538243, 0, 9999, -9999, 1.0, 100, 1, 91.77667, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1660, 12.937488, 0, 9999, -9999, 1.0, 100, 1, \n 186.942171, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1661, 31.605385, 0,\n 9999, -9999, 1.0, 100, 1, 138.604087, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1662, 0.063493, 0, 9999, -9999, 1.0, 100, 1, 3.040325, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1663, 0.024501, 0, 9999, -9999, 1.0, 100, \n 1, 1.600649, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1664, 0.036775, 0,\n 9999, -9999, 1.0, 100, 1, 1.578207, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1665, 0.738544, 0, 9999, -9999, 1.0, 100, 1, 48.659717, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1666, 0.10553, 0, 9999, -9999, 
1.0, 100, 1,\n 2.877877, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1667, 0.158158, 0, \n 9999, -9999, 1.0, 100, 1, 5.227282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1668, 0.093074, 0, 9999, -9999, 1.0, 100, 1, 3.927043, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1669, 0.940983, 0, 9999, -9999, 1.0, 100, \n 1, 72.677935, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1670, 1.496978, 0,\n 9999, -9999, 1.0, 100, 1, 111.043025, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1671, 2.781499, 0, 9999, -9999, 1.0, 100, 1, 62.404971, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1672, 0.388881, 0, 9999, -9999, 1.0, 100, \n 1, 10.579925, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1673, 0.334706, 0,\n 9999, -9999, 1.0, 100, 1, 4.091034, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1674, 1.005445, 0, 9999, -9999, 1.0, 100, 1, 47.970381, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1675, 0.90703, 0, 9999, -9999, 1.0, 100, 1,\n 31.233663, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1676, 1.387516, 0, \n 9999, -9999, 1.0, 100, 1, 83.173368, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1677, 0.214899, 0, 9999, -9999, 1.0, 100, 1, 13.887293, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1678, 1.315679, 0, 9999, -9999, 1.0, 100, \n 1, 226.804108, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1679, 0.418866, \n 0, 9999, -9999, 1.0, 100, 1, 71.380413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1680, 1.040782, 0, 9999, -9999, 1.0, 100, 1, 52.148102, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1681, 0.272268, 0, 9999, -9999, 1.0, \n 100, 1, 17.30062, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1682, \n 0.618993, 0, 9999, -9999, 1.0, 100, 1, 39.892468, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1683, 0.37783, 0, 9999, -9999, 1.0, 100, 1, 9.189765, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1684, 16.720062, 0, 9999, -9999,\n 1.0, 100, 1, 40.575646, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1685, \n 38.280956, 0, 9999, -9999, 1.0, 100, 1, 74.922434, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1686, 1.592396, 0, 9999, -9999, 1.0, 100, 1, \n 
81.035483, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1687, 1.448229, 0, \n 9999, -9999, 1.0, 100, 1, 112.01808, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1688, 0.25044, 0, 9999, -9999, 1.0, 100, 1, 18.158729, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1689, 2.728973, 0, 9999, -9999, 1.0, 100, \n 1, 116.696894, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1690, 1.881404, \n 0, 9999, -9999, 1.0, 100, 1, 116.477465, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1691, 1.937312, 0, 9999, -9999, 1.0, 100, 1, 228.38653, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1692, 0.360216, 0, 9999, -9999, 1.0, \n 100, 1, 26.501573, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1693, \n 6.045706, 0, 9999, -9999, 1.0, 100, 1, 86.236575, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1694, 0.838517, 0, 9999, -9999, 1.0, 100, 1, 53.656832,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1695, 0.366512, 0, 9999, -9999,\n 1.0, 100, 1, 23.132774, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1696, \n 0.676037, 0, 9999, -9999, 1.0, 100, 1, 53.34209, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1697, 73.968329, 0, 9999, -9999, 1.0, 100, 1, \n 136.821485, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1698, 7.947772, 0, \n 9999, -9999, 1.0, 100, 1, 25.60631, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1699, 0.032287, 0, 9999, -9999, 1.0, 100, 1, 5.356106, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1700, 0.345167, 0, 9999, -9999, 1.0, 100, \n 1, 55.825815, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1701, 0.33727, 0,\n 9999, -9999, 1.0, 100, 1, 37.297196, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1702, 1.288316, 0, 9999, -9999, 1.0, 100, 1, 25.149806, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1703, 2.47381, 0, 9999, -9999, 1.0, 100, 1,\n 48.587768, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1704, 5.787415, 0, \n 9999, -9999, 1.0, 100, 1, 127.647586, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1705, 2.86247, 0, 9999, -9999, 1.0, 100, 1, 52.051788, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1706, 0.421435, 0, 9999, -9999, 1.0, 100, \n 1, 6.76178, 
0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1707, 0.423471, 0, \n 9999, -9999, 1.0, 100, 1, 11.7078, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1708, 1.09922, 0, 9999, -9999, 1.0, 100, 1, 26.288692, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1709, 4.063842, 0, 9999, -9999, 1.0, 100, 1, \n 226.257418, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1710, 3.872336, 0, \n 9999, -9999, 1.0, 100, 1, 183.631947, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1711, 0.031912, 0, 9999, -9999, 1.0, 100, 1, 7.213854, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1712, 1.519606, 0, 9999, -9999, 1.0, 100, \n 1, 75.638853, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1713, 1.926968, 0,\n 9999, -9999, 1.0, 100, 1, 90.775073, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1714, 0.691647, 0, 9999, -9999, 1.0, 100, 1, 42.312538, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1715, 4.380165, 0, 9999, -9999, 1.0, 100, \n 1, 155.279397, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1716, 99.103248,\n 0, 9999, -9999, 1.0, 100, 1, 156.979012, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1717, 1.370715, 0, 9999, -9999, 1.0, 100, 1, 82.928251, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1718, 189.035332, 0, 9999, -9999, 1.0, \n 100, 1, 301.614349, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1719, \n 0.996406, 0, 9999, -9999, 1.0, 100, 1, 19.488967, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1720, 2.459531, 0, 9999, -9999, 1.0, 100, 1, 54.067169,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1721, 1.395162, 0, 9999, -9999,\n 1.0, 100, 1, 82.151947, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1722, \n 0.307342, 0, 9999, -9999, 1.0, 100, 1, 21.329566, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1723, 1.879056, 0, 9999, -9999, 1.0, 100, 1, 2.855273,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1724, 23.913688, 0, 9999, -9999,\n 1.0, 100, 1, 36.268783, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1725, \n 3.302072, 0, 9999, -9999, 1.0, 100, 1, 55.750844, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1726, 4.692439, 0, 9999, -9999, 1.0, 100, 1, 84.308501,\n 0.0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0], [1727, 0.009857, 0, 9999, -9999,\n 1.0, 100, 1, 0.456443, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1728, \n 1.500178, 0, 9999, -9999, 1.0, 100, 1, 65.283314, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1729, 9.626622, 0, 9999, -9999, 1.0, 100, 1, \n 220.758669, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1730, 2.579093, 0, \n 9999, -9999, 1.0, 100, 1, 51.367164, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1731, 5.370488, 0, 9999, -9999, 1.0, 100, 1, 151.90213, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1732, 4.730721, 0, 9999, -9999, 1.0, 100, \n 1, 383.858473, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1733, 1.601396, \n 0, 9999, -9999, 1.0, 100, 1, 60.655652, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1734, 0.994327, 0, 9999, -9999, 1.0, 100, 1, 77.375277, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1735, 5.493087, 0, 9999, -9999, 1.0, \n 100, 1, 153.887449, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1736, \n 1.217485, 0, 9999, -9999, 1.0, 100, 1, 89.439426, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1737, 13.67404, 0, 9999, -9999, 1.0, 100, 1, \n 194.473407, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1738, 6.79528, 0, \n 9999, -9999, 1.0, 100, 1, 116.049526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1739, 1.628928, 0, 9999, -9999, 1.0, 100, 1, 33.525947, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1740, 3.170471, 0, 9999, -9999, 1.0, 100, \n 1, 66.638954, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1741, 0.703631, 0,\n 9999, -9999, 1.0, 100, 1, 35.869318, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1742, 0.41138, 0, 9999, -9999, 1.0, 100, 1, 25.619162, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1743, 0.014153, 0, 9999, -9999, 1.0, 100, \n 1, 0.986841, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1744, 0.06008, 0, \n 9999, -9999, 1.0, 100, 1, 3.775325, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1745, 0.52858, 0, 9999, -9999, 1.0, 100, 1, 31.215591, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1746, 2.317817, 0, 9999, -9999, 1.0, 100, \n 1, 172.123236, 0.0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0], [1747, 0.45041, 0,\n 9999, -9999, 1.0, 100, 1, 25.963706, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1748, 1.875782, 0, 9999, -9999, 1.0, 100, 1, 67.219313, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1749, 5.661322, 0, 9999, -9999, 1.0, 100, \n 1, 218.703564, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1750, 0.722982, \n 0, 9999, -9999, 1.0, 100, 1, 22.191848, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1751, 0.570436, 0, 9999, -9999, 1.0, 100, 1, 18.416283, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1752, 2.485541, 0, 9999, -9999, 1.0, \n 100, 1, 136.190504, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1753, \n 2.307659, 0, 9999, -9999, 1.0, 100, 1, 79.270006, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1754, 9.096135, 0, 9999, -9999, 1.0, 100, 1, 408.37422,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1755, 1.808269, 0, 9999, -9999,\n 1.0, 100, 1, 46.277001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1756, \n 1.755721, 0, 9999, -9999, 1.0, 100, 1, 93.807787, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1757, 13.59206, 0, 9999, -9999, 1.0, 100, 1, 197.08743,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1758, 4.309907, 0, 9999, -9999,\n 1.0, 100, 1, 311.473267, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1759, \n 4.837918, 0, 9999, -9999, 1.0, 100, 1, 156.546089, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1760, 2.229657, 0, 9999, -9999, 1.0, 100, 1, \n 114.687411, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1761, 1.4435, 0, \n 9999, -9999, 1.0, 100, 1, 48.443946, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1762, 4.898546, 0, 9999, -9999, 1.0, 100, 1, 107.077622, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1763, 5.490835, 0, 9999, -9999, 1.0, 100, \n 1, 90.136674, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1764, 1.223566, 0,\n 9999, -9999, 1.0, 100, 1, 21.994769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1765, 7.971301, 0, 9999, -9999, 1.0, 100, 1, 112.249863, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1766, 9.468566, 0, 9999, -9999, 1.0, 100, \n 1, 99.811208, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0], [1767, 48.00237, 0,\n 9999, -9999, 1.0, 100, 1, 95.5909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1768, 55.735285, 0, 9999, -9999, 1.0, 100, 1, 159.818572, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1769, 21.168997, 0, 9999, -9999, 1.0, 100,\n 1, 235.581664, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1770, 252.472611,\n 0, 9999, -9999, 1.0, 100, 1, 479.248156, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1771, 171.272253, 0, 9999, -9999, 1.0, 100, 1, 276.640075, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1772, 5.981185, 0, 9999, -9999, 1.0,\n 100, 1, 272.215345, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1773, \n 31.853074, 0, 9999, -9999, 1.0, 100, 1, 533.823159, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1774, 1.38998, 0, 9999, -9999, 1.0, 100, 1, \n 88.57714, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1775, 3.602189, 0, \n 9999, -9999, 1.0, 100, 1, 197.787397, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1776, 3.86406, 0, 9999, -9999, 1.0, 100, 1, 111.203656, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1777, 4.186652, 0, 9999, -9999, 1.0, 100, \n 1, 199.457983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1778, 2.885068, \n 0, 9999, -9999, 1.0, 100, 1, 80.070627, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1779, 6.121667, 0, 9999, -9999, 1.0, 100, 1, 78.485044, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1780, 4.042606, 0, 9999, -9999, 1.0, \n 100, 1, 97.872974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1781, \n 3.124553, 0, 9999, -9999, 1.0, 100, 1, 7.067063, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1782, 4.836581, 0, 9999, -9999, 1.0, 100, 1, 9.94901, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1783, 5.154731, 0, 9999, -9999,\n 1.0, 100, 1, 10.739092, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1784, \n 2.922371, 0, 9999, -9999, 1.0, 100, 1, 240.920274, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1785, 3.064711, 0, 9999, -9999, 1.0, 100, 1, \n 275.41262, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1786, 15.899962, 0, \n 9999, -9999, 1.0, 100, 1, 195.868213, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0,\n 0], [1787, 65.367372, 0, 9999, -9999, 1.0, 100, 1, 123.060646, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1788, 0.117389, 0, 9999, -9999, 1.0, \n 100, 1, 9.486282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1789, \n 0.289917, 0, 9999, -9999, 1.0, 100, 1, 24.05804, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1790, 0.010999, 0, 9999, -9999, 1.0, 100, 1, 1.412167,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1791, 0.007829, 0, 9999, -9999,\n 1.0, 100, 1, 1.171034, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1792, \n 0.044079, 0, 9999, -9999, 1.0, 100, 1, 8.914306, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1793, 0.236603, 0, 9999, -9999, 1.0, 100, 1, 41.722817,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1794, 0.20779, 0, 9999, -9999, \n 1.0, 100, 1, 6.617641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1795, \n 0.266407, 0, 9999, -9999, 1.0, 100, 1, 3.33586, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1796, 4.643687, 0, 9999, -9999, 1.0, 100, 1, 10.434523,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1797, 1.892799, 0, 9999, -9999,\n 1.0, 100, 1, 63.411765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1798, \n 0.404733, 0, 9999, -9999, 1.0, 100, 1, 14.835758, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1799, 6.065791, 0, 9999, -9999, 1.0, 100, 1, 51.10225,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1800, 12.893851, 0, 9999, -9999,\n 1.0, 100, 1, 79.286766, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1801, \n 0.096655, 0, 9999, -9999, 1.0, 100, 1, 21.006749, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1802, 0.050346, 0, 9999, -9999, 1.0, 100, 1, 11.305192,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1803, 0.067486, 0, 9999, -9999,\n 1.0, 100, 1, 15.182571, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1804, \n 8.857977, 0, 9999, -9999, 1.0, 100, 1, 399.133201, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1805, 0.372681, 0, 9999, -9999, 1.0, 100, 1, \n 23.20491, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1806, 0.645338, 0, \n 9999, -9999, 1.0, 100, 1, 21.469357, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1807, 
0.476964, 0, 9999, -9999, 1.0, 100, 1, 28.156483, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1808, 2.263578, 0, 9999, -9999, 1.0, 100, \n 1, 118.262712, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1809, 0.706651, \n 0, 9999, -9999, 1.0, 100, 1, 33.031228, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1810, 1.838324, 0, 9999, -9999, 1.0, 100, 1, 74.139408, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1811, 0.934047, 0, 9999, -9999, 1.0, \n 100, 1, 53.408299, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1812, \n 0.847076, 0, 9999, -9999, 1.0, 100, 1, 47.34526, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1813, 5.040034, 0, 9999, -9999, 1.0, 100, 1, \n 180.894957, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1814, 1.305803, 0, \n 9999, -9999, 1.0, 100, 1, 62.572642, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1815, 1.125706, 0, 9999, -9999, 1.0, 100, 1, 61.953143, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1816, 0.526674, 0, 9999, -9999, 1.0, 100, \n 1, 30.445169, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1817, 6.103422, 0,\n 9999, -9999, 1.0, 100, 1, 280.614897, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1818, 2.278102, 0, 9999, -9999, 1.0, 100, 1, 173.515675, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1819, 0.043942, 0, 9999, -9999, 1.0, 100, \n 1, 1.538348, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1820, 1.0414, 0, \n 9999, -9999, 1.0, 100, 1, 79.71358, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1821, 2.208855, 0, 9999, -9999, 1.0, 100, 1, 196.67938, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1822, 98.239685, 0, 9999, -9999, 1.0, 100,\n 1, 170.831584, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1823, 4.830701, \n 0, 9999, -9999, 1.0, 100, 1, 131.456153, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1824, 2.976789, 0, 9999, -9999, 1.0, 100, 1, 56.565054, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1825, 49.61097, 0, 9999, -9999, 1.0, \n 100, 1, 81.59195, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1826, 2.40722,\n 0, 9999, -9999, 1.0, 100, 1, 74.101252, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1827, 0.690669, 0, 
9999, -9999, 1.0, 100, 1, 30.303552, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1828, 27.146571, 0, 9999, -9999, 1.0, \n 100, 1, 43.298921, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1829, \n 37.866018, 0, 9999, -9999, 1.0, 100, 1, 69.263255, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1830, 2.915109, 0, 9999, -9999, 1.0, 100, 1, \n 27.724768, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1831, 39.925327, 0, \n 9999, -9999, 1.0, 100, 1, 69.89001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1832, 0.828831, 0, 9999, -9999, 1.0, 100, 1, 26.560625, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1833, 1.109798, 0, 9999, -9999, 1.0, 100, \n 1, 81.361962, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1834, 2.554402, 0,\n 9999, -9999, 1.0, 100, 1, 102.529569, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1836, 0.477418, 0, 9999, -9999, 1.0, 100, 1, 6.417969, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1837, 4.200009, 0, 9999, -9999, 1.0, 100, \n 1, 12.629331, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1838, 1.443062, 0,\n 9999, -9999, 1.0, 100, 1, 25.580913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1839, 28.228214, 0, 9999, -9999, 1.0, 100, 1, 183.749133, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1840, 10.988953, 0, 9999, -9999, 1.0, \n 100, 1, 132.975197, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1841, \n 0.340284, 0, 9999, -9999, 1.0, 100, 1, 22.982632, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1842, 1.417646, 0, 9999, -9999, 1.0, 100, 1, 7.468633,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1843, 0.588474, 0, 9999, -9999,\n 1.0, 100, 1, 19.264686, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1844, \n 0.345625, 0, 9999, -9999, 1.0, 100, 1, 32.384294, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1845, 0.373692, 0, 9999, -9999, 1.0, 100, 1, 31.436002,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1846, 0.117694, 0, 9999, -9999,\n 1.0, 100, 1, 3.74984, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1847, \n 6.98851, 0, 9999, -9999, 1.0, 100, 1, 120.215574, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1848, 0.671868, 0, 9999, 
-9999, 1.0, 100, 1, 9.514696,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1849, 1.591079, 0, 9999, -9999,\n 1.0, 100, 1, 37.619097, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1850, \n 3.459291, 0, 9999, -9999, 1.0, 100, 1, 48.54058, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1851, 5.355057, 0, 9999, -9999, 1.0, 100, 1, 7.956444,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1852, 26.334441, 0, 9999, -9999,\n 1.0, 100, 1, 37.606916, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1853, \n 21.05905, 0, 9999, -9999, 1.0, 100, 1, 30.116711, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1854, 1.087784, 0, 9999, -9999, 1.0, 100, 1, 2.241167,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1855, 4.821441, 0, 9999, -9999,\n 1.0, 100, 1, 121.687485, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1856, \n 16.158296, 0, 9999, -9999, 1.0, 100, 1, 63.654358, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [1857, 1.392575, 0, 9999, -9999, 1.0, 100, 1, \n 41.229597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1858, 0.962874, 0, \n 9999, -9999, 1.0, 100, 1, 27.374415, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1860, 5.321111, 0, 9999, -9999, 1.0, 100, 1, 84.163604, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1861, 1.232397, 0, 9999, -9999, 1.0, 100, \n 1, 26.861144, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1862, 0.420971, 0,\n 9999, -9999, 1.0, 100, 1, 32.512826, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1863, 0.38232, 0, 9999, -9999, 1.0, 100, 1, 30.063729, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1864, 1.848854, 0, 9999, -9999, 1.0, 100, \n 1, 138.236316, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1865, 26.719416,\n 0, 9999, -9999, 1.0, 100, 1, 68.097772, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1866, 37.73908, 0, 9999, -9999, 1.0, 100, 1, 98.289141, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1867, 0.07468, 0, 9999, -9999, 1.0, 100,\n 1, 2.041288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1868, 0.184336, 0,\n 9999, -9999, 1.0, 100, 1, 6.453374, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1869, 0.097593, 0, 9999, -9999, 1.0, 100, 
1, 2.759448, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1870, 0.859649, 0, 9999, -9999, 1.0, 100, \n 1, 54.564665, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1871, 1.592185, 0,\n 9999, -9999, 1.0, 100, 1, 52.648444, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1872, 0.137763, 0, 9999, -9999, 1.0, 100, 1, 1.683854, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1873, 0.231084, 0, 9999, -9999, 1.0, 100, \n 1, 9.025283, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1874, 0.083646, 0,\n 9999, -9999, 1.0, 100, 1, 3.554415, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1875, 0.158111, 0, 9999, -9999, 1.0, 100, 1, 7.837576, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1876, 0.141013, 0, 9999, -9999, 1.0, 100, \n 1, 4.936672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1877, 0.032441, 0,\n 9999, -9999, 1.0, 100, 1, 1.135717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1878, 0.168939, 0, 9999, -9999, 1.0, 100, 1, 8.374329, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1879, 0.048728, 0, 9999, -9999, 1.0, 100, \n 1, 1.752881, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1880, 0.763602, 0,\n 9999, -9999, 1.0, 100, 1, 38.46747, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1881, 0.30875, 0, 9999, -9999, 1.0, 100, 1, 4.535799, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1882, 0.374878, 0, 9999, -9999, 1.0, 100, 1, \n 5.120641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1883, 0.501411, 0, \n 9999, -9999, 1.0, 100, 1, 6.940957, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1884, 0.420718, 0, 9999, -9999, 1.0, 100, 1, 5.865468, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1885, 0.774015, 0, 9999, -9999, 1.0, 100, \n 1, 47.510175, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1886, 0.082618, 0,\n 9999, -9999, 1.0, 100, 1, 5.255398, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1887, 0.584546, 0, 9999, -9999, 1.0, 100, 1, 16.937671, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1888, 0.279655, 0, 9999, -9999, 1.0, 100, \n 1, 4.141211, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1889, 2.215842, 0,\n 9999, -9999, 1.0, 100, 1, 91.335184, 0.0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1890, 0.651391, 0, 9999, -9999, 1.0, 100, 1, 24.842697, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1891, 0.495423, 0, 9999, -9999, 1.0, 100, \n 1, 30.836318, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1892, 0.592029, 0,\n 9999, -9999, 1.0, 100, 1, 38.14699, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1893, 0.992301, 0, 9999, -9999, 1.0, 100, 1, 46.5682, 0.0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0], [1894, 0.671605, 0, 9999, -9999, 1.0, 100, 1, \n 31.347572, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1895, 0.005762, 0, \n 9999, -9999, 1.0, 100, 1, 0.140628, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1896, 0.578794, 0, 9999, -9999, 1.0, 100, 1, 45.257234, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1897, 0.22732, 0, 9999, -9999, 1.0, 100, 1,\n 14.824595, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1898, 0.253484, 0, \n 9999, -9999, 1.0, 100, 1, 18.270499, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1899, 0.15769, 0, 9999, -9999, 1.0, 100, 1, 12.000496, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1900, 49.440108, 0, 9999, -9999, 1.0, 100,\n 1, 78.114509, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1901, 85.852576, \n 0, 9999, -9999, 1.0, 100, 1, 133.539659, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1902, 144.692709, 0, 9999, -9999, 1.0, 100, 1, 281.819662, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1903, 38.213684, 0, 9999, -9999, 1.0,\n 100, 1, 135.492385, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1904, \n 49.601, 0, 9999, -9999, 1.0, 100, 1, 79.184428, 0.0, 0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0], [1905, 0.245402, 0, 9999, -9999, 1.0, 100, 1, 9.160607,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1906, 1.441792, 0, 9999, -9999,\n 1.0, 100, 1, 72.356523, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1907, \n 0.557731, 0, 9999, -9999, 1.0, 100, 1, 28.893637, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1908, 0.972014, 0, 9999, -9999, 1.0, 100, 1, 50.477866,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1909, 2.006953, 0, 9999, -9999,\n 0.99951, 100, 1, 32.874676, 0.0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0], [\n 1910, 1.289808, 0, 9999, -9999, 1.0, 100, 1, 20.259486, 0.0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0], [1911, 0.514865, 0, 9999, -9999, 1.0, 100, 1, \n 8.189799, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1912, 69.733436, 0, \n 9999, -9999, 1.0, 100, 1, 101.236915, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1913, 0.109472, 0, 9999, -9999, 1.0, 100, 1, 6.782522, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1914, 0.280751, 0, 9999, -9999, 1.0, 100, \n 1, 15.944561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1915, 57.319413, \n 0, 9999, -9999, 1.0, 100, 1, 159.570248, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1916, 99.107497, 0, 9999, -9999, 1.0, 100, 1, 277.793548, 0.0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1917, 42.116008, 0, 9999, -9999, 1.0,\n 100, 1, 186.387377, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1918, \n 58.749074, 0, 9999, -9999, 1.0, 100, 1, 120.486097, 0.0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0], [1919, 28.497622, 0, 9999, -9999, 1.0, 100, 1, \n 61.1613, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1920, 1.811743, 0, \n 9999, -9999, 1.0, 100, 1, 9.95472, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1921, 145.712044, 0, 9999, -9999, 1.0, 100, 1, 230.400935, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1922, 45.36466, 0, 9999, -9999, 1.0, \n 100, 1, 66.116137, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1923, \n 9.238607, 0, 9999, -9999, 1.0, 100, 1, 21.836163, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1924, 5.019655, 0, 9999, -9999, 1.0, 100, 1, 36.518326,\n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1925, 5.170419, 0, 9999, -9999,\n 1.0, 100, 1, 135.324361, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1926, \n 3.340663, 0, 9999, -9999, 1.0, 100, 1, 96.610178, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1927, 23.399289, 0, 9999, -9999, 1.0, 100, 1, \n 65.668809, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1928, 0.747036, 0, \n 9999, -9999, 1.0, 100, 1, 1.509884, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1929, 0.180301, 0, 9999, -9999, 1.0, 100, 1, 4.804832, 0.0, 0, 0, \n 0, 0, 0, 
0, 0, 0, 0, 0, 0], [1930, 0.214601, 0, 9999, -9999, 1.0, 100, \n 1, 11.004973, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1931, 0.663788, 0,\n 9999, -9999, 1.0, 100, 1, 38.07556, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1932, 1.83202, 0, 9999, -9999, 1.0, 100, 1, 46.722379, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1933, 0.735851, 0, 9999, -9999, 1.0, 100, \n 1, 44.239188, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1934, 47.829223, \n 0, 9999, -9999, 1.0, 100, 1, 383.418198, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1935, 3.280962, 0, 9999, -9999, 1.0, 100, 1, 62.335643, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1936, 0.079477, 0, 9999, -9999, 1.0, \n 100, 1, 6.00797, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1937, 2.133855,\n 0, 9999, -9999, 1.0, 100, 1, 134.605733, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1938, 1.44698, 0, 9999, -9999, 1.0, 100, 1, 89.425619, 0.0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1939, 1.447635, 0, 9999, -9999, 1.0, \n 100, 1, 103.003683, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1940, \n 0.249661, 0, 9999, -9999, 1.0, 100, 1, 18.980829, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1941, 0.521998, 0, 9999, -9999, 1.0, 100, 1, \n 104.495097, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1942, 0.789037, 0, \n 9999, -9999, 1.0, 100, 1, 70.75487, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1943, 0.083093, 0, 9999, -9999, 1.0, 100, 1, 3.652558, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1944, 1.445543, 0, 9999, -9999, 1.0, 100, \n 1, 93.133765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1945, 0.304251, 0,\n 9999, -9999, 1.0, 100, 1, 10.651443, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1946, 0.037403, 0, 9999, -9999, 1.0, 100, 1, 1.309439, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1947, 1.219744, 0, 9999, -9999, 1.0, 100, \n 1, 17.996246, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1948, 4.586959, 0,\n 9999, -9999, 1.0, 100, 1, 83.075413, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1949, 0.82436, 0, 9999, -9999, 1.0, 100, 1, 10.193229, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 
0, 0], [1950, 0.070892, 0, 9999, -9999, 1.0, 100, \n 1, 0.866493, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1951, 0.63205, 0, \n 9999, -9999, 1.0, 100, 1, 7.917597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1952, 3.277791, 0, 9999, -9999, 1.0, 100, 1, 67.723951, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1953, 0.21067, 0, 9999, -9999, 1.0, 100, 1,\n 8.928556, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1954, 0.230766, 0, \n 9999, -9999, 1.0, 100, 1, 12.726892, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1955, 0.181558, 0, 9999, -9999, 1.0, 100, 1, 6.625255, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1956, 2.572929, 0, 9999, -9999, 1.0, 100, \n 1, 38.724888, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1957, 3.910752, 0,\n 9999, -9999, 1.0, 100, 1, 131.682322, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1958, 0.89549, 0, 9999, -9999, 1.0, 100, 1, 59.791759, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1959, 3.736043, 0, 9999, -9999, 1.0, 100, \n 1, 35.986928, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1960, 0.47403, 0,\n 9999, -9999, 1.0, 100, 1, 13.579895, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1961, 0.360769, 0, 9999, -9999, 1.0, 100, 1, 17.841481, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1962, 0.056937, 0, 9999, -9999, 1.0, 100, \n 1, 3.150179, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1963, 0.011195, 0,\n 9999, -9999, 1.0, 100, 1, 0.73138, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n ], [1964, 1.912109, 0, 9999, -9999, 1.0, 100, 1, 66.594121, 0.0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1965, 0.412755, 0, 9999, -9999, 1.0, 100, \n 1, 18.785491, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1966, 0.856742, 0,\n 9999, -9999, 1.0, 100, 1, 2.674199, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0], [1967, 4.700675, 0, 9999, -9999, 1.0, 100, 1, 99.074235, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1968, 20.406765, 0, 9999, -9999, 1.0, 100,\n 1, 201.733891, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1969, 0.416455, \n 0, 9999, -9999, 1.0, 100, 1, 15.048118, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1970, 
145.974713, 0, 9999, -9999, 1.0, 100, 1, 236.871781, 0.0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1971, 0.435823, 0, 9999, -9999, 1.0,\n 100, 1, 14.404409, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1972, \n 0.001026, 0, 9999, -9999, 1.0, 100, 1, 0.028378, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1973, 0.01934, 0, 9999, -9999, 1.0, 100, 1, 0.534696, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1974, 0.0995, 0, 9999, -9999, \n 1.0, 100, 1, 2.750907, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1975, \n 3.231276, 0, 9999, -9999, 1.0, 100, 1, 81.92918, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1976, 1.378981, 0, 9999, -9999, 1.0, 100, 1, 2.17499, \n 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1977, 65.42762, 0, 9999, -9999,\n 1.0, 100, 1, 226.383637, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1978, \n 0.106404, 0, 9999, -9999, 1.0, 100, 1, 1.331592, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1979, 133.220566, 0, 9999, -9999, 1.0, 100, 1, \n 189.722792, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1980, 6.868705, 0, \n 9999, -9999, 1.0, 100, 1, 100.61941, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1981, 7.688742, 0, 9999, -9999, 1.0, 100, 1, 144.682717, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1982, 5.752632, 0, 9999, -9999, 1.0, 100, \n 1, 134.93778, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1983, 3.530567, 0,\n 9999, -9999, 1.0, 100, 1, 155.990147, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1984, 1.936985, 0, 9999, -9999, 1.0, 100, 1, 94.470611, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1985, 1.330237, 0, 9999, -9999, 1.0, 100, \n 1, 41.975835, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1986, 5.765495, 0,\n 9999, -9999, 1.0, 100, 1, 298.346979, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1987, 5.389422, 0, 9999, -9999, 1.0, 100, 1, 393.914067, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1988, 33.80903, 0, 9999, -9999, 1.0, 100, \n 1, 251.944939, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1989, 6.748426, \n 0, 9999, -9999, 1.0, 100, 1, 10.378288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [1990, 1.381387, 
0, 9999, -9999, 1.0, 100, 1, 50.351426, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1991, 47.912587, 0, 9999, -9999, 1.0, \n 100, 1, 849.576944, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1992, \n 6.27345, 0, 9999, -9999, 1.0, 100, 1, 233.477991, 0.0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0], [1993, 9.719656, 0, 9999, -9999, 1.0, 100, 1, \n 242.698643, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1994, 5.08751, 0, \n 9999, -9999, 1.0, 100, 1, 255.834576, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1995, 4.092824, 0, 9999, -9999, 1.0, 100, 1, 262.446698, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1996, 1.534479, 0, 9999, -9999, 1.0, 100, \n 1, 91.306832, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1997, 0.151788, 0,\n 9999, -9999, 1.0, 100, 1, 26.592561, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [1998, 7.104695, 0, 9999, -9999, 1.0, 100, 1, 12.126511, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [1999, 4.534769, 0, 9999, -9999, 1.0, 100, \n 1, 199.184531, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2000, 7.544127, \n 0, 9999, -9999, 1.0, 100, 1, 579.835051, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0], [2001, 3.950905, 0, 9999, -9999, 1.0, 100, 1, 122.315703, 0.0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2002, 1.721932, 0, 9999, -9999, 1.0, \n 100, 1, 30.606436, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2003, \n 14.962198, 0, 9999, -9999, 1.0, 100, 1, 23.645071, 0.0, 0, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0], [2004, 10.900896, 0, 9999, -9999, 1.0, 100, 1, \n 17.73338, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2005, 2.306607, 0, \n 9999, -9999, 1.0, 100, 1, 72.071456, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0], [2006, 1.851369, 0, 9999, -9999, 1.0, 100, 1, 59.660888, 0.0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0], [2007, 0.061806, 0, 9999, -9999, 1.0, 100, \n 1, 1.681507, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2008, 0.00429, 0, \n 9999, -9999, 1.0, 100, 1, 0.116706, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])\n', (134982, 258381), False, 'from numpy import array\n'), ((265875, 436452), 'numpy.array', 'array', (['[[586, 1, 0, 
1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [589, 108, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [590, 108, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [593, 112, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [594, 114, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [595, 115, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [597, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [598, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [599, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [600,\n 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [601, 119, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [602, 121, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [603, 526, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [607, 127, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [608, 127, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [609, 529, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [610, 530, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [612, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [613,\n 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [614, 130, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [616, 132, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [617, 133, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [618, 133, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [619, 134, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [621, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [623, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [624, 14, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [628, \n 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [629, 145, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [631, 145, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [632, 145, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, 
-360, 360], [637, 148, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [638, 149, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [639, 150, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [640, 153, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [641, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [642,\n 533, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [643, 534, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [646, 536, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [647, 536, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [650, 166, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [652, 167, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [655, 170, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [657, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [658, 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [661,\n 177, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [662, 178, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [663, 178, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [666, 180, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [668, 183, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [670, 183, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [672, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [675, 19, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [676, 19, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [678, 194,\n 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [679, 196, 0, 1e-05,\n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [681, 197, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [683, 200, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [687, 202, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [689, 204, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [691, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [693, 21, 0, 1e-05, 0, 9999, 
9999, 9999, 0, 0, 1, -360, 360], [694, \n 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [695, 210, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [696, 211, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [697, 211, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [698, 212, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [701, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [702, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [704, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [705, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [707,\n 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [708, 221, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [711, 224, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [713, 225, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [714, 225, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [716, 226, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [717, 227, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [719, 229, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [722, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [723,\n 235, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [724, 238, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [725, 239, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [727, 243, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [728, 244, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [730, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [731, 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [732, 247, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [733, 549, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [735,\n 253, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [737, 256, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [738, 258, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [739, 
264, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [741, 264, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [742, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [743, 500, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [745, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [746, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [747,\n 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [748, 274, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [749, 274, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [750, 557, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [753, 28, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [758, 286, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [760, 287, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [761, 288, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [762, 289, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [763,\n 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [765, 560, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [767, 292, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [769, 293, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [771, 297, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [772, 3, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0,\n 1, -360, 360], [774, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360,\n 360], [776, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 777, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [778, 300,\n 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [781, 303, 0, 1e-05,\n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [784, 563, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [785, 501, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [787, 308, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [788, 311, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [789, 565, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 
1, -360, 360\n ], [790, 314, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [791,\n 314, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [792, 316, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [795, 319, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [798, 324, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [800, 326, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [801, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [802, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [805, 328, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [806, 328, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [808,\n 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [809, 329, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [810, 568, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [811, 568, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [814, 570, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [815, 335, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [816, 335, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [817, 571, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [818, 34, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [821, \n 338, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [822, 339, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [825, 339, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [826, 339, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [829, 345, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [830, 345, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [833, 348, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [834, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [835, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [836,\n 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [837, 350, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [839, 350, 0, 1e-05, 
0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [840, 573, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [841, 573, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [842, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [843, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [844, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [845, 356, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [847,\n 36, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [848, 574, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [849, 574, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [850, 574, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [851, 575, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [852, 361, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [853, 362, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [854, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [855, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [856,\n 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [857, 365, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [858, 368, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [859, 368, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [860, 371, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [862, 372, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [863, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [864, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [865, 375, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [867,\n 376, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [869, 503, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [870, 503, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [872, 378, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [873, 576, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [874, 576, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 
360], [875, 381, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [877, 578, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [881, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [882,\n 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [883, 388, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [886, 394, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [889, 397, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [890, 40, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [893, 400, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [894, 400, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [895, 580, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [896, 581, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [898,\n 403, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [900, 405, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [902, 405, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [903, 406, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [905, 413, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [907, 583, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [909, 417, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [911, 419, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [913, 422, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [914,\n 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [915, 423, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [916, 43, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [917, 43, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [918, 424, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [919, 427, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [920, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [921, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [922, 429, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [923,\n 432, 0, 1e-05, 0, 9999, 9999, 
9999, 0, 0, 1, -360, 360], [925, 44, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [928, 435, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [931, 439, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [934, 45, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [935, 45, 0, 1e-05, 0, 9999, 9999, 9999, 0, \n 0, 1, -360, 360], [936, 445, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [937, 447, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [939, 450, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [940,\n 451, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [942, 458, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [943, 458, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [944, 458, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [945, 459, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [946, 459, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [948, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [950, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [951, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [952, \n 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [956, 478, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [957, 478, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [958, 478, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [959, 478, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [960, 479, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [963, 481, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [965, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [966, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [967, 49,\n 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [968, 486, 0, 1e-05,\n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [969, 486, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [971, 51, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [973, 506, 0, 1e-05, 
0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [976, 58, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [977, 59, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [978, 491, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [980, \n 508, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [981, 62, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [982, 62, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [983, 62, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [984, 63, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [985, 63, 0, 1e-05, 0, 9999, 9999, 9999, 0, \n 0, 1, -360, 360], [986, 64, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [987, 65, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [988, 66, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [990, 67,\n 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [993, 67, 0, 1e-05,\n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [994, 67, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [995, 509, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [996, 510, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [997, 510, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [998, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [999, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1000, 71,\n 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1002, 71, 0, 1e-05,\n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1003, 72, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1006, 511, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1007, 511, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1008, 75, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1010, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1011, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1012,\n 81, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1014, 83, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1018, 514, 
0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1019, 514, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1023, 515, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1025, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1026, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1028, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1029, 268, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1030, 269, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1031, \n 498, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1032, 1, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1033, 3, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1034, 4, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1035, 6, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1036, 7, 0, 1e-05, 0, 9999, 9999, 9999, 0, \n 0, 1, -360, 360], [1037, 8, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1038, 9, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [1039, 11, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1041, \n 16, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1042, 17, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1044, 21, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1046, 25, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1047, 27, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1048, 28, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1049, 29, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1050, 31, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1051, 33, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1052,\n 34, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1053, 35, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1054, 36, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1055, 38, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1056, 39, 0, 1e-05, 0, 9999, 9999, \n 9999, 
0, 0, 1, -360, 360], [1057, 40, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1058, 41, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1059, 43, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1060, 44, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1061,\n 45, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1062, 47, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1063, 48, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1064, 49, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1065, 50, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1066, 51, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1067, 53, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1068, 54, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1069, 55, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1070,\n 57, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1071, 58, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1072, 59, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1073, 60, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1074, 62, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1075, 63, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1077, 65, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1078, 66, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1079, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1080,\n 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1081, 71, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1082, 72, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1083, 73, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1084, 75, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1085, 76, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1086, 77, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1087, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1088, 80, 0, 
1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1089,\n 81, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1090, 82, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1091, 83, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1092, 84, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1093, 85, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1094, 88, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1095, 89, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1096, 90, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1097, 91, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1098,\n 92, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1099, 93, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1100, 97, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1101, 98, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1102, 101, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1103, 102, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1104, 103, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1105, 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1106, 109, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1107, 110, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1108, \n 111, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1109, 112, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1110, 113, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1111, 114, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1112, 115, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1113, 116, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1114, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1115, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1116, 121, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1117, 122, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1118, \n 126, 0, 1e-05, 0, 9999, 
9999, 9999, 0, 0, 1, -360, 360], [1119, 127, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1120, 130, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1121, 131, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1122, 132, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1123, 133, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1124, 134, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1125, 135, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1126, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1127, 137, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1128, \n 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1129, 140, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1130, 141, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1131, 142, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1132, 144, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1133, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1134, 146, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1135, 147, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1136, 148, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1137, 149, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1138, \n 150, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1139, 151, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1140, 152, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1141, 153, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1142, 154, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1143, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1144, 158, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1145, 161, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1146, 162, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1147, 163, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1148, \n 164, 0, 1e-05, 0, 9999, 9999, 
9999, 0, 0, 1, -360, 360], [1149, 166, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1150, 167, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1151, 168, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1152, 169, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1153, 170, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1154, 171, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1155, 172, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1156, 173, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1157, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1158, \n 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1159, 176, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1160, 177, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1161, 178, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1162, 179, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1164, 181, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1166, 183, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1167, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1168, 186, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1169, 187, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1170, \n 188, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1171, 189, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1172, 190, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1173, 192, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1174, 193, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1175, 194, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1176, 196, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1177, 197, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1178, 198, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1179, 199, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1180, \n 200, 0, 1e-05, 0, 9999, 9999, 9999, 
0, 0, 1, -360, 360], [1181, 202, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1182, 203, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1183, 204, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1184, 205, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1185, 206, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1186, 207, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1187, 208, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1188, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1189, 210, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1190, \n 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1191, 212, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1192, 213, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1193, 214, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1194, 215, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1195, 216, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1196, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1197, 218, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1198, 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1199, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1200, \n 222, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1201, 223, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1202, 224, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1203, 225, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1204, 226, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1205, 227, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1206, 228, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1207, 229, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1208, 230, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1209, 234, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1210, \n 235, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 
1, -360, 360], [1211, 237, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1212, 238, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1213, 239, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1214, 240, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1215, 241, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1216, 242, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1217, 243, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1218, 244, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1219, 247, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1220, \n 251, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1221, 252, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1222, 253, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1223, 254, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1224, 255, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1225, 256, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1226, 257, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1227, 258, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1228, 260, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1229, 263, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1230, \n 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1231, 266, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1232, 267, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1233, 268, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1234, 269, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1235, 271, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1236, 272, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1237, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1238, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1239, 275, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1240, \n 276, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, 
-360, 360], [1241, 278, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1242, 281, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1243, 282, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1244, 283, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1245, 284, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1246, 285, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1247, 286, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1248, 287, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1249, 288, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1250, \n 289, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1251, 291, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1252, 292, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1253, 293, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1254, 294, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1255, 295, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1256, 296, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1257, 297, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1258, 298, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1259, 299, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1260, \n 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1261, 302, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1262, 303, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1263, 304, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1264, 307, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1265, 308, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1266, 309, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1267, 311, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1270, 316, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1271, 317, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1272, \n 318, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 
360], [1273, 319, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1274, 321, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1275, 322, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1276, 323, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1277, 324, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1278, 325, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1279, 326, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1280, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1282, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1283, \n 331, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1284, 333, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1285, 335, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1286, 337, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1287, 338, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1288, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1289, 340, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1290, 341, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1291, 342, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1292, 343, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1293, \n 344, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1294, 345, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1295, 346, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1296, 347, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1297, 348, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1300, 353, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1301, 354, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1302, 355, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1303, 356, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1304, 357, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1305, \n 359, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], 
[1306, 361, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1307, 362, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1308, 363, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1309, 364, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1310, 365, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1311, 366, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1312, 367, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1313, 368, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1314, 369, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1315, \n 370, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1316, 371, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1317, 372, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1318, 373, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1319, 374, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1320, 375, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1321, 376, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1322, 377, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1323, 378, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1324, 379, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1325, \n 381, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1326, 384, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1327, 385, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1328, 386, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1329, 387, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1330, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1331, 390, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1332, 391, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1333, 392, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1334, 393, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1336, \n 395, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1337, 
396, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1338, 397, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1339, 398, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1340, 399, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1341, 400, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1342, 403, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1343, 404, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1344, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1345, 406, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1346, \n 407, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1348, 410, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1349, 411, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1350, 412, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1351, 413, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1352, 414, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1355, 418, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1356, 419, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1357, 420, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1358, 421, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1359, \n 422, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1360, 423, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1361, 424, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1362, 425, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1363, 426, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1364, 427, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1365, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1366, 429, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1367, 430, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1368, 431, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1369, \n 432, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1370, 433, 
0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1371, 434, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1372, 435, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1373, 436, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1374, 437, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1375, 438, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1376, 439, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1377, 440, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1378, 441, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1379, \n 442, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1380, 443, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1381, 445, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1382, 446, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1383, 447, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1384, 448, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1385, 449, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1386, 450, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1387, 451, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1388, 453, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1389, \n 454, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1390, 455, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1391, 456, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1392, 457, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1393, 458, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1394, 459, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1395, 460, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1396, 461, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1397, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1398, 463, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1399, \n 464, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1400, 465, 0,\n 
1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1401, 466, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1402, 467, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1403, 468, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1404, 469, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1405, 470, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1406, 471, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1407, 472, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1408, 473, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1409, \n 474, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1410, 475, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1411, 476, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1412, 477, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1413, 478, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1414, 479, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1415, 480, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1416, 481, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1417, 482, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1418, 483, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1419, \n 484, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1421, 486, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1422, 487, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1423, 488, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1424, 489, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1425, 490, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1426, 491, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1427, 492, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1428, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1431, 496, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1432, \n 497, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1433, 498, 0,\n 1e-05, 
0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1434, 499, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1435, 500, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1436, 501, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1437, 502, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1438, 503, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1439, 504, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1440, 505, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1441, 506, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1442, \n 507, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1443, 508, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1444, 509, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1445, 510, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1446, 511, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1447, 512, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1448, 513, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1449, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1450, 515, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1451, 516, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1452, \n 517, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1453, 518, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1454, 519, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1455, 520, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1456, 521, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1457, 522, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1458, 523, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1459, 524, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1460, 525, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1461, 526, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1462, \n 527, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1463, 528, 0,\n 1e-05, 0, 
9999, 9999, 9999, 0, 0, 1, -360, 360], [1464, 529, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1465, 530, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1466, 531, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1467, 532, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1468, 533, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1469, 534, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1470, 535, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1471, 536, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1472, \n 537, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1473, 538, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1474, 539, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1475, 540, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1476, 541, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1477, 542, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1479, 544, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1480, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1481, 546, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1482, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1483, \n 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1484, 549, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1485, 550, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1486, 551, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1487, 552, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1488, 554, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1489, 555, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1490, 556, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1491, 557, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1492, 558, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1493, \n 559, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1494, 560, 0,\n 1e-05, 0, 9999, 
9999, 9999, 0, 0, 1, -360, 360], [1495, 561, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1497, 563, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1498, 564, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1500, 566, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1501, 567, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1502, 568, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1503, 569, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1504, 570, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1505, \n 571, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1506, 572, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1507, 573, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1508, 574, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1510, 576, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1511, 577, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1512, 578, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1513, 579, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1514, 580, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1516, 582, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1517, \n 583, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1518, 584, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1519, 585, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1520, 1, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1521, 3, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1522, 4, 0, 1e-05, 0, 9999, 9999, 9999, 0, \n 0, 1, -360, 360], [1523, 6, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1524, 7, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [1525, 8, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1526, 9,\n 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1527, 11, 0, 1e-05,\n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1528, 14, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, 
-360, 360], [1529, 16, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1530, 17, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1531, 19, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1532, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1534, 25, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1535,\n 27, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1536, 28, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1537, 29, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1538, 31, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1539, 33, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1540, 34, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1541, 35, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1542, 36, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1543, 38, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1544,\n 39, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1545, 40, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1546, 41, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1547, 43, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1548, 44, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1549, 45, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1550, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1551, 48, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1552, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1553,\n 50, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1554, 51, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1555, 53, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1556, 54, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1557, 55, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1558, 57, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1559, 58, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1560, 59, 0, 1e-05, 0, 
9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1561, 60, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1562,\n 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1563, 63, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1564, 64, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1565, 65, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1566, 66, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1567, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1568, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1569, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1570, 72, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1571,\n 73, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1572, 75, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1573, 76, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1574, 77, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1575, 79, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1576, 80, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1577, 81, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1578, 82, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1579, 83, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1580,\n 84, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1581, 85, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1582, 88, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1583, 89, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1584, 90, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1585, 91, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1586, 92, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1587, 93, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1588, 97, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1589,\n 98, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1590, 101, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 
360], [1591, 102, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1592, 103, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1593, 108, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1594, 109, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1595, 110, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1596, 111, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1597, 112, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1598, 113, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1599, \n 114, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1600, 115, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1601, 116, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1602, 118, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1603, 119, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1604, 121, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1605, 122, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1606, 126, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1607, 127, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1608, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1609, \n 131, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1610, 132, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1611, 133, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1612, 134, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1613, 135, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1614, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1615, 137, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1616, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1617, 140, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1618, 141, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1619, \n 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1620, 144, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], 
[1621, 145, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1622, 146, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1623, 147, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1624, 148, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1625, 149, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1626, 150, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1627, 151, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1628, 152, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1629, \n 153, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1630, 154, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1631, 155, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1632, 158, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1633, 161, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1634, 162, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1635, 163, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1636, 164, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1637, 166, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1638, 167, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1639, \n 168, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1640, 169, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1641, 170, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1642, 171, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1643, 172, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1644, 173, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1645, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1646, 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1647, 176, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1648, 177, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1649, \n 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1650, 179, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1651, 
180, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1652, 181, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1653, 182, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1654, 183, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1655, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1656, 186, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1657, 187, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1658, 188, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1659, \n 189, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1660, 190, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1661, 192, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1662, 193, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1663, 194, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1664, 196, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1665, 197, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1666, 198, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1667, 199, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1668, 200, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1669, \n 202, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1670, 203, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1671, 204, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1672, 205, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1673, 206, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1674, 207, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1675, 208, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1676, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1677, 210, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1678, 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1679, \n 212, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1680, 213, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1681, 214, 0, 
1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1682, 215, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1683, 216, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1684, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1685, 218, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1686, 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1687, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1688, 222, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1689, \n 223, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1690, 224, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1691, 225, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1692, 226, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1693, 227, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1694, 228, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1695, 229, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1696, 230, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1697, 234, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1698, 235, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1699, \n 237, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1700, 238, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1701, 239, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1702, 240, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1703, 241, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1704, 242, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1705, 243, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1706, 244, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1707, 247, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1708, 251, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1709, \n 252, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1710, 253, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1711, 254, 0, 1e-05, 
\n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1712, 255, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1713, 256, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1714, 257, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1715, 258, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1716, 260, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1717, 263, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1718, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1719, \n 266, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1720, 267, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1721, 268, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1722, 269, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1723, 271, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1724, 272, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1725, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1726, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1727, 275, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1728, 276, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1729, \n 278, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1730, 281, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1731, 282, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1732, 283, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1733, 284, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1734, 285, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1735, 286, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1736, 287, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1737, 288, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1738, 289, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1739, \n 291, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1740, 292, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1741, 293, 0, 1e-05, \n 0, 
9999, 9999, 9999, 0, 0, 1, -360, 360], [1742, 294, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1743, 295, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1744, 296, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1745, 297, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1746, 298, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1747, 299, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1748, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1749, \n 302, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1750, 303, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1751, 304, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1752, 307, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1753, 308, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1754, 309, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1755, 311, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1756, 312, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1757, 314, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1758, 316, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1759, \n 317, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1760, 318, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1761, 319, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1762, 321, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1763, 322, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1764, 323, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1765, 324, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1766, 325, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1767, 326, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1768, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1769, \n 328, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1770, 329, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1771, 331, 0, 1e-05, \n 0, 9999, 
9999, 9999, 0, 0, 1, -360, 360], [1772, 333, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1773, 335, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1774, 337, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1775, 338, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1776, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1777, 340, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1778, 341, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1779, \n 342, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1780, 343, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1781, 344, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1782, 345, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1783, 346, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1784, 347, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1785, 348, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1786, 350, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1787, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1788, 353, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1789, \n 354, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1790, 355, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1791, 356, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1792, 357, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1793, 359, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1794, 361, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1795, 362, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1796, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1797, 364, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1798, 365, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1799, \n 366, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1800, 367, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1801, 368, 0, 1e-05, \n 0, 9999, 9999, 
9999, 0, 0, 1, -360, 360], [1802, 369, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1803, 370, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1804, 371, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1805, 372, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1806, 373, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1807, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1808, 375, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1809, \n 376, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1810, 377, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1811, 378, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1812, 379, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1813, 381, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1814, 384, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1815, 385, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1816, 386, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1817, 387, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1818, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1819, \n 390, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1820, 391, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1821, 392, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1822, 393, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1823, 394, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1824, 395, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1825, 396, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1826, 397, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1827, 398, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1828, 399, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1829, \n 400, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1830, 403, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1831, 404, 0, 1e-05, \n 0, 9999, 9999, 9999, 
0, 0, 1, -360, 360], [1832, 405, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1833, 406, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1834, 407, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1836, 410, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1837, 411, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1838, 412, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1839, 413, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1840, \n 414, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1841, 416, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1842, 417, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1843, 418, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1844, 419, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1845, 420, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1846, 421, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1847, 422, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1848, 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1849, 424, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1850, \n 425, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1851, 426, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1852, 427, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1853, 428, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1854, 429, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1855, 430, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1856, 431, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1857, 432, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1858, 433, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1860, 435, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1861, \n 436, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1862, 437, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1863, 438, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 
1, -360, 360], [1864, 439, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1865, 440, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1866, 441, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1867, 442, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1868, 443, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1869, 445, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1870, 446, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1871, \n 447, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1872, 448, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1873, 449, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1874, 450, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1875, 451, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1876, 453, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1877, 454, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1878, 455, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1879, 456, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1880, 457, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1881, \n 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1882, 459, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1883, 460, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1884, 461, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1885, 462, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1886, 463, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1887, 464, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1888, 465, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1889, 466, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1890, 467, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1891, \n 468, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1892, 469, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1893, 470, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, 
-360, 360], [1894, 471, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1895, 472, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1896, 473, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1897, 474, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1898, 475, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1899, 476, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1900, 477, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1901, \n 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1902, 479, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1903, 480, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1904, 481, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1905, 482, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1906, 483, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1907, 484, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1908, 485, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1909, 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1910, 487, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1911, \n 488, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1912, 489, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1913, 490, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1914, 491, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1915, 492, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1916, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1917, 494, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1918, 495, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1919, 496, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1920, 497, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1921, \n 498, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1922, 499, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1923, 500, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 
360], [1924, 501, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1925, 502, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1926, 503, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1927, 504, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1928, 505, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1929, 506, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1930, 507, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1931, \n 508, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1932, 509, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1933, 510, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1934, 511, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1935, 512, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1936, 513, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1937, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1938, 515, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1939, 516, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1940, 517, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1941, \n 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1942, 519, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1943, 520, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1944, 521, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1945, 522, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1946, 523, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1947, 524, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1948, 525, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1949, 526, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1950, 527, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1951, \n 528, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1952, 529, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1953, 530, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], 
[1954, 531, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1955, 532, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1956, 533, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1957, 534, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1958, 535, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1959, 536, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1960, 537, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1961, \n 538, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1962, 539, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1963, 540, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1964, 541, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1965, 542, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1966, 543, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1967, 544, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1968, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1969, 546, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1970, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1971, \n 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1972, 549, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1973, 550, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1974, 551, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1975, 552, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1976, 553, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1977, 554, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1978, 555, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1979, 556, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1980, 557, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1981, \n 558, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1982, 559, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1983, 560, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1984, 
561, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1985, 562, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1986, 563, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1987, 564, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1988, 565, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1989, 566, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1990, 567, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1991, \n 568, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1992, 569, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1993, 570, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1994, 571, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1995, 572, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1996, 573, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1997, 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1998, 575, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1999, 576, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 2000, 577, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [2001, \n 578, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [2002, 579, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [2003, 580, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [2004, 581, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [2005, 582, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [2006, 583, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [2007, 584, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [2008, 585, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1, 490, 0, 0.01433884297520661, 0.151691958358336, 991.0, 991.0,\n 991.0, 0, 2, 1, -360, 43.375], [3, 4, 0, 0.006291637811634348, \n 0.903417549506624, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 72.681], [491,\n 6, 0, 0.011200661157024791, 0.118492839955776, 991.0, 991.0, 991.0, 0, \n 2, 1, -360, 33.882], [7, 5, 0, 0.005794840720221606, \n 
0.20802058859584005, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 33.471], [8,\n 9, 0, 0.0024379328254847646, 0.350063268897336, 3423.0, 3423.0, 3423.0,\n 0, 1, 1, -360, 28.163], [492, 11, 0, 0.018224793388429753, \n 0.0482004476327704, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.565], [11, \n 493, 0, 0.030286942148760328, 0.08010209706571599, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 45.809], [492, 493, 0, 0.04521652892561983, \n 0.11958747011094399, 495.0, 495.0, 495.0, 0, 1, 1, -360, 68.39], [494, \n 14, 0, 0.012990743801652892, 0.137430291356512, 991.0, 991.0, 991.0, 0,\n 2, 1, -360, 39.297], [13, 15, 0, 0.007681959833795014, \n 0.27576354266704156, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 44.371], [\n 16, 5, 0, 0.006275623268698061, 0.22527950450957998, 1711.0, 1711.0, \n 1711.0, 0, 2, 1, -360, 36.248000000000005], [17, 18, 0, \n 0.04623522622347646, 0.9335989000302801, 1283.0, 1283.0, 1283.0, 0, 1, \n 1, -360, 200.291], [17, 12, 0, 0.0056020313942728535, 0.113118303398186,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.268], [14, 495, 0, \n 0.0017957024793388433, 0.018996904156819597, 991.0, 991.0, 991.0, 0, 1,\n 1, -360, 5.432], [494, 19, 0, 0.010246611570247935, 0.10839986031771602,\n 991.0, 991.0, 991.0, 0, 1, 1, -360, 30.996], [20, 21, 0, \n 0.005415685595567867, 0.19440984828307922, 1711.0, 1711.0, 1711.0, 0, 2,\n 1, -360, 31.281], [20, 22, 0, 0.0049706544321329645, 0.713737278110032,\n 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 57.42100000000001], [497, 23, 0,\n 0.002190413223140496, 0.005793146490362, 495.0, 495.0, 495.0, 0, 1, 1, \n -360, 3.313], [23, 499, 0, 0.020799669421487598, 0.22004164444829602, \n 991.0, 991.0, 991.0, 0, 1, 1, -360, 62.919], [25, 26, 0, \n 0.00141845567867036, 0.050919084651523595, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 8.193], [25, 22, 0, 0.0035578254847645433, 0.0319293051869808,\n 856.0, 856.0, 856.0, 0, 1, 1, -360, 10.275], [23, 27, 0, \n 0.027738181818181818, 0.073361203699828, 495.0, 495.0, 495.0, 0, 1, 1, \n -360, 41.95399999999999], [28, 23, 
0, 0.012841652892561981, \n 0.0339632611780132, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.423], [8, 21,\n 0, 0.004948753462603878, 0.17764812836304802, 1711.0, 1711.0, 1711.0, 0,\n 2, 1, -360, 28.584], [9, 29, 0, 0.002212863573407202, \n 0.31774552934092004, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 25.563000000000002], [30, 25, 0, 0.019958795013850415, \n 0.17911796401827998, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 57.641000000000005], [31, 32, 0, 0.0299776084949446, 0.605319030583196,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 129.863], [32, 33, 0, \n 0.016762234533725762, 0.33846927983213604, 1283.0, 1283.0, 1283.0, 0, 1,\n 1, -360, 72.61399999999999], [34, 35, 0, 0.001931900826446281, \n 0.020437759184893597, 991.0, 991.0, 991.0, 0, 2, 1, -360, \n 5.843999999999999], [35, 36, 0, 0.0008730578512396695, \n 0.0092361605077588, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.641], [490, 6,\n 0, 0.049352066115702475, 0.130525028606764, 495.0, 495.0, 495.0, 0, 1, \n 1, -360, 74.645], [37, 10, 0, 0.02404639889196676, 0.485553838251812, \n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 104.169], [10, 38, 0, \n 0.006848799630657894, 0.13829351176534158, 1283.0, 1283.0, 1283.0, 0, 1,\n 1, -360, 29.669], [37, 38, 0, 0.01437834718372576, 1.1613317560186958, \n 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 124.574], [39, 40, 0, \n 0.04521629732222991, 0.913024308337812, 1283.0, 1283.0, 1283.0, 0, 1, 1,\n -360, 195.877], [39, 41, 0, 0.017466989843005543, 0.35269996139852006, \n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 75.667], [42, 41, 0, \n 0.031145429362880884, 0.6289001042979919, 1283.0, 1283.0, 1283.0, 0, 1,\n 1, -360, 134.922], [18, 42, 0, 0.03439750692520776, 0.6945672650962679,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 149.01], [492, 43, 0, \n 0.01819173553719008, 0.192452068436848, 991.0, 991.0, 991.0, 0, 2, 1, -\n 360, 55.03], [44, 45, 0, 0.02562314049586777, 0.067767398802972, 495.0,\n 495.0, 495.0, 0, 1, 1, -360, 38.755], [44, 505, 0, 0.006061487603305785,\n 0.0160312607980052, 495.0, 495.0, 
495.0, 0, 1, 1, -360, 9.168], [46, 12,\n 0, 0.0014741170360110802, 0.2116687641962416, 3423.0, 3423.0, 3423.0, 0,\n 2, 1, -360, 17.029], [47, 48, 0, 0.005344182825484765, \n 0.01199019212302604, 428.0, 428.0, 428.0, 0, 1, 1, -360, \n 7.7170000000000005], [49, 50, 0, 0.0019151662049861494, \n 0.0171874439892256, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 5.531000000000001], [31, 33, 0, 0.013475992613088641, \n 0.27211225959163604, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 58.378], [\n 31, 51, 0, 0.003518611495844875, 0.5052381383693519, 3423.0, 3423.0, \n 3423.0, 0, 1, 1, -360, 40.647], [52, 53, 0, 0.010464421745152355, \n 1.5025884408875438, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 120.885], [\n 52, 54, 0, 0.0076126500461911354, 0.1537174637168, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 32.978], [506, 55, 0, 0.012634380165289257, \n 0.133660287181212, 991.0, 991.0, 991.0, 0, 1, 1, -360, 38.219], [506, \n 507, 0, 0.044157355371900825, 0.11678619613628, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 66.788], [57, 506, 0, 0.004687272727272727, \n 0.049587095736244, 991.0, 991.0, 991.0, 0, 1, 1, -360, 14.179], [57, 58,\n 0, 0.014436363636363634, 0.0381809096340232, 495.0, 495.0, 495.0, 0, 1,\n 1, -360, 21.835], [58, 506, 0, 0.019797685950413223, 0.052360391943288,\n 495.0, 495.0, 495.0, 0, 1, 1, -360, 29.944000000000003], [59, 60, 0, \n 0.019407548476454296, 0.174170863885556, 856.0, 856.0, 856.0, 0, 1, 1, \n -360, 56.049], [508, 62, 0, 0.051111404958677685, 0.03379452026753001, \n 248.0, 248.0, 248.0, 0, 1, 1, -360, 38.653], [30, 61, 0, \n 0.03143698060941828, 0.28212765137935203, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 90.79], [63, 506, 0, 0.027457190082644623, 0.072618044249872, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 41.528999999999996], [13, 64, 0, \n 0.0014816481994459833, 0.2127501654814608, 3423.0, 3423.0, 3423.0, 0, 2,\n 1, -360, 17.116], [65, 66, 0, 0.03778185595567867, 0.7629053006222161, \n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 163.671], [59, 67, 0, \n 0.0051880193905817175, 
0.046559297286324804, 856.0, 856.0, 856.0, 0, 1,\n 1, -360, 14.982999999999999], [61, 67, 0, 0.012931440443213295, \n 0.1160517597580644, 856.0, 856.0, 856.0, 0, 1, 1, -360, 37.346], [68, \n 69, 0, 0.011149584487534626, 0.4002427745096039, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 64.4], [70, 69, 0, 0.009625346260387812, \n 0.345526355460808, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 55.596000000000004], [71, 72, 0, 0.008878635734072021, \n 0.318721276477736, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.283], [73,\n 74, 0, 0.012529547553116345, 0.253001288604392, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 54.278], [37, 75, 0, 0.027459141274238225, \n 0.5544652029066119, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, \n 118.95299999999999], [72, 75, 0, 0.006688711911357341, \n 0.240108375006292, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 38.634], [37,\n 72, 0, 0.036222068328739615, 0.7314094881920841, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 156.914], [76, 77, 0, 0.004683777700831025, \n 0.6725445900750401, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 54.107], [77,\n 51, 0, 0.00363183864265928, 0.5214964473447999, 3423.0, 3423.0, 3423.0,\n 0, 2, 1, -360, 41.955], [73, 72, 0, 0.025475069252077563, \n 0.514402082018968, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, \n 110.35799999999999], [18, 40, 0, 0.01302770083102493, 0.26306018504072,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 56.43600000000001], [492, 45, 0,\n 0.0308703030303719, 0.18370114733484796, 743.0, 743.0, 743.0, 0, 1, 1, \n -360, 70.03699999999999], [10, 74, 0, 0.030167359187465374, \n 0.609150547206812, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 130.685], [45,\n 511, 0, 0.08203371900826446, 0.05424014819960001, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 62.038000000000004], [78, 32, 0, 0.013458795013850415, \n 0.48313777647302397, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 77.738], [\n 79, 80, 0, 0.0038086911357340715, 0.1367226831743568, 1711.0, 1711.0, \n 1711.0, 0, 2, 1, -360, 21.999000000000002], [81, 79, 0, \n 0.010767832409972299, 
0.3865388099484561, 1711.0, 1711.0, 1711.0, 0, 2,\n 1, -360, 62.195], [34, 82, 0, 0.0015497520661157025, \n 0.00409874294399768, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.344], [83, \n 84, 0, 0.00902611570247934, 0.0238720301499152, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 13.652000000000001], [83, 499, 0, 0.04179570247933885, \n 0.0276350398834796, 248.0, 248.0, 248.0, 0, 1, 1, -360, 31.608], [85, \n 86, 0, 0.00802354570637119, 0.28802563884886, 1711.0, 1711.0, 1711.0, 0,\n 1, 1, -360, 46.343999999999994], [87, 86, 0, 0.01904968836565097, \n 0.683837154069184, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 110.031], [88,\n 89, 0, 0.00380297520661157, 0.010058007429140002, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 5.752000000000001], [90, 86, 0, 0.012097818559556786, \n 0.434282055192244, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 69.877], [91,\n 86, 0, 9.26246537396122e-05, 0.013299992817559201, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 1.07], [86, 92, 0, 0.0001852493074792244, \n 0.0066499964087796005, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.07], [\n 86, 93, 0, 0.008152181440443215, 0.292643346635492, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 47.086999999999996], [94, 86, 0, \n 0.012883829639889197, 0.46249792780547194, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 74.417], [86, 95, 0, 0.010421052631578947, 0.37409026526870803,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 60.192], [513, 517, 0, \n 0.0008733884297520661, 0.0023099144321748, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 1.321], [97, 66, 0, 0.03812777008310249, 0.34217338998058805, \n 856.0, 856.0, 856.0, 0, 1, 1, -360, 110.113], [42, 98, 0, \n 0.003091759002770083, 0.44394630230884, 3423.0, 3423.0, 3423.0, 0, 2, 1,\n -360, 35.716], [99, 100, 0, 0.016371537396121884, 0.587698093837988, \n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 94.56200000000001], [42, 101, 0,\n 0.008165339335180054, 0.29311568282888, 1711.0, 1711.0, 1711.0, 0, 1, 1,\n -360, 47.163000000000004], [102, 42, 0, 0.012403047091412742, \n 0.44523901189173193, 1711.0, 1711.0, 
1711.0, 0, 1, 1, -360, 71.64], [\n 103, 87, 0, 0.007073060941828254, 0.25390556381756, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 40.854], [104, 103, 0, 0.0028852146814404432, \n 0.1035721403291428, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.665], [\n 105, 87, 0, 0.006406682825484765, 0.22998422159488002, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 37.005], [106, 107, 0, 0.005714219759923823, \n 0.11538365264216799, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.754], [\n 108, 107, 0, 0.0025427631578947367, 0.09127896939786201, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 14.687000000000001], [109, 106, 0, \n 0.003030470914127424, 0.10878648330773438, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 17.504], [110, 111, 0, 0.019821849030470913, \n 0.7115558306889919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 114.491], [\n 87, 112, 0, 0.006135907202216068, 0.220264039928212, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 35.441], [113, 87, 0, 0.003981648199445983, \n 0.14293141813921081, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 22.998], [\n 87, 85, 0, 0.011046225761772853, 0.3965324494097, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 63.803000000000004], [110, 114, 0, \n 0.011665339335180056, 0.418757110306188, 1711.0, 1711.0, 1711.0, 0, 1, \n 1, -360, 67.37899999999999], [115, 116, 0, 0.007048925619834712, \n 0.07457124214588401, 991.0, 991.0, 991.0, 0, 1, 1, -360, 21.323], [117,\n 118, 0, 0.005987534626038782, 0.21493782785077598, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 34.584], [117, 119, 0, 0.0038738746537396117, \n 0.5562504472696961, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 44.751000000000005], [117, 120, 0, 0.005886686288088643, \n 0.8452704781039522, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 68.003], [\n 121, 122, 0, 0.0021170360110803325, 0.0759964075574972, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 12.228], [123, 124, 0, 0.0018386426592797783, \n 0.0660027680945204, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 10.62], [125,\n 126, 0, 0.004941135734072022, 0.17737467056702802, 1711.0, 1711.0, \n 
1711.0, 0, 1, 1, -360, 28.54], [127, 119, 0, 0.0029027008310249305, \n 0.1041998502705648, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.766], [\n 118, 128, 0, 0.007397160664819945, 0.265539950057812, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 42.726000000000006], [121, 119, 0, \n 0.002552458448753463, 0.0916270065931116, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 14.743], [530, 527, 0, 0.022726611570247933, \n 0.060106736329903994, 495.0, 495.0, 495.0, 0, 1, 1, -360, 34.374], [125,\n 130, 0, 0.002931440443213297, 0.105231531956442, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 16.932000000000002], [125, 123, 0, 0.0019078081717451524,\n 0.2739425623421336, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 22.039], [\n 131, 132, 0, 0.0035744459833795014, 0.12831385593973843, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 20.646], [133, 123, 0, 0.003864439058171745, \n 0.13872389704704202, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 22.320999999999998], [524, 134, 0, 0.008092231404958678, \n 0.08560847143881999, 991.0, 991.0, 991.0, 0, 1, 1, -360, 24.479], [135,\n 136, 0, 0.005242901662049862, 0.1882073282678, 1711.0, 1711.0, 1711.0, \n 0, 1, 1, -360, 30.283], [123, 131, 0, 0.003138331024930748, \n 0.1126583971045252, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 18.127], [\n 117, 128, 0, 0.010800034626038782, 0.38769479063117196, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 62.381], [137, 521, 0, 0.013832396694214875, \n 0.14633421587532003, 991.0, 991.0, 991.0, 0, 2, 1, -360, 41.843], [531,\n 514, 0, 0.0059504132231404955, 0.035409362037522, 743.0, 743.0, 743.0, \n 0, 1, 1, -360, 13.5], [139, 521, 0, 0.021257520661157023, \n 0.05622132386323199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.152], [140,\n 514, 0, 0.018527603305785127, 0.04900131122836401, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 28.023000000000003], [522, 141, 0, 0.012168595041322314,\n 0.032183175718526795, 495.0, 495.0, 495.0, 0, 1, 1, -360, 18.405], [142,\n 523, 0, 0.007060165289256198, 0.0746901476577608, 991.0, 991.0, 991.0, \n 0, 2, 1, -360, 
21.357], [530, 526, 0, 0.020281652892561983, \n 0.053640374808152, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.676], [140, \n 532, 0, 0.004669090909090909, 0.0123486871461184, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 7.062], [142, 144, 0, 0.006678126721756199, \n 0.0397397958689204, 743.0, 743.0, 743.0, 0, 1, 1, -360, 15.151], [140, \n 522, 0, 0.020450247933884298, 0.05408627047793199, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 30.930999999999997], [145, 146, 0, 0.028527603305785125,\n 0.07544904460236, 495.0, 495.0, 495.0, 0, 1, 1, -360, 43.148], [147, \n 523, 0, 0.02461289256198347, 0.0650955220034416, 495.0, 495.0, 495.0, 0,\n 2, 1, -360, 37.227], [144, 523, 0, 0.008479338842975206, \n 0.0224259292904064, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.825], [139, \n 523, 0, 0.029245619834710742, 0.0193370088934308, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 22.116999999999997], [140, 141, 0, 0.008362975206611572,\n 0.022118173847506, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 12.649000000000001], [528, 526, 0, 0.015389090909090908, \n 0.0407006573227188, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.276], [528, \n 148, 0, 0.014306115702479338, 0.0378364333712244, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 21.638], [149, 150, 0, 0.013604628099173552, \n 0.035981157661543604, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 20.576999999999998], [145, 528, 0, 0.00320595041322314, \n 0.0084790121737992, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.849], [530, \n 151, 0, 0.013144462809917355, 0.0347641247737036, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 19.881], [524, 152, 0, 0.014598347107438016, \n 0.03860931919944, 495.0, 495.0, 495.0, 0, 1, 1, -360, 22.08], [149, 525,\n 0, 0.016897190082644627, 0.17875695122823998, 991.0, 991.0, 991.0, 0, 2,\n 1, -360, 51.114], [139, 514, 0, 0.007824132231404959, \n 0.020693056313687997, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 11.834000000000001], [126, 120, 0, 0.012780297783933518, \n 0.458781387757004, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 73.819], [530,\n 153, 0, 0.02254545454545455, 
0.059627617060924, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 34.1], [528, 147, 0, 0.15786710743801652, 0.104380679149868,\n 248.0, 248.0, 248.0, 0, 1, 1, -360, 119.387], [528, 154, 0, \n 0.006528264462809917, 0.017265779790547203, 495.0, 495.0, 495.0, 0, 2, \n 1, -360, 9.874], [130, 120, 0, 0.01450502077562327, 0.5206947188067639,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 83.781], [528, 155, 0, \n 0.16064132231404957, 0.1062149715341, 248.0, 248.0, 248.0, 0, 1, 1, -\n 360, 121.485], [524, 533, 0, 0.004432727272727273, 0.0468942356109744, \n 991.0, 991.0, 991.0, 0, 1, 1, -360, 13.409], [524, 149, 0, \n 0.0056413223140495865, 0.05968007537478799, 991.0, 991.0, 991.0, 0, 2, \n 1, -360, 17.065], [154, 150, 0, 0.007539173553719007, \n 0.0199394052006688, 495.0, 495.0, 495.0, 0, 2, 1, -360, \n 11.402999999999999], [157, 110, 0, 0.009962084487534625, \n 0.357614433044424, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 57.541000000000004], [119, 158, 0, 0.0002490189289012004, \n 0.08045252664623159, 5134.0, 5134.0, 5134.0, 0, 3, 1, -360, 4.315], [\n 159, 60, 0, 0.010967451523545706, 0.0984261617997728, 856.0, 856.0, \n 856.0, 0, 1, 1, -360, 31.674], [536, 161, 0, 0.021314380165289255, \n 0.056371704363524, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.238], [115, \n 151, 0, 0.00379404958677686, 0.0401376047510724, 991.0, 991.0, 991.0, 0,\n 1, 1, -360, 11.477], [162, 134, 0, 0.0015910743801652895, \n 0.016832124393744, 991.0, 991.0, 991.0, 0, 2, 1, -360, 4.813], [115, \n 526, 0, 0.0037884297520661154, 0.010019537998747198, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 5.73], [138, 87, 0, 0.0011838642659279777, \n 0.16999131006813442, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 13.675999999999998], [123, 163, 0, 0.0022778739612188364, \n 0.08177009602828919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.157], [\n 112, 164, 0, 0.0008672957063711912, 0.12453516639176802, 3423.0, 3423.0,\n 3423.0, 0, 2, 1, -360, 10.019], [112, 165, 0, 0.005989439058171744, \n 0.21500619230086396, 1711.0, 1711.0, 1711.0, 0, 
1, 1, -360, 34.595], [\n 166, 165, 0, 0.002632790858725762, 0.09451074335350361, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 15.207], [167, 537, 0, 0.00832595041322314, \n 0.08808100664460242, 991.0, 991.0, 991.0, 0, 2, 1, -360, 25.186], [168,\n 104, 0, 0.002552458448753463, 0.0916270065931116, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 14.743], [531, 520, 0, 0.016156694214876033, \n 0.042730794079516396, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 24.436999999999998], [139, 520, 0, 0.010682314049586776, \n 0.0282522993797748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.157], [520, \n 169, 0, 0.0011328925619834712, 0.0119849761681232, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 3.427], [168, 105, 0, 0.007340893351800554, \n 0.26352009133553606, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 42.401], [\n 520, 170, 0, 0.005842644628099174, 0.015452470732151198, 495.0, 495.0, \n 495.0, 0, 2, 1, -360, 8.837], [171, 89, 0, 0.005505454545454546, \n 0.058242717567848004, 991.0, 991.0, 991.0, 0, 1, 1, -360, 16.654], [521,\n 172, 0, 0.006304793388429752, 0.06669899780522001, 991.0, 991.0, 991.0,\n 0, 1, 1, -360, 19.072], [123, 173, 0, 0.005247403047091413, \n 0.18836891696656402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.309], [\n 521, 174, 0, 0.013300495867768597, 0.035176796844864404, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 20.117], [37, 39, 0, 0.004338873499549862, \n 0.35044859579205606, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 37.592], [\n 530, 175, 0, 0.013128595041322313, 0.0347221581224188, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 19.857], [530, 176, 0, 0.005685289256198347, \n 0.01503630144005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.599], [88, 530,\n 0, 0.006015867768595041, 0.0159106066755372, 495.0, 495.0, 495.0, 0, 1,\n 1, -360, 9.099], [177, 496, 0, 0.018632066115702478, \n 0.19711036673178398, 991.0, 991.0, 991.0, 0, 2, 1, -360, \n 56.361999999999995], [178, 525, 0, 0.03106842975206612, \n 0.08216895464241199, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 46.99100000000001], [179, 493, 0, 
0.057079669421487594, \n 0.15096278779194802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.333], [180,\n 181, 0, 0.041027438016528923, 0.10850827416682, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 62.053999999999995], [182, 180, 0, 0.00866314049586777, \n 0.09164817200545601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 26.206], [179,\n 181, 0, 0.01957223140495868, 0.051764115772731996, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 29.603], [180, 493, 0, 0.06676561983471074, \n 0.17657993119175203, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 100.98299999999999], [183, 30, 0, 0.0024804362880886427, \n 0.356166349712776, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 28.654], [183,\n 21, 0, 0.0025647506925207757, 0.36827307214930394, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 29.628], [538, 185, 0, 0.018631404958677687, \n 0.0123189607681008, 248.0, 248.0, 248.0, 0, 1, 1, -360, 14.09], [538, \n 89, 0, 0.014509752066115702, 0.038375005396288, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 21.945999999999998], [184, 186, 0, 0.0016554709141274237, \n 0.059427351084826, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 9.562000000000001], [184, 187, 0, 0.002698753462603878, \n 0.09687863927102919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 15.588], [\n 520, 172, 0, 0.0034188429752066113, 0.0361682589818792, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 10.342], [89, 175, 0, 0.0037309090909090903, \n 0.0098674088877672, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.643], [185, \n 89, 0, 0.005812892561983471, 0.0153737832609196, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 8.792], [89, 188, 0, 0.003108760330578513, \n 0.008221966434607202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.702], [189,\n 190, 0, 0.008599492151454294, 0.17364414688031998, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 37.253], [539, 172, 0, 0.0021570247933884296, \n 0.022819366646419197, 991.0, 991.0, 991.0, 0, 2, 1, -360, 6.525], [504,\n 192, 0, 0.0003084297520661157, 0.00326290713886456, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 0.9329999999999999], [105, 186, 0, 0.003273372576177285,\n 
0.1175060580379876, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 18.907], [\n 105, 187, 0, 0.0021712257617728533, 0.0779416868808324, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 12.540999999999999], [539, 193, 0, \n 0.005608595041322314, 0.01483346262541, 495.0, 495.0, 495.0, 0, 1, 1, -\n 360, 8.482999999999999], [187, 194, 0, 4.8649584487534626e-05, \n 0.0069856037041576, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.562], [539,\n 540, 0, 0.004394710743801653, 0.0116230138006708, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 6.647], [539, 196, 0, 0.00332297520661157, \n 0.008788516227194, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.026], [197, \n 540, 0, 0.004737190082644629, 0.012528794024621601, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 7.165], [110, 198, 0, 0.00018724030470914128, \n 0.02688587333118328, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 2.1630000000000003], [197, 539, 0, 0.009172231404958677, \n 0.024258473063998802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 13.873], [199,\n 537, 0, 0.03612826446280991, 0.0238877676441712, 248.0, 248.0, 248.0, 0,\n 1, 1, -360, 27.322], [134, 526, 0, 0.007771239669421488, \n 0.020553167475975197, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 11.754000000000001], [200, 193, 0, 0.0009322314049586776, \n 0.009862163056380801, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.82], [4, \n 201, 0, 0.013726108033240996, 0.49273365914097605, 1711.0, 1711.0, \n 1711.0, 0, 2, 1, -360, 79.282], [202, 86, 0, 0.00013365650969529087, \n 0.00479794133417816, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.772], [85,\n 203, 0, 0.0019011426592797783, 0.2729854600553416, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 21.962], [147, 204, 0, 0.0073874380165289254, \n 0.0781523963903056, 991.0, 991.0, 991.0, 0, 2, 1, -360, \n 22.346999999999998], [147, 205, 0, 0.005959669421487603, \n 0.00394049369636956, 248.0, 248.0, 248.0, 0, 1, 1, -360, 4.507], [123, \n 206, 0, 0.0005753116343490305, 0.0826091142668064, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 6.646], [537, 207, 0, 0.018456198347107437, \n 
0.048812461297776, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.915], [165, \n 208, 0, 0.00414612188365651, 0.14883562055771601, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 23.948], [4, 94, 0, 0.013687673130193905, \n 0.49135394025941603, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 79.06], [4,\n 2, 0, 5.2054478301015697e-05, 0.016817654469309, 5134.0, 5134.0, 5134.0,\n 0, 3, 1, -360, 0.902], [209, 4, 0, 0.0022369286703601107, \n 0.32120104149338397, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 25.840999999999998], [119, 163, 0, 0.003535145429362881, \n 0.12690306230914922, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.419], [\n 210, 3, 0, 0.0003150969529085873, 0.011311208844832242, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 1.82], [99, 211, 0, 0.0035045013850415513, \n 0.1258030161741948, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.242], [99,\n 69, 0, 0.021717970914127423, 0.7796219621557, 1711.0, 1711.0, 1711.0, 0,\n 1, 1, -360, 125.443], [212, 99, 0, 0.008453774238227147, \n 0.30346978938770003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 48.82899999999999], [213, 214, 0, 0.01490115702479339, \n 0.15764073118032798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 45.076], [510,\n 215, 0, 0.002174710743801653, 0.09202587186721281, 1981.0, 1981.0, \n 1981.0, 0, 4, 1, -360, 13.157], [128, 69, 0, 0.010711651662049862, \n 1.538088234801848, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 123.741], [\n 216, 69, 0, 0.009628462603878117, 1.3825528982351443, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 111.228], [217, 98, 0, 0.0012787396121883656, \n 0.045903620070299994, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 7.386], [\n 504, 218, 0, 0.027480991735537193, 0.072680994226412, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 41.565], [177, 504, 0, 0.07054809917355372, \n 0.18658373169634002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 106.704], [219,\n 209, 0, 0.003938798476454294, 0.5655728721401839, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 45.501000000000005], [219, 220, 0, \n 0.0013026315789473684, 0.1870451326342096, 3423.0, 3423.0, 
3423.0, 0, 2,\n 1, -360, 15.048], [94, 95, 0, 0.01070740997229917, 0.38436979242743197,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 61.846000000000004], [159, 221, \n 0, 0.009937153739612188, 0.356719480257712, 1711.0, 1711.0, 1711.0, 0, \n 2, 1, -360, 57.397], [34, 161, 0, 0.010965289256198347, \n 0.116002818645824, 991.0, 991.0, 991.0, 0, 2, 1, -360, 33.17], [222, \n 221, 0, 0.0046457756232686975, 0.16677196601221997, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 26.834], [211, 52, 0, 0.05267313019390582, \n 0.472709090515552, 856.0, 856.0, 856.0, 0, 1, 1, -360, 152.12], [215, \n 223, 0, 0.04873190082644628, 0.128884831985184, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 73.707], [224, 215, 0, 0.019086280991735535, \n 0.050478887076288004, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 28.868000000000002], [225, 224, 0, 0.04200925619834711, \n 0.11110496071615601, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 63.538999999999994], [224, 223, 0, 0.031061818181818183, \n 0.082151468537468, 495.0, 495.0, 495.0, 0, 1, 1, -360, 46.981], [226, 6,\n 0, 0.06420099173553719, 0.0424492677936932, 248.0, 248.0, 248.0, 0, 1, \n 1, -360, 48.552], [7, 3, 0, 0.009332929362880887, 0.335029305054692, \n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 53.907], [216, 227, 0, \n 0.01989941135734072, 0.7143401282507, 1711.0, 1711.0, 1711.0, 0, 1, 1, \n -360, 114.939], [228, 229, 0, 0.010545454545454545, 0.027890337012274, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.95], [227, 230, 0, \n 0.003993074792243767, 0.573366419334696, 3423.0, 3423.0, 3423.0, 0, 2, \n 1, -360, 46.128], [231, 53, 0, 0.007193213296398893, 1.0328749562310842,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 83.096], [544, 545, 0, \n 0.013061818181818181, 0.034545548464856, 495.0, 495.0, 495.0, 0, 1, 1, \n -360, 19.756], [234, 235, 0, 0.04608859504132231, 0.121893887321888, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 69.709], [546, 214, 0, \n 0.057025454545454546, 0.15081940173295602, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 86.251], [233, 227, 0, 0.0029001038781163438, 
0.1041066260218888,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.750999999999998], [237, 238, \n 0, 0.026324628099173554, 0.06962267451304, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 39.816], [212, 100, 0, 0.007955505540166205, 0.285583163531816, \n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 45.951], [519, 239, 0, \n 0.01740429752066116, 0.046030422038308406, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 26.324], [238, 519, 0, 0.015166280991735538, 0.040111375593995205,\n 495.0, 495.0, 495.0, 0, 1, 1, -360, 22.939], [213, 240, 0, \n 0.01665388429752066, 0.04404574915373599, 1200.0, 1200.0, 1200.0, 0, 1,\n 1, -360, 25.189], [241, 242, 0, 0.009862015235457064, \n 0.3540221919932281, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 56.963], [70,\n 241, 0, 0.003819858033240997, 0.5484941897752321, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 44.126999999999995], [509, 213, 0, \n 0.011363636363636364, 0.120216969880216, 991.0, 991.0, 991.0, 0, 2, 1, \n -360, 34.375], [68, 243, 0, 0.003611668975069252, 0.1296500701715312, \n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.861], [243, 244, 0, \n 0.0007699099722991691, 0.027637882270859202, 1711.0, 1711.0, 1711.0, 0,\n 1, 1, -360, 4.447], [68, 244, 0, 0.004104051246537396, \n 0.147325387728876, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 23.705], [544,\n 547, 0, 0.02418776859504132, 0.255884661882476, 991.0, 991.0, 991.0, 0,\n 1, 1, -360, 73.168], [245, 227, 0, 0.012676419667590028, \n 0.45505241780707606, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 73.219], [\n 246, 208, 0, 0.0010155817174515235, 0.0364568961999408, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 5.8660000000000005], [112, 208, 0, \n 0.0017927631578947367, 0.0643558063672372, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 10.355], [165, 247, 0, 0.0002113919667590028, \n 0.0075884538459086, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 1.2209999999999999], [537, 549, 0, 0.00032066115702479337, \n 0.00084807607842936, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.485], [537, \n 550, 0, 0.00032198347107438016, 
0.0008515732993697601, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.48700000000000004], [537, 551, 0, \n 0.0002651239669421488, 0.0007011927988648, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 0.401], [110, 251, 0, 0.00023857340720221602, \n 0.008564200982522441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 1.3780000000000001], [510, 252, 0, 0.08467702479338843, \n 0.055987884365424005, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 64.03699999999999], [529, 253, 0, 0.04859504132231405, \n 0.12852286961777998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.5], [237, \n 239, 0, 0.03309421487603306, 0.08752669712542799, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 50.055], [254, 238, 0, 0.07815008264462811, \n 0.05167231372274401, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 59.101000000000006], [69, 255, 0, 0.0009369806094182826, \n 0.134541235754472, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 10.824000000000002], [510, 225, 0, 0.021953719008264466, \n 0.232250442756508, 991.0, 991.0, 991.0, 0, 1, 1, -360, 66.41], [256, \n 257, 0, 0.010125619834710746, 0.0267799693631888, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 15.315], [258, 190, 0, 0.011717451523545707, \n 0.10515695255750121, 856.0, 856.0, 856.0, 0, 1, 1, -360, 33.84], [258, \n 259, 0, 0.015782548476454293, 0.1416387085570408, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 45.58], [260, 261, 0, 0.006791031855955679, \n 0.9751256416231477, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 78.45], [554,\n 553, 0, 0.17583338842975205, 0.11625986438453201, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 132.974], [515, 263, 0, 0.006987107438016529, \n 0.0739172618295936, 991.0, 991.0, 991.0, 0, 2, 1, -360, 21.136], [14, \n 264, 0, 0.01700694214876033, 0.17991802858084, 991.0, 991.0, 991.0, 0, \n 1, 1, -360, 51.446000000000005], [116, 555, 0, 0.0009768595041322315, \n 0.0103342878835768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.955], [151, \n 116, 0, 0.007244958677685951, 0.0191612735410668, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 10.958], [111, 114, 0, 0.008806613573407202, \n 
0.3161358573133961, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.867], [77,\n 111, 0, 0.00288452216066482, 0.41418912211817605, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 33.321999999999996], [266, 525, 0, \n 0.01042909090909091, 0.027582581569373602, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 15.774000000000001], [267, 120, 0, 0.013136945983379503, \n 0.471584184581432, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 75.87899999999999], [268, 269, 0, 0.0010327272727272726, \n 0.0027313295556817604, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 1.5619999999999998], [556, 271, 0, 0.052289586776859506, \n 0.0345735262323792, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 39.544000000000004], [556, 272, 0, 0.04685355371900827, \n 0.030979257409249603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 35.433], [529,\n 273, 0, 0.0034604958677685953, 0.009152227205140799, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 5.234], [128, 274, 0, 0.0029350761772853184, \n 0.1053620459045884, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.953], [34,\n 275, 0, 0.0008290909090909092, 0.00054818938265696, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 0.627], [503, 276, 0, 0.006707438016528925, \n 0.07095861291266, 991.0, 991.0, 991.0, 0, 2, 1, -360, 20.29], [503, 504,\n 0, 0.06432727272727272, 0.680524223098808, 991.0, 991.0, 991.0, 0, 2, 1,\n -360, 194.59], [177, 218, 0, 0.04330380165289256, 0.114528740018308, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 65.497], [277, 278, 0, \n 0.007191135734072023, 1.032576638635032, 3423.0, 3423.0, 3423.0, 0, 2, \n 1, -360, 83.072], [557, 558, 0, 0.04341289256198347, 0.258338836678648,\n 743.0, 743.0, 743.0, 0, 1, 1, -360, 98.493], [557, 559, 0, \n 0.03415867768595042, 0.09034195998366001, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 51.665], [559, 558, 0, 0.04474314049586777, 0.11833546501370001, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 67.67399999999999], [277, 78, 0, \n 0.03585768698060942, 0.32180078416049196, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 103.557], [277, 279, 0, 0.021390927977839334, 0.191970480441328, \n 
856.0, 856.0, 856.0, 0, 1, 1, -360, 61.777], [78, 279, 0, \n 0.015811980609418283, 0.1419028439283376, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 45.665], [281, 282, 0, 0.0023178670360110803, 0.08320574945862161,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.388], [283, 161, 0, \n 0.036741157024793386, 0.09717203248350399, 495.0, 495.0, 495.0, 0, 2, 1,\n -360, 55.571000000000005], [268, 161, 0, 0.018883636363636366, \n 0.199771751868832, 991.0, 991.0, 991.0, 0, 2, 1, -360, \n 57.123000000000005], [256, 284, 0, 0.010755371900826446, \n 0.113782083346976, 991.0, 991.0, 991.0, 0, 2, 1, -360, 32.535], [515, \n 516, 0, 0.04071140495867769, 0.107672438361532, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 61.576], [263, 516, 0, 0.0030355371900826445, \n 0.128452925198488, 1981.0, 1981.0, 1981.0, 0, 2, 1, -360, 18.365], [516,\n 285, 0, 0.006908429752066116, 0.018271230811372, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 10.449000000000002], [63, 286, 0, 0.019088925619834708, \n 0.050485881518556, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.872], [287, \n 516, 0, 0.01732892561983471, 0.011457770111127998, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 13.105], [8, 102, 0, 0.015100069252077563, \n 0.542055501663692, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 87.21799999999999], [8, 101, 0, 0.019246883656509697, 0.69091598202144,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 111.17], [80, 288, 0, \n 0.007984072022160666, 0.2866086302684072, 1711.0, 1711.0, 1711.0, 0, 2,\n 1, -360, 46.11600000000001], [80, 289, 0, 0.0003782317636201524, \n 0.122198345223416, 5134.0, 5134.0, 5134.0, 0, 4, 1, -360, \n 6.553999999999999], [276, 560, 0, 0.01778314049586777, \n 0.047032375838192794, 495.0, 495.0, 495.0, 0, 2, 1, -360, 26.897], [37,\n 290, 0, 0.005629501385041551, 0.4546919507138321, 2567.0, 2567.0, \n 2567.0, 0, 2, 1, -360, 48.773999999999994], [290, 74, 0, \n 0.02071595106187673, 1.673216783321968, 2567.0, 2567.0, 2567.0, 0, 2, 1,\n -360, 179.483], [512, 291, 0, 0.0053299173553719, 0.056385693247479204,\n 991.0, 991.0, 
991.0, 0, 2, 1, -360, 16.123], [78, 292, 0, \n 0.0058149815327908595, 0.469673087481408, 2567.0, 2567.0, 2567.0, 0, 2,\n 1, -360, 50.381], [199, 548, 0, 0.0015530578512396695, \n 0.00410748599634868, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.349], [491, \n 293, 0, 0.014176528925619833, 0.009373426429729999, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 10.720999999999998], [4, 294, 0, 9.669321329639889e-05, \n 0.013884198109531681, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 1.117], [\n 490, 541, 0, 0.050580495867768596, 0.133773946861896, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 76.503], [491, 295, 0, 0.010613553719008264, \n 0.028070443890777202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.053], [491,\n 296, 0, 0.004400661157024794, 0.0116387512948784, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 6.656000000000001], [295, 297, 0, 0.020297520661157024, \n 0.053682341459340005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.7], [508, \n 161, 0, 0.023239669421487603, 0.061463658055360006, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 35.15], [117, 123, 0, 0.005876211911357341, \n 0.21094161505628, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 33.941], [133,\n 117, 0, 0.004469182825484764, 0.0401081792747688, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 12.907], [71, 74, 0, 0.03904524469065097, \n 0.7884161162841721, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 169.144], [\n 74, 278, 0, 0.0077122576177285325, 1.10740463560792, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 89.09200000000001], [298, 515, 0, \n 0.021701157024793388, 0.05739464148919599, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 32.823], [5, 299, 0, 0.0016232686980609415, 0.058271370400665996,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 9.376], [32, 292, 0, \n 0.009679362880886427, 0.34746541983297996, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 55.908], [5, 29, 0, 0.00743395083102493, 1.0674425076571843, \n 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 85.87700000000001], [503, 560, 0,\n 0.015140495867768593, 0.160172719142436, 991.0, 991.0, 991.0, 0, 1, 1, \n -360, 45.8], [300, 301, 0, 
0.004892053324099723, 0.7024509290644521, \n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 56.513000000000005], [51, 300, 0,\n 0.002573493767313019, 0.3695284920307039, 3423.0, 3423.0, 3423.0, 0, 1,\n 1, -360, 29.729], [244, 302, 0, 0.007714508310249307, 1.107727813004004,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 89.118], [31, 302, 0, \n 0.004369113573407203, 0.6273619041941161, 3423.0, 3423.0, 3423.0, 0, 1,\n 1, -360, 50.472], [51, 282, 0, 0.006288434903047093, 0.9029576432132521,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 72.64399999999999], [303, 304, 0,\n 8.795013850415512e-05, 0.000789298639172312, 856.0, 856.0, 856.0, 0, 1,\n 1, -360, 0.254], [305, 304, 0, 0.003881117266849031, 0.0783689646873844,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 16.813], [305, 259, 0, 0.0025625,\n 0.36794989475177603, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 29.601999999999997], [306, 307, 0, 0.03223268698060942, \n 0.289268628831688, 856.0, 856.0, 856.0, 0, 1, 1, -360, 93.088], [305, \n 308, 0, 0.0024272853185595567, 0.0217833994511184, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 7.01], [305, 309, 0, 0.011014773776523545, \n 0.22241441259921202, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 47.716], [\n 310, 309, 0, 0.009565962603878117, 0.343394627639832, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 55.253], [306, 309, 0, 0.035333795013850415, \n 0.31709917455019604, 856.0, 856.0, 856.0, 0, 1, 1, -360, 102.044], [311,\n 280, 0, 0.003433691135734072, 0.1232611016590444, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 19.833], [280, 278, 0, 0.009749769159764544, \n 0.7874838737974121, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, \n 84.47200000000001], [311, 32, 0, 0.01205909510619806, \n 0.9740069506375919, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 104.48], [13,\n 312, 0, 0.0043324965373961214, 0.622104056565324, 3423.0, 3423.0, \n 3423.0, 0, 1, 1, -360, 50.049], [313, 314, 0, 0.006092624653739613, \n 0.218710302449316, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.191], [312,\n 313, 0, 0.00893957756232687, 0.32090893884734, 
1711.0, 1711.0, 1711.0, \n 0, 1, 1, -360, 51.635], [547, 566, 0, 0.027035702479338848, \n 0.286013220297816, 991.0, 991.0, 991.0, 0, 1, 1, -360, 81.783], [245, \n 315, 0, 0.014162569252077564, 0.508401547875772, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 81.803], [312, 316, 0, 8.803670360110802e-05, \n 0.01264120812658816, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.0170000000000001], [312, 314, 0, 0.005339854570637119, \n 0.191687700220296, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 30.843000000000004], [554, 546, 0, 0.08174743801652892, \n 0.21620344446439202, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 123.64299999999999], [262, 216, 0, 0.042641966759002774, \n 0.38268554099981195, 856.0, 856.0, 856.0, 0, 1, 1, -360, 123.15], [317,\n 233, 0, 0.005647276084951523, 0.114031901035644, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 24.464000000000002], [318, 317, 0, 0.008311634349030471,\n 0.16783161497270002, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 36.006], [\n 231, 52, 0, 0.035263677285318554, 1.2658796434850879, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 203.683], [319, 567, 0, 0.006089586776859504, \n 0.0644223069721, 991.0, 991.0, 991.0, 0, 1, 1, -360, 18.421], [557, 321,\n 0, 0.010004628099173555, 0.10583989458750401, 991.0, 991.0, 991.0, 0, 2,\n 1, -360, 30.264], [277, 65, 0, 0.009430170821779778, 0.7616700793261759,\n 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 81.703], [322, 288, 0, \n 0.006545013850415513, 0.528637424797136, 2567.0, 2567.0, 2567.0, 0, 2, \n 1, -360, 56.706], [322, 323, 0, 0.0018503000923372577, 0.14944779312484,\n 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 16.031], [277, 324, 0, \n 0.019719529085872576, 0.39818407235049996, 1283.0, 1283.0, 1283.0, 0, 1,\n 1, -360, 85.425], [324, 325, 0, 0.01103508771932133, \n 0.22282459929396403, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, \n 47.803999999999995], [277, 325, 0, 0.008665743305609418, \n 0.174981914850048, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 37.54], [326,\n 327, 0, 0.007654214876033058, 0.0202436634226288, 495.0, 
495.0, 495.0, \n 0, 1, 1, -360, 11.577], [328, 326, 0, 0.10300958677685952, \n 0.068109252150368, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 77.90100000000001], [328, 327, 0, 0.09827173553719008, \n 0.064976616491468, 248.0, 248.0, 248.0, 0, 1, 1, -360, 74.318], [326, \n 329, 0, 0.028062148760330575, 0.07421802283046801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 42.443999999999996], [568, 329, 0, 0.05699900826446282, \n 0.15074945731414802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.211], [568,\n 326, 0, 0.03218644628099173, 0.08512585494846397, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 48.681999999999995], [332, 78, 0, 0.006471029547541551, \n 0.522661750455416, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 56.065], [333,\n 306, 0, 0.008580159279778392, 0.308006702824228, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 49.559], [332, 333, 0, 0.007504674515235457, \n 0.26939943395502003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 43.347], [\n 332, 334, 0, 0.017124653739612188, 0.15368328149175597, 856.0, 856.0, \n 856.0, 0, 1, 1, -360, 49.456], [66, 334, 0, 0.030625, \n 0.27484062260471603, 856.0, 856.0, 856.0, 0, 1, 1, -360, 88.445], [330,\n 335, 0, 0.00550536703601108, 0.790516769355108, 3423.0, 3423.0, 3423.0,\n 0, 1, 1, -360, 63.598], [336, 66, 0, 0.015054362880886425, \n 0.1351036887216764, 856.0, 856.0, 856.0, 0, 1, 1, -360, 43.477], [330, \n 336, 0, 0.039036357340720224, 0.350327404269788, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 112.73700000000001], [68, 70, 0, 0.016314058171745152, \n 0.14640868261713597, 856.0, 856.0, 856.0, 0, 1, 1, -360, 47.115], [509,\n 337, 0, 0.03494082644628099, 0.09241056617056001, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 52.848], [324, 288, 0, 0.012627423822714683, \n 0.11332339674541761, 856.0, 856.0, 856.0, 0, 1, 1, -360, 36.468], [338,\n 559, 0, 0.009228099173553718, 0.097624922595552, 991.0, 991.0, 991.0, 0,\n 2, 1, -360, 27.915], [339, 559, 0, 0.03560595041322315, \n 0.023542417076125203, 248.0, 248.0, 248.0, 0, 1, 1, -360, 26.927], [339,\n 340, 0, 
0.08711537190082644, 0.23040041287850396, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 131.762], [559, 340, 0, 0.20983272727272728, \n 0.138740000599684, 248.0, 248.0, 248.0, 0, 1, 1, -360, 158.686], [341, \n 292, 0, 0.0009329409048961218, 0.07535316024134399, 2567.0, 2567.0, \n 2567.0, 0, 1, 1, -360, 8.083], [557, 342, 0, 0.006019834710743802, \n 0.0636843933534336, 991.0, 991.0, 991.0, 0, 2, 1, -360, 18.21], [558, \n 343, 0, 0.010650247933884296, 0.11266996708783199, 991.0, 991.0, 991.0,\n 0, 1, 1, -360, 32.217], [502, 340, 0, 0.021737520661157025, \n 0.22996326026071198, 991.0, 991.0, 991.0, 0, 2, 1, -360, 65.756], [72, \n 32, 0, 0.00675502077562327, 0.969954803293024, 3423.0, 3423.0, 3423.0, \n 0, 2, 1, -360, 78.03399999999999], [344, 345, 0, 0.0005762927054480609,\n 0.04654686738645321, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 4.993], [\n 346, 47, 0, 0.0011340027700831024, 0.04070792194158799, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 6.55], [46, 47, 0, 0.0008975069252077563, \n 0.0322183003580208, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.184], [346,\n 345, 0, 0.0007217797783933517, 0.025910126194627202, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 4.169], [347, 328, 0, 0.029905454545454544, \n 0.07909314882361201, 495.0, 495.0, 495.0, 0, 1, 1, -360, 45.232], [347,\n 348, 0, 0.04883438016528925, 0.129155866607944, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 73.862], [571, 348, 0, 0.041548429752066116, \n 0.10988617921762801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 62.842], [347,\n 572, 0, 0.016052231404958678, 0.04245451362512801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 24.279], [571, 570, 0, 0.17379041322314048, \n 0.11490906279551602, 248.0, 248.0, 248.0, 0, 1, 1, -360, 131.429], [14,\n 350, 0, 0.02166743801652892, 0.05730546235524, 495.0, 495.0, 495.0, 0, \n 1, 1, -360, 32.772], [350, 573, 0, 0.026277685950413226, \n 0.06949852316919598, 495.0, 495.0, 495.0, 0, 1, 1, -360, 39.745], [15, \n 351, 0, 0.02639265927977839, 0.236857956201204, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 
76.222], [352, 15, 0, 0.0015260560941828254, \n 0.219126704094076, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.629], [15,\n 335, 0, 0.0035338758079432133, 1.1417173740880242, 5134.0, 5134.0, \n 5134.0, 0, 1, 1, -360, 61.235], [232, 227, 0, 5.5747922437673134e-05, \n 0.000500303468136644, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 0.161], [\n 565, 544, 0, 0.0394803305785124, 0.10441652566461601, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 59.714], [235, 567, 0, 0.02391404958677686, \n 0.25298896294275997, 991.0, 991.0, 991.0, 0, 1, 1, -360, 72.34], [567, \n 286, 0, 0.008068760330578512, 0.34144067500694797, 1981.0, 1981.0, \n 1981.0, 0, 1, 1, -360, 48.816], [353, 519, 0, 0.007621818181818182, \n 0.080631926038356, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 23.055999999999997], [354, 353, 0, 0.0008436363636363636, \n 0.00892490784392768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.552], [355, \n 354, 0, 0.0068502479338842966, 0.0181173530898976, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.360999999999999], [354, 356, 0, 0.01855404958677686, \n 0.049071255647172, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 28.063000000000002], [357, 358, 0, 0.0034823407202216067, \n 0.5000300103406239, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 40.228], [\n 574, 359, 0, 0.013352066115702478, 0.0353131884615884, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 20.195], [235, 575, 0, 0.007459504132231404, \n 0.0789147905557, 991.0, 991.0, 991.0, 0, 1, 1, -360, 22.565], [167, 361,\n 0, 0.000616198347107438, 0.0065188198358579995, 991.0, 991.0, 991.0, 0,\n 1, 1, -360, 1.864], [528, 362, 0, 0.0011960330578512398, \n 0.012652945368078402, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 3.6180000000000003], [363, 344, 0, 0.0002662742382271468, \n 0.009558592968871479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.538], [\n 259, 364, 0, 0.013069713758102496, 0.26390852570525997, 1283.0, 1283.0,\n 1283.0, 0, 1, 1, -360, 56.618], [54, 56, 0, 0.007723337950138504, \n 0.0693122289241068, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.305], [365, \n 364, 0, 
0.0049974607571537395, 0.10091058802821559, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 21.649], [231, 366, 0, 0.0013273891966759002, \n 0.0476500209962672, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, \n 7.667000000000001], [30, 367, 0, 0.01126108033240997, \n 0.1010613005635992, 856.0, 856.0, 856.0, 0, 1, 1, -360, 32.522], [61, \n 367, 0, 0.020337603878116343, 0.18251754162067196, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 58.735], [254, 368, 0, 0.0004297520661157025, \n 0.00454638722456732, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.3], [254, \n 369, 0, 0.00015999999999999999, 0.00169265493591832, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 0.484], [254, 370, 0, 0.0003669421487603306, \n 0.0038819152455960805, 991.0, 991.0, 991.0, 0, 2, 1, -360, 1.11], [99, \n 358, 0, 0.0020184383656509696, 0.28982797432374396, 3423.0, 3423.0, \n 3423.0, 0, 1, 1, -360, 23.316999999999997], [354, 519, 0, \n 0.006762644628099174, 0.07154264880985199, 991.0, 991.0, 991.0, 0, 1, 1,\n -360, 20.457], [571, 371, 0, 0.023726942148760328, 0.06275238397221199,\n 495.0, 495.0, 495.0, 0, 1, 1, -360, 35.887], [207, 372, 0, \n 0.002329256198347108, 0.006160354689297601, 495.0, 495.0, 495.0, 0, 1, \n 1, -360, 3.523], [57, 373, 0, 0.0017725619834710745, \n 0.0046880246727212796, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.681], [209,\n 374, 0, 0.0010122922437673131, 0.0363388121515216, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 5.847], [375, 376, 0, 0.0045364727608518006, \n 0.0916021467933684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 19.652], [\n 376, 377, 0, 0.0030886426592797783, 0.062367022394423606, 1283.0, \n 1283.0, 1283.0, 0, 1, 1, -360, 13.38], [16, 49, 0, 0.002266101108033241,\n 0.32538991773524, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 26.178], [318,\n 377, 0, 0.004755078485685596, 0.0960163149704152, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 20.599], [378, 297, 0, 0.01753917355371901, \n 0.046387138574374404, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 26.528000000000002], [562, 379, 0, 0.01802314049586777, \n 
0.047667121439141605, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.26], [576,\n 563, 0, 0.001808264462809917, 0.004782449638150801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 2.735], [576, 381, 0, 0.0034320661157024794, \n 0.009077036954898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.191], [577, \n 576, 0, 0.06004495867768594, 0.15880530575430396, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 90.818], [244, 383, 0, 0.006845567867036011, \n 0.1382282547912684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 29.655], [\n 244, 306, 0, 0.02679108956599723, 0.5409756541164079, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 116.059], [383, 306, 0, 0.0300685595567867, \n 0.269846910348376, 856.0, 856.0, 856.0, 0, 1, 1, -360, 86.838], [380, \n 306, 0, 0.00025605955678670365, 0.03676764369572, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 2.958], [252, 225, 0, 0.062094545454545444, \n 0.041056499553586, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 46.958999999999996], [220, 76, 0, 0.002772074099722992, \n 0.398042682239984, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 32.023], [542,\n 384, 0, 0.007939834710743802, 0.020999063146094, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 12.009], [385, 384, 0, 0.053734876033057856, \n 0.035529141854791196, 248.0, 248.0, 248.0, 0, 1, 1, -360, 40.637], [542,\n 385, 0, 0.011306115702479337, 0.119608453436296, 991.0, 991.0, 991.0, 0,\n 2, 1, -360, 34.201], [386, 385, 0, 0.003668760330578512, \n 0.0388121580140316, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 11.097999999999999], [387, 578, 0, 0.015444628099173553, \n 0.16339016240905604, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.72], [332, \n 388, 0, 0.014036184210526315, 0.5038646344377999, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 81.07300000000001], [382, 332, 0, \n 0.017764369806094183, 0.637697365901468, 1711.0, 1711.0, 1711.0, 0, 1, \n 1, -360, 102.60700000000001], [382, 388, 0, 0.00476159972299169, \n 0.17092976750548, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 27.503], [579,\n 578, 0, 0.01911074380165289, 0.050543585664, 495.0, 495.0, 495.0, 0, 1,\n 1, 
-360, 28.905], [577, 387, 0, 0.07597818181818182, \n 0.20094506949431204, 495.0, 495.0, 495.0, 0, 1, 1, -360, 114.917], [144,\n 390, 0, 0.0004277685950413223, 0.0011313509747276, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 0.647], [37, 49, 0, 0.008441481994459835, \n 0.303028527944352, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 48.758], [391,\n 233, 0, 0.014211218836565096, 0.1275369872004348, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 41.042], [392, 310, 0, 0.007035318559556785, \n 0.06313767618386361, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 20.317999999999998], [260, 393, 0, 0.006341412742382271, \n 0.0569102963692744, 856.0, 856.0, 856.0, 0, 1, 1, -360, 18.314], [394, \n 230, 0, 0.0007590027700831025, 0.00681158510656168, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 2.1919999999999997], [395, 282, 0, 0.008762984764542936,\n 0.314569689934484, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.615], [395,\n 244, 0, 0.0034046052631578946, 0.12221699007344, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 19.665], [25, 396, 0, 0.008809037396121884, \n 0.316222866612064, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.881], [81,\n 74, 0, 0.0075207756232686974, 0.26997742429652244, 1711.0, 1711.0, \n 1711.0, 0, 2, 1, -360, 43.44], [278, 80, 0, 0.016286011080332407, \n 0.5846279085788, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 94.068], [81, \n 278, 0, 0.021054016620498613, 0.755787629231688, 1711.0, 1711.0, 1711.0,\n 0, 2, 1, -360, 121.60799999999999], [569, 570, 0, 0.03253950413223141, \n 0.08605961294018, 495.0, 495.0, 495.0, 0, 1, 1, -360, 49.216], [397, \n 552, 0, 0.006289586776859504, 0.0166345314104904, 1200.0, 1200.0, \n 1200.0, 0, 1, 1, -360, 9.513], [542, 398, 0, 0.0005580165289256199, \n 0.0059033089500572, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 1.6880000000000002], [398, 385, 0, 0.021893553719008262, \n 0.05790348713648401, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 33.114000000000004], [399, 499, 0, 0.03266380165289256, \n 0.021597087927192803, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 24.701999999999998], 
[83, 399, 0, 0.025700495867768593, \n 0.016992996557050798, 248.0, 248.0, 248.0, 0, 1, 1, -360, 19.436], [498,\n 400, 0, 0.012134214876033058, 0.032092247974028, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 18.352999999999998], [518, 239, 0, 0.04685289256198347, \n 0.123915281026504, 495.0, 495.0, 495.0, 0, 1, 1, -360, 70.865], [575, \n 543, 0, 0.0030307438016528923, 0.032062521596058796, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 9.168], [401, 360, 0, 0.007957063711911357, \n 0.071409774520472, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.98], [580, \n 581, 0, 0.007134545454545454, 0.018869255592422397, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.790999999999999], [401, 402, 0, 0.0033434903047091418,\n 0.030005778188384805, 856.0, 856.0, 856.0, 0, 1, 1, -360, 9.656], [403,\n 231, 0, 0.009592105263157893, 0.08608327126915, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 27.701999999999998], [189, 360, 0, 0.028456024930747923, \n 0.255375399471348, 856.0, 856.0, 856.0, 0, 1, 1, -360, 82.181], [234, \n 404, 0, 0.008092561983471074, 0.0214029921648796, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 12.24], [235, 404, 0, 0.05107504132231405, \n 0.13508190749437998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 77.251], [235,\n 580, 0, 0.000580495867768595, 0.00153527999352772, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 0.878], [216, 259, 0, 0.0022115650969529088, \n 0.079389770210892, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, \n 12.774000000000001], [405, 259, 0, 0.0052832409972299165, \n 0.1896554115982928, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 30.516], [\n 405, 318, 0, 0.0066348684210526315, 0.23817552558268398, 1711.0, 1711.0,\n 1711.0, 0, 2, 1, -360, 38.323], [406, 230, 0, 8.098164819944598e-05, \n 0.046512685161986804, 6845.0, 6845.0, 6845.0, 0, 1, 1, -360, 1.871], [\n 542, 407, 0, 0.025569586776859506, 0.067625761355152, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 38.674], [23, 408, 0, 0.03224528925619835, \n 0.08528148128033601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 48.771], [577,\n 348, 0, 0.012999008264462809, 
0.13751772188026398, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 39.321999999999996], [562, 564, 0, 0.06921520661157024, \n 0.18305853298686803, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 104.68799999999999], [582, 507, 0, 0.006357685950413223, \n 0.016814638289042002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.616], [27, \n 410, 0, 0.0030042975206611565, 0.007945685980170399, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 4.544], [501, 27, 0, 0.003811570247933884, \n 0.040322957460962, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.53], [27, 411,\n 0, 0.004648595041322314, 0.012294480221518, 495.0, 495.0, 495.0, 0, 1, \n 1, -360, 7.031000000000001], [411, 410, 0, 0.002054214876033058, \n 0.0054329327333556, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 3.1069999999999998], [403, 360, 0, 0.008191481994459833, \n 0.07351353506655639, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 23.656999999999996], [412, 360, 0, 0.016761772853185596, \n 0.15042664773666, 856.0, 856.0, 856.0, 0, 1, 1, -360, 48.408], [326, \n 413, 0, 0.012077024793388432, 0.12776397267356798, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 36.533], [414, 413, 0, 0.008093223140495867, \n 0.08561896310149601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 24.482], [6, \n 297, 0, 0.019472396694214876, 0.0128750188978664, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 14.725999999999999], [554, 580, 0, 0.07435371900826447, \n 0.196648733567264, 495.0, 495.0, 495.0, 0, 1, 1, -360, 112.46], [262, \n 401, 0, 0.03931232686980609, 0.35280406181043206, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 113.53399999999999], [499, 556, 0, 0.04185586776859504, \n 0.11069928308639199, 495.0, 495.0, 495.0, 0, 2, 1, -360, \n 63.306999999999995], [224, 229, 0, 0.004135206611570248, \n 0.0437467367631624, 991.0, 991.0, 991.0, 0, 1, 1, -360, 12.509], [583, \n 507, 0, 0.024632727272727268, 0.065147980317596, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 37.257], [415, 307, 0, 0.015675554016620498, \n 0.1406784987952448, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.271], [416, \n 507, 0, 0.0010555371900826446, 
0.011166626467730801, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 3.193], [284, 561, 0, 0.015221487603305786, \n 0.16102953827307598, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.045], [543,\n 417, 0, 0.0006614876033057851, 0.027991756419545603, 1981.0, 1981.0, \n 1981.0, 0, 4, 1, -360, 4.002], [418, 506, 0, 0.0009395041322314049, \n 0.009939101917118, 991.0, 991.0, 991.0, 0, 1, 1, -360, 2.842], [220, \n 157, 0, 0.004599549861495845, 0.165112574384632, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 26.566999999999997], [295, 419, 0, 0.0012023140495867769,\n 0.012719392565946, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.637], [295, \n 420, 0, 0.0008003305785123967, 0.008466771900532, 991.0, 991.0, 991.0, \n 0, 1, 1, -360, 2.421], [541, 62, 0, 0.05133355371900827, \n 0.0339414035471236, 248.0, 248.0, 248.0, 0, 1, 1, -360, 38.821], [52, \n 421, 0, 0.00013885041551246538, 0.004984389831631239, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 0.802], [60, 160, 0, 6.128808864265928e-05, \n 0.000550023067454096, 856.0, 856.0, 856.0, 0, 2, 1, -360, 0.177], [535,\n 161, 0, 3.735537190082645e-05, 0.00039518596644331203, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 0.113], [267, 282, 0, 0.0065652700831024926, \n 0.235677115717012, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 37.921], [52,\n 365, 0, 0.007655586334279779, 0.15458444922992, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 33.164], [28, 27, 0, 0.015726942148760328, \n 0.041594197273402404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.787], [30,\n 201, 0, 0.009128289473684211, 0.327683234253536, 1711.0, 1711.0, 1711.0,\n 0, 2, 1, -360, 52.725], [422, 81, 0, 0.0004226685133887349, \n 0.13655487952674, 5134.0, 5134.0, 5134.0, 0, 6, 1, -360, 7.324], [119, \n 425, 0, 0.003579120498614958, 0.1284816595874996, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 20.673000000000002], [423, 425, 0, \n 0.0006518351800554017, 0.0233992864289392, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 3.765], [424, 425, 0, 0.005922957063711911, \n 0.21261965153389198, 1711.0, 1711.0, 1711.0, 0, 1, 1, 
-360, 34.211], [\n 426, 428, 0, 0.013948429752066116, 0.14756174042535197, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 42.193999999999996], [427, 428, 0, \n 0.0002664462809917355, 0.0028187600792304794, 991.0, 991.0, 991.0, 0, 2,\n 1, -360, 0.8059999999999999], [19, 428, 0, 0.023607603305785128, \n 0.24974703912892798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 71.413], [45, \n 429, 0, 0.02562314049586777, 0.067767398802972, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 38.755], [44, 429, 0, 5.289256198347107e-05, \n 0.00013988883767892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.08], [505, \n 429, 0, 0.006012561983471073, 0.015901863623161996, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 9.094], [231, 431, 0, 0.011677285318559558, \n 0.4191859418495199, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 67.44800000000001], [190, 431, 0, 0.009600761772853185, \n 0.34464383257266795, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 55.45399999999999], [430, 431, 0, 0.0028100761772853187, \n 0.1008748520662472, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 16.230999999999998], [286, 433, 0, 0.01568694214876033, \n 0.16595362535967603, 991.0, 991.0, 991.0, 0, 1, 1, -360, 47.453], [432,\n 433, 0, 0.00010049586776859504, 0.00106315516636076, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 0.304], [506, 433, 0, 0.0065904132231404955, \n 0.06972059669946801, 991.0, 991.0, 991.0, 0, 1, 1, -360, 19.936], [23, \n 434, 0, 0.02613685950413223, 0.069126069139116, 495.0, 495.0, 495.0, 0,\n 2, 1, -360, 39.532], [400, 434, 0, 0.008155371900826446, \n 0.021569110159669603, 495.0, 495.0, 495.0, 0, 2, 1, -360, 12.335], [500,\n 434, 0, 0.006338512396694216, 0.0167639285853336, 495.0, 495.0, 495.0, \n 0, 2, 1, -360, 9.587], [32, 436, 0, 0.0044813019390581715, \n 0.16086776359270402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 25.884], [\n 435, 436, 0, 0.0006634349030470914, 0.023815688073266, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 3.832], [78, 436, 0, 0.00897680055401662, \n 0.32224515307884394, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.85], 
[86,\n 438, 0, 0.014693213296398892, 0.52745036936438, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 84.868], [437, 438, 0, 1.0387811634349031e-05, \n 0.0003728969948845, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.06], [221,\n 438, 0, 0.002280124653739612, 0.081850890377238, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 13.17], [207, 439, 0, 0.055703801652892564, \n 0.0368309823503996, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 42.126000000000005], [516, 439, 0, 0.05448462809917355, \n 0.03602487292327441, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 41.20399999999999], [513, 439, 0, 0.046726611570247926, \n 0.0308953241066316, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 35.336999999999996], [181, 441, 0, 0.040805289256198356, \n 0.10792074104825197, 495.0, 495.0, 495.0, 0, 1, 1, -360, 61.718], [440,\n 441, 0, 0.0001322314049586777, 0.000349722094197784, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.2], [504, 441, 0, 0.05916099173553719, \n 0.156467413554364, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 89.48100000000001], [135, 442, 0, 0.004956890581717451, \n 0.177940231009092, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 28.631], [109,\n 442, 0, 0.0015380886426592797, 0.055213615042649204, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 8.884], [112, 442, 0, 0.0027304362880886425, \n 0.09801597510545401, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 15.770999999999999], [113, 443, 0, 0.0019885734072022164, \n 0.07138491472072879, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 11.485999999999999], [132, 443, 0, 0.006788434903047091, \n 0.24368818615747198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 39.21], [\n 107, 443, 0, 2.2333795013850418e-05, 0.000801728539002036, 1711.0, \n 1711.0, 1711.0, 0, 1, 1, -360, 0.129], [444, 445, 0, \n 7.877423822714682e-05, 0.00282780221121528, 1711.0, 1711.0, 1711.0, 0, \n 1, 1, -360, 0.455], [112, 445, 0, 0.002816135734072022, \n 0.101092375313206, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.266], [109,\n 445, 0, 0.0014354224376731304, 0.0515281497432104, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, 
-360, 8.291], [119, 447, 0, 0.005212690443213296, \n 0.74849127803204, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 60.217], [100,\n 447, 0, 0.0050695117728531865, 0.7279322237145921, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 58.563], [446, 447, 0, 2.9518698060941832e-05, \n 0.00423859584186224, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.341], [\n 124, 448, 0, 6.509695290858726e-05, 0.00233682116794768, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 0.376], [125, 448, 0, 0.00615148891966759, \n 0.22082338542026803, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.531], [\n 131, 448, 0, 3.912742382271468e-05, 0.0014045786807313759, 1711.0, \n 1711.0, 1711.0, 0, 1, 1, -360, 0.226], [449, 450, 0, \n 0.0023614958448753462, 0.08477191683710039, 1711.0, 1711.0, 1711.0, 0, \n 1, 1, -360, 13.64], [173, 450, 0, 0.002862361495844876, \n 0.10275176694050518, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.533], [\n 184, 450, 0, 0.004022853185595568, 0.14441057621844403, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 23.236], [144, 451, 0, 0.007672727272727273, \n 0.020292624515794402, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.605], [140,\n 451, 0, 0.006991074380165291, 0.018489807120219602, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.574000000000002], [514, 451, 0, 0.01149289256198347, \n 0.030396095817207994, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.383], [537,\n 585, 0, 0.05072595041322314, 0.134158641165824, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 76.723], [141, 585, 0, 0.007994710743801653, \n 0.0211441978151932, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.092], [584, \n 585, 0, 9.256198347107438e-05, 0.000244805465938352, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.14], [522, 454, 0, 0.0035008264462809916, \n 0.0092588924438956, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.295], [144, \n 454, 0, 0.00452892561983471, 0.011977981726290799, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 6.85], [453, 454, 0, 0.001114710743801653, \n 0.0029481572540882, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.686], [199, \n 456, 0, 0.013063140495867768, 
0.0086372614214612, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 9.879], [140, 456, 0, 0.005061818181818182, \n 0.013387361765852802, 495.0, 495.0, 495.0, 0, 2, 1, -360, \n 7.656000000000001], [455, 456, 0, 0.0011365289256198346, \n 0.00300586139962416, 495.0, 495.0, 495.0, 0, 2, 1, -360, 1.719], [537, \n 456, 0, 0.039058512396694216, 0.025825228046024003, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 29.538], [538, 457, 0, 0.027927272727272728, \n 0.0184653265736368, 248.0, 248.0, 248.0, 0, 1, 1, -360, 21.12], [153, \n 457, 0, 0.030093223140495867, 0.019897438549384, 248.0, 248.0, 248.0, 0,\n 1, 1, -360, 22.758000000000003], [176, 457, 0, 0.004579173553719009, \n 0.0030277190305137603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 3.463], [524,\n 459, 0, 0.004318677685950414, 0.011421923596476799, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 6.532], [458, 459, 0, 0.001993388429752066, \n 0.0052720605700488, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.015], [134, \n 459, 0, 0.011813553719008265, 0.031244171895617998, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 17.868], [460, 461, 0, 6.611570247933885e-05, \n 0.000174861047098892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.1], [150, \n 461, 0, 0.008018512396694214, 0.021207147792120403, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 12.128], [149, 461, 0, 0.005586115702479339, \n 0.0147740098693748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.449], [521, \n 463, 0, 0.014348429752066114, 0.009487086110365599, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 10.850999999999999], [462, 463, 0, 0.007197355371900825,\n 0.0047588433967958406, 248.0, 248.0, 248.0, 0, 1, 1, -360, 5.443], [538,\n 463, 0, 0.012211570247933883, 0.0080742088497664, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 9.235], [110, 464, 0, 0.0025753116343490306, \n 0.0924473799817492, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.875], [90,\n 464, 0, 0.007328947368421053, 0.26309125979076, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 42.332], [165, 464, 0, 0.002152527700831025, \n 0.0772704722900764, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 
12.433], [\n 458, 465, 0, 0.002003305785123967, 0.0052982897270776, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 3.03], [134, 465, 0, 0.011838677685950413, \n 0.031310619093534, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.906], [524, \n 465, 0, 0.004293553719008264, 0.0113554763986092, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 6.494], [466, 467, 0, 0.0023509349030470914, \n 0.084392804892244, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.579], [110,\n 467, 0, 0.0025337603878116343, 0.09095579200221118, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 14.635], [165, 467, 0, 0.0022891274238227145, \n 0.08217406777274441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 13.222000000000001], [468, 469, 0, 0.0005269421487603305, \n 0.0013936425453786, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.797], [541, \n 469, 0, 0.022390743801652895, 0.05921844221026801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 33.866], [490, 469, 0, 0.028243305785123966, \n 0.07469714209944801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.718], [263,\n 471, 0, 0.0371900826446281, 0.0245898347482832, 248.0, 248.0, 248.0, 0,\n 1, 1, -360, 28.125], [470, 471, 0, 0.001570909090909091, \n 0.0010386746197682802, 248.0, 248.0, 248.0, 0, 1, 1, -360, 1.188], [534,\n 471, 0, 0.024497190082644622, 0.0161973787927468, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 18.526], [136, 472, 0, 0.0007079293628808865, \n 0.025412930201351602, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 4.0889999999999995], [110, 472, 0, 0.00019511772853185596, \n 0.0070042485539216805, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.127], [\n 251, 472, 0, 4.207063711911357e-05, 0.00151023282928764, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 0.243], [226, 474, 0, 0.017639669421487602, \n 0.011663231841509601, 248.0, 248.0, 248.0, 0, 1, 1, -360, 13.34], [473,\n 474, 0, 0.003467107438016529, 0.00916971330986216, 495.0, 495.0, 495.0,\n 0, 2, 1, -360, 5.244], [257, 474, 0, 0.020264462809917356, \n 0.053594910935781594, 495.0, 495.0, 495.0, 0, 2, 1, -360, 30.65], [6, \n 474, 0, 0.08066247933884299, 
0.05333349367016, 248.0, 248.0, 248.0, 0, \n 1, 1, -360, 61.001000000000005], [299, 475, 0, 0.013238227146814403, \n 0.47521993028123993, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 76.464], [3,\n 475, 0, 0.0002794321329639889, 0.010030929162389441, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 1.614], [210, 475, 0, 0.0001481994459833795, \n 0.00531999712702368, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.856], [\n 297, 476, 0, 0.0193500826446281, 0.05117658265464801, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 29.267], [296, 476, 0, 0.005596694214876033, \n 0.014801987636898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.465], [295, \n 476, 0, 0.0009474380165289256, 0.00250575880492432, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 1.433], [313, 478, 0, 0.008696849030470914, \n 0.31219557906752804, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 50.233000000000004], [477, 478, 0, 1.5235457063711912e-05, \n 0.0005469155924977479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 0.08800000000000001], [245, 478, 0, 0.005264542936288089, \n 0.188984197007248, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.408], [479,\n 481, 0, 0.028420495867768597, 0.07516576970575199, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 42.986000000000004], [565, 481, 0, 0.024842314049586776,\n 0.065702289836964, 495.0, 495.0, 495.0, 0, 1, 1, -360, 37.574], [480, \n 481, 0, 7.735537190082645e-05, 0.000204587425105844, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.11699999999999999], [415, 482, 0, \n 0.011021814404432133, 0.0989140353680364, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 31.831], [56, 482, 0, 0.002630886426592798, 0.0236105947261788, \n 856.0, 856.0, 856.0, 0, 1, 1, -360, 7.598], [409, 482, 0, \n 0.0007635041551246537, 0.0068519822810072005, 856.0, 856.0, 856.0, 0, 1,\n 1, -360, 2.205], [483, 484, 0, 9.037396121883656e-05, \n 0.000811050963873968, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.261], [3, \n 484, 0, 0.010022160664819944, 0.08994275516621358, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 28.944000000000003], [301, 484, 0, 0.00966516620498615, \n 
0.08673894848517479, 856.0, 856.0, 856.0, 0, 1, 1, -360, 27.913], [233,\n 485, 0, 0.01410180055401662, 0.1265550251138996, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 40.726], [392, 485, 0, 0.00914819944598338, \n 0.0820994883738036, 856.0, 856.0, 856.0, 0, 1, 1, -360, 26.42], [391, \n 485, 0, 8.518005540166207e-05, 0.000764438839512864, 856.0, 856.0, \n 856.0, 0, 1, 1, -360, 0.24600000000000002], [579, 488, 0, \n 0.004636473829194215, 0.11036180126571601, 1486.0, 1486.0, 1486.0, 0, 1,\n 1, -360, 21.038], [486, 488, 0, 0.00016969696969690082, \n 0.00403929018798184, 1486.0, 1486.0, 1486.0, 0, 1, 1, -360, 0.77], [487,\n 488, 0, 0.00014567493112954544, 0.00346749456396992, 1486.0, 1486.0, \n 1486.0, 0, 1, 1, -360, 0.6609999999999999], [270, 489, 0, \n 0.0001745152354570637, 0.0062646695140596, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 1.008], [331, 489, 0, 0.003002943213296399, \n 0.10779830627119119, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 17.345], [\n 396, 489, 0, 0.01124792243767313, 0.40377286606072005, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 64.968], [519, 253, 0, 0.013353485337561985, \n 0.141267767926912, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 40.394293146100004], [382, 349, 0, 0.009091647380263157, \n 1.30547149138788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 105.02671053600001], [349, 351, 0, 0.0005858117819605263, \n 0.0841168325920224, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 6.76729770521], [459, 465, 0, 1.578788789911157e-05, \n 0.00016702153987596, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.047758360894800005], [549, 550, 0, 3.680432518409091e-05, \n 0.000389356391787088, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.111333083682], [550, 551, 0, 5.755645674710744e-05, \n 0.0006088951287918401, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.17410828165999997], [194, 195, 0, 1.7560672583171745e-05, \n 0.00252154053805592, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.202860889681], [247, 248, 0, 2.1755213937811637e-05, \n 0.0031238355819477198, 3423.0, 3423.0, 3423.0, 0, 1, 
1, -360, \n 0.25131623141], [2, 294, 0, 2.3531392658518004e-05, 0.003378877444715, \n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.271834647991], [549, 551, 0, \n 9.265809538429751e-05, 0.0009802386406577602, 991.0, 991.0, 991.0, 0, 1,\n 1, -360, 0.28029073853799996], [54, 365, 0, 2.573045189134349e-05, \n 0.00369464080598484, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.297238180249], [131, 265, 0, 2.7616389041343487e-05, \n 0.00396544290388756, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.319024526206], [91, 92, 0, 2.8945628197853184e-05, \n 0.0041563086239824396, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.33437989694200004], [247, 249, 0, 3.098840072160664e-05, \n 0.00444963074500788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.357978005136], [186, 191, 0, 3.1591661821191135e-05, \n 0.00453625312865552, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.36494687735799997], [129, 173, 0, 3.202671277479225e-05, \n 0.00459872218332188, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.369972585975], [96, 202, 0, 3.5971247867797784e-05, \n 0.00516511877739804, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.415539855369], [53, 320, 0, 3.784209581142659e-05, \n 0.00543375421308236, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.437151890814], [24, 396, 0, 4.144748602818559e-05, \n 0.005951452925597279, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.47880135859800005], [133, 156, 0, 4.431754564044322e-05, \n 0.0063635653674415605, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.511956287238], [442, 452, 0, 4.483572190450138e-05, \n 0.006437970402313801, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.517942259441], [445, 452, 0, 4.490753296371191e-05, \n 0.0064482817668697215, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.518771820797], [247, 250, 0, 4.594910768732687e-05, \n 0.00659784169268824, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.530804092004], [187, 195, 0, 4.755760376239612e-05, \n 0.006828805970367921, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.549385438663], [216, 236, 0, 
5.03353075283241e-05, \n 0.00722765701751724, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.581473472567], [244, 389, 0, 5.1633313019736845e-05, \n 0.007414037889302401, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.596468032004], [394, 406, 0, 5.6346419007686985e-05, \n 0.008090793734075721, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.650913832377], [442, 445, 0, 6.388070648310249e-05, \n 0.00917264360085512, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.737949921293], [442, 444, 0, 6.584378362735456e-05, \n 0.00945452224616264, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.760627388463], [198, 472, 0, 8.37554210498615e-05, 0.0120264578966664,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.967542623967], [464, 467, 0, \n 8.460287496468144e-05, 0.01214814397621276, 3423.0, 3423.0, 3423.0, 0, \n 1, 1, -360, 0.977332411594], [198, 251, 0, 8.83613182396122e-05, \n 0.012687819608389479, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.0207499483], [112, 143, 0, 9.049653833033241e-05, \n 0.012994416294241841, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.04541601079], [2, 490, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [5, 491, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, \n 1, -360, 360], [10, 492, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [12, 493, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [13, 494, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [15, 495, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [18, 496, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [20, 497, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [22, 498, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [24, 499, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [26, 500, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [30, 501, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [32, 502, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 
1.0, 0,\n 1, -360, 360], [37, 503, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [42, 504, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [46, 505, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [52, 506, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [56, 507, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [61, 508, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [68, 509, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [69, 510, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [74, 511, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [78, 512, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [86, 513, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [87, 514, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [94, 515, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [95, 516, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [96, 517, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [99, 518, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [100, 519, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [104, 520, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [105, 521, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [106, 522, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [107, 523, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [117, 524, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [120, 525, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [123, 526, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [124, 527, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [125, 528, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [128, 529, 0, 0.005, 0.0, 2000.0, 
2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [129, 530, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [138, 531, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [143, 532, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [156, 533, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [157, 534, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [159, 535, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [160, 536, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [165, 537, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [184, 538, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [191, 539, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [195, 540, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [201, 541, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [220, 542, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [231, 543, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [232, 544, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [233, 545, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [236, 546, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [245, 547, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [246, 548, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [248, 549, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [249, 550, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [250, 551, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [259, 552, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [261, 553, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [262, 554, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [265, 555, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [270, 
556, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [277, 557, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [279, 558, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [280, 559, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [290, 560, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [301, 561, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [305, 562, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [306, 563, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [310, 564, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [313, 565, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [315, 566, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [320, 567, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [330, 568, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [332, 569, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [334, 570, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [336, 571, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [349, 572, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [351, 573, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [358, 574, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [360, 575, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [380, 576, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [382, 577, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [383, 578, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [389, 579, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [401, 580, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [402, 581, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [409, 582, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 
0,\n 1, -360, 360], [415, 583, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [444, 584, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [452, 585, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360]]'], {}), '([[586, 1, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [589, \n 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [590, 108, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [593, 112, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [594, 114, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [595, 115, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [597, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [598, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [599, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [600, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [601,\n 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [602, 121, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [603, 526, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [607, 127, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [608, 127, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [609, 529, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [610, 530, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [612, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [613, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [614,\n 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [616, 132, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [617, 133, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [618, 133, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [619, 134, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [621, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [623, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [624, 14, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 
1, -360, 360],\n [628, 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [629, \n 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [631, 145, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [632, 145, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [637, 148, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [638, 149, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [639, 150, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [640, 153, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [641, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [642, 533, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [643,\n 534, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [646, 536, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [647, 536, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [650, 166, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [652, 167, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [655, 170, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [657, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [658, 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [661, 177, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [662,\n 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [663, 178, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [666, 180, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [668, 183, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [670, 183, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [672, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [675, 19, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [676, 19, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [678, 194, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [679, \n 196, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [681, 197, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [683, 200, 0, 1e-05, 0,\n 
9999, 9999, 9999, 0, 0, 1, -360, 360], [687, 202, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [689, 204, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [691, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [693, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [694, 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [695, 210, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [696, \n 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [697, 211, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [698, 212, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [701, 215, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [702, 215, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [704, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [705, 217, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [707, 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [708, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [711,\n 224, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [713, 225, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [714, 225, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [716, 226, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [717, 227, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [719, 229, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [722, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [723, 235, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [724, 238, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [725,\n 239, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [727, 243, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [728, 244, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [730, 547, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [731, 548, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [732, 247, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], 
[733, 549, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [735, 253, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [737, 256, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [738,\n 258, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [739, 264, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [741, 264, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [742, 264, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [743, 500, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [745, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [746, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [747, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [748, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [749,\n 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [750, 557, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [753, 28, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [758, 286, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [760, 287, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [761, 288, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [762, 289, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [763, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [765, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [767,\n 292, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [769, 293, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [771, 297, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [772, 3, 0, 1e-05, 0, 9999, 9999,\n 9999, 0, 0, 1, -360, 360], [774, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [776, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [777, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [778, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [781,\n 303, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [784, 563, 0, \n 1e-05, 0, 9999, 9999, 9999, 
0, 0, 1, -360, 360], [785, 501, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [787, 308, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [788, 311, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [789, 565, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [790, 314, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [791, 314, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [792, 316, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [795,\n 319, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [798, 324, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [800, 326, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [801, 327, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [802, 327, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [805, 328, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [806, 328, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [808, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [809, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [810,\n 568, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [811, 568, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [814, 570, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [815, 335, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [816, 335, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [817, 571, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [818, 34, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [821, 338, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [822, 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [825,\n 339, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [826, 339, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [829, 345, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [830, 345, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [833, 348, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [834, 572, 0, 
1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [835, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [836, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [837, 350, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [839,\n 350, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [840, 573, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [841, 573, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [842, 352, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [843, 352, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [844, 352, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [845, 356, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [847, 36, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [848, 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [849, \n 574, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [850, 574, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [851, 575, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [852, 361, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [853, 362, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [854, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [855, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [856, 363, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [857, 365, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [858,\n 368, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [859, 368, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [860, 371, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [862, 372, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [863, 374, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [864, 374, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [865, 375, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [867, 376, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [869, 503, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, 
-360, 360], [870,\n 503, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [872, 378, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [873, 576, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [874, 576, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [875, 381, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [877, 578, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [881, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [882, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [883, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [886,\n 394, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [889, 397, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [890, 40, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [893, 400, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [894, 400, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [895, 580, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [896, 581, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [898, 403, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [900, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [902,\n 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [903, 406, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [905, 413, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [907, 583, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [909, 417, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [911, 419, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [913, 422, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [914, 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [915, 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [916,\n 43, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [917, 43, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [918, 424, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [919, 427, 0, 1e-05, 0, 9999, 
\n 9999, 9999, 0, 0, 1, -360, 360], [920, 428, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [921, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [922, 429, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [923, 432, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [925, 44, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [928, \n 435, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [931, 439, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [934, 45, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [935, 45, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [936, 445, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [937, 447, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [939, 450, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [940, 451, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [942, 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [943,\n 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [944, 458, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [945, 459, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [946, 459, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [948, 462, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [950, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [951, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [952, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [956, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [957, \n 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [958, 478, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [959, 478, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [960, 479, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [963, 481, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [965, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, \n 0, 1, -360, 360], [966, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [967, 
49, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [968, 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [969, \n 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [971, 51, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [973, 506, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [976, 58, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [977, 59, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [978, 491, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [980, 508, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [981, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [982, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [983, 62,\n 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [984, 63, 0, 1e-05,\n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [985, 63, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [986, 64, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [987, 65, 0, 1e-05, 0, 9999, 9999, 9999, 0, \n 0, 1, -360, 360], [988, 66, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [990, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [993, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [994, 67,\n 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [995, 509, 0, 1e-05,\n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [996, 510, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [997, 510, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [998, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, \n 0, 1, -360, 360], [999, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1000, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1002, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1003,\n 72, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1006, 511, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1007, 511, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1008, 75, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 
360], [1010, 79, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1011, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1012, 81, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1014, 83, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1018, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1019, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1023, \n 515, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1025, 518, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1026, 518, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1028, 221, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1029, 268, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1030, 269, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1031, 498, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1032, 1, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [1033, 3, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1034, 4,\n 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1035, 6, 0, 1e-05,\n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1036, 7, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1037, 8, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1038, 9, 0, 1e-05, 0, 9999, 9999, 9999, 0, \n 0, 1, -360, 360], [1039, 11, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1041, 16, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1042, 17, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1044,\n 21, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1046, 25, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1047, 27, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1048, 28, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1049, 29, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1050, 31, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1051, 33, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1052, 34, 0, 1e-05, 0, 9999, 
9999, 9999, 0, 0, 1, -360, 360\n ], [1053, 35, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1054,\n 36, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1055, 38, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1056, 39, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1057, 40, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1058, 41, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1059, 43, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1060, 44, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1061, 45, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1062, 47, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1063,\n 48, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1064, 49, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1065, 50, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1066, 51, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1067, 53, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1068, 54, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1069, 55, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1070, 57, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1071, 58, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1072,\n 59, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1073, 60, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1074, 62, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1075, 63, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1077, 65, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1078, 66, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1079, 67, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1080, 70, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1081, 71, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1082,\n 72, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1083, 73, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], 
[1084, 75, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1085, 76, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1086, 77, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1087, 79, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1088, 80, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1089, 81, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1090, 82, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1091,\n 83, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1092, 84, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1093, 85, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1094, 88, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1095, 89, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1096, 90, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1097, 91, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1098, 92, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1099, 93, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1100,\n 97, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1101, 98, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1102, 101, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1103, 102, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1104, 103, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1105, 108, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1106, 109, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1107, 110, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1108, 111, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1109, 112, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1110, \n 113, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1111, 114, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1112, 115, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1113, 116, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1114, 118, 0, 1e-05, 0, 
9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1115, 119, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1116, 121, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1117, 122, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1118, 126, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1119, 127, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1120, \n 130, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1121, 131, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1122, 132, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1123, 133, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1124, 134, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1125, 135, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1126, 136, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1127, 137, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1128, 139, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1129, 140, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1130, \n 141, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1131, 142, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1132, 144, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1133, 145, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1134, 146, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1135, 147, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1136, 148, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1137, 149, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1138, 150, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1139, 151, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1140, \n 152, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1141, 153, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1142, 154, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1143, 155, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1144, 158, 0, 1e-05, 0, 9999, 
9999, \n 9999, 0, 0, 1, -360, 360], [1145, 161, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1146, 162, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1147, 163, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1148, 164, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1149, 166, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1150, \n 167, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1151, 168, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1152, 169, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1153, 170, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1154, 171, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1155, 172, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1156, 173, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1157, 174, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1158, 175, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1159, 176, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1160, \n 177, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1161, 178, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1162, 179, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1164, 181, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1166, 183, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1167, 185, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1168, 186, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1169, 187, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1170, 188, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1171, 189, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1172, \n 190, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1173, 192, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1174, 193, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1175, 194, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1176, 196, 0, 1e-05, 0, 9999, 9999, 
\n 9999, 0, 0, 1, -360, 360], [1177, 197, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1178, 198, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1179, 199, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1180, 200, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1181, 202, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1182, \n 203, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1183, 204, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1184, 205, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1185, 206, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1186, 207, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1187, 208, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1188, 209, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1189, 210, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1190, 211, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1191, 212, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1192, \n 213, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1193, 214, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1194, 215, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1195, 216, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1196, 217, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1197, 218, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1198, 219, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1199, 221, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1200, 222, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1201, 223, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1202, \n 224, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1203, 225, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1204, 226, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1205, 227, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1206, 228, 0, 1e-05, 0, 9999, 9999, \n 
9999, 0, 0, 1, -360, 360], [1207, 229, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1208, 230, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1209, 234, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1210, 235, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1211, 237, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1212, \n 238, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1213, 239, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1214, 240, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1215, 241, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1216, 242, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1217, 243, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1218, 244, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1219, 247, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1220, 251, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1221, 252, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1222, \n 253, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1223, 254, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1224, 255, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1225, 256, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1226, 257, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1227, 258, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1228, 260, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1229, 263, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1230, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1231, 266, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1232, \n 267, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1233, 268, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1234, 269, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1235, 271, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1236, 272, 0, 1e-05, 0, 9999, 9999, \n 9999, 
0, 0, 1, -360, 360], [1237, 273, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1238, 274, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1239, 275, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1240, 276, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1241, 278, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1242, \n 281, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1243, 282, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1244, 283, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1245, 284, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1246, 285, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1247, 286, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1248, 287, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1249, 288, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1250, 289, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1251, 291, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1252, \n 292, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1253, 293, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1254, 294, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1255, 295, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1256, 296, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1257, 297, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1258, 298, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1259, 299, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1260, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1261, 302, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1262, \n 303, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1263, 304, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1264, 307, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1265, 308, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1266, 309, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 
1, -360, 360], [1267, 311, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1270, 316, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1271, 317, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1272, 318, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1273, 319, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1274, \n 321, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1275, 322, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1276, 323, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1277, 324, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1278, 325, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1279, 326, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1280, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1282, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1283, 331, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1284, 333, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1285, \n 335, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1286, 337, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1287, 338, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1288, 339, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1289, 340, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1290, 341, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1291, 342, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1292, 343, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1293, 344, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1294, 345, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1295, \n 346, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1296, 347, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1297, 348, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1300, 353, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1301, 354, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, 
-360, 360], [1302, 355, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1303, 356, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1304, 357, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1305, 359, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1306, 361, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1307, \n 362, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1308, 363, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1309, 364, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1310, 365, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1311, 366, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1312, 367, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1313, 368, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1314, 369, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1315, 370, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1316, 371, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1317, \n 372, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1318, 373, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1319, 374, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1320, 375, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1321, 376, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1322, 377, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1323, 378, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1324, 379, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1325, 381, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1326, 384, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1327, \n 385, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1328, 386, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1329, 387, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1330, 388, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1331, 390, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 
360], [1332, 391, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1333, 392, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1334, 393, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1336, 395, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1337, 396, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1338, \n 397, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1339, 398, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1340, 399, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1341, 400, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1342, 403, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1343, 404, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1344, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1345, 406, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1346, 407, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1348, 410, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1349, \n 411, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1350, 412, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1351, 413, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1352, 414, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1355, 418, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1356, 419, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1357, 420, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1358, 421, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1359, 422, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1360, 423, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1361, \n 424, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1362, 425, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1363, 426, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1364, 427, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1365, 428, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], 
[1366, 429, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1367, 430, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1368, 431, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1369, 432, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1370, 433, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1371, \n 434, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1372, 435, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1373, 436, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1374, 437, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1375, 438, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1376, 439, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1377, 440, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1378, 441, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1379, 442, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1380, 443, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1381, \n 445, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1382, 446, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1383, 447, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1384, 448, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1385, 449, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1386, 450, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1387, 451, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1388, 453, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1389, 454, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1390, 455, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1391, \n 456, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1392, 457, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1393, 458, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1394, 459, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1395, 460, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1396, 
461, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1397, 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1398, 463, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1399, 464, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1400, 465, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1401, \n 466, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1402, 467, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1403, 468, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1404, 469, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1405, 470, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1406, 471, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1407, 472, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1408, 473, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1409, 474, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1410, 475, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1411, \n 476, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1412, 477, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1413, 478, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1414, 479, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1415, 480, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1416, 481, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1417, 482, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1418, 483, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1419, 484, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1421, 486, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1422, \n 487, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1423, 488, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1424, 489, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1425, 490, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1426, 491, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1427, 492, 0, 
1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1428, 493, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1431, 496, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1432, 497, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1433, 498, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1434, \n 499, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1435, 500, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1436, 501, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1437, 502, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1438, 503, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1439, 504, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1440, 505, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1441, 506, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1442, 507, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1443, 508, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1444, \n 509, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1445, 510, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1446, 511, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1447, 512, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1448, 513, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1449, 514, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1450, 515, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1451, 516, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1452, 517, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1453, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1454, \n 519, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1455, 520, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1456, 521, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1457, 522, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1458, 523, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1459, 524, 0, 1e-05, 
0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1460, 525, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1461, 526, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1462, 527, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1463, 528, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1464, \n 529, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1465, 530, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1466, 531, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1467, 532, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1468, 533, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1469, 534, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1470, 535, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1471, 536, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1472, 537, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1473, 538, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1474, \n 539, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1475, 540, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1476, 541, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1477, 542, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1479, 544, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1480, 545, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1481, 546, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1482, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1483, 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1484, 549, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1485, \n 550, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1486, 551, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1487, 552, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1488, 554, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1489, 555, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1490, 556, 0, 1e-05, 0, 
9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1491, 557, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1492, 558, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1493, 559, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1494, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1495, \n 561, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1497, 563, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1498, 564, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1500, 566, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1501, 567, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1502, 568, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1503, 569, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1504, 570, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1505, 571, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1506, 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1507, \n 573, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1508, 574, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1510, 576, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1511, 577, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1512, 578, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1513, 579, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1514, 580, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1516, 582, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1517, 583, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1518, 584, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1519, \n 585, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1520, 1, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1521, 3, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1522, 4, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1523, 6, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1524, 7, 0, 1e-05, 0, 9999, 9999, 
9999, 0, \n 0, 1, -360, 360], [1525, 8, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1526, 9, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360],\n [1527, 11, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1528, \n 14, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1529, 16, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1530, 17, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1531, 19, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1532, 21, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1534, 25, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1535, 27, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1536, 28, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1537, 29, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1538,\n 31, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1539, 33, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1540, 34, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1541, 35, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1542, 36, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1543, 38, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1544, 39, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1545, 40, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1546, 41, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1547,\n 43, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1548, 44, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1549, 45, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1550, 47, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1551, 48, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1552, 49, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1553, 50, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1554, 51, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1555, 53, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1556,\n 54, 
0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1557, 55, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1558, 57, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1559, 58, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1560, 59, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1561, 60, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1562, 62, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1563, 63, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1564, 64, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1565,\n 65, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1566, 66, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1567, 67, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1568, 70, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1569, 71, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1570, 72, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1571, 73, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1572, 75, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1573, 76, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1574,\n 77, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1575, 79, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1576, 80, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1577, 81, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 1, -360, 360], [1578, 82, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1579, 83, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1580, 84, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1581, 85, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360\n ], [1582, 88, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1583,\n 89, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1584, 90, 0, \n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1585, 91, 0, 1e-05, 0,\n 9999, 9999, 9999, 0, 0, 1, -360, 360], [1586, 92, 0, 1e-05, 0, 9999, \n 9999, 9999, 0, 0, 
1, -360, 360], [1587, 93, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1588, 97, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1589, 98, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1590, 101, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1591, 102, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1592, 103, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1593, \n 108, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1594, 109, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1595, 110, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1596, 111, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1597, 112, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1598, 113, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1599, 114, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1600, 115, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1601, 116, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1602, 118, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1603, \n 119, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1604, 121, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1605, 122, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1606, 126, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1607, 127, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1608, 130, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1609, 131, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1610, 132, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1611, 133, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1612, 134, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1613, \n 135, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1614, 136, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1615, 137, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1616, 139, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 
360], [1617, 140, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1618, 141, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1619, 142, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1620, 144, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1621, 145, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1622, 146, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1623, \n 147, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1624, 148, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1625, 149, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1626, 150, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1627, 151, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1628, 152, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1629, 153, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1630, 154, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1631, 155, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1632, 158, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1633, \n 161, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1634, 162, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1635, 163, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1636, 164, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1637, 166, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1638, 167, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1639, 168, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1640, 169, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1641, 170, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1642, 171, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1643, \n 172, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1644, 173, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1645, 174, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1646, 175, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], 
[1647, 176, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1648, 177, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1649, 178, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1650, 179, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1651, 180, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1652, 181, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1653, \n 182, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1654, 183, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1655, 185, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1656, 186, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1657, 187, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1658, 188, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1659, 189, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1660, 190, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1661, 192, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1662, 193, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1663, \n 194, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1664, 196, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1665, 197, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1666, 198, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1667, 199, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1668, 200, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1669, 202, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1670, 203, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1671, 204, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1672, 205, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1673, \n 206, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1674, 207, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1675, 208, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1676, 209, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1677, 
210, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1678, 211, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1679, 212, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1680, 213, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1681, 214, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1682, 215, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1683, \n 216, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1684, 217, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1685, 218, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1686, 219, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1687, 221, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1688, 222, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1689, 223, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1690, 224, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1691, 225, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1692, 226, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1693, \n 227, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1694, 228, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1695, 229, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1696, 230, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1697, 234, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1698, 235, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1699, 237, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1700, 238, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1701, 239, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1702, 240, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1703, \n 241, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1704, 242, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1705, 243, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1706, 244, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1707, 247, 0, 
1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1708, 251, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1709, 252, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1710, 253, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1711, 254, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1712, 255, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1713, \n 256, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1714, 257, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1715, 258, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1716, 260, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1717, 263, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1718, 264, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1719, 266, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1720, 267, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1721, 268, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1722, 269, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1723, \n 271, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1724, 272, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1725, 273, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1726, 274, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1727, 275, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1728, 276, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1729, 278, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1730, 281, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1731, 282, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1732, 283, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1733, \n 284, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1734, 285, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1735, 286, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1736, 287, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1737, 288, 0, 1e-05, 
0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1738, 289, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1739, 291, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1740, 292, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1741, 293, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1742, 294, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1743, \n 295, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1744, 296, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1745, 297, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1746, 298, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1747, 299, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1748, 300, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1749, 302, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1750, 303, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1751, 304, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1752, 307, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1753, \n 308, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1754, 309, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1755, 311, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1756, 312, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1757, 314, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1758, 316, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1759, 317, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1760, 318, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1761, 319, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1762, 321, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1763, \n 322, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1764, 323, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1765, 324, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1766, 325, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1767, 326, 0, 1e-05, 0, 
9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1768, 327, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1769, 328, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1770, 329, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1771, 331, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1772, 333, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1773, \n 335, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1774, 337, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1775, 338, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1776, 339, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1777, 340, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1778, 341, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1779, 342, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1780, 343, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1781, 344, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1782, 345, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1783, \n 346, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1784, 347, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1785, 348, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1786, 350, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1787, 352, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1788, 353, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1789, 354, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1790, 355, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1791, 356, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1792, 357, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1793, \n 359, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1794, 361, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1795, 362, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1796, 363, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1797, 364, 0, 1e-05, 0, 9999, 
9999, \n 9999, 0, 0, 1, -360, 360], [1798, 365, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1799, 366, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1800, 367, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1801, 368, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1802, 369, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1803, \n 370, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1804, 371, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1805, 372, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1806, 373, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1807, 374, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1808, 375, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1809, 376, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1810, 377, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1811, 378, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1812, 379, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1813, \n 381, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1814, 384, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1815, 385, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1816, 386, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1817, 387, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1818, 388, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1819, 390, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1820, 391, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1821, 392, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1822, 393, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1823, \n 394, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1824, 395, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1825, 396, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1826, 397, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1827, 398, 0, 1e-05, 0, 9999, 9999, 
\n 9999, 0, 0, 1, -360, 360], [1828, 399, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1829, 400, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1830, 403, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1831, 404, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1832, 405, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1833, \n 406, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1834, 407, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1836, 410, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1837, 411, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1838, 412, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1839, 413, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1840, 414, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1841, 416, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1842, 417, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1843, 418, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1844, \n 419, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1845, 420, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1846, 421, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1847, 422, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1848, 423, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1849, 424, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1850, 425, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1851, 426, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1852, 427, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1853, 428, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1854, \n 429, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1855, 430, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1856, 431, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1857, 432, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1858, 433, 0, 1e-05, 0, 9999, 9999, \n 
9999, 0, 0, 1, -360, 360], [1860, 435, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1861, 436, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1862, 437, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1863, 438, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1864, 439, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1865, \n 440, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1866, 441, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1867, 442, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1868, 443, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1869, 445, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1870, 446, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1871, 447, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1872, 448, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1873, 449, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1874, 450, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1875, \n 451, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1876, 453, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1877, 454, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1878, 455, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1879, 456, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1880, 457, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1881, 458, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1882, 459, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1883, 460, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1884, 461, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1885, \n 462, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1886, 463, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1887, 464, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1888, 465, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1889, 466, 0, 1e-05, 0, 9999, 9999, \n 9999, 
0, 0, 1, -360, 360], [1890, 467, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1891, 468, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1892, 469, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1893, 470, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1894, 471, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1895, \n 472, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1896, 473, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1897, 474, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1898, 475, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1899, 476, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1900, 477, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1901, 478, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1902, 479, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1903, 480, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1904, 481, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1905, \n 482, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1906, 483, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1907, 484, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1908, 485, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1909, 486, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1910, 487, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1911, 488, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1912, 489, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1913, 490, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1914, 491, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1915, \n 492, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1916, 493, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1917, 494, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1918, 495, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1919, 496, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 
1, -360, 360], [1920, 497, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1921, 498, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1922, 499, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1923, 500, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1924, 501, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1925, \n 502, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1926, 503, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1927, 504, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1928, 505, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1929, 506, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1930, 507, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1931, 508, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1932, 509, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1933, 510, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1934, 511, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1935, \n 512, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1936, 513, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1937, 514, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1938, 515, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1939, 516, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1940, 517, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1941, 518, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1942, 519, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1943, 520, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1944, 521, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1945, \n 522, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1946, 523, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1947, 524, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1948, 525, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1949, 526, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, 
-360, 360], [1950, 527, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1951, 528, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1952, 529, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1953, 530, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1954, 531, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1955, \n 532, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1956, 533, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1957, 534, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1958, 535, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1959, 536, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1960, 537, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1961, 538, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1962, 539, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1963, 540, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1964, 541, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1965, \n 542, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1966, 543, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1967, 544, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1968, 545, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1969, 546, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1970, 547, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1971, 548, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1972, 549, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1973, 550, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1974, 551, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1975, \n 552, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1976, 553, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1977, 554, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1978, 555, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1979, 556, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 
360], [1980, 557, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1981, 558, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1982, 559, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1983, 560, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1984, 561, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1985, \n 562, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1986, 563, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1987, 564, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1988, 565, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1989, 566, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [1990, 567, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [1991, 568, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [1992, 569, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [1993, 570, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 1994, 571, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1995, \n 572, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1996, 573, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1997, 574, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [1998, 575, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1999, 576, 0, 1e-05, 0, 9999, 9999, \n 9999, 0, 0, 1, -360, 360], [2000, 577, 0, 1e-05, 0, 9999, 9999, 9999, 0,\n 0, 1, -360, 360], [2001, 578, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -\n 360, 360], [2002, 579, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, \n 360], [2003, 580, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [\n 2004, 581, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [2005, \n 582, 0, 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [2006, 583, 0,\n 1e-05, 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [2007, 584, 0, 1e-05, \n 0, 9999, 9999, 9999, 0, 0, 1, -360, 360], [2008, 585, 0, 1e-05, 0, 9999,\n 9999, 9999, 0, 0, 1, -360, 360], [1, 490, 0, 0.01433884297520661, \n 0.151691958358336, 991.0, 991.0, 
991.0, 0, 2, 1, -360, 43.375], [3, 4, \n 0, 0.006291637811634348, 0.903417549506624, 3423.0, 3423.0, 3423.0, 0, \n 2, 1, -360, 72.681], [491, 6, 0, 0.011200661157024791, \n 0.118492839955776, 991.0, 991.0, 991.0, 0, 2, 1, -360, 33.882], [7, 5, \n 0, 0.005794840720221606, 0.20802058859584005, 1711.0, 1711.0, 1711.0, 0,\n 1, 1, -360, 33.471], [8, 9, 0, 0.0024379328254847646, 0.350063268897336,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 28.163], [492, 11, 0, \n 0.018224793388429753, 0.0482004476327704, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 27.565], [11, 493, 0, 0.030286942148760328, 0.08010209706571599, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 45.809], [492, 493, 0, \n 0.04521652892561983, 0.11958747011094399, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 68.39], [494, 14, 0, 0.012990743801652892, 0.137430291356512, \n 991.0, 991.0, 991.0, 0, 2, 1, -360, 39.297], [13, 15, 0, \n 0.007681959833795014, 0.27576354266704156, 1711.0, 1711.0, 1711.0, 0, 2,\n 1, -360, 44.371], [16, 5, 0, 0.006275623268698061, 0.22527950450957998,\n 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 36.248000000000005], [17, 18, 0,\n 0.04623522622347646, 0.9335989000302801, 1283.0, 1283.0, 1283.0, 0, 1, \n 1, -360, 200.291], [17, 12, 0, 0.0056020313942728535, 0.113118303398186,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.268], [14, 495, 0, \n 0.0017957024793388433, 0.018996904156819597, 991.0, 991.0, 991.0, 0, 1,\n 1, -360, 5.432], [494, 19, 0, 0.010246611570247935, 0.10839986031771602,\n 991.0, 991.0, 991.0, 0, 1, 1, -360, 30.996], [20, 21, 0, \n 0.005415685595567867, 0.19440984828307922, 1711.0, 1711.0, 1711.0, 0, 2,\n 1, -360, 31.281], [20, 22, 0, 0.0049706544321329645, 0.713737278110032,\n 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 57.42100000000001], [497, 23, 0,\n 0.002190413223140496, 0.005793146490362, 495.0, 495.0, 495.0, 0, 1, 1, \n -360, 3.313], [23, 499, 0, 0.020799669421487598, 0.22004164444829602, \n 991.0, 991.0, 991.0, 0, 1, 1, -360, 62.919], [25, 26, 0, \n 0.00141845567867036, 0.050919084651523595, 
1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 8.193], [25, 22, 0, 0.0035578254847645433, 0.0319293051869808,\n 856.0, 856.0, 856.0, 0, 1, 1, -360, 10.275], [23, 27, 0, \n 0.027738181818181818, 0.073361203699828, 495.0, 495.0, 495.0, 0, 1, 1, \n -360, 41.95399999999999], [28, 23, 0, 0.012841652892561981, \n 0.0339632611780132, 495.0, 495.0, 495.0, 0, 1, 1, -360, 19.423], [8, 21,\n 0, 0.004948753462603878, 0.17764812836304802, 1711.0, 1711.0, 1711.0, 0,\n 2, 1, -360, 28.584], [9, 29, 0, 0.002212863573407202, \n 0.31774552934092004, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 25.563000000000002], [30, 25, 0, 0.019958795013850415, \n 0.17911796401827998, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 57.641000000000005], [31, 32, 0, 0.0299776084949446, 0.605319030583196,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 129.863], [32, 33, 0, \n 0.016762234533725762, 0.33846927983213604, 1283.0, 1283.0, 1283.0, 0, 1,\n 1, -360, 72.61399999999999], [34, 35, 0, 0.001931900826446281, \n 0.020437759184893597, 991.0, 991.0, 991.0, 0, 2, 1, -360, \n 5.843999999999999], [35, 36, 0, 0.0008730578512396695, \n 0.0092361605077588, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.641], [490, 6,\n 0, 0.049352066115702475, 0.130525028606764, 495.0, 495.0, 495.0, 0, 1, \n 1, -360, 74.645], [37, 10, 0, 0.02404639889196676, 0.485553838251812, \n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 104.169], [10, 38, 0, \n 0.006848799630657894, 0.13829351176534158, 1283.0, 1283.0, 1283.0, 0, 1,\n 1, -360, 29.669], [37, 38, 0, 0.01437834718372576, 1.1613317560186958, \n 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 124.574], [39, 40, 0, \n 0.04521629732222991, 0.913024308337812, 1283.0, 1283.0, 1283.0, 0, 1, 1,\n -360, 195.877], [39, 41, 0, 0.017466989843005543, 0.35269996139852006, \n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 75.667], [42, 41, 0, \n 0.031145429362880884, 0.6289001042979919, 1283.0, 1283.0, 1283.0, 0, 1,\n 1, -360, 134.922], [18, 42, 0, 0.03439750692520776, 0.6945672650962679,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 149.01], 
[492, 43, 0, \n 0.01819173553719008, 0.192452068436848, 991.0, 991.0, 991.0, 0, 2, 1, -\n 360, 55.03], [44, 45, 0, 0.02562314049586777, 0.067767398802972, 495.0,\n 495.0, 495.0, 0, 1, 1, -360, 38.755], [44, 505, 0, 0.006061487603305785,\n 0.0160312607980052, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.168], [46, 12,\n 0, 0.0014741170360110802, 0.2116687641962416, 3423.0, 3423.0, 3423.0, 0,\n 2, 1, -360, 17.029], [47, 48, 0, 0.005344182825484765, \n 0.01199019212302604, 428.0, 428.0, 428.0, 0, 1, 1, -360, \n 7.7170000000000005], [49, 50, 0, 0.0019151662049861494, \n 0.0171874439892256, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 5.531000000000001], [31, 33, 0, 0.013475992613088641, \n 0.27211225959163604, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 58.378], [\n 31, 51, 0, 0.003518611495844875, 0.5052381383693519, 3423.0, 3423.0, \n 3423.0, 0, 1, 1, -360, 40.647], [52, 53, 0, 0.010464421745152355, \n 1.5025884408875438, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 120.885], [\n 52, 54, 0, 0.0076126500461911354, 0.1537174637168, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 32.978], [506, 55, 0, 0.012634380165289257, \n 0.133660287181212, 991.0, 991.0, 991.0, 0, 1, 1, -360, 38.219], [506, \n 507, 0, 0.044157355371900825, 0.11678619613628, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 66.788], [57, 506, 0, 0.004687272727272727, \n 0.049587095736244, 991.0, 991.0, 991.0, 0, 1, 1, -360, 14.179], [57, 58,\n 0, 0.014436363636363634, 0.0381809096340232, 495.0, 495.0, 495.0, 0, 1,\n 1, -360, 21.835], [58, 506, 0, 0.019797685950413223, 0.052360391943288,\n 495.0, 495.0, 495.0, 0, 1, 1, -360, 29.944000000000003], [59, 60, 0, \n 0.019407548476454296, 0.174170863885556, 856.0, 856.0, 856.0, 0, 1, 1, \n -360, 56.049], [508, 62, 0, 0.051111404958677685, 0.03379452026753001, \n 248.0, 248.0, 248.0, 0, 1, 1, -360, 38.653], [30, 61, 0, \n 0.03143698060941828, 0.28212765137935203, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 90.79], [63, 506, 0, 0.027457190082644623, 0.072618044249872, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 
41.528999999999996], [13, 64, 0, \n 0.0014816481994459833, 0.2127501654814608, 3423.0, 3423.0, 3423.0, 0, 2,\n 1, -360, 17.116], [65, 66, 0, 0.03778185595567867, 0.7629053006222161, \n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 163.671], [59, 67, 0, \n 0.0051880193905817175, 0.046559297286324804, 856.0, 856.0, 856.0, 0, 1,\n 1, -360, 14.982999999999999], [61, 67, 0, 0.012931440443213295, \n 0.1160517597580644, 856.0, 856.0, 856.0, 0, 1, 1, -360, 37.346], [68, \n 69, 0, 0.011149584487534626, 0.4002427745096039, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 64.4], [70, 69, 0, 0.009625346260387812, \n 0.345526355460808, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 55.596000000000004], [71, 72, 0, 0.008878635734072021, \n 0.318721276477736, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.283], [73,\n 74, 0, 0.012529547553116345, 0.253001288604392, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 54.278], [37, 75, 0, 0.027459141274238225, \n 0.5544652029066119, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, \n 118.95299999999999], [72, 75, 0, 0.006688711911357341, \n 0.240108375006292, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 38.634], [37,\n 72, 0, 0.036222068328739615, 0.7314094881920841, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 156.914], [76, 77, 0, 0.004683777700831025, \n 0.6725445900750401, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 54.107], [77,\n 51, 0, 0.00363183864265928, 0.5214964473447999, 3423.0, 3423.0, 3423.0,\n 0, 2, 1, -360, 41.955], [73, 72, 0, 0.025475069252077563, \n 0.514402082018968, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, \n 110.35799999999999], [18, 40, 0, 0.01302770083102493, 0.26306018504072,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 56.43600000000001], [492, 45, 0,\n 0.0308703030303719, 0.18370114733484796, 743.0, 743.0, 743.0, 0, 1, 1, \n -360, 70.03699999999999], [10, 74, 0, 0.030167359187465374, \n 0.609150547206812, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 130.685], [45,\n 511, 0, 0.08203371900826446, 0.05424014819960001, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 62.038000000000004], 
[78, 32, 0, 0.013458795013850415, \n 0.48313777647302397, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 77.738], [\n 79, 80, 0, 0.0038086911357340715, 0.1367226831743568, 1711.0, 1711.0, \n 1711.0, 0, 2, 1, -360, 21.999000000000002], [81, 79, 0, \n 0.010767832409972299, 0.3865388099484561, 1711.0, 1711.0, 1711.0, 0, 2,\n 1, -360, 62.195], [34, 82, 0, 0.0015497520661157025, \n 0.00409874294399768, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.344], [83, \n 84, 0, 0.00902611570247934, 0.0238720301499152, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 13.652000000000001], [83, 499, 0, 0.04179570247933885, \n 0.0276350398834796, 248.0, 248.0, 248.0, 0, 1, 1, -360, 31.608], [85, \n 86, 0, 0.00802354570637119, 0.28802563884886, 1711.0, 1711.0, 1711.0, 0,\n 1, 1, -360, 46.343999999999994], [87, 86, 0, 0.01904968836565097, \n 0.683837154069184, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 110.031], [88,\n 89, 0, 0.00380297520661157, 0.010058007429140002, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 5.752000000000001], [90, 86, 0, 0.012097818559556786, \n 0.434282055192244, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 69.877], [91,\n 86, 0, 9.26246537396122e-05, 0.013299992817559201, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 1.07], [86, 92, 0, 0.0001852493074792244, \n 0.0066499964087796005, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.07], [\n 86, 93, 0, 0.008152181440443215, 0.292643346635492, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 47.086999999999996], [94, 86, 0, \n 0.012883829639889197, 0.46249792780547194, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 74.417], [86, 95, 0, 0.010421052631578947, 0.37409026526870803,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 60.192], [513, 517, 0, \n 0.0008733884297520661, 0.0023099144321748, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 1.321], [97, 66, 0, 0.03812777008310249, 0.34217338998058805, \n 856.0, 856.0, 856.0, 0, 1, 1, -360, 110.113], [42, 98, 0, \n 0.003091759002770083, 0.44394630230884, 3423.0, 3423.0, 3423.0, 0, 2, 1,\n -360, 35.716], [99, 100, 0, 0.016371537396121884, 
0.587698093837988, \n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 94.56200000000001], [42, 101, 0,\n 0.008165339335180054, 0.29311568282888, 1711.0, 1711.0, 1711.0, 0, 1, 1,\n -360, 47.163000000000004], [102, 42, 0, 0.012403047091412742, \n 0.44523901189173193, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 71.64], [\n 103, 87, 0, 0.007073060941828254, 0.25390556381756, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 40.854], [104, 103, 0, 0.0028852146814404432, \n 0.1035721403291428, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.665], [\n 105, 87, 0, 0.006406682825484765, 0.22998422159488002, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 37.005], [106, 107, 0, 0.005714219759923823, \n 0.11538365264216799, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 24.754], [\n 108, 107, 0, 0.0025427631578947367, 0.09127896939786201, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 14.687000000000001], [109, 106, 0, \n 0.003030470914127424, 0.10878648330773438, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 17.504], [110, 111, 0, 0.019821849030470913, \n 0.7115558306889919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 114.491], [\n 87, 112, 0, 0.006135907202216068, 0.220264039928212, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 35.441], [113, 87, 0, 0.003981648199445983, \n 0.14293141813921081, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 22.998], [\n 87, 85, 0, 0.011046225761772853, 0.3965324494097, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 63.803000000000004], [110, 114, 0, \n 0.011665339335180056, 0.418757110306188, 1711.0, 1711.0, 1711.0, 0, 1, \n 1, -360, 67.37899999999999], [115, 116, 0, 0.007048925619834712, \n 0.07457124214588401, 991.0, 991.0, 991.0, 0, 1, 1, -360, 21.323], [117,\n 118, 0, 0.005987534626038782, 0.21493782785077598, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 34.584], [117, 119, 0, 0.0038738746537396117, \n 0.5562504472696961, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 44.751000000000005], [117, 120, 0, 0.005886686288088643, \n 0.8452704781039522, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 68.003], [\n 121, 122, 0, 
0.0021170360110803325, 0.0759964075574972, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 12.228], [123, 124, 0, 0.0018386426592797783, \n 0.0660027680945204, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 10.62], [125,\n 126, 0, 0.004941135734072022, 0.17737467056702802, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 28.54], [127, 119, 0, 0.0029027008310249305, \n 0.1041998502705648, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.766], [\n 118, 128, 0, 0.007397160664819945, 0.265539950057812, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 42.726000000000006], [121, 119, 0, \n 0.002552458448753463, 0.0916270065931116, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 14.743], [530, 527, 0, 0.022726611570247933, \n 0.060106736329903994, 495.0, 495.0, 495.0, 0, 1, 1, -360, 34.374], [125,\n 130, 0, 0.002931440443213297, 0.105231531956442, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 16.932000000000002], [125, 123, 0, 0.0019078081717451524,\n 0.2739425623421336, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 22.039], [\n 131, 132, 0, 0.0035744459833795014, 0.12831385593973843, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 20.646], [133, 123, 0, 0.003864439058171745, \n 0.13872389704704202, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 22.320999999999998], [524, 134, 0, 0.008092231404958678, \n 0.08560847143881999, 991.0, 991.0, 991.0, 0, 1, 1, -360, 24.479], [135,\n 136, 0, 0.005242901662049862, 0.1882073282678, 1711.0, 1711.0, 1711.0, \n 0, 1, 1, -360, 30.283], [123, 131, 0, 0.003138331024930748, \n 0.1126583971045252, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 18.127], [\n 117, 128, 0, 0.010800034626038782, 0.38769479063117196, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 62.381], [137, 521, 0, 0.013832396694214875, \n 0.14633421587532003, 991.0, 991.0, 991.0, 0, 2, 1, -360, 41.843], [531,\n 514, 0, 0.0059504132231404955, 0.035409362037522, 743.0, 743.0, 743.0, \n 0, 1, 1, -360, 13.5], [139, 521, 0, 0.021257520661157023, \n 0.05622132386323199, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.152], [140,\n 514, 0, 0.018527603305785127, 
0.04900131122836401, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 28.023000000000003], [522, 141, 0, 0.012168595041322314,\n 0.032183175718526795, 495.0, 495.0, 495.0, 0, 1, 1, -360, 18.405], [142,\n 523, 0, 0.007060165289256198, 0.0746901476577608, 991.0, 991.0, 991.0, \n 0, 2, 1, -360, 21.357], [530, 526, 0, 0.020281652892561983, \n 0.053640374808152, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.676], [140, \n 532, 0, 0.004669090909090909, 0.0123486871461184, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 7.062], [142, 144, 0, 0.006678126721756199, \n 0.0397397958689204, 743.0, 743.0, 743.0, 0, 1, 1, -360, 15.151], [140, \n 522, 0, 0.020450247933884298, 0.05408627047793199, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 30.930999999999997], [145, 146, 0, 0.028527603305785125,\n 0.07544904460236, 495.0, 495.0, 495.0, 0, 1, 1, -360, 43.148], [147, \n 523, 0, 0.02461289256198347, 0.0650955220034416, 495.0, 495.0, 495.0, 0,\n 2, 1, -360, 37.227], [144, 523, 0, 0.008479338842975206, \n 0.0224259292904064, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.825], [139, \n 523, 0, 0.029245619834710742, 0.0193370088934308, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 22.116999999999997], [140, 141, 0, 0.008362975206611572,\n 0.022118173847506, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 12.649000000000001], [528, 526, 0, 0.015389090909090908, \n 0.0407006573227188, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.276], [528, \n 148, 0, 0.014306115702479338, 0.0378364333712244, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 21.638], [149, 150, 0, 0.013604628099173552, \n 0.035981157661543604, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 20.576999999999998], [145, 528, 0, 0.00320595041322314, \n 0.0084790121737992, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.849], [530, \n 151, 0, 0.013144462809917355, 0.0347641247737036, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 19.881], [524, 152, 0, 0.014598347107438016, \n 0.03860931919944, 495.0, 495.0, 495.0, 0, 1, 1, -360, 22.08], [149, 525,\n 0, 0.016897190082644627, 0.17875695122823998, 991.0, 991.0, 991.0, 0, 2,\n 
1, -360, 51.114], [139, 514, 0, 0.007824132231404959, \n 0.020693056313687997, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 11.834000000000001], [126, 120, 0, 0.012780297783933518, \n 0.458781387757004, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 73.819], [530,\n 153, 0, 0.02254545454545455, 0.059627617060924, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 34.1], [528, 147, 0, 0.15786710743801652, 0.104380679149868,\n 248.0, 248.0, 248.0, 0, 1, 1, -360, 119.387], [528, 154, 0, \n 0.006528264462809917, 0.017265779790547203, 495.0, 495.0, 495.0, 0, 2, \n 1, -360, 9.874], [130, 120, 0, 0.01450502077562327, 0.5206947188067639,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 83.781], [528, 155, 0, \n 0.16064132231404957, 0.1062149715341, 248.0, 248.0, 248.0, 0, 1, 1, -\n 360, 121.485], [524, 533, 0, 0.004432727272727273, 0.0468942356109744, \n 991.0, 991.0, 991.0, 0, 1, 1, -360, 13.409], [524, 149, 0, \n 0.0056413223140495865, 0.05968007537478799, 991.0, 991.0, 991.0, 0, 2, \n 1, -360, 17.065], [154, 150, 0, 0.007539173553719007, \n 0.0199394052006688, 495.0, 495.0, 495.0, 0, 2, 1, -360, \n 11.402999999999999], [157, 110, 0, 0.009962084487534625, \n 0.357614433044424, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 57.541000000000004], [119, 158, 0, 0.0002490189289012004, \n 0.08045252664623159, 5134.0, 5134.0, 5134.0, 0, 3, 1, -360, 4.315], [\n 159, 60, 0, 0.010967451523545706, 0.0984261617997728, 856.0, 856.0, \n 856.0, 0, 1, 1, -360, 31.674], [536, 161, 0, 0.021314380165289255, \n 0.056371704363524, 495.0, 495.0, 495.0, 0, 1, 1, -360, 32.238], [115, \n 151, 0, 0.00379404958677686, 0.0401376047510724, 991.0, 991.0, 991.0, 0,\n 1, 1, -360, 11.477], [162, 134, 0, 0.0015910743801652895, \n 0.016832124393744, 991.0, 991.0, 991.0, 0, 2, 1, -360, 4.813], [115, \n 526, 0, 0.0037884297520661154, 0.010019537998747198, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 5.73], [138, 87, 0, 0.0011838642659279777, \n 0.16999131006813442, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 13.675999999999998], [123, 163, 0, 
0.0022778739612188364, \n 0.08177009602828919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.157], [\n 112, 164, 0, 0.0008672957063711912, 0.12453516639176802, 3423.0, 3423.0,\n 3423.0, 0, 2, 1, -360, 10.019], [112, 165, 0, 0.005989439058171744, \n 0.21500619230086396, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.595], [\n 166, 165, 0, 0.002632790858725762, 0.09451074335350361, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 15.207], [167, 537, 0, 0.00832595041322314, \n 0.08808100664460242, 991.0, 991.0, 991.0, 0, 2, 1, -360, 25.186], [168,\n 104, 0, 0.002552458448753463, 0.0916270065931116, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 14.743], [531, 520, 0, 0.016156694214876033, \n 0.042730794079516396, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 24.436999999999998], [139, 520, 0, 0.010682314049586776, \n 0.0282522993797748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.157], [520, \n 169, 0, 0.0011328925619834712, 0.0119849761681232, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 3.427], [168, 105, 0, 0.007340893351800554, \n 0.26352009133553606, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 42.401], [\n 520, 170, 0, 0.005842644628099174, 0.015452470732151198, 495.0, 495.0, \n 495.0, 0, 2, 1, -360, 8.837], [171, 89, 0, 0.005505454545454546, \n 0.058242717567848004, 991.0, 991.0, 991.0, 0, 1, 1, -360, 16.654], [521,\n 172, 0, 0.006304793388429752, 0.06669899780522001, 991.0, 991.0, 991.0,\n 0, 1, 1, -360, 19.072], [123, 173, 0, 0.005247403047091413, \n 0.18836891696656402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.309], [\n 521, 174, 0, 0.013300495867768597, 0.035176796844864404, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 20.117], [37, 39, 0, 0.004338873499549862, \n 0.35044859579205606, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 37.592], [\n 530, 175, 0, 0.013128595041322313, 0.0347221581224188, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 19.857], [530, 176, 0, 0.005685289256198347, \n 0.01503630144005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.599], [88, 530,\n 0, 0.006015867768595041, 0.0159106066755372, 495.0, 495.0, 
495.0, 0, 1,\n 1, -360, 9.099], [177, 496, 0, 0.018632066115702478, \n 0.19711036673178398, 991.0, 991.0, 991.0, 0, 2, 1, -360, \n 56.361999999999995], [178, 525, 0, 0.03106842975206612, \n 0.08216895464241199, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 46.99100000000001], [179, 493, 0, 0.057079669421487594, \n 0.15096278779194802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.333], [180,\n 181, 0, 0.041027438016528923, 0.10850827416682, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 62.053999999999995], [182, 180, 0, 0.00866314049586777, \n 0.09164817200545601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 26.206], [179,\n 181, 0, 0.01957223140495868, 0.051764115772731996, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 29.603], [180, 493, 0, 0.06676561983471074, \n 0.17657993119175203, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 100.98299999999999], [183, 30, 0, 0.0024804362880886427, \n 0.356166349712776, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 28.654], [183,\n 21, 0, 0.0025647506925207757, 0.36827307214930394, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 29.628], [538, 185, 0, 0.018631404958677687, \n 0.0123189607681008, 248.0, 248.0, 248.0, 0, 1, 1, -360, 14.09], [538, \n 89, 0, 0.014509752066115702, 0.038375005396288, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 21.945999999999998], [184, 186, 0, 0.0016554709141274237, \n 0.059427351084826, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 9.562000000000001], [184, 187, 0, 0.002698753462603878, \n 0.09687863927102919, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 15.588], [\n 520, 172, 0, 0.0034188429752066113, 0.0361682589818792, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 10.342], [89, 175, 0, 0.0037309090909090903, \n 0.0098674088877672, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.643], [185, \n 89, 0, 0.005812892561983471, 0.0153737832609196, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 8.792], [89, 188, 0, 0.003108760330578513, \n 0.008221966434607202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 4.702], [189,\n 190, 0, 0.008599492151454294, 0.17364414688031998, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, 
-360, 37.253], [539, 172, 0, 0.0021570247933884296, \n 0.022819366646419197, 991.0, 991.0, 991.0, 0, 2, 1, -360, 6.525], [504,\n 192, 0, 0.0003084297520661157, 0.00326290713886456, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 0.9329999999999999], [105, 186, 0, 0.003273372576177285,\n 0.1175060580379876, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 18.907], [\n 105, 187, 0, 0.0021712257617728533, 0.0779416868808324, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 12.540999999999999], [539, 193, 0, \n 0.005608595041322314, 0.01483346262541, 495.0, 495.0, 495.0, 0, 1, 1, -\n 360, 8.482999999999999], [187, 194, 0, 4.8649584487534626e-05, \n 0.0069856037041576, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.562], [539,\n 540, 0, 0.004394710743801653, 0.0116230138006708, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 6.647], [539, 196, 0, 0.00332297520661157, \n 0.008788516227194, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.026], [197, \n 540, 0, 0.004737190082644629, 0.012528794024621601, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 7.165], [110, 198, 0, 0.00018724030470914128, \n 0.02688587333118328, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 2.1630000000000003], [197, 539, 0, 0.009172231404958677, \n 0.024258473063998802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 13.873], [199,\n 537, 0, 0.03612826446280991, 0.0238877676441712, 248.0, 248.0, 248.0, 0,\n 1, 1, -360, 27.322], [134, 526, 0, 0.007771239669421488, \n 0.020553167475975197, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 11.754000000000001], [200, 193, 0, 0.0009322314049586776, \n 0.009862163056380801, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.82], [4, \n 201, 0, 0.013726108033240996, 0.49273365914097605, 1711.0, 1711.0, \n 1711.0, 0, 2, 1, -360, 79.282], [202, 86, 0, 0.00013365650969529087, \n 0.00479794133417816, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.772], [85,\n 203, 0, 0.0019011426592797783, 0.2729854600553416, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 21.962], [147, 204, 0, 0.0073874380165289254, \n 0.0781523963903056, 991.0, 991.0, 991.0, 0, 2, 1, -360, \n 
22.346999999999998], [147, 205, 0, 0.005959669421487603, \n 0.00394049369636956, 248.0, 248.0, 248.0, 0, 1, 1, -360, 4.507], [123, \n 206, 0, 0.0005753116343490305, 0.0826091142668064, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 6.646], [537, 207, 0, 0.018456198347107437, \n 0.048812461297776, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.915], [165, \n 208, 0, 0.00414612188365651, 0.14883562055771601, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 23.948], [4, 94, 0, 0.013687673130193905, \n 0.49135394025941603, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 79.06], [4,\n 2, 0, 5.2054478301015697e-05, 0.016817654469309, 5134.0, 5134.0, 5134.0,\n 0, 3, 1, -360, 0.902], [209, 4, 0, 0.0022369286703601107, \n 0.32120104149338397, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 25.840999999999998], [119, 163, 0, 0.003535145429362881, \n 0.12690306230914922, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.419], [\n 210, 3, 0, 0.0003150969529085873, 0.011311208844832242, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 1.82], [99, 211, 0, 0.0035045013850415513, \n 0.1258030161741948, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.242], [99,\n 69, 0, 0.021717970914127423, 0.7796219621557, 1711.0, 1711.0, 1711.0, 0,\n 1, 1, -360, 125.443], [212, 99, 0, 0.008453774238227147, \n 0.30346978938770003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 48.82899999999999], [213, 214, 0, 0.01490115702479339, \n 0.15764073118032798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 45.076], [510,\n 215, 0, 0.002174710743801653, 0.09202587186721281, 1981.0, 1981.0, \n 1981.0, 0, 4, 1, -360, 13.157], [128, 69, 0, 0.010711651662049862, \n 1.538088234801848, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 123.741], [\n 216, 69, 0, 0.009628462603878117, 1.3825528982351443, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 111.228], [217, 98, 0, 0.0012787396121883656, \n 0.045903620070299994, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 7.386], [\n 504, 218, 0, 0.027480991735537193, 0.072680994226412, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 41.565], [177, 504, 0, 
0.07054809917355372, \n 0.18658373169634002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 106.704], [219,\n 209, 0, 0.003938798476454294, 0.5655728721401839, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 45.501000000000005], [219, 220, 0, \n 0.0013026315789473684, 0.1870451326342096, 3423.0, 3423.0, 3423.0, 0, 2,\n 1, -360, 15.048], [94, 95, 0, 0.01070740997229917, 0.38436979242743197,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 61.846000000000004], [159, 221, \n 0, 0.009937153739612188, 0.356719480257712, 1711.0, 1711.0, 1711.0, 0, \n 2, 1, -360, 57.397], [34, 161, 0, 0.010965289256198347, \n 0.116002818645824, 991.0, 991.0, 991.0, 0, 2, 1, -360, 33.17], [222, \n 221, 0, 0.0046457756232686975, 0.16677196601221997, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 26.834], [211, 52, 0, 0.05267313019390582, \n 0.472709090515552, 856.0, 856.0, 856.0, 0, 1, 1, -360, 152.12], [215, \n 223, 0, 0.04873190082644628, 0.128884831985184, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 73.707], [224, 215, 0, 0.019086280991735535, \n 0.050478887076288004, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 28.868000000000002], [225, 224, 0, 0.04200925619834711, \n 0.11110496071615601, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 63.538999999999994], [224, 223, 0, 0.031061818181818183, \n 0.082151468537468, 495.0, 495.0, 495.0, 0, 1, 1, -360, 46.981], [226, 6,\n 0, 0.06420099173553719, 0.0424492677936932, 248.0, 248.0, 248.0, 0, 1, \n 1, -360, 48.552], [7, 3, 0, 0.009332929362880887, 0.335029305054692, \n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 53.907], [216, 227, 0, \n 0.01989941135734072, 0.7143401282507, 1711.0, 1711.0, 1711.0, 0, 1, 1, \n -360, 114.939], [228, 229, 0, 0.010545454545454545, 0.027890337012274, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 15.95], [227, 230, 0, \n 0.003993074792243767, 0.573366419334696, 3423.0, 3423.0, 3423.0, 0, 2, \n 1, -360, 46.128], [231, 53, 0, 0.007193213296398893, 1.0328749562310842,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 83.096], [544, 545, 0, \n 0.013061818181818181, 0.034545548464856, 
495.0, 495.0, 495.0, 0, 1, 1, \n -360, 19.756], [234, 235, 0, 0.04608859504132231, 0.121893887321888, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 69.709], [546, 214, 0, \n 0.057025454545454546, 0.15081940173295602, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 86.251], [233, 227, 0, 0.0029001038781163438, 0.1041066260218888,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.750999999999998], [237, 238, \n 0, 0.026324628099173554, 0.06962267451304, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 39.816], [212, 100, 0, 0.007955505540166205, 0.285583163531816, \n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 45.951], [519, 239, 0, \n 0.01740429752066116, 0.046030422038308406, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 26.324], [238, 519, 0, 0.015166280991735538, 0.040111375593995205,\n 495.0, 495.0, 495.0, 0, 1, 1, -360, 22.939], [213, 240, 0, \n 0.01665388429752066, 0.04404574915373599, 1200.0, 1200.0, 1200.0, 0, 1,\n 1, -360, 25.189], [241, 242, 0, 0.009862015235457064, \n 0.3540221919932281, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 56.963], [70,\n 241, 0, 0.003819858033240997, 0.5484941897752321, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 44.126999999999995], [509, 213, 0, \n 0.011363636363636364, 0.120216969880216, 991.0, 991.0, 991.0, 0, 2, 1, \n -360, 34.375], [68, 243, 0, 0.003611668975069252, 0.1296500701715312, \n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 20.861], [243, 244, 0, \n 0.0007699099722991691, 0.027637882270859202, 1711.0, 1711.0, 1711.0, 0,\n 1, 1, -360, 4.447], [68, 244, 0, 0.004104051246537396, \n 0.147325387728876, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 23.705], [544,\n 547, 0, 0.02418776859504132, 0.255884661882476, 991.0, 991.0, 991.0, 0,\n 1, 1, -360, 73.168], [245, 227, 0, 0.012676419667590028, \n 0.45505241780707606, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 73.219], [\n 246, 208, 0, 0.0010155817174515235, 0.0364568961999408, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 5.8660000000000005], [112, 208, 0, \n 0.0017927631578947367, 0.0643558063672372, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 
10.355], [165, 247, 0, 0.0002113919667590028, \n 0.0075884538459086, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 1.2209999999999999], [537, 549, 0, 0.00032066115702479337, \n 0.00084807607842936, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.485], [537, \n 550, 0, 0.00032198347107438016, 0.0008515732993697601, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.48700000000000004], [537, 551, 0, \n 0.0002651239669421488, 0.0007011927988648, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 0.401], [110, 251, 0, 0.00023857340720221602, \n 0.008564200982522441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 1.3780000000000001], [510, 252, 0, 0.08467702479338843, \n 0.055987884365424005, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 64.03699999999999], [529, 253, 0, 0.04859504132231405, \n 0.12852286961777998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 73.5], [237, \n 239, 0, 0.03309421487603306, 0.08752669712542799, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 50.055], [254, 238, 0, 0.07815008264462811, \n 0.05167231372274401, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 59.101000000000006], [69, 255, 0, 0.0009369806094182826, \n 0.134541235754472, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 10.824000000000002], [510, 225, 0, 0.021953719008264466, \n 0.232250442756508, 991.0, 991.0, 991.0, 0, 1, 1, -360, 66.41], [256, \n 257, 0, 0.010125619834710746, 0.0267799693631888, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 15.315], [258, 190, 0, 0.011717451523545707, \n 0.10515695255750121, 856.0, 856.0, 856.0, 0, 1, 1, -360, 33.84], [258, \n 259, 0, 0.015782548476454293, 0.1416387085570408, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 45.58], [260, 261, 0, 0.006791031855955679, \n 0.9751256416231477, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 78.45], [554,\n 553, 0, 0.17583338842975205, 0.11625986438453201, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 132.974], [515, 263, 0, 0.006987107438016529, \n 0.0739172618295936, 991.0, 991.0, 991.0, 0, 2, 1, -360, 21.136], [14, \n 264, 0, 0.01700694214876033, 0.17991802858084, 991.0, 991.0, 991.0, 0, \n 1, 1, -360, 
51.446000000000005], [116, 555, 0, 0.0009768595041322315, \n 0.0103342878835768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.955], [151, \n 116, 0, 0.007244958677685951, 0.0191612735410668, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 10.958], [111, 114, 0, 0.008806613573407202, \n 0.3161358573133961, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.867], [77,\n 111, 0, 0.00288452216066482, 0.41418912211817605, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 33.321999999999996], [266, 525, 0, \n 0.01042909090909091, 0.027582581569373602, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 15.774000000000001], [267, 120, 0, 0.013136945983379503, \n 0.471584184581432, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 75.87899999999999], [268, 269, 0, 0.0010327272727272726, \n 0.0027313295556817604, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 1.5619999999999998], [556, 271, 0, 0.052289586776859506, \n 0.0345735262323792, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 39.544000000000004], [556, 272, 0, 0.04685355371900827, \n 0.030979257409249603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 35.433], [529,\n 273, 0, 0.0034604958677685953, 0.009152227205140799, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 5.234], [128, 274, 0, 0.0029350761772853184, \n 0.1053620459045884, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.953], [34,\n 275, 0, 0.0008290909090909092, 0.00054818938265696, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 0.627], [503, 276, 0, 0.006707438016528925, \n 0.07095861291266, 991.0, 991.0, 991.0, 0, 2, 1, -360, 20.29], [503, 504,\n 0, 0.06432727272727272, 0.680524223098808, 991.0, 991.0, 991.0, 0, 2, 1,\n -360, 194.59], [177, 218, 0, 0.04330380165289256, 0.114528740018308, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 65.497], [277, 278, 0, \n 0.007191135734072023, 1.032576638635032, 3423.0, 3423.0, 3423.0, 0, 2, \n 1, -360, 83.072], [557, 558, 0, 0.04341289256198347, 0.258338836678648,\n 743.0, 743.0, 743.0, 0, 1, 1, -360, 98.493], [557, 559, 0, \n 0.03415867768595042, 0.09034195998366001, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 51.665], [559, 558, 
0, 0.04474314049586777, 0.11833546501370001, \n 495.0, 495.0, 495.0, 0, 1, 1, -360, 67.67399999999999], [277, 78, 0, \n 0.03585768698060942, 0.32180078416049196, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 103.557], [277, 279, 0, 0.021390927977839334, 0.191970480441328, \n 856.0, 856.0, 856.0, 0, 1, 1, -360, 61.777], [78, 279, 0, \n 0.015811980609418283, 0.1419028439283376, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 45.665], [281, 282, 0, 0.0023178670360110803, 0.08320574945862161,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.388], [283, 161, 0, \n 0.036741157024793386, 0.09717203248350399, 495.0, 495.0, 495.0, 0, 2, 1,\n -360, 55.571000000000005], [268, 161, 0, 0.018883636363636366, \n 0.199771751868832, 991.0, 991.0, 991.0, 0, 2, 1, -360, \n 57.123000000000005], [256, 284, 0, 0.010755371900826446, \n 0.113782083346976, 991.0, 991.0, 991.0, 0, 2, 1, -360, 32.535], [515, \n 516, 0, 0.04071140495867769, 0.107672438361532, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 61.576], [263, 516, 0, 0.0030355371900826445, \n 0.128452925198488, 1981.0, 1981.0, 1981.0, 0, 2, 1, -360, 18.365], [516,\n 285, 0, 0.006908429752066116, 0.018271230811372, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 10.449000000000002], [63, 286, 0, 0.019088925619834708, \n 0.050485881518556, 495.0, 495.0, 495.0, 0, 1, 1, -360, 28.872], [287, \n 516, 0, 0.01732892561983471, 0.011457770111127998, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 13.105], [8, 102, 0, 0.015100069252077563, \n 0.542055501663692, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 87.21799999999999], [8, 101, 0, 0.019246883656509697, 0.69091598202144,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 111.17], [80, 288, 0, \n 0.007984072022160666, 0.2866086302684072, 1711.0, 1711.0, 1711.0, 0, 2,\n 1, -360, 46.11600000000001], [80, 289, 0, 0.0003782317636201524, \n 0.122198345223416, 5134.0, 5134.0, 5134.0, 0, 4, 1, -360, \n 6.553999999999999], [276, 560, 0, 0.01778314049586777, \n 0.047032375838192794, 495.0, 495.0, 495.0, 0, 2, 1, -360, 26.897], [37,\n 290, 0, 
0.005629501385041551, 0.4546919507138321, 2567.0, 2567.0, \n 2567.0, 0, 2, 1, -360, 48.773999999999994], [290, 74, 0, \n 0.02071595106187673, 1.673216783321968, 2567.0, 2567.0, 2567.0, 0, 2, 1,\n -360, 179.483], [512, 291, 0, 0.0053299173553719, 0.056385693247479204,\n 991.0, 991.0, 991.0, 0, 2, 1, -360, 16.123], [78, 292, 0, \n 0.0058149815327908595, 0.469673087481408, 2567.0, 2567.0, 2567.0, 0, 2,\n 1, -360, 50.381], [199, 548, 0, 0.0015530578512396695, \n 0.00410748599634868, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.349], [491, \n 293, 0, 0.014176528925619833, 0.009373426429729999, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 10.720999999999998], [4, 294, 0, 9.669321329639889e-05, \n 0.013884198109531681, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 1.117], [\n 490, 541, 0, 0.050580495867768596, 0.133773946861896, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 76.503], [491, 295, 0, 0.010613553719008264, \n 0.028070443890777202, 495.0, 495.0, 495.0, 0, 1, 1, -360, 16.053], [491,\n 296, 0, 0.004400661157024794, 0.0116387512948784, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 6.656000000000001], [295, 297, 0, 0.020297520661157024, \n 0.053682341459340005, 495.0, 495.0, 495.0, 0, 1, 1, -360, 30.7], [508, \n 161, 0, 0.023239669421487603, 0.061463658055360006, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 35.15], [117, 123, 0, 0.005876211911357341, \n 0.21094161505628, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 33.941], [133,\n 117, 0, 0.004469182825484764, 0.0401081792747688, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 12.907], [71, 74, 0, 0.03904524469065097, \n 0.7884161162841721, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 169.144], [\n 74, 278, 0, 0.0077122576177285325, 1.10740463560792, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 89.09200000000001], [298, 515, 0, \n 0.021701157024793388, 0.05739464148919599, 495.0, 495.0, 495.0, 0, 1, 1,\n -360, 32.823], [5, 299, 0, 0.0016232686980609415, 0.058271370400665996,\n 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 9.376], [32, 292, 0, \n 0.009679362880886427, 
0.34746541983297996, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 55.908], [5, 29, 0, 0.00743395083102493, 1.0674425076571843, \n 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 85.87700000000001], [503, 560, 0,\n 0.015140495867768593, 0.160172719142436, 991.0, 991.0, 991.0, 0, 1, 1, \n -360, 45.8], [300, 301, 0, 0.004892053324099723, 0.7024509290644521, \n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 56.513000000000005], [51, 300, 0,\n 0.002573493767313019, 0.3695284920307039, 3423.0, 3423.0, 3423.0, 0, 1,\n 1, -360, 29.729], [244, 302, 0, 0.007714508310249307, 1.107727813004004,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 89.118], [31, 302, 0, \n 0.004369113573407203, 0.6273619041941161, 3423.0, 3423.0, 3423.0, 0, 1,\n 1, -360, 50.472], [51, 282, 0, 0.006288434903047093, 0.9029576432132521,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 72.64399999999999], [303, 304, 0,\n 8.795013850415512e-05, 0.000789298639172312, 856.0, 856.0, 856.0, 0, 1,\n 1, -360, 0.254], [305, 304, 0, 0.003881117266849031, 0.0783689646873844,\n 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 16.813], [305, 259, 0, 0.0025625,\n 0.36794989475177603, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, \n 29.601999999999997], [306, 307, 0, 0.03223268698060942, \n 0.289268628831688, 856.0, 856.0, 856.0, 0, 1, 1, -360, 93.088], [305, \n 308, 0, 0.0024272853185595567, 0.0217833994511184, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 7.01], [305, 309, 0, 0.011014773776523545, \n 0.22241441259921202, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 47.716], [\n 310, 309, 0, 0.009565962603878117, 0.343394627639832, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 55.253], [306, 309, 0, 0.035333795013850415, \n 0.31709917455019604, 856.0, 856.0, 856.0, 0, 1, 1, -360, 102.044], [311,\n 280, 0, 0.003433691135734072, 0.1232611016590444, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 19.833], [280, 278, 0, 0.009749769159764544, \n 0.7874838737974121, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, \n 84.47200000000001], [311, 32, 0, 0.01205909510619806, \n 0.9740069506375919, 2567.0, 2567.0, 
2567.0, 0, 2, 1, -360, 104.48], [13,\n 312, 0, 0.0043324965373961214, 0.622104056565324, 3423.0, 3423.0, \n 3423.0, 0, 1, 1, -360, 50.049], [313, 314, 0, 0.006092624653739613, \n 0.218710302449316, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.191], [312,\n 313, 0, 0.00893957756232687, 0.32090893884734, 1711.0, 1711.0, 1711.0, \n 0, 1, 1, -360, 51.635], [547, 566, 0, 0.027035702479338848, \n 0.286013220297816, 991.0, 991.0, 991.0, 0, 1, 1, -360, 81.783], [245, \n 315, 0, 0.014162569252077564, 0.508401547875772, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 81.803], [312, 316, 0, 8.803670360110802e-05, \n 0.01264120812658816, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.0170000000000001], [312, 314, 0, 0.005339854570637119, \n 0.191687700220296, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 30.843000000000004], [554, 546, 0, 0.08174743801652892, \n 0.21620344446439202, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 123.64299999999999], [262, 216, 0, 0.042641966759002774, \n 0.38268554099981195, 856.0, 856.0, 856.0, 0, 1, 1, -360, 123.15], [317,\n 233, 0, 0.005647276084951523, 0.114031901035644, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 24.464000000000002], [318, 317, 0, 0.008311634349030471,\n 0.16783161497270002, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 36.006], [\n 231, 52, 0, 0.035263677285318554, 1.2658796434850879, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 203.683], [319, 567, 0, 0.006089586776859504, \n 0.0644223069721, 991.0, 991.0, 991.0, 0, 1, 1, -360, 18.421], [557, 321,\n 0, 0.010004628099173555, 0.10583989458750401, 991.0, 991.0, 991.0, 0, 2,\n 1, -360, 30.264], [277, 65, 0, 0.009430170821779778, 0.7616700793261759,\n 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 81.703], [322, 288, 0, \n 0.006545013850415513, 0.528637424797136, 2567.0, 2567.0, 2567.0, 0, 2, \n 1, -360, 56.706], [322, 323, 0, 0.0018503000923372577, 0.14944779312484,\n 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 16.031], [277, 324, 0, \n 0.019719529085872576, 0.39818407235049996, 1283.0, 1283.0, 1283.0, 0, 1,\n 1, -360, 
85.425], [324, 325, 0, 0.01103508771932133, \n 0.22282459929396403, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, \n 47.803999999999995], [277, 325, 0, 0.008665743305609418, \n 0.174981914850048, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 37.54], [326,\n 327, 0, 0.007654214876033058, 0.0202436634226288, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 11.577], [328, 326, 0, 0.10300958677685952, \n 0.068109252150368, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 77.90100000000001], [328, 327, 0, 0.09827173553719008, \n 0.064976616491468, 248.0, 248.0, 248.0, 0, 1, 1, -360, 74.318], [326, \n 329, 0, 0.028062148760330575, 0.07421802283046801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 42.443999999999996], [568, 329, 0, 0.05699900826446282, \n 0.15074945731414802, 495.0, 495.0, 495.0, 0, 1, 1, -360, 86.211], [568,\n 326, 0, 0.03218644628099173, 0.08512585494846397, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 48.681999999999995], [332, 78, 0, 0.006471029547541551, \n 0.522661750455416, 2567.0, 2567.0, 2567.0, 0, 2, 1, -360, 56.065], [333,\n 306, 0, 0.008580159279778392, 0.308006702824228, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 49.559], [332, 333, 0, 0.007504674515235457, \n 0.26939943395502003, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 43.347], [\n 332, 334, 0, 0.017124653739612188, 0.15368328149175597, 856.0, 856.0, \n 856.0, 0, 1, 1, -360, 49.456], [66, 334, 0, 0.030625, \n 0.27484062260471603, 856.0, 856.0, 856.0, 0, 1, 1, -360, 88.445], [330,\n 335, 0, 0.00550536703601108, 0.790516769355108, 3423.0, 3423.0, 3423.0,\n 0, 1, 1, -360, 63.598], [336, 66, 0, 0.015054362880886425, \n 0.1351036887216764, 856.0, 856.0, 856.0, 0, 1, 1, -360, 43.477], [330, \n 336, 0, 0.039036357340720224, 0.350327404269788, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 112.73700000000001], [68, 70, 0, 0.016314058171745152, \n 0.14640868261713597, 856.0, 856.0, 856.0, 0, 1, 1, -360, 47.115], [509,\n 337, 0, 0.03494082644628099, 0.09241056617056001, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 52.848], [324, 288, 0, 0.012627423822714683, \n 
0.11332339674541761, 856.0, 856.0, 856.0, 0, 1, 1, -360, 36.468], [338,\n 559, 0, 0.009228099173553718, 0.097624922595552, 991.0, 991.0, 991.0, 0,\n 2, 1, -360, 27.915], [339, 559, 0, 0.03560595041322315, \n 0.023542417076125203, 248.0, 248.0, 248.0, 0, 1, 1, -360, 26.927], [339,\n 340, 0, 0.08711537190082644, 0.23040041287850396, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 131.762], [559, 340, 0, 0.20983272727272728, \n 0.138740000599684, 248.0, 248.0, 248.0, 0, 1, 1, -360, 158.686], [341, \n 292, 0, 0.0009329409048961218, 0.07535316024134399, 2567.0, 2567.0, \n 2567.0, 0, 1, 1, -360, 8.083], [557, 342, 0, 0.006019834710743802, \n 0.0636843933534336, 991.0, 991.0, 991.0, 0, 2, 1, -360, 18.21], [558, \n 343, 0, 0.010650247933884296, 0.11266996708783199, 991.0, 991.0, 991.0,\n 0, 1, 1, -360, 32.217], [502, 340, 0, 0.021737520661157025, \n 0.22996326026071198, 991.0, 991.0, 991.0, 0, 2, 1, -360, 65.756], [72, \n 32, 0, 0.00675502077562327, 0.969954803293024, 3423.0, 3423.0, 3423.0, \n 0, 2, 1, -360, 78.03399999999999], [344, 345, 0, 0.0005762927054480609,\n 0.04654686738645321, 2567.0, 2567.0, 2567.0, 0, 1, 1, -360, 4.993], [\n 346, 47, 0, 0.0011340027700831024, 0.04070792194158799, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 6.55], [46, 47, 0, 0.0008975069252077563, \n 0.0322183003580208, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 5.184], [346,\n 345, 0, 0.0007217797783933517, 0.025910126194627202, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 4.169], [347, 328, 0, 0.029905454545454544, \n 0.07909314882361201, 495.0, 495.0, 495.0, 0, 1, 1, -360, 45.232], [347,\n 348, 0, 0.04883438016528925, 0.129155866607944, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 73.862], [571, 348, 0, 0.041548429752066116, \n 0.10988617921762801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 62.842], [347,\n 572, 0, 0.016052231404958678, 0.04245451362512801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 24.279], [571, 570, 0, 0.17379041322314048, \n 0.11490906279551602, 248.0, 248.0, 248.0, 0, 1, 1, -360, 131.429], [14,\n 350, 0, 
0.02166743801652892, 0.05730546235524, 495.0, 495.0, 495.0, 0, \n 1, 1, -360, 32.772], [350, 573, 0, 0.026277685950413226, \n 0.06949852316919598, 495.0, 495.0, 495.0, 0, 1, 1, -360, 39.745], [15, \n 351, 0, 0.02639265927977839, 0.236857956201204, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 76.222], [352, 15, 0, 0.0015260560941828254, \n 0.219126704094076, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 17.629], [15,\n 335, 0, 0.0035338758079432133, 1.1417173740880242, 5134.0, 5134.0, \n 5134.0, 0, 1, 1, -360, 61.235], [232, 227, 0, 5.5747922437673134e-05, \n 0.000500303468136644, 1200.0, 1200.0, 1200.0, 0, 1, 1, -360, 0.161], [\n 565, 544, 0, 0.0394803305785124, 0.10441652566461601, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 59.714], [235, 567, 0, 0.02391404958677686, \n 0.25298896294275997, 991.0, 991.0, 991.0, 0, 1, 1, -360, 72.34], [567, \n 286, 0, 0.008068760330578512, 0.34144067500694797, 1981.0, 1981.0, \n 1981.0, 0, 1, 1, -360, 48.816], [353, 519, 0, 0.007621818181818182, \n 0.080631926038356, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 23.055999999999997], [354, 353, 0, 0.0008436363636363636, \n 0.00892490784392768, 991.0, 991.0, 991.0, 0, 2, 1, -360, 2.552], [355, \n 354, 0, 0.0068502479338842966, 0.0181173530898976, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.360999999999999], [354, 356, 0, 0.01855404958677686, \n 0.049071255647172, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 28.063000000000002], [357, 358, 0, 0.0034823407202216067, \n 0.5000300103406239, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 40.228], [\n 574, 359, 0, 0.013352066115702478, 0.0353131884615884, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 20.195], [235, 575, 0, 0.007459504132231404, \n 0.0789147905557, 991.0, 991.0, 991.0, 0, 1, 1, -360, 22.565], [167, 361,\n 0, 0.000616198347107438, 0.0065188198358579995, 991.0, 991.0, 991.0, 0,\n 1, 1, -360, 1.864], [528, 362, 0, 0.0011960330578512398, \n 0.012652945368078402, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 3.6180000000000003], [363, 344, 0, 0.0002662742382271468, \n 
0.009558592968871479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.538], [\n 259, 364, 0, 0.013069713758102496, 0.26390852570525997, 1283.0, 1283.0,\n 1283.0, 0, 1, 1, -360, 56.618], [54, 56, 0, 0.007723337950138504, \n 0.0693122289241068, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.305], [365, \n 364, 0, 0.0049974607571537395, 0.10091058802821559, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 21.649], [231, 366, 0, 0.0013273891966759002, \n 0.0476500209962672, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, \n 7.667000000000001], [30, 367, 0, 0.01126108033240997, \n 0.1010613005635992, 856.0, 856.0, 856.0, 0, 1, 1, -360, 32.522], [61, \n 367, 0, 0.020337603878116343, 0.18251754162067196, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 58.735], [254, 368, 0, 0.0004297520661157025, \n 0.00454638722456732, 991.0, 991.0, 991.0, 0, 1, 1, -360, 1.3], [254, \n 369, 0, 0.00015999999999999999, 0.00169265493591832, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 0.484], [254, 370, 0, 0.0003669421487603306, \n 0.0038819152455960805, 991.0, 991.0, 991.0, 0, 2, 1, -360, 1.11], [99, \n 358, 0, 0.0020184383656509696, 0.28982797432374396, 3423.0, 3423.0, \n 3423.0, 0, 1, 1, -360, 23.316999999999997], [354, 519, 0, \n 0.006762644628099174, 0.07154264880985199, 991.0, 991.0, 991.0, 0, 1, 1,\n -360, 20.457], [571, 371, 0, 0.023726942148760328, 0.06275238397221199,\n 495.0, 495.0, 495.0, 0, 1, 1, -360, 35.887], [207, 372, 0, \n 0.002329256198347108, 0.006160354689297601, 495.0, 495.0, 495.0, 0, 1, \n 1, -360, 3.523], [57, 373, 0, 0.0017725619834710745, \n 0.0046880246727212796, 495.0, 495.0, 495.0, 0, 1, 1, -360, 2.681], [209,\n 374, 0, 0.0010122922437673131, 0.0363388121515216, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 5.847], [375, 376, 0, 0.0045364727608518006, \n 0.0916021467933684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 19.652], [\n 376, 377, 0, 0.0030886426592797783, 0.062367022394423606, 1283.0, \n 1283.0, 1283.0, 0, 1, 1, -360, 13.38], [16, 49, 0, 0.002266101108033241,\n 0.32538991773524, 3423.0, 3423.0, 3423.0, 0, 
2, 1, -360, 26.178], [318,\n 377, 0, 0.004755078485685596, 0.0960163149704152, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 20.599], [378, 297, 0, 0.01753917355371901, \n 0.046387138574374404, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 26.528000000000002], [562, 379, 0, 0.01802314049586777, \n 0.047667121439141605, 495.0, 495.0, 495.0, 0, 1, 1, -360, 27.26], [576,\n 563, 0, 0.001808264462809917, 0.004782449638150801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 2.735], [576, 381, 0, 0.0034320661157024794, \n 0.009077036954898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 5.191], [577, \n 576, 0, 0.06004495867768594, 0.15880530575430396, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 90.818], [244, 383, 0, 0.006845567867036011, \n 0.1382282547912684, 1283.0, 1283.0, 1283.0, 0, 1, 1, -360, 29.655], [\n 244, 306, 0, 0.02679108956599723, 0.5409756541164079, 1283.0, 1283.0, \n 1283.0, 0, 1, 1, -360, 116.059], [383, 306, 0, 0.0300685595567867, \n 0.269846910348376, 856.0, 856.0, 856.0, 0, 1, 1, -360, 86.838], [380, \n 306, 0, 0.00025605955678670365, 0.03676764369572, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 2.958], [252, 225, 0, 0.062094545454545444, \n 0.041056499553586, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 46.958999999999996], [220, 76, 0, 0.002772074099722992, \n 0.398042682239984, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 32.023], [542,\n 384, 0, 0.007939834710743802, 0.020999063146094, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 12.009], [385, 384, 0, 0.053734876033057856, \n 0.035529141854791196, 248.0, 248.0, 248.0, 0, 1, 1, -360, 40.637], [542,\n 385, 0, 0.011306115702479337, 0.119608453436296, 991.0, 991.0, 991.0, 0,\n 2, 1, -360, 34.201], [386, 385, 0, 0.003668760330578512, \n 0.0388121580140316, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 11.097999999999999], [387, 578, 0, 0.015444628099173553, \n 0.16339016240905604, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.72], [332, \n 388, 0, 0.014036184210526315, 0.5038646344377999, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 81.07300000000001], [382, 332, 0, \n 
0.017764369806094183, 0.637697365901468, 1711.0, 1711.0, 1711.0, 0, 1, \n 1, -360, 102.60700000000001], [382, 388, 0, 0.00476159972299169, \n 0.17092976750548, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 27.503], [579,\n 578, 0, 0.01911074380165289, 0.050543585664, 495.0, 495.0, 495.0, 0, 1,\n 1, -360, 28.905], [577, 387, 0, 0.07597818181818182, \n 0.20094506949431204, 495.0, 495.0, 495.0, 0, 1, 1, -360, 114.917], [144,\n 390, 0, 0.0004277685950413223, 0.0011313509747276, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 0.647], [37, 49, 0, 0.008441481994459835, \n 0.303028527944352, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 48.758], [391,\n 233, 0, 0.014211218836565096, 0.1275369872004348, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 41.042], [392, 310, 0, 0.007035318559556785, \n 0.06313767618386361, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 20.317999999999998], [260, 393, 0, 0.006341412742382271, \n 0.0569102963692744, 856.0, 856.0, 856.0, 0, 1, 1, -360, 18.314], [394, \n 230, 0, 0.0007590027700831025, 0.00681158510656168, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 2.1919999999999997], [395, 282, 0, 0.008762984764542936,\n 0.314569689934484, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.615], [395,\n 244, 0, 0.0034046052631578946, 0.12221699007344, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 19.665], [25, 396, 0, 0.008809037396121884, \n 0.316222866612064, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 50.881], [81,\n 74, 0, 0.0075207756232686974, 0.26997742429652244, 1711.0, 1711.0, \n 1711.0, 0, 2, 1, -360, 43.44], [278, 80, 0, 0.016286011080332407, \n 0.5846279085788, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 94.068], [81, \n 278, 0, 0.021054016620498613, 0.755787629231688, 1711.0, 1711.0, 1711.0,\n 0, 2, 1, -360, 121.60799999999999], [569, 570, 0, 0.03253950413223141, \n 0.08605961294018, 495.0, 495.0, 495.0, 0, 1, 1, -360, 49.216], [397, \n 552, 0, 0.006289586776859504, 0.0166345314104904, 1200.0, 1200.0, \n 1200.0, 0, 1, 1, -360, 9.513], [542, 398, 0, 0.0005580165289256199, \n 0.0059033089500572, 991.0, 
991.0, 991.0, 0, 1, 1, -360, \n 1.6880000000000002], [398, 385, 0, 0.021893553719008262, \n 0.05790348713648401, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 33.114000000000004], [399, 499, 0, 0.03266380165289256, \n 0.021597087927192803, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 24.701999999999998], [83, 399, 0, 0.025700495867768593, \n 0.016992996557050798, 248.0, 248.0, 248.0, 0, 1, 1, -360, 19.436], [498,\n 400, 0, 0.012134214876033058, 0.032092247974028, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 18.352999999999998], [518, 239, 0, 0.04685289256198347, \n 0.123915281026504, 495.0, 495.0, 495.0, 0, 1, 1, -360, 70.865], [575, \n 543, 0, 0.0030307438016528923, 0.032062521596058796, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 9.168], [401, 360, 0, 0.007957063711911357, \n 0.071409774520472, 856.0, 856.0, 856.0, 0, 1, 1, -360, 22.98], [580, \n 581, 0, 0.007134545454545454, 0.018869255592422397, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.790999999999999], [401, 402, 0, 0.0033434903047091418,\n 0.030005778188384805, 856.0, 856.0, 856.0, 0, 1, 1, -360, 9.656], [403,\n 231, 0, 0.009592105263157893, 0.08608327126915, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 27.701999999999998], [189, 360, 0, 0.028456024930747923, \n 0.255375399471348, 856.0, 856.0, 856.0, 0, 1, 1, -360, 82.181], [234, \n 404, 0, 0.008092561983471074, 0.0214029921648796, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 12.24], [235, 404, 0, 0.05107504132231405, \n 0.13508190749437998, 495.0, 495.0, 495.0, 0, 1, 1, -360, 77.251], [235,\n 580, 0, 0.000580495867768595, 0.00153527999352772, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 0.878], [216, 259, 0, 0.0022115650969529088, \n 0.079389770210892, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, \n 12.774000000000001], [405, 259, 0, 0.0052832409972299165, \n 0.1896554115982928, 1711.0, 1711.0, 1711.0, 0, 2, 1, -360, 30.516], [\n 405, 318, 0, 0.0066348684210526315, 0.23817552558268398, 1711.0, 1711.0,\n 1711.0, 0, 2, 1, -360, 38.323], [406, 230, 0, 8.098164819944598e-05, \n 0.046512685161986804, 6845.0, 
6845.0, 6845.0, 0, 1, 1, -360, 1.871], [\n 542, 407, 0, 0.025569586776859506, 0.067625761355152, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 38.674], [23, 408, 0, 0.03224528925619835, \n 0.08528148128033601, 495.0, 495.0, 495.0, 0, 1, 1, -360, 48.771], [577,\n 348, 0, 0.012999008264462809, 0.13751772188026398, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 39.321999999999996], [562, 564, 0, 0.06921520661157024, \n 0.18305853298686803, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 104.68799999999999], [582, 507, 0, 0.006357685950413223, \n 0.016814638289042002, 495.0, 495.0, 495.0, 0, 1, 1, -360, 9.616], [27, \n 410, 0, 0.0030042975206611565, 0.007945685980170399, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 4.544], [501, 27, 0, 0.003811570247933884, \n 0.040322957460962, 991.0, 991.0, 991.0, 0, 1, 1, -360, 11.53], [27, 411,\n 0, 0.004648595041322314, 0.012294480221518, 495.0, 495.0, 495.0, 0, 1, \n 1, -360, 7.031000000000001], [411, 410, 0, 0.002054214876033058, \n 0.0054329327333556, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 3.1069999999999998], [403, 360, 0, 0.008191481994459833, \n 0.07351353506655639, 856.0, 856.0, 856.0, 0, 1, 1, -360, \n 23.656999999999996], [412, 360, 0, 0.016761772853185596, \n 0.15042664773666, 856.0, 856.0, 856.0, 0, 1, 1, -360, 48.408], [326, \n 413, 0, 0.012077024793388432, 0.12776397267356798, 991.0, 991.0, 991.0,\n 0, 2, 1, -360, 36.533], [414, 413, 0, 0.008093223140495867, \n 0.08561896310149601, 991.0, 991.0, 991.0, 0, 2, 1, -360, 24.482], [6, \n 297, 0, 0.019472396694214876, 0.0128750188978664, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 14.725999999999999], [554, 580, 0, 0.07435371900826447, \n 0.196648733567264, 495.0, 495.0, 495.0, 0, 1, 1, -360, 112.46], [262, \n 401, 0, 0.03931232686980609, 0.35280406181043206, 856.0, 856.0, 856.0, \n 0, 1, 1, -360, 113.53399999999999], [499, 556, 0, 0.04185586776859504, \n 0.11069928308639199, 495.0, 495.0, 495.0, 0, 2, 1, -360, \n 63.306999999999995], [224, 229, 0, 0.004135206611570248, \n 0.0437467367631624, 991.0, 991.0, 
991.0, 0, 1, 1, -360, 12.509], [583, \n 507, 0, 0.024632727272727268, 0.065147980317596, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 37.257], [415, 307, 0, 0.015675554016620498, \n 0.1406784987952448, 856.0, 856.0, 856.0, 0, 1, 1, -360, 45.271], [416, \n 507, 0, 0.0010555371900826446, 0.011166626467730801, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 3.193], [284, 561, 0, 0.015221487603305786, \n 0.16102953827307598, 991.0, 991.0, 991.0, 0, 1, 1, -360, 46.045], [543,\n 417, 0, 0.0006614876033057851, 0.027991756419545603, 1981.0, 1981.0, \n 1981.0, 0, 4, 1, -360, 4.002], [418, 506, 0, 0.0009395041322314049, \n 0.009939101917118, 991.0, 991.0, 991.0, 0, 1, 1, -360, 2.842], [220, \n 157, 0, 0.004599549861495845, 0.165112574384632, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 26.566999999999997], [295, 419, 0, 0.0012023140495867769,\n 0.012719392565946, 991.0, 991.0, 991.0, 0, 1, 1, -360, 3.637], [295, \n 420, 0, 0.0008003305785123967, 0.008466771900532, 991.0, 991.0, 991.0, \n 0, 1, 1, -360, 2.421], [541, 62, 0, 0.05133355371900827, \n 0.0339414035471236, 248.0, 248.0, 248.0, 0, 1, 1, -360, 38.821], [52, \n 421, 0, 0.00013885041551246538, 0.004984389831631239, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 0.802], [60, 160, 0, 6.128808864265928e-05, \n 0.000550023067454096, 856.0, 856.0, 856.0, 0, 2, 1, -360, 0.177], [535,\n 161, 0, 3.735537190082645e-05, 0.00039518596644331203, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 0.113], [267, 282, 0, 0.0065652700831024926, \n 0.235677115717012, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 37.921], [52,\n 365, 0, 0.007655586334279779, 0.15458444922992, 1283.0, 1283.0, 1283.0,\n 0, 1, 1, -360, 33.164], [28, 27, 0, 0.015726942148760328, \n 0.041594197273402404, 495.0, 495.0, 495.0, 0, 1, 1, -360, 23.787], [30,\n 201, 0, 0.009128289473684211, 0.327683234253536, 1711.0, 1711.0, 1711.0,\n 0, 2, 1, -360, 52.725], [422, 81, 0, 0.0004226685133887349, \n 0.13655487952674, 5134.0, 5134.0, 5134.0, 0, 6, 1, -360, 7.324], [119, \n 425, 0, 0.003579120498614958, 
0.1284816595874996, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 20.673000000000002], [423, 425, 0, \n 0.0006518351800554017, 0.0233992864289392, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 3.765], [424, 425, 0, 0.005922957063711911, \n 0.21261965153389198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 34.211], [\n 426, 428, 0, 0.013948429752066116, 0.14756174042535197, 991.0, 991.0, \n 991.0, 0, 2, 1, -360, 42.193999999999996], [427, 428, 0, \n 0.0002664462809917355, 0.0028187600792304794, 991.0, 991.0, 991.0, 0, 2,\n 1, -360, 0.8059999999999999], [19, 428, 0, 0.023607603305785128, \n 0.24974703912892798, 991.0, 991.0, 991.0, 0, 2, 1, -360, 71.413], [45, \n 429, 0, 0.02562314049586777, 0.067767398802972, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 38.755], [44, 429, 0, 5.289256198347107e-05, \n 0.00013988883767892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.08], [505, \n 429, 0, 0.006012561983471073, 0.015901863623161996, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 9.094], [231, 431, 0, 0.011677285318559558, \n 0.4191859418495199, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 67.44800000000001], [190, 431, 0, 0.009600761772853185, \n 0.34464383257266795, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 55.45399999999999], [430, 431, 0, 0.0028100761772853187, \n 0.1008748520662472, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 16.230999999999998], [286, 433, 0, 0.01568694214876033, \n 0.16595362535967603, 991.0, 991.0, 991.0, 0, 1, 1, -360, 47.453], [432,\n 433, 0, 0.00010049586776859504, 0.00106315516636076, 991.0, 991.0, \n 991.0, 0, 1, 1, -360, 0.304], [506, 433, 0, 0.0065904132231404955, \n 0.06972059669946801, 991.0, 991.0, 991.0, 0, 1, 1, -360, 19.936], [23, \n 434, 0, 0.02613685950413223, 0.069126069139116, 495.0, 495.0, 495.0, 0,\n 2, 1, -360, 39.532], [400, 434, 0, 0.008155371900826446, \n 0.021569110159669603, 495.0, 495.0, 495.0, 0, 2, 1, -360, 12.335], [500,\n 434, 0, 0.006338512396694216, 0.0167639285853336, 495.0, 495.0, 495.0, \n 0, 2, 1, -360, 9.587], [32, 436, 0, 0.0044813019390581715, \n 
0.16086776359270402, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 25.884], [\n 435, 436, 0, 0.0006634349030470914, 0.023815688073266, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 3.832], [78, 436, 0, 0.00897680055401662, \n 0.32224515307884394, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 51.85], [86,\n 438, 0, 0.014693213296398892, 0.52745036936438, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 84.868], [437, 438, 0, 1.0387811634349031e-05, \n 0.0003728969948845, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.06], [221,\n 438, 0, 0.002280124653739612, 0.081850890377238, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 13.17], [207, 439, 0, 0.055703801652892564, \n 0.0368309823503996, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 42.126000000000005], [516, 439, 0, 0.05448462809917355, \n 0.03602487292327441, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 41.20399999999999], [513, 439, 0, 0.046726611570247926, \n 0.0308953241066316, 248.0, 248.0, 248.0, 0, 1, 1, -360, \n 35.336999999999996], [181, 441, 0, 0.040805289256198356, \n 0.10792074104825197, 495.0, 495.0, 495.0, 0, 1, 1, -360, 61.718], [440,\n 441, 0, 0.0001322314049586777, 0.000349722094197784, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.2], [504, 441, 0, 0.05916099173553719, \n 0.156467413554364, 495.0, 495.0, 495.0, 0, 1, 1, -360, \n 89.48100000000001], [135, 442, 0, 0.004956890581717451, \n 0.177940231009092, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 28.631], [109,\n 442, 0, 0.0015380886426592797, 0.055213615042649204, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 8.884], [112, 442, 0, 0.0027304362880886425, \n 0.09801597510545401, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 15.770999999999999], [113, 443, 0, 0.0019885734072022164, \n 0.07138491472072879, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 11.485999999999999], [132, 443, 0, 0.006788434903047091, \n 0.24368818615747198, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 39.21], [\n 107, 443, 0, 2.2333795013850418e-05, 0.000801728539002036, 1711.0, \n 1711.0, 1711.0, 0, 1, 1, -360, 0.129], [444, 445, 0, \n 
7.877423822714682e-05, 0.00282780221121528, 1711.0, 1711.0, 1711.0, 0, \n 1, 1, -360, 0.455], [112, 445, 0, 0.002816135734072022, \n 0.101092375313206, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.266], [109,\n 445, 0, 0.0014354224376731304, 0.0515281497432104, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 8.291], [119, 447, 0, 0.005212690443213296, \n 0.74849127803204, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 60.217], [100,\n 447, 0, 0.0050695117728531865, 0.7279322237145921, 3423.0, 3423.0, \n 3423.0, 0, 2, 1, -360, 58.563], [446, 447, 0, 2.9518698060941832e-05, \n 0.00423859584186224, 3423.0, 3423.0, 3423.0, 0, 2, 1, -360, 0.341], [\n 124, 448, 0, 6.509695290858726e-05, 0.00233682116794768, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 0.376], [125, 448, 0, 0.00615148891966759, \n 0.22082338542026803, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 35.531], [\n 131, 448, 0, 3.912742382271468e-05, 0.0014045786807313759, 1711.0, \n 1711.0, 1711.0, 0, 1, 1, -360, 0.226], [449, 450, 0, \n 0.0023614958448753462, 0.08477191683710039, 1711.0, 1711.0, 1711.0, 0, \n 1, 1, -360, 13.64], [173, 450, 0, 0.002862361495844876, \n 0.10275176694050518, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 16.533], [\n 184, 450, 0, 0.004022853185595568, 0.14441057621844403, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 23.236], [144, 451, 0, 0.007672727272727273, \n 0.020292624515794402, 495.0, 495.0, 495.0, 0, 1, 1, -360, 11.605], [140,\n 451, 0, 0.006991074380165291, 0.018489807120219602, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 10.574000000000002], [514, 451, 0, 0.01149289256198347, \n 0.030396095817207994, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.383], [537,\n 585, 0, 0.05072595041322314, 0.134158641165824, 495.0, 495.0, 495.0, 0,\n 1, 1, -360, 76.723], [141, 585, 0, 0.007994710743801653, \n 0.0211441978151932, 495.0, 495.0, 495.0, 0, 1, 1, -360, 12.092], [584, \n 585, 0, 9.256198347107438e-05, 0.000244805465938352, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.14], [522, 454, 0, 0.0035008264462809916, \n 0.0092588924438956, 
495.0, 495.0, 495.0, 0, 1, 1, -360, 5.295], [144, \n 454, 0, 0.00452892561983471, 0.011977981726290799, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 6.85], [453, 454, 0, 0.001114710743801653, \n 0.0029481572540882, 495.0, 495.0, 495.0, 0, 1, 1, -360, 1.686], [199, \n 456, 0, 0.013063140495867768, 0.0086372614214612, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 9.879], [140, 456, 0, 0.005061818181818182, \n 0.013387361765852802, 495.0, 495.0, 495.0, 0, 2, 1, -360, \n 7.656000000000001], [455, 456, 0, 0.0011365289256198346, \n 0.00300586139962416, 495.0, 495.0, 495.0, 0, 2, 1, -360, 1.719], [537, \n 456, 0, 0.039058512396694216, 0.025825228046024003, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 29.538], [538, 457, 0, 0.027927272727272728, \n 0.0184653265736368, 248.0, 248.0, 248.0, 0, 1, 1, -360, 21.12], [153, \n 457, 0, 0.030093223140495867, 0.019897438549384, 248.0, 248.0, 248.0, 0,\n 1, 1, -360, 22.758000000000003], [176, 457, 0, 0.004579173553719009, \n 0.0030277190305137603, 248.0, 248.0, 248.0, 0, 1, 1, -360, 3.463], [524,\n 459, 0, 0.004318677685950414, 0.011421923596476799, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 6.532], [458, 459, 0, 0.001993388429752066, \n 0.0052720605700488, 495.0, 495.0, 495.0, 0, 1, 1, -360, 3.015], [134, \n 459, 0, 0.011813553719008265, 0.031244171895617998, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 17.868], [460, 461, 0, 6.611570247933885e-05, \n 0.000174861047098892, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.1], [150, \n 461, 0, 0.008018512396694214, 0.021207147792120403, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 12.128], [149, 461, 0, 0.005586115702479339, \n 0.0147740098693748, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.449], [521, \n 463, 0, 0.014348429752066114, 0.009487086110365599, 248.0, 248.0, 248.0,\n 0, 1, 1, -360, 10.850999999999999], [462, 463, 0, 0.007197355371900825,\n 0.0047588433967958406, 248.0, 248.0, 248.0, 0, 1, 1, -360, 5.443], [538,\n 463, 0, 0.012211570247933883, 0.0080742088497664, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 9.235], [110, 464, 0, 
0.0025753116343490306, \n 0.0924473799817492, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 14.875], [90,\n 464, 0, 0.007328947368421053, 0.26309125979076, 1711.0, 1711.0, 1711.0,\n 0, 1, 1, -360, 42.332], [165, 464, 0, 0.002152527700831025, \n 0.0772704722900764, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 12.433], [\n 458, 465, 0, 0.002003305785123967, 0.0052982897270776, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 3.03], [134, 465, 0, 0.011838677685950413, \n 0.031310619093534, 495.0, 495.0, 495.0, 0, 1, 1, -360, 17.906], [524, \n 465, 0, 0.004293553719008264, 0.0113554763986092, 495.0, 495.0, 495.0, \n 0, 1, 1, -360, 6.494], [466, 467, 0, 0.0023509349030470914, \n 0.084392804892244, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 13.579], [110,\n 467, 0, 0.0025337603878116343, 0.09095579200221118, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 14.635], [165, 467, 0, 0.0022891274238227145, \n 0.08217406777274441, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 13.222000000000001], [468, 469, 0, 0.0005269421487603305, \n 0.0013936425453786, 495.0, 495.0, 495.0, 0, 1, 1, -360, 0.797], [541, \n 469, 0, 0.022390743801652895, 0.05921844221026801, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 33.866], [490, 469, 0, 0.028243305785123966, \n 0.07469714209944801, 495.0, 495.0, 495.0, 0, 1, 1, -360, 42.718], [263,\n 471, 0, 0.0371900826446281, 0.0245898347482832, 248.0, 248.0, 248.0, 0,\n 1, 1, -360, 28.125], [470, 471, 0, 0.001570909090909091, \n 0.0010386746197682802, 248.0, 248.0, 248.0, 0, 1, 1, -360, 1.188], [534,\n 471, 0, 0.024497190082644622, 0.0161973787927468, 248.0, 248.0, 248.0, \n 0, 1, 1, -360, 18.526], [136, 472, 0, 0.0007079293628808865, \n 0.025412930201351602, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 4.0889999999999995], [110, 472, 0, 0.00019511772853185596, \n 0.0070042485539216805, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 1.127], [\n 251, 472, 0, 4.207063711911357e-05, 0.00151023282928764, 1711.0, 1711.0,\n 1711.0, 0, 1, 1, -360, 0.243], [226, 474, 0, 0.017639669421487602, \n 0.011663231841509601, 
248.0, 248.0, 248.0, 0, 1, 1, -360, 13.34], [473,\n 474, 0, 0.003467107438016529, 0.00916971330986216, 495.0, 495.0, 495.0,\n 0, 2, 1, -360, 5.244], [257, 474, 0, 0.020264462809917356, \n 0.053594910935781594, 495.0, 495.0, 495.0, 0, 2, 1, -360, 30.65], [6, \n 474, 0, 0.08066247933884299, 0.05333349367016, 248.0, 248.0, 248.0, 0, \n 1, 1, -360, 61.001000000000005], [299, 475, 0, 0.013238227146814403, \n 0.47521993028123993, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 76.464], [3,\n 475, 0, 0.0002794321329639889, 0.010030929162389441, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 1.614], [210, 475, 0, 0.0001481994459833795, \n 0.00531999712702368, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 0.856], [\n 297, 476, 0, 0.0193500826446281, 0.05117658265464801, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 29.267], [296, 476, 0, 0.005596694214876033, \n 0.014801987636898, 495.0, 495.0, 495.0, 0, 1, 1, -360, 8.465], [295, \n 476, 0, 0.0009474380165289256, 0.00250575880492432, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 1.433], [313, 478, 0, 0.008696849030470914, \n 0.31219557906752804, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 50.233000000000004], [477, 478, 0, 1.5235457063711912e-05, \n 0.0005469155924977479, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, \n 0.08800000000000001], [245, 478, 0, 0.005264542936288089, \n 0.188984197007248, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 30.408], [479,\n 481, 0, 0.028420495867768597, 0.07516576970575199, 495.0, 495.0, 495.0,\n 0, 1, 1, -360, 42.986000000000004], [565, 481, 0, 0.024842314049586776,\n 0.065702289836964, 495.0, 495.0, 495.0, 0, 1, 1, -360, 37.574], [480, \n 481, 0, 7.735537190082645e-05, 0.000204587425105844, 495.0, 495.0, \n 495.0, 0, 1, 1, -360, 0.11699999999999999], [415, 482, 0, \n 0.011021814404432133, 0.0989140353680364, 856.0, 856.0, 856.0, 0, 1, 1,\n -360, 31.831], [56, 482, 0, 0.002630886426592798, 0.0236105947261788, \n 856.0, 856.0, 856.0, 0, 1, 1, -360, 7.598], [409, 482, 0, \n 0.0007635041551246537, 0.0068519822810072005, 856.0, 856.0, 856.0, 
0, 1,\n 1, -360, 2.205], [483, 484, 0, 9.037396121883656e-05, \n 0.000811050963873968, 856.0, 856.0, 856.0, 0, 1, 1, -360, 0.261], [3, \n 484, 0, 0.010022160664819944, 0.08994275516621358, 856.0, 856.0, 856.0,\n 0, 1, 1, -360, 28.944000000000003], [301, 484, 0, 0.00966516620498615, \n 0.08673894848517479, 856.0, 856.0, 856.0, 0, 1, 1, -360, 27.913], [233,\n 485, 0, 0.01410180055401662, 0.1265550251138996, 856.0, 856.0, 856.0, 0,\n 1, 1, -360, 40.726], [392, 485, 0, 0.00914819944598338, \n 0.0820994883738036, 856.0, 856.0, 856.0, 0, 1, 1, -360, 26.42], [391, \n 485, 0, 8.518005540166207e-05, 0.000764438839512864, 856.0, 856.0, \n 856.0, 0, 1, 1, -360, 0.24600000000000002], [579, 488, 0, \n 0.004636473829194215, 0.11036180126571601, 1486.0, 1486.0, 1486.0, 0, 1,\n 1, -360, 21.038], [486, 488, 0, 0.00016969696969690082, \n 0.00403929018798184, 1486.0, 1486.0, 1486.0, 0, 1, 1, -360, 0.77], [487,\n 488, 0, 0.00014567493112954544, 0.00346749456396992, 1486.0, 1486.0, \n 1486.0, 0, 1, 1, -360, 0.6609999999999999], [270, 489, 0, \n 0.0001745152354570637, 0.0062646695140596, 1711.0, 1711.0, 1711.0, 0, 1,\n 1, -360, 1.008], [331, 489, 0, 0.003002943213296399, \n 0.10779830627119119, 1711.0, 1711.0, 1711.0, 0, 1, 1, -360, 17.345], [\n 396, 489, 0, 0.01124792243767313, 0.40377286606072005, 1711.0, 1711.0, \n 1711.0, 0, 1, 1, -360, 64.968], [519, 253, 0, 0.013353485337561985, \n 0.141267767926912, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 40.394293146100004], [382, 349, 0, 0.009091647380263157, \n 1.30547149138788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 105.02671053600001], [349, 351, 0, 0.0005858117819605263, \n 0.0841168325920224, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 6.76729770521], [459, 465, 0, 1.578788789911157e-05, \n 0.00016702153987596, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.047758360894800005], [549, 550, 0, 3.680432518409091e-05, \n 0.000389356391787088, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.111333083682], [550, 551, 0, 5.755645674710744e-05, \n 
0.0006088951287918401, 991.0, 991.0, 991.0, 0, 1, 1, -360, \n 0.17410828165999997], [194, 195, 0, 1.7560672583171745e-05, \n 0.00252154053805592, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.202860889681], [247, 248, 0, 2.1755213937811637e-05, \n 0.0031238355819477198, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.25131623141], [2, 294, 0, 2.3531392658518004e-05, 0.003378877444715, \n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.271834647991], [549, 551, 0, \n 9.265809538429751e-05, 0.0009802386406577602, 991.0, 991.0, 991.0, 0, 1,\n 1, -360, 0.28029073853799996], [54, 365, 0, 2.573045189134349e-05, \n 0.00369464080598484, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.297238180249], [131, 265, 0, 2.7616389041343487e-05, \n 0.00396544290388756, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.319024526206], [91, 92, 0, 2.8945628197853184e-05, \n 0.0041563086239824396, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.33437989694200004], [247, 249, 0, 3.098840072160664e-05, \n 0.00444963074500788, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.357978005136], [186, 191, 0, 3.1591661821191135e-05, \n 0.00453625312865552, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.36494687735799997], [129, 173, 0, 3.202671277479225e-05, \n 0.00459872218332188, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.369972585975], [96, 202, 0, 3.5971247867797784e-05, \n 0.00516511877739804, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.415539855369], [53, 320, 0, 3.784209581142659e-05, \n 0.00543375421308236, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.437151890814], [24, 396, 0, 4.144748602818559e-05, \n 0.005951452925597279, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.47880135859800005], [133, 156, 0, 4.431754564044322e-05, \n 0.0063635653674415605, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.511956287238], [442, 452, 0, 4.483572190450138e-05, \n 0.006437970402313801, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.517942259441], [445, 452, 0, 4.490753296371191e-05, \n 0.0064482817668697215, 3423.0, 3423.0, 3423.0, 0, 1, 1, 
-360, \n 0.518771820797], [247, 250, 0, 4.594910768732687e-05, \n 0.00659784169268824, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.530804092004], [187, 195, 0, 4.755760376239612e-05, \n 0.006828805970367921, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.549385438663], [216, 236, 0, 5.03353075283241e-05, \n 0.00722765701751724, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.581473472567], [244, 389, 0, 5.1633313019736845e-05, \n 0.007414037889302401, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.596468032004], [394, 406, 0, 5.6346419007686985e-05, \n 0.008090793734075721, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.650913832377], [442, 445, 0, 6.388070648310249e-05, \n 0.00917264360085512, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.737949921293], [442, 444, 0, 6.584378362735456e-05, \n 0.00945452224616264, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 0.760627388463], [198, 472, 0, 8.37554210498615e-05, 0.0120264578966664,\n 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, 0.967542623967], [464, 467, 0, \n 8.460287496468144e-05, 0.01214814397621276, 3423.0, 3423.0, 3423.0, 0, \n 1, 1, -360, 0.977332411594], [198, 251, 0, 8.83613182396122e-05, \n 0.012687819608389479, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.0207499483], [112, 143, 0, 9.049653833033241e-05, \n 0.012994416294241841, 3423.0, 3423.0, 3423.0, 0, 1, 1, -360, \n 1.04541601079], [2, 490, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [5, 491, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0, \n 1, -360, 360], [10, 492, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [12, 493, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [13, 494, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [15, 495, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [18, 496, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [20, 497, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [22, 498, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, 
-360, 360], [24, 499, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [26, 500, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [30, 501, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [32, 502, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [37, 503, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [42, 504, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [46, 505, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [52, 506, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [56, 507, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [61, 508, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [68, 509, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [69, 510, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [74, 511, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [78, 512, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [86, 513, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [87, 514, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [94, 515, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [95, 516, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [96, 517, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [99, 518, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [100, 519, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [104, 520, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [105, 521, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [106, 522, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [107, 523, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [117, 524, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [120, 525, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 
0,\n 1, -360, 360], [123, 526, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [124, 527, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [125, 528, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [128, 529, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [129, 530, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [138, 531, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [143, 532, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [156, 533, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [157, 534, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [159, 535, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [160, 536, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [165, 537, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [184, 538, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [191, 539, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [195, 540, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [201, 541, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [220, 542, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [231, 543, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [232, 544, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [233, 545, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [236, 546, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [245, 547, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [246, 548, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [248, 549, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [249, 550, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [250, 551, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [259, 552, 0, 0.005, 0.0, 
2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [261, 553, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [262, 554, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [265, 555, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [270, 556, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [277, 557, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [279, 558, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [280, 559, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [290, 560, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [301, 561, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [305, 562, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [306, 563, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [310, 564, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [313, 565, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [315, 566, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [320, 567, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [330, 568, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [332, 569, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [334, 570, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [336, 571, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [349, 572, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [351, 573, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [358, 574, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [360, 575, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [380, 576, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [382, 577, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [383, 578, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], 
[389, 579, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [401, 580, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [402, 581, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [409, 582, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [415, 583, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [444, 584, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360], [452, 585, 0, 0.005, 0.0, 2000.0, 2000.0, 2000.0, 1.0, 0,\n 1, -360, 360]])\n', (265880, 436452), False, 'from numpy import array\n'), ((458249, 550497), 'numpy.array', 'array', (['[[586, 1, 0.08658028904199107, 4.329014452099554, 0, 0, 0], [589, 1, \n 0.010042676909098597, 0.5021338454549299, 0, 0, 0], [590, 1, \n 0.012095775674984046, 0.6047887837492023, 0, 0, 0], [593, 1, \n 0.0017666198683200384, 0.08833099341600192, 0, 0, 0], [594, 1, \n 0.006047887837492023, 0.30239439187460115, 0, 0, 0], [595, 1, \n 1.50560576164933, 75.2802880824665, 0, 0, 0], [597, 1, \n 0.030239439187460113, 1.5119719593730057, 0, 0, 0], [598, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [599, 1, \n 0.0029602819415092537, 0.1480140970754627, 0, 0, 0], [600, 1, \n 0.005379437076506062, 0.26897185382530314, 0, 0, 0], [601, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [602, 1, \n 0.007830423200121252, 0.39152116000606263, 0, 0, 0], [603, 1, \n 1.0997606567649967, 54.98803283824984, 0, 0, 0], [607, 1, \n 0.5729577951308232, 28.64788975654116, 0, 0, 0], [608, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [609, 1, \n 0.0057932399285449895, 0.2896619964272495, 0, 0, 0], [610, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [612, 1, \n 0.00954929658551372, 0.477464829275686, 0, 0, 0], [613, 1, \n 0.027056340325622208, 1.3528170162811104, 0, 0, 0], [614, 1, \n 0.00954929658551372, 0.477464829275686, 0, 0, 0], [616, 1, \n 0.0046154933496649645, 0.23077466748324824, 0, 0, 0], [617, 1, \n 0.04360845440717932, 
2.1804227203589663, 0, 0, 0], [618, 1, \n 0.010631550198538607, 0.5315775099269304, 0, 0, 0], [619, 1, \n 0.037560566569687294, 1.8780283284843649, 0, 0, 0], [621, 1, \n 0.24350706293059987, 12.175353146529993, 0, 0, 0], [623, 1, \n 0.2419155134996809, 12.095775674984045, 0, 0, 0], [624, 1, \n 0.004297183463481174, 0.21485917317405873, 0, 0, 0], [628, 1, \n 0.14292113889652203, 7.1460569448261015, 0, 0, 0], [629, 1, \n 0.023968734429639437, 1.198436721481972, 0, 0, 0], [631, 1, \n 0.025401128917466494, 1.2700564458733248, 0, 0, 0], [632, 1, \n 0.01435577586688896, 0.717788793344448, 0, 0, 0], [637, 1, \n 0.017093240888069558, 0.854662044403478, 0, 0, 0], [638, 1, \n 0.02048324117592693, 1.0241620587963465, 0, 0, 0], [639, 1, \n 0.005029296201703893, 0.25146481008519467, 0, 0, 0], [640, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [641, 1, \n 0.0040107045659157625, 0.20053522829578813, 0, 0, 0], [642, 1, \n 0.00919915571071155, 0.4599577855355775, 0, 0, 0], [643, 1, \n 0.27279157245950864, 13.639578622975431, 0, 0, 0], [646, 1, \n 0.03278591827693044, 1.6392959138465222, 0, 0, 0], [647, 1, \n 0.00445633840657307, 0.2228169203286535, 0, 0, 0], [650, 1, \n 0.4216014442504307, 21.080072212521536, 0, 0, 0], [652, 1, \n 0.00746436683100989, 0.37321834155049455, 0, 0, 0], [655, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [657, 1, \n 0.012095775674984046, 0.6047887837492023, 0, 0, 0], [658, 1, \n 0.030239439187460113, 1.5119719593730057, 0, 0, 0], [661, 1, \n 0.010408733278209955, 0.5204366639104978, 0, 0, 0], [662, 1, \n 0.002928450952890874, 0.1464225476445437, 0, 0, 0], [663, 1, \n 0.00238732414637843, 0.1193662073189215, 0, 0, 0], [666, 1, \n 0.00919915571071155, 0.4599577855355775, 0, 0, 0], [668, 1, \n 0.24382537281678363, 12.191268640839182, 0, 0, 0], [670, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [672, 1, \n 0.010536057232683471, 0.5268028616341736, 0, 0, 0], [675, 1, \n 0.0033740847935481814, 0.16870423967740908, 0, 0, 
0], [676, 1, \n 0.11777465788800255, 5.888732894400127, 0, 0, 0], [678, 1, \n 0.3237211542489151, 16.186057712445756, 0, 0, 0], [679, 1, \n 0.2212253708977345, 11.061268544886726, 0, 0, 0], [681, 1, \n 0.0063821132179850025, 0.31910566089925013, 0, 0, 0], [683, 1, \n 0.008753521870054244, 0.4376760935027122, 0, 0, 0], [687, 1, \n 0.42303383873825773, 21.151691936912886, 0, 0, 0], [689, 1, \n 0.09867606471697511, 4.933803235848756, 0, 0, 0], [691, 1, \n 0.008276057040778557, 0.4138028520389279, 0, 0, 0], [693, 1, \n 0.06175211791965539, 3.0876058959827692, 0, 0, 0], [694, 1, \n 0.005220282133414166, 0.2610141066707083, 0, 0, 0], [695, 1, \n 0.004679155326901723, 0.23395776634508614, 0, 0, 0], [696, 1, \n 0.22950142793851305, 11.475071396925653, 0, 0, 0], [697, 1, \n 0.0036923946797319715, 0.1846197339865986, 0, 0, 0], [698, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [701, 1, \n 0.015024226627874922, 0.7512113313937461, 0, 0, 0], [702, 1, \n 0.023363945645890238, 1.168197282294512, 0, 0, 0], [704, 1, \n 0.16170142218136566, 8.085071109068283, 0, 0, 0], [705, 1, \n 0.005411268065124442, 0.27056340325622213, 0, 0, 0], [707, 1, \n 0.010822536130248884, 0.5411268065124443, 0, 0, 0], [708, 1, \n 0.0024828171122335675, 0.12414085561167837, 0, 0, 0], [711, 1, \n 0.056054370956965534, 2.802718547848277, 0, 0, 0], [713, 1, \n 0.004265352474862795, 0.21326762374313976, 0, 0, 0], [714, 1, \n 0.00477464829275686, 0.238732414637843, 0, 0, 0], [716, 1, \n 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0], [717, 1, \n 0.0017507043740108488, 0.08753521870054244, 0, 0, 0], [719, 1, \n 0.623250757147862, 31.162537857393104, 0, 0, 0], [722, 1, \n 0.006589014644004467, 0.3294507322002233, 0, 0, 0], [723, 1, \n 0.006270704757820675, 0.31353523789103377, 0, 0, 0], [724, 1, \n 0.0019257748114119334, 0.09628874057059668, 0, 0, 0], [725, 1, \n 0.25464790894703254, 12.732395447351628, 0, 0, 0], [727, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [728, 1, \n 
0.16233804195373325, 8.116902097686662, 0, 0, 0], [730, 1, \n 0.10077690996578814, 5.038845498289407, 0, 0, 0], [731, 1, \n 0.2848873481344926, 14.244367406724633, 0, 0, 0], [732, 1, \n 0.004647324338283344, 0.2323662169141672, 0, 0, 0], [733, 1, \n 0.12624170086049138, 6.312085043024569, 0, 0, 0], [735, 1, \n 0.013496339174192726, 0.6748169587096363, 0, 0, 0], [737, 1, \n 0.00891267681314614, 0.445633840657307, 0, 0, 0], [738, 1, \n 0.04408591923645501, 2.2042959618227504, 0, 0, 0], [739, 1, \n 0.01906676218240906, 0.9533381091204531, 0, 0, 0], [741, 1, \n 0.0340591578216656, 1.7029578910832803, 0, 0, 0], [742, 1, \n 0.0028647889756541157, 0.14323944878270578, 0, 0, 0], [743, 1, \n 0.44881693951914486, 22.440846975957243, 0, 0, 0], [745, 1, \n 0.013369015219719208, 0.6684507609859605, 0, 0, 0], [746, 1, \n 0.03183098861837907, 1.5915494309189535, 0, 0, 0], [747, 1, \n 0.0039788735772973835, 0.1989436788648692, 0, 0, 0], [748, 1, \n 0.03501408748021698, 1.7507043740108488, 0, 0, 0], [749, 1, \n 0.0025464790894703256, 0.12732395447351627, 0, 0, 0], [750, 1, \n 0.028902537665488188, 1.4451268832744095, 0, 0, 0], [753, 1, \n 0.049624511256052974, 2.4812255628026487, 0, 0, 0], [758, 1, \n 0.0058887328944001276, 0.2944366447200064, 0, 0, 0], [760, 1, \n 0.2527380496299298, 12.636902481496492, 0, 0, 0], [761, 1, \n 0.004997465213085514, 0.2498732606542757, 0, 0, 0], [762, 1, \n 0.3517324242330887, 17.586621211654435, 0, 0, 0], [763, 1, \n 0.006461690689530951, 0.32308453447654756, 0, 0, 0], [765, 1, \n 0.018780283284843647, 0.9390141642421824, 0, 0, 0], [767, 1, \n 0.0035650707252584553, 0.17825353626292276, 0, 0, 0], [769, 1, \n 0.013782818071758136, 0.6891409035879068, 0, 0, 0], [771, 1, \n 0.21963382146681557, 10.981691073340778, 0, 0, 0], [772, 1, \n 0.002992112930127632, 0.1496056465063816, 0, 0, 0], [774, 1, \n 0.010663381187156987, 0.5331690593578494, 0, 0, 0], [776, 1, \n 0.01782535362629228, 0.891267681314614, 0, 0, 0], [777, 1, \n 0.012573240504259732, 
0.6286620252129866, 0, 0, 0], [778, 1, \n 0.004679155326901723, 0.23395776634508614, 0, 0, 0], [781, 1, \n 0.4169859509007658, 20.84929754503829, 0, 0, 0], [784, 1, \n 0.4058451048843331, 20.292255244216655, 0, 0, 0], [785, 1, \n 0.00047746482927568597, 0.0238732414637843, 0, 0, 0], [787, 1, \n 0.24764509145098912, 12.382254572549456, 0, 0, 0], [788, 1, \n 0.2785211504108168, 13.926057520540843, 0, 0, 0], [789, 1, \n 0.0123185925953127, 0.615929629765635, 0, 0, 0], [790, 1, \n 0.02412788937273133, 1.2063944686365666, 0, 0, 0], [791, 1, \n 0.0031830988618379067, 0.15915494309189535, 0, 0, 0], [792, 1, \n 0.009979014931861837, 0.49895074659309185, 0, 0, 0], [795, 1, \n 0.004329014452099553, 0.2164507226049777, 0, 0, 0], [798, 1, \n 0.10179550160157626, 5.089775080078813, 0, 0, 0], [800, 1, \n 0.0058091554228541795, 0.290457771142709, 0, 0, 0], [801, 1, \n 0.007957747154594767, 0.3978873577297384, 0, 0, 0], [802, 1, \n 0.07957747154594767, 3.9788735772973833, 0, 0, 0], [805, 1, \n 0.44881693951914486, 22.440846975957243, 0, 0, 0], [806, 1, \n 0.005697746962689853, 0.2848873481344927, 0, 0, 0], [808, 1, \n 0.034616200122487235, 1.7308100061243619, 0, 0, 0], [809, 1, \n 0.0039788735772973835, 0.1989436788648692, 0, 0, 0], [810, 1, \n 0.03116253785739311, 1.5581268928696554, 0, 0, 0], [811, 1, \n 0.0040107045659157625, 0.20053522829578813, 0, 0, 0], [814, 1, \n 0.014164789935178685, 0.7082394967589343, 0, 0, 0], [815, 1, \n 0.004265352474862795, 0.21326762374313976, 0, 0, 0], [816, 1, \n 0.012748310941660816, 0.6374155470830408, 0, 0, 0], [817, 1, \n 0.017188733853924696, 0.8594366926962349, 0, 0, 0], [818, 1, \n 0.24096058384112953, 12.048029192056477, 0, 0, 0], [821, 1, \n 0.013130282805081364, 0.6565141402540683, 0, 0, 0], [822, 1, \n 0.04265352474862795, 2.1326762374313977, 0, 0, 0], [825, 1, \n 0.013591832140047864, 0.6795916070023932, 0, 0, 0], [826, 1, \n 0.018461973398659858, 0.9230986699329929, 0, 0, 0], [829, 1, \n 0.06716338598477982, 3.3581692992389915, 0, 0, 
0], [830, 1, \n 0.02832957987035737, 1.4164789935178685, 0, 0, 0], [833, 1, \n 0.0059205638830185075, 0.2960281941509254, 0, 0, 0], [834, 1, \n 0.007416620348082323, 0.37083101740411617, 0, 0, 0], [835, 1, \n 0.010138169874953733, 0.5069084937476867, 0, 0, 0], [836, 1, \n 0.008116902097686661, 0.4058451048843331, 0, 0, 0], [837, 1, \n 0.15024226627874918, 7.512113313937459, 0, 0, 0], [839, 1, \n 0.011666057328635928, 0.5833028664317964, 0, 0, 0], [840, 1, \n 0.4427690516816528, 22.138452584082643, 0, 0, 0], [841, 1, \n 0.0037083101740411615, 0.18541550870205808, 0, 0, 0], [842, 1, \n 0.17204649348233886, 8.602324674116945, 0, 0, 0], [843, 1, \n 0.10599719209920229, 5.2998596049601145, 0, 0, 0], [844, 1, \n 0.012732395447351627, 0.6366197723675814, 0, 0, 0], [845, 1, \n 0.10122254380644544, 5.061127190322272, 0, 0, 0], [847, 1, \n 0.08912676813146139, 4.45633840657307, 0, 0, 0], [848, 1, \n 0.013369015219719208, 0.6684507609859605, 0, 0, 0], [849, 1, \n 0.24796340133717296, 12.398170066858649, 0, 0, 0], [850, 1, \n 0.005092958178940651, 0.25464790894703254, 0, 0, 0], [851, 1, \n 0.01265281797580568, 0.632640898790284, 0, 0, 0], [852, 1, \n 0.005092958178940651, 0.25464790894703254, 0, 0, 0], [853, 1, \n 0.0036923946797319715, 0.1846197339865986, 0, 0, 0], [854, 1, \n 0.026037748689834075, 1.3018874344917037, 0, 0, 0], [855, 1, \n 0.21899720169444797, 10.949860084722399, 0, 0, 0], [856, 1, \n 0.011459155902616463, 0.5729577951308231, 0, 0, 0], [857, 1, \n 0.4462704604296745, 22.313523021483725, 0, 0, 0], [858, 1, \n 0.01808000153523931, 0.9040000767619655, 0, 0, 0], [859, 1, \n 0.027056340325622208, 1.3528170162811104, 0, 0, 0], [860, 1, \n 0.0039788735772973835, 0.1989436788648692, 0, 0, 0], [862, 1, \n 0.23077466748324824, 11.538733374162412, 0, 0, 0], [863, 1, \n 0.0001909859317102744, 0.00954929658551372, 0, 0, 0], [864, 1, \n 0.2785211504108168, 13.926057520540843, 0, 0, 0], [865, 1, \n 0.0035014087480216977, 0.17507043740108488, 0, 0, 0], [867, 1, \n 
0.24478030247533505, 12.239015123766753, 0, 0, 0], [869, 1, \n 0.4329014452099553, 21.645072260497766, 0, 0, 0], [870, 1, \n 0.018589297353133374, 0.9294648676566688, 0, 0, 0], [872, 1, \n 0.00716197243913529, 0.3580986219567645, 0, 0, 0], [873, 1, \n 0.038833806114422456, 1.941690305721123, 0, 0, 0], [874, 1, \n 0.006589014644004467, 0.3294507322002233, 0, 0, 0], [875, 1, \n 0.007766761222884492, 0.38833806114422464, 0, 0, 0], [877, 1, \n 0.007894085177358009, 0.39470425886790045, 0, 0, 0], [881, 1, \n 0.3187236890358296, 15.93618445179148, 0, 0, 0], [882, 1, \n 0.005538592019597957, 0.2769296009798979, 0, 0, 0], [883, 1, \n 0.005729577951308231, 0.28647889756541156, 0, 0, 0], [886, 1, \n 0.8186930272647096, 40.93465136323548, 0, 0, 0], [889, 1, \n 0.0030239439187460114, 0.15119719593730058, 0, 0, 0], [890, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [893, 1, \n 0.00954929658551372, 0.477464829275686, 0, 0, 0], [894, 1, \n 0.025146481008519465, 1.2573240504259733, 0, 0, 0], [895, 1, \n 0.0030239439187460114, 0.15119719593730058, 0, 0, 0], [896, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [898, 1, \n 0.013464508185574344, 0.6732254092787172, 0, 0, 0], [900, 1, \n 0.03584169318429482, 1.7920846592147412, 0, 0, 0], [902, 1, \n 0.006207042780583919, 0.31035213902919595, 0, 0, 0], [903, 1, \n 0.0031990143561470966, 0.15995071780735484, 0, 0, 0], [905, 1, \n 0.021851973686517232, 1.0925986843258617, 0, 0, 0], [907, 1, \n 0.02142225534016911, 1.0711127670084555, 0, 0, 0], [909, 1, \n 0.005856901905781748, 0.2928450952890874, 0, 0, 0], [911, 1, \n 0.09183240216402361, 4.59162010820118, 0, 0, 0], [913, 1, \n 0.02355493157760051, 1.1777465788800257, 0, 0, 0], [914, 1, \n 0.03568253824120294, 1.7841269120601468, 0, 0, 0], [915, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [916, 1, \n 0.06238873769202297, 3.119436884601149, 0, 0, 0], [917, 1, \n 0.005411268065124442, 0.27056340325622213, 0, 0, 0], [918, 1, \n 0.012254930618075942, 
0.612746530903797, 0, 0, 0], [919, 1, \n 0.004965634224467135, 0.24828171122335674, 0, 0, 0], [920, 1, \n 0.0020371832715762603, 0.10185916357881303, 0, 0, 0], [921, 1, \n 0.019735212943395024, 0.9867606471697512, 0, 0, 0], [922, 1, \n 0.05220282133414166, 2.6101410667070835, 0, 0, 0], [923, 1, \n 0.023236621691416718, 1.161831084570836, 0, 0, 0], [925, 1, \n 0.008276057040778557, 0.4138028520389279, 0, 0, 0], [928, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [931, 1, \n 0.03455253814525047, 1.7276269072625237, 0, 0, 0], [934, 1, \n 0.09421972631040204, 4.710986315520103, 0, 0, 0], [935, 1, \n 0.007352958370845565, 0.36764791854227824, 0, 0, 0], [936, 1, \n 0.016615776058793875, 0.8307888029396938, 0, 0, 0], [937, 1, \n 0.00477464829275686, 0.238732414637843, 0, 0, 0], [939, 1, \n 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0], [940, 1, \n 0.009421972631040205, 0.47109863155201026, 0, 0, 0], [942, 1, \n 0.016520283092938737, 0.8260141546469368, 0, 0, 0], [943, 1, \n 0.021103945453985317, 1.055197272699266, 0, 0, 0], [944, 1, \n 0.004042535554534142, 0.2021267777267071, 0, 0, 0], [945, 1, \n 0.011140846016432674, 0.5570423008216338, 0, 0, 0], [946, 1, \n 0.025464790894703253, 1.2732395447351628, 0, 0, 0], [948, 1, \n 0.025146481008519465, 1.2573240504259733, 0, 0, 0], [950, 1, \n 0.005092958178940651, 0.25464790894703254, 0, 0, 0], [951, 1, \n 0.14132958946560306, 7.066479473280154, 0, 0, 0], [952, 1, \n 0.005045211696013082, 0.2522605848006541, 0, 0, 0], [956, 1, \n 0.020690142601946394, 1.0345071300973196, 0, 0, 0], [957, 1, \n 0.0019098593171027439, 0.0954929658551372, 0, 0, 0], [958, 1, \n 0.010615634704229418, 0.530781735211471, 0, 0, 0], [959, 1, \n 0.007241549910681238, 0.3620774955340619, 0, 0, 0], [960, 1, \n 0.004217605991935227, 0.21088029959676136, 0, 0, 0], [963, 1, \n 0.2785211504108168, 13.926057520540843, 0, 0, 0], [965, 1, \n 0.11204507993669433, 5.602253996834716, 0, 0, 0], [966, 1, \n 0.021008452488130186, 
1.0504226244065094, 0, 0, 0], [967, 1, \n 0.01193662073189215, 0.5968310365946076, 0, 0, 0], [968, 1, \n 0.017188733853924696, 0.8594366926962349, 0, 0, 0], [969, 1, \n 0.018111832523857688, 0.9055916261928845, 0, 0, 0], [971, 1, \n 0.0031830988618379067, 0.15915494309189535, 0, 0, 0], [973, 1, \n 0.4287634166895661, 21.438170834478306, 0, 0, 0], [976, 1, \n 0.008562535938343968, 0.4281267969171984, 0, 0, 0], [977, 1, \n 0.1031324031235482, 5.15662015617741, 0, 0, 0], [978, 1, \n 0.0007321127382227185, 0.03660563691113593, 0, 0, 0], [980, 1, \n 0.11140846016432673, 5.570423008216337, 0, 0, 0], [981, 1, \n 0.03787887645587108, 1.8939438227935543, 0, 0, 0], [982, 1, \n 0.0015756339366097638, 0.07878169683048819, 0, 0, 0], [983, 1, \n 0.01400563499208679, 0.7002817496043395, 0, 0, 0], [984, 1, \n 0.14801409707546268, 7.400704853773133, 0, 0, 0], [985, 1, \n 0.0035014087480216977, 0.17507043740108488, 0, 0, 0], [986, 1, \n 0.0017825353626292277, 0.08912676813146138, 0, 0, 0], [987, 1, \n 0.02618098813861678, 1.3090494069308392, 0, 0, 0], [988, 1, \n 0.0008116902097686662, 0.04058451048843331, 0, 0, 0], [990, 1, \n 0.0954929658551372, 4.7746482927568605, 0, 0, 0], [993, 1, \n 0.06238873769202297, 3.119436884601149, 0, 0, 0], [994, 1, \n 0.010504226244065093, 0.5252113122032547, 0, 0, 0], [995, 1, \n 0.0006684507609859605, 0.033422538049298026, 0, 0, 0], [996, 1, \n 0.003660563691113593, 0.18302818455567965, 0, 0, 0], [997, 1, \n 0.005984225860255264, 0.2992112930127632, 0, 0, 0], [998, 1, \n 0.13464508185574348, 6.732254092787174, 0, 0, 0], [999, 1, \n 0.004965634224467135, 0.24828171122335674, 0, 0, 0], [1000, 1, \n 0.015597184423005743, 0.7798592211502873, 0, 0, 0], [1002, 1, \n 0.0031512678732195276, 0.15756339366097638, 0, 0, 0], [1003, 1, \n 0.2864788975654116, 14.32394487827058, 0, 0, 0], [1006, 1, \n 0.038833806114422456, 1.941690305721123, 0, 0, 0], [1007, 1, \n 0.007416620348082323, 0.37083101740411617, 0, 0, 0], [1008, 1, \n 0.015597184423005743, 
0.7798592211502873, 0, 0, 0], [1010, 1, \n 0.238732414637843, 11.93662073189215, 0, 0, 0], [1011, 1, \n 0.005952394871636886, 0.2976197435818443, 0, 0, 0], [1012, 1, \n 0.9024085273310466, 45.12042636655233, 0, 0, 0], [1014, 1, \n 0.238732414637843, 11.93662073189215, 0, 0, 0], [1018, 1, \n 0.05599070897972878, 2.7995354489864392, 0, 0, 0], [1019, 1, \n 0.03819718634205488, 1.909859317102744, 0, 0, 0], [1023, 1, \n 6.366197723675813e-05, 0.003183098861837907, 0, 0, 0], [1025, 1, \n 0.03616000307047862, 1.808000153523931, 0, 0, 0], [1026, 1, \n 0.20868396138209316, 10.434198069104658, 0, 0, 0], [1028, 2, \n 0.025464790894703257, 1.273239544735163, 0, 0, 0], [1029, 2, \n 0.003819718634205488, 0.19098593171027442, 0, 0, 0], [1030, 2, \n 0.06480789282701978, 3.2403946413509894, 0, 0, 0], [1031, 2, \n 0.0921316134570364, 4.60658067285182, 0, 0, 0], [1032, 2, \n 0.009772775025341927, 0.4886387512670964, 0, 0, 0], [1033, 2, \n 0.0015543376717485793, 0.07771688358742897, 0, 0, 0], [1034, 2, \n 0.005364335122251813, 0.26821675611259066, 0, 0, 0], [1035, 3, \n 0.00317587127473044, 0.158793563736522, 2.22, 61.69, 0.004502], [1036, \n 2, 0.003538471088451239, 0.17692355442256197, 0, 0, 0], [1037, 2, \n 0.0032845967867616726, 0.16422983933808363, 0, 0, 0], [1038, 2, \n 0.0035759833548530246, 0.17879916774265123, 0, 0, 0], [1039, 2, \n 0.0033678813297702355, 0.1683940664885118, 0, 0, 0], [1041, 2, \n 0.012998987840239671, 0.6499493920119837, 0, 0, 0], [1042, 2, \n 0.0013374224133557281, 0.0668711206677864, 0, 0, 0], [1044, 3, \n 0.0012140138945870601, 0.060700694729353, 2.22, 61.69, 0.004502], [1046,\n 2, 0.0032875263364469907, 0.16437631682234954, 0, 0, 0], [1047, 3, \n 0.0005212006415679155, 0.026060032078395773, 2.22, 61.69, 0.004502], [\n 1048, 2, 0.0022653377413018724, 0.11326688706509364, 0, 0, 0], [1049, 2,\n 0.01870104799381521, 0.9350523996907605, 0, 0, 0], [1050, 2, \n 0.0017161801534011875, 0.08580900767005938, 0, 0, 0], [1051, 2, \n 0.011268551438979963, 
0.5634275719489983, 0, 0, 0], [1052, 3, \n 0.001315809692296204, 0.06579048461481019, 2.22, 61.69, 0.004502], [\n 1053, 3, 0.001042024786453249, 0.05210123932266245, 2.22, 61.69, \n 0.004502], [1054, 2, 0.017434200209443074, 0.8717100104721537, 0, 0, 0],\n [1055, 3, 7.255367011902793e-05, 0.0036276835059513967, 2.22, 61.69, \n 0.004502], [1056, 2, 0.02185427247219657, 1.0927136236098287, 0, 0, 0],\n [1057, 2, 0.010956497647839606, 0.5478248823919804, 0, 0, 0], [1058, 2,\n 0.02761344248663413, 1.3806721243317066, 0, 0, 0], [1059, 2, \n 0.01272767318121002, 0.636383659060501, 0, 0, 0], [1060, 3, \n 0.0002750105502899529, 0.013750527514497644, 2.22, 61.69, 0.004502], [\n 1061, 2, 0.004862954432750976, 0.2431477216375488, 0, 0, 0], [1062, 3, \n 7.333747745020713e-05, 0.0036668738725103567, 2.22, 61.69, 0.004502], [\n 1063, 3, 0.00022007597509710681, 0.011003798754855342, 2.22, 61.69, \n 0.004502], [1064, 2, 0.013355424896304362, 0.667771244815218, 0, 0, 0],\n [1065, 2, 0.020654478247623165, 1.0327239123811582, 0, 0, 0], [1066, 2,\n 0.004269679264204669, 0.21348396321023344, 0, 0, 0], [1067, 3, \n 0.002078788013715776, 0.1039394006857888, 2.22, 61.69, 0.004502], [1068,\n 3, 0.00014512554313847776, 0.007256277156923888, 2.22, 61.69, 0.004502],\n [1069, 3, 0.00010143951295915809, 0.005071975647957905, 2.22, 61.69, \n 0.004502], [1070, 3, 2.3689278981581715e-05, 0.001184463949079086, 2.22,\n 61.69, 0.004502], [1071, 3, 0.00021315991932608, 0.010657995966304002, \n 2.22, 61.69, 0.004502], [1072, 2, 0.007168748144119091, \n 0.3584374072059546, 0, 0, 0], [1073, 2, 0.004954025493475761, \n 0.24770127467378808, 0, 0, 0], [1074, 2, 0.009778033156939965, \n 0.48890165784699824, 0, 0, 0], [1075, 3, 0.0009142432329184414, \n 0.04571216164592208, 2.22, 61.69, 0.004502], [1077, 3, \n 0.000761621711582911, 0.038081085579145545, 2.22, 61.69, 0.004502], [\n 1078, 3, 0.0010764248660874562, 0.05382124330437281, 2.22, 61.69, \n 0.004502], [1079, 2, 0.004604543003215469, 
0.23022715016077344, 0, 0, 0\n ], [1080, 2, 0.005216654256351391, 0.2608327128175696, 0, 0, 0], [1081,\n 2, 0.01643746145779033, 0.8218730728895166, 0, 0, 0], [1082, 2, \n 0.015076341350664345, 0.7538170675332174, 0, 0, 0], [1083, 2, \n 0.019983163198675734, 0.9991581599337868, 0, 0, 0], [1084, 2, \n 0.018855524406049307, 0.9427762203024654, 0, 0, 0], [1085, 2, \n 0.0037788529320756745, 0.1889426466037837, 0, 0, 0], [1086, 2, \n 0.006918625580223116, 0.34593127901115583, 0, 0, 0], [1087, 2, \n 0.0032275229191801595, 0.16137614595900798, 0, 0, 0], [1088, 3, \n 0.0009589741139576335, 0.04794870569788167, 2.22, 61.69, 0.004502], [\n 1089, 2, 0.009823983504007974, 0.49119917520039863, 0, 0, 0], [1090, 2,\n 0.005674885746854652, 0.2837442873427326, 0, 0, 0], [1091, 3, \n 0.001168793996530651, 0.05843969982653256, 2.22, 61.69, 0.004502], [\n 1092, 2, 0.0013687465331790676, 0.06843732665895338, 0, 0, 0], [1093, 2,\n 0.007017509546711356, 0.3508754773355678, 0, 0, 0], [1094, 3, \n 0.00014185080981113786, 0.0070925404905568925, 2.22, 61.69, 0.004502],\n [1095, 3, 7.71951382648268e-06, 0.000385975691324134, 2.22, 61.69, \n 0.004502], [1096, 2, 0.0029145237970444643, 0.14572618985222321, 0, 0, \n 0], [1097, 3, 0.0002728726471928731, 0.013643632359643654, 2.22, 61.69,\n 0.004502], [1098, 2, 0.004521623727146264, 0.22608118635731317, 0, 0, 0\n ], [1099, 2, 0.018521637260932335, 0.9260818630466169, 0, 0, 0], [1100,\n 3, 7.335549646801683e-07, 3.667774823400842e-05, 2.22, 61.69, 0.004502],\n [1101, 2, 0.0021341020267997028, 0.10670510133998513, 0, 0, 0], [1102, \n 2, 0.008936050319297435, 0.44680251596487175, 0, 0, 0], [1103, 2, \n 0.006751135880742038, 0.33755679403710187, 0, 0, 0], [1104, 3, \n 8.200597012001097e-06, 0.0004100298506000548, 2.22, 61.69, 0.004502], [\n 1105, 3, 7.430370821118754e-05, 0.003715185410559377, 2.22, 61.69, \n 0.004502], [1106, 3, 9.496706349756433e-05, 0.004748353174878216, 2.22,\n 61.69, 0.004502], [1107, 2, 0.002514754747681537, 0.12573773738407681, 
\n 0, 0, 0], [1108, 2, 0.010075472977677913, 0.5037736488838956, 0, 0, 0],\n [1109, 3, 2.3877174563372565e-05, 0.0011938587281686282, 2.22, 61.69, \n 0.004502], [1110, 3, 5.6797921539226925e-05, 0.0028398960769613463, \n 2.22, 61.69, 0.004502], [1111, 2, 0.0027876433772406257, \n 0.13938216886203128, 0, 0, 0], [1112, 2, 0.004265767031264296, \n 0.2132883515632148, 0, 0, 0], [1113, 3, 0.00022012925719619891, \n 0.011006462859809947, 2.22, 61.69, 0.004502], [1114, 3, \n 0.0008560555102861403, 0.042802775514307015, 2.22, 61.69, 0.004502], [\n 1115, 2, 0.0032197222090973076, 0.16098611045486538, 0, 0, 0], [1116, 3,\n 0.002075453185310181, 0.10377265926550905, 2.22, 61.69, 0.004502], [\n 1117, 2, 0.005780032679669937, 0.2890016339834969, 0, 0, 0], [1118, 3, \n 0.0004094636121064103, 0.02047318060532052, 2.22, 61.69, 0.004502], [\n 1119, 3, 0.0027536366373517632, 0.13768183186758817, 2.22, 61.69, \n 0.004502], [1120, 3, 0.00014563422679717648, 0.007281711339858825, 2.22,\n 61.69, 0.004502], [1121, 3, 3.4414977793908876e-05, \n 0.0017207488896954439, 2.22, 61.69, 0.004502], [1122, 3, \n 8.894132329422267e-05, 0.004447066164711133, 2.22, 61.69, 0.004502], [\n 1123, 3, 9.32225252447514e-05, 0.00466112626223757, 2.22, 61.69, \n 0.004502], [1124, 3, 8.201464578534214e-05, 0.004100732289267108, 2.22,\n 61.69, 0.004502], [1125, 3, 0.0009107448109473576, 0.04553724054736788,\n 2.22, 61.69, 0.004502], [1126, 3, 0.0010150413250921298, \n 0.050752066254606494, 2.22, 61.69, 0.004502], [1127, 2, \n 0.003587869493403156, 0.17939347467015782, 0, 0, 0], [1128, 3, \n 9.85754616930036e-05, 0.004928773084650179, 2.22, 61.69, 0.004502], [\n 1129, 3, 0.00015167785485332866, 0.0075838927426664345, 2.22, 61.69, \n 0.004502], [1130, 3, 4.313144137237104e-05, 0.0021565720686185525, 2.22,\n 61.69, 0.004502], [1131, 3, 9.338261111863579e-05, 0.00466913055593179,\n 2.22, 61.69, 0.004502], [1132, 3, 1.598304249187116e-05, \n 0.0007991521245935579, 2.22, 61.69, 0.004502], [1133, 3, \n 
4.5810964480308454e-05, 0.002290548224015423, 2.22, 61.69, 0.004502], [\n 1134, 3, 3.236913111220881e-05, 0.0016184565556104404, 2.22, 61.69, \n 0.004502], [1135, 3, 0.00030684246506199216, 0.01534212325309961, 2.22,\n 61.69, 0.004502], [1136, 3, 2.5636662405410735e-05, \n 0.0012818331202705368, 2.22, 61.69, 0.004502], [1137, 3, \n 0.00018370212263491662, 0.00918510613174583, 2.22, 61.69, 0.004502], [\n 1138, 3, 7.98498118498449e-05, 0.003992490592492246, 2.22, 61.69, \n 0.004502], [1139, 3, 0.0012225149594472903, 0.06112574797236452, 2.22, \n 61.69, 0.004502], [1140, 3, 0.0018073289497007397, 0.09036644748503699,\n 2.22, 61.69, 0.004502], [1141, 2, 0.005339291711123932, \n 0.2669645855561966, 0, 0, 0], [1142, 3, 7.73959943559724e-05, \n 0.00386979971779862, 2.22, 61.69, 0.004502], [1143, 3, \n 0.0009515158509821171, 0.04757579254910586, 2.22, 61.69, 0.004502], [\n 1144, 2, 0.00334399697192306, 0.16719984859615303, 0, 0, 0], [1145, 2, \n 0.011197481443497569, 0.5598740721748785, 0, 0, 0], [1146, 3, \n 5.4833151376821656e-05, 0.002741657568841083, 2.22, 61.69, 0.004502], [\n 1147, 3, 0.002909588342312674, 0.14547941711563372, 2.22, 61.69, \n 0.004502], [1148, 3, 0.0005993650905551883, 0.029968254527759416, 2.22,\n 61.69, 0.004502], [1149, 3, 0.00026672685204354104, \n 0.013336342602177052, 2.22, 61.69, 0.004502], [1150, 3, \n 0.0001204929064021154, 0.00602464532010577, 2.22, 61.69, 0.004502], [\n 1151, 3, 0.00043239573730817076, 0.021619786865408542, 2.22, 61.69, \n 0.004502], [1152, 3, 3.9796369738190234e-06, 0.0001989818486909512, \n 2.22, 61.69, 0.004502], [1153, 3, 2.543747302116541e-06, \n 0.00012718736510582707, 2.22, 61.69, 0.004502], [1154, 3, \n 5.939787701451754e-06, 0.00029698938507258764, 2.22, 61.69, 0.004502],\n [1155, 3, 2.0319819845729137e-05, 0.001015990992286457, 2.22, 61.69, \n 0.004502], [1156, 3, 0.0008888342953225629, 0.044441714766128154, 2.22,\n 61.69, 0.004502], [1157, 3, 0.00014449421139309436, \n 0.007224710569654718, 2.22, 61.69, 
0.004502], [1158, 3, \n 3.9344224255474475e-05, 0.001967211212773724, 2.22, 61.69, 0.004502], [\n 1159, 3, 0.0006423837433282069, 0.032119187166410344, 2.22, 61.69, \n 0.004502], [1160, 2, 0.006583846414473584, 0.3291923207236792, 0, 0, 0],\n [1161, 3, 0.0007639741440540192, 0.038198707202700966, 2.22, 61.69, \n 0.004502], [1162, 2, 0.012733717176428691, 0.6366858588214346, 0, 0, 0],\n [1164, 2, 0.007318959323231913, 0.3659479661615957, 0, 0, 0], [1166, 2,\n 0.005301588846150501, 0.26507944230752506, 0, 0, 0], [1167, 3, \n 0.0001907109190583028, 0.00953554595291514, 2.22, 61.69, 0.004502], [\n 1168, 3, 4.6735632418379986e-05, 0.0023367816209189994, 2.22, 61.69, \n 0.004502], [1169, 3, 8.929850730838101e-05, 0.004464925365419051, 2.22,\n 61.69, 0.004502], [1170, 3, 1.00233247146895e-05, 0.0005011662357344751,\n 2.22, 61.69, 0.004502], [1171, 3, 0.0004260194354054759, \n 0.021300971770273798, 2.22, 61.69, 0.004502], [1172, 3, \n 0.00011513389518096898, 0.005756694759048449, 2.22, 61.69, 0.004502], [\n 1173, 2, 0.006452614026547609, 0.32263070132738053, 0, 0, 0], [1174, 3,\n 4.754703790085141e-05, 0.00237735189504257, 2.22, 61.69, 0.004502], [\n 1175, 3, 2.7710161030475335e-05, 0.001385508051523767, 2.22, 61.69, \n 0.004502], [1176, 3, 7.75663051366249e-06, 0.0003878315256831245, 2.22,\n 61.69, 0.004502], [1177, 3, 0.0009447268553453907, 0.04723634276726953,\n 2.22, 61.69, 0.004502], [1178, 3, 0.0001088973020076013, \n 0.005444865100380065, 2.22, 61.69, 0.004502], [1179, 3, \n 3.969316682855094e-05, 0.001984658341427547, 2.22, 61.69, 0.004502], [\n 1180, 3, 2.5956634148895864e-05, 0.0012978317074447932, 2.22, 61.69, \n 0.004502], [1181, 2, 0.00545834972439398, 0.272917486219699, 0, 0, 0],\n [1182, 2, 0.006322880792722177, 0.3161440396361089, 0, 0, 0], [1183, 3,\n 0.0014314935186861295, 0.07157467593430648, 2.22, 61.69, 0.004502], [\n 1184, 3, 0.00015810533075432708, 0.007905266537716353, 2.22, 61.69, \n 0.004502], [1185, 3, 0.0006974320121398697, 0.034871600606993486, 
2.22,\n 61.69, 0.004502], [1186, 3, 0.0012771847490467955, 0.06385923745233978,\n 2.22, 61.69, 0.004502], [1187, 3, 0.0003086504024546428, \n 0.01543252012273214, 2.22, 61.69, 0.004502], [1188, 2, \n 0.011440868435801076, 0.5720434217900537, 0, 0, 0], [1189, 3, \n 0.0006752949613083114, 0.03376474806541557, 2.22, 61.69, 0.004502], [\n 1190, 2, 0.011056408319218359, 0.552820415960918, 0, 0, 0], [1191, 2, \n 0.004652379906159672, 0.23261899530798363, 0, 0, 0], [1192, 3, \n 0.0009482218539415114, 0.04741109269707557, 2.22, 61.69, 0.004502], [\n 1193, 3, 9.320005102883975e-05, 0.0046600025514419875, 2.22, 61.69, \n 0.004502], [1194, 3, 0.00033807612872480814, 0.016903806436240405, 2.22,\n 61.69, 0.004502], [1195, 3, 7.285440296486341e-06, \n 0.0003642720148243171, 2.22, 61.69, 0.004502], [1196, 2, \n 0.0040761948650300354, 0.20380974325150175, 0, 0, 0], [1197, 2, \n 0.0023095720666282643, 0.11547860333141323, 0, 0, 0], [1198, 3, \n 0.0016279886826880022, 0.08139943413440012, 2.22, 61.69, 0.004502], [\n 1199, 2, 0.012822920004466005, 0.6411460002233003, 0, 0, 0], [1200, 2, \n 0.0035658606694853635, 0.1782930334742682, 0, 0, 0], [1201, 3, \n 0.0007239107895971019, 0.03619553947985509, 2.22, 61.69, 0.004502], [\n 1202, 3, 0.00176071556288929, 0.0880357781444645, 2.22, 61.69, 0.004502\n ], [1203, 2, 0.0063796286094078974, 0.31898143047039484, 0, 0, 0], [\n 1204, 3, 0.0015802630524518553, 0.07901315262259277, 2.22, 61.69, \n 0.004502], [1205, 3, 1.3927092046315124e-05, 0.0006963546023157563, \n 2.22, 61.69, 0.004502], [1206, 3, 0.00015871592092437352, \n 0.007935796046218677, 2.22, 61.69, 0.004502], [1207, 3, \n 0.00013884952267018553, 0.006942476133509278, 2.22, 61.69, 0.004502], [\n 1208, 3, 7.055386967979429e-05, 0.0035276934839897148, 2.22, 61.69, \n 0.004502], [1209, 3, 3.2453994235092736e-05, 0.001622699711754637, 2.22,\n 61.69, 0.004502], [1210, 3, 0.0003259549620621221, 0.016297748103106108,\n 2.22, 61.69, 0.004502], [1211, 3, 0.0011462484513341364, \n 
0.057312422566706815, 2.22, 61.69, 0.004502], [1212, 2, \n 0.005804182676892941, 0.290209133844647, 0, 0, 0], [1213, 2, \n 0.0036505499187602444, 0.18252749593801224, 0, 0, 0], [1214, 3, \n 0.00019852168003620192, 0.009926084001810095, 2.22, 61.69, 0.004502], [\n 1215, 3, 7.81255594160887e-05, 0.003906277970804435, 2.22, 61.69, \n 0.004502], [1216, 2, 0.0021517677385590084, 0.10758838692795043, 0, 0, \n 0], [1217, 3, 0.001279974509378072, 0.0639987254689036, 2.22, 61.69, \n 0.004502], [1218, 3, 4.139664610366431e-05, 0.0020698323051832157, 2.22,\n 61.69, 0.004502], [1219, 3, 0.00042701347071105576, 0.02135067353555279,\n 2.22, 61.69, 0.004502], [1220, 3, 0.0010059882305525484, \n 0.050299411527627416, 2.22, 61.69, 0.004502], [1221, 2, \n 0.02105078881494917, 1.0525394407474586, 0, 0, 0], [1222, 2, \n 0.013436354905899806, 0.6718177452949904, 0, 0, 0], [1223, 3, \n 0.00024230393037435297, 0.01211519651871765, 2.22, 61.69, 0.004502], [\n 1224, 2, 0.006415271247382745, 0.3207635623691373, 0, 0, 0], [1225, 3, \n 0.0010196947606849961, 0.05098473803424981, 2.22, 61.69, 0.004502], [\n 1226, 3, 0.00011572498554223855, 0.005786249277111928, 2.22, 61.69, \n 0.004502], [1227, 3, 0.0010454325410475286, 0.05227162705237644, 2.22, \n 61.69, 0.004502], [1228, 3, 9.713499706791583e-05, 0.004856749853395792,\n 2.22, 61.69, 0.004502], [1229, 2, 0.0026494957954367885, \n 0.13247478977183944, 0, 0, 0], [1230, 3, 4.8238032843230984e-05, \n 0.002411901642161549, 2.22, 61.69, 0.004502], [1231, 3, \n 0.0010059686019705035, 0.05029843009852517, 2.22, 61.69, 0.004502], [\n 1232, 2, 0.002228131222721375, 0.11140656113606878, 0, 0, 0], [1233, 2,\n 0.03662908231521014, 1.831454115760507, 0, 0, 0], [1234, 2, \n 0.0064387341725816285, 0.32193670862908147, 0, 0, 0], [1235, 3, \n 0.0002292223612393676, 0.01146111806196838, 2.22, 61.69, 0.004502], [\n 1236, 2, 0.0020851258089392244, 0.10425629044696123, 0, 0, 0], [1237, 3,\n 0.0009298092078685558, 0.04649046039342779, 2.22, 61.69, 0.004502], [\n 
1238, 2, 0.00642623738699833, 0.3213118693499165, 0, 0, 0], [1239, 3, \n 0.0001443666373276477, 0.007218331866382386, 2.22, 61.69, 0.004502], [\n 1240, 2, 0.02037573875130283, 1.0187869375651415, 0, 0, 0], [1241, 2, \n 0.010972960615224547, 0.5486480307612274, 0, 0, 0], [1242, 3, \n 0.0008355662499393597, 0.041778312496967986, 2.22, 61.69, 0.004502], [\n 1243, 2, 0.0027276591752610937, 0.1363829587630547, 0, 0, 0], [1244, 2,\n 0.020592901244747865, 1.0296450622373932, 0, 0, 0], [1245, 3, \n 0.00023503888700973188, 0.011751944350486595, 2.22, 61.69, 0.004502], [\n 1246, 2, 0.003636870278584459, 0.18184351392922293, 0, 0, 0], [1247, 3,\n 0.0013899571448864774, 0.06949785724432388, 2.22, 61.69, 0.004502], [\n 1248, 2, 0.004527446475069785, 0.22637232375348926, 0, 0, 0], [1249, 2,\n 0.0021092345113500805, 0.10546172556750404, 0, 0, 0], [1250, 3, \n 0.000876926339333997, 0.04384631696669984, 2.22, 61.69, 0.004502], [\n 1251, 3, 0.0008805328097855692, 0.044026640489278464, 2.22, 61.69, \n 0.004502], [1252, 3, 0.0006440660331426705, 0.032203301657133525, 2.22,\n 61.69, 0.004502], [1253, 2, 0.004106369053307717, 0.20531845266538587, \n 0, 0, 0], [1254, 2, 0.005238024431161238, 0.2619012215580619, 0, 0, 0],\n [1255, 3, 0.00023250233000853782, 0.01162511650042689, 2.22, 61.69, \n 0.004502], [1256, 3, 0.0009607764830526361, 0.048038824152631804, 2.22,\n 61.69, 0.004502], [1257, 2, 0.005662916214121937, 0.28314581070609685, \n 0, 0, 0], [1258, 2, 0.014991588973313675, 0.7495794486656838, 0, 0, 0],\n [1259, 2, 0.00695753592752513, 0.34787679637625657, 0, 0, 0], [1260, 3,\n 0.000590177310330468, 0.0295088655165234, 2.22, 61.69, 0.004502], [1261,\n 2, 0.0065104902868619585, 0.3255245143430979, 0, 0, 0], [1262, 3, \n 2.3902123196900468e-05, 0.0011951061598450233, 2.22, 61.69, 0.004502],\n [1263, 3, 1.7811428520856433e-05, 0.0008905714260428216, 2.22, 61.69, \n 0.004502], [1264, 2, 0.0033780757704728456, 0.1689037885236423, 0, 0, 0\n ], [1265, 3, 0.0003085654478954214, 
0.015428272394771068, 2.22, 61.69, \n 0.004502], [1266, 2, 0.006508243779623651, 0.3254121889811826, 0, 0, 0],\n [1267, 3, 0.0011818165946297665, 0.05909082973148832, 2.22, 61.69, \n 0.004502], [1270, 3, 0.0013856435479358959, 0.06928217739679479, 2.22, \n 61.69, 0.004502], [1271, 3, 0.0014840987910167424, 0.07420493955083712,\n 2.22, 61.69, 0.004502], [1272, 3, 4.931888796058019e-05, \n 0.00246594439802901, 2.22, 61.69, 0.004502], [1273, 3, \n 0.00012918225610620136, 0.006459112805310069, 2.22, 61.69, 0.004502], [\n 1274, 2, 0.002007808497835817, 0.10039042489179087, 0, 0, 0], [1275, 2,\n 0.003173827843694794, 0.1586913921847397, 0, 0, 0], [1276, 3, \n 0.0007211910038712903, 0.036059550193564514, 2.22, 61.69, 0.004502], [\n 1277, 2, 0.00187538099082149, 0.09376904954107451, 0, 0, 0], [1278, 2, \n 0.0052395364566005164, 0.2619768228300258, 0, 0, 0], [1279, 3, \n 1.1251600278965072e-07, 5.625800139482535e-06, 2.22, 61.69, 0.004502],\n [1280, 3, 1.694789540680769e-05, 0.0008473947703403845, 2.22, 61.69, \n 0.004502], [1282, 3, 0.00013160445621004433, 0.006580222810502218, 2.22,\n 61.69, 0.004502], [1283, 2, 0.03582020109680739, 1.7910100548403696, 0,\n 0, 0], [1284, 3, 0.001164025604385567, 0.058201280219278353, 2.22, \n 61.69, 0.004502], [1285, 3, 7.476034074798499e-05, \n 0.0037380170373992492, 2.22, 61.69, 0.004502], [1286, 3, \n 0.0008085504689103687, 0.04042752344551843, 2.22, 61.69, 0.004502], [\n 1287, 2, 0.0029583869971778567, 0.14791934985889282, 0, 0, 0], [1288, 2,\n 0.004222012491839328, 0.2111006245919664, 0, 0, 0], [1289, 2, \n 0.005576926941677767, 0.2788463470838884, 0, 0, 0], [1290, 3, \n 0.00016635371363986156, 0.008317685681993078, 2.22, 61.69, 0.004502], [\n 1291, 2, 0.0031745529736635094, 0.1587276486831755, 0, 0, 0], [1292, 3,\n 0.0015865361520825533, 0.07932680760412766, 2.22, 61.69, 0.004502], [\n 1293, 3, 6.53883586637161e-05, 0.003269417933185805, 2.22, 61.69, \n 0.004502], [1294, 3, 0.00013884615253373605, 0.006942307626686803, 2.22,\n 
61.69, 0.004502], [1295, 3, 0.00015342985152912175, \n 0.007671492576456088, 2.22, 61.69, 0.004502], [1296, 3, \n 0.0007760328429390742, 0.03880164214695372, 2.22, 61.69, 0.004502], [\n 1297, 2, 0.006086894248154212, 0.3043447124077106, 0, 0, 0], [1300, 3, \n 0.001511593201166196, 0.07557966005830981, 2.22, 61.69, 0.004502], [\n 1301, 2, 0.0038746782543149596, 0.193733912715748, 0, 0, 0], [1302, 3, \n 0.0003104985267932093, 0.015524926339660468, 2.22, 61.69, 0.004502], [\n 1303, 3, 0.00027600750632746427, 0.013800375316373212, 2.22, 61.69, \n 0.004502], [1304, 3, 0.000610793340517708, 0.030539667025885397, 2.22, \n 61.69, 0.004502], [1305, 3, 1.6012209452329225e-07, \n 8.006104726164614e-06, 2.22, 61.69, 0.004502], [1306, 3, \n 5.855304532138158e-05, 0.0029276522660690793, 2.22, 61.69, 0.004502], [\n 1307, 3, 1.9031130574577255e-05, 0.0009515565287288628, 2.22, 61.69, \n 0.004502], [1308, 3, 8.924254018516687e-05, 0.004462127009258345, 2.22,\n 61.69, 0.004502], [1309, 3, 9.599337069530822e-05, 0.004799668534765412,\n 2.22, 61.69, 0.004502], [1310, 3, 4.717144911466962e-05, \n 0.002358572455733481, 2.22, 61.69, 0.004502], [1311, 3, \n 0.000494670556881473, 0.024733527844073653, 2.22, 61.69, 0.004502], [\n 1312, 2, 0.011688306978695986, 0.5844153489347994, 0, 0, 0], [1313, 3, \n 0.0019631283227609974, 0.09815641613804986, 2.22, 61.69, 0.004502], [\n 1314, 3, 0.0007641975650906521, 0.038209878254532606, 2.22, 61.69, \n 0.004502], [1315, 3, 0.0005015944131679134, 0.02507972065839567, 2.22, \n 61.69, 0.004502], [1316, 3, 7.002675793369909e-05, \n 0.0035013378966849544, 2.22, 61.69, 0.004502], [1317, 3, \n 0.0007908894216365961, 0.039544471081829805, 2.22, 61.69, 0.004502], [\n 1318, 3, 5.6301925294159776e-05, 0.002815096264707989, 2.22, 61.69, \n 0.004502], [1319, 3, 0.0008405877558306301, 0.04202938779153151, 2.22, \n 61.69, 0.004502], [1320, 3, 0.0008231691710158349, 0.04115845855079175,\n 2.22, 61.69, 0.004502], [1321, 3, 6.721511097913718e-06, \n 
0.0003360755548956859, 2.22, 61.69, 0.004502], [1322, 3, \n 4.510903550142661e-05, 0.0022554517750713312, 2.22, 61.69, 0.004502], [\n 1323, 2, 0.012675857799799822, 0.6337928899899912, 0, 0, 0], [1324, 3, \n 0.0005501358559855778, 0.027506792799278885, 2.22, 61.69, 0.004502], [\n 1325, 2, 0.0029533893249704176, 0.14766946624852087, 0, 0, 0], [1326, 2,\n 0.0017553273040833693, 0.08776636520416847, 0, 0, 0], [1327, 2, \n 0.0017060005041489908, 0.08530002520744955, 0, 0, 0], [1328, 3, \n 0.0006537346009359085, 0.032686730046795426, 2.22, 61.69, 0.004502], [\n 1329, 2, 0.00793023382909983, 0.3965116914549916, 0, 0, 0], [1330, 3, \n 0.0019182008434651947, 0.09591004217325974, 2.22, 61.69, 0.004502], [\n 1331, 3, 1.2859395030416278e-05, 0.0006429697515208139, 2.22, 61.69, \n 0.004502], [1332, 3, 0.0006688404111922736, 0.03344202055961368, 2.22, \n 61.69, 0.004502], [1333, 3, 0.0019970167397866546, 0.09985083698933273,\n 2.22, 61.69, 0.004502], [1334, 3, 3.081793473501891e-05, \n 0.001540896736750946, 2.22, 61.69, 0.004502], [1336, 3, \n 0.0012612757957991489, 0.06306378978995744, 2.22, 61.69, 0.004502], [\n 1337, 2, 0.003207094686766897, 0.16035473433834485, 0, 0, 0], [1338, 3,\n 2.9972992477731713e-05, 0.0014986496238865857, 2.22, 61.69, 0.004502],\n [1339, 3, 0.00033310206544168424, 0.016655103272084214, 2.22, 61.69, \n 0.004502], [1340, 2, 0.0017807406464817902, 0.08903703232408952, 0, 0, \n 0], [1341, 2, 0.0060362713117726305, 0.3018135655886316, 0, 0, 0], [\n 1342, 3, 2.2718668528089703e-05, 0.0011359334264044853, 2.22, 61.69, \n 0.004502], [1343, 3, 2.8562833512248258e-05, 0.001428141675612413, 2.22,\n 61.69, 0.004502], [1344, 3, 8.141338105296074e-06, \n 0.0004070669052648037, 2.22, 61.69, 0.004502], [1345, 3, \n 0.00011633701914020801, 0.005816850957010401, 2.22, 61.69, 0.004502], [\n 1346, 2, 0.007061813430091215, 0.35309067150456075, 0, 0, 0], [1348, 3,\n 0.000978567012051048, 0.048928350602552406, 2.22, 61.69, 0.004502], [\n 1349, 3, 0.0014423210644570928, 
0.07211605322285465, 2.22, 61.69, \n 0.004502], [1350, 3, 5.238023081568273e-06, 0.0002619011540784137, 2.22,\n 61.69, 0.004502], [1351, 3, 4.1064133941603613e-07, \n 2.0532066970801804e-05, 2.22, 61.69, 0.004502], [1352, 3, \n 2.2066211271763273e-05, 0.0011033105635881637, 2.22, 61.69, 0.004502],\n [1355, 3, 4.8633739445049876e-05, 0.0024316869722524944, 2.22, 61.69, \n 0.004502], [1356, 2, 0.004176219204509461, 0.20881096022547305, 0, 0, 0\n ], [1357, 2, 0.0024790764561485362, 0.12395382280742683, 0, 0, 0], [\n 1358, 3, 7.127776476894326e-06, 0.00035638882384471626, 2.22, 61.69, \n 0.004502], [1359, 2, 0.0018980577612326096, 0.0949028880616305, 0, 0, 0\n ], [1360, 3, 0.00101350119837844, 0.050675059918922, 2.22, 61.69, \n 0.004502], [1361, 2, 0.0029249133090325724, 0.14624566545162862, 0, 0, \n 0], [1362, 2, 0.004182445633969954, 0.2091222816984977, 0, 0, 0], [1363,\n 3, 2.004955475366426e-06, 0.0001002477737683213, 2.22, 61.69, 0.004502],\n [1364, 3, 2.7595075243285495e-06, 0.00013797537621642746, 2.22, 61.69, \n 0.004502], [1365, 3, 2.8999446623259055e-08, 1.449972331162953e-06, \n 2.22, 61.69, 0.004502], [1366, 3, 3.1831901356432676e-05, \n 0.001591595067821634, 2.22, 61.69, 0.004502], [1367, 3, \n 0.0021429014821967973, 0.10714507410983987, 2.22, 61.69, 0.004502], [\n 1368, 3, 9.560516623724435e-05, 0.004780258311862218, 2.22, 61.69, \n 0.004502], [1369, 3, 0.00046204655219542516, 0.023102327609771257, 2.22,\n 61.69, 0.004502], [1370, 3, 1.0304608838582957e-05, \n 0.0005152304419291479, 2.22, 61.69, 0.004502], [1371, 2, \n 0.0022749567929977086, 0.11374783964988543, 0, 0, 0], [1372, 2, \n 0.0050082619833296356, 0.2504130991664818, 0, 0, 0], [1373, 3, \n 0.0010693151538022578, 0.05346575769011289, 2.22, 61.69, 0.004502], [\n 1374, 2, 0.006889508467327262, 0.3444754233663631, 0, 0, 0], [1375, 2, \n 0.003897629175102736, 0.1948814587551368, 0, 0, 0], [1376, 2, \n 0.007852128522530815, 0.39260642612654084, 0, 0, 0], [1377, 2, \n 0.006094764129655812, 
0.30473820648279065, 0, 0, 0], [1378, 2, \n 0.0062434108523654235, 0.3121705426182712, 0, 0, 0], [1379, 3, \n 3.0098190435426792e-05, 0.0015049095217713397, 2.22, 61.69, 0.004502],\n [1380, 3, 5.394520401513898e-05, 0.002697260200756949, 2.22, 61.69, \n 0.004502], [1381, 3, 3.680472218048895e-05, 0.001840236109024447, 2.22,\n 61.69, 0.004502], [1382, 2, 0.008838822964419164, 0.4419411482209583, 0,\n 0, 0], [1383, 2, 0.006991449967869686, 0.34957249839348425, 0, 0, 0], [\n 1384, 3, 0.0002870603107466644, 0.01435301553733322, 2.22, 61.69, \n 0.004502], [1385, 3, 4.602918986308876e-06, 0.00023014594931544384, \n 2.22, 61.69, 0.004502], [1386, 3, 2.5406083498023173e-05, \n 0.0012703041749011585, 2.22, 61.69, 0.004502], [1387, 3, \n 0.00011182192406483717, 0.0055910962032418585, 2.22, 61.69, 0.004502],\n [1388, 3, 4.1266752095987256e-05, 0.0020633376047993627, 2.22, 61.69, \n 0.004502], [1389, 3, 9.493711173340556e-06, 0.00047468555866702787, \n 2.22, 61.69, 0.004502], [1390, 3, 0.00011948001087807657, \n 0.005974000543903829, 2.22, 61.69, 0.004502], [1391, 3, \n 1.6156815754111043e-05, 0.0008078407877055523, 2.22, 61.69, 0.004502],\n [1392, 3, 0.0007258528797202384, 0.03629264398601192, 2.22, 61.69, \n 0.004502], [1393, 3, 8.763130962106806e-05, 0.004381565481053403, 2.22,\n 61.69, 0.004502], [1394, 3, 6.862035771367977e-05, 0.003431017885683988,\n 2.22, 61.69, 0.004502], [1395, 3, 4.696755105006889e-06, \n 0.00023483775525034447, 2.22, 61.69, 0.004502], [1396, 3, \n 1.6473931389884785e-06, 8.236965694942393e-05, 2.22, 61.69, 0.004502],\n [1397, 3, 0.000841878959456196, 0.042093947972809805, 2.22, 61.69, \n 0.004502], [1398, 3, 9.106352752461475e-05, 0.0045531763762307375, 2.22,\n 61.69, 0.004502], [1399, 3, 0.000614501928895323, 0.03072509644476615, \n 2.22, 61.69, 0.004502], [1400, 3, 8.258214886247176e-05, \n 0.004129107443123589, 2.22, 61.69, 0.004502], [1401, 2, \n 0.0029499050537279323, 0.14749525268639663, 0, 0, 0], [1402, 3, \n 0.0008779203509557502, 
0.04389601754778751, 2.22, 61.69, 0.004502], [\n 1403, 2, 0.007617262031172502, 0.38086310155862513, 0, 0, 0], [1404, 2,\n 0.008581667499251882, 0.42908337496259413, 0, 0, 0], [1405, 3, \n 0.0010206451561773305, 0.051032257808866534, 2.22, 61.69, 0.004502], [\n 1406, 3, 0.00044281345416550866, 0.02214067270827543, 2.22, 61.69, \n 0.004502], [1407, 3, 6.985519985723439e-06, 0.00034927599928617195, \n 2.22, 61.69, 0.004502], [1408, 3, 0.0015599034807669107, \n 0.07799517403834554, 2.22, 61.69, 0.004502], [1409, 3, \n 0.0003826451438968471, 0.019132257194842357, 2.22, 61.69, 0.004502], [\n 1410, 3, 0.001119849138434054, 0.0559924569217027, 2.22, 61.69, \n 0.004502], [1411, 3, 0.0021677332100863795, 0.10838666050431899, 2.22, \n 61.69, 0.004502], [1412, 3, 0.0001702932115988861, 0.008514660579944306,\n 2.22, 61.69, 0.004502], [1413, 3, 0.00015712687360754934, \n 0.007856343680377468, 2.22, 61.69, 0.004502], [1414, 3, \n 0.0006609559456239092, 0.033047797281195467, 2.22, 61.69, 0.004502], [\n 1415, 3, 0.0001890075811839285, 0.009450379059196426, 2.22, 61.69, \n 0.004502], [1416, 3, 0.0002017048354821146, 0.010085241774105731, 2.22,\n 61.69, 0.004502], [1417, 3, 3.587634624733768e-08, \n 1.7938173123668838e-06, 2.22, 61.69, 0.004502], [1418, 2, \n 0.002634005451573638, 0.13170027257868192, 0, 0, 0], [1419, 3, \n 0.0009538705167746413, 0.04769352583873206, 2.22, 61.69, 0.004502], [\n 1421, 3, 0.00030900630459512675, 0.015450315229756338, 2.22, 61.69, \n 0.004502], [1422, 3, 0.0002087121412723534, 0.010435607063617671, 2.22,\n 61.69, 0.004502], [1423, 3, 8.660213976572599e-05, 0.0043301069882863, \n 2.22, 61.69, 0.004502], [1424, 2, 0.005562707763624093, \n 0.27813538818120465, 0, 0, 0], [1425, 3, 0.0013602274146640447, \n 0.06801137073320224, 2.22, 61.69, 0.004502], [1426, 2, \n 0.004377563184547638, 0.2188781592273819, 0, 0, 0], [1427, 2, \n 0.012484847220837852, 0.6242423610418927, 0, 0, 0], [1428, 2, \n 0.008488880122374441, 0.4244440061187221, 0, 0, 0], [1431, 2, \n 
0.006398108618200077, 0.31990543091000384, 0, 0, 0], [1432, 3, \n 0.00038249012070950037, 0.019124506035475018, 2.22, 61.69, 0.004502], [\n 1433, 2, 0.0499489397816605, 2.4974469890830253, 0, 0, 0], [1434, 2, \n 0.002523926322700656, 0.12619631613503277, 0, 0, 0], [1435, 2, \n 0.00281243262144019, 0.1406216310720095, 0, 0, 0], [1436, 2, \n 0.005026791926267322, 0.2513395963133661, 0, 0, 0], [1437, 2, \n 0.007689748714359815, 0.38448743571799077, 0, 0, 0], [1438, 2, \n 0.021209120082186957, 1.060456004109348, 0, 0, 0], [1439, 2, \n 0.0025185488172777457, 0.12592744086388727, 0, 0, 0], [1440, 3, \n 2.1228241611109457e-05, 0.001061412080555473, 2.22, 61.69, 0.004502], [\n 1441, 3, 5.1097125443354235e-06, 0.0002554856272167712, 2.22, 61.69, \n 0.004502], [1442, 3, 2.626011287317575e-05, 0.0013130056436587876, 2.22,\n 61.69, 0.004502], [1443, 2, 0.006557506818224797, 0.3278753409112398, 0,\n 0, 0], [1444, 3, 0.00042227456865251087, 0.021113728432625545, 2.22, \n 61.69, 0.004502], [1445, 3, 0.0009856395478638393, 0.04928197739319196,\n 2.22, 61.69, 0.004502], [1446, 2, 0.02178507310152743, \n 1.0892536550763714, 0, 0, 0], [1447, 2, 0.003442397713820559, \n 0.17211988569102793, 0, 0, 0], [1448, 3, 0.000439455069088402, \n 0.0219727534544201, 2.22, 61.69, 0.004502], [1449, 2, \n 0.003346435866528816, 0.16732179332644082, 0, 0, 0], [1450, 2, \n 0.0033264151601212124, 0.1663207580060606, 0, 0, 0], [1451, 2, \n 0.004170743873351868, 0.2085371936675934, 0, 0, 0], [1452, 3, \n 0.0013165328240904745, 0.06582664120452372, 2.22, 61.69, 0.004502], [\n 1453, 2, 0.004077756743774734, 0.20388783718873668, 0, 0, 0], [1454, 2,\n 0.009875666531734596, 0.49378332658672985, 0, 0, 0], [1455, 3, \n 2.1818849454345026e-05, 0.001090942472717251, 2.22, 61.69, 0.004502], [\n 1456, 2, 0.0017907486519991621, 0.08953743259995812, 0, 0, 0], [1457, 3,\n 8.903780729597746e-05, 0.004451890364798873, 2.22, 61.69, 0.004502], [\n 1458, 3, 1.0945897203271481e-05, 0.0005472948601635741, 2.22, 61.69, \n 
0.004502], [1459, 3, 0.00033798517072819835, 0.01689925853640992, 2.22,\n 61.69, 0.004502], [1460, 2, 0.003233851084262461, 0.16169255421312306, \n 0, 0, 0], [1461, 3, 0.0011159317192975062, 0.05579658596487532, 2.22, \n 61.69, 0.004502], [1462, 3, 0.00014771811478685875, \n 0.0073859057393429375, 2.22, 61.69, 0.004502], [1463, 3, \n 4.5276834778775515e-05, 0.002263841738938776, 2.22, 61.69, 0.004502], [\n 1464, 2, 0.009317735345896607, 0.4658867672948304, 0, 0, 0], [1465, 3, \n 0.0002263874562139475, 0.011319372810697375, 2.22, 61.69, 0.004502], [\n 1466, 3, 0.00018856670442025825, 0.009428335221012914, 2.22, 61.69, \n 0.004502], [1467, 3, 6.63001698920047e-05, 0.0033150084946002357, 2.22,\n 61.69, 0.004502], [1468, 3, 0.0015144656821575462, 0.0757232841078773, \n 2.22, 61.69, 0.004502], [1469, 2, 0.0021846358435379763, \n 0.10923179217689882, 0, 0, 0], [1470, 2, 0.005027084884666319, \n 0.2513542442333159, 0, 0, 0], [1471, 2, 0.008429379144717497, \n 0.42146895723587485, 0, 0, 0], [1472, 3, 0.000411329166889909, \n 0.020566458344495452, 2.22, 61.69, 0.004502], [1473, 3, \n 0.0003152649698806797, 0.01576324849403399, 2.22, 61.69, 0.004502], [\n 1474, 3, 4.6374430095522104e-05, 0.0023187215047761056, 2.22, 61.69, \n 0.004502], [1475, 3, 1.2661518354387543e-05, 0.0006330759177193771, \n 2.22, 61.69, 0.004502], [1476, 2, 0.015946059282369706, \n 0.7973029641184852, 0, 0, 0], [1477, 3, 0.0003829836649997916, \n 0.01914918324998958, 2.22, 61.69, 0.004502], [1479, 3, \n 0.00014225067121410135, 0.007112533560705067, 2.22, 61.69, 0.004502], [\n 1480, 3, 0.0004782600316322042, 0.023913001581610215, 2.22, 61.69, \n 0.004502], [1481, 3, 1.9134115446378896e-06, 9.567057723189448e-05, \n 2.22, 61.69, 0.004502], [1482, 3, 0.0005460062457677878, \n 0.02730031228838939, 2.22, 61.69, 0.004502], [1483, 3, \n 0.00010937933305696306, 0.005468966652848153, 2.22, 61.69, 0.004502], [\n 1484, 3, 1.0350331428991598e-06, 5.175165714495798e-05, 2.22, 61.69, \n 0.004502], [1485, 3, 
1.9501739896369628e-05, 0.0009750869948184814, \n 2.22, 61.69, 0.004502], [1486, 3, 0.00010033262049505883, \n 0.005016631024752942, 2.22, 61.69, 0.004502], [1487, 3, \n 4.061288205771431e-05, 0.0020306441028857154, 2.22, 61.69, 0.004502], [\n 1488, 3, 0.0001420359709113183, 0.007101798545565915, 2.22, 61.69, \n 0.004502], [1489, 3, 7.571817467557017e-06, 0.00037859087337785094, \n 2.22, 61.69, 0.004502], [1490, 2, 0.02173832998960063, \n 1.0869164994800316, 0, 0, 0], [1491, 2, 0.002899243829618353, \n 0.14496219148091766, 0, 0, 0], [1492, 2, 0.006310327387189529, \n 0.31551636935947647, 0, 0, 0], [1493, 2, 0.0026261050067275696, \n 0.1313052503363785, 0, 0, 0], [1494, 2, 0.01942091372606376, \n 0.971045686303188, 0, 0, 0], [1495, 2, 0.001839513558783269, \n 0.09197567793916346, 0, 0, 0], [1497, 2, 0.004375527360649893, \n 0.2187763680324947, 0, 0, 0], [1498, 2, 0.006735488235440387, \n 0.3367744117720194, 0, 0, 0], [1500, 3, 9.85597782087346e-06, \n 0.000492798891043673, 2.22, 61.69, 0.004502], [1501, 3, \n 0.0005198212383651805, 0.02599106191825903, 2.22, 61.69, 0.004502], [\n 1502, 3, 2.5730645753187908e-05, 0.0012865322876593954, 2.22, 61.69, \n 0.004502], [1503, 3, 0.0016785036591113812, 0.08392518295556907, 2.22, \n 61.69, 0.004502], [1504, 2, 0.0070690698718853685, 0.3534534935942685, \n 0, 0, 0], [1505, 3, 0.0008020995657820899, 0.0401049782891045, 2.22, \n 61.69, 0.004502], [1506, 2, 0.0016397994496200178, 0.08198997248100089,\n 0, 0, 0], [1507, 3, 0.00041507959569883954, 0.020753979784941975, 2.22,\n 61.69, 0.004502], [1508, 3, 4.154538017488063e-06, \n 0.00020772690087440316, 2.22, 61.69, 0.004502], [1510, 2, \n 0.0038109932532764228, 0.19054966266382115, 0, 0, 0], [1511, 2, \n 0.00988173435818505, 0.4940867179092525, 0, 0, 0], [1512, 2, \n 0.0024139057115332764, 0.12069528557666383, 0, 0, 0], [1513, 3, \n 0.0009163944605813735, 0.04581972302906867, 2.22, 61.69, 0.004502], [\n 1514, 3, 7.863212274868215e-07, 3.931606137434107e-05, 2.22, 61.69, \n 
0.004502], [1516, 3, 8.064530491522743e-07, 4.032265245761371e-05, 2.22,\n 61.69, 0.004502], [1517, 3, 5.411679453042277e-05, \n 0.0027058397265211386, 2.22, 61.69, 0.004502], [1518, 3, \n 2.5128262984133043e-05, 0.0012564131492066523, 2.22, 61.69, 0.004502],\n [1519, 3, 1.7440471969906603e-06, 8.720235984953302e-05, 2.22, 61.69, \n 0.004502], [1520, 2, 0.002179468836492435, 0.10897344182462178, 0, 0, 0\n ], [1521, 3, 0.0008492761068800811, 0.042463805344004055, 2.22, 61.69, \n 0.004502], [1522, 3, 0.001100146404858253, 0.055007320242912654, 2.22, \n 61.69, 0.004502], [1523, 3, 0.0005582443262487387, 0.027912216312436934,\n 2.22, 61.69, 0.004502], [1524, 3, 0.000714042943349428, \n 0.0357021471674714, 2.22, 61.69, 0.004502], [1525, 2, \n 0.0030458928986021308, 0.15229464493010655, 0, 0, 0], [1526, 3, \n 0.0028315929319783603, 0.14157964659891803, 2.22, 61.69, 0.004502], [\n 1527, 2, 0.006620761748036568, 0.3310380874018284, 0, 0, 0], [1528, 3, \n 0.0026347607821089578, 0.13173803910544787, 2.22, 61.69, 0.004502], [\n 1529, 2, 0.002711166418718582, 0.1355583209359291, 0, 0, 0], [1530, 2, \n 0.005032807482107288, 0.25164037410536444, 0, 0, 0], [1531, 2, \n 0.01170243432457441, 0.5851217162287206, 0, 0, 0], [1532, 3, \n 0.0013959626805160842, 0.06979813402580422, 2.22, 61.69, 0.004502], [\n 1534, 3, 0.0018790855823381403, 0.09395427911690701, 2.22, 61.69, \n 0.004502], [1535, 3, 0.0005686146984208124, 0.028430734921040625, 2.22,\n 61.69, 0.004502], [1536, 3, 0.0024994615604055, 0.124973078020275, 2.22,\n 61.69, 0.004502], [1537, 2, 0.0032722848050199577, 0.16361424025099788,\n 0, 0, 0], [1538, 2, 0.0037830688364752845, 0.18915344182376426, 0, 0, 0\n ], [1539, 2, 0.005940345649432395, 0.2970172824716198, 0, 0, 0], [1540,\n 3, 0.00011646135769917789, 0.005823067884958895, 2.22, 61.69, 0.004502],\n [1541, 3, 0.00012889056523503453, 0.006444528261751726, 2.22, 61.69, \n 0.004502], [1542, 2, 0.0015000008003063865, 0.07500004001531933, 0, 0, \n 0], [1543, 3, 
0.0009414759018296965, 0.04707379509148483, 2.22, 61.69, \n 0.004502], [1544, 2, 0.0055441839759994335, 0.2772091987999717, 0, 0, 0\n ], [1545, 2, 0.011812169709970757, 0.5906084854985378, 0, 0, 0], [1546,\n 2, 0.01626203379888308, 0.8131016899441541, 0, 0, 0], [1547, 2, \n 0.02285851188035466, 1.142925594017733, 0, 0, 0], [1548, 3, \n 0.0013543308279443016, 0.06771654139721509, 2.22, 61.69, 0.004502], [\n 1549, 2, 0.0049030854262021965, 0.2451542713101098, 0, 0, 0], [1550, 3,\n 0.00033197905453791535, 0.016598952726895766, 2.22, 61.69, 0.004502], [\n 1551, 3, 0.0006096583500745879, 0.030482917503729397, 2.22, 61.69, \n 0.004502], [1552, 2, 0.0015656981738750837, 0.0782849086937542, 0, 0, 0\n ], [1553, 2, 0.0024888943599414575, 0.12444471799707287, 0, 0, 0], [\n 1554, 2, 0.004505411665481134, 0.22527058327405666, 0, 0, 0], [1555, 2,\n 0.002990934193624122, 0.14954670968120612, 0, 0, 0], [1556, 3, \n 0.0011564128320789798, 0.057820641603948994, 2.22, 61.69, 0.004502], [\n 1557, 3, 0.0007362927807377101, 0.036814639036885505, 2.22, 61.69, \n 0.004502], [1558, 3, 0.0007445458899189016, 0.03722729449594508, 2.22, \n 61.69, 0.004502], [1559, 2, 0.003443835108227301, 0.17219175541136506, \n 0, 0, 0], [1560, 2, 0.002329145997663478, 0.11645729988317388, 0, 0, 0],\n [1561, 3, 0.0005540231602239543, 0.027701158011197716, 2.22, 61.69, \n 0.004502], [1562, 2, 0.0017152625197382394, 0.08576312598691198, 0, 0, \n 0], [1563, 2, 0.0030915759312768417, 0.1545787965638421, 0, 0, 0], [\n 1564, 2, 0.0037097629455119584, 0.18548814727559793, 0, 0, 0], [1565, 3,\n 0.0004375471497403783, 0.021877357487018915, 2.22, 61.69, 0.004502], [\n 1566, 2, 0.010252171892683539, 0.512608594634177, 0, 0, 0], [1567, 3, \n 0.0008118171037128424, 0.04059085518564212, 2.22, 61.69, 0.004502], [\n 1568, 2, 0.002604241793178731, 0.13021208965893655, 0, 0, 0], [1569, 2,\n 0.009255990694371212, 0.46279953471856067, 0, 0, 0], [1570, 2, \n 0.0069640706150360665, 0.3482035307518033, 0, 0, 0], [1571, 2, \n 
0.0065041313813353095, 0.32520656906676554, 0, 0, 0], [1572, 2, \n 0.006633904979541033, 0.33169524897705166, 0, 0, 0], [1573, 2, \n 0.0023394661316732436, 0.11697330658366219, 0, 0, 0], [1574, 2, \n 0.004137684975217191, 0.20688424876085953, 0, 0, 0], [1575, 2, \n 0.005321935603588621, 0.266096780179431, 0, 0, 0], [1576, 3, \n 0.0012058684964594748, 0.06029342482297374, 2.22, 61.69, 0.004502], [\n 1577, 2, 0.007623891664161928, 0.38119458320809646, 0, 0, 0], [1578, 3,\n 0.0005221838250086942, 0.026109191250434708, 2.22, 61.69, 0.004502], [\n 1579, 3, 0.002238630940686654, 0.11193154703433271, 2.22, 61.69, \n 0.004502], [1580, 3, 0.001393719346464869, 0.06968596732324346, 2.22, \n 61.69, 0.004502], [1581, 2, 0.004209660542722961, 0.21048302713614803, \n 0, 0, 0], [1582, 3, 0.00022686224095152467, 0.011343112047576234, 2.22,\n 61.69, 0.004502], [1583, 3, 5.082160364336507e-05, 0.002541080182168254,\n 2.22, 61.69, 0.004502], [1584, 2, 0.0022062235268679067, \n 0.11031117634339535, 0, 0, 0], [1585, 3, 9.927313465409417e-05, \n 0.004963656732704709, 2.22, 61.69, 0.004502], [1586, 2, \n 0.0016556098644012565, 0.08278049322006283, 0, 0, 0], [1587, 2, \n 0.0051600530588915, 0.25800265294457503, 0, 0, 0], [1588, 2, \n 0.0020300209546731105, 0.10150104773365555, 0, 0, 0], [1589, 3, \n 0.003090042091003551, 0.15450210455017754, 2.22, 61.69, 0.004502], [\n 1590, 2, 0.00678480159716298, 0.33924007985814897, 0, 0, 0], [1591, 2, \n 0.007640573237260637, 0.3820286618630319, 0, 0, 0], [1592, 3, \n 0.0002808269093051203, 0.014041345465256016, 2.22, 61.69, 0.004502], [\n 1593, 3, 0.00020129856047632, 0.010064928023816, 2.22, 61.69, 0.004502],\n [1594, 3, 0.0002789388372524298, 0.01394694186262149, 2.22, 61.69, \n 0.004502], [1595, 2, 0.0016750204459843893, 0.08375102229921946, 0, 0, \n 0], [1596, 2, 0.004134439238739313, 0.20672196193696565, 0, 0, 0], [\n 1597, 3, 8.285309045665851e-05, 0.004142654522832926, 2.22, 61.69, \n 0.004502], [1598, 3, 0.00013540004754729773, 
0.0067700023773648865, \n 2.22, 61.69, 0.004502], [1599, 2, 0.0026959085186091525, \n 0.13479542593045762, 0, 0, 0], [1600, 3, 0.0009357608497023268, \n 0.04678804248511634, 2.22, 61.69, 0.004502], [1601, 3, \n 0.00027170543018973547, 0.013585271509486775, 2.22, 61.69, 0.004502], [\n 1602, 3, 0.0015513668512933244, 0.07756834256466623, 2.22, 61.69, \n 0.004502], [1603, 3, 0.0009086996263346224, 0.04543498131673112, 2.22, \n 61.69, 0.004502], [1604, 3, 0.0005649494759739373, 0.02824747379869687,\n 2.22, 61.69, 0.004502], [1605, 3, 0.0014751450593580586, \n 0.07375725296790293, 2.22, 61.69, 0.004502], [1606, 3, \n 0.0013425796771799677, 0.06712898385899839, 2.22, 61.69, 0.004502], [\n 1607, 3, 0.0006631858002546182, 0.03315929001273091, 2.22, 61.69, \n 0.004502], [1608, 3, 0.000668140823101588, 0.0334070411550794, 2.22, \n 61.69, 0.004502], [1609, 3, 0.00022162254349097636, \n 0.011081127174548818, 2.22, 61.69, 0.004502], [1610, 3, \n 0.0006039031650447518, 0.030195158252237588, 2.22, 61.69, 0.004502], [\n 1611, 3, 0.00022694944446959337, 0.011347472223479668, 2.22, 61.69, \n 0.004502], [1612, 3, 0.0003947897752379102, 0.019739488761895515, 2.22,\n 61.69, 0.004502], [1613, 3, 0.0008375258341098956, 0.04187629170549478,\n 2.22, 61.69, 0.004502], [1614, 3, 0.0008441996938739789, \n 0.042209984693698945, 2.22, 61.69, 0.004502], [1615, 2, \n 0.005227574288460156, 0.26137871442300786, 0, 0, 0], [1616, 3, \n 0.00019064354714925193, 0.009532177357462597, 2.22, 61.69, 0.004502], [\n 1617, 3, 0.00029566775950504534, 0.014783387975252268, 2.22, 61.69, \n 0.004502], [1618, 3, 0.00014179949030894114, 0.007089974515447057, 2.22,\n 61.69, 0.004502], [1619, 3, 0.00018640385871827544, \n 0.009320192935913772, 2.22, 61.69, 0.004502], [1620, 3, \n 5.5271626586484114e-05, 0.0027635813293242053, 2.22, 61.69, 0.004502],\n [1621, 3, 0.0002950094150485152, 0.014750470752425757, 2.22, 61.69, \n 0.004502], [1622, 3, 0.00020847655089586544, 0.010423827544793273, 2.22,\n 61.69, 0.004502], 
[1623, 3, 0.0006246630015592596, 0.031233150077962978,\n 2.22, 61.69, 0.004502], [1624, 3, 0.00028274003590258393, \n 0.014137001795129197, 2.22, 61.69, 0.004502], [1625, 2, \n 0.0022534174910895347, 0.11267087455447673, 0, 0, 0], [1626, 3, \n 0.0004280693443394328, 0.02140346721697164, 2.22, 61.69, 0.004502], [\n 1627, 3, 0.000375648911560075, 0.01878244557800375, 2.22, 61.69, \n 0.004502], [1628, 2, 0.002172204242957195, 0.10861021214785976, 0, 0, 0\n ], [1629, 2, 0.003587225381224193, 0.17936126906120967, 0, 0, 0], [1630,\n 3, 0.00045326643232520994, 0.0226633216162605, 2.22, 61.69, 0.004502],\n [1631, 3, 0.0009801395432241038, 0.04900697716120519, 2.22, 61.69, \n 0.004502], [1632, 3, 0.0008930991123686864, 0.044654955618434314, 2.22,\n 61.69, 0.004502], [1633, 2, 0.001835290275730487, 0.09176451378652435, \n 0, 0, 0], [1634, 3, 0.00035310969975077067, 0.017655484987538533, 2.22,\n 61.69, 0.004502], [1635, 3, 0.0006833295628236428, 0.03416647814118214,\n 2.22, 61.69, 0.004502], [1636, 3, 0.0006973081800050544, \n 0.03486540900025272, 2.22, 61.69, 0.004502], [1637, 3, \n 0.000849481774844417, 0.042474088742220854, 2.22, 61.69, 0.004502], [\n 1638, 3, 0.0003577601952454168, 0.01788800976227084, 2.22, 61.69, \n 0.004502], [1639, 3, 0.0008040502325112668, 0.04020251162556334, 2.22, \n 61.69, 0.004502], [1640, 3, 6.362024595159042e-05, \n 0.0031810122975795213, 2.22, 61.69, 0.004502], [1641, 3, \n 0.00014325661737729948, 0.007162830868864973, 2.22, 61.69, 0.004502], [\n 1642, 3, 0.00033451195931950633, 0.01672559796597532, 2.22, 61.69, \n 0.004502], [1643, 3, 9.619219687560661e-05, 0.0048096098437803315, 2.22,\n 61.69, 0.004502], [1644, 3, 0.0003653755557936511, 0.018268777789682555,\n 2.22, 61.69, 0.004502], [1645, 3, 0.00030842754735325555, \n 0.015421377367662779, 2.22, 61.69, 0.004502], [1646, 3, \n 0.0001049187322986075, 0.005245936614930375, 2.22, 61.69, 0.004502], [\n 1647, 3, 0.000503659392774143, 0.025182969638707146, 2.22, 61.69, \n 0.004502], [1648, 2, 
0.006961158588339223, 0.34805792941696123, 0, 0, 0\n ], [1649, 3, 0.000744807327898371, 0.03724036639491855, 2.22, 61.69, \n 0.004502], [1650, 2, 0.011263647688495146, 0.5631823844247573, 0, 0, 0],\n [1651, 2, 0.008559494225984409, 0.4279747112992205, 0, 0, 0], [1652, 2,\n 0.005352098184679378, 0.2676049092339689, 0, 0, 0], [1653, 3, \n 0.0011733692302176245, 0.058668461510881224, 2.22, 61.69, 0.004502], [\n 1654, 3, 0.0020443508774251108, 0.10221754387125553, 2.22, 61.69, \n 0.004502], [1655, 3, 0.0003002115401188504, 0.01501057700594252, 2.22, \n 61.69, 0.004502], [1656, 3, 7.370159725959526e-05, 0.003685079862979763,\n 2.22, 61.69, 0.004502], [1657, 3, 0.00015430974585088452, \n 0.007715487292544226, 2.22, 61.69, 0.004502], [1658, 3, \n 5.322222256050306e-05, 0.0026611111280251533, 2.22, 61.69, 0.004502], [\n 1659, 2, 0.005607978495065647, 0.2803989247532824, 0, 0, 0], [1660, 2, \n 0.006516269957589729, 0.32581349787948644, 0, 0, 0], [1661, 2, \n 0.008823810212990009, 0.4411905106495005, 0, 0, 0], [1662, 3, \n 8.483345715007819e-05, 0.00424167285750391, 2.22, 61.69, 0.004502], [\n 1663, 3, 4.3530191699128595e-05, 0.0021765095849564297, 2.22, 61.69, \n 0.004502], [1664, 3, 4.452953003965536e-05, 0.002226476501982768, 2.22,\n 61.69, 0.004502], [1665, 3, 0.0013225288693347707, 0.06612644346673854,\n 2.22, 61.69, 0.004502], [1666, 3, 8.635567359373938e-05, \n 0.0043177836796869686, 2.22, 61.69, 0.004502], [1667, 3, \n 0.0001522890012790897, 0.007614450063954485, 2.22, 61.69, 0.004502], [\n 1668, 3, 0.00011100625173614089, 0.005550312586807045, 2.22, 61.69, \n 0.004502], [1669, 2, 0.0019551374257545055, 0.09775687128772527, 0, 0, \n 0], [1670, 2, 0.002994563514151705, 0.1497281757075853, 0, 0, 0], [1671,\n 2, 0.00194197125660994, 0.097098562830497, 0, 0, 0], [1672, 3, \n 0.00031759653323842224, 0.01587982666192111, 2.22, 61.69, 0.004502], [\n 1673, 3, 0.00015112697948666895, 0.007556348974333448, 2.22, 61.69, \n 0.004502], [1674, 3, 0.001338975669244281, 
0.06694878346221406, 2.22, \n 61.69, 0.004502], [1675, 3, 0.0009048640187272772, 0.04524320093636386,\n 2.22, 61.69, 0.004502], [1676, 2, 0.002276296569919192, \n 0.11381482849595959, 0, 0, 0], [1677, 3, 0.0003779607501536475, \n 0.018898037507682378, 2.22, 61.69, 0.004502], [1678, 2, \n 0.005903817693380342, 0.2951908846690171, 0, 0, 0], [1679, 2, \n 0.0018586402973926343, 0.09293201486963171, 0, 0, 0], [1680, 2, \n 0.0014488887108239739, 0.0724444355411987, 0, 0, 0], [1681, 3, \n 0.0004714294646830218, 0.023571473234151093, 2.22, 61.69, 0.004502], [\n 1682, 3, 0.001085935652974641, 0.05429678264873205, 2.22, 61.69, \n 0.004502], [1683, 3, 0.00028145757533810527, 0.014072878766905264, 2.22,\n 61.69, 0.004502], [1684, 3, 0.0025831258538967852, 0.12915629269483925,\n 2.22, 61.69, 0.004502], [1685, 2, 0.0047697103139446575, \n 0.23848551569723286, 0, 0, 0], [1686, 2, 0.0022483118876134227, \n 0.11241559438067113, 0, 0, 0], [1687, 2, 0.0030131816049814983, \n 0.15065908024907493, 0, 0, 0], [1688, 3, 0.0004903983387759389, \n 0.024519916938796946, 2.22, 61.69, 0.004502], [1689, 2, \n 0.0032938946161484794, 0.16469473080742397, 0, 0, 0], [1690, 2, \n 0.00317999955372553, 0.15899997768627652, 0, 0, 0], [1691, 2, \n 0.006018881738424175, 0.30094408692120883, 0, 0, 0], [1692, 3, \n 0.0007150498191215078, 0.03575249095607538, 2.22, 61.69, 0.004502], [\n 1693, 2, 0.0030184481369320087, 0.15092240684660044, 0, 0, 0], [1694, 2,\n 0.001461369242868097, 0.07306846214340486, 0, 0, 0], [1695, 3, \n 0.0006306603001410114, 0.03153301500705057, 2.22, 61.69, 0.004502], [\n 1696, 2, 0.0014331689037382152, 0.07165844518691075, 0, 0, 0], [1697, 2,\n 0.008710326279612261, 0.43551631398061313, 0, 0, 0], [1698, 3, \n 0.0016301483386422185, 0.08150741693211093, 2.22, 61.69, 0.004502], [\n 1699, 3, 0.00013956784357760127, 0.006978392178880064, 2.22, 61.69, \n 0.004502], [1700, 2, 0.001455730736331227, 0.07278653681656136, 0, 0, 0\n ], [1701, 3, 0.000985466392749056, 0.04927331963745281, 2.22, 
61.69, \n 0.004502], [1702, 3, 0.0008069862705159137, 0.04034931352579569, 2.22, \n 61.69, 0.004502], [1703, 3, 0.0015568099066940577, 0.07784049533470289,\n 2.22, 61.69, 0.004502], [1704, 2, 0.0039863070632047415, \n 0.1993153531602371, 0, 0, 0], [1705, 2, 0.0016994219326201241, \n 0.0849710966310062, 0, 0, 0], [1706, 3, 0.00022834587513481845, \n 0.011417293756740922, 2.22, 61.69, 0.004502], [1707, 3, \n 0.00035050593877745283, 0.017525296938872642, 2.22, 61.69, 0.004502], [\n 1708, 3, 0.0008077480562281571, 0.04038740281140786, 2.22, 61.69, \n 0.004502], [1709, 2, 0.006228812219006413, 0.31144061095032066, 0, 0, 0\n ], [1710, 2, 0.005128653226179494, 0.2564326613089747, 0, 0, 0], [1711,\n 3, 0.0001865928228376505, 0.009329641141882526, 2.22, 61.69, 0.004502],\n [1712, 2, 0.002102837121501151, 0.10514185607505754, 0, 0, 0], [1713, 2,\n 0.0025368957405395645, 0.12684478702697824, 0, 0, 0], [1714, 3, \n 0.0011562226654331135, 0.05781113327165568, 2.22, 61.69, 0.004502], [\n 1715, 2, 0.004481367157274824, 0.22406835786374124, 0, 0, 0], [1716, 2,\n 0.009993594261663767, 0.4996797130831883, 0, 0, 0], [1717, 2, \n 0.002267986548968579, 0.11339932744842897, 0, 0, 0], [1718, 2, \n 0.01920136583254073, 0.9600682916270364, 0, 0, 0], [1719, 3, \n 0.0006250608555912478, 0.03125304277956239, 2.22, 61.69, 0.004502], [\n 1720, 2, 0.00168964057950739, 0.08448202897536951, 0, 0, 0], [1721, 2, \n 0.0022514556432754154, 0.11257278216377076, 0, 0, 0], [1722, 3, \n 0.0005776709769605844, 0.02888354884802922, 2.22, 61.69, 0.004502], [\n 1723, 3, 0.00018177235502873834, 0.009088617751436916, 2.22, 61.69, \n 0.004502], [1724, 3, 0.002308942454207542, 0.1154471227103771, 2.22, \n 61.69, 0.004502], [1725, 2, 0.0018560503299213332, 0.09280251649606665,\n 0, 0, 0], [1726, 2, 0.002761006390807373, 0.13805031954036864, 0, 0, 0],\n [1727, 3, 1.2777785942774298e-05, 0.0006388892971387149, 2.22, 61.69, \n 0.004502], [1728, 2, 0.0018392523086213346, 0.09196261543106675, 0, 0, \n 0], [1729, 2, 
0.006839303534284608, 0.3419651767142304, 0, 0, 0], [1730,\n 2, 0.0016405280887646968, 0.08202640443823485, 0, 0, 0], [1731, 2, \n 0.004530580326268455, 0.2265290163134228, 0, 0, 0], [1732, 2, \n 0.010296734416249178, 0.5148367208124589, 0, 0, 0], [1733, 2, \n 0.0017360181799001156, 0.08680090899500578, 0, 0, 0], [1734, 2, \n 0.002080576836187494, 0.1040288418093747, 0, 0, 0], [1735, 2, \n 0.004596997723122095, 0.2298498861561048, 0, 0, 0], [1736, 2, \n 0.002413425654250592, 0.12067128271252962, 0, 0, 0], [1737, 2, \n 0.006813443685203153, 0.34067218426015766, 0, 0, 0], [1738, 2, \n 0.0038515318581644853, 0.1925765929082243, 0, 0, 0], [1739, 3, \n 0.0010627604171624583, 0.053138020858122914, 2.22, 61.69, 0.004502], [\n 1740, 2, 0.0021026257427105457, 0.10513128713552729, 0, 0, 0], [1741, 3,\n 0.0009950302298943022, 0.049751511494715114, 2.22, 61.69, 0.004502], [\n 1742, 3, 0.0006991333883527254, 0.03495666941763627, 2.22, 61.69, \n 0.004502], [1743, 3, 2.6718441567986027e-05, 0.0013359220783993014, \n 2.22, 61.69, 0.004502], [1744, 3, 0.00010295853025504874, \n 0.0051479265127524374, 2.22, 61.69, 0.004502], [1745, 3, \n 0.0008552992639033185, 0.04276496319516592, 2.22, 61.69, 0.004502], [\n 1746, 2, 0.004641428723601485, 0.23207143618007425, 0, 0, 0], [1747, 3,\n 0.0007127580911748647, 0.03563790455874324, 2.22, 61.69, 0.004502], [\n 1748, 2, 0.0019372469660483122, 0.09686234830241562, 0, 0, 0], [1749, 2,\n 0.006244643211840332, 0.3122321605920166, 0, 0, 0], [1750, 3, \n 0.000653478119652876, 0.0326739059826438, 2.22, 61.69, 0.004502], [1751,\n 3, 0.0005383084342515337, 0.026915421712576687, 2.22, 61.69, 0.004502],\n [1752, 2, 0.0037542906982168446, 0.18771453491084222, 0, 0, 0], [1753, \n 2, 0.002297268499533676, 0.11486342497668381, 0, 0, 0], [1754, 2, \n 0.011467968203347287, 0.5733984101673645, 0, 0, 0], [1755, 3, \n 0.0014040905423340156, 0.07020452711670079, 2.22, 61.69, 0.004502], [\n 1756, 2, 0.0025915006544054604, 0.12957503272027304, 0, 0, 0], [1757, 2,\n 
0.006862277688448091, 0.34311388442240454, 0, 0, 0], [1758, 2, \n 0.008413471513428292, 0.42067357567141467, 0, 0, 0], [1759, 2, \n 0.004574362398582669, 0.22871811992913343, 0, 0, 0], [1760, 2, \n 0.0031789097473471192, 0.15894548736735598, 0, 0, 0], [1761, 3, \n 0.0014083619528329524, 0.07041809764164762, 2.22, 61.69, 0.004502], [\n 1762, 2, 0.0033502257085727175, 0.1675112854286359, 0, 0, 0], [1763, 2,\n 0.0030242326674567712, 0.15121163337283858, 0, 0, 0], [1764, 3, \n 0.0007202102426608419, 0.0360105121330421, 2.22, 61.69, 0.004502], [\n 1765, 2, 0.003945424551590993, 0.19727122757954962, 0, 0, 0], [1766, 2,\n 0.003915515453890014, 0.1957757726945007, 0, 0, 0], [1767, 2, \n 0.006085505697192886, 0.30427528485964433, 0, 0, 0], [1768, 2, \n 0.010174366269247585, 0.5087183134623792, 0, 0, 0], [1769, 2, \n 0.009031054425598138, 0.451552721279907, 0, 0, 0], [1770, 2, \n 0.030509885187144117, 1.525494259357206, 0, 0, 0], [1771, 2, \n 0.017611454160671825, 0.8805727080335912, 0, 0, 0], [1772, 2, \n 0.007633737706924312, 0.3816868853462156, 0, 0, 0], [1773, 2, \n 0.01780807424723992, 0.890403712361996, 0, 0, 0], [1774, 2, \n 0.002413161491111794, 0.1206580745555897, 0, 0, 0], [1775, 2, \n 0.005451344168542172, 0.2725672084271086, 0, 0, 0], [1776, 2, \n 0.0033074583919163653, 0.16537291959581826, 0, 0, 0], [1777, 2, \n 0.005568161613558242, 0.2784080806779121, 0, 0, 0], [1778, 2, \n 0.002395611780191415, 0.11978058900957077, 0, 0, 0], [1779, 2, \n 0.0028488054525953985, 0.14244027262976997, 0, 0, 0], [1780, 2, \n 0.0030002134377383463, 0.1500106718869173, 0, 0, 0], [1781, 3, \n 0.0004499032173986467, 0.022495160869932335, 2.22, 61.69, 0.004502], [\n 1782, 3, 0.0006333736554700433, 0.03166868277350216, 2.22, 61.69, \n 0.004502], [1783, 3, 0.0006836718573255382, 0.03418359286627692, 2.22, \n 61.69, 0.004502], [1784, 2, 0.006456743545235233, 0.32283717726176164, \n 0, 0, 0], [1785, 2, 0.007347157943155048, 0.36735789715775236, 0, 0, 0],\n [1786, 2, 0.007214359186119591, 
0.36071795930597955, 0, 0, 0], [1787, 2,\n 0.007834284018991623, 0.39171420094958115, 0, 0, 0], [1788, 3, \n 0.0002545220592081115, 0.012726102960405576, 2.22, 61.69, 0.004502], [\n 1789, 3, 0.0006445279945604626, 0.03222639972802314, 2.22, 61.69, \n 0.004502], [1790, 3, 3.7097412529855566e-05, 0.0018548706264927782, \n 2.22, 61.69, 0.004502], [1791, 3, 3.060700921589692e-05, \n 0.001530350460794846, 2.22, 61.69, 0.004502], [1792, 3, \n 0.00023113047197876308, 0.011556523598938153, 2.22, 61.69, 0.004502], [\n 1793, 3, 0.0010854139444152772, 0.054270697220763865, 2.22, 61.69, \n 0.004502], [1794, 3, 0.000193812719045554, 0.009690635952277699, 2.22, \n 61.69, 0.004502], [1795, 3, 0.00012212686390123214, \n 0.006106343195061608, 2.22, 61.69, 0.004502], [1796, 3, \n 0.0006642823349345957, 0.033214116746729784, 2.22, 61.69, 0.004502], [\n 1797, 2, 0.0018439478449351068, 0.09219739224675534, 0, 0, 0], [1798, 3,\n 0.00042633568546037186, 0.021316784273018592, 2.22, 61.69, 0.004502], [\n 1799, 2, 0.002237269697339197, 0.11186348486695984, 0, 0, 0], [1800, 2,\n 0.0042493921881998535, 0.2124696094099927, 0, 0, 0], [1801, 3, \n 0.0005438025657211798, 0.02719012828605899, 2.22, 61.69, 0.004502], [\n 1802, 3, 0.00029245884668739017, 0.01462294233436951, 2.22, 61.69, \n 0.004502], [1803, 3, 0.0003927492716827882, 0.01963746358413941, 2.22, \n 61.69, 0.004502], [1804, 2, 0.01120428237244892, 0.5602141186224461, 0,\n 0, 0], [1805, 3, 0.0006332582976482522, 0.03166291488241261, 2.22, \n 61.69, 0.004502], [1806, 3, 0.0006249082238639684, 0.03124541119319842,\n 2.22, 61.69, 0.004502], [1807, 3, 0.0007715037279579743, \n 0.03857518639789872, 2.22, 61.69, 0.004502], [1808, 2, \n 0.003273470708969163, 0.16367353544845814, 0, 0, 0], [1809, 3, \n 0.0009238292096633647, 0.04619146048316824, 2.22, 61.69, 0.004502], [\n 1810, 2, 0.002106300089692593, 0.10531500448462965, 0, 0, 0], [1811, 2,\n 0.0014671228267872148, 0.07335614133936073, 0, 0, 0], [1812, 3, \n 0.0013029854518401976, 
0.0651492725920099, 2.22, 61.69, 0.004502], [\n 1813, 2, 0.005212306067684381, 0.26061530338421907, 0, 0, 0], [1814, 2,\n 0.0017458294165536873, 0.08729147082768438, 0, 0, 0], [1815, 2, \n 0.0017071985603054247, 0.08535992801527123, 0, 0, 0], [1816, 3, \n 0.0008355966484335978, 0.04177983242167989, 2.22, 61.69, 0.004502], [\n 1817, 2, 0.00786124232779237, 0.39306211638961847, 0, 0, 0], [1818, 2, \n 0.00467172216419726, 0.23358610820986297, 0, 0, 0], [1819, 3, \n 4.446961087725697e-05, 0.0022234805438628488, 2.22, 61.69, 0.004502], [\n 1820, 2, 0.0021455616092900765, 0.10727808046450382, 0, 0, 0], [1821, 2,\n 0.0052492883399868, 0.26246441699934, 0, 0, 0], [1822, 2, \n 0.010875476397094096, 0.5437738198547047, 0, 0, 0], [1823, 2, \n 0.003945992802078176, 0.19729964010390882, 0, 0, 0], [1824, 2, \n 0.0018267545792273764, 0.09133772896136881, 0, 0, 0], [1825, 2, \n 0.00519430489419229, 0.25971524470961443, 0, 0, 0], [1826, 2, \n 0.0021811060524790952, 0.10905530262395477, 0, 0, 0], [1827, 3, \n 0.0008530157012054359, 0.0426507850602718, 2.22, 61.69, 0.004502], [\n 1828, 3, 0.002756494944812388, 0.1378247472406194, 2.22, 61.69, \n 0.004502], [1829, 2, 0.004409435763064647, 0.22047178815323237, 0, 0, 0\n ], [1830, 3, 0.0011403474572496454, 0.05701737286248228, 2.22, 61.69, \n 0.004502], [1831, 2, 0.004449336207686825, 0.2224668103843413, 0, 0, 0],\n [1832, 3, 0.0007771931121615173, 0.038859655608075874, 2.22, 61.69, \n 0.004502], [1833, 2, 0.00219574579139257, 0.10978728956962851, 0, 0, 0],\n [1834, 2, 0.0029144516945575063, 0.14572258472787536, 0, 0, 0], [1836, \n 3, 0.0002291147948951537, 0.011455739744757684, 2.22, 61.69, 0.004502],\n [1837, 3, 0.0008040081530028336, 0.040200407650141684, 2.22, 61.69, \n 0.004502], [1838, 3, 0.0008406582811366919, 0.042032914056834604, 2.22,\n 61.69, 0.004502], [1839, 2, 0.009448279703012192, 0.47241398515060967, \n 0, 0, 0], [1840, 2, 0.004930931936026686, 0.2465465968013343, 0, 0, 0],\n [1841, 3, 0.0006235800258089248, 
0.03117900129044624, 2.22, 61.69, \n 0.004502], [1842, 3, 0.000453678034330045, 0.022683901716502253, 2.22, \n 61.69, 0.004502], [1843, 3, 0.0005619991314477211, 0.02809995657238605,\n 2.22, 61.69, 0.004502], [1844, 3, 0.0008621042105392081, \n 0.043105210526960404, 2.22, 61.69, 0.004502], [1845, 3, \n 0.000841554397088342, 0.0420777198544171, 2.22, 61.69, 0.004502], [1846,\n 3, 0.00010981600382526249, 0.005490800191263125, 2.22, 61.69, 0.004502],\n [1847, 2, 0.003982054075289823, 0.19910270376449113, 0, 0, 0], [1848, 3,\n 0.00033381245647581777, 0.01669062282379089, 2.22, 61.69, 0.004502], [\n 1849, 3, 0.001158450269038491, 0.057922513451924555, 2.22, 61.69, \n 0.004502], [1850, 3, 0.001708114521061397, 0.08540572605306987, 2.22, \n 61.69, 0.004502], [1851, 3, 0.0005065229873089011, 0.025326149365445055,\n 2.22, 61.69, 0.004502], [1852, 3, 0.0023941306142429277, \n 0.11970653071214639, 2.22, 61.69, 0.004502], [1853, 3, \n 0.001917289339589373, 0.09586446697946867, 2.22, 61.69, 0.004502], [\n 1854, 3, 0.00014267713764539732, 0.007133856882269866, 2.22, 61.69, \n 0.004502], [1855, 2, 0.003701425783106976, 0.18507128915534882, 0, 0, 0\n ], [1856, 2, 0.004052362315850483, 0.20261811579252417, 0, 0, 0], [1857,\n 3, 0.0012207911958070376, 0.06103955979035188, 2.22, 61.69, 0.004502],\n [1858, 3, 0.0008157807822408823, 0.04078903911204411, 2.22, 61.69, \n 0.004502], [1860, 2, 0.0028539824090186706, 0.14269912045093353, 0, 0, \n 0], [1861, 3, 0.0008409403758531892, 0.04204701879265946, 2.22, 61.69, \n 0.004502], [1862, 3, 0.0008746423721642757, 0.04373211860821378, 2.22, \n 61.69, 0.004502], [1863, 3, 0.0008078987718104445, 0.04039493859052222,\n 2.22, 61.69, 0.004502], [1864, 2, 0.0037260737853256434, \n 0.1863036892662822, 0, 0, 0], [1865, 2, 0.0043352387888536065, \n 0.21676193944268035, 0, 0, 0], [1866, 2, 0.006257281052932708, \n 0.31286405264663536, 0, 0, 0], [1867, 3, 6.12285505372934e-05, \n 0.00306142752686467, 2.22, 61.69, 0.004502], [1868, 3, \n 
0.00018655016239655994, 0.009327508119827998, 2.22, 61.69, 0.004502], [\n 1869, 3, 8.230686306328308e-05, 0.004115343153164154, 2.22, 61.69, \n 0.004502], [1870, 2, 0.0014869657686431364, 0.07434828843215682, 0, 0, \n 0], [1871, 2, 0.0015337314104040772, 0.07668657052020388, 0, 0, 0], [\n 1872, 3, 6.220327851111738e-05, 0.003110163925555869, 2.22, 61.69, \n 0.004502], [1873, 3, 0.0002573648025375113, 0.012868240126875569, 2.22,\n 61.69, 0.004502], [1874, 3, 0.00010039547173203763, \n 0.0050197735866018825, 2.22, 61.69, 0.004502], [1875, 3, \n 0.0002179760373318144, 0.010898801866590722, 2.22, 61.69, 0.004502], [\n 1876, 3, 0.00014270627844755376, 0.00713531392237769, 2.22, 61.69, \n 0.004502], [1877, 3, 3.283059900250418e-05, 0.001641529950125209, 2.22,\n 61.69, 0.004502], [1878, 3, 0.00023290405284479777, \n 0.011645202642239888, 2.22, 61.69, 0.004502], [1879, 3, \n 5.049284201103439e-05, 0.0025246421005517194, 2.22, 61.69, 0.004502], [\n 1880, 3, 0.001068255049908474, 0.05341275249542371, 2.22, 61.69, \n 0.004502], [1881, 3, 0.00015727984940835908, 0.007863992470417953, 2.22,\n 61.69, 0.004502], [1882, 3, 0.0001818121283940816, 0.00909060641970408,\n 2.22, 61.69, 0.004502], [1883, 3, 0.0002453456224830875, \n 0.012267281124154376, 2.22, 61.69, 0.004502], [1884, 3, \n 0.00020684198110963, 0.010342099055481502, 2.22, 61.69, 0.004502], [\n 1885, 3, 0.00129792588119142, 0.06489629405957101, 2.22, 61.69, \n 0.004502], [1886, 3, 0.00014319470844547947, 0.007159735422273974, 2.22,\n 61.69, 0.004502], [1887, 3, 0.0005032189871086648, 0.025160949355433244,\n 2.22, 61.69, 0.004502], [1888, 3, 0.00014324092549305482, \n 0.0071620462746527416, 2.22, 61.69, 0.004502], [1889, 2, \n 0.0025884474041454283, 0.12942237020727143, 0, 0, 0], [1890, 3, \n 0.0007104281028062201, 0.035521405140311005, 2.22, 61.69, 0.004502], [\n 1891, 3, 0.0008415405866706834, 0.042077029333534174, 2.22, 61.69, \n 0.004502], [1892, 3, 0.0010384360084148645, 0.05192180042074322, 2.22, \n 61.69, 
0.004502], [1893, 3, 0.001301927182997355, 0.06509635914986775, \n 2.22, 61.69, 0.004502], [1894, 3, 0.0008768655006630459, \n 0.0438432750331523, 2.22, 61.69, 0.004502], [1895, 3, \n 4.304267639620148e-06, 0.00021521338198100739, 2.22, 61.69, 0.004502],\n [1896, 3, 0.0012165952308203119, 0.060829761541015596, 2.22, 61.69, \n 0.004502], [1897, 3, 0.0004032096848351131, 0.020160484241755657, 2.22,\n 61.69, 0.004502], [1898, 3, 0.0004936037088332394, 0.024680185441661975,\n 2.22, 61.69, 0.004502], [1899, 3, 0.0003231170726398226, \n 0.016155853631991127, 2.22, 61.69, 0.004502], [1900, 2, \n 0.004972924117850934, 0.2486462058925467, 0, 0, 0], [1901, 2, \n 0.00850139874298526, 0.42506993714926306, 0, 0, 0], [1902, 2, \n 0.017941196935571776, 0.8970598467785887, 0, 0, 0], [1903, 2, \n 0.008625713146876468, 0.4312856573438233, 0, 0, 0], [1904, 2, \n 0.005041037225995458, 0.2520518612997729, 0, 0, 0], [1905, 3, \n 0.0002626527775456755, 0.013132638877283775, 2.22, 61.69, 0.004502], [\n 1906, 2, 0.002010065672184408, 0.10050328360922042, 0, 0, 0], [1907, 3,\n 0.0008003650424765439, 0.040018252123827196, 2.22, 61.69, 0.004502], [\n 1908, 2, 0.0013979563523032034, 0.06989781761516019, 0, 0, 0], [1909, 3,\n 0.0011036689330580832, 0.05518344665290417, 2.22, 61.69, 0.004502], [\n 1910, 3, 0.0006883943546285288, 0.03441971773142644, 2.22, 61.69, \n 0.004502], [1911, 3, 0.0002772595538987581, 0.013862977694937906, 2.22,\n 61.69, 0.004502], [1912, 2, 0.006444942182323984, 0.3222471091161993, 0,\n 0, 0], [1913, 3, 0.0001851619920160923, 0.009258099600804617, 2.22, \n 61.69, 0.004502], [1914, 3, 0.00043823655905455975, 0.02191182795272799,\n 2.22, 61.69, 0.004502], [1915, 2, 0.010158557501696754, \n 0.5079278750848377, 0, 0, 0], [1916, 2, 0.017684886510895965, \n 0.8842443255447983, 0, 0, 0], [1917, 2, 0.01186578896955475, \n 0.5932894484777375, 0, 0, 0], [1918, 2, 0.007670383184040397, \n 0.3835191592020199, 0, 0, 0], [1919, 2, 0.0038936492873901407, \n 0.19468246436950706, 0, 0, 
0], [1920, 3, 0.0005833186660407878, \n 0.029165933302039395, 2.22, 61.69, 0.004502], [1921, 2, \n 0.014667779068156944, 0.7333889534078474, 0, 0, 0], [1922, 2, \n 0.00420908399548562, 0.21045419977428104, 0, 0, 0], [1923, 3, \n 0.001390133293413998, 0.0695066646706999, 2.22, 61.69, 0.004502], [1924,\n 3, 0.001743020791378585, 0.08715103956892926, 2.22, 61.69, 0.004502], [\n 1925, 2, 0.004089510330471294, 0.20447551652356472, 0, 0, 0], [1926, 2,\n 0.00287118105637557, 0.1435590528187785, 0, 0, 0], [1927, 2, \n 0.0041806062493278656, 0.20903031246639325, 0, 0, 0], [1928, 3, \n 9.612221268309282e-05, 0.004806110634154641, 2.22, 61.69, 0.004502], [\n 1929, 3, 0.000144746604528514, 0.0072373302264257, 2.22, 61.69, \n 0.004502], [1930, 3, 0.00030511943453295244, 0.015255971726647622, 2.22,\n 61.69, 0.004502], [1931, 3, 0.0010456667798853683, 0.05228333899426842,\n 2.22, 61.69, 0.004502], [1932, 3, 0.0014184910249342812, \n 0.07092455124671407, 2.22, 61.69, 0.004502], [1933, 3, \n 0.0012104704776866732, 0.060523523884333665, 2.22, 61.69, 0.004502], [\n 1934, 2, 0.017260023459133387, 0.8630011729566692, 0, 0, 0], [1935, 2, \n 0.0020131873177782612, 0.10065936588891305, 0, 0, 0], [1936, 3, \n 0.00016183222128449105, 0.008091611064224553, 2.22, 61.69, 0.004502], [\n 1937, 2, 0.0036698553451389514, 0.18349276725694758, 0, 0, 0], [1938, 2,\n 0.0024417642388014174, 0.12208821194007087, 0, 0, 0], [1939, 2, \n 0.002785103211444589, 0.13925516057222947, 0, 0, 0], [1940, 3, \n 0.0005110953936246092, 0.025554769681230462, 2.22, 61.69, 0.004502], [\n 1941, 2, 0.002709985093250103, 0.13549925466250515, 0, 0, 0], [1942, 2,\n 0.0018877299747687521, 0.0943864987384376, 0, 0, 0], [1943, 3, \n 0.00010279589286423787, 0.005139794643211894, 2.22, 61.69, 0.004502], [\n 1944, 2, 0.0025353013507918823, 0.1267650675395941, 0, 0, 0], [1945, 3,\n 0.0003079053590355567, 0.015395267951777833, 2.22, 61.69, 0.004502], [\n 1946, 3, 3.785246414633451e-05, 0.0018926232073167254, 2.22, 61.69, \n 
0.004502], [1947, 3, 0.0006231855866823692, 0.03115927933411846, 2.22, \n 61.69, 0.004502], [1948, 2, 0.002715072413449747, 0.13575362067248736, \n 0, 0, 0], [1949, 3, 0.0003749199035037024, 0.01874599517518512, 2.22, \n 61.69, 0.004502], [1950, 3, 3.2009130803650874e-05, \n 0.0016004565401825438, 2.22, 61.69, 0.004502], [1951, 3, \n 0.00028982139778890414, 0.014491069889445209, 2.22, 61.69, 0.004502], [\n 1952, 2, 0.0021449687785486293, 0.10724843892743147, 0, 0, 0], [1953, 3,\n 0.0002522618160854708, 0.012613090804273537, 2.22, 61.69, 0.004502], [\n 1954, 3, 0.0003506443043975968, 0.017532215219879844, 2.22, 61.69, \n 0.004502], [1955, 3, 0.00019049808752063204, 0.009524904376031602, 2.22,\n 61.69, 0.004502], [1956, 3, 0.0013327624870031016, 0.06663812435015508,\n 2.22, 61.69, 0.004502], [1957, 2, 0.0038265233479846173, \n 0.1913261673992309, 0, 0, 0], [1958, 2, 0.001623585117719857, \n 0.08117925588599285, 0, 0, 0], [1959, 3, 0.0014711543728682193, \n 0.07355771864341097, 2.22, 61.69, 0.004502], [1960, 3, \n 0.00040419410791183997, 0.020209705395591998, 2.22, 61.69, 0.004502], [\n 1961, 3, 0.0004963095835166648, 0.02481547917583324, 2.22, 61.69, \n 0.004502], [1962, 3, 8.676879300628758e-05, 0.00433843965031438, 2.22, \n 61.69, 0.004502], [1963, 3, 1.98901161405436e-05, 0.0009945058070271802,\n 2.22, 61.69, 0.004502], [1964, 2, 0.001926379139961268, \n 0.0963189569980634, 0, 0, 0], [1965, 3, 0.0005268011695933483, \n 0.026340058479667413, 2.22, 61.69, 0.004502], [1966, 3, \n 0.00017024481693603925, 0.008512240846801963, 2.22, 61.69, 0.004502], [\n 1967, 2, 0.003124156872402211, 0.15620784362011056, 0, 0, 0], [1968, 2,\n 0.008146530594916731, 0.4073265297458366, 0, 0, 0], [1969, 3, \n 0.0004332236280372991, 0.021661181401864953, 2.22, 61.69, 0.004502], [\n 1970, 2, 0.015079725927314894, 0.7539862963657448, 0, 0, 0], [1971, 3, \n 0.00041965080447621257, 0.020982540223810627, 2.22, 61.69, 0.004502], [\n 1972, 3, 8.495873978254917e-07, 4.247936989127459e-05, 2.22, 
61.69, \n 0.004502], [1973, 3, 1.600763469777576e-05, 0.0008003817348887879, 2.22,\n 61.69, 0.004502], [1974, 3, 8.235613569316079e-05, 0.00411780678465804,\n 2.22, 61.69, 0.004502], [1975, 2, 0.0024899950060986455, \n 0.12449975030493228, 0, 0, 0], [1976, 3, 0.00013846418760463496, \n 0.006923209380231748, 2.22, 61.69, 0.004502], [1977, 2, \n 0.01441202991758457, 0.7206014958792286, 0, 0, 0], [1978, 3, \n 4.876032337019254e-05, 0.002438016168509627, 2.22, 61.69, 0.004502], [\n 1979, 2, 0.01207812804630862, 0.603906402315431, 0, 0, 0], [1980, 2, \n 0.0034921293990410386, 0.17460646995205195, 0, 0, 0], [1981, 2, \n 0.004683612493623978, 0.23418062468119888, 0, 0, 0], [1982, 2, \n 0.004161761211985465, 0.20808806059927326, 0, 0, 0], [1983, 2, \n 0.0043877697353720034, 0.21938848676860015, 0, 0, 0], [1984, 2, \n 0.002631382568955209, 0.13156912844776045, 0, 0, 0], [1985, 3, \n 0.0012310071496282526, 0.061550357481412625, 2.22, 61.69, 0.004502], [\n 1986, 2, 0.008265161826349031, 0.4132580913174515, 0, 0, 0], [1987, 2, \n 0.010632736546116827, 0.5316368273058414, 0, 0, 0], [1988, 2, \n 0.011845953811604956, 0.5922976905802478, 0, 0, 0], [1989, 3, \n 0.0006607023412943799, 0.033035117064719, 2.22, 61.69, 0.004502], [1990,\n 2, 0.0014479772099362613, 0.07239886049681307, 0, 0, 0], [1991, 2, \n 0.02791736843845849, 1.3958684219229245, 0, 0, 0], [1992, 2, \n 0.00669676694709918, 0.33483834735495904, 0, 0, 0], [1993, 2, \n 0.007396801680359065, 0.36984008401795326, 0, 0, 0], [1994, 2, \n 0.007105771430148137, 0.35528857150740684, 0, 0, 0], [1995, 2, \n 0.007146789481908194, 0.35733947409540967, 0, 0, 0], [1996, 2, \n 0.002500315814796374, 0.1250157907398187, 0, 0, 0], [1997, 3, \n 0.0006919203107214647, 0.03459601553607324, 2.22, 61.69, 0.004502], [\n 1998, 3, 0.0007719976652252124, 0.038599883261260626, 2.22, 61.69, \n 0.004502], [1999, 2, 0.005606206317377037, 0.28031031586885186, 0, 0, 0\n ], [2000, 2, 0.015602932071110567, 0.7801466035555285, 0, 0, 0], [2001,\n 2, 
0.003597196019504588, 0.1798598009752294, 0, 0, 0], [2002, 3, \n 0.0010051105154040628, 0.05025552577020314, 2.22, 61.69, 0.004502], [\n 2003, 3, 0.0015052919810963758, 0.07526459905481879, 2.22, 61.69, \n 0.004502], [2004, 3, 0.0011289420570764744, 0.05644710285382372, 2.22, \n 61.69, 0.004502], [2005, 2, 0.0021166659006517613, 0.10583329503258805,\n 0, 0, 0], [2006, 2, 0.0017443470806312704, 0.08721735403156351, 0, 0, 0\n ], [2007, 3, 5.04767876707769e-05, 0.002523839383538845, 2.22, 61.69, \n 0.004502], [2008, 3, 3.5033818336598355e-06, 0.0001751690916829918, \n 2.22, 61.69, 0.004502]]'], {}), '([[586, 1, 0.08658028904199107, 4.329014452099554, 0, 0, 0], [589, 1, \n 0.010042676909098597, 0.5021338454549299, 0, 0, 0], [590, 1, \n 0.012095775674984046, 0.6047887837492023, 0, 0, 0], [593, 1, \n 0.0017666198683200384, 0.08833099341600192, 0, 0, 0], [594, 1, \n 0.006047887837492023, 0.30239439187460115, 0, 0, 0], [595, 1, \n 1.50560576164933, 75.2802880824665, 0, 0, 0], [597, 1, \n 0.030239439187460113, 1.5119719593730057, 0, 0, 0], [598, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [599, 1, \n 0.0029602819415092537, 0.1480140970754627, 0, 0, 0], [600, 1, \n 0.005379437076506062, 0.26897185382530314, 0, 0, 0], [601, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [602, 1, \n 0.007830423200121252, 0.39152116000606263, 0, 0, 0], [603, 1, \n 1.0997606567649967, 54.98803283824984, 0, 0, 0], [607, 1, \n 0.5729577951308232, 28.64788975654116, 0, 0, 0], [608, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [609, 1, \n 0.0057932399285449895, 0.2896619964272495, 0, 0, 0], [610, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [612, 1, \n 0.00954929658551372, 0.477464829275686, 0, 0, 0], [613, 1, \n 0.027056340325622208, 1.3528170162811104, 0, 0, 0], [614, 1, \n 0.00954929658551372, 0.477464829275686, 0, 0, 0], [616, 1, \n 0.0046154933496649645, 0.23077466748324824, 0, 0, 0], [617, 1, \n 0.04360845440717932, 2.1804227203589663, 0, 0, 
0], [618, 1, \n 0.010631550198538607, 0.5315775099269304, 0, 0, 0], [619, 1, \n 0.037560566569687294, 1.8780283284843649, 0, 0, 0], [621, 1, \n 0.24350706293059987, 12.175353146529993, 0, 0, 0], [623, 1, \n 0.2419155134996809, 12.095775674984045, 0, 0, 0], [624, 1, \n 0.004297183463481174, 0.21485917317405873, 0, 0, 0], [628, 1, \n 0.14292113889652203, 7.1460569448261015, 0, 0, 0], [629, 1, \n 0.023968734429639437, 1.198436721481972, 0, 0, 0], [631, 1, \n 0.025401128917466494, 1.2700564458733248, 0, 0, 0], [632, 1, \n 0.01435577586688896, 0.717788793344448, 0, 0, 0], [637, 1, \n 0.017093240888069558, 0.854662044403478, 0, 0, 0], [638, 1, \n 0.02048324117592693, 1.0241620587963465, 0, 0, 0], [639, 1, \n 0.005029296201703893, 0.25146481008519467, 0, 0, 0], [640, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [641, 1, \n 0.0040107045659157625, 0.20053522829578813, 0, 0, 0], [642, 1, \n 0.00919915571071155, 0.4599577855355775, 0, 0, 0], [643, 1, \n 0.27279157245950864, 13.639578622975431, 0, 0, 0], [646, 1, \n 0.03278591827693044, 1.6392959138465222, 0, 0, 0], [647, 1, \n 0.00445633840657307, 0.2228169203286535, 0, 0, 0], [650, 1, \n 0.4216014442504307, 21.080072212521536, 0, 0, 0], [652, 1, \n 0.00746436683100989, 0.37321834155049455, 0, 0, 0], [655, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [657, 1, \n 0.012095775674984046, 0.6047887837492023, 0, 0, 0], [658, 1, \n 0.030239439187460113, 1.5119719593730057, 0, 0, 0], [661, 1, \n 0.010408733278209955, 0.5204366639104978, 0, 0, 0], [662, 1, \n 0.002928450952890874, 0.1464225476445437, 0, 0, 0], [663, 1, \n 0.00238732414637843, 0.1193662073189215, 0, 0, 0], [666, 1, \n 0.00919915571071155, 0.4599577855355775, 0, 0, 0], [668, 1, \n 0.24382537281678363, 12.191268640839182, 0, 0, 0], [670, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [672, 1, \n 0.010536057232683471, 0.5268028616341736, 0, 0, 0], [675, 1, \n 0.0033740847935481814, 0.16870423967740908, 0, 0, 0], [676, 1, \n 
0.11777465788800255, 5.888732894400127, 0, 0, 0], [678, 1, \n 0.3237211542489151, 16.186057712445756, 0, 0, 0], [679, 1, \n 0.2212253708977345, 11.061268544886726, 0, 0, 0], [681, 1, \n 0.0063821132179850025, 0.31910566089925013, 0, 0, 0], [683, 1, \n 0.008753521870054244, 0.4376760935027122, 0, 0, 0], [687, 1, \n 0.42303383873825773, 21.151691936912886, 0, 0, 0], [689, 1, \n 0.09867606471697511, 4.933803235848756, 0, 0, 0], [691, 1, \n 0.008276057040778557, 0.4138028520389279, 0, 0, 0], [693, 1, \n 0.06175211791965539, 3.0876058959827692, 0, 0, 0], [694, 1, \n 0.005220282133414166, 0.2610141066707083, 0, 0, 0], [695, 1, \n 0.004679155326901723, 0.23395776634508614, 0, 0, 0], [696, 1, \n 0.22950142793851305, 11.475071396925653, 0, 0, 0], [697, 1, \n 0.0036923946797319715, 0.1846197339865986, 0, 0, 0], [698, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [701, 1, \n 0.015024226627874922, 0.7512113313937461, 0, 0, 0], [702, 1, \n 0.023363945645890238, 1.168197282294512, 0, 0, 0], [704, 1, \n 0.16170142218136566, 8.085071109068283, 0, 0, 0], [705, 1, \n 0.005411268065124442, 0.27056340325622213, 0, 0, 0], [707, 1, \n 0.010822536130248884, 0.5411268065124443, 0, 0, 0], [708, 1, \n 0.0024828171122335675, 0.12414085561167837, 0, 0, 0], [711, 1, \n 0.056054370956965534, 2.802718547848277, 0, 0, 0], [713, 1, \n 0.004265352474862795, 0.21326762374313976, 0, 0, 0], [714, 1, \n 0.00477464829275686, 0.238732414637843, 0, 0, 0], [716, 1, \n 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0], [717, 1, \n 0.0017507043740108488, 0.08753521870054244, 0, 0, 0], [719, 1, \n 0.623250757147862, 31.162537857393104, 0, 0, 0], [722, 1, \n 0.006589014644004467, 0.3294507322002233, 0, 0, 0], [723, 1, \n 0.006270704757820675, 0.31353523789103377, 0, 0, 0], [724, 1, \n 0.0019257748114119334, 0.09628874057059668, 0, 0, 0], [725, 1, \n 0.25464790894703254, 12.732395447351628, 0, 0, 0], [727, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [728, 1, \n 
0.16233804195373325, 8.116902097686662, 0, 0, 0], [730, 1, \n 0.10077690996578814, 5.038845498289407, 0, 0, 0], [731, 1, \n 0.2848873481344926, 14.244367406724633, 0, 0, 0], [732, 1, \n 0.004647324338283344, 0.2323662169141672, 0, 0, 0], [733, 1, \n 0.12624170086049138, 6.312085043024569, 0, 0, 0], [735, 1, \n 0.013496339174192726, 0.6748169587096363, 0, 0, 0], [737, 1, \n 0.00891267681314614, 0.445633840657307, 0, 0, 0], [738, 1, \n 0.04408591923645501, 2.2042959618227504, 0, 0, 0], [739, 1, \n 0.01906676218240906, 0.9533381091204531, 0, 0, 0], [741, 1, \n 0.0340591578216656, 1.7029578910832803, 0, 0, 0], [742, 1, \n 0.0028647889756541157, 0.14323944878270578, 0, 0, 0], [743, 1, \n 0.44881693951914486, 22.440846975957243, 0, 0, 0], [745, 1, \n 0.013369015219719208, 0.6684507609859605, 0, 0, 0], [746, 1, \n 0.03183098861837907, 1.5915494309189535, 0, 0, 0], [747, 1, \n 0.0039788735772973835, 0.1989436788648692, 0, 0, 0], [748, 1, \n 0.03501408748021698, 1.7507043740108488, 0, 0, 0], [749, 1, \n 0.0025464790894703256, 0.12732395447351627, 0, 0, 0], [750, 1, \n 0.028902537665488188, 1.4451268832744095, 0, 0, 0], [753, 1, \n 0.049624511256052974, 2.4812255628026487, 0, 0, 0], [758, 1, \n 0.0058887328944001276, 0.2944366447200064, 0, 0, 0], [760, 1, \n 0.2527380496299298, 12.636902481496492, 0, 0, 0], [761, 1, \n 0.004997465213085514, 0.2498732606542757, 0, 0, 0], [762, 1, \n 0.3517324242330887, 17.586621211654435, 0, 0, 0], [763, 1, \n 0.006461690689530951, 0.32308453447654756, 0, 0, 0], [765, 1, \n 0.018780283284843647, 0.9390141642421824, 0, 0, 0], [767, 1, \n 0.0035650707252584553, 0.17825353626292276, 0, 0, 0], [769, 1, \n 0.013782818071758136, 0.6891409035879068, 0, 0, 0], [771, 1, \n 0.21963382146681557, 10.981691073340778, 0, 0, 0], [772, 1, \n 0.002992112930127632, 0.1496056465063816, 0, 0, 0], [774, 1, \n 0.010663381187156987, 0.5331690593578494, 0, 0, 0], [776, 1, \n 0.01782535362629228, 0.891267681314614, 0, 0, 0], [777, 1, \n 0.012573240504259732, 
0.6286620252129866, 0, 0, 0], [778, 1, \n 0.004679155326901723, 0.23395776634508614, 0, 0, 0], [781, 1, \n 0.4169859509007658, 20.84929754503829, 0, 0, 0], [784, 1, \n 0.4058451048843331, 20.292255244216655, 0, 0, 0], [785, 1, \n 0.00047746482927568597, 0.0238732414637843, 0, 0, 0], [787, 1, \n 0.24764509145098912, 12.382254572549456, 0, 0, 0], [788, 1, \n 0.2785211504108168, 13.926057520540843, 0, 0, 0], [789, 1, \n 0.0123185925953127, 0.615929629765635, 0, 0, 0], [790, 1, \n 0.02412788937273133, 1.2063944686365666, 0, 0, 0], [791, 1, \n 0.0031830988618379067, 0.15915494309189535, 0, 0, 0], [792, 1, \n 0.009979014931861837, 0.49895074659309185, 0, 0, 0], [795, 1, \n 0.004329014452099553, 0.2164507226049777, 0, 0, 0], [798, 1, \n 0.10179550160157626, 5.089775080078813, 0, 0, 0], [800, 1, \n 0.0058091554228541795, 0.290457771142709, 0, 0, 0], [801, 1, \n 0.007957747154594767, 0.3978873577297384, 0, 0, 0], [802, 1, \n 0.07957747154594767, 3.9788735772973833, 0, 0, 0], [805, 1, \n 0.44881693951914486, 22.440846975957243, 0, 0, 0], [806, 1, \n 0.005697746962689853, 0.2848873481344927, 0, 0, 0], [808, 1, \n 0.034616200122487235, 1.7308100061243619, 0, 0, 0], [809, 1, \n 0.0039788735772973835, 0.1989436788648692, 0, 0, 0], [810, 1, \n 0.03116253785739311, 1.5581268928696554, 0, 0, 0], [811, 1, \n 0.0040107045659157625, 0.20053522829578813, 0, 0, 0], [814, 1, \n 0.014164789935178685, 0.7082394967589343, 0, 0, 0], [815, 1, \n 0.004265352474862795, 0.21326762374313976, 0, 0, 0], [816, 1, \n 0.012748310941660816, 0.6374155470830408, 0, 0, 0], [817, 1, \n 0.017188733853924696, 0.8594366926962349, 0, 0, 0], [818, 1, \n 0.24096058384112953, 12.048029192056477, 0, 0, 0], [821, 1, \n 0.013130282805081364, 0.6565141402540683, 0, 0, 0], [822, 1, \n 0.04265352474862795, 2.1326762374313977, 0, 0, 0], [825, 1, \n 0.013591832140047864, 0.6795916070023932, 0, 0, 0], [826, 1, \n 0.018461973398659858, 0.9230986699329929, 0, 0, 0], [829, 1, \n 0.06716338598477982, 3.3581692992389915, 0, 0, 
0], [830, 1, \n 0.02832957987035737, 1.4164789935178685, 0, 0, 0], [833, 1, \n 0.0059205638830185075, 0.2960281941509254, 0, 0, 0], [834, 1, \n 0.007416620348082323, 0.37083101740411617, 0, 0, 0], [835, 1, \n 0.010138169874953733, 0.5069084937476867, 0, 0, 0], [836, 1, \n 0.008116902097686661, 0.4058451048843331, 0, 0, 0], [837, 1, \n 0.15024226627874918, 7.512113313937459, 0, 0, 0], [839, 1, \n 0.011666057328635928, 0.5833028664317964, 0, 0, 0], [840, 1, \n 0.4427690516816528, 22.138452584082643, 0, 0, 0], [841, 1, \n 0.0037083101740411615, 0.18541550870205808, 0, 0, 0], [842, 1, \n 0.17204649348233886, 8.602324674116945, 0, 0, 0], [843, 1, \n 0.10599719209920229, 5.2998596049601145, 0, 0, 0], [844, 1, \n 0.012732395447351627, 0.6366197723675814, 0, 0, 0], [845, 1, \n 0.10122254380644544, 5.061127190322272, 0, 0, 0], [847, 1, \n 0.08912676813146139, 4.45633840657307, 0, 0, 0], [848, 1, \n 0.013369015219719208, 0.6684507609859605, 0, 0, 0], [849, 1, \n 0.24796340133717296, 12.398170066858649, 0, 0, 0], [850, 1, \n 0.005092958178940651, 0.25464790894703254, 0, 0, 0], [851, 1, \n 0.01265281797580568, 0.632640898790284, 0, 0, 0], [852, 1, \n 0.005092958178940651, 0.25464790894703254, 0, 0, 0], [853, 1, \n 0.0036923946797319715, 0.1846197339865986, 0, 0, 0], [854, 1, \n 0.026037748689834075, 1.3018874344917037, 0, 0, 0], [855, 1, \n 0.21899720169444797, 10.949860084722399, 0, 0, 0], [856, 1, \n 0.011459155902616463, 0.5729577951308231, 0, 0, 0], [857, 1, \n 0.4462704604296745, 22.313523021483725, 0, 0, 0], [858, 1, \n 0.01808000153523931, 0.9040000767619655, 0, 0, 0], [859, 1, \n 0.027056340325622208, 1.3528170162811104, 0, 0, 0], [860, 1, \n 0.0039788735772973835, 0.1989436788648692, 0, 0, 0], [862, 1, \n 0.23077466748324824, 11.538733374162412, 0, 0, 0], [863, 1, \n 0.0001909859317102744, 0.00954929658551372, 0, 0, 0], [864, 1, \n 0.2785211504108168, 13.926057520540843, 0, 0, 0], [865, 1, \n 0.0035014087480216977, 0.17507043740108488, 0, 0, 0], [867, 1, \n 
0.24478030247533505, 12.239015123766753, 0, 0, 0], [869, 1, \n 0.4329014452099553, 21.645072260497766, 0, 0, 0], [870, 1, \n 0.018589297353133374, 0.9294648676566688, 0, 0, 0], [872, 1, \n 0.00716197243913529, 0.3580986219567645, 0, 0, 0], [873, 1, \n 0.038833806114422456, 1.941690305721123, 0, 0, 0], [874, 1, \n 0.006589014644004467, 0.3294507322002233, 0, 0, 0], [875, 1, \n 0.007766761222884492, 0.38833806114422464, 0, 0, 0], [877, 1, \n 0.007894085177358009, 0.39470425886790045, 0, 0, 0], [881, 1, \n 0.3187236890358296, 15.93618445179148, 0, 0, 0], [882, 1, \n 0.005538592019597957, 0.2769296009798979, 0, 0, 0], [883, 1, \n 0.005729577951308231, 0.28647889756541156, 0, 0, 0], [886, 1, \n 0.8186930272647096, 40.93465136323548, 0, 0, 0], [889, 1, \n 0.0030239439187460114, 0.15119719593730058, 0, 0, 0], [890, 1, \n 0.0076394372684109755, 0.3819718634205488, 0, 0, 0], [893, 1, \n 0.00954929658551372, 0.477464829275686, 0, 0, 0], [894, 1, \n 0.025146481008519465, 1.2573240504259733, 0, 0, 0], [895, 1, \n 0.0030239439187460114, 0.15119719593730058, 0, 0, 0], [896, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [898, 1, \n 0.013464508185574344, 0.6732254092787172, 0, 0, 0], [900, 1, \n 0.03584169318429482, 1.7920846592147412, 0, 0, 0], [902, 1, \n 0.006207042780583919, 0.31035213902919595, 0, 0, 0], [903, 1, \n 0.0031990143561470966, 0.15995071780735484, 0, 0, 0], [905, 1, \n 0.021851973686517232, 1.0925986843258617, 0, 0, 0], [907, 1, \n 0.02142225534016911, 1.0711127670084555, 0, 0, 0], [909, 1, \n 0.005856901905781748, 0.2928450952890874, 0, 0, 0], [911, 1, \n 0.09183240216402361, 4.59162010820118, 0, 0, 0], [913, 1, \n 0.02355493157760051, 1.1777465788800257, 0, 0, 0], [914, 1, \n 0.03568253824120294, 1.7841269120601468, 0, 0, 0], [915, 1, \n 0.0038197186342054878, 0.1909859317102744, 0, 0, 0], [916, 1, \n 0.06238873769202297, 3.119436884601149, 0, 0, 0], [917, 1, \n 0.005411268065124442, 0.27056340325622213, 0, 0, 0], [918, 1, \n 0.012254930618075942, 
0.612746530903797, 0, 0, 0], [919, 1, \n 0.004965634224467135, 0.24828171122335674, 0, 0, 0], [920, 1, \n 0.0020371832715762603, 0.10185916357881303, 0, 0, 0], [921, 1, \n 0.019735212943395024, 0.9867606471697512, 0, 0, 0], [922, 1, \n 0.05220282133414166, 2.6101410667070835, 0, 0, 0], [923, 1, \n 0.023236621691416718, 1.161831084570836, 0, 0, 0], [925, 1, \n 0.008276057040778557, 0.4138028520389279, 0, 0, 0], [928, 1, \n 0.019576058000303126, 0.9788029000151565, 0, 0, 0], [931, 1, \n 0.03455253814525047, 1.7276269072625237, 0, 0, 0], [934, 1, \n 0.09421972631040204, 4.710986315520103, 0, 0, 0], [935, 1, \n 0.007352958370845565, 0.36764791854227824, 0, 0, 0], [936, 1, \n 0.016615776058793875, 0.8307888029396938, 0, 0, 0], [937, 1, \n 0.00477464829275686, 0.238732414637843, 0, 0, 0], [939, 1, \n 1.5915494309189534e-05, 0.0007957747154594768, 0, 0, 0], [940, 1, \n 0.009421972631040205, 0.47109863155201026, 0, 0, 0], [942, 1, \n 0.016520283092938737, 0.8260141546469368, 0, 0, 0], [943, 1, \n 0.021103945453985317, 1.055197272699266, 0, 0, 0], [944, 1, \n 0.004042535554534142, 0.2021267777267071, 0, 0, 0], [945, 1, \n 0.011140846016432674, 0.5570423008216338, 0, 0, 0], [946, 1, \n 0.025464790894703253, 1.2732395447351628, 0, 0, 0], [948, 1, \n 0.025146481008519465, 1.2573240504259733, 0, 0, 0], [950, 1, \n 0.005092958178940651, 0.25464790894703254, 0, 0, 0], [951, 1, \n 0.14132958946560306, 7.066479473280154, 0, 0, 0], [952, 1, \n 0.005045211696013082, 0.2522605848006541, 0, 0, 0], [956, 1, \n 0.020690142601946394, 1.0345071300973196, 0, 0, 0], [957, 1, \n 0.0019098593171027439, 0.0954929658551372, 0, 0, 0], [958, 1, \n 0.010615634704229418, 0.530781735211471, 0, 0, 0], [959, 1, \n 0.007241549910681238, 0.3620774955340619, 0, 0, 0], [960, 1, \n 0.004217605991935227, 0.21088029959676136, 0, 0, 0], [963, 1, \n 0.2785211504108168, 13.926057520540843, 0, 0, 0], [965, 1, \n 0.11204507993669433, 5.602253996834716, 0, 0, 0], [966, 1, \n 0.021008452488130186, 
1.0504226244065094, 0, 0, 0], [967, 1, \n 0.01193662073189215, 0.5968310365946076, 0, 0, 0], [968, 1, \n 0.017188733853924696, 0.8594366926962349, 0, 0, 0], [969, 1, \n 0.018111832523857688, 0.9055916261928845, 0, 0, 0], [971, 1, \n 0.0031830988618379067, 0.15915494309189535, 0, 0, 0], [973, 1, \n 0.4287634166895661, 21.438170834478306, 0, 0, 0], [976, 1, \n 0.008562535938343968, 0.4281267969171984, 0, 0, 0], [977, 1, \n 0.1031324031235482, 5.15662015617741, 0, 0, 0], [978, 1, \n 0.0007321127382227185, 0.03660563691113593, 0, 0, 0], [980, 1, \n 0.11140846016432673, 5.570423008216337, 0, 0, 0], [981, 1, \n 0.03787887645587108, 1.8939438227935543, 0, 0, 0], [982, 1, \n 0.0015756339366097638, 0.07878169683048819, 0, 0, 0], [983, 1, \n 0.01400563499208679, 0.7002817496043395, 0, 0, 0], [984, 1, \n 0.14801409707546268, 7.400704853773133, 0, 0, 0], [985, 1, \n 0.0035014087480216977, 0.17507043740108488, 0, 0, 0], [986, 1, \n 0.0017825353626292277, 0.08912676813146138, 0, 0, 0], [987, 1, \n 0.02618098813861678, 1.3090494069308392, 0, 0, 0], [988, 1, \n 0.0008116902097686662, 0.04058451048843331, 0, 0, 0], [990, 1, \n 0.0954929658551372, 4.7746482927568605, 0, 0, 0], [993, 1, \n 0.06238873769202297, 3.119436884601149, 0, 0, 0], [994, 1, \n 0.010504226244065093, 0.5252113122032547, 0, 0, 0], [995, 1, \n 0.0006684507609859605, 0.033422538049298026, 0, 0, 0], [996, 1, \n 0.003660563691113593, 0.18302818455567965, 0, 0, 0], [997, 1, \n 0.005984225860255264, 0.2992112930127632, 0, 0, 0], [998, 1, \n 0.13464508185574348, 6.732254092787174, 0, 0, 0], [999, 1, \n 0.004965634224467135, 0.24828171122335674, 0, 0, 0], [1000, 1, \n 0.015597184423005743, 0.7798592211502873, 0, 0, 0], [1002, 1, \n 0.0031512678732195276, 0.15756339366097638, 0, 0, 0], [1003, 1, \n 0.2864788975654116, 14.32394487827058, 0, 0, 0], [1006, 1, \n 0.038833806114422456, 1.941690305721123, 0, 0, 0], [1007, 1, \n 0.007416620348082323, 0.37083101740411617, 0, 0, 0], [1008, 1, \n 0.015597184423005743, 
0.7798592211502873, 0, 0, 0], [1010, 1, \n 0.238732414637843, 11.93662073189215, 0, 0, 0], [1011, 1, \n 0.005952394871636886, 0.2976197435818443, 0, 0, 0], [1012, 1, \n 0.9024085273310466, 45.12042636655233, 0, 0, 0], [1014, 1, \n 0.238732414637843, 11.93662073189215, 0, 0, 0], [1018, 1, \n 0.05599070897972878, 2.7995354489864392, 0, 0, 0], [1019, 1, \n 0.03819718634205488, 1.909859317102744, 0, 0, 0], [1023, 1, \n 6.366197723675813e-05, 0.003183098861837907, 0, 0, 0], [1025, 1, \n 0.03616000307047862, 1.808000153523931, 0, 0, 0], [1026, 1, \n 0.20868396138209316, 10.434198069104658, 0, 0, 0], [1028, 2, \n 0.025464790894703257, 1.273239544735163, 0, 0, 0], [1029, 2, \n 0.003819718634205488, 0.19098593171027442, 0, 0, 0], [1030, 2, \n 0.06480789282701978, 3.2403946413509894, 0, 0, 0], [1031, 2, \n 0.0921316134570364, 4.60658067285182, 0, 0, 0], [1032, 2, \n 0.009772775025341927, 0.4886387512670964, 0, 0, 0], [1033, 2, \n 0.0015543376717485793, 0.07771688358742897, 0, 0, 0], [1034, 2, \n 0.005364335122251813, 0.26821675611259066, 0, 0, 0], [1035, 3, \n 0.00317587127473044, 0.158793563736522, 2.22, 61.69, 0.004502], [1036, \n 2, 0.003538471088451239, 0.17692355442256197, 0, 0, 0], [1037, 2, \n 0.0032845967867616726, 0.16422983933808363, 0, 0, 0], [1038, 2, \n 0.0035759833548530246, 0.17879916774265123, 0, 0, 0], [1039, 2, \n 0.0033678813297702355, 0.1683940664885118, 0, 0, 0], [1041, 2, \n 0.012998987840239671, 0.6499493920119837, 0, 0, 0], [1042, 2, \n 0.0013374224133557281, 0.0668711206677864, 0, 0, 0], [1044, 3, \n 0.0012140138945870601, 0.060700694729353, 2.22, 61.69, 0.004502], [1046,\n 2, 0.0032875263364469907, 0.16437631682234954, 0, 0, 0], [1047, 3, \n 0.0005212006415679155, 0.026060032078395773, 2.22, 61.69, 0.004502], [\n 1048, 2, 0.0022653377413018724, 0.11326688706509364, 0, 0, 0], [1049, 2,\n 0.01870104799381521, 0.9350523996907605, 0, 0, 0], [1050, 2, \n 0.0017161801534011875, 0.08580900767005938, 0, 0, 0], [1051, 2, \n 0.011268551438979963, 
0.5634275719489983, 0, 0, 0], [1052, 3, \n 0.001315809692296204, 0.06579048461481019, 2.22, 61.69, 0.004502], [\n 1053, 3, 0.001042024786453249, 0.05210123932266245, 2.22, 61.69, \n 0.004502], [1054, 2, 0.017434200209443074, 0.8717100104721537, 0, 0, 0],\n [1055, 3, 7.255367011902793e-05, 0.0036276835059513967, 2.22, 61.69, \n 0.004502], [1056, 2, 0.02185427247219657, 1.0927136236098287, 0, 0, 0],\n [1057, 2, 0.010956497647839606, 0.5478248823919804, 0, 0, 0], [1058, 2,\n 0.02761344248663413, 1.3806721243317066, 0, 0, 0], [1059, 2, \n 0.01272767318121002, 0.636383659060501, 0, 0, 0], [1060, 3, \n 0.0002750105502899529, 0.013750527514497644, 2.22, 61.69, 0.004502], [\n 1061, 2, 0.004862954432750976, 0.2431477216375488, 0, 0, 0], [1062, 3, \n 7.333747745020713e-05, 0.0036668738725103567, 2.22, 61.69, 0.004502], [\n 1063, 3, 0.00022007597509710681, 0.011003798754855342, 2.22, 61.69, \n 0.004502], [1064, 2, 0.013355424896304362, 0.667771244815218, 0, 0, 0],\n [1065, 2, 0.020654478247623165, 1.0327239123811582, 0, 0, 0], [1066, 2,\n 0.004269679264204669, 0.21348396321023344, 0, 0, 0], [1067, 3, \n 0.002078788013715776, 0.1039394006857888, 2.22, 61.69, 0.004502], [1068,\n 3, 0.00014512554313847776, 0.007256277156923888, 2.22, 61.69, 0.004502],\n [1069, 3, 0.00010143951295915809, 0.005071975647957905, 2.22, 61.69, \n 0.004502], [1070, 3, 2.3689278981581715e-05, 0.001184463949079086, 2.22,\n 61.69, 0.004502], [1071, 3, 0.00021315991932608, 0.010657995966304002, \n 2.22, 61.69, 0.004502], [1072, 2, 0.007168748144119091, \n 0.3584374072059546, 0, 0, 0], [1073, 2, 0.004954025493475761, \n 0.24770127467378808, 0, 0, 0], [1074, 2, 0.009778033156939965, \n 0.48890165784699824, 0, 0, 0], [1075, 3, 0.0009142432329184414, \n 0.04571216164592208, 2.22, 61.69, 0.004502], [1077, 3, \n 0.000761621711582911, 0.038081085579145545, 2.22, 61.69, 0.004502], [\n 1078, 3, 0.0010764248660874562, 0.05382124330437281, 2.22, 61.69, \n 0.004502], [1079, 2, 0.004604543003215469, 
0.23022715016077344, 0, 0, 0\n ], [1080, 2, 0.005216654256351391, 0.2608327128175696, 0, 0, 0], [1081,\n 2, 0.01643746145779033, 0.8218730728895166, 0, 0, 0], [1082, 2, \n 0.015076341350664345, 0.7538170675332174, 0, 0, 0], [1083, 2, \n 0.019983163198675734, 0.9991581599337868, 0, 0, 0], [1084, 2, \n 0.018855524406049307, 0.9427762203024654, 0, 0, 0], [1085, 2, \n 0.0037788529320756745, 0.1889426466037837, 0, 0, 0], [1086, 2, \n 0.006918625580223116, 0.34593127901115583, 0, 0, 0], [1087, 2, \n 0.0032275229191801595, 0.16137614595900798, 0, 0, 0], [1088, 3, \n 0.0009589741139576335, 0.04794870569788167, 2.22, 61.69, 0.004502], [\n 1089, 2, 0.009823983504007974, 0.49119917520039863, 0, 0, 0], [1090, 2,\n 0.005674885746854652, 0.2837442873427326, 0, 0, 0], [1091, 3, \n 0.001168793996530651, 0.05843969982653256, 2.22, 61.69, 0.004502], [\n 1092, 2, 0.0013687465331790676, 0.06843732665895338, 0, 0, 0], [1093, 2,\n 0.007017509546711356, 0.3508754773355678, 0, 0, 0], [1094, 3, \n 0.00014185080981113786, 0.0070925404905568925, 2.22, 61.69, 0.004502],\n [1095, 3, 7.71951382648268e-06, 0.000385975691324134, 2.22, 61.69, \n 0.004502], [1096, 2, 0.0029145237970444643, 0.14572618985222321, 0, 0, \n 0], [1097, 3, 0.0002728726471928731, 0.013643632359643654, 2.22, 61.69,\n 0.004502], [1098, 2, 0.004521623727146264, 0.22608118635731317, 0, 0, 0\n ], [1099, 2, 0.018521637260932335, 0.9260818630466169, 0, 0, 0], [1100,\n 3, 7.335549646801683e-07, 3.667774823400842e-05, 2.22, 61.69, 0.004502],\n [1101, 2, 0.0021341020267997028, 0.10670510133998513, 0, 0, 0], [1102, \n 2, 0.008936050319297435, 0.44680251596487175, 0, 0, 0], [1103, 2, \n 0.006751135880742038, 0.33755679403710187, 0, 0, 0], [1104, 3, \n 8.200597012001097e-06, 0.0004100298506000548, 2.22, 61.69, 0.004502], [\n 1105, 3, 7.430370821118754e-05, 0.003715185410559377, 2.22, 61.69, \n 0.004502], [1106, 3, 9.496706349756433e-05, 0.004748353174878216, 2.22,\n 61.69, 0.004502], [1107, 2, 0.002514754747681537, 0.12573773738407681, 
\n 0, 0, 0], [1108, 2, 0.010075472977677913, 0.5037736488838956, 0, 0, 0],\n [1109, 3, 2.3877174563372565e-05, 0.0011938587281686282, 2.22, 61.69, \n 0.004502], [1110, 3, 5.6797921539226925e-05, 0.0028398960769613463, \n 2.22, 61.69, 0.004502], [1111, 2, 0.0027876433772406257, \n 0.13938216886203128, 0, 0, 0], [1112, 2, 0.004265767031264296, \n 0.2132883515632148, 0, 0, 0], [1113, 3, 0.00022012925719619891, \n 0.011006462859809947, 2.22, 61.69, 0.004502], [1114, 3, \n 0.0008560555102861403, 0.042802775514307015, 2.22, 61.69, 0.004502], [\n 1115, 2, 0.0032197222090973076, 0.16098611045486538, 0, 0, 0], [1116, 3,\n 0.002075453185310181, 0.10377265926550905, 2.22, 61.69, 0.004502], [\n 1117, 2, 0.005780032679669937, 0.2890016339834969, 0, 0, 0], [1118, 3, \n 0.0004094636121064103, 0.02047318060532052, 2.22, 61.69, 0.004502], [\n 1119, 3, 0.0027536366373517632, 0.13768183186758817, 2.22, 61.69, \n 0.004502], [1120, 3, 0.00014563422679717648, 0.007281711339858825, 2.22,\n 61.69, 0.004502], [1121, 3, 3.4414977793908876e-05, \n 0.0017207488896954439, 2.22, 61.69, 0.004502], [1122, 3, \n 8.894132329422267e-05, 0.004447066164711133, 2.22, 61.69, 0.004502], [\n 1123, 3, 9.32225252447514e-05, 0.00466112626223757, 2.22, 61.69, \n 0.004502], [1124, 3, 8.201464578534214e-05, 0.004100732289267108, 2.22,\n 61.69, 0.004502], [1125, 3, 0.0009107448109473576, 0.04553724054736788,\n 2.22, 61.69, 0.004502], [1126, 3, 0.0010150413250921298, \n 0.050752066254606494, 2.22, 61.69, 0.004502], [1127, 2, \n 0.003587869493403156, 0.17939347467015782, 0, 0, 0], [1128, 3, \n 9.85754616930036e-05, 0.004928773084650179, 2.22, 61.69, 0.004502], [\n 1129, 3, 0.00015167785485332866, 0.0075838927426664345, 2.22, 61.69, \n 0.004502], [1130, 3, 4.313144137237104e-05, 0.0021565720686185525, 2.22,\n 61.69, 0.004502], [1131, 3, 9.338261111863579e-05, 0.00466913055593179,\n 2.22, 61.69, 0.004502], [1132, 3, 1.598304249187116e-05, \n 0.0007991521245935579, 2.22, 61.69, 0.004502], [1133, 3, \n 
4.5810964480308454e-05, 0.002290548224015423, 2.22, 61.69, 0.004502], [\n 1134, 3, 3.236913111220881e-05, 0.0016184565556104404, 2.22, 61.69, \n 0.004502], [1135, 3, 0.00030684246506199216, 0.01534212325309961, 2.22,\n 61.69, 0.004502], [1136, 3, 2.5636662405410735e-05, \n 0.0012818331202705368, 2.22, 61.69, 0.004502], [1137, 3, \n 0.00018370212263491662, 0.00918510613174583, 2.22, 61.69, 0.004502], [\n 1138, 3, 7.98498118498449e-05, 0.003992490592492246, 2.22, 61.69, \n 0.004502], [1139, 3, 0.0012225149594472903, 0.06112574797236452, 2.22, \n 61.69, 0.004502], [1140, 3, 0.0018073289497007397, 0.09036644748503699,\n 2.22, 61.69, 0.004502], [1141, 2, 0.005339291711123932, \n 0.2669645855561966, 0, 0, 0], [1142, 3, 7.73959943559724e-05, \n 0.00386979971779862, 2.22, 61.69, 0.004502], [1143, 3, \n 0.0009515158509821171, 0.04757579254910586, 2.22, 61.69, 0.004502], [\n 1144, 2, 0.00334399697192306, 0.16719984859615303, 0, 0, 0], [1145, 2, \n 0.011197481443497569, 0.5598740721748785, 0, 0, 0], [1146, 3, \n 5.4833151376821656e-05, 0.002741657568841083, 2.22, 61.69, 0.004502], [\n 1147, 3, 0.002909588342312674, 0.14547941711563372, 2.22, 61.69, \n 0.004502], [1148, 3, 0.0005993650905551883, 0.029968254527759416, 2.22,\n 61.69, 0.004502], [1149, 3, 0.00026672685204354104, \n 0.013336342602177052, 2.22, 61.69, 0.004502], [1150, 3, \n 0.0001204929064021154, 0.00602464532010577, 2.22, 61.69, 0.004502], [\n 1151, 3, 0.00043239573730817076, 0.021619786865408542, 2.22, 61.69, \n 0.004502], [1152, 3, 3.9796369738190234e-06, 0.0001989818486909512, \n 2.22, 61.69, 0.004502], [1153, 3, 2.543747302116541e-06, \n 0.00012718736510582707, 2.22, 61.69, 0.004502], [1154, 3, \n 5.939787701451754e-06, 0.00029698938507258764, 2.22, 61.69, 0.004502],\n [1155, 3, 2.0319819845729137e-05, 0.001015990992286457, 2.22, 61.69, \n 0.004502], [1156, 3, 0.0008888342953225629, 0.044441714766128154, 2.22,\n 61.69, 0.004502], [1157, 3, 0.00014449421139309436, \n 0.007224710569654718, 2.22, 61.69, 
0.004502], [1158, 3, \n 3.9344224255474475e-05, 0.001967211212773724, 2.22, 61.69, 0.004502], [\n 1159, 3, 0.0006423837433282069, 0.032119187166410344, 2.22, 61.69, \n 0.004502], [1160, 2, 0.006583846414473584, 0.3291923207236792, 0, 0, 0],\n [1161, 3, 0.0007639741440540192, 0.038198707202700966, 2.22, 61.69, \n 0.004502], [1162, 2, 0.012733717176428691, 0.6366858588214346, 0, 0, 0],\n [1164, 2, 0.007318959323231913, 0.3659479661615957, 0, 0, 0], [1166, 2,\n 0.005301588846150501, 0.26507944230752506, 0, 0, 0], [1167, 3, \n 0.0001907109190583028, 0.00953554595291514, 2.22, 61.69, 0.004502], [\n 1168, 3, 4.6735632418379986e-05, 0.0023367816209189994, 2.22, 61.69, \n 0.004502], [1169, 3, 8.929850730838101e-05, 0.004464925365419051, 2.22,\n 61.69, 0.004502], [1170, 3, 1.00233247146895e-05, 0.0005011662357344751,\n 2.22, 61.69, 0.004502], [1171, 3, 0.0004260194354054759, \n 0.021300971770273798, 2.22, 61.69, 0.004502], [1172, 3, \n 0.00011513389518096898, 0.005756694759048449, 2.22, 61.69, 0.004502], [\n 1173, 2, 0.006452614026547609, 0.32263070132738053, 0, 0, 0], [1174, 3,\n 4.754703790085141e-05, 0.00237735189504257, 2.22, 61.69, 0.004502], [\n 1175, 3, 2.7710161030475335e-05, 0.001385508051523767, 2.22, 61.69, \n 0.004502], [1176, 3, 7.75663051366249e-06, 0.0003878315256831245, 2.22,\n 61.69, 0.004502], [1177, 3, 0.0009447268553453907, 0.04723634276726953,\n 2.22, 61.69, 0.004502], [1178, 3, 0.0001088973020076013, \n 0.005444865100380065, 2.22, 61.69, 0.004502], [1179, 3, \n 3.969316682855094e-05, 0.001984658341427547, 2.22, 61.69, 0.004502], [\n 1180, 3, 2.5956634148895864e-05, 0.0012978317074447932, 2.22, 61.69, \n 0.004502], [1181, 2, 0.00545834972439398, 0.272917486219699, 0, 0, 0],\n [1182, 2, 0.006322880792722177, 0.3161440396361089, 0, 0, 0], [1183, 3,\n 0.0014314935186861295, 0.07157467593430648, 2.22, 61.69, 0.004502], [\n 1184, 3, 0.00015810533075432708, 0.007905266537716353, 2.22, 61.69, \n 0.004502], [1185, 3, 0.0006974320121398697, 0.034871600606993486, 
2.22,\n 61.69, 0.004502], [1186, 3, 0.0012771847490467955, 0.06385923745233978,\n 2.22, 61.69, 0.004502], [1187, 3, 0.0003086504024546428, \n 0.01543252012273214, 2.22, 61.69, 0.004502], [1188, 2, \n 0.011440868435801076, 0.5720434217900537, 0, 0, 0], [1189, 3, \n 0.0006752949613083114, 0.03376474806541557, 2.22, 61.69, 0.004502], [\n 1190, 2, 0.011056408319218359, 0.552820415960918, 0, 0, 0], [1191, 2, \n 0.004652379906159672, 0.23261899530798363, 0, 0, 0], [1192, 3, \n 0.0009482218539415114, 0.04741109269707557, 2.22, 61.69, 0.004502], [\n 1193, 3, 9.320005102883975e-05, 0.0046600025514419875, 2.22, 61.69, \n 0.004502], [1194, 3, 0.00033807612872480814, 0.016903806436240405, 2.22,\n 61.69, 0.004502], [1195, 3, 7.285440296486341e-06, \n 0.0003642720148243171, 2.22, 61.69, 0.004502], [1196, 2, \n 0.0040761948650300354, 0.20380974325150175, 0, 0, 0], [1197, 2, \n 0.0023095720666282643, 0.11547860333141323, 0, 0, 0], [1198, 3, \n 0.0016279886826880022, 0.08139943413440012, 2.22, 61.69, 0.004502], [\n 1199, 2, 0.012822920004466005, 0.6411460002233003, 0, 0, 0], [1200, 2, \n 0.0035658606694853635, 0.1782930334742682, 0, 0, 0], [1201, 3, \n 0.0007239107895971019, 0.03619553947985509, 2.22, 61.69, 0.004502], [\n 1202, 3, 0.00176071556288929, 0.0880357781444645, 2.22, 61.69, 0.004502\n ], [1203, 2, 0.0063796286094078974, 0.31898143047039484, 0, 0, 0], [\n 1204, 3, 0.0015802630524518553, 0.07901315262259277, 2.22, 61.69, \n 0.004502], [1205, 3, 1.3927092046315124e-05, 0.0006963546023157563, \n 2.22, 61.69, 0.004502], [1206, 3, 0.00015871592092437352, \n 0.007935796046218677, 2.22, 61.69, 0.004502], [1207, 3, \n 0.00013884952267018553, 0.006942476133509278, 2.22, 61.69, 0.004502], [\n 1208, 3, 7.055386967979429e-05, 0.0035276934839897148, 2.22, 61.69, \n 0.004502], [1209, 3, 3.2453994235092736e-05, 0.001622699711754637, 2.22,\n 61.69, 0.004502], [1210, 3, 0.0003259549620621221, 0.016297748103106108,\n 2.22, 61.69, 0.004502], [1211, 3, 0.0011462484513341364, \n 
0.057312422566706815, 2.22, 61.69, 0.004502], [1212, 2, \n 0.005804182676892941, 0.290209133844647, 0, 0, 0], [1213, 2, \n 0.0036505499187602444, 0.18252749593801224, 0, 0, 0], [1214, 3, \n 0.00019852168003620192, 0.009926084001810095, 2.22, 61.69, 0.004502], [\n 1215, 3, 7.81255594160887e-05, 0.003906277970804435, 2.22, 61.69, \n 0.004502], [1216, 2, 0.0021517677385590084, 0.10758838692795043, 0, 0, \n 0], [1217, 3, 0.001279974509378072, 0.0639987254689036, 2.22, 61.69, \n 0.004502], [1218, 3, 4.139664610366431e-05, 0.0020698323051832157, 2.22,\n 61.69, 0.004502], [1219, 3, 0.00042701347071105576, 0.02135067353555279,\n 2.22, 61.69, 0.004502], [1220, 3, 0.0010059882305525484, \n 0.050299411527627416, 2.22, 61.69, 0.004502], [1221, 2, \n 0.02105078881494917, 1.0525394407474586, 0, 0, 0], [1222, 2, \n 0.013436354905899806, 0.6718177452949904, 0, 0, 0], [1223, 3, \n 0.00024230393037435297, 0.01211519651871765, 2.22, 61.69, 0.004502], [\n 1224, 2, 0.006415271247382745, 0.3207635623691373, 0, 0, 0], [1225, 3, \n 0.0010196947606849961, 0.05098473803424981, 2.22, 61.69, 0.004502], [\n 1226, 3, 0.00011572498554223855, 0.005786249277111928, 2.22, 61.69, \n 0.004502], [1227, 3, 0.0010454325410475286, 0.05227162705237644, 2.22, \n 61.69, 0.004502], [1228, 3, 9.713499706791583e-05, 0.004856749853395792,\n 2.22, 61.69, 0.004502], [1229, 2, 0.0026494957954367885, \n 0.13247478977183944, 0, 0, 0], [1230, 3, 4.8238032843230984e-05, \n 0.002411901642161549, 2.22, 61.69, 0.004502], [1231, 3, \n 0.0010059686019705035, 0.05029843009852517, 2.22, 61.69, 0.004502], [\n 1232, 2, 0.002228131222721375, 0.11140656113606878, 0, 0, 0], [1233, 2,\n 0.03662908231521014, 1.831454115760507, 0, 0, 0], [1234, 2, \n 0.0064387341725816285, 0.32193670862908147, 0, 0, 0], [1235, 3, \n 0.0002292223612393676, 0.01146111806196838, 2.22, 61.69, 0.004502], [\n 1236, 2, 0.0020851258089392244, 0.10425629044696123, 0, 0, 0], [1237, 3,\n 0.0009298092078685558, 0.04649046039342779, 2.22, 61.69, 0.004502], [\n 
1238, 2, 0.00642623738699833, 0.3213118693499165, 0, 0, 0], [1239, 3, \n 0.0001443666373276477, 0.007218331866382386, 2.22, 61.69, 0.004502], [\n 1240, 2, 0.02037573875130283, 1.0187869375651415, 0, 0, 0], [1241, 2, \n 0.010972960615224547, 0.5486480307612274, 0, 0, 0], [1242, 3, \n 0.0008355662499393597, 0.041778312496967986, 2.22, 61.69, 0.004502], [\n 1243, 2, 0.0027276591752610937, 0.1363829587630547, 0, 0, 0], [1244, 2,\n 0.020592901244747865, 1.0296450622373932, 0, 0, 0], [1245, 3, \n 0.00023503888700973188, 0.011751944350486595, 2.22, 61.69, 0.004502], [\n 1246, 2, 0.003636870278584459, 0.18184351392922293, 0, 0, 0], [1247, 3,\n 0.0013899571448864774, 0.06949785724432388, 2.22, 61.69, 0.004502], [\n 1248, 2, 0.004527446475069785, 0.22637232375348926, 0, 0, 0], [1249, 2,\n 0.0021092345113500805, 0.10546172556750404, 0, 0, 0], [1250, 3, \n 0.000876926339333997, 0.04384631696669984, 2.22, 61.69, 0.004502], [\n 1251, 3, 0.0008805328097855692, 0.044026640489278464, 2.22, 61.69, \n 0.004502], [1252, 3, 0.0006440660331426705, 0.032203301657133525, 2.22,\n 61.69, 0.004502], [1253, 2, 0.004106369053307717, 0.20531845266538587, \n 0, 0, 0], [1254, 2, 0.005238024431161238, 0.2619012215580619, 0, 0, 0],\n [1255, 3, 0.00023250233000853782, 0.01162511650042689, 2.22, 61.69, \n 0.004502], [1256, 3, 0.0009607764830526361, 0.048038824152631804, 2.22,\n 61.69, 0.004502], [1257, 2, 0.005662916214121937, 0.28314581070609685, \n 0, 0, 0], [1258, 2, 0.014991588973313675, 0.7495794486656838, 0, 0, 0],\n [1259, 2, 0.00695753592752513, 0.34787679637625657, 0, 0, 0], [1260, 3,\n 0.000590177310330468, 0.0295088655165234, 2.22, 61.69, 0.004502], [1261,\n 2, 0.0065104902868619585, 0.3255245143430979, 0, 0, 0], [1262, 3, \n 2.3902123196900468e-05, 0.0011951061598450233, 2.22, 61.69, 0.004502],\n [1263, 3, 1.7811428520856433e-05, 0.0008905714260428216, 2.22, 61.69, \n 0.004502], [1264, 2, 0.0033780757704728456, 0.1689037885236423, 0, 0, 0\n ], [1265, 3, 0.0003085654478954214, 
0.015428272394771068, 2.22, 61.69, \n 0.004502], [1266, 2, 0.006508243779623651, 0.3254121889811826, 0, 0, 0],\n [1267, 3, 0.0011818165946297665, 0.05909082973148832, 2.22, 61.69, \n 0.004502], [1270, 3, 0.0013856435479358959, 0.06928217739679479, 2.22, \n 61.69, 0.004502], [1271, 3, 0.0014840987910167424, 0.07420493955083712,\n 2.22, 61.69, 0.004502], [1272, 3, 4.931888796058019e-05, \n 0.00246594439802901, 2.22, 61.69, 0.004502], [1273, 3, \n 0.00012918225610620136, 0.006459112805310069, 2.22, 61.69, 0.004502], [\n 1274, 2, 0.002007808497835817, 0.10039042489179087, 0, 0, 0], [1275, 2,\n 0.003173827843694794, 0.1586913921847397, 0, 0, 0], [1276, 3, \n 0.0007211910038712903, 0.036059550193564514, 2.22, 61.69, 0.004502], [\n 1277, 2, 0.00187538099082149, 0.09376904954107451, 0, 0, 0], [1278, 2, \n 0.0052395364566005164, 0.2619768228300258, 0, 0, 0], [1279, 3, \n 1.1251600278965072e-07, 5.625800139482535e-06, 2.22, 61.69, 0.004502],\n [1280, 3, 1.694789540680769e-05, 0.0008473947703403845, 2.22, 61.69, \n 0.004502], [1282, 3, 0.00013160445621004433, 0.006580222810502218, 2.22,\n 61.69, 0.004502], [1283, 2, 0.03582020109680739, 1.7910100548403696, 0,\n 0, 0], [1284, 3, 0.001164025604385567, 0.058201280219278353, 2.22, \n 61.69, 0.004502], [1285, 3, 7.476034074798499e-05, \n 0.0037380170373992492, 2.22, 61.69, 0.004502], [1286, 3, \n 0.0008085504689103687, 0.04042752344551843, 2.22, 61.69, 0.004502], [\n 1287, 2, 0.0029583869971778567, 0.14791934985889282, 0, 0, 0], [1288, 2,\n 0.004222012491839328, 0.2111006245919664, 0, 0, 0], [1289, 2, \n 0.005576926941677767, 0.2788463470838884, 0, 0, 0], [1290, 3, \n 0.00016635371363986156, 0.008317685681993078, 2.22, 61.69, 0.004502], [\n 1291, 2, 0.0031745529736635094, 0.1587276486831755, 0, 0, 0], [1292, 3,\n 0.0015865361520825533, 0.07932680760412766, 2.22, 61.69, 0.004502], [\n 1293, 3, 6.53883586637161e-05, 0.003269417933185805, 2.22, 61.69, \n 0.004502], [1294, 3, 0.00013884615253373605, 0.006942307626686803, 2.22,\n 
61.69, 0.004502], [1295, 3, 0.00015342985152912175, \n 0.007671492576456088, 2.22, 61.69, 0.004502], [1296, 3, \n 0.0007760328429390742, 0.03880164214695372, 2.22, 61.69, 0.004502], [\n 1297, 2, 0.006086894248154212, 0.3043447124077106, 0, 0, 0], [1300, 3, \n 0.001511593201166196, 0.07557966005830981, 2.22, 61.69, 0.004502], [\n 1301, 2, 0.0038746782543149596, 0.193733912715748, 0, 0, 0], [1302, 3, \n 0.0003104985267932093, 0.015524926339660468, 2.22, 61.69, 0.004502], [\n 1303, 3, 0.00027600750632746427, 0.013800375316373212, 2.22, 61.69, \n 0.004502], [1304, 3, 0.000610793340517708, 0.030539667025885397, 2.22, \n 61.69, 0.004502], [1305, 3, 1.6012209452329225e-07, \n 8.006104726164614e-06, 2.22, 61.69, 0.004502], [1306, 3, \n 5.855304532138158e-05, 0.0029276522660690793, 2.22, 61.69, 0.004502], [\n 1307, 3, 1.9031130574577255e-05, 0.0009515565287288628, 2.22, 61.69, \n 0.004502], [1308, 3, 8.924254018516687e-05, 0.004462127009258345, 2.22,\n 61.69, 0.004502], [1309, 3, 9.599337069530822e-05, 0.004799668534765412,\n 2.22, 61.69, 0.004502], [1310, 3, 4.717144911466962e-05, \n 0.002358572455733481, 2.22, 61.69, 0.004502], [1311, 3, \n 0.000494670556881473, 0.024733527844073653, 2.22, 61.69, 0.004502], [\n 1312, 2, 0.011688306978695986, 0.5844153489347994, 0, 0, 0], [1313, 3, \n 0.0019631283227609974, 0.09815641613804986, 2.22, 61.69, 0.004502], [\n 1314, 3, 0.0007641975650906521, 0.038209878254532606, 2.22, 61.69, \n 0.004502], [1315, 3, 0.0005015944131679134, 0.02507972065839567, 2.22, \n 61.69, 0.004502], [1316, 3, 7.002675793369909e-05, \n 0.0035013378966849544, 2.22, 61.69, 0.004502], [1317, 3, \n 0.0007908894216365961, 0.039544471081829805, 2.22, 61.69, 0.004502], [\n 1318, 3, 5.6301925294159776e-05, 0.002815096264707989, 2.22, 61.69, \n 0.004502], [1319, 3, 0.0008405877558306301, 0.04202938779153151, 2.22, \n 61.69, 0.004502], [1320, 3, 0.0008231691710158349, 0.04115845855079175,\n 2.22, 61.69, 0.004502], [1321, 3, 6.721511097913718e-06, \n 
0.0003360755548956859, 2.22, 61.69, 0.004502], [1322, 3, \n 4.510903550142661e-05, 0.0022554517750713312, 2.22, 61.69, 0.004502], [\n 1323, 2, 0.012675857799799822, 0.6337928899899912, 0, 0, 0], [1324, 3, \n 0.0005501358559855778, 0.027506792799278885, 2.22, 61.69, 0.004502], [\n 1325, 2, 0.0029533893249704176, 0.14766946624852087, 0, 0, 0], [1326, 2,\n 0.0017553273040833693, 0.08776636520416847, 0, 0, 0], [1327, 2, \n 0.0017060005041489908, 0.08530002520744955, 0, 0, 0], [1328, 3, \n 0.0006537346009359085, 0.032686730046795426, 2.22, 61.69, 0.004502], [\n 1329, 2, 0.00793023382909983, 0.3965116914549916, 0, 0, 0], [1330, 3, \n 0.0019182008434651947, 0.09591004217325974, 2.22, 61.69, 0.004502], [\n 1331, 3, 1.2859395030416278e-05, 0.0006429697515208139, 2.22, 61.69, \n 0.004502], [1332, 3, 0.0006688404111922736, 0.03344202055961368, 2.22, \n 61.69, 0.004502], [1333, 3, 0.0019970167397866546, 0.09985083698933273,\n 2.22, 61.69, 0.004502], [1334, 3, 3.081793473501891e-05, \n 0.001540896736750946, 2.22, 61.69, 0.004502], [1336, 3, \n 0.0012612757957991489, 0.06306378978995744, 2.22, 61.69, 0.004502], [\n 1337, 2, 0.003207094686766897, 0.16035473433834485, 0, 0, 0], [1338, 3,\n 2.9972992477731713e-05, 0.0014986496238865857, 2.22, 61.69, 0.004502],\n [1339, 3, 0.00033310206544168424, 0.016655103272084214, 2.22, 61.69, \n 0.004502], [1340, 2, 0.0017807406464817902, 0.08903703232408952, 0, 0, \n 0], [1341, 2, 0.0060362713117726305, 0.3018135655886316, 0, 0, 0], [\n 1342, 3, 2.2718668528089703e-05, 0.0011359334264044853, 2.22, 61.69, \n 0.004502], [1343, 3, 2.8562833512248258e-05, 0.001428141675612413, 2.22,\n 61.69, 0.004502], [1344, 3, 8.141338105296074e-06, \n 0.0004070669052648037, 2.22, 61.69, 0.004502], [1345, 3, \n 0.00011633701914020801, 0.005816850957010401, 2.22, 61.69, 0.004502], [\n 1346, 2, 0.007061813430091215, 0.35309067150456075, 0, 0, 0], [1348, 3,\n 0.000978567012051048, 0.048928350602552406, 2.22, 61.69, 0.004502], [\n 1349, 3, 0.0014423210644570928, 
0.07211605322285465, 2.22, 61.69, \n 0.004502], [1350, 3, 5.238023081568273e-06, 0.0002619011540784137, 2.22,\n 61.69, 0.004502], [1351, 3, 4.1064133941603613e-07, \n 2.0532066970801804e-05, 2.22, 61.69, 0.004502], [1352, 3, \n 2.2066211271763273e-05, 0.0011033105635881637, 2.22, 61.69, 0.004502],\n [1355, 3, 4.8633739445049876e-05, 0.0024316869722524944, 2.22, 61.69, \n 0.004502], [1356, 2, 0.004176219204509461, 0.20881096022547305, 0, 0, 0\n ], [1357, 2, 0.0024790764561485362, 0.12395382280742683, 0, 0, 0], [\n 1358, 3, 7.127776476894326e-06, 0.00035638882384471626, 2.22, 61.69, \n 0.004502], [1359, 2, 0.0018980577612326096, 0.0949028880616305, 0, 0, 0\n ], [1360, 3, 0.00101350119837844, 0.050675059918922, 2.22, 61.69, \n 0.004502], [1361, 2, 0.0029249133090325724, 0.14624566545162862, 0, 0, \n 0], [1362, 2, 0.004182445633969954, 0.2091222816984977, 0, 0, 0], [1363,\n 3, 2.004955475366426e-06, 0.0001002477737683213, 2.22, 61.69, 0.004502],\n [1364, 3, 2.7595075243285495e-06, 0.00013797537621642746, 2.22, 61.69, \n 0.004502], [1365, 3, 2.8999446623259055e-08, 1.449972331162953e-06, \n 2.22, 61.69, 0.004502], [1366, 3, 3.1831901356432676e-05, \n 0.001591595067821634, 2.22, 61.69, 0.004502], [1367, 3, \n 0.0021429014821967973, 0.10714507410983987, 2.22, 61.69, 0.004502], [\n 1368, 3, 9.560516623724435e-05, 0.004780258311862218, 2.22, 61.69, \n 0.004502], [1369, 3, 0.00046204655219542516, 0.023102327609771257, 2.22,\n 61.69, 0.004502], [1370, 3, 1.0304608838582957e-05, \n 0.0005152304419291479, 2.22, 61.69, 0.004502], [1371, 2, \n 0.0022749567929977086, 0.11374783964988543, 0, 0, 0], [1372, 2, \n 0.0050082619833296356, 0.2504130991664818, 0, 0, 0], [1373, 3, \n 0.0010693151538022578, 0.05346575769011289, 2.22, 61.69, 0.004502], [\n 1374, 2, 0.006889508467327262, 0.3444754233663631, 0, 0, 0], [1375, 2, \n 0.003897629175102736, 0.1948814587551368, 0, 0, 0], [1376, 2, \n 0.007852128522530815, 0.39260642612654084, 0, 0, 0], [1377, 2, \n 0.006094764129655812, 
0.30473820648279065, 0, 0, 0], [1378, 2, \n 0.0062434108523654235, 0.3121705426182712, 0, 0, 0], [1379, 3, \n 3.0098190435426792e-05, 0.0015049095217713397, 2.22, 61.69, 0.004502],\n [1380, 3, 5.394520401513898e-05, 0.002697260200756949, 2.22, 61.69, \n 0.004502], [1381, 3, 3.680472218048895e-05, 0.001840236109024447, 2.22,\n 61.69, 0.004502], [1382, 2, 0.008838822964419164, 0.4419411482209583, 0,\n 0, 0], [1383, 2, 0.006991449967869686, 0.34957249839348425, 0, 0, 0], [\n 1384, 3, 0.0002870603107466644, 0.01435301553733322, 2.22, 61.69, \n 0.004502], [1385, 3, 4.602918986308876e-06, 0.00023014594931544384, \n 2.22, 61.69, 0.004502], [1386, 3, 2.5406083498023173e-05, \n 0.0012703041749011585, 2.22, 61.69, 0.004502], [1387, 3, \n 0.00011182192406483717, 0.0055910962032418585, 2.22, 61.69, 0.004502],\n [1388, 3, 4.1266752095987256e-05, 0.0020633376047993627, 2.22, 61.69, \n 0.004502], [1389, 3, 9.493711173340556e-06, 0.00047468555866702787, \n 2.22, 61.69, 0.004502], [1390, 3, 0.00011948001087807657, \n 0.005974000543903829, 2.22, 61.69, 0.004502], [1391, 3, \n 1.6156815754111043e-05, 0.0008078407877055523, 2.22, 61.69, 0.004502],\n [1392, 3, 0.0007258528797202384, 0.03629264398601192, 2.22, 61.69, \n 0.004502], [1393, 3, 8.763130962106806e-05, 0.004381565481053403, 2.22,\n 61.69, 0.004502], [1394, 3, 6.862035771367977e-05, 0.003431017885683988,\n 2.22, 61.69, 0.004502], [1395, 3, 4.696755105006889e-06, \n 0.00023483775525034447, 2.22, 61.69, 0.004502], [1396, 3, \n 1.6473931389884785e-06, 8.236965694942393e-05, 2.22, 61.69, 0.004502],\n [1397, 3, 0.000841878959456196, 0.042093947972809805, 2.22, 61.69, \n 0.004502], [1398, 3, 9.106352752461475e-05, 0.0045531763762307375, 2.22,\n 61.69, 0.004502], [1399, 3, 0.000614501928895323, 0.03072509644476615, \n 2.22, 61.69, 0.004502], [1400, 3, 8.258214886247176e-05, \n 0.004129107443123589, 2.22, 61.69, 0.004502], [1401, 2, \n 0.0029499050537279323, 0.14749525268639663, 0, 0, 0], [1402, 3, \n 0.0008779203509557502, 
0.04389601754778751, 2.22, 61.69, 0.004502], [\n 1403, 2, 0.007617262031172502, 0.38086310155862513, 0, 0, 0], [1404, 2,\n 0.008581667499251882, 0.42908337496259413, 0, 0, 0], [1405, 3, \n 0.0010206451561773305, 0.051032257808866534, 2.22, 61.69, 0.004502], [\n 1406, 3, 0.00044281345416550866, 0.02214067270827543, 2.22, 61.69, \n 0.004502], [1407, 3, 6.985519985723439e-06, 0.00034927599928617195, \n 2.22, 61.69, 0.004502], [1408, 3, 0.0015599034807669107, \n 0.07799517403834554, 2.22, 61.69, 0.004502], [1409, 3, \n 0.0003826451438968471, 0.019132257194842357, 2.22, 61.69, 0.004502], [\n 1410, 3, 0.001119849138434054, 0.0559924569217027, 2.22, 61.69, \n 0.004502], [1411, 3, 0.0021677332100863795, 0.10838666050431899, 2.22, \n 61.69, 0.004502], [1412, 3, 0.0001702932115988861, 0.008514660579944306,\n 2.22, 61.69, 0.004502], [1413, 3, 0.00015712687360754934, \n 0.007856343680377468, 2.22, 61.69, 0.004502], [1414, 3, \n 0.0006609559456239092, 0.033047797281195467, 2.22, 61.69, 0.004502], [\n 1415, 3, 0.0001890075811839285, 0.009450379059196426, 2.22, 61.69, \n 0.004502], [1416, 3, 0.0002017048354821146, 0.010085241774105731, 2.22,\n 61.69, 0.004502], [1417, 3, 3.587634624733768e-08, \n 1.7938173123668838e-06, 2.22, 61.69, 0.004502], [1418, 2, \n 0.002634005451573638, 0.13170027257868192, 0, 0, 0], [1419, 3, \n 0.0009538705167746413, 0.04769352583873206, 2.22, 61.69, 0.004502], [\n 1421, 3, 0.00030900630459512675, 0.015450315229756338, 2.22, 61.69, \n 0.004502], [1422, 3, 0.0002087121412723534, 0.010435607063617671, 2.22,\n 61.69, 0.004502], [1423, 3, 8.660213976572599e-05, 0.0043301069882863, \n 2.22, 61.69, 0.004502], [1424, 2, 0.005562707763624093, \n 0.27813538818120465, 0, 0, 0], [1425, 3, 0.0013602274146640447, \n 0.06801137073320224, 2.22, 61.69, 0.004502], [1426, 2, \n 0.004377563184547638, 0.2188781592273819, 0, 0, 0], [1427, 2, \n 0.012484847220837852, 0.6242423610418927, 0, 0, 0], [1428, 2, \n 0.008488880122374441, 0.4244440061187221, 0, 0, 0], [1431, 2, \n 
0.006398108618200077, 0.31990543091000384, 0, 0, 0], [1432, 3, \n 0.00038249012070950037, 0.019124506035475018, 2.22, 61.69, 0.004502], [\n 1433, 2, 0.0499489397816605, 2.4974469890830253, 0, 0, 0], [1434, 2, \n 0.002523926322700656, 0.12619631613503277, 0, 0, 0], [1435, 2, \n 0.00281243262144019, 0.1406216310720095, 0, 0, 0], [1436, 2, \n 0.005026791926267322, 0.2513395963133661, 0, 0, 0], [1437, 2, \n 0.007689748714359815, 0.38448743571799077, 0, 0, 0], [1438, 2, \n 0.021209120082186957, 1.060456004109348, 0, 0, 0], [1439, 2, \n 0.0025185488172777457, 0.12592744086388727, 0, 0, 0], [1440, 3, \n 2.1228241611109457e-05, 0.001061412080555473, 2.22, 61.69, 0.004502], [\n 1441, 3, 5.1097125443354235e-06, 0.0002554856272167712, 2.22, 61.69, \n 0.004502], [1442, 3, 2.626011287317575e-05, 0.0013130056436587876, 2.22,\n 61.69, 0.004502], [1443, 2, 0.006557506818224797, 0.3278753409112398, 0,\n 0, 0], [1444, 3, 0.00042227456865251087, 0.021113728432625545, 2.22, \n 61.69, 0.004502], [1445, 3, 0.0009856395478638393, 0.04928197739319196,\n 2.22, 61.69, 0.004502], [1446, 2, 0.02178507310152743, \n 1.0892536550763714, 0, 0, 0], [1447, 2, 0.003442397713820559, \n 0.17211988569102793, 0, 0, 0], [1448, 3, 0.000439455069088402, \n 0.0219727534544201, 2.22, 61.69, 0.004502], [1449, 2, \n 0.003346435866528816, 0.16732179332644082, 0, 0, 0], [1450, 2, \n 0.0033264151601212124, 0.1663207580060606, 0, 0, 0], [1451, 2, \n 0.004170743873351868, 0.2085371936675934, 0, 0, 0], [1452, 3, \n 0.0013165328240904745, 0.06582664120452372, 2.22, 61.69, 0.004502], [\n 1453, 2, 0.004077756743774734, 0.20388783718873668, 0, 0, 0], [1454, 2,\n 0.009875666531734596, 0.49378332658672985, 0, 0, 0], [1455, 3, \n 2.1818849454345026e-05, 0.001090942472717251, 2.22, 61.69, 0.004502], [\n 1456, 2, 0.0017907486519991621, 0.08953743259995812, 0, 0, 0], [1457, 3,\n 8.903780729597746e-05, 0.004451890364798873, 2.22, 61.69, 0.004502], [\n 1458, 3, 1.0945897203271481e-05, 0.0005472948601635741, 2.22, 61.69, \n 
0.004502], [1459, 3, 0.00033798517072819835, 0.01689925853640992, 2.22,\n 61.69, 0.004502], [1460, 2, 0.003233851084262461, 0.16169255421312306, \n 0, 0, 0], [1461, 3, 0.0011159317192975062, 0.05579658596487532, 2.22, \n 61.69, 0.004502], [1462, 3, 0.00014771811478685875, \n 0.0073859057393429375, 2.22, 61.69, 0.004502], [1463, 3, \n 4.5276834778775515e-05, 0.002263841738938776, 2.22, 61.69, 0.004502], [\n 1464, 2, 0.009317735345896607, 0.4658867672948304, 0, 0, 0], [1465, 3, \n 0.0002263874562139475, 0.011319372810697375, 2.22, 61.69, 0.004502], [\n 1466, 3, 0.00018856670442025825, 0.009428335221012914, 2.22, 61.69, \n 0.004502], [1467, 3, 6.63001698920047e-05, 0.0033150084946002357, 2.22,\n 61.69, 0.004502], [1468, 3, 0.0015144656821575462, 0.0757232841078773, \n 2.22, 61.69, 0.004502], [1469, 2, 0.0021846358435379763, \n 0.10923179217689882, 0, 0, 0], [1470, 2, 0.005027084884666319, \n 0.2513542442333159, 0, 0, 0], [1471, 2, 0.008429379144717497, \n 0.42146895723587485, 0, 0, 0], [1472, 3, 0.000411329166889909, \n 0.020566458344495452, 2.22, 61.69, 0.004502], [1473, 3, \n 0.0003152649698806797, 0.01576324849403399, 2.22, 61.69, 0.004502], [\n 1474, 3, 4.6374430095522104e-05, 0.0023187215047761056, 2.22, 61.69, \n 0.004502], [1475, 3, 1.2661518354387543e-05, 0.0006330759177193771, \n 2.22, 61.69, 0.004502], [1476, 2, 0.015946059282369706, \n 0.7973029641184852, 0, 0, 0], [1477, 3, 0.0003829836649997916, \n 0.01914918324998958, 2.22, 61.69, 0.004502], [1479, 3, \n 0.00014225067121410135, 0.007112533560705067, 2.22, 61.69, 0.004502], [\n 1480, 3, 0.0004782600316322042, 0.023913001581610215, 2.22, 61.69, \n 0.004502], [1481, 3, 1.9134115446378896e-06, 9.567057723189448e-05, \n 2.22, 61.69, 0.004502], [1482, 3, 0.0005460062457677878, \n 0.02730031228838939, 2.22, 61.69, 0.004502], [1483, 3, \n 0.00010937933305696306, 0.005468966652848153, 2.22, 61.69, 0.004502], [\n 1484, 3, 1.0350331428991598e-06, 5.175165714495798e-05, 2.22, 61.69, \n 0.004502], [1485, 3, 
1.9501739896369628e-05, 0.0009750869948184814, \n 2.22, 61.69, 0.004502], [1486, 3, 0.00010033262049505883, \n 0.005016631024752942, 2.22, 61.69, 0.004502], [1487, 3, \n 4.061288205771431e-05, 0.0020306441028857154, 2.22, 61.69, 0.004502], [\n 1488, 3, 0.0001420359709113183, 0.007101798545565915, 2.22, 61.69, \n 0.004502], [1489, 3, 7.571817467557017e-06, 0.00037859087337785094, \n 2.22, 61.69, 0.004502], [1490, 2, 0.02173832998960063, \n 1.0869164994800316, 0, 0, 0], [1491, 2, 0.002899243829618353, \n 0.14496219148091766, 0, 0, 0], [1492, 2, 0.006310327387189529, \n 0.31551636935947647, 0, 0, 0], [1493, 2, 0.0026261050067275696, \n 0.1313052503363785, 0, 0, 0], [1494, 2, 0.01942091372606376, \n 0.971045686303188, 0, 0, 0], [1495, 2, 0.001839513558783269, \n 0.09197567793916346, 0, 0, 0], [1497, 2, 0.004375527360649893, \n 0.2187763680324947, 0, 0, 0], [1498, 2, 0.006735488235440387, \n 0.3367744117720194, 0, 0, 0], [1500, 3, 9.85597782087346e-06, \n 0.000492798891043673, 2.22, 61.69, 0.004502], [1501, 3, \n 0.0005198212383651805, 0.02599106191825903, 2.22, 61.69, 0.004502], [\n 1502, 3, 2.5730645753187908e-05, 0.0012865322876593954, 2.22, 61.69, \n 0.004502], [1503, 3, 0.0016785036591113812, 0.08392518295556907, 2.22, \n 61.69, 0.004502], [1504, 2, 0.0070690698718853685, 0.3534534935942685, \n 0, 0, 0], [1505, 3, 0.0008020995657820899, 0.0401049782891045, 2.22, \n 61.69, 0.004502], [1506, 2, 0.0016397994496200178, 0.08198997248100089,\n 0, 0, 0], [1507, 3, 0.00041507959569883954, 0.020753979784941975, 2.22,\n 61.69, 0.004502], [1508, 3, 4.154538017488063e-06, \n 0.00020772690087440316, 2.22, 61.69, 0.004502], [1510, 2, \n 0.0038109932532764228, 0.19054966266382115, 0, 0, 0], [1511, 2, \n 0.00988173435818505, 0.4940867179092525, 0, 0, 0], [1512, 2, \n 0.0024139057115332764, 0.12069528557666383, 0, 0, 0], [1513, 3, \n 0.0009163944605813735, 0.04581972302906867, 2.22, 61.69, 0.004502], [\n 1514, 3, 7.863212274868215e-07, 3.931606137434107e-05, 2.22, 61.69, \n 
0.004502], [1516, 3, 8.064530491522743e-07, 4.032265245761371e-05, 2.22,\n 61.69, 0.004502], [1517, 3, 5.411679453042277e-05, \n 0.0027058397265211386, 2.22, 61.69, 0.004502], [1518, 3, \n 2.5128262984133043e-05, 0.0012564131492066523, 2.22, 61.69, 0.004502],\n [1519, 3, 1.7440471969906603e-06, 8.720235984953302e-05, 2.22, 61.69, \n 0.004502], [1520, 2, 0.002179468836492435, 0.10897344182462178, 0, 0, 0\n ], [1521, 3, 0.0008492761068800811, 0.042463805344004055, 2.22, 61.69, \n 0.004502], [1522, 3, 0.001100146404858253, 0.055007320242912654, 2.22, \n 61.69, 0.004502], [1523, 3, 0.0005582443262487387, 0.027912216312436934,\n 2.22, 61.69, 0.004502], [1524, 3, 0.000714042943349428, \n 0.0357021471674714, 2.22, 61.69, 0.004502], [1525, 2, \n 0.0030458928986021308, 0.15229464493010655, 0, 0, 0], [1526, 3, \n 0.0028315929319783603, 0.14157964659891803, 2.22, 61.69, 0.004502], [\n 1527, 2, 0.006620761748036568, 0.3310380874018284, 0, 0, 0], [1528, 3, \n 0.0026347607821089578, 0.13173803910544787, 2.22, 61.69, 0.004502], [\n 1529, 2, 0.002711166418718582, 0.1355583209359291, 0, 0, 0], [1530, 2, \n 0.005032807482107288, 0.25164037410536444, 0, 0, 0], [1531, 2, \n 0.01170243432457441, 0.5851217162287206, 0, 0, 0], [1532, 3, \n 0.0013959626805160842, 0.06979813402580422, 2.22, 61.69, 0.004502], [\n 1534, 3, 0.0018790855823381403, 0.09395427911690701, 2.22, 61.69, \n 0.004502], [1535, 3, 0.0005686146984208124, 0.028430734921040625, 2.22,\n 61.69, 0.004502], [1536, 3, 0.0024994615604055, 0.124973078020275, 2.22,\n 61.69, 0.004502], [1537, 2, 0.0032722848050199577, 0.16361424025099788,\n 0, 0, 0], [1538, 2, 0.0037830688364752845, 0.18915344182376426, 0, 0, 0\n ], [1539, 2, 0.005940345649432395, 0.2970172824716198, 0, 0, 0], [1540,\n 3, 0.00011646135769917789, 0.005823067884958895, 2.22, 61.69, 0.004502],\n [1541, 3, 0.00012889056523503453, 0.006444528261751726, 2.22, 61.69, \n 0.004502], [1542, 2, 0.0015000008003063865, 0.07500004001531933, 0, 0, \n 0], [1543, 3, 
0.0009414759018296965, 0.04707379509148483, 2.22, 61.69, \n 0.004502], [1544, 2, 0.0055441839759994335, 0.2772091987999717, 0, 0, 0\n ], [1545, 2, 0.011812169709970757, 0.5906084854985378, 0, 0, 0], [1546,\n 2, 0.01626203379888308, 0.8131016899441541, 0, 0, 0], [1547, 2, \n 0.02285851188035466, 1.142925594017733, 0, 0, 0], [1548, 3, \n 0.0013543308279443016, 0.06771654139721509, 2.22, 61.69, 0.004502], [\n 1549, 2, 0.0049030854262021965, 0.2451542713101098, 0, 0, 0], [1550, 3,\n 0.00033197905453791535, 0.016598952726895766, 2.22, 61.69, 0.004502], [\n 1551, 3, 0.0006096583500745879, 0.030482917503729397, 2.22, 61.69, \n 0.004502], [1552, 2, 0.0015656981738750837, 0.0782849086937542, 0, 0, 0\n ], [1553, 2, 0.0024888943599414575, 0.12444471799707287, 0, 0, 0], [\n 1554, 2, 0.004505411665481134, 0.22527058327405666, 0, 0, 0], [1555, 2,\n 0.002990934193624122, 0.14954670968120612, 0, 0, 0], [1556, 3, \n 0.0011564128320789798, 0.057820641603948994, 2.22, 61.69, 0.004502], [\n 1557, 3, 0.0007362927807377101, 0.036814639036885505, 2.22, 61.69, \n 0.004502], [1558, 3, 0.0007445458899189016, 0.03722729449594508, 2.22, \n 61.69, 0.004502], [1559, 2, 0.003443835108227301, 0.17219175541136506, \n 0, 0, 0], [1560, 2, 0.002329145997663478, 0.11645729988317388, 0, 0, 0],\n [1561, 3, 0.0005540231602239543, 0.027701158011197716, 2.22, 61.69, \n 0.004502], [1562, 2, 0.0017152625197382394, 0.08576312598691198, 0, 0, \n 0], [1563, 2, 0.0030915759312768417, 0.1545787965638421, 0, 0, 0], [\n 1564, 2, 0.0037097629455119584, 0.18548814727559793, 0, 0, 0], [1565, 3,\n 0.0004375471497403783, 0.021877357487018915, 2.22, 61.69, 0.004502], [\n 1566, 2, 0.010252171892683539, 0.512608594634177, 0, 0, 0], [1567, 3, \n 0.0008118171037128424, 0.04059085518564212, 2.22, 61.69, 0.004502], [\n 1568, 2, 0.002604241793178731, 0.13021208965893655, 0, 0, 0], [1569, 2,\n 0.009255990694371212, 0.46279953471856067, 0, 0, 0], [1570, 2, \n 0.0069640706150360665, 0.3482035307518033, 0, 0, 0], [1571, 2, \n 
0.0065041313813353095, 0.32520656906676554, 0, 0, 0], [1572, 2, \n 0.006633904979541033, 0.33169524897705166, 0, 0, 0], [1573, 2, \n 0.0023394661316732436, 0.11697330658366219, 0, 0, 0], [1574, 2, \n 0.004137684975217191, 0.20688424876085953, 0, 0, 0], [1575, 2, \n 0.005321935603588621, 0.266096780179431, 0, 0, 0], [1576, 3, \n 0.0012058684964594748, 0.06029342482297374, 2.22, 61.69, 0.004502], [\n 1577, 2, 0.007623891664161928, 0.38119458320809646, 0, 0, 0], [1578, 3,\n 0.0005221838250086942, 0.026109191250434708, 2.22, 61.69, 0.004502], [\n 1579, 3, 0.002238630940686654, 0.11193154703433271, 2.22, 61.69, \n 0.004502], [1580, 3, 0.001393719346464869, 0.06968596732324346, 2.22, \n 61.69, 0.004502], [1581, 2, 0.004209660542722961, 0.21048302713614803, \n 0, 0, 0], [1582, 3, 0.00022686224095152467, 0.011343112047576234, 2.22,\n 61.69, 0.004502], [1583, 3, 5.082160364336507e-05, 0.002541080182168254,\n 2.22, 61.69, 0.004502], [1584, 2, 0.0022062235268679067, \n 0.11031117634339535, 0, 0, 0], [1585, 3, 9.927313465409417e-05, \n 0.004963656732704709, 2.22, 61.69, 0.004502], [1586, 2, \n 0.0016556098644012565, 0.08278049322006283, 0, 0, 0], [1587, 2, \n 0.0051600530588915, 0.25800265294457503, 0, 0, 0], [1588, 2, \n 0.0020300209546731105, 0.10150104773365555, 0, 0, 0], [1589, 3, \n 0.003090042091003551, 0.15450210455017754, 2.22, 61.69, 0.004502], [\n 1590, 2, 0.00678480159716298, 0.33924007985814897, 0, 0, 0], [1591, 2, \n 0.007640573237260637, 0.3820286618630319, 0, 0, 0], [1592, 3, \n 0.0002808269093051203, 0.014041345465256016, 2.22, 61.69, 0.004502], [\n 1593, 3, 0.00020129856047632, 0.010064928023816, 2.22, 61.69, 0.004502],\n [1594, 3, 0.0002789388372524298, 0.01394694186262149, 2.22, 61.69, \n 0.004502], [1595, 2, 0.0016750204459843893, 0.08375102229921946, 0, 0, \n 0], [1596, 2, 0.004134439238739313, 0.20672196193696565, 0, 0, 0], [\n 1597, 3, 8.285309045665851e-05, 0.004142654522832926, 2.22, 61.69, \n 0.004502], [1598, 3, 0.00013540004754729773, 
0.0067700023773648865, \n 2.22, 61.69, 0.004502], [1599, 2, 0.0026959085186091525, \n 0.13479542593045762, 0, 0, 0], [1600, 3, 0.0009357608497023268, \n 0.04678804248511634, 2.22, 61.69, 0.004502], [1601, 3, \n 0.00027170543018973547, 0.013585271509486775, 2.22, 61.69, 0.004502], [\n 1602, 3, 0.0015513668512933244, 0.07756834256466623, 2.22, 61.69, \n 0.004502], [1603, 3, 0.0009086996263346224, 0.04543498131673112, 2.22, \n 61.69, 0.004502], [1604, 3, 0.0005649494759739373, 0.02824747379869687,\n 2.22, 61.69, 0.004502], [1605, 3, 0.0014751450593580586, \n 0.07375725296790293, 2.22, 61.69, 0.004502], [1606, 3, \n 0.0013425796771799677, 0.06712898385899839, 2.22, 61.69, 0.004502], [\n 1607, 3, 0.0006631858002546182, 0.03315929001273091, 2.22, 61.69, \n 0.004502], [1608, 3, 0.000668140823101588, 0.0334070411550794, 2.22, \n 61.69, 0.004502], [1609, 3, 0.00022162254349097636, \n 0.011081127174548818, 2.22, 61.69, 0.004502], [1610, 3, \n 0.0006039031650447518, 0.030195158252237588, 2.22, 61.69, 0.004502], [\n 1611, 3, 0.00022694944446959337, 0.011347472223479668, 2.22, 61.69, \n 0.004502], [1612, 3, 0.0003947897752379102, 0.019739488761895515, 2.22,\n 61.69, 0.004502], [1613, 3, 0.0008375258341098956, 0.04187629170549478,\n 2.22, 61.69, 0.004502], [1614, 3, 0.0008441996938739789, \n 0.042209984693698945, 2.22, 61.69, 0.004502], [1615, 2, \n 0.005227574288460156, 0.26137871442300786, 0, 0, 0], [1616, 3, \n 0.00019064354714925193, 0.009532177357462597, 2.22, 61.69, 0.004502], [\n 1617, 3, 0.00029566775950504534, 0.014783387975252268, 2.22, 61.69, \n 0.004502], [1618, 3, 0.00014179949030894114, 0.007089974515447057, 2.22,\n 61.69, 0.004502], [1619, 3, 0.00018640385871827544, \n 0.009320192935913772, 2.22, 61.69, 0.004502], [1620, 3, \n 5.5271626586484114e-05, 0.0027635813293242053, 2.22, 61.69, 0.004502],\n [1621, 3, 0.0002950094150485152, 0.014750470752425757, 2.22, 61.69, \n 0.004502], [1622, 3, 0.00020847655089586544, 0.010423827544793273, 2.22,\n 61.69, 0.004502], 
[1623, 3, 0.0006246630015592596, 0.031233150077962978,\n 2.22, 61.69, 0.004502], [1624, 3, 0.00028274003590258393, \n 0.014137001795129197, 2.22, 61.69, 0.004502], [1625, 2, \n 0.0022534174910895347, 0.11267087455447673, 0, 0, 0], [1626, 3, \n 0.0004280693443394328, 0.02140346721697164, 2.22, 61.69, 0.004502], [\n 1627, 3, 0.000375648911560075, 0.01878244557800375, 2.22, 61.69, \n 0.004502], [1628, 2, 0.002172204242957195, 0.10861021214785976, 0, 0, 0\n ], [1629, 2, 0.003587225381224193, 0.17936126906120967, 0, 0, 0], [1630,\n 3, 0.00045326643232520994, 0.0226633216162605, 2.22, 61.69, 0.004502],\n [1631, 3, 0.0009801395432241038, 0.04900697716120519, 2.22, 61.69, \n 0.004502], [1632, 3, 0.0008930991123686864, 0.044654955618434314, 2.22,\n 61.69, 0.004502], [1633, 2, 0.001835290275730487, 0.09176451378652435, \n 0, 0, 0], [1634, 3, 0.00035310969975077067, 0.017655484987538533, 2.22,\n 61.69, 0.004502], [1635, 3, 0.0006833295628236428, 0.03416647814118214,\n 2.22, 61.69, 0.004502], [1636, 3, 0.0006973081800050544, \n 0.03486540900025272, 2.22, 61.69, 0.004502], [1637, 3, \n 0.000849481774844417, 0.042474088742220854, 2.22, 61.69, 0.004502], [\n 1638, 3, 0.0003577601952454168, 0.01788800976227084, 2.22, 61.69, \n 0.004502], [1639, 3, 0.0008040502325112668, 0.04020251162556334, 2.22, \n 61.69, 0.004502], [1640, 3, 6.362024595159042e-05, \n 0.0031810122975795213, 2.22, 61.69, 0.004502], [1641, 3, \n 0.00014325661737729948, 0.007162830868864973, 2.22, 61.69, 0.004502], [\n 1642, 3, 0.00033451195931950633, 0.01672559796597532, 2.22, 61.69, \n 0.004502], [1643, 3, 9.619219687560661e-05, 0.0048096098437803315, 2.22,\n 61.69, 0.004502], [1644, 3, 0.0003653755557936511, 0.018268777789682555,\n 2.22, 61.69, 0.004502], [1645, 3, 0.00030842754735325555, \n 0.015421377367662779, 2.22, 61.69, 0.004502], [1646, 3, \n 0.0001049187322986075, 0.005245936614930375, 2.22, 61.69, 0.004502], [\n 1647, 3, 0.000503659392774143, 0.025182969638707146, 2.22, 61.69, \n 0.004502], [1648, 2, 
0.006961158588339223, 0.34805792941696123, 0, 0, 0\n ], [1649, 3, 0.000744807327898371, 0.03724036639491855, 2.22, 61.69, \n 0.004502], [1650, 2, 0.011263647688495146, 0.5631823844247573, 0, 0, 0],\n [1651, 2, 0.008559494225984409, 0.4279747112992205, 0, 0, 0], [1652, 2,\n 0.005352098184679378, 0.2676049092339689, 0, 0, 0], [1653, 3, \n 0.0011733692302176245, 0.058668461510881224, 2.22, 61.69, 0.004502], [\n 1654, 3, 0.0020443508774251108, 0.10221754387125553, 2.22, 61.69, \n 0.004502], [1655, 3, 0.0003002115401188504, 0.01501057700594252, 2.22, \n 61.69, 0.004502], [1656, 3, 7.370159725959526e-05, 0.003685079862979763,\n 2.22, 61.69, 0.004502], [1657, 3, 0.00015430974585088452, \n 0.007715487292544226, 2.22, 61.69, 0.004502], [1658, 3, \n 5.322222256050306e-05, 0.0026611111280251533, 2.22, 61.69, 0.004502], [\n 1659, 2, 0.005607978495065647, 0.2803989247532824, 0, 0, 0], [1660, 2, \n 0.006516269957589729, 0.32581349787948644, 0, 0, 0], [1661, 2, \n 0.008823810212990009, 0.4411905106495005, 0, 0, 0], [1662, 3, \n 8.483345715007819e-05, 0.00424167285750391, 2.22, 61.69, 0.004502], [\n 1663, 3, 4.3530191699128595e-05, 0.0021765095849564297, 2.22, 61.69, \n 0.004502], [1664, 3, 4.452953003965536e-05, 0.002226476501982768, 2.22,\n 61.69, 0.004502], [1665, 3, 0.0013225288693347707, 0.06612644346673854,\n 2.22, 61.69, 0.004502], [1666, 3, 8.635567359373938e-05, \n 0.0043177836796869686, 2.22, 61.69, 0.004502], [1667, 3, \n 0.0001522890012790897, 0.007614450063954485, 2.22, 61.69, 0.004502], [\n 1668, 3, 0.00011100625173614089, 0.005550312586807045, 2.22, 61.69, \n 0.004502], [1669, 2, 0.0019551374257545055, 0.09775687128772527, 0, 0, \n 0], [1670, 2, 0.002994563514151705, 0.1497281757075853, 0, 0, 0], [1671,\n 2, 0.00194197125660994, 0.097098562830497, 0, 0, 0], [1672, 3, \n 0.00031759653323842224, 0.01587982666192111, 2.22, 61.69, 0.004502], [\n 1673, 3, 0.00015112697948666895, 0.007556348974333448, 2.22, 61.69, \n 0.004502], [1674, 3, 0.001338975669244281, 
0.06694878346221406, 2.22, \n 61.69, 0.004502], [1675, 3, 0.0009048640187272772, 0.04524320093636386,\n 2.22, 61.69, 0.004502], [1676, 2, 0.002276296569919192, \n 0.11381482849595959, 0, 0, 0], [1677, 3, 0.0003779607501536475, \n 0.018898037507682378, 2.22, 61.69, 0.004502], [1678, 2, \n 0.005903817693380342, 0.2951908846690171, 0, 0, 0], [1679, 2, \n 0.0018586402973926343, 0.09293201486963171, 0, 0, 0], [1680, 2, \n 0.0014488887108239739, 0.0724444355411987, 0, 0, 0], [1681, 3, \n 0.0004714294646830218, 0.023571473234151093, 2.22, 61.69, 0.004502], [\n 1682, 3, 0.001085935652974641, 0.05429678264873205, 2.22, 61.69, \n 0.004502], [1683, 3, 0.00028145757533810527, 0.014072878766905264, 2.22,\n 61.69, 0.004502], [1684, 3, 0.0025831258538967852, 0.12915629269483925,\n 2.22, 61.69, 0.004502], [1685, 2, 0.0047697103139446575, \n 0.23848551569723286, 0, 0, 0], [1686, 2, 0.0022483118876134227, \n 0.11241559438067113, 0, 0, 0], [1687, 2, 0.0030131816049814983, \n 0.15065908024907493, 0, 0, 0], [1688, 3, 0.0004903983387759389, \n 0.024519916938796946, 2.22, 61.69, 0.004502], [1689, 2, \n 0.0032938946161484794, 0.16469473080742397, 0, 0, 0], [1690, 2, \n 0.00317999955372553, 0.15899997768627652, 0, 0, 0], [1691, 2, \n 0.006018881738424175, 0.30094408692120883, 0, 0, 0], [1692, 3, \n 0.0007150498191215078, 0.03575249095607538, 2.22, 61.69, 0.004502], [\n 1693, 2, 0.0030184481369320087, 0.15092240684660044, 0, 0, 0], [1694, 2,\n 0.001461369242868097, 0.07306846214340486, 0, 0, 0], [1695, 3, \n 0.0006306603001410114, 0.03153301500705057, 2.22, 61.69, 0.004502], [\n 1696, 2, 0.0014331689037382152, 0.07165844518691075, 0, 0, 0], [1697, 2,\n 0.008710326279612261, 0.43551631398061313, 0, 0, 0], [1698, 3, \n 0.0016301483386422185, 0.08150741693211093, 2.22, 61.69, 0.004502], [\n 1699, 3, 0.00013956784357760127, 0.006978392178880064, 2.22, 61.69, \n 0.004502], [1700, 2, 0.001455730736331227, 0.07278653681656136, 0, 0, 0\n ], [1701, 3, 0.000985466392749056, 0.04927331963745281, 2.22, 
61.69, \n 0.004502], [1702, 3, 0.0008069862705159137, 0.04034931352579569, 2.22, \n 61.69, 0.004502], [1703, 3, 0.0015568099066940577, 0.07784049533470289,\n 2.22, 61.69, 0.004502], [1704, 2, 0.0039863070632047415, \n 0.1993153531602371, 0, 0, 0], [1705, 2, 0.0016994219326201241, \n 0.0849710966310062, 0, 0, 0], [1706, 3, 0.00022834587513481845, \n 0.011417293756740922, 2.22, 61.69, 0.004502], [1707, 3, \n 0.00035050593877745283, 0.017525296938872642, 2.22, 61.69, 0.004502], [\n 1708, 3, 0.0008077480562281571, 0.04038740281140786, 2.22, 61.69, \n 0.004502], [1709, 2, 0.006228812219006413, 0.31144061095032066, 0, 0, 0\n ], [1710, 2, 0.005128653226179494, 0.2564326613089747, 0, 0, 0], [1711,\n 3, 0.0001865928228376505, 0.009329641141882526, 2.22, 61.69, 0.004502],\n [1712, 2, 0.002102837121501151, 0.10514185607505754, 0, 0, 0], [1713, 2,\n 0.0025368957405395645, 0.12684478702697824, 0, 0, 0], [1714, 3, \n 0.0011562226654331135, 0.05781113327165568, 2.22, 61.69, 0.004502], [\n 1715, 2, 0.004481367157274824, 0.22406835786374124, 0, 0, 0], [1716, 2,\n 0.009993594261663767, 0.4996797130831883, 0, 0, 0], [1717, 2, \n 0.002267986548968579, 0.11339932744842897, 0, 0, 0], [1718, 2, \n 0.01920136583254073, 0.9600682916270364, 0, 0, 0], [1719, 3, \n 0.0006250608555912478, 0.03125304277956239, 2.22, 61.69, 0.004502], [\n 1720, 2, 0.00168964057950739, 0.08448202897536951, 0, 0, 0], [1721, 2, \n 0.0022514556432754154, 0.11257278216377076, 0, 0, 0], [1722, 3, \n 0.0005776709769605844, 0.02888354884802922, 2.22, 61.69, 0.004502], [\n 1723, 3, 0.00018177235502873834, 0.009088617751436916, 2.22, 61.69, \n 0.004502], [1724, 3, 0.002308942454207542, 0.1154471227103771, 2.22, \n 61.69, 0.004502], [1725, 2, 0.0018560503299213332, 0.09280251649606665,\n 0, 0, 0], [1726, 2, 0.002761006390807373, 0.13805031954036864, 0, 0, 0],\n [1727, 3, 1.2777785942774298e-05, 0.0006388892971387149, 2.22, 61.69, \n 0.004502], [1728, 2, 0.0018392523086213346, 0.09196261543106675, 0, 0, \n 0], [1729, 2, 
0.006839303534284608, 0.3419651767142304, 0, 0, 0], [1730,\n 2, 0.0016405280887646968, 0.08202640443823485, 0, 0, 0], [1731, 2, \n 0.004530580326268455, 0.2265290163134228, 0, 0, 0], [1732, 2, \n 0.010296734416249178, 0.5148367208124589, 0, 0, 0], [1733, 2, \n 0.0017360181799001156, 0.08680090899500578, 0, 0, 0], [1734, 2, \n 0.002080576836187494, 0.1040288418093747, 0, 0, 0], [1735, 2, \n 0.004596997723122095, 0.2298498861561048, 0, 0, 0], [1736, 2, \n 0.002413425654250592, 0.12067128271252962, 0, 0, 0], [1737, 2, \n 0.006813443685203153, 0.34067218426015766, 0, 0, 0], [1738, 2, \n 0.0038515318581644853, 0.1925765929082243, 0, 0, 0], [1739, 3, \n 0.0010627604171624583, 0.053138020858122914, 2.22, 61.69, 0.004502], [\n 1740, 2, 0.0021026257427105457, 0.10513128713552729, 0, 0, 0], [1741, 3,\n 0.0009950302298943022, 0.049751511494715114, 2.22, 61.69, 0.004502], [\n 1742, 3, 0.0006991333883527254, 0.03495666941763627, 2.22, 61.69, \n 0.004502], [1743, 3, 2.6718441567986027e-05, 0.0013359220783993014, \n 2.22, 61.69, 0.004502], [1744, 3, 0.00010295853025504874, \n 0.0051479265127524374, 2.22, 61.69, 0.004502], [1745, 3, \n 0.0008552992639033185, 0.04276496319516592, 2.22, 61.69, 0.004502], [\n 1746, 2, 0.004641428723601485, 0.23207143618007425, 0, 0, 0], [1747, 3,\n 0.0007127580911748647, 0.03563790455874324, 2.22, 61.69, 0.004502], [\n 1748, 2, 0.0019372469660483122, 0.09686234830241562, 0, 0, 0], [1749, 2,\n 0.006244643211840332, 0.3122321605920166, 0, 0, 0], [1750, 3, \n 0.000653478119652876, 0.0326739059826438, 2.22, 61.69, 0.004502], [1751,\n 3, 0.0005383084342515337, 0.026915421712576687, 2.22, 61.69, 0.004502],\n [1752, 2, 0.0037542906982168446, 0.18771453491084222, 0, 0, 0], [1753, \n 2, 0.002297268499533676, 0.11486342497668381, 0, 0, 0], [1754, 2, \n 0.011467968203347287, 0.5733984101673645, 0, 0, 0], [1755, 3, \n 0.0014040905423340156, 0.07020452711670079, 2.22, 61.69, 0.004502], [\n 1756, 2, 0.0025915006544054604, 0.12957503272027304, 0, 0, 0], [1757, 2,\n 
0.006862277688448091, 0.34311388442240454, 0, 0, 0], [1758, 2, \n 0.008413471513428292, 0.42067357567141467, 0, 0, 0], [1759, 2, \n 0.004574362398582669, 0.22871811992913343, 0, 0, 0], [1760, 2, \n 0.0031789097473471192, 0.15894548736735598, 0, 0, 0], [1761, 3, \n 0.0014083619528329524, 0.07041809764164762, 2.22, 61.69, 0.004502], [\n 1762, 2, 0.0033502257085727175, 0.1675112854286359, 0, 0, 0], [1763, 2,\n 0.0030242326674567712, 0.15121163337283858, 0, 0, 0], [1764, 3, \n 0.0007202102426608419, 0.0360105121330421, 2.22, 61.69, 0.004502], [\n 1765, 2, 0.003945424551590993, 0.19727122757954962, 0, 0, 0], [1766, 2,\n 0.003915515453890014, 0.1957757726945007, 0, 0, 0], [1767, 2, \n 0.006085505697192886, 0.30427528485964433, 0, 0, 0], [1768, 2, \n 0.010174366269247585, 0.5087183134623792, 0, 0, 0], [1769, 2, \n 0.009031054425598138, 0.451552721279907, 0, 0, 0], [1770, 2, \n 0.030509885187144117, 1.525494259357206, 0, 0, 0], [1771, 2, \n 0.017611454160671825, 0.8805727080335912, 0, 0, 0], [1772, 2, \n 0.007633737706924312, 0.3816868853462156, 0, 0, 0], [1773, 2, \n 0.01780807424723992, 0.890403712361996, 0, 0, 0], [1774, 2, \n 0.002413161491111794, 0.1206580745555897, 0, 0, 0], [1775, 2, \n 0.005451344168542172, 0.2725672084271086, 0, 0, 0], [1776, 2, \n 0.0033074583919163653, 0.16537291959581826, 0, 0, 0], [1777, 2, \n 0.005568161613558242, 0.2784080806779121, 0, 0, 0], [1778, 2, \n 0.002395611780191415, 0.11978058900957077, 0, 0, 0], [1779, 2, \n 0.0028488054525953985, 0.14244027262976997, 0, 0, 0], [1780, 2, \n 0.0030002134377383463, 0.1500106718869173, 0, 0, 0], [1781, 3, \n 0.0004499032173986467, 0.022495160869932335, 2.22, 61.69, 0.004502], [\n 1782, 3, 0.0006333736554700433, 0.03166868277350216, 2.22, 61.69, \n 0.004502], [1783, 3, 0.0006836718573255382, 0.03418359286627692, 2.22, \n 61.69, 0.004502], [1784, 2, 0.006456743545235233, 0.32283717726176164, \n 0, 0, 0], [1785, 2, 0.007347157943155048, 0.36735789715775236, 0, 0, 0],\n [1786, 2, 0.007214359186119591, 
0.36071795930597955, 0, 0, 0], [1787, 2,\n 0.007834284018991623, 0.39171420094958115, 0, 0, 0], [1788, 3, \n 0.0002545220592081115, 0.012726102960405576, 2.22, 61.69, 0.004502], [\n 1789, 3, 0.0006445279945604626, 0.03222639972802314, 2.22, 61.69, \n 0.004502], [1790, 3, 3.7097412529855566e-05, 0.0018548706264927782, \n 2.22, 61.69, 0.004502], [1791, 3, 3.060700921589692e-05, \n 0.001530350460794846, 2.22, 61.69, 0.004502], [1792, 3, \n 0.00023113047197876308, 0.011556523598938153, 2.22, 61.69, 0.004502], [\n 1793, 3, 0.0010854139444152772, 0.054270697220763865, 2.22, 61.69, \n 0.004502], [1794, 3, 0.000193812719045554, 0.009690635952277699, 2.22, \n 61.69, 0.004502], [1795, 3, 0.00012212686390123214, \n 0.006106343195061608, 2.22, 61.69, 0.004502], [1796, 3, \n 0.0006642823349345957, 0.033214116746729784, 2.22, 61.69, 0.004502], [\n 1797, 2, 0.0018439478449351068, 0.09219739224675534, 0, 0, 0], [1798, 3,\n 0.00042633568546037186, 0.021316784273018592, 2.22, 61.69, 0.004502], [\n 1799, 2, 0.002237269697339197, 0.11186348486695984, 0, 0, 0], [1800, 2,\n 0.0042493921881998535, 0.2124696094099927, 0, 0, 0], [1801, 3, \n 0.0005438025657211798, 0.02719012828605899, 2.22, 61.69, 0.004502], [\n 1802, 3, 0.00029245884668739017, 0.01462294233436951, 2.22, 61.69, \n 0.004502], [1803, 3, 0.0003927492716827882, 0.01963746358413941, 2.22, \n 61.69, 0.004502], [1804, 2, 0.01120428237244892, 0.5602141186224461, 0,\n 0, 0], [1805, 3, 0.0006332582976482522, 0.03166291488241261, 2.22, \n 61.69, 0.004502], [1806, 3, 0.0006249082238639684, 0.03124541119319842,\n 2.22, 61.69, 0.004502], [1807, 3, 0.0007715037279579743, \n 0.03857518639789872, 2.22, 61.69, 0.004502], [1808, 2, \n 0.003273470708969163, 0.16367353544845814, 0, 0, 0], [1809, 3, \n 0.0009238292096633647, 0.04619146048316824, 2.22, 61.69, 0.004502], [\n 1810, 2, 0.002106300089692593, 0.10531500448462965, 0, 0, 0], [1811, 2,\n 0.0014671228267872148, 0.07335614133936073, 0, 0, 0], [1812, 3, \n 0.0013029854518401976, 
0.0651492725920099, 2.22, 61.69, 0.004502], [\n 1813, 2, 0.005212306067684381, 0.26061530338421907, 0, 0, 0], [1814, 2,\n 0.0017458294165536873, 0.08729147082768438, 0, 0, 0], [1815, 2, \n 0.0017071985603054247, 0.08535992801527123, 0, 0, 0], [1816, 3, \n 0.0008355966484335978, 0.04177983242167989, 2.22, 61.69, 0.004502], [\n 1817, 2, 0.00786124232779237, 0.39306211638961847, 0, 0, 0], [1818, 2, \n 0.00467172216419726, 0.23358610820986297, 0, 0, 0], [1819, 3, \n 4.446961087725697e-05, 0.0022234805438628488, 2.22, 61.69, 0.004502], [\n 1820, 2, 0.0021455616092900765, 0.10727808046450382, 0, 0, 0], [1821, 2,\n 0.0052492883399868, 0.26246441699934, 0, 0, 0], [1822, 2, \n 0.010875476397094096, 0.5437738198547047, 0, 0, 0], [1823, 2, \n 0.003945992802078176, 0.19729964010390882, 0, 0, 0], [1824, 2, \n 0.0018267545792273764, 0.09133772896136881, 0, 0, 0], [1825, 2, \n 0.00519430489419229, 0.25971524470961443, 0, 0, 0], [1826, 2, \n 0.0021811060524790952, 0.10905530262395477, 0, 0, 0], [1827, 3, \n 0.0008530157012054359, 0.0426507850602718, 2.22, 61.69, 0.004502], [\n 1828, 3, 0.002756494944812388, 0.1378247472406194, 2.22, 61.69, \n 0.004502], [1829, 2, 0.004409435763064647, 0.22047178815323237, 0, 0, 0\n ], [1830, 3, 0.0011403474572496454, 0.05701737286248228, 2.22, 61.69, \n 0.004502], [1831, 2, 0.004449336207686825, 0.2224668103843413, 0, 0, 0],\n [1832, 3, 0.0007771931121615173, 0.038859655608075874, 2.22, 61.69, \n 0.004502], [1833, 2, 0.00219574579139257, 0.10978728956962851, 0, 0, 0],\n [1834, 2, 0.0029144516945575063, 0.14572258472787536, 0, 0, 0], [1836, \n 3, 0.0002291147948951537, 0.011455739744757684, 2.22, 61.69, 0.004502],\n [1837, 3, 0.0008040081530028336, 0.040200407650141684, 2.22, 61.69, \n 0.004502], [1838, 3, 0.0008406582811366919, 0.042032914056834604, 2.22,\n 61.69, 0.004502], [1839, 2, 0.009448279703012192, 0.47241398515060967, \n 0, 0, 0], [1840, 2, 0.004930931936026686, 0.2465465968013343, 0, 0, 0],\n [1841, 3, 0.0006235800258089248, 
0.03117900129044624, 2.22, 61.69, \n 0.004502], [1842, 3, 0.000453678034330045, 0.022683901716502253, 2.22, \n 61.69, 0.004502], [1843, 3, 0.0005619991314477211, 0.02809995657238605,\n 2.22, 61.69, 0.004502], [1844, 3, 0.0008621042105392081, \n 0.043105210526960404, 2.22, 61.69, 0.004502], [1845, 3, \n 0.000841554397088342, 0.0420777198544171, 2.22, 61.69, 0.004502], [1846,\n 3, 0.00010981600382526249, 0.005490800191263125, 2.22, 61.69, 0.004502],\n [1847, 2, 0.003982054075289823, 0.19910270376449113, 0, 0, 0], [1848, 3,\n 0.00033381245647581777, 0.01669062282379089, 2.22, 61.69, 0.004502], [\n 1849, 3, 0.001158450269038491, 0.057922513451924555, 2.22, 61.69, \n 0.004502], [1850, 3, 0.001708114521061397, 0.08540572605306987, 2.22, \n 61.69, 0.004502], [1851, 3, 0.0005065229873089011, 0.025326149365445055,\n 2.22, 61.69, 0.004502], [1852, 3, 0.0023941306142429277, \n 0.11970653071214639, 2.22, 61.69, 0.004502], [1853, 3, \n 0.001917289339589373, 0.09586446697946867, 2.22, 61.69, 0.004502], [\n 1854, 3, 0.00014267713764539732, 0.007133856882269866, 2.22, 61.69, \n 0.004502], [1855, 2, 0.003701425783106976, 0.18507128915534882, 0, 0, 0\n ], [1856, 2, 0.004052362315850483, 0.20261811579252417, 0, 0, 0], [1857,\n 3, 0.0012207911958070376, 0.06103955979035188, 2.22, 61.69, 0.004502],\n [1858, 3, 0.0008157807822408823, 0.04078903911204411, 2.22, 61.69, \n 0.004502], [1860, 2, 0.0028539824090186706, 0.14269912045093353, 0, 0, \n 0], [1861, 3, 0.0008409403758531892, 0.04204701879265946, 2.22, 61.69, \n 0.004502], [1862, 3, 0.0008746423721642757, 0.04373211860821378, 2.22, \n 61.69, 0.004502], [1863, 3, 0.0008078987718104445, 0.04039493859052222,\n 2.22, 61.69, 0.004502], [1864, 2, 0.0037260737853256434, \n 0.1863036892662822, 0, 0, 0], [1865, 2, 0.0043352387888536065, \n 0.21676193944268035, 0, 0, 0], [1866, 2, 0.006257281052932708, \n 0.31286405264663536, 0, 0, 0], [1867, 3, 6.12285505372934e-05, \n 0.00306142752686467, 2.22, 61.69, 0.004502], [1868, 3, \n 
0.00018655016239655994, 0.009327508119827998, 2.22, 61.69, 0.004502], [\n 1869, 3, 8.230686306328308e-05, 0.004115343153164154, 2.22, 61.69, \n 0.004502], [1870, 2, 0.0014869657686431364, 0.07434828843215682, 0, 0, \n 0], [1871, 2, 0.0015337314104040772, 0.07668657052020388, 0, 0, 0], [\n 1872, 3, 6.220327851111738e-05, 0.003110163925555869, 2.22, 61.69, \n 0.004502], [1873, 3, 0.0002573648025375113, 0.012868240126875569, 2.22,\n 61.69, 0.004502], [1874, 3, 0.00010039547173203763, \n 0.0050197735866018825, 2.22, 61.69, 0.004502], [1875, 3, \n 0.0002179760373318144, 0.010898801866590722, 2.22, 61.69, 0.004502], [\n 1876, 3, 0.00014270627844755376, 0.00713531392237769, 2.22, 61.69, \n 0.004502], [1877, 3, 3.283059900250418e-05, 0.001641529950125209, 2.22,\n 61.69, 0.004502], [1878, 3, 0.00023290405284479777, \n 0.011645202642239888, 2.22, 61.69, 0.004502], [1879, 3, \n 5.049284201103439e-05, 0.0025246421005517194, 2.22, 61.69, 0.004502], [\n 1880, 3, 0.001068255049908474, 0.05341275249542371, 2.22, 61.69, \n 0.004502], [1881, 3, 0.00015727984940835908, 0.007863992470417953, 2.22,\n 61.69, 0.004502], [1882, 3, 0.0001818121283940816, 0.00909060641970408,\n 2.22, 61.69, 0.004502], [1883, 3, 0.0002453456224830875, \n 0.012267281124154376, 2.22, 61.69, 0.004502], [1884, 3, \n 0.00020684198110963, 0.010342099055481502, 2.22, 61.69, 0.004502], [\n 1885, 3, 0.00129792588119142, 0.06489629405957101, 2.22, 61.69, \n 0.004502], [1886, 3, 0.00014319470844547947, 0.007159735422273974, 2.22,\n 61.69, 0.004502], [1887, 3, 0.0005032189871086648, 0.025160949355433244,\n 2.22, 61.69, 0.004502], [1888, 3, 0.00014324092549305482, \n 0.0071620462746527416, 2.22, 61.69, 0.004502], [1889, 2, \n 0.0025884474041454283, 0.12942237020727143, 0, 0, 0], [1890, 3, \n 0.0007104281028062201, 0.035521405140311005, 2.22, 61.69, 0.004502], [\n 1891, 3, 0.0008415405866706834, 0.042077029333534174, 2.22, 61.69, \n 0.004502], [1892, 3, 0.0010384360084148645, 0.05192180042074322, 2.22, \n 61.69, 
0.004502], [1893, 3, 0.001301927182997355, 0.06509635914986775, \n 2.22, 61.69, 0.004502], [1894, 3, 0.0008768655006630459, \n 0.0438432750331523, 2.22, 61.69, 0.004502], [1895, 3, \n 4.304267639620148e-06, 0.00021521338198100739, 2.22, 61.69, 0.004502],\n [1896, 3, 0.0012165952308203119, 0.060829761541015596, 2.22, 61.69, \n 0.004502], [1897, 3, 0.0004032096848351131, 0.020160484241755657, 2.22,\n 61.69, 0.004502], [1898, 3, 0.0004936037088332394, 0.024680185441661975,\n 2.22, 61.69, 0.004502], [1899, 3, 0.0003231170726398226, \n 0.016155853631991127, 2.22, 61.69, 0.004502], [1900, 2, \n 0.004972924117850934, 0.2486462058925467, 0, 0, 0], [1901, 2, \n 0.00850139874298526, 0.42506993714926306, 0, 0, 0], [1902, 2, \n 0.017941196935571776, 0.8970598467785887, 0, 0, 0], [1903, 2, \n 0.008625713146876468, 0.4312856573438233, 0, 0, 0], [1904, 2, \n 0.005041037225995458, 0.2520518612997729, 0, 0, 0], [1905, 3, \n 0.0002626527775456755, 0.013132638877283775, 2.22, 61.69, 0.004502], [\n 1906, 2, 0.002010065672184408, 0.10050328360922042, 0, 0, 0], [1907, 3,\n 0.0008003650424765439, 0.040018252123827196, 2.22, 61.69, 0.004502], [\n 1908, 2, 0.0013979563523032034, 0.06989781761516019, 0, 0, 0], [1909, 3,\n 0.0011036689330580832, 0.05518344665290417, 2.22, 61.69, 0.004502], [\n 1910, 3, 0.0006883943546285288, 0.03441971773142644, 2.22, 61.69, \n 0.004502], [1911, 3, 0.0002772595538987581, 0.013862977694937906, 2.22,\n 61.69, 0.004502], [1912, 2, 0.006444942182323984, 0.3222471091161993, 0,\n 0, 0], [1913, 3, 0.0001851619920160923, 0.009258099600804617, 2.22, \n 61.69, 0.004502], [1914, 3, 0.00043823655905455975, 0.02191182795272799,\n 2.22, 61.69, 0.004502], [1915, 2, 0.010158557501696754, \n 0.5079278750848377, 0, 0, 0], [1916, 2, 0.017684886510895965, \n 0.8842443255447983, 0, 0, 0], [1917, 2, 0.01186578896955475, \n 0.5932894484777375, 0, 0, 0], [1918, 2, 0.007670383184040397, \n 0.3835191592020199, 0, 0, 0], [1919, 2, 0.0038936492873901407, \n 0.19468246436950706, 0, 0, 
0], [1920, 3, 0.0005833186660407878, \n 0.029165933302039395, 2.22, 61.69, 0.004502], [1921, 2, \n 0.014667779068156944, 0.7333889534078474, 0, 0, 0], [1922, 2, \n 0.00420908399548562, 0.21045419977428104, 0, 0, 0], [1923, 3, \n 0.001390133293413998, 0.0695066646706999, 2.22, 61.69, 0.004502], [1924,\n 3, 0.001743020791378585, 0.08715103956892926, 2.22, 61.69, 0.004502], [\n 1925, 2, 0.004089510330471294, 0.20447551652356472, 0, 0, 0], [1926, 2,\n 0.00287118105637557, 0.1435590528187785, 0, 0, 0], [1927, 2, \n 0.0041806062493278656, 0.20903031246639325, 0, 0, 0], [1928, 3, \n 9.612221268309282e-05, 0.004806110634154641, 2.22, 61.69, 0.004502], [\n 1929, 3, 0.000144746604528514, 0.0072373302264257, 2.22, 61.69, \n 0.004502], [1930, 3, 0.00030511943453295244, 0.015255971726647622, 2.22,\n 61.69, 0.004502], [1931, 3, 0.0010456667798853683, 0.05228333899426842,\n 2.22, 61.69, 0.004502], [1932, 3, 0.0014184910249342812, \n 0.07092455124671407, 2.22, 61.69, 0.004502], [1933, 3, \n 0.0012104704776866732, 0.060523523884333665, 2.22, 61.69, 0.004502], [\n 1934, 2, 0.017260023459133387, 0.8630011729566692, 0, 0, 0], [1935, 2, \n 0.0020131873177782612, 0.10065936588891305, 0, 0, 0], [1936, 3, \n 0.00016183222128449105, 0.008091611064224553, 2.22, 61.69, 0.004502], [\n 1937, 2, 0.0036698553451389514, 0.18349276725694758, 0, 0, 0], [1938, 2,\n 0.0024417642388014174, 0.12208821194007087, 0, 0, 0], [1939, 2, \n 0.002785103211444589, 0.13925516057222947, 0, 0, 0], [1940, 3, \n 0.0005110953936246092, 0.025554769681230462, 2.22, 61.69, 0.004502], [\n 1941, 2, 0.002709985093250103, 0.13549925466250515, 0, 0, 0], [1942, 2,\n 0.0018877299747687521, 0.0943864987384376, 0, 0, 0], [1943, 3, \n 0.00010279589286423787, 0.005139794643211894, 2.22, 61.69, 0.004502], [\n 1944, 2, 0.0025353013507918823, 0.1267650675395941, 0, 0, 0], [1945, 3,\n 0.0003079053590355567, 0.015395267951777833, 2.22, 61.69, 0.004502], [\n 1946, 3, 3.785246414633451e-05, 0.0018926232073167254, 2.22, 61.69, \n 
0.004502], [1947, 3, 0.0006231855866823692, 0.03115927933411846, 2.22, \n 61.69, 0.004502], [1948, 2, 0.002715072413449747, 0.13575362067248736, \n 0, 0, 0], [1949, 3, 0.0003749199035037024, 0.01874599517518512, 2.22, \n 61.69, 0.004502], [1950, 3, 3.2009130803650874e-05, \n 0.0016004565401825438, 2.22, 61.69, 0.004502], [1951, 3, \n 0.00028982139778890414, 0.014491069889445209, 2.22, 61.69, 0.004502], [\n 1952, 2, 0.0021449687785486293, 0.10724843892743147, 0, 0, 0], [1953, 3,\n 0.0002522618160854708, 0.012613090804273537, 2.22, 61.69, 0.004502], [\n 1954, 3, 0.0003506443043975968, 0.017532215219879844, 2.22, 61.69, \n 0.004502], [1955, 3, 0.00019049808752063204, 0.009524904376031602, 2.22,\n 61.69, 0.004502], [1956, 3, 0.0013327624870031016, 0.06663812435015508,\n 2.22, 61.69, 0.004502], [1957, 2, 0.0038265233479846173, \n 0.1913261673992309, 0, 0, 0], [1958, 2, 0.001623585117719857, \n 0.08117925588599285, 0, 0, 0], [1959, 3, 0.0014711543728682193, \n 0.07355771864341097, 2.22, 61.69, 0.004502], [1960, 3, \n 0.00040419410791183997, 0.020209705395591998, 2.22, 61.69, 0.004502], [\n 1961, 3, 0.0004963095835166648, 0.02481547917583324, 2.22, 61.69, \n 0.004502], [1962, 3, 8.676879300628758e-05, 0.00433843965031438, 2.22, \n 61.69, 0.004502], [1963, 3, 1.98901161405436e-05, 0.0009945058070271802,\n 2.22, 61.69, 0.004502], [1964, 2, 0.001926379139961268, \n 0.0963189569980634, 0, 0, 0], [1965, 3, 0.0005268011695933483, \n 0.026340058479667413, 2.22, 61.69, 0.004502], [1966, 3, \n 0.00017024481693603925, 0.008512240846801963, 2.22, 61.69, 0.004502], [\n 1967, 2, 0.003124156872402211, 0.15620784362011056, 0, 0, 0], [1968, 2,\n 0.008146530594916731, 0.4073265297458366, 0, 0, 0], [1969, 3, \n 0.0004332236280372991, 0.021661181401864953, 2.22, 61.69, 0.004502], [\n 1970, 2, 0.015079725927314894, 0.7539862963657448, 0, 0, 0], [1971, 3, \n 0.00041965080447621257, 0.020982540223810627, 2.22, 61.69, 0.004502], [\n 1972, 3, 8.495873978254917e-07, 4.247936989127459e-05, 2.22, 
61.69, \n 0.004502], [1973, 3, 1.600763469777576e-05, 0.0008003817348887879, 2.22,\n 61.69, 0.004502], [1974, 3, 8.235613569316079e-05, 0.00411780678465804,\n 2.22, 61.69, 0.004502], [1975, 2, 0.0024899950060986455, \n 0.12449975030493228, 0, 0, 0], [1976, 3, 0.00013846418760463496, \n 0.006923209380231748, 2.22, 61.69, 0.004502], [1977, 2, \n 0.01441202991758457, 0.7206014958792286, 0, 0, 0], [1978, 3, \n 4.876032337019254e-05, 0.002438016168509627, 2.22, 61.69, 0.004502], [\n 1979, 2, 0.01207812804630862, 0.603906402315431, 0, 0, 0], [1980, 2, \n 0.0034921293990410386, 0.17460646995205195, 0, 0, 0], [1981, 2, \n 0.004683612493623978, 0.23418062468119888, 0, 0, 0], [1982, 2, \n 0.004161761211985465, 0.20808806059927326, 0, 0, 0], [1983, 2, \n 0.0043877697353720034, 0.21938848676860015, 0, 0, 0], [1984, 2, \n 0.002631382568955209, 0.13156912844776045, 0, 0, 0], [1985, 3, \n 0.0012310071496282526, 0.061550357481412625, 2.22, 61.69, 0.004502], [\n 1986, 2, 0.008265161826349031, 0.4132580913174515, 0, 0, 0], [1987, 2, \n 0.010632736546116827, 0.5316368273058414, 0, 0, 0], [1988, 2, \n 0.011845953811604956, 0.5922976905802478, 0, 0, 0], [1989, 3, \n 0.0006607023412943799, 0.033035117064719, 2.22, 61.69, 0.004502], [1990,\n 2, 0.0014479772099362613, 0.07239886049681307, 0, 0, 0], [1991, 2, \n 0.02791736843845849, 1.3958684219229245, 0, 0, 0], [1992, 2, \n 0.00669676694709918, 0.33483834735495904, 0, 0, 0], [1993, 2, \n 0.007396801680359065, 0.36984008401795326, 0, 0, 0], [1994, 2, \n 0.007105771430148137, 0.35528857150740684, 0, 0, 0], [1995, 2, \n 0.007146789481908194, 0.35733947409540967, 0, 0, 0], [1996, 2, \n 0.002500315814796374, 0.1250157907398187, 0, 0, 0], [1997, 3, \n 0.0006919203107214647, 0.03459601553607324, 2.22, 61.69, 0.004502], [\n 1998, 3, 0.0007719976652252124, 0.038599883261260626, 2.22, 61.69, \n 0.004502], [1999, 2, 0.005606206317377037, 0.28031031586885186, 0, 0, 0\n ], [2000, 2, 0.015602932071110567, 0.7801466035555285, 0, 0, 0], [2001,\n 2, 
0.003597196019504588, 0.1798598009752294, 0, 0, 0], [2002, 3, \n 0.0010051105154040628, 0.05025552577020314, 2.22, 61.69, 0.004502], [\n 2003, 3, 0.0015052919810963758, 0.07526459905481879, 2.22, 61.69, \n 0.004502], [2004, 3, 0.0011289420570764744, 0.05644710285382372, 2.22, \n 61.69, 0.004502], [2005, 2, 0.0021166659006517613, 0.10583329503258805,\n 0, 0, 0], [2006, 2, 0.0017443470806312704, 0.08721735403156351, 0, 0, 0\n ], [2007, 3, 5.04767876707769e-05, 0.002523839383538845, 2.22, 61.69, \n 0.004502], [2008, 3, 3.5033818336598355e-06, 0.0001751690916829918, \n 2.22, 61.69, 0.004502]])\n', (458254, 550497), False, 'from numpy import array\n'), ((551568, 584549), 'numpy.array', 'array', (['[[586, 1, 0], [589, 108, 0], [590, 108, 0], [593, 112, 0], [594, 114, 0], [\n 595, 115, 0], [597, 118, 0], [598, 118, 0], [599, 119, 0], [600, 119, 0\n ], [601, 119, 0], [602, 121, 0], [603, 526, 0], [607, 127, 0], [608, \n 127, 0], [609, 529, 0], [610, 530, 0], [612, 493, 0], [613, 130, 0], [\n 614, 130, 0], [616, 132, 0], [617, 133, 0], [618, 133, 0], [619, 134, 0\n ], [621, 136, 0], [623, 139, 0], [624, 14, 0], [628, 142, 0], [629, 145,\n 0], [631, 145, 0], [632, 145, 0], [637, 148, 0], [638, 149, 0], [639, \n 150, 0], [640, 153, 0], [641, 155, 0], [642, 533, 0], [643, 534, 0], [\n 646, 536, 0], [647, 536, 0], [650, 166, 0], [652, 167, 0], [655, 170, 0\n ], [657, 174, 0], [658, 175, 0], [661, 177, 0], [662, 178, 0], [663, \n 178, 0], [666, 180, 0], [668, 183, 0], [670, 183, 0], [672, 185, 0], [\n 675, 19, 0], [676, 19, 0], [678, 194, 0], [679, 196, 0], [681, 197, 0],\n [683, 200, 0], [687, 202, 0], [689, 204, 0], [691, 209, 0], [693, 21, 0\n ], [694, 21, 0], [695, 210, 0], [696, 211, 0], [697, 211, 0], [698, 212,\n 0], [701, 215, 0], [702, 215, 0], [704, 217, 0], [705, 217, 0], [707, \n 219, 0], [708, 221, 0], [711, 224, 0], [713, 225, 0], [714, 225, 0], [\n 716, 226, 0], [717, 227, 0], [719, 229, 0], [722, 545, 0], [723, 235, 0\n ], [724, 238, 0], [725, 239, 0], [727, 243, 
0], [728, 244, 0], [730, \n 547, 0], [731, 548, 0], [732, 247, 0], [733, 549, 0], [735, 253, 0], [\n 737, 256, 0], [738, 258, 0], [739, 264, 0], [741, 264, 0], [742, 264, 0\n ], [743, 500, 0], [745, 273, 0], [746, 273, 0], [747, 273, 0], [748, \n 274, 0], [749, 274, 0], [750, 557, 0], [753, 28, 0], [758, 286, 0], [\n 760, 287, 0], [761, 288, 0], [762, 289, 0], [763, 560, 0], [765, 560, 0\n ], [767, 292, 0], [769, 293, 0], [771, 297, 0], [772, 3, 0], [774, 300,\n 0], [776, 300, 0], [777, 300, 0], [778, 300, 0], [781, 303, 0], [784, \n 563, 0], [785, 501, 0], [787, 308, 0], [788, 311, 0], [789, 565, 0], [\n 790, 314, 0], [791, 314, 0], [792, 316, 0], [795, 319, 0], [798, 324, 0\n ], [800, 326, 0], [801, 327, 0], [802, 327, 0], [805, 328, 0], [806, \n 328, 0], [808, 329, 0], [809, 329, 0], [810, 568, 0], [811, 568, 0], [\n 814, 570, 0], [815, 335, 0], [816, 335, 0], [817, 571, 0], [818, 34, 0],\n [821, 338, 0], [822, 339, 0], [825, 339, 0], [826, 339, 0], [829, 345, \n 0], [830, 345, 0], [833, 348, 0], [834, 572, 0], [835, 572, 0], [836, \n 572, 0], [837, 350, 0], [839, 350, 0], [840, 573, 0], [841, 573, 0], [\n 842, 352, 0], [843, 352, 0], [844, 352, 0], [845, 356, 0], [847, 36, 0],\n [848, 574, 0], [849, 574, 0], [850, 574, 0], [851, 575, 0], [852, 361, \n 0], [853, 362, 0], [854, 363, 0], [855, 363, 0], [856, 363, 0], [857, \n 365, 0], [858, 368, 0], [859, 368, 0], [860, 371, 0], [862, 372, 0], [\n 863, 374, 0], [864, 374, 0], [865, 375, 0], [867, 376, 0], [869, 503, 0\n ], [870, 503, 0], [872, 378, 0], [873, 576, 0], [874, 576, 0], [875, \n 381, 0], [877, 578, 0], [881, 388, 0], [882, 388, 0], [883, 388, 0], [\n 886, 394, 0], [889, 397, 0], [890, 40, 0], [893, 400, 0], [894, 400, 0],\n [895, 580, 0], [896, 581, 0], [898, 403, 0], [900, 405, 0], [902, 405, \n 0], [903, 406, 0], [905, 413, 0], [907, 583, 0], [909, 417, 0], [911, \n 419, 0], [913, 422, 0], [914, 423, 0], [915, 423, 0], [916, 43, 0], [\n 917, 43, 0], [918, 424, 0], [919, 427, 0], [920, 428, 0], [921, 
428, 0],\n [922, 429, 0], [923, 432, 0], [925, 44, 0], [928, 435, 0], [931, 439, 0\n ], [934, 45, 0], [935, 45, 0], [936, 445, 0], [937, 447, 0], [939, 450,\n 0], [940, 451, 0], [942, 458, 0], [943, 458, 0], [944, 458, 0], [945, \n 459, 0], [946, 459, 0], [948, 462, 0], [950, 462, 0], [951, 47, 0], [\n 952, 47, 0], [956, 478, 0], [957, 478, 0], [958, 478, 0], [959, 478, 0],\n [960, 479, 0], [963, 481, 0], [965, 49, 0], [966, 49, 0], [967, 49, 0],\n [968, 486, 0], [969, 486, 0], [971, 51, 0], [973, 506, 0], [976, 58, 0],\n [977, 59, 0], [978, 491, 0], [980, 508, 0], [981, 62, 0], [982, 62, 0],\n [983, 62, 0], [984, 63, 0], [985, 63, 0], [986, 64, 0], [987, 65, 0], [\n 988, 66, 0], [990, 67, 0], [993, 67, 0], [994, 67, 0], [995, 509, 0], [\n 996, 510, 0], [997, 510, 0], [998, 70, 0], [999, 70, 0], [1000, 71, 0],\n [1002, 71, 0], [1003, 72, 0], [1006, 511, 0], [1007, 511, 0], [1008, 75,\n 0], [1010, 79, 0], [1011, 79, 0], [1012, 81, 0], [1014, 83, 0], [1018, \n 514, 0], [1019, 514, 0], [1023, 515, 0], [1025, 518, 0], [1026, 518, 0],\n [1028, 221, 0], [1029, 268, 0], [1030, 269, 0], [1031, 498, 0], [1032, \n 1, 0], [1033, 3, 0], [1034, 4, 0], [1035, 6, 0], [1036, 7, 0], [1037, 8,\n 0], [1038, 9, 0], [1039, 11, 0], [1041, 16, 0], [1042, 17, 0], [1044, \n 21, 0], [1046, 25, 0], [1047, 27, 0], [1048, 28, 0], [1049, 29, 0], [\n 1050, 31, 0], [1051, 33, 0], [1052, 34, 0], [1053, 35, 0], [1054, 36, 0\n ], [1055, 38, 0], [1056, 39, 0], [1057, 40, 0], [1058, 41, 0], [1059, \n 43, 0], [1060, 44, 0], [1061, 45, 0], [1062, 47, 0], [1063, 48, 0], [\n 1064, 49, 0], [1065, 50, 0], [1066, 51, 0], [1067, 53, 0], [1068, 54, 0\n ], [1069, 55, 0], [1070, 57, 0], [1071, 58, 0], [1072, 59, 0], [1073, \n 60, 0], [1074, 62, 0], [1075, 63, 0], [1077, 65, 0], [1078, 66, 0], [\n 1079, 67, 0], [1080, 70, 0], [1081, 71, 0], [1082, 72, 0], [1083, 73, 0\n ], [1084, 75, 0], [1085, 76, 0], [1086, 77, 0], [1087, 79, 0], [1088, \n 80, 0], [1089, 81, 0], [1090, 82, 0], [1091, 83, 0], [1092, 84, 0], [\n 
1093, 85, 0], [1094, 88, 0], [1095, 89, 0], [1096, 90, 0], [1097, 91, 0\n ], [1098, 92, 0], [1099, 93, 0], [1100, 97, 0], [1101, 98, 0], [1102, \n 101, 0], [1103, 102, 0], [1104, 103, 0], [1105, 108, 0], [1106, 109, 0],\n [1107, 110, 0], [1108, 111, 0], [1109, 112, 0], [1110, 113, 0], [1111, \n 114, 0], [1112, 115, 0], [1113, 116, 0], [1114, 118, 0], [1115, 119, 0],\n [1116, 121, 0], [1117, 122, 0], [1118, 126, 0], [1119, 127, 0], [1120, \n 130, 0], [1121, 131, 0], [1122, 132, 0], [1123, 133, 0], [1124, 134, 0],\n [1125, 135, 0], [1126, 136, 0], [1127, 137, 0], [1128, 139, 0], [1129, \n 140, 0], [1130, 141, 0], [1131, 142, 0], [1132, 144, 0], [1133, 145, 0],\n [1134, 146, 0], [1135, 147, 0], [1136, 148, 0], [1137, 149, 0], [1138, \n 150, 0], [1139, 151, 0], [1140, 152, 0], [1141, 153, 0], [1142, 154, 0],\n [1143, 155, 0], [1144, 158, 0], [1145, 161, 0], [1146, 162, 0], [1147, \n 163, 0], [1148, 164, 0], [1149, 166, 0], [1150, 167, 0], [1151, 168, 0],\n [1152, 169, 0], [1153, 170, 0], [1154, 171, 0], [1155, 172, 0], [1156, \n 173, 0], [1157, 174, 0], [1158, 175, 0], [1159, 176, 0], [1160, 177, 0],\n [1161, 178, 0], [1162, 179, 0], [1164, 181, 0], [1166, 183, 0], [1167, \n 185, 0], [1168, 186, 0], [1169, 187, 0], [1170, 188, 0], [1171, 189, 0],\n [1172, 190, 0], [1173, 192, 0], [1174, 193, 0], [1175, 194, 0], [1176, \n 196, 0], [1177, 197, 0], [1178, 198, 0], [1179, 199, 0], [1180, 200, 0],\n [1181, 202, 0], [1182, 203, 0], [1183, 204, 0], [1184, 205, 0], [1185, \n 206, 0], [1186, 207, 0], [1187, 208, 0], [1188, 209, 0], [1189, 210, 0],\n [1190, 211, 0], [1191, 212, 0], [1192, 213, 0], [1193, 214, 0], [1194, \n 215, 0], [1195, 216, 0], [1196, 217, 0], [1197, 218, 0], [1198, 219, 0],\n [1199, 221, 0], [1200, 222, 0], [1201, 223, 0], [1202, 224, 0], [1203, \n 225, 0], [1204, 226, 0], [1205, 227, 0], [1206, 228, 0], [1207, 229, 0],\n [1208, 230, 0], [1209, 234, 0], [1210, 235, 0], [1211, 237, 0], [1212, \n 238, 0], [1213, 239, 0], [1214, 240, 0], [1215, 241, 0], [1216, 
242, 0],\n [1217, 243, 0], [1218, 244, 0], [1219, 247, 0], [1220, 251, 0], [1221, \n 252, 0], [1222, 253, 0], [1223, 254, 0], [1224, 255, 0], [1225, 256, 0],\n [1226, 257, 0], [1227, 258, 0], [1228, 260, 0], [1229, 263, 0], [1230, \n 264, 0], [1231, 266, 0], [1232, 267, 0], [1233, 268, 0], [1234, 269, 0],\n [1235, 271, 0], [1236, 272, 0], [1237, 273, 0], [1238, 274, 0], [1239, \n 275, 0], [1240, 276, 0], [1241, 278, 0], [1242, 281, 0], [1243, 282, 0],\n [1244, 283, 0], [1245, 284, 0], [1246, 285, 0], [1247, 286, 0], [1248, \n 287, 0], [1249, 288, 0], [1250, 289, 0], [1251, 291, 0], [1252, 292, 0],\n [1253, 293, 0], [1254, 294, 0], [1255, 295, 0], [1256, 296, 0], [1257, \n 297, 0], [1258, 298, 0], [1259, 299, 0], [1260, 300, 0], [1261, 302, 0],\n [1262, 303, 0], [1263, 304, 0], [1264, 307, 0], [1265, 308, 0], [1266, \n 309, 0], [1267, 311, 0], [1270, 316, 0], [1271, 317, 0], [1272, 318, 0],\n [1273, 319, 0], [1274, 321, 0], [1275, 322, 0], [1276, 323, 0], [1277, \n 324, 0], [1278, 325, 0], [1279, 326, 0], [1280, 327, 0], [1282, 329, 0],\n [1283, 331, 0], [1284, 333, 0], [1285, 335, 0], [1286, 337, 0], [1287, \n 338, 0], [1288, 339, 0], [1289, 340, 0], [1290, 341, 0], [1291, 342, 0],\n [1292, 343, 0], [1293, 344, 0], [1294, 345, 0], [1295, 346, 0], [1296, \n 347, 0], [1297, 348, 0], [1300, 353, 0], [1301, 354, 0], [1302, 355, 0],\n [1303, 356, 0], [1304, 357, 0], [1305, 359, 0], [1306, 361, 0], [1307, \n 362, 0], [1308, 363, 0], [1309, 364, 0], [1310, 365, 0], [1311, 366, 0],\n [1312, 367, 0], [1313, 368, 0], [1314, 369, 0], [1315, 370, 0], [1316, \n 371, 0], [1317, 372, 0], [1318, 373, 0], [1319, 374, 0], [1320, 375, 0],\n [1321, 376, 0], [1322, 377, 0], [1323, 378, 0], [1324, 379, 0], [1325, \n 381, 0], [1326, 384, 0], [1327, 385, 0], [1328, 386, 0], [1329, 387, 0],\n [1330, 388, 0], [1331, 390, 0], [1332, 391, 0], [1333, 392, 0], [1334, \n 393, 0], [1336, 395, 0], [1337, 396, 0], [1338, 397, 0], [1339, 398, 0],\n [1340, 399, 0], [1341, 400, 0], [1342, 403, 0], 
[1343, 404, 0], [1344, \n 405, 0], [1345, 406, 0], [1346, 407, 0], [1348, 410, 0], [1349, 411, 0],\n [1350, 412, 0], [1351, 413, 0], [1352, 414, 0], [1355, 418, 0], [1356, \n 419, 0], [1357, 420, 0], [1358, 421, 0], [1359, 422, 0], [1360, 423, 0],\n [1361, 424, 0], [1362, 425, 0], [1363, 426, 0], [1364, 427, 0], [1365, \n 428, 0], [1366, 429, 0], [1367, 430, 0], [1368, 431, 0], [1369, 432, 0],\n [1370, 433, 0], [1371, 434, 0], [1372, 435, 0], [1373, 436, 0], [1374, \n 437, 0], [1375, 438, 0], [1376, 439, 0], [1377, 440, 0], [1378, 441, 0],\n [1379, 442, 0], [1380, 443, 0], [1381, 445, 0], [1382, 446, 0], [1383, \n 447, 0], [1384, 448, 0], [1385, 449, 0], [1386, 450, 0], [1387, 451, 0],\n [1388, 453, 0], [1389, 454, 0], [1390, 455, 0], [1391, 456, 0], [1392, \n 457, 0], [1393, 458, 0], [1394, 459, 0], [1395, 460, 0], [1396, 461, 0],\n [1397, 462, 0], [1398, 463, 0], [1399, 464, 0], [1400, 465, 0], [1401, \n 466, 0], [1402, 467, 0], [1403, 468, 0], [1404, 469, 0], [1405, 470, 0],\n [1406, 471, 0], [1407, 472, 0], [1408, 473, 0], [1409, 474, 0], [1410, \n 475, 0], [1411, 476, 0], [1412, 477, 0], [1413, 478, 0], [1414, 479, 0],\n [1415, 480, 0], [1416, 481, 0], [1417, 482, 0], [1418, 483, 0], [1419, \n 484, 0], [1421, 486, 0], [1422, 487, 0], [1423, 488, 0], [1424, 489, 0],\n [1425, 490, 0], [1426, 491, 0], [1427, 492, 0], [1428, 493, 0], [1431, \n 496, 0], [1432, 497, 0], [1433, 498, 0], [1434, 499, 0], [1435, 500, 0],\n [1436, 501, 0], [1437, 502, 0], [1438, 503, 0], [1439, 504, 0], [1440, \n 505, 0], [1441, 506, 0], [1442, 507, 0], [1443, 508, 0], [1444, 509, 0],\n [1445, 510, 0], [1446, 511, 0], [1447, 512, 0], [1448, 513, 0], [1449, \n 514, 0], [1450, 515, 0], [1451, 516, 0], [1452, 517, 0], [1453, 518, 0],\n [1454, 519, 0], [1455, 520, 0], [1456, 521, 0], [1457, 522, 0], [1458, \n 523, 0], [1459, 524, 0], [1460, 525, 0], [1461, 526, 0], [1462, 527, 0],\n [1463, 528, 0], [1464, 529, 0], [1465, 530, 0], [1466, 531, 0], [1467, \n 532, 0], [1468, 533, 0], [1469, 534, 
0], [1470, 535, 0], [1471, 536, 0],\n [1472, 537, 0], [1473, 538, 0], [1474, 539, 0], [1475, 540, 0], [1476, \n 541, 0], [1477, 542, 0], [1479, 544, 0], [1480, 545, 0], [1481, 546, 0],\n [1482, 547, 0], [1483, 548, 0], [1484, 549, 0], [1485, 550, 0], [1486, \n 551, 0], [1487, 552, 0], [1488, 554, 0], [1489, 555, 0], [1490, 556, 0],\n [1491, 557, 0], [1492, 558, 0], [1493, 559, 0], [1494, 560, 0], [1495, \n 561, 0], [1497, 563, 0], [1498, 564, 0], [1500, 566, 0], [1501, 567, 0],\n [1502, 568, 0], [1503, 569, 0], [1504, 570, 0], [1505, 571, 0], [1506, \n 572, 0], [1507, 573, 0], [1508, 574, 0], [1510, 576, 0], [1511, 577, 0],\n [1512, 578, 0], [1513, 579, 0], [1514, 580, 0], [1516, 582, 0], [1517, \n 583, 0], [1518, 584, 0], [1519, 585, 0], [1520, 1, 0], [1521, 3, 0], [\n 1522, 4, 0], [1523, 6, 0], [1524, 7, 0], [1525, 8, 0], [1526, 9, 0], [\n 1527, 11, 0], [1528, 14, 0], [1529, 16, 0], [1530, 17, 0], [1531, 19, 0\n ], [1532, 21, 0], [1534, 25, 0], [1535, 27, 0], [1536, 28, 0], [1537, \n 29, 0], [1538, 31, 0], [1539, 33, 0], [1540, 34, 0], [1541, 35, 0], [\n 1542, 36, 0], [1543, 38, 0], [1544, 39, 0], [1545, 40, 0], [1546, 41, 0\n ], [1547, 43, 0], [1548, 44, 0], [1549, 45, 0], [1550, 47, 0], [1551, \n 48, 0], [1552, 49, 0], [1553, 50, 0], [1554, 51, 0], [1555, 53, 0], [\n 1556, 54, 0], [1557, 55, 0], [1558, 57, 0], [1559, 58, 0], [1560, 59, 0\n ], [1561, 60, 0], [1562, 62, 0], [1563, 63, 0], [1564, 64, 0], [1565, \n 65, 0], [1566, 66, 0], [1567, 67, 0], [1568, 70, 0], [1569, 71, 0], [\n 1570, 72, 0], [1571, 73, 0], [1572, 75, 0], [1573, 76, 0], [1574, 77, 0\n ], [1575, 79, 0], [1576, 80, 0], [1577, 81, 0], [1578, 82, 0], [1579, \n 83, 0], [1580, 84, 0], [1581, 85, 0], [1582, 88, 0], [1583, 89, 0], [\n 1584, 90, 0], [1585, 91, 0], [1586, 92, 0], [1587, 93, 0], [1588, 97, 0\n ], [1589, 98, 0], [1590, 101, 0], [1591, 102, 0], [1592, 103, 0], [1593,\n 108, 0], [1594, 109, 0], [1595, 110, 0], [1596, 111, 0], [1597, 112, 0],\n [1598, 113, 0], [1599, 114, 0], [1600, 115, 
0], [1601, 116, 0], [1602, \n 118, 0], [1603, 119, 0], [1604, 121, 0], [1605, 122, 0], [1606, 126, 0],\n [1607, 127, 0], [1608, 130, 0], [1609, 131, 0], [1610, 132, 0], [1611, \n 133, 0], [1612, 134, 0], [1613, 135, 0], [1614, 136, 0], [1615, 137, 0],\n [1616, 139, 0], [1617, 140, 0], [1618, 141, 0], [1619, 142, 0], [1620, \n 144, 0], [1621, 145, 0], [1622, 146, 0], [1623, 147, 0], [1624, 148, 0],\n [1625, 149, 0], [1626, 150, 0], [1627, 151, 0], [1628, 152, 0], [1629, \n 153, 0], [1630, 154, 0], [1631, 155, 0], [1632, 158, 0], [1633, 161, 0],\n [1634, 162, 0], [1635, 163, 0], [1636, 164, 0], [1637, 166, 0], [1638, \n 167, 0], [1639, 168, 0], [1640, 169, 0], [1641, 170, 0], [1642, 171, 0],\n [1643, 172, 0], [1644, 173, 0], [1645, 174, 0], [1646, 175, 0], [1647, \n 176, 0], [1648, 177, 0], [1649, 178, 0], [1650, 179, 0], [1651, 180, 0],\n [1652, 181, 0], [1653, 182, 0], [1654, 183, 0], [1655, 185, 0], [1656, \n 186, 0], [1657, 187, 0], [1658, 188, 0], [1659, 189, 0], [1660, 190, 0],\n [1661, 192, 0], [1662, 193, 0], [1663, 194, 0], [1664, 196, 0], [1665, \n 197, 0], [1666, 198, 0], [1667, 199, 0], [1668, 200, 0], [1669, 202, 0],\n [1670, 203, 0], [1671, 204, 0], [1672, 205, 0], [1673, 206, 0], [1674, \n 207, 0], [1675, 208, 0], [1676, 209, 0], [1677, 210, 0], [1678, 211, 0],\n [1679, 212, 0], [1680, 213, 0], [1681, 214, 0], [1682, 215, 0], [1683, \n 216, 0], [1684, 217, 0], [1685, 218, 0], [1686, 219, 0], [1687, 221, 0],\n [1688, 222, 0], [1689, 223, 0], [1690, 224, 0], [1691, 225, 0], [1692, \n 226, 0], [1693, 227, 0], [1694, 228, 0], [1695, 229, 0], [1696, 230, 0],\n [1697, 234, 0], [1698, 235, 0], [1699, 237, 0], [1700, 238, 0], [1701, \n 239, 0], [1702, 240, 0], [1703, 241, 0], [1704, 242, 0], [1705, 243, 0],\n [1706, 244, 0], [1707, 247, 0], [1708, 251, 0], [1709, 252, 0], [1710, \n 253, 0], [1711, 254, 0], [1712, 255, 0], [1713, 256, 0], [1714, 257, 0],\n [1715, 258, 0], [1716, 260, 0], [1717, 263, 0], [1718, 264, 0], [1719, \n 266, 0], [1720, 267, 0], [1721, 
268, 0], [1722, 269, 0], [1723, 271, 0],\n [1724, 272, 0], [1725, 273, 0], [1726, 274, 0], [1727, 275, 0], [1728, \n 276, 0], [1729, 278, 0], [1730, 281, 0], [1731, 282, 0], [1732, 283, 0],\n [1733, 284, 0], [1734, 285, 0], [1735, 286, 0], [1736, 287, 0], [1737, \n 288, 0], [1738, 289, 0], [1739, 291, 0], [1740, 292, 0], [1741, 293, 0],\n [1742, 294, 0], [1743, 295, 0], [1744, 296, 0], [1745, 297, 0], [1746, \n 298, 0], [1747, 299, 0], [1748, 300, 0], [1749, 302, 0], [1750, 303, 0],\n [1751, 304, 0], [1752, 307, 0], [1753, 308, 0], [1754, 309, 0], [1755, \n 311, 0], [1756, 312, 0], [1757, 314, 0], [1758, 316, 0], [1759, 317, 0],\n [1760, 318, 0], [1761, 319, 0], [1762, 321, 0], [1763, 322, 0], [1764, \n 323, 0], [1765, 324, 0], [1766, 325, 0], [1767, 326, 0], [1768, 327, 0],\n [1769, 328, 0], [1770, 329, 0], [1771, 331, 0], [1772, 333, 0], [1773, \n 335, 0], [1774, 337, 0], [1775, 338, 0], [1776, 339, 0], [1777, 340, 0],\n [1778, 341, 0], [1779, 342, 0], [1780, 343, 0], [1781, 344, 0], [1782, \n 345, 0], [1783, 346, 0], [1784, 347, 0], [1785, 348, 0], [1786, 350, 0],\n [1787, 352, 0], [1788, 353, 0], [1789, 354, 0], [1790, 355, 0], [1791, \n 356, 0], [1792, 357, 0], [1793, 359, 0], [1794, 361, 0], [1795, 362, 0],\n [1796, 363, 0], [1797, 364, 0], [1798, 365, 0], [1799, 366, 0], [1800, \n 367, 0], [1801, 368, 0], [1802, 369, 0], [1803, 370, 0], [1804, 371, 0],\n [1805, 372, 0], [1806, 373, 0], [1807, 374, 0], [1808, 375, 0], [1809, \n 376, 0], [1810, 377, 0], [1811, 378, 0], [1812, 379, 0], [1813, 381, 0],\n [1814, 384, 0], [1815, 385, 0], [1816, 386, 0], [1817, 387, 0], [1818, \n 388, 0], [1819, 390, 0], [1820, 391, 0], [1821, 392, 0], [1822, 393, 0],\n [1823, 394, 0], [1824, 395, 0], [1825, 396, 0], [1826, 397, 0], [1827, \n 398, 0], [1828, 399, 0], [1829, 400, 0], [1830, 403, 0], [1831, 404, 0],\n [1832, 405, 0], [1833, 406, 0], [1834, 407, 0], [1836, 410, 0], [1837, \n 411, 0], [1838, 412, 0], [1839, 413, 0], [1840, 414, 0], [1841, 416, 0],\n [1842, 417, 0], 
[1843, 418, 0], [1844, 419, 0], [1845, 420, 0], [1846, \n 421, 0], [1847, 422, 0], [1848, 423, 0], [1849, 424, 0], [1850, 425, 0],\n [1851, 426, 0], [1852, 427, 0], [1853, 428, 0], [1854, 429, 0], [1855, \n 430, 0], [1856, 431, 0], [1857, 432, 0], [1858, 433, 0], [1860, 435, 0],\n [1861, 436, 0], [1862, 437, 0], [1863, 438, 0], [1864, 439, 0], [1865, \n 440, 0], [1866, 441, 0], [1867, 442, 0], [1868, 443, 0], [1869, 445, 0],\n [1870, 446, 0], [1871, 447, 0], [1872, 448, 0], [1873, 449, 0], [1874, \n 450, 0], [1875, 451, 0], [1876, 453, 0], [1877, 454, 0], [1878, 455, 0],\n [1879, 456, 0], [1880, 457, 0], [1881, 458, 0], [1882, 459, 0], [1883, \n 460, 0], [1884, 461, 0], [1885, 462, 0], [1886, 463, 0], [1887, 464, 0],\n [1888, 465, 0], [1889, 466, 0], [1890, 467, 0], [1891, 468, 0], [1892, \n 469, 0], [1893, 470, 0], [1894, 471, 0], [1895, 472, 0], [1896, 473, 0],\n [1897, 474, 0], [1898, 475, 0], [1899, 476, 0], [1900, 477, 0], [1901, \n 478, 0], [1902, 479, 0], [1903, 480, 0], [1904, 481, 0], [1905, 482, 0],\n [1906, 483, 0], [1907, 484, 0], [1908, 485, 0], [1909, 486, 0], [1910, \n 487, 0], [1911, 488, 0], [1912, 489, 0], [1913, 490, 0], [1914, 491, 0],\n [1915, 492, 0], [1916, 493, 0], [1917, 494, 0], [1918, 495, 0], [1919, \n 496, 0], [1920, 497, 0], [1921, 498, 0], [1922, 499, 0], [1923, 500, 0],\n [1924, 501, 0], [1925, 502, 0], [1926, 503, 0], [1927, 504, 0], [1928, \n 505, 0], [1929, 506, 0], [1930, 507, 0], [1931, 508, 0], [1932, 509, 0],\n [1933, 510, 0], [1934, 511, 0], [1935, 512, 0], [1936, 513, 0], [1937, \n 514, 0], [1938, 515, 0], [1939, 516, 0], [1940, 517, 0], [1941, 518, 0],\n [1942, 519, 0], [1943, 520, 0], [1944, 521, 0], [1945, 522, 0], [1946, \n 523, 0], [1947, 524, 0], [1948, 525, 0], [1949, 526, 0], [1950, 527, 0],\n [1951, 528, 0], [1952, 529, 0], [1953, 530, 0], [1954, 531, 0], [1955, \n 532, 0], [1956, 533, 0], [1957, 534, 0], [1958, 535, 0], [1959, 536, 0],\n [1960, 537, 0], [1961, 538, 0], [1962, 539, 0], [1963, 540, 0], [1964, \n 541, 
0], [1965, 542, 0], [1966, 543, 0], [1967, 544, 0], [1968, 545, 0],\n [1969, 546, 0], [1970, 547, 0], [1971, 548, 0], [1972, 549, 0], [1973, \n 550, 0], [1974, 551, 0], [1975, 552, 0], [1976, 553, 0], [1977, 554, 0],\n [1978, 555, 0], [1979, 556, 0], [1980, 557, 0], [1981, 558, 0], [1982, \n 559, 0], [1983, 560, 0], [1984, 561, 0], [1985, 562, 0], [1986, 563, 0],\n [1987, 564, 0], [1988, 565, 0], [1989, 566, 0], [1990, 567, 0], [1991, \n 568, 0], [1992, 569, 0], [1993, 570, 0], [1994, 571, 0], [1995, 572, 0],\n [1996, 573, 0], [1997, 574, 0], [1998, 575, 0], [1999, 576, 0], [2000, \n 577, 0], [2001, 578, 0], [2002, 579, 0], [2003, 580, 0], [2004, 581, 0],\n [2005, 582, 0], [2006, 583, 0], [2007, 584, 0], [2008, 585, 0], [1, 490,\n 0], [3, 4, 1], [491, 6, 0], [7, 5, 0], [8, 9, 0], [492, 11, 0], [11, \n 493, 0], [492, 493, 1], [494, 14, 0], [13, 15, 0], [16, 5, 0], [17, 18,\n 1], [17, 12, 0], [14, 495, 0], [494, 19, 0], [20, 21, 0], [20, 22, 1],\n [497, 23, 0], [23, 499, 1], [25, 26, 0], [25, 22, 0], [23, 27, 0], [28,\n 23, 0], [8, 21, 0], [9, 29, 0], [30, 25, 1], [31, 32, 1], [32, 33, 1],\n [34, 35, 0], [35, 36, 0], [490, 6, 1], [37, 10, 1], [10, 38, 0], [37, \n 38, 1], [39, 40, 1], [39, 41, 1], [42, 41, 1], [18, 42, 1], [492, 43, 1\n ], [44, 45, 0], [44, 505, 0], [46, 12, 0], [47, 48, 0], [49, 50, 0], [\n 31, 33, 1], [31, 51, 0], [52, 53, 1], [52, 54, 0], [506, 55, 0], [506, \n 507, 1], [57, 506, 0], [57, 58, 0], [58, 506, 0], [59, 60, 1], [508, 62,\n 0], [30, 61, 1], [63, 506, 0], [13, 64, 0], [65, 66, 1], [59, 67, 0], [\n 61, 67, 0], [68, 69, 1], [70, 69, 1], [71, 72, 1], [73, 74, 1], [37, 75,\n 1], [72, 75, 0], [37, 72, 1], [76, 77, 1], [77, 51, 0], [73, 72, 1], [\n 18, 40, 1], [492, 45, 1], [10, 74, 1], [45, 511, 1], [78, 32, 1], [79, \n 80, 0], [81, 79, 1], [34, 82, 0], [83, 84, 0], [83, 499, 0], [85, 86, 0\n ], [87, 86, 1], [88, 89, 0], [90, 86, 1], [91, 86, 0], [86, 92, 0], [86,\n 93, 0], [94, 86, 1], [86, 95, 1], [513, 517, 0], [97, 66, 1], [42, 98, \n 0], 
[99, 100, 1], [42, 101, 0], [102, 42, 1], [103, 87, 0], [104, 103, \n 0], [105, 87, 0], [106, 107, 0], [108, 107, 0], [109, 106, 0], [110, \n 111, 1], [87, 112, 0], [113, 87, 0], [87, 85, 1], [110, 114, 1], [115, \n 116, 0], [117, 118, 0], [117, 119, 0], [117, 120, 1], [121, 122, 0], [\n 123, 124, 0], [125, 126, 0], [127, 119, 0], [118, 128, 0], [121, 119, 0\n ], [530, 527, 0], [125, 130, 0], [125, 123, 0], [131, 132, 0], [133, \n 123, 0], [524, 134, 0], [135, 136, 0], [123, 131, 0], [117, 128, 1], [\n 137, 521, 0], [531, 514, 0], [139, 521, 0], [140, 514, 0], [522, 141, 0\n ], [142, 523, 0], [530, 526, 0], [140, 532, 0], [142, 144, 0], [140, \n 522, 0], [145, 146, 0], [147, 523, 0], [144, 523, 0], [139, 523, 0], [\n 140, 141, 0], [528, 526, 0], [528, 148, 0], [149, 150, 0], [145, 528, 0\n ], [530, 151, 0], [524, 152, 0], [149, 525, 1], [139, 514, 0], [126, \n 120, 1], [530, 153, 0], [528, 147, 1], [528, 154, 0], [130, 120, 1], [\n 528, 155, 1], [524, 533, 0], [524, 149, 0], [154, 150, 0], [157, 110, 1\n ], [119, 158, 0], [159, 60, 0], [536, 161, 0], [115, 151, 0], [162, 134,\n 0], [115, 526, 0], [138, 87, 0], [123, 163, 0], [112, 164, 0], [112, \n 165, 0], [166, 165, 0], [167, 537, 0], [168, 104, 0], [531, 520, 0], [\n 139, 520, 0], [520, 169, 0], [168, 105, 0], [520, 170, 0], [171, 89, 0],\n [521, 172, 0], [123, 173, 0], [521, 174, 0], [37, 39, 0], [530, 175, 0],\n [530, 176, 0], [88, 530, 0], [177, 496, 1], [178, 525, 0], [179, 493, 1\n ], [180, 181, 1], [182, 180, 0], [179, 181, 0], [180, 493, 1], [183, 30,\n 0], [183, 21, 0], [538, 185, 0], [538, 89, 0], [184, 186, 0], [184, 187,\n 0], [520, 172, 0], [89, 175, 0], [185, 89, 0], [89, 188, 0], [189, 190,\n 0], [539, 172, 0], [504, 192, 0], [105, 186, 0], [105, 187, 0], [539, \n 193, 0], [187, 194, 0], [539, 540, 0], [539, 196, 0], [197, 540, 0], [\n 110, 198, 0], [197, 539, 0], [199, 537, 0], [134, 526, 0], [200, 193, 0\n ], [4, 201, 1], [202, 86, 0], [85, 203, 0], [147, 204, 0], [147, 205, 0\n ], [123, 206, 0], 
[537, 207, 0], [165, 208, 0], [4, 94, 1], [4, 2, 0],\n [209, 4, 0], [119, 163, 0], [210, 3, 0], [99, 211, 0], [99, 69, 1], [\n 212, 99, 0], [213, 214, 0], [510, 215, 0], [128, 69, 1], [216, 69, 1],\n [217, 98, 0], [504, 218, 0], [177, 504, 1], [219, 209, 0], [219, 220, 0\n ], [94, 95, 1], [159, 221, 1], [34, 161, 0], [222, 221, 0], [211, 52, 1\n ], [215, 223, 1], [224, 215, 0], [225, 224, 1], [224, 223, 0], [226, 6,\n 0], [7, 3, 1], [216, 227, 1], [228, 229, 0], [227, 230, 0], [231, 53, 1\n ], [544, 545, 0], [234, 235, 1], [546, 214, 1], [233, 227, 0], [237, \n 238, 0], [212, 100, 0], [519, 239, 0], [238, 519, 0], [213, 240, 0], [\n 241, 242, 1], [70, 241, 0], [509, 213, 0], [68, 243, 0], [243, 244, 0],\n [68, 244, 0], [544, 547, 1], [245, 227, 1], [246, 208, 0], [112, 208, 0\n ], [165, 247, 0], [537, 549, 0], [537, 550, 0], [537, 551, 0], [110, \n 251, 0], [510, 252, 1], [529, 253, 1], [237, 239, 1], [254, 238, 1], [\n 69, 255, 0], [510, 225, 1], [256, 257, 0], [258, 190, 0], [258, 259, 0],\n [260, 261, 1], [554, 553, 1], [515, 263, 0], [14, 264, 1], [116, 555, 0\n ], [151, 116, 0], [111, 114, 1], [77, 111, 0], [266, 525, 0], [267, 120,\n 1], [268, 269, 0], [556, 271, 0], [556, 272, 0], [529, 273, 0], [128, \n 274, 0], [34, 275, 0], [503, 276, 0], [503, 504, 1], [177, 218, 1], [\n 277, 278, 1], [557, 558, 1], [557, 559, 1], [559, 558, 1], [277, 78, 1],\n [277, 279, 1], [78, 279, 0], [281, 282, 0], [283, 161, 1], [268, 161, 1\n ], [256, 284, 0], [515, 516, 1], [263, 516, 0], [516, 285, 0], [63, 286,\n 0], [287, 516, 0], [8, 102, 1], [8, 101, 1], [80, 288, 0], [80, 289, 0],\n [276, 560, 0], [37, 290, 0], [290, 74, 1], [512, 291, 0], [78, 292, 1],\n [199, 548, 0], [491, 293, 0], [4, 294, 0], [490, 541, 1], [491, 295, 0],\n [491, 296, 0], [295, 297, 0], [508, 161, 0], [117, 123, 0], [133, 117, \n 0], [71, 74, 1], [74, 278, 1], [298, 515, 0], [5, 299, 0], [32, 292, 1],\n [5, 29, 1], [503, 560, 0], [300, 301, 1], [51, 300, 0], [244, 302, 1],\n [31, 302, 1], [51, 282, 
1], [303, 304, 0], [305, 304, 0], [305, 259, 0],\n [306, 307, 1], [305, 308, 0], [305, 309, 0], [310, 309, 1], [306, 309, \n 1], [311, 280, 0], [280, 278, 1], [311, 32, 1], [13, 312, 1], [313, 314,\n 0], [312, 313, 1], [547, 566, 1], [245, 315, 1], [312, 316, 0], [312, \n 314, 0], [554, 546, 1], [262, 216, 1], [317, 233, 0], [318, 317, 0], [\n 231, 52, 1], [319, 567, 0], [557, 321, 0], [277, 65, 1], [322, 288, 1],\n [322, 323, 0], [277, 324, 1], [324, 325, 0], [277, 325, 0], [326, 327, \n 0], [328, 326, 1], [328, 327, 1], [326, 329, 0], [568, 329, 1], [568, \n 326, 0], [332, 78, 1], [333, 306, 0], [332, 333, 0], [332, 334, 0], [66,\n 334, 1], [330, 335, 1], [336, 66, 0], [330, 336, 1], [68, 70, 0], [509,\n 337, 1], [324, 288, 0], [338, 559, 0], [339, 559, 0], [339, 340, 1], [\n 559, 340, 1], [341, 292, 0], [557, 342, 0], [558, 343, 0], [502, 340, 1\n ], [72, 32, 1], [344, 345, 0], [346, 47, 0], [46, 47, 0], [346, 345, 0],\n [347, 328, 0], [347, 348, 1], [571, 348, 1], [347, 572, 0], [571, 570, \n 1], [14, 350, 0], [350, 573, 0], [15, 351, 1], [352, 15, 0], [15, 335, \n 1], [232, 227, 0], [565, 544, 1], [235, 567, 1], [567, 286, 0], [353, \n 519, 0], [354, 353, 0], [355, 354, 0], [354, 356, 0], [357, 358, 0], [\n 574, 359, 0], [235, 575, 0], [167, 361, 0], [528, 362, 0], [363, 344, 0\n ], [259, 364, 1], [54, 56, 0], [365, 364, 0], [231, 366, 0], [30, 367, \n 0], [61, 367, 1], [254, 368, 0], [254, 369, 0], [254, 370, 0], [99, 358,\n 0], [354, 519, 0], [571, 371, 0], [207, 372, 0], [57, 373, 0], [209, \n 374, 0], [375, 376, 0], [376, 377, 0], [16, 49, 0], [318, 377, 0], [378,\n 297, 0], [562, 379, 0], [576, 563, 0], [576, 381, 0], [577, 576, 1], [\n 244, 383, 0], [244, 306, 1], [383, 306, 1], [380, 306, 0], [252, 225, 0\n ], [220, 76, 0], [542, 384, 0], [385, 384, 0], [542, 385, 0], [386, 385,\n 0], [387, 578, 0], [332, 388, 1], [382, 332, 1], [382, 388, 0], [579, \n 578, 0], [577, 387, 1], [144, 390, 0], [37, 49, 0], [391, 233, 0], [392,\n 310, 0], [260, 393, 0], 
[394, 230, 0], [395, 282, 1], [395, 244, 0], [\n 25, 396, 1], [81, 74, 0], [278, 80, 1], [81, 278, 1], [569, 570, 0], [\n 397, 552, 0], [542, 398, 0], [398, 385, 0], [399, 499, 0], [83, 399, 0],\n [498, 400, 0], [518, 239, 1], [575, 543, 0], [401, 360, 0], [580, 581, \n 0], [401, 402, 0], [403, 231, 0], [189, 360, 1], [234, 404, 0], [235, \n 404, 1], [235, 580, 0], [216, 259, 0], [405, 259, 0], [405, 318, 0], [\n 406, 230, 0], [542, 407, 0], [23, 408, 0], [577, 348, 0], [562, 564, 1],\n [582, 507, 0], [27, 410, 0], [501, 27, 0], [27, 411, 0], [411, 410, 0],\n [403, 360, 0], [412, 360, 0], [326, 413, 0], [414, 413, 0], [6, 297, 0],\n [554, 580, 1], [262, 401, 1], [499, 556, 1], [224, 229, 0], [583, 507, \n 0], [415, 307, 0], [416, 507, 0], [284, 561, 0], [543, 417, 0], [418, \n 506, 0], [220, 157, 0], [295, 419, 0], [295, 420, 0], [541, 62, 0], [52,\n 421, 0], [60, 160, 0], [535, 161, 0], [267, 282, 0], [52, 365, 0], [28,\n 27, 0], [30, 201, 1], [422, 81, 0], [119, 425, 0], [423, 425, 0], [424,\n 425, 0], [426, 428, 0], [427, 428, 0], [19, 428, 1], [45, 429, 0], [44,\n 429, 0], [505, 429, 0], [231, 431, 1], [190, 431, 1], [430, 431, 0], [\n 286, 433, 0], [432, 433, 0], [506, 433, 0], [23, 434, 0], [400, 434, 0],\n [500, 434, 0], [32, 436, 0], [435, 436, 0], [78, 436, 1], [86, 438, 1],\n [437, 438, 0], [221, 438, 0], [207, 439, 0], [516, 439, 0], [513, 439, \n 0], [181, 441, 1], [440, 441, 0], [504, 441, 1], [135, 442, 0], [109, \n 442, 0], [112, 442, 0], [113, 443, 0], [132, 443, 0], [107, 443, 0], [\n 444, 445, 0], [112, 445, 0], [109, 445, 0], [119, 447, 1], [100, 447, 1\n ], [446, 447, 0], [124, 448, 0], [125, 448, 0], [131, 448, 0], [449, \n 450, 0], [173, 450, 0], [184, 450, 0], [144, 451, 0], [140, 451, 0], [\n 514, 451, 0], [537, 585, 1], [141, 585, 0], [584, 585, 0], [522, 454, 0\n ], [144, 454, 0], [453, 454, 0], [199, 456, 0], [140, 456, 0], [455, \n 456, 0], [537, 456, 0], [538, 457, 0], [153, 457, 0], [176, 457, 0], [\n 524, 459, 0], [458, 459, 0], [134, 
459, 0], [460, 461, 0], [150, 461, 0\n ], [149, 461, 0], [521, 463, 0], [462, 463, 0], [538, 463, 0], [110, \n 464, 0], [90, 464, 0], [165, 464, 0], [458, 465, 0], [134, 465, 0], [\n 524, 465, 0], [466, 467, 0], [110, 467, 0], [165, 467, 0], [468, 469, 0\n ], [541, 469, 0], [490, 469, 0], [263, 471, 0], [470, 471, 0], [534, \n 471, 0], [136, 472, 0], [110, 472, 0], [251, 472, 0], [226, 474, 0], [\n 473, 474, 0], [257, 474, 0], [6, 474, 1], [299, 475, 1], [3, 475, 0], [\n 210, 475, 0], [297, 476, 0], [296, 476, 0], [295, 476, 0], [313, 478, 1\n ], [477, 478, 0], [245, 478, 0], [479, 481, 0], [565, 481, 0], [480, \n 481, 0], [415, 482, 0], [56, 482, 0], [409, 482, 0], [483, 484, 0], [3,\n 484, 0], [301, 484, 0], [233, 485, 0], [392, 485, 0], [391, 485, 0], [\n 579, 488, 0], [486, 488, 0], [487, 488, 0], [270, 489, 0], [331, 489, 0\n ], [396, 489, 1], [519, 253, 0], [382, 349, 1], [349, 351, 0], [459, \n 465, 0], [549, 550, 0], [550, 551, 0], [194, 195, 0], [247, 248, 0], [2,\n 294, 0], [549, 551, 0], [54, 365, 0], [131, 265, 0], [91, 92, 0], [247,\n 249, 0], [186, 191, 0], [129, 173, 0], [96, 202, 0], [53, 320, 0], [24,\n 396, 0], [133, 156, 0], [442, 452, 0], [445, 452, 0], [247, 250, 0], [\n 187, 195, 0], [216, 236, 0], [244, 389, 0], [394, 406, 0], [442, 445, 0\n ], [442, 444, 0], [198, 472, 0], [464, 467, 0], [198, 251, 0], [112, \n 143, 0], [2, 490, 0], [5, 491, 0], [10, 492, 0], [12, 493, 0], [13, 494,\n 0], [15, 495, 0], [18, 496, 0], [20, 497, 0], [22, 498, 0], [24, 499, 0\n ], [26, 500, 0], [30, 501, 0], [32, 502, 0], [37, 503, 0], [42, 504, 0],\n [46, 505, 0], [52, 506, 0], [56, 507, 0], [61, 508, 0], [68, 509, 0], [\n 69, 510, 0], [74, 511, 0], [78, 512, 0], [86, 513, 0], [87, 514, 0], [\n 94, 515, 0], [95, 516, 0], [96, 517, 0], [99, 518, 0], [100, 519, 0], [\n 104, 520, 0], [105, 521, 0], [106, 522, 0], [107, 523, 0], [117, 524, 0\n ], [120, 525, 0], [123, 526, 0], [124, 527, 0], [125, 528, 0], [128, \n 529, 0], [129, 530, 0], [138, 531, 0], [143, 532, 
0], [156, 533, 0], [\n 157, 534, 0], [159, 535, 0], [160, 536, 0], [165, 537, 0], [184, 538, 0\n ], [191, 539, 0], [195, 540, 0], [201, 541, 0], [220, 542, 0], [231, \n 543, 0], [232, 544, 0], [233, 545, 0], [236, 546, 0], [245, 547, 0], [\n 246, 548, 0], [248, 549, 0], [249, 550, 0], [250, 551, 0], [259, 552, 0\n ], [261, 553, 0], [262, 554, 0], [265, 555, 0], [270, 556, 0], [277, \n 557, 0], [279, 558, 0], [280, 559, 0], [290, 560, 0], [301, 561, 0], [\n 305, 562, 0], [306, 563, 0], [310, 564, 0], [313, 565, 0], [315, 566, 0\n ], [320, 567, 0], [330, 568, 0], [332, 569, 0], [334, 570, 0], [336, \n 571, 0], [349, 572, 0], [351, 573, 0], [358, 574, 0], [360, 575, 0], [\n 380, 576, 0], [382, 577, 0], [383, 578, 0], [389, 579, 0], [401, 580, 0\n ], [402, 581, 0], [409, 582, 0], [415, 583, 0], [444, 584, 0], [452, \n 585, 0]]'], {}), '([[586, 1, 0], [589, 108, 0], [590, 108, 0], [593, 112, 0], [594, 114,\n 0], [595, 115, 0], [597, 118, 0], [598, 118, 0], [599, 119, 0], [600, \n 119, 0], [601, 119, 0], [602, 121, 0], [603, 526, 0], [607, 127, 0], [\n 608, 127, 0], [609, 529, 0], [610, 530, 0], [612, 493, 0], [613, 130, 0\n ], [614, 130, 0], [616, 132, 0], [617, 133, 0], [618, 133, 0], [619, \n 134, 0], [621, 136, 0], [623, 139, 0], [624, 14, 0], [628, 142, 0], [\n 629, 145, 0], [631, 145, 0], [632, 145, 0], [637, 148, 0], [638, 149, 0\n ], [639, 150, 0], [640, 153, 0], [641, 155, 0], [642, 533, 0], [643, \n 534, 0], [646, 536, 0], [647, 536, 0], [650, 166, 0], [652, 167, 0], [\n 655, 170, 0], [657, 174, 0], [658, 175, 0], [661, 177, 0], [662, 178, 0\n ], [663, 178, 0], [666, 180, 0], [668, 183, 0], [670, 183, 0], [672, \n 185, 0], [675, 19, 0], [676, 19, 0], [678, 194, 0], [679, 196, 0], [681,\n 197, 0], [683, 200, 0], [687, 202, 0], [689, 204, 0], [691, 209, 0], [\n 693, 21, 0], [694, 21, 0], [695, 210, 0], [696, 211, 0], [697, 211, 0],\n [698, 212, 0], [701, 215, 0], [702, 215, 0], [704, 217, 0], [705, 217, \n 0], [707, 219, 0], [708, 221, 0], [711, 224, 0], [713, 
225, 0], [714, \n 225, 0], [716, 226, 0], [717, 227, 0], [719, 229, 0], [722, 545, 0], [\n 723, 235, 0], [724, 238, 0], [725, 239, 0], [727, 243, 0], [728, 244, 0\n ], [730, 547, 0], [731, 548, 0], [732, 247, 0], [733, 549, 0], [735, \n 253, 0], [737, 256, 0], [738, 258, 0], [739, 264, 0], [741, 264, 0], [\n 742, 264, 0], [743, 500, 0], [745, 273, 0], [746, 273, 0], [747, 273, 0\n ], [748, 274, 0], [749, 274, 0], [750, 557, 0], [753, 28, 0], [758, 286,\n 0], [760, 287, 0], [761, 288, 0], [762, 289, 0], [763, 560, 0], [765, \n 560, 0], [767, 292, 0], [769, 293, 0], [771, 297, 0], [772, 3, 0], [774,\n 300, 0], [776, 300, 0], [777, 300, 0], [778, 300, 0], [781, 303, 0], [\n 784, 563, 0], [785, 501, 0], [787, 308, 0], [788, 311, 0], [789, 565, 0\n ], [790, 314, 0], [791, 314, 0], [792, 316, 0], [795, 319, 0], [798, \n 324, 0], [800, 326, 0], [801, 327, 0], [802, 327, 0], [805, 328, 0], [\n 806, 328, 0], [808, 329, 0], [809, 329, 0], [810, 568, 0], [811, 568, 0\n ], [814, 570, 0], [815, 335, 0], [816, 335, 0], [817, 571, 0], [818, 34,\n 0], [821, 338, 0], [822, 339, 0], [825, 339, 0], [826, 339, 0], [829, \n 345, 0], [830, 345, 0], [833, 348, 0], [834, 572, 0], [835, 572, 0], [\n 836, 572, 0], [837, 350, 0], [839, 350, 0], [840, 573, 0], [841, 573, 0\n ], [842, 352, 0], [843, 352, 0], [844, 352, 0], [845, 356, 0], [847, 36,\n 0], [848, 574, 0], [849, 574, 0], [850, 574, 0], [851, 575, 0], [852, \n 361, 0], [853, 362, 0], [854, 363, 0], [855, 363, 0], [856, 363, 0], [\n 857, 365, 0], [858, 368, 0], [859, 368, 0], [860, 371, 0], [862, 372, 0\n ], [863, 374, 0], [864, 374, 0], [865, 375, 0], [867, 376, 0], [869, \n 503, 0], [870, 503, 0], [872, 378, 0], [873, 576, 0], [874, 576, 0], [\n 875, 381, 0], [877, 578, 0], [881, 388, 0], [882, 388, 0], [883, 388, 0\n ], [886, 394, 0], [889, 397, 0], [890, 40, 0], [893, 400, 0], [894, 400,\n 0], [895, 580, 0], [896, 581, 0], [898, 403, 0], [900, 405, 0], [902, \n 405, 0], [903, 406, 0], [905, 413, 0], [907, 583, 0], [909, 417, 0], 
[\n 911, 419, 0], [913, 422, 0], [914, 423, 0], [915, 423, 0], [916, 43, 0],\n [917, 43, 0], [918, 424, 0], [919, 427, 0], [920, 428, 0], [921, 428, 0\n ], [922, 429, 0], [923, 432, 0], [925, 44, 0], [928, 435, 0], [931, 439,\n 0], [934, 45, 0], [935, 45, 0], [936, 445, 0], [937, 447, 0], [939, 450,\n 0], [940, 451, 0], [942, 458, 0], [943, 458, 0], [944, 458, 0], [945, \n 459, 0], [946, 459, 0], [948, 462, 0], [950, 462, 0], [951, 47, 0], [\n 952, 47, 0], [956, 478, 0], [957, 478, 0], [958, 478, 0], [959, 478, 0],\n [960, 479, 0], [963, 481, 0], [965, 49, 0], [966, 49, 0], [967, 49, 0],\n [968, 486, 0], [969, 486, 0], [971, 51, 0], [973, 506, 0], [976, 58, 0],\n [977, 59, 0], [978, 491, 0], [980, 508, 0], [981, 62, 0], [982, 62, 0],\n [983, 62, 0], [984, 63, 0], [985, 63, 0], [986, 64, 0], [987, 65, 0], [\n 988, 66, 0], [990, 67, 0], [993, 67, 0], [994, 67, 0], [995, 509, 0], [\n 996, 510, 0], [997, 510, 0], [998, 70, 0], [999, 70, 0], [1000, 71, 0],\n [1002, 71, 0], [1003, 72, 0], [1006, 511, 0], [1007, 511, 0], [1008, 75,\n 0], [1010, 79, 0], [1011, 79, 0], [1012, 81, 0], [1014, 83, 0], [1018, \n 514, 0], [1019, 514, 0], [1023, 515, 0], [1025, 518, 0], [1026, 518, 0],\n [1028, 221, 0], [1029, 268, 0], [1030, 269, 0], [1031, 498, 0], [1032, \n 1, 0], [1033, 3, 0], [1034, 4, 0], [1035, 6, 0], [1036, 7, 0], [1037, 8,\n 0], [1038, 9, 0], [1039, 11, 0], [1041, 16, 0], [1042, 17, 0], [1044, \n 21, 0], [1046, 25, 0], [1047, 27, 0], [1048, 28, 0], [1049, 29, 0], [\n 1050, 31, 0], [1051, 33, 0], [1052, 34, 0], [1053, 35, 0], [1054, 36, 0\n ], [1055, 38, 0], [1056, 39, 0], [1057, 40, 0], [1058, 41, 0], [1059, \n 43, 0], [1060, 44, 0], [1061, 45, 0], [1062, 47, 0], [1063, 48, 0], [\n 1064, 49, 0], [1065, 50, 0], [1066, 51, 0], [1067, 53, 0], [1068, 54, 0\n ], [1069, 55, 0], [1070, 57, 0], [1071, 58, 0], [1072, 59, 0], [1073, \n 60, 0], [1074, 62, 0], [1075, 63, 0], [1077, 65, 0], [1078, 66, 0], [\n 1079, 67, 0], [1080, 70, 0], [1081, 71, 0], [1082, 72, 0], [1083, 73, 0\n 
], [1084, 75, 0], [1085, 76, 0], [1086, 77, 0], [1087, 79, 0], [1088, \n 80, 0], [1089, 81, 0], [1090, 82, 0], [1091, 83, 0], [1092, 84, 0], [\n 1093, 85, 0], [1094, 88, 0], [1095, 89, 0], [1096, 90, 0], [1097, 91, 0\n ], [1098, 92, 0], [1099, 93, 0], [1100, 97, 0], [1101, 98, 0], [1102, \n 101, 0], [1103, 102, 0], [1104, 103, 0], [1105, 108, 0], [1106, 109, 0],\n [1107, 110, 0], [1108, 111, 0], [1109, 112, 0], [1110, 113, 0], [1111, \n 114, 0], [1112, 115, 0], [1113, 116, 0], [1114, 118, 0], [1115, 119, 0],\n [1116, 121, 0], [1117, 122, 0], [1118, 126, 0], [1119, 127, 0], [1120, \n 130, 0], [1121, 131, 0], [1122, 132, 0], [1123, 133, 0], [1124, 134, 0],\n [1125, 135, 0], [1126, 136, 0], [1127, 137, 0], [1128, 139, 0], [1129, \n 140, 0], [1130, 141, 0], [1131, 142, 0], [1132, 144, 0], [1133, 145, 0],\n [1134, 146, 0], [1135, 147, 0], [1136, 148, 0], [1137, 149, 0], [1138, \n 150, 0], [1139, 151, 0], [1140, 152, 0], [1141, 153, 0], [1142, 154, 0],\n [1143, 155, 0], [1144, 158, 0], [1145, 161, 0], [1146, 162, 0], [1147, \n 163, 0], [1148, 164, 0], [1149, 166, 0], [1150, 167, 0], [1151, 168, 0],\n [1152, 169, 0], [1153, 170, 0], [1154, 171, 0], [1155, 172, 0], [1156, \n 173, 0], [1157, 174, 0], [1158, 175, 0], [1159, 176, 0], [1160, 177, 0],\n [1161, 178, 0], [1162, 179, 0], [1164, 181, 0], [1166, 183, 0], [1167, \n 185, 0], [1168, 186, 0], [1169, 187, 0], [1170, 188, 0], [1171, 189, 0],\n [1172, 190, 0], [1173, 192, 0], [1174, 193, 0], [1175, 194, 0], [1176, \n 196, 0], [1177, 197, 0], [1178, 198, 0], [1179, 199, 0], [1180, 200, 0],\n [1181, 202, 0], [1182, 203, 0], [1183, 204, 0], [1184, 205, 0], [1185, \n 206, 0], [1186, 207, 0], [1187, 208, 0], [1188, 209, 0], [1189, 210, 0],\n [1190, 211, 0], [1191, 212, 0], [1192, 213, 0], [1193, 214, 0], [1194, \n 215, 0], [1195, 216, 0], [1196, 217, 0], [1197, 218, 0], [1198, 219, 0],\n [1199, 221, 0], [1200, 222, 0], [1201, 223, 0], [1202, 224, 0], [1203, \n 225, 0], [1204, 226, 0], [1205, 227, 0], [1206, 228, 0], [1207, 229, 
0],\n [1208, 230, 0], [1209, 234, 0], [1210, 235, 0], [1211, 237, 0], [1212, \n 238, 0], [1213, 239, 0], [1214, 240, 0], [1215, 241, 0], [1216, 242, 0],\n [1217, 243, 0], [1218, 244, 0], [1219, 247, 0], [1220, 251, 0], [1221, \n 252, 0], [1222, 253, 0], [1223, 254, 0], [1224, 255, 0], [1225, 256, 0],\n [1226, 257, 0], [1227, 258, 0], [1228, 260, 0], [1229, 263, 0], [1230, \n 264, 0], [1231, 266, 0], [1232, 267, 0], [1233, 268, 0], [1234, 269, 0],\n [1235, 271, 0], [1236, 272, 0], [1237, 273, 0], [1238, 274, 0], [1239, \n 275, 0], [1240, 276, 0], [1241, 278, 0], [1242, 281, 0], [1243, 282, 0],\n [1244, 283, 0], [1245, 284, 0], [1246, 285, 0], [1247, 286, 0], [1248, \n 287, 0], [1249, 288, 0], [1250, 289, 0], [1251, 291, 0], [1252, 292, 0],\n [1253, 293, 0], [1254, 294, 0], [1255, 295, 0], [1256, 296, 0], [1257, \n 297, 0], [1258, 298, 0], [1259, 299, 0], [1260, 300, 0], [1261, 302, 0],\n [1262, 303, 0], [1263, 304, 0], [1264, 307, 0], [1265, 308, 0], [1266, \n 309, 0], [1267, 311, 0], [1270, 316, 0], [1271, 317, 0], [1272, 318, 0],\n [1273, 319, 0], [1274, 321, 0], [1275, 322, 0], [1276, 323, 0], [1277, \n 324, 0], [1278, 325, 0], [1279, 326, 0], [1280, 327, 0], [1282, 329, 0],\n [1283, 331, 0], [1284, 333, 0], [1285, 335, 0], [1286, 337, 0], [1287, \n 338, 0], [1288, 339, 0], [1289, 340, 0], [1290, 341, 0], [1291, 342, 0],\n [1292, 343, 0], [1293, 344, 0], [1294, 345, 0], [1295, 346, 0], [1296, \n 347, 0], [1297, 348, 0], [1300, 353, 0], [1301, 354, 0], [1302, 355, 0],\n [1303, 356, 0], [1304, 357, 0], [1305, 359, 0], [1306, 361, 0], [1307, \n 362, 0], [1308, 363, 0], [1309, 364, 0], [1310, 365, 0], [1311, 366, 0],\n [1312, 367, 0], [1313, 368, 0], [1314, 369, 0], [1315, 370, 0], [1316, \n 371, 0], [1317, 372, 0], [1318, 373, 0], [1319, 374, 0], [1320, 375, 0],\n [1321, 376, 0], [1322, 377, 0], [1323, 378, 0], [1324, 379, 0], [1325, \n 381, 0], [1326, 384, 0], [1327, 385, 0], [1328, 386, 0], [1329, 387, 0],\n [1330, 388, 0], [1331, 390, 0], [1332, 391, 0], [1333, 
392, 0], [1334, \n 393, 0], [1336, 395, 0], [1337, 396, 0], [1338, 397, 0], [1339, 398, 0],\n [1340, 399, 0], [1341, 400, 0], [1342, 403, 0], [1343, 404, 0], [1344, \n 405, 0], [1345, 406, 0], [1346, 407, 0], [1348, 410, 0], [1349, 411, 0],\n [1350, 412, 0], [1351, 413, 0], [1352, 414, 0], [1355, 418, 0], [1356, \n 419, 0], [1357, 420, 0], [1358, 421, 0], [1359, 422, 0], [1360, 423, 0],\n [1361, 424, 0], [1362, 425, 0], [1363, 426, 0], [1364, 427, 0], [1365, \n 428, 0], [1366, 429, 0], [1367, 430, 0], [1368, 431, 0], [1369, 432, 0],\n [1370, 433, 0], [1371, 434, 0], [1372, 435, 0], [1373, 436, 0], [1374, \n 437, 0], [1375, 438, 0], [1376, 439, 0], [1377, 440, 0], [1378, 441, 0],\n [1379, 442, 0], [1380, 443, 0], [1381, 445, 0], [1382, 446, 0], [1383, \n 447, 0], [1384, 448, 0], [1385, 449, 0], [1386, 450, 0], [1387, 451, 0],\n [1388, 453, 0], [1389, 454, 0], [1390, 455, 0], [1391, 456, 0], [1392, \n 457, 0], [1393, 458, 0], [1394, 459, 0], [1395, 460, 0], [1396, 461, 0],\n [1397, 462, 0], [1398, 463, 0], [1399, 464, 0], [1400, 465, 0], [1401, \n 466, 0], [1402, 467, 0], [1403, 468, 0], [1404, 469, 0], [1405, 470, 0],\n [1406, 471, 0], [1407, 472, 0], [1408, 473, 0], [1409, 474, 0], [1410, \n 475, 0], [1411, 476, 0], [1412, 477, 0], [1413, 478, 0], [1414, 479, 0],\n [1415, 480, 0], [1416, 481, 0], [1417, 482, 0], [1418, 483, 0], [1419, \n 484, 0], [1421, 486, 0], [1422, 487, 0], [1423, 488, 0], [1424, 489, 0],\n [1425, 490, 0], [1426, 491, 0], [1427, 492, 0], [1428, 493, 0], [1431, \n 496, 0], [1432, 497, 0], [1433, 498, 0], [1434, 499, 0], [1435, 500, 0],\n [1436, 501, 0], [1437, 502, 0], [1438, 503, 0], [1439, 504, 0], [1440, \n 505, 0], [1441, 506, 0], [1442, 507, 0], [1443, 508, 0], [1444, 509, 0],\n [1445, 510, 0], [1446, 511, 0], [1447, 512, 0], [1448, 513, 0], [1449, \n 514, 0], [1450, 515, 0], [1451, 516, 0], [1452, 517, 0], [1453, 518, 0],\n [1454, 519, 0], [1455, 520, 0], [1456, 521, 0], [1457, 522, 0], [1458, \n 523, 0], [1459, 524, 0], [1460, 525, 0], 
[1461, 526, 0], [1462, 527, 0],\n [1463, 528, 0], [1464, 529, 0], [1465, 530, 0], [1466, 531, 0], [1467, \n 532, 0], [1468, 533, 0], [1469, 534, 0], [1470, 535, 0], [1471, 536, 0],\n [1472, 537, 0], [1473, 538, 0], [1474, 539, 0], [1475, 540, 0], [1476, \n 541, 0], [1477, 542, 0], [1479, 544, 0], [1480, 545, 0], [1481, 546, 0],\n [1482, 547, 0], [1483, 548, 0], [1484, 549, 0], [1485, 550, 0], [1486, \n 551, 0], [1487, 552, 0], [1488, 554, 0], [1489, 555, 0], [1490, 556, 0],\n [1491, 557, 0], [1492, 558, 0], [1493, 559, 0], [1494, 560, 0], [1495, \n 561, 0], [1497, 563, 0], [1498, 564, 0], [1500, 566, 0], [1501, 567, 0],\n [1502, 568, 0], [1503, 569, 0], [1504, 570, 0], [1505, 571, 0], [1506, \n 572, 0], [1507, 573, 0], [1508, 574, 0], [1510, 576, 0], [1511, 577, 0],\n [1512, 578, 0], [1513, 579, 0], [1514, 580, 0], [1516, 582, 0], [1517, \n 583, 0], [1518, 584, 0], [1519, 585, 0], [1520, 1, 0], [1521, 3, 0], [\n 1522, 4, 0], [1523, 6, 0], [1524, 7, 0], [1525, 8, 0], [1526, 9, 0], [\n 1527, 11, 0], [1528, 14, 0], [1529, 16, 0], [1530, 17, 0], [1531, 19, 0\n ], [1532, 21, 0], [1534, 25, 0], [1535, 27, 0], [1536, 28, 0], [1537, \n 29, 0], [1538, 31, 0], [1539, 33, 0], [1540, 34, 0], [1541, 35, 0], [\n 1542, 36, 0], [1543, 38, 0], [1544, 39, 0], [1545, 40, 0], [1546, 41, 0\n ], [1547, 43, 0], [1548, 44, 0], [1549, 45, 0], [1550, 47, 0], [1551, \n 48, 0], [1552, 49, 0], [1553, 50, 0], [1554, 51, 0], [1555, 53, 0], [\n 1556, 54, 0], [1557, 55, 0], [1558, 57, 0], [1559, 58, 0], [1560, 59, 0\n ], [1561, 60, 0], [1562, 62, 0], [1563, 63, 0], [1564, 64, 0], [1565, \n 65, 0], [1566, 66, 0], [1567, 67, 0], [1568, 70, 0], [1569, 71, 0], [\n 1570, 72, 0], [1571, 73, 0], [1572, 75, 0], [1573, 76, 0], [1574, 77, 0\n ], [1575, 79, 0], [1576, 80, 0], [1577, 81, 0], [1578, 82, 0], [1579, \n 83, 0], [1580, 84, 0], [1581, 85, 0], [1582, 88, 0], [1583, 89, 0], [\n 1584, 90, 0], [1585, 91, 0], [1586, 92, 0], [1587, 93, 0], [1588, 97, 0\n ], [1589, 98, 0], [1590, 101, 0], [1591, 102, 0], 
[1592, 103, 0], [1593,\n 108, 0], [1594, 109, 0], [1595, 110, 0], [1596, 111, 0], [1597, 112, 0],\n [1598, 113, 0], [1599, 114, 0], [1600, 115, 0], [1601, 116, 0], [1602, \n 118, 0], [1603, 119, 0], [1604, 121, 0], [1605, 122, 0], [1606, 126, 0],\n [1607, 127, 0], [1608, 130, 0], [1609, 131, 0], [1610, 132, 0], [1611, \n 133, 0], [1612, 134, 0], [1613, 135, 0], [1614, 136, 0], [1615, 137, 0],\n [1616, 139, 0], [1617, 140, 0], [1618, 141, 0], [1619, 142, 0], [1620, \n 144, 0], [1621, 145, 0], [1622, 146, 0], [1623, 147, 0], [1624, 148, 0],\n [1625, 149, 0], [1626, 150, 0], [1627, 151, 0], [1628, 152, 0], [1629, \n 153, 0], [1630, 154, 0], [1631, 155, 0], [1632, 158, 0], [1633, 161, 0],\n [1634, 162, 0], [1635, 163, 0], [1636, 164, 0], [1637, 166, 0], [1638, \n 167, 0], [1639, 168, 0], [1640, 169, 0], [1641, 170, 0], [1642, 171, 0],\n [1643, 172, 0], [1644, 173, 0], [1645, 174, 0], [1646, 175, 0], [1647, \n 176, 0], [1648, 177, 0], [1649, 178, 0], [1650, 179, 0], [1651, 180, 0],\n [1652, 181, 0], [1653, 182, 0], [1654, 183, 0], [1655, 185, 0], [1656, \n 186, 0], [1657, 187, 0], [1658, 188, 0], [1659, 189, 0], [1660, 190, 0],\n [1661, 192, 0], [1662, 193, 0], [1663, 194, 0], [1664, 196, 0], [1665, \n 197, 0], [1666, 198, 0], [1667, 199, 0], [1668, 200, 0], [1669, 202, 0],\n [1670, 203, 0], [1671, 204, 0], [1672, 205, 0], [1673, 206, 0], [1674, \n 207, 0], [1675, 208, 0], [1676, 209, 0], [1677, 210, 0], [1678, 211, 0],\n [1679, 212, 0], [1680, 213, 0], [1681, 214, 0], [1682, 215, 0], [1683, \n 216, 0], [1684, 217, 0], [1685, 218, 0], [1686, 219, 0], [1687, 221, 0],\n [1688, 222, 0], [1689, 223, 0], [1690, 224, 0], [1691, 225, 0], [1692, \n 226, 0], [1693, 227, 0], [1694, 228, 0], [1695, 229, 0], [1696, 230, 0],\n [1697, 234, 0], [1698, 235, 0], [1699, 237, 0], [1700, 238, 0], [1701, \n 239, 0], [1702, 240, 0], [1703, 241, 0], [1704, 242, 0], [1705, 243, 0],\n [1706, 244, 0], [1707, 247, 0], [1708, 251, 0], [1709, 252, 0], [1710, \n 253, 0], [1711, 254, 0], [1712, 255, 
0], [1713, 256, 0], [1714, 257, 0],\n [1715, 258, 0], [1716, 260, 0], [1717, 263, 0], [1718, 264, 0], [1719, \n 266, 0], [1720, 267, 0], [1721, 268, 0], [1722, 269, 0], [1723, 271, 0],\n [1724, 272, 0], [1725, 273, 0], [1726, 274, 0], [1727, 275, 0], [1728, \n 276, 0], [1729, 278, 0], [1730, 281, 0], [1731, 282, 0], [1732, 283, 0],\n [1733, 284, 0], [1734, 285, 0], [1735, 286, 0], [1736, 287, 0], [1737, \n 288, 0], [1738, 289, 0], [1739, 291, 0], [1740, 292, 0], [1741, 293, 0],\n [1742, 294, 0], [1743, 295, 0], [1744, 296, 0], [1745, 297, 0], [1746, \n 298, 0], [1747, 299, 0], [1748, 300, 0], [1749, 302, 0], [1750, 303, 0],\n [1751, 304, 0], [1752, 307, 0], [1753, 308, 0], [1754, 309, 0], [1755, \n 311, 0], [1756, 312, 0], [1757, 314, 0], [1758, 316, 0], [1759, 317, 0],\n [1760, 318, 0], [1761, 319, 0], [1762, 321, 0], [1763, 322, 0], [1764, \n 323, 0], [1765, 324, 0], [1766, 325, 0], [1767, 326, 0], [1768, 327, 0],\n [1769, 328, 0], [1770, 329, 0], [1771, 331, 0], [1772, 333, 0], [1773, \n 335, 0], [1774, 337, 0], [1775, 338, 0], [1776, 339, 0], [1777, 340, 0],\n [1778, 341, 0], [1779, 342, 0], [1780, 343, 0], [1781, 344, 0], [1782, \n 345, 0], [1783, 346, 0], [1784, 347, 0], [1785, 348, 0], [1786, 350, 0],\n [1787, 352, 0], [1788, 353, 0], [1789, 354, 0], [1790, 355, 0], [1791, \n 356, 0], [1792, 357, 0], [1793, 359, 0], [1794, 361, 0], [1795, 362, 0],\n [1796, 363, 0], [1797, 364, 0], [1798, 365, 0], [1799, 366, 0], [1800, \n 367, 0], [1801, 368, 0], [1802, 369, 0], [1803, 370, 0], [1804, 371, 0],\n [1805, 372, 0], [1806, 373, 0], [1807, 374, 0], [1808, 375, 0], [1809, \n 376, 0], [1810, 377, 0], [1811, 378, 0], [1812, 379, 0], [1813, 381, 0],\n [1814, 384, 0], [1815, 385, 0], [1816, 386, 0], [1817, 387, 0], [1818, \n 388, 0], [1819, 390, 0], [1820, 391, 0], [1821, 392, 0], [1822, 393, 0],\n [1823, 394, 0], [1824, 395, 0], [1825, 396, 0], [1826, 397, 0], [1827, \n 398, 0], [1828, 399, 0], [1829, 400, 0], [1830, 403, 0], [1831, 404, 0],\n [1832, 405, 0], [1833, 
406, 0], [1834, 407, 0], [1836, 410, 0], [1837, \n 411, 0], [1838, 412, 0], [1839, 413, 0], [1840, 414, 0], [1841, 416, 0],\n [1842, 417, 0], [1843, 418, 0], [1844, 419, 0], [1845, 420, 0], [1846, \n 421, 0], [1847, 422, 0], [1848, 423, 0], [1849, 424, 0], [1850, 425, 0],\n [1851, 426, 0], [1852, 427, 0], [1853, 428, 0], [1854, 429, 0], [1855, \n 430, 0], [1856, 431, 0], [1857, 432, 0], [1858, 433, 0], [1860, 435, 0],\n [1861, 436, 0], [1862, 437, 0], [1863, 438, 0], [1864, 439, 0], [1865, \n 440, 0], [1866, 441, 0], [1867, 442, 0], [1868, 443, 0], [1869, 445, 0],\n [1870, 446, 0], [1871, 447, 0], [1872, 448, 0], [1873, 449, 0], [1874, \n 450, 0], [1875, 451, 0], [1876, 453, 0], [1877, 454, 0], [1878, 455, 0],\n [1879, 456, 0], [1880, 457, 0], [1881, 458, 0], [1882, 459, 0], [1883, \n 460, 0], [1884, 461, 0], [1885, 462, 0], [1886, 463, 0], [1887, 464, 0],\n [1888, 465, 0], [1889, 466, 0], [1890, 467, 0], [1891, 468, 0], [1892, \n 469, 0], [1893, 470, 0], [1894, 471, 0], [1895, 472, 0], [1896, 473, 0],\n [1897, 474, 0], [1898, 475, 0], [1899, 476, 0], [1900, 477, 0], [1901, \n 478, 0], [1902, 479, 0], [1903, 480, 0], [1904, 481, 0], [1905, 482, 0],\n [1906, 483, 0], [1907, 484, 0], [1908, 485, 0], [1909, 486, 0], [1910, \n 487, 0], [1911, 488, 0], [1912, 489, 0], [1913, 490, 0], [1914, 491, 0],\n [1915, 492, 0], [1916, 493, 0], [1917, 494, 0], [1918, 495, 0], [1919, \n 496, 0], [1920, 497, 0], [1921, 498, 0], [1922, 499, 0], [1923, 500, 0],\n [1924, 501, 0], [1925, 502, 0], [1926, 503, 0], [1927, 504, 0], [1928, \n 505, 0], [1929, 506, 0], [1930, 507, 0], [1931, 508, 0], [1932, 509, 0],\n [1933, 510, 0], [1934, 511, 0], [1935, 512, 0], [1936, 513, 0], [1937, \n 514, 0], [1938, 515, 0], [1939, 516, 0], [1940, 517, 0], [1941, 518, 0],\n [1942, 519, 0], [1943, 520, 0], [1944, 521, 0], [1945, 522, 0], [1946, \n 523, 0], [1947, 524, 0], [1948, 525, 0], [1949, 526, 0], [1950, 527, 0],\n [1951, 528, 0], [1952, 529, 0], [1953, 530, 0], [1954, 531, 0], [1955, \n 532, 0], 
[1956, 533, 0], [1957, 534, 0], [1958, 535, 0], [1959, 536, 0],\n [1960, 537, 0], [1961, 538, 0], [1962, 539, 0], [1963, 540, 0], [1964, \n 541, 0], [1965, 542, 0], [1966, 543, 0], [1967, 544, 0], [1968, 545, 0],\n [1969, 546, 0], [1970, 547, 0], [1971, 548, 0], [1972, 549, 0], [1973, \n 550, 0], [1974, 551, 0], [1975, 552, 0], [1976, 553, 0], [1977, 554, 0],\n [1978, 555, 0], [1979, 556, 0], [1980, 557, 0], [1981, 558, 0], [1982, \n 559, 0], [1983, 560, 0], [1984, 561, 0], [1985, 562, 0], [1986, 563, 0],\n [1987, 564, 0], [1988, 565, 0], [1989, 566, 0], [1990, 567, 0], [1991, \n 568, 0], [1992, 569, 0], [1993, 570, 0], [1994, 571, 0], [1995, 572, 0],\n [1996, 573, 0], [1997, 574, 0], [1998, 575, 0], [1999, 576, 0], [2000, \n 577, 0], [2001, 578, 0], [2002, 579, 0], [2003, 580, 0], [2004, 581, 0],\n [2005, 582, 0], [2006, 583, 0], [2007, 584, 0], [2008, 585, 0], [1, 490,\n 0], [3, 4, 1], [491, 6, 0], [7, 5, 0], [8, 9, 0], [492, 11, 0], [11, \n 493, 0], [492, 493, 1], [494, 14, 0], [13, 15, 0], [16, 5, 0], [17, 18,\n 1], [17, 12, 0], [14, 495, 0], [494, 19, 0], [20, 21, 0], [20, 22, 1],\n [497, 23, 0], [23, 499, 1], [25, 26, 0], [25, 22, 0], [23, 27, 0], [28,\n 23, 0], [8, 21, 0], [9, 29, 0], [30, 25, 1], [31, 32, 1], [32, 33, 1],\n [34, 35, 0], [35, 36, 0], [490, 6, 1], [37, 10, 1], [10, 38, 0], [37, \n 38, 1], [39, 40, 1], [39, 41, 1], [42, 41, 1], [18, 42, 1], [492, 43, 1\n ], [44, 45, 0], [44, 505, 0], [46, 12, 0], [47, 48, 0], [49, 50, 0], [\n 31, 33, 1], [31, 51, 0], [52, 53, 1], [52, 54, 0], [506, 55, 0], [506, \n 507, 1], [57, 506, 0], [57, 58, 0], [58, 506, 0], [59, 60, 1], [508, 62,\n 0], [30, 61, 1], [63, 506, 0], [13, 64, 0], [65, 66, 1], [59, 67, 0], [\n 61, 67, 0], [68, 69, 1], [70, 69, 1], [71, 72, 1], [73, 74, 1], [37, 75,\n 1], [72, 75, 0], [37, 72, 1], [76, 77, 1], [77, 51, 0], [73, 72, 1], [\n 18, 40, 1], [492, 45, 1], [10, 74, 1], [45, 511, 1], [78, 32, 1], [79, \n 80, 0], [81, 79, 1], [34, 82, 0], [83, 84, 0], [83, 499, 0], [85, 86, 0\n ], [87, 
86, 1], [88, 89, 0], [90, 86, 1], [91, 86, 0], [86, 92, 0], [86,\n 93, 0], [94, 86, 1], [86, 95, 1], [513, 517, 0], [97, 66, 1], [42, 98, \n 0], [99, 100, 1], [42, 101, 0], [102, 42, 1], [103, 87, 0], [104, 103, \n 0], [105, 87, 0], [106, 107, 0], [108, 107, 0], [109, 106, 0], [110, \n 111, 1], [87, 112, 0], [113, 87, 0], [87, 85, 1], [110, 114, 1], [115, \n 116, 0], [117, 118, 0], [117, 119, 0], [117, 120, 1], [121, 122, 0], [\n 123, 124, 0], [125, 126, 0], [127, 119, 0], [118, 128, 0], [121, 119, 0\n ], [530, 527, 0], [125, 130, 0], [125, 123, 0], [131, 132, 0], [133, \n 123, 0], [524, 134, 0], [135, 136, 0], [123, 131, 0], [117, 128, 1], [\n 137, 521, 0], [531, 514, 0], [139, 521, 0], [140, 514, 0], [522, 141, 0\n ], [142, 523, 0], [530, 526, 0], [140, 532, 0], [142, 144, 0], [140, \n 522, 0], [145, 146, 0], [147, 523, 0], [144, 523, 0], [139, 523, 0], [\n 140, 141, 0], [528, 526, 0], [528, 148, 0], [149, 150, 0], [145, 528, 0\n ], [530, 151, 0], [524, 152, 0], [149, 525, 1], [139, 514, 0], [126, \n 120, 1], [530, 153, 0], [528, 147, 1], [528, 154, 0], [130, 120, 1], [\n 528, 155, 1], [524, 533, 0], [524, 149, 0], [154, 150, 0], [157, 110, 1\n ], [119, 158, 0], [159, 60, 0], [536, 161, 0], [115, 151, 0], [162, 134,\n 0], [115, 526, 0], [138, 87, 0], [123, 163, 0], [112, 164, 0], [112, \n 165, 0], [166, 165, 0], [167, 537, 0], [168, 104, 0], [531, 520, 0], [\n 139, 520, 0], [520, 169, 0], [168, 105, 0], [520, 170, 0], [171, 89, 0],\n [521, 172, 0], [123, 173, 0], [521, 174, 0], [37, 39, 0], [530, 175, 0],\n [530, 176, 0], [88, 530, 0], [177, 496, 1], [178, 525, 0], [179, 493, 1\n ], [180, 181, 1], [182, 180, 0], [179, 181, 0], [180, 493, 1], [183, 30,\n 0], [183, 21, 0], [538, 185, 0], [538, 89, 0], [184, 186, 0], [184, 187,\n 0], [520, 172, 0], [89, 175, 0], [185, 89, 0], [89, 188, 0], [189, 190,\n 0], [539, 172, 0], [504, 192, 0], [105, 186, 0], [105, 187, 0], [539, \n 193, 0], [187, 194, 0], [539, 540, 0], [539, 196, 0], [197, 540, 0], [\n 110, 198, 0], [197, 
539, 0], [199, 537, 0], [134, 526, 0], [200, 193, 0\n ], [4, 201, 1], [202, 86, 0], [85, 203, 0], [147, 204, 0], [147, 205, 0\n ], [123, 206, 0], [537, 207, 0], [165, 208, 0], [4, 94, 1], [4, 2, 0],\n [209, 4, 0], [119, 163, 0], [210, 3, 0], [99, 211, 0], [99, 69, 1], [\n 212, 99, 0], [213, 214, 0], [510, 215, 0], [128, 69, 1], [216, 69, 1],\n [217, 98, 0], [504, 218, 0], [177, 504, 1], [219, 209, 0], [219, 220, 0\n ], [94, 95, 1], [159, 221, 1], [34, 161, 0], [222, 221, 0], [211, 52, 1\n ], [215, 223, 1], [224, 215, 0], [225, 224, 1], [224, 223, 0], [226, 6,\n 0], [7, 3, 1], [216, 227, 1], [228, 229, 0], [227, 230, 0], [231, 53, 1\n ], [544, 545, 0], [234, 235, 1], [546, 214, 1], [233, 227, 0], [237, \n 238, 0], [212, 100, 0], [519, 239, 0], [238, 519, 0], [213, 240, 0], [\n 241, 242, 1], [70, 241, 0], [509, 213, 0], [68, 243, 0], [243, 244, 0],\n [68, 244, 0], [544, 547, 1], [245, 227, 1], [246, 208, 0], [112, 208, 0\n ], [165, 247, 0], [537, 549, 0], [537, 550, 0], [537, 551, 0], [110, \n 251, 0], [510, 252, 1], [529, 253, 1], [237, 239, 1], [254, 238, 1], [\n 69, 255, 0], [510, 225, 1], [256, 257, 0], [258, 190, 0], [258, 259, 0],\n [260, 261, 1], [554, 553, 1], [515, 263, 0], [14, 264, 1], [116, 555, 0\n ], [151, 116, 0], [111, 114, 1], [77, 111, 0], [266, 525, 0], [267, 120,\n 1], [268, 269, 0], [556, 271, 0], [556, 272, 0], [529, 273, 0], [128, \n 274, 0], [34, 275, 0], [503, 276, 0], [503, 504, 1], [177, 218, 1], [\n 277, 278, 1], [557, 558, 1], [557, 559, 1], [559, 558, 1], [277, 78, 1],\n [277, 279, 1], [78, 279, 0], [281, 282, 0], [283, 161, 1], [268, 161, 1\n ], [256, 284, 0], [515, 516, 1], [263, 516, 0], [516, 285, 0], [63, 286,\n 0], [287, 516, 0], [8, 102, 1], [8, 101, 1], [80, 288, 0], [80, 289, 0],\n [276, 560, 0], [37, 290, 0], [290, 74, 1], [512, 291, 0], [78, 292, 1],\n [199, 548, 0], [491, 293, 0], [4, 294, 0], [490, 541, 1], [491, 295, 0],\n [491, 296, 0], [295, 297, 0], [508, 161, 0], [117, 123, 0], [133, 117, \n 0], [71, 74, 1], [74, 278, 
1], [298, 515, 0], [5, 299, 0], [32, 292, 1],\n [5, 29, 1], [503, 560, 0], [300, 301, 1], [51, 300, 0], [244, 302, 1],\n [31, 302, 1], [51, 282, 1], [303, 304, 0], [305, 304, 0], [305, 259, 0],\n [306, 307, 1], [305, 308, 0], [305, 309, 0], [310, 309, 1], [306, 309, \n 1], [311, 280, 0], [280, 278, 1], [311, 32, 1], [13, 312, 1], [313, 314,\n 0], [312, 313, 1], [547, 566, 1], [245, 315, 1], [312, 316, 0], [312, \n 314, 0], [554, 546, 1], [262, 216, 1], [317, 233, 0], [318, 317, 0], [\n 231, 52, 1], [319, 567, 0], [557, 321, 0], [277, 65, 1], [322, 288, 1],\n [322, 323, 0], [277, 324, 1], [324, 325, 0], [277, 325, 0], [326, 327, \n 0], [328, 326, 1], [328, 327, 1], [326, 329, 0], [568, 329, 1], [568, \n 326, 0], [332, 78, 1], [333, 306, 0], [332, 333, 0], [332, 334, 0], [66,\n 334, 1], [330, 335, 1], [336, 66, 0], [330, 336, 1], [68, 70, 0], [509,\n 337, 1], [324, 288, 0], [338, 559, 0], [339, 559, 0], [339, 340, 1], [\n 559, 340, 1], [341, 292, 0], [557, 342, 0], [558, 343, 0], [502, 340, 1\n ], [72, 32, 1], [344, 345, 0], [346, 47, 0], [46, 47, 0], [346, 345, 0],\n [347, 328, 0], [347, 348, 1], [571, 348, 1], [347, 572, 0], [571, 570, \n 1], [14, 350, 0], [350, 573, 0], [15, 351, 1], [352, 15, 0], [15, 335, \n 1], [232, 227, 0], [565, 544, 1], [235, 567, 1], [567, 286, 0], [353, \n 519, 0], [354, 353, 0], [355, 354, 0], [354, 356, 0], [357, 358, 0], [\n 574, 359, 0], [235, 575, 0], [167, 361, 0], [528, 362, 0], [363, 344, 0\n ], [259, 364, 1], [54, 56, 0], [365, 364, 0], [231, 366, 0], [30, 367, \n 0], [61, 367, 1], [254, 368, 0], [254, 369, 0], [254, 370, 0], [99, 358,\n 0], [354, 519, 0], [571, 371, 0], [207, 372, 0], [57, 373, 0], [209, \n 374, 0], [375, 376, 0], [376, 377, 0], [16, 49, 0], [318, 377, 0], [378,\n 297, 0], [562, 379, 0], [576, 563, 0], [576, 381, 0], [577, 576, 1], [\n 244, 383, 0], [244, 306, 1], [383, 306, 1], [380, 306, 0], [252, 225, 0\n ], [220, 76, 0], [542, 384, 0], [385, 384, 0], [542, 385, 0], [386, 385,\n 0], [387, 578, 0], [332, 388, 
1], [382, 332, 1], [382, 388, 0], [579, \n 578, 0], [577, 387, 1], [144, 390, 0], [37, 49, 0], [391, 233, 0], [392,\n 310, 0], [260, 393, 0], [394, 230, 0], [395, 282, 1], [395, 244, 0], [\n 25, 396, 1], [81, 74, 0], [278, 80, 1], [81, 278, 1], [569, 570, 0], [\n 397, 552, 0], [542, 398, 0], [398, 385, 0], [399, 499, 0], [83, 399, 0],\n [498, 400, 0], [518, 239, 1], [575, 543, 0], [401, 360, 0], [580, 581, \n 0], [401, 402, 0], [403, 231, 0], [189, 360, 1], [234, 404, 0], [235, \n 404, 1], [235, 580, 0], [216, 259, 0], [405, 259, 0], [405, 318, 0], [\n 406, 230, 0], [542, 407, 0], [23, 408, 0], [577, 348, 0], [562, 564, 1],\n [582, 507, 0], [27, 410, 0], [501, 27, 0], [27, 411, 0], [411, 410, 0],\n [403, 360, 0], [412, 360, 0], [326, 413, 0], [414, 413, 0], [6, 297, 0],\n [554, 580, 1], [262, 401, 1], [499, 556, 1], [224, 229, 0], [583, 507, \n 0], [415, 307, 0], [416, 507, 0], [284, 561, 0], [543, 417, 0], [418, \n 506, 0], [220, 157, 0], [295, 419, 0], [295, 420, 0], [541, 62, 0], [52,\n 421, 0], [60, 160, 0], [535, 161, 0], [267, 282, 0], [52, 365, 0], [28,\n 27, 0], [30, 201, 1], [422, 81, 0], [119, 425, 0], [423, 425, 0], [424,\n 425, 0], [426, 428, 0], [427, 428, 0], [19, 428, 1], [45, 429, 0], [44,\n 429, 0], [505, 429, 0], [231, 431, 1], [190, 431, 1], [430, 431, 0], [\n 286, 433, 0], [432, 433, 0], [506, 433, 0], [23, 434, 0], [400, 434, 0],\n [500, 434, 0], [32, 436, 0], [435, 436, 0], [78, 436, 1], [86, 438, 1],\n [437, 438, 0], [221, 438, 0], [207, 439, 0], [516, 439, 0], [513, 439, \n 0], [181, 441, 1], [440, 441, 0], [504, 441, 1], [135, 442, 0], [109, \n 442, 0], [112, 442, 0], [113, 443, 0], [132, 443, 0], [107, 443, 0], [\n 444, 445, 0], [112, 445, 0], [109, 445, 0], [119, 447, 1], [100, 447, 1\n ], [446, 447, 0], [124, 448, 0], [125, 448, 0], [131, 448, 0], [449, \n 450, 0], [173, 450, 0], [184, 450, 0], [144, 451, 0], [140, 451, 0], [\n 514, 451, 0], [537, 585, 1], [141, 585, 0], [584, 585, 0], [522, 454, 0\n ], [144, 454, 0], [453, 454, 0], 
[199, 456, 0], [140, 456, 0], [455, \n 456, 0], [537, 456, 0], [538, 457, 0], [153, 457, 0], [176, 457, 0], [\n 524, 459, 0], [458, 459, 0], [134, 459, 0], [460, 461, 0], [150, 461, 0\n ], [149, 461, 0], [521, 463, 0], [462, 463, 0], [538, 463, 0], [110, \n 464, 0], [90, 464, 0], [165, 464, 0], [458, 465, 0], [134, 465, 0], [\n 524, 465, 0], [466, 467, 0], [110, 467, 0], [165, 467, 0], [468, 469, 0\n ], [541, 469, 0], [490, 469, 0], [263, 471, 0], [470, 471, 0], [534, \n 471, 0], [136, 472, 0], [110, 472, 0], [251, 472, 0], [226, 474, 0], [\n 473, 474, 0], [257, 474, 0], [6, 474, 1], [299, 475, 1], [3, 475, 0], [\n 210, 475, 0], [297, 476, 0], [296, 476, 0], [295, 476, 0], [313, 478, 1\n ], [477, 478, 0], [245, 478, 0], [479, 481, 0], [565, 481, 0], [480, \n 481, 0], [415, 482, 0], [56, 482, 0], [409, 482, 0], [483, 484, 0], [3,\n 484, 0], [301, 484, 0], [233, 485, 0], [392, 485, 0], [391, 485, 0], [\n 579, 488, 0], [486, 488, 0], [487, 488, 0], [270, 489, 0], [331, 489, 0\n ], [396, 489, 1], [519, 253, 0], [382, 349, 1], [349, 351, 0], [459, \n 465, 0], [549, 550, 0], [550, 551, 0], [194, 195, 0], [247, 248, 0], [2,\n 294, 0], [549, 551, 0], [54, 365, 0], [131, 265, 0], [91, 92, 0], [247,\n 249, 0], [186, 191, 0], [129, 173, 0], [96, 202, 0], [53, 320, 0], [24,\n 396, 0], [133, 156, 0], [442, 452, 0], [445, 452, 0], [247, 250, 0], [\n 187, 195, 0], [216, 236, 0], [244, 389, 0], [394, 406, 0], [442, 445, 0\n ], [442, 444, 0], [198, 472, 0], [464, 467, 0], [198, 251, 0], [112, \n 143, 0], [2, 490, 0], [5, 491, 0], [10, 492, 0], [12, 493, 0], [13, 494,\n 0], [15, 495, 0], [18, 496, 0], [20, 497, 0], [22, 498, 0], [24, 499, 0\n ], [26, 500, 0], [30, 501, 0], [32, 502, 0], [37, 503, 0], [42, 504, 0],\n [46, 505, 0], [52, 506, 0], [56, 507, 0], [61, 508, 0], [68, 509, 0], [\n 69, 510, 0], [74, 511, 0], [78, 512, 0], [86, 513, 0], [87, 514, 0], [\n 94, 515, 0], [95, 516, 0], [96, 517, 0], [99, 518, 0], [100, 519, 0], [\n 104, 520, 0], [105, 521, 0], [106, 522, 0], [107, 
523, 0], [117, 524, 0\n ], [120, 525, 0], [123, 526, 0], [124, 527, 0], [125, 528, 0], [128, \n 529, 0], [129, 530, 0], [138, 531, 0], [143, 532, 0], [156, 533, 0], [\n 157, 534, 0], [159, 535, 0], [160, 536, 0], [165, 537, 0], [184, 538, 0\n ], [191, 539, 0], [195, 540, 0], [201, 541, 0], [220, 542, 0], [231, \n 543, 0], [232, 544, 0], [233, 545, 0], [236, 546, 0], [245, 547, 0], [\n 246, 548, 0], [248, 549, 0], [249, 550, 0], [250, 551, 0], [259, 552, 0\n ], [261, 553, 0], [262, 554, 0], [265, 555, 0], [270, 556, 0], [277, \n 557, 0], [279, 558, 0], [280, 559, 0], [290, 560, 0], [301, 561, 0], [\n 305, 562, 0], [306, 563, 0], [310, 564, 0], [313, 565, 0], [315, 566, 0\n ], [320, 567, 0], [330, 568, 0], [332, 569, 0], [334, 570, 0], [336, \n 571, 0], [349, 572, 0], [351, 573, 0], [358, 574, 0], [360, 575, 0], [\n 380, 576, 0], [382, 577, 0], [383, 578, 0], [389, 579, 0], [401, 580, 0\n ], [402, 581, 0], [409, 582, 0], [415, 583, 0], [444, 584, 0], [452, \n 585, 0]])\n', (551573, 584549), False, 'from numpy import array\n')] |
import numpy as np
import tensorflow as tf
from kerod.core.box_ops import convert_to_center_coordinates
from kerod.layers.post_processing.post_processing_detr import post_processing
def test_post_processing_batch_size2():
    """Batch of two images: post_processing must pick the highest-scoring
    class per box, order detections by descending score, and rescale the
    boxes from padded-image to true-image coordinates."""
    logits = tf.constant([
        [[-100., 0, 100], [-100., 1000, -100]],
        [[4., 0, 3], [-100., 1000, -100]],
    ])
    probs = tf.nn.softmax(logits, axis=-1)

    raw_boxes = tf.constant([
        [[0, 0, 1, 1], [0, 0, 0.5, 0.5]],
        [[0, 0, 0.3, 0.3], [0, 0, 0.5, 0.5]],
    ])
    centered_boxes = convert_to_center_coordinates(raw_boxes)

    image_information = tf.constant([[200, 400], [400, 200]])
    image_padded_information = tf.constant([400, 400])
    boxes, scores, labels = post_processing(
        centered_boxes, logits, image_information, image_padded_information)

    # labels/scores are sorted by score; boxes are rescaled per image size
    expected_labels = np.array([[1, 0], [0, 1]])
    expected_scores = np.array([
        [probs[0, 0, 2], probs[0, 1, 1]],
        [probs[1, 1, 1], probs[1, 0, 2]],
    ])
    expected_boxes = np.array([
        [[0, 0, 1, 1], [0, 0, 1., 0.5]],
        [[0, 0, 0.5, 1.], [0, 0, 0.3, 0.6]],
    ])
    np.testing.assert_array_equal(expected_labels, labels.numpy())
    np.testing.assert_almost_equal(expected_boxes, boxes.numpy())
    np.testing.assert_array_equal(expected_scores, scores.numpy())
def test_post_processing_singled_element():
    """Single-image batch: same contract as the batched test, exercised
    with one element to catch shape-handling regressions."""
    logits = tf.constant([[[4., 0, 3], [-100., 1000, -100]]])
    probs = tf.nn.softmax(logits, axis=-1)

    raw_boxes = tf.constant([[[0, 0, 0.3, 0.3], [0, 0, 0.5, 0.5]]])
    centered_boxes = convert_to_center_coordinates(raw_boxes)

    image_information = tf.constant([[400, 200]])
    image_padded_information = tf.constant([400, 400])
    boxes, scores, labels = post_processing(
        centered_boxes, logits, image_information, image_padded_information)

    np.testing.assert_array_equal(np.array([[0, 1]]), labels.numpy())
    np.testing.assert_almost_equal(
        np.array([[[0, 0, 0.5, 1.], [0, 0, 0.3, 0.6]]]), boxes.numpy())
    np.testing.assert_array_equal(
        np.array([[probs[0, 1, 1], probs[0, 0, 2]]]), scores.numpy())
| [
"tensorflow.nn.softmax",
"kerod.layers.post_processing.post_processing_detr.post_processing",
"kerod.core.box_ops.convert_to_center_coordinates",
"tensorflow.constant",
"numpy.array"
] | [((237, 334), 'tensorflow.constant', 'tf.constant', (['[[[-100.0, 0, 100], [-100.0, 1000, -100]], [[4.0, 0, 3], [-100.0, 1000, -100]]]'], {}), '([[[-100.0, 0, 100], [-100.0, 1000, -100]], [[4.0, 0, 3], [-\n 100.0, 1000, -100]]])\n', (248, 334), True, 'import tensorflow as tf\n'), ((418, 448), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {'axis': '(-1)'}), '(logits, axis=-1)\n', (431, 448), True, 'import tensorflow as tf\n'), ((462, 552), 'tensorflow.constant', 'tf.constant', (['[[[0, 0, 1, 1], [0, 0, 0.5, 0.5]], [[0, 0, 0.3, 0.3], [0, 0, 0.5, 0.5]]]'], {}), '([[[0, 0, 1, 1], [0, 0, 0.5, 0.5]], [[0, 0, 0.3, 0.3], [0, 0, \n 0.5, 0.5]]])\n', (473, 552), True, 'import tensorflow as tf\n'), ((583, 619), 'kerod.core.box_ops.convert_to_center_coordinates', 'convert_to_center_coordinates', (['boxes'], {}), '(boxes)\n', (612, 619), False, 'from kerod.core.box_ops import convert_to_center_coordinates\n'), ((644, 681), 'tensorflow.constant', 'tf.constant', (['[[200, 400], [400, 200]]'], {}), '([[200, 400], [400, 200]])\n', (655, 681), True, 'import tensorflow as tf\n'), ((713, 736), 'tensorflow.constant', 'tf.constant', (['[400, 400]'], {}), '([400, 400])\n', (724, 736), True, 'import tensorflow as tf\n'), ((765, 840), 'kerod.layers.post_processing.post_processing_detr.post_processing', 'post_processing', (['boxes', 'logits', 'image_information', 'image_padded_information'], {}), '(boxes, logits, image_information, image_padded_information)\n', (780, 840), False, 'from kerod.layers.post_processing.post_processing_detr import post_processing\n'), ((907, 933), 'numpy.array', 'np.array', (['[[1, 0], [0, 1]]'], {}), '([[1, 0], [0, 1]])\n', (915, 933), True, 'import numpy as np\n'), ((956, 1034), 'numpy.array', 'np.array', (['[[probs[0, 0, 2], probs[0, 1, 1]], [probs[1, 1, 1], probs[1, 0, 2]]]'], {}), '([[probs[0, 0, 2], probs[0, 1, 1]], [probs[1, 1, 1], probs[1, 0, 2]]])\n', (964, 1034), True, 'import numpy as np\n'), ((1079, 1166), 'numpy.array', 'np.array', 
(['[[[0, 0, 1, 1], [0, 0, 1.0, 0.5]], [[0, 0, 0.5, 1.0], [0, 0, 0.3, 0.6]]]'], {}), '([[[0, 0, 1, 1], [0, 0, 1.0, 0.5]], [[0, 0, 0.5, 1.0], [0, 0, 0.3, \n 0.6]]])\n', (1087, 1166), True, 'import numpy as np\n'), ((1442, 1492), 'tensorflow.constant', 'tf.constant', (['[[[4.0, 0, 3], [-100.0, 1000, -100]]]'], {}), '([[[4.0, 0, 3], [-100.0, 1000, -100]]])\n', (1453, 1492), True, 'import tensorflow as tf\n'), ((1503, 1533), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {'axis': '(-1)'}), '(logits, axis=-1)\n', (1516, 1533), True, 'import tensorflow as tf\n'), ((1547, 1598), 'tensorflow.constant', 'tf.constant', (['[[[0, 0, 0.3, 0.3], [0, 0, 0.5, 0.5]]]'], {}), '([[[0, 0, 0.3, 0.3], [0, 0, 0.5, 0.5]]])\n', (1558, 1598), True, 'import tensorflow as tf\n'), ((1611, 1647), 'kerod.core.box_ops.convert_to_center_coordinates', 'convert_to_center_coordinates', (['boxes'], {}), '(boxes)\n', (1640, 1647), False, 'from kerod.core.box_ops import convert_to_center_coordinates\n'), ((1673, 1698), 'tensorflow.constant', 'tf.constant', (['[[400, 200]]'], {}), '([[400, 200]])\n', (1684, 1698), True, 'import tensorflow as tf\n'), ((1730, 1753), 'tensorflow.constant', 'tf.constant', (['[400, 400]'], {}), '([400, 400])\n', (1741, 1753), True, 'import tensorflow as tf\n'), ((1782, 1857), 'kerod.layers.post_processing.post_processing_detr.post_processing', 'post_processing', (['boxes', 'logits', 'image_information', 'image_padded_information'], {}), '(boxes, logits, image_information, image_padded_information)\n', (1797, 1857), False, 'from kerod.layers.post_processing.post_processing_detr import post_processing\n'), ((1924, 1942), 'numpy.array', 'np.array', (['[[0, 1]]'], {}), '([[0, 1]])\n', (1932, 1942), True, 'import numpy as np\n'), ((1965, 2009), 'numpy.array', 'np.array', (['[[probs[0, 1, 1], probs[0, 0, 2]]]'], {}), '([[probs[0, 1, 1], probs[0, 0, 2]]])\n', (1973, 2009), True, 'import numpy as np\n'), ((2031, 2079), 'numpy.array', 'np.array', (['[[[0, 0, 0.5, 1.0], [0, 0, 
0.3, 0.6]]]'], {}), '([[[0, 0, 0.5, 1.0], [0, 0, 0.3, 0.6]]])\n', (2039, 2079), True, 'import numpy as np\n')] |
import numpy as np
import xarray as xr
import warnings
# import mpl and change the backend before other mpl imports
try:
    import matplotlib as mpl
    from matplotlib.transforms import blended_transform_factory

    mpl.use("Agg")
    import matplotlib.pyplot as plt

    # NOTE(review): rebinding `mpl` to True shadows the module alias;
    # presumably it is only used as an availability flag downstream -- confirm.
    mpl = True
except ImportError:
    raise RuntimeError(
        "The `plotting` module requires `matplotlib`. Install using conda install -c conda-forge matplotlib "
    )
try:
    import gsw
except ImportError:
    # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; only a missing package should disable gsw support.
    gsw = None
import string

try:
    import cartopy
except ImportError:
    cartopy = None
def xr_violinplot(ds, ax=None, x_dim="xt_ocean", width=1, color="0.5"):
    """Wrapper of matplotlib violinplot for xarray.DataArray.

    Parameters
    ----------
    ds : xr.DataArray
        Input data.
    ax : matplotlib.axis
        Plotting axis (the default is None, which uses the current axis).
    x_dim : str
        dimension that defines the x-axis of the
        plot (the default is 'xt_ocean').
    width : float
        Scaling width of each violin (the default is 1).
    color : str
        Color of the violin (the default is '0.5').

    Returns
    -------
    dict
        The violinplot result dict (keys like 'bodies', 'cmeans', ...),
        with `color` applied to all drawn elements.
    """
    x = ds[x_dim].data.copy()
    # one 1D sample per x position, with NaNs dropped
    y = [ds.loc[{x_dim: xx}].data for xx in x]
    y = [data[~np.isnan(data)] for data in y]
    # drop positions whose sample is empty (all-NaN); violinplot cannot
    # handle empty samples
    idx = [len(dat) == 0 for dat in y]
    x = [xx for xx, ii in zip(x, idx) if not ii]
    y = [yy for yy, ii in zip(y, idx) if not ii]
    if ax is None:
        ax = plt.gca()
    vp = ax.violinplot(
        y, x, widths=width, showextrema=False, showmedians=False, showmeans=True
    )
    # IDIOM: plain loop instead of a comprehension used only for side effects
    for body in vp["bodies"]:
        body.set_facecolor(color)
    for item in ["cmaxes", "cmins", "cbars", "cmedians", "cmeans"]:
        if item in vp.keys():
            vp[item].set_edgecolor(color)
    return vp
def axis_arrow(ax, x_loc, text, arrowprops=None, **kwargs):
    """Puts an arrow pointing at `x_loc` onto (but outside of) the xaxis of
    a plot. For now only works on xaxis and on the top. Modify when necessary

    Parameters
    ----------
    ax : matplotlib.axis
        axis to plot on.
    x_loc : float
        Position of the arrow (in units of `ax` x-axis).
    text : str
        Text next to arrow.
    arrowprops : dict, optional
        Additional arguments to pass to arrowprops.
        See mpl.axes.annotate for details.
    kwargs:
        additional keyword arguments passed to ax.annotate
    """
    # BUGFIX: default was a shared mutable dict ({}), and the defaults were
    # wrapped in a redundant dict(dict(...)). Behavior is unchanged.
    ar_props = dict(fc="k", lw=1.5, ec=None)
    if arrowprops:
        ar_props.update(arrowprops)
    # x in data coordinates, y in axes coordinates, so the arrow sits just
    # above the axes regardless of the data range
    tform = blended_transform_factory(ax.transData, ax.transAxes)
    ax.annotate(
        text,
        xy=[x_loc, 1],
        xytext=(x_loc, 1.25),
        xycoords=tform,
        textcoords=tform,
        ha="center",
        va="center",
        arrowprops=ar_props,
        **kwargs,
    )
def letter_subplots(axes, start_idx=0, box_color=None, labels=None, **kwargs):
    """Write a panel letter ("a)", "b)", ...) into the upper-left corner of
    each element of `axes`.

    Parameters
    ----------
    axes : list, array_like
        List or array of matplotlib axes objects.
    start_idx : int
        Starting index into the label list (e.g. 0 is 'a').
    box_color : str, optional
        Color of the box behind each letter (the default is None).
    labels : list, optional
        List of strings used as labels. If None (default), the lowercase
        alphabet followed by the uppercase alphabet is used.
    **kwargs
        kwargs passed to matplotlib.axis.text
    """
    label_pool = list(string.ascii_letters) if labels is None else labels
    for panel, letter in zip(axes.flat, label_pool[start_idx:]):
        marker = panel.text(
            0.1,
            0.85,
            letter + ")",
            horizontalalignment="center",
            verticalalignment="center",
            transform=panel.transAxes,
            **kwargs,
        )
        if box_color:
            marker.set_bbox(dict(facecolor=box_color, alpha=0.5, edgecolor=None))
def map_util_plot(
    ax, land_color="0.7", coast_color="0.3", lake_alpha=0.5, labels=False
):
    """Add a sensible default base map (land, coastline, lakes) to a
    cartopy axis.

    Parameters
    ----------
    ax : cartopy geoaxes
        The axis to plot on (must be a cartopy axis).
    land_color : str
        Color of land fill (the default is '0.7').
    coast_color : str
        Color of coastline (the default is '0.3').
    lake_alpha : float
        Transparency of lakes (the default is 0.5).
    labels : bool
        Not implemented.
    """
    if cartopy is None:
        raise RuntimeError(
            "Mapping functions require `cartopy`. Install using conda install -c conda-forge cartopy "
        )
    # each standard feature with its individual styling options
    feature_specs = (
        ("LAND", {"color": land_color}),
        ("COASTLINE", {"edgecolor": coast_color}),
        ("LAKES", {"alpha": lake_alpha}),
    )
    for feature_name, style in feature_specs:
        ax.add_feature(getattr(cartopy.feature, feature_name), **style)
# add option for gridlines and labelling
def same_y_range(axes):
    """Give every axis in `axes` the same y-extent without moving values.

    Each axis keeps its own center: its limits are widened symmetrically
    until all axes span the largest y-range found among them.

    Parameters
    ----------
    axes : np.array
        An array of matplotlib.axes objects produced by e.g. plt.subplots()
    """
    current_lims = [axis.get_ylim() for axis in axes.flat]
    spans = [hi - lo for lo, hi in current_lims]
    widest = np.max(spans)
    for axis, (lo, hi), span in zip(axes.flat, current_lims, spans):
        # pad each end with half of the missing range
        pad = (widest - span) / 2
        axis.set_ylim((lo - pad, hi + pad))
def center_lim(ax, which="y"):
    """Make the chosen axis limits symmetric about zero, keeping the
    current maximum absolute extent.

    Parameters
    ----------
    ax : matplotlib.axis
        Axis to modify in place.
    which : str
        'x', 'y', or 'xy'/'yx' for both (the default is 'y').
    """
    if which in ["xy", "yx"]:
        center_lim(ax, "x")
        center_lim(ax, "y")
        return
    if which == "y":
        bound = abs(np.array(ax.get_ylim())).max()
        ax.set_ylim(np.array([-1, 1]) * bound)
    elif which == "x":
        bound = abs(np.array(ax.get_xlim())).max()
        ax.set_xlim(np.array([-1, 1]) * bound)
    else:
        raise ValueError("`which` is not in (`x,`y`, `xy`) found %s" % which)
def depth_logscale(ax, yscale=400, ticks=None):
    """Apply a symlog scale to the y-axis (linear above `yscale`), set
    depth tick labels, and invert the axis so depth increases downward.

    Parameters
    ----------
    ax : matplotlib.axis
        Axis to modify in place.
    yscale : float
        Linear threshold of the symlog scale (the default is 400).
    ticks : list, optional
        Tick positions; defaults to a standard set of ocean depths.
    """
    tick_values = [0, 100, 250, 500, 1000, 2500, 5000] if ticks is None else ticks
    # NOTE: `linthreshy` is the keyword expected by older matplotlib
    ax.set_yscale("symlog", linthreshy=yscale)
    ax.set_yticks(tick_values)
    ax.set_yticklabels([str(tick) for tick in tick_values])
    ax.invert_yaxis()
def shaded_line_plot(
    da,
    dim,
    ax=None,
    horizontal=True,
    spreads=[1, 3],
    alphas=[0.25, 0.4],
    spread_style="std",
    line_kwargs=dict(),
    fill_kwargs=dict(),
    **kwargs,
):
    """Produces a line plot with shaded intervals based on the spread of `da` in `dim`.

    Parameters
    ----------
    da : xr.DataArray
        The input data. Needs to be 2 dimensional, so that when `dim` is reduced, it is a line plot.
    dim : str
        Dimension of `da` which is used to calculate spread
    ax : matplotlib.axes
        Matplotlib axes object to plot on (the default is plt.gca()).
    horizontal : bool
        Determines if the plot is horizontal or vertical (e.g. x is plotted
        on the y-axis).
    spreads : np.array, optional
        Values specifying the 'spread-values', dependent on `spread_style`.
        Defaults to [1, 3].
    alphas : np.array, optional
        Transparency values of the shaded ranges. Defaults to [0.25, 0.4].
    spread_style : str
        Metric used to define spread on `dim`.
        Options:
        'std': Calculates standard deviation along `dim`; shading is centered on the mean.
        'quantile': Calculates quantile ranges. An input of `spread=[0.2,0.5]` would show an inner shading for
            the 40th-60th percentile, and an outer shading for the 25th-75th percentile, centered on the 50th quantile (~median).
            Must be within [0,1].
    line_kwargs : dict
        optional parameters for line plot.
    fill_kwargs : dict
        optional parameters for std fill plot.
    **kwargs
        Keyword arguments passed to both line plot and fill_between.

    Returns
    -------
    (ll, ff)
        The line artists and the list of fill (band) artists.
    """
    # check input: promote scalar spreads/alphas to single-element lists
    if isinstance(spreads, float) or isinstance(spreads, int):
        spreads = [spreads]
    if isinstance(alphas, float):
        alphas = [alphas]
    # NOTE(review): this looks like it was meant to wrap a scalar (string)
    # `dim` in a list; `isinstance(dim, float)` can never be the intended
    # test -- confirm.
    if isinstance(dim, float):
        dim = [dim]
    # set axis
    if not ax:
        ax = plt.gca()
    # Option to plot a straight line when the dim is not present (TODO)
    # check if the data is 2 dimensional (1D after reducing over `dim`)
    dims = da.mean(dim).dims
    if len(dims) != 1:
        raise ValueError(
            f"`da` must be 1 dimensional after reducing over {dim}. Found {dims}"
        )
    # assemble plot elements
    xdim = dims[0]
    x = da[xdim]
    # define the line plot values (central tendency along `dim`)
    if spread_style == "std":
        y = da.mean(dim)
    elif spread_style in ["quantile", "percentile"]:
        y = da.quantile(0.5, dim)
    else:
        raise ValueError(
            f"Got unknown option ['{spread_style}'] for `spread_style`. Supported options are : ['std', 'quantile']"
        )
    # set line kwargs
    line_defaults = {}
    line_defaults.update(line_kwargs)
    if horizontal:
        ll = ax.plot(x, y, **line_defaults)
    else:
        ll = ax.plot(y, x, **line_defaults)
    # now loop over the spreads; the fill color defaults to the line color
    fill_defaults = {"facecolor": ll[-1].get_color(), "edgecolor": "none"}
    # Apply defaults but respect input
    fill_defaults.update(fill_kwargs)
    ff = []
    for spread, alpha in zip(
        (spreads), (alphas)
    ):  # each (spread, alpha) pair produces one shaded band around the line
        f_kwargs = {k: v for k, v in fill_defaults.items()}
        f_kwargs["alpha"] = alpha
        if spread_style == "std":
            y_std = da.std(dim)  # recomputed per spread; could be hoisted out of the loop
            # NOTE(review): dividing by `spread` means larger spread values
            # give NARROWER bands (std/2, std/6 for the defaults), which
            # contradicts the "multiples of std" description -- confirm the
            # intended behavior before relying on it.
            y_lower = y - (y_std / (2 * spread))
            y_upper = y + (y_std / (2 * spread))
        elif spread_style in ["quantile", "percentile"]:
            y_lower = da.quantile(0.5 - (spread / 2), dim)
            y_upper = da.quantile(0.5 + (spread / 2), dim)
        if horizontal:
            ff.append(ax.fill_between(x.data, y_lower.data, y_upper.data, **f_kwargs))
        else:
            ff.append(ax.fill_betweenx(x.data, y_lower.data, y_upper.data, **f_kwargs))
    return ll, ff
def plot_line_shaded_std(
    x, y, std_y, horizontal=True, ax=None, line_kwargs=dict(), fill_kwargs=dict()
):
    """Plot wrapper to draw line for y and shaded patch according to std_y.

    The shading represents one std on each side of the line.
    Deprecated: use `shaded_line_plot` instead.

    Parameters
    ----------
    x : numpy.array or xr.DataArray
        Coordinate.
    y : numpy.array or xr.DataArray
        line data.
    std_y : numpy.array or xr.DataArray
        std corresponding to y.
    horizontal : bool
        Determines if the plot is horizontal or vertical (e.g. x is plotted
        on the y-axis).
    ax : matplotlib.axes
        Matplotlib axes object to plot on (the default is plt.gca()).
    line_kwargs : dict
        optional parameters for line plot.
    fill_kwargs : dict
        optional parameters for std fill plot.

    Returns
    -------
    (ll, ff)
        Tuple of line and patch objects.
    """
    warnings.warn(
        "This is an outdated function. Use `shaded_line_plot` instead",
        DeprecationWarning,
    )
    if not ax:
        ax = plt.gca()
    # user kwargs win over (empty) line defaults
    line_opts = dict(line_kwargs)
    plot_args = (x, y) if horizontal else (y, x)
    ll = ax.plot(*plot_args, **line_opts)
    # fill defaults derive the color from the line just drawn
    fill_opts = {
        "facecolor": ll[-1].get_color(),
        "alpha": 0.35,
        "edgecolor": "none",
    }
    fill_opts.update(fill_kwargs)
    filler = ax.fill_between if horizontal else ax.fill_betweenx
    ff = filler(x, y - std_y, y + std_y, **fill_opts)
    return ll, ff
def box_plot(box, ax=None, split_detection=True, **kwargs):
    """plots box despite coordinate discontinuities.

    INPUT
    -----
    box: np.array
        Defines the box in the coordinates of the current axis.
        Describing the box corners [x1, x2, y1, y2]
    ax: matplotlib.axis
        axis for plotting. Defaults to plt.gca()
    split_detection: bool
        If truthy (default), a corner pair given in descending order
        (e.g. x1 > x2) is treated as a box wrapping around the axis and
        is drawn as two open segments at the axis limits.
        (BUGFIX: the old default was the string "True"; any string --
        including "False" -- was truthy.)
    kwargs: optional
        anything that can be passed to plot can be put as kwarg
    """
    if len(box) != 4:
        raise RuntimeError(
            "'box' must be a 4 element np.array, \
            describing the box corners [x1, x2, y1, y2]"
        )
    if ax is None:
        ax = plt.gca()
    # BUGFIX: limits are now read from the axis being drawn on; previously
    # they were always taken from plt.gca(), which is wrong whenever an
    # explicit `ax` is passed.
    xlim = ax.get_xlim()
    ylim = ax.get_ylim()
    x_split = False
    y_split = False
    if split_detection:
        # descending corner coordinates signal a wrap-around (split) box
        if np.diff([box[0], box[1]]) < 0:
            x_split = True
        if np.diff([box[2], box[3]]) < 0:
            y_split = True
    if y_split and not x_split:
        # box wraps in y only: draw two open rectangles at top and bottom
        ax.plot(
            [box[0], box[0], box[1], box[1], box[0]],
            [ylim[1], box[2], box[2], ylim[1], ylim[1]],
            **kwargs,
        )
        ax.plot(
            [box[0], box[0], box[1], box[1], box[0]],
            [ylim[0], box[3], box[3], ylim[0], ylim[0]],
            **kwargs,
        )
    elif x_split and not y_split:
        # box wraps in x only: draw two open rectangles at left and right
        ax.plot(
            [xlim[1], box[0], box[0], xlim[1], xlim[1]],
            [box[2], box[2], box[3], box[3], box[2]],
            **kwargs,
        )
        ax.plot(
            [xlim[0], box[1], box[1], xlim[0], xlim[0]],
            [box[2], box[2], box[3], box[3], box[2]],
            **kwargs,
        )
    elif x_split and y_split:
        # box wraps in both directions: draw the four corner pieces
        ax.plot([xlim[1], box[0], box[0]], [box[2], box[2], ylim[1]], **kwargs)
        ax.plot([xlim[0], box[1], box[1]], [box[2], box[2], ylim[1]], **kwargs)
        ax.plot([xlim[1], box[0], box[0]], [box[3], box[3], ylim[0]], **kwargs)
        ax.plot([xlim[0], box[1], box[1]], [box[3], box[3], ylim[0]], **kwargs)
    elif not x_split and not y_split:
        # simple closed rectangle
        ax.plot(
            [box[0], box[0], box[1], box[1], box[0]],
            [box[2], box[3], box[3], box[2], box[2]],
            **kwargs,
        )
def dict2box(di, xdim="lon", ydim="lat"):
    """Turn a selection dict of slices into a corner array
    [x_start, x_stop, y_start, y_stop]."""
    x_slice = di[xdim]
    y_slice = di[ydim]
    return np.array([x_slice.start, x_slice.stop, y_slice.start, y_slice.stop])
def box_plot_dict(di, xdim="lon", ydim="lat", **kwargs):
    """plot box from xarray selection dict e.g.
    `{'xdim':slice(a, b), 'ydim':slice(c,d), ...}`"""
    # convert the dict of slices to corner coordinates, then draw
    corners = dict2box(di, xdim=xdim, ydim=ydim)
    box_plot(corners, **kwargs)
def draw_dens_contours_teos10(
    sigma="sigma0",
    add_labels=True,
    ax=None,
    density_grid=20,
    dens_interval=1.0,
    salt_on_x=True,
    slim=None,
    tlim=None,
    contour_kwargs={},
    c_label_kwargs={},
    **kwargs,
):
    """draws density contours on the current plot.

    Assumes that the salinity and temperature values are given as SA and CT
    (TEOS-10 absolute salinity / conservative temperature).

    Parameters
    ----------
    sigma : str
        Which potential density to contour; one of 'sigma0'...'sigma4'.
    add_labels : bool
        If True (default), annotate the plot with the sigma label.
    ax : matplotlib.axes, optional
        Axis to draw on (defaults to plt.gca()).
    density_grid : int
        Number of grid points per axis used to evaluate the density field.
    dens_interval : float
        Contour interval in density units.
    salt_on_x : bool
        If True (default), salinity is assumed to be on the x-axis.
    slim, tlim : sequence of 2 floats, optional
        Salinity / temperature limits. If None, taken from the axis.
    contour_kwargs, c_label_kwargs : dict
        Extra arguments for `ax.contour` / `ax.clabel`.
    **kwargs
        Passed to both contour and clabel.
    """
    if gsw is None:
        raise RuntimeError(
            "`gsw` is not available. Install with `conda install -c conda-forge gsw`"
        )
    if ax is None:
        ax = plt.gca()
    if sigma not in ["sigma%i" % s for s in range(5)]:
        raise ValueError(
            "Sigma function has to be one of `sigma0`...`sigma4` \
            is: %s"
            % (sigma)
        )
    # get salt (default: xaxis) and temp (default: yaxis) limits
    # BUGFIX: limits are only read from the axis when they were NOT given
    # (the `salt_on_x` conditions used to be inverted, overwriting user
    # limits and crashing on the defaults).
    if salt_on_x:
        if slim is None:
            slim = ax.get_xlim()
        if tlim is None:
            tlim = ax.get_ylim()
    else:
        if not tlim:
            tlim = ax.get_xlim()
        if not slim:
            slim = ax.get_ylim()
    # BUGFIX: np.linspace is called with explicit endpoints; the previous
    # `np.linspace(*(slim + [density_grid]))` raised TypeError for the
    # tuple limits returned by get_xlim/get_ylim.
    x = np.linspace(slim[0], slim[1], density_grid)
    y = np.linspace(tlim[0], tlim[1], density_grid)
    if salt_on_x:
        ss, tt = np.meshgrid(x, y)
    else:
        # NOTE(review): in the salt-on-y case the (x, y) ordering passed to
        # `contour` below looks suspect -- confirm against actual use.
        tt, ss = np.meshgrid(x, y)
    sigma_func = getattr(gsw, sigma)
    sig = sigma_func(ss, tt)
    levels = np.arange(np.floor(sig.min()), np.ceil(sig.max()), dens_interval)
    c_kwarg_defaults = dict(
        levels=levels, colors="0.4", linestyles="--", linewidths=0.5
    )
    c_kwarg_defaults.update(kwargs)
    c_kwarg_defaults.update(contour_kwargs)
    c_label_kwarg_defaults = dict(fmt="%.02f")
    c_label_kwarg_defaults.update(kwargs)
    c_label_kwarg_defaults.update(c_label_kwargs)
    ch = ax.contour(x, y, sig, **c_kwarg_defaults)
    ax.clabel(ch, **c_label_kwarg_defaults)
    if add_labels:
        plt.text(
            0.05,
            0.05,
            "$\sigma_{%s}$" % (sigma[-1]),
            fontsize=14,
            verticalalignment="center",
            horizontalalignment="center",
            transform=ax.transAxes,
            color=c_kwarg_defaults["colors"],
        )
def tsdiagram(
    salt,
    temp,
    color=None,
    size=None,
    lon=None,
    lat=None,
    pressure=None,
    convert_teos10=True,
    ts_kwargs={},
    ax=None,
    fig=None,
    draw_density_contours=True,
    draw_cbar=True,
    add_labels=True,
    **kwargs,
):
    """Scatter a temperature-salinity (T-S) diagram, optionally with
    density contours overlaid.

    Parameters
    ----------
    salt, temp : array_like
        Salinity and temperature values. When `convert_teos10` is True they
        are interpreted as practical salinity / potential temperature and
        converted to TEOS-10 variables.
    color : str, tuple, list, np.ndarray or xr.DataArray, optional
        Scatter color; array-like input triggers a colorbar if `draw_cbar`.
    size : array_like, optional
        Scatter marker sizes.
    lon, lat, pressure : array_like, optional
        Sample positions/pressures; required when `convert_teos10` is True.
    convert_teos10 : bool
        If True (default), convert inputs with `gsw`.
    ts_kwargs : dict
        Passed on to `draw_dens_contours_teos10`.
    ax, fig : matplotlib axis / figure, optional
        Default to plt.gca() / plt.gcf().
    draw_density_contours : bool
        If True (default), overlay potential density contours.
    draw_cbar : bool
        If True (default), add a colorbar for array-valued `color`.
    add_labels : bool
        If True (default), label the x/y axes.
    **kwargs
        Passed to `ax.scatter`.

    Returns
    -------
    The collection returned by `ax.scatter`.
    """
    if ax is None:
        ax = plt.gca()
    if fig is None:
        fig = plt.gcf()
    if convert_teos10:
        temp_label = "Conservative Temperature [$^{\circ}C$]"
        salt_label = "Absolute Salinity [$g/kg$]"
        if any([a is None for a in [lon, lat, pressure]]):
            raise ValueError(
                "when converting to teos10 variables, \
                input for lon, lat and pressure is needed"
            )
        else:
            # practical -> absolute salinity, then potential -> conservative
            # temperature (CT_from_pt takes the absolute salinity computed
            # on the previous line)
            salt = gsw.SA_from_SP(salt, pressure, lon, lat)
            temp = gsw.CT_from_pt(salt, temp)
    else:
        temp_label = "Potential Temperature [$^{\circ}C$]"
        salt_label = "Practical Salinity [$g/kg$]"
    if add_labels:
        ax.set_xlabel(salt_label)
        ax.set_ylabel(temp_label)
    scatter_kw_defaults = dict(s=size, c=color)
    scatter_kw_defaults.update(kwargs)
    s = ax.scatter(salt, temp, **scatter_kw_defaults)
    if draw_density_contours:
        draw_dens_contours_teos10(ax=ax, **ts_kwargs)
    if draw_cbar and color is not None:
        # only array-like colors get a colorbar; plain color specs do not
        if isinstance(color, str) or isinstance(color, tuple):
            pass
        elif (
            isinstance(color, list)
            or isinstance(color, np.ndarray)
            or isinstance(color, xr.DataArray)
        ):
            fig.colorbar(s, ax=ax)
        else:
            raise RuntimeError("`color` not recognized. %s" % type(color))
    return s
def linear_piecewise_scale(
    cut, scale, ax=None, axis="y", scaled_half="upper", add_cut_line=False
):
    """This function sets a piecewise linear scaling for a given axis to highlight e.g. processes in the upper ocean vs deep ocean.

    Parameters
    ----------
    cut : float
        value along the chosen axis used as transition between the two linear scalings.
    scale : float
        scaling coefficient for the chosen axis portion (determined by `axis` and `scaled_half`).
        Must be positive. 0 means no rescaling.
    ax : matplotlib.axis, optional
        The plot axis object. Defaults to current matplotlib axis
    axis : str, optional
        Which axis of the plot to act on.
        * 'y' (Default)
        * 'x'
    scaled_half: str, optional
        Determines which half of the axis is rescaled.
        * 'upper' (default)
        * 'lower'
    add_cut_line : bool
        Currently unused (kept for interface compatibility).

    Returns
    -------
    ax_scaled : matplotlib.axis
    """
    if ax is None:
        ax = plt.gca()
    if scale < 0:
        raise ValueError(f"`Scale can not be negative. Got value of {scale}")
    if scale == 0:
        # do nothing
        return ax
    else:
        # The transform applied to the selected half is
        #   f(x) = x + scale * (x - cut)   (linear, f(cut) = cut)
        # BUGFIX: `inverse` used to be an exact copy of `forward`, but
        # matplotlib's 'function' scale needs the true mathematical inverse
        #   g(y) = (y + scale * cut) / (1 + scale)
        # otherwise tick/limit placement is wrong.
        if scaled_half == "upper":

            def forward(x):
                return np.piecewise(
                    x,
                    [x <= cut, x > cut],
                    [lambda x: x + (scale * (x - cut)), lambda x: x],
                )

            def inverse(y):
                return np.piecewise(
                    y,
                    [y <= cut, y > cut],
                    [lambda y: (y + scale * cut) / (1 + scale), lambda y: y],
                )

        elif scaled_half == "lower":

            def forward(x):
                return np.piecewise(
                    x,
                    [x >= cut, x < cut],
                    [lambda x: x + (scale * (x - cut)), lambda x: x],
                )

            def inverse(y):
                return np.piecewise(
                    y,
                    [y >= cut, y < cut],
                    [lambda y: (y + scale * cut) / (1 + scale), lambda y: y],
                )

        else:
            raise ValueError(
                f"`scaled_half` value not recognized. Must be ['upper', 'lower']. Got {scaled_half}"
            )
        if axis == "y":
            # preserve the visible limits across the scale change
            axlim = ax.get_ylim()
            ax.set_yscale("function", functions=(forward, inverse))
            ax.set_ylim(axlim)
        elif axis == "x":
            axlim = ax.get_xlim()
            ax.set_xscale("function", functions=(forward, inverse))
            ax.set_xlim(axlim)
        else:
            raise ValueError(
                f"`axis` value not recognized. Must be ['x', 'y']. Got {axis}"
            )
        return ax
| [
"numpy.meshgrid",
"gsw.SA_from_SP",
"numpy.isnan",
"gsw.CT_from_pt",
"matplotlib.pyplot.text",
"numpy.max",
"matplotlib.use",
"numpy.array",
"numpy.diff",
"matplotlib.transforms.blended_transform_factory",
"matplotlib.pyplot.gca",
"numpy.linspace",
"warnings.warn",
"matplotlib.pyplot.gcf",... | [((220, 234), 'matplotlib.use', 'mpl.use', (['"""Agg"""'], {}), "('Agg')\n", (227, 234), True, 'import matplotlib as mpl\n'), ((2541, 2594), 'matplotlib.transforms.blended_transform_factory', 'blended_transform_factory', (['ax.transData', 'ax.transAxes'], {}), '(ax.transData, ax.transAxes)\n', (2566, 2594), False, 'from matplotlib.transforms import blended_transform_factory\n'), ((5432, 5447), 'numpy.max', 'np.max', (['yranges'], {}), '(yranges)\n', (5438, 5447), True, 'import numpy as np\n'), ((11536, 11637), 'warnings.warn', 'warnings.warn', (['"""This is an outdated function. Use `shaded_line_plot` instead"""', 'DeprecationWarning'], {}), "('This is an outdated function. Use `shaded_line_plot` instead',\n DeprecationWarning)\n", (11549, 11637), False, 'import warnings\n'), ((14579, 14651), 'numpy.array', 'np.array', (['[di[xdim].start, di[xdim].stop, di[ydim].start, di[ydim].stop]'], {}), '([di[xdim].start, di[xdim].stop, di[ydim].start, di[ydim].stop])\n', (14587, 14651), True, 'import numpy as np\n'), ((1503, 1512), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1510, 1512), True, 'import matplotlib.pyplot as plt\n'), ((8634, 8643), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (8641, 8643), True, 'import matplotlib.pyplot as plt\n'), ((11750, 11759), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (11757, 11759), True, 'import matplotlib.pyplot as plt\n'), ((13090, 13099), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (13097, 13099), True, 'import matplotlib.pyplot as plt\n'), ((15505, 15514), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (15512, 15514), True, 'import matplotlib.pyplot as plt\n'), ((15953, 15990), 'numpy.linspace', 'np.linspace', (['*(slim + [density_grid])'], {}), '(*(slim + [density_grid]))\n', (15964, 15990), True, 'import numpy as np\n'), ((16003, 16040), 'numpy.linspace', 'np.linspace', (['*(tlim + [density_grid])'], {}), '(*(tlim + [density_grid]))\n', (16014, 16040), 
True, 'import numpy as np\n'), ((16171, 16208), 'numpy.linspace', 'np.linspace', (['*(slim + [density_grid])'], {}), '(*(slim + [density_grid]))\n', (16182, 16208), True, 'import numpy as np\n'), ((16221, 16258), 'numpy.linspace', 'np.linspace', (['*(tlim + [density_grid])'], {}), '(*(tlim + [density_grid]))\n', (16232, 16258), True, 'import numpy as np\n'), ((16295, 16312), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (16306, 16312), True, 'import numpy as np\n'), ((16340, 16357), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (16351, 16357), True, 'import numpy as np\n'), ((16955, 17143), 'matplotlib.pyplot.text', 'plt.text', (['(0.05)', '(0.05)', "('$\\\\sigma_{%s}$' % sigma[-1])"], {'fontsize': '(14)', 'verticalalignment': '"""center"""', 'horizontalalignment': '"""center"""', 'transform': 'ax.transAxes', 'color': "c_kwarg_defaults['colors']"}), "(0.05, 0.05, '$\\\\sigma_{%s}$' % sigma[-1], fontsize=14,\n verticalalignment='center', horizontalalignment='center', transform=ax.\n transAxes, color=c_kwarg_defaults['colors'])\n", (16963, 17143), True, 'import matplotlib.pyplot as plt\n'), ((17550, 17559), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (17557, 17559), True, 'import matplotlib.pyplot as plt\n'), ((17595, 17604), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (17602, 17604), True, 'import matplotlib.pyplot as plt\n'), ((20088, 20097), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (20095, 20097), True, 'import matplotlib.pyplot as plt\n'), ((5675, 5688), 'numpy.array', 'np.array', (['lim'], {}), '(lim)\n', (5683, 5688), True, 'import numpy as np\n'), ((12964, 12973), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (12971, 12973), True, 'import matplotlib.pyplot as plt\n'), ((12996, 13005), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (13003, 13005), True, 'import matplotlib.pyplot as plt\n'), ((13136, 13161), 'numpy.diff', 'np.diff', (['[box[0], box[1]]'], {}), '([box[0], 
box[1]])\n', (13143, 13161), True, 'import numpy as np\n'), ((13206, 13231), 'numpy.diff', 'np.diff', (['[box[2], box[3]]'], {}), '([box[2], box[3]])\n', (13213, 13231), True, 'import numpy as np\n'), ((18005, 18045), 'gsw.SA_from_SP', 'gsw.SA_from_SP', (['salt', 'pressure', 'lon', 'lat'], {}), '(salt, pressure, lon, lat)\n', (18019, 18045), False, 'import gsw\n'), ((18065, 18091), 'gsw.CT_from_pt', 'gsw.CT_from_pt', (['salt', 'temp'], {}), '(salt, temp)\n', (18079, 18091), False, 'import gsw\n'), ((1275, 1289), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (1283, 1289), True, 'import numpy as np\n'), ((5962, 5979), 'numpy.array', 'np.array', (['[-1, 1]'], {}), '([-1, 1])\n', (5970, 5979), True, 'import numpy as np\n'), ((20351, 20439), 'numpy.piecewise', 'np.piecewise', (['x', '[x <= cut, x > cut]', '[lambda x: x + scale * (x - cut), lambda x: x]'], {}), '(x, [x <= cut, x > cut], [lambda x: x + scale * (x - cut), lambda\n x: x])\n', (20363, 20439), True, 'import numpy as np\n'), ((20569, 20657), 'numpy.piecewise', 'np.piecewise', (['x', '[x <= cut, x > cut]', '[lambda x: x + scale * (x - cut), lambda x: x]'], {}), '(x, [x <= cut, x > cut], [lambda x: x + scale * (x - cut), lambda\n x: x])\n', (20581, 20657), True, 'import numpy as np\n'), ((5691, 5708), 'numpy.array', 'np.array', (['[-1, 1]'], {}), '([-1, 1])\n', (5699, 5708), True, 'import numpy as np\n'), ((6079, 6096), 'numpy.array', 'np.array', (['[-1, 1]'], {}), '([-1, 1])\n', (6087, 6096), True, 'import numpy as np\n'), ((20825, 20913), 'numpy.piecewise', 'np.piecewise', (['x', '[x >= cut, x < cut]', '[lambda x: x + scale * (x - cut), lambda x: x]'], {}), '(x, [x >= cut, x < cut], [lambda x: x + scale * (x - cut), lambda\n x: x])\n', (20837, 20913), True, 'import numpy as np\n'), ((21043, 21131), 'numpy.piecewise', 'np.piecewise', (['x', '[x >= cut, x < cut]', '[lambda x: x + scale * (x - cut), lambda x: x]'], {}), '(x, [x >= cut, x < cut], [lambda x: x + scale * (x - cut), lambda\n x: x])\n', 
(21055, 21131), True, 'import numpy as np\n')] |
import numpy as np
import pytest
from numpy.testing import (
assert_,
assert_raises, assert_almost_equal, assert_allclose)
import pyccl as ccl
from pyccl import CCLWarning
def pk1d(k):
    """Toy power spectrum, proportional to 1/k and normalised at k=0.1."""
    scaled_k = k / 0.1
    return scaled_k ** (-1)
def grw(a):
    """Toy growth factor: linear in the scale factor."""
    return a
def pk2d(k, a):
    """Separable toy power spectrum: P(k, a) = pk1d(k) * grw(a)."""
    return grw(a) * pk1d(k)
def lpk2d(k, a):
    """Natural log of the toy power spectrum `pk2d`."""
    power = pk2d(k, a)
    return np.log(power)
def all_finite(vals):
    """
    Returns True if all elements are finite (i.e. not NaN or inf).
    """
    return np.isfinite(vals).all()
def test_pk2d_init():
    """
    Test initialization of Pk2D objects
    """
    cosmo = ccl.Cosmology(
        Omega_c=0.27, Omega_b=0.045, h=0.67, A_s=1e-10, n_s=0.96)

    # No input at all must fail
    assert_raises(ValueError, ccl.Pk2D)

    # A callable with the wrong signature (k-only) must fail, while the
    # proper (k, a) signature is accepted
    assert_raises(ValueError, ccl.Pk2D, pkfunc=pk1d)
    ccl.Pk2D(pkfunc=lpk2d, cosmo=cosmo)

    # A callable without a cosmology must fail
    assert_raises(ValueError, ccl.Pk2D, pkfunc=lpk2d)

    # Mismatched array sizes must fail
    log_k = -4. + 6 * np.arange(100) / 99.
    a_vals = 0.05 + 0.95 * np.arange(100) / 99.
    pk_vals = np.zeros([len(a_vals), len(log_k)])
    assert_raises(
        ValueError, ccl.Pk2D, a_arr=a_vals, lk_arr=log_k, pk_arr=pk_vals[1:])

    # Scale factor must be monotonically increasing
    assert_raises(
        ValueError, ccl.Pk2D, a_arr=a_vals[::-1], lk_arr=log_k, pk_arr=pk_vals)
def test_pk2d_smoke():
    """Make sure it works once."""
    cosmo = ccl.Cosmology(
        Omega_c=0.27, Omega_b=0.045, h=0.67, A_s=1e-10, n_s=0.96)
    log_k = -4. + 6 * np.arange(100) / 99.
    a_vals = 0.05 + 0.95 * np.arange(100) / 99.
    pk_vals = np.zeros([len(a_vals), len(log_k)])
    psp = ccl.Pk2D(a_arr=a_vals, lk_arr=log_k, pk_arr=pk_vals)
    value = psp.eval(1E-2, 0.5, cosmo)
    assert_(not np.isnan(value))
@pytest.mark.parametrize('model', ['bbks', 'eisenstein_hu',
                                   'eisenstein_hu_nowiggles'])
def test_pk2d_from_model(model):
    """Pk2D built from an analytic transfer-function model must match
    the linear power of a cosmology using that model."""
    shared = dict(Omega_c=0.27, Omega_b=0.045, h=0.67, sigma8=0.8, n_s=0.96)
    cosmo_fixed = ccl.Cosmology(**shared)
    cosmo = ccl.Cosmology(transfer_function=model, **shared)
    pk = ccl.Pk2D.pk_from_model(cosmo_fixed, model=model)
    ks = np.geomspace(1E-3, 1E1, 128)
    for redshift in [0., 0.5, 2.]:
        a = 1. / (1 + redshift)
        approx = pk.eval(ks, a, cosmo)
        exact = ccl.linear_matter_power(cosmo, ks, a)
        assert np.amax(np.fabs(approx / exact - 1)) < 1E-10
def test_pk2d_from_model_emu():
pars = [0.3643, 0.071075, 0.55, 0.8333, 0.9167, -0.7667, 0.1944]
cosmo_fixed = ccl.Cosmology(Omega_c=pars[0],
Omega_b=pars[1],
h=pars[2],
sigma8=pars[3],
n_s=pars[4],
w0=pars[5],
wa=pars[6],
Neff=3.04,
Omega_g=0,
Omega_k=0,
transfer_function='bbks')
cosmo = ccl.Cosmology(Omega_c=pars[0],
Omega_b=pars[1],
h=pars[2],
sigma8=pars[3],
n_s=pars[4],
w0=pars[5],
wa=pars[6],
Neff=3.04,
Omega_g=0,
Omega_k=0,
transfer_function='bbks',
matter_power_spectrum='emu')
pk = ccl.Pk2D.pk_from_model(cosmo_fixed, model='emu')
ks = np.geomspace(1E-3, 1E1, 128)
for z in [0., 0.5, 2.]:
a = 1./(1+z)
pk1 = pk.eval(ks, a, cosmo)
pk2 = ccl.nonlin_matter_power(cosmo, ks, a)
maxdiff = np.amax(np.fabs(pk1/pk2-1))
assert maxdiff < 1E-10
@pytest.mark.parametrize('model', ['bbks', 'eisenstein_hu'])
def test_pk2d_from_model_fails(model):
cosmo = ccl.Cosmology(
Omega_c=0.27, Omega_b=0.045, h=0.67, A_s=1E-10, n_s=0.96,
transfer_function='boltzmann_class')
assert_raises(ccl.CCLError, ccl.Pk2D.pk_from_model,
cosmo, model=model)
def test_pk2d_from_model_raises():
cosmo = ccl.CosmologyVanillaLCDM()
assert_raises(ValueError, ccl.Pk2D.pk_from_model,
cosmo, model='bbkss')
def test_pk2d_function():
"""
Test evaluation of Pk2D objects
"""
cosmo = ccl.Cosmology(
Omega_c=0.27, Omega_b=0.045, h=0.67, A_s=1e-10, n_s=0.96)
psp = ccl.Pk2D(pkfunc=lpk2d, cosmo=cosmo)
# Test at single point
ktest = 1E-2
atest = 0.5
ptrue = pk2d(ktest, atest)
phere = psp.eval(ktest, atest, cosmo)
assert_almost_equal(np.fabs(phere/ptrue), 1., 6)
dphere = psp.eval_dlogpk_dlogk(ktest, atest, cosmo)
assert_almost_equal(dphere, -1., 6)
ktest = 1
atest = 0.5
ptrue = pk2d(ktest, atest)
phere = psp.eval(ktest, atest, cosmo)
assert_almost_equal(np.fabs(phere/ptrue), 1., 6)
dphere = psp.eval_dlogpk_dlogk(ktest, atest, cosmo)
assert_almost_equal(dphere, -1., 6)
# Test at array of points
ktest = np.logspace(-3, 1, 10)
ptrue = pk2d(ktest, atest)
phere = psp.eval(ktest, atest, cosmo)
assert_allclose(phere, ptrue, rtol=1E-6)
dphere = psp.eval_dlogpk_dlogk(ktest, atest, cosmo)
assert_allclose(dphere, -1.*np.ones_like(dphere), 6)
# Test input is not logarithmic
psp = ccl.Pk2D(pkfunc=pk2d, is_logp=False, cosmo=cosmo)
phere = psp.eval(ktest, atest, cosmo)
assert_allclose(phere, ptrue, rtol=1E-6)
dphere = psp.eval_dlogpk_dlogk(ktest, atest, cosmo)
assert_allclose(dphere, -1.*np.ones_like(dphere), 6)
# Test input is arrays
karr = np.logspace(-4, 2, 1000)
aarr = np.linspace(0.01, 1., 100)
parr = np.array([pk2d(karr, a) for a in aarr])
psp = ccl.Pk2D(
a_arr=aarr, lk_arr=np.log(karr), pk_arr=parr, is_logp=False)
phere = psp.eval(ktest, atest, cosmo)
assert_allclose(phere, ptrue, rtol=1E-6)
dphere = psp.eval_dlogpk_dlogk(ktest, atest, cosmo)
assert_allclose(dphere, -1.*np.ones_like(dphere), 6)
def test_pk2d_cls():
"""
Test interplay between Pk2D and the Limber integrator
"""
cosmo = ccl.Cosmology(
Omega_c=0.27, Omega_b=0.045, h=0.67, A_s=1e-10, n_s=0.96)
z = np.linspace(0., 1., 200)
n = np.exp(-((z-0.5)/0.1)**2)
lens1 = ccl.WeakLensingTracer(cosmo, (z, n))
ells = np.arange(2, 10)
# Check that passing no power spectrum is fine
cells = ccl.angular_cl(cosmo, lens1, lens1, ells)
assert all_finite(cells)
# Check that passing a bogus power spectrum fails as expected
assert_raises(
ValueError, ccl.angular_cl, cosmo, lens1, lens1, ells, p_of_k_a=1)
# Check that passing a correct power spectrum runs as expected
psp = ccl.Pk2D(pkfunc=lpk2d, cosmo=cosmo)
cells = ccl.angular_cl(cosmo, lens1, lens1, ells, p_of_k_a=psp)
assert all_finite(cells)
def test_pk2d_parsing():
a_arr = np.linspace(0.1, 1, 100)
k_arr = np.geomspace(1E-4, 1E3, 1000)
pk_arr = a_arr[:, None] * ((k_arr/0.01)/(1+(k_arr/0.01)**3))[None, :]
psp = ccl.Pk2D(a_arr=a_arr, lk_arr=np.log(k_arr),
pk_arr=np.log(pk_arr))
cosmo = ccl.CosmologyCalculator(
Omega_c=0.27, Omega_b=0.045, h=0.67, sigma8=0.8, n_s=0.96,
pk_nonlin={'a': a_arr, 'k': k_arr,
'delta_matter:delta_matter': pk_arr,
'a:b': pk_arr})
z = np.linspace(0., 1., 200)
n = np.exp(-((z-0.5)/0.1)**2)
lens1 = ccl.WeakLensingTracer(cosmo, (z, n))
ells = np.linspace(2, 100, 10)
cls1 = ccl.angular_cl(cosmo, lens1, lens1, ells,
p_of_k_a=None)
cls2 = ccl.angular_cl(cosmo, lens1, lens1, ells,
p_of_k_a='delta_matter:delta_matter')
cls3 = ccl.angular_cl(cosmo, lens1, lens1, ells,
p_of_k_a='a:b')
cls4 = ccl.angular_cl(cosmo, lens1, lens1, ells,
p_of_k_a=psp)
assert all_finite(cls1)
assert all_finite(cls2)
assert all_finite(cls3)
assert all_finite(cls4)
assert np.all(np.fabs(cls2/cls1-1) < 1E-10)
assert np.all(np.fabs(cls3/cls1-1) < 1E-10)
assert np.all(np.fabs(cls4/cls1-1) < 1E-10)
# Wrong name
with pytest.raises(KeyError):
ccl.angular_cl(cosmo, lens1, lens1, ells,
p_of_k_a='a:c')
# Wrong type
with pytest.raises(ValueError):
ccl.angular_cl(cosmo, lens1, lens1, ells,
p_of_k_a=3)
def test_pk2d_get_spline_arrays():
empty_pk2d = ccl.Pk2D(empty=True)
# Pk2D needs splines defined to get splines out
with pytest.raises(ValueError):
empty_pk2d.get_spline_arrays()
def test_pk2d_add():
x = np.linspace(0.1, 1, 10)
log_y = np.linspace(-3, 1, 20)
zarr_a = np.outer(x, np.exp(log_y))
zarr_b = np.outer(-1*x, 4*np.exp(log_y))
empty_pk2d = ccl.Pk2D(empty=True)
pk2d_a = ccl.Pk2D(a_arr=x, lk_arr=log_y, pk_arr=np.log(zarr_a),
is_logp=True)
pk2d_b = ccl.Pk2D(a_arr=2*x, lk_arr=log_y, pk_arr=zarr_b,
is_logp=False)
pk2d_b2 = ccl.Pk2D(a_arr=x, lk_arr=log_y+0.5, pk_arr=zarr_b,
is_logp=False)
# This raises an error because the a ranges don't match
with pytest.raises(ValueError):
pk2d_a + pk2d_b
# This raises an error because the k ranges don't match
with pytest.raises(ValueError):
pk2d_a + pk2d_b2
# This raises an error because addition with an empty Pk2D should not work
with pytest.raises(ValueError):
pk2d_a + empty_pk2d
pk2d_c = ccl.Pk2D(a_arr=x, lk_arr=log_y, pk_arr=zarr_b,
is_logp=False)
pk2d_d = pk2d_a + pk2d_c
pk2d_d2 = pk2d_a + 1.0
xarr_d, yarr_d, zarr_d = pk2d_d.get_spline_arrays()
_, _, zarr_d2 = pk2d_d2.get_spline_arrays()
assert np.allclose(x, xarr_d)
assert np.allclose(log_y, yarr_d)
assert np.allclose(zarr_a + zarr_b, zarr_d)
assert np.allclose(zarr_a + 1.0, zarr_d2)
pk2d_e = ccl.Pk2D(a_arr=x[1:-1], lk_arr=log_y[1:-1],
pk_arr=zarr_b[1:-1, 1:-1],
is_logp=False)
# This raises a warning because the power spectra are not defined on the
# same support
with pytest.warns(CCLWarning):
pk2d_f = pk2d_e + pk2d_a
xarr_f, yarr_f, zarr_f = pk2d_f.get_spline_arrays()
assert np.allclose((zarr_a + zarr_b)[1:-1, 1:-1], zarr_f)
def test_pk2d_mul_pow():
x = np.linspace(0.1, 1, 10)
log_y = np.linspace(-3, 1, 20)
zarr_a = np.outer(x, np.exp(log_y))
zarr_b = np.outer(-1*x, 4*np.exp(log_y))
pk2d_a = ccl.Pk2D(a_arr=x, lk_arr=log_y, pk_arr=np.log(zarr_a),
is_logp=True)
pk2d_b = ccl.Pk2D(a_arr=x, lk_arr=log_y, pk_arr=zarr_b,
is_logp=False)
# This raises an error because multiplication is only defined for
# float, int, and Pk2D
with pytest.raises(TypeError):
pk2d_a*np.array([0.1, 0.2])
# This raises an error because exponention is only defined for
# float and int
with pytest.raises(TypeError):
pk2d_a**pk2d_b
# This raises a warning because the power spectrum is non-negative and the
# power is non-integer
with pytest.warns(CCLWarning):
pk2d_b**0.5
pk2d_g = pk2d_a * pk2d_b
pk2d_h = 2*pk2d_a
pk2d_i = pk2d_a**1.8
_, _, zarr_g = pk2d_g.get_spline_arrays()
_, _, zarr_h = pk2d_h.get_spline_arrays()
_, _, zarr_i = pk2d_i.get_spline_arrays()
assert np.allclose(zarr_a * zarr_b, zarr_g)
assert np.allclose(2 * zarr_a, zarr_h)
assert np.allclose(zarr_a**1.8, zarr_i)
pk2d_j = (pk2d_a + 0.5*pk2d_i)**1.5
_, _, zarr_j = pk2d_j.get_spline_arrays()
assert np.allclose((zarr_a + 0.5*zarr_i)**1.5, zarr_j)
| [
"pyccl.Pk2D",
"numpy.logspace",
"numpy.allclose",
"pyccl.Cosmology",
"numpy.arange",
"numpy.exp",
"pyccl.angular_cl",
"pytest.mark.parametrize",
"pyccl.CosmologyCalculator",
"pytest.warns",
"numpy.geomspace",
"numpy.testing.assert_almost_equal",
"numpy.isfinite",
"pyccl.CosmologyVanillaLCD... | [((1741, 1831), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""model"""', "['bbks', 'eisenstein_hu', 'eisenstein_hu_nowiggles']"], {}), "('model', ['bbks', 'eisenstein_hu',\n 'eisenstein_hu_nowiggles'])\n", (1764, 1831), False, 'import pytest\n'), ((3856, 3915), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""model"""', "['bbks', 'eisenstein_hu']"], {}), "('model', ['bbks', 'eisenstein_hu'])\n", (3879, 3915), False, 'import pytest\n'), ((577, 648), 'pyccl.Cosmology', 'ccl.Cosmology', ([], {'Omega_c': '(0.27)', 'Omega_b': '(0.045)', 'h': '(0.67)', 'A_s': '(1e-10)', 'n_s': '(0.96)'}), '(Omega_c=0.27, Omega_b=0.045, h=0.67, A_s=1e-10, n_s=0.96)\n', (590, 648), True, 'import pyccl as ccl\n'), ((681, 716), 'numpy.testing.assert_raises', 'assert_raises', (['ValueError', 'ccl.Pk2D'], {}), '(ValueError, ccl.Pk2D)\n', (694, 716), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((767, 815), 'numpy.testing.assert_raises', 'assert_raises', (['ValueError', 'ccl.Pk2D'], {'pkfunc': 'pk1d'}), '(ValueError, ccl.Pk2D, pkfunc=pk1d)\n', (780, 815), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((820, 855), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'pkfunc': 'lpk2d', 'cosmo': 'cosmo'}), '(pkfunc=lpk2d, cosmo=cosmo)\n', (828, 855), True, 'import pyccl as ccl\n'), ((895, 944), 'numpy.testing.assert_raises', 'assert_raises', (['ValueError', 'ccl.Pk2D'], {'pkfunc': 'lpk2d'}), '(ValueError, ccl.Pk2D, pkfunc=lpk2d)\n', (908, 944), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((1113, 1192), 'numpy.testing.assert_raises', 'assert_raises', (['ValueError', 'ccl.Pk2D'], {'a_arr': 'aarr', 'lk_arr': 'lkarr', 'pk_arr': 'pkarr[1:]'}), '(ValueError, ccl.Pk2D, a_arr=aarr, lk_arr=lkarr, pk_arr=pkarr[1:])\n', (1126, 1192), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, 
assert_allclose\n'), ((1258, 1344), 'numpy.testing.assert_raises', 'assert_raises', (['ValueError', 'ccl.Pk2D'], {'a_arr': 'aarr[::-1]', 'lk_arr': 'lkarr', 'pk_arr': 'pkarr'}), '(ValueError, ccl.Pk2D, a_arr=aarr[::-1], lk_arr=lkarr, pk_arr=\n pkarr)\n', (1271, 1344), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((1421, 1492), 'pyccl.Cosmology', 'ccl.Cosmology', ([], {'Omega_c': '(0.27)', 'Omega_b': '(0.045)', 'h': '(0.67)', 'A_s': '(1e-10)', 'n_s': '(0.96)'}), '(Omega_c=0.27, Omega_b=0.045, h=0.67, A_s=1e-10, n_s=0.96)\n', (1434, 1492), True, 'import pyccl as ccl\n'), ((1635, 1683), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'a_arr': 'aarr', 'lk_arr': 'lkarr', 'pk_arr': 'pkarr'}), '(a_arr=aarr, lk_arr=lkarr, pk_arr=pkarr)\n', (1643, 1683), True, 'import pyccl as ccl\n'), ((1914, 1986), 'pyccl.Cosmology', 'ccl.Cosmology', ([], {'Omega_c': '(0.27)', 'Omega_b': '(0.045)', 'h': '(0.67)', 'sigma8': '(0.8)', 'n_s': '(0.96)'}), '(Omega_c=0.27, Omega_b=0.045, h=0.67, sigma8=0.8, n_s=0.96)\n', (1927, 1986), True, 'import pyccl as ccl\n'), ((2008, 2109), 'pyccl.Cosmology', 'ccl.Cosmology', ([], {'Omega_c': '(0.27)', 'Omega_b': '(0.045)', 'h': '(0.67)', 'sigma8': '(0.8)', 'n_s': '(0.96)', 'transfer_function': 'model'}), '(Omega_c=0.27, Omega_b=0.045, h=0.67, sigma8=0.8, n_s=0.96,\n transfer_function=model)\n', (2021, 2109), True, 'import pyccl as ccl\n'), ((2132, 2180), 'pyccl.Pk2D.pk_from_model', 'ccl.Pk2D.pk_from_model', (['cosmo_fixed'], {'model': 'model'}), '(cosmo_fixed, model=model)\n', (2154, 2180), True, 'import pyccl as ccl\n'), ((2190, 2220), 'numpy.geomspace', 'np.geomspace', (['(0.001)', '(10.0)', '(128)'], {}), '(0.001, 10.0, 128)\n', (2202, 2220), True, 'import numpy as np\n'), ((2554, 2732), 'pyccl.Cosmology', 'ccl.Cosmology', ([], {'Omega_c': 'pars[0]', 'Omega_b': 'pars[1]', 'h': 'pars[2]', 'sigma8': 'pars[3]', 'n_s': 'pars[4]', 'w0': 'pars[5]', 'wa': 'pars[6]', 'Neff': '(3.04)', 'Omega_g': '(0)', 'Omega_k': '(0)', 
'transfer_function': '"""bbks"""'}), "(Omega_c=pars[0], Omega_b=pars[1], h=pars[2], sigma8=pars[3],\n n_s=pars[4], w0=pars[5], wa=pars[6], Neff=3.04, Omega_g=0, Omega_k=0,\n transfer_function='bbks')\n", (2567, 2732), True, 'import pyccl as ccl\n'), ((3057, 3264), 'pyccl.Cosmology', 'ccl.Cosmology', ([], {'Omega_c': 'pars[0]', 'Omega_b': 'pars[1]', 'h': 'pars[2]', 'sigma8': 'pars[3]', 'n_s': 'pars[4]', 'w0': 'pars[5]', 'wa': 'pars[6]', 'Neff': '(3.04)', 'Omega_g': '(0)', 'Omega_k': '(0)', 'transfer_function': '"""bbks"""', 'matter_power_spectrum': '"""emu"""'}), "(Omega_c=pars[0], Omega_b=pars[1], h=pars[2], sigma8=pars[3],\n n_s=pars[4], w0=pars[5], wa=pars[6], Neff=3.04, Omega_g=0, Omega_k=0,\n transfer_function='bbks', matter_power_spectrum='emu')\n", (3070, 3264), True, 'import pyccl as ccl\n'), ((3552, 3600), 'pyccl.Pk2D.pk_from_model', 'ccl.Pk2D.pk_from_model', (['cosmo_fixed'], {'model': '"""emu"""'}), "(cosmo_fixed, model='emu')\n", (3574, 3600), True, 'import pyccl as ccl\n'), ((3610, 3640), 'numpy.geomspace', 'np.geomspace', (['(0.001)', '(10.0)', '(128)'], {}), '(0.001, 10.0, 128)\n', (3622, 3640), True, 'import numpy as np\n'), ((3967, 4079), 'pyccl.Cosmology', 'ccl.Cosmology', ([], {'Omega_c': '(0.27)', 'Omega_b': '(0.045)', 'h': '(0.67)', 'A_s': '(1e-10)', 'n_s': '(0.96)', 'transfer_function': '"""boltzmann_class"""'}), "(Omega_c=0.27, Omega_b=0.045, h=0.67, A_s=1e-10, n_s=0.96,\n transfer_function='boltzmann_class')\n", (3980, 4079), True, 'import pyccl as ccl\n'), ((4097, 4168), 'numpy.testing.assert_raises', 'assert_raises', (['ccl.CCLError', 'ccl.Pk2D.pk_from_model', 'cosmo'], {'model': 'model'}), '(ccl.CCLError, ccl.Pk2D.pk_from_model, cosmo, model=model)\n', (4110, 4168), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((4236, 4262), 'pyccl.CosmologyVanillaLCDM', 'ccl.CosmologyVanillaLCDM', ([], {}), '()\n', (4260, 4262), True, 'import pyccl as ccl\n'), ((4267, 4338), 
'numpy.testing.assert_raises', 'assert_raises', (['ValueError', 'ccl.Pk2D.pk_from_model', 'cosmo'], {'model': '"""bbkss"""'}), "(ValueError, ccl.Pk2D.pk_from_model, cosmo, model='bbkss')\n", (4280, 4338), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((4450, 4521), 'pyccl.Cosmology', 'ccl.Cosmology', ([], {'Omega_c': '(0.27)', 'Omega_b': '(0.045)', 'h': '(0.67)', 'A_s': '(1e-10)', 'n_s': '(0.96)'}), '(Omega_c=0.27, Omega_b=0.045, h=0.67, A_s=1e-10, n_s=0.96)\n', (4463, 4521), True, 'import pyccl as ccl\n'), ((4542, 4577), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'pkfunc': 'lpk2d', 'cosmo': 'cosmo'}), '(pkfunc=lpk2d, cosmo=cosmo)\n', (4550, 4577), True, 'import pyccl as ccl\n'), ((4825, 4861), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['dphere', '(-1.0)', '(6)'], {}), '(dphere, -1.0, 6)\n', (4844, 4861), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((5078, 5114), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['dphere', '(-1.0)', '(6)'], {}), '(dphere, -1.0, 6)\n', (5097, 5114), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((5157, 5179), 'numpy.logspace', 'np.logspace', (['(-3)', '(1)', '(10)'], {}), '(-3, 1, 10)\n', (5168, 5179), True, 'import numpy as np\n'), ((5257, 5298), 'numpy.testing.assert_allclose', 'assert_allclose', (['phere', 'ptrue'], {'rtol': '(1e-06)'}), '(phere, ptrue, rtol=1e-06)\n', (5272, 5298), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((5458, 5507), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'pkfunc': 'pk2d', 'is_logp': '(False)', 'cosmo': 'cosmo'}), '(pkfunc=pk2d, is_logp=False, cosmo=cosmo)\n', (5466, 5507), True, 'import pyccl as ccl\n'), ((5554, 5595), 'numpy.testing.assert_allclose', 'assert_allclose', (['phere', 'ptrue'], {'rtol': '(1e-06)'}), '(phere, ptrue, rtol=1e-06)\n', (5569, 5595), False, 'from 
numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((5747, 5771), 'numpy.logspace', 'np.logspace', (['(-4)', '(2)', '(1000)'], {}), '(-4, 2, 1000)\n', (5758, 5771), True, 'import numpy as np\n'), ((5783, 5810), 'numpy.linspace', 'np.linspace', (['(0.01)', '(1.0)', '(100)'], {}), '(0.01, 1.0, 100)\n', (5794, 5810), True, 'import numpy as np\n'), ((5996, 6037), 'numpy.testing.assert_allclose', 'assert_allclose', (['phere', 'ptrue'], {'rtol': '(1e-06)'}), '(phere, ptrue, rtol=1e-06)\n', (6011, 6037), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, assert_allclose\n'), ((6260, 6331), 'pyccl.Cosmology', 'ccl.Cosmology', ([], {'Omega_c': '(0.27)', 'Omega_b': '(0.045)', 'h': '(0.67)', 'A_s': '(1e-10)', 'n_s': '(0.96)'}), '(Omega_c=0.27, Omega_b=0.045, h=0.67, A_s=1e-10, n_s=0.96)\n', (6273, 6331), True, 'import pyccl as ccl\n'), ((6349, 6375), 'numpy.linspace', 'np.linspace', (['(0.0)', '(1.0)', '(200)'], {}), '(0.0, 1.0, 200)\n', (6360, 6375), True, 'import numpy as np\n'), ((6382, 6413), 'numpy.exp', 'np.exp', (['(-((z - 0.5) / 0.1) ** 2)'], {}), '(-((z - 0.5) / 0.1) ** 2)\n', (6388, 6413), True, 'import numpy as np\n'), ((6420, 6456), 'pyccl.WeakLensingTracer', 'ccl.WeakLensingTracer', (['cosmo', '(z, n)'], {}), '(cosmo, (z, n))\n', (6441, 6456), True, 'import pyccl as ccl\n'), ((6468, 6484), 'numpy.arange', 'np.arange', (['(2)', '(10)'], {}), '(2, 10)\n', (6477, 6484), True, 'import numpy as np\n'), ((6549, 6590), 'pyccl.angular_cl', 'ccl.angular_cl', (['cosmo', 'lens1', 'lens1', 'ells'], {}), '(cosmo, lens1, lens1, ells)\n', (6563, 6590), True, 'import pyccl as ccl\n'), ((6691, 6776), 'numpy.testing.assert_raises', 'assert_raises', (['ValueError', 'ccl.angular_cl', 'cosmo', 'lens1', 'lens1', 'ells'], {'p_of_k_a': '(1)'}), '(ValueError, ccl.angular_cl, cosmo, lens1, lens1, ells, p_of_k_a=1\n )\n', (6704, 6776), False, 'from numpy.testing import assert_, assert_raises, assert_almost_equal, 
assert_allclose\n'), ((6859, 6894), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'pkfunc': 'lpk2d', 'cosmo': 'cosmo'}), '(pkfunc=lpk2d, cosmo=cosmo)\n', (6867, 6894), True, 'import pyccl as ccl\n'), ((6907, 6962), 'pyccl.angular_cl', 'ccl.angular_cl', (['cosmo', 'lens1', 'lens1', 'ells'], {'p_of_k_a': 'psp'}), '(cosmo, lens1, lens1, ells, p_of_k_a=psp)\n', (6921, 6962), True, 'import pyccl as ccl\n'), ((7031, 7055), 'numpy.linspace', 'np.linspace', (['(0.1)', '(1)', '(100)'], {}), '(0.1, 1, 100)\n', (7042, 7055), True, 'import numpy as np\n'), ((7068, 7102), 'numpy.geomspace', 'np.geomspace', (['(0.0001)', '(1000.0)', '(1000)'], {}), '(0.0001, 1000.0, 1000)\n', (7080, 7102), True, 'import numpy as np\n'), ((7282, 7460), 'pyccl.CosmologyCalculator', 'ccl.CosmologyCalculator', ([], {'Omega_c': '(0.27)', 'Omega_b': '(0.045)', 'h': '(0.67)', 'sigma8': '(0.8)', 'n_s': '(0.96)', 'pk_nonlin': "{'a': a_arr, 'k': k_arr, 'delta_matter:delta_matter': pk_arr, 'a:b': pk_arr}"}), "(Omega_c=0.27, Omega_b=0.045, h=0.67, sigma8=0.8,\n n_s=0.96, pk_nonlin={'a': a_arr, 'k': k_arr,\n 'delta_matter:delta_matter': pk_arr, 'a:b': pk_arr})\n", (7305, 7460), True, 'import pyccl as ccl\n'), ((7516, 7542), 'numpy.linspace', 'np.linspace', (['(0.0)', '(1.0)', '(200)'], {}), '(0.0, 1.0, 200)\n', (7527, 7542), True, 'import numpy as np\n'), ((7549, 7580), 'numpy.exp', 'np.exp', (['(-((z - 0.5) / 0.1) ** 2)'], {}), '(-((z - 0.5) / 0.1) ** 2)\n', (7555, 7580), True, 'import numpy as np\n'), ((7587, 7623), 'pyccl.WeakLensingTracer', 'ccl.WeakLensingTracer', (['cosmo', '(z, n)'], {}), '(cosmo, (z, n))\n', (7608, 7623), True, 'import pyccl as ccl\n'), ((7635, 7658), 'numpy.linspace', 'np.linspace', (['(2)', '(100)', '(10)'], {}), '(2, 100, 10)\n', (7646, 7658), True, 'import numpy as np\n'), ((7671, 7727), 'pyccl.angular_cl', 'ccl.angular_cl', (['cosmo', 'lens1', 'lens1', 'ells'], {'p_of_k_a': 'None'}), '(cosmo, lens1, lens1, ells, p_of_k_a=None)\n', (7685, 7727), True, 'import pyccl as ccl\n'), ((7765, 7844), 
'pyccl.angular_cl', 'ccl.angular_cl', (['cosmo', 'lens1', 'lens1', 'ells'], {'p_of_k_a': '"""delta_matter:delta_matter"""'}), "(cosmo, lens1, lens1, ells, p_of_k_a='delta_matter:delta_matter')\n", (7779, 7844), True, 'import pyccl as ccl\n'), ((7882, 7939), 'pyccl.angular_cl', 'ccl.angular_cl', (['cosmo', 'lens1', 'lens1', 'ells'], {'p_of_k_a': '"""a:b"""'}), "(cosmo, lens1, lens1, ells, p_of_k_a='a:b')\n", (7896, 7939), True, 'import pyccl as ccl\n'), ((7977, 8032), 'pyccl.angular_cl', 'ccl.angular_cl', (['cosmo', 'lens1', 'lens1', 'ells'], {'p_of_k_a': 'psp'}), '(cosmo, lens1, lens1, ells, p_of_k_a=psp)\n', (7991, 8032), True, 'import pyccl as ccl\n'), ((8649, 8669), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'empty': '(True)'}), '(empty=True)\n', (8657, 8669), True, 'import pyccl as ccl\n'), ((8829, 8852), 'numpy.linspace', 'np.linspace', (['(0.1)', '(1)', '(10)'], {}), '(0.1, 1, 10)\n', (8840, 8852), True, 'import numpy as np\n'), ((8865, 8887), 'numpy.linspace', 'np.linspace', (['(-3)', '(1)', '(20)'], {}), '(-3, 1, 20)\n', (8876, 8887), True, 'import numpy as np\n'), ((8991, 9011), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'empty': '(True)'}), '(empty=True)\n', (8999, 9011), True, 'import pyccl as ccl\n'), ((9129, 9194), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'a_arr': '(2 * x)', 'lk_arr': 'log_y', 'pk_arr': 'zarr_b', 'is_logp': '(False)'}), '(a_arr=2 * x, lk_arr=log_y, pk_arr=zarr_b, is_logp=False)\n', (9137, 9194), True, 'import pyccl as ccl\n'), ((9229, 9296), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'a_arr': 'x', 'lk_arr': '(log_y + 0.5)', 'pk_arr': 'zarr_b', 'is_logp': '(False)'}), '(a_arr=x, lk_arr=log_y + 0.5, pk_arr=zarr_b, is_logp=False)\n', (9237, 9296), True, 'import pyccl as ccl\n'), ((9717, 9778), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'a_arr': 'x', 'lk_arr': 'log_y', 'pk_arr': 'zarr_b', 'is_logp': '(False)'}), '(a_arr=x, lk_arr=log_y, pk_arr=zarr_b, is_logp=False)\n', (9725, 9778), True, 'import pyccl as ccl\n'), ((9974, 9996), 'numpy.allclose', 'np.allclose', (['x', 'xarr_d'], {}), '(x, 
xarr_d)\n', (9985, 9996), True, 'import numpy as np\n'), ((10008, 10034), 'numpy.allclose', 'np.allclose', (['log_y', 'yarr_d'], {}), '(log_y, yarr_d)\n', (10019, 10034), True, 'import numpy as np\n'), ((10046, 10082), 'numpy.allclose', 'np.allclose', (['(zarr_a + zarr_b)', 'zarr_d'], {}), '(zarr_a + zarr_b, zarr_d)\n', (10057, 10082), True, 'import numpy as np\n'), ((10094, 10128), 'numpy.allclose', 'np.allclose', (['(zarr_a + 1.0)', 'zarr_d2'], {}), '(zarr_a + 1.0, zarr_d2)\n', (10105, 10128), True, 'import numpy as np\n'), ((10143, 10232), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'a_arr': 'x[1:-1]', 'lk_arr': 'log_y[1:-1]', 'pk_arr': 'zarr_b[1:-1, 1:-1]', 'is_logp': '(False)'}), '(a_arr=x[1:-1], lk_arr=log_y[1:-1], pk_arr=zarr_b[1:-1, 1:-1],\n is_logp=False)\n', (10151, 10232), True, 'import pyccl as ccl\n'), ((10507, 10557), 'numpy.allclose', 'np.allclose', (['(zarr_a + zarr_b)[1:-1, 1:-1]', 'zarr_f'], {}), '((zarr_a + zarr_b)[1:-1, 1:-1], zarr_f)\n', (10518, 10557), True, 'import numpy as np\n'), ((10593, 10616), 'numpy.linspace', 'np.linspace', (['(0.1)', '(1)', '(10)'], {}), '(0.1, 1, 10)\n', (10604, 10616), True, 'import numpy as np\n'), ((10629, 10651), 'numpy.linspace', 'np.linspace', (['(-3)', '(1)', '(20)'], {}), '(-3, 1, 20)\n', (10640, 10651), True, 'import numpy as np\n'), ((10855, 10916), 'pyccl.Pk2D', 'ccl.Pk2D', ([], {'a_arr': 'x', 'lk_arr': 'log_y', 'pk_arr': 'zarr_b', 'is_logp': '(False)'}), '(a_arr=x, lk_arr=log_y, pk_arr=zarr_b, is_logp=False)\n', (10863, 10916), True, 'import pyccl as ccl\n'), ((11644, 11680), 'numpy.allclose', 'np.allclose', (['(zarr_a * zarr_b)', 'zarr_g'], {}), '(zarr_a * zarr_b, zarr_g)\n', (11655, 11680), True, 'import numpy as np\n'), ((11692, 11723), 'numpy.allclose', 'np.allclose', (['(2 * zarr_a)', 'zarr_h'], {}), '(2 * zarr_a, zarr_h)\n', (11703, 11723), True, 'import numpy as np\n'), ((11735, 11769), 'numpy.allclose', 'np.allclose', (['(zarr_a ** 1.8)', 'zarr_i'], {}), '(zarr_a ** 1.8, zarr_i)\n', (11746, 11769), True, 
'import numpy as np\n'), ((11866, 11917), 'numpy.allclose', 'np.allclose', (['((zarr_a + 0.5 * zarr_i) ** 1.5)', 'zarr_j'], {}), '((zarr_a + 0.5 * zarr_i) ** 1.5, zarr_j)\n', (11877, 11917), True, 'import numpy as np\n'), ((465, 482), 'numpy.isfinite', 'np.isfinite', (['vals'], {}), '(vals)\n', (476, 482), True, 'import numpy as np\n'), ((2318, 2355), 'pyccl.linear_matter_power', 'ccl.linear_matter_power', (['cosmo', 'ks', 'a'], {}), '(cosmo, ks, a)\n', (2341, 2355), True, 'import pyccl as ccl\n'), ((3738, 3775), 'pyccl.nonlin_matter_power', 'ccl.nonlin_matter_power', (['cosmo', 'ks', 'a'], {}), '(cosmo, ks, a)\n', (3761, 3775), True, 'import pyccl as ccl\n'), ((4736, 4758), 'numpy.fabs', 'np.fabs', (['(phere / ptrue)'], {}), '(phere / ptrue)\n', (4743, 4758), True, 'import numpy as np\n'), ((4989, 5011), 'numpy.fabs', 'np.fabs', (['(phere / ptrue)'], {}), '(phere / ptrue)\n', (4996, 5011), True, 'import numpy as np\n'), ((8342, 8365), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (8355, 8365), False, 'import pytest\n'), ((8375, 8432), 'pyccl.angular_cl', 'ccl.angular_cl', (['cosmo', 'lens1', 'lens1', 'ells'], {'p_of_k_a': '"""a:c"""'}), "(cosmo, lens1, lens1, ells, p_of_k_a='a:c')\n", (8389, 8432), True, 'import pyccl as ccl\n'), ((8483, 8508), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (8496, 8508), False, 'import pytest\n'), ((8518, 8571), 'pyccl.angular_cl', 'ccl.angular_cl', (['cosmo', 'lens1', 'lens1', 'ells'], {'p_of_k_a': '(3)'}), '(cosmo, lens1, lens1, ells, p_of_k_a=3)\n', (8532, 8571), True, 'import pyccl as ccl\n'), ((8732, 8757), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (8745, 8757), False, 'import pytest\n'), ((8913, 8926), 'numpy.exp', 'np.exp', (['log_y'], {}), '(log_y)\n', (8919, 8926), True, 'import numpy as np\n'), ((9388, 9413), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (9401, 9413), False, 'import pytest\n'), ((9508, 9533), 
'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (9521, 9533), False, 'import pytest\n'), ((9648, 9673), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (9661, 9673), False, 'import pytest\n'), ((10379, 10403), 'pytest.warns', 'pytest.warns', (['CCLWarning'], {}), '(CCLWarning)\n', (10391, 10403), False, 'import pytest\n'), ((10677, 10690), 'numpy.exp', 'np.exp', (['log_y'], {}), '(log_y)\n', (10683, 10690), True, 'import numpy as np\n'), ((11046, 11070), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (11059, 11070), False, 'import pytest\n'), ((11205, 11229), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (11218, 11229), False, 'import pytest\n'), ((11370, 11394), 'pytest.warns', 'pytest.warns', (['CCLWarning'], {}), '(CCLWarning)\n', (11382, 11394), False, 'import pytest\n'), ((2382, 2404), 'numpy.fabs', 'np.fabs', (['(pk1 / pk2 - 1)'], {}), '(pk1 / pk2 - 1)\n', (2389, 2404), True, 'import numpy as np\n'), ((3802, 3824), 'numpy.fabs', 'np.fabs', (['(pk1 / pk2 - 1)'], {}), '(pk1 / pk2 - 1)\n', (3809, 3824), True, 'import numpy as np\n'), ((5386, 5406), 'numpy.ones_like', 'np.ones_like', (['dphere'], {}), '(dphere)\n', (5398, 5406), True, 'import numpy as np\n'), ((5683, 5703), 'numpy.ones_like', 'np.ones_like', (['dphere'], {}), '(dphere)\n', (5695, 5703), True, 'import numpy as np\n'), ((5908, 5920), 'numpy.log', 'np.log', (['karr'], {}), '(karr)\n', (5914, 5920), True, 'import numpy as np\n'), ((6125, 6145), 'numpy.ones_like', 'np.ones_like', (['dphere'], {}), '(dphere)\n', (6137, 6145), True, 'import numpy as np\n'), ((7212, 7225), 'numpy.log', 'np.log', (['k_arr'], {}), '(k_arr)\n', (7218, 7225), True, 'import numpy as np\n'), ((7253, 7267), 'numpy.log', 'np.log', (['pk_arr'], {}), '(pk_arr)\n', (7259, 7267), True, 'import numpy as np\n'), ((8189, 8213), 'numpy.fabs', 'np.fabs', (['(cls2 / cls1 - 1)'], {}), '(cls2 / cls1 - 1)\n', (8196, 8213), True, 'import 
numpy as np\n'), ((8237, 8261), 'numpy.fabs', 'np.fabs', (['(cls3 / cls1 - 1)'], {}), '(cls3 / cls1 - 1)\n', (8244, 8261), True, 'import numpy as np\n'), ((8285, 8309), 'numpy.fabs', 'np.fabs', (['(cls4 / cls1 - 1)'], {}), '(cls4 / cls1 - 1)\n', (8292, 8309), True, 'import numpy as np\n'), ((8958, 8971), 'numpy.exp', 'np.exp', (['log_y'], {}), '(log_y)\n', (8964, 8971), True, 'import numpy as np\n'), ((9064, 9078), 'numpy.log', 'np.log', (['zarr_a'], {}), '(zarr_a)\n', (9070, 9078), True, 'import numpy as np\n'), ((10722, 10735), 'numpy.exp', 'np.exp', (['log_y'], {}), '(log_y)\n', (10728, 10735), True, 'import numpy as np\n'), ((10790, 10804), 'numpy.log', 'np.log', (['zarr_a'], {}), '(zarr_a)\n', (10796, 10804), True, 'import numpy as np\n'), ((11087, 11107), 'numpy.array', 'np.array', (['[0.1, 0.2]'], {}), '([0.1, 0.2])\n', (11095, 11107), True, 'import numpy as np\n'), ((1004, 1018), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (1013, 1018), True, 'import numpy as np\n'), ((1044, 1058), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (1053, 1058), True, 'import numpy as np\n'), ((1520, 1534), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (1529, 1534), True, 'import numpy as np\n'), ((1560, 1574), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (1569, 1574), True, 'import numpy as np\n')] |
import pandas as pd
import numpy as np
from PIL import Image
import os
import importdataset
from keras import applications, Input
from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPool2D, GlobalAveragePooling2D, AveragePooling2D, Flatten
from keras.models import Sequential, Model, load_model
from keras.optimizers import SGD, Adam
from tensorflow.keras.losses import MeanSquaredError, BinaryCrossentropy
from keras import metrics
from keras.models import Sequential
import keras.backend as K
from bpmll import bp_mll_loss
import utils
import h5py
import tensorflow as tf
physical_devices = tf.config.list_physical_devices('GPU')
tf.config.experimental.set_memory_growth(physical_devices[0], True)
basepath = os.getcwd()
main_dataset_path = os.path.join(basepath, "../datasets/dataset.h5")
encoder_dataset_path = os.path.join(basepath, "../datasets/dataset_encoder.h5")
model = tf.keras.applications.EfficientNetB7(
include_top=True,
weights="imagenet",
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000,
classifier_activation="softmax"
)
model.summary()
# Load targets (The targets for the decoder are the original inputs, X in main dataset)
hf = h5py.File(main_dataset_path, 'r')
X_test = hf.get('X_Test').value
Y_test = hf.get('Y_Test').value
hf.close()
for i in range(1, 20):
img = Image.fromarray((X_test[i:i+1, :, :, :]*255).squeeze().astype(np.uint8))
img = img.resize((600, 600))
img.show()
value = np.array(img).reshape([1, 600, 600, 3])
y = (model.predict(value)).squeeze()
x = (Y_test[i:i+1, :]).squeeze()
print(np.argmax(x))
print(np.argmax(y))
print("*"*10)
input("Any key")
| [
"h5py.File",
"numpy.argmax",
"os.getcwd",
"tensorflow.config.list_physical_devices",
"tensorflow.config.experimental.set_memory_growth",
"numpy.array",
"os.path.join",
"tensorflow.keras.applications.EfficientNetB7"
] | [((603, 641), 'tensorflow.config.list_physical_devices', 'tf.config.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (634, 641), True, 'import tensorflow as tf\n'), ((642, 709), 'tensorflow.config.experimental.set_memory_growth', 'tf.config.experimental.set_memory_growth', (['physical_devices[0]', '(True)'], {}), '(physical_devices[0], True)\n', (682, 709), True, 'import tensorflow as tf\n'), ((722, 733), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (731, 733), False, 'import os\n'), ((754, 802), 'os.path.join', 'os.path.join', (['basepath', '"""../datasets/dataset.h5"""'], {}), "(basepath, '../datasets/dataset.h5')\n", (766, 802), False, 'import os\n'), ((826, 882), 'os.path.join', 'os.path.join', (['basepath', '"""../datasets/dataset_encoder.h5"""'], {}), "(basepath, '../datasets/dataset_encoder.h5')\n", (838, 882), False, 'import os\n'), ((892, 1072), 'tensorflow.keras.applications.EfficientNetB7', 'tf.keras.applications.EfficientNetB7', ([], {'include_top': '(True)', 'weights': '"""imagenet"""', 'input_tensor': 'None', 'input_shape': 'None', 'pooling': 'None', 'classes': '(1000)', 'classifier_activation': '"""softmax"""'}), "(include_top=True, weights='imagenet',\n input_tensor=None, input_shape=None, pooling=None, classes=1000,\n classifier_activation='softmax')\n", (928, 1072), True, 'import tensorflow as tf\n'), ((1206, 1239), 'h5py.File', 'h5py.File', (['main_dataset_path', '"""r"""'], {}), "(main_dataset_path, 'r')\n", (1215, 1239), False, 'import h5py\n'), ((1610, 1622), 'numpy.argmax', 'np.argmax', (['x'], {}), '(x)\n', (1619, 1622), True, 'import numpy as np\n'), ((1634, 1646), 'numpy.argmax', 'np.argmax', (['y'], {}), '(y)\n', (1643, 1646), True, 'import numpy as np\n'), ((1482, 1495), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (1490, 1495), True, 'import numpy as np\n')] |
def calc(f, G0, GI, Beta, rho=1000):
    """ calculate SWS as a function of frequency
    :param f: vector of frequency (Hz)
    :param G0: G_o (Pa)
    :param GI: G_inf (Pa)
    :param Beta: exponential relaxation constant (s^-1)
    :param rho: medium density (kg/m^3), defaults to 1000 (water-like)
    :returns: c_omega (SWS in m/s as a function of omega (rad/s))
    """
    import numpy as np
    omega = 2 * np.pi * np.array(f)
    # Viscoelastic model parameters: mu1 is the equilibrium modulus,
    # mu2 the relaxing modulus, eta the associated viscosity.
    mu1 = GI
    mu2 = G0 - GI
    eta = (G0 - GI) / Beta
    # Real (storage) and imaginary (loss) parts of the complex shear
    # modulus; the denominator is shared by both terms.
    denom = mu2**2 + omega**2 * eta**2
    muprime = mu1 + (mu2 * omega**2 * eta**2) / denom
    muprime2 = -(mu2**2 * omega * eta) / denom
    # Phase velocity from the complex modulus.  The attenuation term
    # computed (and discarded) by the original implementation is removed.
    mag = np.sqrt(muprime**2 + muprime2**2)
    c_omega = np.sqrt((1 / rho) * (2 * mag**2) / (muprime + mag))
    return c_omega
| [
"numpy.array",
"numpy.sqrt"
] | [((340, 351), 'numpy.array', 'np.array', (['f'], {}), '(f)\n', (348, 351), True, 'import numpy as np\n'), ((800, 837), 'numpy.sqrt', 'np.sqrt', (['(muprime ** 2 + muprime2 ** 2)'], {}), '(muprime ** 2 + muprime2 ** 2)\n', (807, 837), True, 'import numpy as np\n'), ((630, 667), 'numpy.sqrt', 'np.sqrt', (['(muprime ** 2 + muprime2 ** 2)'], {}), '(muprime ** 2 + muprime2 ** 2)\n', (637, 667), True, 'import numpy as np\n')] |
import pandas as pd
import numpy as np
from output import Logger, ResultFileWriter
def calculate_distance(u, v) -> float:
    '''
    Attribute-wise distance between two data tuples.

    Positions 0-1 (identifier columns) and the final position (class
    label) are excluded from the comparison. Matching attributes cost 0,
    mismatching ones cost 1, and a missing value on either side costs 0.5.
    '''
    total = 0
    for idx in range(2, len(u) - 1):
        a, b = u[idx], v[idx]
        # Missing data is penalised at half the mismatch cost.
        if pd.isna(a) or pd.isna(b):
            total += 0.5
        elif a != b:
            total += 1
    return total
def knn_classifier(train: pd.DataFrame, test: pd.DataFrame, n: int) -> np.array:
    '''
    Main KNN function.

    Classifies each row of ``test`` by majority vote over its ``n``
    nearest rows of ``train``, using :func:`calculate_distance`.
    Column index 7 of the training rows holds the "Localization" label.

    :param train: labelled training data
    :param test: tuples to classify
    :param n: number of nearest neighbours that vote
    :return: array of predicted "Localization" labels, one per test row
    '''
    # initialize logger
    logger = Logger()
    # initialize file writer
    results_file_writer = ResultFileWriter()
    print(f"Beginning KNN classification for k = {n}")
    logger.log(f"Beginning KNN classification for train set {train.shape} and test set {test.shape}\n\n")
    # list of test data predictions to return
    prediction_list = []
    # convert dataframes to NumPy arrays -- this makes iterating through them faster
    train_data = train.to_numpy()
    test_data = test.to_numpy()
    for test_tuple in test_data:
        print(f"Begin K-Nearest Neighbor classification for test tuple:\n{test_tuple}")
        logger.log(f"Begin classification for test tuple:\n{test_tuple}")
        # Collect (distance, label) pairs in a plain list and build the
        # DataFrame once: DataFrame.append was removed in pandas 2.0 and
        # rebuilt the whole frame on every call.
        rows = [{"Distance": calculate_distance(test_tuple, train_row),
                 "Localization": train_row[7]}
                for train_row in train_data]
        neighbors = pd.DataFrame(rows, columns=["Distance", "Localization"])
        # get n neighbors with the smallest Distance value
        n_nearest_neighbors = neighbors.nsmallest(n, columns="Distance")
        # get the majority vote -- the Localization value with the most votes
        majority_vote = n_nearest_neighbors["Localization"].value_counts().idxmax()
        # store the prediction for validation later
        prediction_list.append(majority_vote)
        # store prediction result in results file
        results_file_writer.store_prediction_result(test_tuple[0], majority_vote)
        print(f"Prediction complete for test tuple. Prediction: {majority_vote}")
        logger.log(f"PREDICTION: {majority_vote}")
    return np.array(prediction_list)
def calculate_accuracy(predictions: np.array, test: pd.DataFrame) -> float:
    '''
    Performance calculation: fraction of predictions that match the
    "Localization" column (index 7) of the test data.
    '''
    # initialize logger
    logger = Logger()
    rows = test.to_numpy()
    # Count hits with a generator expression instead of a manual counter.
    hits = sum(1 for predicted, actual in zip(predictions, rows)
               if predicted == actual[7])
    accuracy = hits / predictions.size
    logger.log(f"Overall accuracy of KNN model: {accuracy * 100}%")
    return accuracy
| [
"pandas.DataFrame",
"output.Logger",
"output.ResultFileWriter",
"numpy.array",
"pandas.isna"
] | [((740, 748), 'output.Logger', 'Logger', ([], {}), '()\n', (746, 748), False, 'from output import Logger, ResultFileWriter\n'), ((805, 823), 'output.ResultFileWriter', 'ResultFileWriter', ([], {}), '()\n', (821, 823), False, 'from output import Logger, ResultFileWriter\n'), ((2700, 2725), 'numpy.array', 'np.array', (['prediction_list'], {}), '(prediction_list)\n', (2708, 2725), True, 'import numpy as np\n'), ((2889, 2897), 'output.Logger', 'Logger', ([], {}), '()\n', (2895, 2897), False, 'from output import Logger, ResultFileWriter\n'), ((1337, 1387), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['Distance', 'Localization']"}), "(columns=['Distance', 'Localization'])\n", (1349, 1387), True, 'import pandas as pd\n'), ((347, 364), 'pandas.isna', 'pd.isna', (['u[index]'], {}), '(u[index])\n', (354, 364), True, 'import pandas as pd\n'), ((368, 385), 'pandas.isna', 'pd.isna', (['v[index]'], {}), '(v[index])\n', (375, 385), True, 'import pandas as pd\n'), ((2386, 2411), 'numpy.array', 'np.array', (['[majority_vote]'], {}), '([majority_vote])\n', (2394, 2411), True, 'import numpy as np\n')] |
"""
#Trains a TCN on the IMDB sentiment classification task.
Output after 1 epochs on CPU: ~0.8611
Time per epoch on CPU (Core i7): ~64s.
Based on: https://github.com/keras-team/keras/blob/master/examples/imdb_bidirectional_lstm.py
"""
import numpy as np
from tensorflow.keras import Sequential
from tensorflow.keras.datasets import imdb
from tensorflow.keras.layers import Dense, Embedding
from tensorflow.keras.preprocessing import sequence
from tcn import TCN
max_features = 20000
# cut texts after this number of words
# (among top max_features most common words)
maxlen = 100
batch_size = 32
print('Loading data...')
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=max_features)
print(len(x_train), 'train sequences')
print(len(x_test), 'test sequences')
print('Pad sequences (samples x time)')
x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
print('x_train shape:', x_train.shape)
print('x_test shape:', x_test.shape)
y_train = np.array(y_train)
y_test = np.array(y_test)
model = Sequential([
Embedding(max_features, 128, input_shape=(maxlen,)),
TCN(kernel_size=6, dilations=[1, 2, 4, 8, 16]),
Dense(1, activation='sigmoid')
])
print(f'TCN receptive field: {model.layers[1].receptive_field}.')
model.summary()
model.compile('adam', 'binary_crossentropy', metrics=['accuracy'])
print('Train...')
model.fit(
x_train, y_train,
batch_size=batch_size,
validation_data=[x_test, y_test]
)
| [
"tcn.TCN",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.datasets.imdb.load_data",
"tensorflow.keras.preprocessing.sequence.pad_sequences",
"numpy.array",
"tensorflow.keras.layers.Embedding"
] | [((664, 702), 'tensorflow.keras.datasets.imdb.load_data', 'imdb.load_data', ([], {'num_words': 'max_features'}), '(num_words=max_features)\n', (678, 702), False, 'from tensorflow.keras.datasets import imdb\n'), ((830, 876), 'tensorflow.keras.preprocessing.sequence.pad_sequences', 'sequence.pad_sequences', (['x_train'], {'maxlen': 'maxlen'}), '(x_train, maxlen=maxlen)\n', (852, 876), False, 'from tensorflow.keras.preprocessing import sequence\n'), ((886, 931), 'tensorflow.keras.preprocessing.sequence.pad_sequences', 'sequence.pad_sequences', (['x_test'], {'maxlen': 'maxlen'}), '(x_test, maxlen=maxlen)\n', (908, 931), False, 'from tensorflow.keras.preprocessing import sequence\n'), ((1018, 1035), 'numpy.array', 'np.array', (['y_train'], {}), '(y_train)\n', (1026, 1035), True, 'import numpy as np\n'), ((1045, 1061), 'numpy.array', 'np.array', (['y_test'], {}), '(y_test)\n', (1053, 1061), True, 'import numpy as np\n'), ((1088, 1139), 'tensorflow.keras.layers.Embedding', 'Embedding', (['max_features', '(128)'], {'input_shape': '(maxlen,)'}), '(max_features, 128, input_shape=(maxlen,))\n', (1097, 1139), False, 'from tensorflow.keras.layers import Dense, Embedding\n'), ((1145, 1191), 'tcn.TCN', 'TCN', ([], {'kernel_size': '(6)', 'dilations': '[1, 2, 4, 8, 16]'}), '(kernel_size=6, dilations=[1, 2, 4, 8, 16])\n', (1148, 1191), False, 'from tcn import TCN\n'), ((1197, 1227), 'tensorflow.keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (1202, 1227), False, 'from tensorflow.keras.layers import Dense, Embedding\n')] |
import numpy as np
def dist(a, b, ax=-1):
    """Euclidean (L2) distance between ``a`` and ``b`` along axis ``ax``."""
    delta = a - b
    return np.linalg.norm(delta, axis=ax)
"numpy.linalg.norm"
] | [((54, 84), 'numpy.linalg.norm', 'np.linalg.norm', (['(a - b)'], {'axis': 'ax'}), '(a - b, axis=ax)\n', (68, 84), True, 'import numpy as np\n')] |
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
## pix2pix caffe interference
# facades_BtoA (architectural labels --> photo)
#%% import package
import numpy as np
import os
import caffe
import matplotlib.pyplot as plt
import skimage.io as io
import argparse
#%% define functions
def norm_image(IMG):
    """Min-max normalize an image to uint8 range [0, 255].

    :param IMG: array-like image data in any numeric range
    :return: uint8 array scaled so that min(IMG) -> 0 and max(IMG) -> 255;
             a constant image is returned as all zeros (the original code
             divided by zero in that case, producing NaNs).
    """
    lo = np.min(IMG)
    hi = np.max(IMG)
    span = hi - lo
    if span == 0:
        # Constant input: nothing to stretch, avoid 0/0.
        return np.zeros_like(IMG, dtype='uint8')
    # scale to [0, 1], then to [0, 255]
    output = (IMG - lo) / span * 255
    # assure integer 8bit
    return output.astype('uint8')
#%% main
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--output_path', default="./test_output/", help='Optionally, save all generated outputs in specified folder')
parser.add_argument('--image', default=None, help='User can provide an image to run')
args = vars(parser.parse_args())
VAI_ALVEO_ROOT=os.environ["VAI_ALVEO_ROOT"]
if not os.path.isdir(args["output_path"]):
os.mkdir(args["output_path"])
# model configuration
model_def = 'xfdnn_deploy.prototxt'
model_weights = VAI_ALVEO_ROOT+'/examples/caffe/models/facades_BtoA/deploy.caffemodel'
net = caffe.Net(model_def, model_weights, caffe.TEST)
if args["image"]:
fn = args["image"]
# load image
image = plt.imread(fn)
## preprocessing
# add one dimension
batch_A = np.expand_dims(image,0)
# normalize [0,255] --> [-1,1]
batch_A1 = (batch_A / 127.5) - 1
# channel transpose NHWC to NCHW
batch_A2 = np.transpose(batch_A1,(0,3,1,2))
## net forward (feed into caffe network)
net.blobs['input_3'].data[...] = batch_A2
net.forward()
fake_B = net.blobs['activation_10'].data
## post processing
# normalize output [0,255]
fake_B1 = norm_image(np.transpose(fake_B[0,:,:,:],(1,2,0)))
# save the output image as file
filename = 'output_'+fn
io.imsave(args["output_path"]+filename,fake_B1)
print('output file is saved in '+args["output_path"])
else:
print('Please provide input image as "--image filename"' )
| [
"os.mkdir",
"argparse.ArgumentParser",
"skimage.io.imsave",
"os.path.isdir",
"numpy.transpose",
"numpy.expand_dims",
"numpy.min",
"numpy.max",
"caffe.Net",
"matplotlib.pyplot.imread"
] | [((1118, 1143), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1141, 1143), False, 'import argparse\n'), ((1721, 1768), 'caffe.Net', 'caffe.Net', (['model_def', 'model_weights', 'caffe.TEST'], {}), '(model_def, model_weights, caffe.TEST)\n', (1730, 1768), False, 'import caffe\n'), ((1469, 1503), 'os.path.isdir', 'os.path.isdir', (["args['output_path']"], {}), "(args['output_path'])\n", (1482, 1503), False, 'import os\n'), ((1513, 1542), 'os.mkdir', 'os.mkdir', (["args['output_path']"], {}), "(args['output_path'])\n", (1521, 1542), False, 'import os\n'), ((1862, 1876), 'matplotlib.pyplot.imread', 'plt.imread', (['fn'], {}), '(fn)\n', (1872, 1876), True, 'import matplotlib.pyplot as plt\n'), ((1949, 1973), 'numpy.expand_dims', 'np.expand_dims', (['image', '(0)'], {}), '(image, 0)\n', (1963, 1973), True, 'import numpy as np\n'), ((2115, 2151), 'numpy.transpose', 'np.transpose', (['batch_A1', '(0, 3, 1, 2)'], {}), '(batch_A1, (0, 3, 1, 2))\n', (2127, 2151), True, 'import numpy as np\n'), ((2530, 2580), 'skimage.io.imsave', 'io.imsave', (["(args['output_path'] + filename)", 'fake_B1'], {}), "(args['output_path'] + filename, fake_B1)\n", (2539, 2580), True, 'import skimage.io as io\n'), ((885, 896), 'numpy.min', 'np.min', (['IMG'], {}), '(IMG)\n', (891, 896), True, 'import numpy as np\n'), ((899, 910), 'numpy.max', 'np.max', (['IMG'], {}), '(IMG)\n', (905, 910), True, 'import numpy as np\n'), ((911, 922), 'numpy.min', 'np.min', (['IMG'], {}), '(IMG)\n', (917, 922), True, 'import numpy as np\n'), ((2411, 2454), 'numpy.transpose', 'np.transpose', (['fake_B[0, :, :, :]', '(1, 2, 0)'], {}), '(fake_B[0, :, :, :], (1, 2, 0))\n', (2423, 2454), True, 'import numpy as np\n')] |
# Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for spectrum augmenter layer."""
import lingvo.compat as tf
from lingvo.core import spectrum_augmenter
from lingvo.core import spectrum_augmenter_on_device
from lingvo.core import test_utils
import numpy as np
from six.moves import range
class SpectrumAugmenterTest(test_utils.TestCase):
  """Checks spectrum_augmenter against spectrum_augmenter_on_device.

  Most tests build one SpecAugment configuration, run it through both the
  reference SpectrumAugmenter layer and the on-device variant, and assert
  that the two outputs match. The remaining tests cover on-device-specific
  properties: stateless randomness, absence of unsupported graph ops, and
  the einsum replacement helpers.
  """

  def testSpectrumAugmenterWithTimeMask(self):
    """Time masking: both implementations must produce identical outputs."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(127)
      batch_size = 5
      inputs = tf.ones([batch_size, 20, 2, 2], dtype=tf.float32)
      # Per-row paddings with a decreasing amount of padding at the end.
      paddings = []
      for i in range(batch_size):
        paddings.append(
            tf.concat([tf.zeros([1, i + 12]),
                       tf.ones([1, 8 - i])], axis=1))
      paddings = tf.concat(paddings, axis=0)
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.freq_mask_max_bins = 0
        p.time_mask_max_frames = 5
        p.time_mask_count = 2
        p.time_mask_max_ratio = 1.0
        p.random_seed = 23456
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterDynamicSizeTimeMask(self):
    """Time masking with dynamic mask size: outputs must match."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(127)
      batch_size = 3
      inputs = tf.ones([batch_size, 20, 2, 2], dtype=tf.float32)
      paddings = []
      for i in range(batch_size):
        paddings.append(
            tf.concat([tf.zeros([1, 8 * i + 3]),
                       tf.ones([1, 17 - 8 * i])],
                      axis=1))
      paddings = tf.concat(paddings, axis=0)
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.freq_mask_max_bins = 0
        p.time_mask_max_ratio = 0.4
        p.time_mask_count = 1
        p.use_dynamic_time_mask_max_frames = True
        p.random_seed = 12345
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterDynamicMultiplicityTimeMask(self):
    """Time masking with length-dependent mask count: outputs must match."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(127)
      batch_size = 4
      inputs = tf.ones([batch_size, 22, 2, 2], dtype=tf.float32)
      paddings = []
      for i in range(batch_size):
        paddings.append(
            tf.concat([tf.zeros([1, 5 * i + 5]),
                       tf.ones([1, 16 - 5 * i])],
                      axis=1))
      paddings = tf.concat(paddings, axis=0)
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.freq_mask_max_bins = 0
        p.time_mask_max_frames = 5
        p.time_mask_count = 10
        p.time_masks_per_frame = 0.2
        p.random_seed = 67890
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterDynamicSizeAndMultiplicityTimeMask(self):
    """Dynamic mask size and mask count combined: outputs must match."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(127)
      batch_size = 4
      inputs = tf.ones([batch_size, 22, 2, 2], dtype=tf.float32)
      paddings = []
      for i in range(batch_size):
        paddings.append(
            tf.concat([tf.zeros([1, 5 * i + 5]),
                       tf.ones([1, 16 - 5 * i])],
                      axis=1))
      paddings = tf.concat(paddings, axis=0)
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.freq_mask_max_bins = 0
        p.time_mask_max_frames = 5
        p.time_mask_count = 10
        p.time_masks_per_frame = 0.2
        p.time_mask_max_ratio = 0.4
        p.use_dynamic_time_mask_max_frames = True
        p.random_seed = 67890
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterWithFrequencyMask(self):
    """Frequency masking: outputs must match."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(1234)
      inputs = tf.ones([3, 5, 10, 1], dtype=tf.float32)
      paddings = tf.zeros([3, 5])
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.freq_mask_max_bins = 6
        p.freq_mask_count = 2
        p.time_mask_max_frames = 0
        p.random_seed = 34567
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterWarpMatrixConstructor(self):
    """_ConstructWarpMatrix must behave identically in both layers."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      inputs = tf.broadcast_to(tf.cast(tf.range(10), dtype=tf.float32), (4, 10))
      origin = tf.cast([2, 4, 4, 5], dtype=tf.float32)
      destination = tf.cast([3, 2, 6, 8], dtype=tf.float32)
      choose_range = tf.cast([4, 8, 8, 10], dtype=tf.float32)
      outputs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        specaug_layer = p.Instantiate()
        warp_matrix = specaug_layer._ConstructWarpMatrix(
            batch_size=4,
            matrix_size=10,
            origin=origin,
            destination=destination,
            choose_range=choose_range,
            dtype=tf.float32)
        output = tf.einsum('bij,bj->bi', warp_matrix, inputs)
        outputs.append(output)
      layer_output, layer_output_on_device = self.evaluate(outputs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterWithTimeWarping(self):
    """Statically bounded time warping: outputs must match."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(1234)
      inputs = tf.broadcast_to(tf.cast(tf.range(10), dtype=tf.float32), (3, 10))
      inputs = tf.expand_dims(tf.expand_dims(inputs, -1), -1)
      paddings = []
      for i in range(3):
        paddings.append(
            tf.concat([tf.zeros([1, i + 7]),
                       tf.ones([1, 3 - i])], axis=1))
      paddings = tf.concat(paddings, axis=0)
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.freq_mask_max_bins = 0
        p.time_mask_max_frames = 0
        p.time_warp_max_frames = 8
        p.time_warp_max_ratio = 1.0
        p.time_warp_bound = 'static'
        p.random_seed = 34567
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterWithDynamicTimeWarping(self):
    """Dynamically bounded time warping: outputs must match."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(1234)
      inputs = tf.broadcast_to(tf.cast(tf.range(10), dtype=tf.float32), (3, 10))
      inputs = tf.expand_dims(tf.expand_dims(inputs, -1), -1)
      paddings = []
      for i in range(3):
        paddings.append(
            tf.concat([tf.zeros([1, 2 * i + 5]),
                       tf.ones([1, 5 - 2 * i])],
                      axis=1))
      paddings = tf.concat(paddings, axis=0)
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.freq_mask_max_bins = 0
        p.time_mask_max_frames = 0
        p.time_warp_max_ratio = 0.5
        p.time_warp_bound = 'dynamic'
        p.random_seed = 34567
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterUnstacking(self):
    """Masking of stacked (unstack=True) inputs: outputs must match."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(1234)
      inputs = tf.ones([3, 5, 10, 1], dtype=tf.float32)
      paddings = tf.zeros([3, 5])
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.unstack = True
        p.stack_height = 2
        p.freq_mask_max_bins = 5
        p.time_mask_max_frames = 8
        p.random_seed = 12345
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterWithPerDomainPolicyFreqMask(self):
    """Per-domain frequency-mask policy: outputs must match."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(1234)
      inputs = tf.ones([6, 5, 4, 2], dtype=tf.float32)
      input_domain_ids = tf.constant(
          [[1] * 5, [2] * 5, [0] * 5, [2] * 5, [0] * 5, [1] * 5],
          dtype=tf.float32)
      paddings = tf.zeros([3, 5])
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.domain_ids = [0, 1, 2]
        p.freq_mask_max_bins = [0, 3, 8]
        p.time_mask_max_frames = 0
        p.random_seed = 1234
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(
            inputs, paddings, domain_ids=input_domain_ids)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterNoisify(self):
    """Uniform-noise time masking (use_noise=True): outputs must match."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(127)
      batch_size = 2
      inputs = tf.ones([batch_size, 20, 2, 2], dtype=tf.float32)
      paddings = []
      for i in range(batch_size):
        paddings.append(
            tf.concat([tf.zeros([1, 8 * i + 3]),
                       tf.ones([1, 17 - 8 * i])],
                      axis=1))
      paddings = tf.concat(paddings, axis=0)
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.freq_mask_max_bins = 0
        p.time_mask_max_ratio = 0.4
        p.time_mask_count = 1
        p.use_dynamic_time_mask_max_frames = True
        p.use_noise = True
        p.gaussian_noise = False
        p.random_seed = 12345
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterGaussianNoisify(self):
    """Gaussian-noise time masking: outputs must match."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      tf.random.set_seed(127)
      batch_size = 2
      inputs = tf.ones([batch_size, 20, 2, 2], dtype=tf.float32)
      paddings = []
      for i in range(batch_size):
        paddings.append(
            tf.concat([tf.zeros([1, 8 * i + 3]),
                       tf.ones([1, 17 - 8 * i])],
                      axis=1))
      paddings = tf.concat(paddings, axis=0)
      hs = []
      for p in [
          spectrum_augmenter.SpectrumAugmenter.Params(),
          spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      ]:
        p.name = 'specAug_layers'
        p.freq_mask_max_bins = 0
        p.time_mask_max_ratio = 0.4
        p.time_mask_count = 1
        p.use_dynamic_time_mask_max_frames = True
        p.use_noise = True
        p.gaussian_noise = True
        p.random_seed = 12345
        specaug_layer = p.Instantiate()
        h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
        hs.append(h)
      layer_output, layer_output_on_device = self.evaluate(hs)
      self.assertAllClose(layer_output, layer_output_on_device)

  def testSpectrumAugmenterWithStatelessRandomOps(self):
    """Input-dependent seeding keeps the shape and varies output per input."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      batch_size = 5
      inputs1 = tf.random.uniform(
          shape=[batch_size, 20, 2, 2], minval=0, maxval=1, dtype=tf.float32)
      inputs2 = tf.random.uniform(
          shape=[batch_size, 20, 2, 2], minval=0, maxval=1, dtype=tf.float32)
      paddings = []
      for i in range(batch_size):
        paddings.append(
            tf.concat([tf.zeros([1, i + 12]),
                       tf.ones([1, 8 - i])], axis=1))
      paddings = tf.concat(paddings, axis=0)
      p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      p.name = 'specAug_layers'
      p.freq_mask_count = 1
      p.freq_mask_max_bins = 1
      p.time_mask_max_frames = 5
      p.time_mask_count = 2
      p.time_mask_max_ratio = 1.0
      p.use_input_dependent_random_seed = True
      specaug_layer = p.Instantiate()
      h1, _ = specaug_layer.FPropDefaultTheta(inputs1, paddings)
      h2, _ = specaug_layer.FPropDefaultTheta(inputs2, paddings)
      actual_layer_output1, actual_layer_output2 = self.evaluate([h1, h2])
      self.assertAllEqual(
          np.shape(actual_layer_output1), np.array([5, 20, 2, 2]))
      self.assertNotAllEqual(actual_layer_output1, actual_layer_output2)

  def testGraphContainsOnDeviceOps(self):
    """Checks that einsum and stateful random ops are not used on-device."""
    model_graph = tf.Graph()
    with model_graph.as_default():
      batch_size = 5
      inputs = tf.random.stateless_uniform(
          shape=[batch_size, 20, 2, 2],
          minval=0,
          maxval=1,
          seed=tf.constant([123, 123]),
          dtype=tf.float32)
      paddings = []
      for i in range(batch_size):
        paddings.append(
            tf.concat([tf.zeros([1, i + 12]),
                       tf.ones([1, 8 - i])], axis=1))
      paddings = tf.concat(paddings, axis=0)
      p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      p.name = 'specAug_layers'
      p.freq_mask_count = 1
      p.freq_mask_max_bins = 1
      p.time_mask_max_frames = 5
      p.time_mask_count = 2
      p.use_noise = True
      p.gaussian_noise = True
      p.time_mask_max_ratio = 1.0
      p.use_input_dependent_random_seed = True
      specaug_layer = p.Instantiate()
      _, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
    # A list of ops that are not compatible with on-device training.
    unsupported_on_device_nodes = [
        'RandomUniform', 'RandomStandardNormal', 'Einsum'
    ]
    for node in model_graph.as_graph_def().node:
      self.assertNotIn(node.op, unsupported_on_device_nodes)

  def testEinsumReplacementBBmBm(self):
    """EinsumBBmBm must match tf.einsum('b,bm->bm')."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      a = tf.random.uniform(shape=[20], minval=0, maxval=1, dtype=tf.float32)
      b = tf.random.uniform(
          shape=[20, 10], minval=0, maxval=1, dtype=tf.float32)
      einsum = tf.einsum('b,bm->bm', a, b)
      p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      p.name = 'specAug_layers'
      specaug_layer = p.Instantiate()
      replacement = specaug_layer.EinsumBBmBm(a, b)
      einsum, replacement = self.evaluate([einsum, replacement])
      self.assertAllClose(einsum, replacement)

  def testEinsumReplacementBxycByBxyc(self):
    """EinsumBxycByBxyc must match tf.einsum('bxyc,by->bxyc')."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      a = tf.random.uniform(
          shape=[20, 5, 7, 4], minval=0, maxval=1, dtype=tf.float32)
      b = tf.random.uniform(shape=[20, 7], minval=0, maxval=1, dtype=tf.float32)
      einsum = tf.einsum('bxyc,by->bxyc', a, b)
      p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      p.name = 'specAug_layers'
      specaug_layer = p.Instantiate()
      replacement = specaug_layer.EinsumBxycByBxyc(a, b)
      einsum, replacement = self.evaluate([einsum, replacement])
      self.assertAllClose(einsum, replacement)

  def testEinsumReplacementBxycBxBxyc(self):
    """EinsumBxycBxBxyc must match tf.einsum('bxyc,bx->bxyc')."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      a = tf.random.uniform(
          shape=[20, 5, 7, 4], minval=0, maxval=1, dtype=tf.float32)
      b = tf.random.uniform(shape=[20, 5], minval=0, maxval=1, dtype=tf.float32)
      einsum = tf.einsum('bxyc,bx->bxyc', a, b)
      p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      p.name = 'specAug_layers'
      specaug_layer = p.Instantiate()
      replacement = specaug_layer.EinsumBxycBxBxyc(a, b)
      einsum, replacement = self.evaluate([einsum, replacement])
      self.assertAllClose(einsum, replacement)

  def testEinsumReplacementBxyBxBxy(self):
    """EinsumBxyBxBxy must match tf.einsum('bxy,bx->bxy')."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      a = tf.random.uniform(
          shape=[20, 7, 4], minval=0, maxval=1, dtype=tf.float32)
      b = tf.random.uniform(shape=[20, 7], minval=0, maxval=1, dtype=tf.float32)
      einsum = tf.einsum('bxy,bx->bxy', a, b)
      p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      p.name = 'specAug_layers'
      specaug_layer = p.Instantiate()
      replacement = specaug_layer.EinsumBxyBxBxy(a, b)
      einsum, replacement = self.evaluate([einsum, replacement])
      self.assertAllClose(einsum, replacement)

  def testEinsumReplacementBxycBzxBzyc(self):
    """EinsumBxycBzxBzyc must match tf.einsum('bxyc,bzx->bzyc')."""
    with self.session(use_gpu=False, graph=tf.Graph()):
      a = tf.random.uniform(
          shape=[20, 7, 4, 3], minval=0, maxval=1, dtype=tf.float32)
      b = tf.random.uniform(
          shape=[20, 5, 7], minval=0, maxval=1, dtype=tf.float32)
      einsum = tf.einsum('bxyc,bzx->bzyc', a, b)
      p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
      p.name = 'specAug_layers'
      specaug_layer = p.Instantiate()
      replacement = specaug_layer.EinsumBxycBzxBzyc(a, b)
      einsum, replacement = self.evaluate([einsum, replacement])
      self.assertAllClose(einsum, replacement)
if __name__ == '__main__':
  # Run all test cases in this module via the TensorFlow test runner.
  tf.test.main()
| [
"lingvo.compat.test.main",
"six.moves.range",
"lingvo.compat.einsum",
"lingvo.compat.Graph",
"lingvo.compat.concat",
"lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params",
"lingvo.compat.range",
"lingvo.compat.cast",
"lingvo.compat.expand_dims",
"numpy.shape",
"lingvo.core.spectrum_augmenter... | [((19812, 19826), 'lingvo.compat.test.main', 'tf.test.main', ([], {}), '()\n', (19824, 19826), True, 'import lingvo.compat as tf\n'), ((15361, 15371), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (15369, 15371), True, 'import lingvo.compat as tf\n'), ((1119, 1142), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(127)'], {}), '(127)\n', (1137, 1142), True, 'import lingvo.compat as tf\n'), ((1179, 1228), 'lingvo.compat.ones', 'tf.ones', (['[batch_size, 20, 2, 2]'], {'dtype': 'tf.float32'}), '([batch_size, 20, 2, 2], dtype=tf.float32)\n', (1186, 1228), True, 'import lingvo.compat as tf\n'), ((1264, 1281), 'six.moves.range', 'range', (['batch_size'], {}), '(batch_size)\n', (1269, 1281), False, 'from six.moves import range\n'), ((1425, 1452), 'lingvo.compat.concat', 'tf.concat', (['paddings'], {'axis': '(0)'}), '(paddings, axis=0)\n', (1434, 1452), True, 'import lingvo.compat as tf\n'), ((2193, 2216), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(127)'], {}), '(127)\n', (2211, 2216), True, 'import lingvo.compat as tf\n'), ((2253, 2302), 'lingvo.compat.ones', 'tf.ones', (['[batch_size, 20, 2, 2]'], {'dtype': 'tf.float32'}), '([batch_size, 20, 2, 2], dtype=tf.float32)\n', (2260, 2302), True, 'import lingvo.compat as tf\n'), ((2338, 2355), 'six.moves.range', 'range', (['batch_size'], {}), '(batch_size)\n', (2343, 2355), False, 'from six.moves import range\n'), ((2529, 2556), 'lingvo.compat.concat', 'tf.concat', (['paddings'], {'axis': '(0)'}), '(paddings, axis=0)\n', (2538, 2556), True, 'import lingvo.compat as tf\n'), ((3319, 3342), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(127)'], {}), '(127)\n', (3337, 3342), True, 'import lingvo.compat as tf\n'), ((3379, 3428), 'lingvo.compat.ones', 'tf.ones', (['[batch_size, 22, 2, 2]'], {'dtype': 'tf.float32'}), '([batch_size, 22, 2, 2], dtype=tf.float32)\n', (3386, 3428), True, 'import lingvo.compat as tf\n'), ((3464, 3481), 'six.moves.range', 
'range', (['batch_size'], {}), '(batch_size)\n', (3469, 3481), False, 'from six.moves import range\n'), ((3655, 3682), 'lingvo.compat.concat', 'tf.concat', (['paddings'], {'axis': '(0)'}), '(paddings, axis=0)\n', (3664, 3682), True, 'import lingvo.compat as tf\n'), ((4439, 4462), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(127)'], {}), '(127)\n', (4457, 4462), True, 'import lingvo.compat as tf\n'), ((4499, 4548), 'lingvo.compat.ones', 'tf.ones', (['[batch_size, 22, 2, 2]'], {'dtype': 'tf.float32'}), '([batch_size, 22, 2, 2], dtype=tf.float32)\n', (4506, 4548), True, 'import lingvo.compat as tf\n'), ((4584, 4601), 'six.moves.range', 'range', (['batch_size'], {}), '(batch_size)\n', (4589, 4601), False, 'from six.moves import range\n'), ((4775, 4802), 'lingvo.compat.concat', 'tf.concat', (['paddings'], {'axis': '(0)'}), '(paddings, axis=0)\n', (4784, 4802), True, 'import lingvo.compat as tf\n'), ((5628, 5652), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(1234)'], {}), '(1234)\n', (5646, 5652), True, 'import lingvo.compat as tf\n'), ((5668, 5708), 'lingvo.compat.ones', 'tf.ones', (['[3, 5, 10, 1]'], {'dtype': 'tf.float32'}), '([3, 5, 10, 1], dtype=tf.float32)\n', (5675, 5708), True, 'import lingvo.compat as tf\n'), ((5726, 5742), 'lingvo.compat.zeros', 'tf.zeros', (['[3, 5]'], {}), '([3, 5])\n', (5734, 5742), True, 'import lingvo.compat as tf\n'), ((6538, 6577), 'lingvo.compat.cast', 'tf.cast', (['[2, 4, 4, 5]'], {'dtype': 'tf.float32'}), '([2, 4, 4, 5], dtype=tf.float32)\n', (6545, 6577), True, 'import lingvo.compat as tf\n'), ((6598, 6637), 'lingvo.compat.cast', 'tf.cast', (['[3, 2, 6, 8]'], {'dtype': 'tf.float32'}), '([3, 2, 6, 8], dtype=tf.float32)\n', (6605, 6637), True, 'import lingvo.compat as tf\n'), ((6659, 6699), 'lingvo.compat.cast', 'tf.cast', (['[4, 8, 8, 10]'], {'dtype': 'tf.float32'}), '([4, 8, 8, 10], dtype=tf.float32)\n', (6666, 6699), True, 'import lingvo.compat as tf\n'), ((7533, 7557), 'lingvo.compat.random.set_seed', 
'tf.random.set_seed', (['(1234)'], {}), '(1234)\n', (7551, 7557), True, 'import lingvo.compat as tf\n'), ((7736, 7744), 'six.moves.range', 'range', (['(3)'], {}), '(3)\n', (7741, 7744), False, 'from six.moves import range\n'), ((7887, 7914), 'lingvo.compat.concat', 'tf.concat', (['paddings'], {'axis': '(0)'}), '(paddings, axis=0)\n', (7896, 7914), True, 'import lingvo.compat as tf\n'), ((8699, 8723), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(1234)'], {}), '(1234)\n', (8717, 8723), True, 'import lingvo.compat as tf\n'), ((8902, 8910), 'six.moves.range', 'range', (['(3)'], {}), '(3)\n', (8907, 8910), False, 'from six.moves import range\n'), ((9083, 9110), 'lingvo.compat.concat', 'tf.concat', (['paddings'], {'axis': '(0)'}), '(paddings, axis=0)\n', (9092, 9110), True, 'import lingvo.compat as tf\n'), ((9849, 9873), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(1234)'], {}), '(1234)\n', (9867, 9873), True, 'import lingvo.compat as tf\n'), ((9889, 9929), 'lingvo.compat.ones', 'tf.ones', (['[3, 5, 10, 1]'], {'dtype': 'tf.float32'}), '([3, 5, 10, 1], dtype=tf.float32)\n', (9896, 9929), True, 'import lingvo.compat as tf\n'), ((9947, 9963), 'lingvo.compat.zeros', 'tf.zeros', (['[3, 5]'], {}), '([3, 5])\n', (9955, 9963), True, 'import lingvo.compat as tf\n'), ((10697, 10721), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(1234)'], {}), '(1234)\n', (10715, 10721), True, 'import lingvo.compat as tf\n'), ((10737, 10776), 'lingvo.compat.ones', 'tf.ones', (['[6, 5, 4, 2]'], {'dtype': 'tf.float32'}), '([6, 5, 4, 2], dtype=tf.float32)\n', (10744, 10776), True, 'import lingvo.compat as tf\n'), ((10802, 10892), 'lingvo.compat.constant', 'tf.constant', (['[[1] * 5, [2] * 5, [0] * 5, [2] * 5, [0] * 5, [1] * 5]'], {'dtype': 'tf.float32'}), '([[1] * 5, [2] * 5, [0] * 5, [2] * 5, [0] * 5, [1] * 5], dtype=\n tf.float32)\n', (10813, 10892), True, 'import lingvo.compat as tf\n'), ((10926, 10942), 'lingvo.compat.zeros', 'tf.zeros', (['[3, 5]'], {}), 
'([3, 5])\n', (10934, 10942), True, 'import lingvo.compat as tf\n'), ((11686, 11709), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(127)'], {}), '(127)\n', (11704, 11709), True, 'import lingvo.compat as tf\n'), ((11746, 11795), 'lingvo.compat.ones', 'tf.ones', (['[batch_size, 20, 2, 2]'], {'dtype': 'tf.float32'}), '([batch_size, 20, 2, 2], dtype=tf.float32)\n', (11753, 11795), True, 'import lingvo.compat as tf\n'), ((11831, 11848), 'six.moves.range', 'range', (['batch_size'], {}), '(batch_size)\n', (11836, 11848), False, 'from six.moves import range\n'), ((12022, 12049), 'lingvo.compat.concat', 'tf.concat', (['paddings'], {'axis': '(0)'}), '(paddings, axis=0)\n', (12031, 12049), True, 'import lingvo.compat as tf\n'), ((12860, 12883), 'lingvo.compat.random.set_seed', 'tf.random.set_seed', (['(127)'], {}), '(127)\n', (12878, 12883), True, 'import lingvo.compat as tf\n'), ((12920, 12969), 'lingvo.compat.ones', 'tf.ones', (['[batch_size, 20, 2, 2]'], {'dtype': 'tf.float32'}), '([batch_size, 20, 2, 2], dtype=tf.float32)\n', (12927, 12969), True, 'import lingvo.compat as tf\n'), ((13005, 13022), 'six.moves.range', 'range', (['batch_size'], {}), '(batch_size)\n', (13010, 13022), False, 'from six.moves import range\n'), ((13196, 13223), 'lingvo.compat.concat', 'tf.concat', (['paddings'], {'axis': '(0)'}), '(paddings, axis=0)\n', (13205, 13223), True, 'import lingvo.compat as tf\n'), ((14071, 14161), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[batch_size, 20, 2, 2]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[batch_size, 20, 2, 2], minval=0, maxval=1, dtype=\n tf.float32)\n', (14088, 14161), True, 'import lingvo.compat as tf\n'), ((14184, 14274), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[batch_size, 20, 2, 2]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[batch_size, 20, 2, 2], minval=0, maxval=1, dtype=\n tf.float32)\n', (14201, 14274), True, 'import 
lingvo.compat as tf\n'), ((14316, 14333), 'six.moves.range', 'range', (['batch_size'], {}), '(batch_size)\n', (14321, 14333), False, 'from six.moves import range\n'), ((14477, 14504), 'lingvo.compat.concat', 'tf.concat', (['paddings'], {'axis': '(0)'}), '(paddings, axis=0)\n', (14486, 14504), True, 'import lingvo.compat as tf\n'), ((14516, 14579), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (14577, 14579), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((15655, 15672), 'six.moves.range', 'range', (['batch_size'], {}), '(batch_size)\n', (15660, 15672), False, 'from six.moves import range\n'), ((15816, 15843), 'lingvo.compat.concat', 'tf.concat', (['paddings'], {'axis': '(0)'}), '(paddings, axis=0)\n', (15825, 15843), True, 'import lingvo.compat as tf\n'), ((15854, 15917), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (15915, 15917), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((16693, 16760), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[20]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[20], minval=0, maxval=1, dtype=tf.float32)\n', (16710, 16760), True, 'import lingvo.compat as tf\n'), ((16771, 16842), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[20, 10]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[20, 10], minval=0, maxval=1, dtype=tf.float32)\n', (16788, 16842), True, 'import lingvo.compat as tf\n'), ((16869, 16896), 'lingvo.compat.einsum', 'tf.einsum', (['"""b,bm->bm"""', 'a', 'b'], {}), "('b,bm->bm', a, b)\n", (16878, 16896), True, 'import lingvo.compat as tf\n'), ((16907, 16970), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 
'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (16968, 16970), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((17317, 17393), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[20, 5, 7, 4]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[20, 5, 7, 4], minval=0, maxval=1, dtype=tf.float32)\n', (17334, 17393), True, 'import lingvo.compat as tf\n'), ((17415, 17485), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[20, 7]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[20, 7], minval=0, maxval=1, dtype=tf.float32)\n', (17432, 17485), True, 'import lingvo.compat as tf\n'), ((17501, 17533), 'lingvo.compat.einsum', 'tf.einsum', (['"""bxyc,by->bxyc"""', 'a', 'b'], {}), "('bxyc,by->bxyc', a, b)\n", (17510, 17533), True, 'import lingvo.compat as tf\n'), ((17544, 17607), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (17605, 17607), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((17959, 18035), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[20, 5, 7, 4]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[20, 5, 7, 4], minval=0, maxval=1, dtype=tf.float32)\n', (17976, 18035), True, 'import lingvo.compat as tf\n'), ((18057, 18127), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[20, 5]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[20, 5], minval=0, maxval=1, dtype=tf.float32)\n', (18074, 18127), True, 'import lingvo.compat as tf\n'), ((18143, 18175), 'lingvo.compat.einsum', 'tf.einsum', (['"""bxyc,bx->bxyc"""', 'a', 'b'], {}), "('bxyc,bx->bxyc', a, b)\n", (18152, 18175), True, 'import lingvo.compat as tf\n'), ((18186, 18249), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 
'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (18247, 18249), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((18599, 18672), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[20, 7, 4]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[20, 7, 4], minval=0, maxval=1, dtype=tf.float32)\n', (18616, 18672), True, 'import lingvo.compat as tf\n'), ((18694, 18764), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[20, 7]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[20, 7], minval=0, maxval=1, dtype=tf.float32)\n', (18711, 18764), True, 'import lingvo.compat as tf\n'), ((18780, 18810), 'lingvo.compat.einsum', 'tf.einsum', (['"""bxy,bx->bxy"""', 'a', 'b'], {}), "('bxy,bx->bxy', a, b)\n", (18789, 18810), True, 'import lingvo.compat as tf\n'), ((18821, 18884), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (18882, 18884), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((19235, 19311), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[20, 7, 4, 3]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[20, 7, 4, 3], minval=0, maxval=1, dtype=tf.float32)\n', (19252, 19311), True, 'import lingvo.compat as tf\n'), ((19333, 19406), 'lingvo.compat.random.uniform', 'tf.random.uniform', ([], {'shape': '[20, 5, 7]', 'minval': '(0)', 'maxval': '(1)', 'dtype': 'tf.float32'}), '(shape=[20, 5, 7], minval=0, maxval=1, dtype=tf.float32)\n', (19350, 19406), True, 'import lingvo.compat as tf\n'), ((19433, 19466), 'lingvo.compat.einsum', 'tf.einsum', (['"""bxyc,bzx->bzyc"""', 'a', 'b'], {}), "('bxyc,bzx->bzyc', a, b)\n", (19442, 19466), True, 'import lingvo.compat as tf\n'), ((19477, 19540), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 
'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (19538, 19540), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((1494, 1539), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (1537, 1539), False, 'from lingvo.core import spectrum_augmenter\n'), ((1551, 1614), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (1612, 1614), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((2598, 2643), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (2641, 2643), False, 'from lingvo.core import spectrum_augmenter\n'), ((2655, 2718), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (2716, 2718), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((3724, 3769), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (3767, 3769), False, 'from lingvo.core import spectrum_augmenter\n'), ((3781, 3844), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (3842, 3844), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((4844, 4889), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (4887, 4889), False, 'from lingvo.core import spectrum_augmenter\n'), ((4901, 4964), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (4962, 4964), False, 'from lingvo.core import 
spectrum_augmenter_on_device\n'), ((5784, 5829), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (5827, 5829), False, 'from lingvo.core import spectrum_augmenter\n'), ((5841, 5904), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (5902, 5904), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((6746, 6791), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (6789, 6791), False, 'from lingvo.core import spectrum_augmenter\n'), ((6803, 6866), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (6864, 6866), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((7212, 7256), 'lingvo.compat.einsum', 'tf.einsum', (['"""bij,bj->bi"""', 'warp_matrix', 'inputs'], {}), "('bij,bj->bi', warp_matrix, inputs)\n", (7221, 7256), True, 'import lingvo.compat as tf\n'), ((7669, 7695), 'lingvo.compat.expand_dims', 'tf.expand_dims', (['inputs', '(-1)'], {}), '(inputs, -1)\n', (7683, 7695), True, 'import lingvo.compat as tf\n'), ((7956, 8001), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (7999, 8001), False, 'from lingvo.core import spectrum_augmenter\n'), ((8013, 8076), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (8074, 8076), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((8835, 8861), 'lingvo.compat.expand_dims', 'tf.expand_dims', (['inputs', '(-1)'], {}), '(inputs, -1)\n', (8849, 8861), True, 'import lingvo.compat as tf\n'), ((9152, 9197), 
'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (9195, 9197), False, 'from lingvo.core import spectrum_augmenter\n'), ((9209, 9272), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (9270, 9272), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((10005, 10050), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (10048, 10050), False, 'from lingvo.core import spectrum_augmenter\n'), ((10062, 10125), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (10123, 10125), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((10984, 11029), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (11027, 11029), False, 'from lingvo.core import spectrum_augmenter\n'), ((11041, 11104), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (11102, 11104), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((12091, 12136), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), '()\n', (12134, 12136), False, 'from lingvo.core import spectrum_augmenter\n'), ((12148, 12211), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (12209, 12211), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((13265, 13310), 'lingvo.core.spectrum_augmenter.SpectrumAugmenter.Params', 'spectrum_augmenter.SpectrumAugmenter.Params', ([], {}), 
'()\n', (13308, 13310), False, 'from lingvo.core import spectrum_augmenter\n'), ((13322, 13385), 'lingvo.core.spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', 'spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params', ([], {}), '()\n', (13383, 13385), False, 'from lingvo.core import spectrum_augmenter_on_device\n'), ((15093, 15123), 'numpy.shape', 'np.shape', (['actual_layer_output1'], {}), '(actual_layer_output1)\n', (15101, 15123), True, 'import numpy as np\n'), ((15125, 15148), 'numpy.array', 'np.array', (['[5, 20, 2, 2]'], {}), '([5, 20, 2, 2])\n', (15133, 15148), True, 'import numpy as np\n'), ((1100, 1110), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (1108, 1110), True, 'import lingvo.compat as tf\n'), ((2174, 2184), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (2182, 2184), True, 'import lingvo.compat as tf\n'), ((3300, 3310), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (3308, 3310), True, 'import lingvo.compat as tf\n'), ((4420, 4430), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (4428, 4430), True, 'import lingvo.compat as tf\n'), ((5609, 5619), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (5617, 5619), True, 'import lingvo.compat as tf\n'), ((6429, 6439), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (6437, 6439), True, 'import lingvo.compat as tf\n'), ((6481, 6493), 'lingvo.compat.range', 'tf.range', (['(10)'], {}), '(10)\n', (6489, 6493), True, 'import lingvo.compat as tf\n'), ((7514, 7524), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (7522, 7524), True, 'import lingvo.compat as tf\n'), ((7597, 7609), 'lingvo.compat.range', 'tf.range', (['(10)'], {}), '(10)\n', (7605, 7609), True, 'import lingvo.compat as tf\n'), ((8680, 8690), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (8688, 8690), True, 'import lingvo.compat as tf\n'), ((8763, 8775), 'lingvo.compat.range', 'tf.range', (['(10)'], {}), '(10)\n', (8771, 8775), True, 'import lingvo.compat as tf\n'), ((9830, 9840), 
'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (9838, 9840), True, 'import lingvo.compat as tf\n'), ((10678, 10688), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (10686, 10688), True, 'import lingvo.compat as tf\n'), ((11667, 11677), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (11675, 11677), True, 'import lingvo.compat as tf\n'), ((12841, 12851), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (12849, 12851), True, 'import lingvo.compat as tf\n'), ((14021, 14031), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (14029, 14031), True, 'import lingvo.compat as tf\n'), ((15567, 15590), 'lingvo.compat.constant', 'tf.constant', (['[123, 123]'], {}), '([123, 123])\n', (15578, 15590), True, 'import lingvo.compat as tf\n'), ((16670, 16680), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (16678, 16680), True, 'import lingvo.compat as tf\n'), ((17294, 17304), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (17302, 17304), True, 'import lingvo.compat as tf\n'), ((17936, 17946), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (17944, 17946), True, 'import lingvo.compat as tf\n'), ((18576, 18586), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (18584, 18586), True, 'import lingvo.compat as tf\n'), ((19212, 19222), 'lingvo.compat.Graph', 'tf.Graph', ([], {}), '()\n', (19220, 19222), True, 'import lingvo.compat as tf\n'), ((1331, 1352), 'lingvo.compat.zeros', 'tf.zeros', (['[1, i + 12]'], {}), '([1, i + 12])\n', (1339, 1352), True, 'import lingvo.compat as tf\n'), ((1377, 1396), 'lingvo.compat.ones', 'tf.ones', (['[1, 8 - i]'], {}), '([1, 8 - i])\n', (1384, 1396), True, 'import lingvo.compat as tf\n'), ((2405, 2429), 'lingvo.compat.zeros', 'tf.zeros', (['[1, 8 * i + 3]'], {}), '([1, 8 * i + 3])\n', (2413, 2429), True, 'import lingvo.compat as tf\n'), ((2454, 2478), 'lingvo.compat.ones', 'tf.ones', (['[1, 17 - 8 * i]'], {}), '([1, 17 - 8 * i])\n', (2461, 2478), True, 'import lingvo.compat as tf\n'), ((3531, 3555), 
'lingvo.compat.zeros', 'tf.zeros', (['[1, 5 * i + 5]'], {}), '([1, 5 * i + 5])\n', (3539, 3555), True, 'import lingvo.compat as tf\n'), ((3580, 3604), 'lingvo.compat.ones', 'tf.ones', (['[1, 16 - 5 * i]'], {}), '([1, 16 - 5 * i])\n', (3587, 3604), True, 'import lingvo.compat as tf\n'), ((4651, 4675), 'lingvo.compat.zeros', 'tf.zeros', (['[1, 5 * i + 5]'], {}), '([1, 5 * i + 5])\n', (4659, 4675), True, 'import lingvo.compat as tf\n'), ((4700, 4724), 'lingvo.compat.ones', 'tf.ones', (['[1, 16 - 5 * i]'], {}), '([1, 16 - 5 * i])\n', (4707, 4724), True, 'import lingvo.compat as tf\n'), ((7794, 7814), 'lingvo.compat.zeros', 'tf.zeros', (['[1, i + 7]'], {}), '([1, i + 7])\n', (7802, 7814), True, 'import lingvo.compat as tf\n'), ((7839, 7858), 'lingvo.compat.ones', 'tf.ones', (['[1, 3 - i]'], {}), '([1, 3 - i])\n', (7846, 7858), True, 'import lingvo.compat as tf\n'), ((8960, 8984), 'lingvo.compat.zeros', 'tf.zeros', (['[1, 2 * i + 5]'], {}), '([1, 2 * i + 5])\n', (8968, 8984), True, 'import lingvo.compat as tf\n'), ((9009, 9032), 'lingvo.compat.ones', 'tf.ones', (['[1, 5 - 2 * i]'], {}), '([1, 5 - 2 * i])\n', (9016, 9032), True, 'import lingvo.compat as tf\n'), ((11898, 11922), 'lingvo.compat.zeros', 'tf.zeros', (['[1, 8 * i + 3]'], {}), '([1, 8 * i + 3])\n', (11906, 11922), True, 'import lingvo.compat as tf\n'), ((11947, 11971), 'lingvo.compat.ones', 'tf.ones', (['[1, 17 - 8 * i]'], {}), '([1, 17 - 8 * i])\n', (11954, 11971), True, 'import lingvo.compat as tf\n'), ((13072, 13096), 'lingvo.compat.zeros', 'tf.zeros', (['[1, 8 * i + 3]'], {}), '([1, 8 * i + 3])\n', (13080, 13096), True, 'import lingvo.compat as tf\n'), ((13121, 13145), 'lingvo.compat.ones', 'tf.ones', (['[1, 17 - 8 * i]'], {}), '([1, 17 - 8 * i])\n', (13128, 13145), True, 'import lingvo.compat as tf\n'), ((14383, 14404), 'lingvo.compat.zeros', 'tf.zeros', (['[1, i + 12]'], {}), '([1, i + 12])\n', (14391, 14404), True, 'import lingvo.compat as tf\n'), ((14429, 14448), 'lingvo.compat.ones', 'tf.ones', (['[1, 
8 - i]'], {}), '([1, 8 - i])\n', (14436, 14448), True, 'import lingvo.compat as tf\n'), ((15722, 15743), 'lingvo.compat.zeros', 'tf.zeros', (['[1, i + 12]'], {}), '([1, i + 12])\n', (15730, 15743), True, 'import lingvo.compat as tf\n'), ((15768, 15787), 'lingvo.compat.ones', 'tf.ones', (['[1, 8 - i]'], {}), '([1, 8 - i])\n', (15775, 15787), True, 'import lingvo.compat as tf\n')] |
import numpy as np
from numpy.testing import assert_array_equal
from numpy.random import SeedSequence
def test_reference_data():
    """Verify SeedSequence output matches the C++ reference implementation.

    Reference: https://gist.github.com/imneme/540829265469e673d045
    """
    seeds = [
        [3735928559, 195939070, 229505742, 305419896],
        [3668361503, 4165561550, 1661411377, 3634257570],
        [164546577, 4166754639, 1765190214, 1303880213],
        [446610472, 3941463886, 522937693, 1882353782],
        [1864922766, 1719732118, 3882010307, 1776744564],
        [4141682960, 3310988675, 553637289, 902896340],
        [1134851934, 2352871630, 3699409824, 2648159817],
        [1240956131, 3107113773, 1283198141, 1924506131],
        [2669565031, 579818610, 3042504477, 2774880435],
        [2766103236, 2883057919, 4029656435, 862374500],
    ]
    expected32 = [
        [3914649087, 576849849, 3593928901, 2229911004],
        [2240804226, 3691353228, 1365957195, 2654016646],
        [3562296087, 3191708229, 1147942216, 3726991905],
        [1403443605, 3591372999, 1291086759, 441919183],
        [1086200464, 2191331643, 560336446, 3658716651],
        [3249937430, 2346751812, 847844327, 2996632307],
        [2584285912, 4034195531, 3523502488, 169742686],
        [959045797, 3875435559, 1886309314, 359682705],
        [3978441347, 432478529, 3223635119, 138903045],
        [296367413, 4262059219, 13109864, 3283683422],
    ]
    expected64 = [
        [2477551240072187391, 9577394838764454085],
        [15854241394484835714, 11398914698975566411],
        [13708282465491374871, 16007308345579681096],
        [15424829579845884309, 1898028439751125927],
        [9411697742461147792, 15714068361935982142],
        [10079222287618677782, 12870437757549876199],
        [17326737873898640088, 729039288628699544],
        [16644868984619524261, 1544825456798124994],
        [1857481142255628931, 596584038813451439],
        [18305404959516669237, 14103312907920476776],
    ]
    for entropy, want32, want64 in zip(seeds, expected32, expected64):
        seq = SeedSequence(entropy)
        # 32-bit words are the default output dtype.
        got32 = seq.generate_state(len(want32))
        assert_array_equal(got32, np.array(want32, dtype=np.uint32))
        # The same sequence must also reproduce the 64-bit reference words.
        got64 = seq.generate_state(len(want64), dtype=np.uint64)
        assert_array_equal(got64, want64)
| [
"numpy.testing.assert_array_equal",
"numpy.random.SeedSequence",
"numpy.array"
] | [((2100, 2135), 'numpy.array', 'np.array', (['expected'], {'dtype': 'np.uint32'}), '(expected, dtype=np.uint32)\n', (2108, 2135), True, 'import numpy as np\n'), ((2149, 2167), 'numpy.random.SeedSequence', 'SeedSequence', (['seed'], {}), '(seed)\n', (2161, 2167), False, 'from numpy.random import SeedSequence\n'), ((2225, 2260), 'numpy.testing.assert_array_equal', 'assert_array_equal', (['state', 'expected'], {}), '(state, expected)\n', (2243, 2260), False, 'from numpy.testing import assert_array_equal\n'), ((2339, 2378), 'numpy.testing.assert_array_equal', 'assert_array_equal', (['state64', 'expected64'], {}), '(state64, expected64)\n', (2357, 2378), False, 'from numpy.testing import assert_array_equal\n')] |
# -*- coding: utf-8 -*-
#
# Copyright 2018-2020 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import networkx as nx
import numpy as np
import tensorflow as tf
from stellargraph import StellarGraph
from stellargraph.layer import (
GraphSAGE,
GCN,
GAT,
HinSAGE,
link_classification,
link_regression,
)
from stellargraph.mapper import (
GraphSAGENodeGenerator,
FullBatchNodeGenerator,
HinSAGENodeGenerator,
GraphSAGELinkGenerator,
HinSAGELinkGenerator,
)
from stellargraph.utils import Ensemble, BaggingEnsemble
from tensorflow.keras import layers, Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.losses import categorical_crossentropy, binary_crossentropy
# FIXME (#535): Consider using graph fixtures
def example_graph_1(feature_size=None):
    """Build a small six-node example StellarGraph.

    Parameters
    ----------
    feature_size : int, optional
        When given, every node is assigned an all-ones feature vector of
        this length and the resulting graph carries node features.

    Returns
    -------
    StellarGraph
        The example graph, with or without node features.
    """
    nodes = [1, 2, 3, 4, 5, 6]
    edges = [(1, 2), (2, 3), (1, 4), (3, 2), (5, 6), (1, 5)]

    g = nx.Graph()
    g.add_nodes_from(nodes, label="default")
    g.add_edges_from(edges, label="default")

    if feature_size is None:
        return StellarGraph(g)

    # Attach an identical all-ones feature vector to every node.
    for node in g.nodes():
        g.nodes[node]["feature"] = np.ones(feature_size)
    return StellarGraph(g, node_features="feature")
def create_graphSAGE_model(graph, link_prediction=False):
    """Build a GraphSAGE base model plus a Keras model ready for compiling.

    Parameters
    ----------
    graph : StellarGraph
        The graph the data generators sample from.
    link_prediction : bool
        If True, build a link-prediction model (paired source/destination
        node inputs feeding a link classification head); otherwise build a
        2-class node classifier.

    Returns
    -------
    tuple
        (base_model, keras_model, generator, train_gen)
    """
    if link_prediction:
        # We are going to train on the original graph
        generator = GraphSAGELinkGenerator(graph, batch_size=2, num_samples=[2, 2])
        edge_ids_train = np.array([[1, 2], [2, 3], [1, 3]])
        train_gen = generator.flow(edge_ids_train, np.array([1, 1, 0]))
    else:
        generator = GraphSAGENodeGenerator(graph, batch_size=2, num_samples=[2, 2])
        train_gen = generator.flow([1, 2], np.array([[1, 0], [0, 1]]))

    base_model = GraphSAGE(
        layer_sizes=[8, 8], generator=generator, bias=True, dropout=0.5
    )

    if link_prediction:
        # Expose input and output sockets of graphsage, for source and destination nodes:
        x_inp_src, x_out_src = base_model.node_model()
        x_inp_dst, x_out_dst = base_model.node_model()

        # re-pack into a list where (source, destination) inputs alternate, for link inputs:
        x_inp = [x for ab in zip(x_inp_src, x_inp_dst) for x in ab]
        # same for outputs:
        x_out = [x_out_src, x_out_dst]

        prediction = link_classification(
            output_dim=1, output_act="relu", edge_embedding_method="ip"
        )(x_out)

        keras_model = Model(inputs=x_inp, outputs=prediction)
    else:
        x_inp, x_out = base_model.node_model()
        prediction = layers.Dense(units=2, activation="softmax")(x_out)

        keras_model = Model(inputs=x_inp, outputs=prediction)

    return base_model, keras_model, generator, train_gen
def create_HinSAGE_model(graph, link_prediction=False):
    """Build a HinSAGE base model plus a Keras model ready for compiling.

    Returns a (base_model, keras_model, generator, train_gen) tuple for
    either a link-regression task or a 2-class node-classification task.
    """
    if not link_prediction:
        generator = HinSAGENodeGenerator(
            graph, batch_size=2, num_samples=[2, 2], head_node_type="default"
        )
        train_gen = generator.flow([1, 2], np.array([[1, 0], [0, 1]]))
    else:
        generator = HinSAGELinkGenerator(
            graph,
            batch_size=2,
            num_samples=[2, 1],
            head_node_types=["default", "default"],
        )
        train_edges = np.array([[1, 2], [2, 3], [1, 3]])
        train_gen = generator.flow(train_edges, np.array([1, 1, 0]))

    base_model = HinSAGE(
        layer_sizes=[8, 8], generator=generator, bias=True, dropout=0.5
    )

    # Input and output sockets of hinsage:
    x_inp, x_out = base_model.build()
    if link_prediction:
        # Final estimator layer scoring each edge.
        prediction = link_regression(edge_embedding_method="ip")(x_out)
    else:
        prediction = layers.Dense(units=2, activation="softmax")(x_out)

    keras_model = Model(inputs=x_inp, outputs=prediction)
    return base_model, keras_model, generator, train_gen
def create_GCN_model(graph):
    """Build a two-layer GCN node model over a full-batch generator.

    Returns a (base_model, keras_model, generator, train_gen) tuple; the
    Keras model outputs the softmax layer of the GCN directly.
    """
    generator = FullBatchNodeGenerator(graph)
    train_gen = generator.flow([1, 2], np.array([[1, 0], [0, 1]]))

    base_model = GCN(
        generator=generator,
        layer_sizes=[8, 2],
        activations=["elu", "softmax"],
        bias=True,
        dropout=0.5,
    )

    node_inputs, node_outputs = base_model.node_model()
    keras_model = Model(inputs=node_inputs, outputs=node_outputs)

    return base_model, keras_model, generator, train_gen
def create_GAT_model(graph):
    """Build a three-layer GAT node model over a dense full-batch generator.

    Returns a (base_model, keras_model, generator, train_gen) tuple; the
    Keras model outputs the softmax layer of the GAT directly.
    """
    # GAT requires the dense (non-sparse) adjacency representation here.
    generator = FullBatchNodeGenerator(graph, sparse=False)
    train_gen = generator.flow([1, 2], np.array([[1, 0], [0, 1]]))

    base_model = GAT(
        generator=generator,
        layer_sizes=[8, 8, 2],
        activations=["elu", "elu", "softmax"],
        bias=True,
        in_dropout=0.5,
        attn_dropout=0.5,
        normalize=None,
    )

    node_inputs, node_outputs = base_model.node_model()
    keras_model = Model(inputs=node_inputs, outputs=node_outputs)

    return base_model, keras_model, generator, train_gen
#
# Test for class Ensemble instance creation with invalid parameters given.
#
def test_ensemble_init_parameters():
    """Ensemble and BaggingEnsemble must reject invalid constructor arguments.

    Both classes share the same validation rules, so the identical suite of
    checks is run against each (the original duplicated the whole suite).
    """
    tf.keras.backend.clear_session()

    graph = example_graph_1(feature_size=10)

    # Each entry is (base_model, keras_model, generator, train_gen).
    gnn_models = [
        create_graphSAGE_model(graph),
        create_HinSAGE_model(graph),
        create_graphSAGE_model(graph, link_prediction=True),
        create_HinSAGE_model(graph, link_prediction=True),
        create_GCN_model(graph),
        create_GAT_model(graph),
    ]

    for base_model, keras_model, _generator, _train_gen in gnn_models:
        for ensemble_cls in (Ensemble, BaggingEnsemble):
            # Only Keras models are accepted, not StellarGraph base models.
            with pytest.raises(ValueError):
                ensemble_cls(base_model, n_estimators=3, n_predictions=3)

            # n_predictions must be a positive integer.
            for bad_predictions in (0, -3, 1.7):
                with pytest.raises(ValueError):
                    ensemble_cls(
                        keras_model, n_estimators=1, n_predictions=bad_predictions
                    )

            # n_estimators must be a positive integer.
            for bad_estimators in (0, -8, 2.5):
                with pytest.raises(ValueError):
                    ensemble_cls(
                        keras_model, n_estimators=bad_estimators, n_predictions=11
                    )

            # Valid arguments construct the requested number of models.
            ens = ensemble_cls(keras_model, n_estimators=7, n_predictions=10)
            assert len(ens.models) == 7
            assert ens.n_estimators == 7
            assert ens.n_predictions == 10
def test_compile():
    """compile() argument validation must hold for every GNN model and both ensemble types."""
    tf.keras.backend.clear_session()
    graph = example_graph_1(feature_size=10)
    # Each factory returns (base_model, keras_model, generator, train_gen).
    gnn_models = [
        create_graphSAGE_model(graph),
        create_HinSAGE_model(graph),
        create_graphSAGE_model(graph, link_prediction=True),
        create_HinSAGE_model(graph, link_prediction=True),
        create_GCN_model(graph),
        create_GAT_model(graph),
    ]
    for gnn_model in gnn_models:
        keras_model = gnn_model[1]
        # Identical validation is expected from the plain and the bagging ensemble.
        for ensemble_cls in (Ensemble, BaggingEnsemble):
            ens = ensemble_cls(keras_model, n_estimators=2, n_predictions=5)
            # Keras would raise these itself; the ensemble adds an early explicit check.
            with pytest.raises(ValueError):  # loss may not be None
                ens.compile(optimizer=Adam(), loss=None, weighted_metrics=["acc"])
            with pytest.raises(ValueError):  # an optimizer must be specified
                ens.compile(
                    optimizer=None,
                    loss=categorical_crossentropy,
                    weighted_metrics=["acc"],
                )
            with pytest.raises(ValueError):  # made-up metric name must be rejected
                ens.compile(
                    optimizer=Adam(),
                    loss=categorical_crossentropy,
                    weighted_metrics=["f1_accuracy"],
                )
def test_Ensemble_fit_generator():
    """An Ensemble trains from a flow object and rejects a bare generator argument."""
    tf.keras.backend.clear_session()
    graph = example_graph_1(feature_size=10)
    # Each factory returns (base_model, keras_model, generator, train_gen).
    model_specs = [
        create_graphSAGE_model(graph),
        create_HinSAGE_model(graph),
        create_GCN_model(graph),
        create_GAT_model(graph),
    ]
    for spec in model_specs:
        keras_model, generator, train_gen = spec[1:4]
        ens = Ensemble(keras_model, n_estimators=2, n_predictions=1)
        ens.compile(
            optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
        )
        # Supported path: fit directly on the flow object.
        ens.fit_generator(train_gen, epochs=1, verbose=0, shuffle=False)
        # Passing the raw generator is the wrong type for an Ensemble and must raise.
        with pytest.raises(ValueError):
            ens.fit_generator(
                generator=generator,
                epochs=10,
                validation_data=train_gen,
                verbose=0,
                shuffle=False,
            )
def test_BaggingEnsemble_fit_generator():
    """BaggingEnsemble.fit_generator needs the raw generator plus data/targets and
    validates the generator type, the targets, and bag_size."""
    tf.keras.backend.clear_session()
    train_data = np.array([1, 2])
    train_targets = np.array([[1, 0], [0, 1]])
    graph = example_graph_1(feature_size=10)
    # Each factory returns (base_model, keras_model, generator, train_gen).
    model_specs = [
        create_graphSAGE_model(graph),
        create_HinSAGE_model(graph),
        create_GCN_model(graph),
        create_GAT_model(graph),
    ]
    for spec in model_specs:
        keras_model, generator, train_gen = spec[1:4]
        ens = BaggingEnsemble(keras_model, n_estimators=2, n_predictions=1)
        ens.compile(
            optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
        )
        # Supported path: raw generator plus explicit training data and targets.
        ens.fit_generator(
            generator=generator,
            train_data=train_data,
            train_targets=train_targets,
            epochs=1,
            validation_data=train_gen,
            verbose=0,
            shuffle=False,
        )
        # Every argument combination below is invalid and must raise ValueError.
        bad_calls = [
            # A flow object is the wrong generator type for a BaggingEnsemble.
            dict(
                generator=train_gen,
                train_data=train_data,
                train_targets=train_targets,
                epochs=10,
                verbose=0,
                shuffle=False,
            ),
            # train_targets may not be None when train_data is given.
            dict(
                generator=generator,
                train_data=train_data,
                train_targets=None,
                epochs=10,
                validation_data=train_gen,
                verbose=0,
                shuffle=False,
            ),
            # Neither training data nor targets supplied at all.
            dict(
                generator=generator,
                train_data=None,
                train_targets=None,
                epochs=10,
                validation_data=None,
                verbose=0,
                shuffle=False,
            ),
            # bag_size must be a positive integer <= len(train_data), or None.
            dict(
                generator=generator,
                train_data=train_data,
                train_targets=train_targets,
                epochs=10,
                validation_data=None,
                verbose=0,
                shuffle=False,
                bag_size=-1,
            ),
            # bag_size larger than the number of training points.
            dict(
                generator=generator,
                train_data=train_data,
                train_targets=train_targets,
                epochs=10,
                validation_data=None,
                verbose=0,
                shuffle=False,
                bag_size=10,
            ),
        ]
        for kwargs in bad_calls:
            with pytest.raises(ValueError):
                ens.fit_generator(**kwargs)
def test_evaluate_generator():
    """evaluate_generator rejects invalid argument combinations and returns
    equally-sized one-dimensional mean/std metric arrays."""
    tf.keras.backend.clear_session()
    test_data = np.array([3, 4, 5])
    test_targets = np.array([[1, 0], [0, 1], [0, 1]])
    graph = example_graph_1(feature_size=5)
    # Each factory returns (base_model, keras_model, generator, train_gen).
    model_specs = [
        create_graphSAGE_model(graph),
        create_HinSAGE_model(graph),
        create_GCN_model(graph),
        create_GAT_model(graph),
    ]
    for spec in model_specs:
        keras_model, generator = spec[1], spec[2]
        # The same validation must hold for plain and bagging ensembles.
        for ensemble_cls in (Ensemble, BaggingEnsemble):
            ens = ensemble_cls(keras_model, n_estimators=2, n_predictions=1)
            ens.compile(
                optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
            )
            # Invalid parameter combinations only; Keras-level errors are not re-tested.
            with pytest.raises(ValueError):
                ens.evaluate_generator(
                    generator=generator, test_data=test_data, test_targets=test_targets
                )
            with pytest.raises(ValueError):
                ens.evaluate_generator(
                    generator=generator,
                    test_data=test_data,
                    test_targets=None,  # must give test_targets
                )
            with pytest.raises(ValueError):
                ens.evaluate_generator(
                    generator=generator.flow(test_data, test_targets),
                    test_data=test_data,
                    test_targets=test_targets,
                )
            # Untrained (random) weights are fine for exercising evaluate_generator.
            metrics_mean, metrics_std = ens.evaluate_generator(
                generator.flow(test_data, test_targets)
            )
            assert len(metrics_mean) == len(metrics_std)
            assert len(metrics_mean.shape) == 1
            assert len(metrics_std.shape) == 1
def test_predict_generator():
    """predict_generator rejects redundant predict_data and produces prediction
    tensors whose leading dimensions match the ensemble configuration."""
    tf.keras.backend.clear_session()
    test_data = np.array([4, 5, 6])
    test_targets = np.array([[1, 0], [0, 1], [0, 1]])
    graph = example_graph_1(feature_size=2)
    # Each factory returns (base_model, keras_model, generator, train_gen).
    model_specs = [
        create_graphSAGE_model(graph),
        create_HinSAGE_model(graph),
        create_GCN_model(graph),
        create_GAT_model(graph),
    ]
    for idx, spec in enumerate(model_specs):
        keras_model, generator = spec[1], spec[2]
        # GCN and GAT (indices 2 and 3) use full-batch generators: batch dim is 1.
        full_batch = idx > 1
        # The same behaviour is expected from plain and bagging ensembles.
        for ensemble_cls in (Ensemble, BaggingEnsemble):
            ens = ensemble_cls(keras_model, n_estimators=2, n_predictions=2)
            ens.compile(
                optimizer=Adam(), loss=categorical_crossentropy, weighted_metrics=["acc"]
            )
            test_gen = generator.flow(test_data)
            # Passing predict_data alongside a flow object is invalid.
            with pytest.raises(ValueError):
                ens.predict_generator(generator=test_gen, predict_data=test_data)
            # Untrained (random) weights are fine for exercising predict_generator.
            preds = ens.predict_generator(test_gen, summarise=True)
            print("test_predictions shape {}".format(preds.shape))
            if full_batch:
                assert len(preds) == 1
                assert preds.shape[1] == test_targets.shape[0]
            else:
                assert len(preds) == len(test_data)
                assert preds.shape[-1] == test_targets.shape[-1]
            preds = ens.predict_generator(test_gen, summarise=False)
            assert preds.shape[0] == ens.n_estimators
            assert preds.shape[1] == ens.n_predictions
            if full_batch:
                assert preds.shape[2] == 1
            else:
                assert preds.shape[2] == len(test_data)
            assert preds.shape[-1] == test_targets.shape[-1]
#
# Tests for link prediction that can't be combined easily with the node attribute inference workflow above.
#
def test_evaluate_generator_link_prediction():
    """Link-prediction variant of the evaluate_generator validation tests."""
    tf.keras.backend.clear_session()
    edge_ids_test = np.array([[1, 2], [2, 3], [1, 3]])
    edge_labels_test = np.array([1, 1, 0])
    graph = example_graph_1(feature_size=4)
    # Each factory returns (base_model, keras_model, generator, train_gen).
    model_specs = [
        create_graphSAGE_model(graph, link_prediction=True),
        create_HinSAGE_model(graph, link_prediction=True),
    ]
    for spec in model_specs:
        keras_model, generator = spec[1], spec[2]
        # The same validation must hold for plain and bagging ensembles.
        for ensemble_cls in (Ensemble, BaggingEnsemble):
            ens = ensemble_cls(keras_model, n_estimators=2, n_predictions=1)
            ens.compile(
                optimizer=Adam(), loss=binary_crossentropy, weighted_metrics=["acc"]
            )
            # Invalid parameter combinations only; Keras-level errors are not re-tested.
            with pytest.raises(ValueError):
                ens.evaluate_generator(
                    generator=generator,
                    test_data=edge_ids_test,
                    test_targets=edge_labels_test,
                )
            with pytest.raises(ValueError):
                ens.evaluate_generator(
                    generator=generator,
                    # NOTE(review): labels passed as test_data here (probably meant
                    # edge_ids_test); kept as-is — the None targets trigger the error.
                    test_data=edge_labels_test,
                    test_targets=None,  # must give test_targets
                )
            with pytest.raises(ValueError):
                ens.evaluate_generator(
                    generator=generator.flow(edge_ids_test, edge_labels_test),
                    test_data=edge_ids_test,
                    test_targets=edge_labels_test,
                )
            # Untrained (random) weights are fine for exercising evaluate_generator.
            metrics_mean, metrics_std = ens.evaluate_generator(
                generator.flow(edge_ids_test, edge_labels_test)
            )
            assert len(metrics_mean) == len(metrics_std)
            assert len(metrics_mean.shape) == 1
            assert len(metrics_std.shape) == 1
def test_predict_generator_link_prediction():
    """Link-prediction variant of the predict_generator shape and validation tests."""
    tf.keras.backend.clear_session()
    edge_ids_test = np.array([[1, 2], [2, 3], [1, 3]])
    graph = example_graph_1(feature_size=2)
    # Each factory returns (base_model, keras_model, generator, train_gen).
    model_specs = [
        create_graphSAGE_model(graph, link_prediction=True),
        create_HinSAGE_model(graph, link_prediction=True),
    ]
    for spec in model_specs:
        keras_model, generator = spec[1], spec[2]
        # The same behaviour is expected from plain and bagging ensembles.
        for ensemble_cls in (Ensemble, BaggingEnsemble):
            ens = ensemble_cls(keras_model, n_estimators=2, n_predictions=1)
            ens.compile(
                optimizer=Adam(), loss=binary_crossentropy, weighted_metrics=["acc"]
            )
            test_gen = generator.flow(edge_ids_test)
            # Passing predict_data alongside a flow object is invalid.
            with pytest.raises(ValueError):
                ens.predict_generator(generator=test_gen, predict_data=edge_ids_test)
            # Untrained (random) weights are fine for exercising predict_generator.
            preds = ens.predict_generator(test_gen, summarise=True)
            print("test_predictions shape {}".format(preds.shape))
            assert len(preds) == len(edge_ids_test)
            assert preds.shape[1] == 1
            preds = ens.predict_generator(test_gen, summarise=False)
            assert preds.shape[0] == ens.n_estimators
            assert preds.shape[1] == ens.n_predictions
            assert preds.shape[2] == len(edge_ids_test)
            assert preds.shape[3] == 1
| [
"stellargraph.layer.GCN",
"tensorflow.keras.layers.Dense",
"stellargraph.layer.link_classification",
"stellargraph.mapper.GraphSAGELinkGenerator",
"numpy.ones",
"stellargraph.layer.link_regression",
"stellargraph.StellarGraph",
"stellargraph.layer.GraphSAGE",
"stellargraph.layer.GAT",
"stellargrap... | [((1345, 1355), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (1353, 1355), True, 'import networkx as nx\n'), ((2551, 2625), 'stellargraph.layer.GraphSAGE', 'GraphSAGE', ([], {'layer_sizes': '[8, 8]', 'generator': 'generator', 'bias': '(True)', 'dropout': '(0.5)'}), '(layer_sizes=[8, 8], generator=generator, bias=True, dropout=0.5)\n', (2560, 2625), False, 'from stellargraph.layer import GraphSAGE, GCN, GAT, HinSAGE, link_classification, link_regression\n'), ((4163, 4235), 'stellargraph.layer.HinSAGE', 'HinSAGE', ([], {'layer_sizes': '[8, 8]', 'generator': 'generator', 'bias': '(True)', 'dropout': '(0.5)'}), '(layer_sizes=[8, 8], generator=generator, bias=True, dropout=0.5)\n', (4170, 4235), False, 'from stellargraph.layer import GraphSAGE, GCN, GAT, HinSAGE, link_classification, link_regression\n'), ((4619, 4658), 'tensorflow.keras.Model', 'Model', ([], {'inputs': 'x_inp', 'outputs': 'prediction'}), '(inputs=x_inp, outputs=prediction)\n', (4624, 4658), False, 'from tensorflow.keras import layers, Model\n'), ((4765, 4794), 'stellargraph.mapper.FullBatchNodeGenerator', 'FullBatchNodeGenerator', (['graph'], {}), '(graph)\n', (4787, 4794), False, 'from stellargraph.mapper import GraphSAGENodeGenerator, FullBatchNodeGenerator, HinSAGENodeGenerator, GraphSAGELinkGenerator, HinSAGELinkGenerator\n'), ((4880, 4984), 'stellargraph.layer.GCN', 'GCN', ([], {'layer_sizes': '[8, 2]', 'generator': 'generator', 'bias': '(True)', 'dropout': '(0.5)', 'activations': "['elu', 'softmax']"}), "(layer_sizes=[8, 2], generator=generator, bias=True, dropout=0.5,\n activations=['elu', 'softmax'])\n", (4883, 4984), False, 'from stellargraph.layer import GraphSAGE, GCN, GAT, HinSAGE, link_classification, link_regression\n'), ((5091, 5125), 'tensorflow.keras.Model', 'Model', ([], {'inputs': 'x_inp', 'outputs': 'x_out'}), '(inputs=x_inp, outputs=x_out)\n', (5096, 5125), False, 'from tensorflow.keras import layers, Model\n'), ((5232, 5275), 
'stellargraph.mapper.FullBatchNodeGenerator', 'FullBatchNodeGenerator', (['graph'], {'sparse': '(False)'}), '(graph, sparse=False)\n', (5254, 5275), False, 'from stellargraph.mapper import GraphSAGENodeGenerator, FullBatchNodeGenerator, HinSAGENodeGenerator, GraphSAGELinkGenerator, HinSAGELinkGenerator\n'), ((5361, 5512), 'stellargraph.layer.GAT', 'GAT', ([], {'layer_sizes': '[8, 8, 2]', 'generator': 'generator', 'bias': '(True)', 'in_dropout': '(0.5)', 'attn_dropout': '(0.5)', 'activations': "['elu', 'elu', 'softmax']", 'normalize': 'None'}), "(layer_sizes=[8, 8, 2], generator=generator, bias=True, in_dropout=0.5,\n attn_dropout=0.5, activations=['elu', 'elu', 'softmax'], normalize=None)\n", (5364, 5512), False, 'from stellargraph.layer import GraphSAGE, GCN, GAT, HinSAGE, link_classification, link_regression\n'), ((5635, 5669), 'tensorflow.keras.Model', 'Model', ([], {'inputs': 'x_inp', 'outputs': 'x_out'}), '(inputs=x_inp, outputs=x_out)\n', (5640, 5669), False, 'from tensorflow.keras import layers, Model\n'), ((5850, 5882), 'tensorflow.keras.backend.clear_session', 'tf.keras.backend.clear_session', ([], {}), '()\n', (5880, 5882), True, 'import tensorflow as tf\n'), ((8534, 8566), 'tensorflow.keras.backend.clear_session', 'tf.keras.backend.clear_session', ([], {}), '()\n', (8564, 8566), True, 'import tensorflow as tf\n'), ((10648, 10680), 'tensorflow.keras.backend.clear_session', 'tf.keras.backend.clear_session', ([], {}), '()\n', (10678, 10680), True, 'import tensorflow as tf\n'), ((11657, 11689), 'tensorflow.keras.backend.clear_session', 'tf.keras.backend.clear_session', ([], {}), '()\n', (11687, 11689), True, 'import tensorflow as tf\n'), ((11708, 11724), 'numpy.array', 'np.array', (['[1, 2]'], {}), '([1, 2])\n', (11716, 11724), True, 'import numpy as np\n'), ((11745, 11771), 'numpy.array', 'np.array', (['[[1, 0], [0, 1]]'], {}), '([[1, 0], [0, 1]])\n', (11753, 11771), True, 'import numpy as np\n'), ((14543, 14575), 'tensorflow.keras.backend.clear_session', 
'tf.keras.backend.clear_session', ([], {}), '()\n', (14573, 14575), True, 'import tensorflow as tf\n'), ((14593, 14612), 'numpy.array', 'np.array', (['[3, 4, 5]'], {}), '([3, 4, 5])\n', (14601, 14612), True, 'import numpy as np\n'), ((14632, 14666), 'numpy.array', 'np.array', (['[[1, 0], [0, 1], [0, 1]]'], {}), '([[1, 0], [0, 1], [0, 1]])\n', (14640, 14666), True, 'import numpy as np\n'), ((17954, 17986), 'tensorflow.keras.backend.clear_session', 'tf.keras.backend.clear_session', ([], {}), '()\n', (17984, 17986), True, 'import tensorflow as tf\n'), ((18061, 18080), 'numpy.array', 'np.array', (['[4, 5, 6]'], {}), '([4, 5, 6])\n', (18069, 18080), True, 'import numpy as np\n'), ((18100, 18134), 'numpy.array', 'np.array', (['[[1, 0], [0, 1], [0, 1]]'], {}), '([[1, 0], [0, 1], [0, 1]])\n', (18108, 18134), True, 'import numpy as np\n'), ((21843, 21875), 'tensorflow.keras.backend.clear_session', 'tf.keras.backend.clear_session', ([], {}), '()\n', (21873, 21875), True, 'import tensorflow as tf\n'), ((21896, 21930), 'numpy.array', 'np.array', (['[[1, 2], [2, 3], [1, 3]]'], {}), '([[1, 2], [2, 3], [1, 3]])\n', (21904, 21930), True, 'import numpy as np\n'), ((21954, 21973), 'numpy.array', 'np.array', (['[1, 1, 0]'], {}), '([1, 1, 0])\n', (21962, 21973), True, 'import numpy as np\n'), ((25388, 25420), 'tensorflow.keras.backend.clear_session', 'tf.keras.backend.clear_session', ([], {}), '()\n', (25418, 25420), True, 'import tensorflow as tf\n'), ((25441, 25475), 'numpy.array', 'np.array', (['[[1, 2], [2, 3], [1, 3]]'], {}), '([[1, 2], [2, 3], [1, 3]])\n', (25449, 25475), True, 'import numpy as np\n'), ((1682, 1722), 'stellargraph.StellarGraph', 'StellarGraph', (['G'], {'node_features': '"""feature"""'}), "(G, node_features='feature')\n", (1694, 1722), False, 'from stellargraph import StellarGraph\n'), ((1749, 1764), 'stellargraph.StellarGraph', 'StellarGraph', (['G'], {}), '(G)\n', (1761, 1764), False, 'from stellargraph import StellarGraph\n'), ((1924, 1987), 
'stellargraph.mapper.GraphSAGELinkGenerator', 'GraphSAGELinkGenerator', (['graph'], {'batch_size': '(2)', 'num_samples': '[2, 2]'}), '(graph, batch_size=2, num_samples=[2, 2])\n', (1946, 1987), False, 'from stellargraph.mapper import GraphSAGENodeGenerator, FullBatchNodeGenerator, HinSAGENodeGenerator, GraphSAGELinkGenerator, HinSAGELinkGenerator\n'), ((2013, 2047), 'numpy.array', 'np.array', (['[[1, 2], [2, 3], [1, 3]]'], {}), '([[1, 2], [2, 3], [1, 3]])\n', (2021, 2047), True, 'import numpy as np\n'), ((2150, 2213), 'stellargraph.mapper.GraphSAGENodeGenerator', 'GraphSAGENodeGenerator', (['graph'], {'batch_size': '(2)', 'num_samples': '[2, 2]'}), '(graph, batch_size=2, num_samples=[2, 2])\n', (2172, 2213), False, 'from stellargraph.mapper import GraphSAGENodeGenerator, FullBatchNodeGenerator, HinSAGENodeGenerator, GraphSAGELinkGenerator, HinSAGELinkGenerator\n'), ((3248, 3287), 'tensorflow.keras.Model', 'Model', ([], {'inputs': 'x_inp', 'outputs': 'prediction'}), '(inputs=x_inp, outputs=prediction)\n', (3253, 3287), False, 'from tensorflow.keras import layers, Model\n'), ((3440, 3479), 'tensorflow.keras.Model', 'Model', ([], {'inputs': 'x_inp', 'outputs': 'prediction'}), '(inputs=x_inp, outputs=prediction)\n', (3445, 3479), False, 'from tensorflow.keras import layers, Model\n'), ((3641, 3746), 'stellargraph.mapper.HinSAGELinkGenerator', 'HinSAGELinkGenerator', (['graph'], {'batch_size': '(2)', 'num_samples': '[2, 1]', 'head_node_types': "['default', 'default']"}), "(graph, batch_size=2, num_samples=[2, 1],\n head_node_types=['default', 'default'])\n", (3661, 3746), False, 'from stellargraph.mapper import GraphSAGENodeGenerator, FullBatchNodeGenerator, HinSAGENodeGenerator, GraphSAGELinkGenerator, HinSAGELinkGenerator\n'), ((3827, 3861), 'numpy.array', 'np.array', (['[[1, 2], [2, 3], [1, 3]]'], {}), '([[1, 2], [2, 3], [1, 3]])\n', (3835, 3861), True, 'import numpy as np\n'), ((3964, 4055), 'stellargraph.mapper.HinSAGENodeGenerator', 'HinSAGENodeGenerator', 
(['graph'], {'batch_size': '(2)', 'num_samples': '[2, 2]', 'head_node_type': '"""default"""'}), "(graph, batch_size=2, num_samples=[2, 2],\n head_node_type='default')\n", (3984, 4055), False, 'from stellargraph.mapper import GraphSAGENodeGenerator, FullBatchNodeGenerator, HinSAGENodeGenerator, GraphSAGELinkGenerator, HinSAGELinkGenerator\n'), ((4834, 4860), 'numpy.array', 'np.array', (['[[1, 0], [0, 1]]'], {}), '([[1, 0], [0, 1]])\n', (4842, 4860), True, 'import numpy as np\n'), ((5315, 5341), 'numpy.array', 'np.array', (['[[1, 0], [0, 1]]'], {}), '([[1, 0], [0, 1]])\n', (5323, 5341), True, 'import numpy as np\n'), ((7261, 7316), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(7)', 'n_predictions': '(10)'}), '(keras_model, n_estimators=7, n_predictions=10)\n', (7269, 7316), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((8332, 8394), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(7)', 'n_predictions': '(10)'}), '(keras_model, n_estimators=7, n_predictions=10)\n', (8347, 8394), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((9036, 9090), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(5)'}), '(keras_model, n_estimators=2, n_predictions=5)\n', (9044, 9090), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((9849, 9910), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(5)'}), '(keras_model, n_estimators=2, n_predictions=5)\n', (9864, 9910), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((11097, 11151), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(1)'}), '(keras_model, n_estimators=2, n_predictions=1)\n', (11105, 11151), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((12188, 12249), 
'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(1)'}), '(keras_model, n_estimators=2, n_predictions=1)\n', (12203, 12249), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((15049, 15103), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(1)'}), '(keras_model, n_estimators=2, n_predictions=1)\n', (15057, 15103), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((16511, 16572), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(1)'}), '(keras_model, n_estimators=2, n_predictions=1)\n', (16526, 16572), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((18531, 18585), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(2)'}), '(keras_model, n_estimators=2, n_predictions=2)\n', (18539, 18585), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((20132, 20193), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(2)'}), '(keras_model, n_estimators=2, n_predictions=2)\n', (20147, 20193), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((22334, 22388), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(1)'}), '(keras_model, n_estimators=2, n_predictions=1)\n', (22342, 22388), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((23863, 23924), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(1)'}), '(keras_model, n_estimators=2, n_predictions=1)\n', (23878, 23924), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((25836, 25890), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': 
'(2)', 'n_predictions': '(1)'}), '(keras_model, n_estimators=2, n_predictions=1)\n', (25844, 25890), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((27098, 27159), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(2)', 'n_predictions': '(1)'}), '(keras_model, n_estimators=2, n_predictions=1)\n', (27113, 27159), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((1645, 1666), 'numpy.ones', 'np.ones', (['feature_size'], {}), '(feature_size)\n', (1652, 1666), True, 'import numpy as np\n'), ((2099, 2118), 'numpy.array', 'np.array', (['[1, 1, 0]'], {}), '([1, 1, 0])\n', (2107, 2118), True, 'import numpy as np\n'), ((2257, 2283), 'numpy.array', 'np.array', (['[[1, 0], [0, 1]]'], {}), '([[1, 0], [0, 1]])\n', (2265, 2283), True, 'import numpy as np\n'), ((3115, 3200), 'stellargraph.layer.link_classification', 'link_classification', ([], {'output_dim': '(1)', 'output_act': '"""relu"""', 'edge_embedding_method': '"""ip"""'}), "(output_dim=1, output_act='relu', edge_embedding_method='ip'\n )\n", (3134, 3200), False, 'from stellargraph.layer import GraphSAGE, GCN, GAT, HinSAGE, link_classification, link_regression\n'), ((3366, 3409), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': '(2)', 'activation': '"""softmax"""'}), "(units=2, activation='softmax')\n", (3378, 3409), False, 'from tensorflow.keras import layers, Model\n'), ((3913, 3932), 'numpy.array', 'np.array', (['[1, 1, 0]'], {}), '([1, 1, 0])\n', (3921, 3932), True, 'import numpy as np\n'), ((4117, 4143), 'numpy.array', 'np.array', (['[[1, 0], [0, 1]]'], {}), '([[1, 0], [0, 1]])\n', (4125, 4143), True, 'import numpy as np\n'), ((4425, 4468), 'stellargraph.layer.link_regression', 'link_regression', ([], {'edge_embedding_method': '"""ip"""'}), "(edge_embedding_method='ip')\n", (4440, 4468), False, 'from stellargraph.layer import GraphSAGE, GCN, GAT, HinSAGE, link_classification, link_regression\n'), ((4549, 4592), 
'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': '(2)', 'activation': '"""softmax"""'}), "(units=2, activation='softmax')\n", (4561, 4592), False, 'from tensorflow.keras import layers, Model\n'), ((6496, 6521), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6509, 6521), False, 'import pytest\n'), ((6535, 6588), 'stellargraph.utils.Ensemble', 'Ensemble', (['base_model'], {'n_estimators': '(3)', 'n_predictions': '(3)'}), '(base_model, n_estimators=3, n_predictions=3)\n', (6543, 6588), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((6603, 6628), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6616, 6628), False, 'import pytest\n'), ((6642, 6696), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(1)', 'n_predictions': '(0)'}), '(keras_model, n_estimators=1, n_predictions=0)\n', (6650, 6696), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((6711, 6736), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6724, 6736), False, 'import pytest\n'), ((6750, 6805), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(1)', 'n_predictions': '(-3)'}), '(keras_model, n_estimators=1, n_predictions=-3)\n', (6758, 6805), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((6820, 6845), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6833, 6845), False, 'import pytest\n'), ((6859, 6915), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(1)', 'n_predictions': '(1.7)'}), '(keras_model, n_estimators=1, n_predictions=1.7)\n', (6867, 6915), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((6930, 6955), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6943, 6955), False, 'import pytest\n'), ((6969, 7024), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], 
{'n_estimators': '(0)', 'n_predictions': '(11)'}), '(keras_model, n_estimators=0, n_predictions=11)\n', (6977, 7024), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((7039, 7064), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7052, 7064), False, 'import pytest\n'), ((7078, 7134), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(-8)', 'n_predictions': '(11)'}), '(keras_model, n_estimators=-8, n_predictions=11)\n', (7086, 7134), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((7149, 7174), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7162, 7174), False, 'import pytest\n'), ((7188, 7245), 'stellargraph.utils.Ensemble', 'Ensemble', (['keras_model'], {'n_estimators': '(2.5)', 'n_predictions': '(11)'}), '(keras_model, n_estimators=2.5, n_predictions=11)\n', (7196, 7245), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((7518, 7543), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7531, 7543), False, 'import pytest\n'), ((7557, 7617), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['base_model'], {'n_estimators': '(3)', 'n_predictions': '(3)'}), '(base_model, n_estimators=3, n_predictions=3)\n', (7572, 7617), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((7632, 7657), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7645, 7657), False, 'import pytest\n'), ((7671, 7732), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(1)', 'n_predictions': '(0)'}), '(keras_model, n_estimators=1, n_predictions=0)\n', (7686, 7732), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((7747, 7772), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7760, 7772), False, 'import pytest\n'), ((7786, 7848), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', 
(['keras_model'], {'n_estimators': '(1)', 'n_predictions': '(-3)'}), '(keras_model, n_estimators=1, n_predictions=-3)\n', (7801, 7848), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((7863, 7888), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7876, 7888), False, 'import pytest\n'), ((7902, 7965), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(1)', 'n_predictions': '(1.7)'}), '(keras_model, n_estimators=1, n_predictions=1.7)\n', (7917, 7965), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((7980, 8005), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7993, 8005), False, 'import pytest\n'), ((8019, 8081), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(0)', 'n_predictions': '(11)'}), '(keras_model, n_estimators=0, n_predictions=11)\n', (8034, 8081), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((8096, 8121), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (8109, 8121), False, 'import pytest\n'), ((8135, 8198), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(-8)', 'n_predictions': '(11)'}), '(keras_model, n_estimators=-8, n_predictions=11)\n', (8150, 8198), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((8213, 8238), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (8226, 8238), False, 'import pytest\n'), ((8252, 8316), 'stellargraph.utils.BaggingEnsemble', 'BaggingEnsemble', (['keras_model'], {'n_estimators': '(2.5)', 'n_predictions': '(11)'}), '(keras_model, n_estimators=2.5, n_predictions=11)\n', (8267, 8316), False, 'from stellargraph.utils import Ensemble, BaggingEnsemble\n'), ((9188, 9213), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (9201, 9213), False, 'import pytest\n'), ((9308, 9333), 
'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (9321, 9333), False, 'import pytest\n'), ((9513, 9538), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (9526, 9538), False, 'import pytest\n'), ((10008, 10033), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10021, 10033), False, 'import pytest\n'), ((10128, 10153), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10141, 10153), False, 'import pytest\n'), ((10333, 10358), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10346, 10358), False, 'import pytest\n'), ((11358, 11383), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (11371, 11383), False, 'import pytest\n'), ((12733, 12758), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (12746, 12758), False, 'import pytest\n'), ((13015, 13040), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (13028, 13040), False, 'import pytest\n'), ((13363, 13388), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (13376, 13388), False, 'import pytest\n'), ((13678, 13703), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (13691, 13703), False, 'import pytest\n'), ((14116, 14141), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (14129, 14141), False, 'import pytest\n'), ((15406, 15431), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (15419, 15431), False, 'import pytest\n'), ((15581, 15606), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (15594, 15606), False, 'import pytest\n'), ((15807, 15832), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (15820, 15832), False, 'import pytest\n'), ((16875, 16900), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (16888, 16900), False, 'import pytest\n'), ((17050, 
17075), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (17063, 17075), False, 'import pytest\n'), ((17276, 17301), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (17289, 17301), False, 'import pytest\n'), ((18933, 18958), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (18946, 18958), False, 'import pytest\n'), ((20541, 20566), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (20554, 20566), False, 'import pytest\n'), ((22686, 22711), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (22699, 22711), False, 'import pytest\n'), ((22902, 22927), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (22915, 22927), False, 'import pytest\n'), ((23135, 23160), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (23148, 23160), False, 'import pytest\n'), ((24222, 24247), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (24235, 24247), False, 'import pytest\n'), ((24438, 24463), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (24451, 24463), False, 'import pytest\n'), ((24671, 24696), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (24684, 24696), False, 'import pytest\n'), ((26237, 26262), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (26250, 26262), False, 'import pytest\n'), ((27506, 27531), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (27519, 27531), False, 'import pytest\n'), ((11196, 11202), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (11200, 11202), False, 'from tensorflow.keras.optimizers import Adam\n'), ((12294, 12300), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (12298, 12300), False, 'from tensorflow.keras.optimizers import Adam\n'), ((15148, 15154), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (15152, 15154), 
False, 'from tensorflow.keras.optimizers import Adam\n'), ((16617, 16623), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (16621, 16623), False, 'from tensorflow.keras.optimizers import Adam\n'), ((18630, 18636), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (18634, 18636), False, 'from tensorflow.keras.optimizers import Adam\n'), ((20238, 20244), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (20242, 20244), False, 'from tensorflow.keras.optimizers import Adam\n'), ((22433, 22439), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (22437, 22439), False, 'from tensorflow.keras.optimizers import Adam\n'), ((23969, 23975), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (23973, 23975), False, 'from tensorflow.keras.optimizers import Adam\n'), ((25935, 25941), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (25939, 25941), False, 'from tensorflow.keras.optimizers import Adam\n'), ((27204, 27210), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (27208, 27210), False, 'from tensorflow.keras.optimizers import Adam\n'), ((9249, 9255), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (9253, 9255), False, 'from tensorflow.keras.optimizers import Adam\n'), ((9668, 9674), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (9672, 9674), False, 'from tensorflow.keras.optimizers import Adam\n'), ((10069, 10075), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (10073, 10075), False, 'from tensorflow.keras.optimizers import Adam\n'), ((10488, 10494), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (10492, 10494), False, 'from tensorflow.keras.optimizers import Adam\n')] |
import gym
import configparser
from os import path
import sys
import numpy as np
import aoi_envs
import csv
def eval_baseline(env, baseline, probability, n_episodes=20):
    """
    Evaluate a baseline controller against an environment over N episodes.

    Args:
        env: Gym environment; the baseline controllers are looked up on the
            wrapped environment ``env.env``.
        baseline (str): One of 'MST', 'Random', 'RoundRobin'.
        probability (float): Activation probability forwarded to the 'MST'
            and 'Random' controllers (ignored by 'RoundRobin').
        n_episodes (int): Number of episodes to average over.

    Returns:
        tuple: (mean episode reward, standard deviation of episode reward).

    Raises:
        ValueError: If ``baseline`` is not a recognized controller name.
    """
    results = {'reward': np.zeros(n_episodes)}
    for k in range(n_episodes):
        done = False
        obs = env.reset()
        timestep = 1
        while not done:
            if baseline == 'MST':
                action = env.env.mst_controller(probability)
            elif baseline == 'Random':
                action = env.env.random_controller(probability)
            elif baseline == 'RoundRobin':
                action = env.env.roundrobin_controller()
            else:
                # Fail fast: the original code printed a message and then
                # crashed with a NameError on the undefined `action`.
                raise ValueError('Unknown baseline: {}'.format(baseline))
            obs, rewards, done, info = env.step(action)
            # Accumulate the total reward for this episode.
            results['reward'][k] += rewards
            timestep += 1
    mean_reward = np.mean(results['reward'])
    std_reward = np.std(results['reward'])
    return mean_reward, std_reward
def main(exp_name):
    """
    Evaluate every baseline controller on a family of environments and write
    the best mean/std reward (and the probability that achieved it) to a CSV.

    Args:
        exp_name (str): Experiment family name ('power', 'mobile',
            'flocking_aoi', 'flocking', 'mobile_n', 'n'); any other value
            falls back to the single stationary environment.
    """
    # Experiment name -> (environment ids, output csv filename).
    experiments = {
        'power': (['PowerLevel02Env-v0', 'PowerLevel025Env-v0', 'PowerLevel05Env-v0',
                   'PowerLevel075Env-v0', 'PowerLevel10Env-v0'], 'power.csv'),
        'mobile': (['MobileEnv005-v0', 'MobileEnv01-v0', 'MobileEnv015-v0',
                    'MobileEnv025-v0', 'MobileEnv05-v0'], 'mobile.csv'),
        'flocking_aoi': (['FlockingAOI015Env-v0', 'FlockingAOI025Env-v0', 'FlockingAOI0325Env-v0',
                          'FlockingAOI05Env-v0', 'FlockingAOI0625Env-v0', 'FlockingAOI075Env-v0'],
                         'flocking_aoi.csv'),
        'flocking': (['Flocking015Env-v0', 'Flocking025Env-v0', 'Flocking0325Env-v0',
                      'Flocking05Env-v0', 'Flocking0625Env-v0', 'Flocking075Env-v0'],
                     'flocking.csv'),
        'mobile_n': (['MobileEnv10N10-v0', 'MobileEnv10N20-v0', 'MobileEnv10N40-v0',
                      'MobileEnv10N60-v0', 'MobileEnv10N80-v0', 'MobileEnv10N100-v0'],
                     'mobile_n.csv'),
        'n': (['Stationary10Env-v0', 'Stationary20Env-v0', 'Stationary40Env-v0',
               'Stationary60Env-v0', 'Stationary80Env-v0', 'Stationary100Env-v0'],
              'n.csv'),
    }
    environments, filename = experiments.get(exp_name, (['StationaryEnv-v0'], 'stationary.csv'))

    baselines = ['Random', 'MST', 'RoundRobin']
    # Candidate activation probabilities swept for the stochastic controllers.
    probabilities = [0.04, 0.06, 0.08, 0.1, 0.12, 0.15]

    fields = ['EnvName']
    for name in baselines:
        fields.extend([name + ' Mean', name + ' Std', name + ' Prob'])
    print(fields)

    data_to_csv = []
    for env_name in environments:
        best_results = [env_name]
        env = gym.make(env_name)
        print(env_name)
        for baseline in baselines:
            if baseline == 'RoundRobin':
                # RoundRobin is deterministic; no probability sweep needed.
                best_prob = 0.0
            else:
                # Sweep probabilities with fewer episodes, keep the best mean.
                means = []
                for p in probabilities:
                    m, _ = eval_baseline(env, baseline, p, n_episodes=50)
                    means.append(m)
                    print(m)
                best_prob = probabilities[np.argmax(means)]
            # Re-run the best setting with more episodes for a tighter estimate.
            final_mean, final_std = eval_baseline(env, baseline, best_prob, n_episodes=100)
            best_results.extend([final_mean, final_std, best_prob])
        print(best_results)
        data_to_csv.append(best_results)

    # newline='' prevents csv.writer from emitting blank rows on Windows.
    with open(filename, 'w', newline='') as csvfile:
        csvwriter = csv.writer(csvfile)
        csvwriter.writerow(fields)
        csvwriter.writerows(data_to_csv)
if __name__ == '__main__':
    # Default to the stationary experiment when no name is given on the CLI
    # (the bare sys.argv[1] raised IndexError without an argument).
    main(sys.argv[1] if len(sys.argv) > 1 else 'stationary')
| [
"csv.writer",
"gym.make",
"numpy.argmax",
"numpy.std",
"numpy.zeros",
"numpy.mean"
] | [((949, 975), 'numpy.mean', 'np.mean', (["results['reward']"], {}), "(results['reward'])\n", (956, 975), True, 'import numpy as np\n'), ((993, 1018), 'numpy.std', 'np.std', (["results['reward']"], {}), "(results['reward'])\n", (999, 1018), True, 'import numpy as np\n'), ((271, 291), 'numpy.zeros', 'np.zeros', (['n_episodes'], {}), '(n_episodes)\n', (279, 291), True, 'import numpy as np\n'), ((3071, 3089), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (3079, 3089), False, 'import gym\n'), ((3978, 3997), 'csv.writer', 'csv.writer', (['csvfile'], {}), '(csvfile)\n', (3988, 3997), False, 'import csv\n'), ((3481, 3497), 'numpy.argmax', 'np.argmax', (['means'], {}), '(means)\n', (3490, 3497), True, 'import numpy as np\n')] |
"""The pre-processing module contains classes for image pre-processing.
Image pre-processing aims to improve the image quality (image intensities) for subsequent pipeline steps.
"""
import pymia.filtering.filter as pymia_fltr
import SimpleITK as sitk
import numpy as np
class ImageNormalization(pymia_fltr.Filter):
    """Represents a z-score normalization filter."""

    def __init__(self):
        """Initializes a new instance of the ImageNormalization class."""
        super().__init__()

    def execute(self, image: sitk.Image, params: pymia_fltr.FilterParams = None) -> sitk.Image:
        """Executes a z-score normalization on an image.

        Args:
            image (sitk.Image): The image.
            params (FilterParams): The parameters (unused).

        Returns:
            sitk.Image: The normalized image (zero mean, unit variance).
        """
        img_arr = sitk.GetArrayFromImage(image)

        # Z-score normalization. Guard against a zero standard deviation
        # (constant image) to avoid a division by zero.
        std = np.std(img_arr)
        if std == 0:
            std = 1.0
        img_arr = (img_arr - np.mean(img_arr)) / std

        img_out = sitk.GetImageFromArray(img_arr)
        # Preserve origin, spacing, and direction of the input image.
        img_out.CopyInformation(image)
        return img_out

    def __str__(self):
        """Gets a printable string representation.

        Returns:
            str: String representation.
        """
        return 'ImageNormalization:\n'
class SkullStrippingParameters(pymia_fltr.FilterParams):
    """Parameter container for the skull-stripping filter."""

    def __init__(self, img_mask: sitk.Image):
        """Creates the parameter object.

        Args:
            img_mask (sitk.Image): The brain mask image used to strip the skull.
        """
        self.img_mask = img_mask
class SkullStripping(pymia_fltr.Filter):
    """Represents a skull-stripping filter."""

    def __init__(self):
        """Initializes a new instance of the SkullStripping class."""
        super().__init__()

    def execute(self, image: sitk.Image, params: SkullStrippingParameters = None) -> sitk.Image:
        """Executes a skull stripping on an image.

        Args:
            image (sitk.Image): The image.
            params (SkullStrippingParameters): The parameters with the brain mask.

        Returns:
            sitk.Image: The skull-stripped image.

        Raises:
            ValueError: If no parameters with a brain mask are provided.
        """
        if params is None:
            # Without a brain mask there is nothing to strip with; the
            # original code would fail with an opaque AttributeError.
            raise ValueError('SkullStripping requires parameters with a brain mask.')
        mask = params.img_mask  # the brain mask

        # Zero out every voxel outside the brain mask.
        return sitk.Mask(image, mask)

    def __str__(self):
        """Gets a printable string representation.

        Returns:
            str: String representation.
        """
        return 'SkullStripping:\n'
class ImageRegistrationParameters(pymia_fltr.FilterParams):
    """Parameter container for the image registration filter."""

    def __init__(self, atlas: sitk.Image, transformation: sitk.Transform, is_ground_truth: bool = False):
        """Creates the parameter object.

        Args:
            atlas (sitk.Image): The atlas image to register onto.
            transformation (sitk.Transform): The transformation to apply for registration.
            is_ground_truth (bool): Whether the registration is performed on the ground truth.
        """
        self.atlas = atlas
        self.transformation = transformation
        self.is_ground_truth = is_ground_truth
class ImageRegistration(pymia_fltr.Filter):
    """Represents a registration filter that applies a pre-computed transformation."""

    def __init__(self):
        """Initializes a new instance of the ImageRegistration class."""
        super().__init__()

    def execute(self, image: sitk.Image, params: ImageRegistrationParameters = None) -> sitk.Image:
        """Registers an image to the atlas by resampling with a given transformation.

        Args:
            image (sitk.Image): The image.
            params (ImageRegistrationParameters): The registration parameters.

        Returns:
            sitk.Image: The registered image.

        Raises:
            ValueError: If no registration parameters are provided.
        """
        if params is None:
            raise ValueError('ImageRegistration requires parameters with an atlas and transformation.')
        atlas = params.atlas
        transform = params.transformation

        # Ground-truth label images must be resampled with nearest-neighbor
        # interpolation; linear interpolation would blend neighboring labels
        # and create invalid label values.
        interpolator = sitk.sitkNearestNeighbor if params.is_ground_truth else sitk.sitkLinear
        image = sitk.Resample(image, atlas, transform, interpolator, 0.0, image.GetPixelIDValue())

        # note: if you are interested in registration, have a look at
        # pymia.filtering.registration.MultiModalRegistration (atlas vs. inter-subject).
        return image

    def __str__(self):
        """Gets a printable string representation.

        Returns:
            str: String representation.
        """
        return 'ImageRegistration:\n'
| [
"numpy.std",
"SimpleITK.GetArrayFromImage",
"SimpleITK.Mask",
"numpy.mean",
"SimpleITK.GetImageFromArray"
] | [((877, 906), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', (['image'], {}), '(image)\n', (899, 906), True, 'import SimpleITK as sitk\n'), ((1130, 1161), 'SimpleITK.GetImageFromArray', 'sitk.GetImageFromArray', (['img_arr'], {}), '(img_arr)\n', (1152, 1161), True, 'import SimpleITK as sitk\n'), ((2536, 2558), 'SimpleITK.Mask', 'sitk.Mask', (['image', 'mask'], {}), '(image, mask)\n', (2545, 2558), True, 'import SimpleITK as sitk\n'), ((1006, 1021), 'numpy.std', 'np.std', (['img_arr'], {}), '(img_arr)\n', (1012, 1021), True, 'import numpy as np\n'), ((988, 1004), 'numpy.mean', 'np.mean', (['img_arr'], {}), '(img_arr)\n', (995, 1004), True, 'import numpy as np\n')] |
import numpy as np
import pytest
from segment.raster_transform import (
pixels_range_near_point, pixel_coord, pixel_containing,
long_lat_to_xyz,
)
@pytest.fixture
def lspop_geo():
    """GDAL-style geotransform tuple for the lspop raster (presumably a
    global population grid at 1/120-degree resolution — confirm)."""
    return (
        -180.0,
        0.0083333333333333,
        0.0,
        89.99999999999929,
        0.0,
        -0.0083333333333333,
    )
@pytest.fixture
def pfpr_geo():
    """GDAL-style geotransform tuple for the pfpr raster fixture."""
    return (-118.375, 0.04166665, 0.0, 53.541623217, 0.0, -0.04166665)
def test_coord_containing(lspop_geo):
    """Round trip: pixel -> coordinate -> pixel containing that coordinate."""
    px = [25199.5, 10768.5]
    lon_lat = pixel_coord(px, lspop_geo)
    assert 10 < lon_lat[0] < 50
    assert -10 < lon_lat[1] < 10
    found = pixel_containing(lon_lat, lspop_geo)
    assert found[0] == 25199
    assert found[1] == 10768
def test_pixels_range_near_point_lspop(lspop_geo):
    """A 100 km pixel window around (30, 1) has properly ordered bounds."""
    point = [30, 1]
    bounds = pixels_range_near_point(point, 100_000, lspop_geo)
    print(f"pixels minmax {bounds}")
    assert bounds.long[0] < bounds.long[1]
    assert bounds.lat[0] < bounds.lat[1]
    midpoint = [(bounds.long[0] + bounds.long[1]) / 2,
                (bounds.lat[0] + bounds.lat[1]) / 2]
    coord = pixel_coord(midpoint, lspop_geo)
    print(f"pixel center {coord}")
def test_pixels_range_near_point_pfpr(pfpr_geo):
    """All bounds of a 100 km pixel window near (30, 1) are positive."""
    point = [30, 1]
    bounds = pixels_range_near_point(point, 100_000, pfpr_geo)
    print(f"pixels minmax {bounds}")
    assert all(bounds[i][j] > 0 for i in (0, 1) for j in (0, 1))
    midpoint = [(bounds.long[0] + bounds.long[1]) / 2,
                (bounds.lat[0] + bounds.lat[1]) / 2]
    coord = pixel_coord(midpoint, pfpr_geo)
    print(f"pixel center {coord}")
def test_long_lat_to_xyz():
    """Converting N (longitude, latitude) pairs yields an (N, 3) xyz array."""
    # np.float was removed in NumPy 1.24; the builtin float is the correct dtype.
    ll = np.array([
        [30, 0],
        [30, 1],
        [28, 0],
        [22, 0],
    ], dtype=float)
    ans = long_lat_to_xyz(ll)
    assert ans.shape == (4, 3)
print(ans) | [
"segment.raster_transform.long_lat_to_xyz",
"segment.raster_transform.pixel_containing",
"segment.raster_transform.pixel_coord",
"numpy.array",
"segment.raster_transform.pixels_range_near_point"
] | [((480, 509), 'segment.raster_transform.pixel_coord', 'pixel_coord', (['pixel', 'lspop_geo'], {}), '(pixel, lspop_geo)\n', (491, 509), False, 'from segment.raster_transform import pixels_range_near_point, pixel_coord, pixel_containing, long_lat_to_xyz\n'), ((588, 622), 'segment.raster_transform.pixel_containing', 'pixel_containing', (['coord', 'lspop_geo'], {}), '(coord, lspop_geo)\n', (604, 622), False, 'from segment.raster_transform import pixels_range_near_point, pixel_coord, pixel_containing, long_lat_to_xyz\n'), ((780, 832), 'segment.raster_transform.pixels_range_near_point', 'pixels_range_near_point', (['long_lat', '(100000)', 'lspop_geo'], {}), '(long_lat, 100000, lspop_geo)\n', (803, 832), False, 'from segment.raster_transform import pixels_range_near_point, pixel_coord, pixel_containing, long_lat_to_xyz\n'), ((1084, 1118), 'segment.raster_transform.pixel_coord', 'pixel_coord', (['center_ish', 'lspop_geo'], {}), '(center_ish, lspop_geo)\n', (1095, 1118), False, 'from segment.raster_transform import pixels_range_near_point, pixel_coord, pixel_containing, long_lat_to_xyz\n'), ((1241, 1292), 'segment.raster_transform.pixels_range_near_point', 'pixels_range_near_point', (['long_lat', '(100000)', 'pfpr_geo'], {}), '(long_lat, 100000, pfpr_geo)\n', (1264, 1292), False, 'from segment.raster_transform import pixels_range_near_point, pixel_coord, pixel_containing, long_lat_to_xyz\n'), ((1542, 1575), 'segment.raster_transform.pixel_coord', 'pixel_coord', (['center_ish', 'pfpr_geo'], {}), '(center_ish, pfpr_geo)\n', (1553, 1575), False, 'from segment.raster_transform import pixels_range_near_point, pixel_coord, pixel_containing, long_lat_to_xyz\n'), ((1650, 1712), 'numpy.array', 'np.array', (['[[30, 0], [30, 1], [28, 0], [22, 0]]'], {'dtype': 'np.float'}), '([[30, 0], [30, 1], [28, 0], [22, 0]], dtype=np.float)\n', (1658, 1712), True, 'import numpy as np\n'), ((1762, 1781), 'segment.raster_transform.long_lat_to_xyz', 'long_lat_to_xyz', (['ll'], {}), '(ll)\n', 
(1777, 1781), False, 'from segment.raster_transform import pixels_range_near_point, pixel_coord, pixel_containing, long_lat_to_xyz\n')] |
from flask import Flask, request, Response
from flask_cors import CORS, cross_origin
from PIL import Image
import numpy as np
from numpy import asarray
from mtcnn.mtcnn import MTCNN
from tensorflow.keras.models import load_model
from tensorflow.keras.backend import set_session
import tensorflow as tf
import json
# Flask app with permissive CORS so browser front-ends can call this API.
app = Flask(__name__)
CORS(app)
app.config['CORS_HEADERS'] = '*'
# TF1-style graph and session captured at import time; request handlers must
# re-enter both before running the Keras model (see faces_embeddings).
graph = tf.get_default_graph()
sess = tf.Session()
set_session(sess)
# MTCNN face detector and pre-trained FaceNet embedding model, loaded once
# at startup so requests do not pay the model-loading cost.
detector = MTCNN()
model = load_model(r'./models/facenet_keras.h5')
def extract_faces(pixels, required_size=(160, 160)):
    """Detect all faces in an RGB pixel array and return each as an array.

    Each detected face is cropped from `pixels`; when `required_size` is
    truthy the crop is resized to that size.
    """
    detections = detector.detect_faces(pixels)
    faces = []
    for detection in detections:
        x1, y1, width, height = detection['box']
        # Detected box coordinates can be slightly negative; force them non-negative.
        x1, y1 = abs(x1), abs(y1)
        x2, y2 = x1 + width, y1 + height
        crop = Image.fromarray(pixels[y1:y2, x1:x2])
        if required_size:
            crop = crop.resize(required_size)
        faces.append(asarray(crop))
    return faces
def get_embedding(face_pixels):
    """Compute the FaceNet embedding vector for a single face image."""
    pixels = face_pixels.astype('float32')
    # Standardize pixel values (zero mean, unit variance) before prediction.
    pixels = (pixels - pixels.mean()) / pixels.std()
    batch = np.expand_dims(pixels, axis=0)
    return model.predict(batch)[0]
@app.route('/getembeddings', methods = ['POST'])
@cross_origin()
def faces_embeddings():
    """POST endpoint: detect faces in the uploaded 'face' image and return a
    JSON array with one FaceNet embedding (list of floats) per detected face."""
    global graph
    global sess
    # TF1 Keras: predictions must run inside the graph/session the model was
    # loaded with, so re-enter both before calling the helpers.
    with graph.as_default():
        set_session(sess)
        uploaded_image = request.files['face']
        # Force RGB in case the client uploads a grayscale or RGBA image.
        uploaded_image = Image.open(uploaded_image).convert('RGB')
        detected_faces = extract_faces(asarray(uploaded_image))
        embeddings = [get_embedding(detected_face).tolist() for detected_face in detected_faces]
        return Response(json.dumps(embeddings), mimetype="application/json")
@app.after_request
def add_headers(response):
    """Attach permissive CORS headers to every outgoing response."""
    for header_name in ('Access-Control-Allow-Origin', 'Access-Control-Allow-Headers'):
        response.headers.add(header_name, '*')
    return response
if __name__ == '__main__':
    # Listen on all interfaces on the standard HTTP port; debug disabled.
    app.run(host= '0.0.0.0', port=80, debug = False)
| [
"tensorflow.keras.models.load_model",
"flask_cors.CORS",
"numpy.asarray",
"flask.Flask",
"tensorflow.Session",
"mtcnn.mtcnn.MTCNN",
"tensorflow.keras.backend.set_session",
"flask_cors.cross_origin",
"numpy.expand_dims",
"json.dumps",
"PIL.Image.open",
"PIL.Image.fromarray",
"tensorflow.get_d... | [((321, 336), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (326, 336), False, 'from flask import Flask, request, Response\n'), ((337, 346), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (341, 346), False, 'from flask_cors import CORS, cross_origin\n'), ((389, 411), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (409, 411), True, 'import tensorflow as tf\n'), ((419, 431), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (429, 431), True, 'import tensorflow as tf\n'), ((433, 450), 'tensorflow.keras.backend.set_session', 'set_session', (['sess'], {}), '(sess)\n', (444, 450), False, 'from tensorflow.keras.backend import set_session\n'), ((463, 470), 'mtcnn.mtcnn.MTCNN', 'MTCNN', ([], {}), '()\n', (468, 470), False, 'from mtcnn.mtcnn import MTCNN\n'), ((479, 518), 'tensorflow.keras.models.load_model', 'load_model', (['"""./models/facenet_keras.h5"""'], {}), "('./models/facenet_keras.h5')\n", (489, 518), False, 'from tensorflow.keras.models import load_model\n'), ((1358, 1372), 'flask_cors.cross_origin', 'cross_origin', ([], {}), '()\n', (1370, 1372), False, 'from flask_cors import CORS, cross_origin\n'), ((1217, 1252), 'numpy.expand_dims', 'np.expand_dims', (['face_pixels'], {'axis': '(0)'}), '(face_pixels, axis=0)\n', (1231, 1252), True, 'import numpy as np\n'), ((838, 859), 'PIL.Image.fromarray', 'Image.fromarray', (['face'], {}), '(face)\n', (853, 859), False, 'from PIL import Image\n'), ((955, 969), 'numpy.asarray', 'asarray', (['image'], {}), '(image)\n', (962, 969), False, 'from numpy import asarray\n'), ((1467, 1484), 'tensorflow.keras.backend.set_session', 'set_session', (['sess'], {}), '(sess)\n', (1478, 1484), False, 'from tensorflow.keras.backend import set_session\n'), ((1638, 1661), 'numpy.asarray', 'asarray', (['uploaded_image'], {}), '(uploaded_image)\n', (1645, 1661), False, 'from numpy import asarray\n'), ((1786, 1808), 'json.dumps', 'json.dumps', (['embeddings'], {}), 
'(embeddings)\n', (1796, 1808), False, 'import json\n'), ((1557, 1583), 'PIL.Image.open', 'Image.open', (['uploaded_image'], {}), '(uploaded_image)\n', (1567, 1583), False, 'from PIL import Image\n')] |
"""
ECE 4424 - Project Classify Image Using 2-layers Neural Network with MNIST data set
<NAME>
12/7/2020
*About Running: main() function that run real-time training and testing result(s)
Note: This code is run on VSCode, in order to draw graph, we have to have #%%
Related modules:
mnist_official_loader.py
neuralNetwork.py
mnistDB folder (download from MNIST site)
*About pickled Testing trained model .py + trained_models folder (no need for running 3 models in main())
testNet1.py => trainedModel_784_30_10_30Epoch.pkl
testNet2.py => trainedModel_784_100_10_30Epoch.pkl
testNet3.py => trainedModel_784_30_10_40Epoch.pkl
# Save model - Don't use this unless you want to save the trained model in trained_models folder
pickleFile = "trained_models/trainedModel_784_30_10_30Epoch.pkl"
with open(pickleFile, 'wb') as file:
pickle.dump(net1, file)
"""
#%%
import neuralNetwork
from neuralNetwork import Network
import psutil
import mnist_official_loader
from mnist_official_loader import processData
import time
import numpy
import matplotlib.pyplot as plt
import matplotlib as mpl
import pickle
def main():
    """Train a 3-layer network on MNIST, report memory usage, and visualize
    the prediction for one randomly chosen test image.

    The alternative runs described in the module docstring ([784,100,10] for
    30 epochs and [784,30,10] for 40 epochs) are available as pickled models
    in trained_models/ and can be reproduced by changing the layer sizes and
    epoch count below.
    """
    memory1 = psutil.virtual_memory().percent

    # Data preprocessing: load and vectorize the MNIST images/labels.
    training_data, testing_data = mnist_official_loader.processData()

    print("========First Run: [784,30,10] 30 epochs=========\n")
    # 3-layer net: 784 input neurons (28x28 pixels), 30 hidden, 10 output classes.
    net1 = neuralNetwork.Network([784, 30, 10])
    # Train for 30 epochs with mini-batch size 10 and learning rate 3.0.
    net1.StochasticGD(training_data, testing_data, 30, 10, 3.0)
    memory2 = psutil.virtual_memory().percent
    memory_usage = abs(memory1 - memory2)
    # psutil.virtual_memory().percent is a percentage of system memory,
    # not a byte count (the old message mislabeled it as bytes).
    print(f"The memory usage is: {memory_usage}% of system memory")

    # Sanity check: predict one random image from the test set and plot it
    # next to the network's output activations.
    img1 = numpy.random.randint(0, 10000)
    prediction1 = net1.feedForward(testing_data[img1][0])  # [0] is the 28x28 pixel vector
    print(f"Image number {img1} in the testing set is a {testing_data[img1][1]}, "
          f"and the current network predicted a {numpy.argmax(prediction1)}")
    figure1, ax1 = plt.subplots(1, 2, figsize=(8, 4))
    ax1[0].matshow(numpy.reshape(testing_data[img1][0], (28, 28)), cmap='gray')  # color map
    ax1[1].plot(prediction1, lw=2)  # line width
    ax1[1].set_aspect(10)
    plt.show()

    print("Finish Running")
###########################################################
if __name__ == "__main__":
    # Entry point: runs the real-time training and testing demo.
    main()
| [
"psutil.virtual_memory",
"matplotlib.pyplot.show",
"neuralNetwork.Network",
"numpy.argmax",
"matplotlib.pyplot.subplots",
"numpy.random.randint",
"numpy.reshape",
"mnist_official_loader.processData"
] | [((1377, 1412), 'mnist_official_loader.processData', 'mnist_official_loader.processData', ([], {}), '()\n', (1410, 1412), False, 'import mnist_official_loader\n'), ((1603, 1639), 'neuralNetwork.Network', 'neuralNetwork.Network', (['[784, 30, 10]'], {}), '([784, 30, 10])\n', (1624, 1639), False, 'import neuralNetwork\n'), ((2110, 2140), 'numpy.random.randint', 'numpy.random.randint', (['(0)', '(10000)'], {}), '(0, 10000)\n', (2130, 2140), False, 'import numpy\n'), ((2440, 2474), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(8, 4)'}), '(1, 2, figsize=(8, 4))\n', (2452, 2474), True, 'import matplotlib.pyplot as plt\n'), ((2654, 2664), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2662, 2664), True, 'import matplotlib.pyplot as plt\n'), ((1227, 1250), 'psutil.virtual_memory', 'psutil.virtual_memory', ([], {}), '()\n', (1248, 1250), False, 'import psutil\n'), ((1905, 1928), 'psutil.virtual_memory', 'psutil.virtual_memory', ([], {}), '()\n', (1926, 1928), False, 'import psutil\n'), ((2496, 2542), 'numpy.reshape', 'numpy.reshape', (['testing_data[img1][0]', '(28, 28)'], {}), '(testing_data[img1][0], (28, 28))\n', (2509, 2542), False, 'import numpy\n'), ((2387, 2412), 'numpy.argmax', 'numpy.argmax', (['prediction1'], {}), '(prediction1)\n', (2399, 2412), False, 'import numpy\n')] |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.argmax_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class GradientCorrectnessTest(tf.test.TestCase):
def testMultipleOutputChainedGradients(self):
with self.test_session() as sess:
x = tf.constant(1.0, dtype=tf.float32)
yexp = tf.exp(x)
yexplog = tf.log(yexp)
grads = tf.gradients([yexp, yexplog], [x])
grad_vals = sess.run(grads)
exp1_plus_one = (1.0 + np.exp(1.0)).astype(np.float32)
# [dexp(x)/dx + d(log(exp(x)))/dx] @ x=1 == exp(1) + 1
self.assertAllClose(grad_vals[0], exp1_plus_one)
if __name__ == '__main__':
tf.test.main()
| [
"tensorflow.test.main",
"tensorflow.constant",
"tensorflow.exp",
"numpy.exp",
"tensorflow.log",
"tensorflow.gradients"
] | [((1410, 1424), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (1422, 1424), True, 'import tensorflow as tf\n'), ((1033, 1067), 'tensorflow.constant', 'tf.constant', (['(1.0)'], {'dtype': 'tf.float32'}), '(1.0, dtype=tf.float32)\n', (1044, 1067), True, 'import tensorflow as tf\n'), ((1081, 1090), 'tensorflow.exp', 'tf.exp', (['x'], {}), '(x)\n', (1087, 1090), True, 'import tensorflow as tf\n'), ((1107, 1119), 'tensorflow.log', 'tf.log', (['yexp'], {}), '(yexp)\n', (1113, 1119), True, 'import tensorflow as tf\n'), ((1134, 1168), 'tensorflow.gradients', 'tf.gradients', (['[yexp, yexplog]', '[x]'], {}), '([yexp, yexplog], [x])\n', (1146, 1168), True, 'import tensorflow as tf\n'), ((1232, 1243), 'numpy.exp', 'np.exp', (['(1.0)'], {}), '(1.0)\n', (1238, 1243), True, 'import numpy as np\n')] |
from sacred import Experiment
import os.path as osp
import os
import numpy as np
import yaml
import cv2
import torch
from torch.utils.data import DataLoader
from tracktor.config import get_output_dir, get_tb_dir
from tracktor.reid.solver import Solver
from tracktor.datasets.factory import Datasets
from tracktor.reid.resnet import resnet50
ex = Experiment()
ex.add_config('experiments/cfgs/reid.yaml')
Solver = ex.capture(Solver, prefix='reid.solver')
@ex.automain
def my_main(_config, reid):
# set all seeds
torch.manual_seed(reid['seed'])
torch.cuda.manual_seed(reid['seed'])
np.random.seed(reid['seed'])
torch.backends.cudnn.deterministic = True
print(_config)
output_dir = osp.join(get_output_dir(reid['module_name']), reid['name'])
tb_dir = osp.join(get_tb_dir(reid['module_name']), reid['name'])
sacred_config = osp.join(output_dir, 'sacred_config.yaml')
if not osp.exists(output_dir):
os.makedirs(output_dir)
with open(sacred_config, 'w') as outfile:
yaml.dump(_config, outfile, default_flow_style=False)
#########################
# Initialize dataloader #
#########################
print("[*] Initializing Dataloader")
db_train = Datasets(reid['db_train'], reid['dataloader'])
db_train = DataLoader(db_train, batch_size=1, shuffle=True)
if reid['db_val']:
db_val = None
#db_val = DataLoader(db_val, batch_size=1, shuffle=True)
else:
db_val = None
##########################
# Initialize the modules #
##########################
print("[*] Building CNN")
network = resnet50(pretrained=True, **reid['cnn'])
network.train()
network.cuda()
##################
# Begin training #
##################
print("[*] Solving ...")
# build scheduling like in "In Defense of the Triplet Loss for Person Re-Identification"
# from Hermans et al.
lr = reid['solver']['optim_args']['lr']
iters_per_epoch = len(db_train)
# we want to keep lr until iter 15000 and from there to iter 25000 a exponential decay
l = eval("lambda epoch: 1 if epoch*{} < 15000 else 0.001**((epoch*{} - 15000)/(25000-15000))".format(
iters_per_epoch, iters_per_epoch))
#else:
# l = None
max_epochs = 25000 // len(db_train.dataset) + 1 if 25000 % len(db_train.dataset) else 25000 // len(db_train.dataset)
solver = Solver(output_dir, tb_dir, lr_scheduler_lambda=l)
solver.train(network, db_train, db_val, max_epochs, 100, model_args=reid['model_args'])
| [
"numpy.random.seed",
"os.makedirs",
"torch.utils.data.DataLoader",
"torch.manual_seed",
"yaml.dump",
"torch.cuda.manual_seed",
"tracktor.reid.solver.Solver",
"tracktor.datasets.factory.Datasets",
"os.path.exists",
"tracktor.config.get_output_dir",
"sacred.Experiment",
"tracktor.reid.resnet.res... | [((349, 361), 'sacred.Experiment', 'Experiment', ([], {}), '()\n', (359, 361), False, 'from sacred import Experiment\n'), ((523, 554), 'torch.manual_seed', 'torch.manual_seed', (["reid['seed']"], {}), "(reid['seed'])\n", (540, 554), False, 'import torch\n'), ((559, 595), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (["reid['seed']"], {}), "(reid['seed'])\n", (581, 595), False, 'import torch\n'), ((600, 628), 'numpy.random.seed', 'np.random.seed', (["reid['seed']"], {}), "(reid['seed'])\n", (614, 628), True, 'import numpy as np\n'), ((863, 905), 'os.path.join', 'osp.join', (['output_dir', '"""sacred_config.yaml"""'], {}), "(output_dir, 'sacred_config.yaml')\n", (871, 905), True, 'import os.path as osp\n'), ((1230, 1276), 'tracktor.datasets.factory.Datasets', 'Datasets', (["reid['db_train']", "reid['dataloader']"], {}), "(reid['db_train'], reid['dataloader'])\n", (1238, 1276), False, 'from tracktor.datasets.factory import Datasets\n'), ((1292, 1340), 'torch.utils.data.DataLoader', 'DataLoader', (['db_train'], {'batch_size': '(1)', 'shuffle': '(True)'}), '(db_train, batch_size=1, shuffle=True)\n', (1302, 1340), False, 'from torch.utils.data import DataLoader\n'), ((1622, 1662), 'tracktor.reid.resnet.resnet50', 'resnet50', ([], {'pretrained': '(True)'}), "(pretrained=True, **reid['cnn'])\n", (1630, 1662), False, 'from tracktor.reid.resnet import resnet50\n'), ((2460, 2509), 'tracktor.reid.solver.Solver', 'Solver', (['output_dir', 'tb_dir'], {'lr_scheduler_lambda': 'l'}), '(output_dir, tb_dir, lr_scheduler_lambda=l)\n', (2466, 2509), False, 'from tracktor.reid.solver import Solver\n'), ((722, 757), 'tracktor.config.get_output_dir', 'get_output_dir', (["reid['module_name']"], {}), "(reid['module_name'])\n", (736, 757), False, 'from tracktor.config import get_output_dir, get_tb_dir\n'), ((795, 826), 'tracktor.config.get_tb_dir', 'get_tb_dir', (["reid['module_name']"], {}), "(reid['module_name'])\n", (805, 826), False, 'from 
tracktor.config import get_output_dir, get_tb_dir\n'), ((918, 940), 'os.path.exists', 'osp.exists', (['output_dir'], {}), '(output_dir)\n', (928, 940), True, 'import os.path as osp\n'), ((950, 973), 'os.makedirs', 'os.makedirs', (['output_dir'], {}), '(output_dir)\n', (961, 973), False, 'import os\n'), ((1028, 1081), 'yaml.dump', 'yaml.dump', (['_config', 'outfile'], {'default_flow_style': '(False)'}), '(_config, outfile, default_flow_style=False)\n', (1037, 1081), False, 'import yaml\n')] |
import torch
from torch.utils import data
import warnings
import numpy as np
import cv2
import time
class createDataset(data.Dataset):
def __init__(self, image_path, size=[320, 160], image=None):
warnings.simplefilter("ignore")
self.width = size[0]
self.height = size[1]
self.rng = np.random.RandomState(int(time.time()))
self.path_list = [image_path]
self.image = image
self.rng.shuffle(self.path_list)
self.flags = {'size': size}
self.img = np.zeros(size, np.uint8)
self.label_img = np.zeros(size, np.uint8)
self.ins_img = np.zeros((0, size[0], size[1]), np.uint8)
self.len = len(self.path_list)
self.mainpath = image_path
def next(self, path):
img_path = path + ".jpg"
if self.image is None:
frame = cv2.imread(img_path)
else:
frame = self.image
self.rng = np.random.RandomState(int(time.time()))
if frame is None:
print("Failed to read:", img_path)
frame = cv2.imread(self.mainpath + "/failsafe.jpg")
gamma = self.rng.uniform(0.8, 1.4)
gamma_table = [np.power(x / 255.0, gamma) * 255.0 for x in range(256)]
gamma_table = np.round(np.array(gamma_table)).astype(np.uint8)
# 实现映射用的是Opencv的查表函数
frame = cv2.LUT(frame, gamma_table)
frame = cv2.resize(frame, (self.width, self.height), interpolation=cv2.INTER_AREA)
frame_h, frame_w, _ = frame.shape
crop_factor_h = self.rng.uniform(0.0, 0.01)
crop_factor_w = self.rng.uniform(0.0, 0.01)
h = frame_h - frame_h * crop_factor_h
w = frame_w - frame_w * crop_factor_w
x = self.rng.uniform(0, int(frame_w - w))
y = int(frame_h - h) // 2
crop = np.array([y, y + h, x, x + w]).astype('int')
frame = frame[crop[0]:crop[1], crop[2]:crop[3]]
frame = cv2.resize(frame, (self.width, self.height), interpolation=cv2.INTER_AREA)
return frame
def __getitem__(self, idx):
self.path_from = self.path_list[idx][:-4]
self.img = self.next(self.path_from)
self.img = np.array(np.transpose(self.img, (2, 0, 1)), dtype=np.float32)
print('item : ', self.img.shape)
return torch.Tensor(self.img)
def __len__(self):
return self.len
| [
"warnings.simplefilter",
"numpy.power",
"numpy.zeros",
"numpy.transpose",
"time.time",
"cv2.imread",
"torch.Tensor",
"cv2.LUT",
"numpy.array",
"cv2.resize"
] | [((212, 243), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (233, 243), False, 'import warnings\n'), ((526, 550), 'numpy.zeros', 'np.zeros', (['size', 'np.uint8'], {}), '(size, np.uint8)\n', (534, 550), True, 'import numpy as np\n'), ((577, 601), 'numpy.zeros', 'np.zeros', (['size', 'np.uint8'], {}), '(size, np.uint8)\n', (585, 601), True, 'import numpy as np\n'), ((625, 666), 'numpy.zeros', 'np.zeros', (['(0, size[0], size[1])', 'np.uint8'], {}), '((0, size[0], size[1]), np.uint8)\n', (633, 666), True, 'import numpy as np\n'), ((1357, 1384), 'cv2.LUT', 'cv2.LUT', (['frame', 'gamma_table'], {}), '(frame, gamma_table)\n', (1364, 1384), False, 'import cv2\n'), ((1402, 1476), 'cv2.resize', 'cv2.resize', (['frame', '(self.width, self.height)'], {'interpolation': 'cv2.INTER_AREA'}), '(frame, (self.width, self.height), interpolation=cv2.INTER_AREA)\n', (1412, 1476), False, 'import cv2\n'), ((1932, 2006), 'cv2.resize', 'cv2.resize', (['frame', '(self.width, self.height)'], {'interpolation': 'cv2.INTER_AREA'}), '(frame, (self.width, self.height), interpolation=cv2.INTER_AREA)\n', (1942, 2006), False, 'import cv2\n'), ((2296, 2318), 'torch.Tensor', 'torch.Tensor', (['self.img'], {}), '(self.img)\n', (2308, 2318), False, 'import torch\n'), ((855, 875), 'cv2.imread', 'cv2.imread', (['img_path'], {}), '(img_path)\n', (865, 875), False, 'import cv2\n'), ((1074, 1117), 'cv2.imread', 'cv2.imread', (["(self.mainpath + '/failsafe.jpg')"], {}), "(self.mainpath + '/failsafe.jpg')\n", (1084, 1117), False, 'import cv2\n'), ((2187, 2220), 'numpy.transpose', 'np.transpose', (['self.img', '(2, 0, 1)'], {}), '(self.img, (2, 0, 1))\n', (2199, 2220), True, 'import numpy as np\n'), ((349, 360), 'time.time', 'time.time', ([], {}), '()\n', (358, 360), False, 'import time\n'), ((967, 978), 'time.time', 'time.time', ([], {}), '()\n', (976, 978), False, 'import time\n'), ((1185, 1211), 'numpy.power', 'np.power', (['(x / 255.0)', 'gamma'], {}), '(x / 
255.0, gamma)\n', (1193, 1211), True, 'import numpy as np\n'), ((1814, 1844), 'numpy.array', 'np.array', (['[y, y + h, x, x + w]'], {}), '([y, y + h, x, x + w])\n', (1822, 1844), True, 'import numpy as np\n'), ((1272, 1293), 'numpy.array', 'np.array', (['gamma_table'], {}), '(gamma_table)\n', (1280, 1293), True, 'import numpy as np\n')] |
"""
(*)~---------------------------------------------------------------------------
Pupil - eye tracking platform
Copyright (C) 2012-2019 <NAME>
Distributed under the terms of the GNU
Lesser General Public License (LGPL v3.0).
See COPYING and COPYING.LESSER for license details.
---------------------------------------------------------------------------~(*)
"""
import numpy as np
def intersect_line_line(p11, p12, p21, p22, internal=False):
x1, y1 = p11
x2, y2 = p12
x3, y3 = p21
x4, y4 = p22
if ((x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)) != 0:
Px = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / (
(x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)
)
Py = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / (
(x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)
)
if internal:
if x1 != x2:
lam = (Px - x2) / (x1 - x2)
else:
lam = (Py - y2) / (y1 - y2)
if 0 <= lam <= 1:
return [True, Px, Py]
else:
return [False]
else:
return [True, Px, Py]
else:
return [False]
def intersect_sphere_multiple_lines(sphere_center, radius, points, directions):
# Note: Directions need to be normalized!
intermediate = np.einsum("ij,ij->i", directions, points - sphere_center)
discriminant = (
intermediate ** 2 - np.sum((points - sphere_center) ** 2, axis=1) + radius ** 2
)
idx = discriminant > 0
sqr = np.sqrt(discriminant[idx])
d1 = -intermediate[idx] + sqr
d2 = -intermediate[idx] - sqr
d_final = np.expand_dims(np.minimum(d1, d2), axis=1)
intersections_on_sphere = points[idx] + d_final * directions[idx]
return intersections_on_sphere, idx
def intersect_sphere_line(sphere_center, radius, point, direction):
temp = np.dot(direction, point - sphere_center)
discriminant = temp ** 2 - np.linalg.norm(point - sphere_center) ** 2 + radius ** 2
if discriminant >= 0.0:
sqr = np.sqrt(discriminant)
d1 = -temp + sqr
d2 = -temp - sqr
return [True, d1, d2]
else:
return [False, 0.0, 0.0]
def intersect_plane_line(p_plane, n_plane, p_line, l_line, radius=-1):
if np.dot(n_plane, l_line) == 0 or np.dot(p_plane - p_line, n_plane) == 0:
return [False]
else:
d = np.dot(p_plane - p_line, n_plane) / np.dot(l_line, n_plane)
p_intersect = p_line + d * l_line
if radius > 0:
if np.linalg.norm(p_plane - p_intersect) <= radius[0]:
return [True, p_intersect[0], p_intersect[1], p_intersect[2]]
else:
return [False, 0.0, 0.0, 0.0]
else:
return [True, p_intersect[0], p_intersect[1], p_intersect[2]]
def nearest_point_on_sphere_to_line(center, radius, origin, direction):
intersection = intersect_sphere_line(center, radius, origin, direction)
if intersection[0]:
d = np.min(intersection[1:])
return origin + d * direction
else:
temp = np.dot(direction, center - origin)
origin_prime = origin + temp * direction
direction_prime = center - origin_prime
direction_prime /= np.linalg.norm(direction_prime)
success, d1, d2 = intersect_sphere_line(
center, radius, origin_prime, direction_prime
)
if success:
d = min(d1, d2)
return origin_prime + d * direction_prime
else:
np.zeros(3)
def nearest_intersection_points(p1, p2, p3, p4):
"""Calculates the two nearest points, and their distance to each other on
two lines defined by (p1,p2) respectively (p3,p4)
"""
def mag(p):
return np.sqrt(p.dot(p))
def normalise(p1, p2):
p = p2 - p1
m = mag(p)
if m == 0:
return [0.0, 0.0, 0.0]
else:
return p / m
d1 = normalise(p1, p2)
d2 = normalise(p3, p4)
diff = p1 - p3
a01 = -d1.dot(d2)
b0 = diff.dot(d1)
if np.abs(a01) < 1.0:
# Lines are not parallel.
det = 1.0 - a01 * a01
b1 = -diff.dot(d2)
s0 = (a01 * b1 - b0) / det
s1 = (a01 * b0 - b1) / det
else:
# Lines are parallel, select any pair of closest points.
s0 = -b0
s1 = 0
closestPoint1 = p1 + s0 * d1
closestPoint2 = p3 + s1 * d2
dist = mag(closestPoint2 - closestPoint1)
return closestPoint1, closestPoint2, dist
def nearest_intersection_lines(lines):
dim = len(lines[0].origin)
R = np.zeros((dim, dim))
q = np.zeros(dim)
for line in lines:
v = np.reshape(line.direction, (dim, 1))
A = np.eye(dim) - v @ v.T
R += A
q += A @ line.origin
return np.linalg.pinv(R) @ q
| [
"numpy.minimum",
"numpy.abs",
"numpy.sum",
"numpy.eye",
"numpy.zeros",
"numpy.einsum",
"numpy.min",
"numpy.linalg.norm",
"numpy.reshape",
"numpy.dot",
"numpy.linalg.pinv",
"numpy.sqrt"
] | [((1379, 1436), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'directions', '(points - sphere_center)'], {}), "('ij,ij->i', directions, points - sphere_center)\n", (1388, 1436), True, 'import numpy as np\n'), ((1589, 1615), 'numpy.sqrt', 'np.sqrt', (['discriminant[idx]'], {}), '(discriminant[idx])\n', (1596, 1615), True, 'import numpy as np\n'), ((1933, 1973), 'numpy.dot', 'np.dot', (['direction', '(point - sphere_center)'], {}), '(direction, point - sphere_center)\n', (1939, 1973), True, 'import numpy as np\n'), ((4649, 4669), 'numpy.zeros', 'np.zeros', (['(dim, dim)'], {}), '((dim, dim))\n', (4657, 4669), True, 'import numpy as np\n'), ((4678, 4691), 'numpy.zeros', 'np.zeros', (['dim'], {}), '(dim)\n', (4686, 4691), True, 'import numpy as np\n'), ((1713, 1731), 'numpy.minimum', 'np.minimum', (['d1', 'd2'], {}), '(d1, d2)\n', (1723, 1731), True, 'import numpy as np\n'), ((2104, 2125), 'numpy.sqrt', 'np.sqrt', (['discriminant'], {}), '(discriminant)\n', (2111, 2125), True, 'import numpy as np\n'), ((3054, 3078), 'numpy.min', 'np.min', (['intersection[1:]'], {}), '(intersection[1:])\n', (3060, 3078), True, 'import numpy as np\n'), ((3142, 3176), 'numpy.dot', 'np.dot', (['direction', '(center - origin)'], {}), '(direction, center - origin)\n', (3148, 3176), True, 'import numpy as np\n'), ((3301, 3332), 'numpy.linalg.norm', 'np.linalg.norm', (['direction_prime'], {}), '(direction_prime)\n', (3315, 3332), True, 'import numpy as np\n'), ((4118, 4129), 'numpy.abs', 'np.abs', (['a01'], {}), '(a01)\n', (4124, 4129), True, 'import numpy as np\n'), ((4728, 4764), 'numpy.reshape', 'np.reshape', (['line.direction', '(dim, 1)'], {}), '(line.direction, (dim, 1))\n', (4738, 4764), True, 'import numpy as np\n'), ((4855, 4872), 'numpy.linalg.pinv', 'np.linalg.pinv', (['R'], {}), '(R)\n', (4869, 4872), True, 'import numpy as np\n'), ((1486, 1531), 'numpy.sum', 'np.sum', (['((points - sphere_center) ** 2)'], {'axis': '(1)'}), '((points - sphere_center) ** 2, axis=1)\n', (1492, 
1531), True, 'import numpy as np\n'), ((2329, 2352), 'numpy.dot', 'np.dot', (['n_plane', 'l_line'], {}), '(n_plane, l_line)\n', (2335, 2352), True, 'import numpy as np\n'), ((2361, 2394), 'numpy.dot', 'np.dot', (['(p_plane - p_line)', 'n_plane'], {}), '(p_plane - p_line, n_plane)\n', (2367, 2394), True, 'import numpy as np\n'), ((2446, 2479), 'numpy.dot', 'np.dot', (['(p_plane - p_line)', 'n_plane'], {}), '(p_plane - p_line, n_plane)\n', (2452, 2479), True, 'import numpy as np\n'), ((2482, 2505), 'numpy.dot', 'np.dot', (['l_line', 'n_plane'], {}), '(l_line, n_plane)\n', (2488, 2505), True, 'import numpy as np\n'), ((3578, 3589), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (3586, 3589), True, 'import numpy as np\n'), ((4777, 4788), 'numpy.eye', 'np.eye', (['dim'], {}), '(dim)\n', (4783, 4788), True, 'import numpy as np\n'), ((2005, 2042), 'numpy.linalg.norm', 'np.linalg.norm', (['(point - sphere_center)'], {}), '(point - sphere_center)\n', (2019, 2042), True, 'import numpy as np\n'), ((2586, 2623), 'numpy.linalg.norm', 'np.linalg.norm', (['(p_plane - p_intersect)'], {}), '(p_plane - p_intersect)\n', (2600, 2623), True, 'import numpy as np\n')] |
""" Robot planning problem turned into openai gym-like, reinforcement learning style environment """
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import attr
import copy
import numpy as np
from bc_gym_planning_env.robot_models.tricycle_model import TricycleRobot
from bc_gym_planning_env.robot_models.robot_dimensions_examples import get_dimensions_example
from bc_gym_planning_env.robot_models.robot_examples_factory import create_standard_robot
from bc_gym_planning_env.utilities.costmap_2d import CostMap2D
from bc_gym_planning_env.utilities.serialize import Serializable
from bc_gym_planning_env.utilities.costmap_utils import clone_costmap
from bc_gym_planning_env.utilities.coordinate_transformations import world_to_pixel
from bc_gym_planning_env.utilities.path_tools import get_pixel_footprint
from bc_gym_planning_env.utilities.path_tools import refine_path
from bc_gym_planning_env.envs.base.draw import draw_environment
from bc_gym_planning_env.envs.base.obs import Observation
from bc_gym_planning_env.envs.base.params import EnvParams
from bc_gym_planning_env.envs.base import spaces
from bc_gym_planning_env.envs.base.reward_provider_examples_factory import\
create_reward_provider_state, get_reward_provider_example
from bc_gym_planning_env.utilities.gui import OpenCVGui
def _get_element_from_list_with_delay(item_list, element, delay):
"""
A little util for faking delay of data stream. e.g.
```
l = []
get = generate_delay(l, 3)
for i in range(10):
print get(i)
```
prints
0 0 0 1 2 3 4 5 6
:param item_list list: list of items
:param element object: Just any python object
:param delay int: how many items to delay by
:return: a function that fakes a delay data stream, see above
"""
item_list.append(element)
if len(item_list) > delay:
return item_list.pop(0)
else:
return item_list[0]
@attr.s(cmp=False)
class State(Serializable):
""" State of the environemnt that you can reset your environment to.
However, it doesn't contain parametrization. """
reward_provider_state = attr.ib(type=object)
path = attr.ib(type=np.ndarray)
original_path = attr.ib(type=np.ndarray)
costmap = attr.ib(type=CostMap2D)
iter_timeout = attr.ib(type=int)
current_time = attr.ib(type=float)
current_iter = attr.ib(type=int)
robot_collided = attr.ib(type=bool)
poses_queue = attr.ib(type=list)
robot_state_queue = attr.ib(type=list)
control_queue = attr.ib(type=list)
pose = attr.ib(type=np.ndarray)
robot_state = attr.ib(type=object)
VERSION = 1
def copy(self):
""" Get the copy of the environment.
:return State: get the state of the environment
"""
# pylint: disable=no-member
return attr.evolve(
self,
reward_provider_state=self.reward_provider_state.copy(),
path=np.copy(self.path),
pose=np.copy(self.pose),
original_path=np.copy(self.original_path),
costmap=clone_costmap(self.costmap),
poses_queue=copy.deepcopy(self.poses_queue),
robot_state_queue=copy.deepcopy(self.robot_state_queue),
control_queue=copy.deepcopy(self.control_queue),
robot_state=self.robot_state.copy()
)
def __eq__(self, other):
# pylint: disable=too-many-return-statements
if not isinstance(other, State):
return False
if self.reward_provider_state != other.reward_provider_state:
return False
if (self.path != other.path).any():
return False
if (self.original_path != other.original_path).any():
return False
if self.costmap != other.costmap:
return False
if self.iter_timeout != other.iter_timeout:
return False
if self.current_time != other.current_time:
return False
if self.current_iter != other.current_iter:
return False
if self.robot_collided != other.robot_collided:
return False
if self.poses_queue != other.poses_queue:
return False
if self.robot_state_queue != other.robot_state_queue:
return False
if self.control_queue != other.control_queue:
return False
if (self.pose != other.pose).any():
return False
if self.robot_state != other.robot_state:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
@classmethod
def deserialize(cls, state):
ver = state.pop('version')
assert ver == cls.VERSION
state['costmap'] = CostMap2D.from_state(state['costmap'])
reward_provider_state_instance = create_reward_provider_state(state.pop('reward_provider_state_name'))
state['reward_provider_state'] = reward_provider_state_instance.deserialize(state['reward_provider_state'])
# prepare for robot state deserialization
robot_instance = create_standard_robot(state.pop('robot_type_name'))
robot_state_type = robot_instance.get_state_type()
# deserialize the robot state
state['robot_state'] = robot_state_type.deserialize(state['robot_state'])
# deserialize robot state queue
acc = []
for item in state['robot_state_queue']:
acc.append(robot_state_type.deserialize(item))
state['robot_state_queue'] = acc
return cls(**state)
def serialize(self):
resu = attr.asdict(self)
# pylint: disable=no-member
resu['version'] = self.VERSION
resu['costmap'] = self.costmap.get_state()
resu['reward_provider_state_type_name'] = self.reward_provider_state.get_reward_provider_state_type_name()
resu['reward_provider_state'] = self.reward_provider_state.serialize()
resu['robot_type_name'] = self.robot_state.get_robot_type_name()
resu['robot_state'] = self.robot_state.serialize()
return resu
def make_initial_state(path, costmap, robot, reward_provider, params):
""" Prepare the initial full state of the planning environment
:param path: the static path to follow
:param costmap: the static costmap containg all the obstacles
:param robot: robot - we will execute the motion based on its model
:param reward_provider: an instance of the reward computing class
:param params: parametriztion of the environment
:return State: the full initial state of the environment
"""
if params.refine_path:
path = refine_path(path, params.path_delta)
assert path.shape[1] == 3
# generate robot_state, poses,
initial_pose = path[0]
robot_state = robot.get_initial_state()
robot_state.set_pose(initial_pose)
initial_reward_provider_state = reward_provider.generate_initial_state(path, params.reward_provider_params)
return State(
reward_provider_state=initial_reward_provider_state,
path=np.ascontiguousarray(initial_reward_provider_state.current_path()),
original_path=np.copy(np.ascontiguousarray(path)),
costmap=costmap,
iter_timeout=params.iteration_timeout,
current_time=0.0,
current_iter=0,
robot_collided=False,
pose=initial_pose,
poses_queue=[],
robot_state=robot_state,
robot_state_queue=[],
control_queue=[],
)
class PlanEnv(Serializable):
""" Poses planning problem as OpenAI gym task. """
def __init__(self, costmap, path, params):
"""
:param costmap CostMap2D: costmap denoting obstacles
:param path array(N, 3): oriented path, presented as way points
:param params EnvParams: parametrization of the environment
"""
# Stateful things
self._robot = TricycleRobot(dimensions=get_dimensions_example(params.robot_name))
reward_provider_example = get_reward_provider_example(params.reward_provider_name)
self._reward_provider = reward_provider_example(params=params.reward_provider_params)
# Properties, things without state
self.action_space = spaces.Box(
low=np.array([self._robot.get_max_front_wheel_speed() / 10, -np.pi/2]),
high=np.array([self._robot.get_max_front_wheel_speed() / 2, np.pi/2]),
dtype=np.float32)
self.reward_range = (0.0, 1.0)
self._gui = OpenCVGui()
self._params = params
# State
self._state = make_initial_state(path, costmap, self._robot, self._reward_provider, params)
self._initial_state = self._state.copy()
self.set_state(self._state)
def serialize(self):
serialized = {
'version': self.VERSION,
'state': self._state.serialize(),
'params': self._params.serialize(),
'path': self._state.original_path,
'costmap': self._state.costmap.get_state()
}
return serialized
@classmethod
def deserialize(cls, state):
ver = state.pop('version')
assert ver == cls.VERSION
init_costmap = CostMap2D.from_state(state['costmap'])
init_path = state['path']
params = EnvParams.deserialize(state['params'])
state = State.deserialize(state['state'])
instance = cls(init_costmap, init_path, params)
instance.set_state(state)
return instance
def set_state(self, state):
""" Set the state of the environment
:param state State: State of the environment to set the env to
"""
state = state.copy()
self._state = state
self._robot.set_state(self._state.robot_state)
self._reward_provider.set_state(self._state.reward_provider_state)
def get_state(self):
""" Get current state (but not parametrization) of the environment
:return State: the state of the environment
"""
return self._state.copy()
def reset(self):
"""
Resets the state of the environment and returns an initial observation.
Resets the 'done' state as well.
:return Observation: observation on reset of the environment,
to be fed to agent as the initial observation.
"""
self.set_state(self._initial_state)
return self._extract_obs()
def render(self, mode='human'):
"""
Render human-friendly representation of the environment on the screen.
:param mode str: the mode of rendering, currently only 'human' works
:return np.ndarray: the human-friendly image representation returned by the environment
"""
if mode not in ['human', 'rgb_array']:
raise NotImplementedError
img = draw_environment(self._state.path, self._state.original_path, self._robot, self._state.costmap)
if mode == 'human':
return self._gui.display(img)
else:
return img
def close(self):
""" Do whatever you need to do on closing: release the resources etc. """
self._gui.close()
def seed(self, seed=None):
""" Seeding actually doesn't do on the level of this environment,
as it should be fully deterministic. The environments deriving or
using this class it might do something here
:param seed object: whatever you want to use for seeding
"""
pass
def step(self, action):
"""
Run one timestep of the planning environment's dynamics, until end of
episode is reached.
Returns:
observation (Observation): agent's observation of the current environment
reward (float) : amount of reward returned after previous action
done (boolean): whether the episode has ended, in which case further step() calls have no point
info (dict): contains auxiliary diagnostic information (e.g. helpful for debugging)
:param action: (wheel_v, wheel_angle)
:return Tuple[Observation, float, bool, Dict]: the stuff env shuold return
"""
# Process the environment dynamics
self._state = self._resolve_state_transition(action, self._state)
reward = self._reward_provider.reward(self._state)
self._state.reward_provider_state = self._reward_provider.get_state()
self._state.path = self._reward_provider.get_current_path()
obs = self._extract_obs()
info = self._extract_info()
done = self._extract_done(self._state)
return obs, reward, done, info
def _resolve_state_transition(self, action, state):
"""
Mutate state of the environment based on the received motion command.
:param action Tuple[float, float]: motion command (wheel_v, wheel_angle)
:param state State: current state of the environment
:return State: the state of the environment after application of the transition function
"""
delayed_action = _get_element_from_list_with_delay(
state.control_queue, action, self._params.control_delay
)
collided = _env_step(self._state.costmap, self._robot, self._params.dt, delayed_action)
pose = self._robot.get_pose()
delayed_pose = _get_element_from_list_with_delay(
state.poses_queue, pose, self._params.pose_delay
)
current_time = state.current_time + self._params.dt
current_iter = state.current_iter + 1
robot_state = self._robot.get_state()
delayed_robot_state = _get_element_from_list_with_delay(
state.robot_state_queue, robot_state, self._params.state_delay
)
state.current_time = current_time
state.current_iter = current_iter
state.robot_collided = state.robot_collided or collided
state.pose = delayed_pose
state.path = self._reward_provider.get_current_path()
state.robot_state = delayed_robot_state
return state
def _has_timed_out(self):
"""
Has the environment timed out?
:return bool: Has the environment timed out?
"""
return self._state.current_iter >= self._params.iteration_timeout
def _extract_done(self, state):
"""
Extract if we are done with this enviroment.
For example we are done, if the goal has been reached,
we have timed out or the robot has collided.
:param state: current state of the environment
:return bool: are we done with this planning environment?
"""
goal_reached = self._reward_provider.done(state)
timed_out = self._has_timed_out()
done = goal_reached or timed_out or self._state.robot_collided
return done
def _extract_obs(self):
"""
Extract an observation from the environment.
:return Observation: the observation to process
"""
return Observation(
pose=self._state.pose,
path=self._state.path,
costmap=self._state.costmap,
robot_state=self._state.robot_state,
time=self._state.current_time,
dt=self._params.dt
)
@staticmethod
def _extract_info():
""" Extract debug information from the env. For now empty.
:return Dict: empty dict (for now) """
return {}
def _env_step(costmap, robot, dt, control_signals):
"""
Execute movement step for the robot.
:param costmap Costmap2D: costmap containing the obstacles to potentially collide with
:param robot: Robot that will execute the movement based on its model
:param dt: time interval between time steps
:param control_signals: motion primitives to executed
:return bool: Does it collide?
"""
old_position = robot.get_pose()
robot.step(dt, control_signals)
new_position = robot.get_pose()
x, y, angle = new_position
collides = pose_collides(x, y, angle, robot, costmap)
if collides:
robot.set_pose(*old_position)
return collides
def pose_collides(x, y, angle, robot, costmap):
    """
    Test whether the robot footprint, placed at (x, y) in world coordinates
    and rotated by ``angle``, overlaps any lethal obstacle in the costmap.

    :param x: robot pose
    :param y: robot pose
    :param angle: robot pose
    :param robot: Robot that will supply the footprint
    :param costmap Costmap2D: costmap containing the obstacles to collide with
    :return bool : does the pose collide?
    """
    resolution = costmap.get_resolution()
    footprint_image = get_pixel_footprint(angle, robot.get_footprint(), resolution)
    # Pixel offsets of the footprint cells inside the kernel image.
    fp_rows, fp_cols = np.where(footprint_image)
    # Translate the footprint so it is centered at (x, y), all in pixel coordinates.
    x, y = world_to_pixel(np.array([x, y]), costmap.get_origin(), resolution)
    rows = y + fp_rows - footprint_image.shape[0] // 2
    cols = x + fp_cols - footprint_image.shape[1] // 2
    obstacle_map = costmap.get_data()
    # Keep only the footprint pixels that land inside the map bounds
    # (coordinates must be non-negative and within the map shape).
    inside = np.logical_and(
        np.logical_and(rows >= 0, rows < obstacle_map.shape[0]),
        np.logical_and(cols >= 0, cols < obstacle_map.shape[1]))
    # Collision iff any in-bounds footprint pixel sits on a lethal cell.
    return bool(np.any(obstacle_map[rows[inside], cols[inside]] == CostMap2D.LETHAL_OBSTACLE))
| [
"bc_gym_planning_env.envs.base.reward_provider_examples_factory.get_reward_provider_example",
"attr.s",
"bc_gym_planning_env.utilities.path_tools.refine_path",
"numpy.copy",
"attr.asdict",
"bc_gym_planning_env.envs.base.obs.Observation",
"bc_gym_planning_env.envs.base.params.EnvParams.deserialize",
"c... | [((1974, 1991), 'attr.s', 'attr.s', ([], {'cmp': '(False)'}), '(cmp=False)\n', (1980, 1991), False, 'import attr\n'), ((2173, 2193), 'attr.ib', 'attr.ib', ([], {'type': 'object'}), '(type=object)\n', (2180, 2193), False, 'import attr\n'), ((2205, 2229), 'attr.ib', 'attr.ib', ([], {'type': 'np.ndarray'}), '(type=np.ndarray)\n', (2212, 2229), False, 'import attr\n'), ((2250, 2274), 'attr.ib', 'attr.ib', ([], {'type': 'np.ndarray'}), '(type=np.ndarray)\n', (2257, 2274), False, 'import attr\n'), ((2289, 2312), 'attr.ib', 'attr.ib', ([], {'type': 'CostMap2D'}), '(type=CostMap2D)\n', (2296, 2312), False, 'import attr\n'), ((2332, 2349), 'attr.ib', 'attr.ib', ([], {'type': 'int'}), '(type=int)\n', (2339, 2349), False, 'import attr\n'), ((2369, 2388), 'attr.ib', 'attr.ib', ([], {'type': 'float'}), '(type=float)\n', (2376, 2388), False, 'import attr\n'), ((2408, 2425), 'attr.ib', 'attr.ib', ([], {'type': 'int'}), '(type=int)\n', (2415, 2425), False, 'import attr\n'), ((2447, 2465), 'attr.ib', 'attr.ib', ([], {'type': 'bool'}), '(type=bool)\n', (2454, 2465), False, 'import attr\n'), ((2484, 2502), 'attr.ib', 'attr.ib', ([], {'type': 'list'}), '(type=list)\n', (2491, 2502), False, 'import attr\n'), ((2527, 2545), 'attr.ib', 'attr.ib', ([], {'type': 'list'}), '(type=list)\n', (2534, 2545), False, 'import attr\n'), ((2566, 2584), 'attr.ib', 'attr.ib', ([], {'type': 'list'}), '(type=list)\n', (2573, 2584), False, 'import attr\n'), ((2596, 2620), 'attr.ib', 'attr.ib', ([], {'type': 'np.ndarray'}), '(type=np.ndarray)\n', (2603, 2620), False, 'import attr\n'), ((2639, 2659), 'attr.ib', 'attr.ib', ([], {'type': 'object'}), '(type=object)\n', (2646, 2659), False, 'import attr\n'), ((16851, 16873), 'numpy.where', 'np.where', (['kernel_image'], {}), '(kernel_image)\n', (16859, 16873), True, 'import numpy as np\n'), ((4800, 4838), 'bc_gym_planning_env.utilities.costmap_2d.CostMap2D.from_state', 'CostMap2D.from_state', (["state['costmap']"], {}), "(state['costmap'])\n", (4820, 
4838), False, 'from bc_gym_planning_env.utilities.costmap_2d import CostMap2D\n'), ((5651, 5668), 'attr.asdict', 'attr.asdict', (['self'], {}), '(self)\n', (5662, 5668), False, 'import attr\n'), ((6699, 6735), 'bc_gym_planning_env.utilities.path_tools.refine_path', 'refine_path', (['path', 'params.path_delta'], {}), '(path, params.path_delta)\n', (6710, 6735), False, 'from bc_gym_planning_env.utilities.path_tools import refine_path\n'), ((8051, 8107), 'bc_gym_planning_env.envs.base.reward_provider_examples_factory.get_reward_provider_example', 'get_reward_provider_example', (['params.reward_provider_name'], {}), '(params.reward_provider_name)\n', (8078, 8107), False, 'from bc_gym_planning_env.envs.base.reward_provider_examples_factory import create_reward_provider_state, get_reward_provider_example\n'), ((8542, 8553), 'bc_gym_planning_env.utilities.gui.OpenCVGui', 'OpenCVGui', ([], {}), '()\n', (8551, 8553), False, 'from bc_gym_planning_env.utilities.gui import OpenCVGui\n'), ((9252, 9290), 'bc_gym_planning_env.utilities.costmap_2d.CostMap2D.from_state', 'CostMap2D.from_state', (["state['costmap']"], {}), "(state['costmap'])\n", (9272, 9290), False, 'from bc_gym_planning_env.utilities.costmap_2d import CostMap2D\n'), ((9342, 9380), 'bc_gym_planning_env.envs.base.params.EnvParams.deserialize', 'EnvParams.deserialize', (["state['params']"], {}), "(state['params'])\n", (9363, 9380), False, 'from bc_gym_planning_env.envs.base.params import EnvParams\n'), ((10903, 11002), 'bc_gym_planning_env.envs.base.draw.draw_environment', 'draw_environment', (['self._state.path', 'self._state.original_path', 'self._robot', 'self._state.costmap'], {}), '(self._state.path, self._state.original_path, self._robot,\n self._state.costmap)\n', (10919, 11002), False, 'from bc_gym_planning_env.envs.base.draw import draw_environment\n'), ((15081, 15265), 'bc_gym_planning_env.envs.base.obs.Observation', 'Observation', ([], {'pose': 'self._state.pose', 'path': 'self._state.path', 'costmap': 
'self._state.costmap', 'robot_state': 'self._state.robot_state', 'time': 'self._state.current_time', 'dt': 'self._params.dt'}), '(pose=self._state.pose, path=self._state.path, costmap=self.\n _state.costmap, robot_state=self._state.robot_state, time=self._state.\n current_time, dt=self._params.dt)\n', (15092, 15265), False, 'from bc_gym_planning_env.envs.base.obs import Observation\n'), ((16956, 16972), 'numpy.array', 'np.array', (['[x, y]'], {}), '([x, y])\n', (16964, 16972), True, 'import numpy as np\n'), ((17300, 17368), 'numpy.logical_and', 'np.logical_and', (['(collisions[0] >= 0)', '(collisions[0] < raw_map.shape[0])'], {}), '(collisions[0] >= 0, collisions[0] < raw_map.shape[0])\n', (17314, 17368), True, 'import numpy as np\n'), ((17396, 17464), 'numpy.logical_and', 'np.logical_and', (['(collisions[1] >= 0)', '(collisions[1] < raw_map.shape[1])'], {}), '(collisions[1] >= 0, collisions[1] < raw_map.shape[1])\n', (17410, 17464), True, 'import numpy as np\n'), ((17598, 17689), 'numpy.any', 'np.any', (['(raw_map[collisions[0][good], collisions[1][good]] == CostMap2D.LETHAL_OBSTACLE\n )'], {}), '(raw_map[collisions[0][good], collisions[1][good]] == CostMap2D.\n LETHAL_OBSTACLE)\n', (17604, 17689), True, 'import numpy as np\n'), ((2979, 2997), 'numpy.copy', 'np.copy', (['self.path'], {}), '(self.path)\n', (2986, 2997), True, 'import numpy as np\n'), ((3016, 3034), 'numpy.copy', 'np.copy', (['self.pose'], {}), '(self.pose)\n', (3023, 3034), True, 'import numpy as np\n'), ((3062, 3089), 'numpy.copy', 'np.copy', (['self.original_path'], {}), '(self.original_path)\n', (3069, 3089), True, 'import numpy as np\n'), ((3111, 3138), 'bc_gym_planning_env.utilities.costmap_utils.clone_costmap', 'clone_costmap', (['self.costmap'], {}), '(self.costmap)\n', (3124, 3138), False, 'from bc_gym_planning_env.utilities.costmap_utils import clone_costmap\n'), ((3164, 3195), 'copy.deepcopy', 'copy.deepcopy', (['self.poses_queue'], {}), '(self.poses_queue)\n', (3177, 3195), False, 
'import copy\n'), ((3227, 3264), 'copy.deepcopy', 'copy.deepcopy', (['self.robot_state_queue'], {}), '(self.robot_state_queue)\n', (3240, 3264), False, 'import copy\n'), ((3292, 3325), 'copy.deepcopy', 'copy.deepcopy', (['self.control_queue'], {}), '(self.control_queue)\n', (3305, 3325), False, 'import copy\n'), ((7216, 7242), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['path'], {}), '(path)\n', (7236, 7242), True, 'import numpy as np\n'), ((7974, 8015), 'bc_gym_planning_env.robot_models.robot_dimensions_examples.get_dimensions_example', 'get_dimensions_example', (['params.robot_name'], {}), '(params.robot_name)\n', (7996, 8015), False, 'from bc_gym_planning_env.robot_models.robot_dimensions_examples import get_dimensions_example\n')] |
import pytest
#from functools import reduce
import numpy as np
from numpy.testing import assert_allclose
from .test_fixtures import *
from ..standard_systems import LMT, SI
from .. import meta
from .. import solver as slv
from .. import utils as u
def test_solve_e_has_zero_rows():
    """A unique solution means e has zero rows (no freely chosen variables)."""
    # Rows below are the M, F, T dimension vectors; transpose gives DxV.
    dm = np.array([[0., 1, 0], [1, 1, -2], [0, 0, 1]]).T
    target = [1., 0, 0]
    P = slv.solve(dm, target)  # PxV
    assert P.shape == (1, 3)
    assert_allclose(P @ dm.T, [target])  # PxD
def test_solve_with_e():
    """Rank-two matrix whose leading 2x2 block stays singular without a column swap."""
    dm = np.array([
        [1., 1, 0],
        [0, 0, 0],
        [0, 0, 0],
    ])
    # strict=False lets the solver proceed despite the degenerate A block.
    slv.solve(dm, [0, 0, 0.], strict=False)
@pytest.mark.usefixtures('dm_example_72')
def test_solve_72(dm_example_72):
    """Example 7.2: solved with no row deletion and no column swap."""
    P = slv.solve(dm_example_72, [3., 5., 7.])
    assert P.shape == (3, 5)
    expected = np.tile([[3., 5., 7.]], (3, 1))
    assert_allclose(P @ dm_example_72.T, expected)  # PxD
@pytest.mark.usefixtures('dm_example_72')
def test_solve_72_with_e(dm_example_72):
    """Explicitly specify matrix-e using the values from pp. 138."""
    e = np.array([[1, 0], [2, 0]])
    opts = slv.SolverOptions(col_perm=range(5), e=e)
    P = slv.solve(dm_example_72, [3., 5., 7.], opts=opts)
    assert P.shape == (2, 5)
    expected = [
        [1., 2, -1.8, 0.6, 0.2],
        [0, 0, 37 / 15., 6 / 15., -18 / 15.],
    ]
    assert_allclose(P, expected)
@pytest.mark.usefixtures('dm_example_78')
def test_solve_78(dm_example_78):
    """Example 7.8: requires a single row deletion."""
    P = slv.solve(dm_example_78, [2., 0, 0.])
    assert P.shape == (4, 5)
    expected = [
        [1., 0., 0., 0., 1.],
        [0., 1., 0., -1., 0.],
        [0., 0., 1., 0., 1.],
        [1., 1., 0., -1., -1.],
    ]
    assert_allclose(P, expected)
    assert_allclose(P @ dm_example_78.T, np.tile([[2., 0., 0.]], (4, 1)))  # PxD
@pytest.mark.usefixtures('dm_example_72')
def test_solver(dm_example_72):
    """Exercise the Solver wrapper; e-matrix values taken from pp. 138."""
    L, M, T = LMT.base_quantities()
    # Interpret dm in the LMT system.
    variables = LMT.qs_from_dm(dm_example_72)
    s = slv.Solver(variables, LMT.q([3., 5., 7.]))
    assert s.variables == {
        'a': L * M**2 * T**3,
        'b': L**2 * M**4 * T**4,
        'c': L**3 * M**3 * T**3,
        'd': L**4 * T**2,
        'e': L**5 * M**2 * T}
    r = s.solve()
    assert_allclose(r.P @ dm_example_72.T, np.tile([[3., 5., 7.]], (3, 1)))
    opts = slv.SolverOptions(col_perm=range(5), e=np.array([[1, 0], [2, 0]]))
    r = s.solve(select_values={'a': [1, 0], 'b': [2, 0]})
    assert r.P.shape == (2, 5)
    assert_allclose(r.P, [
        [1., 2, -1.8, 0.6, 0.2],
        [0, 0, 37 / 15., 6 / 15., -18 / 15.],
    ])
    r = s.solve(select_values={'d': [1], 'e': [2]})
    assert r.P.shape == (1, 5)
    assert_allclose(r.P, [
        [2, 5, -7.666667, 1, 2],
    ])
"numpy.testing.assert_allclose",
"numpy.array",
"numpy.tile",
"pytest.mark.usefixtures"
] | [((830, 870), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""dm_example_72"""'], {}), "('dm_example_72')\n", (853, 870), False, 'import pytest\n'), ((1098, 1138), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""dm_example_72"""'], {}), "('dm_example_72')\n", (1121, 1138), False, 'import pytest\n'), ((1523, 1563), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""dm_example_78"""'], {}), "('dm_example_78')\n", (1546, 1563), False, 'import pytest\n'), ((1949, 1989), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""dm_example_72"""'], {}), "('dm_example_72')\n", (1972, 1989), False, 'import pytest\n'), ((550, 590), 'numpy.testing.assert_allclose', 'assert_allclose', (['(P @ dm.T)', '[[1.0, 0, 0]]'], {}), '(P @ dm.T, [[1.0, 0, 0]])\n', (565, 590), False, 'from numpy.testing import assert_allclose\n'), ((630, 675), 'numpy.array', 'np.array', (['[[1.0, 1, 0], [0, 0, 0], [0, 0, 0]]'], {}), '([[1.0, 1, 0], [0, 0, 0], [0, 0, 0]])\n', (638, 675), True, 'import numpy as np\n'), ((1411, 1503), 'numpy.testing.assert_allclose', 'assert_allclose', (['P', '[[1.0, 2, -1.8, 0.6, 0.2], [0, 0, 37 / 15.0, 6 / 15.0, -18 / 15.0]]'], {}), '(P, [[1.0, 2, -1.8, 0.6, 0.2], [0, 0, 37 / 15.0, 6 / 15.0, -\n 18 / 15.0]])\n', (1426, 1503), False, 'from numpy.testing import assert_allclose\n'), ((1702, 1837), 'numpy.testing.assert_allclose', 'assert_allclose', (['P', '[[1.0, 0.0, 0.0, 0.0, 1.0], [0.0, 1.0, 0.0, -1.0, 0.0], [0.0, 0.0, 1.0, 0.0,\n 1.0], [1.0, 1.0, 0.0, -1.0, -1.0]]'], {}), '(P, [[1.0, 0.0, 0.0, 0.0, 1.0], [0.0, 1.0, 0.0, -1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 1.0], [1.0, 1.0, 0.0, -1.0, -1.0]])\n', (1717, 1837), False, 'from numpy.testing import assert_allclose\n'), ((2666, 2759), 'numpy.testing.assert_allclose', 'assert_allclose', (['r.P', '[[1.0, 2, -1.8, 0.6, 0.2], [0, 0, 37 / 15.0, 6 / 15.0, -18 / 15.0]]'], {}), '(r.P, [[1.0, 2, -1.8, 0.6, 0.2], [0, 0, 37 / 15.0, 6 / 15.0,\n -18 / 15.0]])\n', (2681, 2759), False, 'from numpy.testing 
import assert_allclose\n'), ((2855, 2902), 'numpy.testing.assert_allclose', 'assert_allclose', (['r.P', '[[2, 5, -7.666667, 1, 2]]'], {}), '(r.P, [[2, 5, -7.666667, 1, 2]])\n', (2870, 2902), False, 'from numpy.testing import assert_allclose\n'), ((381, 427), 'numpy.array', 'np.array', (['[[0.0, 1, 0], [1, 1, -2], [0, 0, 1]]'], {}), '([[0.0, 1, 0], [1, 1, -2], [0, 0, 1]])\n', (389, 427), True, 'import numpy as np\n'), ((1059, 1093), 'numpy.tile', 'np.tile', (['[[3.0, 5.0, 7.0]]', '(3, 1)'], {}), '([[3.0, 5.0, 7.0]], (3, 1))\n', (1066, 1093), True, 'import numpy as np\n'), ((1911, 1945), 'numpy.tile', 'np.tile', (['[[2.0, 0.0, 0.0]]', '(4, 1)'], {}), '([[2.0, 0.0, 0.0]], (4, 1))\n', (1918, 1945), True, 'import numpy as np\n'), ((2463, 2497), 'numpy.tile', 'np.tile', (['[[3.0, 5.0, 7.0]]', '(3, 1)'], {}), '([[3.0, 5.0, 7.0]], (3, 1))\n', (2470, 2497), True, 'import numpy as np\n'), ((1294, 1320), 'numpy.array', 'np.array', (['[[1, 0], [2, 0]]'], {}), '([[1, 0], [2, 0]])\n', (1302, 1320), True, 'import numpy as np\n'), ((2548, 2574), 'numpy.array', 'np.array', (['[[1, 0], [2, 0]]'], {}), '([[1, 0], [2, 0]])\n', (2556, 2574), True, 'import numpy as np\n')] |
from __future__ import division,absolute_import,print_function
import numpy as np
import pandas as pd
def pricenorm3d(m, features, norm_method, fake_ratio=1.0, with_y=True):
    """Normalize the price tensor, whose shape is [features, coins, windowsize].

    Every feature slice is normalized against the closing price of the
    reference period: the last column, or the second-to-last when the
    tensor also carries y (the future price).

    :param m: input tensor, unnormalized and there could be nan in it
    :param features: feature names; the first entry must be "close"
    :param norm_method: normalization scheme forwarded to pricenorm2d
    :param fake_ratio: fill ratio used for missing values
    :param with_y: if the tensor include y (future price)
    """
    normalized = m.copy()
    if features[0] != "close":
        raise ValueError("first feature must be close")
    # The reference close sits one column further from the end when y is present.
    reference_index = 2 if with_y else 1
    for feature_slice in range(len(features)):
        pricenorm2d(normalized[feature_slice], m[0, :, -reference_index],
                    norm_method=norm_method, fake_ratio=fake_ratio,
                    one_position=reference_index)
    return normalized
def pricenorm2d(m, reference_column,
                norm_method="absolute", fake_ratio=1.0, one_position=2):
    """Normalize a 2d price matrix of shape (coinnumber+1) x windowsize, in place.

    "absolute": each row is divided by its reference price; nan entries are
    back-filled by repeatedly dividing the following value by ``fake_ratio``.
    "relative": each column is replaced by its ratio to the previous column,
    with nans (including the now-undefined first column) set to ``fake_ratio``.

    :param m: matrix to normalize; mutated in place
    :param reference_column: per-row reference prices ("absolute" only)
    :param norm_method: "absolute" or "relative"
    :param fake_ratio: substitute ratio for missing values
    :param one_position: offset from the end of the reference column
    :raises ValueError: for an unknown norm_method
    """
    if norm_method == "absolute":
        normalized = np.zeros(m.shape)
        for idx, row in enumerate(m):
            missing_reference = (np.isnan(row[-one_position])
                                 or np.isnan(reference_column[idx]))
            if missing_reference:
                # Anchor the reference cell at 1.0 and back-fill everything
                # before it by successive division with fake_ratio.
                row[-one_position] = 1.0
                for step in range(1, row.shape[0] - one_position + 1):
                    row[-one_position - step] = row[-one_position - step + 1] / fake_ratio
                row[-one_position] = 1.0
                row[-1] = fake_ratio
            else:
                row = row / reference_column[idx]
                # Back-fill only the entries that are still nan.
                for step in range(1, row.shape[0] - one_position + 1):
                    if np.isnan(row[-one_position - step]):
                        row[-one_position - step] = row[-one_position - step + 1] / fake_ratio
                if np.isnan(row[-1]):
                    row[-1] = fake_ratio
            normalized[idx] = row
        m[:] = normalized[:]
    elif norm_method == "relative":
        # Ratio of each column to the previous one; the first column has no
        # predecessor and starts out as nan.
        ratios = m[:, 1:] / m[:, :-1]
        first_col = np.full((m.shape[0], 1), np.nan)
        m[:] = np.concatenate((first_col, ratios), axis=1)
        m[np.isnan(m)] = fake_ratio
    else:
        raise ValueError("there is no norm morthod called %s" % norm_method)
def get_chart_until_success(polo, pair, start, period, end):
    """Fetch a market chart, retrying until the request succeeds.

    Any exception from the client is printed and the call is retried
    indefinitely (best-effort against flaky connections).

    :param polo: exchange client exposing ``marketChart``
    :param pair: currency pair identifier
    :param start: chart start time (cast to int)
    :param period: candle period (cast to int)
    :param end: chart end time (cast to int)
    :return: the chart returned by the exchange
    """
    while True:
        try:
            return polo.marketChart(pair=pair, start=int(start),
                                    period=int(period), end=int(end))
        except Exception as e:
            print(e)
def get_type_list(feature_number):
    """
    Map a feature count to the corresponding list of feature names.

    :param feature_number: an int indicates the number of features
    :return: a list of feature names
    :raises NotImplementedError: for feature_number == 2 (volume unsupported)
    :raises ValueError: for any other unsupported feature count
    """
    if feature_number == 1:
        return ["close"]
    if feature_number == 2:
        # The volume feature is declared but cannot be used yet.
        raise NotImplementedError("the feature volume is not supported currently")
    if feature_number == 3:
        return ["close", "high", "low"]
    if feature_number == 4:
        return ["close", "high", "low", "open"]
    raise ValueError("feature number could not be %s" % feature_number)
def panel2array(panel):
    """Convert the panel to a datatensor (numpy array) without btc.

    The last axis of ``panel.values`` is moved to the front,
    i.e. (a, b, c) becomes (c, a, b).
    """
    return np.transpose(panel.values, axes=(2, 0, 1))
def count_periods(start, end, period_length):
    """
    Count how many whole periods fit in the interval.

    :param start: unix time, excluded
    :param end: unix time, included
    :param period_length: length of the period
    :return: number of complete periods between start and end
    """
    span = int(end) - int(start)
    return span // period_length
def get_volume_forward(time_span, portion, portion_reversed):
    """Return the forward volume offset for a training window.

    :param time_span: total time span of the data
    :param portion: fraction of the span reserved
    :param portion_reversed: when True the offset is 0
    :return: ``time_span * portion`` unless the portion is reversed
    """
    return 0 if portion_reversed else time_span * portion
def panel_fillna(panel, type="bfill"):
    """
    fill nan along the 3rd axis
    :param panel: the panel to be filled
    :param type: "bfill", "ffill", or "both" (bfill followed by ffill)
    :return: a new panel built from the filled frames
    """
    filled_frames = {}
    for item in panel.items:
        frame = panel.loc[item]
        if type == "both":
            filled_frames[item] = frame.fillna(axis=1, method="bfill") \
                .fillna(axis=1, method="ffill")
        else:
            filled_frames[item] = frame.fillna(axis=1, method=type)
    return pd.Panel(filled_frames)
| [
"numpy.empty",
"numpy.zeros",
"numpy.transpose",
"numpy.isnan",
"pandas.Panel",
"numpy.concatenate"
] | [((3493, 3535), 'numpy.transpose', 'np.transpose', (['panel.values'], {'axes': '(2, 0, 1)'}), '(panel.values, axes=(2, 0, 1))\n', (3505, 3535), True, 'import numpy as np\n'), ((4443, 4459), 'pandas.Panel', 'pd.Panel', (['frames'], {}), '(frames)\n', (4451, 4459), True, 'import pandas as pd\n'), ((1096, 1113), 'numpy.zeros', 'np.zeros', (['m.shape'], {}), '(m.shape)\n', (1104, 1113), True, 'import numpy as np\n'), ((2158, 2183), 'numpy.empty', 'np.empty', (['(m.shape[0], 1)'], {}), '((m.shape[0], 1))\n', (2166, 2183), True, 'import numpy as np\n'), ((2225, 2262), 'numpy.concatenate', 'np.concatenate', (['(pad, output)'], {'axis': '(1)'}), '((pad, output), axis=1)\n', (2239, 2262), True, 'import numpy as np\n'), ((1174, 1202), 'numpy.isnan', 'np.isnan', (['row[-one_position]'], {}), '(row[-one_position])\n', (1182, 1202), True, 'import numpy as np\n'), ((1206, 1244), 'numpy.isnan', 'np.isnan', (['reference_column[row_number]'], {}), '(reference_column[row_number])\n', (1214, 1244), True, 'import numpy as np\n'), ((1900, 1917), 'numpy.isnan', 'np.isnan', (['row[-1]'], {}), '(row[-1])\n', (1908, 1917), True, 'import numpy as np\n'), ((2273, 2284), 'numpy.isnan', 'np.isnan', (['m'], {}), '(m)\n', (2281, 2284), True, 'import numpy as np\n'), ((1746, 1782), 'numpy.isnan', 'np.isnan', (['row[-one_position - index]'], {}), '(row[-one_position - index])\n', (1754, 1782), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
import os
import sys
import copy
import json
import math
import pickle
from pprint import pprint
import prody
import pandas as pd
import numpy as np
from .motifs import Generate_Constraints
from .utils import *
import pyrosetta
from pyrosetta import rosetta
def generate_constrained_backrub_ensemble(raw_match_path, matcher_constraint):
    """
    Generate a backrub ensemble around the hypothetical binding site with matcher constraints applied.

    Not implemented yet; currently a no-op placeholder.

    :param raw_match_path: path to raw match output
    :param matcher_constraint: constraint file used to find match
    """
    pass
def parse_matcher_remarks(match_path):
    """
    Parse matcher remarks to return match positions.

    Scans the file for ``REMARK 666 MATCH TEMPLATE ... MATCH MOTIF`` header
    lines and collects the motif residue numbers they reference.

    :param match_path: path to a match PDB file with matcher remark header
    :return: list of motif residue numbers (ints), in file order
    """
    motif_resnums = list()
    with open(match_path, 'r') as match:
        for line in match:
            split_remark = line.split()
            # Guard against blank or short lines: indexing split_remark[0]
            # or split_remark[11] on them raised IndexError before.  A valid
            # matcher remark has at least 12 whitespace-separated fields.
            if len(split_remark) < 12 or split_remark[0] != 'REMARK':
                continue
            if all([split_remark[:4] == ['REMARK', '666', 'MATCH', 'TEMPLATE'],
                    split_remark[7:9] == ['MATCH', 'MOTIF']]):
                motif_resnums.append(int(split_remark[11]))
    return motif_resnums
def generate_fuzzball_contact_rotamersets(ligand_conformer_path, match_path, match_pose, sfxn, match_residue_map,
                                          flag_special_rot=True, custom_taskop=None, rotset_limit=200,
                                          contact_method='RMSD', RMSD_limit=1.5, apply_minimization=False,
                                          dump_rotamerset_pdb=False, report_stats=False, defined_positions=None):
    """
    Generate rotamers that recapitulate observed fuzzball contacts for each position in a nucleated match
    :param ligand_conformer_path: path to ligand generated by molfile_to_params.py
    :param match_path: path to the match PDB; its 'REMARK 666 MATCH TEMPLATE' header lines supply motif positions
    :param match_pose: pose of the match; a single ligand is assumed to be the last residue
    :param sfxn: Rosetta score function used for rotamer building and scoring
    :param match_residue_map: mapping {position: {residue_name1: [contact_mode, ...]}} of candidate contacts
    :param flag_special_rot: If true, flag rotamers as SPECIAL_ROT variants
    :param custom_taskop: list of task operations to apply to the PackerTask used to generate rotamers
        (NOTE(review): not referenced anywhere in this implementation)
    :param rotset_limit: keep at most this many best-scoring rotamers per position/residue type
    :param contact_method: 'RMSD' or 'matcher' -- how candidate rotamers are compared to contact modes
    :param RMSD_limit: acceptance cutoff in Angstrom when contact_method == 'RMSD'
    :param apply_minimization: if True, minimize each candidate rotamer's chi angles before scoring
    :param dump_rotamerset_pdb: if True, additionally collect accepted rotamers and dump them to a PDB
    :param report_stats: if True, also return per-position rotamer statistics
    :param defined_positions: optional list of positions; restricts which match positions are considered
    :return: viable_rotamers dictionary of rotamers organized by position and residue identity
        (returned together with rotamer_stats when report_stats is True)
    """
    sfxn_weights = sfxn.weights()
    conformer_resnum = match_pose.size()  # Assumes single ligand appended to end of sequence

    if contact_method not in ['RMSD', 'matcher']:
        raise Exception('Contact method needs to be one of the following: "RMSD", "matcher"')

    # --- Find and store viable rotamers --- #

    viable_rotamers = dict()
    rotamer_stats = dict()

    # Setting things up is going to mess up the match pose, so use a clone
    match_pose_clone = match_pose.clone()
    sfxn(match_pose_clone)

    # --- Transform match pose clone onto fuzzball conformer --- #
    """Required for contact coordsets to make sense"""

    # Get ligand from match, always last residue
    # todo: select chain X, ligand is always chain X
    match_pose_size = match_pose_clone.size()
    match_ligand = match_pose_clone.residue(match_pose_size)

    # Get match positions if they exist
    motif_resnums = list()
    with open(match_path, 'r') as my_match:
        for line in my_match:
            if line.startswith('REMARK 666 MATCH TEMPLATE'):
                # Field 11 of the remark is the motif residue number
                motif_resnums.append(int(line.split()[11]))

    motif_and_ligand_resnums = motif_resnums + [conformer_resnum]

    # Keep track of match positions and compatible residue identites
    # match_residue_map = {position: dict() for position in range(1, match_pose.size())}  # Assumes one ligand appended to end of sequence

    # Import conformer from pose
    fuzzball_ligand_pose = rosetta.core.pose.Pose()
    rosetta.core.import_pose.pose_from_file(fuzzball_ligand_pose, ligand_conformer_path)
    fuzzball_ligand = fuzzball_ligand_pose.residue(1)

    # Calculate rotation/translation by hand using first three atoms of ligand
    mobile_match = rosetta.numeric.xyzTransform_double_t(match_ligand.xyz(1), match_ligand.xyz(2), match_ligand.xyz(3))
    mobile_match_inverse = mobile_match.inverse()
    target_fuzzball = rosetta.numeric.xyzTransform_double_t(fuzzball_ligand.xyz(1), fuzzball_ligand.xyz(2), fuzzball_ligand.xyz(3))

    ligand_rotation = target_fuzzball.R * mobile_match_inverse.R
    ligand_translation = target_fuzzball.R * mobile_match_inverse.t + target_fuzzball.t

    # Apply transformation
    match_pose_clone.apply_transform_Rx_plus_v(ligand_rotation, ligand_translation)
    match_pose_clone_ligand = match_pose_clone.residue(match_pose_size).clone()

    # --- All other operations --- #

    # Mutate all non-motif residues within 10A from ligand to ALA, interferes with RotamerSet generation
    ligand_residue_selector = rosetta.core.select.residue_selector.ChainSelector('X')
    neighborhood_selector = rosetta.core.select.residue_selector.NeighborhoodResidueSelector(ligand_residue_selector, 10, False)
    neighborhood_selector_bool = neighborhood_selector.apply(match_pose_clone)
    neighborhood_residues_resnums = rosetta.core.select.get_residues_from_subset(neighborhood_selector_bool)
    positions_to_consider = list(set(neighborhood_residues_resnums) - set(motif_and_ligand_resnums))

    mutate = rosetta.protocols.simple_moves.MutateResidue()
    mutate.set_res_name('ALA')

    for position in positions_to_consider:
        # Skip GLY/PRO and disulfide-bonded residues; everything else becomes ALA
        if match_pose_clone.residue(position).name3() not in ['GLY', 'PRO'] and 'disulfide' not in match_pose_clone.residue(position).name():
            mutate.set_target(position)
            mutate.apply(match_pose_clone)

    # Build RotamerSets for each extrachi/sample level
    if dump_rotamerset_pdb:
        all_rotamersets = rosetta.core.pack.rotamer_set.RotamerSetsFactory.create_rotamer_sets(match_pose_clone)
        task_factory = rosetta.core.pack.task.TaskFactory()

        # NATRO positions TaskOp
        rotamer_candidates_rs = rosetta.core.select.residue_selector.ResidueIndexSelector(','.join([str(i) for i in match_residue_map.keys()]))
        natro_rs = rosetta.core.select.residue_selector.NotResidueSelector(rotamer_candidates_rs)
        natro_op = rosetta.core.pack.task.operation.OperateOnResidueSubset(
            rosetta.core.pack.task.operation.PreventRepackingRLT(), natro_rs)
        task_factory.push_back(natro_op)

        rotamersets_packer_task = task_factory.create_task_and_apply_taskoperations(match_pose_clone)
        all_rotamersets.set_task(rotamersets_packer_task)

    # Remove ligand from match_pose_clone before generating rotamers!!!
    match_pose_clone_apo = match_pose_clone.clone()
    match_pose_clone_apo.conformation_ptr().delete_residue_slow(match_pose_size)

    # Define positions where rotamers will be considered
    if defined_positions:
        rotamerset_positions = list(set(defined_positions) & set(match_residue_map.keys()))
    else:
        rotamerset_positions = list(match_residue_map.keys())
    print(f'Rotamerset Positions: {rotamerset_positions}')

    # Generate rotamers at each position
    for position in rotamerset_positions:

        # Prepare minimization
        if apply_minimization:
            motif_movemap = rosetta.core.kinematics.MoveMap()
            motif_movemap.set_chi(position, True)  # only this position's chi angles move

            minimize_motif = rosetta.protocols.minimization_packing.MinMover()
            minimize_motif.movemap(motif_movemap)
            minimize_motif.score_function(sfxn)
            minimize_motif.min_type('lbfgs_armijo')
            minimize_motif.tolerance(1e-6)

        # Prepare infrastructure
        rotamer_stats[position] = dict()
        if dump_rotamerset_pdb:
            current_rotamerset = rosetta.core.pack.rotamer_set.RotamerSetFactory.create_rotamer_set(match_pose_clone)

        # Keep rotamers that are compatible with minimal binding motif
        for contact_residue in match_residue_map[position]:
            # print(f'Considering position {position}: {contact_residue}')

            position_rotamer_list = list()
            possible_contact_geometries = match_residue_map[position][contact_residue]

            # --- Prepare viable rotamers for each position --- #

            # Define packertask using neighborhood_selector
            packer_task = rosetta.core.pack.task.TaskFactory.create_packer_task(match_pose_clone_apo)
            packer_task.initialize_from_command_line()

            # Get boolean vector for packable positions and apply to packer task
            packable_positions = rosetta.utility.vector1_bool()
            packable_position_list = [True if i == position else False for i in range(1, match_pose_clone_apo.size())]
            for bool_value in packable_position_list:
                packable_positions.append(bool_value)
            packer_task.restrict_to_residues(packable_positions)

            # Only build rotamers for residues with Hbond donors/acceptors
            restrict_CAAs = rosetta.core.pack.task.operation.RestrictAbsentCanonicalAAS(position, rosetta.utility.vector1_bool(20))
            restrict_CAAs.keep_aas(contact_residue)
            restrict_CAAs.apply(match_pose_clone_apo, packer_task)

            packer_neighbor_graph = rosetta.core.pack.create_packer_graph(match_pose_clone_apo, sfxn, packer_task)

            match_rotamer_set = rosetta.core.pack.rotamer_set.RotamerSetFactory.create_rotamer_set(match_pose_clone_apo)
            match_rotamer_set.set_resid(position)
            match_rotamer_set.build_rotamers(match_pose_clone_apo, sfxn, packer_task, packer_neighbor_graph, use_neighbor_context=False)

            # Nothing useful was built for this residue type: skip it
            if match_rotamer_set.num_rotamers() <= 1 and match_rotamer_set.rotamer(1).name1() != contact_residue:
                continue

            print(f'Position {position} ResidueType {contact_residue} - comparing {match_rotamer_set.num_rotamers()} rotamers against {len(possible_contact_geometries)} contact modes')
            rotamer_stats[position][contact_residue] = dict()
            rotamer_stats[position][contact_residue]['num_rotamers'] = match_rotamer_set.num_rotamers()
            rotamer_info = list()
            rotamers_accepted = 0

            # --- Evaluate Rotamers --- #

            for rotamer in range(1, match_rotamer_set.num_rotamers() + 1):

                # Place residue before applying to pose!!!!
                # Rotamers need to be transformed back onto the backbone of the input pdb!!!
                trail_rotamer = match_rotamer_set.rotamer(rotamer)
                trail_rotamer.place(match_pose_clone.residue(position), match_pose_clone.conformation_ptr())
                match_pose_clone.replace_residue(position, trail_rotamer, False)
                pose_trial_rotamer = match_pose_clone.residue(position)

                # Evaluate RMSD to possible_contact_geometries
                contact_RMSDs = list()
                dof_errors = list()
                sad_atom_in_rotamer = False

                for contact_mode in possible_contact_geometries:

                    # REFERENCE: contact_info = [current_motif_coord_list, [float(a) for a in dof_tuple], constraint_atoms_dict['residue']['atom_names'], constraint_atoms_dict['ligand']['atom_names']]
                    current_motif_coord_list = contact_mode[0]
                    contact_dofs = contact_mode[1]
                    residue_matchatoms = contact_mode[2]
                    ligand_matchatoms = contact_mode[3]

                    # Skip rotamer if contact is mediated by a backbone atom...
                    if residue_matchatoms[0] in ['C', 'CA', 'N', 'O']:
                        continue

                    # Get contact atom coords using atom names
                    try:
                        rotamer_contact_coords = [list(match_pose_clone.residue(position).xyz(atom)) for atom in residue_matchatoms]

                        # If distance is off, don't even bother...
                        residue_contactatom = pose_trial_rotamer.xyz(residue_matchatoms[0])
                        ligand_contactatom = match_pose_clone_ligand.xyz(ligand_matchatoms[0])
                        atom_displacement = ligand_contactatom - residue_contactatom
                        if atom_displacement.norm() > 4:
                            # print(f'Contact is {atom_displacement.norm()}A, continuing...')
                            continue

                        residue_atomid_list = [pose_trial_rotamer.xyz(atom) for atom in residue_matchatoms]
                        ligand_atomid_list = [match_pose_clone_ligand.xyz(atom) for atom in ligand_matchatoms]

                        # Res1 - ligand, Res2 - residue
                        # 'angle_A' is the angle Res1:Atom2 - Res1:Atom1 - Res2:Atom1
                        angle_A = rosetta.numeric.angle_degrees_double(ligand_atomid_list[1], ligand_atomid_list[0], residue_atomid_list[0])
                        # 'angle_B' is the angle Res1:Atom1 - Res2:Atom1 - Res2:Atom2
                        angle_B = rosetta.numeric.angle_degrees_double(ligand_atomid_list[0], residue_atomid_list[0], residue_atomid_list[1])
                        # 'torsion_A' is the dihedral Res1:Atom3 - Res1:Atom2 - Res1:Atom1 - Res2:Atom1
                        torsion_A = rosetta.numeric.dihedral_degrees_double(ligand_atomid_list[2], ligand_atomid_list[1], ligand_atomid_list[0], residue_atomid_list[0])
                        # 'torsion_AB' is the dihedral Res1:Atom2 - Res1:Atom1 - Res2:Atom1 - Res2:Atom2
                        torsion_AB = rosetta.numeric.dihedral_degrees_double(ligand_atomid_list[1], ligand_atomid_list[0], residue_atomid_list[0], residue_atomid_list[1])
                        # 'torsion_B' is the dihedral Res1:Atom1 - Res2:Atom1 - Res2:Atom2 - Res2:Atom3
                        torsion_B = rosetta.numeric.dihedral_degrees_double(ligand_atomid_list[0], residue_atomid_list[0], residue_atomid_list[1], residue_atomid_list[2])

                        rotamer_dofs = [angle_A, angle_B, torsion_A, torsion_AB, torsion_B]

                    except Exception as e:
                        print(e, residue_matchatoms, ligand_matchatoms)
                        # print(f'Skipping {contact_mode[0]}: contains sad atom.')
                        sad_atom_in_rotamer = True
                        break

                    # todo: Edge condition at 0/360...
                    dof_difference_list = [abs(ideal - measured) for ideal, measured in zip(contact_dofs[1:], rotamer_dofs)]
                    # print('contact_dofs:', contact_dofs)
                    # print('rotamer_dofs:', rotamer_dofs)
                    # print('DOF DIFFERENCE LIST:', dof_difference_list)
                    dof_errors.append(max(dof_difference_list))
                    contact_RMSDs.append(prody.calcRMSD(np.asarray(current_motif_coord_list), np.asarray(rotamer_contact_coords)))

                if len(dof_errors) == 0:
                    continue

                if sad_atom_in_rotamer:
                    continue

                # Continue if current rotamer does not have <{RMSD_limit}A RMSD with any contact mode
                if contact_method == 'RMSD' and min(contact_RMSDs, default=666) > RMSD_limit:
                    rotamer_info.append((contact_RMSDs, None, None))
                    continue

                # Only continue if a contact mode exists where max angle/torsion DOF error < 10 degrees
                if contact_method == 'matcher' and min(dof_errors) > 15:
                    continue

                # Apply minimization to rotamer-ligand interaction before deciding to accept
                if apply_minimization:
                    minimize_motif.apply(match_pose_clone)

                # Evaluate possible clashes (fa_rep) with motif residues and ligand
                sfxn(match_pose_clone)
                edges = match_pose_clone.energies().energy_graph()

                motif_fa_rep = list()
                for motif in motif_and_ligand_resnums:
                    current_edge = edges.find_energy_edge(position, motif)
                    if current_edge is not None:
                        current_edge.fill_energy_map()
                        motif_fa_rep.append(current_edge[rosetta.core.scoring.fa_rep])

                # Get score for current rotamer against ligand
                current_edge = edges.find_energy_edge(position, conformer_resnum)
                rotamer_ligand_reu = current_edge.dot(sfxn_weights) if current_edge is not None else 0

                # Accept rotamers without severe clashes against motif or ligand
                if all([min(motif_fa_rep, default=666) < 20, rotamer_ligand_reu <= 20]):

                    if flag_special_rot:
                        # Re-type the residue as a SPECIAL_ROT variant before harvesting it
                        current_rsd_type_ptr = match_pose_clone.residue_type_ptr(position)
                        new_rsd_type_mutable = rosetta.core.chemical.MutableResidueType(current_rsd_type_ptr)
                        new_rsd_type_mutable.add_variant_type(rosetta.core.chemical.SPECIAL_ROT)
                        new_rsd_type = rosetta.core.chemical.ResidueType.make(new_rsd_type_mutable)
                        rosetta.core.pose.replace_pose_residue_copying_existing_coordinates(match_pose_clone, position, new_rsd_type)

                    # Place residue before applying to pose!!!!
                    # Rotamers need to be transformed back onto the backbone of the input pdb!!!
                    new_rotamer = match_pose_clone.residue(position).clone()
                    new_rotamer.place(match_pose.residue(position), match_pose.conformation_ptr())

                    position_rotamer_list.append((rotamer_ligand_reu, new_rotamer))
                    rotamers_accepted += 1

                    if dump_rotamerset_pdb:
                        current_rotamerset.add_rotamer(new_rotamer)

                rotamer_info.append((max(dof_errors), max(motif_fa_rep, default=0), rotamer_ligand_reu))

            print(f'{rotamers_accepted} of {match_rotamer_set.num_rotamers()} rotamers accepted')
            rotamer_stats[position][contact_residue]['rotamer_info'] = rotamer_info
            rotamer_stats[position][contact_residue]['rotamers_accepted'] = rotamers_accepted

            if len(position_rotamer_list) > 0:
                # Keep only the rotset_limit lowest-energy rotamers (by ligand REU)
                position_rotamer_list_selected = sorted(position_rotamer_list, key=lambda x: x[0])[:rotset_limit]
                position_rotamer_list = [rot[1] for rot in position_rotamer_list_selected]
                if position not in viable_rotamers.keys():
                    viable_rotamers[position] = dict()
                viable_rotamers[position][contact_residue] = position_rotamer_list

                if dump_rotamerset_pdb:
                    current_moltresid = all_rotamersets.resid_2_moltenres(position)
                    all_rotamersets.set_explicit_rotamers(current_moltresid, current_rotamerset)

    if dump_rotamerset_pdb:
        # Derive the current extrachi/sampling level from the packing options
        current_extrachi = len([rosetta.basic.options.get_boolean_option(f'packing:ex{i}') for i in range(1, 5) if rosetta.basic.options.get_boolean_option(f'packing:ex{i}') is True])
        current_sample_level = rosetta.basic.options.get_integer_option(f'packing:ex{current_extrachi}:level')

        if current_extrachi <= 2 and current_sample_level <= 3:
            match_name = os.path.normpath(os.path.basename(match_path))

            # todo: figure out why this doesn't work... problem with CONECT records...
            # all_rotamersets.dump_pdb(match_pose_clone, f"{match_name.split('.')[0]}-extrachi_{current_extrachi}-sampling_{current_sample_level}.pdb")
            all_rotamers_pose = pyrosetta.pose_from_sequence('A')
            for position in match_residue_map.keys():
                position_rotset = all_rotamersets.rotamer_set_for_residue(position)
                for rot in range(1, position_rotset.num_rotamers() + 1):
                    all_rotamers_pose.append_residue_by_jump(position_rotset.rotamer(rot), 1)
            all_rotamers_pose.dump_pdb(f"{match_name.split('.')[0]}-extrachi_{current_extrachi}-sampling_{current_sample_level}.pdb")

    if report_stats:
        return viable_rotamers, rotamer_stats
    else:
        return viable_rotamers
def create_task_factory(match_pose, match_path, return_rs=False):
    """
    Build the default TaskFactory for design around the ligand of a match pose.

    Assumes the LAST residue of ``match_pose`` is a single ligand and that the
    protein context around it is being designed:

      * First shell: positions with CB within 10A of the ligand AND with
        dot(CA->CB, CA->nbr_atom) > 0 (side chain pointing outward from CA)
      * A ClashBasedShellSelector around the first shell is also designable
      * A ClashBasedShellSelector around all designable positions, plus any
        matched motif residues parsed from the match file's REMARKs, is
        repackable (motif residues are repacked, never designed)
      * GLY/CYS/PRO are never designed; every other position is NATRO

    :param match_pose: Rosetta pose with the ligand as its last residue
    :param match_path: path to the match PDB; its matcher REMARKs define the
        motif residues that are repacked instead of designed
    :param return_rs: if True, also return the selectors needed downstream
    :return: task_factory, or (task_factory, relevant_positions_rs,
        matched_ligand_rs) when return_rs is True
    """
    # --- Residue Selectors --- #
    # Ligand, ASSUMES SINGLE LIGAND AT END OF POSE!!!
    matched_ligand_rs = rosetta.core.select.residue_selector.ResidueIndexSelector(str(match_pose.size()))
    # NeighborhoodResidueSelector uses CB to determine distances, CA for GLY
    # All residues with CB within 10A of ligand
    ligand_neghborhood_rs = rosetta.core.select.residue_selector.NeighborhoodResidueSelector(matched_ligand_rs, 10, False)
    # All residues whose CA->CB direction agrees with CA->nbr_atom
    ligand_facing_residues = list()
    for resnum in range(1, match_pose.size()):  # Assuming single ligand at end of sequence
        current_residue = match_pose.residue(resnum)
        if current_residue.name3() in ['GLY', 'CYS', 'PRO']: continue
        ca_cb_vector = current_residue.atom('CB').xyz() - current_residue.atom('CA').xyz()
        # NOTE(review): this uses the residue's OWN neighbor atom (CB for
        # canonical amino acids), so the dot product below is almost always
        # positive and this "ligand-facing" filter may be vacuous. The intent
        # was probably the ligand's center — confirm before changing, since a
        # change alters which positions become designable.
        ca_center_vector = current_residue.nbr_atom_xyz() - current_residue.atom('CA').xyz()
        dot_product = ca_cb_vector.dot(ca_center_vector)
        if dot_product > 0:
            ligand_facing_residues.append(resnum)
    ligand_facing_residues_rs = rosetta.core.select.residue_selector.ResidueIndexSelector(
        ','.join([str(a) for a in ligand_facing_residues]))
    # First shell ligand contacts: within 10A of ligand AND "ligand-facing"
    first_shell_rs = rosetta.core.select.residue_selector.AndResidueSelector()
    first_shell_rs.add_residue_selector(ligand_neghborhood_rs)
    first_shell_rs.add_residue_selector(ligand_facing_residues_rs)
    # ClashBasedRepackShell around first shell is designable
    second_shell_temp_rs = rosetta.core.pack.task.residue_selector.ClashBasedShellSelector(first_shell_rs)
    # Residue Selector for designable positions = first shell + second shell
    designable_residue_rs = rosetta.core.select.residue_selector.OrResidueSelector()
    designable_residue_rs.add_residue_selector(first_shell_rs)
    designable_residue_rs.add_residue_selector(second_shell_temp_rs)
    designable_residue_selection = designable_residue_rs.apply(match_pose)
    design_position_list = rosetta.core.select.get_residues_from_subset(designable_residue_selection)
    print('Designable Positions (pre-CPG filter):', design_position_list)
    # Anything not collected into this selector ends up NATRO
    relevant_positions_rs = rosetta.core.select.residue_selector.OrResidueSelector()
    # Matched motif residues
    matched_motif_residues = parse_matcher_remarks(match_path)
    # BUGFIX: add_repack_shell was previously assigned only inside the motif
    # branch below, raising NameError when the match file had no matcher
    # remarks. Default to False so the repack-shell task operation is skipped.
    add_repack_shell = False
    if len(matched_motif_residues) > 0:
        # Remove match residues from designable positions
        design_position_list = set(design_position_list) - set(matched_motif_residues)
        # Update match and designable ResidueSelectors
        matched_motif_rs = rosetta.core.select.residue_selector.ResidueIndexSelector(
            ','.join([str(a) for a in matched_motif_residues]))
        designable_residue_rs = rosetta.core.select.residue_selector.ResidueIndexSelector(
            ','.join([str(a) for a in design_position_list]))
        # Add match positions to relevant_positions_rs
        relevant_positions_rs.add_residue_selector(matched_motif_rs)
        # Packing shell around design/matched residues
        repack_shell_temp_rs = rosetta.core.pack.task.residue_selector.ClashBasedShellSelector(designable_residue_rs)
        repack_shell_selection = repack_shell_temp_rs.apply(match_pose)
        real_repack_positions = set(rosetta.core.select.get_residues_from_subset(repack_shell_selection)) - set(design_position_list)
        repack_position_list = [str(a) for a in (list(real_repack_positions) + matched_motif_residues)]
        add_repack_shell = len(repack_position_list) > 0
        print('Repack Positions:', repack_position_list)
        if add_repack_shell:
            repack_shell_rs = rosetta.core.select.residue_selector.ResidueIndexSelector(','.join(repack_position_list))
            relevant_positions_rs.add_residue_selector(repack_shell_rs)
    relevant_positions_rs.add_residue_selector(designable_residue_rs)
    natro_rs = rosetta.core.select.residue_selector.NotResidueSelector(relevant_positions_rs)
    # Don't design CGP
    gly_rs = rosetta.core.select.residue_selector.ResidueNameSelector('GLY')
    cys_rs = rosetta.core.select.residue_selector.ResidueNameSelector('CYS')
    pro_rs = rosetta.core.select.residue_selector.ResidueNameSelector('PRO')
    cgp_rs = rosetta.core.select.residue_selector.OrResidueSelector()
    cgp_rs.add_residue_selector(gly_rs)
    cgp_rs.add_residue_selector(cys_rs)
    cgp_rs.add_residue_selector(pro_rs)
    # --- Create and Populate Task Factory --- #
    task_factory = rosetta.core.pack.task.TaskFactory()
    racaa = rosetta.core.pack.task.operation.RestrictAbsentCanonicalAASRLT()
    racaa.aas_to_keep('ADEFHIKLMNQRSTVWY')  # No CGP
    design_op = rosetta.core.pack.task.operation.OperateOnResidueSubset(racaa, designable_residue_rs)
    task_factory.push_back(design_op)
    if add_repack_shell:
        repack_op = rosetta.core.pack.task.operation.OperateOnResidueSubset(
            rosetta.core.pack.task.operation.RestrictToRepackingRLT(), repack_shell_rs)
        task_factory.push_back(repack_op)
    repack_cgp = rosetta.core.pack.task.operation.OperateOnResidueSubset(
        rosetta.core.pack.task.operation.RestrictToRepackingRLT(), cgp_rs)
    task_factory.push_back(repack_cgp)
    natro_op = rosetta.core.pack.task.operation.OperateOnResidueSubset(
        rosetta.core.pack.task.operation.PreventRepackingRLT(), natro_rs)
    task_factory.push_back(natro_op)
    fixed_ligand_op = rosetta.core.pack.task.operation.OperateOnResidueSubset(
        rosetta.core.pack.task.operation.PreventRepackingRLT(), matched_ligand_rs)
    task_factory.push_back(fixed_ligand_op)
    # Extra rotamers
    extra_rotamers_op = rosetta.core.pack.task.operation.ExtraRotamersGeneric()
    extra_rotamers_op.ex1(True)
    extra_rotamers_op.ex2(True)
    extra_rotamers_op.ex1_sample_level(rosetta.core.pack.task.ExtraRotSample.EX_ONE_STDDEV)
    extra_rotamers_op.ex2_sample_level(rosetta.core.pack.task.ExtraRotSample.EX_ONE_STDDEV)
    task_factory.push_back(extra_rotamers_op)
    if return_rs:
        return task_factory, relevant_positions_rs, matched_ligand_rs
    else:
        return task_factory
def fuzzball_composition_design(ligand_conformer_path, match_path, match_residue_map, params_path,
                                designdir='Designs', nstruct=1, special_rot_weight=-5, use_complementary_rotsets=True,
                                rotset_limit=50, rmsd=1.5, apply_minimization=False, dalphaball_path=None, match_cst=None):
    """
    Run ligand-context design on a match, biasing toward rotamers that recapitulate contacts
    observed in the iteration fuzzball (via the special_rot score term), then score each
    design with a panel of filters and write it to disk.

    :param ligand_conformer_path: path to the ligand conformer PDB, passed through to
        fuzzball contact rotamer generation
    :param match_path: path to the match PDB, resolved relative to the current directory
    :param match_residue_map: path to a pickled mapping of scaffold positions to viable
        contact residue types (the variable is rebound to the unpickled object)
    :param params_path: ligand .params file handed to PyRosetta via -extra_res_fa
    :param designdir: output directory for designed PDBs (created if missing)
    :param nstruct: number of independent design trajectories to run
    :param special_rot_weight: weight for the special_rot score term; negative values
        favor the injected fuzzball rotamers
    :param use_complementary_rotsets: if True, inject fuzzball contact rotamers into the
        packer's RotamerSets before annealing
    :param rotset_limit: maximum fuzzball rotamers kept per position/residue type
    :param rmsd: RMSD limit passed to fuzzball rotamer generation
    :param apply_minimization: passed through to fuzzball rotamer generation
    :param dalphaball_path: If provided, use RosettaHoles filter with provided dalphaball.gcc
    :param match_cst: optional enzdes constraint file applied to each design pose
    :return: pandas DataFrame with one row of filter metrics per design
    """
    # --- Initiate PyRosetta and Score Function -- #
    my_options = [f"-extra_res_fa {params_path}",
                  "-mute core.conformation core.chemical core.pack.task",
                  '-ex1 -ex2 -extrachi_cutoff 0 -use_input_sc',
                  '-run:preserve_header',
                  '-total_threads 1'  # This kills the cluster, fun times...
                  ]
    pyrosetta.init(options=' '.join(my_options))
    # Normal scorefunction for generating rotamers
    sfxn = rosetta.core.scoring.get_score_function()
    # Create match pose
    match_pose = rosetta.core.pose.Pose()
    rosetta.core.import_pose.pose_from_file(match_pose, os.path.join(os.getcwd(), match_path))
    # --- Create Task Factory --- #
    # Create task factory for unrelaxed, but apply to relaxed
    task_factory, relevant_positions_rs, matched_ligand_rs = create_task_factory(match_pose, match_path, return_rs=True)
    # Relax (coordinate-constrained, so the relaxed pose stays near the match geometry)
    fast_relax = rosetta.protocols.relax.FastRelax(sfxn, 5, 'MonomerRelax2019')
    fast_relax.constrain_relax_to_native_coords(True)
    fast_relax.apply(match_pose)
    # Add defined_rotamer scoreterm
    sfxn.set_weight(rosetta.core.scoring.special_rot, special_rot_weight)
    # --- Set up Annealer for design --- #
    # Load viable scaffold positions and corresponding residue types
    # todo: make sure backrub ensemble structures also have matcher remarks added
    # NOTE(review): pickle.load on a caller-supplied path — only use with trusted inputs
    match_residue_map = pickle.load(open(match_residue_map, 'rb'))
    # --- Create Packer Task --- #
    design_packer_task = task_factory.create_task_and_apply_taskoperations(match_pose)
    design_packer_task.or_linmem_ig(True)  # Linear memory Interaction Graph
    print(design_packer_task)
    # coupeldmoves ligand ig edges reweight
    # core::pack::task::IGEdgeReweighterOP reweight_ligand(new protocols::toolbox::IGLigandDesignEdgeUpweighter(ligand_weight_) );
    # task->set_IGEdgeReweights()->add_reweighter(reweight_ligand);
    # 1-based pose numbering for positions the packer task marks designable
    design_position_list = [index for index, res in enumerate(design_packer_task.designing_residues(), start=1) if res is True]
    print(f'Design positions: {design_position_list}')
    # --- Create RotamerSets including fuzzball rotamers --- #
    # Crank global rotamer sampling options up for fuzzball rotamer generation;
    # they are reset to their normal values right after.
    rosetta.basic.options.set_boolean_option('packing:ex1', True)
    rosetta.basic.options.set_boolean_option('packing:ex2', True)
    rosetta.basic.options.set_boolean_option('packing:ex3', True)  # Default level:1
    rosetta.basic.options.set_boolean_option('packing:ex4', True)  # Default level:1
    rosetta.basic.options.set_integer_option('packing:ex1:level', 4)
    rosetta.basic.options.set_integer_option('packing:ex2:level', 4)
    # rosetta.basic.options.set_integer_option('packing:ex3:level', 4)
    # rosetta.basic.options.set_integer_option('packing:ex4:level', 4)
    if use_complementary_rotsets:
        print("Generating complementrary RotamerSets...")
        viable_rotamers = generate_fuzzball_contact_rotamersets(ligand_conformer_path, match_path, match_pose, sfxn, match_residue_map,
                                                               flag_special_rot=True, rotset_limit=rotset_limit, RMSD_limit=rmsd,
                                                               apply_minimization=apply_minimization, defined_positions=design_position_list)
    # Turn off ex3 and ex4 after generating fuzzball contact rotamers
    rosetta.basic.options.set_boolean_option('packing:ex3', False)
    rosetta.basic.options.set_boolean_option('packing:ex4', False)
    # Reset ex1 and ex2 sampling level
    rosetta.basic.options.set_integer_option('packing:ex1:level', 1)
    rosetta.basic.options.set_integer_option('packing:ex2:level', 1)
    # --- Create filters --- #
    print("Creating Filters...")
    # Binding Strain
    binding_strain_filter = rosetta.protocols.protein_interface_design.filters.BindingStrainFilter()
    binding_strain_filter.threshold(9999)
    binding_strain_filter.scorefxn(sfxn)
    binding_strain_filter.task_factory(task_factory)
    binding_strain_filter.jump(match_pose.num_chains() - 1)  # Assumes ligand is at end of pose
    # DDG (BindingStrain seems to perform an equivalent operation)
    # <Ddg name="(ddg &string)" scorefxn="(score12 &string)" threshold="(-15 &float)" jump="(1 &Integer)" chain_num="(&int,&int...)" repeats="(1 &Integer)" repack="(true &bool)" relax_mover="(&string)" repack_bound="(true &bool)" repack_unbound="(true &bool)" relax_bound="(false &bool)" relax_unbound=("true &bool) filter="(&string)"/>
    # ShapeComplementarity
    shape_complementarity_filter = rosetta.protocols.simple_filters.ShapeComplementarityFilter()
    shape_complementarity_filter.filtered_sc(0)
    shape_complementarity_filter.filtered_area(0)
    shape_complementarity_filter.jump_id(match_pose.num_chains() - 1)
    shape_complementarity_filter.quick(0)
    shape_complementarity_filter.verbose(0)
    shape_complementarity_filter.write_int_area(1)
    # ResidueIE
    residueie_resnum = match_pose.size()  # Assumes ligand is last residue in Pose
    residueie_restype = match_pose.residue(residueie_resnum).name3()
    residueie_filter = rosetta.protocols.simple_filters.ResidueIEFilter(str(residueie_resnum), residueie_restype, sfxn, rosetta.core.scoring.total_score, 0, False, True, 1, 8, 0, 1, True)
    # PackStat
    packstat_filter = rosetta.protocols.simple_filters.PackStatFilter(0)
    # RosettaHoles (optional)
    if dalphaball_path:
        rosetta.basic.options.set_file_option('holes:dalphaball', dalphaball_path)
        relevant_positions_selection = relevant_positions_rs.apply(match_pose)
        relevant_positions_str_list = [str(a) for a in set(rosetta.core.select.get_residues_from_subset(relevant_positions_selection))]
        # Holes filter has no complete PyRosetta constructor interface; build it from XML
        filters_xml = f'''
        <SCOREFXNS>
            <ScoreFunction name="sfxn" weights="ref2015"/>
        </SCOREFXNS>
        <RESIDUE_SELECTORS>
            <Index name="relevant_positions" resnums="{','.join(relevant_positions_str_list)}"/>
        </RESIDUE_SELECTORS>
        <FILTERS>
            <Holes name="holes_filter" threshold="1" residue_selector="relevant_positions" confidence="0"/>
        </FILTERS>'''
        holes_filter = rosetta.protocols.rosetta_scripts.XmlObjects.create_from_string(filters_xml).get_filter("holes_filter")
    # Buried Unsats
    buried_unsat_filter = rosetta.protocols.simple_filters.BuriedUnsatHbondFilter()
    buried_unsat_filter.set_residue_selector(relevant_positions_rs)
    buried_unsat_filter.set_print_out_info_to_pdb(True)
    # SASAMetric
    sasa_metric = rosetta.core.simple_metrics.metrics.SasaMetric()
    sasa_metric.set_residue_selector(matched_ligand_rs)
    # --- Create Constraints --- #
    # NOTE(review): this AddOrRemoveMatchCsts object is configured but never
    # applied — constraints are actually added per-design via XML below.
    if match_cst:
        match_constraints = rosetta.protocols.enzdes.AddOrRemoveMatchCsts()
        match_constraints.cstfile(match_cst)
    # --- Perform Design --- #
    "Essentially pack_rotamers.cc"
    sfxn(match_pose)
    sfxn.setup_for_packing(match_pose, design_packer_task.repacking_residues(), design_packer_task.designing_residues())
    packer_neighbor_graph = rosetta.core.pack.create_packer_graph(match_pose, sfxn, design_packer_task)
    rotamer_sets = rosetta.core.pack.rotamer_set.RotamerSetsFactory.create_rotamer_sets(match_pose)
    rotamer_sets.set_task(design_packer_task)
    rotamer_sets.initialize_pose_for_rotsets_creation(match_pose)
    rotamer_sets.build_rotamers(match_pose, sfxn, packer_neighbor_graph)
    # DEBUGGING
    # pprint(viable_rotamers)
    # derp = pyrosetta.pose_from_sequence('A')
    # for position in viable_rotamers:
    #     for residuetype in viable_rotamers[position]:
    #         for res in viable_rotamers[position][residuetype]:
    #             derp.append_residue_by_jump(res, 1)
    # derp.dump_pdb('rotset.pdb')
    # Inject the fuzzball contact rotamers into the freshly built RotamerSets
    if use_complementary_rotsets:
        for position in viable_rotamers:
            if design_packer_task.design_residue(position):
                print(f"Adding complementary rotamers for position {position}")
                position_rotamer_set = rotamer_sets.rotamer_set_for_residue(position)
                # Add fuzzball rotamers to the appropriate rotamer_set in rotamer_sets
                if int(position_rotamer_set.resid()) == position:
                    for residue_type in viable_rotamers[position]:
                        print(f'Adding {len(viable_rotamers[position][residue_type])} {residue_type} rotamers at position {position}.')
                        for fuzz_rotamer in viable_rotamers[position][residue_type]:
                            position_rotamer_set.add_rotamer_into_existing_group(fuzz_rotamer)
    match_dir, match_filename = os.path.split(match_path)
    match_name = os.path.splitext(match_filename)[0]
    os.makedirs(designdir, exist_ok=True)
    list_of_dicts = list()
    for i in range(nstruct):
        design_pose = match_pose.clone()
        design_path = os.path.join(designdir, f'{match_name}-{i}.pdb')
        # Mutate all designable positions to alanine first
        # mutate = rosetta.protocols.simple_moves.MutateResidue()
        # mutate.set_res_name('ALA')
        # for position in design_position_list:
        #     mutate.set_target(position)
        #     mutate.apply(design_pose)
        # Apply match constraints
        if match_cst:
            # There's no way to set cst_instruction through a pure PyRosetta interface...
            add_match_cst_xml = f'''
            <SCOREFXNS>
                <ScoreFunction name="sfxn" weights="ref2015"/>
            </SCOREFXNS>
            <MOVERS>
                <AddOrRemoveMatchCsts cst_instruction="add_new" name="add_match_constraints" cstfile="{match_cst}"/>
            </MOVERS>'''
            rosetta.protocols.rosetta_scripts.XmlObjects.create_from_string(add_match_cst_xml).get_mover("add_match_constraints").apply(design_pose)
        # Perform design
        sfxn.setup_for_packing_with_rotsets(design_pose, rotamer_sets)
        rotamer_sets.prepare_sets_for_packing(design_pose, sfxn)
        ig = rosetta.core.pack.interaction_graph.InteractionGraphFactory.create_and_initialize_annealing_graph(design_packer_task, rotamer_sets, design_pose, sfxn, packer_neighbor_graph)
        rosetta.core.pack.pack_rotamers_run(design_pose, design_packer_task, rotamer_sets, ig)
        ig.clean_up_after_packing(design_pose)
        sfxn(design_pose)
        # --- Apply Filters --- #
        # NOTE(review): the *_remark RemarkInfo objects below are built but never
        # attached to the pose — appending them currently throws bad_alloc (see todo).
        bindingstrain = binding_strain_filter.compute(design_pose)
        binding_strain_remark = rosetta.core.io.RemarkInfo()
        binding_strain_remark.value = f'BindingStrain\t{bindingstrain}'
        shape_complementarity_filter.apply(design_pose)
        shapecomplementarity = shape_complementarity_filter.report_sm(design_pose)
        shape_complementarity_remark = rosetta.core.io.RemarkInfo()
        shape_complementarity_remark.value = f'ShapeComplementarity\t{shapecomplementarity}'
        residueie = residueie_filter.compute(design_pose)
        residueie_remark = rosetta.core.io.RemarkInfo()
        residueie_remark.value = f'ResidueIE\t{residueie}'
        packstat = packstat_filter.compute(design_pose)
        packstat_remark = rosetta.core.io.RemarkInfo()
        packstat_remark.value = f'Packstat\t{packstat}'
        heavyburiedunsats = buried_unsat_filter.compute(design_pose)
        heavyburiedunsats_remark = rosetta.core.io.RemarkInfo()
        heavyburiedunsats_remark.value = f'HeavyBuriedUnsats\t{heavyburiedunsats}'
        ligand_sasa = sasa_metric.calculate(design_pose)
        ligand_sasa_remark = rosetta.core.io.RemarkInfo()
        ligand_sasa_remark.value = f'LigandSASA\t{ligand_sasa}'
        if dalphaball_path:
            holes_value = holes_filter.report_sm(design_pose)
            print(f'HOLES (compute): {holes_value}')
        # Count hbonds to ligand
        pose_hbondset = design_pose.get_hbonds()
        ligand_position = design_pose.size()
        ligand_hbond_vector = pose_hbondset.residue_hbonds(ligand_position)  # Assumes ligand is last residue in pose!!!
        design_dict = {'path': design_path,
                       'match': match_name,
                       'bindingstrain': bindingstrain,
                       'shapecomplementarity': shapecomplementarity,
                       'residueie': residueie,
                       'packstat': packstat,
                       'heavyburiedunsats': heavyburiedunsats,
                       'ligand_sasa': ligand_sasa,
                       'hbonds': len(ligand_hbond_vector),
                       'comprotset': use_complementary_rotsets,
                       'special_rot_weight': special_rot_weight,
                       }
        if dalphaball_path:
            design_dict['holes'] = holes_value
        list_of_dicts.append(design_dict)
        # --- Add Remarks to PDB --- #
        # todo: figure out why bad_alloc is thrown here
        # design_pose.pdb_info().remarks().append(binding_strain_remark)
        # design_pose.pdb_info().remarks().append(shape_complementarity_remark)
        # design_pose.pdb_info().remarks().append(residueie_remark)
        # design_pose.pdb_info().remarks().pdb_info().remarks().append(packstat_remark)
        # design_pose.pdb_info().remarks().append(heavyburiedunsats_remark)
        # design_pose.pdb_info().remarks().append(ligand_sasa_remark)
        # --- Write design to file --- #
        design_pose.dump_pdb(design_path)
    return pd.DataFrame(list_of_dicts)
| [
"pyrosetta.rosetta.numeric.dihedral_degrees_double",
"pyrosetta.rosetta.core.io.RemarkInfo",
"pyrosetta.rosetta.core.pack.task.operation.PreventRepackingRLT",
"pyrosetta.rosetta.basic.options.set_integer_option",
"pyrosetta.rosetta.core.chemical.MutableResidueType",
"pyrosetta.rosetta.core.pack.task.resid... | [((3604, 3628), 'pyrosetta.rosetta.core.pose.Pose', 'rosetta.core.pose.Pose', ([], {}), '()\n', (3626, 3628), False, 'from pyrosetta import rosetta\n'), ((3633, 3721), 'pyrosetta.rosetta.core.import_pose.pose_from_file', 'rosetta.core.import_pose.pose_from_file', (['fuzzball_ligand_pose', 'ligand_conformer_path'], {}), '(fuzzball_ligand_pose,\n ligand_conformer_path)\n', (3672, 3721), False, 'from pyrosetta import rosetta\n'), ((4674, 4729), 'pyrosetta.rosetta.core.select.residue_selector.ChainSelector', 'rosetta.core.select.residue_selector.ChainSelector', (['"""X"""'], {}), "('X')\n", (4724, 4729), False, 'from pyrosetta import rosetta\n'), ((4758, 4863), 'pyrosetta.rosetta.core.select.residue_selector.NeighborhoodResidueSelector', 'rosetta.core.select.residue_selector.NeighborhoodResidueSelector', (['ligand_residue_selector', '(10)', '(False)'], {}), '(\n ligand_residue_selector, 10, False)\n', (4822, 4863), False, 'from pyrosetta import rosetta\n'), ((4974, 5046), 'pyrosetta.rosetta.core.select.get_residues_from_subset', 'rosetta.core.select.get_residues_from_subset', (['neighborhood_selector_bool'], {}), '(neighborhood_selector_bool)\n', (5018, 5046), False, 'from pyrosetta import rosetta\n'), ((5162, 5208), 'pyrosetta.rosetta.protocols.simple_moves.MutateResidue', 'rosetta.protocols.simple_moves.MutateResidue', ([], {}), '()\n', (5206, 5208), False, 'from pyrosetta import rosetta\n'), ((21555, 21654), 'pyrosetta.rosetta.core.select.residue_selector.NeighborhoodResidueSelector', 'rosetta.core.select.residue_selector.NeighborhoodResidueSelector', (['matched_ligand_rs', '(10)', '(False)'], {}), '(\n matched_ligand_rs, 10, False)\n', (21619, 21654), False, 'from pyrosetta import rosetta\n'), ((22496, 22553), 'pyrosetta.rosetta.core.select.residue_selector.AndResidueSelector', 'rosetta.core.select.residue_selector.AndResidueSelector', ([], {}), '()\n', (22551, 22553), False, 'from pyrosetta import rosetta\n'), ((22773, 
22852), 'pyrosetta.rosetta.core.pack.task.residue_selector.ClashBasedShellSelector', 'rosetta.core.pack.task.residue_selector.ClashBasedShellSelector', (['first_shell_rs'], {}), '(first_shell_rs)\n', (22836, 22852), False, 'from pyrosetta import rosetta\n'), ((22930, 22986), 'pyrosetta.rosetta.core.select.residue_selector.OrResidueSelector', 'rosetta.core.select.residue_selector.OrResidueSelector', ([], {}), '()\n', (22984, 22986), False, 'from pyrosetta import rosetta\n'), ((23222, 23296), 'pyrosetta.rosetta.core.select.get_residues_from_subset', 'rosetta.core.select.get_residues_from_subset', (['designable_residue_selection'], {}), '(designable_residue_selection)\n', (23266, 23296), False, 'from pyrosetta import rosetta\n'), ((23430, 23486), 'pyrosetta.rosetta.core.select.residue_selector.OrResidueSelector', 'rosetta.core.select.residue_selector.OrResidueSelector', ([], {}), '()\n', (23484, 23486), False, 'from pyrosetta import rosetta\n'), ((24325, 24416), 'pyrosetta.rosetta.core.pack.task.residue_selector.ClashBasedShellSelector', 'rosetta.core.pack.task.residue_selector.ClashBasedShellSelector', (['designable_residue_rs'], {}), '(\n designable_residue_rs)\n', (24388, 24416), False, 'from pyrosetta import rosetta\n'), ((25133, 25211), 'pyrosetta.rosetta.core.select.residue_selector.NotResidueSelector', 'rosetta.core.select.residue_selector.NotResidueSelector', (['relevant_positions_rs'], {}), '(relevant_positions_rs)\n', (25188, 25211), False, 'from pyrosetta import rosetta\n'), ((25249, 25312), 'pyrosetta.rosetta.core.select.residue_selector.ResidueNameSelector', 'rosetta.core.select.residue_selector.ResidueNameSelector', (['"""GLY"""'], {}), "('GLY')\n", (25305, 25312), False, 'from pyrosetta import rosetta\n'), ((25326, 25389), 'pyrosetta.rosetta.core.select.residue_selector.ResidueNameSelector', 'rosetta.core.select.residue_selector.ResidueNameSelector', (['"""CYS"""'], {}), "('CYS')\n", (25382, 25389), False, 'from pyrosetta import rosetta\n'), ((25403, 
25466), 'pyrosetta.rosetta.core.select.residue_selector.ResidueNameSelector', 'rosetta.core.select.residue_selector.ResidueNameSelector', (['"""PRO"""'], {}), "('PRO')\n", (25459, 25466), False, 'from pyrosetta import rosetta\n'), ((25480, 25536), 'pyrosetta.rosetta.core.select.residue_selector.OrResidueSelector', 'rosetta.core.select.residue_selector.OrResidueSelector', ([], {}), '()\n', (25534, 25536), False, 'from pyrosetta import rosetta\n'), ((25727, 25763), 'pyrosetta.rosetta.core.pack.task.TaskFactory', 'rosetta.core.pack.task.TaskFactory', ([], {}), '()\n', (25761, 25763), False, 'from pyrosetta import rosetta\n'), ((25777, 25841), 'pyrosetta.rosetta.core.pack.task.operation.RestrictAbsentCanonicalAASRLT', 'rosetta.core.pack.task.operation.RestrictAbsentCanonicalAASRLT', ([], {}), '()\n', (25839, 25841), False, 'from pyrosetta import rosetta\n'), ((25911, 26000), 'pyrosetta.rosetta.core.pack.task.operation.OperateOnResidueSubset', 'rosetta.core.pack.task.operation.OperateOnResidueSubset', (['racaa', 'designable_residue_rs'], {}), '(racaa,\n designable_residue_rs)\n', (25966, 26000), False, 'from pyrosetta import rosetta\n'), ((26894, 26949), 'pyrosetta.rosetta.core.pack.task.operation.ExtraRotamersGeneric', 'rosetta.core.pack.task.operation.ExtraRotamersGeneric', ([], {}), '()\n', (26947, 26949), False, 'from pyrosetta import rosetta\n'), ((28486, 28527), 'pyrosetta.rosetta.core.scoring.get_score_function', 'rosetta.core.scoring.get_score_function', ([], {}), '()\n', (28525, 28527), False, 'from pyrosetta import rosetta\n'), ((28570, 28594), 'pyrosetta.rosetta.core.pose.Pose', 'rosetta.core.pose.Pose', ([], {}), '()\n', (28592, 28594), False, 'from pyrosetta import rosetta\n'), ((28940, 29002), 'pyrosetta.rosetta.protocols.relax.FastRelax', 'rosetta.protocols.relax.FastRelax', (['sfxn', '(5)', '"""MonomerRelax2019"""'], {}), "(sfxn, 5, 'MonomerRelax2019')\n", (28973, 29002), False, 'from pyrosetta import rosetta\n'), ((30191, 30252), 
'pyrosetta.rosetta.basic.options.set_boolean_option', 'rosetta.basic.options.set_boolean_option', (['"""packing:ex1"""', '(True)'], {}), "('packing:ex1', True)\n", (30231, 30252), False, 'from pyrosetta import rosetta\n'), ((30257, 30318), 'pyrosetta.rosetta.basic.options.set_boolean_option', 'rosetta.basic.options.set_boolean_option', (['"""packing:ex2"""', '(True)'], {}), "('packing:ex2', True)\n", (30297, 30318), False, 'from pyrosetta import rosetta\n'), ((30323, 30384), 'pyrosetta.rosetta.basic.options.set_boolean_option', 'rosetta.basic.options.set_boolean_option', (['"""packing:ex3"""', '(True)'], {}), "('packing:ex3', True)\n", (30363, 30384), False, 'from pyrosetta import rosetta\n'), ((30408, 30469), 'pyrosetta.rosetta.basic.options.set_boolean_option', 'rosetta.basic.options.set_boolean_option', (['"""packing:ex4"""', '(True)'], {}), "('packing:ex4', True)\n", (30448, 30469), False, 'from pyrosetta import rosetta\n'), ((30493, 30557), 'pyrosetta.rosetta.basic.options.set_integer_option', 'rosetta.basic.options.set_integer_option', (['"""packing:ex1:level"""', '(4)'], {}), "('packing:ex1:level', 4)\n", (30533, 30557), False, 'from pyrosetta import rosetta\n'), ((30562, 30626), 'pyrosetta.rosetta.basic.options.set_integer_option', 'rosetta.basic.options.set_integer_option', (['"""packing:ex2:level"""', '(4)'], {}), "('packing:ex2:level', 4)\n", (30602, 30626), False, 'from pyrosetta import rosetta\n'), ((31347, 31409), 'pyrosetta.rosetta.basic.options.set_boolean_option', 'rosetta.basic.options.set_boolean_option', (['"""packing:ex3"""', '(False)'], {}), "('packing:ex3', False)\n", (31387, 31409), False, 'from pyrosetta import rosetta\n'), ((31414, 31476), 'pyrosetta.rosetta.basic.options.set_boolean_option', 'rosetta.basic.options.set_boolean_option', (['"""packing:ex4"""', '(False)'], {}), "('packing:ex4', False)\n", (31454, 31476), False, 'from pyrosetta import rosetta\n'), ((31520, 31584), 'pyrosetta.rosetta.basic.options.set_integer_option', 
'rosetta.basic.options.set_integer_option', (['"""packing:ex1:level"""', '(1)'], {}), "('packing:ex1:level', 1)\n", (31560, 31584), False, 'from pyrosetta import rosetta\n'), ((31589, 31653), 'pyrosetta.rosetta.basic.options.set_integer_option', 'rosetta.basic.options.set_integer_option', (['"""packing:ex2:level"""', '(1)'], {}), "('packing:ex2:level', 1)\n", (31629, 31653), False, 'from pyrosetta import rosetta\n'), ((31769, 31841), 'pyrosetta.rosetta.protocols.protein_interface_design.filters.BindingStrainFilter', 'rosetta.protocols.protein_interface_design.filters.BindingStrainFilter', ([], {}), '()\n', (31839, 31841), False, 'from pyrosetta import rosetta\n'), ((32542, 32603), 'pyrosetta.rosetta.protocols.simple_filters.ShapeComplementarityFilter', 'rosetta.protocols.simple_filters.ShapeComplementarityFilter', ([], {}), '()\n', (32601, 32603), False, 'from pyrosetta import rosetta\n'), ((33304, 33354), 'pyrosetta.rosetta.protocols.simple_filters.PackStatFilter', 'rosetta.protocols.simple_filters.PackStatFilter', (['(0)'], {}), '(0)\n', (33351, 33354), False, 'from pyrosetta import rosetta\n'), ((34459, 34516), 'pyrosetta.rosetta.protocols.simple_filters.BuriedUnsatHbondFilter', 'rosetta.protocols.simple_filters.BuriedUnsatHbondFilter', ([], {}), '()\n', (34514, 34516), False, 'from pyrosetta import rosetta\n'), ((34677, 34725), 'pyrosetta.rosetta.core.simple_metrics.metrics.SasaMetric', 'rosetta.core.simple_metrics.metrics.SasaMetric', ([], {}), '()\n', (34723, 34725), False, 'from pyrosetta import rosetta\n'), ((35195, 35270), 'pyrosetta.rosetta.core.pack.create_packer_graph', 'rosetta.core.pack.create_packer_graph', (['match_pose', 'sfxn', 'design_packer_task'], {}), '(match_pose, sfxn, design_packer_task)\n', (35232, 35270), False, 'from pyrosetta import rosetta\n'), ((35291, 35376), 'pyrosetta.rosetta.core.pack.rotamer_set.RotamerSetsFactory.create_rotamer_sets', 'rosetta.core.pack.rotamer_set.RotamerSetsFactory.create_rotamer_sets', (['match_pose'], {}), 
'(match_pose\n )\n', (35359, 35376), False, 'from pyrosetta import rosetta\n'), ((36771, 36796), 'os.path.split', 'os.path.split', (['match_path'], {}), '(match_path)\n', (36784, 36796), False, 'import os\n'), ((36854, 36891), 'os.makedirs', 'os.makedirs', (['designdir'], {'exist_ok': '(True)'}), '(designdir, exist_ok=True)\n', (36865, 36891), False, 'import os\n'), ((41580, 41607), 'pandas.DataFrame', 'pd.DataFrame', (['list_of_dicts'], {}), '(list_of_dicts)\n', (41592, 41607), True, 'import pandas as pd\n'), ((5619, 5710), 'pyrosetta.rosetta.core.pack.rotamer_set.RotamerSetsFactory.create_rotamer_sets', 'rosetta.core.pack.rotamer_set.RotamerSetsFactory.create_rotamer_sets', (['match_pose_clone'], {}), '(\n match_pose_clone)\n', (5687, 5710), False, 'from pyrosetta import rosetta\n'), ((5729, 5765), 'pyrosetta.rosetta.core.pack.task.TaskFactory', 'rosetta.core.pack.task.TaskFactory', ([], {}), '()\n', (5763, 5765), False, 'from pyrosetta import rosetta\n'), ((5963, 6041), 'pyrosetta.rosetta.core.select.residue_selector.NotResidueSelector', 'rosetta.core.select.residue_selector.NotResidueSelector', (['rotamer_candidates_rs'], {}), '(rotamer_candidates_rs)\n', (6018, 6041), False, 'from pyrosetta import rosetta\n'), ((18912, 18991), 'pyrosetta.rosetta.basic.options.get_integer_option', 'rosetta.basic.options.get_integer_option', (['f"""packing:ex{current_extrachi}:level"""'], {}), "(f'packing:ex{current_extrachi}:level')\n", (18952, 18991), False, 'from pyrosetta import rosetta\n'), ((26351, 26408), 'pyrosetta.rosetta.core.pack.task.operation.RestrictToRepackingRLT', 'rosetta.core.pack.task.operation.RestrictToRepackingRLT', ([], {}), '()\n', (26406, 26408), False, 'from pyrosetta import rosetta\n'), ((26538, 26592), 'pyrosetta.rosetta.core.pack.task.operation.PreventRepackingRLT', 'rosetta.core.pack.task.operation.PreventRepackingRLT', ([], {}), '()\n', (26590, 26592), False, 'from pyrosetta import rosetta\n'), ((26729, 26783), 
'pyrosetta.rosetta.core.pack.task.operation.PreventRepackingRLT', 'rosetta.core.pack.task.operation.PreventRepackingRLT', ([], {}), '()\n', (26781, 26783), False, 'from pyrosetta import rosetta\n'), ((33418, 33492), 'pyrosetta.rosetta.basic.options.set_file_option', 'rosetta.basic.options.set_file_option', (['"""holes:dalphaball"""', 'dalphaball_path'], {}), "('holes:dalphaball', dalphaball_path)\n", (33455, 33492), False, 'from pyrosetta import rosetta\n'), ((34864, 34911), 'pyrosetta.rosetta.protocols.enzdes.AddOrRemoveMatchCsts', 'rosetta.protocols.enzdes.AddOrRemoveMatchCsts', ([], {}), '()\n', (34909, 34911), False, 'from pyrosetta import rosetta\n'), ((36814, 36846), 'os.path.splitext', 'os.path.splitext', (['match_filename'], {}), '(match_filename)\n', (36830, 36846), False, 'import os\n'), ((37013, 37061), 'os.path.join', 'os.path.join', (['designdir', 'f"""{match_name}-{i}.pdb"""'], {}), "(designdir, f'{match_name}-{i}.pdb')\n", (37025, 37061), False, 'import os\n'), ((38163, 38341), 'pyrosetta.rosetta.core.pack.interaction_graph.InteractionGraphFactory.create_and_initialize_annealing_graph', 'rosetta.core.pack.interaction_graph.InteractionGraphFactory.create_and_initialize_annealing_graph', (['design_packer_task', 'rotamer_sets', 'design_pose', 'sfxn', 'packer_neighbor_graph'], {}), '(\n design_packer_task, rotamer_sets, design_pose, sfxn, packer_neighbor_graph)\n', (38260, 38341), False, 'from pyrosetta import rosetta\n'), ((38345, 38435), 'pyrosetta.rosetta.core.pack.pack_rotamers_run', 'rosetta.core.pack.pack_rotamers_run', (['design_pose', 'design_packer_task', 'rotamer_sets', 'ig'], {}), '(design_pose, design_packer_task,\n rotamer_sets, ig)\n', (38380, 38435), False, 'from pyrosetta import rosetta\n'), ((38640, 38668), 'pyrosetta.rosetta.core.io.RemarkInfo', 'rosetta.core.io.RemarkInfo', ([], {}), '()\n', (38666, 38668), False, 'from pyrosetta import rosetta\n'), ((38920, 38948), 'pyrosetta.rosetta.core.io.RemarkInfo', 'rosetta.core.io.RemarkInfo', 
([], {}), '()\n', (38946, 38948), False, 'from pyrosetta import rosetta\n'), ((39128, 39156), 'pyrosetta.rosetta.core.io.RemarkInfo', 'rosetta.core.io.RemarkInfo', ([], {}), '()\n', (39154, 39156), False, 'from pyrosetta import rosetta\n'), ((39299, 39327), 'pyrosetta.rosetta.core.io.RemarkInfo', 'rosetta.core.io.RemarkInfo', ([], {}), '()\n', (39325, 39327), False, 'from pyrosetta import rosetta\n'), ((39489, 39517), 'pyrosetta.rosetta.core.io.RemarkInfo', 'rosetta.core.io.RemarkInfo', ([], {}), '()\n', (39515, 39517), False, 'from pyrosetta import rosetta\n'), ((39688, 39716), 'pyrosetta.rosetta.core.io.RemarkInfo', 'rosetta.core.io.RemarkInfo', ([], {}), '()\n', (39714, 39716), False, 'from pyrosetta import rosetta\n'), ((6130, 6184), 'pyrosetta.rosetta.core.pack.task.operation.PreventRepackingRLT', 'rosetta.core.pack.task.operation.PreventRepackingRLT', ([], {}), '()\n', (6182, 6184), False, 'from pyrosetta import rosetta\n'), ((7089, 7122), 'pyrosetta.rosetta.core.kinematics.MoveMap', 'rosetta.core.kinematics.MoveMap', ([], {}), '()\n', (7120, 7122), False, 'from pyrosetta import rosetta\n'), ((7203, 7252), 'pyrosetta.rosetta.protocols.minimization_packing.MinMover', 'rosetta.protocols.minimization_packing.MinMover', ([], {}), '()\n', (7250, 7252), False, 'from pyrosetta import rosetta\n'), ((7587, 7676), 'pyrosetta.rosetta.core.pack.rotamer_set.RotamerSetFactory.create_rotamer_set', 'rosetta.core.pack.rotamer_set.RotamerSetFactory.create_rotamer_set', (['match_pose_clone'], {}), '(\n match_pose_clone)\n', (7653, 7676), False, 'from pyrosetta import rosetta\n'), ((8164, 8239), 'pyrosetta.rosetta.core.pack.task.TaskFactory.create_packer_task', 'rosetta.core.pack.task.TaskFactory.create_packer_task', (['match_pose_clone_apo'], {}), '(match_pose_clone_apo)\n', (8217, 8239), False, 'from pyrosetta import rosetta\n'), ((8410, 8440), 'pyrosetta.rosetta.utility.vector1_bool', 'rosetta.utility.vector1_bool', ([], {}), '()\n', (8438, 8440), False, 'from pyrosetta 
import rosetta\n'), ((9097, 9175), 'pyrosetta.rosetta.core.pack.create_packer_graph', 'rosetta.core.pack.create_packer_graph', (['match_pose_clone_apo', 'sfxn', 'packer_task'], {}), '(match_pose_clone_apo, sfxn, packer_task)\n', (9134, 9175), False, 'from pyrosetta import rosetta\n'), ((9209, 9302), 'pyrosetta.rosetta.core.pack.rotamer_set.RotamerSetFactory.create_rotamer_set', 'rosetta.core.pack.rotamer_set.RotamerSetFactory.create_rotamer_set', (['match_pose_clone_apo'], {}), '(\n match_pose_clone_apo)\n', (9275, 9302), False, 'from pyrosetta import rosetta\n'), ((19402, 19435), 'pyrosetta.pose_from_sequence', 'pyrosetta.pose_from_sequence', (['"""A"""'], {}), "('A')\n", (19430, 19435), False, 'import pyrosetta\n'), ((24512, 24580), 'pyrosetta.rosetta.core.select.get_residues_from_subset', 'rosetta.core.select.get_residues_from_subset', (['repack_shell_selection'], {}), '(repack_shell_selection)\n', (24556, 24580), False, 'from pyrosetta import rosetta\n'), ((26150, 26207), 'pyrosetta.rosetta.core.pack.task.operation.RestrictToRepackingRLT', 'rosetta.core.pack.task.operation.RestrictToRepackingRLT', ([], {}), '()\n', (26205, 26207), False, 'from pyrosetta import rosetta\n'), ((28664, 28675), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (28673, 28675), False, 'import os\n'), ((8907, 8939), 'pyrosetta.rosetta.utility.vector1_bool', 'rosetta.utility.vector1_bool', (['(20)'], {}), '(20)\n', (8935, 8939), False, 'from pyrosetta import rosetta\n'), ((18730, 18788), 'pyrosetta.rosetta.basic.options.get_boolean_option', 'rosetta.basic.options.get_boolean_option', (['f"""packing:ex{i}"""'], {}), "(f'packing:ex{i}')\n", (18770, 18788), False, 'from pyrosetta import rosetta\n'), ((19099, 19127), 'os.path.basename', 'os.path.basename', (['match_path'], {}), '(match_path)\n', (19115, 19127), False, 'import os\n'), ((34308, 34384), 'pyrosetta.rosetta.protocols.rosetta_scripts.XmlObjects.create_from_string', 'rosetta.protocols.rosetta_scripts.XmlObjects.create_from_string', 
(['filters_xml'], {}), '(filters_xml)\n', (34371, 34384), False, 'from pyrosetta import rosetta\n'), ((33632, 33706), 'pyrosetta.rosetta.core.select.get_residues_from_subset', 'rosetta.core.select.get_residues_from_subset', (['relevant_positions_selection'], {}), '(relevant_positions_selection)\n', (33676, 33706), False, 'from pyrosetta import rosetta\n'), ((12659, 12769), 'pyrosetta.rosetta.numeric.angle_degrees_double', 'rosetta.numeric.angle_degrees_double', (['ligand_atomid_list[1]', 'ligand_atomid_list[0]', 'residue_atomid_list[0]'], {}), '(ligand_atomid_list[1],\n ligand_atomid_list[0], residue_atomid_list[0])\n', (12695, 12769), False, 'from pyrosetta import rosetta\n'), ((12886, 12997), 'pyrosetta.rosetta.numeric.angle_degrees_double', 'rosetta.numeric.angle_degrees_double', (['ligand_atomid_list[0]', 'residue_atomid_list[0]', 'residue_atomid_list[1]'], {}), '(ligand_atomid_list[0],\n residue_atomid_list[0], residue_atomid_list[1])\n', (12922, 12997), False, 'from pyrosetta import rosetta\n'), ((13134, 13270), 'pyrosetta.rosetta.numeric.dihedral_degrees_double', 'rosetta.numeric.dihedral_degrees_double', (['ligand_atomid_list[2]', 'ligand_atomid_list[1]', 'ligand_atomid_list[0]', 'residue_atomid_list[0]'], {}), '(ligand_atomid_list[2],\n ligand_atomid_list[1], ligand_atomid_list[0], residue_atomid_list[0])\n', (13173, 13270), False, 'from pyrosetta import rosetta\n'), ((13409, 13546), 'pyrosetta.rosetta.numeric.dihedral_degrees_double', 'rosetta.numeric.dihedral_degrees_double', (['ligand_atomid_list[1]', 'ligand_atomid_list[0]', 'residue_atomid_list[0]', 'residue_atomid_list[1]'], {}), '(ligand_atomid_list[1],\n ligand_atomid_list[0], residue_atomid_list[0], residue_atomid_list[1])\n', (13448, 13546), False, 'from pyrosetta import rosetta\n'), ((13683, 13821), 'pyrosetta.rosetta.numeric.dihedral_degrees_double', 'rosetta.numeric.dihedral_degrees_double', (['ligand_atomid_list[0]', 'residue_atomid_list[0]', 'residue_atomid_list[1]', 
'residue_atomid_list[2]'], {}), '(ligand_atomid_list[0],\n residue_atomid_list[0], residue_atomid_list[1], residue_atomid_list[2])\n', (13722, 13821), False, 'from pyrosetta import rosetta\n'), ((16665, 16727), 'pyrosetta.rosetta.core.chemical.MutableResidueType', 'rosetta.core.chemical.MutableResidueType', (['current_rsd_type_ptr'], {}), '(current_rsd_type_ptr)\n', (16705, 16727), False, 'from pyrosetta import rosetta\n'), ((16864, 16924), 'pyrosetta.rosetta.core.chemical.ResidueType.make', 'rosetta.core.chemical.ResidueType.make', (['new_rsd_type_mutable'], {}), '(new_rsd_type_mutable)\n', (16902, 16924), False, 'from pyrosetta import rosetta\n'), ((16949, 17063), 'pyrosetta.rosetta.core.pose.replace_pose_residue_copying_existing_coordinates', 'rosetta.core.pose.replace_pose_residue_copying_existing_coordinates', (['match_pose_clone', 'position', 'new_rsd_type'], {}), '(\n match_pose_clone, position, new_rsd_type)\n', (17016, 17063), False, 'from pyrosetta import rosetta\n'), ((18812, 18870), 'pyrosetta.rosetta.basic.options.get_boolean_option', 'rosetta.basic.options.get_boolean_option', (['f"""packing:ex{i}"""'], {}), "(f'packing:ex{i}')\n", (18852, 18870), False, 'from pyrosetta import rosetta\n'), ((14684, 14720), 'numpy.asarray', 'np.asarray', (['current_motif_coord_list'], {}), '(current_motif_coord_list)\n', (14694, 14720), True, 'import numpy as np\n'), ((14722, 14756), 'numpy.asarray', 'np.asarray', (['rotamer_contact_coords'], {}), '(rotamer_contact_coords)\n', (14732, 14756), True, 'import numpy as np\n'), ((37851, 37938), 'pyrosetta.rosetta.protocols.rosetta_scripts.XmlObjects.create_from_string', 'rosetta.protocols.rosetta_scripts.XmlObjects.create_from_string', (['add_match_cst_xml'], {}), '(\n add_match_cst_xml)\n', (37914, 37938), False, 'from pyrosetta import rosetta\n')] |
import os
import argparse
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torchvision import transforms
from src.dataset import CocoDataset, Resizer, Normalizer, Augmenter, collater
from src.model import EfficientDet
from tensorboardX import SummaryWriter
import shutil
import numpy as np
from tqdm.autonotebook import tqdm
def get_args():
    """Parse and return the command-line options for EfficientDet training."""
    p = argparse.ArgumentParser(
        "EfficientDet: Scalable and Efficient Object Detection implementation by Signatrix GmbH")
    p.add_argument("--image_size", type=int, default=512, help="The common width and height for all images")
    p.add_argument("--batch_size", type=int, default=8, help="The number of images per batch")
    p.add_argument("--lr", type=float, default=1e-4)
    p.add_argument('--alpha', type=float, default=0.25)
    p.add_argument('--gamma', type=float, default=1.5)
    p.add_argument("--num_epochs", type=int, default=500)
    p.add_argument("--test_interval", type=int, default=1, help="Number of epoches between testing phases")
    p.add_argument("--es_min_delta", type=float, default=0.0,
                   help="Early stopping's parameter: minimum change loss to qualify as an improvement")
    p.add_argument("--es_patience", type=int, default=0,
                   help="Early stopping's parameter: number of epochs with no improvement after which training will be stopped. Set to 0 to disable this technique.")
    p.add_argument("--data_path", type=str, default="data/COCO", help="the root folder of dataset")
    p.add_argument("--log_path", type=str, default="tensorboard/signatrix_efficientdet_coco")
    p.add_argument("--saved_path", type=str, default="trained_models")
    return p.parse_args()
def train(opt):
    """Train EfficientDet on COCO-format data and periodically evaluate it.

    Builds train/val DataLoaders, trains for ``opt.num_epochs`` epochs,
    logs losses to TensorBoard, keeps the best checkpoint (both .pth and an
    ONNX export) in ``opt.saved_path``, and early-stops when no improvement
    is seen for ``opt.es_patience`` epochs (0 disables early stopping).

    :param opt: parsed command-line arguments (see ``get_args``)
    """
    num_gpus = 1
    if torch.cuda.is_available():
        num_gpus = torch.cuda.device_count()
        torch.cuda.manual_seed(123)  # fixed seed for reproducibility
    else:
        torch.manual_seed(123)
    # Scale the effective batch size with the number of GPUs:
    # DataParallel splits each batch across the available devices.
    training_params = {"batch_size": opt.batch_size * num_gpus,
                       "shuffle": True,
                       "drop_last": True,
                       "collate_fn": collater,
                       "num_workers": 12}
    test_params = {"batch_size": opt.batch_size,
                   "shuffle": False,
                   "drop_last": False,
                   "collate_fn": collater,
                   "num_workers": 12}
    training_set = CocoDataset(root_dir=opt.data_path, set="train2017",
                               transform=transforms.Compose([Normalizer(), Augmenter(), Resizer()]))
    training_generator = DataLoader(training_set, **training_params)
    test_set = CocoDataset(root_dir=opt.data_path, set="val2017",
                           transform=transforms.Compose([Normalizer(), Resizer()]))
    test_generator = DataLoader(test_set, **test_params)
    model = EfficientDet(num_classes=training_set.num_classes())
    # Start each run with a clean TensorBoard log directory.
    if os.path.isdir(opt.log_path):
        shutil.rmtree(opt.log_path)
    os.makedirs(opt.log_path)
    if not os.path.isdir(opt.saved_path):
        os.makedirs(opt.saved_path)
    writer = SummaryWriter(opt.log_path)
    if torch.cuda.is_available():
        model = model.cuda()
        model = nn.DataParallel(model)
    optimizer = torch.optim.Adam(model.parameters(), opt.lr)
    # Reduce the learning rate when the epoch-mean training loss plateaus.
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=3, verbose=True)
    best_loss = 1e5
    best_epoch = 0
    model.train()
    num_iter_per_epoch = len(training_generator)
    for epoch in range(opt.num_epochs):
        model.train()
        # if torch.cuda.is_available():
        #     model.module.freeze_bn()
        # else:
        #     model.freeze_bn()
        epoch_loss = []
        progress_bar = tqdm(training_generator)
        for iter, data in enumerate(progress_bar):
            try:
                optimizer.zero_grad()
                if torch.cuda.is_available():
                    cls_loss, reg_loss = model([data['img'].cuda().float(), data['annot'].cuda()])
                else:
                    cls_loss, reg_loss = model([data['img'].float(), data['annot']])
                cls_loss = cls_loss.mean()
                reg_loss = reg_loss.mean()
                loss = cls_loss + reg_loss
                if loss == 0:
                    # Nothing to learn from this batch; skip the backward pass.
                    continue
                loss.backward()
                # Clip gradients to stabilise training.
                torch.nn.utils.clip_grad_norm_(model.parameters(), 0.1)
                optimizer.step()
                epoch_loss.append(float(loss))
                total_loss = np.mean(epoch_loss)
                progress_bar.set_description(
                    'Epoch: {}/{}. Iteration: {}/{}. Cls loss: {:.5f}. Reg loss: {:.5f}. Batch loss: {:.5f} Total loss: {:.5f}'.format(
                        epoch + 1, opt.num_epochs, iter + 1, num_iter_per_epoch, cls_loss, reg_loss, loss,
                        total_loss))
                writer.add_scalar('Train/Total_loss', total_loss, epoch * num_iter_per_epoch + iter)
                writer.add_scalar('Train/Regression_loss', reg_loss, epoch * num_iter_per_epoch + iter)
                writer.add_scalar('Train/Classfication_loss (focal loss)', cls_loss, epoch * num_iter_per_epoch + iter)
            except Exception as e:
                # Best-effort training: report and skip batches that fail
                # rather than aborting the whole run.
                print(e)
                continue
        scheduler.step(np.mean(epoch_loss))
        if epoch % opt.test_interval == 0:
            model.eval()
            loss_regression_ls = []
            loss_classification_ls = []
            for iter, data in enumerate(test_generator):
                with torch.no_grad():
                    if torch.cuda.is_available():
                        cls_loss, reg_loss = model([data['img'].cuda().float(), data['annot'].cuda()])
                    else:
                        cls_loss, reg_loss = model([data['img'].float(), data['annot']])
                    cls_loss = cls_loss.mean()
                    reg_loss = reg_loss.mean()
                    loss_classification_ls.append(float(cls_loss))
                    loss_regression_ls.append(float(reg_loss))
            cls_loss = np.mean(loss_classification_ls)
            reg_loss = np.mean(loss_regression_ls)
            loss = cls_loss + reg_loss
            print(
                'Epoch: {}/{}. Classification loss: {:1.5f}. Regression loss: {:1.5f}. Total loss: {:1.5f}'.format(
                    epoch + 1, opt.num_epochs, cls_loss, reg_loss,
                    np.mean(loss)))
            writer.add_scalar('Test/Total_loss', loss, epoch)
            writer.add_scalar('Test/Regression_loss', reg_loss, epoch)
            writer.add_scalar('Test/Classfication_loss (focal loss)', cls_loss, epoch)
            if loss + opt.es_min_delta < best_loss:
                # New best validation loss: save a PyTorch checkpoint and an
                # ONNX export (swish must be non-memory-efficient to trace).
                best_loss = loss
                best_epoch = epoch
                torch.save(model, os.path.join(opt.saved_path, "signatrix_efficientdet_coco.pth"))
                dummy_input = torch.rand(opt.batch_size, 3, 512, 512)
                if torch.cuda.is_available():
                    dummy_input = dummy_input.cuda()
                if isinstance(model, nn.DataParallel):
                    model.module.backbone_net.model.set_swish(memory_efficient=False)
                    torch.onnx.export(model.module, dummy_input,
                                      os.path.join(opt.saved_path, "signatrix_efficientdet_coco.onnx"),
                                      verbose=False)
                    model.module.backbone_net.model.set_swish(memory_efficient=True)
                else:
                    model.backbone_net.model.set_swish(memory_efficient=False)
                    torch.onnx.export(model, dummy_input,
                                      os.path.join(opt.saved_path, "signatrix_efficientdet_coco.onnx"),
                                      verbose=False)
                    model.backbone_net.model.set_swish(memory_efficient=True)
            # Early stopping
            if epoch - best_epoch > opt.es_patience > 0:
                print("Stop training at epoch {}. The lowest loss achieved is {}".format(epoch, loss))
                break
    writer.close()
if __name__ == "__main__":
    # Entry point: parse CLI options and launch training.
    train(get_args())
| [
"argparse.ArgumentParser",
"torch.cuda.device_count",
"numpy.mean",
"shutil.rmtree",
"torch.no_grad",
"os.path.join",
"torch.utils.data.DataLoader",
"torch.optim.lr_scheduler.ReduceLROnPlateau",
"src.dataset.Augmenter",
"torch.manual_seed",
"torch.cuda.manual_seed",
"torch.cuda.is_available",
... | [((387, 510), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""EfficientDet: Scalable and Efficient Object Detection implementation by Signatrix GmbH"""'], {}), "(\n 'EfficientDet: Scalable and Efficient Object Detection implementation by Signatrix GmbH'\n )\n", (410, 510), False, 'import argparse\n'), ((1858, 1883), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1881, 1883), False, 'import torch\n'), ((2649, 2692), 'torch.utils.data.DataLoader', 'DataLoader', (['training_set'], {}), '(training_set, **training_params)\n', (2659, 2692), False, 'from torch.utils.data import DataLoader\n'), ((2865, 2900), 'torch.utils.data.DataLoader', 'DataLoader', (['test_set'], {}), '(test_set, **test_params)\n', (2875, 2900), False, 'from torch.utils.data import DataLoader\n'), ((2976, 3003), 'os.path.isdir', 'os.path.isdir', (['opt.log_path'], {}), '(opt.log_path)\n', (2989, 3003), False, 'import os\n'), ((3045, 3070), 'os.makedirs', 'os.makedirs', (['opt.log_path'], {}), '(opt.log_path)\n', (3056, 3070), False, 'import os\n'), ((3164, 3191), 'tensorboardX.SummaryWriter', 'SummaryWriter', (['opt.log_path'], {}), '(opt.log_path)\n', (3177, 3191), False, 'from tensorboardX import SummaryWriter\n'), ((3199, 3224), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3222, 3224), False, 'import torch\n'), ((3372, 3451), 'torch.optim.lr_scheduler.ReduceLROnPlateau', 'torch.optim.lr_scheduler.ReduceLROnPlateau', (['optimizer'], {'patience': '(3)', 'verbose': '(True)'}), '(optimizer, patience=3, verbose=True)\n', (3414, 3451), False, 'import torch\n'), ((1904, 1929), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (1927, 1929), False, 'import torch\n'), ((1938, 1965), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['(123)'], {}), '(123)\n', (1960, 1965), False, 'import torch\n'), ((1984, 2006), 'torch.manual_seed', 'torch.manual_seed', (['(123)'], {}), '(123)\n', (2001, 2006), False, 'import 
torch\n'), ((3013, 3040), 'shutil.rmtree', 'shutil.rmtree', (['opt.log_path'], {}), '(opt.log_path)\n', (3026, 3040), False, 'import shutil\n'), ((3083, 3112), 'os.path.isdir', 'os.path.isdir', (['opt.saved_path'], {}), '(opt.saved_path)\n', (3096, 3112), False, 'import os\n'), ((3122, 3149), 'os.makedirs', 'os.makedirs', (['opt.saved_path'], {}), '(opt.saved_path)\n', (3133, 3149), False, 'import os\n'), ((3271, 3293), 'torch.nn.DataParallel', 'nn.DataParallel', (['model'], {}), '(model)\n', (3286, 3293), True, 'import torch.nn as nn\n'), ((3796, 3820), 'tqdm.autonotebook.tqdm', 'tqdm', (['training_generator'], {}), '(training_generator)\n', (3800, 3820), False, 'from tqdm.autonotebook import tqdm\n'), ((5362, 5381), 'numpy.mean', 'np.mean', (['epoch_loss'], {}), '(epoch_loss)\n', (5369, 5381), True, 'import numpy as np\n'), ((6141, 6172), 'numpy.mean', 'np.mean', (['loss_classification_ls'], {}), '(loss_classification_ls)\n', (6148, 6172), True, 'import numpy as np\n'), ((6196, 6223), 'numpy.mean', 'np.mean', (['loss_regression_ls'], {}), '(loss_regression_ls)\n', (6203, 6223), True, 'import numpy as np\n'), ((3946, 3971), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3969, 3971), False, 'import torch\n'), ((4581, 4600), 'numpy.mean', 'np.mean', (['epoch_loss'], {}), '(epoch_loss)\n', (4588, 4600), True, 'import numpy as np\n'), ((6973, 7012), 'torch.rand', 'torch.rand', (['opt.batch_size', '(3)', '(512)', '(512)'], {}), '(opt.batch_size, 3, 512, 512)\n', (6983, 7012), False, 'import torch\n'), ((7032, 7057), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (7055, 7057), False, 'import torch\n'), ((2584, 2596), 'src.dataset.Normalizer', 'Normalizer', ([], {}), '()\n', (2594, 2596), False, 'from src.dataset import CocoDataset, Resizer, Normalizer, Augmenter, collater\n'), ((2598, 2609), 'src.dataset.Augmenter', 'Augmenter', ([], {}), '()\n', (2607, 2609), False, 'from src.dataset import CocoDataset, Resizer, 
Normalizer, Augmenter, collater\n'), ((2611, 2620), 'src.dataset.Resizer', 'Resizer', ([], {}), '()\n', (2618, 2620), False, 'from src.dataset import CocoDataset, Resizer, Normalizer, Augmenter, collater\n'), ((2817, 2829), 'src.dataset.Normalizer', 'Normalizer', ([], {}), '()\n', (2827, 2829), False, 'from src.dataset import CocoDataset, Resizer, Normalizer, Augmenter, collater\n'), ((2831, 2840), 'src.dataset.Resizer', 'Resizer', ([], {}), '()\n', (2838, 2840), False, 'from src.dataset import CocoDataset, Resizer, Normalizer, Augmenter, collater\n'), ((5606, 5621), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5619, 5621), False, 'import torch\n'), ((5646, 5671), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (5669, 5671), False, 'import torch\n'), ((6486, 6499), 'numpy.mean', 'np.mean', (['loss'], {}), '(loss)\n', (6493, 6499), True, 'import numpy as np\n'), ((6877, 6940), 'os.path.join', 'os.path.join', (['opt.saved_path', '"""signatrix_efficientdet_coco.pth"""'], {}), "(opt.saved_path, 'signatrix_efficientdet_coco.pth')\n", (6889, 6940), False, 'import os\n'), ((7357, 7421), 'os.path.join', 'os.path.join', (['opt.saved_path', '"""signatrix_efficientdet_coco.onnx"""'], {}), "(opt.saved_path, 'signatrix_efficientdet_coco.onnx')\n", (7369, 7421), False, 'import os\n'), ((7759, 7823), 'os.path.join', 'os.path.join', (['opt.saved_path', '"""signatrix_efficientdet_coco.onnx"""'], {}), "(opt.saved_path, 'signatrix_efficientdet_coco.onnx')\n", (7771, 7823), False, 'import os\n')] |
import os
import json
import glob
import argparse
import numpy as np
from tqdm import tqdm
from scipy.spatial import HalfspaceIntersection
from scipy.spatial import ConvexHull
from .misc import post_proc, panostretch
def tri2halfspace(pa, pb, p):
    """Return the halfspace [a, b, c, d] of the plane through pa, pb, p.

    The normal is oriented so the halfspace a*x + b*y + c*z + d <= 0
    contains the origin (helper for the 3D IoU evaluation).
    """
    normal = np.cross(pa - p, pb - p)
    offset = -(normal @ p)
    if offset > 0:
        # Flip so the origin ends up on the non-positive side.
        normal, offset = -normal, -offset
    return [*normal, offset]
def xyzlst2halfspaces(xyz_floor, xyz_ceil):
    '''
    Helper function for evaluating 3DIoU
    return halfspace enclose (0, 0, 0)
    '''
    n_corner = xyz_floor.shape[0]
    planes = []
    for idx in range(n_corner):
        prev_idx = (idx - 1 + n_corner) % n_corner
        next_idx = (idx + 1) % n_corner
        f_prev = xyz_floor[prev_idx]
        f_next = xyz_floor[next_idx]
        f_cur = xyz_floor[idx]
        c_prev = xyz_ceil[prev_idx]
        c_next = xyz_ceil[next_idx]
        c_cur = xyz_ceil[idx]
        # Six triangles per corner: floor fan, the two wall triangles
        # touching the floor corner, ceiling fan, and the two wall
        # triangles touching the ceiling corner.
        for pa, pb, pc in ((f_prev, f_next, f_cur),
                           (f_prev, c_cur, f_cur),
                           (c_cur, f_next, f_cur),
                           (c_prev, c_next, c_cur),
                           (c_prev, f_cur, c_cur),
                           (f_cur, c_next, c_cur)):
            planes.append(tri2halfspace(pa, pb, pc))
    return np.array(planes)
def eval_3diou(dt_floor_coor, dt_ceil_coor, gt_floor_coor, gt_ceil_coor, ch=-1.6,
               coorW=1024, coorH=512, floorW=1024, floorH=512):
    ''' Evaluate 3D IoU using halfspace intersection '''
    dt_floor_coor = np.array(dt_floor_coor)
    dt_ceil_coor = np.array(dt_ceil_coor)
    gt_floor_coor = np.array(gt_floor_coor)
    gt_ceil_coor = np.array(gt_ceil_coor)
    # Floor/ceiling corners of the same wall must share an image column.
    assert (dt_floor_coor[:, 0] != dt_ceil_coor[:, 0]).sum() == 0
    assert (gt_floor_coor[:, 0] != gt_ceil_coor[:, 0]).sum() == 0
    N = len(dt_floor_coor)

    def _floor_xyz(floor_coor):
        # Project image coordinates onto the floor plane at height ch.
        xy = post_proc.np_coor2xy(floor_coor, ch, coorW, coorH, floorW=1, floorH=1)
        return np.hstack([xy, np.zeros((N, 1)) + ch])

    def _ceil_xyz(floor_xyz, ceil_coor):
        # Ceiling corner sits directly above the floor corner: same (x, y),
        # z from the horizontal distance and the ceiling elevation angle.
        radius = np.sqrt((floor_xyz[:, :2] ** 2).sum(1))
        v = post_proc.np_coory2v(ceil_coor[:, 1], coorH)
        xyz = floor_xyz.copy()
        xyz[:, 2] = radius * np.tan(v)
        return xyz

    dt_floor_xyz = _floor_xyz(dt_floor_coor)
    gt_floor_xyz = _floor_xyz(gt_floor_coor)
    dt_ceil_xyz = _ceil_xyz(dt_floor_xyz, dt_ceil_coor)
    gt_ceil_xyz = _ceil_xyz(gt_floor_xyz, gt_ceil_coor)
    hs_dt = xyzlst2halfspaces(dt_floor_xyz, dt_ceil_xyz)
    hs_gt = xyzlst2halfspaces(gt_floor_xyz, gt_ceil_xyz)
    origin = np.zeros(3)
    # Intersection volume: feasible region of both halfspace sets at once.
    inter_vol = ConvexHull(
        HalfspaceIntersection(np.concatenate([hs_dt, hs_gt]), origin).intersections).volume
    dt_vol = ConvexHull(HalfspaceIntersection(hs_dt, origin).intersections).volume
    gt_vol = ConvexHull(HalfspaceIntersection(hs_gt, origin).intersections).volume
    union_vol = dt_vol + gt_vol - inter_vol
    return 100 * inter_vol / union_vol
def gen_reg_from_xy(xy, w):
    """Turn scattered (x, y) samples into one y value per image column.

    Sorts the samples by x and interpolates at the integer columns
    0..w-1, treating x as periodic with period w (panorama wrap-around).
    """
    order = np.argsort(xy[:, 0])
    xs = xy[order, 0]
    ys = xy[order, 1]
    return np.interp(np.arange(w), xs, ys, period=w)
def test(dt_cor_id, z0, z1, gt_cor_id, w, h, losses):
    """Score one predicted layout against ground truth and record the metrics.

    Appends corner error ('CE'), pixel surface error ('PE') and 3D IoU
    ('3DIoU'), all in percent, to the corresponding lists in *losses*.

    NOTE(review): assumes a cuboid layout -- exactly 4 walls, i.e. 8 corner
    rows with ceiling/floor corners interleaved (even/odd indices).
    """
    # Eval corner error
    mse = np.sqrt(((gt_cor_id - dt_cor_id)**2).sum(1)).mean()
    ce_loss = 100 * mse / np.sqrt(w**2 + h**2)
    # Pixel surface error (3 labels: ceiling, wall, floor)
    y0_dt = []
    y0_gt = []
    y1_gt = []
    for j in range(4):
        # Predicted ceiling-wall boundary for wall j
        coorxy = panostretch.pano_connect_points(dt_cor_id[j * 2],
                                                 dt_cor_id[(j * 2 + 2) % 8],
                                                 -z0)
        y0_dt.append(coorxy)
        # Ground-truth ceiling-wall boundary for wall j
        coorxy = panostretch.pano_connect_points(gt_cor_id[j * 2],
                                                 gt_cor_id[(j * 2 + 2) % 8],
                                                 -z0)
        y0_gt.append(coorxy)
        # Ground-truth floor-wall boundary for wall j
        coorxy = panostretch.pano_connect_points(gt_cor_id[j * 2 + 1],
                                                 gt_cor_id[(j * 2 + 3) % 8],
                                                 z0)
        y1_gt.append(coorxy)
    y0_dt = gen_reg_from_xy(np.concatenate(y0_dt, 0), w)
    # Predicted floor boundary is inferred from the ceiling boundary.
    y1_dt = post_proc.infer_coory(y0_dt, z1 - z0, z0)
    y0_gt = gen_reg_from_xy(np.concatenate(y0_gt, 0), w)
    y1_gt = gen_reg_from_xy(np.concatenate(y1_gt, 0), w)
    # Mark the two boundaries per column and cumulatively sum down each
    # column, labelling pixels 0/1/2 as ceiling/wall/floor.
    surface = np.zeros((h, w), dtype=np.int32)
    surface[np.round(y0_dt).astype(int), np.arange(w)] = 1
    surface[np.round(y1_dt).astype(int), np.arange(w)] = 1
    surface = np.cumsum(surface, axis=0)
    surface_gt = np.zeros((h, w), dtype=np.int32)
    surface_gt[np.round(y0_gt).astype(int), np.arange(w)] = 1
    surface_gt[np.round(y1_gt).astype(int), np.arange(w)] = 1
    surface_gt = np.cumsum(surface_gt, axis=0)
    pe_loss = 100 * (surface != surface_gt).sum() / (h * w)
    # Eval 3d IoU
    iou3d = eval_3diou(dt_cor_id[1::2], dt_cor_id[0::2], gt_cor_id[1::2], gt_cor_id[0::2])
    losses['CE'].append(ce_loss)
    losses['PE'].append(pe_loss)
    losses['3DIoU'].append(iou3d)
def prepare_gtdt_pairs(gt_glob, dt_glob):
    """Match ground-truth files to detection .json files by basename stem.

    Returns a list of (gt_path, dt_path) tuples ordered by sorted gt path;
    ground-truth files without a matching json are silently dropped.
    """
    def _stem(path):
        # Basename up to the first '.'
        return os.path.split(path)[-1].split('.')[0]

    dt_by_stem = {_stem(p): p for p in glob.glob(dt_glob) if p.endswith('json')}
    pairs = []
    for gt_path in sorted(glob.glob(gt_glob)):
        stem = _stem(gt_path)
        if stem in dt_by_stem:
            pairs.append((gt_path, dt_by_stem[stem]))
    return pairs
if __name__ == '__main__':
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--dt_glob', required=True,
                        help='NOTE: Remeber to quote your glob path.'
                             'Files assumed to be json from inference.py')
    parser.add_argument('--gt_glob', default='data/test/label_cor/*txt',
                        help='NOTE: Remeber to quote your glob path.'
                             'Files assumed to be txt')
    parser.add_argument('--w', default=1024, type=int,
                        help='GT images width')
    parser.add_argument('--h', default=512, type=int,
                        help='GT images height')
    args = parser.parse_args()

    # Prepare (gt, dt) pairs
    gtdt_pairs = prepare_gtdt_pairs(args.gt_glob, args.dt_glob)

    # Testing
    losses = {
        'CE': [],
        'PE': [],
        '3DIoU': [],
    }
    for gt_path, dt_path in tqdm(gtdt_pairs, desc='Testing'):
        # Ground-truth corners: one "x y" pair per text line.
        with open(gt_path) as f:
            gt_cor_id = np.array([l.split() for l in f], np.float32)
        # Detection: json with normalized 'uv' corners plus 'z0'/'z1' heights.
        with open(dt_path) as f:
            dt = json.load(f)
        dt_cor_id = np.array(dt['uv'], np.float32)
        # Scale normalized uv back to pixel coordinates.
        dt_cor_id[:, 0] *= args.w
        dt_cor_id[:, 1] *= args.h
        test(dt_cor_id, dt['z0'], dt['z1'], gt_cor_id, args.w, args.h, losses)

    # Report metric means over all evaluated pairs.
    print(' Testing Result '.center(50, '='))
    print('Corner Error (%):', np.mean(losses['CE']))
    print('Pixel Error (%):', np.mean(losses['PE']))
    print('3DIoU (%):', np.mean(losses['3DIoU']))
    print('=' * 50)
| [
"tqdm.tqdm",
"json.load",
"argparse.ArgumentParser",
"os.path.split",
"numpy.zeros",
"numpy.cross",
"numpy.argsort",
"numpy.cumsum",
"numpy.tan",
"numpy.array",
"numpy.arange",
"numpy.mean",
"glob.glob",
"scipy.spatial.ConvexHull",
"numpy.round",
"numpy.concatenate",
"numpy.sqrt"
] | [((340, 356), 'numpy.cross', 'np.cross', (['v1', 'v2'], {}), '(v1, v2)\n', (348, 356), True, 'import numpy as np\n'), ((1329, 1349), 'numpy.array', 'np.array', (['halfspaces'], {}), '(halfspaces)\n', (1337, 1349), True, 'import numpy as np\n'), ((1575, 1598), 'numpy.array', 'np.array', (['dt_floor_coor'], {}), '(dt_floor_coor)\n', (1583, 1598), True, 'import numpy as np\n'), ((1618, 1640), 'numpy.array', 'np.array', (['dt_ceil_coor'], {}), '(dt_ceil_coor)\n', (1626, 1640), True, 'import numpy as np\n'), ((1661, 1684), 'numpy.array', 'np.array', (['gt_floor_coor'], {}), '(gt_floor_coor)\n', (1669, 1684), True, 'import numpy as np\n'), ((1704, 1726), 'numpy.array', 'np.array', (['gt_ceil_coor'], {}), '(gt_ceil_coor)\n', (1712, 1726), True, 'import numpy as np\n'), ((4706, 4738), 'numpy.zeros', 'np.zeros', (['(h, w)'], {'dtype': 'np.int32'}), '((h, w), dtype=np.int32)\n', (4714, 4738), True, 'import numpy as np\n'), ((4871, 4897), 'numpy.cumsum', 'np.cumsum', (['surface'], {'axis': '(0)'}), '(surface, axis=0)\n', (4880, 4897), True, 'import numpy as np\n'), ((4915, 4947), 'numpy.zeros', 'np.zeros', (['(h, w)'], {'dtype': 'np.int32'}), '((h, w), dtype=np.int32)\n', (4923, 4947), True, 'import numpy as np\n'), ((5089, 5118), 'numpy.cumsum', 'np.cumsum', (['surface_gt'], {'axis': '(0)'}), '(surface_gt, axis=0)\n', (5098, 5118), True, 'import numpy as np\n'), ((5860, 5939), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n', (5883, 5939), False, 'import argparse\n'), ((6788, 6820), 'tqdm.tqdm', 'tqdm', (['gtdt_pairs'], {'desc': '"""Testing"""'}), "(gtdt_pairs, desc='Testing')\n", (6792, 6820), False, 'from tqdm import tqdm\n'), ((2441, 2454), 'numpy.tan', 'np.tan', (['dt_v2'], {}), '(dt_v2)\n', (2447, 2454), True, 'import numpy as np\n'), ((2478, 2491), 'numpy.tan', 'np.tan', (['gt_v2'], {}), '(gt_v2)\n', (2484, 2491), True, 'import 
numpy as np\n'), ((2811, 2857), 'numpy.concatenate', 'np.concatenate', (['[dt_halfspaces, gt_halfspaces]'], {}), '([dt_halfspaces, gt_halfspaces])\n', (2825, 2857), True, 'import numpy as np\n'), ((2901, 2912), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (2909, 2912), True, 'import numpy as np\n'), ((2971, 2982), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (2979, 2982), True, 'import numpy as np\n'), ((3041, 3052), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (3049, 3052), True, 'import numpy as np\n'), ((3071, 3110), 'scipy.spatial.ConvexHull', 'ConvexHull', (['in_halfspaces.intersections'], {}), '(in_halfspaces.intersections)\n', (3081, 3110), False, 'from scipy.spatial import ConvexHull\n'), ((3134, 3173), 'scipy.spatial.ConvexHull', 'ConvexHull', (['dt_halfspaces.intersections'], {}), '(dt_halfspaces.intersections)\n', (3144, 3173), False, 'from scipy.spatial import ConvexHull\n'), ((3197, 3236), 'scipy.spatial.ConvexHull', 'ConvexHull', (['gt_halfspaces.intersections'], {}), '(gt_halfspaces.intersections)\n', (3207, 3236), False, 'from scipy.spatial import ConvexHull\n'), ((3376, 3396), 'numpy.argsort', 'np.argsort', (['xy[:, 0]'], {}), '(xy[:, 0])\n', (3386, 3396), True, 'import numpy as np\n'), ((3419, 3431), 'numpy.arange', 'np.arange', (['w'], {}), '(w)\n', (3428, 3431), True, 'import numpy as np\n'), ((3631, 3655), 'numpy.sqrt', 'np.sqrt', (['(w ** 2 + h ** 2)'], {}), '(w ** 2 + h ** 2)\n', (3638, 3655), True, 'import numpy as np\n'), ((4494, 4518), 'numpy.concatenate', 'np.concatenate', (['y0_dt', '(0)'], {}), '(y0_dt, 0)\n', (4508, 4518), True, 'import numpy as np\n'), ((4605, 4629), 'numpy.concatenate', 'np.concatenate', (['y0_gt', '(0)'], {}), '(y0_gt, 0)\n', (4619, 4629), True, 'import numpy as np\n'), ((4662, 4686), 'numpy.concatenate', 'np.concatenate', (['y1_gt', '(0)'], {}), '(y1_gt, 0)\n', (4676, 4686), True, 'import numpy as np\n'), ((5457, 5475), 'glob.glob', 'glob.glob', (['gt_glob'], {}), '(gt_glob)\n', (5466, 5475), 
False, 'import glob\n'), ((7008, 7038), 'numpy.array', 'np.array', (["dt['uv']", 'np.float32'], {}), "(dt['uv'], np.float32)\n", (7016, 7038), True, 'import numpy as np\n'), ((7265, 7286), 'numpy.mean', 'np.mean', (["losses['CE']"], {}), "(losses['CE'])\n", (7272, 7286), True, 'import numpy as np\n'), ((7319, 7340), 'numpy.mean', 'np.mean', (["losses['PE']"], {}), "(losses['PE'])\n", (7326, 7340), True, 'import numpy as np\n'), ((7373, 7397), 'numpy.mean', 'np.mean', (["losses['3DIoU']"], {}), "(losses['3DIoU'])\n", (7380, 7397), True, 'import numpy as np\n'), ((4780, 4792), 'numpy.arange', 'np.arange', (['w'], {}), '(w)\n', (4789, 4792), True, 'import numpy as np\n'), ((4839, 4851), 'numpy.arange', 'np.arange', (['w'], {}), '(w)\n', (4848, 4851), True, 'import numpy as np\n'), ((4992, 5004), 'numpy.arange', 'np.arange', (['w'], {}), '(w)\n', (5001, 5004), True, 'import numpy as np\n'), ((5054, 5066), 'numpy.arange', 'np.arange', (['w'], {}), '(w)\n', (5063, 5066), True, 'import numpy as np\n'), ((6975, 6987), 'json.load', 'json.load', (['f'], {}), '(f)\n', (6984, 6987), False, 'import json\n'), ((2008, 2024), 'numpy.zeros', 'np.zeros', (['(N, 1)'], {}), '((N, 1))\n', (2016, 2024), True, 'import numpy as np\n'), ((2160, 2176), 'numpy.zeros', 'np.zeros', (['(N, 1)'], {}), '((N, 1))\n', (2168, 2176), True, 'import numpy as np\n'), ((5568, 5586), 'glob.glob', 'glob.glob', (['dt_glob'], {}), '(dt_glob)\n', (5577, 5586), False, 'import glob\n'), ((4751, 4766), 'numpy.round', 'np.round', (['y0_dt'], {}), '(y0_dt)\n', (4759, 4766), True, 'import numpy as np\n'), ((4810, 4825), 'numpy.round', 'np.round', (['y1_dt'], {}), '(y1_dt)\n', (4818, 4825), True, 'import numpy as np\n'), ((4963, 4978), 'numpy.round', 'np.round', (['y0_gt'], {}), '(y0_gt)\n', (4971, 4978), True, 'import numpy as np\n'), ((5025, 5040), 'numpy.round', 'np.round', (['y1_gt'], {}), '(y1_gt)\n', (5033, 5040), True, 'import numpy as np\n'), ((5673, 5695), 'os.path.split', 'os.path.split', (['gt_path'], 
{}), '(gt_path)\n', (5686, 5695), False, 'import os\n'), ((5499, 5515), 'os.path.split', 'os.path.split', (['v'], {}), '(v)\n', (5512, 5515), False, 'import os\n')] |
'''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.
Created on Nov 7, 2011
@author: <NAME>
@contact: <EMAIL>
@summary: File containing various feature functions
'''
#''' Python imports '''
import random
#''' 3rd Party Imports '''
import pandas as pand
import numpy as np
import datetime as dt
#''' QSTK Imports '''
import QSTK.qstkutil.tsutil as tsu
from QSTK.qstkutil import DataAccess as da
import QSTK.qstkutil.qsdateutil as du
def featMomentum(dData, lLookback=20, b_human=False ):
    '''
    @summary: N day cumulative return (based on 1) indicator
    @param dData: Dictionary of data to use
    @param lLookback: Number of days to look in the past
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    if b_human:
        # Rescale every series to start at 1000 so they plot comparably
        for sym in dData['close']:
            fScale = 1000 / dData['close'][sym][0]
            dData['close'][sym] = dData['close'][sym] * fScale
        return dData['close']
    # Work on a copy so the caller's close prices are left untouched
    dfWork = dData['close'].copy()
    # Convert prices to daily returns in place
    tsu.returnize0(dfWork.values)
    # Cumulative return = rolling sum of the daily returns
    return pand.rolling_sum(dfWork, lLookback)
def featHiLow(dData, lLookback=20, b_human=False ):
    '''
    @summary: 1 represents a high for the lookback -1 represents a low
    @param dData: Dictionary of data to use
    @param lLookback: Number of days to look in the past
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    if b_human:
        # Rescale every series to start at 1000 so they plot comparably
        for sym in dData['close']:
            fScale = 1000 / dData['close'][sym][0]
            dData['close'][sym] = dData['close'][sym] * fScale
        return dData['close']
    dfPrice = dData['close']
    # Rolling high/low over the lookback window (min_periods=1)
    dfHighs = pand.rolling_max(dfPrice, lLookback, 1)
    dfLows = pand.rolling_min(dfPrice, lLookback, 1)
    dfSpan = dfHighs - dfLows
    # Position of the price within the rolling range, scaled to [-1, 1]
    return (dfPrice - dfLows) * 2 / dfSpan - 1
def featDate(dData, b_human=False ):
    '''
    @summary: Returns -1 for jan 1st 1 for dec 31st
    @param dData: Dictionary of data to use
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    if b_human:
        # Plot mode: rescale every symbol so its first close equals 1000.
        for sym in dData['close']:
            x=1000/dData['close'][sym][0]
            dData['close'][sym]=dData['close'][sym]*x
        return dData['close']
    dfPrice = dData['close']
    # Output frame mirrors the price frame's index/columns, initialized to 0.
    dfRet = pand.DataFrame( index=dfPrice.index, columns=dfPrice.columns, data=np.zeros(dfPrice.shape) )
    for sStock in dfPrice.columns:
        tsPrice = dfPrice[sStock]
        tsRet = dfRet[sStock]
        #'' Loop over time '''
        for i in range(len(tsPrice.index)):
            #get current date
            today = tsPrice.index[i]
            #get days since January 1st
            days = today - dt.datetime(today.year, 1, 1)
            # multiply by 2, divide by 365, subtract 1
            # NOTE(review): uses a fixed 365-day year, so Dec 31 of a leap
            # year maps slightly above 1.0. Positional `tsRet[i] = ...`
            # relies on legacy pandas integer fallback — verify on upgrade.
            tsRet[i] = float(days.days * 2) / 365 - 1
    return dfRet
def featOption(dData, b_human=False ):
    '''
    @summary: Returns 1 if option close is today, -1 if it was yesterday
    @param dData: Dictionary of data to use
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    if b_human:
        # Plot mode: rescale every symbol so its first close equals 1000.
        for sym in dData['close']:
            x=1000/dData['close'][sym][0]
            dData['close'][sym]=dData['close'][sym]*x
        return dData['close']
    dfPrice = dData['close']
    # Output frame mirrors the price frame's index/columns, initialized to 0.
    dfRet = pand.DataFrame( index=dfPrice.index, columns=dfPrice.columns, data=np.zeros(dfPrice.shape) )
    for sStock in dfPrice.columns:
        tsPrice = dfPrice[sStock]
        tsRet = dfRet[sStock]
        #'' Loop over time '''
        for i in range(len(tsPrice.index)):
            #get current date
            today = tsPrice.index[i]
            #get last option close
            # presumably du.getLastOptionClose returns the most recent
            # option-expiration timestamp in the index — TODO confirm
            last_close = du.getLastOptionClose(today, tsPrice.index)
            #get next option close
            next_close = du.getNextOptionClose(today, tsPrice.index)
            #get days between
            days_between = next_close - last_close
            #get days since last close
            days = today - last_close
            # multiply by 2, divide by 365, subtract 1
            # Linear ramp from -1 (just after a close) to 1 (at the next).
            tsRet[i] = float(days.days * 2) / days_between.days - 1
    return dfRet
def featMA( dData, lLookback=30, bRel=True, b_human=False ):
    '''
    @summary: Calculate moving average
    @param dData: Dictionary of data to use
    @param lLookback: Number of days to look in the past
    @param bRel: if true normalize the average by the current price
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    dfClose = dData['close']
    dfAvg = pand.rolling_mean(dfClose, lLookback)
    if bRel:
        dfAvg = dfAvg / dfClose
    if not b_human:
        return dfAvg
    # Plot mode: pair each symbol's prices with its (absolute) average.
    dfAbs = dfAvg * dData['close']
    dfPlot = pand.DataFrame({"Raw": dfAbs[dfAbs.columns[0]]})
    for sSym in dfAvg.columns:
        if sSym != '$SPX' and sSym != '$VIX':
            dfPlot[sSym + " Moving Average"] = dfAbs[sSym]
            dfPlot[sSym] = dData['close'][sSym]
    del dfPlot['Raw']
    return dfPlot
def featEMA( dData, lLookback=20, bRel=True, b_human=False ):
    '''
    @summary: Calculate exponential moving average
    @param dData: Dictionary of data to use
    @param lLookback: Number of days to look in the past
    @param bRel: if true normalize the average by the current price
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    dfClose = dData['close']
    dfAvg = pand.ewma(dfClose, span=lLookback)
    if bRel:
        dfAvg = dfAvg / dfClose
    if not b_human:
        return dfAvg
    # Plot mode: pair each symbol's prices with its (absolute) average.
    dfAbs = dfAvg * dData['close']
    dfPlot = pand.DataFrame({"Raw": dfAbs[dfAbs.columns[0]]})
    for sSym in dfAvg.columns:
        if sSym != '$SPX' and sSym != '$VIX':
            dfPlot[sSym + " Moving Average"] = dfAbs[sSym]
            dfPlot[sSym] = dData['close'][sSym]
    del dfPlot['Raw']
    return dfPlot
def featSTD( dData, lLookback=20, bRel=True, b_human=False ):
    '''
    @summary: Calculate standard deviation
    @param dData: Dictionary of data to use
    @param lLookback: Number of days to look in the past
    @param bRel: if true normalize by the (returnized) price
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    # Copy so the caller's prices survive the in-place returnize.
    dfCopy = dData['close'].copy()
    tsu.returnize1(dfCopy.values)
    dfStd = pand.rolling_std(dfCopy, lLookback)
    if bRel:
        dfStd = dfStd / dfCopy
    if b_human:
        # Plot mode: rescale every symbol so its first close equals 1000.
        for sSym in dData['close']:
            fScale = 1000 / dData['close'][sSym][0]
            dData['close'][sSym] = dData['close'][sSym] * fScale
        return dData['close']
    return dfStd
def featRSI( dData, lLookback=14, b_human=False):
    '''
    @summary: Calculate RSI
    @param dData: Dictionary of data to use
    @param lLookback: Number of days to look in the past, 14 is standard
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    # create deltas per day
    dfDelta = dData['close'].copy()
    # Legacy .ix indexing: subtract yesterday's price row-by-row.
    dfDelta.ix[1:,:] -= dfDelta.ix[:-1,:].values
    dfDelta.ix[0,:] = np.NAN
    # CAUTION: dfDeltaUp is an alias of dfDelta (no copy); only dfDeltaDown
    # is an independent copy. The in-place masking below therefore also
    # mutates dfDelta — the gain/loss split depends on this aliasing.
    dfDeltaUp = dfDelta
    dfDeltaDown = dfDelta.copy()
    # seperate data into positive and negative for easy calculations
    for sColumn in dfDeltaUp.columns:
        tsColDown = dfDeltaDown[sColumn]
        tsColDown[tsColDown >= 0] = 0
        tsColUp = dfDeltaUp[sColumn]
        tsColUp[tsColUp <= 0] = 0
    # Note we take abs() of negative values, all should be positive now
    dfRolUp = pand.rolling_mean(dfDeltaUp, lLookback, min_periods=1)
    dfRolDown = pand.rolling_mean(dfDeltaDown, lLookback, min_periods=1).abs()
    # relative strength
    dfRS = dfRolUp / dfRolDown
    # Standard Wilder-style mapping of RS onto the 0-100 RSI scale.
    dfRSI = 100.0 - (100.0 / (1.0 + dfRS))
    if b_human:
        # Plot mode: rescale every symbol so its first close equals 1000.
        for sym in dData['close']:
            x=1000/dData['close'][sym][0]
            dData['close'][sym]=dData['close'][sym]*x
        return dData['close']
    return dfRSI
def featDrawDown( dData, lLookback=30, b_human=False):
    '''
    @summary: Calculate Drawdown for the stock
    @param dData: Dictionary of data to use
    @param lLookback: Days to look back
    @param b_human: accepted for interface compatibility only.
                    NOTE(review): the original b_human branch sat AFTER an
                    unconditional return and was unreachable, so it has been
                    removed together with an unused dfRet allocation;
                    observable behavior is unchanged for every caller.
    @return: DataFrame array containing values
    @warning: Drawdown and RunUp can depend heavily on sample period
    '''
    dfPrice = dData['close']
    # Rolling maximum (running peak) over the lookback window.
    dfMax = pand.rolling_max(dfPrice, lLookback)
    # Fractional drop from the rolling peak.
    return (dfMax - dfPrice) / dfMax
def featRunUp( dData, lLookback=30, b_human=False ):
    '''
    @summary: CalculateRunup for the stock
    @param dData: Dictionary of data to use
    @param lLookback: Number of days to calculate min over
    @param b_human: accepted for interface compatibility only.
                    NOTE(review): the original b_human branch sat AFTER an
                    unconditional return and was unreachable, so it has been
                    removed; observable behavior is unchanged for every
                    caller.
    @return: DataFrame array containing feature values
    @warning: Drawdown and RunUp can depend heavily on when the sample starts
    '''
    dfPrice = dData['close']
    # Rolling minimum over the lookback window (the original misleadingly
    # named this local dfMax).
    dfMin = pand.rolling_min(dfPrice, lLookback)
    # Ratio of the current price to the rolling trough.
    return dfPrice / dfMin
def featVolumeDelta( dData, lLookback=30, b_human=False ):
    '''
    @summary: Calculate moving average
    @param dData: Dictionary of data to use
    @param lLookback: Number of days to use for MA
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    dfVol = dData['volume']
    # Rolling-average volume relative to today's volume.
    dfRatio = pand.rolling_mean(dfVol, lLookback) / dfVol
    if b_human:
        # Plot mode: rescale every symbol so its first close equals 1000.
        for sSym in dData['close']:
            fScale = 1000 / dData['close'][sSym][0]
            dData['close'][sSym] = dData['close'][sSym] * fScale
        return dData['close']
    return dfRatio
def featAroon( dData, bDown=False, lLookback=25, b_human=False ):
    '''
    @summary: Calculate Aroon - indicator indicating days since a 25-day
              high/low, weighted between 0 and 100
    @param dData: Dictionary of data to use
    @param bDown: If false, calculates aroonUp (high), else aroonDown (lows)
    @param lLookback: Days to lookback to calculate high/low from
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing feature values
    '''
    dfPrice = dData['close']
    #Feature DataFrame will be 1:1, we can use the price as a template
    dfRet = pand.DataFrame( index=dfPrice.index, columns=dfPrice.columns,
                            data=np.zeros(dfPrice.shape) )
    #Loop through time
    for i in range(dfPrice.shape[0]):
        if( (i-lLookback) < 0 ):
            # Not enough history for a full window yet.
            dfRet.ix[i,:] = np.NAN
        else:
            # Reversed slice walks backwards from day i, so argmin/argmax
            # yields the number of days since the extreme.
            # NOTE(review): when i == lLookback the stop bound (i-lLookback)
            # excludes row 0 from the window — confirm this off-by-one is
            # intended.
            if bDown:
                dfRet.ix[i,:] = dfPrice.values[i:(i-lLookback):-1,:].argmin(
                                axis=0)
            else:
                dfRet.ix[i,:] = dfPrice.values[i:(i-lLookback):-1,:].argmax(
                                axis=0)
    # Map "days since extreme" onto the conventional 0-100 Aroon scale.
    dfRet = ((lLookback - 1.) - dfRet) / (lLookback - 1.) * 100.
    if b_human:
        # Plot mode: rescale every symbol so its first close equals 1000.
        for sym in dData['close']:
            x=1000/dData['close'][sym][0]
            dData['close'][sym]=dData['close'][sym]*x
        return dData['close']
    return dfRet
def featAroonDown( dData, lLookback=25, b_human=False ):
    '''
    @summary: Convenience wrapper around featAroon with bDown=True
              (days since the lookback low instead of the high).
    '''
    return featAroon(dData, lLookback=lLookback, bDown=True, b_human=b_human)
def featStochastic( dData, lLookback=14, bFast=True, lMA=3, b_human=False ):
    '''
    @summary: Calculate stochastic oscillator - indicates what range of recent low-high spread we are in.
    @param dData: Dictionary of data to use
    @param bFast: If false, do slow stochastics, 3 day MA, if not use fast, no MA
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing feature values
    '''
    # Rolling extremes of the low/high series over the lookback window.
    dfMins = pand.rolling_min(dData['low'], lLookback)
    dfMaxs = pand.rolling_max(dData['high'], lLookback)
    # Position of today's close inside the recent low-high spread.
    dfStoch = (dData['close'] - dfMins) / (dfMaxs - dfMins)
    if not bFast:
        # Slow stochastic smooths the fast value with a short moving average.
        dfStoch = pand.rolling_mean(dfStoch, lMA)
    if b_human:
        # Plot mode: rescale every symbol so its first close equals 1000.
        for sSym in dData['close']:
            fScale = 1000 / dData['close'][sSym][0]
            dData['close'][sSym] = dData['close'][sSym] * fScale
        return dData['close']
    return dfStoch
def featBeta( dData, lLookback=14, sMarket='$SPX', b_human=False ):
    '''
    @summary: Calculate beta relative to a given stock/index.
    @param dData: Dictionary of data to use
    @param sMarket: Market symbol to calculate beta relative to
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing feature values
    '''
    # Copy prices and convert to returns in place.
    dfRets = dData['close'].copy()
    tsu.returnize1(dfRets.values)
    # Rolling covariance of every symbol with the market, then normalize
    # by the market's own variance (cov(market, market)).
    dfBeta = pand.rolling_cov(dfRets[sMarket], dfRets, lLookback)
    dfBeta = dfBeta / dfBeta[sMarket]
    if b_human:
        # Plot mode: rescale every symbol so its first close equals 1000.
        for sSym in dData['close']:
            fScale = 1000 / dData['close'][sSym][0]
            dData['close'][sSym] = dData['close'][sSym] * fScale
        return dData['close']
    return dfBeta
def featBollinger( dData, lLookback=20, b_human=False ):
    '''
    @summary: Calculate bollinger position as a function of std deviations.
    @param dData: Dictionary of data to use
    @param lLookback: Number of days to calculate moving average over
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing feature values
    '''
    if b_human:
        dfPrice = dData['close']
        # Lower band: rolling average minus two standard deviations.
        nstdsRet = pand.DataFrame( index=dfPrice.index, columns=dfPrice.columns, data=np.zeros(dfPrice.shape) )
        #average minus standard deviation
        # Upper band: rolling average plus two standard deviations.
        pstdsRet = pand.DataFrame( index=dfPrice.index, columns=dfPrice.columns, data=np.zeros(dfPrice.shape) )
        data3 = pand.DataFrame({"Raw":dfPrice[dfPrice.columns[0]]})
        for sym in dfPrice.columns:
            if sym != '$SPX' and sym != '$VIX':
                tsPrice = dfPrice[sym]
                nstdRet = nstdsRet[sym]
                pstdRet = pstdsRet[sym]
                for i in range(len(tsPrice.index)):
                    if i < lLookback - 1:
                        # Not enough history for a full window yet.
                        nstdRet[i] = float('nan')
                        pstdRet[i] = float('nan')
                        continue
                    # Window of the last lLookback closes ending at i.
                    fAvg = np.average( tsPrice[ i-(lLookback-1):i+1 ] )
                    fStd = np.std( tsPrice[ i-(lLookback-1):i+1 ] )
                    pstdRet[i] = fAvg+2.0*fStd
                    nstdRet[i] = fAvg-2.0*fStd
                data3[sym] = dfPrice[sym]
                data3[sym + " Lower"] = nstdsRet[sym]
                data3[sym + " Upper"] = pstdsRet[sym]
        del data3['Raw']
        return data3
    else:
        dfPrice = dData['close']
        #''' Feature DataFrame will be 1:1, we can use the price as a template '''
        dfRet = pand.DataFrame( index=dfPrice.index, columns=dfPrice.columns, data=np.zeros(dfPrice.shape) )
        #''' Loop through stocks '''
        # Signed distance from the rolling mean in units of two std devs:
        # +1 at the upper band, -1 at the lower band.
        dfAvg = pand.rolling_mean(dfPrice, lLookback)
        dfStd = pand.rolling_std(dfPrice, lLookback)
        return (dfPrice - dfAvg) / (2.0*dfStd)
def featCorrelation( dData, lLookback=20, sRel='$SPX', b_human=False ):
    '''
    @summary: Calculate correlation of two stocks.
    @param dData: Dictionary of data to use
    @param lLookback: Number of days to calculate moving average over
    @param sRel: Symbol to correlate every other symbol against
    @param b_human: if true return dataframe to plot
    @raise KeyError: if sRel is not present in the close data
    @return: DataFrame array containing feature values
    '''
    dfPrice = dData['close']
    if sRel not in dfPrice.columns:
        raise KeyError( "%s not found in data provided to featCorrelation"%sRel )
    #''' Calculate returns '''
    # Returnize a copy of the raw values so the caller's frame is untouched.
    naRets = dfPrice.values.copy()
    tsu.returnize1(naRets)
    dfHistReturns = pand.DataFrame( index=dfPrice.index, columns=dfPrice.columns, data=naRets )
    #''' Feature DataFrame will be 1:1, we can use the price as a template '''
    dfRet = pand.DataFrame( index=dfPrice.index, columns=dfPrice.columns, data=np.zeros(dfPrice.shape) )
    #''' Loop through stocks '''
    for sStock in dfHistReturns.columns:
        tsHistReturns = dfHistReturns[sStock]
        tsRelativeReturns = dfHistReturns[sRel]
        tsRet = dfRet[sStock]
        #''' Loop over time '''
        for i in range(len(tsHistReturns.index)):
            #''' NaN if not enough data to do lookback '''
            if i < lLookback - 1:
                tsRet[i] = float('nan')
                continue
            # Pearson correlation over the trailing lLookback-day window;
            # [0,1] picks the cross term of the 2x2 correlation matrix.
            naCorr = np.corrcoef( tsHistReturns[ i-(lLookback-1):i+1 ], tsRelativeReturns[ i-(lLookback-1):i+1 ] )
            tsRet[i] = naCorr[0,1]
    if b_human:
        # Plot mode: rescale every symbol so its first close equals 1000.
        for sym in dData['close']:
            x=1000/dData['close'][sym][0]
            dData['close'][sym]=dData['close'][sym]*x
        return dData['close']
    return dfRet
def featPrice(dData, b_human=False):
    '''
    @summary: Price feature
    @param dData: Dictionary of data to use
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    if not b_human:
        return dData['close']
    # Plot mode: rescale every symbol so its first close equals 1000.
    for sSym in dData['close']:
        fScale = 1000 / dData['close'][sSym][0]
        dData['close'][sSym] = dData['close'][sSym] * fScale
    return dData['close']
def featVolume(dData, b_human=False):
    '''
    @summary: Volume feature
    @param dData: Dictionary of data to use
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    if not b_human:
        return dData['volume']
    # Plot mode: rescale every symbol so its first close equals 1000.
    for sSym in dData['close']:
        fScale = 1000 / dData['close'][sSym][0]
        dData['close'][sSym] = dData['close'][sSym] * fScale
    return dData['close']
def featRand( dData, b_human=False ):
    '''
    @summary: Random feature - used for robustness testing
    @param dData: Dictionary of data to use
    @param b_human: if true return dataframe to plot
    @return: DataFrame array containing values
    '''
    dfPrice = dData['close']
    # Same shape/index/columns as the price frame, filled with N(0,1) noise.
    dfNoise = pand.DataFrame(index=dfPrice.index, columns=dfPrice.columns,
                            data=np.random.randn(*dfPrice.shape))
    if b_human:
        # Plot mode: rescale every symbol so its first close equals 1000.
        for sSym in dData['close']:
            fScale = 1000 / dData['close'][sSym][0]
            dData['close'][sSym] = dData['close'][sSym] * fScale
        return dData['close']
    return dfNoise
# This module is a library of feature functions; there is nothing to run
# directly.
if __name__ == '__main__':
    pass
| [
"pandas.DataFrame",
"pandas.rolling_std",
"numpy.average",
"pandas.rolling_mean",
"numpy.random.randn",
"pandas.ewma",
"numpy.corrcoef",
"numpy.std",
"pandas.rolling_sum",
"numpy.zeros",
"QSTK.qstkutil.qsdateutil.getNextOptionClose",
"datetime.datetime",
"QSTK.qstkutil.tsutil.returnize0",
... | [((1164, 1194), 'QSTK.qstkutil.tsutil.returnize0', 'tsu.returnize0', (['dfPrice.values'], {}), '(dfPrice.values)\n', (1178, 1194), True, 'import QSTK.qstkutil.tsutil as tsu\n'), ((1239, 1275), 'pandas.rolling_sum', 'pand.rolling_sum', (['dfPrice', 'lLookback'], {}), '(dfPrice, lLookback)\n', (1255, 1275), True, 'import pandas as pand\n'), ((1909, 1948), 'pandas.rolling_max', 'pand.rolling_max', (['dfPrice', 'lLookback', '(1)'], {}), '(dfPrice, lLookback, 1)\n', (1925, 1948), True, 'import pandas as pand\n'), ((1979, 2018), 'pandas.rolling_min', 'pand.rolling_min', (['dfPrice', 'lLookback', '(1)'], {}), '(dfPrice, lLookback, 1)\n', (1995, 2018), True, 'import pandas as pand\n'), ((5151, 5188), 'pandas.rolling_mean', 'pand.rolling_mean', (['dfPrice', 'lLookback'], {}), '(dfPrice, lLookback)\n', (5168, 5188), True, 'import pandas as pand\n'), ((5998, 6032), 'pandas.ewma', 'pand.ewma', (['dfPrice'], {'span': 'lLookback'}), '(dfPrice, span=lLookback)\n', (6007, 6032), True, 'import pandas as pand\n'), ((6841, 6871), 'QSTK.qstkutil.tsutil.returnize1', 'tsu.returnize1', (['dfPrice.values'], {}), '(dfPrice.values)\n', (6855, 6871), True, 'import QSTK.qstkutil.tsutil as tsu\n'), ((6884, 6920), 'pandas.rolling_std', 'pand.rolling_std', (['dfPrice', 'lLookback'], {}), '(dfPrice, lLookback)\n', (6900, 6920), True, 'import pandas as pand\n'), ((8042, 8096), 'pandas.rolling_mean', 'pand.rolling_mean', (['dfDeltaUp', 'lLookback'], {'min_periods': '(1)'}), '(dfDeltaUp, lLookback, min_periods=1)\n', (8059, 8096), True, 'import pandas as pand\n'), ((9097, 9133), 'pandas.rolling_max', 'pand.rolling_max', (['dfPrice', 'lLookback'], {}), '(dfPrice, lLookback)\n', (9113, 9133), True, 'import pandas as pand\n'), ((9825, 9861), 'pandas.rolling_min', 'pand.rolling_min', (['dfPrice', 'lLookback'], {}), '(dfPrice, lLookback)\n', (9841, 9861), True, 'import pandas as pand\n'), ((10461, 10499), 'pandas.rolling_mean', 'pand.rolling_mean', (['dfVolume', 'lLookback'], {}), '(dfVolume, 
lLookback)\n', (10478, 10499), True, 'import pandas as pand\n'), ((12934, 12968), 'pandas.rolling_min', 'pand.rolling_min', (['dfLow', 'lLookback'], {}), '(dfLow, lLookback)\n', (12950, 12968), True, 'import pandas as pand\n'), ((12983, 13018), 'pandas.rolling_max', 'pand.rolling_max', (['dfHigh', 'lLookback'], {}), '(dfHigh, lLookback)\n', (12999, 13018), True, 'import pandas as pand\n'), ((13904, 13933), 'QSTK.qstkutil.tsutil.returnize1', 'tsu.returnize1', (['dfRets.values'], {}), '(dfRets.values)\n', (13918, 13933), True, 'import QSTK.qstkutil.tsutil as tsu\n'), ((13979, 14024), 'pandas.rolling_cov', 'pand.rolling_cov', (['tsMarket', 'dfRets', 'lLookback'], {}), '(tsMarket, dfRets, lLookback)\n', (13995, 14024), True, 'import pandas as pand\n'), ((16907, 16929), 'QSTK.qstkutil.tsutil.returnize1', 'tsu.returnize1', (['naRets'], {}), '(naRets)\n', (16921, 16929), True, 'import QSTK.qstkutil.tsutil as tsu\n'), ((16950, 17023), 'pandas.DataFrame', 'pand.DataFrame', ([], {'index': 'dfPrice.index', 'columns': 'dfPrice.columns', 'data': 'naRets'}), '(index=dfPrice.index, columns=dfPrice.columns, data=naRets)\n', (16964, 17023), True, 'import pandas as pand\n'), ((5312, 5360), 'pandas.DataFrame', 'pand.DataFrame', (["{'Raw': data2[data2.columns[0]]}"], {}), "({'Raw': data2[data2.columns[0]]})\n", (5326, 5360), True, 'import pandas as pand\n'), ((6155, 6203), 'pandas.DataFrame', 'pand.DataFrame', (["{'Raw': data2[data2.columns[0]]}"], {}), "({'Raw': data2[data2.columns[0]]})\n", (6169, 6203), True, 'import pandas as pand\n'), ((13205, 13236), 'pandas.rolling_mean', 'pand.rolling_mean', (['dfStoch', 'lMA'], {}), '(dfStoch, lMA)\n', (13222, 13236), True, 'import pandas as pand\n'), ((14960, 15012), 'pandas.DataFrame', 'pand.DataFrame', (["{'Raw': dfPrice[dfPrice.columns[0]]}"], {}), "({'Raw': dfPrice[dfPrice.columns[0]]})\n", (14974, 15012), True, 'import pandas as pand\n'), ((16175, 16212), 'pandas.rolling_mean', 'pand.rolling_mean', (['dfPrice', 'lLookback'], {}), 
'(dfPrice, lLookback)\n', (16192, 16212), True, 'import pandas as pand\n'), ((16229, 16265), 'pandas.rolling_std', 'pand.rolling_std', (['dfPrice', 'lLookback'], {}), '(dfPrice, lLookback)\n', (16245, 16265), True, 'import pandas as pand\n'), ((2776, 2799), 'numpy.zeros', 'np.zeros', (['dfPrice.shape'], {}), '(dfPrice.shape)\n', (2784, 2799), True, 'import numpy as np\n'), ((3926, 3949), 'numpy.zeros', 'np.zeros', (['dfPrice.shape'], {}), '(dfPrice.shape)\n', (3934, 3949), True, 'import numpy as np\n'), ((4271, 4314), 'QSTK.qstkutil.qsdateutil.getLastOptionClose', 'du.getLastOptionClose', (['today', 'tsPrice.index'], {}), '(today, tsPrice.index)\n', (4292, 4314), True, 'import QSTK.qstkutil.qsdateutil as du\n'), ((4388, 4431), 'QSTK.qstkutil.qsdateutil.getNextOptionClose', 'du.getNextOptionClose', (['today', 'tsPrice.index'], {}), '(today, tsPrice.index)\n', (4409, 4431), True, 'import QSTK.qstkutil.qsdateutil as du\n'), ((8113, 8169), 'pandas.rolling_mean', 'pand.rolling_mean', (['dfDeltaDown', 'lLookback'], {'min_periods': '(1)'}), '(dfDeltaDown, lLookback, min_periods=1)\n', (8130, 8169), True, 'import pandas as pand\n'), ((9054, 9077), 'numpy.zeros', 'np.zeros', (['dfPrice.shape'], {}), '(dfPrice.shape)\n', (9062, 9077), True, 'import numpy as np\n'), ((11446, 11469), 'numpy.zeros', 'np.zeros', (['dfPrice.shape'], {}), '(dfPrice.shape)\n', (11454, 11469), True, 'import numpy as np\n'), ((17185, 17208), 'numpy.zeros', 'np.zeros', (['dfPrice.shape'], {}), '(dfPrice.shape)\n', (17193, 17208), True, 'import numpy as np\n'), ((17717, 17820), 'numpy.corrcoef', 'np.corrcoef', (['tsHistReturns[i - (lLookback - 1):i + 1]', 'tsRelativeReturns[i - (lLookback - 1):i + 1]'], {}), '(tsHistReturns[i - (lLookback - 1):i + 1], tsRelativeReturns[i -\n (lLookback - 1):i + 1])\n', (17728, 17820), True, 'import numpy as np\n'), ((19405, 19436), 'numpy.random.randn', 'np.random.randn', (['*dfPrice.shape'], {}), '(*dfPrice.shape)\n', (19420, 19436), True, 'import numpy as np\n'), 
((3128, 3157), 'datetime.datetime', 'dt.datetime', (['today.year', '(1)', '(1)'], {}), '(today.year, 1, 1)\n', (3139, 3157), True, 'import datetime as dt\n'), ((14758, 14781), 'numpy.zeros', 'np.zeros', (['dfPrice.shape'], {}), '(dfPrice.shape)\n', (14766, 14781), True, 'import numpy as np\n'), ((14912, 14935), 'numpy.zeros', 'np.zeros', (['dfPrice.shape'], {}), '(dfPrice.shape)\n', (14920, 14935), True, 'import numpy as np\n'), ((16087, 16110), 'numpy.zeros', 'np.zeros', (['dfPrice.shape'], {}), '(dfPrice.shape)\n', (16095, 16110), True, 'import numpy as np\n'), ((15473, 15519), 'numpy.average', 'np.average', (['tsPrice[i - (lLookback - 1):i + 1]'], {}), '(tsPrice[i - (lLookback - 1):i + 1])\n', (15483, 15519), True, 'import numpy as np\n'), ((15545, 15587), 'numpy.std', 'np.std', (['tsPrice[i - (lLookback - 1):i + 1]'], {}), '(tsPrice[i - (lLookback - 1):i + 1])\n', (15551, 15587), True, 'import numpy as np\n')] |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for Ftrl operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class FtrlOptimizerTest(tf.test.TestCase):
  """Tests tf.train.FtrlOptimizer against hard-coded golden values and
  against GradientDescent/Adagrad in the regimes where FTRL-Proximal is
  mathematically equivalent to them."""
  # Two variables with fixed gradients are stepped a few times; the expected
  # values below are precomputed golden numbers for those exact settings.
  def testFtrlwithoutRegularization(self):
    for dtype in [tf.half, tf.float32]:
      with self.test_session() as sess:
        var0 = tf.Variable([0.0, 0.0], dtype=dtype)
        var1 = tf.Variable([0.0, 0.0], dtype=dtype)
        grads0 = tf.constant([0.1, 0.2], dtype=dtype)
        grads1 = tf.constant([0.01, 0.02], dtype=dtype)
        opt = tf.train.FtrlOptimizer(3.0,
                                     initial_accumulator_value=0.1,
                                     l1_regularization_strength=0.0,
                                     l2_regularization_strength=0.0)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        tf.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllClose([0.0, 0.0], v0_val)
        self.assertAllClose([0.0, 0.0], v1_val)
        # Run 3 steps FTRL
        for _ in range(3):
          update.run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType(np.array([-2.60260963, -4.29698515]),
                                           v0_val)
        self.assertAllCloseAccordingToType(np.array([-0.28432083, -0.56694895]),
                                           v1_val)
  # Same as above but with non-zero initial variable values.
  def testFtrlwithoutRegularization2(self):
    for dtype in [tf.half, tf.float32]:
      with self.test_session() as sess:
        var0 = tf.Variable([1.0, 2.0], dtype=dtype)
        var1 = tf.Variable([4.0, 3.0], dtype=dtype)
        grads0 = tf.constant([0.1, 0.2], dtype=dtype)
        grads1 = tf.constant([0.01, 0.02], dtype=dtype)
        opt = tf.train.FtrlOptimizer(3.0,
                                     initial_accumulator_value=0.1,
                                     l1_regularization_strength=0.0,
                                     l2_regularization_strength=0.0)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        tf.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
        self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
        # Run 3 steps FTRL
        for _ in range(3):
          update.run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType(np.array([-2.55607247, -3.98729396]),
                                           v0_val)
        self.assertAllCloseAccordingToType(np.array([-0.28232238, -0.56096673]),
                                           v1_val)
  # L1 regularization only; 10 update steps.
  def testFtrlWithL1(self):
    for dtype in [tf.half, tf.float32]:
      with self.test_session() as sess:
        var0 = tf.Variable([1.0, 2.0], dtype=dtype)
        var1 = tf.Variable([4.0, 3.0], dtype=dtype)
        grads0 = tf.constant([0.1, 0.2], dtype=dtype)
        grads1 = tf.constant([0.01, 0.02], dtype=dtype)
        opt = tf.train.FtrlOptimizer(3.0,
                                     initial_accumulator_value=0.1,
                                     l1_regularization_strength=0.001,
                                     l2_regularization_strength=0.0)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        tf.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
        self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
        # Run 10 steps FTRL
        for _ in range(10):
          update.run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType(
            np.array([-7.66718769, -10.91273689]),
            v0_val)
        self.assertAllCloseAccordingToType(
            np.array([-0.93460727, -1.86147261]),
            v1_val)
  # Both L1 and L2 regularization; 10 update steps.
  def testFtrlWithL1_L2(self):
    for dtype in [tf.half, tf.float32]:
      with self.test_session() as sess:
        var0 = tf.Variable([1.0, 2.0], dtype=dtype)
        var1 = tf.Variable([4.0, 3.0], dtype=dtype)
        grads0 = tf.constant([0.1, 0.2], dtype=dtype)
        grads1 = tf.constant([0.01, 0.02], dtype=dtype)
        opt = tf.train.FtrlOptimizer(3.0,
                                     initial_accumulator_value=0.1,
                                     l1_regularization_strength=0.001,
                                     l2_regularization_strength=2.0)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        tf.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
        self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
        # Run 10 steps FTRL
        for _ in range(10):
          update.run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType(np.array([-0.24059935, -0.46829352]),
                                           v0_val)
        self.assertAllCloseAccordingToType(np.array([-0.02406147, -0.04830509]),
                                           v1_val)
  # Shared driver for the equivalence tests below: builds two variables
  # (dense or IndexedSlices gradients), applies `opt` for `steps` updates
  # and returns the final variable values. Assumes the caller has already
  # entered a default session (uses tf.get_default_session()).
  def applyOptimizer(self, opt, dtype, steps=5, is_sparse=False):
    if is_sparse:
      var0 = tf.Variable([[0.0], [0.0]], dtype=dtype)
      var1 = tf.Variable([[0.0], [0.0]], dtype=dtype)
      grads0 = tf.IndexedSlices(tf.constant([0.1], shape=[1, 1], dtype=dtype),
                                tf.constant([0]),
                                tf.constant([2, 1]))
      grads1 = tf.IndexedSlices(tf.constant([0.02], shape=[1, 1], dtype=dtype),
                                tf.constant([1]),
                                tf.constant([2, 1]))
    else:
      var0 = tf.Variable([0.0, 0.0], dtype=dtype)
      var1 = tf.Variable([0.0, 0.0], dtype=dtype)
      grads0 = tf.constant([0.1, 0.2], dtype=dtype)
      grads1 = tf.constant([0.01, 0.02], dtype=dtype)
    update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
    tf.global_variables_initializer().run()
    sess = tf.get_default_session()
    v0_val, v1_val = sess.run([var0, var1])
    if is_sparse:
      self.assertAllCloseAccordingToType([[0.0], [0.0]], v0_val)
      self.assertAllCloseAccordingToType([[0.0], [0.0]], v1_val)
    else:
      self.assertAllCloseAccordingToType([0.0, 0.0], v0_val)
      self.assertAllCloseAccordingToType([0.0, 0.0], v1_val)
    # Run Ftrl for a few steps
    for _ in range(steps):
      update.run()
    v0_val, v1_val = sess.run([var0, var1])
    return v0_val, v1_val
  # When variables are initialized with Zero, FTRL-Proximal has two properties:
  # 1. Without L1&L2 but with fixed learning rate, FTRL-Proximal is identical
  # with GradientDescent.
  # 2. Without L1&L2 but with adaptive learning rate, FTRL-Proximal is identical
  # with Adagrad.
  # So, basing on these two properties, we test if our implementation of
  # FTRL-Proximal performs same updates as Adagrad or GradientDescent.
  def testEquivAdagradwithoutRegularization(self):
    for dtype in [tf.half, tf.float32]:
      with self.test_session():
        val0, val1 = self.applyOptimizer(
            tf.train.FtrlOptimizer(3.0,
                                   # Adagrad learning rate
                                   learning_rate_power=-0.5,
                                   initial_accumulator_value=0.1,
                                   l1_regularization_strength=0.0,
                                   l2_regularization_strength=0.0),
            dtype)
      with self.test_session():
        val2, val3 = self.applyOptimizer(
            tf.train.AdagradOptimizer(3.0, initial_accumulator_value=0.1),
            dtype)
      self.assertAllCloseAccordingToType(val0, val2)
      self.assertAllCloseAccordingToType(val1, val3)
  def testEquivSparseAdagradwithoutRegularization(self):
    for dtype in [tf.half, tf.float32]:
      with self.test_session():
        val0, val1 = self.applyOptimizer(
            tf.train.FtrlOptimizer(3.0,
                                   # Adagrad learning rate
                                   learning_rate_power=-0.5,
                                   initial_accumulator_value=0.1,
                                   l1_regularization_strength=0.0,
                                   l2_regularization_strength=0.0),
            dtype,
            is_sparse=True)
      with self.test_session():
        val2, val3 = self.applyOptimizer(
            tf.train.AdagradOptimizer(3.0, initial_accumulator_value=0.1),
            dtype, is_sparse=True)
      self.assertAllCloseAccordingToType(val0, val2)
      self.assertAllCloseAccordingToType(val1, val3)
  def testEquivSparseGradientDescentwithoutRegularization(self):
    for dtype in [tf.half, tf.float32]:
      with self.test_session():
        val0, val1 = self.applyOptimizer(
            tf.train.FtrlOptimizer(3.0,
                                   # Fixed learning rate
                                   learning_rate_power=-0.0,
                                   initial_accumulator_value=0.1,
                                   l1_regularization_strength=0.0,
                                   l2_regularization_strength=0.0),
            dtype,
            is_sparse=True)
      with self.test_session():
        val2, val3 = self.applyOptimizer(
            tf.train.GradientDescentOptimizer(3.0), dtype, is_sparse=True)
      self.assertAllCloseAccordingToType(val0, val2)
      self.assertAllCloseAccordingToType(val1, val3)
  def testEquivGradientDescentwithoutRegularization(self):
    for dtype in [tf.half, tf.float32]:
      with self.test_session():
        val0, val1 = self.applyOptimizer(
            tf.train.FtrlOptimizer(3.0,
                                   # Fixed learning rate
                                   learning_rate_power=-0.0,
                                   initial_accumulator_value=0.1,
                                   l1_regularization_strength=0.0,
                                   l2_regularization_strength=0.0),
            dtype)
      with self.test_session():
        val2, val3 = self.applyOptimizer(
            tf.train.GradientDescentOptimizer(3.0), dtype)
      self.assertAllCloseAccordingToType(val0, val2)
      self.assertAllCloseAccordingToType(val1, val3)
# Standard TensorFlow test entry point: discovers and runs the test case.
if __name__ == "__main__":
  tf.test.main()
| [
"tensorflow.test.main",
"tensorflow.global_variables_initializer",
"tensorflow.train.AdagradOptimizer",
"tensorflow.train.FtrlOptimizer",
"tensorflow.constant",
"tensorflow.Variable",
"numpy.array",
"tensorflow.train.GradientDescentOptimizer",
"tensorflow.get_default_session"
] | [((11133, 11147), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (11145, 11147), True, 'import tensorflow as tf\n'), ((6847, 6871), 'tensorflow.get_default_session', 'tf.get_default_session', ([], {}), '()\n', (6869, 6871), True, 'import tensorflow as tf\n'), ((6044, 6084), 'tensorflow.Variable', 'tf.Variable', (['[[0.0], [0.0]]'], {'dtype': 'dtype'}), '([[0.0], [0.0]], dtype=dtype)\n', (6055, 6084), True, 'import tensorflow as tf\n'), ((6098, 6138), 'tensorflow.Variable', 'tf.Variable', (['[[0.0], [0.0]]'], {'dtype': 'dtype'}), '([[0.0], [0.0]], dtype=dtype)\n', (6109, 6138), True, 'import tensorflow as tf\n'), ((6527, 6563), 'tensorflow.Variable', 'tf.Variable', (['[0.0, 0.0]'], {'dtype': 'dtype'}), '([0.0, 0.0], dtype=dtype)\n', (6538, 6563), True, 'import tensorflow as tf\n'), ((6577, 6613), 'tensorflow.Variable', 'tf.Variable', (['[0.0, 0.0]'], {'dtype': 'dtype'}), '([0.0, 0.0], dtype=dtype)\n', (6588, 6613), True, 'import tensorflow as tf\n'), ((6629, 6665), 'tensorflow.constant', 'tf.constant', (['[0.1, 0.2]'], {'dtype': 'dtype'}), '([0.1, 0.2], dtype=dtype)\n', (6640, 6665), True, 'import tensorflow as tf\n'), ((6681, 6719), 'tensorflow.constant', 'tf.constant', (['[0.01, 0.02]'], {'dtype': 'dtype'}), '([0.01, 0.02], dtype=dtype)\n', (6692, 6719), True, 'import tensorflow as tf\n'), ((1071, 1107), 'tensorflow.Variable', 'tf.Variable', (['[0.0, 0.0]'], {'dtype': 'dtype'}), '([0.0, 0.0], dtype=dtype)\n', (1082, 1107), True, 'import tensorflow as tf\n'), ((1123, 1159), 'tensorflow.Variable', 'tf.Variable', (['[0.0, 0.0]'], {'dtype': 'dtype'}), '([0.0, 0.0], dtype=dtype)\n', (1134, 1159), True, 'import tensorflow as tf\n'), ((1177, 1213), 'tensorflow.constant', 'tf.constant', (['[0.1, 0.2]'], {'dtype': 'dtype'}), '([0.1, 0.2], dtype=dtype)\n', (1188, 1213), True, 'import tensorflow as tf\n'), ((1231, 1269), 'tensorflow.constant', 'tf.constant', (['[0.01, 0.02]'], {'dtype': 'dtype'}), '([0.01, 0.02], dtype=dtype)\n', (1242, 1269), True, 'import 
tensorflow as tf\n'), ((1284, 1410), 'tensorflow.train.FtrlOptimizer', 'tf.train.FtrlOptimizer', (['(3.0)'], {'initial_accumulator_value': '(0.1)', 'l1_regularization_strength': '(0.0)', 'l2_regularization_strength': '(0.0)'}), '(3.0, initial_accumulator_value=0.1,\n l1_regularization_strength=0.0, l2_regularization_strength=0.0)\n', (1306, 1410), True, 'import tensorflow as tf\n'), ((2316, 2352), 'tensorflow.Variable', 'tf.Variable', (['[1.0, 2.0]'], {'dtype': 'dtype'}), '([1.0, 2.0], dtype=dtype)\n', (2327, 2352), True, 'import tensorflow as tf\n'), ((2368, 2404), 'tensorflow.Variable', 'tf.Variable', (['[4.0, 3.0]'], {'dtype': 'dtype'}), '([4.0, 3.0], dtype=dtype)\n', (2379, 2404), True, 'import tensorflow as tf\n'), ((2422, 2458), 'tensorflow.constant', 'tf.constant', (['[0.1, 0.2]'], {'dtype': 'dtype'}), '([0.1, 0.2], dtype=dtype)\n', (2433, 2458), True, 'import tensorflow as tf\n'), ((2476, 2514), 'tensorflow.constant', 'tf.constant', (['[0.01, 0.02]'], {'dtype': 'dtype'}), '([0.01, 0.02], dtype=dtype)\n', (2487, 2514), True, 'import tensorflow as tf\n'), ((2530, 2656), 'tensorflow.train.FtrlOptimizer', 'tf.train.FtrlOptimizer', (['(3.0)'], {'initial_accumulator_value': '(0.1)', 'l1_regularization_strength': '(0.0)', 'l2_regularization_strength': '(0.0)'}), '(3.0, initial_accumulator_value=0.1,\n l1_regularization_strength=0.0, l2_regularization_strength=0.0)\n', (2552, 2656), True, 'import tensorflow as tf\n'), ((3575, 3611), 'tensorflow.Variable', 'tf.Variable', (['[1.0, 2.0]'], {'dtype': 'dtype'}), '([1.0, 2.0], dtype=dtype)\n', (3586, 3611), True, 'import tensorflow as tf\n'), ((3627, 3663), 'tensorflow.Variable', 'tf.Variable', (['[4.0, 3.0]'], {'dtype': 'dtype'}), '([4.0, 3.0], dtype=dtype)\n', (3638, 3663), True, 'import tensorflow as tf\n'), ((3681, 3717), 'tensorflow.constant', 'tf.constant', (['[0.1, 0.2]'], {'dtype': 'dtype'}), '([0.1, 0.2], dtype=dtype)\n', (3692, 3717), True, 'import tensorflow as tf\n'), ((3735, 3773), 'tensorflow.constant', 
'tf.constant', (['[0.01, 0.02]'], {'dtype': 'dtype'}), '([0.01, 0.02], dtype=dtype)\n', (3746, 3773), True, 'import tensorflow as tf\n'), ((3789, 3917), 'tensorflow.train.FtrlOptimizer', 'tf.train.FtrlOptimizer', (['(3.0)'], {'initial_accumulator_value': '(0.1)', 'l1_regularization_strength': '(0.001)', 'l2_regularization_strength': '(0.0)'}), '(3.0, initial_accumulator_value=0.1,\n l1_regularization_strength=0.001, l2_regularization_strength=0.0)\n', (3811, 3917), True, 'import tensorflow as tf\n'), ((4806, 4842), 'tensorflow.Variable', 'tf.Variable', (['[1.0, 2.0]'], {'dtype': 'dtype'}), '([1.0, 2.0], dtype=dtype)\n', (4817, 4842), True, 'import tensorflow as tf\n'), ((4858, 4894), 'tensorflow.Variable', 'tf.Variable', (['[4.0, 3.0]'], {'dtype': 'dtype'}), '([4.0, 3.0], dtype=dtype)\n', (4869, 4894), True, 'import tensorflow as tf\n'), ((4912, 4948), 'tensorflow.constant', 'tf.constant', (['[0.1, 0.2]'], {'dtype': 'dtype'}), '([0.1, 0.2], dtype=dtype)\n', (4923, 4948), True, 'import tensorflow as tf\n'), ((4966, 5004), 'tensorflow.constant', 'tf.constant', (['[0.01, 0.02]'], {'dtype': 'dtype'}), '([0.01, 0.02], dtype=dtype)\n', (4977, 5004), True, 'import tensorflow as tf\n'), ((5020, 5148), 'tensorflow.train.FtrlOptimizer', 'tf.train.FtrlOptimizer', (['(3.0)'], {'initial_accumulator_value': '(0.1)', 'l1_regularization_strength': '(0.001)', 'l2_regularization_strength': '(2.0)'}), '(3.0, initial_accumulator_value=0.1,\n l1_regularization_strength=0.001, l2_regularization_strength=2.0)\n', (5042, 5148), True, 'import tensorflow as tf\n'), ((6171, 6216), 'tensorflow.constant', 'tf.constant', (['[0.1]'], {'shape': '[1, 1]', 'dtype': 'dtype'}), '([0.1], shape=[1, 1], dtype=dtype)\n', (6182, 6216), True, 'import tensorflow as tf\n'), ((6250, 6266), 'tensorflow.constant', 'tf.constant', (['[0]'], {}), '([0])\n', (6261, 6266), True, 'import tensorflow as tf\n'), ((6300, 6319), 'tensorflow.constant', 'tf.constant', (['[2, 1]'], {}), '([2, 1])\n', (6311, 6319), True, 
'import tensorflow as tf\n'), ((6353, 6399), 'tensorflow.constant', 'tf.constant', (['[0.02]'], {'shape': '[1, 1]', 'dtype': 'dtype'}), '([0.02], shape=[1, 1], dtype=dtype)\n', (6364, 6399), True, 'import tensorflow as tf\n'), ((6433, 6449), 'tensorflow.constant', 'tf.constant', (['[1]'], {}), '([1])\n', (6444, 6449), True, 'import tensorflow as tf\n'), ((6483, 6502), 'tensorflow.constant', 'tf.constant', (['[2, 1]'], {}), '([2, 1])\n', (6494, 6502), True, 'import tensorflow as tf\n'), ((6795, 6828), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (6826, 6828), True, 'import tensorflow as tf\n'), ((1955, 1991), 'numpy.array', 'np.array', (['[-2.60260963, -4.29698515]'], {}), '([-2.60260963, -4.29698515])\n', (1963, 1991), True, 'import numpy as np\n'), ((2087, 2123), 'numpy.array', 'np.array', (['[-0.28432083, -0.56694895]'], {}), '([-0.28432083, -0.56694895])\n', (2095, 2123), True, 'import numpy as np\n'), ((3230, 3266), 'numpy.array', 'np.array', (['[-2.55607247, -3.98729396]'], {}), '([-2.55607247, -3.98729396])\n', (3238, 3266), True, 'import numpy as np\n'), ((3362, 3398), 'numpy.array', 'np.array', (['[-0.28232238, -0.56096673]'], {}), '([-0.28232238, -0.56096673])\n', (3370, 3398), True, 'import numpy as np\n'), ((4506, 4543), 'numpy.array', 'np.array', (['[-7.66718769, -10.91273689]'], {}), '([-7.66718769, -10.91273689])\n', (4514, 4543), True, 'import numpy as np\n'), ((4621, 4657), 'numpy.array', 'np.array', (['[-0.93460727, -1.86147261]'], {}), '([-0.93460727, -1.86147261])\n', (4629, 4657), True, 'import numpy as np\n'), ((5725, 5761), 'numpy.array', 'np.array', (['[-0.24059935, -0.46829352]'], {}), '([-0.24059935, -0.46829352])\n', (5733, 5761), True, 'import numpy as np\n'), ((5857, 5893), 'numpy.array', 'np.array', (['[-0.02406147, -0.04830509]'], {}), '([-0.02406147, -0.04830509])\n', (5865, 5893), True, 'import numpy as np\n'), ((7950, 8106), 'tensorflow.train.FtrlOptimizer', 
'tf.train.FtrlOptimizer', (['(3.0)'], {'learning_rate_power': '(-0.5)', 'initial_accumulator_value': '(0.1)', 'l1_regularization_strength': '(0.0)', 'l2_regularization_strength': '(0.0)'}), '(3.0, learning_rate_power=-0.5,\n initial_accumulator_value=0.1, l1_regularization_strength=0.0,\n l2_regularization_strength=0.0)\n', (7972, 8106), True, 'import tensorflow as tf\n'), ((8405, 8466), 'tensorflow.train.AdagradOptimizer', 'tf.train.AdagradOptimizer', (['(3.0)'], {'initial_accumulator_value': '(0.1)'}), '(3.0, initial_accumulator_value=0.1)\n', (8430, 8466), True, 'import tensorflow as tf\n'), ((8778, 8934), 'tensorflow.train.FtrlOptimizer', 'tf.train.FtrlOptimizer', (['(3.0)'], {'learning_rate_power': '(-0.5)', 'initial_accumulator_value': '(0.1)', 'l1_regularization_strength': '(0.0)', 'l2_regularization_strength': '(0.0)'}), '(3.0, learning_rate_power=-0.5,\n initial_accumulator_value=0.1, l1_regularization_strength=0.0,\n l2_regularization_strength=0.0)\n', (8800, 8934), True, 'import tensorflow as tf\n'), ((9261, 9322), 'tensorflow.train.AdagradOptimizer', 'tf.train.AdagradOptimizer', (['(3.0)'], {'initial_accumulator_value': '(0.1)'}), '(3.0, initial_accumulator_value=0.1)\n', (9286, 9322), True, 'import tensorflow as tf\n'), ((9658, 9814), 'tensorflow.train.FtrlOptimizer', 'tf.train.FtrlOptimizer', (['(3.0)'], {'learning_rate_power': '(-0.0)', 'initial_accumulator_value': '(0.1)', 'l1_regularization_strength': '(0.0)', 'l2_regularization_strength': '(0.0)'}), '(3.0, learning_rate_power=-0.0,\n initial_accumulator_value=0.1, l1_regularization_strength=0.0,\n l2_regularization_strength=0.0)\n', (9680, 9814), True, 'import tensorflow as tf\n'), ((10139, 10177), 'tensorflow.train.GradientDescentOptimizer', 'tf.train.GradientDescentOptimizer', (['(3.0)'], {}), '(3.0)\n', (10172, 10177), True, 'import tensorflow as tf\n'), ((10495, 10651), 'tensorflow.train.FtrlOptimizer', 'tf.train.FtrlOptimizer', (['(3.0)'], {'learning_rate_power': '(-0.0)', 
'initial_accumulator_value': '(0.1)', 'l1_regularization_strength': '(0.0)', 'l2_regularization_strength': '(0.0)'}), '(3.0, learning_rate_power=-0.0,\n initial_accumulator_value=0.1, l1_regularization_strength=0.0,\n l2_regularization_strength=0.0)\n', (10517, 10651), True, 'import tensorflow as tf\n'), ((10948, 10986), 'tensorflow.train.GradientDescentOptimizer', 'tf.train.GradientDescentOptimizer', (['(3.0)'], {}), '(3.0)\n', (10981, 10986), True, 'import tensorflow as tf\n'), ((1600, 1633), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (1631, 1633), True, 'import tensorflow as tf\n'), ((2846, 2879), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2877, 2879), True, 'import tensorflow as tf\n'), ((4107, 4140), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (4138, 4140), True, 'import tensorflow as tf\n'), ((5338, 5371), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (5369, 5371), True, 'import tensorflow as tf\n')] |
# coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Lie algebra definitions relevant for SO(8) supergravity."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import numpy
import scipy.linalg
import re
def dict_from_tensor(tensor, magnitude_threshold=0):
  """Converts a tensor to a dict of nonzero-entries keyed by index-tuple.

  Args:
    tensor: A numpy array of arbitrary rank.
    magnitude_threshold: Entries with absolute value strictly above this
      threshold are included in the result; all others are dropped.

  Returns:
    A dict mapping index-tuples to the corresponding tensor entries,
    in row-major (C-order) iteration order.
  """
  return {index_tuple: tensor[index_tuple]
          for index_tuple in numpy.ndindex(*tensor.shape)
          if abs(tensor[index_tuple]) > magnitude_threshold}
def permutation_sign(p):
  """Determines the sign of a permutation, given as a sequence of integers.

  Args:
    p: A sequence of integers forming a permutation of range(len(p)).

  Returns:
    +1 for an even permutation, -1 for an odd one.
  """
  # A permutation's parity equals the parity of its inversion count, i.e.
  # the number of index pairs that appear out of natural order.
  num_inversions = sum(
      1 for earlier, later in itertools.combinations(p, 2) if earlier > later)
  return -1 if num_inversions % 2 else 1
class Spin8(object):
  r"""Container class for Spin(8) tensor invariants.

  An instance essentially is just a namespace for constants.
  All attributes are to be treated read-only by the user.

  Attributes:
    gamma_vsc: The spin(8) gamma^i_{alpha,\dot\beta} gamma matrices, indexed by
      vector, spinor, co-spinor index.
    gamma_vvss: The spin(8) gamma^{ij}_{\alpha\beta}, indexed
      [i, j, \alpha, \beta].
    gamma_vvcc: The spin(8) gamma^{ij}_{\dot\alpha\dot\beta}, indexed
      [i, j, \dot\alpha, \dot\beta].
    gamma_sscc: The spin(8) gamma_{\alpha\beta\dot\delta\dot\epsilon}, indexed
      [\alpha, \beta, \dot\delta, \dot\epsilon].
    gamma_vvvvss: The spin(8) gamma^{ijkl}_{\alpha\beta}, indexed
      [i, j, k, l, \alpha, \beta].
    gamma_vvvvcc: The spin(8) gamma^{ijkl}_{\dot\alpha\dot\beta}, indexed
      [i, j, k, l, \dot\alpha, \dot\beta].
  """

  def __init__(self):
    self.gamma_vsc = gamma_vsc = self._get_gamma_vsc()
    #
    # The gamma^{ab}_{alpha beta} tensor that translates between antisymmetric
    # matrices over vectors [ij] and antisymmetric matrices over spinors [sS].
    self.gamma_vvss = 0.5 * (
        numpy.einsum('isc,jSc->ijsS', gamma_vsc, gamma_vsc) -
        numpy.einsum('jsc,iSc->ijsS', gamma_vsc, gamma_vsc))
    # The gamma^{ab}_{alpha* beta*} tensor that translates between antisymmetric
    # matrices over vectors [ij] and antisymmetric matrices over cospinors [cC].
    self.gamma_vvcc = 0.5 * (
        numpy.einsum('isc,jsC->ijcC', gamma_vsc, gamma_vsc) -
        numpy.einsum('jsc,isC->ijcC', gamma_vsc, gamma_vsc))
    # The gamma^{alpha beta}_{alpha* beta*} are not needed for the supergravity
    # computation per se, but we use these objects to determine the Spin(8)
    # rotation that brings E7 / SU(8)
    # scalar manifold coordinates into uniquely defined normal form.
    self.gamma_sscc = 0.5 * (
        numpy.einsum('vsc,vSC->sScC', gamma_vsc, gamma_vsc) -
        numpy.einsum('vsc,vSC->SscC', gamma_vsc, gamma_vsc))
    #
    # The gamma^{ijkl}_{alpha beta} tensor that translates between antisymmetric
    # 4-forms [ijkl] and symmetric traceless matrices over the spinors (sS),
    # as well as its co-spinor (cC) cousin.
    g_ijsS = numpy.einsum('isc,jSc->ijsS', self.gamma_vsc, self.gamma_vsc)
    g_ijcC = numpy.einsum('isc,jsC->ijcC', self.gamma_vsc, self.gamma_vsc)
    g_ijklsS = numpy.einsum('ijst,kltS->ijklsS', g_ijsS, g_ijsS)
    g_ijklcC = numpy.einsum('ijcd,kldC->ijklcC', g_ijcC, g_ijcC)
    gamma_vvvvss = numpy.zeros([8] * 6)
    gamma_vvvvcc = numpy.zeros([8] * 6)
    # Antisymmetrize g^{ij} g^{kl} over the four vector indices; the division
    # by 4! = 24 below turns the signed sum over all 24 permutations into an
    # average.
    for perm in itertools.permutations(range(4)):
      perm_ijkl = ''.join('ijkl'[p] for p in perm)
      sign = permutation_sign(perm)
      gamma_vvvvss += sign * numpy.einsum(perm_ijkl + 'sS->ijklsS', g_ijklsS)
      gamma_vvvvcc += sign * numpy.einsum(perm_ijkl + 'cC->ijklcC', g_ijklcC)
    self.gamma_vvvvss = gamma_vvvvss / 24.0
    self.gamma_vvvvcc = gamma_vvvvcc / 24.0

  def _get_gamma_vsc(self):
    """Computes Spin(8) gamma-matrices.

    Returns:
      An [8, 8, 8]-array, indexed [vector, spinor, cospinor], with 64 entries
      of magnitude 1; all other entries are zero.
    """
    # Conventions for Spin(8) gamma matrices match Green, Schwarz, Witten,
    # but with indices shifted down by 1 to the range [0 .. 7].
    # Each whitespace-separated token 'ijk+' / 'ijk-' encodes a single entry
    # gamma^i_{j \dot k} = +1 or -1.
    entries = (
        "007+ 016- 025- 034+ 043- 052+ 061+ 070- "
        "101+ 110- 123- 132+ 145+ 154- 167- 176+ "
        "204+ 215- 226+ 237- 240- 251+ 262- 273+ "
        "302+ 313+ 320- 331- 346- 357- 364+ 375+ "
        "403+ 412- 421+ 430- 447+ 456- 465+ 474- "
        "505+ 514+ 527+ 536+ 541- 550- 563- 572- "
        "606+ 617+ 624- 635- 642+ 653+ 660- 671- "
        "700+ 711+ 722+ 733+ 744+ 755+ 766+ 777+")
    ret = numpy.zeros([8, 8, 8])
    for ijkc in entries.split():
      indices = tuple(int(m) for m in ijkc[:-1])
      sign = 1 if ijkc[-1] == '+' else -1
      ret[indices] = sign
    return ret
class SU8(object):
  """Container class for su(8) tensor invariants.
  An instance essentially is just a namespace for constants.
  All attributes are to be treated read-only by the user.
  Attributes:
    index56_and_coeff_by_ijk: dict mapping triplet (i, j, k) of three different
      su(8) indices to a pair of a 56-index and a sign factor (+1 or -1).
    ij_map: Lexicographically sorted list of pairs of su(8) indices
      (i, j) with i < j.
    m_35_8_8: [35, 8, 8]-array mapping a 35-index to a symmetric traceless
      matrix. Each such matrix has two entries of magnitude 1.
      The first 7 (8, 8) matrices are the lexicographically ordered matrices
      of the form diag(0, ..., 0, 1, -1, 0, ..., 0). The remaining 28 have
      a 1 in (i, j) and (j, i)-position and are zero otherwise.
    m_28_8_8: [28, 8, 8]-array mapping a 28-index (a position in ij_map) to
      an antisymmetric matrix with a +1 in (i, j) and a -1 in (j, i) position.
    m_56_8_8_8: [56, 8, 8, 8]-array mapping a 56-index to an antisymmetric
      [8, 8, 8]-array (or vice versa).
    eps_56_56_8_8: epsilon^{ijklmnpq} with index groups (ijk) and (lmn) mapped
      to a 56-index.
    m_action_56_56_8_8: [56, 56, 8, 8]-array describing how an su(8) generator,
      given as an 8x8 matrix, acts on the 56-dimensional [ijk]-basis.
    t_aij: su(8) generators (T_a)^j{}_i = t_aij[a, i, j].
  """
  def __init__(self):
    # Translates between adjoint indices 'a' and (vector) x (vector)
    # indices 'ij'.
    ij_map = [(i, j) for i in range(8) for j in range(8) if i < j]
    #
    # We also need the mapping between 8 x 8 and 35 representations, using
    # common conventions for a basis of the 35-representation, and likewise
    # for 8 x 8 and 28.
    # These are used in various places, often with real-only quantities,
    # so we use dtype=float here, even though they also are used in complex
    # context.
    m_35_8_8 = numpy.zeros([35, 8, 8], dtype=numpy.float64)
    m_28_8_8 = numpy.zeros([28, 8, 8], dtype=numpy.float64)
    for n in range(7):
      m_35_8_8[n, n, n] = +1.0
      m_35_8_8[n, n + 1, n + 1] = -1.0
    for a, (m, n) in enumerate(ij_map):
      m_35_8_8[a + 7, m, n] = m_35_8_8[a + 7, n, m] = 1.0
      m_28_8_8[a, m, n] = 1.0
      m_28_8_8[a, n, m] = -1.0
    #
    # The su8 'Generator Matrices'.
    # The first 35 are i * (symmetric traceless); the remaining 28 are the
    # real antisymmetric generators, one per ij_map entry.
    t_aij = numpy.zeros([63, 8, 8], dtype=numpy.complex128)
    t_aij[:35, :, :] = 1.0j * m_35_8_8
    for a, (i, j) in enumerate(ij_map):
      t_aij[a + 35, i, j] = 1.0
      t_aij[a + 35, j, i] = -1.0
    #
    # We also need to be able to map [ijk] to a linear 56-index.
    # Our choice of signs for the ijk-basis is essentially arbitrary here.
    # We lexicographically order triplets and attribute a + sign to
    # every first occurrence of a particular combination.
    index56_and_coeff_by_ijk = {}
    ijk_by_index56 = {}
    num_ijk = 0
    for i in range(8):
      for j in range(i + 1, 8):
        for k in range(j + 1, 8):
          ijk = (i, j, k)
          index56 = num_ijk
          ijk_by_index56[index56] = ijk
          num_ijk += 1
          # Register all six orderings of (i, j, k): the three cyclic
          # rotations p are even (+), and composing with the swap q of the
          # first two entries flips the sign.
          for p in ((0, 1, 2), (1, 2, 0), (2, 0, 1)):
            for q_sign, q in ((1, (0, 1, 2)), (-1, (1, 0, 2))):
              pq_ijk = (ijk[p[q[0]]], ijk[p[q[1]]], ijk[p[q[2]]])
              index56_and_coeff_by_ijk[pq_ijk] = (index56, q_sign)
    # Let us also provide this as an (actually rather sparse) tensor.
    # We will only use this very occasionally.
    m_56_8_8_8 = numpy.zeros([56, 8, 8, 8])
    for ijk, (i56, sign) in index56_and_coeff_by_ijk.items():
      m_56_8_8_8[i56, ijk[0], ijk[1], ijk[2]] = sign
    # Supergravity has the "fermion mass" tensor
    # A3^ijk,lmn = (sqrt(2) / 144) * eps^ijkpqr[lm A2^n]_pqr
    # This structure suggests that it is numerically convenient
    # to have epsilon as a 56 x 56 x 8 x 8 tensor.
    # NOTE: this loop visits all 8! = 40320 permutations; it runs once,
    # at instance-construction time.
    eps_56_56_8_8 = numpy.zeros([56, 56, 8, 8])
    for p8 in itertools.permutations(range(8)):
      sign8 = permutation_sign(p8)
      i56, coeff_i56 = index56_and_coeff_by_ijk[p8[:3]]
      j56, coeff_j56 = index56_and_coeff_by_ijk[p8[3: 6]]
      eps_56_56_8_8[i56, j56, p8[6], p8[7]] = sign8 * coeff_i56 * coeff_j56
    # Also, we need to know how su(8) elements given as 8x8 matrices act on this
    # 56x56-basis.
    m_action_56_56_8_8 = numpy.zeros([56, 56, 8, 8])
    for index56_left, ijk_left in ijk_by_index56.items():
      for index56_right, ijk_right in ijk_by_index56.items():
        common_indices = set(ijk_left) & set(ijk_right)
        if len(common_indices) != 2:
          continue
        # Two indices are the same, one gets transformed by the generator.
        transforming_index_left = [idx for idx in ijk_left
                                   if idx not in common_indices][0]
        transforming_index_right = [idx for idx in ijk_right
                                    if idx not in common_indices][0]
        transformed_ijk_left = [
            transforming_index_left if idx == transforming_index_right else idx
            for idx in ijk_right]
        # Sign from re-sorting the transformed triplet back into the
        # lexicographic order of ijk_left.
        sign = permutation_sign([ijk_left.index(i)
                                 for i in transformed_ijk_left])
        m_action_56_56_8_8[
            index56_left, index56_right,
            transforming_index_left, transforming_index_right] = sign
    #
    self.index56_and_coeff_by_ijk = index56_and_coeff_by_ijk
    self.ij_map = ij_map
    self.m_35_8_8 = m_35_8_8
    self.m_28_8_8 = m_28_8_8
    self.m_56_8_8_8 = m_56_8_8_8
    self.eps_56_56_8_8 = eps_56_56_8_8
    self.m_action_56_56_8_8 = m_action_56_56_8_8
    self.t_aij = t_aij
class E7(object):
  """Container class for e7 tensor invariants.

  An instance essentially is just a namespace for constants.
  All attributes are to be treated read-only by the user.

  Due to triality, we have freedom which 8-dimensional spin(8)
  representation to call the 'vector', 'spinor', and 'co-spinor'
  representation. For convenience, we here call the 8-dimensional
  representation whose symmetric product with itself provides
  compact directions in su(8) the 'vector' representation, and the other
  two representations the 'spinor' and 'co-spinor' representation, as this gives
  a mostly-symmetric role to spinors and co-spinors. These conventions deviate
  from some of the supergravity literature, but are convenient here.

  Attributes:
    t_a_ij_kl: [133, 56, 56]-array of e7(+7) generators
      (T_a)^{kl}{}_{ij} = t_aij[a, ij, kl] for the 56-dimensional fundamental
      irreducible representation. The 56-indices split into two pairs of
      28-indices that are antisymmetric index-pairs of the 8-dimensional su(8)
      representation. The first 70 of the 133 generators are the 35+35
      noncompact directions corresponding to the scalars of SO(8) supergravity.
      The subsequent 63 form the maximal compact subalgebra su(8).
    inv_gramian70: [70, 70]-array. Inverse inner product matrix of the first
      70 basis vectors. All entries in this matrix are exact (as integer
      multiples of 1/8) despite being float. This property can be relied on
      for high-accuracy computations.
    v70_as_sc8x8: [70, 2, 8, 8]-array that decomposes an e7 generator in
      e7/su(8) into two sets of symmetric traceless 8x8 matrices,
      (\\alpha, \\beta) and (\\dot\\alpha, \\dot\\beta), in this order.
    v70_from_sc8x8: Implements the inverse transform to v70_as_sc8x8.
    spin8_action_on_v70: [28, 70, 70]-array. For each spin(8) element
      in the 'vector' [i, j]-basis, provides the [70, 70] generator matrix
      when this generator acts on e7(7) / su(8). CAUTION: the output vector
      space's basis is dual (w.r.t. Killing form) to the input vector space's.
      This is useful for determining so(8)-invariant directions, but for
      computing mass matrices, spin8_action_on_v70o is much more appropriate.
    v70_from_v70o: [70, 70]-array that maps orthonormal-basis-70-vectors to
      'common basis' 70-vectors.
    v70o_from_v70: The inverse mapping of the above.
    spin8_action_on_v70o: [28, 70, 70]-array. Like spin8_action_on_v70,
      but with 70-vectors in the orthonormal basis.
  """

  def __init__(self, spin8, su8):
    """Initializes the e7 invariants from Spin8 and SU8 instances."""
    self._spin8 = spin8
    self._su8 = su8
    ij_map = su8.ij_map
    t_a_ij_kl = numpy.zeros([133, 56, 56], dtype=numpy.complex128)
    # numpy.einsum() does not compute intermediate tensors in a smart way,
    # hence we manually split 3+-tensor contractions for better efficiency.
    # Each heavy contraction is computed exactly once and reused for both
    # off-diagonal 28x28 blocks. (The original code recomputed the first two
    # block-assignments 35 times inside a loop whose index was never used.)
    m35s_contracted = numpy.einsum(
        'qIkl,Kkl->qIK',
        numpy.einsum(
            'ijklq,Iij->qIkl',
            numpy.einsum('ijklsS,qsS->ijklq', spin8.gamma_vvvvss,
                         su8.m_35_8_8), su8.m_28_8_8), su8.m_28_8_8)
    m35c_contracted = numpy.einsum(
        'qIkl,Kkl->qIK',
        numpy.einsum(
            'ijklq,Iij->qIkl',
            numpy.einsum('ijklcC,qcC->ijklq', spin8.gamma_vvvvcc,
                         su8.m_35_8_8), su8.m_28_8_8), su8.m_28_8_8)
    t_a_ij_kl[:35, 28:, :28] = (1 / 8.0) * m35s_contracted
    t_a_ij_kl[:35, :28, 28:] = (1 / 8.0) * m35s_contracted
    t_a_ij_kl[35:70, 28:, :28] = (1.0j / 8.0) * m35c_contracted
    t_a_ij_kl[35:70, :28, 28:] = (-1.0j / 8.0) * m35c_contracted
    #
    # We need to find the action of the su(8) algebra on the
    # 28-representation.
    su8_28 = 2 * (
        numpy.einsum(
            'aIjn,Jjn->aIJ',
            numpy.einsum(
                'aimjn,Iim->aIjn',
                numpy.einsum('aij,mn->aimjn', su8.t_aij,
                             numpy.eye(8, dtype=numpy.complex128)),
                su8.m_28_8_8), su8.m_28_8_8))
    t_a_ij_kl[70:, :28, :28] = su8_28
    t_a_ij_kl[70:, 28:, 28:] = su8_28.conjugate()
    self.t_a_ij_kl = t_a_ij_kl
    m_35_8_8 = su8.m_35_8_8.real
    inv_inner_products = numpy.linalg.inv(
        numpy.einsum('aij,bij->ab', m_35_8_8, m_35_8_8))
    # Note that, due to the way our conventions work, the entries of this
    # matrix are all multiples of 1/8.0 = 0.125, which is an
    # exactly-representable floating point number. So, we are good to use this
    # even in conjunction with high-accuracy numerics(!). However,
    # we first have to 'sanitize away' numerical noise.
    raw_inv_gramian70 = numpy.einsum('AB,ab->AaBb', numpy.eye(2),
                                     inv_inner_products).reshape(70, 70)
    self.inv_gramian70 = numpy.round(raw_inv_gramian70 * 8) / 8
    assert numpy.allclose(raw_inv_gramian70, self.inv_gramian70)
    # Assert that we only see 'good exact' numbers that are multiples of 1/8
    # with nonnegative values up to 16/8 = 2.
    assert set(abs(x * 8)
               for x in self.inv_gramian70.reshape(-1)) <= set(range(17))
    self.v70_as_sc8x8 = numpy.einsum('sc,xab->sxcab',
                                     numpy.eye(2),
                                     m_35_8_8).reshape(70, 2, 8, 8)
    self.v70_from_sc8x8 = numpy.einsum('vsab,vw->wsab',
                                       self.v70_as_sc8x8,
                                       self.inv_gramian70)
    # We also want to directly look at the action of the 28 Spin(8) generators
    # on the 70 scalars, both to determine residual gauge groups
    # (which we could also do in a 56-representation of E7),
    # and also to look for residual discrete subgroups of SO(8).
    spin8_action_on_s = 0.5 * numpy.einsum(
        'Aij,ijab->Aab', su8.m_28_8_8, spin8.gamma_vvss)
    spin8_action_on_c = 0.5 * numpy.einsum(
        'Aij,ijab->Aab', su8.m_28_8_8, spin8.gamma_vvcc)
    spin8_action_on_35s = (
        # [A,v,m,n]-array showing how acting with spin(8) generator A
        # changes a 35s element indexed by v, but with the change
        # expressed as a symmetric 8x8 matrix indexed (m, n).
        #
        # This could be simplified, exploiting symmetry, at the cost
        # of making the expression slightly less readable.
        numpy.einsum('Aab,van->Avbn', spin8_action_on_s,
                     self.v70_as_sc8x8[:35, 0, :, :]) +
        numpy.einsum('Aab,vma->Avmb', spin8_action_on_s,
                     self.v70_as_sc8x8[:35, 0, :, :]))
    spin8_action_on_35c = (
        # This could be simplified, exploiting symmetry, at the cost
        # of making the expression slightly less readable.
        numpy.einsum('Aab,van->Avbn', spin8_action_on_c,
                     self.v70_as_sc8x8[35:, 1, :, :]) +
        numpy.einsum('Aab,vma->Avmb', spin8_action_on_c,
                     self.v70_as_sc8x8[35:, 1, :, :]))
    spin8_action_on_35s35c = numpy.stack([spin8_action_on_35s,
                                          spin8_action_on_35c],
                                         axis=1)
    self.spin8_action_on_v70 = numpy.einsum(
        'Asvab,wsab->Asvw',
        spin8_action_on_35s35c,
        self.v70_from_sc8x8).reshape(28, 70, 70)
    #
    # We also need an orthonormal basis for the 70 scalars.
    # While we can find mass-eigenstates with the non-orthonormal basis
    # above (exercising a bit of care), these would be the eigenvalues of
    # a non-hermitean matrix operator. We do need orthonormal bases for
    # the mass-eigenstate subspaces so that subsequent automatic numerical
    # identification of charges can work (for which the code assumes that
    # charge-operators are represented as hermitean matrices, on which it
    # uses scipy.linalg.eigh() to produce orthonormal eigenbases).
    # We do not have to pay attention to define the mapping between these
    # 70-bases in a particularly elegant way.
    #
    # Also, it is important for high-accuracy calculations to have
    # exactly-representable matrix entries, while we can absorb an overall
    # (not-exactly-representable-at-finite-accuracy)
    # factor into the definition of the inner product.
    v70_from_v70o = numpy.zeros([70, 70])
    # The 35 four-index combinations containing index 0 label the self-dual
    # (resp. anti-self-dual) 4-form basis directions.
    for num_ijkl, ijkl in enumerate(
        ijkl for ijkl in itertools.combinations(range(8), 4) if 0 in ijkl):
      v35a = numpy.einsum('vsab,s,ab->v',
                          self.v70_from_sc8x8,
                          numpy.array([1.0, 0.0]),
                          spin8.gamma_vvvvss[
                              ijkl[0], ijkl[1], ijkl[2], ijkl[3], :, :])
      v35b = numpy.einsum('vsab,s,ab->v',
                          self.v70_from_sc8x8,
                          numpy.array([0.0, 1.0]),
                          spin8.gamma_vvvvcc[
                              ijkl[0], ijkl[1], ijkl[2], ijkl[3], :, :])
      v70_from_v70o[:, num_ijkl] = 0.5 * v35a
      v70_from_v70o[:, 35 + num_ijkl] = 0.5 * v35b
    assert numpy.allclose(
        numpy.einsum('Vv,Wv->VW', v70_from_v70o, v70_from_v70o),
        2 * self.inv_gramian70)
    self.v70_from_v70o = v70_from_v70o
    self.v70o_from_v70 = numpy.linalg.inv(v70_from_v70o)
    self.spin8_action_on_v70o = numpy.einsum(
        'aVw,Ww->aVW',
        numpy.einsum('avw,vV->aVw',
                     self.spin8_action_on_v70,
                     self.v70_from_v70o),
        self.v70o_from_v70)

  def v70_from_35s35c(self, m35s, m35c):
    """Computes a v70-vector from 35s and 35c matrices."""
    return numpy.einsum('vsab,sab->v',
                        self.v70_from_sc8x8,
                        numpy.stack([m35s, m35c]))

  def v70_as_35s35c(self, v70):
    """Decomposes a v70-vector into its 35s and 35c 8x8 matrices."""
    m = numpy.einsum('v,vsab->sab', v70, self.v70_as_sc8x8)
    return m[0], m[1]
# Module-level singleton instances of the invariant-tensor containers.
# Constructing these runs nontrivial numerical work at import time
# (SU8.__init__ in particular loops over all 8! permutations).
# E7 is built from the Spin8 and SU8 instances, so the order matters.
spin8 = Spin8()
su8 = SU8()
e7 = E7(spin8, su8)
| [
"numpy.stack",
"numpy.allclose",
"numpy.zeros",
"numpy.einsum",
"numpy.linalg.inv",
"numpy.array",
"numpy.eye",
"numpy.round"
] | [((3701, 3762), 'numpy.einsum', 'numpy.einsum', (['"""isc,jSc->ijsS"""', 'self.gamma_vsc', 'self.gamma_vsc'], {}), "('isc,jSc->ijsS', self.gamma_vsc, self.gamma_vsc)\n", (3713, 3762), False, 'import numpy\n'), ((3776, 3837), 'numpy.einsum', 'numpy.einsum', (['"""isc,jsC->ijcC"""', 'self.gamma_vsc', 'self.gamma_vsc'], {}), "('isc,jsC->ijcC', self.gamma_vsc, self.gamma_vsc)\n", (3788, 3837), False, 'import numpy\n'), ((3853, 3902), 'numpy.einsum', 'numpy.einsum', (['"""ijst,kltS->ijklsS"""', 'g_ijsS', 'g_ijsS'], {}), "('ijst,kltS->ijklsS', g_ijsS, g_ijsS)\n", (3865, 3902), False, 'import numpy\n'), ((3918, 3967), 'numpy.einsum', 'numpy.einsum', (['"""ijcd,kldC->ijklcC"""', 'g_ijcC', 'g_ijcC'], {}), "('ijcd,kldC->ijklcC', g_ijcC, g_ijcC)\n", (3930, 3967), False, 'import numpy\n'), ((3987, 4007), 'numpy.zeros', 'numpy.zeros', (['([8] * 6)'], {}), '([8] * 6)\n', (3998, 4007), False, 'import numpy\n'), ((4027, 4047), 'numpy.zeros', 'numpy.zeros', (['([8] * 6)'], {}), '([8] * 6)\n', (4038, 4047), False, 'import numpy\n'), ((5075, 5097), 'numpy.zeros', 'numpy.zeros', (['[8, 8, 8]'], {}), '([8, 8, 8])\n', (5086, 5097), False, 'import numpy\n'), ((6889, 6933), 'numpy.zeros', 'numpy.zeros', (['[35, 8, 8]'], {'dtype': 'numpy.float64'}), '([35, 8, 8], dtype=numpy.float64)\n', (6900, 6933), False, 'import numpy\n'), ((6949, 6993), 'numpy.zeros', 'numpy.zeros', (['[28, 8, 8]'], {'dtype': 'numpy.float64'}), '([28, 8, 8], dtype=numpy.float64)\n', (6960, 6993), False, 'import numpy\n'), ((7300, 7347), 'numpy.zeros', 'numpy.zeros', (['[63, 8, 8]'], {'dtype': 'numpy.complex128'}), '([63, 8, 8], dtype=numpy.complex128)\n', (7311, 7347), False, 'import numpy\n'), ((8429, 8455), 'numpy.zeros', 'numpy.zeros', (['[56, 8, 8, 8]'], {}), '([56, 8, 8, 8])\n', (8440, 8455), False, 'import numpy\n'), ((8816, 8843), 'numpy.zeros', 'numpy.zeros', (['[56, 56, 8, 8]'], {}), '([56, 56, 8, 8])\n', (8827, 8843), False, 'import numpy\n'), ((9242, 9269), 'numpy.zeros', 'numpy.zeros', (['[56, 56, 8, 
8]'], {}), '([56, 56, 8, 8])\n', (9253, 9269), False, 'import numpy\n'), ((13197, 13247), 'numpy.zeros', 'numpy.zeros', (['[133, 56, 56]'], {'dtype': 'numpy.complex128'}), '([133, 56, 56], dtype=numpy.complex128)\n', (13208, 13247), False, 'import numpy\n'), ((21149, 21200), 'numpy.einsum', 'numpy.einsum', (['"""v,vsab->sab"""', 'v70', 'self.v70_as_sc8x8'], {}), "('v,vsab->sab', v70, self.v70_as_sc8x8)\n", (21161, 21200), False, 'import numpy\n'), ((15951, 16004), 'numpy.allclose', 'numpy.allclose', (['raw_inv_gramian70', 'self.inv_gramian70'], {}), '(raw_inv_gramian70, self.inv_gramian70)\n', (15965, 16004), False, 'import numpy\n'), ((16335, 16392), 'numpy.einsum', 'numpy.einsum', (['"""Aa,aij->Aij"""', 'inv_inner_products', 'm_35_8_8'], {}), "('Aa,aij->Aij', inv_inner_products, m_35_8_8)\n", (16347, 16392), False, 'import numpy\n'), ((16637, 16705), 'numpy.einsum', 'numpy.einsum', (['"""vsab,vw->wsab"""', 'self.v70_as_sc8x8', 'self.inv_gramian70'], {}), "('vsab,vw->wsab', self.v70_as_sc8x8, self.inv_gramian70)\n", (16649, 16705), False, 'import numpy\n'), ((18313, 18376), 'numpy.stack', 'numpy.stack', (['[spin8_action_on_35s, spin8_action_on_35c]'], {'axis': '(1)'}), '([spin8_action_on_35s, spin8_action_on_35c], axis=1)\n', (18324, 18376), False, 'import numpy\n'), ((19630, 19651), 'numpy.zeros', 'numpy.zeros', (['[70, 70]'], {}), '([70, 70])\n', (19641, 19651), False, 'import numpy\n'), ((20606, 20637), 'numpy.linalg.inv', 'numpy.linalg.inv', (['v70_from_v70o'], {}), '(v70_from_v70o)\n', (20622, 20637), False, 'import numpy\n'), ((21081, 21106), 'numpy.stack', 'numpy.stack', (['[m35s, m35c]'], {}), '([m35s, m35c])\n', (21092, 21106), False, 'import numpy\n'), ((2634, 2685), 'numpy.einsum', 'numpy.einsum', (['"""isc,jSc->ijsS"""', 'gamma_vsc', 'gamma_vsc'], {}), "('isc,jSc->ijsS', gamma_vsc, gamma_vsc)\n", (2646, 2685), False, 'import numpy\n'), ((2696, 2747), 'numpy.einsum', 'numpy.einsum', (['"""jsc,iSc->ijsS"""', 'gamma_vsc', 'gamma_vsc'], {}), 
"('jsc,iSc->ijsS', gamma_vsc, gamma_vsc)\n", (2708, 2747), False, 'import numpy\n'), ((2949, 3000), 'numpy.einsum', 'numpy.einsum', (['"""isc,jsC->ijcC"""', 'gamma_vsc', 'gamma_vsc'], {}), "('isc,jsC->ijcC', gamma_vsc, gamma_vsc)\n", (2961, 3000), False, 'import numpy\n'), ((3011, 3062), 'numpy.einsum', 'numpy.einsum', (['"""jsc,isC->ijcC"""', 'gamma_vsc', 'gamma_vsc'], {}), "('jsc,isC->ijcC', gamma_vsc, gamma_vsc)\n", (3023, 3062), False, 'import numpy\n'), ((3365, 3416), 'numpy.einsum', 'numpy.einsum', (['"""vsc,vSC->sScC"""', 'gamma_vsc', 'gamma_vsc'], {}), "('vsc,vSC->sScC', gamma_vsc, gamma_vsc)\n", (3377, 3416), False, 'import numpy\n'), ((3427, 3478), 'numpy.einsum', 'numpy.einsum', (['"""vsc,vSC->SscC"""', 'gamma_vsc', 'gamma_vsc'], {}), "('vsc,vSC->SscC', gamma_vsc, gamma_vsc)\n", (3439, 3478), False, 'import numpy\n'), ((4215, 4263), 'numpy.einsum', 'numpy.einsum', (["(perm_ijkl + 'sS->ijklsS')", 'g_ijklsS'], {}), "(perm_ijkl + 'sS->ijklsS', g_ijklsS)\n", (4227, 4263), False, 'import numpy\n'), ((4293, 4341), 'numpy.einsum', 'numpy.einsum', (["(perm_ijkl + 'cC->ijklcC')", 'g_ijklcC'], {}), "(perm_ijkl + 'cC->ijklcC', g_ijklcC)\n", (4305, 4341), False, 'import numpy\n'), ((15333, 15380), 'numpy.einsum', 'numpy.einsum', (['"""aij,bij->ab"""', 'm_35_8_8', 'm_35_8_8'], {}), "('aij,bij->ab', m_35_8_8, m_35_8_8)\n", (15345, 15380), False, 'import numpy\n'), ((15899, 15933), 'numpy.round', 'numpy.round', (['(raw_inv_gramian70 * 8)'], {}), '(raw_inv_gramian70 * 8)\n', (15910, 15933), False, 'import numpy\n'), ((17098, 17159), 'numpy.einsum', 'numpy.einsum', (['"""Aij,ijab->Aab"""', 'su8.m_28_8_8', 'spin8.gamma_vvss'], {}), "('Aij,ijab->Aab', su8.m_28_8_8, spin8.gamma_vvss)\n", (17110, 17159), False, 'import numpy\n'), ((17203, 17264), 'numpy.einsum', 'numpy.einsum', (['"""Aij,ijab->Aab"""', 'su8.m_28_8_8', 'spin8.gamma_vvcc'], {}), "('Aij,ijab->Aab', su8.m_28_8_8, spin8.gamma_vvcc)\n", (17215, 17264), False, 'import numpy\n'), ((17664, 17749), 'numpy.einsum', 
'numpy.einsum', (['"""Aab,van->Avbn"""', 'spin8_action_on_s', 'self.v70_as_sc8x8[:35, 0, :, :]'], {}), "('Aab,van->Avbn', spin8_action_on_s, self.v70_as_sc8x8[:35, 0,\n :, :])\n", (17676, 17749), False, 'import numpy\n'), ((17781, 17866), 'numpy.einsum', 'numpy.einsum', (['"""Aab,vma->Avmb"""', 'spin8_action_on_s', 'self.v70_as_sc8x8[:35, 0, :, :]'], {}), "('Aab,vma->Avmb', spin8_action_on_s, self.v70_as_sc8x8[:35, 0,\n :, :])\n", (17793, 17866), False, 'import numpy\n'), ((18059, 18144), 'numpy.einsum', 'numpy.einsum', (['"""Aab,van->Avbn"""', 'spin8_action_on_c', 'self.v70_as_sc8x8[35:, 1, :, :]'], {}), "('Aab,van->Avbn', spin8_action_on_c, self.v70_as_sc8x8[35:, 1,\n :, :])\n", (18071, 18144), False, 'import numpy\n'), ((18176, 18261), 'numpy.einsum', 'numpy.einsum', (['"""Aab,vma->Avmb"""', 'spin8_action_on_c', 'self.v70_as_sc8x8[35:, 1, :, :]'], {}), "('Aab,vma->Avmb', spin8_action_on_c, self.v70_as_sc8x8[35:, 1,\n :, :])\n", (18188, 18261), False, 'import numpy\n'), ((20447, 20502), 'numpy.einsum', 'numpy.einsum', (['"""Vv,Wv->VW"""', 'v70_from_v70o', 'v70_from_v70o'], {}), "('Vv,Wv->VW', v70_from_v70o, v70_from_v70o)\n", (20459, 20502), False, 'import numpy\n'), ((20721, 20794), 'numpy.einsum', 'numpy.einsum', (['"""avw,vV->aVw"""', 'self.spin8_action_on_v70', 'self.v70_from_v70o'], {}), "('avw,vV->aVw', self.spin8_action_on_v70, self.v70_from_v70o)\n", (20733, 20794), False, 'import numpy\n'), ((18497, 18574), 'numpy.einsum', 'numpy.einsum', (['"""Asvab,wsab->Asvw"""', 'spin8_action_on_35s35c', 'self.v70_from_sc8x8'], {}), "('Asvab,wsab->Asvw', spin8_action_on_35s35c, self.v70_from_sc8x8)\n", (18509, 18574), False, 'import numpy\n'), ((19890, 19913), 'numpy.array', 'numpy.array', (['[1.0, 0.0]'], {}), '([1.0, 0.0])\n', (19901, 19913), False, 'import numpy\n'), ((20159, 20182), 'numpy.array', 'numpy.array', (['[0.0, 1.0]'], {}), '([0.0, 1.0])\n', (20170, 20182), False, 'import numpy\n'), ((13608, 13675), 'numpy.einsum', 'numpy.einsum', 
(['"""ijklsS,qsS->ijklq"""', 'spin8.gamma_vvvvss', 'su8.m_35_8_8'], {}), "('ijklsS,qsS->ijklq', spin8.gamma_vvvvss, su8.m_35_8_8)\n", (13620, 13675), False, 'import numpy\n'), ((13923, 13990), 'numpy.einsum', 'numpy.einsum', (['"""ijklsS,qsS->ijklq"""', 'spin8.gamma_vvvvss', 'su8.m_35_8_8'], {}), "('ijklsS,qsS->ijklq', spin8.gamma_vvvvss, su8.m_35_8_8)\n", (13935, 13990), False, 'import numpy\n'), ((14251, 14318), 'numpy.einsum', 'numpy.einsum', (['"""ijklcC,qcC->ijklq"""', 'spin8.gamma_vvvvcc', 'su8.m_35_8_8'], {}), "('ijklcC,qcC->ijklq', spin8.gamma_vvvvcc, su8.m_35_8_8)\n", (14263, 14318), False, 'import numpy\n'), ((14572, 14639), 'numpy.einsum', 'numpy.einsum', (['"""ijklcC,qcC->ijklq"""', 'spin8.gamma_vvvvcc', 'su8.m_35_8_8'], {}), "('ijklcC,qcC->ijklq', spin8.gamma_vvvvcc, su8.m_35_8_8)\n", (14584, 14639), False, 'import numpy\n'), ((15783, 15795), 'numpy.eye', 'numpy.eye', (['(2)'], {}), '(2)\n', (15792, 15795), False, 'import numpy\n'), ((16525, 16537), 'numpy.eye', 'numpy.eye', (['(2)'], {}), '(2)\n', (16534, 16537), False, 'import numpy\n'), ((15031, 15067), 'numpy.eye', 'numpy.eye', (['(8)'], {'dtype': 'numpy.complex128'}), '(8, dtype=numpy.complex128)\n', (15040, 15067), False, 'import numpy\n')] |
import os
import numpy as np
import tensorflow as tf
import cv2
import matplotlib.pyplot as plt
from tqdm import tqdm
from lpsrgan import LPSRGAN
import load
# Training hyper-parameters.
learning_rate = 1e-3
batch_size = 16
# Checkpoint of the pre-trained VGG-19 used for the perceptual loss
# (restored in train()); path is relative to this script — confirm layout.
vgg_model = '../vgg19/backup/latest'
def train():
    """Train the LPSRGAN model.

    Builds the graph, restores the pre-trained VGG-19 weights (and any
    existing LPSRGAN checkpoint in ``backup/``), then alternates
    generator/discriminator updates over mini-batches, validating and
    checkpointing once per epoch. Runs until interrupted.
    """
    x = tf.placeholder(tf.float32, [None, 96, 96, 3])
    is_training = tf.placeholder(tf.bool, [])
    model = LPSRGAN(x, is_training, batch_size)
    sess = tf.Session()
    with tf.variable_scope('lpsrgan'):
        global_step = tf.Variable(0, name='global_step', trainable=False)
    opt = tf.train.AdamOptimizer(learning_rate=learning_rate)
    g_train_op = opt.minimize(
        model.g_loss, global_step=global_step, var_list=model.g_variables)
    d_train_op = opt.minimize(
        model.d_loss, global_step=global_step, var_list=model.d_variables)
    init = tf.global_variables_initializer()
    sess.run(init)
    # Restore the pre-trained VGG-19 network
    var = tf.global_variables()
    vgg_var = [var_ for var_ in var if "vgg19" in var_.name]
    saver = tf.train.Saver(vgg_var)
    saver.restore(sess, vgg_model)
    # Create the checkpoint saver ONCE.  The previous code built a new
    # tf.train.Saver() inside the training loop every epoch, which keeps
    # adding save/restore ops to the graph (unbounded graph growth).
    ckpt_saver = tf.train.Saver()
    # Restore the LPSRGAN network if a checkpoint already exists
    if tf.train.get_checkpoint_state('backup/'):
        ckpt_saver.restore(sess, 'backup/latest')
    # Load the data
    x_train, x_test = load.load()
    # Train the LPSRGAN model
    n_iter = int(len(x_train) / batch_size)
    while True:
        # global_step is incremented by both g and d updates, hence the /2.
        epoch = int(sess.run(global_step) / n_iter / 2) + 1
        print('epoch:', epoch)
        np.random.shuffle(x_train)
        for i in tqdm(range(n_iter)):
            x_batch = normalize(x_train[i*batch_size:(i+1)*batch_size])
            sess.run(
                [g_train_op, d_train_op],
                feed_dict={x: x_batch, is_training: True})
        # Validate on a fixed batch from the test split
        raw = normalize(x_test[:batch_size])
        mos, fake = sess.run(
            [model.downscaled, model.imitation],
            feed_dict={x: raw, is_training: False})
        save_img([mos, fake, raw], ['Input', 'Output', 'Ground Truth'], epoch)
        # Save the model
        ckpt_saver.save(sess, 'backup/latest', write_meta_graph=False)
def save_img(imgs, label, epoch):
    """Save one side-by-side comparison figure per image of the batch.

    :param imgs: list of image batches with values in [-1, 1]
        (e.g. [downscaled input, generated output, ground truth])
    :param label: one subplot title per batch in ``imgs``
    :param epoch: current epoch number, used as the output file name
    """
    for i in range(batch_size):
        fig = plt.figure()
        for j, img in enumerate(imgs):
            # Map [-1, 1] back to [0, 255] and convert BGR -> RGB for matplotlib.
            im = np.uint8((img[i]+1)*127.5)
            im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
            fig.add_subplot(1, len(imgs), j+1)
            plt.imshow(im)
            plt.tick_params(labelbottom='off')
            plt.tick_params(labelleft='off')
            plt.gca().get_xaxis().set_ticks_position('none')
            plt.gca().get_yaxis().set_ticks_position('none')
            plt.xlabel(label[j])
        seq_ = "{0:09d}".format(i+1)
        epoch_ = "{0:09d}".format(epoch)
        out_dir = os.path.join('result', seq_)
        # makedirs(exist_ok=True) replaces the racy
        # `os.path.exists(...) == False` check-then-mkdir of the old code.
        os.makedirs(out_dir, exist_ok=True)
        plt.savefig(os.path.join(out_dir, '{}.jpg'.format(epoch_)))
        plt.close()
def normalize(images):
return np.array([image/127.5-1 for image in images])
# Entry point: start training when the script is executed directly.
if __name__ == '__main__':
    train()
| [
"tensorflow.global_variables",
"tensorflow.Variable",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.tick_params",
"os.path.join",
"matplotlib.pyplot.xlabel",
"cv2.cvtColor",
"matplotlib.pyplot.close",
"matplotlib.pyplot.imshow",
"tensorflow.variable_scope",
"load.load... | [((269, 314), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, 96, 96, 3]'], {}), '(tf.float32, [None, 96, 96, 3])\n', (283, 314), True, 'import tensorflow as tf\n'), ((334, 361), 'tensorflow.placeholder', 'tf.placeholder', (['tf.bool', '[]'], {}), '(tf.bool, [])\n', (348, 361), True, 'import tensorflow as tf\n'), ((377, 412), 'lpsrgan.LPSRGAN', 'LPSRGAN', (['x', 'is_training', 'batch_size'], {}), '(x, is_training, batch_size)\n', (384, 412), False, 'from lpsrgan import LPSRGAN\n'), ((425, 437), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (435, 437), True, 'import tensorflow as tf\n'), ((564, 615), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': 'learning_rate'}), '(learning_rate=learning_rate)\n', (586, 615), True, 'import tensorflow as tf\n'), ((844, 877), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (875, 877), True, 'import tensorflow as tf\n'), ((946, 967), 'tensorflow.global_variables', 'tf.global_variables', ([], {}), '()\n', (965, 967), True, 'import tensorflow as tf\n'), ((1043, 1066), 'tensorflow.train.Saver', 'tf.train.Saver', (['vgg_var'], {}), '(vgg_var)\n', (1057, 1066), True, 'import tensorflow as tf\n'), ((1148, 1188), 'tensorflow.train.get_checkpoint_state', 'tf.train.get_checkpoint_state', (['"""backup/"""'], {}), "('backup/')\n", (1177, 1188), True, 'import tensorflow as tf\n'), ((1316, 1327), 'load.load', 'load.load', ([], {}), '()\n', (1325, 1327), False, 'import load\n'), ((3125, 3176), 'numpy.array', 'np.array', (['[(image / 127.5 - 1) for image in images]'], {}), '([(image / 127.5 - 1) for image in images])\n', (3133, 3176), True, 'import numpy as np\n'), ((448, 476), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""lpsrgan"""'], {}), "('lpsrgan')\n", (465, 476), True, 'import tensorflow as tf\n'), ((501, 552), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'name': '"""global_step"""', 'trainable': 
'(False)'}), "(0, name='global_step', trainable=False)\n", (512, 552), True, 'import tensorflow as tf\n'), ((1207, 1223), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (1221, 1223), True, 'import tensorflow as tf\n'), ((1525, 1551), 'numpy.random.shuffle', 'np.random.shuffle', (['x_train'], {}), '(x_train)\n', (1542, 1551), True, 'import numpy as np\n'), ((2117, 2133), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (2131, 2133), True, 'import tensorflow as tf\n'), ((2288, 2300), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2298, 2300), True, 'import matplotlib.pyplot as plt\n'), ((3046, 3063), 'matplotlib.pyplot.savefig', 'plt.savefig', (['path'], {}), '(path)\n', (3057, 3063), True, 'import matplotlib.pyplot as plt\n'), ((3073, 3084), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (3082, 3084), True, 'import matplotlib.pyplot as plt\n'), ((2359, 2389), 'numpy.uint8', 'np.uint8', (['((img[i] + 1) * 127.5)'], {}), '((img[i] + 1) * 127.5)\n', (2367, 2389), True, 'import numpy as np\n'), ((2404, 2439), 'cv2.cvtColor', 'cv2.cvtColor', (['im', 'cv2.COLOR_BGR2RGB'], {}), '(im, cv2.COLOR_BGR2RGB)\n', (2416, 2439), False, 'import cv2\n'), ((2501, 2515), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {}), '(im)\n', (2511, 2515), True, 'import matplotlib.pyplot as plt\n'), ((2529, 2563), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'labelbottom': '"""off"""'}), "(labelbottom='off')\n", (2544, 2563), True, 'import matplotlib.pyplot as plt\n'), ((2577, 2609), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'labelleft': '"""off"""'}), "(labelleft='off')\n", (2592, 2609), True, 'import matplotlib.pyplot as plt\n'), ((2747, 2767), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['label[j]'], {}), '(label[j])\n', (2757, 2767), True, 'import matplotlib.pyplot as plt\n'), ((2945, 2973), 'os.path.join', 'os.path.join', (['"""result"""', 'seq_'], {}), "('result', seq_)\n", (2957, 2973), False, 'import 
os\n'), ((3007, 3035), 'os.path.join', 'os.path.join', (['"""result"""', 'seq_'], {}), "('result', seq_)\n", (3019, 3035), False, 'import os\n'), ((2623, 2632), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2630, 2632), True, 'import matplotlib.pyplot as plt\n'), ((2685, 2694), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2692, 2694), True, 'import matplotlib.pyplot as plt\n')] |
import pytest
import miceforest as mf
from miceforest.ImputationSchema import _ImputationSchema
from sklearn.datasets import load_boston
import pandas as pd
import numpy as np
# Set random state and load data from sklearn
# NOTE(review): load_boston is deprecated/removed in recent scikit-learn
# versions — this module requires an older sklearn; verify pinned version.
random_state = np.random.RandomState(1991)
boston = pd.DataFrame(load_boston(return_X_y=True)[0])
# Cast two columns to categorical so the schema handles mixed dtypes.
boston[3] = boston[3].astype("category")
boston[8] = boston[8].astype("category")
# Column names are stringified integers ("0", "1", ...).
boston.columns = [str(i) for i in boston.columns]
# Several types of datasets are tested:
# 25% of values amputed across every column.
boston_amp = mf.ampute_data(boston, perc=0.25, random_state=random_state)
# Ampute only some variables
somevars = ["1", "2", "5", "10"]
boston_amp_somevars = mf.ampute_data(
    boston, variables=somevars, perc=0.25, random_state=random_state
)
# Ampute only 1 variable
onevar = ["1"]
boston_amp_onevar = mf.ampute_data(
    boston, variables=onevar, perc=0.25, random_state=random_state
)
def test_vanilla():
    """With no variable_schema, every amputed column becomes a response var."""
    schema = _ImputationSchema(
        validation_data=boston_amp, variable_schema=None, mean_match_candidates=None
    )
    # Fully amputed frame: all columns are both responses and predictors.
    assert set(schema.response_vars) == set(boston_amp.columns)
    assert set(schema.predictor_vars) == set(boston_amp.columns)
    # Partially amputed frames: only columns with missing data respond.
    for data, expected in ((boston_amp_somevars, somevars),
                           (boston_amp_onevar, onevar)):
        schema = _ImputationSchema(
            validation_data=data,
            variable_schema=None,
            mean_match_candidates=None,
        )
        assert set(schema.response_vars) == set(expected)
def test_var_schem_list():
    """A list-valued variable_schema restricts which columns are imputed."""
    schema = _ImputationSchema(
        validation_data=boston_amp,
        variable_schema=["1", "2", "5"],
        mean_match_candidates=4,
    )
    assert set(schema.response_vars) == {"1", "2", "5"}
    assert set(schema.predictor_vars) == set(boston_amp.columns)

    # Column "6" has no missing data: it must not appear in response_vars.
    schema = _ImputationSchema(
        validation_data=boston_amp_somevars,
        variable_schema=["1", "2", "5", "6"],
        mean_match_candidates=None,
    )
    assert set(schema.response_vars) == {"1", "2", "5"}
    assert set(schema.predictor_vars) == set(boston_amp.columns)

    schema = _ImputationSchema(
        validation_data=boston_amp_onevar,
        variable_schema=["1"],
        mean_match_candidates=10,
    )
    assert set(schema.response_vars) == set(onevar)
    # The single response column is excluded from its own predictors.
    remaining = set(boston.columns) - set(onevar)
    assert set(schema.predictor_vars) == remaining
def test_var_schem_dict():
    """A dict-valued variable_schema maps each response to its predictors."""
    var_schema = {"1": ["2", "5"], "2": ["3", "5"], "5": ["6", "7", "8"]}
    candidates = {"1": 3, "2": 4, "5": 5}
    schema = _ImputationSchema(
        validation_data=boston_amp, variable_schema=var_schema,
        mean_match_candidates=candidates
    )
    assert set(schema.response_vars) == {"1", "2", "5"}
    # Predictors are the union of all listed predictor columns.
    assert set(schema.predictor_vars) == {"2", "5", "3", "6", "7", "8"}
    assert schema.mean_match_candidates

    # "6" has no missing data in boston_amp_somevars: dropped from responses.
    var_schema = {"1": ["2", "5"], "2": ["3", "5"], "6": ["10", "7", "8"]}
    candidates = {"1": 3, "3": 4, "5": 5}
    schema = _ImputationSchema(
        validation_data=boston_amp_somevars,
        variable_schema=var_schema,
        mean_match_candidates=candidates,
    )
    assert set(schema.response_vars) == {"1", "2"}
    assert set(schema.predictor_vars) == {"2", "5", "3"}
    assert set(schema.mean_match_candidates.keys()) == {"1", "3", "5"}
| [
"sklearn.datasets.load_boston",
"miceforest.ampute_data",
"miceforest.ImputationSchema._ImputationSchema",
"numpy.random.RandomState"
] | [((238, 265), 'numpy.random.RandomState', 'np.random.RandomState', (['(1991)'], {}), '(1991)\n', (259, 265), True, 'import numpy as np\n'), ((507, 567), 'miceforest.ampute_data', 'mf.ampute_data', (['boston'], {'perc': '(0.25)', 'random_state': 'random_state'}), '(boston, perc=0.25, random_state=random_state)\n', (521, 567), True, 'import miceforest as mf\n'), ((653, 738), 'miceforest.ampute_data', 'mf.ampute_data', (['boston'], {'variables': 'somevars', 'perc': '(0.25)', 'random_state': 'random_state'}), '(boston, variables=somevars, perc=0.25, random_state=random_state\n )\n', (667, 738), True, 'import miceforest as mf\n'), ((801, 879), 'miceforest.ampute_data', 'mf.ampute_data', (['boston'], {'variables': 'onevar', 'perc': '(0.25)', 'random_state': 'random_state'}), '(boston, variables=onevar, perc=0.25, random_state=random_state)\n', (815, 879), True, 'import miceforest as mf\n'), ((923, 1022), 'miceforest.ImputationSchema._ImputationSchema', '_ImputationSchema', ([], {'validation_data': 'boston_amp', 'variable_schema': 'None', 'mean_match_candidates': 'None'}), '(validation_data=boston_amp, variable_schema=None,\n mean_match_candidates=None)\n', (940, 1022), False, 'from miceforest.ImputationSchema import _ImputationSchema\n'), ((1182, 1290), 'miceforest.ImputationSchema._ImputationSchema', '_ImputationSchema', ([], {'validation_data': 'boston_amp_somevars', 'variable_schema': 'None', 'mean_match_candidates': 'None'}), '(validation_data=boston_amp_somevars, variable_schema=None,\n mean_match_candidates=None)\n', (1199, 1290), False, 'from miceforest.ImputationSchema import _ImputationSchema\n'), ((1390, 1496), 'miceforest.ImputationSchema._ImputationSchema', '_ImputationSchema', ([], {'validation_data': 'boston_amp_onevar', 'variable_schema': 'None', 'mean_match_candidates': 'None'}), '(validation_data=boston_amp_onevar, variable_schema=None,\n mean_match_candidates=None)\n', (1407, 1496), False, 'from miceforest.ImputationSchema import 
_ImputationSchema\n'), ((1622, 1729), 'miceforest.ImputationSchema._ImputationSchema', '_ImputationSchema', ([], {'validation_data': 'boston_amp', 'variable_schema': "['1', '2', '5']", 'mean_match_candidates': '(4)'}), "(validation_data=boston_amp, variable_schema=['1', '2',\n '5'], mean_match_candidates=4)\n", (1639, 1729), False, 'from miceforest.ImputationSchema import _ImputationSchema\n'), ((1978, 2102), 'miceforest.ImputationSchema._ImputationSchema', '_ImputationSchema', ([], {'validation_data': 'boston_amp_somevars', 'variable_schema': "['1', '2', '5', '6']", 'mean_match_candidates': 'None'}), "(validation_data=boston_amp_somevars, variable_schema=['1',\n '2', '5', '6'], mean_match_candidates=None)\n", (1995, 2102), False, 'from miceforest.ImputationSchema import _ImputationSchema\n'), ((2276, 2381), 'miceforest.ImputationSchema._ImputationSchema', '_ImputationSchema', ([], {'validation_data': 'boston_amp_onevar', 'variable_schema': "['1']", 'mean_match_candidates': '(10)'}), "(validation_data=boston_amp_onevar, variable_schema=['1'],\n mean_match_candidates=10)\n", (2293, 2381), False, 'from miceforest.ImputationSchema import _ImputationSchema\n'), ((2735, 2834), 'miceforest.ImputationSchema._ImputationSchema', '_ImputationSchema', ([], {'validation_data': 'boston_amp', 'variable_schema': 'schem', 'mean_match_candidates': 'mmc'}), '(validation_data=boston_amp, variable_schema=schem,\n mean_match_candidates=mmc)\n', (2752, 2834), False, 'from miceforest.ImputationSchema import _ImputationSchema\n'), ((3215, 3324), 'miceforest.ImputationSchema._ImputationSchema', '_ImputationSchema', ([], {'validation_data': 'boston_amp_somevars', 'variable_schema': 'schem', 'mean_match_candidates': 'mmc'}), '(validation_data=boston_amp_somevars, variable_schema=\n schem, mean_match_candidates=mmc)\n', (3232, 3324), False, 'from miceforest.ImputationSchema import _ImputationSchema\n'), ((288, 316), 'sklearn.datasets.load_boston', 'load_boston', ([], {'return_X_y': 
'(True)'}), '(return_X_y=True)\n', (299, 316), False, 'from sklearn.datasets import load_boston\n')] |
import numpy
from scipy.optimize import bisect
from xminds.compat import logger
from .config import INTERACTIONS_DTYPE, MIN_RATING, MAX_RATING
from .utils import partition_int, njit
class InteractionsSampler:
    """
    This class samples interactions, i.e. pairs user-item for which the ratings is known.
    Sampling interactions can equivalently be viewed as sampling a mask for the ratings matrix.
    Sampling is designed to be in O(n_interactions).
    """
    # Output buffer is over-allocated by this factor so that
    # `_ensure_at_least_one_per_item` can append extra interactions in place.
    BUFFER_SIZE_MULTIPIER = 2.5
    ALLOWED_DISTRIBUTION_SCHEME = {'uniform', 'exponential', 'invlog'}
    # Number of random (user, item) pairs used to estimate the empirical
    # ratings histogram in `_compute_ratings_acceptance`.
    N_SAMPLES_RATINGS_DIS_ESTIMATION = 300_000
    DEFAULT_ITEM_MAX_POPULARITY_FACTOR = 100

    def __init__(self, density, users_distribution='uniform', items_distribution='uniform',
                 min_per_user=None, max_per_user=None, max_item_popularity_factor=None,
                 ensure_one_per_item=True, target_ratings_distribution=None):
        """
        :param float density: must be in ]0, 1[
        :param string users_distribution: interactions distribution scheme for users
        :param string items_distribution: interactions distribution scheme for items
        :param int? min_per_user: minimum number of interactions per user
            (will be strictly respected)
        :param int? max_per_user: maximum number of interactions per user
        :param float? max_item_popularity_factor:
            = popularity(most popular item) / popularity(least popular item)
            (aimed but not strictly respected)
        :param bool? ensure_one_per_item: if True (default), each item will have at least
            one interaction
        :param float-array? target_ratings_distribution: array of size (MAX_RATING - MIN_RATING + 1)
            The ratings distribution to be aimed while sampling interactions for MNAR sampling.
        Users/Items distribution scheme can be either:
        - 'uniform': the number of interactions of each user/item will roughly be the same.
        - 'exponential': the number of interactions of users/items follow an
            exponential distribution
        - 'invlog': the number of interactions of users/items is distributed even more unevenly
            than for 'exponential'
        Note: MNAR means "missing not at random sampling"
        In recommendations, this term is usually used about the phenomenon:
        "a user has more chance to interact with an item he/she likes"
        (so the missing interactions are missing not at random)
        """
        assert 0 < density < 1
        assert users_distribution in self.ALLOWED_DISTRIBUTION_SCHEME
        assert items_distribution in self.ALLOWED_DISTRIBUTION_SCHEME
        self.density = density
        # NOTE(review): "reparition" looks like a typo for "repartition";
        # kept as-is since other code may reference these attribute names.
        self.users_reparition = users_distribution
        self.items_reparition = items_distribution
        self.min_per_user = min_per_user
        self.max_per_user = max_per_user
        self.max_item_popularity_factor = max_item_popularity_factor
        self.ensure_one_per_item = ensure_one_per_item
        self.target_ratings_distribution = target_ratings_distribution

    def sample(self, n_users, n_items, ratings_factory=None):
        """
        :param int n_users:
        :param int n_items:
        :param RatingsFactoryBase? ratings_factory: Must be provided for MNAR sampling
        :returns: INTERACTIONS_DTYPE-array interactions
        """
        # Decide how many interactions each user gets and how popular each item is,
        # then sample the actual (user, item) pairs accordingly.
        n_interacts = round(self.density * n_users * n_items)
        users_n_interacts = self._pick_users_n_interacts(
            n_users, n_items, n_interacts,
            self.users_reparition, self.min_per_user, self.max_per_user)
        items_popularity = self._pick_items_popularity(n_items, self.items_reparition,
                                                       self.max_item_popularity_factor)
        ratings_acceptance, bins, remaining_mass = None, None, None
        if self.target_ratings_distribution is not None:
            assert ratings_factory is not None, 'For MNAR sampling, must provide ratings factory'
            ratings_acceptance, bins, remaining_mass = self._compute_ratings_acceptance(
                n_users, n_items, ratings_factory, self.target_ratings_distribution)
        interacts, offset = self._sample_interactions(
            users_n_interacts, items_popularity, ratings_factory,
            ratings_acceptance=ratings_acceptance, ratings_bin_edges=bins,
            remaining_mass=remaining_mass)
        if self.ensure_one_per_item:
            interacts, offset = self._ensure_at_least_one_per_item(
                interacts, offset, n_users, n_items)
        # Warn about degenerate rows/columns of the sampled mask
        # (users interacting with every item, or items seen by every user).
        users_n_interacts = numpy.bincount(interacts['user'][:offset])
        items_n_interacts = numpy.bincount(interacts['item'][:offset])
        nu_all = (users_n_interacts == n_items).sum()
        ni_all = (items_n_interacts == n_users).sum()
        if nu_all > 0:
            logger.warning(f'WARNING: some users ({nu_all:,}) have interactions with all the items')
        if ni_all > 0:
            logger.warning(f'WARNING: some items ({ni_all:,}) have interactions with all the users')
        return interacts[:offset]

    @classmethod
    def _sample_interactions(cls, users_n_interacts, items_popularity, ratings_factory,
                             ratings_acceptance=None, ratings_bin_edges=None, remaining_mass=None):
        """
        :param int-array users_n_interacts: (nu,)
        :param float-array items_popularity: (ni,)
        :param RatingsFactory ratings_factory: not used for missing at random sampling
            (i.e. not ratings-based interactions sampling)
        :param float-array? ratings_acceptance: (m,)
        :param float-array? ratings_bin_edges: (m+1,)
            `ratings_acceptance` and `ratings_bin_edges` are provided for MNAR sampling
            (i.e. ratings-based interactions sampling).
            They define the probabilty of keeping a sampled interaction given its rating value.
        :param float? remaining_mass:
        :returns: INTERACTIONS_DTYPE-array interactions, int
        """
        # Shuffle so that the (sorted) per-user counts and item popularities
        # are assigned to random user/item indices.
        numpy.random.shuffle(users_n_interacts)
        numpy.random.shuffle(items_popularity)
        n_interacts = users_n_interacts.sum()
        n_items = items_popularity.size
        # Normalized cumulative popularity: a CDF used for
        # inverse-transform sampling in `_sample_available_items`.
        items_cp = items_popularity.cumsum()
        items_cp /= items_cp[-1]
        interacts = cls._init_interactions_buffer(n_interacts)
        # True = item still available for the current user (no duplicates).
        items_mask = numpy.ones(n_items, dtype=bool)
        if ratings_acceptance is None:
            # Missing-at-random sampling: directly draw `dk` items per user.
            k = 0
            for u, dk in enumerate(users_n_interacts):
                interacts['user'][k:k+dk] = u
                interacts['item'][k:k+dk] = cls._sample_available_items(dk, items_cp, items_mask)
                k += dk
            return interacts, k
        else:
            # MNAR sampling: rejection-sample candidate items, keeping each
            # with a probability given by its rating's acceptance bin.
            k = 0
            # Oversampling factor: roughly 1/remaining_mass candidates are
            # needed per kept interaction, plus a safety margin (heuristic).
            mul = (1 / remaining_mass)*1.1 + 5
            max_n_tries = 10 # arbitrary value
            for u in range(users_n_interacts.size):
                dk = users_n_interacts[u]
                interacts['user'][k:k+dk] = u
                n_to_sample = min(n_items // 2, int(dk*mul))
                u_repeated = numpy.full(n_to_sample, u)
                for _ in range(max_n_tries):
                    items = cls._sample_available_items(n_to_sample, items_cp, items_mask)
                    # apply the missing not a random step
                    # i.e. keep interactions with a probability depending on their rating value
                    ratings = ratings_factory.get_ratings(u_repeated[:items.size], items)
                    idxs = numpy.searchsorted(ratings_bin_edges, ratings)
                    idxs = numpy.maximum(idxs - 1, 0) # avoids issue when rating=MIN_RATING
                    keep_propability = ratings_acceptance[idxs]
                    keep = numpy.random.rand(items.size) < keep_propability
                    items = items[keep]
                    items = items[:dk] # keep at most `dk` items
                    interacts['item'][k:k+items.size] = items
                    k += items.size
                    dk -= items.size
                    # Mark kept items unavailable so retries don't redraw them.
                    items_mask[items] = False
                    if dk == 0:
                        break
                if dk > 0:
                    raise ValueError(
                        f'Could not sampled {users_n_interacts[u]} interactions for one user')
                u_n_inters = users_n_interacts[u]
                # Restore availability of this user's items for the next user.
                items_mask[interacts['item'][k-u_n_inters:k]] = True
            return interacts, k

    @classmethod
    def _init_interactions_buffer(cls, n_interactions):
        """
        :param int n_interactions:
        :returns: interactions_buffer
        """
        buffer_size = int(n_interactions * cls.BUFFER_SIZE_MULTIPIER)
        interactions_buffer = numpy.empty(buffer_size, dtype=INTERACTIONS_DTYPE)
        return interactions_buffer

    @classmethod
    def _compute_ratings_acceptance(cls, n_users, n_items, ratings_factory, target_dis):
        """
        :param int n_users:
        :param int n_items:
        :param RatingsFactory ratings_factory:
        :param float-array target_dis: array of size MAX_RATING - MIN_RATING + 1.
            The ratings distribution to be aimed while sampling interactions
        :returns: tuple(
            (n,)-float-array acceptance: a rating in the i-th bin will be kept
                with probability acceptance[i]
            (n+1,)-float-array bin_edges: edges of the bins (same as what is returned
                by `numpy.histogram`)
        )
        """
        # Estimate the empirical ratings distribution from random pairs.
        rnd_users = numpy.random.choice(n_users, cls.N_SAMPLES_RATINGS_DIS_ESTIMATION)
        rnd_items = numpy.random.choice(n_items, cls.N_SAMPLES_RATINGS_DIS_ESTIMATION)
        ratings = ratings_factory.get_ratings(rnd_users, rnd_items)
        hist, bin_edges = numpy.histogram(ratings, bins=30, range=(MIN_RATING, MAX_RATING))
        acceptance = numpy.zeros(hist.size)
        for i, (n_rtgs_bin, left, right) in enumerate(zip(hist, bin_edges, bin_edges[1:])):
            if n_rtgs_bin > 0:
                # Linearly interpolate the target distribution at the bin
                # center, then acceptance = target mass / empirical mass.
                mid = (left + right)/2 - MIN_RATING
                mid_floor = numpy.floor(mid).astype(int)
                mid_frac = mid - mid_floor
                target_val = (target_dis[mid_floor]*(1 - mid_frac)
                              + target_dis[mid_floor + 1]*mid_frac)
                acceptance[i] = target_val / n_rtgs_bin
        acceptance /= acceptance.max()
        # Expected fraction of candidate samples that survive rejection.
        remaining_mass = ((hist * acceptance) / hist.sum()).sum()
        if remaining_mass < 1/10:
            # NOTE(review): missing space between 'even' and 'impossible' in
            # this concatenated message (kept as-is: runtime string).
            logger.warning('WARNING: Interactions sampling might be slow or even'
                           + f'impossible (remaining mass is {remaining_mass})')
        return acceptance, bin_edges, remaining_mass

    @classmethod
    def _ensure_at_least_one_per_item(cls, interacts_buffer, offset, n_users, n_items):
        """
        :param INTERACTIONS_DTYPE-array interacts_buffer:
        :param int offset: offset of the buffer (i.e. number of interactions in the buffer)
        :param int n_users:
        :param int n_items:
        """
        # Find items with zero interactions and pair each with a random user,
        # appending in place past `offset` (relies on the over-sized buffer).
        items_count = numpy.bincount(interacts_buffer['item'][:offset], minlength=n_items)
        items_no_interacts, = numpy.where(items_count == 0)
        n_no_interacts = items_no_interacts.size
        assert interacts_buffer.size >= offset + n_no_interacts, \
            f'interactions buffer too small: {interacts_buffer.size} < {offset} + {n_no_interacts}'
        interacts_buffer['item'][offset:offset + n_no_interacts] = items_no_interacts
        interacts_buffer['user'][offset:offset +
                                 n_no_interacts] = numpy.random.choice(n_users, n_no_interacts)
        return interacts_buffer, offset + n_no_interacts

    def _pick_users_n_interacts(self, n_users, n_items, n_interacts, scheme, min_interact=None,
                                max_interact=None):
        """
        :param int n_users:
        :param int n_items:
        :param int n_interacts: the desired number of interactions to sample (and thus to be
            distributed among users)
        :param str scheme: distribution scheme
        :param int? min_interacts:
        :param int? max_interacts:
        :returns: int-array (nu,) n_interacts_per_user
        """
        if scheme == 'uniform':
            msg = 'can not specify min_interact or max_interact for uniform distribution'
            assert min_interact is None and max_interact is None, msg
            return partition_int(n_interacts, n_users)
        elif scheme == 'exponential':
            min_interact = min_interact or 1
            assert max_interact is None, 'can not specify max_interact for exponential distribution'
            def compute_n_interacts(mi):
                """
                :param int-or-float mi: maximum number of interactions allowed for one user
                Compute the number of interactions per user with an exponential shape (uneven distribution)
                in function of the maximum number of interactions allowed for one user
                and other global parameters: n_users, min_interact
                """
                x = numpy.linspace(numpy.log(min_interact), numpy.log(mi), num=n_users)
                n_inters = (numpy.exp(x) + 1e-5).astype(int)
                return n_inters
            # performs a bisection method to find a value for `mi` (max_interact)
            # such that the total count matches `n_interacts`
            def f(mi): return compute_n_interacts(mi).sum() - n_interacts
            max_interact = bisect(f, min_interact + 1, n_items)
            return compute_n_interacts(max_interact)
        elif scheme == 'invlog':
            min_interact = min_interact or 1
            max_interact = max_interact or min(n_items//4, n_interacts//n_users * 30)
            # note that default of max_interact is very arbitrary
            def compute_n_interacts(mul):
                """
                :param float mul:
                Compute the number of interactions per user with a "invlog" shape (very uneven distribution)
                in function of the parameter `mul`, and other global parameters: n_users, max_interact, min_interact
                """
                x = numpy.linspace(0, 1, num=n_users + 1)[1:]
                eps = 1 / max_interact
                y = - 1 / (-eps + mul*numpy.log(x))
                n_inters = (y + min_interact).astype(int)
                return n_inters
            # performs a bisection method to find a value for `mul`
            # we are looking for a value such that `compute_n_interacts(mul)` ~= `n_interacts`
            # because we can't directly find it by solving an equation
            def f(mul): return compute_n_interacts(mul).sum() - n_interacts
            mul = bisect(f, 1e-3, 100)
            return compute_n_interacts(mul)

    def _pick_items_popularity(self, n_items, scheme, max_popularity_factor=None):
        """
        :param int n_items:
        :param str scheme: distribution scheme
        :param int? max_popularity_factor:
        :returns: float-array (ni,) items_popularity
        """
        max_popularity_factor = max_popularity_factor or self.DEFAULT_ITEM_MAX_POPULARITY_FACTOR
        if scheme == 'uniform':
            return numpy.ones(n_items)
        elif scheme == 'exponential':
            x = numpy.linspace(0, numpy.log(max_popularity_factor), num=n_items)
            return numpy.exp(x)
        elif scheme == 'invlog':
            x = numpy.linspace(0, 1, num=n_items + 1)[1:]
            eps = 1 / max_popularity_factor
            y = - 1 / (-eps + numpy.log(x))
            return y + 1

    @staticmethod
    @njit
    def _sample_available_items(n_to_sample, cp, items_mask):
        """
        :param int n_to_sample:
        :param (ni,)-float-array cp: cumulative distribution
        :param (ni,)-bool-array items_mask:
        :returns: (n_to_sample,)-int-array sampled_items
        """
        sampled_items = numpy.empty(n_to_sample, dtype=numpy.int32)
        for k in range(n_to_sample):
            item = -1
            # Inverse-transform sampling with rejection of masked items.
            while item == -1 or not items_mask[item]:
                item = numpy.searchsorted(cp, numpy.random.rand())
            # Temporarily mask the drawn item so this batch has no duplicates.
            items_mask[item] = False
            sampled_items[k] = item
        # Restore the mask: callers manage longer-lived masking themselves.
        items_mask[sampled_items] = True
        return sampled_items
| [
"numpy.full",
"numpy.maximum",
"numpy.log",
"numpy.empty",
"numpy.floor",
"numpy.zeros",
"numpy.ones",
"numpy.searchsorted",
"numpy.histogram",
"numpy.where",
"numpy.exp",
"numpy.linspace",
"numpy.random.choice",
"numpy.random.rand",
"scipy.optimize.bisect",
"numpy.bincount",
"numpy.... | [((4645, 4687), 'numpy.bincount', 'numpy.bincount', (["interacts['user'][:offset]"], {}), "(interacts['user'][:offset])\n", (4659, 4687), False, 'import numpy\n'), ((4716, 4758), 'numpy.bincount', 'numpy.bincount', (["interacts['item'][:offset]"], {}), "(interacts['item'][:offset])\n", (4730, 4758), False, 'import numpy\n'), ((6084, 6123), 'numpy.random.shuffle', 'numpy.random.shuffle', (['users_n_interacts'], {}), '(users_n_interacts)\n', (6104, 6123), False, 'import numpy\n'), ((6132, 6170), 'numpy.random.shuffle', 'numpy.random.shuffle', (['items_popularity'], {}), '(items_popularity)\n', (6152, 6170), False, 'import numpy\n'), ((6419, 6450), 'numpy.ones', 'numpy.ones', (['n_items'], {'dtype': 'bool'}), '(n_items, dtype=bool)\n', (6429, 6450), False, 'import numpy\n'), ((8765, 8815), 'numpy.empty', 'numpy.empty', (['buffer_size'], {'dtype': 'INTERACTIONS_DTYPE'}), '(buffer_size, dtype=INTERACTIONS_DTYPE)\n', (8776, 8815), False, 'import numpy\n'), ((9550, 9616), 'numpy.random.choice', 'numpy.random.choice', (['n_users', 'cls.N_SAMPLES_RATINGS_DIS_ESTIMATION'], {}), '(n_users, cls.N_SAMPLES_RATINGS_DIS_ESTIMATION)\n', (9569, 9616), False, 'import numpy\n'), ((9637, 9703), 'numpy.random.choice', 'numpy.random.choice', (['n_items', 'cls.N_SAMPLES_RATINGS_DIS_ESTIMATION'], {}), '(n_items, cls.N_SAMPLES_RATINGS_DIS_ESTIMATION)\n', (9656, 9703), False, 'import numpy\n'), ((9798, 9863), 'numpy.histogram', 'numpy.histogram', (['ratings'], {'bins': '(30)', 'range': '(MIN_RATING, MAX_RATING)'}), '(ratings, bins=30, range=(MIN_RATING, MAX_RATING))\n', (9813, 9863), False, 'import numpy\n'), ((9885, 9907), 'numpy.zeros', 'numpy.zeros', (['hist.size'], {}), '(hist.size)\n', (9896, 9907), False, 'import numpy\n'), ((11087, 11155), 'numpy.bincount', 'numpy.bincount', (["interacts_buffer['item'][:offset]"], {'minlength': 'n_items'}), "(interacts_buffer['item'][:offset], minlength=n_items)\n", (11101, 11155), False, 'import numpy\n'), ((11186, 11215), 'numpy.where', 
'numpy.where', (['(items_count == 0)'], {}), '(items_count == 0)\n', (11197, 11215), False, 'import numpy\n'), ((11619, 11663), 'numpy.random.choice', 'numpy.random.choice', (['n_users', 'n_no_interacts'], {}), '(n_users, n_no_interacts)\n', (11638, 11663), False, 'import numpy\n'), ((15930, 15973), 'numpy.empty', 'numpy.empty', (['n_to_sample'], {'dtype': 'numpy.int32'}), '(n_to_sample, dtype=numpy.int32)\n', (15941, 15973), False, 'import numpy\n'), ((4902, 4995), 'xminds.compat.logger.warning', 'logger.warning', (['f"""WARNING: some users ({nu_all:,}) have interactions with all the items"""'], {}), "(\n f'WARNING: some users ({nu_all:,}) have interactions with all the items')\n", (4916, 4995), False, 'from xminds.compat import logger\n'), ((5026, 5119), 'xminds.compat.logger.warning', 'logger.warning', (['f"""WARNING: some items ({ni_all:,}) have interactions with all the users"""'], {}), "(\n f'WARNING: some items ({ni_all:,}) have interactions with all the users')\n", (5040, 5119), False, 'from xminds.compat import logger\n'), ((10525, 10652), 'xminds.compat.logger.warning', 'logger.warning', (["('WARNING: Interactions sampling might be slow or even' +\n f'impossible (remaining mass is {remaining_mass})')"], {}), "('WARNING: Interactions sampling might be slow or even' +\n f'impossible (remaining mass is {remaining_mass})')\n", (10539, 10652), False, 'from xminds.compat import logger\n'), ((15220, 15239), 'numpy.ones', 'numpy.ones', (['n_items'], {}), '(n_items)\n', (15230, 15239), False, 'import numpy\n'), ((7120, 7146), 'numpy.full', 'numpy.full', (['n_to_sample', 'u'], {}), '(n_to_sample, u)\n', (7130, 7146), False, 'import numpy\n'), ((13488, 13524), 'scipy.optimize.bisect', 'bisect', (['f', '(min_interact + 1)', 'n_items'], {}), '(f, min_interact + 1, n_items)\n', (13494, 13524), False, 'from scipy.optimize import bisect\n'), ((15378, 15390), 'numpy.exp', 'numpy.exp', (['x'], {}), '(x)\n', (15387, 15390), False, 'import numpy\n'), ((7554, 7600), 
'numpy.searchsorted', 'numpy.searchsorted', (['ratings_bin_edges', 'ratings'], {}), '(ratings_bin_edges, ratings)\n', (7572, 7600), False, 'import numpy\n'), ((7628, 7654), 'numpy.maximum', 'numpy.maximum', (['(idxs - 1)', '(0)'], {}), '(idxs - 1, 0)\n', (7641, 7654), False, 'import numpy\n'), ((14726, 14747), 'scipy.optimize.bisect', 'bisect', (['f', '(0.001)', '(100)'], {}), '(f, 0.001, 100)\n', (14732, 14747), False, 'from scipy.optimize import bisect\n'), ((15312, 15344), 'numpy.log', 'numpy.log', (['max_popularity_factor'], {}), '(max_popularity_factor)\n', (15321, 15344), False, 'import numpy\n'), ((16133, 16152), 'numpy.random.rand', 'numpy.random.rand', ([], {}), '()\n', (16150, 16152), False, 'import numpy\n'), ((7785, 7814), 'numpy.random.rand', 'numpy.random.rand', (['items.size'], {}), '(items.size)\n', (7802, 7814), False, 'import numpy\n'), ((10111, 10127), 'numpy.floor', 'numpy.floor', (['mid'], {}), '(mid)\n', (10122, 10127), False, 'import numpy\n'), ((13158, 13181), 'numpy.log', 'numpy.log', (['min_interact'], {}), '(min_interact)\n', (13167, 13181), False, 'import numpy\n'), ((13183, 13196), 'numpy.log', 'numpy.log', (['mi'], {}), '(mi)\n', (13192, 13196), False, 'import numpy\n'), ((15440, 15477), 'numpy.linspace', 'numpy.linspace', (['(0)', '(1)'], {'num': '(n_items + 1)'}), '(0, 1, num=n_items + 1)\n', (15454, 15477), False, 'import numpy\n'), ((14174, 14211), 'numpy.linspace', 'numpy.linspace', (['(0)', '(1)'], {'num': '(n_users + 1)'}), '(0, 1, num=n_users + 1)\n', (14188, 14211), False, 'import numpy\n'), ((15556, 15568), 'numpy.log', 'numpy.log', (['x'], {}), '(x)\n', (15565, 15568), False, 'import numpy\n'), ((13239, 13251), 'numpy.exp', 'numpy.exp', (['x'], {}), '(x)\n', (13248, 13251), False, 'import numpy\n'), ((14293, 14305), 'numpy.log', 'numpy.log', (['x'], {}), '(x)\n', (14302, 14305), False, 'import numpy\n')] |
import numpy as np
import random
from collections import namedtuple, deque
from replaybuffer import ExperienceReplay
from model import QNetwork
import torch
import torch.nn.functional as F
import torch.optim as optim
BUFFER_SIZE = int(1e5) # replay buffer size
BATCH_SIZE = 32 # minibatch size
GAMMA = 0.99 # discount factor
TAU = 1e-3 # for soft update of target parameters
LR = 5e-4 # learning rate
C = 1250 # how often to update the network
class Agent():
    """Deep Q-Learning agent that interacts with and learns from the environment.

    Supports two target-value computations ('DQN' and Double DQN 'DDQN'),
    an experience-replay buffer, and a periodic hard sync of the target
    network every C environment steps.
    """

    def __init__(self, state_size, action_size, seed, algorithm='DDQN'):
        """Initialize an Agent object.

        Params
        ======
            state_size (int): dimension of each state
            action_size (int): dimension of each action
            seed (int): random seed
            algorithm (str): 'DDQN' (default) or 'DQN' target computation
        """
        self.state_size = state_size
        self.action_size = action_size
        self.seed = random.seed(seed)
        # algorithm used by learn() to compute the bootstrap target
        self.algorithm = algorithm
        # Q-Networks: local is trained every step, target is a periodically
        # synchronised copy used for stable bootstrap targets.
        self.qnetwork_local = QNetwork(state_size, action_size, seed)
        self.qnetwork_target = QNetwork(state_size, action_size, seed)
        self.optimizer = optim.Adam(self.qnetwork_local.parameters(), lr=LR)
        # Replay memory
        self.memory = ExperienceReplay(action_size, BUFFER_SIZE, BATCH_SIZE, seed)
        # Counts steps modulo C to schedule the target-network sync
        self.t_step = 0

    def step(self, state, action, reward, next_state, done):
        """Store one transition, learn from a replay batch, and sync targets.

        Params
        ======
            state, action, reward, next_state, done: one environment transition
        """
        # Save experience in replay memory
        self.memory.add(state, action, reward, next_state, done)
        # If enough samples are available in memory, get random subset and learn
        if len(self.memory) > BATCH_SIZE:
            experiences = self.memory.sample()
            self.learn(experiences, GAMMA)
        # Update target network every C steps.
        self.t_step = (self.t_step + 1) % C
        if self.t_step == 0:
            # ------------------- update target network ------------------- #
            self.hard_update(self.qnetwork_local, self.qnetwork_target)

    def choose_action(self, state, eps=0.):
        """Returns actions for given state as per current policy.

        Params
        ======
            state (array_like): current state
            eps (float): epsilon, for epsilon-greedy action selection
        """
        # Epsilon-greedy action selection: explore with probability eps
        if random.random() < eps:
            return random.choice(np.arange(self.action_size))
        state = torch.from_numpy(state).float().unsqueeze(0)
        self.qnetwork_local.eval()  # evaluation mode (e.g. disables dropout)
        with torch.no_grad():
            action_values = self.qnetwork_local(state)
        self.qnetwork_local.train()  # restore training mode
        return np.argmax(action_values.numpy())

    def learn(self, experiences, gamma):
        """Update value parameters using given batch of experience tuples.

        Params
        ======
            experiences (Tuple[torch.Variable]): tuple of (s, a, r, s', done) tuples
            gamma (float): discount factor

        Raises
        ======
            ValueError: if self.algorithm is neither 'DDQN' nor 'DQN'
            (previously this fell through and crashed with a NameError).
        """
        states, actions, rewards, next_states, dones = experiences
        if self.algorithm == 'DDQN':
            # Double DQN: the local network picks the greedy action, the
            # target network evaluates it (reduces overestimation bias).
            _, best_actions = self.qnetwork_local(next_states).detach().max(1)
            Q_targets_next = self.qnetwork_target(next_states).detach().gather(1, best_actions.unsqueeze(1))
        elif self.algorithm == 'DQN':
            # DQN with Target Networks: max over the target network's values
            Q_targets_next = self.qnetwork_target(next_states).detach().max(1)[0].unsqueeze(1)
        else:
            raise ValueError("Unknown algorithm %r: expected 'DDQN' or 'DQN'" % (self.algorithm,))
        # Compute Q targets for current states; (1 - dones) zeroes the
        # bootstrap term on terminal transitions.
        Q_targets = rewards + (gamma * Q_targets_next * (1 - dones))
        # Get expected Q values from local model
        Q_expected = self.qnetwork_local(states).gather(1, actions)
        # Compute loss and take one optimizer step
        loss = F.mse_loss(Q_expected, Q_targets)
        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()

    def hard_update(self, local_model, target_model):
        """Hard update model parameters. Copy the values of local network into the target.
        θ_target = θ_local

        Params
        ======
            local_model (PyTorch model): weights will be copied from
            target_model (PyTorch model): weights will be copied to
        """
        for target_param, local_param in zip(target_model.parameters(), local_model.parameters()):
            target_param.data.copy_(local_param.data)

    def soft_update(self, local_model, target_model, tau):
        """Soft update model parameters.
        θ_target = τ*θ_local + (1 - τ)*θ_target

        Params
        ======
            local_model (PyTorch model): weights will be copied from
            target_model (PyTorch model): weights will be copied to
            tau (float): interpolation parameter
        """
        for target_param, local_param in zip(target_model.parameters(), local_model.parameters()):
            target_param.data.copy_(tau*local_param.data + (1.0-tau)*target_param.data)
| [
"model.QNetwork",
"torch.nn.functional.mse_loss",
"replaybuffer.ExperienceReplay",
"random.random",
"random.seed",
"numpy.arange",
"torch.no_grad",
"torch.from_numpy"
] | [((984, 1001), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (995, 1001), False, 'import random\n'), ((1108, 1147), 'model.QNetwork', 'QNetwork', (['state_size', 'action_size', 'seed'], {}), '(state_size, action_size, seed)\n', (1116, 1147), False, 'from model import QNetwork\n'), ((1179, 1218), 'model.QNetwork', 'QNetwork', (['state_size', 'action_size', 'seed'], {}), '(state_size, action_size, seed)\n', (1187, 1218), False, 'from model import QNetwork\n'), ((1343, 1403), 'replaybuffer.ExperienceReplay', 'ExperienceReplay', (['action_size', 'BUFFER_SIZE', 'BATCH_SIZE', 'seed'], {}), '(action_size, BUFFER_SIZE, BATCH_SIZE, seed)\n', (1359, 1403), False, 'from replaybuffer import ExperienceReplay\n'), ((3973, 4006), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['Q_expected', 'Q_targets'], {}), '(Q_expected, Q_targets)\n', (3983, 4006), True, 'import torch.nn.functional as F\n'), ((2477, 2492), 'random.random', 'random.random', ([], {}), '()\n', (2490, 2492), False, 'import random\n'), ((2533, 2560), 'numpy.arange', 'np.arange', (['self.action_size'], {}), '(self.action_size)\n', (2542, 2560), True, 'import numpy as np\n'), ((2725, 2740), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2738, 2740), False, 'import torch\n'), ((2596, 2619), 'torch.from_numpy', 'torch.from_numpy', (['state'], {}), '(state)\n', (2612, 2619), False, 'import torch\n')] |
from __future__ import print_function, unicode_literals
import tensorflow as tf
import numpy as np
import scipy.misc
import os
import argparse
import operator
import csv
import cv2
from moviepy.editor import VideoFileClip
from nets.ColorHandPose3DNetwork import ColorHandPose3DNetwork
from utils.general import detect_keypoints, trafo_coords, plot_hand, plot_hand_2d, plot_hand_3d
from pose.DeterminePositions import *
from pose.utils.FingerPoseEstimate import FingerPoseEstimate
# Variables to be used
# TODO: Check how to pass parameters through fl_image function. Remove global variables
image_tf = None
threshold = None
known_finger_poses = None
network_elements = None
output_txt_path = None
reqd_pose_name = None
def parse_args():
    """Build and evaluate the command-line interface for this script."""
    ap = argparse.ArgumentParser(description='Process frames in a video of a particular pose')
    ap.add_argument('video_path', type=str, help='Path of video')
    # This part needs improvement. Currently, pose_no is position_id present in FingerDataFormation.py
    ap.add_argument('pose_no', type=int, help='Pose to classify at')
    ap.add_argument('--output-path', dest='output_path', type=str, default=None,
                    help='Path of folder where to store the text output')
    ap.add_argument('--thresh', dest='threshold', type=float, default=0.45,
                    help='Threshold of confidence level(0-1)')
    return ap.parse_args()
def prepare_paths(video_path, output_txt_path):
    """Resolve absolute input/output locations and make sure they exist.

    Returns:
        (video_path, csv_path, output_video_path) as absolute paths; the csv
        file is created empty if missing so later append-mode writes succeed.
    """
    video_path = os.path.abspath(video_path)
    # Default the output folder to the directory containing the video.
    if output_txt_path is None:
        out_dir = os.path.split(video_path)[0]
    else:
        out_dir = os.path.abspath(output_txt_path)
        if not os.path.exists(out_dir):
            os.mkdir(out_dir)
    stem = os.path.basename(video_path).split('.')[0]
    output_video_path = os.path.join(out_dir, '{}_save.mp4'.format(stem))
    csv_path = os.path.join(out_dir, '{}.csv'.format(stem))
    # Touch the csv so the per-frame logger can always open it in append mode.
    if not os.path.exists(csv_path):
        open(csv_path, 'w').close()
    return video_path, csv_path, output_video_path
def prepare_network():
    """Build the ColorHandPose3D graph and return a live TF session plus tensors.

    Returns:
        (session, image placeholder, 3-D keypoint tensor, scale tensor,
        center tensor, keypoint score-map tensor)
    """
    # network input: a single 240x320 RGB frame
    img_in = tf.placeholder(tf.float32, shape=(1, 240, 320, 3))
    hand_side = tf.constant([[1.0, 1.0]])  # Both left and right hands included
    eval_flag = tf.placeholder_with_default(True, shape=())

    # build network
    net = ColorHandPose3DNetwork()
    (hand_scoremap, image_crop, scale, center,
     keypoints_scoremap, keypoint_coord3d) = net.inference(img_in, hand_side, eval_flag)

    # Start TF, capping GPU memory use at 80% of the device
    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.8)
    session = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))

    # initialize network weights
    net.init(session)
    return session, img_in, keypoint_coord3d, scale, center, keypoints_scoremap
def process_video_frame(video_frame):
    """Run hand-pose inference on one RGB frame and draw the results on it.

    NOTE(review): relies on the module globals `sess`, `image_tf` and
    `network_elements` being initialised by the __main__ block first.
    """
    # Drop any alpha channel and bring the frame to the network input size.
    frame = video_frame[:, :, :3]
    frame = scipy.misc.imresize(frame, (240, 320))
    image_v = np.expand_dims((frame.astype('float') / 255.0) - 0.5, 0)

    keypoint_coord3d_tf, scale_tf, center_tf, keypoints_scoremap_tf = network_elements
    keypoint_coord3d_v, scale_v, center_v, keypoints_scoremap_v = sess.run(
        [keypoint_coord3d_tf, scale_tf, center_tf, keypoints_scoremap_tf],
        feed_dict={image_tf: image_v})
    keypoints_scoremap_v = np.squeeze(keypoints_scoremap_v)
    keypoint_coord3d_v = np.squeeze(keypoint_coord3d_v)

    # post processing: score maps -> 2-D keypoints in frame coordinates
    coord_hw_crop = detect_keypoints(np.squeeze(keypoints_scoremap_v))
    coord_hw = trafo_coords(coord_hw_crop, center_v, scale_v, 256)
    plot_hand_2d(coord_hw, frame)

    label = process_keypoints(keypoint_coord3d_v)
    if label is not None:
        # Overlay the recognised pose name on the frame.
        cv2.putText(frame, label, (10, 200), cv2.FONT_HERSHEY_SIMPLEX,
                    1.0, (255, 0, 0), 2, cv2.LINE_AA)
    return frame
def process_keypoints(keypoint_coord3d_v):
    """Classify the hand pose from 3-D keypoints; log a CSV row on a match.

    Returns:
        The matched pose name, or None when no pose is detected, the best
        pose does not reach the threshold, or it is not the requested pose.
    """
    fpe = FingerPoseEstimate(keypoint_coord3d_v)
    fpe.calculate_positions_of_fingers(print_finger_info=False)
    obtained_positions = determine_position(fpe.finger_curled, fpe.finger_position,
                                            known_finger_poses, threshold)
    if not obtained_positions:
        return None
    # Pose with the highest confidence score.
    best_label = max(obtained_positions, key=obtained_positions.get)
    if obtained_positions[best_label] < threshold or best_label != reqd_pose_name:
        return None
    # Append the flattened keypoint coordinates to the output csv.
    flat_coords = [value for point in keypoint_coord3d_v for value in point]
    with open(output_txt_path, 'a') as fid:
        csv.writer(fid).writerow(flat_coords)
    return best_label
if __name__ == '__main__':
    args = parse_args()
    # NOTE(review): --thresh is documented as 0-1 on the CLI but is scaled
    # by 10 here before being compared against pose scores -- confirm
    # determine_position expects the 0-10 range.
    threshold = args.threshold * 10
    video_path, output_txt_path, output_video_path = prepare_paths(args.video_path, args.output_path)
    known_finger_poses = create_known_finger_poses()
    reqd_pose_name = get_position_name_with_pose_id(args.pose_no, known_finger_poses)
    # Build the TF graph/session once; the tensors are shared with the
    # per-frame callback through the module-level globals declared above.
    sess, image_tf, keypoint_coord3d_tf, scale_tf, center_tf, keypoints_scoremap_tf = prepare_network()
    network_elements = [keypoint_coord3d_tf, scale_tf, center_tf, keypoints_scoremap_tf]
    video_clip = VideoFileClip(video_path)
    white_clip = video_clip.fl_image(process_video_frame) #NOTE: this function expects color images!!
    white_clip.write_videofile(output_video_path, audio=False)
| [
"os.mkdir",
"argparse.ArgumentParser",
"tensorflow.ConfigProto",
"tensorflow.GPUOptions",
"os.path.abspath",
"moviepy.editor.VideoFileClip",
"tensorflow.placeholder_with_default",
"os.path.exists",
"tensorflow.placeholder",
"operator.itemgetter",
"csv.writer",
"os.path.basename",
"tensorflow... | [((751, 841), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process frames in a video of a particular pose"""'}), "(description=\n 'Process frames in a video of a particular pose')\n", (774, 841), False, 'import argparse\n'), ((1487, 1514), 'os.path.abspath', 'os.path.abspath', (['video_path'], {}), '(video_path)\n', (1502, 1514), False, 'import os\n'), ((2129, 2179), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '(1, 240, 320, 3)'}), '(tf.float32, shape=(1, 240, 320, 3))\n', (2143, 2179), True, 'import tensorflow as tf\n'), ((2198, 2223), 'tensorflow.constant', 'tf.constant', (['[[1.0, 1.0]]'], {}), '([[1.0, 1.0]])\n', (2209, 2223), True, 'import tensorflow as tf\n'), ((2276, 2319), 'tensorflow.placeholder_with_default', 'tf.placeholder_with_default', (['(True)'], {'shape': '()'}), '(True, shape=())\n', (2303, 2319), True, 'import tensorflow as tf\n'), ((2347, 2371), 'nets.ColorHandPose3DNetwork.ColorHandPose3DNetwork', 'ColorHandPose3DNetwork', ([], {}), '()\n', (2369, 2371), False, 'from nets.ColorHandPose3DNetwork import ColorHandPose3DNetwork\n'), ((2553, 2603), 'tensorflow.GPUOptions', 'tf.GPUOptions', ([], {'per_process_gpu_memory_fraction': '(0.8)'}), '(per_process_gpu_memory_fraction=0.8)\n', (2566, 2603), True, 'import tensorflow as tf\n'), ((3293, 3325), 'numpy.squeeze', 'np.squeeze', (['keypoints_scoremap_v'], {}), '(keypoints_scoremap_v)\n', (3303, 3325), True, 'import numpy as np\n'), ((3348, 3378), 'numpy.squeeze', 'np.squeeze', (['keypoint_coord3d_v'], {}), '(keypoint_coord3d_v)\n', (3358, 3378), True, 'import numpy as np\n'), ((3479, 3530), 'utils.general.trafo_coords', 'trafo_coords', (['coord_hw_crop', 'center_v', 'scale_v', '(256)'], {}), '(coord_hw_crop, center_v, scale_v, 256)\n', (3491, 3530), False, 'from utils.general import detect_keypoints, trafo_coords, plot_hand, plot_hand_2d, plot_hand_3d\n'), ((3533, 3568), 'utils.general.plot_hand_2d', 'plot_hand_2d', 
(['coord_hw', 'video_frame'], {}), '(coord_hw, video_frame)\n', (3545, 3568), False, 'from utils.general import detect_keypoints, trafo_coords, plot_hand, plot_hand_2d, plot_hand_3d\n'), ((3866, 3904), 'pose.utils.FingerPoseEstimate.FingerPoseEstimate', 'FingerPoseEstimate', (['keypoint_coord3d_v'], {}), '(keypoint_coord3d_v)\n', (3884, 3904), False, 'from pose.utils.FingerPoseEstimate import FingerPoseEstimate\n'), ((5144, 5169), 'moviepy.editor.VideoFileClip', 'VideoFileClip', (['video_path'], {}), '(video_path)\n', (5157, 5169), False, 'from moviepy.editor import VideoFileClip\n'), ((1621, 1653), 'os.path.abspath', 'os.path.abspath', (['output_txt_path'], {}), '(output_txt_path)\n', (1636, 1653), False, 'import os\n'), ((1951, 1982), 'os.path.exists', 'os.path.exists', (['output_txt_path'], {}), '(output_txt_path)\n', (1965, 1982), False, 'import os\n'), ((3433, 3465), 'numpy.squeeze', 'np.squeeze', (['keypoints_scoremap_v'], {}), '(keypoints_scoremap_v)\n', (3443, 3465), True, 'import numpy as np\n'), ((3688, 3780), 'cv2.putText', 'cv2.putText', (['video_frame', 'score_label', '(10, 200)', 'font', '(1.0)', '(255, 0, 0)', '(2)', 'cv2.LINE_AA'], {}), '(video_frame, score_label, (10, 200), font, 1.0, (255, 0, 0), 2,\n cv2.LINE_AA)\n', (3699, 3780), False, 'import cv2\n'), ((1565, 1590), 'os.path.split', 'os.path.split', (['video_path'], {}), '(video_path)\n', (1578, 1590), False, 'import os\n'), ((1663, 1694), 'os.path.exists', 'os.path.exists', (['output_txt_path'], {}), '(output_txt_path)\n', (1677, 1694), False, 'import os\n'), ((1699, 1724), 'os.mkdir', 'os.mkdir', (['output_txt_path'], {}), '(output_txt_path)\n', (1707, 1724), False, 'import os\n'), ((2630, 2669), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {'gpu_options': 'gpu_options'}), '(gpu_options=gpu_options)\n', (2644, 2669), True, 'import tensorflow as tf\n'), ((1739, 1767), 'os.path.basename', 'os.path.basename', (['video_path'], {}), '(video_path)\n', (1755, 1767), False, 'import os\n'), 
((4545, 4560), 'csv.writer', 'csv.writer', (['fid'], {}), '(fid)\n', (4555, 4560), False, 'import csv\n'), ((4255, 4277), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (4274, 4277), False, 'import operator\n')] |
import pymc as pm
import numpy as np
from numpy.linalg import inv
import numpy.random as rand
import matplotlib.pyplot as plt
from pandas.util.testing import set_trace as st
from gpustats import pdfs
# Generate MV normal mixture
gen_mean = {
0: [0, 5],
1: [-10, 0],
2: [-10, 10]
}
gen_sd = {
0: [0.5, 0.5],
1: [.5, 1],
2: [1, .25]
}
gen_corr = {
0: 0.5,
1: -0.5,
2: 0
}
group_weights = [0.6, 0.3, 0.1]
def generate_data(n=1e5, k=2, ncomps=3, seed=1):
    """Draw samples from a mixture of `ncomps` k-dimensional Gaussians.

    Component means/scales/correlations come from the module-level gen_*
    dicts; component sizes from group_weights.

    Returns:
        (labels, data): labels[i] is the component index of row data[i].
    """
    rand.seed(seed)
    samples = []
    labels = []
    for comp in range(ncomps):
        # Covariance: constant correlation off-diagonal, unit diagonal,
        # then scaled by the per-dimension standard deviations.
        cov = np.empty((k, k))
        cov.fill(gen_corr[comp])
        cov[np.diag_indices(k)] = 1
        cov *= np.outer(gen_sd[comp], gen_sd[comp])
        n_comp = int(n * group_weights[comp])
        samples.append(pm.rmv_normal_cov(gen_mean[comp], cov, size=n_comp))
        labels.append(np.repeat(comp, n_comp))
    return np.concatenate(labels), np.concatenate(samples, axis=0)
N = int(1e5) # total number of samples (split across components by group_weights)
K = 2 # dimensionality of each sample
ncomps = 3 # n mixture components
true_labels, data = generate_data(n=N, k=K, ncomps=ncomps)
def plot_2d_mixture(data, labels):
    """Scatter-plot 2-D points coloured by their mixture-component label."""
    plt.figure(figsize=(10, 10))
    palette = 'bgr'
    for comp in np.unique(labels):
        xs, ys = data[labels == comp].T
        plt.plot(xs, ys, '%s.' % palette[comp], ms=2)
def plot_thetas(sampler):
    """Overlay sampled component means (theta traces) on the data scatter."""
    plot_2d_mixture(data, true_labels)
    for comp in range(3):
        xs, ys = sampler.trace('theta_%d' % comp)[:].T
        plt.plot(xs, ys, 'k.')
# set up PyMC model
# priors, fairly vague
prior_mean = data.mean(0)
sigma0 = np.diag([1., 1.])  # NOTE(review): not used below -- kept for reference
prior_cov = np.cov(data.T)

thetas = []
taus = []
for j in range(ncomps):
    # One Wishart precision prior and one MvNormal mean prior per component.
    # need a hyper-parameter for degrees of freedom?
    tau = pm.Wishart('C_%d' % j, n=3, Tau=inv(prior_cov))
    theta = pm.MvNormal('theta_%d' % j, mu=prior_mean, tau=inv(2 * prior_cov))
    thetas.append(theta)
    taus.append(tau)

# Symmetric Dirichlet prior over the mixture weights.
# Fix: np.ones() takes an integer shape; the original float literal
# np.ones(3.) raises TypeError on modern NumPy.
alpha0 = np.ones(3) / 3
weights = pm.Dirichlet('weights', theta=alpha0)

@pm.deterministic
def adj_weights(weights=weights):
    # PyMC's Dirichlet carries k-1 free weights; append the implicit last one.
    return np.sort(np.r_[weights, 1 - weights.sum()])

sampler = pm.MCMC(locals())
sampler.sample(iter=3000, burn=100, tune_interval=100, thin=10)
| [
"numpy.outer",
"numpy.random.seed",
"pymc.rmv_normal_cov",
"numpy.concatenate",
"matplotlib.pyplot.plot",
"numpy.empty",
"numpy.ones",
"numpy.diag_indices",
"pymc.Dirichlet",
"matplotlib.pyplot.figure",
"numpy.linalg.inv",
"numpy.diag",
"numpy.cov",
"numpy.unique",
"numpy.repeat"
] | [((1718, 1737), 'numpy.diag', 'np.diag', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (1725, 1737), True, 'import numpy as np\n'), ((1748, 1762), 'numpy.cov', 'np.cov', (['data.T'], {}), '(data.T)\n', (1754, 1762), True, 'import numpy as np\n'), ((2083, 2120), 'pymc.Dirichlet', 'pm.Dirichlet', (['"""weights"""'], {'theta': 'alpha0'}), "('weights', theta=alpha0)\n", (2095, 2120), True, 'import pymc as pm\n'), ((496, 511), 'numpy.random.seed', 'rand.seed', (['seed'], {}), '(seed)\n', (505, 511), True, 'import numpy.random as rand\n'), ((1258, 1286), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (1268, 1286), True, 'import matplotlib.pyplot as plt\n'), ((1319, 1336), 'numpy.unique', 'np.unique', (['labels'], {}), '(labels)\n', (1328, 1336), True, 'import numpy as np\n'), ((2057, 2069), 'numpy.ones', 'np.ones', (['(3.0)'], {}), '(3.0)\n', (2064, 2069), True, 'import numpy as np\n'), ((677, 693), 'numpy.empty', 'np.empty', (['(k, k)'], {}), '((k, k))\n', (685, 693), True, 'import numpy as np\n'), ((768, 784), 'numpy.outer', 'np.outer', (['sd', 'sd'], {}), '(sd, sd)\n', (776, 784), True, 'import numpy as np\n'), ((840, 878), 'pymc.rmv_normal_cov', 'pm.rmv_normal_cov', (['mean', 'cov'], {'size': 'num'}), '(mean, cov, size=num)\n', (857, 878), True, 'import pymc as pm\n'), ((973, 1002), 'numpy.concatenate', 'np.concatenate', (['labels_concat'], {}), '(labels_concat)\n', (987, 1002), True, 'import numpy as np\n'), ((1016, 1051), 'numpy.concatenate', 'np.concatenate', (['data_concat'], {'axis': '(0)'}), '(data_concat, axis=0)\n', (1030, 1051), True, 'import numpy as np\n'), ((1381, 1420), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y', "('%s.' % colors[j])"], {'ms': '(2)'}), "(x, y, '%s.' 
% colors[j], ms=2)\n", (1389, 1420), True, 'import matplotlib.pyplot as plt\n'), ((1570, 1590), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y', '"""k."""'], {}), "(x, y, 'k.')\n", (1578, 1590), True, 'import matplotlib.pyplot as plt\n'), ((729, 747), 'numpy.diag_indices', 'np.diag_indices', (['k'], {}), '(k)\n', (744, 747), True, 'import numpy as np\n'), ((941, 958), 'numpy.repeat', 'np.repeat', (['j', 'num'], {}), '(j, num)\n', (950, 958), True, 'import numpy as np\n'), ((1905, 1919), 'numpy.linalg.inv', 'inv', (['prior_cov'], {}), '(prior_cov)\n', (1908, 1919), False, 'from numpy.linalg import inv\n'), ((1980, 1998), 'numpy.linalg.inv', 'inv', (['(2 * prior_cov)'], {}), '(2 * prior_cov)\n', (1983, 1998), False, 'from numpy.linalg import inv\n')] |
import pickle
import numpy as np
def read_jpg(jpg_path, plt):
    """Load a jpg file via the supplied pyplot module.

    Dependency : matplotlib.pyplot as plt
    Args:
        jpg_path - string, path ending with jpg
        plt - plt object
    Return:
        numpy 3D image
    """
    image = plt.imread(jpg_path)
    return image
def read_pkl(path, encoding='ASCII'):
    """Load and return the object stored in a pickle file.

    Dependency : pickle
    Args:
        path - string, ends with pkl
        encoding - passed through to pickle.load
    Return:
        pickle content
    """
    print("Pickle is read from %s"%path)
    with open(path, 'rb') as handle:
        content = pickle.load(handle, encoding=encoding)
    return content
def read_txt(path):
    """Read a text file and return its lines (trailing newlines kept).

    Args:
        path - string, ends with txt
    Return:
        txt_content - list, line by line
    """
    print("Txt is read from %s"%path)
    with open(path, 'r') as handle:
        return list(handle)
def read_npy(path):
    """Load and return the array stored in a .npy file.

    Args:
        path - string, ends with npy
    Return:
        npy_content in path
    """
    print("Npy is read from %s"%path)
    return np.load(path)
| [
"numpy.load",
"pickle.load"
] | [((1160, 1173), 'numpy.load', 'np.load', (['path'], {}), '(path)\n', (1167, 1173), True, 'import numpy as np\n'), ((568, 601), 'pickle.load', 'pickle.load', (['f'], {'encoding': 'encoding'}), '(f, encoding=encoding)\n', (579, 601), False, 'import pickle\n')] |
from bs4 import BeautifulSoup
import requests
import numpy as np
def get_all_links(url):
    """Print every absolute (http/https) link found on the page at `url`."""
    response = requests.get(url)
    soup = BeautifulSoup(response.content, features="html.parser")
    for link in soup.find_all('a', href=True):
        href = link['href']
        # Plain str.startswith replaces the original's pointless wrapping of
        # a single string in a 0-d numpy array; printed output is unchanged.
        if href.startswith('http'):
            print(href)
def get_images_count(url):
    """Print each image's alt text and the total number of <img> tags."""
    response = requests.get(url)
    soup = BeautifulSoup(response.content, features="html.parser")
    # Print images alt
    alts = [img['alt'] for img in soup.find_all('img', alt=True)]
    print("\n".join(alts))
    print(f'Images count: {len(soup.find_all("img"))}')
# Demo: run both scrapers against the BeautifulSoup documentation page.
# NOTE(review): these fire on import; consider a __main__ guard.
get_images_count('https://www.crummy.com/software/BeautifulSoup/bs4/doc.ru/bs4ru.html')
get_all_links('https://www.crummy.com/software/BeautifulSoup/bs4/doc.ru/bs4ru.html')
| [
"bs4.BeautifulSoup",
"numpy.array",
"requests.get",
"numpy.char.startswith"
] | [((106, 123), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (118, 123), False, 'import requests\n'), ((135, 190), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (148, 190), False, 'from bs4 import BeautifulSoup\n'), ((426, 443), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (438, 443), False, 'import requests\n'), ((455, 510), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (468, 510), False, 'from bs4 import BeautifulSoup\n'), ((259, 281), 'numpy.array', 'np.array', (["link['href']"], {}), "(link['href'])\n", (267, 281), True, 'import numpy as np\n'), ((293, 350), 'numpy.char.startswith', 'np.char.startswith', (['href_array', '"""http"""'], {'start': '(0)', 'end': 'None'}), "(href_array, 'http', start=0, end=None)\n", (311, 350), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'newGUI.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
import sys
import threading
import time
import cv2
import numpy
import numpy as np
from PIL import Image, ImageDraw, ImageFont
from PIL.ImageQt import ImageQt
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import pyqtSignal, QDateTime
from PyQt5.QtGui import QImage, QPixmap
from PyQt5.QtWidgets import QApplication, QMainWindow, QFileDialog
from yolo import YOLO
def cv2ImgAddText(img, text, left, top):  # draw CJK-capable text on a video frame
    """Draw `text` on a BGR frame via PIL (supports CJK glyphs) and return it.

    The text is drawn in red, 20px above `top`, using the simsun font.
    """
    pil_img = Image.fromarray(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
    pen = ImageDraw.Draw(pil_img)
    pen.text((left, top - 20), text,
             font=ImageFont.truetype("font/simsun.ttc", 20, encoding='utf-8'),
             fill=(255, 0, 0))
    return cv2.cvtColor(np.asarray(pil_img), cv2.COLOR_RGB2BGR)
class Ui_MainWindow(object):
    def setupUi(self, MainWindow):
        """Build all widgets of the main window (generated from newGUI.ui).

        Creates the price text box, the picture display label, the Stop /
        recognise-picture / real-time / Time buttons and the time text box,
        then wires the menu bar and status bar onto MainWindow.
        """
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(800, 600)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Text box that displays the computed price
        self.priceText = QtWidgets.QTextEdit(self.centralwidget)
        self.priceText.setGeometry(QtCore.QRect(340, 390, 231, 111))
        self.priceText.setObjectName("priceText")
        # Label used as the image/video display area
        self.Photelabel = QtWidgets.QLabel(self.centralwidget)
        self.Photelabel.setGeometry(QtCore.QRect(340, 70, 381, 291))
        self.Photelabel.setStyleSheet("background-color: rgb(244, 247, 255);")
        self.Photelabel.setText("")
        self.Photelabel.setObjectName("ShowPicArea")
        self.stopLabel = QtWidgets.QPushButton(self.centralwidget)
        self.stopLabel.setGeometry(QtCore.QRect(610, 390, 111, 111))
        self.stopLabel.setObjectName("stopLabel")
        self.pictureButton = QtWidgets.QPushButton(self.centralwidget)
        self.pictureButton.setGeometry(QtCore.QRect(100, 140, 141, 61))
        self.pictureButton.setObjectName("pictureButton")
        self.realTimeButton = QtWidgets.QPushButton(self.centralwidget)
        self.realTimeButton.setGeometry(QtCore.QRect(100, 230, 141, 61))
        self.realTimeButton.setObjectName("realTimeButton")
        self.getTime = QtWidgets.QPushButton(self.centralwidget)
        self.getTime.setGeometry(QtCore.QRect(570, 10, 51, 31))
        self.getTime.setObjectName("getTime")
        # Text box that displays the current time (filled by updateTimeUI)
        self.textEdit = QtWidgets.QTextEdit(self.centralwidget)
        self.textEdit.setGeometry(QtCore.QRect(630, 10, 151, 33))
        self.textEdit.setObjectName("textEdit")
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 26))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        # Apply translations and connect button signals
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Set translated widget texts and connect button click handlers.

        NOTE(review): self.cutscreen is connected below but is not defined
        in the visible part of this class -- confirm it exists.
        """
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.stopLabel.setText(_translate("MainWindow", "Stop"))
        self.pictureButton.setText(_translate("MainWindow", "识别图片"))
        self.realTimeButton.setText(_translate("MainWindow", "实时识别"))
        self.getTime.setText(_translate("MainWindow", "Time"))
        # Connect each button to its handler
        self.getTime.clicked.connect(self.updateTime)
        self.realTimeButton.clicked.connect(self.updateFrame)
        self.pictureButton.clicked.connect(self.showPicture)
        self.stopLabel.clicked.connect(self.cutscreen)
def showPicture(self):
print("加载网络模型")
yolo = YOLO()
print("实例化Yolo完成,打开图片--")
path, _ = QFileDialog.getOpenFileName(self, '选择图片', 'D:\Python\kears-yolov3-dev\OpenCVtest', 'Image files(*.jpg *.gif *.png)')
img=Image.open(path)
r_image = yolo.detect_image(img) # r_image 为 PIL 图片数据格式
qim = ImageQt(r_image) # PIL -> Pixmap 格式转换
pix = QtGui.QPixmap.fromImage(qim)
self.Photelabel.setPixmap(pix) # 图像更新到UI上
self.Photelabel.setScaledContents(True)
    # --- clock widget helpers ---------------------------------------------
    def updateTime(self):
        """Start the background thread that streams the current time.

        NOTE(review): BackendThread is not defined in the visible part of
        this file -- presumably a QThread subclass defined elsewhere; confirm.
        """
        self.backend = BackendThread()
        self.backend.update_time.connect(self.updateTimeUI)  # signal -> main-thread UI update
        self.backend.start()
    def updateTimeUI(self,data):
        """Slot: write the emitted time string into the time text box."""
        self.textEdit.setText(data)
    # --- real-time recognition helpers ------------------------------------
    def updateFrame(self):
        """Start the real-time recognition thread and wire its signals."""
        # th = threading.Thread(target=self.RealTimeThread) # old approach: raw video thread
        # th.start()
        self.updatePrice = UpdatePrice()
        self.updatePrice.update_price.connect(self.updatePriceUI)  # signal -> price text update
        self.updatePrice.update_picture.connect(self.updatePictureUI)  # signal -> frame update
        self.updatePrice.start()
    def updatePriceUI(self,data):
        """Slot: show the emitted price text in the UI."""
        self.priceText.setText(data)
    def updatePictureUI(self,img):
        """Slot: display a QImage frame on the picture label, scaled to fit."""
        self.Photelabel.setPixmap(QPixmap.fromImage(img))  # push the frame to the UI
        self.Photelabel.setScaledContents(True)
    def RealTimeThread(self):
        """Legacy in-process recognition loop (superseded by the UpdatePrice
        worker thread — see updateFrame): grabs webcam frames, runs YOLO
        detection, and paints annotated frames onto the photo label forever.

        NOTE(review): this loops forever on the calling thread, so calling it
        from the GUI thread would freeze the UI — presumably why it was
        replaced by the QThread-based worker.
        """
        # Load Yolo
        net = cv2.dnn.readNet("yolov3.weights", "yolov3.cfg")
        classes = []
        with open("coco.names", "r") as f:
            classes = [line.strip() for line in f.readlines()]
        layer_names = net.getLayerNames()
        output_layers = [layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()]
        colors = np.random.uniform(0, 255, size=(len(classes), 3))
        # Initialize frame rate calculation
        frame_rate_calc = 1
        freq = cv2.getTickFrequency()
        cap = cv2.VideoCapture(0)  # open the default webcam
        # Main capture / detect / draw loop
        while True:
            # # Start timer (for calculating frame rate)
            # t1 = cv2.getTickCount()
            ret, frame = cap.read()
            height, width, channels = frame.shape
            # Detecting objects
            blob = cv2.dnn.blobFromImage(frame, 0.00392, (416, 416), (0, 0, 0), True, crop=False)
            net.setInput(blob)
            outs = net.forward(output_layers)
            # Showing informations on the screen
            class_ids = []
            confidences = []
            boxes = []
            for out in outs:
                for detection in out:
                    scores = detection[5:]
                    class_id = np.argmax(scores)
                    confidence = scores[class_id]
                    if confidence > 0.4:
                        # Object detected
                        center_x = int(detection[0] * width)
                        center_y = int(detection[1] * height)
                        w = int(detection[2] * width)
                        h = int(detection[3] * height)
                        # Rectangle coordinates
                        x = int(center_x - w / 2)
                        y = int(center_y - h / 2)
                        boxes.append([x, y, w, h])
                        confidences.append(float(confidence))
                        class_ids.append(class_id)
            indexes = cv2.dnn.NMSBoxes(boxes, confidences, 0.5, 0.4)
            price = 0
            font = cv2.FONT_HERSHEY_SIMPLEX
            for i in range(len(boxes)):
                if i in indexes:
                    x, y, w, h = boxes[i]
                    label = str(classes[class_ids[i]])
                    color = colors[i]
                    cv2.rectangle(frame, (x, y), (x + w, y + h), color, 1)
                    frame = cv2ImgAddText(frame, label, x, y)
                    # price = price + sumPrice(label)
            print('total price is ' + str(price))
            frame = cv2ImgAddText(frame, '总价为: ' + str(price), 15, 20)
            frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
            img = QImage(frame.data, width, height, QImage.Format_RGB888)
            self.Photelabel.setPixmap(QPixmap.fromImage(img))  # push the frame onto the UI
            self.Photelabel.setScaledContents(True)
#控件截图函数
def cutscreen(self):
print("截图Qlabel")
screen = QApplication.primaryScreen()
pix = screen.grabWindow(self.Photelabel.winId())
pix.save("test.jpg")
def sumPrice(label):
    """Return the unit price for a detected dish label.

    Parameters:
        label (str): class name emitted by the detector.

    Returns:
        int: the price of the dish, or 0 for an unknown label
        (same fall-through behavior as the original if/elif chain).
    """
    # Price table for the dishes the detector can recognize.
    prices = {
        '花卷': 2,
        '煎蛋': 2,
        '烧鸡': 15,
        '鱼': 10,
        '粽子': 5,
    }
    return prices.get(label, 0)
class UpdatePrice(QtCore.QThread):
    """Worker thread that performs live YOLO recognition on the webcam
    stream and publishes annotated frames plus the running total price."""
    # Signals are declared as class attributes, per Qt convention.
    update_price = pyqtSignal(str)  # carries the formatted total-price text
    update_picture = pyqtSignal(QImage)  # carries the annotated frame
    def __init__(self):
        super(UpdatePrice, self).__init__()
        self.flag = 1  # intended loop-continue flag; NOTE(review): run() never checks it
    def run(self):
        """Thread body: capture frames, detect dishes, total their prices,
        and emit the annotated frame and price text back to the UI thread."""
        print("启动实时识别的线程")
        # Load Yolo
        net = cv2.dnn.readNet("yolov3.weights", "yolov3.cfg")
        classes = []
        with open("coco.names", "r") as f:
            classes = [line.strip() for line in f.readlines()]
        layer_names = net.getLayerNames()
        output_layers = [layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()]
        colors = np.random.uniform(0, 255, size=(len(classes), 3))
        # Initialize frame rate calculation
        frame_rate_calc = 1
        freq = cv2.getTickFrequency()
        cap = cv2.VideoCapture(0)  # open the default webcam
        print("实时识别的线程加载完毕")
        while True:
            print("正在识别")
            ret, frame = cap.read()
            height, width, channels = frame.shape
            # Detecting objects
            blob = cv2.dnn.blobFromImage(frame, 0.00392, (416, 416), (0, 0, 0), True, crop=False)
            net.setInput(blob)
            outs = net.forward(output_layers)
            # Showing informations on the screen
            class_ids = []
            confidences = []
            boxes = []
            for out in outs:
                for detection in out:
                    scores = detection[5:]
                    class_id = np.argmax(scores)
                    confidence = scores[class_id]
                    if confidence > 0.4:
                        # Object detected
                        center_x = int(detection[0] * width)
                        center_y = int(detection[1] * height)
                        w = int(detection[2] * width)
                        h = int(detection[3] * height)
                        # Rectangle coordinates
                        x = int(center_x - w / 2)
                        y = int(center_y - h / 2)
                        boxes.append([x, y, w, h])
                        confidences.append(float(confidence))
                        class_ids.append(class_id)
            indexes = cv2.dnn.NMSBoxes(boxes, confidences, 0.5, 0.4)
            price = 0
            font = cv2.FONT_HERSHEY_SIMPLEX
            for i in range(len(boxes)):
                if i in indexes:
                    x, y, w, h = boxes[i]
                    label = str(classes[class_ids[i]])
                    color = colors[i]
                    cv2.rectangle(frame, (x, y), (x + w, y + h), color, 1)
                    frame = cv2ImgAddText(frame, label, x, y)
                    price = price + sumPrice(label)
            print('total price is ' + str(price))
            frame = cv2ImgAddText(frame, '总价为: ' + str(price), 15, 20)
            frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
            img = QImage(frame.data, width, height, QImage.Format_RGB888)
            self.update_picture.emit(img)  # hand the annotated frame to the UI thread
            self.update_price.emit("总价为: "+str(price))
class MyWindow(QMainWindow, Ui_MainWindow):
    """Main application window: a QMainWindow wired up with the generated UI."""
    def __init__(self, parent=None):
        super(MyWindow, self).__init__(parent)
        self.setupUi(self)  # build widgets and connect signal handlers
class BackendThread(QtCore.QThread):
    """Worker thread that emits the current wall-clock time once per second."""

    update_time = pyqtSignal(str)  # carries the formatted time string to the UI

    def run(self):
        """Thread body: publish the current time via update_time forever."""
        print("启动 显示当前时间 的线程")
        while True:
            now = QDateTime.currentDateTime().toString("hh:mm:ss")
            self.update_time.emit(str(now))
            time.sleep(1)
if __name__ == '__main__':
    # Script entry point: create the Qt application and show the main
    # window; the sys.exit(app.exec_()) call below enters the event loop.
    app = QApplication(sys.argv)
    myWin = MyWindow()
    myWin.show()
sys.exit(app.exec_()) | [
"PyQt5.QtCore.pyqtSignal",
"PyQt5.QtWidgets.QApplication.primaryScreen",
"cv2.dnn.NMSBoxes",
"numpy.argmax",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtWidgets.QFileDialog.getOpenFileName",
"cv2.rectangle",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QMenuBar",
"PyQt5.QtWidgets.QLabel",
"PyQt5... | [((703, 722), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['img'], {}), '(img)\n', (717, 722), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((767, 826), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['"""font/simsun.ttc"""', '(20)'], {'encoding': '"""utf-8"""'}), "('font/simsun.ttc', 20, encoding='utf-8')\n", (785, 826), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((9294, 9309), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['str'], {}), '(str)\n', (9304, 9309), False, 'from PyQt5.QtCore import pyqtSignal, QDateTime\n'), ((9379, 9397), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['QImage'], {}), '(QImage)\n', (9389, 9397), False, 'from PyQt5.QtCore import pyqtSignal, QDateTime\n'), ((12789, 12804), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['str'], {}), '(str)\n', (12799, 12804), False, 'from PyQt5.QtCore import pyqtSignal, QDateTime\n'), ((13209, 13231), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (13221, 13231), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QFileDialog\n'), ((654, 690), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2RGB'], {}), '(img, cv2.COLOR_BGR2RGB)\n', (666, 690), False, 'import cv2\n'), ((921, 936), 'numpy.asarray', 'np.asarray', (['img'], {}), '(img)\n', (931, 936), True, 'import numpy as np\n'), ((1135, 1164), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (1152, 1164), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1248, 1287), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1267, 1287), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1433, 1469), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1449, 1469), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1732, 1773), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), 
'(self.centralwidget)\n', (1753, 1773), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1922, 1963), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1943, 1963), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2124, 2165), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2145, 2165), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2322, 2363), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2343, 2363), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2498, 2537), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2517, 2537), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2731, 2761), 'PyQt5.QtWidgets.QMenuBar', 'QtWidgets.QMenuBar', (['MainWindow'], {}), '(MainWindow)\n', (2749, 2761), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2939, 2971), 'PyQt5.QtWidgets.QStatusBar', 'QtWidgets.QStatusBar', (['MainWindow'], {}), '(MainWindow)\n', (2959, 2971), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3118, 3167), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (3155, 3167), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3916, 3922), 'yolo.YOLO', 'YOLO', ([], {}), '()\n', (3920, 3922), False, 'from yolo import YOLO\n'), ((3975, 4102), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""选择图片"""', '"""D:\\\\Python\\\\kears-yolov3-dev\\\\OpenCVtest"""', '"""Image files(*.jpg *.gif *.png)"""'], {}), "(self, '选择图片',\n 'D:\\\\Python\\\\kears-yolov3-dev\\\\OpenCVtest',\n 'Image files(*.jpg *.gif *.png)')\n", (4002, 4102), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QFileDialog\n'), ((4105, 4121), 
'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (4115, 4121), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((4232, 4248), 'PIL.ImageQt.ImageQt', 'ImageQt', (['r_image'], {}), '(r_image)\n', (4239, 4248), False, 'from PIL.ImageQt import ImageQt\n'), ((4330, 4358), 'PyQt5.QtGui.QPixmap.fromImage', 'QtGui.QPixmap.fromImage', (['qim'], {}), '(qim)\n', (4353, 4358), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5754, 5801), 'cv2.dnn.readNet', 'cv2.dnn.readNet', (['"""yolov3.weights"""', '"""yolov3.cfg"""'], {}), "('yolov3.weights', 'yolov3.cfg')\n", (5769, 5801), False, 'import cv2\n'), ((6213, 6235), 'cv2.getTickFrequency', 'cv2.getTickFrequency', ([], {}), '()\n', (6233, 6235), False, 'import cv2\n'), ((6250, 6269), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (6266, 6269), False, 'import cv2\n'), ((8773, 8801), 'PyQt5.QtWidgets.QApplication.primaryScreen', 'QApplication.primaryScreen', ([], {}), '()\n', (8799, 8801), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QFileDialog\n'), ((9723, 9770), 'cv2.dnn.readNet', 'cv2.dnn.readNet', (['"""yolov3.weights"""', '"""yolov3.cfg"""'], {}), "('yolov3.weights', 'yolov3.cfg')\n", (9738, 9770), False, 'import cv2\n'), ((10182, 10204), 'cv2.getTickFrequency', 'cv2.getTickFrequency', ([], {}), '()\n', (10202, 10204), False, 'import cv2\n'), ((10219, 10238), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (10235, 10238), False, 'import cv2\n'), ((1323, 1355), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(340)', '(390)', '(231)', '(111)'], {}), '(340, 390, 231, 111)\n', (1335, 1355), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1506, 1537), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(340)', '(70)', '(381)', '(291)'], {}), '(340, 70, 381, 291)\n', (1518, 1537), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1809, 1841), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(610)', '(390)', '(111)', '(111)'], {}), '(610, 390, 111, 111)\n', 
(1821, 1841), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2003, 2034), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(100)', '(140)', '(141)', '(61)'], {}), '(100, 140, 141, 61)\n', (2015, 2034), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2206, 2237), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(100)', '(230)', '(141)', '(61)'], {}), '(100, 230, 141, 61)\n', (2218, 2237), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2397, 2426), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(570)', '(10)', '(51)', '(31)'], {}), '(570, 10, 51, 31)\n', (2409, 2426), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2572, 2602), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(630)', '(10)', '(151)', '(33)'], {}), '(630, 10, 151, 33)\n', (2584, 2602), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2795, 2822), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(800)', '(26)'], {}), '(0, 0, 800, 26)\n', (2807, 2822), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5563, 5585), 'PyQt5.QtGui.QPixmap.fromImage', 'QPixmap.fromImage', (['img'], {}), '(img)\n', (5580, 5585), False, 'from PyQt5.QtGui import QImage, QPixmap\n'), ((6655, 6733), 'cv2.dnn.blobFromImage', 'cv2.dnn.blobFromImage', (['frame', '(0.00392)', '(416, 416)', '(0, 0, 0)', '(True)'], {'crop': '(False)'}), '(frame, 0.00392, (416, 416), (0, 0, 0), True, crop=False)\n', (6676, 6733), False, 'import cv2\n'), ((7802, 7848), 'cv2.dnn.NMSBoxes', 'cv2.dnn.NMSBoxes', (['boxes', 'confidences', '(0.5)', '(0.4)'], {}), '(boxes, confidences, 0.5, 0.4)\n', (7818, 7848), False, 'import cv2\n'), ((8457, 8495), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_RGB2BGR'], {}), '(frame, cv2.COLOR_RGB2BGR)\n', (8469, 8495), False, 'import cv2\n'), ((8514, 8569), 'PyQt5.QtGui.QImage', 'QImage', (['frame.data', 'width', 'height', 'QImage.Format_RGB888'], {}), '(frame.data, width, height, QImage.Format_RGB888)\n', (8520, 8569), False, 'from PyQt5.QtGui import QImage, QPixmap\n'), ((10462, 
10540), 'cv2.dnn.blobFromImage', 'cv2.dnn.blobFromImage', (['frame', '(0.00392)', '(416, 416)', '(0, 0, 0)', '(True)'], {'crop': '(False)'}), '(frame, 0.00392, (416, 416), (0, 0, 0), True, crop=False)\n', (10483, 10540), False, 'import cv2\n'), ((11609, 11655), 'cv2.dnn.NMSBoxes', 'cv2.dnn.NMSBoxes', (['boxes', 'confidences', '(0.5)', '(0.4)'], {}), '(boxes, confidences, 0.5, 0.4)\n', (11625, 11655), False, 'import cv2\n'), ((12263, 12301), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_RGB2BGR'], {}), '(frame, cv2.COLOR_RGB2BGR)\n', (12275, 12301), False, 'import cv2\n'), ((12320, 12375), 'PyQt5.QtGui.QImage', 'QImage', (['frame.data', 'width', 'height', 'QImage.Format_RGB888'], {}), '(frame.data, width, height, QImage.Format_RGB888)\n', (12326, 12375), False, 'from PyQt5.QtGui import QImage, QPixmap\n'), ((13013, 13040), 'PyQt5.QtCore.QDateTime.currentDateTime', 'QDateTime.currentDateTime', ([], {}), '()\n', (13038, 13040), False, 'from PyQt5.QtCore import pyqtSignal, QDateTime\n'), ((13157, 13170), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (13167, 13170), False, 'import time\n'), ((8609, 8631), 'PyQt5.QtGui.QPixmap.fromImage', 'QPixmap.fromImage', (['img'], {}), '(img)\n', (8626, 8631), False, 'from PyQt5.QtGui import QImage, QPixmap\n'), ((7082, 7099), 'numpy.argmax', 'np.argmax', (['scores'], {}), '(scores)\n', (7091, 7099), True, 'import numpy as np\n'), ((8144, 8198), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', 'color', '(1)'], {}), '(frame, (x, y), (x + w, y + h), color, 1)\n', (8157, 8198), False, 'import cv2\n'), ((10889, 10906), 'numpy.argmax', 'np.argmax', (['scores'], {}), '(scores)\n', (10898, 10906), True, 'import numpy as np\n'), ((11951, 12005), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', 'color', '(1)'], {}), '(frame, (x, y), (x + w, y + h), color, 1)\n', (11964, 12005), False, 'import cv2\n')] |
#
# RawIO
# Copyright (c) 2021 <NAME>.
#
from cv2 import findTransformECC, MOTION_TRANSLATION, TERM_CRITERIA_COUNT, TERM_CRITERIA_EPS
from numpy import asarray, eye, float32
from PIL import Image
from sklearn.feature_extraction.image import extract_patches_2d
from typing import Callable
def markov_similarity (min_probability: float=0.8, trials: int=100, patch_size: float=0.1) -> Callable[[str, str], bool]:
    """
    Create a similarity function which estimates a binomial distribution on a Markov random field defined over the image.

    In simple terms, it checks for patch correspondences: random patches are
    sampled from corresponding locations of both images and registered with
    ECC image alignment (Evangelidis & Psarakis, 2008); the fraction of
    successful registrations is a Monte Carlo estimate of the similarity.

    Parameters:
        min_probability (float): Minimum probability for images to be considered similar, in range [0., 1.].
        trials (int): Number of Monte Carlo trials for estimating the binomial distribution.
        patch_size (float): Relative patch size for ECC trials, in range [0., 1.].

    Returns:
        callable: Pairwise image similarity function returning a boolean.
    """
    def similarity_fn (path_a: str, path_b: str) -> bool:
        # Load images
        image_a = Image.open(path_a)
        image_b = Image.open(path_b)
        # Differently sized images are never similar
        if image_a.size != image_b.size:
            return False
        # Decode reduced-size grayscale versions for speed
        image_a.draft("L", (2560, 1440))
        image_b.draft("L", (2560, 1440))
        image_a = asarray(image_a)
        image_b = asarray(image_b)
        # Extract corresponding random patches (same seed => same locations)
        SEED = 1
        size = int(min(image_a.shape) * patch_size)
        patches_a = extract_patches_2d(image_a, (size, size), max_patches=trials, random_state=SEED)
        patches_b = extract_patches_2d(image_b, (size, size), max_patches=trials, random_state=SEED)
        # Run Monte Carlo estimation
        IDENTITY = eye(2, 3, dtype=float32)
        CRITERIA = (TERM_CRITERIA_EPS | TERM_CRITERIA_COUNT, 50, 1e-4)
        passes = 0
        for patch_a, patch_b in zip(patches_a, patches_b):
            try:
                findTransformECC(patch_a, patch_b, IDENTITY.copy(), MOTION_TRANSLATION, CRITERIA, None, 5)
                passes += 1
            except Exception:
                # findTransformECC raises when the ECC maximization fails to
                # converge: count the trial as a failed correspondence.
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit are no longer swallowed.
                pass
        # Threshold the estimated success probability
        estimator = passes / patches_a.shape[0]
        return estimator >= min_probability
return similarity_fn | [
"sklearn.feature_extraction.image.extract_patches_2d",
"numpy.eye",
"numpy.asarray",
"PIL.Image.open"
] | [((1211, 1229), 'PIL.Image.open', 'Image.open', (['path_a'], {}), '(path_a)\n', (1221, 1229), False, 'from PIL import Image\n'), ((1248, 1266), 'PIL.Image.open', 'Image.open', (['path_b'], {}), '(path_b)\n', (1258, 1266), False, 'from PIL import Image\n'), ((1477, 1493), 'numpy.asarray', 'asarray', (['image_a'], {}), '(image_a)\n', (1484, 1493), False, 'from numpy import asarray, eye, float32\n'), ((1512, 1528), 'numpy.asarray', 'asarray', (['image_b'], {}), '(image_b)\n', (1519, 1528), False, 'from numpy import asarray, eye, float32\n'), ((1644, 1729), 'sklearn.feature_extraction.image.extract_patches_2d', 'extract_patches_2d', (['image_a', '(size, size)'], {'max_patches': 'trials', 'random_state': 'SEED'}), '(image_a, (size, size), max_patches=trials, random_state=SEED\n )\n', (1662, 1729), False, 'from sklearn.feature_extraction.image import extract_patches_2d\n'), ((1745, 1830), 'sklearn.feature_extraction.image.extract_patches_2d', 'extract_patches_2d', (['image_b', '(size, size)'], {'max_patches': 'trials', 'random_state': 'SEED'}), '(image_b, (size, size), max_patches=trials, random_state=SEED\n )\n', (1763, 1830), False, 'from sklearn.feature_extraction.image import extract_patches_2d\n'), ((1882, 1906), 'numpy.eye', 'eye', (['(2)', '(3)'], {'dtype': 'float32'}), '(2, 3, dtype=float32)\n', (1885, 1906), False, 'from numpy import asarray, eye, float32\n')] |
# Copyright (c) 2020, Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
import math
import numpy as np
from coremltools.converters.mil.mil import types
from coremltools.converters.mil.mil.input_type import (
DefaultInputs,
FloatInputType,
InputSpec,
IntInputType,
ScalarOrTensorInputType,
StringInputType,
TensorInputType,
)
from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE
from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op
from .elementwise_unary import elementwise_unary
@register_op(doc_str="")
class clamped_relu(Operation):
    """
    If ``x >= 0`` return elementwise ``min(beta, x)``, otherwise return
    ``min(beta, alpha * x)``.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)
    alpha: const fp32 (Required)
    beta: const fp32 (Required)

    Returns
    -------
    tensor<\*?, T>
        * A tensor of the same type and shape as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=ScalarOrTensorInputType(),
        alpha=FloatInputType(const=True),
        beta=FloatInputType(const=True),
    )

    def __init__(self, **kwargs):
        super(clamped_relu, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # Apply the leaky-ReLU branch first, then clamp at beta. The previous
        # sum-of-two-minima formulation (min(max(x,0),beta) + min(min(x,0)*alpha,beta))
        # double-counted the clamp and disagreed with the documented behavior
        # when beta < 0; this form follows the docstring for all beta.
        leaky = np.where(self.x.val >= 0, self.x.val, self.alpha.val * self.x.val)
        return np.minimum(leaky, self.beta.val)

    def type_inference(self):
        return self.x.sym_type
@register_op(doc_str="")
class elu(Operation):
    """
    Return ``x`` elementwise where ``x > 0``; otherwise return
    ``alpha * (e^x - 1)``.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)
    alpha: const fp32 (Optional)
        * Default is ``1``.

    Returns
    -------
    tensor<\*?, T>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=ScalarOrTensorInputType(),
        alpha=FloatInputType(const=True, optional=True),
    )

    def default_inputs(self):
        return DefaultInputs(
            alpha=1.,
            )

    def __init__(self, **kwargs):
        super(elu, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # Exponentiate only the negative entries; positive ones pass through.
        result = np.copy(self.x.val)
        negative = result < 0
        result[negative] = self.alpha.val * (np.exp(result[negative]) - 1)
        return result

    def type_inference(self):
        return self.x.sym_type
@register_op(doc_str="")
class gelu(Operation):
    """
    Return the elementwise Gaussian error linear unit activation function for ``x``.

    You can use ``EXACT``, ``TANH_APPROXIMATION``, or ``SIGMOID_APPROXIMATION`` values
    based on the following formulas:

    * ``EXACT``:

    .. math::
       f(x) = 0.5x\\left ( 1+\\rm{erf}\\left ( \\frac{x}{\\sqrt{2}} \\right ) \\right )

    * ``TANH_APPROXIMATION``:

    .. math::
       f(x) = 0.5x\\left ( 1+\\rm{tanh}\\left ( \\sqrt{2/\\pi}\\left ( x + 0.044715x^3 \\right ) \\right ) \\right )

    * ``SIGMOID_APPROXIMATION``:

    .. math::
       f(x) = x*\\rm{sigmoid}(1.702x)

    Parameters
    ----------
    x: tensor<\*?, T> (Required)
    mode: const str (Optional)
        * Use ``'EXACT'``, ``'TANH_APPROXIMATION'``, or ``'SIGMOID_APPROXIMATION'`` for ``str``.
        * Default is ``'EXACT'``.

    Returns
    -------
    tensor<\*?, T>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=ScalarOrTensorInputType(),
        mode=StringInputType(const=True, optional=True),
    )

    def default_inputs(self):
        return DefaultInputs(
            mode="EXACT",
            )

    def __init__(self, **kwargs):
        super(gelu, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        if self.mode.val == "TANH_APPROXIMATION":
            # 0.5x(1 + tanh(sqrt(2/pi)(x + 0.044715x^3)))
            a = np.sqrt(2 / np.pi) * (self.x.val + 0.044715 * np.power(self.x.val, 3))
            return 0.5 * self.x.val * (1 + np.tanh(a))
        elif self.mode.val == "SIGMOID_APPROXIMATION":
            # x * sigmoid(1.702x)
            return self.x.val * (1 / (1 + np.exp(-(1.702 * self.x.val))))
        else:
            # EXACT: 0.5x(1 + erf(x / sqrt(2))). math.erf is scalar-only,
            # hence the np.vectorize wrapper.
            sqaure_root_of_2 = np.sqrt(2)
            vfunc = np.vectorize(lambda x: 0.5 * x * (1 + math.erf(x / sqaure_root_of_2)))
            return vfunc(self.x.val)

    def type_inference(self):
        # Validate mode eagerly so a typo fails at op-construction time
        # rather than during value inference.
        allowed_values = {"EXACT", "TANH_APPROXIMATION", "SIGMOID_APPROXIMATION"}
        if self.mode.val not in allowed_values:
            msg = '"gelu" op: unrecognized value of mode: "{}". Allowed values are {}'
            raise ValueError(msg.format(self.mode.val, allowed_values))
        return self.x.sym_type
@register_op(doc_str="")
class leaky_relu(Operation):
    """
    Elementwise apply ``x`` where ``x >= 0``; apply ``alpha * x`` otherwise.

    Parameters
    ----------
    x: <*?, T> (Required)
    alpha: const fp32 (Optional)
        * Default is ``0.01``.

    Returns
    -------
    tensor<\*?, fp32>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=ScalarOrTensorInputType(),
        alpha=FloatInputType(const=True, optional=True),
    )

    def default_inputs(self):
        return DefaultInputs(
            alpha=0.01,
            )

    def __init__(self, **kwargs):
        super(leaky_relu, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # Scale only the negative entries; positive ones pass through unchanged.
        result = np.copy(self.x.val)
        negative = result < 0
        result[negative] = result[negative] * self.alpha.val
        return result

    def type_inference(self):
        return self.x.sym_type
@register_op(doc_str="")
class linear_activation(Operation):
    """
    Apply elementwise ``x * alpha + beta``.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)
    alpha: const fp32 (Required)
    beta: const fp32 (Optional)
        * Default is ``0``.

    Returns
    -------
    tensor<\*?, T>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=ScalarOrTensorInputType(),
        alpha=FloatInputType(const=True),
        beta=FloatInputType(const=True, optional=True),
    )

    def default_inputs(self):
        return DefaultInputs(
            beta=0.,
            )

    def __init__(self, **kwargs):
        super(linear_activation, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # y = alpha * x + beta
        scaled = self.x.val * self.alpha.val
        return scaled + self.beta.val

    def type_inference(self):
        return self.x.sym_type
@register_op(doc_str="")
class prelu(Operation):
    """
    Where ``i = 1 ... C``, if ``x_i > 0``, return ``x_i`` , otherwise return ``alpha_i * x_i``.

    Parameters
    ----------
    x: tensor<[b, C, n, m], T> (Required)
    alpha: const tensor<[C], T>, (Required)

    Returns
    -------
    tensor<[b, C, n, m], T>
        * A tensor of the same shape as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=TensorInputType(),
        alpha=TensorInputType(const=True),)

    def __init__(self, **kwargs):
        super(prelu, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # Reshape alpha ([C]) to [1, C, 1, ..., 1] so it broadcasts against
        # the channel axis (axis 1) of x. The previous expand_dims loop
        # produced shape [C, 1, ..., 1], which NumPy aligns against the
        # *batch* axis and therefore fails (or silently mismatches) for the
        # documented [b, C, n, m] layout.
        alpha_br = np.reshape(self.alpha.val, [1, -1] + [1] * (len(self.x.shape) - 2))
        x_pos = np.maximum(self.x.val, 0)
        b = np.minimum(self.x.val, 0)
        return x_pos + b * alpha_br

    def type_inference(self):
        if len(self.x.shape) < 3:
            raise ValueError("x should be at least rank 3")
        if len(self.alpha.val.shape) != 1:
            raise ValueError("alpha should be rank 1")
        if self.x.shape[1] != self.alpha.val.shape[0]:
            # alpha is rank 1, so its channel count lives in dimension 0
            # (message fixed; it previously said "dimension 1 of alpha",
            # inconsistent with softplus_parametric).
            raise ValueError(
                "Size of dimension 0 of alpha should be the same as "
                + "the size of dimension 1 of x."
            )
        return self.x.sym_type
@register_op(doc_str="")
class relu(elementwise_unary):
    """
    Return elementwise-applied rectified linear activation: ``max(x, 0)``.

    Parameters
    ----------
    x: tensor<\*?, fp32> (Required)

    Returns
    -------
    tensor<\*?, fp32>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    def __init__(self, **kwargs):
        super(relu, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # Zero out the negative entries.
        return np.maximum(self.x.val, 0)
@register_op(doc_str="")
class relu6(elementwise_unary):
    """
    Return elementwise-applied rectified linear activation: ``min(max(x, 0), 6)``.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)

    Returns
    -------
    tensor<\*?, T>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    def __init__(self, **kwargs):
        super(relu6, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # Clamp below at 0, then above at 6.
        return np.minimum(np.maximum(self.x.val, 0), 6)
@register_op(doc_str="")
class scaled_tanh(Operation):
    """
    Return ``alpha * tanh(beta * x)`` elementwise.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)
        * Input range is ``(-inf, inf)``.
    alpha: const fp32 (Optional)
        * Default is ``1``.
    beta: const fp32 (Optional)
        * Default is ``1``.

    Returns
    -------
    tensor<\*?, fp32>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=ScalarOrTensorInputType(),
        alpha=FloatInputType(const=True, optional=True),
        beta=FloatInputType(const=True, optional=True),
    )

    def default_inputs(self):
        return DefaultInputs(
            alpha=1,
            beta=1,
            )

    def __init__(self, **kwargs):
        super(scaled_tanh, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # Squash with tanh(beta * x), then scale by alpha.
        squashed = np.tanh(self.beta.val * self.x.val)
        return squashed * self.alpha.val

    def type_inference(self):
        return self.x.sym_type
@register_op(doc_str="")
class sigmoid(elementwise_unary):
    """
    Return ``sigmoid(x)`` (the logistic function ``1 / (1 + e^-x)``) elementwise.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)

    Returns
    -------
    tensor<\*?, T>
        * A tensor of the same shape as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    def __init__(self, **kwargs):
        super(sigmoid, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        exp_neg_x = np.exp(-self.x.val)
        return 1 / (1 + exp_neg_x)
@register_op(doc_str="")
class sigmoid_hard(Operation):
    """
    Return ``min( max( alpha * x + beta, 0 ), 1 )`` elementwise.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)
    alpha: const fp32 (Optional)
        * Default is ``0.2``.
    beta: const fp32 (Optional)
        * Default is ``0.5``.

    Returns
    -------
    tensor<\*?, fp32>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=ScalarOrTensorInputType(),
        alpha=FloatInputType(const=True, optional=True),
        beta=FloatInputType(const=True, optional=True),
    )

    def default_inputs(self):
        return DefaultInputs(
            alpha=0.2,
            beta=0.5,
            )

    def __init__(self, **kwargs):
        super(sigmoid_hard, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # Affine transform followed by clamping to [0, 1];
        # np.clip(a, 0, 1) is exactly min(max(a, 0), 1).
        return np.clip(self.alpha.val * self.x.val + self.beta.val, 0, 1)

    def type_inference(self):
        return self.x.sym_type
@register_op(doc_str="")
class silu(Operation):
    """
    Sigmoid Linear Unit, elementwise apply the SiLU or Swish operation ``x * sigmoid(x)``.

    Parameters
    ----------
    x: tensor<\*, T>

    Returns
    -------
    tensor<\*, T>

    Attributes
    ----------
    T: fp16, fp32
    """
    # NOTE: unlike the sibling activation ops, silu defines no
    # value_inference, so constant folding is not available for it.

    input_spec = InputSpec(x=TensorInputType(),)

    def __init__(self, **kwargs):
        super(silu, self).__init__(**kwargs)

    def type_inference(self):
        # Output has exactly the input's dtype and shape.
        return types.tensor(self.x.dtype, tuple(self.x.shape))
@register_op(doc_str="")
class softplus(elementwise_unary):
    """
    Return ``log( 1 + e^x )`` elementwise.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)

    Returns
    -------
    tensor<\*?, T>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    def __init__(self, **kwargs):
        super(softplus, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # Numerically stable form of log(1 + e^x):
        # log(1 + e^x) == log(1 + e^(-|x|)) + max(x, 0),
        # which never exponentiates a large positive value.
        return np.log(1 + np.exp(-np.abs(self.x.val))) + np.maximum(self.x.val, 0)
@register_op(doc_str="")
class softplus_parametric(Operation):
    """
    Return ``alpha_i * log( 1 + e^( beta_i * x_i ) )``, where ``i = 1 ... C``.

    Parameters
    ----------
    x: tensor<[b, C, n, m], T> (Required)
    alpha: const tensor<[C], fp32> (Required)
    beta: const tensor<[C], fp32> (Required)

    Returns
    -------
    tensor<[b, C, n, m], T>
        * A tensor of the same shape as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=TensorInputType(),
        alpha=TensorInputType(const=True),
        beta=TensorInputType(const=True),
    )

    def __init__(self, **kwargs):
        super(softplus_parametric, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        # Broadcast the per-channel alpha/beta ([C]) over the remaining axes.
        # NOTE(review): expand_dims at axes 1..rank-1 yields shape
        # [C, 1, ..., 1], which NumPy aligns against axis 0 of x, not the
        # channel axis 1 that type_inference validates — confirm intended
        # layout. Also, np.exp may overflow for large beta * x.
        alpha_br = np.copy(self.alpha.val)
        beta_br = np.copy(self.beta.val)
        for i in range(1, len(self.x.val.shape)):
            alpha_br = np.expand_dims(alpha_br, i)
            beta_br = np.expand_dims(beta_br, i)
        return alpha_br * np.log(1 + np.exp(self.x.val * beta_br))

    def type_inference(self):
        if len(self.x.shape) < 3:
            raise ValueError("x should be at least rank 3")
        if len(self.alpha.val.shape) != 1:
            raise ValueError("alpha should be rank 1")
        if self.x.shape[1] != self.alpha.val.shape[0]:
            raise ValueError(
                "Size of dimension 0 of alpha should be the same as "
                + "the size of dimension 1 of x."
            )
        if len(self.beta.val.shape) != 1:
            raise ValueError("beta should be rank 1")
        if self.x.shape[1] != self.beta.val.shape[0]:
            raise ValueError(
                "Size of dimension 0 of beta should be the same as "
                + "the size of dimension 1 of x."
            )
        return self.x.sym_type
@register_op(doc_str="")
class softmax(Operation):
    """
    Return ``exp(x) / tf.reduce_sum(tf.exp(x), axis)``.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)
    axis: const i32 (Optional)
        * Default is ``-1``.

    Returns
    -------
    tensor<\*?, fp32>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=TensorInputType(),
        axis=IntInputType(const=True, optional=True),
    )

    def default_inputs(self):
        return DefaultInputs(
            axis=-1,
            )

    def __init__(self, **kwargs):
        super(softmax, self).__init__(**kwargs)

    def type_inference(self):
        return self.x.sym_type

    @precondition(allow=VALUE)
    def value_inference(self):
        x = self.x.val
        axis = self.axis.val
        # Subtract the per-slice maximum before exponentiating — the standard
        # stability trick; it avoids overflow without changing the result.
        max_vals = np.max(x, axis=axis, keepdims=True)
        temp = np.exp(x - max_vals)
        return temp / np.sum(temp, axis=axis, keepdims=True)
@register_op(doc_str="")
class softsign(elementwise_unary):
    """
    Softsign activation: ``x / (1 + |x|)`` applied elementwise.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)

    Returns
    -------
    tensor<\*?, T>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    def __init__(self, **kwargs):
        super(softsign, self).__init__(**kwargs)

    @precondition(allow=VALUE)
    def value_inference(self):
        """Evaluate ``x / (1 + |x|)`` on the constant value of ``x``."""
        x_val = self.x.val
        return x_val / (1 + np.abs(x_val))
@register_op(doc_str="")
class thresholded_relu(Operation):
    """
    Return ``x`` if ``x >= alpha``, otherwise return ``0``.

    Parameters
    ----------
    x: tensor<\*?, T> (Required)
    alpha: const fp32 (Optional)
        * Default is ``1``.

    Returns
    -------
    tensor<\*, T>
        * A tensor of the same shape and type as ``x``.

    Attributes
    ----------
    T: fp16, fp32
    """

    input_spec = InputSpec(
        x=ScalarOrTensorInputType(),
        alpha=FloatInputType(const=True, optional=True),
    )

    def default_inputs(self):
        return DefaultInputs(
            alpha=1.,
        )

    def __init__(self, **kwargs):
        super(thresholded_relu, self).__init__(**kwargs)

    def type_inference(self):
        # Thresholding does not change shape or dtype.
        return self.x.sym_type

    @precondition(allow=VALUE)
    def value_inference(self):
        """Evaluate the thresholded ReLU on the constant value of ``x``.

        Operates on a copy so the cached value of the input ``x`` is not
        mutated in place (previously ``y = self.x.val`` aliased the stored
        array and ``y[y < alpha] = 0`` clobbered it; the sibling parametric
        activations in this file all ``np.copy`` before modifying).
        """
        y = np.copy(self.x.val)
        y[y < self.alpha.val] = 0
        return y
| [
"numpy.maximum",
"numpy.sum",
"numpy.abs",
"math.erf",
"numpy.exp",
"coremltools.converters.mil.mil.input_type.DefaultInputs",
"coremltools.converters.mil.mil.input_type.StringInputType",
"numpy.copy",
"numpy.power",
"numpy.max",
"coremltools.converters.mil.mil.ops.defs._op_reqs.register_op",
... | [((710, 733), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (721, 733), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((1700, 1723), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (1711, 1723), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((2660, 2683), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (2671, 2683), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((4949, 4972), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (4960, 4972), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((5903, 5926), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (5914, 5926), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((6875, 6898), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (6886, 6898), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((8274, 8297), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (8285, 8297), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((8826, 8849), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (8837, 8849), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((9397, 9420), 
'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (9408, 9420), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((10481, 10504), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (10492, 10504), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((10992, 11015), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (11003, 11015), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((12102, 12125), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (12113, 12125), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((12626, 12649), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (12637, 12649), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((13190, 13213), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (13201, 13213), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((15057, 15080), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (15068, 15080), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((16083, 16106), 'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (16094, 16106), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((16626, 16649), 
'coremltools.converters.mil.mil.ops.defs._op_reqs.register_op', 'register_op', ([], {'doc_str': '""""""'}), "(doc_str='')\n", (16637, 16649), False, 'from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op\n'), ((1410, 1435), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (1422, 1435), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((2430, 2455), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (2442, 2455), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((4033, 4058), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (4045, 4058), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((5697, 5722), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (5709, 5722), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((6694, 6719), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (6706, 6719), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((7494, 7519), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (7506, 7519), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((8725, 8750), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (8737, 8750), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((9281, 9306), 
'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (9293, 9306), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((10291, 10316), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (10303, 10316), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((10887, 10912), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (10899, 10912), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((11868, 11893), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (11880, 11893), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((13047, 13072), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (13059, 13072), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((13910, 13935), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (13922, 13935), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((15820, 15845), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (15832, 15845), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((16513, 16538), 'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (16525, 16538), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((17420, 17445), 
'coremltools.converters.mil.mil.operation.precondition', 'precondition', ([], {'allow': 'VALUE'}), '(allow=VALUE)\n', (17432, 17445), False, 'from coremltools.converters.mil.mil.operation import Operation, precondition, VALUE\n'), ((2294, 2318), 'coremltools.converters.mil.mil.input_type.DefaultInputs', 'DefaultInputs', ([], {'alpha': '(1.0)'}), '(alpha=1.0)\n', (2307, 2318), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((2499, 2518), 'numpy.copy', 'np.copy', (['self.x.val'], {}), '(self.x.val)\n', (2506, 2518), True, 'import numpy as np\n'), ((3892, 3919), 'coremltools.converters.mil.mil.input_type.DefaultInputs', 'DefaultInputs', ([], {'mode': '"""EXACT"""'}), "(mode='EXACT')\n", (3905, 3919), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((5552, 5577), 'coremltools.converters.mil.mil.input_type.DefaultInputs', 'DefaultInputs', ([], {'alpha': '(0.01)'}), '(alpha=0.01)\n', (5565, 5577), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((5766, 5785), 'numpy.copy', 'np.copy', (['self.x.val'], {}), '(self.x.val)\n', (5773, 5785), True, 'import numpy as np\n'), ((6545, 6568), 'coremltools.converters.mil.mil.input_type.DefaultInputs', 'DefaultInputs', ([], {'beta': '(0.0)'}), '(beta=0.0)\n', (6558, 6568), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((7698, 7723), 'numpy.maximum', 'np.maximum', (['self.x.val', '(0)'], {}), '(self.x.val, 0)\n', (7708, 7723), True, 'import numpy as np\n'), ((7736, 7761), 'numpy.minimum', 'np.minimum', 
(['self.x.val', '(0)'], {}), '(self.x.val, 0)\n', (7746, 7761), True, 'import numpy as np\n'), ((8797, 8822), 'numpy.maximum', 'np.maximum', (['self.x.val', '(0)'], {}), '(self.x.val, 0)\n', (8807, 8822), True, 'import numpy as np\n'), ((10128, 10158), 'coremltools.converters.mil.mil.input_type.DefaultInputs', 'DefaultInputs', ([], {'alpha': '(1)', 'beta': '(1)'}), '(alpha=1, beta=1)\n', (10141, 10158), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((11700, 11734), 'coremltools.converters.mil.mil.input_type.DefaultInputs', 'DefaultInputs', ([], {'alpha': '(0.2)', 'beta': '(0.5)'}), '(alpha=0.2, beta=0.5)\n', (11713, 11734), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((13986, 14009), 'numpy.copy', 'np.copy', (['self.alpha.val'], {}), '(self.alpha.val)\n', (13993, 14009), True, 'import numpy as np\n'), ((14028, 14050), 'numpy.copy', 'np.copy', (['self.beta.val'], {}), '(self.beta.val)\n', (14035, 14050), True, 'import numpy as np\n'), ((15619, 15641), 'coremltools.converters.mil.mil.input_type.DefaultInputs', 'DefaultInputs', ([], {'axis': '(-1)'}), '(axis=-1)\n', (15632, 15641), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((15948, 15983), 'numpy.max', 'np.max', (['x'], {'axis': 'axis', 'keepdims': '(True)'}), '(x, axis=axis, keepdims=True)\n', (15954, 15983), True, 'import numpy as np\n'), ((15999, 16019), 'numpy.exp', 'np.exp', (['(x - max_vals)'], {}), '(x - max_vals)\n', (16005, 16019), True, 'import numpy as np\n'), ((17209, 17233), 'coremltools.converters.mil.mil.input_type.DefaultInputs', 'DefaultInputs', ([], {'alpha': '(1.0)'}), '(alpha=1.0)\n', (17222, 
17233), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((1200, 1225), 'coremltools.converters.mil.mil.input_type.ScalarOrTensorInputType', 'ScalarOrTensorInputType', ([], {}), '()\n', (1223, 1225), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((1241, 1267), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)'}), '(const=True)\n', (1255, 1267), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((1282, 1308), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)'}), '(const=True)\n', (1296, 1308), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((1490, 1515), 'numpy.maximum', 'np.maximum', (['self.x.val', '(0)'], {}), '(self.x.val, 0)\n', (1500, 1515), True, 'import numpy as np\n'), ((2158, 2183), 'coremltools.converters.mil.mil.input_type.ScalarOrTensorInputType', 'ScalarOrTensorInputType', ([], {}), '()\n', (2181, 2183), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((2199, 2240), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)', 'optional': '(True)'}), '(const=True, optional=True)\n', (2213, 2240), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, 
TensorInputType\n'), ((3756, 3781), 'coremltools.converters.mil.mil.input_type.ScalarOrTensorInputType', 'ScalarOrTensorInputType', ([], {}), '()\n', (3779, 3781), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((3796, 3838), 'coremltools.converters.mil.mil.input_type.StringInputType', 'StringInputType', ([], {'const': '(True)', 'optional': '(True)'}), '(const=True, optional=True)\n', (3811, 3838), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((5416, 5441), 'coremltools.converters.mil.mil.input_type.ScalarOrTensorInputType', 'ScalarOrTensorInputType', ([], {}), '()\n', (5439, 5441), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((5457, 5498), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)', 'optional': '(True)'}), '(const=True, optional=True)\n', (5471, 5498), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((6368, 6393), 'coremltools.converters.mil.mil.input_type.ScalarOrTensorInputType', 'ScalarOrTensorInputType', ([], {}), '()\n', (6391, 6393), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((6409, 6435), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)'}), '(const=True)\n', (6423, 6435), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, 
IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((6450, 6491), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)', 'optional': '(True)'}), '(const=True, optional=True)\n', (6464, 6491), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((7343, 7360), 'coremltools.converters.mil.mil.input_type.TensorInputType', 'TensorInputType', ([], {}), '()\n', (7358, 7360), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((7377, 7404), 'coremltools.converters.mil.mil.input_type.TensorInputType', 'TensorInputType', ([], {'const': '(True)'}), '(const=True)\n', (7392, 7404), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((7654, 7681), 'numpy.expand_dims', 'np.expand_dims', (['alpha_br', 'i'], {}), '(alpha_br, i)\n', (7668, 7681), True, 'import numpy as np\n'), ((9364, 9389), 'numpy.maximum', 'np.maximum', (['self.x.val', '(0)'], {}), '(self.x.val, 0)\n', (9374, 9389), True, 'import numpy as np\n'), ((9936, 9961), 'coremltools.converters.mil.mil.input_type.ScalarOrTensorInputType', 'ScalarOrTensorInputType', ([], {}), '()\n', (9959, 9961), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((9977, 10018), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)', 'optional': '(True)'}), '(const=True, optional=True)\n', (9991, 10018), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, 
IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((10033, 10074), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)', 'optional': '(True)'}), '(const=True, optional=True)\n', (10047, 10074), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((10380, 10415), 'numpy.tanh', 'np.tanh', (['(self.x.val * self.beta.val)'], {}), '(self.x.val * self.beta.val)\n', (10387, 10415), True, 'import numpy as np\n'), ((11508, 11533), 'coremltools.converters.mil.mil.input_type.ScalarOrTensorInputType', 'ScalarOrTensorInputType', ([], {}), '()\n', (11531, 11533), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((11549, 11590), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)', 'optional': '(True)'}), '(const=True, optional=True)\n', (11563, 11590), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((11605, 11646), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)', 'optional': '(True)'}), '(const=True, optional=True)\n', (11619, 11646), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((11964, 12022), 'numpy.maximum', 'np.maximum', (['(self.alpha.val * self.x.val + self.beta.val)', '(0)'], {}), '(self.alpha.val * self.x.val + self.beta.val, 0)\n', (11974, 12022), True, 'import numpy as np\n'), ((12430, 12447), 'coremltools.converters.mil.mil.input_type.TensorInputType', 'TensorInputType', 
([], {}), '()\n', (12445, 12447), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((13161, 13186), 'numpy.maximum', 'np.maximum', (['self.x.val', '(0)'], {}), '(self.x.val, 0)\n', (13171, 13186), True, 'import numpy as np\n'), ((13699, 13716), 'coremltools.converters.mil.mil.input_type.TensorInputType', 'TensorInputType', ([], {}), '()\n', (13714, 13716), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((13732, 13759), 'coremltools.converters.mil.mil.input_type.TensorInputType', 'TensorInputType', ([], {'const': '(True)'}), '(const=True)\n', (13747, 13759), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((13774, 13801), 'coremltools.converters.mil.mil.input_type.TensorInputType', 'TensorInputType', ([], {'const': '(True)'}), '(const=True)\n', (13789, 13801), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((14124, 14151), 'numpy.expand_dims', 'np.expand_dims', (['alpha_br', 'i'], {}), '(alpha_br, i)\n', (14138, 14151), True, 'import numpy as np\n'), ((14174, 14200), 'numpy.expand_dims', 'np.expand_dims', (['beta_br', 'i'], {}), '(beta_br, i)\n', (14188, 14200), True, 'import numpy as np\n'), ((15494, 15511), 'coremltools.converters.mil.mil.input_type.TensorInputType', 'TensorInputType', ([], {}), '()\n', (15509, 15511), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((15526, 15565), 
'coremltools.converters.mil.mil.input_type.IntInputType', 'IntInputType', ([], {'const': '(True)', 'optional': '(True)'}), '(const=True, optional=True)\n', (15538, 15565), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((16042, 16080), 'numpy.sum', 'np.sum', (['temp'], {'axis': 'axis', 'keepdims': '(True)'}), '(temp, axis=axis, keepdims=True)\n', (16048, 16080), True, 'import numpy as np\n'), ((17073, 17098), 'coremltools.converters.mil.mil.input_type.ScalarOrTensorInputType', 'ScalarOrTensorInputType', ([], {}), '()\n', (17096, 17098), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((17114, 17155), 'coremltools.converters.mil.mil.input_type.FloatInputType', 'FloatInputType', ([], {'const': '(True)', 'optional': '(True)'}), '(const=True, optional=True)\n', (17128, 17155), False, 'from coremltools.converters.mil.mil.input_type import DefaultInputs, FloatInputType, InputSpec, IntInputType, ScalarOrTensorInputType, StringInputType, TensorInputType\n'), ((1555, 1580), 'numpy.minimum', 'np.minimum', (['self.x.val', '(0)'], {}), '(self.x.val, 0)\n', (1565, 1580), True, 'import numpy as np\n'), ((2556, 2572), 'numpy.exp', 'np.exp', (['b[b < 0]'], {}), '(b[b < 0])\n', (2562, 2572), True, 'import numpy as np\n'), ((4156, 4174), 'numpy.sqrt', 'np.sqrt', (['(2 / np.pi)'], {}), '(2 / np.pi)\n', (4163, 4174), True, 'import numpy as np\n'), ((4456, 4466), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (4463, 4466), True, 'import numpy as np\n'), ((10968, 10987), 'numpy.exp', 'np.exp', (['(-self.x.val)'], {}), '(-self.x.val)\n', (10974, 10987), True, 'import numpy as np\n'), ((16603, 16621), 'numpy.abs', 'np.abs', (['self.x.val'], {}), '(self.x.val)\n', (16609, 16621), True, 'import numpy as np\n'), ((4270, 4280), 
'numpy.tanh', 'np.tanh', (['a'], {}), '(a)\n', (4277, 4280), True, 'import numpy as np\n'), ((14238, 14266), 'numpy.exp', 'np.exp', (['(self.x.val * beta_br)'], {}), '(self.x.val * beta_br)\n', (14244, 14266), True, 'import numpy as np\n'), ((4202, 4225), 'numpy.power', 'np.power', (['self.x.val', '(3)'], {}), '(self.x.val, 3)\n', (4210, 4225), True, 'import numpy as np\n'), ((4379, 4408), 'numpy.exp', 'np.exp', (['(-(1.702 * self.x.val))'], {}), '(-(1.702 * self.x.val))\n', (4385, 4408), True, 'import numpy as np\n'), ((13138, 13156), 'numpy.abs', 'np.abs', (['self.x.val'], {}), '(self.x.val)\n', (13144, 13156), True, 'import numpy as np\n'), ((4525, 4555), 'math.erf', 'math.erf', (['(x / sqaure_root_of_2)'], {}), '(x / sqaure_root_of_2)\n', (4533, 4555), False, 'import math\n')] |
import numpy as np
from artemis.experiments.decorators import experiment_function
from matplotlib import pyplot as plt
from six.moves import xrange
__author__ = 'peter'
"""
This file demonstrates Artemis's "Experiments"
When you run an experiment, all figures and console output, as well as some metadata such as total run time, arguments,
etc are saved to disk.
This demo illustrates how you can create an experiment, create variations on that experiment, and view the results.
"""
class OnlineLinearRegressor:
    """Linear regressor trained online with stochastic gradient descent.

    Weights start at zero and are updated one (mini)batch at a time with a
    plain gradient step on the squared error.
    """

    def __init__(self, n_in, n_out, learning_rate = 0.01):
        self.w = np.zeros((n_in, n_out))  # (n_in, n_out) weight matrix
        self.learning_rate = learning_rate

    def train(self, x, targ):
        """One gradient step on x: (n_samples, n_in), targ: (n_samples, n_out)."""
        prediction = self.predict(x)
        gradient = x.T.dot(prediction - targ)
        self.w -= self.learning_rate * gradient

    def predict(self, x):
        """Return the linear prediction for x: (n_samples, n_in)."""
        return x.dot(self.w)
@experiment_function
def demo_linear_regression(
        n_in = 100,
        n_out = 4,
        n_training_samples = 500,
        n_test_samples = 500,
        noise = .1,
        n_epochs = 10,
        eta = 0.001,
        random_seed = 1234,
        score_report_period = 100,
        ):
    """
    Generate a random linear regression problem and train an online predictor to solve it with Stochastic gradient descent.
    Log the scores and plot the resulting learning curves.

    :param n_in: Number of inputs
    :param n_out: Number of outputs
    :param n_training_samples: Number of training samples in generated dataset.
    :param n_test_samples: Number of test samples in generated dataset.
    :param noise: Noise to add to generated dataset
    :param n_epochs: Number of epochs to run for
    :param eta: Learning rate for SGD
    :param random_seed: Random seed (for generating data)
    :param score_report_period: Report score every X training iterations.
    :return: A dict with the most recently computed 'training_cost' and 'test_cost'.
    """
    # Setup data: draw a ground-truth weight matrix, then generate noisy
    # linear observations for the training and test sets.
    rng = np.random.RandomState(random_seed)
    w_true = rng.randn(n_in, n_out)*.1  # (n_in, n_out)
    training_data = rng.randn(n_training_samples, n_in)  # (n_training_samples, n_in)
    training_target = training_data.dot(w_true) + noise*rng.randn(n_training_samples, n_out)  # (n_training_samples, n_out)
    test_data = rng.randn(n_test_samples, n_in)  # (n_test_samples, n_in)
    test_target = test_data.dot(w_true) + noise*rng.randn(n_test_samples, n_out)  # (n_test_samples, n_out)
    predictor = OnlineLinearRegressor(n_in=n_in, n_out=n_out, learning_rate=eta)
    # Train and periodically record scores.
    # xrange comes from six.moves, so this runs on both Python 2 and 3.
    epoch_scores = []
    for i in xrange(n_training_samples*n_epochs+1):
        if i % score_report_period == 0:
            # Mean (over samples) of the per-sample summed squared error.
            training_out = predictor.predict(training_data)
            training_cost = ((training_target-training_out)**2).sum(axis=1).mean(axis=0)
            test_out = predictor.predict(test_data)
            test_cost = ((test_target-test_out)**2).sum(axis=1).mean(axis=0)
            print('Epoch {epoch}: Test Cost: {test}, Training Cost: {train}'.format(epoch=float(i)/n_training_samples, test=test_cost, train=training_cost))
            epoch = float(i) / n_training_samples
            epoch_scores.append((epoch, training_cost, test_cost))
        # One SGD step on a single sample, cycling through the training set.
        predictor.train(training_data[[i % n_training_samples]], training_target[[i % n_training_samples]])
    # Plot
    epochs, training_costs, test_costs = zip(*epoch_scores)
    plt.plot(epochs, np.array([training_costs, test_costs]).T)
    plt.xlabel('epoch')
    plt.ylabel('cost')
    plt.legend(['Training Cost', 'Test Cost'])
    plt.title("Learning Curve")
    plt.ion()
    plt.show()
    # NOTE(review): training_cost/test_cost deliberately leak out of the loop
    # above; they hold the values from the last reported iteration, which is
    # the final iteration only when score_report_period divides
    # n_training_samples*n_epochs (true for the defaults).
    return {'training_cost': training_cost, 'test_cost': test_cost}
# Register named variants of the experiment: same function, different arguments.
demo_linear_regression.add_variant('fast-learn', eta=0.01)
demo_linear_regression.add_variant('large_input_space', n_in=1000)
# Entry point: launch the interactive experiment browser.
if __name__ == "__main__":
    # Open a menu that allows you to run experiments and view old ones.
    demo_linear_regression.browse(display_format="flat")
| [
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"matplotlib.pyplot.legend",
"numpy.zeros",
"numpy.random.RandomState",
"matplotlib.pyplot.ion",
"six.moves.xrange",
"numpy.array",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] | [((1921, 1955), 'numpy.random.RandomState', 'np.random.RandomState', (['random_seed'], {}), '(random_seed)\n', (1942, 1955), True, 'import numpy as np\n'), ((2565, 2606), 'six.moves.xrange', 'xrange', (['(n_training_samples * n_epochs + 1)'], {}), '(n_training_samples * n_epochs + 1)\n', (2571, 2606), False, 'from six.moves import xrange\n'), ((3444, 3463), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epoch"""'], {}), "('epoch')\n", (3454, 3463), True, 'from matplotlib import pyplot as plt\n'), ((3468, 3486), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""cost"""'], {}), "('cost')\n", (3478, 3486), True, 'from matplotlib import pyplot as plt\n'), ((3491, 3533), 'matplotlib.pyplot.legend', 'plt.legend', (["['Training Cost', 'Test Cost']"], {}), "(['Training Cost', 'Test Cost'])\n", (3501, 3533), True, 'from matplotlib import pyplot as plt\n'), ((3538, 3565), 'matplotlib.pyplot.title', 'plt.title', (['"""Learning Curve"""'], {}), "('Learning Curve')\n", (3547, 3565), True, 'from matplotlib import pyplot as plt\n'), ((3570, 3579), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (3577, 3579), True, 'from matplotlib import pyplot as plt\n'), ((3584, 3594), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3592, 3594), True, 'from matplotlib import pyplot as plt\n'), ((595, 618), 'numpy.zeros', 'np.zeros', (['(n_in, n_out)'], {}), '((n_in, n_out))\n', (603, 618), True, 'import numpy as np\n'), ((3398, 3436), 'numpy.array', 'np.array', (['[training_costs, test_costs]'], {}), '([training_costs, test_costs])\n', (3406, 3436), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 21 13:10:44 2011
@author: <NAME> (OTO), <<EMAIL>>
"""
# Import necessary modules
import numpy as np
import numpy.linalg as npla
import statTools as st
import cross_val as cv
import matplotlib.pyplot as plt
class nipalsPCA:
    """
    GENERAL INFO:
    -------------
    This class carries out Principal Component Analysis on arrays using
    the NIPALS algorithm.

    EXAMPLE USE:
    ----
    import pca
    model = pca.nipalsPCA(array, numPC=5, Xstand=False)
    model = pca.nipalsPCA(array)
    model = pca.nipalsPCA(array, numPC=3)
    model = pca.nipalsPCA(array, Xstand=True)
    model = pca.nipalsPCA(array, cvType=["loo"])
    model = pca.nipalsPCA(array, cvType=["lpo", 4])
    model = pca.nipalsPCA(array, cvType=["lolo", [1,2,3,2,3,1]])

    TYPES:
    ------
    array: <array>
    numPC: <integer>
    Xstand: <boolean>
        False: first column centre input data then run PCA
        True: first scale columns of input data to equal variance
              then run PCA
    cvType: <list>
        First element selects the cross-validation scheme
        ("loo", "lpo" or "lolo"); remaining elements are
        scheme-specific parameters.
    """

    def __init__(self, arrX, **kargs):
        """
        Fit the PCA model on initialisation.

        Parameters
        ----------
        arrX : 2D numpy array
            Data matrix (objects x variables).
        **kargs :
            numPC : int, optional
                Number of components to compute; capped at
                min(n_objects, n_variables).
            Xstand : bool, optional
                True standardises columns of X; False (default)
                only centres them.
            cvType : list, optional
                Cross-validation specification; None (default)
                skips cross validation entirely.
        """
        #===============================================================================
        # Check what is provided by user for PCA
        #===============================================================================

        # Check whether number of PC's that are to be computed is provided.
        # If NOT, then number of PC's is set to either number of objects or
        # variables of X whichever is smallest (numPC). If number of
        # PC's IS provided, then number is checked against maxPC and set to
        # numPC if provided number is larger.
        if 'numPC' not in kargs.keys():
            self.numPC = min(np.shape(arrX))
        else:
            maxNumPC = min(np.shape(arrX))
            if kargs['numPC'] > maxNumPC:
                self.numPC = maxNumPC
            else:
                self.numPC = kargs['numPC']

        # Define X and Y within class such that the data can be accessed from
        # all attributes in class.
        self.arrX_input = arrX

        # Pre-process data according to user request.
        # -------------------------------------------
        # Check whether standardisation of X and Y are requested by user. If
        # NOT, then X and y are centred by default.
        if 'Xstand' not in kargs.keys():
            self.Xstand = False
        else:
            self.Xstand = kargs['Xstand']

        # Standardise X if requested by user, otherwise center X.
        if self.Xstand == True:
            self.Xmeans = np.average(self.arrX_input, axis=0)
            self.Xstd = np.std(self.arrX_input, axis=0, ddof=1)
            self.arrX = (self.arrX_input - self.Xmeans) / self.Xstd
        else:
            self.Xmeans = np.average(self.arrX_input, axis=0)
            self.arrX = self.arrX_input - self.Xmeans

        # Check whether cvType is provided. If NOT, then no cross validation
        # is carried out.
        # NOTE(review): comparisons below use `== None`; `is None` is the
        # idiomatic form (behaviour unchanged here).
        if 'cvType' not in kargs.keys():
            self.cvType = None
        else:
            self.cvType = kargs['cvType']

        # Before the NIPALS algorithm starts initiate dictionaries and lists
        # in which results are stored.
        self.X_scoresList = []
        self.X_loadingsList = []
        self.X_loadingsWeightsList = []
        self.coeffList = []
        self.X_residualsList = [self.arrX]

        # Collect residual matrices/arrays after each computed PC
        self.resids = {}
        self.X_residualsDict = {}

        # Collect predicted matrices/array Xhat after each computed PC
        self.calXhatDict_singPC = {}

        # Collect explained variance in each PC
        self.calExplainedVariancesDict = {}
        self.X_calExplainedVariancesList = []

        #===============================================================================
        # Here the NIPALS PCA algorithm on X starts
        #===============================================================================
        threshold = 1.0e-8
        #X_new = self.data.copy()
        X_new = self.arrX.copy()

        # Compute number of principal components as specified by user
        for j in range(self.numPC):
            # Initial score estimate: first column of the current residual.
            t = X_new[:,0].reshape(-1,1)

            # Iterate until score vector converges according to threshold.
            # NOTE(review): no iteration cap — a pathological input that does
            # not converge would loop forever.
            while 1:
                num = np.dot(np.transpose(X_new), t)
                denom = npla.norm(num)
                p = num / denom
                t_new = np.dot(X_new, p)

                diff = t - t_new
                t = t_new.copy()
                SS = np.sum(np.square(diff))

                # Check whether sum of squares is smaller than threshold. Break
                # out of loop if true and start computation of next PC.
                if SS < threshold:
                    self.X_scoresList.append(t)
                    self.X_loadingsList.append(p)
                    break

            # Peel off information explained by actual PC and continue with
            # decomposition on the residuals (X_new = E).
            X_old = X_new.copy()
            Xhat_j = np.dot(t, np.transpose(p))
            X_new = X_old - Xhat_j

            # Store residuals E and Xhat in their dictionaries
            self.X_residualsDict[j+1] = X_new
            self.calXhatDict_singPC[j+1] = Xhat_j

            # De-process Xhat back to the scale/offset of the input data.
            if self.Xstand == True:
                self.calXhatDict_singPC[j+1] = (Xhat_j * self.Xstd) + \
                        self.Xmeans
            else:
                self.calXhatDict_singPC[j+1] = Xhat_j + self.Xmeans

        # Stack scores and loadings from all computed PCs into arrays
        # (columns = components).
        self.arrT = np.hstack(self.X_scoresList)
        self.arrP = np.hstack(self.X_loadingsList)

        #==============================================================================
        # From here computation of CALIBRATED explained variance starts
        #==============================================================================

        # ========== COMPUTATIONS FOR X ==========
        # ---------------------------------------------------------------------
        # Create a list holding arrays of Xhat predicted calibration after each
        # component. Xhat is computed with Xhat = T*P'
        self.calXpredList = []

        # Compute Xhat for 1 and more components (cumulatively).
        for ind in range(1,self.numPC+1):
            part_arrT = self.arrT[:,0:ind]
            part_arrP = self.arrP[:,0:ind]
            predXcal = np.dot(part_arrT, np.transpose(part_arrP))
            if self.Xstand == True:
                Xhat = (predXcal * self.Xstd) + self.Xmeans
            else:
                Xhat = predXcal + self.Xmeans
            self.calXpredList.append(Xhat)
        # ---------------------------------------------------------------------

        # ---------------------------------------------------------------------
        # Collect all PRESSE for individual variables in a dictionary.
        # Keys represent number of component.
        self.PRESSEdict_indVar_X = {}

        # Compute PRESS for calibration / estimation.
        # NOTE(review): st.centre is project-local; presumably it column-centres
        # its argument — verify against statTools.
        PRESSE_0_indVar_X = np.sum(np.square(st.centre(self.arrX_input)), axis=0)
        self.PRESSEdict_indVar_X[0] = PRESSE_0_indVar_X

        # Compute PRESS for each Xhat for 1, 2, 3, etc number of components
        # and compute explained variance
        for ind, Xhat in enumerate(self.calXpredList):
            diffX = st.centre(self.arrX_input) - st.centre(Xhat)
            PRESSE_indVar_X = np.sum(np.square(diffX), axis=0)
            self.PRESSEdict_indVar_X[ind+1] = PRESSE_indVar_X

        # Now store all PRESSE values into an array. Then compute MSEE and
        # RMSEE.
        self.PRESSEarr_indVar_X = np.array(list(self.PRESSEdict_indVar_X.values()))
        self.MSEEarr_indVar_X = self.PRESSEarr_indVar_X / \
                np.shape(self.arrX_input)[0]
        self.RMSEEarr_indVar_X = np.sqrt(self.MSEEarr_indVar_X)
        # ---------------------------------------------------------------------

        # ---------------------------------------------------------------------
        # Compute explained variance for each variable in X using the
        # MSEE for each variable. Also collect PRESSE, MSEE, RMSEE in
        # their respective dictionaries for each variable. Keys represent
        # now variables and NOT components as above with
        # self.PRESSEdict_indVar_X
        self.cumCalExplVarXarr_indVar = np.zeros(np.shape(self.MSEEarr_indVar_X))
        MSEE_0_indVar_X = self.MSEEarr_indVar_X[0,:]

        for ind, MSEE_indVar_X in enumerate(self.MSEEarr_indVar_X):
            explVar = (MSEE_0_indVar_X - MSEE_indVar_X) / MSEE_0_indVar_X * 100
            self.cumCalExplVarXarr_indVar[ind] = explVar

        self.PRESSE_indVar_X = {}
        self.MSEE_indVar_X = {}
        self.RMSEE_indVar_X = {}
        self.cumCalExplVarX_indVar = {}

        for ind in range(np.shape(self.PRESSEarr_indVar_X)[1]):
            self.PRESSE_indVar_X[ind] = self.PRESSEarr_indVar_X[:,ind]
            self.MSEE_indVar_X[ind] = self.MSEEarr_indVar_X[:,ind]
            self.RMSEE_indVar_X[ind] = self.RMSEEarr_indVar_X[:,ind]
            self.cumCalExplVarX_indVar[ind] = self.cumCalExplVarXarr_indVar[:,ind]
        # ---------------------------------------------------------------------

        # ---------------------------------------------------------------------
        # Collect total PRESSE across all variables in a dictionary. Also,
        # compute total calibrated explained variance in X.
        self.PRESSE_total_dict_X = {}
        self.PRESSE_total_list_X = np.sum(self.PRESSEarr_indVar_X, axis=1)

        for ind, PRESSE_X in enumerate(self.PRESSE_total_list_X):
            self.PRESSE_total_dict_X[ind] = PRESSE_X
        # ---------------------------------------------------------------------

        # ---------------------------------------------------------------------
        # Collect total MSEE across all variables in a dictionary. Also,
        # compute total calibrated explained variance in X.
        self.MSEE_total_dict_X = {}
        self.MSEE_total_list_X = np.sum(self.MSEEarr_indVar_X, axis=1) / \
                np.shape(self.arrX_input)[1]
        MSEE_0_X = self.MSEE_total_list_X[0]

        # Compute total cumulated calibrated explained variance in X
        self.XcumCalExplVarList = []
        if self.Xstand == False:
            for ind, MSEE_X in enumerate(self.MSEE_total_list_X):
                perc = (MSEE_0_X - MSEE_X) / MSEE_0_X * 100
                self.MSEE_total_dict_X[ind] = MSEE_X
                self.XcumCalExplVarList.append(perc)
        else:
            # With standardised X every variable has equal weight, so the
            # total is the plain average over per-variable values.
            self.XcumCalExplVarArr = np.average(self.cumCalExplVarXarr_indVar, axis=1)
            self.XcumCalExplVarList = list(self.XcumCalExplVarArr)

        # Construct list with total explained variance in X for each PC
        # (difference between consecutive cumulative values).
        self.XcalExplVarList = []
        for ind, item in enumerate(self.XcumCalExplVarList):
            if ind == len(self.XcumCalExplVarList)-1: break
            explVarComp = self.XcumCalExplVarList[ind+1] - \
                    self.XcumCalExplVarList[ind]
            self.XcalExplVarList.append(explVarComp)

        # Construct a dictionary that holds predicted X (Xhat) from calibration
        # for each number of components.
        self.calXpredDict = {}
        for ind, item in enumerate(self.calXpredList):
            self.calXpredDict[ind+1] = item
        # ---------------------------------------------------------------------

        # ---------------------------------------------------------------------
        # Compute total RMSEE and store values in a dictionary and list.
        self.RMSEE_total_dict_X = {}
        self.RMSEE_total_list_X = np.sqrt(self.MSEE_total_list_X)

        for ind, RMSEE_X in enumerate(self.RMSEE_total_list_X):
            self.RMSEE_total_dict_X[ind] = RMSEE_X
        # ---------------------------------------------------------------------

        #==============================================================================
        # From here cross validation procedure starts
        #==============================================================================
        if self.cvType == None:
            pass
        else:
            numObj = np.shape(self.arrX)[0]

            # NOTE(review): if cvType[0] matches none of the branches, cvComb
            # stays undefined and the loop below raises NameError.
            if self.cvType[0] == "loo":
                print("loo")
                cvComb = cv.LeaveOneOut(numObj)
            elif self.cvType[0] == "lpo":
                print("lpo")
                cvComb = cv.LeavePOut(numObj, self.cvType[1])
            elif self.cvType[0] == "lolo":
                print("lolo")
                cvComb = cv.LeaveOneLabelOut(self.cvType[1])
            else:
                print('Requested form of cross validation is not available')

            # Collect predicted x (i.e. xhat) for each CV segment in a
            # dictionary according to number of PC
            self.valXpredDict = {}
            for ind in range(1, self.numPC+1):
                self.valXpredDict[ind] = []

            # Collect train and test set in dictionaries for each PC and put
            # them in this list.
            self.cvTrainAndTestDataList = []

            # Collect: validation X scores T, validation X loadings P,
            # validation Y scores U, validation Y loadings Q,
            # validation X loading weights W and scores regression coefficients C
            # in lists for each PC
            self.val_arrTlist = []
            self.val_arrPlist = []
            self.val_arrQlist = []

            # Collect train and test set in a dictionary for each PC
            self.cvTrainAndTestDataList = []

            self.X_train_means_list = []

            # First devide into combinations of training and test sets
            for train_index, test_index in cvComb:
                X_train, X_test = cv.split(train_index, test_index, self.arrX_input)

                subDict = {}
                subDict['x train'] = X_train
                subDict['x test'] = X_test
                self.cvTrainAndTestDataList.append(subDict)

                # -------------------------------------------------------------
                # Center or standardise X according to users choice
                if self.Xstand == True:
                    X_train_mean = np.average(X_train, axis=0).reshape(1,-1)
                    X_train_std = np.std(X_train, axis=0, ddof=1).reshape(1,-1)
                    X_train_proc = (X_train - X_train_mean) / X_train_std

                    # Standardise X test using mean and STD from training set
                    X_test_proc = (X_test - X_train_mean) / X_train_std
                else:
                    X_train_mean = np.average(X_train, axis=0).reshape(1,-1)
                    X_train_proc = X_train - X_train_mean

                    # Center X test using mean from training set
                    X_test_proc = X_test - X_train_mean
                # -------------------------------------------------------------

                self.X_train_means_list.append(X_train_mean)

                # Here the NIPALS PCA algorithm starts
                # ------------------------------------
                threshold = 1.0e-8
                X_new = X_train_proc.copy()

                # Collect scores and loadings in lists that will be later converted
                # to arrays.
                scoresList = []
                loadingsList = []

                # Compute number of principal components as specified by user
                for j in range(self.numPC):
                    t = X_new[:,0].reshape(-1,1)

                    # Iterate until score vector converges according to threshold
                    while 1:
                        num = np.dot(np.transpose(X_new), t)
                        denom = npla.norm(num)
                        p = num / denom
                        t_new = np.dot(X_new, p)

                        diff = t - t_new
                        t = t_new.copy()
                        SS = np.sum(np.square(diff))

                        # Check whether sum of squares is smaller than threshold. Break
                        # out of loop if true and start computation of next PC.
                        if SS < threshold:
                            scoresList.append(t)
                            loadingsList.append(p)
                            break

                    # Peel off information explained by actual PC and continue with
                    # decomposition on the residuals (X_new = E).
                    X_old = X_new.copy()
                    Xhat_j = np.dot(t, np.transpose(p))
                    X_new = X_old - Xhat_j

                # Collect X scores and X loadings for the actual PC.
                valT = np.hstack(scoresList)
                valP = np.hstack(loadingsList)
                self.val_arrTlist.append(valT)
                self.val_arrPlist.append(valP)

                # Compute the scores for the left out object
                projT = np.dot(X_test_proc, valP)
                dims = np.shape(projT)[1]

                # Construct validated predicted X first for one component,
                # then two, three, etc
                for ind in range(0, dims):
                    part_projT = projT[:,0:ind+1].reshape(1,-1)
                    part_valP = valP[:,0:ind+1]
                    valPredX_proc = np.dot(part_projT, np.transpose(part_valP))

                    # Depending on preprocessing re-process in same manner
                    # in order to get values that compare to original values.
                    if self.Xstand == True:
                        valPredX = (valPredX_proc * X_train_std) + \
                                X_train_mean
                    else:
                        valPredX = valPredX_proc + X_train_mean

                    self.valXpredDict[ind+1].append(valPredX)

            # Convert list of one-row arrays into one array such that it
            # corresponds to the orignial variable
            for ind in range(1, dims+1):
                self.valXpredDict[ind] = np.vstack(self.valXpredDict[ind])

            # Put all predicitons into an array that corresponds to the
            # original variable
            #self.valPredXarrList = []
            self.valXpredList = []
            valPreds = self.valXpredDict.values()
            for preds in valPreds:
                pc_arr = np.vstack(preds)
                self.valXpredList.append(pc_arr)

            #==============================================================================
            # From here VALIDATED explained variance is computed
            #==============================================================================

            # ========== Computations for X ==========
            # -----------------------------------------------------------------
            # Compute PRESSCV (PRediction Error Sum of Squares) for cross
            # validation
            # NOTE(review): this overwrites the list built just above with a
            # dict view; enumeration below still works, but the list copy is
            # dead code.
            self.valXpredList = self.valXpredDict.values()

            # Collect all PRESS in a dictionary. Keys represent number of
            # component.
            self.PRESSdict_indVar_X = {}

            # First compute PRESSCV for zero components
            varX = np.var(self.arrX_input, axis=0, ddof=1)
            self.PRESSCV_0_indVar_X = (varX * np.square(np.shape(self.arrX_input)[0])) \
                    / (np.shape(X_train)[0])
            self.PRESSdict_indVar_X[0] = self.PRESSCV_0_indVar_X

            # Compute PRESSCV for each Yhat for 1, 2, 3, etc number of
            # components and compute explained variance
            for ind, Xhat in enumerate(self.valXpredList):
                #diffX = self.arrX_input - Xhat
                diffX = st.centre(self.arrX_input) - st.centre(Xhat)
                PRESSCV_indVar_X = np.sum(np.square(diffX), axis=0)
                self.PRESSdict_indVar_X[ind+1] = PRESSCV_indVar_X

            # Now store all PRESSCV values into an array. Then compute MSECV
            # and RMSECV.
            self.PRESSCVarr_indVar_X = np.array(list(self.PRESSdict_indVar_X.values()))
            self.MSECVarr_indVar_X = self.PRESSCVarr_indVar_X / \
                    np.shape(self.arrX_input)[0]
            self.RMSECVarr_indVar_X = np.sqrt(self.MSECVarr_indVar_X)
            # -----------------------------------------------------------------

            # -----------------------------------------------------------------
            # Compute explained variance for each variable in X using the
            # MSEP for each variable. Also collect PRESS, MSECV, RMSECV in
            # their respective dictionaries for each variable. Keys represent
            # now variables and NOT components as above with
            # self.PRESSdict_indVar
            self.cumValExplVarXarr_indVar = np.zeros(np.shape(self.MSECVarr_indVar_X))
            MSECV_0_indVar_X = self.MSECVarr_indVar_X[0,:]

            for ind, MSECV_indVar_X in enumerate(self.MSECVarr_indVar_X):
                explVar = (MSECV_0_indVar_X - MSECV_indVar_X) / MSECV_0_indVar_X * 100
                self.cumValExplVarXarr_indVar[ind] = explVar

            self.PRESSCV_indVar_X = {}
            self.MSECV_indVar_X = {}
            self.RMSECV_indVar_X = {}
            self.cumValExplVarX_indVar = {}

            for ind in range(np.shape(self.PRESSCVarr_indVar_X)[1]):
                self.PRESSCV_indVar_X[ind] = self.PRESSCVarr_indVar_X[:,ind]
                self.MSECV_indVar_X[ind] = self.MSECVarr_indVar_X[:,ind]
                self.RMSECV_indVar_X[ind] = self.RMSECVarr_indVar_X[:,ind]
                self.cumValExplVarX_indVar[ind] = self.cumValExplVarXarr_indVar[:,ind]
            # -----------------------------------------------------------------

            # -----------------------------------------------------------------
            # Collect total PRESSCV across all variables in a dictionary.
            self.PRESSCV_total_dict_X = {}
            self.PRESSCV_total_list_X = np.sum(self.PRESSCVarr_indVar_X, axis=1)

            for ind, PRESSCV_X in enumerate(self.PRESSCV_total_list_X):
                self.PRESSCV_total_dict_X[ind] = PRESSCV_X
            # -----------------------------------------------------------------

            # -----------------------------------------------------------------
            # Collect total MSECV across all variables in a dictionary. Also,
            # compute total validated explained variance in X.
            self.MSECV_total_dict_X = {}
            self.MSECV_total_list_X = np.sum(self.MSECVarr_indVar_X, axis=1) / \
                    np.shape(self.arrX_input)[1]
            MSECV_0_X = self.MSECV_total_list_X[0]

            # Compute total validated explained variance in X
            self.XcumValExplVarList = []
            if self.Xstand == False:
                for ind, MSECV_X in enumerate(self.MSECV_total_list_X):
                    perc = (MSECV_0_X - MSECV_X) / MSECV_0_X * 100
                    self.MSECV_total_dict_X[ind] = MSECV_X
                    self.XcumValExplVarList.append(perc)
            else:
                self.XcumValExplVarArr = np.average(self.cumValExplVarXarr_indVar, axis=1)
                self.XcumValExplVarList = list(self.XcumValExplVarArr)

            # Construct list with total validated explained variance in X in
            # each component
            self.XvalExplVarList = []
            for ind, item in enumerate(self.XcumValExplVarList):
                if ind == len(self.XcumValExplVarList)-1: break
                explVarComp = self.XcumValExplVarList[ind+1] - \
                        self.XcumValExplVarList[ind]
                self.XvalExplVarList.append(explVarComp)
            # -----------------------------------------------------------------

            # -----------------------------------------------------------------
            # Compute total RMSECV and store values in a dictionary and list.
            self.RMSECV_total_dict_X = {}
            self.RMSECV_total_list_X = np.sqrt(self.MSECV_total_list_X)

            for ind, RMSECV_X in enumerate(self.RMSECV_total_list_X):
                self.RMSECV_total_dict_X[ind] = RMSECV_X
            # -----------------------------------------------------------------

    def modelSettings(self):
        """
        Returns a dictionary holding the settings under which NIPALS PCA was
        run.
        """
        # Collect settings under which PCA was run.
        self.settings = {}
        self.settings['numPC'] = self.numPC
        self.settings['Xstand'] = self.Xstand
        self.settings['arrX'] = self.arrX_input
        self.settings['analysed arrX'] = self.arrX

        return self.settings

    def X_means(self):
        """
        Returns the column means of X as a row vector (1 x n_variables),
        i.e. the values used for centring before PCA.
        """
        return self.Xmeans.reshape(1,-1)

    def X_scores(self):
        """
        Returns the score matrix T. First column holds scores for PC1,
        second column holds scores for PC2, etc.
        """
        return self.arrT

    def X_loadings(self):
        """
        Returns the loading matrix P. First column holds loadings for PC1,
        second column holds loadings for PC2, etc.
        """
        return self.arrP

    def X_corrLoadings(self):
        """
        Returns correlation loadings. First column holds correlation loadings
        for PC1, second column holds correlation loadings for PC2, etc.
        """
        # Creates empty matrix for correlation loadings
        arr_corrLoadings = np.zeros((np.shape(self.arrT)[1], \
                np.shape(self.arrP)[0]), float)

        # Compute correlation loadings:
        # For each PC in score matrix
        for PC in range(np.shape(self.arrT)[1]):
            PCscores = self.arrT[:, PC]

            # For each variable/attribute in original matrix (not meancentered)
            for var in range(np.shape(self.arrX)[1]):
                origVar = self.arrX[:, var]
                corrs = np.corrcoef(PCscores, origVar)
                arr_corrLoadings[PC, var] = corrs[0,1]

        self.arr_corrLoadings = np.transpose(arr_corrLoadings)

        return self.arr_corrLoadings

    def X_residuals(self):
        """
        Returns a dictionary holding the residual matrices E after each
        computed PC. Dictionary key represents order of PC.
        """
        return self.X_residualsDict

    def X_calExplVar(self):
        """
        Returns a list holding the calibrated explained variance for
        each PC.
        """
        return self.XcalExplVarList

    def X_cumCalExplVar_indVar(self):
        """
        Returns an array holding the cumulative calibrated explained variance
        for each variable in X after each PC. Rows represent number of
        components; columns represent variables.
        """
        return self.cumCalExplVarXarr_indVar

    def X_cumCalExplVar(self):
        """
        Returns a list holding the cumulative calibrated explained variance
        for each PC.
        """
        return self.XcumCalExplVarList

    def X_predCal(self):
        """
        Returns a dictionary holding the predicted matrices Xhat from
        calibration after each computed PC. Dictionary key represents order
        of PC.
        """
        return self.calXpredDict

    def X_PRESSE_indVar(self):
        """
        Returns array holding PRESSE for each individual variable acquired
        through calibration after each computed PC. First row is PRESS for zero
        components, second row component 1, third row for component 2, etc.
        """
        return self.PRESSEarr_indVar_X

    def X_PRESSE(self):
        """
        Returns an array holding PRESS across all variables in X acquired
        through calibration after each computed PC. First row is PRESS for zero
        components, second row component 1, third row for component 2, etc.
        """
        return self.PRESSE_total_list_X

    def X_MSEE_indVar(self):
        """
        Returns an array holding MSE from calibration for each variable in X.
        First row is MSE for zero components, second row for component 1, etc.
        """
        return self.MSEEarr_indVar_X

    def X_MSEE(self):
        """
        Returns an array holding MSE across all variables in X acquired through
        calibration after each computed PC. First row is MSE for zero
        components, second row component 1, third row for component 2, etc.
        """
        return self.MSEE_total_list_X

    def X_RMSEE_indVar(self):
        """
        Returns an array holding RMSE from calibration for each variable in X.
        First row is RMSE for zero components, second row for component 1, etc.
        """
        return self.RMSEEarr_indVar_X

    def X_RMSEE(self):
        """
        Returns an array holding RMSE across all variables in X acquired through
        calibration after each computed PC. First row is RMSE for zero
        components, second row component 1, third row for component 2, etc.
        """
        return self.RMSEE_total_list_X

    def X_valExplVar(self):
        """
        Returns list holding validated explained variance for each PC in X.
        Only available when the model was run with cross validation.
        """
        return self.XvalExplVarList

    def X_cumValExplVar_indVar(self):
        """
        Returns array holding cumulative validated explained variance in X for
        each variable. Rows represent number of components; columns represent
        variables in X.
        """
        return self.cumValExplVarXarr_indVar

    def X_cumValExplVar(self):
        """
        Returns list holding cumulative validated explained variance in X.
        """
        return self.XcumValExplVarList

    def X_predVal(self):
        """
        Returns dictionary holding arrays of predicted Xhat after each component
        from validation. Dictionary key represents order of PC.
        """
        return self.valXpredDict

    def X_PRESSCV_indVar(self):
        """
        Returns array holding PRESS for each individual variable in X acquired
        through cross validation after each computed PC. First row is PRESS for
        zero components, second row component 1, third row for component 2, etc.
        """
        return self.PRESSCVarr_indVar_X

    def X_PRESSCV(self):
        """
        Returns an array holding PRESS across all variables in X acquired
        through cross validation after each computed PC. First row is PRESS for
        zero components, second row component 1, third row for component 2, etc.
        """
        return self.PRESSCV_total_list_X

    def X_MSECV_indVar(self):
        """
        Returns an array holding MSE from cross validation for each variable
        in X. First row is MSE for zero components, second row for component 1,
        etc.
        """
        return self.MSECVarr_indVar_X

    def X_MSECV(self):
        """
        Returns an array holding MSE across all variables in X acquired through
        cross validation after each computed PC. First row is MSE for zero
        components, second row component 1, third row for component 2, etc.
        """
        return self.MSECV_total_list_X

    def X_RMSECV_indVar(self):
        """
        Returns an array holding RMSE from cross validation for each variable
        in X. First row is RMSE for zero components, second row for component 1,
        etc.
        """
        return self.RMSECVarr_indVar_X

    def X_RMSECV(self):
        """
        Returns an array holding RMSE across all variables in X acquired through
        cross validation after each computed PC. First row is RMSE for zero
        components, second row component 1, third row for component 2, etc.
        """
        return self.RMSECV_total_list_X

    def cvTrainAndTestData(self):
        """
        Returns a list consisting of dictionaries holding training and test sets.
        """
        return self.cvTrainAndTestDataList

    def corrLoadingsEllipses(self):
        """
        Returns the ellipses that represent 50% and 100% expl. variance in
        correlation loadings plot.
        """
        # Create range for ellipses
        t = np.arange(0.0, 2*np.pi, 0.01)

        # Compuing the outer circle (100 % expl. variance)
        xcords100perc = np.cos(t)
        ycords100perc = np.sin(t)

        # Computing inner circle (sqrt(0.5) radius = 50 % expl. variance)
        xcords50perc = 0.707 * np.cos(t)
        ycords50perc = 0.707 * np.sin(t)

        # Collect ellipse coordinates in dictionary
        ellipses = {}
        ellipses['x50perc'] = xcords50perc
        ellipses['y50perc'] = ycords50perc

        ellipses['x100perc'] = xcords100perc
        ellipses['y100perc'] = ycords100perc

        return ellipses
def plots(model, pc=[1,2], plots=[1,2,3,4], objNames=[], varNames=[]):
    """
    Generate plots that visualise the most important results from PCA.

    Parameters
    ----------
    model : nipalsPCA
        A fitted PCA model (must provide X_scores, X_loadings,
        X_corrLoadings, X_calExplVar, X_cumCalExplVar, X_cumValExplVar
        and modelSettings).
    pc : list of two ints
        The two components (1-based) to plot against each other.
    plots : list of ints
        Which plots to draw: 1 = scores, 2 = loadings,
        3 = correlation loadings, 4 = explained variances.
    objNames, varNames : list of str
        Labels for objects / variables. Auto-generated when empty.

    Returns
    -------
    None. Figures are shown with plt.show().
    """
    # Work on copies: the original appended auto-generated labels directly
    # into the (mutable) default lists, so labels accumulated across calls
    # and caller-supplied lists were mutated.
    objNames = list(objNames)
    varNames = list(varNames)

    # Generate names/numbers for objects if no objects are given
    if bool(objNames) == False:
        numObj, numVar = np.shape(model.modelSettings()['arrX'])

        for num in range(1, numObj+1):
            label = 'Obj {0}'.format(num)
            objNames.append(label)

    # Generate names/numbers for variables if no variables are given
    if bool(varNames) == False:
        numObj, numVar = np.shape(model.modelSettings()['arrX'])

        for num in range(1, numVar+1):
            label = 'Var {0}'.format(num)
            varNames.append(label)

    # Generate a list with names of PC's used for PCA ('PC0' is used by the
    # explained-variance plot for the zero-component baseline).
    obj, numPC = np.shape(model.X_scores())
    pcNames = []
    for num in range(numPC+1):
        label = 'PC{0}'.format(num)
        pcNames.append(label)

    # Generate plot as requested by user
    for item in plots:
        print(item)

        # SCORES PLOT
        if item == 1:
            # Access PCA scores and explained variances from model
            Xscores = model.X_scores()
            XexplVar = model.X_calExplVar()

            # Initiate plot
            fig = plt.figure()
            ax = fig.add_subplot(111)

            # Find maximum and minimum scores along PC's selected by the user
            xMax = max(Xscores[:,pc[0]-1])
            xMin = min(Xscores[:,pc[0]-1])

            yMax = max(Xscores[:,pc[1]-1])
            yMin = min(Xscores[:,pc[1]-1])

            # Compute sufficient distance of label text from scatter point
            xSpace = (xMax / 100) * 5
            ySpace = (yMax / 100) * 4

            # Set limits for dashed lines representing the axes.
            # x-axis
            if abs(xMax) >= abs(xMin):
                extraX = xMax * .4
                limX = xMax * .3
            elif abs(xMax) < abs(xMin):
                extraX = abs(xMin) * .4
                limX = abs(xMin) * .3

            # y-axis
            if abs(yMax) >= abs(yMin):
                extraY = yMax * .4
                limY = yMax * .3
            elif abs(yMax) < abs(yMin):
                extraY = abs(yMin) * .4
                limY = abs(yMin) * .3

            # Loop through all coordinates (PC1,PC2) and names to plot scores.
            for ind, name in enumerate(objNames):
                ax.scatter(Xscores[ind,pc[0]-1], Xscores[ind,pc[1]-1], s=10, \
                        c='w', marker='o', edgecolor='grey')
                ax.text(Xscores[ind,pc[0]-1] + xSpace, \
                        Xscores[ind,pc[1]-1] + ySpace, name, fontsize=12)

            # Set limits for dashed lines representing axes
            xMaxLine = xMax + extraX
            xMinLine = xMin - extraX
            yMaxLine = yMax + extraY
            yMinLine = yMin - extraY

            # Plot dashes axes lines
            ax.plot([0,0], [yMaxLine,yMinLine], color='0.4', \
                    linestyle='dashed', linewidth=1)
            ax.plot([xMinLine,xMaxLine], [0,0], color='0.4', \
                    linestyle='dashed', linewidth=1)

            # Set limits for plot regions.
            xMaxLim = xMax + limX
            xMinLim = xMin - limX
            yMaxLim = yMax + limY
            yMinLim = yMin - limY
            ax.set_xlim(xMinLim,xMaxLim)
            ax.set_ylim(yMinLim,yMaxLim)

            # Plot title, axis names.
            ax.set_xlabel('{0} ({1}%)'.format(pcNames[pc[0]], \
                    str(round(XexplVar[pc[0]-1],1))))
            ax.set_ylabel('{0} ({1}%)'.format(pcNames[pc[1]], \
                    str(round(XexplVar[pc[1]-1],1))))
            ax.set_title('PCA scores plot')

            plt.show()

        # LOADINGS PLOT
        if item == 2:
            # Access PCA loadings and explained variances from model
            Xloadings = model.X_loadings()
            XexplVar = model.X_calExplVar()

            # Initiate plot
            fig = plt.figure()
            ax = fig.add_subplot(111)

            # Find maximum and minimum loadings along PC's selected by the user
            xMax = max(Xloadings[:,pc[0]-1])
            xMin = min(Xloadings[:,pc[0]-1])

            yMax = max(Xloadings[:,pc[1]-1])
            yMin = min(Xloadings[:,pc[1]-1])

            # Compute sufficient distance of label text from scatter point
            xSpace = (xMax / 100) * 5
            ySpace = (yMax / 100) * 4

            # Set limits for dashed lines representing the axes.
            # x-axis
            if abs(xMax) >= abs(xMin):
                extraX = xMax * .4
                limX = xMax * .3
            elif abs(xMax) < abs(xMin):
                extraX = abs(xMin) * .4
                limX = abs(xMin) * .3

            # y-axis
            if abs(yMax) >= abs(yMin):
                extraY = yMax * .4
                limY = yMax * .3
            elif abs(yMax) < abs(yMin):
                extraY = abs(yMin) * .4
                limY = abs(yMin) * .3

            # Loop through all coordinates (PC1,PC2) and names to plot loadings.
            for ind, name in enumerate(varNames):
                ax.scatter(Xloadings[ind,pc[0]-1], Xloadings[ind,pc[1]-1], \
                        s=10, c='w', marker='o', edgecolor='grey')
                ax.text(Xloadings[ind,pc[0]-1] + xSpace, \
                        Xloadings[ind,pc[1]-1] + ySpace, name, fontsize=12)

            # Set limits for dashed lines representing axes
            xMaxLine = xMax + extraX
            xMinLine = xMin - extraX
            yMaxLine = yMax + extraY
            yMinLine = yMin - extraY

            # Plot dashes axes lines
            ax.plot([0,0], [yMaxLine,yMinLine], color='0.4', \
                    linestyle='dashed', linewidth=1)
            ax.plot([xMinLine,xMaxLine], [0,0], color='0.4', \
                    linestyle='dashed', linewidth=1)

            # Set limits for plot regions.
            xMaxLim = xMax + limX
            xMinLim = xMin - limX
            yMaxLim = yMax + limY
            yMinLim = yMin - limY
            ax.set_xlim(xMinLim,xMaxLim)
            ax.set_ylim(yMinLim,yMaxLim)

            # Plot title, axis names.
            ax.set_xlabel('{0} ({1}%)'.format(pcNames[pc[0]], \
                    str(round(XexplVar[pc[0]-1],1))))
            ax.set_ylabel('{0} ({1}%)'.format(pcNames[pc[1]], \
                    str(round(XexplVar[pc[1]-1],1))))
            ax.set_title('PCA loadings plot')

            plt.show()

        # CORRELATION LOADINGS PLOT
        if item == 3:
            # Access PCA correlation loadings and explained variances from model
            XcorrLoadings = model.X_corrLoadings()
            XexplVar = model.X_calExplVar()

            # Compute label offsets locally. BUGFIX: the original reused
            # xSpace/ySpace left over from plots 1/2 and raised a NameError
            # when plot 3 was requested on its own (e.g. plots=[3]).
            xSpace = (max(XcorrLoadings[:,pc[0]-1]) / 100) * 5
            ySpace = (max(XcorrLoadings[:,pc[1]-1]) / 100) * 4

            # Compute coordinates for circles in correlation loadings plot
            t = np.arange(0.0, 2*np.pi, 0.01)

            # Coordinates for outer circle
            xcords = np.cos(t)
            ycords = np.sin(t)

            # Coordinates for inner circle (50% explained variance)
            xcords50percent = 0.707 * np.cos(t)
            ycords50percent = 0.707 * np.sin(t)

            # Initiate plot
            fig = plt.figure()
            ax = fig.add_subplot(111)
            ax.plot(xcords, ycords, 'b-')
            ax.plot(xcords50percent, ycords50percent, 'b-')

            # Loop through all coordinates (PC1,PC2) and names to plot
            # correlation loadings.
            for ind, name in enumerate(varNames):
                ax.scatter(XcorrLoadings[ind,pc[0]-1], \
                        XcorrLoadings[ind,pc[1]-1], \
                        s=10, c='w', marker='o', edgecolor='grey')
                ax.text(XcorrLoadings[ind,pc[0]-1] + xSpace, \
                        XcorrLoadings[ind,pc[1]-1] + ySpace, name, fontsize=12)

            # Plot lines through origo.
            left = -1.3; right = 1.3; top = 1.3; bottom = -1.3
            ax.plot([0,0], [top,bottom], color='0.4', linestyle='dashed', \
                    linewidth=1)
            ax.plot([left,right], [0,0], color='0.4', linestyle='dashed', \
                    linewidth=1)

            # Plot title, axis names.
            ax.set_xlabel('{0} ({1}%)'.format(pcNames[pc[0]], \
                    str(round(XexplVar[pc[0]-1],1))))
            ax.set_ylabel('{0} ({1}%)'.format(pcNames[pc[1]], \
                    str(round(XexplVar[pc[1]-1],1))))
            ax.set_title('PCA correlation loadings plot')
            ax.set_xlim(-1.4,1.4)
            ax.set_ylim(-1.1,1.1)

            plt.show()

        # Explained variances plot
        if item == 4:
            # Access cumulative explained variances from model.
            # NOTE(review): X_cumValExplVar is only populated when the model
            # was fitted with cross validation (cvType given) — confirm before
            # requesting plot 4 on a model without CV.
            cal = model.X_cumCalExplVar()
            val = model.X_cumValExplVar()

            # Plot explained variances
            fig = plt.figure()
            ax = fig.add_subplot(111)

            left = -0.2; right = len(pcNames) - 0.5; top = 105; bottom = -5

            xPos = range(len(pcNames))
            ax.plot(xPos, cal, color='0.4', linestyle='solid', linewidth=1, \
                    label='calibrated explained variance')
            ax.plot(xPos, val, color='0.4', linestyle='dashed', linewidth=1, \
                    label='validated explained variance')

            ax.set_xticks(xPos)

            ax.set_xticklabels((pcNames), rotation=0, ha='center')
            ax.set_ylabel('Explained variance')

            plt.legend(loc='lower right', shadow=True, labelspacing=.1)
            ltext = plt.gca().get_legend().get_texts()
            plt.setp(ltext[0], fontsize = 10, color = 'k')

            ax.set_xlim(left,right)
            ax.set_ylim(bottom,top)

            plt.show()
| [
"numpy.sum",
"cross_val.LeaveOneOut",
"statTools.centre",
"cross_val.LeaveOneLabelOut",
"numpy.shape",
"matplotlib.pyplot.figure",
"numpy.sin",
"numpy.arange",
"numpy.linalg.norm",
"matplotlib.pyplot.gca",
"cross_val.split",
"numpy.std",
"numpy.transpose",
"matplotlib.pyplot.setp",
"nump... | [((6270, 6298), 'numpy.hstack', 'np.hstack', (['self.X_scoresList'], {}), '(self.X_scoresList)\n', (6279, 6298), True, 'import numpy as np\n'), ((6319, 6349), 'numpy.hstack', 'np.hstack', (['self.X_loadingsList'], {}), '(self.X_loadingsList)\n', (6328, 6349), True, 'import numpy as np\n'), ((8673, 8703), 'numpy.sqrt', 'np.sqrt', (['self.MSEEarr_indVar_X'], {}), '(self.MSEEarr_indVar_X)\n', (8680, 8703), True, 'import numpy as np\n'), ((10431, 10470), 'numpy.sum', 'np.sum', (['self.PRESSEarr_indVar_X'], {'axis': '(1)'}), '(self.PRESSEarr_indVar_X, axis=1)\n', (10437, 10470), True, 'import numpy as np\n'), ((12631, 12662), 'numpy.sqrt', 'np.sqrt', (['self.MSEE_total_list_X'], {}), '(self.MSEE_total_list_X)\n', (12638, 12662), True, 'import numpy as np\n'), ((28159, 28189), 'numpy.transpose', 'np.transpose', (['arr_corrLoadings'], {}), '(arr_corrLoadings)\n', (28171, 28189), True, 'import numpy as np\n'), ((34453, 34484), 'numpy.arange', 'np.arange', (['(0.0)', '(2 * np.pi)', '(0.01)'], {}), '(0.0, 2 * np.pi, 0.01)\n', (34462, 34484), True, 'import numpy as np\n'), ((34575, 34584), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (34581, 34584), True, 'import numpy as np\n'), ((34609, 34618), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (34615, 34618), True, 'import numpy as np\n'), ((2941, 2976), 'numpy.average', 'np.average', (['self.arrX_input'], {'axis': '(0)'}), '(self.arrX_input, axis=0)\n', (2951, 2976), True, 'import numpy as np\n'), ((3013, 3052), 'numpy.std', 'np.std', (['self.arrX_input'], {'axis': '(0)', 'ddof': '(1)'}), '(self.arrX_input, axis=0, ddof=1)\n', (3019, 3052), True, 'import numpy as np\n'), ((3161, 3196), 'numpy.average', 'np.average', (['self.arrX_input'], {'axis': '(0)'}), '(self.arrX_input, axis=0)\n', (3171, 3196), True, 'import numpy as np\n'), ((9231, 9262), 'numpy.shape', 'np.shape', (['self.MSEEarr_indVar_X'], {}), '(self.MSEEarr_indVar_X)\n', (9239, 9262), True, 'import numpy as np\n'), ((10982, 11019), 'numpy.sum', 'np.sum', 
(['self.MSEEarr_indVar_X'], {'axis': '(1)'}), '(self.MSEEarr_indVar_X, axis=1)\n', (10988, 11019), True, 'import numpy as np\n'), ((11537, 11586), 'numpy.average', 'np.average', (['self.cumCalExplVarXarr_indVar'], {'axis': '(1)'}), '(self.cumCalExplVarXarr_indVar, axis=1)\n', (11547, 11586), True, 'import numpy as np\n'), ((20885, 20924), 'numpy.var', 'np.var', (['self.arrX_input'], {'axis': '(0)', 'ddof': '(1)'}), '(self.arrX_input, axis=0, ddof=1)\n', (20891, 20924), True, 'import numpy as np\n'), ((21945, 21976), 'numpy.sqrt', 'np.sqrt', (['self.MSECVarr_indVar_X'], {}), '(self.MSECVarr_indVar_X)\n', (21952, 21976), True, 'import numpy as np\n'), ((23765, 23805), 'numpy.sum', 'np.sum', (['self.PRESSCVarr_indVar_X'], {'axis': '(1)'}), '(self.PRESSCVarr_indVar_X, axis=1)\n', (23771, 23805), True, 'import numpy as np\n'), ((25881, 25913), 'numpy.sqrt', 'np.sqrt', (['self.MSECV_total_list_X'], {}), '(self.MSECV_total_list_X)\n', (25888, 25913), True, 'import numpy as np\n'), ((34692, 34701), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (34698, 34701), True, 'import numpy as np\n'), ((34733, 34742), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (34739, 34742), True, 'import numpy as np\n'), ((36418, 36430), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (36428, 36430), True, 'import matplotlib.pyplot as plt\n'), ((39252, 39262), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (39260, 39262), True, 'import matplotlib.pyplot as plt\n'), ((39553, 39565), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (39563, 39565), True, 'import matplotlib.pyplot as plt\n'), ((42393, 42403), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (42401, 42403), True, 'import matplotlib.pyplot as plt\n'), ((42760, 42791), 'numpy.arange', 'np.arange', (['(0.0)', '(2 * np.pi)', '(0.01)'], {}), '(0.0, 2 * np.pi, 0.01)\n', (42769, 42791), True, 'import numpy as np\n'), ((42867, 42876), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (42873, 42876), True, 
'import numpy as np\n'), ((42898, 42907), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (42904, 42907), True, 'import numpy as np\n'), ((43119, 43131), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (43129, 43131), True, 'import matplotlib.pyplot as plt\n'), ((44647, 44657), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (44655, 44657), True, 'import matplotlib.pyplot as plt\n'), ((44979, 44991), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (44989, 44991), True, 'import matplotlib.pyplot as plt\n'), ((45622, 45682), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""lower right"""', 'shadow': '(True)', 'labelspacing': '(0.1)'}), "(loc='lower right', shadow=True, labelspacing=0.1)\n", (45632, 45682), True, 'import matplotlib.pyplot as plt\n'), ((45749, 45791), 'matplotlib.pyplot.setp', 'plt.setp', (['ltext[0]'], {'fontsize': '(10)', 'color': '"""k"""'}), "(ltext[0], fontsize=10, color='k')\n", (45757, 45791), True, 'import matplotlib.pyplot as plt\n'), ((45906, 45916), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (45914, 45916), True, 'import matplotlib.pyplot as plt\n'), ((2009, 2023), 'numpy.shape', 'np.shape', (['arrX'], {}), '(arrX)\n', (2017, 2023), True, 'import numpy as np\n'), ((2066, 2080), 'numpy.shape', 'np.shape', (['arrX'], {}), '(arrX)\n', (2074, 2080), True, 'import numpy as np\n'), ((4915, 4929), 'numpy.linalg.norm', 'npla.norm', (['num'], {}), '(num)\n', (4924, 4929), True, 'import numpy.linalg as npla\n'), ((5003, 5019), 'numpy.dot', 'np.dot', (['X_new', 'p'], {}), '(X_new, p)\n', (5009, 5019), True, 'import numpy as np\n'), ((5688, 5703), 'numpy.transpose', 'np.transpose', (['p'], {}), '(p)\n', (5700, 5703), True, 'import numpy as np\n'), ((7189, 7212), 'numpy.transpose', 'np.transpose', (['part_arrP'], {}), '(part_arrP)\n', (7201, 7212), True, 'import numpy as np\n'), ((7874, 7900), 'statTools.centre', 'st.centre', (['self.arrX_input'], {}), '(self.arrX_input)\n', (7883, 7900), True, 'import 
statTools as st\n'), ((8168, 8194), 'statTools.centre', 'st.centre', (['self.arrX_input'], {}), '(self.arrX_input)\n', (8177, 8194), True, 'import statTools as st\n'), ((8197, 8212), 'statTools.centre', 'st.centre', (['Xhat'], {}), '(Xhat)\n', (8206, 8212), True, 'import statTools as st\n'), ((8250, 8266), 'numpy.square', 'np.square', (['diffX'], {}), '(diffX)\n', (8259, 8266), True, 'import numpy as np\n'), ((8611, 8636), 'numpy.shape', 'np.shape', (['self.arrX_input'], {}), '(self.arrX_input)\n', (8619, 8636), True, 'import numpy as np\n'), ((9725, 9758), 'numpy.shape', 'np.shape', (['self.PRESSEarr_indVar_X'], {}), '(self.PRESSEarr_indVar_X)\n', (9733, 9758), True, 'import numpy as np\n'), ((11040, 11065), 'numpy.shape', 'np.shape', (['self.arrX_input'], {}), '(self.arrX_input)\n', (11048, 11065), True, 'import numpy as np\n'), ((13191, 13210), 'numpy.shape', 'np.shape', (['self.arrX'], {}), '(self.arrX)\n', (13199, 13210), True, 'import numpy as np\n'), ((13321, 13343), 'cross_val.LeaveOneOut', 'cv.LeaveOneOut', (['numObj'], {}), '(numObj)\n', (13335, 13343), True, 'import cross_val as cv\n'), ((14889, 14939), 'cross_val.split', 'cv.split', (['train_index', 'test_index', 'self.arrX_input'], {}), '(train_index, test_index, self.arrX_input)\n', (14897, 14939), True, 'import cross_val as cv\n'), ((18148, 18169), 'numpy.hstack', 'np.hstack', (['scoresList'], {}), '(scoresList)\n', (18157, 18169), True, 'import numpy as np\n'), ((18193, 18216), 'numpy.hstack', 'np.hstack', (['loadingsList'], {}), '(loadingsList)\n', (18202, 18216), True, 'import numpy as np\n'), ((18447, 18472), 'numpy.dot', 'np.dot', (['X_test_proc', 'valP'], {}), '(X_test_proc, valP)\n', (18453, 18472), True, 'import numpy as np\n'), ((19666, 19699), 'numpy.vstack', 'np.vstack', (['self.valXpredDict[ind]'], {}), '(self.valXpredDict[ind])\n', (19675, 19699), True, 'import numpy as np\n'), ((20006, 20022), 'numpy.vstack', 'np.vstack', (['preds'], {}), '(preds)\n', (20015, 20022), True, 'import numpy 
as np\n'), ((22530, 22562), 'numpy.shape', 'np.shape', (['self.MSECVarr_indVar_X'], {}), '(self.MSECVarr_indVar_X)\n', (22538, 22562), True, 'import numpy as np\n'), ((24356, 24394), 'numpy.sum', 'np.sum', (['self.MSECVarr_indVar_X'], {'axis': '(1)'}), '(self.MSECVarr_indVar_X, axis=1)\n', (24362, 24394), True, 'import numpy as np\n'), ((24954, 25003), 'numpy.average', 'np.average', (['self.cumValExplVarXarr_indVar'], {'axis': '(1)'}), '(self.cumValExplVarXarr_indVar, axis=1)\n', (24964, 25003), True, 'import numpy as np\n'), ((27752, 27771), 'numpy.shape', 'np.shape', (['self.arrT'], {}), '(self.arrT)\n', (27760, 27771), True, 'import numpy as np\n'), ((28032, 28062), 'numpy.corrcoef', 'np.corrcoef', (['PCscores', 'origVar'], {}), '(PCscores, origVar)\n', (28043, 28062), True, 'import numpy as np\n'), ((43002, 43011), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (43008, 43011), True, 'import numpy as np\n'), ((43050, 43059), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (43056, 43059), True, 'import numpy as np\n'), ((4867, 4886), 'numpy.transpose', 'np.transpose', (['X_new'], {}), '(X_new)\n', (4879, 4886), True, 'import numpy as np\n'), ((5131, 5146), 'numpy.square', 'np.square', (['diff'], {}), '(diff)\n', (5140, 5146), True, 'import numpy as np\n'), ((13440, 13476), 'cross_val.LeavePOut', 'cv.LeavePOut', (['numObj', 'self.cvType[1]'], {}), '(numObj, self.cvType[1])\n', (13452, 13476), True, 'import cross_val as cv\n'), ((18496, 18511), 'numpy.shape', 'np.shape', (['projT'], {}), '(projT)\n', (18504, 18511), True, 'import numpy as np\n'), ((21037, 21054), 'numpy.shape', 'np.shape', (['X_train'], {}), '(X_train)\n', (21045, 21054), True, 'import numpy as np\n'), ((21396, 21422), 'statTools.centre', 'st.centre', (['self.arrX_input'], {}), '(self.arrX_input)\n', (21405, 21422), True, 'import statTools as st\n'), ((21425, 21440), 'statTools.centre', 'st.centre', (['Xhat'], {}), '(Xhat)\n', (21434, 21440), True, 'import statTools as st\n'), ((21483, 21499), 
'numpy.square', 'np.square', (['diffX'], {}), '(diffX)\n', (21492, 21499), True, 'import numpy as np\n'), ((21878, 21903), 'numpy.shape', 'np.shape', (['self.arrX_input'], {}), '(self.arrX_input)\n', (21886, 21903), True, 'import numpy as np\n'), ((23083, 23117), 'numpy.shape', 'np.shape', (['self.PRESSCVarr_indVar_X'], {}), '(self.PRESSCVarr_indVar_X)\n', (23091, 23117), True, 'import numpy as np\n'), ((24419, 24444), 'numpy.shape', 'np.shape', (['self.arrX_input'], {}), '(self.arrX_input)\n', (24427, 24444), True, 'import numpy as np\n'), ((27571, 27590), 'numpy.shape', 'np.shape', (['self.arrT'], {}), '(self.arrT)\n', (27579, 27590), True, 'import numpy as np\n'), ((27609, 27628), 'numpy.shape', 'np.shape', (['self.arrP'], {}), '(self.arrP)\n', (27617, 27628), True, 'import numpy as np\n'), ((27939, 27958), 'numpy.shape', 'np.shape', (['self.arrX'], {}), '(self.arrX)\n', (27947, 27958), True, 'import numpy as np\n'), ((13575, 13610), 'cross_val.LeaveOneLabelOut', 'cv.LeaveOneLabelOut', (['self.cvType[1]'], {}), '(self.cvType[1])\n', (13594, 13610), True, 'import cross_val as cv\n'), ((17068, 17082), 'numpy.linalg.norm', 'npla.norm', (['num'], {}), '(num)\n', (17077, 17082), True, 'import numpy.linalg as npla\n'), ((17180, 17196), 'numpy.dot', 'np.dot', (['X_new', 'p'], {}), '(X_new, p)\n', (17186, 17196), True, 'import numpy as np\n'), ((17979, 17994), 'numpy.transpose', 'np.transpose', (['p'], {}), '(p)\n', (17991, 17994), True, 'import numpy as np\n'), ((18878, 18901), 'numpy.transpose', 'np.transpose', (['part_valP'], {}), '(part_valP)\n', (18890, 18901), True, 'import numpy as np\n'), ((15397, 15424), 'numpy.average', 'np.average', (['X_train'], {'axis': '(0)'}), '(X_train, axis=0)\n', (15407, 15424), True, 'import numpy as np\n'), ((15473, 15504), 'numpy.std', 'np.std', (['X_train'], {'axis': '(0)', 'ddof': '(1)'}), '(X_train, axis=0, ddof=1)\n', (15479, 15504), True, 'import numpy as np\n'), ((15838, 15865), 'numpy.average', 'np.average', (['X_train'], 
{'axis': '(0)'}), '(X_train, axis=0)\n', (15848, 15865), True, 'import numpy as np\n'), ((17012, 17031), 'numpy.transpose', 'np.transpose', (['X_new'], {}), '(X_new)\n', (17024, 17031), True, 'import numpy as np\n'), ((17340, 17355), 'numpy.square', 'np.square', (['diff'], {}), '(diff)\n', (17349, 17355), True, 'import numpy as np\n'), ((20981, 21006), 'numpy.shape', 'np.shape', (['self.arrX_input'], {}), '(self.arrX_input)\n', (20989, 21006), True, 'import numpy as np\n'), ((45702, 45711), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (45709, 45711), True, 'import matplotlib.pyplot as plt\n')] |
""" test_distance.py
Tests the isi- and spike-distance computation
Copyright 2014, <NAME> <<EMAIL>>
Distributed under the BSD License
"""
from __future__ import print_function
import numpy as np
from copy import copy
from numpy.testing import assert_equal, assert_almost_equal, \
assert_array_almost_equal
import pyspike as spk
from pyspike import SpikeTrain
import os
TEST_PATH = os.path.dirname(os.path.realpath(__file__))
def test_isi():
    """Check the ISI profile and distance against a pen&paper calculation."""
    train_a = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0)
    train_b = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0)
    # hand-computed piecewise-constant ISI profile
    times = np.array([0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0])
    values = np.array([0.1/0.3, 0.1/0.3, 0.05/0.2, 0.05/0.2, 0.15/0.35,
                       0.25/0.35, 0.05/0.35, 0.2/0.3, 0.25/0.3, 0.25/0.3])
    # time-weighted average of the profile over the whole interval
    avrg = sum((times[1:] - times[:-1]) *
               values)/(times[-1]-times[0])
    profile = spk.isi_profile(train_a, train_b)
    print("ISI value:", avrg)
    assert_equal(profile.x, times)
    assert_array_almost_equal(profile.y, values, decimal=15)
    assert_equal(profile.avrg(), avrg)
    assert_equal(spk.isi_distance(train_a, train_b), avrg)
    # second case: the two trains share some spike times
    train_a = SpikeTrain([0.2, 0.4, 0.6], [0.0, 1.0])
    train_b = SpikeTrain([0.1, 0.4, 0.5, 0.6], [0.0, 1.0])
    times = np.array([0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0])
    values = np.array([0.1/0.3, 0.1/0.3, 0.1/0.3, 0.1/0.2, 0.1/0.2, 0.0/0.5])
    avrg = sum((times[1:] - times[:-1]) *
               values)/(times[-1]-times[0])
    profile = spk.isi_profile(train_a, train_b)
    assert_equal(profile.x, times)
    assert_array_almost_equal(profile.y, values, decimal=15)
    assert_equal(profile.avrg(), avrg)
    assert_equal(spk.isi_distance(train_a, train_b), avrg)
def test_spike():
    """Check the SPIKE profile and distance against reference values from
    SPIKY and against a pen&paper calculation."""
    # generate two spike trains:
    t1 = SpikeTrain([0.0, 2.0, 5.0, 8.0], 10.0)
    t2 = SpikeTrain([0.0, 1.0, 5.0, 9.0], 10.0)
    expected_times = np.array([0.0, 1.0, 2.0, 5.0, 8.0, 9.0, 10.0])
    f = spk.spike_profile(t1, t2)
    assert_equal(f.x, expected_times)
    # from SPIKY:
    # interleaved left/right profile values computed by the SPIKY reference
    # implementation; y1 are the even entries, y2 the odd ones
    y_all = np.array([0.000000000000000000, 0.555555555555555580,
                      0.222222222222222210, 0.305555555555555580,
                      0.255102040816326536, 0.000000000000000000,
                      0.000000000000000000, 0.255102040816326536,
                      0.255102040816326536, 0.285714285714285698,
                      0.285714285714285698, 0.285714285714285698])
    #assert_array_almost_equal(f.y1, y_all[::2])
    assert_array_almost_equal(f.y2, y_all[1::2])
    assert_almost_equal(f.avrg(), 0.186309523809523814, decimal=15)
    # the distance must equal the average of the profile
    assert_equal(spk.spike_distance(t1, t2), f.avrg())
    t1 = SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0)
    t2 = SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0)
    # pen&paper calculation of the spike distance
    expected_times = [0.0, 0.2, 0.3, 0.4, 0.45, 0.6, 0.7, 0.8, 0.9, 0.95, 1.0]
    # s1/s2: hand-computed spike-time differences for each train,
    # isi1/isi2: the corresponding inter-spike intervals
    s1 = np.array([0.1, 0.1, (0.1*0.1+0.05*0.1)/0.2, 0.05, (0.05*0.15 * 2)/0.2,
                   0.15, 0.1, (0.1*0.1+0.1*0.2)/0.3, (0.1*0.2+0.1*0.1)/0.3,
                   (0.1*0.05+0.1*0.25)/0.3, 0.1])
    s2 = np.array([0.1, (0.1*0.2+0.1*0.1)/0.3, 0.1, (0.1*0.05 * 2)/.15, 0.05,
                   (0.05*0.2+0.1*0.15)/0.35, (0.05*0.1+0.1*0.25)/0.35,
                   0.1, 0.1, 0.05, 0.05])
    isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.1, 0.3, 0.3, 0.3, 0.3])
    isi2 = np.array([0.3, 0.3, 0.15, 0.15, 0.35, 0.35, 0.35, 0.1, 0.05, 0.05])
    # weighted SPIKE profile, evaluated at left (y1) and right (y2) limits
    expected_y1 = (s1[:-1]*isi2+s2[:-1]*isi1) / (0.5*(isi1+isi2)**2)
    expected_y2 = (s1[1:]*isi2+s2[1:]*isi1) / (0.5*(isi1+isi2)**2)
    expected_times = np.array(expected_times)
    expected_y1 = np.array(expected_y1)
    expected_y2 = np.array(expected_y2)
    # time-weighted average of the (left+right)/2 profile
    expected_spike_val = sum((expected_times[1:] - expected_times[:-1]) *
                             (expected_y1+expected_y2)/2)
    expected_spike_val /= (expected_times[-1]-expected_times[0])
    print("SPIKE value:", expected_spike_val)
    f = spk.spike_profile(t1, t2)
    assert_equal(f.x, expected_times)
    assert_array_almost_equal(f.y1, expected_y1, decimal=15)
    assert_array_almost_equal(f.y2, expected_y2, decimal=15)
    assert_almost_equal(f.avrg(), expected_spike_val, decimal=15)
    assert_almost_equal(spk.spike_distance(t1, t2), expected_spike_val,
                        decimal=15)
    # check with some equal spike times
    t1 = SpikeTrain([0.2, 0.4, 0.6], [0.0, 1.0])
    t2 = SpikeTrain([0.1, 0.4, 0.5, 0.6], [0.0, 1.0])
    expected_times = [0.0, 0.1, 0.2, 0.4, 0.5, 0.6, 1.0]
    # due to the edge correction in the beginning, s1 and s2 are different
    # for left and right values
    s1_r = np.array([0.1, (0.1*0.1+0.1*0.1)/0.2, 0.1, 0.0, 0.0, 0.0, 0.0])
    s1_l = np.array([0.1, (0.1*0.1+0.1*0.1)/0.2, 0.1, 0.0, 0.0, 0.0, 0.0])
    # s2_r = np.array([0.1*0.1/0.3, 0.1*0.3/0.3, 0.1*0.2/0.3,
    #                  0.0, 0.1, 0.0, 0.0])
    # s2_l = np.array([0.1*0.1/0.3, 0.1*0.1/0.3, 0.1*0.2/0.3, 0.0,
    #                  0.1, 0.0, 0.0])
    # eero's edge correction:
    s2_r = np.array([0.1, 0.1*0.3/0.3, 0.1*0.2/0.3,
                     0.0, 0.1, 0.0, 0.0])
    s2_l = np.array([0.1, 0.1*0.3/0.3, 0.1*0.2/0.3, 0.0,
                     0.1, 0.0, 0.0])
    isi1 = np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.4])
    isi2 = np.array([0.3, 0.3, 0.3, 0.1, 0.1, 0.4])
    expected_y1 = (s1_r[:-1]*isi2+s2_r[:-1]*isi1) / (0.5*(isi1+isi2)**2)
    expected_y2 = (s1_l[1:]*isi2+s2_l[1:]*isi1) / (0.5*(isi1+isi2)**2)
    expected_times = np.array(expected_times)
    expected_y1 = np.array(expected_y1)
    expected_y2 = np.array(expected_y2)
    expected_spike_val = sum((expected_times[1:] - expected_times[:-1]) *
                             (expected_y1+expected_y2)/2)
    expected_spike_val /= (expected_times[-1]-expected_times[0])
    f = spk.spike_profile(t1, t2)
    assert_equal(f.x, expected_times)
    assert_array_almost_equal(f.y1, expected_y1, decimal=14)
    assert_array_almost_equal(f.y2, expected_y2, decimal=14)
    assert_almost_equal(f.avrg(), expected_spike_val, decimal=16)
    assert_almost_equal(spk.spike_distance(t1, t2), expected_spike_val,
                        decimal=16)
def test_spike_sync():
    """Check SPIKE synchronization profiles and values for small examples."""
    train = SpikeTrain([1.0, 2.0, 3.0], 4.0)
    partner = SpikeTrain([2.1], 4.0)
    profile = spk.spike_sync_profile(train, partner)
    assert_array_almost_equal(profile.x,
                              np.array([0.0, 1.0, 2.0, 2.1, 3.0, 4.0]),
                              decimal=16)
    assert_array_almost_equal(profile.y,
                              np.array([0.0, 0.0, 1.0, 1.0, 0.0, 0.0]),
                              decimal=16)
    assert_almost_equal(spk.spike_sync(train, partner),
                        0.5, decimal=16)
    # with a sufficiently small coincidence window nothing is synchronous
    assert_almost_equal(spk.spike_sync(train, partner, max_tau=0.05),
                        0.0, decimal=16)
    partner = SpikeTrain([1.1], 4.0)
    profile = spk.spike_sync_profile(train, partner)
    assert_array_almost_equal(profile.x,
                              np.array([0.0, 1.0, 1.1, 2.0, 3.0, 4.0]),
                              decimal=16)
    assert_array_almost_equal(profile.y,
                              np.array([1.0, 1.0, 1.0, 0.0, 0.0, 0.0]),
                              decimal=16)
    assert_almost_equal(spk.spike_sync(train, partner),
                        0.5, decimal=16)
    # a single spike coinciding with one of the three always gives 0.5
    for lone_spike in (3.1, 0.9, 3.0, 1.0):
        assert_almost_equal(spk.spike_sync(train,
                                           SpikeTrain([lone_spike], 4.0)),
                            0.5, decimal=16)
    assert_almost_equal(spk.spike_sync(train, SpikeTrain([1.5, 3.0], 4.0)),
                        0.4, decimal=16)
    # pairwise profiles accumulate additively via add()
    train = SpikeTrain([1.0, 2.0, 4.0], 4.0)
    partner = SpikeTrain([3.8], 4.0)
    third = SpikeTrain([3.9, ], 4.0)
    profile = spk.spike_sync_profile(train, partner)
    assert_array_almost_equal(profile.x,
                              np.array([0.0, 1.0, 2.0, 3.8, 4.0, 4.0]),
                              decimal=16)
    assert_array_almost_equal(profile.y,
                              np.array([0.0, 0.0, 0.0, 1.0, 1.0, 1.0]),
                              decimal=16)
    other_profile = spk.spike_sync_profile(partner, third)
    int_first = profile.integral()
    int_second = other_profile.integral()
    profile.add(other_profile)
    int_combined = profile.integral()
    assert_equal(int_first[0]+int_second[0], int_combined[0])
    assert_equal(int_first[1]+int_second[1], int_combined[1])
def check_multi_profile(profile_func, profile_func_multi, dist_func_multi):
    """Verify that the multivariate profile matches the average of the
    pairwise profiles and that profile average and distance agree."""
    trains = [SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0),
              SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0),
              SpikeTrain([0.2, 0.4, 0.6], 1.0),
              SpikeTrain([0.1, 0.4, 0.5, 0.6], 1.0)]
    # all six pairwise profiles, keyed by train indices
    pairwise = {(i, j): profile_func(trains[i], trains[j])
                for i in range(4) for j in range(i + 1, 4)}
    # two trains: the multivariate profile is just the pairwise one
    multi = profile_func_multi(trains, [0, 1])
    assert multi.almost_equal(pairwise[(0, 1)], decimal=14)
    assert_equal(multi.avrg(), dist_func_multi(trains, [0, 1]))
    # selecting trains via indices or via slicing must agree
    multi_by_index = profile_func_multi(trains, [1, 2, 3])
    multi_by_slice = profile_func_multi(trains[1:])
    assert multi_by_index.almost_equal(multi_by_slice, decimal=14)
    assert_almost_equal(multi_by_index.avrg(),
                        dist_func_multi(trains, [1, 2, 3]), decimal=14)
    # three trains: average of the three pairwise profiles
    accum = copy(pairwise[(0, 1)])
    accum.add(pairwise[(0, 2)])
    accum.add(pairwise[(1, 2)])
    accum.mul_scalar(1.0/3)
    multi = profile_func_multi(trains, [0, 1, 2])
    assert multi.almost_equal(accum, decimal=14)
    assert_almost_equal(multi.avrg(), dist_func_multi(trains, [0, 1, 2]),
                        decimal=14)
    # all four trains: average of all six pairwise profiles
    accum.mul_scalar(3)  # revert the normalization above
    accum.add(pairwise[(0, 3)])
    accum.add(pairwise[(1, 3)])
    accum.add(pairwise[(2, 3)])
    accum.mul_scalar(1.0/6)
    multi = profile_func_multi(trains)
    assert multi.almost_equal(accum, decimal=14)
def test_multi_isi():
    """Run the generic multivariate-profile checks for the ISI distance."""
    check_multi_profile(spk.isi_profile, spk.isi_profile_multi,
                        spk.isi_distance_multi)
def test_multi_spike():
    """Run the generic multivariate-profile checks for the SPIKE distance."""
    check_multi_profile(spk.spike_profile, spk.spike_profile_multi,
                        spk.spike_distance_multi)
def test_multi_spike_sync():
    """Multivariate SPIKE synchronization: a small three-train example plus
    a regression test against a stored data set."""
    # some basic multivariate check
    spikes1 = SpikeTrain([100, 300, 400, 405, 410, 500, 700, 800,
                          805, 810, 815, 900], 1000)
    spikes2 = SpikeTrain([100, 200, 205, 210, 295, 350, 400, 510,
                          600, 605, 700, 910], 1000)
    spikes3 = SpikeTrain([100, 180, 198, 295, 412, 420, 510, 640,
                          695, 795, 820, 920], 1000)
    # every pair of these trains is half-synchronous
    assert_almost_equal(spk.spike_sync(spikes1, spikes2),
                        0.5, decimal=15)
    assert_almost_equal(spk.spike_sync(spikes1, spikes3),
                        0.5, decimal=15)
    assert_almost_equal(spk.spike_sync(spikes2, spikes3),
                        0.5, decimal=15)
    f = spk.spike_sync_profile_multi([spikes1, spikes2, spikes3])
    # hands on definition of the average multivariate spike synchronization
    # expected = (f1.integral() + f2.integral() + f3.integral()) / \
    #            (np.sum(f1.mp[1:-1])+np.sum(f2.mp[1:-1])+np.sum(f3.mp[1:-1]))
    expected = 0.5
    assert_almost_equal(f.avrg(), expected, decimal=15)
    assert_almost_equal(spk.spike_sync_multi([spikes1, spikes2, spikes3]),
                        expected, decimal=15)
    # multivariate regression test
    spike_trains = spk.load_spike_trains_from_txt(
        os.path.join(TEST_PATH, "SPIKE_Sync_Test.txt"), edges=[0, 4000])
    # extract all spike times
    spike_times = np.array([])
    for st in spike_trains:
        spike_times = np.append(spike_times, st.spikes)
    spike_times = np.unique(np.sort(spike_times))
    f = spk.spike_sync_profile_multi(spike_trains)
    # the multivariate profile changes exactly at the spike times
    assert_equal(spike_times, f.x[1:-1])
    assert_equal(len(f.x), len(f.y))
    # regression checksums for the stored data set
    assert_equal(np.sum(f.y[1:-1]), 39932)
    assert_equal(np.sum(f.mp[1:-1]), 85554)
    # example with 2 empty spike trains
    sts = []
    sts.append(SpikeTrain([1, 9], [0, 10]))
    sts.append(SpikeTrain([1, 3], [0, 10]))
    sts.append(SpikeTrain([], [0, 10]))
    sts.append(SpikeTrain([], [0, 10]))
    assert_almost_equal(spk.spike_sync_multi(sts), 1.0/6.0, decimal=15)
    assert_almost_equal(spk.spike_sync_profile_multi(sts).avrg(), 1.0/6.0,
                        decimal=15)
def check_dist_matrix(dist_func, dist_matrix_func):
    """Verify the pairwise distance matrix: zero diagonal, symmetry, and
    agreement of every entry with the corresponding bivariate distance."""
    trains = [SpikeTrain([0.2, 0.4, 0.6, 0.7], 1.0),
              SpikeTrain([0.3, 0.45, 0.8, 0.9, 0.95], 1.0),
              SpikeTrain([0.2, 0.4, 0.6], 1.0),
              SpikeTrain([0.1, 0.4, 0.5, 0.6], 1.0)]
    # bivariate distances for every pair, keyed by train indices
    pairwise = {(i, j): dist_func(trains[i], trains[j])
                for i in range(4) for j in range(i + 1, 4)}
    matrix = dist_matrix_func(trains)
    for i in range(4):
        # self-distance must vanish
        assert_equal(0.0, matrix[i, i])
        for j in range(i + 1, 4):
            # symmetric ...
            assert_equal(matrix[i, j], matrix[j, i])
            # ... and equal to the bivariate distance
            assert_equal(pairwise[(i, j)], matrix[j, i])
def test_isi_matrix():
    """Run the generic distance-matrix checks for the ISI distance."""
    check_dist_matrix(spk.isi_distance, spk.isi_distance_matrix)
def test_spike_matrix():
    """Run the generic distance-matrix checks for the SPIKE distance."""
    check_dist_matrix(spk.spike_distance, spk.spike_distance_matrix)
def test_spike_sync_matrix():
    """Run the generic distance-matrix checks for SPIKE synchronization."""
    check_dist_matrix(spk.spike_sync, spk.spike_sync_matrix)
def test_regression_spiky():
    """Regression tests against reference values obtained from SPIKY.

    The distances are floating-point results of non-trivial computations,
    so they are compared with ``assert_almost_equal(..., decimal=15)``
    instead of exact equality — exact ``assert_equal`` on computed floats
    is fragile across platforms/BLAS builds and inconsistent with the
    other regression checks in this file.
    """
    # standard example
    st1 = SpikeTrain(np.arange(100, 1201, 100), 1300)
    st2 = SpikeTrain(np.arange(100, 1201, 110), 1300)
    isi_dist = spk.isi_distance(st1, st2)
    assert_almost_equal(isi_dist, 9.0909090909090939e-02, decimal=15)
    isi_profile = spk.isi_profile(st1, st2)
    # the ISI profile is constant 0.1/1.1 for this pair of trains
    assert_array_almost_equal(isi_profile.y,
                              0.1/1.1 * np.ones_like(isi_profile.y),
                              decimal=15)
    spike_dist = spk.spike_distance(st1, st2)
    assert_almost_equal(spike_dist, 0.211058782487353908, decimal=15)
    spike_sync = spk.spike_sync(st1, st2)
    assert_almost_equal(spike_sync, 8.6956521739130432e-01, decimal=15)
    # multivariate check
    spike_trains = spk.load_spike_trains_from_txt(
        os.path.join(TEST_PATH, "PySpike_testdata.txt"), (0.0, 4000.0))
    isi_dist = spk.isi_distance_multi(spike_trains)
    # get the full precision from SPIKY
    assert_almost_equal(isi_dist, 0.17051816816999129656, decimal=15)
    spike_profile = spk.spike_profile_multi(spike_trains)
    # number of profile support points is an exact integer regression value
    assert_equal(len(spike_profile.y1)+len(spike_profile.y2), 1252)
    spike_dist = spk.spike_distance_multi(spike_trains)
    # get the full precision from SPIKY
    assert_almost_equal(spike_dist, 0.25188056475463755, decimal=15)
    spike_sync = spk.spike_sync_multi(spike_trains)
    # get the full precision from SPIKY
    assert_almost_equal(spike_sync, 0.7183531505298066, decimal=15)
    # Eero's edge correction example
    st1 = SpikeTrain([0.5, 1.5, 2.5], 6.0)
    st2 = SpikeTrain([3.5, 4.5, 5.5], 6.0)
    f = spk.spike_profile(st1, st2)
    expected_times = np.array([0.0, 0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.0])
    # interleaved left/right profile values; y1 even entries, y2 odd entries
    y_all = np.array([0.271604938271605, 0.271604938271605, 0.271604938271605,
                      0.617283950617284, 0.617283950617284, 0.444444444444444,
                      0.285714285714286, 0.285714285714286, 0.444444444444444,
                      0.617283950617284, 0.617283950617284, 0.271604938271605,
                      0.271604938271605, 0.271604938271605])
    expected_y1 = y_all[::2]
    expected_y2 = y_all[1::2]
    assert_equal(f.x, expected_times)
    assert_array_almost_equal(f.y1, expected_y1, decimal=14)
    assert_array_almost_equal(f.y2, expected_y2, decimal=14)
def test_multi_variate_subsets():
    """Selecting a subset via the indices argument must give the same
    result as passing the sliced list of spike trains directly."""
    spike_trains = spk.load_spike_trains_from_txt(
        os.path.join(TEST_PATH, "PySpike_testdata.txt"), (0.0, 4000.0))
    sub_set = [1, 3, 5, 7]
    selected = [spike_trains[i] for i in sub_set]
    # the same invariance must hold for every multivariate distance
    for multi_dist in (spk.isi_distance_multi,
                       spk.spike_distance_multi,
                       spk.spike_sync_multi):
        assert_equal(multi_dist(selected),
                     multi_dist(spike_trains, sub_set))
if __name__ == "__main__":
    # run the complete suite when invoked as a script (same order as above)
    for test in (test_isi, test_spike, test_spike_sync,
                 test_multi_isi, test_multi_spike, test_multi_spike_sync,
                 test_isi_matrix, test_spike_matrix, test_spike_sync_matrix,
                 test_regression_spiky, test_multi_variate_subsets):
        test()
| [
"numpy.sum",
"pyspike.spike_sync_multi",
"pyspike.spike_sync",
"numpy.arange",
"pyspike.spike_distance_multi",
"numpy.testing.assert_array_almost_equal",
"os.path.join",
"pyspike.isi_profile",
"pyspike.spike_distance",
"numpy.testing.assert_almost_equal",
"numpy.append",
"numpy.testing.assert_... | [((408, 434), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (424, 434), False, 'import os\n'), ((496, 533), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.2, 0.4, 0.6, 0.7]', '(1.0)'], {}), '([0.2, 0.4, 0.6, 0.7], 1.0)\n', (506, 533), False, 'from pyspike import SpikeTrain\n'), ((543, 587), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.3, 0.45, 0.8, 0.9, 0.95]', '(1.0)'], {}), '([0.3, 0.45, 0.8, 0.9, 0.95], 1.0)\n', (553, 587), False, 'from pyspike import SpikeTrain\n'), ((877, 901), 'numpy.array', 'np.array', (['expected_times'], {}), '(expected_times)\n', (885, 901), True, 'import numpy as np\n'), ((921, 943), 'numpy.array', 'np.array', (['expected_isi'], {}), '(expected_isi)\n', (929, 943), True, 'import numpy as np\n'), ((1106, 1129), 'pyspike.isi_profile', 'spk.isi_profile', (['t1', 't2'], {}), '(t1, t2)\n', (1121, 1129), True, 'import pyspike as spk\n'), ((1204, 1237), 'numpy.testing.assert_equal', 'assert_equal', (['f.x', 'expected_times'], {}), '(f.x, expected_times)\n', (1216, 1237), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((1242, 1298), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y', 'expected_isi'], {'decimal': '(15)'}), '(f.y, expected_isi, decimal=15)\n', (1267, 1298), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((1455, 1494), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.2, 0.4, 0.6]', '[0.0, 1.0]'], {}), '([0.2, 0.4, 0.6], [0.0, 1.0])\n', (1465, 1494), False, 'from pyspike import SpikeTrain\n'), ((1504, 1548), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.1, 0.4, 0.5, 0.6]', '[0.0, 1.0]'], {}), '([0.1, 0.4, 0.5, 0.6], [0.0, 1.0])\n', (1514, 1548), False, 'from pyspike import SpikeTrain\n'), ((1702, 1726), 'numpy.array', 'np.array', (['expected_times'], {}), '(expected_times)\n', (1710, 1726), True, 'import numpy as np\n'), ((1746, 1768), 'numpy.array', 'np.array', 
(['expected_isi'], {}), '(expected_isi)\n', (1754, 1768), True, 'import numpy as np\n'), ((1931, 1954), 'pyspike.isi_profile', 'spk.isi_profile', (['t1', 't2'], {}), '(t1, t2)\n', (1946, 1954), True, 'import pyspike as spk\n'), ((1960, 1993), 'numpy.testing.assert_equal', 'assert_equal', (['f.x', 'expected_times'], {}), '(f.x, expected_times)\n', (1972, 1993), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((1998, 2054), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y', 'expected_isi'], {'decimal': '(15)'}), '(f.y, expected_isi, decimal=15)\n', (2023, 2054), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((2223, 2261), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.0, 2.0, 5.0, 8.0]', '(10.0)'], {}), '([0.0, 2.0, 5.0, 8.0], 10.0)\n', (2233, 2261), False, 'from pyspike import SpikeTrain\n'), ((2271, 2309), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.0, 1.0, 5.0, 9.0]', '(10.0)'], {}), '([0.0, 1.0, 5.0, 9.0], 10.0)\n', (2281, 2309), False, 'from pyspike import SpikeTrain\n'), ((2332, 2378), 'numpy.array', 'np.array', (['[0.0, 1.0, 2.0, 5.0, 8.0, 9.0, 10.0]'], {}), '([0.0, 1.0, 2.0, 5.0, 8.0, 9.0, 10.0])\n', (2340, 2378), True, 'import numpy as np\n'), ((2388, 2413), 'pyspike.spike_profile', 'spk.spike_profile', (['t1', 't2'], {}), '(t1, t2)\n', (2405, 2413), True, 'import pyspike as spk\n'), ((2419, 2452), 'numpy.testing.assert_equal', 'assert_equal', (['f.x', 'expected_times'], {}), '(f.x, expected_times)\n', (2431, 2452), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((2484, 2701), 'numpy.array', 'np.array', (['[0.0, 0.5555555555555556, 0.2222222222222222, 0.3055555555555556, \n 0.25510204081632654, 0.0, 0.0, 0.25510204081632654, 0.25510204081632654,\n 0.2857142857142857, 0.2857142857142857, 0.2857142857142857]'], {}), '([0.0, 0.5555555555555556, 0.2222222222222222, 
0.3055555555555556, \n 0.25510204081632654, 0.0, 0.0, 0.25510204081632654, 0.25510204081632654,\n 0.2857142857142857, 0.2857142857142857, 0.2857142857142857])\n', (2492, 2701), True, 'import numpy as np\n'), ((2923, 2967), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y2', 'y_all[1::2]'], {}), '(f.y2, y_all[1::2])\n', (2948, 2967), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((3102, 3139), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.2, 0.4, 0.6, 0.7]', '(1.0)'], {}), '([0.2, 0.4, 0.6, 0.7], 1.0)\n', (3112, 3139), False, 'from pyspike import SpikeTrain\n'), ((3149, 3193), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.3, 0.45, 0.8, 0.9, 0.95]', '(1.0)'], {}), '([0.3, 0.45, 0.8, 0.9, 0.95], 1.0)\n', (3159, 3193), False, 'from pyspike import SpikeTrain\n'), ((3333, 3533), 'numpy.array', 'np.array', (['[0.1, 0.1, (0.1 * 0.1 + 0.05 * 0.1) / 0.2, 0.05, 0.05 * 0.15 * 2 / 0.2, \n 0.15, 0.1, (0.1 * 0.1 + 0.1 * 0.2) / 0.3, (0.1 * 0.2 + 0.1 * 0.1) / 0.3,\n (0.1 * 0.05 + 0.1 * 0.25) / 0.3, 0.1]'], {}), '([0.1, 0.1, (0.1 * 0.1 + 0.05 * 0.1) / 0.2, 0.05, 0.05 * 0.15 * 2 /\n 0.2, 0.15, 0.1, (0.1 * 0.1 + 0.1 * 0.2) / 0.3, (0.1 * 0.2 + 0.1 * 0.1) /\n 0.3, (0.1 * 0.05 + 0.1 * 0.25) / 0.3, 0.1])\n', (3341, 3533), True, 'import numpy as np\n'), ((3539, 3719), 'numpy.array', 'np.array', (['[0.1, (0.1 * 0.2 + 0.1 * 0.1) / 0.3, 0.1, 0.1 * 0.05 * 2 / 0.15, 0.05, (\n 0.05 * 0.2 + 0.1 * 0.15) / 0.35, (0.05 * 0.1 + 0.1 * 0.25) / 0.35, 0.1,\n 0.1, 0.05, 0.05]'], {}), '([0.1, (0.1 * 0.2 + 0.1 * 0.1) / 0.3, 0.1, 0.1 * 0.05 * 2 / 0.15, \n 0.05, (0.05 * 0.2 + 0.1 * 0.15) / 0.35, (0.05 * 0.1 + 0.1 * 0.25) / \n 0.35, 0.1, 0.1, 0.05, 0.05])\n', (3547, 3719), True, 'import numpy as np\n'), ((3732, 3792), 'numpy.array', 'np.array', (['[0.2, 0.2, 0.2, 0.2, 0.2, 0.1, 0.3, 0.3, 0.3, 0.3]'], {}), '([0.2, 0.2, 0.2, 0.2, 0.2, 0.1, 0.3, 0.3, 0.3, 0.3])\n', (3740, 3792), True, 'import numpy as np\n'), ((3804, 3871), 'numpy.array', 
'np.array', (['[0.3, 0.3, 0.15, 0.15, 0.35, 0.35, 0.35, 0.1, 0.05, 0.05]'], {}), '([0.3, 0.3, 0.15, 0.15, 0.35, 0.35, 0.35, 0.1, 0.05, 0.05])\n', (3812, 3871), True, 'import numpy as np\n'), ((4030, 4054), 'numpy.array', 'np.array', (['expected_times'], {}), '(expected_times)\n', (4038, 4054), True, 'import numpy as np\n'), ((4073, 4094), 'numpy.array', 'np.array', (['expected_y1'], {}), '(expected_y1)\n', (4081, 4094), True, 'import numpy as np\n'), ((4113, 4134), 'numpy.array', 'np.array', (['expected_y2'], {}), '(expected_y2)\n', (4121, 4134), True, 'import numpy as np\n'), ((4388, 4413), 'pyspike.spike_profile', 'spk.spike_profile', (['t1', 't2'], {}), '(t1, t2)\n', (4405, 4413), True, 'import pyspike as spk\n'), ((4419, 4452), 'numpy.testing.assert_equal', 'assert_equal', (['f.x', 'expected_times'], {}), '(f.x, expected_times)\n', (4431, 4452), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((4457, 4513), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y1', 'expected_y1'], {'decimal': '(15)'}), '(f.y1, expected_y1, decimal=15)\n', (4482, 4513), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((4518, 4574), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y2', 'expected_y2'], {'decimal': '(15)'}), '(f.y2, expected_y2, decimal=15)\n', (4543, 4574), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((4799, 4838), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.2, 0.4, 0.6]', '[0.0, 1.0]'], {}), '([0.2, 0.4, 0.6], [0.0, 1.0])\n', (4809, 4838), False, 'from pyspike import SpikeTrain\n'), ((4848, 4892), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.1, 0.4, 0.5, 0.6]', '[0.0, 1.0]'], {}), '([0.1, 0.4, 0.5, 0.6], [0.0, 1.0])\n', (4858, 4892), False, 'from pyspike import SpikeTrain\n'), ((5069, 5140), 'numpy.array', 'np.array', (['[0.1, (0.1 * 0.1 + 0.1 * 0.1) / 0.2, 
0.1, 0.0, 0.0, 0.0, 0.0]'], {}), '([0.1, (0.1 * 0.1 + 0.1 * 0.1) / 0.2, 0.1, 0.0, 0.0, 0.0, 0.0])\n', (5077, 5140), True, 'import numpy as np\n'), ((5144, 5215), 'numpy.array', 'np.array', (['[0.1, (0.1 * 0.1 + 0.1 * 0.1) / 0.2, 0.1, 0.0, 0.0, 0.0, 0.0]'], {}), '([0.1, (0.1 * 0.1 + 0.1 * 0.1) / 0.2, 0.1, 0.0, 0.0, 0.0, 0.0])\n', (5152, 5215), True, 'import numpy as np\n'), ((5461, 5530), 'numpy.array', 'np.array', (['[0.1, 0.1 * 0.3 / 0.3, 0.1 * 0.2 / 0.3, 0.0, 0.1, 0.0, 0.0]'], {}), '([0.1, 0.1 * 0.3 / 0.3, 0.1 * 0.2 / 0.3, 0.0, 0.1, 0.0, 0.0])\n', (5469, 5530), True, 'import numpy as np\n'), ((5555, 5624), 'numpy.array', 'np.array', (['[0.1, 0.1 * 0.3 / 0.3, 0.1 * 0.2 / 0.3, 0.0, 0.1, 0.0, 0.0]'], {}), '([0.1, 0.1 * 0.3 / 0.3, 0.1 * 0.2 / 0.3, 0.0, 0.1, 0.0, 0.0])\n', (5563, 5624), True, 'import numpy as np\n'), ((5649, 5689), 'numpy.array', 'np.array', (['[0.2, 0.2, 0.2, 0.2, 0.2, 0.4]'], {}), '([0.2, 0.2, 0.2, 0.2, 0.2, 0.4])\n', (5657, 5689), True, 'import numpy as np\n'), ((5701, 5741), 'numpy.array', 'np.array', (['[0.3, 0.3, 0.3, 0.1, 0.1, 0.4]'], {}), '([0.3, 0.3, 0.3, 0.1, 0.1, 0.4])\n', (5709, 5741), True, 'import numpy as np\n'), ((5908, 5932), 'numpy.array', 'np.array', (['expected_times'], {}), '(expected_times)\n', (5916, 5932), True, 'import numpy as np\n'), ((5951, 5972), 'numpy.array', 'np.array', (['expected_y1'], {}), '(expected_y1)\n', (5959, 5972), True, 'import numpy as np\n'), ((5991, 6012), 'numpy.array', 'np.array', (['expected_y2'], {}), '(expected_y2)\n', (5999, 6012), True, 'import numpy as np\n'), ((6219, 6244), 'pyspike.spike_profile', 'spk.spike_profile', (['t1', 't2'], {}), '(t1, t2)\n', (6236, 6244), True, 'import pyspike as spk\n'), ((6250, 6283), 'numpy.testing.assert_equal', 'assert_equal', (['f.x', 'expected_times'], {}), '(f.x, expected_times)\n', (6262, 6283), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((6288, 6344), 'numpy.testing.assert_array_almost_equal', 
'assert_array_almost_equal', (['f.y1', 'expected_y1'], {'decimal': '(14)'}), '(f.y1, expected_y1, decimal=14)\n', (6313, 6344), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((6349, 6405), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y2', 'expected_y2'], {'decimal': '(14)'}), '(f.y2, expected_y2, decimal=14)\n', (6374, 6405), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((6619, 6651), 'pyspike.SpikeTrain', 'SpikeTrain', (['[1.0, 2.0, 3.0]', '(4.0)'], {}), '([1.0, 2.0, 3.0], 4.0)\n', (6629, 6651), False, 'from pyspike import SpikeTrain\n'), ((6666, 6688), 'pyspike.SpikeTrain', 'SpikeTrain', (['[2.1]', '(4.0)'], {}), '([2.1], 4.0)\n', (6676, 6688), False, 'from pyspike import SpikeTrain\n'), ((6707, 6747), 'numpy.array', 'np.array', (['[0.0, 1.0, 2.0, 2.1, 3.0, 4.0]'], {}), '([0.0, 1.0, 2.0, 2.1, 3.0, 4.0])\n', (6715, 6747), True, 'import numpy as np\n'), ((6765, 6805), 'numpy.array', 'np.array', (['[0.0, 0.0, 1.0, 1.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 1.0, 1.0, 0.0, 0.0])\n', (6773, 6805), True, 'import numpy as np\n'), ((6815, 6855), 'pyspike.spike_sync_profile', 'spk.spike_sync_profile', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (6837, 6855), True, 'import pyspike as spk\n'), ((6861, 6915), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.x', 'expected_x'], {'decimal': '(16)'}), '(f.x, expected_x, decimal=16)\n', (6886, 6915), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((6920, 6974), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y', 'expected_y'], {'decimal': '(16)'}), '(f.y, expected_y, decimal=16)\n', (6945, 6974), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((7263, 7285), 'pyspike.SpikeTrain', 'SpikeTrain', (['[3.1]', '(4.0)'], {}), '([3.1], 
4.0)\n', (7273, 7285), False, 'from pyspike import SpikeTrain\n'), ((7400, 7422), 'pyspike.SpikeTrain', 'SpikeTrain', (['[1.1]', '(4.0)'], {}), '([1.1], 4.0)\n', (7410, 7422), False, 'from pyspike import SpikeTrain\n'), ((7441, 7481), 'numpy.array', 'np.array', (['[0.0, 1.0, 1.1, 2.0, 3.0, 4.0]'], {}), '([0.0, 1.0, 1.1, 2.0, 3.0, 4.0])\n', (7449, 7481), True, 'import numpy as np\n'), ((7499, 7539), 'numpy.array', 'np.array', (['[1.0, 1.0, 1.0, 0.0, 0.0, 0.0]'], {}), '([1.0, 1.0, 1.0, 0.0, 0.0, 0.0])\n', (7507, 7539), True, 'import numpy as np\n'), ((7549, 7589), 'pyspike.spike_sync_profile', 'spk.spike_sync_profile', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (7571, 7589), True, 'import pyspike as spk\n'), ((7595, 7649), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.x', 'expected_x'], {'decimal': '(16)'}), '(f.x, expected_x, decimal=16)\n', (7620, 7649), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((7654, 7708), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y', 'expected_y'], {'decimal': '(16)'}), '(f.y, expected_y, decimal=16)\n', (7679, 7708), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((7824, 7846), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.9]', '(4.0)'], {}), '([0.9], 4.0)\n', (7834, 7846), False, 'from pyspike import SpikeTrain\n'), ((7961, 7983), 'pyspike.SpikeTrain', 'SpikeTrain', (['[3.0]', '(4.0)'], {}), '([3.0], 4.0)\n', (7971, 7983), False, 'from pyspike import SpikeTrain\n'), ((8098, 8120), 'pyspike.SpikeTrain', 'SpikeTrain', (['[1.0]', '(4.0)'], {}), '([1.0], 4.0)\n', (8108, 8120), False, 'from pyspike import SpikeTrain\n'), ((8235, 8262), 'pyspike.SpikeTrain', 'SpikeTrain', (['[1.5, 3.0]', '(4.0)'], {}), '([1.5, 3.0], 4.0)\n', (8245, 8262), False, 'from pyspike import SpikeTrain\n'), ((8377, 8409), 'pyspike.SpikeTrain', 'SpikeTrain', (['[1.0, 2.0, 4.0]', '(4.0)'], {}), 
'([1.0, 2.0, 4.0], 4.0)\n', (8387, 8409), False, 'from pyspike import SpikeTrain\n'), ((8424, 8446), 'pyspike.SpikeTrain', 'SpikeTrain', (['[3.8]', '(4.0)'], {}), '([3.8], 4.0)\n', (8434, 8446), False, 'from pyspike import SpikeTrain\n'), ((8461, 8483), 'pyspike.SpikeTrain', 'SpikeTrain', (['[3.9]', '(4.0)'], {}), '([3.9], 4.0)\n', (8471, 8483), False, 'from pyspike import SpikeTrain\n'), ((8504, 8544), 'numpy.array', 'np.array', (['[0.0, 1.0, 2.0, 3.8, 4.0, 4.0]'], {}), '([0.0, 1.0, 2.0, 3.8, 4.0, 4.0])\n', (8512, 8544), True, 'import numpy as np\n'), ((8562, 8602), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0, 1.0, 1.0, 1.0]'], {}), '([0.0, 0.0, 0.0, 1.0, 1.0, 1.0])\n', (8570, 8602), True, 'import numpy as np\n'), ((8612, 8652), 'pyspike.spike_sync_profile', 'spk.spike_sync_profile', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (8634, 8652), True, 'import pyspike as spk\n'), ((8658, 8712), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.x', 'expected_x'], {'decimal': '(16)'}), '(f.x, expected_x, decimal=16)\n', (8683, 8712), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((8717, 8771), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y', 'expected_y'], {'decimal': '(16)'}), '(f.y, expected_y, decimal=16)\n', (8742, 8771), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((8782, 8822), 'pyspike.spike_sync_profile', 'spk.spike_sync_profile', (['spikes2', 'spikes3'], {}), '(spikes2, spikes3)\n', (8804, 8822), True, 'import pyspike as spk\n'), ((8911, 8946), 'numpy.testing.assert_equal', 'assert_equal', (['(i1[0] + i2[0])', 'i12[0]'], {}), '(i1[0] + i2[0], i12[0])\n', (8923, 8946), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((8949, 8984), 'numpy.testing.assert_equal', 'assert_equal', (['(i1[1] + i2[1])', 'i12[1]'], {}), '(i1[1] + i2[1], 
i12[1])\n', (8961, 8984), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((9099, 9136), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.2, 0.4, 0.6, 0.7]', '(1.0)'], {}), '([0.2, 0.4, 0.6, 0.7], 1.0)\n', (9109, 9136), False, 'from pyspike import SpikeTrain\n'), ((9146, 9190), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.3, 0.45, 0.8, 0.9, 0.95]', '(1.0)'], {}), '([0.3, 0.45, 0.8, 0.9, 0.95], 1.0)\n', (9156, 9190), False, 'from pyspike import SpikeTrain\n'), ((9200, 9232), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.2, 0.4, 0.6]', '(1.0)'], {}), '([0.2, 0.4, 0.6], 1.0)\n', (9210, 9232), False, 'from pyspike import SpikeTrain\n'), ((9242, 9279), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.1, 0.4, 0.5, 0.6]', '(1.0)'], {}), '([0.1, 0.4, 0.5, 0.6], 1.0)\n', (9252, 9279), False, 'from pyspike import SpikeTrain\n'), ((9971, 9980), 'copy.copy', 'copy', (['f12'], {}), '(f12)\n', (9975, 9980), False, 'from copy import copy\n'), ((10819, 10897), 'pyspike.SpikeTrain', 'SpikeTrain', (['[100, 300, 400, 405, 410, 500, 700, 800, 805, 810, 815, 900]', '(1000)'], {}), '([100, 300, 400, 405, 410, 500, 700, 800, 805, 810, 815, 900], 1000)\n', (10829, 10897), False, 'from pyspike import SpikeTrain\n'), ((10938, 11016), 'pyspike.SpikeTrain', 'SpikeTrain', (['[100, 200, 205, 210, 295, 350, 400, 510, 600, 605, 700, 910]', '(1000)'], {}), '([100, 200, 205, 210, 295, 350, 400, 510, 600, 605, 700, 910], 1000)\n', (10948, 11016), False, 'from pyspike import SpikeTrain\n'), ((11057, 11135), 'pyspike.SpikeTrain', 'SpikeTrain', (['[100, 180, 198, 295, 412, 420, 510, 640, 695, 795, 820, 920]', '(1000)'], {}), '([100, 180, 198, 295, 412, 420, 510, 640, 695, 795, 820, 920], 1000)\n', (11067, 11135), False, 'from pyspike import SpikeTrain\n'), ((11468, 11525), 'pyspike.spike_sync_profile_multi', 'spk.spike_sync_profile_multi', (['[spikes1, spikes2, spikes3]'], {}), '([spikes1, spikes2, spikes3])\n', (11496, 11525), True, 'import pyspike as spk\n'), ((12154, 
12166), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (12162, 12166), True, 'import numpy as np\n'), ((12310, 12352), 'pyspike.spike_sync_profile_multi', 'spk.spike_sync_profile_multi', (['spike_trains'], {}), '(spike_trains)\n', (12338, 12352), True, 'import pyspike as spk\n'), ((12358, 12394), 'numpy.testing.assert_equal', 'assert_equal', (['spike_times', 'f.x[1:-1]'], {}), '(spike_times, f.x[1:-1])\n', (12370, 12394), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((13018, 13055), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.2, 0.4, 0.6, 0.7]', '(1.0)'], {}), '([0.2, 0.4, 0.6, 0.7], 1.0)\n', (13028, 13055), False, 'from pyspike import SpikeTrain\n'), ((13065, 13109), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.3, 0.45, 0.8, 0.9, 0.95]', '(1.0)'], {}), '([0.3, 0.45, 0.8, 0.9, 0.95], 1.0)\n', (13075, 13109), False, 'from pyspike import SpikeTrain\n'), ((13119, 13151), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.2, 0.4, 0.6]', '(1.0)'], {}), '([0.2, 0.4, 0.6], 1.0)\n', (13129, 13151), False, 'from pyspike import SpikeTrain\n'), ((13161, 13198), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.1, 0.4, 0.5, 0.6]', '(1.0)'], {}), '([0.1, 0.4, 0.5, 0.6], 1.0)\n', (13171, 13198), False, 'from pyspike import SpikeTrain\n'), ((13658, 13691), 'numpy.testing.assert_equal', 'assert_equal', (['f12', 'f_matrix[1, 0]'], {}), '(f12, f_matrix[1, 0])\n', (13670, 13691), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((13696, 13729), 'numpy.testing.assert_equal', 'assert_equal', (['f13', 'f_matrix[2, 0]'], {}), '(f13, f_matrix[2, 0])\n', (13708, 13729), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((13734, 13767), 'numpy.testing.assert_equal', 'assert_equal', (['f14', 'f_matrix[3, 0]'], {}), '(f14, f_matrix[3, 0])\n', (13746, 13767), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), 
((13772, 13805), 'numpy.testing.assert_equal', 'assert_equal', (['f23', 'f_matrix[2, 1]'], {}), '(f23, f_matrix[2, 1])\n', (13784, 13805), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((13810, 13843), 'numpy.testing.assert_equal', 'assert_equal', (['f24', 'f_matrix[3, 1]'], {}), '(f24, f_matrix[3, 1])\n', (13822, 13843), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((13848, 13881), 'numpy.testing.assert_equal', 'assert_equal', (['f34', 'f_matrix[3, 2]'], {}), '(f34, f_matrix[3, 2])\n', (13860, 13881), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((14339, 14365), 'pyspike.isi_distance', 'spk.isi_distance', (['st1', 'st2'], {}), '(st1, st2)\n', (14355, 14365), True, 'import pyspike as spk\n'), ((14370, 14432), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['isi_dist', '(0.09090909090909094)'], {'decimal': '(15)'}), '(isi_dist, 0.09090909090909094, decimal=15)\n', (14389, 14432), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((14454, 14479), 'pyspike.isi_profile', 'spk.isi_profile', (['st1', 'st2'], {}), '(st1, st2)\n', (14469, 14479), True, 'import pyspike as spk\n'), ((14569, 14597), 'pyspike.spike_distance', 'spk.spike_distance', (['st1', 'st2'], {}), '(st1, st2)\n', (14587, 14597), True, 'import pyspike as spk\n'), ((14602, 14646), 'numpy.testing.assert_equal', 'assert_equal', (['spike_dist', '(0.2110587824873539)'], {}), '(spike_dist, 0.2110587824873539)\n', (14614, 14646), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((14667, 14691), 'pyspike.spike_sync', 'spk.spike_sync', (['st1', 'st2'], {}), '(st1, st2)\n', (14681, 14691), True, 'import pyspike as spk\n'), ((14696, 14740), 'numpy.testing.assert_equal', 'assert_equal', (['spike_sync', '(0.8695652173913043)'], {}), 
'(spike_sync, 0.8695652173913043)\n', (14708, 14740), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((14910, 14946), 'pyspike.isi_distance_multi', 'spk.isi_distance_multi', (['spike_trains'], {}), '(spike_trains)\n', (14932, 14946), True, 'import pyspike as spk\n'), ((14991, 15052), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['isi_dist', '(0.1705181681699913)'], {'decimal': '(15)'}), '(isi_dist, 0.1705181681699913, decimal=15)\n', (15010, 15052), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((15078, 15115), 'pyspike.spike_profile_multi', 'spk.spike_profile_multi', (['spike_trains'], {}), '(spike_trains)\n', (15101, 15115), True, 'import pyspike as spk\n'), ((15202, 15240), 'pyspike.spike_distance_multi', 'spk.spike_distance_multi', (['spike_trains'], {}), '(spike_trains)\n', (15226, 15240), True, 'import pyspike as spk\n'), ((15285, 15349), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['spike_dist', '(0.25188056475463755)'], {'decimal': '(15)'}), '(spike_dist, 0.25188056475463755, decimal=15)\n', (15304, 15349), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((15368, 15402), 'pyspike.spike_sync_multi', 'spk.spike_sync_multi', (['spike_trains'], {}), '(spike_trains)\n', (15388, 15402), True, 'import pyspike as spk\n'), ((15447, 15491), 'numpy.testing.assert_equal', 'assert_equal', (['spike_sync', '(0.7183531505298066)'], {}), '(spike_sync, 0.7183531505298066)\n', (15459, 15491), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((15540, 15572), 'pyspike.SpikeTrain', 'SpikeTrain', (['[0.5, 1.5, 2.5]', '(6.0)'], {}), '([0.5, 1.5, 2.5], 6.0)\n', (15550, 15572), False, 'from pyspike import SpikeTrain\n'), ((15583, 15615), 'pyspike.SpikeTrain', 'SpikeTrain', (['[3.5, 4.5, 5.5]', '(6.0)'], {}), '([3.5, 4.5, 5.5], 6.0)\n', (15593, 
15615), False, 'from pyspike import SpikeTrain\n'), ((15625, 15652), 'pyspike.spike_profile', 'spk.spike_profile', (['st1', 'st2'], {}), '(st1, st2)\n', (15642, 15652), True, 'import pyspike as spk\n'), ((15675, 15725), 'numpy.array', 'np.array', (['[0.0, 0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.0]'], {}), '([0.0, 0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.0])\n', (15683, 15725), True, 'import numpy as np\n'), ((15738, 16034), 'numpy.array', 'np.array', (['[0.271604938271605, 0.271604938271605, 0.271604938271605, 0.617283950617284,\n 0.617283950617284, 0.444444444444444, 0.285714285714286, \n 0.285714285714286, 0.444444444444444, 0.617283950617284, \n 0.617283950617284, 0.271604938271605, 0.271604938271605, 0.271604938271605]'], {}), '([0.271604938271605, 0.271604938271605, 0.271604938271605, \n 0.617283950617284, 0.617283950617284, 0.444444444444444, \n 0.285714285714286, 0.285714285714286, 0.444444444444444, \n 0.617283950617284, 0.617283950617284, 0.271604938271605, \n 0.271604938271605, 0.271604938271605])\n', (15746, 16034), True, 'import numpy as np\n'), ((16167, 16200), 'numpy.testing.assert_equal', 'assert_equal', (['f.x', 'expected_times'], {}), '(f.x, expected_times)\n', (16179, 16200), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((16205, 16261), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y1', 'expected_y1'], {'decimal': '(14)'}), '(f.y1, expected_y1, decimal=14)\n', (16230, 16261), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((16266, 16322), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f.y2', 'expected_y2'], {'decimal': '(14)'}), '(f.y2, expected_y2, decimal=14)\n', (16291, 16322), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((16581, 16625), 'pyspike.isi_distance_multi', 'spk.isi_distance_multi', (['spike_trains_sub_set'], {}), 
'(spike_trains_sub_set)\n', (16603, 16625), True, 'import pyspike as spk\n'), ((16635, 16680), 'pyspike.isi_distance_multi', 'spk.isi_distance_multi', (['spike_trains', 'sub_set'], {}), '(spike_trains, sub_set)\n', (16657, 16680), True, 'import pyspike as spk\n'), ((16685, 16705), 'numpy.testing.assert_equal', 'assert_equal', (['v1', 'v2'], {}), '(v1, v2)\n', (16697, 16705), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((16716, 16762), 'pyspike.spike_distance_multi', 'spk.spike_distance_multi', (['spike_trains_sub_set'], {}), '(spike_trains_sub_set)\n', (16740, 16762), True, 'import pyspike as spk\n'), ((16772, 16819), 'pyspike.spike_distance_multi', 'spk.spike_distance_multi', (['spike_trains', 'sub_set'], {}), '(spike_trains, sub_set)\n', (16796, 16819), True, 'import pyspike as spk\n'), ((16824, 16844), 'numpy.testing.assert_equal', 'assert_equal', (['v1', 'v2'], {}), '(v1, v2)\n', (16836, 16844), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((16855, 16897), 'pyspike.spike_sync_multi', 'spk.spike_sync_multi', (['spike_trains_sub_set'], {}), '(spike_trains_sub_set)\n', (16875, 16897), True, 'import pyspike as spk\n'), ((16907, 16950), 'pyspike.spike_sync_multi', 'spk.spike_sync_multi', (['spike_trains', 'sub_set'], {}), '(spike_trains, sub_set)\n', (16927, 16950), True, 'import pyspike as spk\n'), ((16955, 16975), 'numpy.testing.assert_equal', 'assert_equal', (['v1', 'v2'], {}), '(v1, v2)\n', (16967, 16975), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((1361, 1385), 'pyspike.isi_distance', 'spk.isi_distance', (['t1', 't2'], {}), '(t1, t2)\n', (1377, 1385), True, 'import pyspike as spk\n'), ((2117, 2141), 'pyspike.isi_distance', 'spk.isi_distance', (['t1', 't2'], {}), '(t1, t2)\n', (2133, 2141), True, 'import pyspike as spk\n'), ((3054, 3080), 'pyspike.spike_distance', 'spk.spike_distance', (['t1', 
't2'], {}), '(t1, t2)\n', (3072, 3080), True, 'import pyspike as spk\n'), ((4665, 4691), 'pyspike.spike_distance', 'spk.spike_distance', (['t1', 't2'], {}), '(t1, t2)\n', (4683, 4691), True, 'import pyspike as spk\n'), ((6496, 6522), 'pyspike.spike_distance', 'spk.spike_distance', (['t1', 't2'], {}), '(t1, t2)\n', (6514, 6522), True, 'import pyspike as spk\n'), ((7000, 7032), 'pyspike.spike_sync', 'spk.spike_sync', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (7014, 7032), True, 'import pyspike as spk\n'), ((7159, 7205), 'pyspike.spike_sync', 'spk.spike_sync', (['spikes1', 'spikes2'], {'max_tau': '(0.05)'}), '(spikes1, spikes2, max_tau=0.05)\n', (7173, 7205), True, 'import pyspike as spk\n'), ((7310, 7342), 'pyspike.spike_sync', 'spk.spike_sync', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (7324, 7342), True, 'import pyspike as spk\n'), ((7734, 7766), 'pyspike.spike_sync', 'spk.spike_sync', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (7748, 7766), True, 'import pyspike as spk\n'), ((7871, 7903), 'pyspike.spike_sync', 'spk.spike_sync', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (7885, 7903), True, 'import pyspike as spk\n'), ((8008, 8040), 'pyspike.spike_sync', 'spk.spike_sync', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (8022, 8040), True, 'import pyspike as spk\n'), ((8145, 8177), 'pyspike.spike_sync', 'spk.spike_sync', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (8159, 8177), True, 'import pyspike as spk\n'), ((8287, 8319), 'pyspike.spike_sync', 'spk.spike_sync', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (8301, 8319), True, 'import pyspike as spk\n'), ((11186, 11218), 'pyspike.spike_sync', 'spk.spike_sync', (['spikes1', 'spikes2'], {}), '(spikes1, spikes2)\n', (11200, 11218), True, 'import pyspike as spk\n'), ((11285, 11317), 'pyspike.spike_sync', 'spk.spike_sync', (['spikes1', 'spikes3'], {}), '(spikes1, spikes3)\n', (11299, 11317), True, 'import pyspike as spk\n'), ((11384, 11416), 
'pyspike.spike_sync', 'spk.spike_sync', (['spikes2', 'spikes3'], {}), '(spikes2, spikes3)\n', (11398, 11416), True, 'import pyspike as spk\n'), ((11849, 11898), 'pyspike.spike_sync_multi', 'spk.spike_sync_multi', (['[spikes1, spikes2, spikes3]'], {}), '([spikes1, spikes2, spikes3])\n', (11869, 11898), True, 'import pyspike as spk\n'), ((12041, 12087), 'os.path.join', 'os.path.join', (['TEST_PATH', '"""SPIKE_Sync_Test.txt"""'], {}), "(TEST_PATH, 'SPIKE_Sync_Test.txt')\n", (12053, 12087), False, 'import os\n'), ((12217, 12250), 'numpy.append', 'np.append', (['spike_times', 'st.spikes'], {}), '(spike_times, st.spikes)\n', (12226, 12250), True, 'import numpy as np\n'), ((12279, 12299), 'numpy.sort', 'np.sort', (['spike_times'], {}), '(spike_times)\n', (12286, 12299), True, 'import numpy as np\n'), ((12450, 12467), 'numpy.sum', 'np.sum', (['f.y[1:-1]'], {}), '(f.y[1:-1])\n', (12456, 12467), True, 'import numpy as np\n'), ((12493, 12511), 'numpy.sum', 'np.sum', (['f.mp[1:-1]'], {}), '(f.mp[1:-1])\n', (12499, 12511), True, 'import numpy as np\n'), ((12589, 12616), 'pyspike.SpikeTrain', 'SpikeTrain', (['[1, 9]', '[0, 10]'], {}), '([1, 9], [0, 10])\n', (12599, 12616), False, 'from pyspike import SpikeTrain\n'), ((12633, 12660), 'pyspike.SpikeTrain', 'SpikeTrain', (['[1, 3]', '[0, 10]'], {}), '([1, 3], [0, 10])\n', (12643, 12660), False, 'from pyspike import SpikeTrain\n'), ((12677, 12700), 'pyspike.SpikeTrain', 'SpikeTrain', (['[]', '[0, 10]'], {}), '([], [0, 10])\n', (12687, 12700), False, 'from pyspike import SpikeTrain\n'), ((12717, 12740), 'pyspike.SpikeTrain', 'SpikeTrain', (['[]', '[0, 10]'], {}), '([], [0, 10])\n', (12727, 12740), False, 'from pyspike import SpikeTrain\n'), ((12767, 12792), 'pyspike.spike_sync_multi', 'spk.spike_sync_multi', (['sts'], {}), '(sts)\n', (12787, 12792), True, 'import pyspike as spk\n'), ((13508, 13541), 'numpy.testing.assert_equal', 'assert_equal', (['(0.0)', 'f_matrix[i, i]'], {}), '(0.0, f_matrix[i, i])\n', (13520, 13541), False, 'from 
numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((14236, 14261), 'numpy.arange', 'np.arange', (['(100)', '(1201)', '(100)'], {}), '(100, 1201, 100)\n', (14245, 14261), True, 'import numpy as np\n'), ((14290, 14315), 'numpy.arange', 'np.arange', (['(100)', '(1201)', '(110)'], {}), '(100, 1201, 110)\n', (14299, 14315), True, 'import numpy as np\n'), ((14831, 14878), 'os.path.join', 'os.path.join', (['TEST_PATH', '"""PySpike_testdata.txt"""'], {}), "(TEST_PATH, 'PySpike_testdata.txt')\n", (14843, 14878), False, 'import os\n'), ((16418, 16465), 'os.path.join', 'os.path.join', (['TEST_PATH', '"""PySpike_testdata.txt"""'], {}), "(TEST_PATH, 'PySpike_testdata.txt')\n", (16430, 16465), False, 'import os\n'), ((13609, 13653), 'numpy.testing.assert_equal', 'assert_equal', (['f_matrix[i, j]', 'f_matrix[j, i]'], {}), '(f_matrix[i, j], f_matrix[j, i])\n', (13621, 13653), False, 'from numpy.testing import assert_equal, assert_almost_equal, assert_array_almost_equal\n'), ((14522, 14549), 'numpy.ones_like', 'np.ones_like', (['isi_profile.y'], {}), '(isi_profile.y)\n', (14534, 14549), True, 'import numpy as np\n'), ((12839, 12872), 'pyspike.spike_sync_profile_multi', 'spk.spike_sync_profile_multi', (['sts'], {}), '(sts)\n', (12867, 12872), True, 'import pyspike as spk\n')] |
import os
import numpy as np
import sqlite3
from lsst.sims.catUtils.dust import EBVbase
'''
This is a companion script to trim_sn_summary.py: the output of
trim_sn_summary.py is the input to complete_sn_summary.

complete_sn_summary must run in a DC2-era lsst_sims environment.  It will
- Add a new integer id column (keeping the original string id)
- Add Rv, Av columns (Milky Way extinction)
- Add columns for the maximum observed delta flux in each band
'''

# Column layout of the input table produced by trim_sn_summary.py.
_INIT_COLUMNS = [('id_string', 'TEXT'), ('host_galaxy', 'BIGINT'),
                 ('ra', 'DOUBLE'), ('dec', 'DOUBLE'), ('redshift', 'DOUBLE'),
                 ('c', 'DOUBLE'), ('mB', 'DOUBLE'), ('t0', 'DOUBLE'),
                 ('x0', 'DOUBLE'), ('x1', 'DOUBLE')]
# Columns appended by this script: integer id, MW extinction, and the
# per-band maximum observed delta flux.
_ADD_COLUMNS = [('id', 'BIGINT'), ('av', 'FLOAT'), ('rv', 'FLOAT'),
                ('max_flux_u', 'FLOAT'),('max_flux_g', 'FLOAT'),
                ('max_flux_r', 'FLOAT'),('max_flux_i', 'FLOAT'),
                ('max_flux_z', 'FLOAT'),('max_flux_y', 'FLOAT')]
# NOTE(review): _INITIAL_TABLE duplicates _IN_TABLE and is unused in the
# visible code; kept for backward compatibility.
_INITIAL_TABLE = 'initial_summary'
# Working area for SN truth files.  Assumes the SCRATCH environment
# variable is set; os.path.join raises TypeError if os.getenv returns None.
_SN_DIR = os.path.join(os.getenv('SCRATCH'), 'desc/truth/sn')
_IN_FILE = os.path.join(_SN_DIR, 'initial_table.db')
_IN_TABLE = 'initial_summary'
_OUT_TABLE = 'truth_sn_summary'
_OUT_FILE = os.path.join(_SN_DIR, _OUT_TABLE + '.db')
# SQLite db holding per-visit SN variability (table _VAR_TABLE).
_VAR_FILE = os.path.join(_SN_DIR, 'sum_variable-31mar.db')
_VAR_TABLE = 'sn_variability_truth'
# Largest integer id used for stars; SN integer ids must all exceed this.
_MAX_STAR_ID = 41021613038
# Object-type code folded into integer ids for SNe with a real host galaxy.
_SN_OBJ_TYPE = 22
class SnSummaryWriter:
    '''
    Finish the work of creating the table truth_sn_summary from the
    initial summary table.  It will
    * Add columns for max flux per band
    * Add Milky Way extinction columns Av, Rv
    * Add a new integer id derived from the host-galaxy id
    '''
    # Shared dust-map model used to look up E(B-V) along each sight line.
    ebv_model = EBVbase()
def __init__(self, out_file=_OUT_FILE, in_file=_IN_FILE,
in_table=_IN_TABLE, var_file=_VAR_FILE):
self._out_file = out_file
self._out_table = _OUT_TABLE
self._in_file = in_file
self._in_table = in_table
self._var_file = var_file
@staticmethod
def _connect_read(path):
'''
Not obvious how to connect read-only to SQLite db. Package it up here
'''
conn = sqlite3.connect(f'file:{path}?mode=ro', uri=True)
return conn
@staticmethod
def get_MW_AvRv(ra, dec, Rv=3.1):
'''
Copied from
https://github.com/LSSTDESC/sims_TruthCatalog/blob/master/python/desc/sims_truthcatalog/synthetic_photometry.py#L133
'''
#eq_coord = np.array([[np.radians(ra)], [np.radians(dec)]])
eq_coord = np.array([np.radians(ra), np.radians(dec)])
ebv = SnSummaryWriter.ebv_model.calculateEbv(equatorialCoordinates=eq_coord,
interp=True)
Av = Rv*ebv
return Av, Rv
@staticmethod
def make_int_id(host):
'''
Parameters
----------
host int id of host galaxy
When host is a real galaxy, new id will be
host * 1024 + (object-type-id), which is probably 22
Otherwise assign int id to be host_id + CONSTANT
where CONSTANT is large enough that all int ids are larger
than MAX_STAR_ID. Least host id is 0.
'''
OFFSET = _MAX_STAR_ID + 1
if host < 100000:
new_id = host + OFFSET
else:
new_id = host * 1024 + _SN_OBJ_TYPE
return new_id
_MAX_FLUX_QUERY = '''select bandpass, max(delta_flux)
from sn_variability_truth where id=? group by bandpass'''
_INSERT = 'insert into ' + _OUT_TABLE + ''' VALUES
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
@staticmethod
def get_max_fluxes(conn, id):
'''
Give connection to variability file and id, find max flux for
each band. Return a tuple of values in the usual order
'''
out_dict = {}
cur = conn.cursor()
cur.execute(SnSummaryWriter._MAX_FLUX_QUERY, (id,))
for row in cur:
out_dict[row[0]] = row[1]
return (out_dict.get('u'), out_dict.get('g'), out_dict.get('r'),
out_dict.get('i'), out_dict.get('z'), out_dict.get('y'))
@staticmethod
def assemble_create_table(table_name, columns):
'''
Return string which will create table with supplied names
and column specifications (a tuple (col_name, col_type) )
'''
stmt = 'CREATE TABLE ' + table_name + '('
col_specs = [f'{c[0]} {c[1]}' for c in columns]
stmt += ','.join(col_specs) + ')'
return stmt
def _do_chunk(self, in_cur):
'''
Fetch the next set of rows, calculate additional columns
and write to output.
Returns
-------
False if there might be more data; otherwise (all done) True
'''
rows = in_cur.fetchmany()
if len(rows) == 0:
return True
id_list, host, ra, dec, c5, c6, c7, c8, c9, c10 = zip(*rows)
Av, rv = self.get_MW_AvRv(ra, dec)
Rv = np.full((len(Av),), rv)
id_int = [self.make_int_id(h) for h in host]
max_deltas = [self.get_max_fluxes(self._conn_var, id_str) for id_str in id_list]
u, g, r, i, z, y = zip(*max_deltas)
to_write = list(zip(id_list, host, ra, dec, c5, c6, c7, c8, c9, c10,
id_int, Av, Rv, u, g, r, i, z, y))
self._conn_out.cursor().executemany(self._INSERT, to_write)
self._conn_out.commit()
return False
def complete(self, chunksize=20000, max_chunk=None):
self._conn_in = self._connect_read(self._in_file)
self._conn_var = self._connect_read(self._var_file)
self._conn_out = sqlite3.connect(self._out_file)
out_columns = _INIT_COLUMNS + _ADD_COLUMNS
create_query = self.assemble_create_table(_OUT_TABLE, out_columns)
self._conn_out.cursor().execute(create_query)
self._in_names = [e[0] for e in _INIT_COLUMNS]
rd_query = 'select ' + ','.join(self._in_names) + ' from ' + self._in_table
in_cur = self._conn_in.cursor()
in_cur.arraysize = chunksize
in_cur.execute(rd_query)
done = False
i_chunk = 0
while not done:
done = self._do_chunk(in_cur)
if done:
print("all done")
else:
print('completed chunk ', i_chunk)
i_chunk += 1
if max_chunk:
if i_chunk >= max_chunk:
break
self._conn_in.close()
self._conn_out.close()
self._conn_var.close()
if __name__ == '__main__':
out_file = os.path.join(_SN_DIR, 'truth_sn_summary.db')
writer = SnSummaryWriter(out_file=out_file)
# A call suitable for testing
#writer.complete(chunksize=10, max_chunk=3)
writer.complete()
| [
"numpy.radians",
"sqlite3.connect",
"lsst.sims.catUtils.dust.EBVbase",
"os.path.join",
"os.getenv"
] | [((1061, 1102), 'os.path.join', 'os.path.join', (['_SN_DIR', '"""initial_table.db"""'], {}), "(_SN_DIR, 'initial_table.db')\n", (1073, 1102), False, 'import os\n'), ((1177, 1218), 'os.path.join', 'os.path.join', (['_SN_DIR', "(_OUT_TABLE + '.db')"], {}), "(_SN_DIR, _OUT_TABLE + '.db')\n", (1189, 1218), False, 'import os\n'), ((1231, 1277), 'os.path.join', 'os.path.join', (['_SN_DIR', '"""sum_variable-31mar.db"""'], {}), "(_SN_DIR, 'sum_variable-31mar.db')\n", (1243, 1277), False, 'import os\n'), ((1011, 1031), 'os.getenv', 'os.getenv', (['"""SCRATCH"""'], {}), "('SCRATCH')\n", (1020, 1031), False, 'import os\n'), ((1597, 1606), 'lsst.sims.catUtils.dust.EBVbase', 'EBVbase', ([], {}), '()\n', (1604, 1606), False, 'from lsst.sims.catUtils.dust import EBVbase\n'), ((6570, 6614), 'os.path.join', 'os.path.join', (['_SN_DIR', '"""truth_sn_summary.db"""'], {}), "(_SN_DIR, 'truth_sn_summary.db')\n", (6582, 6614), False, 'import os\n'), ((2063, 2112), 'sqlite3.connect', 'sqlite3.connect', (['f"""file:{path}?mode=ro"""'], {'uri': '(True)'}), "(f'file:{path}?mode=ro', uri=True)\n", (2078, 2112), False, 'import sqlite3\n'), ((5617, 5648), 'sqlite3.connect', 'sqlite3.connect', (['self._out_file'], {}), '(self._out_file)\n', (5632, 5648), False, 'import sqlite3\n'), ((2456, 2470), 'numpy.radians', 'np.radians', (['ra'], {}), '(ra)\n', (2466, 2470), True, 'import numpy as np\n'), ((2472, 2487), 'numpy.radians', 'np.radians', (['dec'], {}), '(dec)\n', (2482, 2487), True, 'import numpy as np\n')] |
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
eval model
"""
import os
from datetime import datetime
import pickle
import numpy as np
from scipy.spatial.distance import cosine
from sklearn.metrics.pairwise import cosine_similarity
from mindspore import Tensor
from mindspore import context, load_checkpoint, load_param_into_net
from src.ecapa_tdnn import ECAPA_TDNN
from src.reader import DatasetGenerator
from src.metrics import get_EER_from_scores
from src.model_utils.config import config as hparams
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
excluded_set = {2302, 2303, 2304, 2305, 2306, 2307, 2308, 2309, 2310, 2311, 2312, 2313, 2314, 2315,
2316, 2317, 2318, 2319, 2320, 2321, 2322, 2323, 2324, 2325, 2326, 2327, 2328, 2329,
2330, 2331, 2332, 2333, 2334, 2335, 2336, 2337, 2338, 2339, 2340, 2341, 2342, 2343,
2344, 2345, 2346, 2347, 2348, 2349, 2350, 2351, 2352, 2353, 2354, 2355, 2356, 2357,
2358, 2359, 2360, 2361, 2362, 2363, 2364, 2365, 2366, 2367, 2368, 2369, 2370, 2371,
2372, 2373, 2374, 2375, 2376, 2377, 2378, 2379, 2380, 2381, 2382, 2383, 2384, 2385,
2386, 2387, 2970, 2971, 2972, 2973, 2974, 2975, 2976, 2977, 2978, 2979, 2980, 2981,
2982, 2983, 2984, 2985, 2986, 2987, 2988, 2989, 2990, 2991, 2992, 2993, 2994, 2995,
2996, 2997, 2998, 2999, 3000, 3001, 3002, 3003, 3004, 3005, 3006, 3007, 3008, 3009,
3010, 3011, 3012, 3013, 3014, 3015, 3016, 3017, 3018, 3019, 3020, 3021, 3022, 3023,
3024, 3025, 3026, 3027, 3028, 3029, 3030, 3031, 3032, 3033, 3034, 3035, 3036, 3037,
4442, 4443, 4444, 4445, 4446, 4447, 4448, 4449, 4450, 4451, 4452, 4453, 4454, 4455,
4456, 4457, 4458, 4459, 4460, 4461, 4462, 4463, 4464, 4465, 4466, 4467, 4468, 4469,
4470, 4471, 4472, 4473, 4474, 4475, 4476, 4477, 4478, 4479, 4480, 4481, 4482, 4483,
4484, 4485, 4486, 4487, 4488, 4489, 4490, 4491, 4492, 4639, 4640, 4641, 4642, 4643}
def evaluate(spk2emb, utt2emb, trials):
# Evaluate EER given utterance to embedding mapping and trials file
scores, labels = [], []
with open(trials, "r") as f:
for trial in f:
trial = trial.strip()
label, spk, test = trial.split(" ")
spk = spk[:-4]
if label == '1':
labels.append(1)
else:
labels.append(0)
enroll_emb = spk2emb[spk]
test_emb = utt2emb[test[:-4]]
scores.append(1 - cosine(enroll_emb, test_emb))
return get_EER_from_scores(scores, labels)[0]
def evaluate2(spk2emb, utt2emb, norm_dict, params, trials):
# Evaluate EER given utterance to embedding mapping and trials file
train_cohort = None
if norm_dict is not None:
train_cohort = norm_dict
print("train_cohort shape:", train_cohort.shape)
positive_scores = []
negative_scores = []
with open(trials, "r") as f:
lines = f.readlines()
print_dur = 100
for idx_c, trial in enumerate(lines):
if idx_c % print_dur == 0:
print(f'{datetime.now()}, processing {idx_c}/{len(lines)}')
trial = trial.strip()
label, spk_utt, test_utt = trial.split(" ")
spk_utt = spk_utt[:-4]
test_utt = test_utt[:-4]
enrol = (spk2emb[spk_utt])
test = (utt2emb[test_utt])
if train_cohort is not None:
score_e_c = cosine_similarity(
enrol.reshape(1, -1), train_cohort)
score_e_c = np.squeeze(score_e_c)
if hasattr(params, 'cohort_size'):
score_e_c = np.partition(
score_e_c, kth=-params.cohort_size
)[-params.cohort_size:]
mean_e_c = np.mean(score_e_c)
std_e_c = np.std(score_e_c)
# Getting norm stats for test impostors
score_t_c = cosine_similarity(
test.reshape(1, -1), train_cohort)
score_t_c = np.squeeze(score_t_c)
if hasattr(params, 'cohort_size'):
score_t_c = np.partition(
score_t_c, kth=-params.cohort_size
)[-params.cohort_size:]
mean_t_c = np.mean(score_t_c)
std_t_c = np.std(score_t_c)
# Compute the score for the given sentence
score = cosine_similarity(enrol.reshape(
1, -1), test.reshape(1, -1)).item()
# Perform score normalization
if hasattr(params, 'score_norm'):
if params.score_norm == "z-norm":
score = (score - mean_e_c) / std_e_c
elif params.score_norm == "t-norm":
score = (score - mean_t_c) / std_t_c
elif params.score_norm == "s-norm":
score_e = (score - mean_e_c) / std_e_c
score_t = (score - mean_t_c) / std_t_c
score = 0.5 * (score_e + score_t)
if label == '1':
positive_scores.append(score)
else:
negative_scores.append(score)
return positive_scores, negative_scores
def EER(pos_arr, neg_arr):
thresholds = np.sort(np.concatenate((pos_arr, neg_arr)))
thresholds = np.unique(thresholds)
interm_thresholds = (thresholds[0:-1] + thresholds[1:]) / 2
thresholds = np.sort(np.concatenate((thresholds, interm_thresholds)))
pos_scores = np.repeat(np.expand_dims(pos_arr, 0), len(thresholds), axis=0)
pos_scores_threshold = np.transpose(pos_scores) <= thresholds
FRR = (pos_scores_threshold.sum(0)) / pos_scores.shape[1]
del pos_scores
del pos_scores_threshold
neg_scores = np.repeat(np.expand_dims(neg_arr, 0), len(thresholds), axis=0)
neg_scores_threshold = np.transpose(neg_scores) > thresholds
FAR = (neg_scores_threshold.sum(0)) / neg_scores.shape[1]
del neg_scores
del neg_scores_threshold
# Finding the threshold for EER
min_index = np.argmin(np.absolute(FAR - FRR))
# It is possible that eer != fpr != fnr. We return (FAR + FRR) / 2 as EER.
equal_error_rate = (FAR[min_index] + FRR[min_index]) / 2
return equal_error_rate
def emb_mean(g_mean, increment, emb_dict):
emb_dict_mean = dict()
for utt in emb_dict:
if increment == 0:
g_mean = emb_dict[utt]
else:
weight = 1 / (increment + 1)
g_mean = (
1 - weight
) * g_mean + weight * emb_dict[utt]
emb_dict_mean[utt] = emb_dict[utt] - g_mean
increment += 1
if increment % 3000 == 0:
print('processing ', increment)
return emb_dict_mean, g_mean, increment
def compute_embeddings(embedder, dataloader, startidx=0, dur=50000, exc_set=None):
# Compute embeddings for utterances from dataloader
embedder.set_train(False)
utt2emb = dict()
print("Compute embeddings, num to process:", len(dataloader))
for index in range(startidx, startidx + dur):
if index >= len(dataloader):
print("exceed data size")
return utt2emb
batchdata = dataloader[index][0]
if hparams.cut_wav:
batchdata = batchdata[:, :301, :]
if exc_set is not None and index in exc_set:
continue
if index % 1000 == 0:
print(f"{datetime.now()}, iter-{index}")
wavs = Tensor(batchdata)
embs = embedder(wavs)
utt2emb[dataloader[index][1]] = embs.asnumpy()
return utt2emb
if __name__ == "__main__":
context.set_context(device_id=hparams.device_id)
in_channels = hparams.in_channels
channels = hparams.channels
emb_size = hparams.emb_size
model = ECAPA_TDNN(in_channels, channels=(channels, channels, channels, channels, channels * 3),
lin_neurons=emb_size)
eval_data_path = hparams.eval_data_path
dataset_enroll = DatasetGenerator(eval_data_path, False)
steps_per_epoch_enroll = len(dataset_enroll)
print("size of enroll, test:", steps_per_epoch_enroll)
model_path = os.path.join(hparams.model_path)
print(model_path)
param_dict = load_checkpoint(model_path)
# load parameter to the network
load_param_into_net(model, param_dict)
veri_file_path = hparams.veri_file_path
if not os.path.exists(os.path.join(hparams.npy_file_path)):
os.makedirs(hparams.npy_file_path, exist_ok=False)
fpath = os.path.join(hparams.npy_file_path,
f"enroll_dict_bleeched.npy")
if os.path.isfile(fpath):
print(f'find cache file:{fpath}, continue')
enroll_dict = pickle.load(open(fpath, "rb"))
else:
enroll_dict = compute_embeddings(
model, dataset_enroll, dur=len(dataset_enroll), exc_set=excluded_set)
pickle.dump(enroll_dict, open(fpath, "wb"))
eer = evaluate(enroll_dict, enroll_dict, veri_file_path)
print("eer baseline:", eer)
print("Sub mean...")
glob_mean = Tensor([0])
cnt = 0
enroll_dict_mean, glob_mean, cnt = emb_mean(
glob_mean, cnt, enroll_dict)
enroll_dict_mean, glob_mean, cnt = emb_mean(
glob_mean, cnt, enroll_dict)
enroll_dict_mean, glob_mean, cnt = emb_mean(
glob_mean, cnt, enroll_dict)
eer = evaluate(enroll_dict_mean, enroll_dict_mean, veri_file_path)
print("eer with sub mean:", eer)
if hasattr(hparams, 'score_norm') and hparams.cut_wav is not True:
train_norm_path = hparams.train_norm_path
dataset_train = DatasetGenerator(train_norm_path, False)
steps_per_epoch_train = len(dataset_train)
print("steps_per_epoch_train:", steps_per_epoch_train)
start_idx = 0
for start in range(start_idx, len(dataset_train), 50000):
end = start + 50000
if end > len(dataset_train):
end = len(dataset_train)
print("start end:", start, end)
fpath = os.path.join(hparams.npy_file_path,
f"train_dict_{start}_{end}.npy")
if os.path.isfile(fpath):
print(f'find cache file:{fpath}, continue')
continue
train_dict = compute_embeddings(
model, dataset_train, startidx=start, dur=50000)
pickle.dump(train_dict, open(fpath, "wb"))
dict_lst = []
for idx in range(0, 5):
dict_lst.append(pickle.load(open(os.path.join(
hparams.npy_file_path, f"train_dict_{idx*50000}_{(idx+1)*50000}.npy"), "rb")))
dict_lst.append(pickle.load(open(os.path.join(
hparams.npy_file_path, f"train_dict_250000_{len(dataset_train)}.npy"), "rb")))
train_dict = dict()
for dicti in dict_lst:
train_dict.update(dicti)
print('norm data len:', len(train_dict))
train_dict_mean, glob_mean, cnt = emb_mean(
glob_mean, cnt, train_dict)
items = list(train_dict_mean.values())
train_arr = np.asarray(items)
pos_score, neg_score = evaluate2(
enroll_dict_mean, enroll_dict_mean, train_arr, hparams, veri_file_path)
eer = EER(np.array(pos_score), np.array(neg_score))
print("EER with norm:", eer)
| [
"src.reader.DatasetGenerator",
"numpy.absolute",
"mindspore.load_param_into_net",
"mindspore.Tensor",
"os.path.isfile",
"numpy.mean",
"os.path.join",
"numpy.unique",
"mindspore.context.set_context",
"numpy.std",
"numpy.transpose",
"datetime.datetime.now",
"numpy.partition",
"mindspore.load... | [((1129, 1197), 'mindspore.context.set_context', 'context.set_context', ([], {'mode': 'context.GRAPH_MODE', 'device_target': '"""Ascend"""'}), "(mode=context.GRAPH_MODE, device_target='Ascend')\n", (1148, 1197), False, 'from mindspore import context, load_checkpoint, load_param_into_net\n'), ((6087, 6108), 'numpy.unique', 'np.unique', (['thresholds'], {}), '(thresholds)\n', (6096, 6108), True, 'import numpy as np\n'), ((8377, 8425), 'mindspore.context.set_context', 'context.set_context', ([], {'device_id': 'hparams.device_id'}), '(device_id=hparams.device_id)\n', (8396, 8425), False, 'from mindspore import context, load_checkpoint, load_param_into_net\n'), ((8540, 8655), 'src.ecapa_tdnn.ECAPA_TDNN', 'ECAPA_TDNN', (['in_channels'], {'channels': '(channels, channels, channels, channels, channels * 3)', 'lin_neurons': 'emb_size'}), '(in_channels, channels=(channels, channels, channels, channels, \n channels * 3), lin_neurons=emb_size)\n', (8550, 8655), False, 'from src.ecapa_tdnn import ECAPA_TDNN\n'), ((8740, 8779), 'src.reader.DatasetGenerator', 'DatasetGenerator', (['eval_data_path', '(False)'], {}), '(eval_data_path, False)\n', (8756, 8779), False, 'from src.reader import DatasetGenerator\n'), ((8905, 8937), 'os.path.join', 'os.path.join', (['hparams.model_path'], {}), '(hparams.model_path)\n', (8917, 8937), False, 'import os\n'), ((8977, 9004), 'mindspore.load_checkpoint', 'load_checkpoint', (['model_path'], {}), '(model_path)\n', (8992, 9004), False, 'from mindspore import context, load_checkpoint, load_param_into_net\n'), ((9045, 9083), 'mindspore.load_param_into_net', 'load_param_into_net', (['model', 'param_dict'], {}), '(model, param_dict)\n', (9064, 9083), False, 'from mindspore import context, load_checkpoint, load_param_into_net\n'), ((9263, 9327), 'os.path.join', 'os.path.join', (['hparams.npy_file_path', 'f"""enroll_dict_bleeched.npy"""'], {}), "(hparams.npy_file_path, f'enroll_dict_bleeched.npy')\n", (9275, 9327), False, 'import 
os\n'), ((9360, 9381), 'os.path.isfile', 'os.path.isfile', (['fpath'], {}), '(fpath)\n', (9374, 9381), False, 'import os\n'), ((9809, 9820), 'mindspore.Tensor', 'Tensor', (['[0]'], {}), '([0])\n', (9815, 9820), False, 'from mindspore import Tensor\n'), ((3272, 3307), 'src.metrics.get_EER_from_scores', 'get_EER_from_scores', (['scores', 'labels'], {}), '(scores, labels)\n', (3291, 3307), False, 'from src.metrics import get_EER_from_scores\n'), ((6034, 6068), 'numpy.concatenate', 'np.concatenate', (['(pos_arr, neg_arr)'], {}), '((pos_arr, neg_arr))\n', (6048, 6068), True, 'import numpy as np\n'), ((6199, 6246), 'numpy.concatenate', 'np.concatenate', (['(thresholds, interm_thresholds)'], {}), '((thresholds, interm_thresholds))\n', (6213, 6246), True, 'import numpy as np\n'), ((6275, 6301), 'numpy.expand_dims', 'np.expand_dims', (['pos_arr', '(0)'], {}), '(pos_arr, 0)\n', (6289, 6301), True, 'import numpy as np\n'), ((6355, 6379), 'numpy.transpose', 'np.transpose', (['pos_scores'], {}), '(pos_scores)\n', (6367, 6379), True, 'import numpy as np\n'), ((6532, 6558), 'numpy.expand_dims', 'np.expand_dims', (['neg_arr', '(0)'], {}), '(neg_arr, 0)\n', (6546, 6558), True, 'import numpy as np\n'), ((6612, 6636), 'numpy.transpose', 'np.transpose', (['neg_scores'], {}), '(neg_scores)\n', (6624, 6636), True, 'import numpy as np\n'), ((6822, 6844), 'numpy.absolute', 'np.absolute', (['(FAR - FRR)'], {}), '(FAR - FRR)\n', (6833, 6844), True, 'import numpy as np\n'), ((8222, 8239), 'mindspore.Tensor', 'Tensor', (['batchdata'], {}), '(batchdata)\n', (8228, 8239), False, 'from mindspore import Tensor\n'), ((9200, 9250), 'os.makedirs', 'os.makedirs', (['hparams.npy_file_path'], {'exist_ok': '(False)'}), '(hparams.npy_file_path, exist_ok=False)\n', (9211, 9250), False, 'import os\n'), ((10345, 10385), 'src.reader.DatasetGenerator', 'DatasetGenerator', (['train_norm_path', '(False)'], {}), '(train_norm_path, False)\n', (10361, 10385), False, 'from src.reader import DatasetGenerator\n'), 
((11815, 11832), 'numpy.asarray', 'np.asarray', (['items'], {}), '(items)\n', (11825, 11832), True, 'import numpy as np\n'), ((9154, 9189), 'os.path.join', 'os.path.join', (['hparams.npy_file_path'], {}), '(hparams.npy_file_path)\n', (9166, 9189), False, 'import os\n'), ((10766, 10834), 'os.path.join', 'os.path.join', (['hparams.npy_file_path', 'f"""train_dict_{start}_{end}.npy"""'], {}), "(hparams.npy_file_path, f'train_dict_{start}_{end}.npy')\n", (10778, 10834), False, 'import os\n'), ((10883, 10904), 'os.path.isfile', 'os.path.isfile', (['fpath'], {}), '(fpath)\n', (10897, 10904), False, 'import os\n'), ((11978, 11997), 'numpy.array', 'np.array', (['pos_score'], {}), '(pos_score)\n', (11986, 11997), True, 'import numpy as np\n'), ((11999, 12018), 'numpy.array', 'np.array', (['neg_score'], {}), '(neg_score)\n', (12007, 12018), True, 'import numpy as np\n'), ((4299, 4320), 'numpy.squeeze', 'np.squeeze', (['score_e_c'], {}), '(score_e_c)\n', (4309, 4320), True, 'import numpy as np\n'), ((4548, 4566), 'numpy.mean', 'np.mean', (['score_e_c'], {}), '(score_e_c)\n', (4555, 4566), True, 'import numpy as np\n'), ((4593, 4610), 'numpy.std', 'np.std', (['score_e_c'], {}), '(score_e_c)\n', (4599, 4610), True, 'import numpy as np\n'), ((4797, 4818), 'numpy.squeeze', 'np.squeeze', (['score_t_c'], {}), '(score_t_c)\n', (4807, 4818), True, 'import numpy as np\n'), ((5046, 5064), 'numpy.mean', 'np.mean', (['score_t_c'], {}), '(score_t_c)\n', (5053, 5064), True, 'import numpy as np\n'), ((5091, 5108), 'numpy.std', 'np.std', (['score_t_c'], {}), '(score_t_c)\n', (5097, 5108), True, 'import numpy as np\n'), ((3230, 3258), 'scipy.spatial.distance.cosine', 'cosine', (['enroll_emb', 'test_emb'], {}), '(enroll_emb, test_emb)\n', (3236, 3258), False, 'from scipy.spatial.distance import cosine\n'), ((4404, 4452), 'numpy.partition', 'np.partition', (['score_e_c'], {'kth': '(-params.cohort_size)'}), '(score_e_c, kth=-params.cohort_size)\n', (4416, 4452), True, 'import numpy as np\n'), 
((4902, 4950), 'numpy.partition', 'np.partition', (['score_t_c'], {'kth': '(-params.cohort_size)'}), '(score_t_c, kth=-params.cohort_size)\n', (4914, 4950), True, 'import numpy as np\n'), ((8175, 8189), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (8187, 8189), False, 'from datetime import datetime\n'), ((11256, 11348), 'os.path.join', 'os.path.join', (['hparams.npy_file_path', 'f"""train_dict_{idx * 50000}_{(idx + 1) * 50000}.npy"""'], {}), "(hparams.npy_file_path,\n f'train_dict_{idx * 50000}_{(idx + 1) * 50000}.npy')\n", (11268, 11348), False, 'import os\n'), ((3836, 3850), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3848, 3850), False, 'from datetime import datetime\n')] |
"""Build combined NIST table from txt files included in package
"""
import glob
import os
import numpy as np
import re
from astropy.table import Column, Table, vstack
def build_table(line_lists=None):
"""Build master table from NIST txt files
Parameters
----------
line_lists: list or None
A list of line table to read in. If set to 'None',
it will read in tables in the NIST data directory
Returns
-------
master_table: astropy table
Table with all of the NIST line from the txt files
"""
names = ['Intensity', 'Wavelength', 'Element', 'Reference']
# Use packaging directory instead of relative path in the future.
if line_lists is None:
code_dir = os.path.dirname(os.path.realpath(__file__))
line_lists = glob.glob(code_dir + '/datasets/line_lists/NIST/*.txt')
tabs_to_stack = []
for line_list in line_lists:
try:
t = Table.read(line_list, format='ascii', names=names)
tabs_to_stack.append(t)
except:
# Use numpy to parse table that arent comma delimited.
data = np.genfromtxt(line_list,
delimiter=(13, 14, 13, 16),
dtype=str)
t = Table(data, names=names,
dtype=('S10', 'f8', 'S15' , 'S15'))
tabs_to_stack.append(t)
# Stack all of the tables.
master_table = vstack(tabs_to_stack)
# Add on switch for users. Use line if True, don't if False
# Set to True by default.
on_off_column = Column([True] * len(master_table))
master_table.add_column(on_off_column, name='On')
# Strip the numeric characters off of the intensities and add the letters
# that denote intensities to their own column
intensity = master_table['Intensity']
strength = [re.sub('[0-9]+', '', value).strip() for value in intensity]
master_table.add_column(Column(strength), name='Strength')
# Find and strip all alphabetic + special characters
intensity_wo_strength = [re.sub('[a-zA-Z!@#$%^&*]', '', value).strip() \
for value in intensity]
# Delete old column
master_table.remove_column('Intensity')
# Add new Intensity column that only has intensity as an integer.
master_table.add_column(Column(intensity_wo_strength,
dtype=int,
name='Intensity'))
# Reorder table columns
neworder = ('Element','Wavelength','Intensity', 'Strength', 'On', 'Reference')
master_table = master_table[neworder]
return master_table
| [
"astropy.table.Table",
"os.path.realpath",
"numpy.genfromtxt",
"astropy.table.vstack",
"glob.glob",
"astropy.table.Column",
"re.sub",
"astropy.table.Table.read"
] | [((1438, 1459), 'astropy.table.vstack', 'vstack', (['tabs_to_stack'], {}), '(tabs_to_stack)\n', (1444, 1459), False, 'from astropy.table import Column, Table, vstack\n'), ((795, 850), 'glob.glob', 'glob.glob', (["(code_dir + '/datasets/line_lists/NIST/*.txt')"], {}), "(code_dir + '/datasets/line_lists/NIST/*.txt')\n", (804, 850), False, 'import glob\n'), ((1939, 1955), 'astropy.table.Column', 'Column', (['strength'], {}), '(strength)\n', (1945, 1955), False, 'from astropy.table import Column, Table, vstack\n'), ((2330, 2388), 'astropy.table.Column', 'Column', (['intensity_wo_strength'], {'dtype': 'int', 'name': '"""Intensity"""'}), "(intensity_wo_strength, dtype=int, name='Intensity')\n", (2336, 2388), False, 'from astropy.table import Column, Table, vstack\n'), ((747, 773), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (763, 773), False, 'import os\n'), ((937, 987), 'astropy.table.Table.read', 'Table.read', (['line_list'], {'format': '"""ascii"""', 'names': 'names'}), "(line_list, format='ascii', names=names)\n", (947, 987), False, 'from astropy.table import Column, Table, vstack\n'), ((1126, 1189), 'numpy.genfromtxt', 'np.genfromtxt', (['line_list'], {'delimiter': '(13, 14, 13, 16)', 'dtype': 'str'}), '(line_list, delimiter=(13, 14, 13, 16), dtype=str)\n', (1139, 1189), True, 'import numpy as np\n'), ((1270, 1329), 'astropy.table.Table', 'Table', (['data'], {'names': 'names', 'dtype': "('S10', 'f8', 'S15', 'S15')"}), "(data, names=names, dtype=('S10', 'f8', 'S15', 'S15'))\n", (1275, 1329), False, 'from astropy.table import Column, Table, vstack\n'), ((1851, 1878), 're.sub', 're.sub', (['"""[0-9]+"""', '""""""', 'value'], {}), "('[0-9]+', '', value)\n", (1857, 1878), False, 'import re\n'), ((2061, 2098), 're.sub', 're.sub', (['"""[a-zA-Z!@#$%^&*]"""', '""""""', 'value'], {}), "('[a-zA-Z!@#$%^&*]', '', value)\n", (2067, 2098), False, 'import re\n')] |
#!/usr/bin/env python
"""@package docstring
File: analyzer.py
Author: <NAME>
Email: <EMAIL>
Description: File containing classes to analyze data, make movies,
and create graphs from foxlink runs
"""
from pathlib import Path
import numpy as np
# from matplotlib.lines import Line2D
import h5py
import yaml
import pprint
def normalize(vec):
    """!Normalize vectors along the last axis, mapping zero vectors to zero.

    @param vec: numpy array of shape (..., d), rank >= 2, whose last-axis
                vectors are normalized
    @return: array of the same shape with unit vectors along the last axis;
             rows whose norm is zero are returned as all zeros (no NaNs)
    """
    norm = np.linalg.norm(vec, axis=-1)
    # where= skips the zero-norm rows so no divide-by-zero warning fires;
    # those rows keep the zeros supplied through out=.
    # norm[..., None] (was norm[:, None]) broadcasts for any rank >= 2.
    return np.divide(vec, norm[..., None],
                     out=np.zeros_like(vec), where=norm[..., None] != 0)
def touch_group(parent, grp_name):
    """!Return the named child group, creating it first when absent.

    @param parent: Parent group of group to be checked and/or created
    @param grp_name: Name of group to be checked and/or created
    @return: The group reference
    """
    if grp_name in parent:
        return parent[grp_name]
    return parent.create_group(grp_name)
class Analyzer():
"""!Analyze Fokker-Planck equation code"""
def __init__(self, filename="Solver.h5", analysis_type='load'):
"""! Initialize analysis code by loading in hdf5 file and setting up
params.
@param filename: Name of file to be analyzed
@param analysis_type: What kind of analysis ot run on data file
"""
self.mu00 = [] # Array of motor number vs time
self.mu10 = [] # Array of motor polarity vs time
self.mu01 = [] # Array of motor polarity vs time
self.mu11 = [] # Array of motor asymmetry vs time
self.mu20 = [] # Array of motor variance vs time
self.mu02 = [] # Array of motor variance vs time
self.B0_j = []
self.B0_i = []
self.B1_j = []
self.B1_i = []
self.B2_j = []
self.B2_i = []
self.B3_j = []
self.B3_i = []
self.dBds0_j = []
self.dBds0_i = []
self.dBds1_j = []
self.dBds1_i = []
self.dBds2_j = []
self.dBds2_i = []
self.dBds3_j = []
self.dBds3_i = []
self.d2Bds0_j = []
self.d2Bds0_i = []
self.d2Bds1_j = []
self.d2Bds1_i = []
self.d2Bds2_j = []
self.d2Bds2_i = []
self.d2Bds3_j = []
self.d2Bds3_i = []
self._filename = filename
self._h5_data, self._params = self.load()
self.s_type = self._params['solver_type']
self.collect_data_arrays()
self.init_flag = True
self.analyzsis_grp = self.analyze(analysis_type)
def collect_data_arrays(self):
"""!Store data arrays in member variables
@return: void, modifies member variables
"""
self.time = np.asarray(self._h5_data["time"])
# What kind of motion of microtubules
if 'phio' in self._params: # Ang motion
self.phi_arr = self._h5_data['rod_data/phi']
elif 'ro' in self._params: # Para motion
self.R_arr = np.asarray(self._h5_data['rod_data/R_pos'])
else: # General motion
self.R1_pos = np.asarray(self._h5_data['/rod_data/R1_pos'])
self.R2_pos = np.asarray(self._h5_data['/rod_data/R2_pos'])
self.R1_vec = np.asarray(self._h5_data['/rod_data/R1_vec'])
self.R2_vec = np.asarray(self._h5_data['/rod_data/R2_vec'])
def load(self):
"""!Load in data from hdf5 file and grab analysis files if they exist.
@param analysis_type: load, analyze, overwrite. The extent of the
analysis that should be carried out.
@return: void, stores hdf5 file, parameters, and data arrays to self.
"""
h5_data = h5py.File(self._filename, 'r+')
if 'params' in h5_data.attrs:
params = yaml.safe_load(h5_data.attrs['params'])
else:
params = h5_data.attrs
pprint.pprint(params)
return h5_data, params
    def save(self):
        """!Flush any pending writes to the hdf5 file and close the handle.

        Note: no pickle file is created here despite the historical wording;
        this finalizes the hdf5 file opened in load().

        @return: void
        """
        self._h5_data.flush()
        self._h5_data.close()
def get_name(self):
""" Get name of simulation """
return self._params['name'] if 'name' in self._params else Path.cwd(
).name
########################
# analysis functions #
########################
def analyze(self, analysis_type='analyze'):
"""!Read in analysis or analyze data according to type of solver hdf5
file came from and what analysis_type was specified.
@param analysis_type: load, analyze, overwrite. The extent of the
analysis that should be carried out.
@return: void
"""
if 'analysis' not in self._h5_data:
if analysis_type == 'load':
print('-- {} has not been analyzed. --'.format(self._filename))
return
analysis_grp = self._h5_data.create_group('analysis')
elif analysis_type == 'overwrite': # Delete old analysis and try again
del self._h5_data['analysis']
analysis_grp = self._h5_data.create_group('analysis')
else:
analysis_grp = self._h5_data['analysis']
return analysis_grp
def rod_geometry_analysis(self, rod_analysis_grp, analysis_type='analyze'):
"""!Analyze and store data relating to the configuration of the rods
@param rod_analysis_grp: TODO
@return: TODO
"""
# Analyze distance between rod center at each time step
if 'center_separation' not in rod_analysis_grp:
if analysis_type != 'load':
self.dR_arr = np.linalg.norm(
np.subtract(self.R2_pos, self.R1_pos), axis=1)
self.rod_sep_dset = rod_analysis_grp.create_dataset(
'center_separation', data=self.dR_arr, dtype=np.float32)
else:
print('--- The rod center separation not analyzed or stored. ---')
else:
self.rod_sep_dset = rod_analysis_grp['center_separation']
self.dR_arr = self.rod_sep_dset[...]
# Analyze angle between rods at teach time step
if 'angle_between' not in rod_analysis_grp:
if analysis_type != 'load':
self.phi_arr = np.arccos(
np.einsum('ij,ij->i', self.R1_vec, self.R2_vec))
self.rod_phi_dset = rod_analysis_grp.create_dataset(
'angle_between', data=self.phi_arr, dtype=np.float32)
else:
print('--- The angle between rods not analyzed or stored. ---')
else:
self.rod_phi_dset = rod_analysis_grp['angle_between']
self.phi_arr = np.asarray(self.rod_phi_dset)
# Minus-end(bead) separations
if 'overlap' not in rod_analysis_grp:
if analysis_type != 'load':
self.overlap_arr = Analyzer.calc_overlap(self.R1_pos, self.R2_pos,
self.R1_vec, self.R2_vec,
self._params['L1'],
self._params['L2'])
self.rod_overlap_dset = rod_analysis_grp.create_dataset(
'overlap', data=self.overlap_arr, dtype=np.float32)
else:
print('--- The rod overlap not analyzed or stored. ---')
else:
self.rod_overlap_dset = rod_analysis_grp['overlap']
self.overlap_arr = np.asarray(self.rod_phi_dset)
###########################
# Calculation functions #
###########################
@staticmethod
def calc_overlap(R1_pos, R2_pos, R1_vec, R2_vec, L1, L2):
"""!Calculate the overlap of two antiparallel rods based on the location
of their minus ends. You can also negate the vector of one of the rods
if they are parallel instead of antiparallel.
@param R1_pos: TODO
@param R2_pos: TODO
@param R1_vec: TODO
@param R2_vec: TODO
@param L1: TODO
@param L2: TODO
@return: Overlap of two rods as a function of time
"""
minus1_pos = R1_pos - .5 * L1 * R1_vec
minus2_pos = R2_pos - .5 * L2 * R2_vec
# Distance between beads
d = np.subtract(minus1_pos, minus2_pos)
dmag = np.linalg.norm(d, axis=1)
# Projection of one rod onto another
proj = abs(np.einsum('ij,ij->i', R1_vec, R2_vec))
return proj * (L1 + L2) - dmag
@staticmethod
def find_start_time(arr, reps=1):
"""! A function to find when simulations reaches a steady state with
respect to array, arr.
@param arr: Array to find steady state in
@param reps: repetitions of recursion
@return: st Start time, the index of time array when the simulation
first reaches a the steady state average
"""
# Test to make sure correct parameters types were given to function
if not isinstance(arr, np.ndarray):
raise TypeError(" Array arr must be numpy.ndarray type ")
if reps > 0:
start_time = Analyzer.find_start_time(arr - arr.mean(), reps - 1)
else:
# Get array of sign values, ie. sign with respect to mean
sign_arr = np.sign(arr)
# Create array of differences from one index to the next
diff_arr = np.diff(sign_arr)
# Find the non-zero differences and record the indices
index_arr = np.where(diff_arr)[0] # always produces a tuple
if index_arr.size == 0: # System was in steady state all along
start_time = 0
else:
start_time = index_arr[0]
return start_time
def create_distr_approx_func(self):
"""!Create a function that will approximate the motor distribution
@return: Bivariate gaussian distribution approximation
"""
A = self.mu00
sig_i = np.nan_to_num(
np.sqrt((self.mu20 / A) - (self.mu10**2) / (A * A)))
print("sig_i")
print(sig_i)
sig_j = np.nan_to_num(
np.sqrt((self.mu02 / A) - (self.mu01**2) / (A * A)))
print("sig_j")
print(sig_j)
nu = np.nan_to_num(
(self.mu11 / A - (self.mu10 * self.mu01) / (A * A)) / (sig_i * sig_j))
nu = np.clip(nu, -.9999999999999999, .999999999999999999)
print("nu")
print(nu)
pre_fact = np.nan_to_num(
A / (2. * np.pi * sig_i * sig_j * np.sqrt(1. - (nu * nu))), nan=0, posinf=0)
print("pre_fact")
print(pre_fact)
denom = np.nan_to_num(.5 / ((nu * nu) - 1.), nan=0, posinf=0)
def gauss_distr_approx_func(s_i, s_j, n=-1):
x = np.nan_to_num((s_i - (self.mu10[n] / A[n])) / sig_i[n])
y = np.nan_to_num((s_j - (self.mu01[n] / A[n])) / sig_j[n])
return np.nan_to_num(pre_fact[n] * np.exp(
(x * x + y * y - 2. * nu[n] * x * y) * denom[n]), posinf=0, nan=0)
return gauss_distr_approx_func
| [
"h5py.File",
"numpy.zeros_like",
"numpy.subtract",
"numpy.nan_to_num",
"numpy.asarray",
"numpy.einsum",
"numpy.clip",
"numpy.diff",
"numpy.linalg.norm",
"pprint.pprint",
"yaml.safe_load",
"numpy.sign",
"numpy.where",
"numpy.exp",
"pathlib.Path.cwd",
"numpy.sqrt"
] | [((441, 469), 'numpy.linalg.norm', 'np.linalg.norm', (['vec'], {'axis': '(-1)'}), '(vec, axis=-1)\n', (455, 469), True, 'import numpy as np\n'), ((2737, 2770), 'numpy.asarray', 'np.asarray', (["self._h5_data['time']"], {}), "(self._h5_data['time'])\n", (2747, 2770), True, 'import numpy as np\n'), ((3713, 3744), 'h5py.File', 'h5py.File', (['self._filename', '"""r+"""'], {}), "(self._filename, 'r+')\n", (3722, 3744), False, 'import h5py\n'), ((3901, 3922), 'pprint.pprint', 'pprint.pprint', (['params'], {}), '(params)\n', (3914, 3922), False, 'import pprint\n'), ((8366, 8401), 'numpy.subtract', 'np.subtract', (['minus1_pos', 'minus2_pos'], {}), '(minus1_pos, minus2_pos)\n', (8377, 8401), True, 'import numpy as np\n'), ((8417, 8442), 'numpy.linalg.norm', 'np.linalg.norm', (['d'], {'axis': '(1)'}), '(d, axis=1)\n', (8431, 8442), True, 'import numpy as np\n'), ((10345, 10431), 'numpy.nan_to_num', 'np.nan_to_num', (['((self.mu11 / A - self.mu10 * self.mu01 / (A * A)) / (sig_i * sig_j))'], {}), '((self.mu11 / A - self.mu10 * self.mu01 / (A * A)) / (sig_i *\n sig_j))\n', (10358, 10431), True, 'import numpy as np\n'), ((10456, 10493), 'numpy.clip', 'np.clip', (['nu', '(-0.9999999999999999)', '(1.0)'], {}), '(nu, -0.9999999999999999, 1.0)\n', (10463, 10493), True, 'import numpy as np\n'), ((10736, 10789), 'numpy.nan_to_num', 'np.nan_to_num', (['(0.5 / (nu * nu - 1.0))'], {'nan': '(0)', 'posinf': '(0)'}), '(0.5 / (nu * nu - 1.0), nan=0, posinf=0)\n', (10749, 10789), True, 'import numpy as np\n'), ((536, 554), 'numpy.zeros_like', 'np.zeros_like', (['vec'], {}), '(vec)\n', (549, 554), True, 'import numpy as np\n'), ((3804, 3843), 'yaml.safe_load', 'yaml.safe_load', (["h5_data.attrs['params']"], {}), "(h5_data.attrs['params'])\n", (3818, 3843), False, 'import yaml\n'), ((6753, 6782), 'numpy.asarray', 'np.asarray', (['self.rod_phi_dset'], {}), '(self.rod_phi_dset)\n', (6763, 6782), True, 'import numpy as np\n'), ((7573, 7602), 'numpy.asarray', 'np.asarray', 
(['self.rod_phi_dset'], {}), '(self.rod_phi_dset)\n', (7583, 7602), True, 'import numpy as np\n'), ((8507, 8544), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'R1_vec', 'R2_vec'], {}), "('ij,ij->i', R1_vec, R2_vec)\n", (8516, 8544), True, 'import numpy as np\n'), ((9381, 9393), 'numpy.sign', 'np.sign', (['arr'], {}), '(arr)\n', (9388, 9393), True, 'import numpy as np\n'), ((9486, 9503), 'numpy.diff', 'np.diff', (['sign_arr'], {}), '(sign_arr)\n', (9493, 9503), True, 'import numpy as np\n'), ((10095, 10144), 'numpy.sqrt', 'np.sqrt', (['(self.mu20 / A - self.mu10 ** 2 / (A * A))'], {}), '(self.mu20 / A - self.mu10 ** 2 / (A * A))\n', (10102, 10144), True, 'import numpy as np\n'), ((10235, 10284), 'numpy.sqrt', 'np.sqrt', (['(self.mu02 / A - self.mu01 ** 2 / (A * A))'], {}), '(self.mu02 / A - self.mu01 ** 2 / (A * A))\n', (10242, 10284), True, 'import numpy as np\n'), ((10860, 10913), 'numpy.nan_to_num', 'np.nan_to_num', (['((s_i - self.mu10[n] / A[n]) / sig_i[n])'], {}), '((s_i - self.mu10[n] / A[n]) / sig_i[n])\n', (10873, 10913), True, 'import numpy as np\n'), ((10932, 10985), 'numpy.nan_to_num', 'np.nan_to_num', (['((s_j - self.mu01[n] / A[n]) / sig_j[n])'], {}), '((s_j - self.mu01[n] / A[n]) / sig_j[n])\n', (10945, 10985), True, 'import numpy as np\n'), ((2999, 3042), 'numpy.asarray', 'np.asarray', (["self._h5_data['rod_data/R_pos']"], {}), "(self._h5_data['rod_data/R_pos'])\n", (3009, 3042), True, 'import numpy as np\n'), ((3101, 3146), 'numpy.asarray', 'np.asarray', (["self._h5_data['/rod_data/R1_pos']"], {}), "(self._h5_data['/rod_data/R1_pos'])\n", (3111, 3146), True, 'import numpy as np\n'), ((3173, 3218), 'numpy.asarray', 'np.asarray', (["self._h5_data['/rod_data/R2_pos']"], {}), "(self._h5_data['/rod_data/R2_pos'])\n", (3183, 3218), True, 'import numpy as np\n'), ((3245, 3290), 'numpy.asarray', 'np.asarray', (["self._h5_data['/rod_data/R1_vec']"], {}), "(self._h5_data['/rod_data/R1_vec'])\n", (3255, 3290), True, 'import numpy as np\n'), ((3317, 3362), 
'numpy.asarray', 'np.asarray', (["self._h5_data['/rod_data/R2_vec']"], {}), "(self._h5_data['/rod_data/R2_vec'])\n", (3327, 3362), True, 'import numpy as np\n'), ((4246, 4256), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (4254, 4256), False, 'from pathlib import Path\n'), ((9595, 9613), 'numpy.where', 'np.where', (['diff_arr'], {}), '(diff_arr)\n', (9603, 9613), True, 'import numpy as np\n'), ((5719, 5756), 'numpy.subtract', 'np.subtract', (['self.R2_pos', 'self.R1_pos'], {}), '(self.R2_pos, self.R1_pos)\n', (5730, 5756), True, 'import numpy as np\n'), ((6356, 6403), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'self.R1_vec', 'self.R2_vec'], {}), "('ij,ij->i', self.R1_vec, self.R2_vec)\n", (6365, 6403), True, 'import numpy as np\n'), ((10627, 10649), 'numpy.sqrt', 'np.sqrt', (['(1.0 - nu * nu)'], {}), '(1.0 - nu * nu)\n', (10634, 10649), True, 'import numpy as np\n'), ((11035, 11091), 'numpy.exp', 'np.exp', (['((x * x + y * y - 2.0 * nu[n] * x * y) * denom[n])'], {}), '((x * x + y * y - 2.0 * nu[n] * x * y) * denom[n])\n', (11041, 11091), True, 'import numpy as np\n')] |
from scipy.spatial.distance import cdist
from malaya_speech.model.clustering import ClusteringAP
from malaya_speech.utils.dist import l2_normalize, compute_log_dist_matrix
import numpy as np
from herpetologist import check_type
from typing import Callable
@check_type
def speaker_similarity(
    vad_results,
    speaker_vector,
    similarity_threshold: float = 0.8,
    norm_function: Callable = None,
    return_embedding: bool = False,
):
    """
    Speaker diarization using L2-Norm similarity.
    Parameters
    ----------
    vad_results: List[Tuple[Frame, label]]
        results from VAD.
    speaker_vector: callable
        speaker vector object.
    similarity_threshold: float, optional (default=0.8)
        if current voice activity sample similar at least 80%, we assumed it is from the same speaker.
    norm_function: Callable, optional(default=None)
        normalize function for speaker vectors.
    return_embedding: bool, optional (default=False)
        if True, also return the list of first-seen speaker embeddings.
    Returns
    -------
    result : List[Tuple[Frame, label]]
    """
    if not 0 < similarity_threshold <= 1.0:
        raise ValueError(
            'similarity_threshold must, 0 < similarity_threshold <= 1.0'
        )
    speakers, embedding = [], []
    for result in vad_results:
        if result[1]:
            # Voiced segment: embed it and compare with all speakers seen so far.
            vector = speaker_vector([result[0]])[0]
            if len(embedding):
                a = np.array(embedding)
                if norm_function:
                    a = norm_function(a)
                # Cosine similarity (1 - cosine distance) against every stored speaker.
                s = 1 - cdist([vector], a, metric='cosine')[0]
                where = np.where(s >= similarity_threshold)[0]
                if len(where):
                    # Assign the most similar speaker above the threshold.
                    # NOTE: the stored embedding is NOT updated on a match —
                    # each speaker keeps its first-seen vector.
                    argsort = (np.argsort(s)[::-1]).tolist()
                    argsort = [a for a in argsort if a in where]
                    speakers.append(f'speaker {argsort[0]}')
                else:
                    # No stored speaker is similar enough: register a new one.
                    speakers.append(f'speaker {len(embedding)}')
                    embedding.append(vector)
            else:
                # First voiced segment defines speaker 0.
                speakers.append(f'speaker {len(embedding)}')
                embedding.append(vector)
        else:
            speakers.append('not a speaker')
    results = []
    for no, result in enumerate(vad_results):
        results.append((result[0], speakers[no]))
    if return_embedding:
        return results, embedding
    else:
        return results
@check_type
def n_clustering(
    vad_results,
    speaker_vector,
    model,
    norm_function: Callable = l2_normalize,
    return_embedding=False,
):
    """
    Speaker diarization using any clustering model.
    Parameters
    ----------
    vad_results: List[Tuple[Frame, label]]
        results from VAD.
    speaker_vector: callable
        speaker vector object.
    model: callable
        Prefer any sklearn unsupervised clustering model.
        Required `fit_predict` or `apply` method.
    norm_function: Callable, optional(default=malaya_speech.utils.dist.l2_normalize)
        normalize function for speaker vectors.
    return_embedding: bool, optional (default=False)
        if True, also return the stacked (and normalized) speaker vectors.
    Returns
    -------
    result : List[Tuple[Frame, label]]
    """
    if not hasattr(model, 'fit_predict') and not hasattr(model, 'apply'):
        raise ValueError('model must have `fit_predict` or `apply` method.')
    labels, vectors, voiced_to_frame = [], [], {}
    for idx, result in enumerate(vad_results):
        if not result[1]:
            labels.append('not a speaker')
            continue
        # Placeholder label; replaced by the cluster assignment below.
        labels.append('got')
        voiced_to_frame[len(vectors)] = idx
        vectors.append(speaker_vector([result[0]])[0])
    vectors = np.array(vectors)
    if norm_function:
        vectors = norm_function(vectors)
    # `apply` takes precedence when the model provides both entry points.
    if hasattr(model, 'fit_predict'):
        cluster_labels = model.fit_predict(vectors)
    if hasattr(model, 'apply'):
        cluster_labels = model.apply(vectors)
    for vec_idx, frame_idx in voiced_to_frame.items():
        labels[frame_idx] = f'speaker {cluster_labels[vec_idx]}'
    results = [(result[0], labels[idx]) for idx, result in enumerate(vad_results)]
    if return_embedding:
        return results, vectors
    return results
@check_type
def affinity_propagation(
    vad_results,
    speaker_vector,
    norm_function: Callable = l2_normalize,
    log_distance_metric: str = 'cosine',
    damping: float = 0.8,
    preference: float = None,
    return_embedding=False,
):
    """
    Speaker diarization using sklearn Affinity Propagation.
    Parameters
    ----------
    vad_results: List[Tuple[Frame, label]]
        results from VAD.
    speaker_vector: callable
        speaker vector object.
    norm_function: Callable, optional(default=malaya_speech.utils.dist.l2_normalize)
        normalize function for speaker vectors.
    log_distance_metric: str, optional (default='cosine')
        post distance norm in log scale metrics.
    damping: float, optional (default=0.8)
        damping factor forwarded to the Affinity Propagation model.
    preference: float, optional (default=None)
        preference forwarded to the Affinity Propagation model.
    return_embedding: bool, optional (default=False)
        if True, also return the stacked speaker vectors.
    Returns
    -------
    result : List[Tuple[Frame, label]]
    """
    # Build the clustering model, then delegate the whole diarization
    # pipeline to the generic `n_clustering` helper.
    model = ClusteringAP(
        metric=log_distance_metric,
        damping=damping,
        preference=preference,
    )
    return n_clustering(
        vad_results=vad_results,
        speaker_vector=speaker_vector,
        model=model,
        norm_function=norm_function,
        return_embedding=return_embedding,
    )
@check_type
def spectral_cluster(
    vad_results,
    speaker_vector,
    min_clusters: int = None,
    max_clusters: int = None,
    norm_function: Callable = l2_normalize,
    log_distance_metric: str = None,
    return_embedding=False,
    **kwargs,
):
    """
    Speaker diarization using SpectralCluster, https://github.com/wq2012/SpectralCluster
    Parameters
    ----------
    vad_results: List[Tuple[Frame, label]]
        results from VAD.
    speaker_vector: callable
        speaker vector object.
    min_clusters: int, optional (default=None)
        minimal number of clusters allowed (only effective if not None).
    max_clusters: int, optional (default=None)
        maximal number of clusters allowed (only effective if not None).
        can be used together with min_clusters to fix the number of clusters.
    norm_function: Callable, optional(default=malaya_speech.utils.dist.l2_normalize)
        normalize function for speaker vectors.
    log_distance_metric: str, optional (default=None)
        post distance norm in log scale metrics.
    return_embedding: bool, optional (default=False)
        if True, also return the matrix handed to the clusterer.
    Returns
    -------
    result : List[Tuple[Frame, label]]
    """
    # spectralcluster is an optional dependency; fail with a clear message.
    try:
        from spectralcluster import SpectralClusterer
    except BaseException:
        raise ModuleNotFoundError(
            'spectralcluster not installed. Please install it by `pip install spectralcluster` and try again.'
        )
    clusterer = SpectralClusterer(
        min_clusters=min_clusters,
        max_clusters=max_clusters,
        **kwargs,
    )
    # Embed every voiced segment, remembering its position in vad_results.
    labels = ['not a speaker'] * len(vad_results)
    voiced_positions, vectors = [], []
    for idx, result in enumerate(vad_results):
        if result[1]:
            voiced_positions.append(idx)
            vectors.append(speaker_vector([result[0]])[0])
    vectors = np.array(vectors)
    if norm_function:
        vectors = norm_function(vectors)
    if log_distance_metric:
        vectors = compute_log_dist_matrix(vectors, log_distance_metric)
    cluster_labels = clusterer.predict(vectors)
    # Write the cluster assignments back onto the voiced frames.
    for vec_idx, frame_idx in enumerate(voiced_positions):
        labels[frame_idx] = f'speaker {cluster_labels[vec_idx]}'
    results = [(result[0], labels[idx]) for idx, result in enumerate(vad_results)]
    if return_embedding:
        return results, vectors
    return results
| [
"scipy.spatial.distance.cdist",
"malaya_speech.utils.dist.compute_log_dist_matrix",
"numpy.argsort",
"numpy.where",
"numpy.array",
"spectralcluster.SpectralClusterer",
"malaya_speech.model.clustering.ClusteringAP"
] | [((3780, 3800), 'numpy.array', 'np.array', (['activities'], {}), '(activities)\n', (3788, 3800), True, 'import numpy as np\n'), ((5144, 5229), 'malaya_speech.model.clustering.ClusteringAP', 'ClusteringAP', ([], {'metric': 'log_distance_metric', 'damping': 'damping', 'preference': 'preference'}), '(metric=log_distance_metric, damping=damping, preference=preference\n )\n', (5156, 5229), False, 'from malaya_speech.model.clustering import ClusteringAP\n'), ((6851, 6937), 'spectralcluster.SpectralClusterer', 'SpectralClusterer', ([], {'min_clusters': 'min_clusters', 'max_clusters': 'max_clusters'}), '(min_clusters=min_clusters, max_clusters=max_clusters, **\n kwargs)\n', (6868, 6937), False, 'from spectralcluster import SpectralClusterer\n'), ((7324, 7344), 'numpy.array', 'np.array', (['activities'], {}), '(activities)\n', (7332, 7344), True, 'import numpy as np\n'), ((7464, 7520), 'malaya_speech.utils.dist.compute_log_dist_matrix', 'compute_log_dist_matrix', (['activities', 'log_distance_metric'], {}), '(activities, log_distance_metric)\n', (7487, 7520), False, 'from malaya_speech.utils.dist import l2_normalize, compute_log_dist_matrix\n'), ((1495, 1514), 'numpy.array', 'np.array', (['embedding'], {}), '(embedding)\n', (1503, 1514), True, 'import numpy as np\n'), ((1677, 1712), 'numpy.where', 'np.where', (['(s >= similarity_threshold)'], {}), '(s >= similarity_threshold)\n', (1685, 1712), True, 'import numpy as np\n'), ((1614, 1649), 'scipy.spatial.distance.cdist', 'cdist', (['[vector]', 'a'], {'metric': '"""cosine"""'}), "([vector], a, metric='cosine')\n", (1619, 1649), False, 'from scipy.spatial.distance import cdist\n'), ((1778, 1791), 'numpy.argsort', 'np.argsort', (['s'], {}), '(s)\n', (1788, 1791), True, 'import numpy as np\n')] |
import numpy as np
def dfs(cb, dep):
    """Depth-first backtracking sudoku solver.

    Fills the empty cells (zeros) of the 9x9 board ``cb`` in place and prints
    the first complete solution found.

    Fixes vs. previous revision: the search used to keep exploring after a
    solution was printed and backtracking erased the solved board; it now
    returns a success flag so the recursion stops at the first solution and
    ``cb`` is left in its solved state.

    Args:
        cb: 9x9 numpy integer array; 0 marks an empty cell.
        dep: current recursion depth (number of cells filled so far).

    Returns:
        True if a solution was found, False if this branch is a dead end.
    """
    if not np.any(cb == 0):
        print('Solved at %d-th depth' % dep)
        print(cb)
        return True
    # Pick the first empty cell and try every candidate digit in it.
    pos = np.argwhere(cb == 0)[0]
    for val in range(1, 10):
        if check(cb, pos, val):
            cb[pos[0], pos[1]] = val
            if dfs(cb, dep + 1):
                # Propagate success upward without undoing the solution.
                return True
            cb[pos[0], pos[1]] = 0
    return False
def check(cb, pos, val):
    """Return True if digit ``val`` may legally be placed at ``pos`` on ``cb``.

    Enforces the three sudoku constraints: ``val`` must not already appear in
    the row, the column, or the 3x3 sub-grid containing ``pos``.
    """
    row, col = pos[0], pos[1]
    # Row and column constraints.
    if val in cb[row, :] or val in cb[:, col]:
        return False
    # Top-left corner of the enclosing 3x3 box.
    box_row = (row // 3) * 3
    box_col = (col // 3) * 3
    return val not in cb[box_row:box_row + 3, box_col:box_col + 3]
def main():
    """Solve a hard-coded sudoku puzzle and print the solution.

    Fixes vs. previous revision: removed the unused ``visit`` board copy and
    the unused ``pos`` lookup (dead code).
    """
    chessboard = np.array([
        [2, 0, 3, 4, 0, 0, 0, 9, 0],
        [0, 0, 0, 1, 0, 2, 0, 0, 5],
        [5, 0, 6, 0, 0, 0, 1, 0, 0],
        [0, 2, 0, 5, 0, 0, 8, 1, 0],
        [9, 8, 1, 0, 0, 0, 0, 0, 6],
        [0, 0, 0, 0, 1, 9, 2, 0, 0],
        [4, 3, 0, 0, 8, 0, 0, 0, 1],
        [0, 9, 0, 0, 5, 0, 6, 0, 0],
        [0, 0, 0, 0, 2, 1, 0, 5, 4]
    ])
    dfs(chessboard, 0)


if __name__ == '__main__':
    main()
| [
"numpy.argwhere",
"numpy.any",
"numpy.array"
] | [((728, 1012), 'numpy.array', 'np.array', (['[[2, 0, 3, 4, 0, 0, 0, 9, 0], [0, 0, 0, 1, 0, 2, 0, 0, 5], [5, 0, 6, 0, 0, \n 0, 1, 0, 0], [0, 2, 0, 5, 0, 0, 8, 1, 0], [9, 8, 1, 0, 0, 0, 0, 0, 6],\n [0, 0, 0, 0, 1, 9, 2, 0, 0], [4, 3, 0, 0, 8, 0, 0, 0, 1], [0, 9, 0, 0, \n 5, 0, 6, 0, 0], [0, 0, 0, 0, 2, 1, 0, 5, 4]]'], {}), '([[2, 0, 3, 4, 0, 0, 0, 9, 0], [0, 0, 0, 1, 0, 2, 0, 0, 5], [5, 0, \n 6, 0, 0, 0, 1, 0, 0], [0, 2, 0, 5, 0, 0, 8, 1, 0], [9, 8, 1, 0, 0, 0, 0,\n 0, 6], [0, 0, 0, 0, 1, 9, 2, 0, 0], [4, 3, 0, 0, 8, 0, 0, 0, 1], [0, 9,\n 0, 0, 5, 0, 6, 0, 0], [0, 0, 0, 0, 2, 1, 0, 5, 4]])\n', (736, 1012), True, 'import numpy as np\n'), ((1157, 1185), 'numpy.argwhere', 'np.argwhere', (['(chessboard == 0)'], {}), '(chessboard == 0)\n', (1168, 1185), True, 'import numpy as np\n'), ((54, 69), 'numpy.any', 'np.any', (['(cb == 0)'], {}), '(cb == 0)\n', (60, 69), True, 'import numpy as np\n'), ((165, 185), 'numpy.argwhere', 'np.argwhere', (['(cb == 0)'], {}), '(cb == 0)\n', (176, 185), True, 'import numpy as np\n')] |
"""
Demo script to showcase the functionality of the multi-task Bayesian neural network
implementation.
"""
import os
from sys import int_info
import warnings
import numpy as np
import pyro
import wandb
from matplotlib import pyplot as plt
from wandb.sdk.wandb_init import init
from mtbnn.mtbnn import MultiTaskBayesianNeuralNetwork
from metalearning_benchmarks.util import normalize_benchmark
from mtbnn.plotting import plot_distributions, plot_metrics, plot_predictions
from mtutils.mtutils import BM_DICT, collate_data, norm_area_under_curve
from mtutils.mtutils import print_headline_string as prinths
from mtutils.mtutils import print_pyro_parameters, split_tasks, summarize_samples
def run_experiment(
    config,
    wandb_run,
):
    """Run one full MTBNN experiment and log everything to wandb.

    Steps: build meta/test benchmarks, (optionally) meta-train the
    multi-task BNN, adapt it to the test tasks for each context size in
    ``config["n_contexts_pred"]``, record marginal log-likelihoods, and
    (optionally) log diagnostic plots.

    Fixes vs. previous revision: the two latent-distribution plotting
    stanzas (weights/biases) were copy-pasted, differing only in the site
    index and log-key suffix; they are now a single loop.

    Args:
        config: dict-like experiment configuration (see ``main`` for keys).
        wandb_run: active wandb run used for metric/figure logging.
    """
    ## define metrics for wandb logging
    wandb_run.define_metric(name="meta_train/epoch")
    wandb_run.define_metric(name="meta_train/*", step_metric="meta_train/epoch")
    wandb_run.define_metric(name="adapt/epoch")
    wandb_run.define_metric(name="adapt/*", step_metric="adapt/epoch")
    wandb_run.define_metric(name="eval/n_context")
    wandb_run.define_metric(name="eval/*", step_metric="eval/n_context")

    ## seeding
    pyro.set_rng_seed(config["seed_pyro"])

    ## create benchmarks
    # meta benchmark
    bm_meta = BM_DICT[config["bm"]](
        n_task=config["n_tasks_meta"],
        n_datapoints_per_task=config["n_points_per_task_meta"],
        output_noise=config["noise_stddev"],
        seed_task=config["seed_offset_train"],
        seed_x=config["seed_offset_train"] + 1,
        seed_noise=config["seed_offset_train"] + 2,
    )
    if config["normalize_bm"]:
        bm_meta = normalize_benchmark(benchmark=bm_meta)
    x_meta, y_meta = collate_data(bm=bm_meta)
    # prediction grid: task x-range widened by 10% on both sides
    x_pred_meta = np.linspace(
        bm_meta.x_bounds[0, 0]
        - 0.1 * (bm_meta.x_bounds[0, 1] - bm_meta.x_bounds[0, 0]),
        bm_meta.x_bounds[0, 1]
        + 0.1 * (bm_meta.x_bounds[0, 1] - bm_meta.x_bounds[0, 0]),
        config["n_points_pred"],
    )[None, :, None].repeat(config["n_tasks_meta"], axis=0)
    # test benchmark
    bm_test = BM_DICT[config["bm"]](
        n_task=config["n_tasks_test"],
        n_datapoints_per_task=config["n_points_per_task_test"],
        output_noise=config["noise_stddev"],
        seed_task=config["seed_offset_test"],
        seed_x=config["seed_offset_test"] + 1,
        seed_noise=config["seed_offset_test"] + 2,
    )
    if config["normalize_bm"]:
        bm_test = normalize_benchmark(benchmark=bm_test)
    x_test, y_test = collate_data(bm=bm_test)
    x_pred_test = np.linspace(
        bm_test.x_bounds[0, 0]
        - 0.1 * (bm_test.x_bounds[0, 1] - bm_test.x_bounds[0, 0]),
        bm_test.x_bounds[0, 1]
        + 0.1 * (bm_test.x_bounds[0, 1] - bm_test.x_bounds[0, 0]),
        config["n_points_pred"],
    )[None, :, None].repeat(config["n_tasks_test"], axis=0)

    ## create model
    # "fixed" prior means: no meta training, use a factorized normal prior
    if config["prior_type"] == "fixed":
        do_meta_training = False
        prior_type = "factorized_normal"
    else:
        do_meta_training = True
        prior_type = config["prior_type"]
    mtbnn = MultiTaskBayesianNeuralNetwork(
        d_x=bm_meta.d_x,
        d_y=bm_meta.d_y,
        n_hidden=config["n_hidden"],
        d_hidden=config["d_hidden"],
        noise_stddev=None if config["infer_noise_stddev"] else config["noise_stddev"],
        prior_type=prior_type,
        prior_init=config["prior_init"],
        posterior_init=config["posterior_init"],
    )

    ## obtain predictions on meta data before meta training
    samples_prior_meta_untrained = mtbnn.predict(
        x=x_pred_meta, n_samples=config["n_samples_pred"], guide=None
    )
    pred_summary_prior_meta_untrained = summarize_samples(
        samples=samples_prior_meta_untrained
    )

    ## print prior parameters
    prinths("Pyro Parameters (before meta training)")
    print_pyro_parameters()

    ## meta training
    prinths("Performing Meta Training...")
    if do_meta_training:
        learning_curve_meta, guide_meta = mtbnn.meta_train(
            x=x_meta,
            y=y_meta,
            n_epochs=config["n_epochs"],
            initial_lr=config["initial_lr"],
            final_lr=config["final_lr"],
            alpha_reg=config["alpha_reg"],
            wandb_run=wandb_run,
        )
    else:
        print("No meta training performed!")
        learning_curve_meta, guide_meta = None, None

    ## save model
    # with open("model.onnx", "wb") as f:
    #     mtbnn.export_onnx(f=f)
    # wandb_run.save("model.onnx")

    ## print learned parameters
    prinths("Pyro Parameters (after meta training)")
    print_pyro_parameters()

    ## obtain predictions on meta data after training
    # obtain prior predictions
    samples_prior_meta_trained = mtbnn.predict(
        x=x_pred_meta, n_samples=config["n_samples_pred"], guide=None
    )
    pred_summary_prior_meta_trained = summarize_samples(
        samples=samples_prior_meta_trained
    )
    # obtain posterior predictions
    samples_posterior_meta = mtbnn.predict(
        x=x_pred_meta, n_samples=config["n_samples_pred"], guide=guide_meta
    )
    pred_summary_posterior_meta = summarize_samples(samples=samples_posterior_meta)
    # print freezed parameters
    prinths("Freezed Pyro Parameters (before adaptation)")
    print_pyro_parameters()

    ## do inference on test task
    lls = np.zeros(len(config["n_contexts_pred"]))
    lls_context = np.zeros(len(config["n_contexts_pred"]))
    pred_summaries_posteriors_test, samples_posteriors_test = [], []
    learning_curves_test = []
    for i, n_context in enumerate(config["n_contexts_pred"]):
        prinths(f"Adapting to test tasks (n_context = {n_context:3d})...")
        x_context, y_context, x_target, y_target = split_tasks(
            x=x_test, y=y_test, n_context=n_context
        )
        lc, guide_test = mtbnn.adapt(
            x=x_context,
            y=y_context,
            n_epochs=config["n_epochs"],
            initial_lr=config["initial_lr"],
            final_lr=config["final_lr"],
            wandb_run=wandb_run,
        )
        learning_curves_test.append(lc)
        # marginal log-likelihood on held-out (target) and context points
        lls[i] = mtbnn.marginal_log_likelihood(
            x=x_target,
            y=y_target,
            n_samples=config["n_samples_pred"],
            guide=guide_test,
        )
        lls_context[i] = mtbnn.marginal_log_likelihood(
            x=x_context,
            y=y_context,
            n_samples=config["n_samples_pred"],
            guide=guide_test,
        )
        cur_samples_posterior_test = mtbnn.predict(
            x=x_pred_test,
            n_samples=config["n_samples_pred"],
            guide=guide_test,
        )
        cur_pred_summary_posterior_test = summarize_samples(
            samples=cur_samples_posterior_test
        )
        pred_summaries_posteriors_test.append(cur_pred_summary_posterior_test)
        samples_posteriors_test.append(cur_samples_posterior_test)
        wandb_run.log(
            {
                "eval/n_context": n_context,
                "eval/marg_ll_target": lls[i],
                "eval/marg_ll_context": lls_context[i],
            }
        )
    wandb_run.summary["eval/marg_ll_target_mean"] = norm_area_under_curve(
        x=config["n_contexts_pred"], y=lls
    )
    wandb_run.summary["eval/marg_ll_context_mean"] = norm_area_under_curve(
        x=config["n_contexts_pred"], y=lls_context
    )
    prinths("Freezed Pyro Parameters (after adaptation)")
    print_pyro_parameters()

    # plot predictions
    if config["plot"]:
        fig = plot_metrics(
            learning_curve_meta=learning_curve_meta,
            learning_curves_test=learning_curves_test,
            lls=lls,
            lls_context=lls_context,
            n_contexts=config["n_contexts_pred"],
        )
        fig = plot_predictions(
            x_meta=x_meta,
            y_meta=y_meta,
            x_pred_meta=x_pred_meta,
            x_test=x_test,
            y_test=y_test,
            x_pred_test=x_pred_test,
            n_contexts_test=config["n_contexts_pred"],
            pred_summary_prior_meta_untrained=pred_summary_prior_meta_untrained,
            pred_summary_prior_meta_trained=pred_summary_prior_meta_trained,
            pred_summary_posterior_meta=pred_summary_posterior_meta,
            pred_summaries_posterior_test=pred_summaries_posteriors_test,
            max_tasks=config["max_tasks_plot"],
            n_contexts_plot=config["n_contexts_plot"],
        )
        # wandb_run.log({"predictions_plotly": fig})
        wandb_run.log({"predictions_png": wandb.Image(fig)})
        if config["n_hidden"] == 0:
            # plot prior and posterior distributions
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                # The two latent sites of "_wb" (index 0 and 1) are plotted
                # with identical settings -> one loop instead of two
                # copy-pasted stanzas.
                for site_idx, site_label in ((0, "w"), (1, "b")):
                    if isinstance(bm_meta, BM_DICT["Affine1D"]):
                        bm_meta_params = np.zeros(config["n_tasks_meta"])
                        bm_test_params = np.zeros(config["n_tasks_test"])
                        for l, task in enumerate(bm_meta):
                            bm_meta_params[l] = task.param[site_idx]
                        for l, task in enumerate(bm_test):
                            bm_test_params[l] = task.param[site_idx]
                    else:
                        bm_meta_params, bm_test_params = None, None
                    fig = plot_distributions(
                        site_name="_wb",
                        site_idx=site_idx,
                        bm_meta_params=bm_meta_params,
                        bm_test_params=bm_test_params,
                        samples_prior_meta_untrained=samples_prior_meta_untrained,
                        samples_prior_meta_trained=samples_prior_meta_trained,
                        samples_posterior_meta=samples_posterior_meta,
                        samples_posteriors_test=samples_posteriors_test,
                        n_contexts_test=config["n_contexts_pred"],
                        max_tasks=config["max_tasks_plot"],
                        n_contexts_plot=config["n_contexts_plot"],
                    )
                    # wandb_run.log({f"latent_distribution_{site_label}_plotly": fig})
                    wandb_run.log(
                        {f"latent_distribution_{site_label}_png": wandb.Image(fig)}
                    )
        if wandb_run.mode == "disabled":
            plt.show()
def main():
    """Entry point: read env flags, assemble the experiment config, and run.
    Environment variables:
        WANDB_MODE: wandb logging mode (default "disabled").
        SMOKE_TEST: if the string "True", use reduced epoch/sample counts.
    """
    ## config
    wandb_mode = os.getenv("WANDB_MODE", "disabled")
    smoke_test = os.getenv("SMOKE_TEST", "False") == "True"
    print(f"wandb_mode={wandb_mode}")
    print(f"smoke_test={smoke_test}")
    config = dict(
        model="MTBNN",
        seed_pyro=123,
        # benchmarks
        bm="Affine1D",
        noise_stddev=0.01,
        n_tasks_meta=8,
        n_points_per_task_meta=16,
        n_tasks_test=128,
        n_points_per_task_test=128,
        seed_offset_train=1234,
        seed_offset_test=1235,
        normalize_bm=True,
        # model
        n_hidden=1,
        d_hidden=8,
        infer_noise_stddev=True,
        prior_type="fixed",
        # allow pyro-standard variational posterior init during meta training
        # + setting it to prior for adaptation?
        prior_init="standard_normal",
        posterior_init="set_to_prior",
        # training
        n_epochs=5000 if not smoke_test else 100,
        initial_lr=0.1,
        final_lr=0.00001,
        alpha_reg=0.0,
        n_points_pred=100,
        n_samples_pred=1000 if not smoke_test else 100,
        # evaluation
        # NOTE(review): both branches of this conditional are identical, so
        # smoke_test currently has no effect on n_contexts_pred — confirm intent.
        n_contexts_pred=(
            [0, 5, 10, 50, 128]
            if not smoke_test
            else [0, 5, 10, 50, 128]
        ),
        # plot
        plot=True,
        max_tasks_plot=4,
        n_contexts_plot=[0, 5, 10, 50, 128],
    )
    if wandb_mode != "disabled":
        wandb.login()
    # wandb.init is a no-op sink when mode == "disabled"
    with wandb.init(project="mtbnn_v0", mode=wandb_mode, config=config) as wandb_run:
        config = wandb_run.config
        run_experiment(config=config, wandb_run=wandb_run)
if __name__ == "__main__":
    main()
| [
"mtbnn.plotting.plot_predictions",
"mtutils.mtutils.split_tasks",
"metalearning_benchmarks.util.normalize_benchmark",
"warnings.simplefilter",
"mtutils.mtutils.norm_area_under_curve",
"warnings.catch_warnings",
"numpy.linspace",
"wandb.login",
"matplotlib.pyplot.show",
"os.getenv",
"mtutils.mtut... | [((1180, 1218), 'pyro.set_rng_seed', 'pyro.set_rng_seed', (["config['seed_pyro']"], {}), "(config['seed_pyro'])\n", (1197, 1218), False, 'import pyro\n'), ((1713, 1737), 'mtutils.mtutils.collate_data', 'collate_data', ([], {'bm': 'bm_meta'}), '(bm=bm_meta)\n', (1725, 1737), False, 'from mtutils.mtutils import BM_DICT, collate_data, norm_area_under_curve\n'), ((2523, 2547), 'mtutils.mtutils.collate_data', 'collate_data', ([], {'bm': 'bm_test'}), '(bm=bm_test)\n', (2535, 2547), False, 'from mtutils.mtutils import BM_DICT, collate_data, norm_area_under_curve\n'), ((3099, 3416), 'mtbnn.mtbnn.MultiTaskBayesianNeuralNetwork', 'MultiTaskBayesianNeuralNetwork', ([], {'d_x': 'bm_meta.d_x', 'd_y': 'bm_meta.d_y', 'n_hidden': "config['n_hidden']", 'd_hidden': "config['d_hidden']", 'noise_stddev': "(None if config['infer_noise_stddev'] else config['noise_stddev'])", 'prior_type': 'prior_type', 'prior_init': "config['prior_init']", 'posterior_init': "config['posterior_init']"}), "(d_x=bm_meta.d_x, d_y=bm_meta.d_y, n_hidden=\n config['n_hidden'], d_hidden=config['d_hidden'], noise_stddev=None if\n config['infer_noise_stddev'] else config['noise_stddev'], prior_type=\n prior_type, prior_init=config['prior_init'], posterior_init=config[\n 'posterior_init'])\n", (3129, 3416), False, 'from mtbnn.mtbnn import MultiTaskBayesianNeuralNetwork\n'), ((3696, 3751), 'mtutils.mtutils.summarize_samples', 'summarize_samples', ([], {'samples': 'samples_prior_meta_untrained'}), '(samples=samples_prior_meta_untrained)\n', (3713, 3751), False, 'from mtutils.mtutils import print_pyro_parameters, split_tasks, summarize_samples\n'), ((3801, 3850), 'mtutils.mtutils.print_headline_string', 'prinths', (['"""Pyro Parameters (before meta training)"""'], {}), "('Pyro Parameters (before meta training)')\n", (3808, 3850), True, 'from mtutils.mtutils import print_headline_string as prinths\n'), ((3855, 3878), 'mtutils.mtutils.print_pyro_parameters', 'print_pyro_parameters', ([], {}), '()\n', 
(3876, 3878), False, 'from mtutils.mtutils import print_pyro_parameters, split_tasks, summarize_samples\n'), ((3905, 3943), 'mtutils.mtutils.print_headline_string', 'prinths', (['"""Performing Meta Training..."""'], {}), "('Performing Meta Training...')\n", (3912, 3943), True, 'from mtutils.mtutils import print_headline_string as prinths\n'), ((4560, 4608), 'mtutils.mtutils.print_headline_string', 'prinths', (['"""Pyro Parameters (after meta training)"""'], {}), "('Pyro Parameters (after meta training)')\n", (4567, 4608), True, 'from mtutils.mtutils import print_headline_string as prinths\n'), ((4613, 4636), 'mtutils.mtutils.print_pyro_parameters', 'print_pyro_parameters', ([], {}), '()\n', (4634, 4636), False, 'from mtutils.mtutils import print_pyro_parameters, split_tasks, summarize_samples\n'), ((4885, 4938), 'mtutils.mtutils.summarize_samples', 'summarize_samples', ([], {'samples': 'samples_prior_meta_trained'}), '(samples=samples_prior_meta_trained)\n', (4902, 4938), False, 'from mtutils.mtutils import print_pyro_parameters, split_tasks, summarize_samples\n'), ((5148, 5197), 'mtutils.mtutils.summarize_samples', 'summarize_samples', ([], {'samples': 'samples_posterior_meta'}), '(samples=samples_posterior_meta)\n', (5165, 5197), False, 'from mtutils.mtutils import print_pyro_parameters, split_tasks, summarize_samples\n'), ((5234, 5288), 'mtutils.mtutils.print_headline_string', 'prinths', (['"""Freezed Pyro Parameters (before adaptation)"""'], {}), "('Freezed Pyro Parameters (before adaptation)')\n", (5241, 5288), True, 'from mtutils.mtutils import print_headline_string as prinths\n'), ((5293, 5316), 'mtutils.mtutils.print_pyro_parameters', 'print_pyro_parameters', ([], {}), '()\n', (5314, 5316), False, 'from mtutils.mtutils import print_pyro_parameters, split_tasks, summarize_samples\n'), ((7191, 7248), 'mtutils.mtutils.norm_area_under_curve', 'norm_area_under_curve', ([], {'x': "config['n_contexts_pred']", 'y': 'lls'}), "(x=config['n_contexts_pred'], y=lls)\n", 
(7212, 7248), False, 'from mtutils.mtutils import BM_DICT, collate_data, norm_area_under_curve\n'), ((7316, 7381), 'mtutils.mtutils.norm_area_under_curve', 'norm_area_under_curve', ([], {'x': "config['n_contexts_pred']", 'y': 'lls_context'}), "(x=config['n_contexts_pred'], y=lls_context)\n", (7337, 7381), False, 'from mtutils.mtutils import BM_DICT, collate_data, norm_area_under_curve\n'), ((7401, 7454), 'mtutils.mtutils.print_headline_string', 'prinths', (['"""Freezed Pyro Parameters (after adaptation)"""'], {}), "('Freezed Pyro Parameters (after adaptation)')\n", (7408, 7454), True, 'from mtutils.mtutils import print_headline_string as prinths\n'), ((7459, 7482), 'mtutils.mtutils.print_pyro_parameters', 'print_pyro_parameters', ([], {}), '()\n', (7480, 7482), False, 'from mtutils.mtutils import print_pyro_parameters, split_tasks, summarize_samples\n'), ((11606, 11641), 'os.getenv', 'os.getenv', (['"""WANDB_MODE"""', '"""disabled"""'], {}), "('WANDB_MODE', 'disabled')\n", (11615, 11641), False, 'import os\n'), ((1653, 1691), 'metalearning_benchmarks.util.normalize_benchmark', 'normalize_benchmark', ([], {'benchmark': 'bm_meta'}), '(benchmark=bm_meta)\n', (1672, 1691), False, 'from metalearning_benchmarks.util import normalize_benchmark\n'), ((2463, 2501), 'metalearning_benchmarks.util.normalize_benchmark', 'normalize_benchmark', ([], {'benchmark': 'bm_test'}), '(benchmark=bm_test)\n', (2482, 2501), False, 'from metalearning_benchmarks.util import normalize_benchmark\n'), ((5630, 5696), 'mtutils.mtutils.print_headline_string', 'prinths', (['f"""Adapting to test tasks (n_context = {n_context:3d})..."""'], {}), "(f'Adapting to test tasks (n_context = {n_context:3d})...')\n", (5637, 5696), True, 'from mtutils.mtutils import print_headline_string as prinths\n'), ((5748, 5800), 'mtutils.mtutils.split_tasks', 'split_tasks', ([], {'x': 'x_test', 'y': 'y_test', 'n_context': 'n_context'}), '(x=x_test, y=y_test, n_context=n_context)\n', (5759, 5800), False, 'from 
mtutils.mtutils import print_pyro_parameters, split_tasks, summarize_samples\n'), ((6708, 6761), 'mtutils.mtutils.summarize_samples', 'summarize_samples', ([], {'samples': 'cur_samples_posterior_test'}), '(samples=cur_samples_posterior_test)\n', (6725, 6761), False, 'from mtutils.mtutils import print_pyro_parameters, split_tasks, summarize_samples\n'), ((7544, 7722), 'mtbnn.plotting.plot_metrics', 'plot_metrics', ([], {'learning_curve_meta': 'learning_curve_meta', 'learning_curves_test': 'learning_curves_test', 'lls': 'lls', 'lls_context': 'lls_context', 'n_contexts': "config['n_contexts_pred']"}), "(learning_curve_meta=learning_curve_meta, learning_curves_test=\n learning_curves_test, lls=lls, lls_context=lls_context, n_contexts=\n config['n_contexts_pred'])\n", (7556, 7722), False, 'from mtbnn.plotting import plot_distributions, plot_metrics, plot_predictions\n'), ((7798, 8332), 'mtbnn.plotting.plot_predictions', 'plot_predictions', ([], {'x_meta': 'x_meta', 'y_meta': 'y_meta', 'x_pred_meta': 'x_pred_meta', 'x_test': 'x_test', 'y_test': 'y_test', 'x_pred_test': 'x_pred_test', 'n_contexts_test': "config['n_contexts_pred']", 'pred_summary_prior_meta_untrained': 'pred_summary_prior_meta_untrained', 'pred_summary_prior_meta_trained': 'pred_summary_prior_meta_trained', 'pred_summary_posterior_meta': 'pred_summary_posterior_meta', 'pred_summaries_posterior_test': 'pred_summaries_posteriors_test', 'max_tasks': "config['max_tasks_plot']", 'n_contexts_plot': "config['n_contexts_plot']"}), "(x_meta=x_meta, y_meta=y_meta, x_pred_meta=x_pred_meta,\n x_test=x_test, y_test=y_test, x_pred_test=x_pred_test, n_contexts_test=\n config['n_contexts_pred'], pred_summary_prior_meta_untrained=\n pred_summary_prior_meta_untrained, pred_summary_prior_meta_trained=\n pred_summary_prior_meta_trained, pred_summary_posterior_meta=\n pred_summary_posterior_meta, pred_summaries_posterior_test=\n pred_summaries_posteriors_test, max_tasks=config['max_tasks_plot'],\n 
n_contexts_plot=config['n_contexts_plot'])\n", (7814, 8332), False, 'from mtbnn.plotting import plot_distributions, plot_metrics, plot_predictions\n'), ((11659, 11691), 'os.getenv', 'os.getenv', (['"""SMOKE_TEST"""', '"""False"""'], {}), "('SMOKE_TEST', 'False')\n", (11668, 11691), False, 'import os\n'), ((12981, 12994), 'wandb.login', 'wandb.login', ([], {}), '()\n', (12992, 12994), False, 'import wandb\n'), ((13004, 13066), 'wandb.init', 'wandb.init', ([], {'project': '"""mtbnn_v0"""', 'mode': 'wandb_mode', 'config': 'config'}), "(project='mtbnn_v0', mode=wandb_mode, config=config)\n", (13014, 13066), False, 'import wandb\n'), ((11550, 11560), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (11558, 11560), True, 'from matplotlib import pyplot as plt\n'), ((1756, 1965), 'numpy.linspace', 'np.linspace', (['(bm_meta.x_bounds[0, 0] - 0.1 * (bm_meta.x_bounds[0, 1] - bm_meta.x_bounds[\n 0, 0]))', '(bm_meta.x_bounds[0, 1] + 0.1 * (bm_meta.x_bounds[0, 1] - bm_meta.x_bounds[\n 0, 0]))', "config['n_points_pred']"], {}), "(bm_meta.x_bounds[0, 0] - 0.1 * (bm_meta.x_bounds[0, 1] -\n bm_meta.x_bounds[0, 0]), bm_meta.x_bounds[0, 1] + 0.1 * (bm_meta.\n x_bounds[0, 1] - bm_meta.x_bounds[0, 0]), config['n_points_pred'])\n", (1767, 1965), True, 'import numpy as np\n'), ((2566, 2775), 'numpy.linspace', 'np.linspace', (['(bm_test.x_bounds[0, 0] - 0.1 * (bm_test.x_bounds[0, 1] - bm_test.x_bounds[\n 0, 0]))', '(bm_test.x_bounds[0, 1] + 0.1 * (bm_test.x_bounds[0, 1] - bm_test.x_bounds[\n 0, 0]))', "config['n_points_pred']"], {}), "(bm_test.x_bounds[0, 0] - 0.1 * (bm_test.x_bounds[0, 1] -\n bm_test.x_bounds[0, 0]), bm_test.x_bounds[0, 1] + 0.1 * (bm_test.\n x_bounds[0, 1] - bm_test.x_bounds[0, 0]), config['n_points_pred'])\n", (2577, 2775), True, 'import numpy as np\n'), ((8562, 8578), 'wandb.Image', 'wandb.Image', (['fig'], {}), '(fig)\n', (8573, 8578), False, 'import wandb\n'), ((8688, 8713), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (8711, 8713), 
False, 'import warnings\n'), ((8731, 8762), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (8752, 8762), False, 'import warnings\n'), ((9298, 9770), 'mtbnn.plotting.plot_distributions', 'plot_distributions', ([], {'site_name': '"""_wb"""', 'site_idx': '(0)', 'bm_meta_params': 'bm_meta_params', 'bm_test_params': 'bm_test_params', 'samples_prior_meta_untrained': 'samples_prior_meta_untrained', 'samples_prior_meta_trained': 'samples_prior_meta_trained', 'samples_posterior_meta': 'samples_posterior_meta', 'samples_posteriors_test': 'samples_posteriors_test', 'n_contexts_test': "config['n_contexts_pred']", 'max_tasks': "config['max_tasks_plot']", 'n_contexts_plot': "config['n_contexts_plot']"}), "(site_name='_wb', site_idx=0, bm_meta_params=\n bm_meta_params, bm_test_params=bm_test_params,\n samples_prior_meta_untrained=samples_prior_meta_untrained,\n samples_prior_meta_trained=samples_prior_meta_trained,\n samples_posterior_meta=samples_posterior_meta, samples_posteriors_test=\n samples_posteriors_test, n_contexts_test=config['n_contexts_pred'],\n max_tasks=config['max_tasks_plot'], n_contexts_plot=config[\n 'n_contexts_plot'])\n", (9316, 9770), False, 'from mtbnn.plotting import plot_distributions, plot_metrics, plot_predictions\n'), ((10665, 11137), 'mtbnn.plotting.plot_distributions', 'plot_distributions', ([], {'site_name': '"""_wb"""', 'site_idx': '(1)', 'bm_meta_params': 'bm_meta_params', 'bm_test_params': 'bm_test_params', 'samples_prior_meta_untrained': 'samples_prior_meta_untrained', 'samples_prior_meta_trained': 'samples_prior_meta_trained', 'samples_posterior_meta': 'samples_posterior_meta', 'samples_posteriors_test': 'samples_posteriors_test', 'n_contexts_test': "config['n_contexts_pred']", 'max_tasks': "config['max_tasks_plot']", 'n_contexts_plot': "config['n_contexts_plot']"}), "(site_name='_wb', site_idx=1, bm_meta_params=\n bm_meta_params, bm_test_params=bm_test_params,\n 
samples_prior_meta_untrained=samples_prior_meta_untrained,\n samples_prior_meta_trained=samples_prior_meta_trained,\n samples_posterior_meta=samples_posterior_meta, samples_posteriors_test=\n samples_posteriors_test, n_contexts_test=config['n_contexts_pred'],\n max_tasks=config['max_tasks_plot'], n_contexts_plot=config[\n 'n_contexts_plot'])\n", (10683, 11137), False, 'from mtbnn.plotting import plot_distributions, plot_metrics, plot_predictions\n'), ((8861, 8893), 'numpy.zeros', 'np.zeros', (["config['n_tasks_meta']"], {}), "(config['n_tasks_meta'])\n", (8869, 8893), True, 'import numpy as np\n'), ((8931, 8963), 'numpy.zeros', 'np.zeros', (["config['n_tasks_test']"], {}), "(config['n_tasks_test'])\n", (8939, 8963), True, 'import numpy as np\n'), ((10228, 10260), 'numpy.zeros', 'np.zeros', (["config['n_tasks_meta']"], {}), "(config['n_tasks_meta'])\n", (10236, 10260), True, 'import numpy as np\n'), ((10298, 10330), 'numpy.zeros', 'np.zeros', (["config['n_tasks_test']"], {}), "(config['n_tasks_test'])\n", (10306, 10330), True, 'import numpy as np\n'), ((10110, 10126), 'wandb.Image', 'wandb.Image', (['fig'], {}), '(fig)\n', (10121, 10126), False, 'import wandb\n'), ((11477, 11493), 'wandb.Image', 'wandb.Image', (['fig'], {}), '(fig)\n', (11488, 11493), False, 'import wandb\n')] |
"""
Let T(n) be the number of tours over a 4 × n playing board such that:
The tour starts in the top left corner.
The tour consists of moves that are up, down, left, or right one square.
The tour visits each square exactly once.
The tour ends in the bottom left corner.
The diagram shows one tour over a 4 × 10 board:
T(10) is 2329. What is T(10**12) modulo 10**8?
ans: 15836928
"""
"""
Solution Method:
1. Catalogue all possible 4x2 path "slices"
2. Encode how the left side connects to the right side in an adjacency matrix
3. Raise adj. matrix to n/2-th power
The following illustrates how a 4x2 slice is encoded as a 4x2 matrix:
1 indicates a path entering or exiting the slice at the square.
2 indicates a "loop back".
The distinction between 1s and 2s stops path "islands" and premature loops from forming.
1s and 2s on the left will connect to 1s and 2s respectively on the right, with the following exceptions.
A 1 and 2 on the left can connect to each other, the remaining 2 can connect to a 1 on the right.
1s can connect on the left if the right has only 0s.
Examples:
00 _ 01 ___ 10 ___
11 == _| |_ 12 == _| _ 10 == _ |
11 _ _ 12 _ |_ 21 _| |_
00 |_| 01 |___ 21 _____
11 and 11 are unique in that these encodings refer to multiple paths:
00 20
00 20
11 11
_____ _ _ _ _ and _ _ _____
_ |_| | | _| | ___
| | _ |_| ___| _ |
_| |_ _| |_ ___ _____ _| |_
The adjacency matrix then looks at all ways the left side (given by a column of 4 ints) can connect to the right side (again given by a column of 4 ints):
index: 0 1 2 3 4 5 6 7 8 9
col : 1 1 0 2 1 1 1 0 0 0
0 1 0 2 2 1 0 1 1 0
0 0 1 1 2 2 1 0 1 0
1 0 1 1 1 2 0 1 0 0
The 9th column is noteworthy in being all 0s. This indicates the last column of the playing board where all paths need to turn around.
"""
import numpy as np
n = 10**12  # board width whose tour count we want
m = 10**8   # modulus for the final answer
# Transfer (adjacency) matrix between consecutive 4x2 slice states; entry
# (i, j) counts the ways slice state i on the left can connect to slice
# state j on the right (see the derivation in the module docstring).
# The all-zero final row is the terminal state: entry [0, 7] of the
# matrix power gives the tour count (read in main()).
adj_matrix = np.matrix([
    [3,1,1,1,1,1,0,1],
    [1,1,1,0,1,1,1,1],
    [1,1,1,1,1,0,1,1],
    [1,1,1,1,1,0,1,1],
    [2,1,1,1,1,1,0,1],
    [1,1,1,0,1,1,1,1],
    [0,1,1,0,1,0,1,1],
    [0,0,0,0,0,0,0,0]
], np.int64)
def mod_pow(base, exp, mod):
    """Return ``base ** exp`` with every matrix product reduced mod ``mod``.

    Exponentiation by squaring: only O(log exp) matrix products are needed,
    and reducing after each product keeps the entries small.

    Parameters
    ----------
    base : matrix-like supporting ``np.matmul``
    exp : int, positive exponent
    mod : int, modulus applied elementwise after each product

    Raises
    ------
    ValueError
        If ``exp`` is not positive.
    """
    if exp <= 0:
        raise ValueError("exp nonpositive")
    elif exp == 1:
        return base
    elif exp % 2 == 0:
        # Square the half power.  Use floor division so exp stays an int:
        # the original `exp / 2` produced a float under Python 3 and only
        # worked by accident of exact float representation below 2**53.
        tmp = mod_pow(base, exp // 2, mod)
        return np.mod(np.matmul(tmp, tmp), mod)
    else:
        # Odd exponent: peel off one factor of base.
        tmp = mod_pow(base, exp - 1, mod)
        return np.mod(np.matmul(tmp, base), mod)
def main(width, mod = 10000):
    """Return T(width) modulo ``mod``.

    T(width) is entry (0, 7) of adj_matrix raised to the power
    width // 2 (each matrix power step consumes one 4x2 slice,
    i.e. two columns of the board).
    """
    half_width = width // 2
    powered = mod_pow(adj_matrix, half_width, mod)
    return powered[0, 7]
# Sanity checks against known small-board tour counts before the
# full computation (T(4) = 8 and T(10) = 2329 per the problem statement).
print(f"T(4) = {main(4)}")
assert main(4) == 8
print(f"T(10) = {main(10)}")
assert main(10) == 2329
print(f"T({n}) = {main(n, m)}") | [
"numpy.matrix",
"numpy.matmul"
] | [((2079, 2320), 'numpy.matrix', 'np.matrix', (['[[3, 1, 1, 1, 1, 1, 0, 1], [1, 1, 1, 0, 1, 1, 1, 1], [1, 1, 1, 1, 1, 0, 1, \n 1], [1, 1, 1, 1, 1, 0, 1, 1], [2, 1, 1, 1, 1, 1, 0, 1], [1, 1, 1, 0, 1,\n 1, 1, 1], [0, 1, 1, 0, 1, 0, 1, 1], [0, 0, 0, 0, 0, 0, 0, 0]]', 'np.int64'], {}), '([[3, 1, 1, 1, 1, 1, 0, 1], [1, 1, 1, 0, 1, 1, 1, 1], [1, 1, 1, 1,\n 1, 0, 1, 1], [1, 1, 1, 1, 1, 0, 1, 1], [2, 1, 1, 1, 1, 1, 0, 1], [1, 1,\n 1, 0, 1, 1, 1, 1], [0, 1, 1, 0, 1, 0, 1, 1], [0, 0, 0, 0, 0, 0, 0, 0]],\n np.int64)\n', (2088, 2320), True, 'import numpy as np\n'), ((2436, 2455), 'numpy.matmul', 'np.matmul', (['tmp', 'tmp'], {}), '(tmp, tmp)\n', (2445, 2455), True, 'import numpy as np\n'), ((2521, 2541), 'numpy.matmul', 'np.matmul', (['tmp', 'base'], {}), '(tmp, base)\n', (2530, 2541), True, 'import numpy as np\n')] |
import numpy as np
from sklearn.decomposition import PCA
def DAPCA(Xs, Xt, n_components=2):
    """Project the pooled source/target data onto its leading PCA axes.

    The source samples ``Xs`` and target samples ``Xt`` are stacked
    row-wise, a PCA with ``n_components`` components is fitted on the
    pooled data, and the principal axes are returned as the columns of
    the result (shape: n_features x n_components).
    """
    pooled = np.concatenate([Xs, Xt], axis=0)
    fitted = PCA(n_components=n_components).fit(pooled)
    return fitted.components_.T
"sklearn.decomposition.PCA",
"numpy.concatenate"
] | [((140, 172), 'numpy.concatenate', 'np.concatenate', (['[Xs, Xt]'], {'axis': '(0)'}), '([Xs, Xt], axis=0)\n', (154, 172), True, 'import numpy as np\n'), ((105, 135), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': 'n_components'}), '(n_components=n_components)\n', (108, 135), False, 'from sklearn.decomposition import PCA\n')] |
#!/usr/bin/env python2
from __future__ import print_function
import sys
sys.path.append('../lib')
import os
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.path import Path
from matplotlib.patches import PathPatch
import string
import protocols
import model as m
from parameters import simvc, get_qc
from parameters import simvc_fix, simvc_fix_typical_values
from parameters import simvc_typical_values
from releakcorrect import I_releak, score_leak, protocol_leak_check
from scipy.optimize import fmin
# Set seed
np.random.seed(101)
savedir = './figs'
if not os.path.isdir(savedir):
os.makedirs(savedir)
#refcell = 'D19'
def get_fix_param(var, val):
    """
    Build a {name: value} mapping of parameters to fix.

    var: iterable of variable names.
    val: iterable of variable values to fix (paired positionally with
         ``var``; like the original loop, extra entries on either side
         are silently dropped by ``zip``).
    """
    # dict(zip(...)) is the idiomatic equivalent of accumulating
    # out[name] = value in a loop.
    return dict(zip(var, val))
def rmsd_compute(t1, t2):
    # Relative (normalised) RMSD between traces ``t1`` and ``t2``.
    #
    # ``t2`` is the reference trace (usually data) that the result is
    # normalised by; ``t1`` is the trace being compared (usually a
    # simulation).  Returns 0 for identical traces and 1 when ``t1``
    # carries no signal relative to ``t2``.
    residual_power = np.mean((t1 - t2) ** 2)
    reference_power = np.mean(t2 ** 2)
    return np.sqrt(residual_power) / np.sqrt(reference_power)
#
# Protocol info
#
protocol_funcs = {
'staircaseramp': protocols.leak_staircase,
'pharma': protocols.pharma, # during drug application
'apab': 'protocol-apab.csv',
'apabv3': 'protocol-apabv3.csv',
'ap05hz': 'protocol-ap05hz.csv',
'ap1hz': 'protocol-ap1hz.csv',
'ap2hz': 'protocol-ap2hz.csv',
'sactiv': protocols.sactiv,
'sinactiv': protocols.sinactiv,
}
protocol_dir = '../protocol-time-series'
protocol_list = [
'staircaseramp',
# 'sactiv',
# 'sinactiv',
'pharma',
'apab',
'apabv3',
# 'ap05hz',
'ap1hz',
'ap2hz',
]
validation_idx = [
None,
# 1,
# 2,
3,
4,
5,
# 6,
7,
8,
]
# IV protocol special treatment
protocol_iv = [
'sactiv',
'sinactiv',
]
protocol_iv_times = {
'sactiv': protocols.sactiv_times,
'sinactiv': protocols.sinactiv_times,
}
protocol_iv_convert = {
'sactiv': protocols.sactiv_convert,
'sinactiv': protocols.sinactiv_convert,
}
protocol_iv_args = {
'sactiv': protocols.sactiv_iv_arg,
'sinactiv': protocols.sinactiv_iv_arg,
}
protocol_iv_v = {
'sactiv': protocols.sactiv_v,
'sinactiv': protocols.sinactiv_v,
}
data_dir_staircase = '../data'
data_dir = '../data-autoLC'
file_dir = '../../hERGRapidCharacterisation/room-temperature-only/out'
file_dir2 = './out'
file_list = [
'herg25oc1',
]
temperatures = np.array([25.0])
temperatures += 273.15 # in K
fit_seed = '542811797'
fit_seed2 = '717354021'
file_name = file_list[0]
temperature = temperatures[0]
# Load RMSD matrix
rmsd_matrix_file = '../../hERGRapidCharacterisation/room-temperature-only/figs/rmsd-hist-%s-autoLC-releak/rmsd-matrix.txt' \
% file_name
rmsd_cells_file = '../../hERGRapidCharacterisation/room-temperature-only/figs/rmsd-hist-%s-autoLC-releak/rmsd-matrix-cells.txt' \
% file_name
rmsd_matrix = np.loadtxt(rmsd_matrix_file)
with open(rmsd_matrix_file, 'r') as f:
rmsd_prt = f.readline().strip('\n').strip('#').split()
rmsd_cells = []
with open(rmsd_cells_file, 'r') as f:
for l in f:
if not l.startswith('#'):
rmsd_cells.append(l.strip('\n').split('-')[1])
rmsd_matrix_file2 = './out/rmsd-hist-%s-fixkinetics-simvclinleak-scheme3/rmsd-matrix.txt' \
% file_name
rmsd_cells_file2 = './out/rmsd-hist-%s-fixkinetics-simvclinleak-scheme3/rmsd-matrix-cells.txt' \
% file_name
rmsd_matrix2 = np.loadtxt(rmsd_matrix_file2)
with open(rmsd_matrix_file2, 'r') as f:
rmsd_prt2 = f.readline().strip('\n').strip('#').split()
rmsd_cells2 = []
with open(rmsd_cells_file2, 'r') as f:
for l in f:
if not l.startswith('#'):
rmsd_cells2.append(l.strip('\n').split('-')[1])
rankedlabels = [r'$*$',
u'\u2021',
r'#',
u'\u2666']
#
# Do a very very tailored version........ :(
#
fig = plt.figure(figsize=(16, 15))
bigxgap = 12
n_xgrid = 84
bigygap = 5
n_ygrid = 31
grid = plt.GridSpec(2 * n_ygrid + 1 * bigygap, 3 * n_xgrid + 2 * bigxgap,
hspace=0.0, wspace=0.0)
axes = np.empty([10, int(len(protocol_list) / 2)], dtype=object)
# long list here:
for i in range(int(len(protocol_list) / 2)):
i_grid = i * (n_xgrid + bigxgap)
f_grid = (i + 1) * n_xgrid + i * bigxgap
# First 'row'
axes[0, i] = fig.add_subplot(grid[0:3, i_grid:f_grid])
axes[0, i].set_xticklabels([])
axes[1, i] = fig.add_subplot(grid[3:9, i_grid:f_grid])
axes[1, i].set_xticklabels([])
axes[2, i] = fig.add_subplot(grid[9:15, i_grid:f_grid])
axes[2, i].set_xticklabels([])
axes[3, i] = fig.add_subplot(grid[15:21, i_grid:f_grid])
# Histogram
axes[4, i] = fig.add_subplot(grid[24:31, i_grid:f_grid])
# Second 'row'
n_shift = n_ygrid + bigygap
axes[5, i] = fig.add_subplot(grid[n_shift+0:n_shift+3, i_grid:f_grid])
axes[5, i].set_xticklabels([])
axes[6, i] = fig.add_subplot(grid[n_shift+3:n_shift+9, i_grid:f_grid])
axes[6, i].set_xticklabels([])
axes[7, i] = fig.add_subplot(grid[n_shift+9:n_shift+15, i_grid:f_grid])
axes[7, i].set_xticklabels([])
axes[8, i] = fig.add_subplot(grid[n_shift+15:n_shift+21, i_grid:f_grid])
# Histogram
axes[9, i] = fig.add_subplot(grid[n_shift+24:n_shift+31, i_grid:f_grid])
# Set x-labels
axes[3, i].set_xlabel('Time (s)', fontsize=14)
axes[4, i].set_xlabel('RRMSE', fontsize=14)
axes[8, i].set_xlabel('Time (s)', fontsize=14)
axes[9, i].set_xlabel('RRMSE', fontsize=14)
# Set labels
axes[0, 0].set_ylabel('Voltage\n(mV)', fontsize=14)
axes[1, 0].set_ylabel(u'Best\n(*)', fontsize=14, color='#d95f02')
axes[2, 0].set_ylabel(u'Median\n(\u2021)', fontsize=14, color='#d95f02')
axes[3, 0].set_ylabel(u'90%ile\n(#)', fontsize=14, color='#d95f02')
axes[4, 0].set_ylabel('Frequency\n(N=%s)' % len(rmsd_cells), fontsize=14)
axes[5, 0].set_ylabel('Voltage\n(mV)', fontsize=14)
axes[6, 0].set_ylabel(u'Best\n(*)', fontsize=14, color='#d95f02')
axes[7, 0].set_ylabel(u'Median\n(\u2021)', fontsize=14, color='#d95f02')
axes[8, 0].set_ylabel(u'90%ile\n(#)', fontsize=14, color='#d95f02')
axes[9, 0].set_ylabel('Frequency\n(N=%s)' % len(rmsd_cells), fontsize=14)
axes[2, 0].text(-0.3, 0.5, 'Current (pA)', rotation=90, fontsize=18,
transform=axes[2, 0].transAxes, ha='center', va='center')
axes[7, 0].text(-0.275, 0.5, 'Current (pA)', rotation=90, fontsize=18,
transform=axes[7, 0].transAxes, ha='center', va='center')
#
# Model
#
prt2model = {}
prt2fixkineticsmodel = {}
for prt in protocol_list:
protocol_def = protocol_funcs[prt]
if type(protocol_def) is str:
protocol_def = '%s/%s' % (protocol_dir, protocol_def)
prt2model[prt] = m.Model('../mmt-model-files/ideal-ikr.mmt',
protocol_def=protocol_def,
temperature=temperature, # K
transform=None,
useFilterCap=False) # ignore capacitive spike
prt2fixkineticsmodel[prt] = m.Model(
'../mmt-model-files/simplified-voltage-clamp-ikr-linleak.mmt',
protocol_def=protocol_def,
temperature=temperature, # K
transform=None,
useFilterCap=False) # ignore capacitive spike
#
# Plot
#
mid = []
upper = []
lower = []
mid2 = []
upper2 = []
lower2 = []
for i_prt, prt in enumerate(protocol_list):
# Calculate axis index
ai, aj = 5 * int(i_prt / 3), i_prt % 3
# Title
if prt == 'staircaseramp':
axes[ai, aj].set_title('Calibration', fontsize=16)
else:
axes[ai, aj].set_title('Validation %s' % validation_idx[i_prt],
fontsize=16)
# Add label!
axes[ai, aj].text(-0.1, 1.4, string.ascii_uppercase[i_prt],
transform=axes[ai, aj].transAxes, size=20,
weight='bold')
# Time point
times = np.loadtxt('%s/%s-%s-times.csv' % (data_dir, file_name,
prt), delimiter=',', skiprows=1) * 1e3 # s -> ms
# Protocol
model = prt2model[prt]
modelfixkinetics = prt2fixkineticsmodel[prt]
# Set which parameters to be inferred
modelfixkinetics.set_parameters([
'ikr.g',
#'voltageclamp.rseries',
'voltageclamp.voffset_eff',
'voltageclamp.gLeak'])
if prt not in protocol_iv:
times_sim = np.copy(times)
voltage = model.voltage(times_sim)
else:
times_sim = protocol_iv_times[prt](times[1] - times[0])
voltage = model.voltage(times_sim)
voltage, t = protocol_iv_convert[prt](voltage, times_sim)
assert(np.mean(np.abs(t - times)) < 1e-8)
axes[ai, aj].set_ylim((np.min(voltage) - 10, np.max(voltage) + 15))
# Plot protocol
if prt not in protocol_iv:
axes[ai, aj].plot(times * 1e-3, voltage, c='#696969')
else:
# protocol
for i in range(voltage.shape[1]):
axes[ai, aj].plot(times * 1e-3, voltage[:, i], c='#696969')
# Calculate ranking
rmsd = rmsd_matrix[:, rmsd_prt.index(prt)]
best_cell = np.argmin(rmsd)
median_cell = np.argsort(rmsd)[len(rmsd)//2]
p90_cell = np.argsort(rmsd)[int(len(rmsd)*0.9)]
rankedcells = [rmsd_cells[best_cell],
rmsd_cells[median_cell],
rmsd_cells[p90_cell]]
rankedvalues = [rmsd[best_cell],
rmsd[median_cell],
rmsd[p90_cell]]
#rmsd[rmsd_cells.index(refcell)]]
rmsd2 = rmsd_matrix2[:, rmsd_prt2.index(prt)]
#best_cell2 = np.argmin(rmsd2)
#median_cell2 = np.argsort(rmsd2)[len(rmsd2)//2]
#p90_cell2 = np.argsort(rmsd2)[int(len(rmsd2)*0.9)]
#NOTE Compare with the 'red' cells; not its own ranking!!
best_cell2 = rmsd_cells2.index(rmsd_cells[best_cell])
median_cell2 = rmsd_cells2.index(rmsd_cells[median_cell])
p90_cell2 = rmsd_cells2.index(rmsd_cells[p90_cell])
rankedcells2 = [rmsd_cells2[best_cell2],
rmsd_cells2[median_cell2],
rmsd_cells2[p90_cell2]]
rankedvalues2 = [rmsd2[best_cell2],
rmsd2[median_cell2],
rmsd2[p90_cell2]]
#rmsd2[rmsd_cells2.index(refcell)]]
# Parameters
fn = '%s/%s-scheme3-simvclinleak/%s-cells-%s.txt' % \
(file_dir2, file_name, file_name, fit_seed2)
scheme3_cell_list = []
with open(fn, 'r') as f:
for l in f:
if not l.startswith('#'):
scheme3_cell_list.append(l.split()[0])
param_file = '%s/%s-scheme3-simvclinleak/%s-solution_i-%s.txt' % \
(file_dir2, file_name, file_name, fit_seed2)
obtained_parameters_all = np.loadtxt(param_file)
ikr_param = [
'ikr.p1', 'ikr.p2', 'ikr.p3', 'ikr.p4',
'ikr.p5', 'ikr.p6', 'ikr.p7', 'ikr.p8',
]
p_ikr = np.loadtxt('%s/%s-scheme3-simvclinleak/%s-solution-%s.txt' % \
(file_dir2, file_name, file_name, fit_seed2))
for i_cell, cell in enumerate(rankedcells):
# Data
if prt == 'staircaseramp':
data = np.loadtxt('%s/%s-%s-%s.csv' % (data_dir_staircase,
file_name, prt, cell), delimiter=',', skiprows=1)
elif prt not in protocol_iv:
data = np.loadtxt('%s/%s-%s-%s.csv' % (data_dir, file_name,
prt, cell), delimiter=',', skiprows=1)
# Re-leak correct the leak corrected data...
g_releak = fmin(score_leak, [0.0], args=(data, voltage, times,
protocol_leak_check[prt]), disp=False)
data = I_releak(g_releak[0], data, voltage)
else:
data = np.loadtxt('%s/%s-%s-%s.csv' % (data_dir, file_name,
prt, cell), delimiter=',', skiprows=1)
# Re-leak correct the leak corrected data...
for i in range(data.shape[1]):
g_releak = fmin(score_leak, [0.0], args=(data[:, i],
voltage[:, i], times,
protocol_leak_check[prt]), disp=False)
data[:, i] = I_releak(g_releak[0], data[:, i], voltage[:, i])
assert(len(data) == len(times))
# Fitted parameters
param_file = '%s/%s/%s-staircaseramp-%s-solution-%s.txt' % \
(file_dir, file_name, file_name, cell, fit_seed)
obtained_parameters = np.loadtxt(param_file) * 1e-3 # V, s -> mV, ms
# For fix kinetics model
rseal, cm, rseries = get_qc('../qc', file_name, cell)
#print('Est. Rseal, Cm, Rseries:', rseal, cm, rseries, '(GOhm, pF, GOhm)')
alpha = 0.8 # rseries %compensation
simvc_fix_values = [cm, rseries * alpha, rseries]
extra_fix = ['voltageclamp.rseries']
updateELeakCorrection = False
if updateELeakCorrection:
leakbeforeparam = np.loadtxt('../qc/' + file_name + '-staircaseramp-leak_before.txt')
leakafterparam = np.loadtxt('../qc/' + file_name + '-staircaseramp-leak_after.txt')
cell_id_file = '../qc/%s-staircaseramp-cell_id.txt' % file_name
cell_ids = []
with open(cell_id_file, 'r') as f:
for l in f:
if not l.startswith('#'):
cell_ids.append(l.split()[0])
cell_idx = cell_ids.index(cell)
ga, Ea = leakbeforeparam[cell_idx]
gb, Eb = leakafterparam[cell_idx]
ELeakCorrection = - (ga * Ea - gb * Eb) / (gb - ga)
#print('E_Leak correction: ', ELeakCorrection, ' (mV)')
if np.abs(ELeakCorrection) > 200: print('==' * 30, ga, Ea, gb, Eb)
extra_fix += ['voltageclamp.ELeak']
simvc_fix_values += [ELeakCorrection]
fix_p = get_fix_param(ikr_param + simvc_fix + extra_fix,
np.append(p_ikr, simvc_fix_values))
modelfixkinetics.set_fix_parameters(fix_p)
scheme3_cell_idx = scheme3_cell_list.index(cell)
obtained_parameters2 = obtained_parameters_all[scheme3_cell_idx]
# Simulation
simulation = model.simulate(obtained_parameters, times_sim)
simulationfixkinetics = modelfixkinetics.simulate(obtained_parameters2, times_sim)
if prt != 'staircaseramp' and prt not in protocol_iv:
# Re-leak correct the leak corrected simulationfixkinetics... TODO?
g_releak_simulationfixkinetics = fmin(score_leak, [0.1], args=(simulationfixkinetics, voltage, times,
protocol_leak_check[prt]), disp=False)
simulationfixkinetics = I_releak(g_releak_simulationfixkinetics[0], simulationfixkinetics, voltage)
if prt in protocol_iv:
simulation, t = protocol_iv_convert[prt](simulation, times_sim)
assert(np.mean(np.abs(t - times)) < 1e-6)
simulationfixkinetics, t = protocol_iv_convert[prt](
simulationfixkinetics, times_sim)
assert(np.mean(np.abs(t - times)) < 1e-6)
# Re-leak correct the leak corrected simulationfixkinetics... TODO?
for i in range(simulationfixkinetics.shape[1]):
g_releak_simulationfixkinetics = fmin(score_leak, [0.1], args=(simulationfixkinetics[:, i],
voltage[:, i], times,
protocol_leak_check[prt]), disp=False)
simulationfixkinetics[:, i] = I_releak(g_releak_simulationfixkinetics[0], simulationfixkinetics[:, i], voltage[:, i])
# Work out ylim
maximum = np.percentile(simulation, 100)
minimum = np.percentile(simulation, 0.0)
amplitude = maximum - minimum
if prt in ['apabv3', 'ap05hz']:
maximum += 0.6 * amplitude
minimum -= 0.6 * amplitude
elif prt in ['apab', 'ap1hz']:
maximum += 0.3 * amplitude
minimum -= 0.3 * amplitude
else:
maximum += 0.15 * amplitude
minimum -= 0.15 * amplitude
axes[ai + i_cell + 1, aj].set_ylim([minimum, maximum])
# Plot
if prt not in protocol_iv:
# recording
axes[ai + i_cell + 1, aj].plot(times * 1e-3, data, lw=1, alpha=0.5,
c='#9ecae1', label='data')
# simulation
if prt == 'staircaseramp':
axes[ai + i_cell + 1, aj].plot(times * 1e-3, simulation, lw=2,
c='#d95f02', label='model fit to data')
axes[ai + i_cell + 1, aj].plot(times * 1e-3, simulationfixkinetics, lw=2,
c='#1b9e77', label='model (fix kinetics) fit to data')
else:
axes[ai + i_cell + 1, aj].plot(times * 1e-3, simulation, lw=2,
c='#d95f02', label='model prediction')
axes[ai + i_cell + 1, aj].plot(times * 1e-3, simulationfixkinetics, lw=2,
c='#1b9e77', label='model (fix kinetics) prediction')
else:
iv_v = protocol_iv_v[prt]() # mV
# recording
iv_i = protocols.get_corrected_iv(data, times,
*protocol_iv_args[prt]())
axes[ai + i_cell + 1, aj].plot(iv_v, iv_i / np.max(iv_i), lw=2,
alpha=0.25, c='#9ecae1', label='data')
# simulation
iv_i = protocols.get_corrected_iv(simulation, times,
*protocol_iv_args[prt]())
axes[ai + i_cell + 1, aj].plot(iv_v, iv_i / np.max(iv_i), lw=2,
alpha=1, c='#d95f02', label='model prediction')
# simulationfixkinetics
iv_i_fixkinetics = protocols.get_corrected_iv(simulationfixkinetics, times,
*protocol_iv_args[prt]())
axes[ai + i_cell + 1, aj].plot(iv_v, iv_i_fixkinetics / np.max(iv_i_fixkinetics), lw=2,
alpha=1, c='#1b9e77', label='model (fix kinetics) prediction')
if prt == 'sactiv':
axes[ai + i_cell + 1, aj].set_ylim([-0.05, 1.05])
elif prt == 'sinactiv':
axes[ai + i_cell + 1, aj].set_ylim([-5, 1.05])
if False:
print(prt, i_cell, cell)
print('red', rmsd_compute(simulation, data))
print('green', rmsd_compute(simulationfixkinetics, data))
# Plot rmsd histogram
rmse_min = min(np.min(rmsd), np.min(rmsd2))
rmse_max = max(np.max(rmsd), np.max(rmsd2))
rmse_range = rmse_max - rmse_min
bins = np.linspace(rmse_min - 0.1 * rmse_range,
rmse_max + 0.1 * rmse_range, 20)
n, b, _ = axes[ai + 4, aj].hist(rmsd, bins=bins, color='#d95f02', alpha=0.25)
n2, b2, _ = axes[ai + 4, aj].hist(rmsd2, bins=bins, color='#2ca02c', alpha=0.25)
mid.append(np.percentile(rmsd, 50))
upper.append(np.percentile(rmsd, 90))
lower.append(np.percentile(rmsd, 10))
mid2.append(np.percentile(rmsd2, 50))
upper2.append(np.percentile(rmsd2, 90))
lower2.append(np.percentile(rmsd2, 10))
# Add labels
rankedidx = []
for i, v in enumerate(rankedvalues):
idx = np.where(b <= v)[0][-1]
if idx in rankedidx:
print('Ref. marker might clash with other markers...')
shift = 4
else:
shift = 0
axes[ai + 4, aj].text((b[idx] + b[idx + 1]) / 2., n[idx] + 3 + shift,
rankedlabels[i], fontsize=16, color='#d95f02',
ha='center', va='center')
if n[idx] > 0.8 * max(np.max(n), np.max(n2)):
axes[ai + 4, aj].set_ylim([0, max(np.max(n2), np.max(n)) + 8 + shift])
rankedidx.append(idx)
rankedidx2 = []
for i, v in enumerate(rankedvalues2):
idx = np.where(b2 <= v)[0][-1]
if idx in rankedidx2:
print('Ref. marker might clash with other markers...')
shift = 4
elif idx in rankedidx:
diff = np.abs(n[idx] - n2[idx])
if diff < max(np.max(n2), np.max(n)) * 0.1:
shift = max(np.max(n2), np.max(n)) * 0.2
else:
shift = 0
else:
shift = 0
axes[ai + 4, aj].text((b2[idx] + b2[idx + 1]) / 2., n2[idx] + 3 + shift,
rankedlabels[i], fontsize=16, color='#2ca02c',
ha='center', va='center')
if n2[idx] > 0.8 * max(np.max(n2), np.max(n)):
axes[ai + 4, aj].set_ylim([0, max(np.max(n2), np.max(n)) + 8 + shift])
rankedidx2.append(idx)
#
# Final adjustment and save
#
#axes[1, 0].legend()
#axes[1, 1].legend()
import matplotlib.patches as mpatches
data_patch = mpatches.Patch(color='#9ecae1', label='Data')
h1_patch = mpatches.Patch(color='#d95f02', label='Hypothesis 1: independent kinetics models')
h2_patch = mpatches.Patch(color='#1b9e77', label='Hypothesis 2: identical kinetics models')
axes[0, 0].legend(handles=[data_patch, h1_patch, h2_patch], loc='upper left', bbox_to_anchor=(-.025, 2.75), fontsize=14, ncol=3, columnspacing=5.5)
#grid.tight_layout(fig, pad=0.6, rect=(0.02, 0.0, 1, 0.99)) # not working...
#grid.update(wspace=0.2, hspace=0.0)
plt.savefig('%s/rmsd-hist-fix-kinetics-simvclinleak-scheme3-part1.pdf' % (savedir), bbox_inch='tight',
pad_inches=0, format='pdf')
plt.savefig('%s/rmsd-hist-fix-kinetics-simvclinleak-scheme3-part1.png' % (savedir), bbox_inch='tight',
pad_inches=0, dpi=300)
print('Done')
#
# Table
#
# Build a LaTeX ``tabularx`` summarising the RMSD statistics (median, 10th
# and 90th percentiles) for both hypotheses.  Layout: one label column,
# then per protocol a pair of (H1, H2) value columns, with a phantom
# spacer column between consecutive protocols.


def _stat_row(label, h1_vals, h2_vals):
    # One statistics row: label, then ' & H1 & H2' per protocol, with a
    # spacer '&' between protocols; terminated by a LaTeX row break.
    row = label
    n_prt = len(protocol_list)
    for i in range(n_prt):
        row += ' & %.2f & %.2f' % (h1_vals[i], h2_vals[i])
        if i < n_prt - 1:
            row += ' &'
    return row + ' \\\\\n'


tex = ''
tex += '\\begin{tabularx}{\\textwidth}{@{}l' \
       + 'XXc' * (len(protocol_list) - 1) + 'XX@{}}\n'
tex += '\\toprule\n'
tex += ' '
for i_prt, prt in enumerate(protocol_list):
    # Each protocol header spans its two (H1, H2) columns.  NOTE: the
    # backslashes are doubled — '\m' / '\p' were invalid escape sequences
    # (SyntaxWarning on modern Python); the emitted string is unchanged.
    if prt == 'staircaseramp':
        tex += ' & \\multicolumn{2}{c}{Cal.}'
    else:
        tex += ' & \\multicolumn{2}{c}{Val.~%s}' % validation_idx[i_prt]
    if i_prt < len(protocol_list) - 1:
        tex += ' & \\phantom{}'
tex += ' \\\\\n'
ii = 1
for i_prt, prt in enumerate(protocol_list):
    # Rule under each protocol's column pair, skipping the spacer columns
    # (columns 2-3, 5-6, 8-9, ...).
    ii += 1
    tex += '\\cmidrule{%s-%s}' % (ii, ii + 1)
    ii += 2
tex += '\n'
tex += ' '
for i_prt, prt in enumerate(protocol_list):
    tex += ' & H1 & H2'
    if i_prt < len(protocol_list) - 1:
        tex += ' &'
tex += ' \\\\\n'
tex += '\\midrule\n'
tex += _stat_row('Median', mid, mid2)
tex += _stat_row('10\\textsuperscript{th} \\%ile', lower, lower2)
tex += _stat_row('90\\textsuperscript{th} \\%ile', upper, upper2)
tex += '\\bottomrule\n'
tex += '\\end{tabularx}'
print(tex)
| [
"numpy.random.seed",
"numpy.abs",
"numpy.argmin",
"numpy.argsort",
"matplotlib.pyplot.figure",
"numpy.mean",
"matplotlib.patches.Patch",
"sys.path.append",
"numpy.copy",
"numpy.append",
"numpy.max",
"numpy.loadtxt",
"numpy.linspace",
"scipy.optimize.fmin",
"numpy.percentile",
"numpy.mi... | [((72, 97), 'sys.path.append', 'sys.path.append', (['"""../lib"""'], {}), "('../lib')\n", (87, 97), False, 'import sys\n'), ((145, 166), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (159, 166), False, 'import matplotlib\n'), ((575, 594), 'numpy.random.seed', 'np.random.seed', (['(101)'], {}), '(101)\n', (589, 594), True, 'import numpy as np\n'), ((2585, 2601), 'numpy.array', 'np.array', (['[25.0]'], {}), '([25.0])\n', (2593, 2601), True, 'import numpy as np\n'), ((3087, 3115), 'numpy.loadtxt', 'np.loadtxt', (['rmsd_matrix_file'], {}), '(rmsd_matrix_file)\n', (3097, 3115), True, 'import numpy as np\n'), ((3647, 3676), 'numpy.loadtxt', 'np.loadtxt', (['rmsd_matrix_file2'], {}), '(rmsd_matrix_file2)\n', (3657, 3676), True, 'import numpy as np\n'), ((4104, 4132), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 15)'}), '(figsize=(16, 15))\n', (4114, 4132), True, 'import matplotlib.pyplot as plt\n'), ((4191, 4286), 'matplotlib.pyplot.GridSpec', 'plt.GridSpec', (['(2 * n_ygrid + 1 * bigygap)', '(3 * n_xgrid + 2 * bigxgap)'], {'hspace': '(0.0)', 'wspace': '(0.0)'}), '(2 * n_ygrid + 1 * bigygap, 3 * n_xgrid + 2 * bigxgap, hspace=\n 0.0, wspace=0.0)\n', (4203, 4286), True, 'import matplotlib.pyplot as plt\n'), ((20889, 20934), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': '"""#9ecae1"""', 'label': '"""Data"""'}), "(color='#9ecae1', label='Data')\n", (20903, 20934), True, 'import matplotlib.patches as mpatches\n'), ((20946, 21033), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': '"""#d95f02"""', 'label': '"""Hypothesis 1: independent kinetics models"""'}), "(color='#d95f02', label=\n 'Hypothesis 1: independent kinetics models')\n", (20960, 21033), True, 'import matplotlib.patches as mpatches\n'), ((21040, 21125), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': '"""#1b9e77"""', 'label': '"""Hypothesis 2: identical kinetics models"""'}), "(color='#1b9e77', label='Hypothesis 2: 
identical kinetics models'\n )\n", (21054, 21125), True, 'import matplotlib.patches as mpatches\n'), ((21384, 21516), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('%s/rmsd-hist-fix-kinetics-simvclinleak-scheme3-part1.pdf' % savedir)"], {'bbox_inch': '"""tight"""', 'pad_inches': '(0)', 'format': '"""pdf"""'}), "('%s/rmsd-hist-fix-kinetics-simvclinleak-scheme3-part1.pdf' %\n savedir, bbox_inch='tight', pad_inches=0, format='pdf')\n", (21395, 21516), True, 'import matplotlib.pyplot as plt\n'), ((21523, 21650), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('%s/rmsd-hist-fix-kinetics-simvclinleak-scheme3-part1.png' % savedir)"], {'bbox_inch': '"""tight"""', 'pad_inches': '(0)', 'dpi': '(300)'}), "('%s/rmsd-hist-fix-kinetics-simvclinleak-scheme3-part1.png' %\n savedir, bbox_inch='tight', pad_inches=0, dpi=300)\n", (21534, 21650), True, 'import matplotlib.pyplot as plt\n'), ((622, 644), 'os.path.isdir', 'os.path.isdir', (['savedir'], {}), '(savedir)\n', (635, 644), False, 'import os\n'), ((650, 670), 'os.makedirs', 'os.makedirs', (['savedir'], {}), '(savedir)\n', (661, 670), False, 'import os\n'), ((6915, 7050), 'model.Model', 'm.Model', (['"""../mmt-model-files/ideal-ikr.mmt"""'], {'protocol_def': 'protocol_def', 'temperature': 'temperature', 'transform': 'None', 'useFilterCap': '(False)'}), "('../mmt-model-files/ideal-ikr.mmt', protocol_def=protocol_def,\n temperature=temperature, transform=None, useFilterCap=False)\n", (6922, 7050), True, 'import model as m\n'), ((7208, 7374), 'model.Model', 'm.Model', (['"""../mmt-model-files/simplified-voltage-clamp-ikr-linleak.mmt"""'], {'protocol_def': 'protocol_def', 'temperature': 'temperature', 'transform': 'None', 'useFilterCap': '(False)'}), "('../mmt-model-files/simplified-voltage-clamp-ikr-linleak.mmt',\n protocol_def=protocol_def, temperature=temperature, transform=None,\n useFilterCap=False)\n", (7215, 7374), True, 'import model as m\n'), ((9263, 9278), 'numpy.argmin', 'np.argmin', (['rmsd'], {}), '(rmsd)\n', (9272, 
9278), True, 'import numpy as np\n'), ((10873, 10895), 'numpy.loadtxt', 'np.loadtxt', (['param_file'], {}), '(param_file)\n', (10883, 10895), True, 'import numpy as np\n'), ((11047, 11157), 'numpy.loadtxt', 'np.loadtxt', (["('%s/%s-scheme3-simvclinleak/%s-solution-%s.txt' % (file_dir2, file_name,\n file_name, fit_seed2))"], {}), "('%s/%s-scheme3-simvclinleak/%s-solution-%s.txt' % (file_dir2,\n file_name, file_name, fit_seed2))\n", (11057, 11157), True, 'import numpy as np\n'), ((18790, 18863), 'numpy.linspace', 'np.linspace', (['(rmse_min - 0.1 * rmse_range)', '(rmse_max + 0.1 * rmse_range)', '(20)'], {}), '(rmse_min - 0.1 * rmse_range, rmse_max + 0.1 * rmse_range, 20)\n', (18801, 18863), True, 'import numpy as np\n'), ((8101, 8193), 'numpy.loadtxt', 'np.loadtxt', (["('%s/%s-%s-times.csv' % (data_dir, file_name, prt))"], {'delimiter': '""","""', 'skiprows': '(1)'}), "('%s/%s-%s-times.csv' % (data_dir, file_name, prt), delimiter=',',\n skiprows=1)\n", (8111, 8193), True, 'import numpy as np\n'), ((8555, 8569), 'numpy.copy', 'np.copy', (['times'], {}), '(times)\n', (8562, 8569), True, 'import numpy as np\n'), ((9297, 9313), 'numpy.argsort', 'np.argsort', (['rmsd'], {}), '(rmsd)\n', (9307, 9313), True, 'import numpy as np\n'), ((9343, 9359), 'numpy.argsort', 'np.argsort', (['rmsd'], {}), '(rmsd)\n', (9353, 9359), True, 'import numpy as np\n'), ((12704, 12736), 'parameters.get_qc', 'get_qc', (['"""../qc"""', 'file_name', 'cell'], {}), "('../qc', file_name, cell)\n", (12710, 12736), False, 'from parameters import simvc, get_qc\n'), ((15768, 15798), 'numpy.percentile', 'np.percentile', (['simulation', '(100)'], {}), '(simulation, 100)\n', (15781, 15798), True, 'import numpy as np\n'), ((15817, 15847), 'numpy.percentile', 'np.percentile', (['simulation', '(0.0)'], {}), '(simulation, 0.0)\n', (15830, 15847), True, 'import numpy as np\n'), ((18665, 18677), 'numpy.min', 'np.min', (['rmsd'], {}), '(rmsd)\n', (18671, 18677), True, 'import numpy as np\n'), ((18679, 18692), 
'numpy.min', 'np.min', (['rmsd2'], {}), '(rmsd2)\n', (18685, 18692), True, 'import numpy as np\n'), ((18713, 18725), 'numpy.max', 'np.max', (['rmsd'], {}), '(rmsd)\n', (18719, 18725), True, 'import numpy as np\n'), ((18727, 18740), 'numpy.max', 'np.max', (['rmsd2'], {}), '(rmsd2)\n', (18733, 18740), True, 'import numpy as np\n'), ((19059, 19082), 'numpy.percentile', 'np.percentile', (['rmsd', '(50)'], {}), '(rmsd, 50)\n', (19072, 19082), True, 'import numpy as np\n'), ((19101, 19124), 'numpy.percentile', 'np.percentile', (['rmsd', '(90)'], {}), '(rmsd, 90)\n', (19114, 19124), True, 'import numpy as np\n'), ((19143, 19166), 'numpy.percentile', 'np.percentile', (['rmsd', '(10)'], {}), '(rmsd, 10)\n', (19156, 19166), True, 'import numpy as np\n'), ((19184, 19208), 'numpy.percentile', 'np.percentile', (['rmsd2', '(50)'], {}), '(rmsd2, 50)\n', (19197, 19208), True, 'import numpy as np\n'), ((19228, 19252), 'numpy.percentile', 'np.percentile', (['rmsd2', '(90)'], {}), '(rmsd2, 90)\n', (19241, 19252), True, 'import numpy as np\n'), ((19272, 19296), 'numpy.percentile', 'np.percentile', (['rmsd2', '(10)'], {}), '(rmsd2, 10)\n', (19285, 19296), True, 'import numpy as np\n'), ((1144, 1167), 'numpy.mean', 'np.mean', (['((t1 - t2) ** 2)'], {}), '((t1 - t2) ** 2)\n', (1151, 1167), True, 'import numpy as np\n'), ((1179, 1195), 'numpy.mean', 'np.mean', (['(t2 ** 2)'], {}), '(t2 ** 2)\n', (1186, 1195), True, 'import numpy as np\n'), ((11286, 11391), 'numpy.loadtxt', 'np.loadtxt', (["('%s/%s-%s-%s.csv' % (data_dir_staircase, file_name, prt, cell))"], {'delimiter': '""","""', 'skiprows': '(1)'}), "('%s/%s-%s-%s.csv' % (data_dir_staircase, file_name, prt, cell),\n delimiter=',', skiprows=1)\n", (11296, 11391), True, 'import numpy as np\n'), ((12593, 12615), 'numpy.loadtxt', 'np.loadtxt', (['param_file'], {}), '(param_file)\n', (12603, 12615), True, 'import numpy as np\n'), ((13070, 13137), 'numpy.loadtxt', 'np.loadtxt', (["('../qc/' + file_name + '-staircaseramp-leak_before.txt')"], 
{}), "('../qc/' + file_name + '-staircaseramp-leak_before.txt')\n", (13080, 13137), True, 'import numpy as np\n'), ((13167, 13233), 'numpy.loadtxt', 'np.loadtxt', (["('../qc/' + file_name + '-staircaseramp-leak_after.txt')"], {}), "('../qc/' + file_name + '-staircaseramp-leak_after.txt')\n", (13177, 13233), True, 'import numpy as np\n'), ((14038, 14072), 'numpy.append', 'np.append', (['p_ikr', 'simvc_fix_values'], {}), '(p_ikr, simvc_fix_values)\n', (14047, 14072), True, 'import numpy as np\n'), ((14624, 14735), 'scipy.optimize.fmin', 'fmin', (['score_leak', '[0.1]'], {'args': '(simulationfixkinetics, voltage, times, protocol_leak_check[prt])', 'disp': '(False)'}), '(score_leak, [0.1], args=(simulationfixkinetics, voltage, times,\n protocol_leak_check[prt]), disp=False)\n', (14628, 14735), False, 'from scipy.optimize import fmin\n'), ((14800, 14875), 'releakcorrect.I_releak', 'I_releak', (['g_releak_simulationfixkinetics[0]', 'simulationfixkinetics', 'voltage'], {}), '(g_releak_simulationfixkinetics[0], simulationfixkinetics, voltage)\n', (14808, 14875), False, 'from releakcorrect import I_releak, score_leak, protocol_leak_check\n'), ((8819, 8836), 'numpy.abs', 'np.abs', (['(t - times)'], {}), '(t - times)\n', (8825, 8836), True, 'import numpy as np\n'), ((8873, 8888), 'numpy.min', 'np.min', (['voltage'], {}), '(voltage)\n', (8879, 8888), True, 'import numpy as np\n'), ((8895, 8910), 'numpy.max', 'np.max', (['voltage'], {}), '(voltage)\n', (8901, 8910), True, 'import numpy as np\n'), ((11464, 11560), 'numpy.loadtxt', 'np.loadtxt', (["('%s/%s-%s-%s.csv' % (data_dir, file_name, prt, cell))"], {'delimiter': '""","""', 'skiprows': '(1)'}), "('%s/%s-%s-%s.csv' % (data_dir, file_name, prt, cell), delimiter=\n ',', skiprows=1)\n", (11474, 11560), True, 'import numpy as np\n'), ((11656, 11751), 'scipy.optimize.fmin', 'fmin', (['score_leak', '[0.0]'], {'args': '(data, voltage, times, protocol_leak_check[prt])', 'disp': '(False)'}), '(score_leak, [0.0], args=(data, voltage, 
times, protocol_leak_check[prt\n ]), disp=False)\n', (11660, 11751), False, 'from scipy.optimize import fmin\n'), ((11798, 11834), 'releakcorrect.I_releak', 'I_releak', (['g_releak[0]', 'data', 'voltage'], {}), '(g_releak[0], data, voltage)\n', (11806, 11834), False, 'from releakcorrect import I_releak, score_leak, protocol_leak_check\n'), ((11868, 11964), 'numpy.loadtxt', 'np.loadtxt', (["('%s/%s-%s-%s.csv' % (data_dir, file_name, prt, cell))"], {'delimiter': '""","""', 'skiprows': '(1)'}), "('%s/%s-%s-%s.csv' % (data_dir, file_name, prt, cell), delimiter=\n ',', skiprows=1)\n", (11878, 11964), True, 'import numpy as np\n'), ((13795, 13818), 'numpy.abs', 'np.abs', (['ELeakCorrection'], {}), '(ELeakCorrection)\n', (13801, 13818), True, 'import numpy as np\n'), ((15399, 15522), 'scipy.optimize.fmin', 'fmin', (['score_leak', '[0.1]'], {'args': '(simulationfixkinetics[:, i], voltage[:, i], times, protocol_leak_check[prt])', 'disp': '(False)'}), '(score_leak, [0.1], args=(simulationfixkinetics[:, i], voltage[:, i],\n times, protocol_leak_check[prt]), disp=False)\n', (15403, 15522), False, 'from scipy.optimize import fmin\n'), ((15637, 15728), 'releakcorrect.I_releak', 'I_releak', (['g_releak_simulationfixkinetics[0]', 'simulationfixkinetics[:, i]', 'voltage[:, i]'], {}), '(g_releak_simulationfixkinetics[0], simulationfixkinetics[:, i],\n voltage[:, i])\n', (15645, 15728), False, 'from releakcorrect import I_releak, score_leak, protocol_leak_check\n'), ((19390, 19406), 'numpy.where', 'np.where', (['(b <= v)'], {}), '(b <= v)\n', (19398, 19406), True, 'import numpy as np\n'), ((19995, 20012), 'numpy.where', 'np.where', (['(b2 <= v)'], {}), '(b2 <= v)\n', (20003, 20012), True, 'import numpy as np\n'), ((20189, 20213), 'numpy.abs', 'np.abs', (['(n[idx] - n2[idx])'], {}), '(n[idx] - n2[idx])\n', (20195, 20213), True, 'import numpy as np\n'), ((12107, 12213), 'scipy.optimize.fmin', 'fmin', (['score_leak', '[0.0]'], {'args': '(data[:, i], voltage[:, i], times, 
protocol_leak_check[prt])', 'disp': '(False)'}), '(score_leak, [0.0], args=(data[:, i], voltage[:, i], times,\n protocol_leak_check[prt]), disp=False)\n', (12111, 12213), False, 'from scipy.optimize import fmin\n'), ((12311, 12359), 'releakcorrect.I_releak', 'I_releak', (['g_releak[0]', 'data[:, i]', 'voltage[:, i]'], {}), '(g_releak[0], data[:, i], voltage[:, i])\n', (12319, 12359), False, 'from releakcorrect import I_releak, score_leak, protocol_leak_check\n'), ((15010, 15027), 'numpy.abs', 'np.abs', (['(t - times)'], {}), '(t - times)\n', (15016, 15027), True, 'import numpy as np\n'), ((15183, 15200), 'numpy.abs', 'np.abs', (['(t - times)'], {}), '(t - times)\n', (15189, 15200), True, 'import numpy as np\n'), ((17463, 17475), 'numpy.max', 'np.max', (['iv_i'], {}), '(iv_i)\n', (17469, 17475), True, 'import numpy as np\n'), ((17760, 17772), 'numpy.max', 'np.max', (['iv_i'], {}), '(iv_i)\n', (17766, 17772), True, 'import numpy as np\n'), ((18112, 18136), 'numpy.max', 'np.max', (['iv_i_fixkinetics'], {}), '(iv_i_fixkinetics)\n', (18118, 18136), True, 'import numpy as np\n'), ((19781, 19790), 'numpy.max', 'np.max', (['n'], {}), '(n)\n', (19787, 19790), True, 'import numpy as np\n'), ((19792, 19802), 'numpy.max', 'np.max', (['n2'], {}), '(n2)\n', (19798, 19802), True, 'import numpy as np\n'), ((20624, 20634), 'numpy.max', 'np.max', (['n2'], {}), '(n2)\n', (20630, 20634), True, 'import numpy as np\n'), ((20636, 20645), 'numpy.max', 'np.max', (['n'], {}), '(n)\n', (20642, 20645), True, 'import numpy as np\n'), ((20240, 20250), 'numpy.max', 'np.max', (['n2'], {}), '(n2)\n', (20246, 20250), True, 'import numpy as np\n'), ((20252, 20261), 'numpy.max', 'np.max', (['n'], {}), '(n)\n', (20258, 20261), True, 'import numpy as np\n'), ((20298, 20308), 'numpy.max', 'np.max', (['n2'], {}), '(n2)\n', (20304, 20308), True, 'import numpy as np\n'), ((20310, 20319), 'numpy.max', 'np.max', (['n'], {}), '(n)\n', (20316, 20319), True, 'import numpy as np\n'), ((19851, 19861), 
'numpy.max', 'np.max', (['n2'], {}), '(n2)\n', (19857, 19861), True, 'import numpy as np\n'), ((19863, 19872), 'numpy.max', 'np.max', (['n'], {}), '(n)\n', (19869, 19872), True, 'import numpy as np\n'), ((20694, 20704), 'numpy.max', 'np.max', (['n2'], {}), '(n2)\n', (20700, 20704), True, 'import numpy as np\n'), ((20706, 20715), 'numpy.max', 'np.max', (['n'], {}), '(n)\n', (20712, 20715), True, 'import numpy as np\n')] |
from config import RESULT_PATH, DATABASE_PATH
from utils import load_splitdata, balancer_block, save_to_pickle, \
load_from_pickle, check_loadsave, training_series, evaluating_series, \
load_30xonly, load_50xonly, random_sample
from utils import mutypes, pcodes
from os import path, makedirs
from sys import argv
import numpy as np
# ---- command-line configuration -------------------------------------------
seed = int(argv[1])
mt = argv[2]                   # presumably a mutation type (cf. utils.mutypes) — TODO confirm
pc = argv[3]                   # presumably a code from utils.pcodes — TODO confirm
balance_strategy = argv[4]
ratio_test = int(argv[5])      # subsampling ratio for the 30x test set
software = argv[6]

prespath = path.join(RESULT_PATH, software, mt, pc)

# ---- load (or build and cache) the 30x train/test data --------------------
X_train_ori, y_train_ori, X_test30x_ori, y_test30x_ori = check_loadsave(
    path.join(prespath, 'data_suitcase_30x.pkl'),
    load_30xonly, {'pcode': pc, 'mutype': mt, 'software': software})
# (loading of the 50x data is intentionally disabled)

# ---- load (or build and cache) the undersampling mask for the test set ----
mask_name = 'msk_test30x_{}_{:.1f}.pkl'.format(seed, ratio_test)
msk_test30x = check_loadsave(
    path.join(prespath, 'msk', mask_name),
    random_sample, {'y': y_test30x_ori, 'ratio': ratio_test, 'seed': seed})

# ---- feature selection: currently every column is kept --------------------
col_sel = np.arange(X_train_ori.shape[1])
X_train = X_train_ori[:, col_sel]
y_train = y_train_ori
Xs_test = [X_test30x_ori[msk_test30x][:, col_sel]]
ys_test = [y_test30x_ori[msk_test30x]]

# ---- train (or reload) the series of classifiers on the 30x data ----------
print('training on 30x data')
clf_name = 'clfkits_{}_{}.pkl'.format(seed, balance_strategy)
clfkit_list, normalizer = check_loadsave(
    path.join(prespath, 'clf', clf_name),
    training_series, {
        'X_train': X_train,
        'y_train': y_train,
        'model_list': ['logireg', 'lsvm', 'nn', 'rf', 'xgbdef', 'lgbdef'],
        'model_params': [{}, {},
                         {'hidden_layer_sizes': (50, 15)},
                         {}, {}, {}],
        'seed': seed,
        'balance_strategy': balance_strategy})

# ---- evaluate every trained model on the (subsampled) 30x test set --------
print('testing on 30x')
metr_name = 'metrs_{}_30x_{}-{:.1f}.pkl'.format(seed, balance_strategy,
                                                ratio_test)
ys_df, metrs_df = check_loadsave(
    path.join(prespath, 'metr', metr_name),
    evaluating_series, {
        'Xs_test': Xs_test,
        'ys_test': ys_test,
        'clfkit_list': clfkit_list,
        'normalizer': normalizer})
| [
"os.path.join",
"numpy.arange"
] | [((476, 516), 'os.path.join', 'path.join', (['RESULT_PATH', 'software', 'mt', 'pc'], {}), '(RESULT_PATH, software, mt, pc)\n', (485, 516), False, 'from os import path, makedirs\n'), ((1221, 1252), 'numpy.arange', 'np.arange', (['X_train_ori.shape[1]'], {}), '(X_train_ori.shape[1])\n', (1230, 1252), True, 'import numpy as np\n'), ((595, 639), 'os.path.join', 'path.join', (['prespath', '"""data_suitcase_30x.pkl"""'], {}), "(prespath, 'data_suitcase_30x.pkl')\n", (604, 639), False, 'from os import path, makedirs\n')] |
import os
import sys
PROJECT_PATH = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(PROJECT_PATH)
"""
Original model, but only with translation transformations (9 transformations), original Resnet is used
"""
import numpy as np
from keras.utils import to_categorical
from modules.data_loaders.base_line_loaders import load_hits
from transformations import KernelTransformer
from models.wide_residual_network import create_wide_residual_network
import time
import datetime
from keras.backend.tensorflow_backend import set_session
import tensorflow as tf
from tqdm import tqdm
from scripts.detached_transformer_od_hits import \
plot_histogram_disc_loss_acc_thr, \
dirichlet_normality_score, fixed_point_dirichlet_mle, calc_approx_alpha_sum
from scripts.ensemble_transform_vs_all_od_hits import get_entropy
import torch
import torch.nn as nn
def _dirichlet_scores(mdl, transformer, observed_data, x_eval):
    """Average Dirichlet normality score of ``x_eval`` over all transforms.

    For each transformation index, a Dirichlet distribution is fitted via
    fixed-point MLE to the model's softmax outputs on the in-distribution
    ``observed_data``; ``x_eval`` is scored under that distribution.  The
    per-transformation scores are summed and averaged, exactly as the
    original duplicated test/validation loops did.
    """
    scores = np.zeros((len(x_eval),))
    for t_ind in tqdm(range(transformer.n_transforms)):
        # predictions of the in-distribution data for a single transformation
        observed_dirichlet = mdl.predict(
            transformer.transform_batch(observed_data,
                                        [t_ind] * len(observed_data)),
            batch_size=1024)
        log_p_hat_train = np.log(observed_dirichlet).mean(axis=0)
        alpha_sum_approx = calc_approx_alpha_sum(observed_dirichlet)
        alpha_0 = observed_dirichlet.mean(axis=0) * alpha_sum_approx
        mle_alpha_t = fixed_point_dirichlet_mle(alpha_0, log_p_hat_train)
        x_eval_p = mdl.predict(
            transformer.transform_batch(x_eval, [t_ind] * len(x_eval)),
            batch_size=1024)
        scores += dirichlet_normality_score(mle_alpha_t, x_eval_p)
    return scores / transformer.n_transforms


def _plain_scores(mdl, transformer, x_eval):
    """Average softmax probability the model assigns to the transformation
    that was actually applied, over all transformations (no Dirichlet)."""
    scores = np.zeros((len(x_eval),))
    for t_ind in tqdm(range(transformer.n_transforms)):
        x_eval_p = mdl.predict(
            transformer.transform_batch(x_eval, [t_ind] * len(x_eval)),
            batch_size=1024)
        scores += x_eval_p[:, t_ind]
    return scores / transformer.n_transforms


if __name__ == "__main__":
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True  # dynamically grow the memory used on the GPU
    sess = tf.Session(config=config)
    set_session(sess)

    single_class_ind = 1  # class treated as inliers

    (x_train, y_train), (x_val, y_val), (x_test, y_test) = load_hits(
        n_samples_by_class=10000,
        test_size=0.20,
        val_size=0.10, return_val=True)
    print(x_train.shape)
    print(x_val.shape)
    print(x_test.shape)

    transformer = KernelTransformer(translation_x=8, translation_y=8, rotations=0,
                                  flips=0, gauss=1, log=1)
    n, k = (10, 4)
    mdl = create_wide_residual_network(input_shape=x_train.shape[1:],
                                       num_classes=transformer.n_transforms,
                                       depth=n, widen_factor=k)
    mdl.compile(optimizer='adam', loss='categorical_crossentropy',
                metrics=['acc'])
    print(mdl.summary())
    print('n_transforms ', transformer.n_transforms)

    # keep only inliers of the chosen class
    x_train_task = x_train[y_train.flatten() == single_class_ind]
    print(x_train_task.shape)
    x_val_task = x_val[y_val.flatten() == single_class_ind]
    print(x_val_task.shape)

    # one transformation label per (sample, transformation) pair
    transformations_inds_train = np.tile(np.arange(transformer.n_transforms),
                                          len(x_train_task))
    transformations_inds_val = np.tile(np.arange(transformer.n_transforms),
                                        len(x_val_task))
    print(len(transformations_inds_train))
    print(len(transformations_inds_val))

    # apply every transformation to every sample
    start_time = time.time()
    x_train_task_transformed = transformer.transform_batch(
        np.repeat(x_train_task, transformer.n_transforms, axis=0),
        transformations_inds_train)
    x_val_task_transformed = transformer.transform_batch(
        np.repeat(x_val_task, transformer.n_transforms, axis=0),
        transformations_inds_val)
    time_usage = str(datetime.timedelta(
        seconds=int(round(time.time() - start_time))))
    print("Time to perform transforms: " + time_usage)
    print(x_train_task_transformed.shape)
    print(x_val_task_transformed.shape)

    # train the network to predict which transformation was applied
    batch_size = 128
    start_time = time.time()
    mdl.fit(x=x_train_task_transformed,
            y=to_categorical(transformations_inds_train),
            batch_size=batch_size,
            epochs=2,  # int(np.ceil(200 / transformer.n_transforms))
            )
    time_usage = str(datetime.timedelta(
        seconds=int(round(time.time() - start_time))))
    print("Time to train model: " + time_usage)

    observed_data = x_train_task

    # Dirichlet-based scores (test set and in-distribution validation set).
    # NOTE: the Dirichlet fit on ``observed_data`` is recomputed per call,
    # mirroring the original duplicated loops.
    test_scores = _dirichlet_scores(mdl, transformer, observed_data, x_test)
    val_scores_in = _dirichlet_scores(mdl, transformer, observed_data,
                                      x_val_task)

    labels = y_test.flatten() == single_class_ind
    plot_histogram_disc_loss_acc_thr(test_scores[labels], test_scores[~labels],
                                     path='../results',
                                     x_label_name='KernelTransTransformations_Dscores_hits',
                                     val_inliers_score=val_scores_in)

    # Plain (non-Dirichlet) transformation scores.
    plain_scores_test = _plain_scores(mdl, transformer, x_test)
    plain_scores_val = _plain_scores(mdl, transformer, x_val_task)
    plot_histogram_disc_loss_acc_thr(plain_scores_test[labels],
                                     plain_scores_test[~labels],
                                     path='../results',
                                     x_label_name='KernelTransTransformations_scores_hits',
                                     val_inliers_score=plain_scores_val)
"sys.path.append",
"scripts.detached_transformer_od_hits.calc_approx_alpha_sum",
"transformations.KernelTransformer",
"numpy.log",
"os.path.dirname",
"modules.data_loaders.base_line_loaders.load_hits",
"tensorflow.Session",
"scripts.detached_transformer_od_hits.plot_histogram_disc_loss_acc_thr",
"sc... | [((105, 134), 'sys.path.append', 'sys.path.append', (['PROJECT_PATH'], {}), '(PROJECT_PATH)\n', (120, 134), False, 'import sys\n'), ((922, 938), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {}), '()\n', (936, 938), True, 'import tensorflow as tf\n'), ((1036, 1061), 'tensorflow.Session', 'tf.Session', ([], {'config': 'config'}), '(config=config)\n', (1046, 1061), True, 'import tensorflow as tf\n'), ((1064, 1081), 'keras.backend.tensorflow_backend.set_session', 'set_session', (['sess'], {}), '(sess)\n', (1075, 1081), False, 'from keras.backend.tensorflow_backend import set_session\n'), ((1164, 1250), 'modules.data_loaders.base_line_loaders.load_hits', 'load_hits', ([], {'n_samples_by_class': '(10000)', 'test_size': '(0.2)', 'val_size': '(0.1)', 'return_val': '(True)'}), '(n_samples_by_class=10000, test_size=0.2, val_size=0.1, return_val\n =True)\n', (1173, 1250), False, 'from modules.data_loaders.base_line_loaders import load_hits\n'), ((1350, 1443), 'transformations.KernelTransformer', 'KernelTransformer', ([], {'translation_x': '(8)', 'translation_y': '(8)', 'rotations': '(0)', 'flips': '(0)', 'gauss': '(1)', 'log': '(1)'}), '(translation_x=8, translation_y=8, rotations=0, flips=0,\n gauss=1, log=1)\n', (1367, 1443), False, 'from transformations import KernelTransformer\n'), ((1500, 1627), 'models.wide_residual_network.create_wide_residual_network', 'create_wide_residual_network', ([], {'input_shape': 'x_train.shape[1:]', 'num_classes': 'transformer.n_transforms', 'depth': 'n', 'widen_factor': 'k'}), '(input_shape=x_train.shape[1:], num_classes=\n transformer.n_transforms, depth=n, widen_factor=k)\n', (1528, 1627), False, 'from models.wide_residual_network import create_wide_residual_network\n'), ((2458, 2469), 'time.time', 'time.time', ([], {}), '()\n', (2467, 2469), False, 'import time\n'), ((3036, 3047), 'time.time', 'time.time', ([], {}), '()\n', (3045, 3047), False, 'import time\n'), ((5191, 5383), 
'scripts.detached_transformer_od_hits.plot_histogram_disc_loss_acc_thr', 'plot_histogram_disc_loss_acc_thr', (['test_scores[labels]', 'test_scores[~labels]'], {'path': '"""../results"""', 'x_label_name': '"""KernelTransTransformations_Dscores_hits"""', 'val_inliers_score': 'val_scores_in'}), "(test_scores[labels], test_scores[~labels],\n path='../results', x_label_name=\n 'KernelTransTransformations_Dscores_hits', val_inliers_score=val_scores_in)\n", (5223, 5383), False, 'from scripts.detached_transformer_od_hits import plot_histogram_disc_loss_acc_thr, dirichlet_normality_score, fixed_point_dirichlet_mle, calc_approx_alpha_sum\n'), ((6304, 6515), 'scripts.detached_transformer_od_hits.plot_histogram_disc_loss_acc_thr', 'plot_histogram_disc_loss_acc_thr', (['plain_scores_test[labels]', 'plain_scores_test[~labels]'], {'path': '"""../results"""', 'x_label_name': '"""KernelTransTransformations_scores_hits"""', 'val_inliers_score': 'plain_scores_val'}), "(plain_scores_test[labels],\n plain_scores_test[~labels], path='../results', x_label_name=\n 'KernelTransTransformations_scores_hits', val_inliers_score=\n plain_scores_val)\n", (6336, 6515), False, 'from scripts.detached_transformer_od_hits import plot_histogram_disc_loss_acc_thr, dirichlet_normality_score, fixed_point_dirichlet_mle, calc_approx_alpha_sum\n'), ((71, 96), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (86, 96), False, 'import os\n'), ((2120, 2155), 'numpy.arange', 'np.arange', (['transformer.n_transforms'], {}), '(transformer.n_transforms)\n', (2129, 2155), True, 'import numpy as np\n'), ((2252, 2287), 'numpy.arange', 'np.arange', (['transformer.n_transforms'], {}), '(transformer.n_transforms)\n', (2261, 2287), True, 'import numpy as np\n'), ((2534, 2591), 'numpy.repeat', 'np.repeat', (['x_train_task', 'transformer.n_transforms'], {'axis': '(0)'}), '(x_train_task, transformer.n_transforms, axis=0)\n', (2543, 2591), True, 'import numpy as np\n'), ((2689, 2744), 'numpy.repeat', 
'np.repeat', (['x_val_task', 'transformer.n_transforms'], {'axis': '(0)'}), '(x_val_task, transformer.n_transforms, axis=0)\n', (2698, 2744), True, 'import numpy as np\n'), ((3906, 3947), 'scripts.detached_transformer_od_hits.calc_approx_alpha_sum', 'calc_approx_alpha_sum', (['observed_dirichlet'], {}), '(observed_dirichlet)\n', (3927, 3947), False, 'from scripts.detached_transformer_od_hits import plot_histogram_disc_loss_acc_thr, dirichlet_normality_score, fixed_point_dirichlet_mle, calc_approx_alpha_sum\n'), ((4032, 4083), 'scripts.detached_transformer_od_hits.fixed_point_dirichlet_mle', 'fixed_point_dirichlet_mle', (['alpha_0', 'log_p_hat_train'], {}), '(alpha_0, log_p_hat_train)\n', (4057, 4083), False, 'from scripts.detached_transformer_od_hits import plot_histogram_disc_loss_acc_thr, dirichlet_normality_score, fixed_point_dirichlet_mle, calc_approx_alpha_sum\n'), ((4225, 4273), 'scripts.detached_transformer_od_hits.dirichlet_normality_score', 'dirichlet_normality_score', (['mle_alpha_t', 'x_test_p'], {}), '(mle_alpha_t, x_test_p)\n', (4250, 4273), False, 'from scripts.detached_transformer_od_hits import plot_histogram_disc_loss_acc_thr, dirichlet_normality_score, fixed_point_dirichlet_mle, calc_approx_alpha_sum\n'), ((4718, 4759), 'scripts.detached_transformer_od_hits.calc_approx_alpha_sum', 'calc_approx_alpha_sum', (['observed_dirichlet'], {}), '(observed_dirichlet)\n', (4739, 4759), False, 'from scripts.detached_transformer_od_hits import plot_histogram_disc_loss_acc_thr, dirichlet_normality_score, fixed_point_dirichlet_mle, calc_approx_alpha_sum\n'), ((4844, 4895), 'scripts.detached_transformer_od_hits.fixed_point_dirichlet_mle', 'fixed_point_dirichlet_mle', (['alpha_0', 'log_p_hat_train'], {}), '(alpha_0, log_p_hat_train)\n', (4869, 4895), False, 'from scripts.detached_transformer_od_hits import plot_histogram_disc_loss_acc_thr, dirichlet_normality_score, fixed_point_dirichlet_mle, calc_approx_alpha_sum\n'), ((5046, 5093), 
'scripts.detached_transformer_od_hits.dirichlet_normality_score', 'dirichlet_normality_score', (['mle_alpha_t', 'x_val_p'], {}), '(mle_alpha_t, x_val_p)\n', (5071, 5093), False, 'from scripts.detached_transformer_od_hits import plot_histogram_disc_loss_acc_thr, dirichlet_normality_score, fixed_point_dirichlet_mle, calc_approx_alpha_sum\n'), ((3098, 3140), 'keras.utils.to_categorical', 'to_categorical', (['transformations_inds_train'], {}), '(transformations_inds_train)\n', (3112, 3140), False, 'from keras.utils import to_categorical\n'), ((3842, 3868), 'numpy.log', 'np.log', (['observed_dirichlet'], {}), '(observed_dirichlet)\n', (3848, 3868), True, 'import numpy as np\n'), ((4654, 4680), 'numpy.log', 'np.log', (['observed_dirichlet'], {}), '(observed_dirichlet)\n', (4660, 4680), True, 'import numpy as np\n'), ((2841, 2852), 'time.time', 'time.time', ([], {}), '()\n', (2850, 2852), False, 'import time\n'), ((3318, 3329), 'time.time', 'time.time', ([], {}), '()\n', (3327, 3329), False, 'import time\n')] |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
from astropy.tests.helper import pytest
from astropy.utils.data import get_pkg_data_filename
import astropy.units as u
from astropy.io import ascii
from ..utils import (validate_data_table, generate_energy_edges,
build_data_table, estimate_B)
# Read data
fname = get_pkg_data_filename('data/CrabNebula_HESS_ipac.dat')
data_table = ascii.read(fname)
# Read spectrum with symmetric flux errors
fname_sym = get_pkg_data_filename('data/CrabNebula_HESS_ipac_symmetric.dat')
data_table_sym = ascii.read(fname_sym)
def test_validate_energy_error_types():
for etype in ['edges','error','width','errors']:
fname = get_pkg_data_filename(
'data/CrabNebula_HESS_ipac_energy_{0}.dat'.format(etype))
dt = ascii.read(fname)
validate_data_table(dt)
def test_sed():
fname = get_pkg_data_filename('data/Fake_ipac_sed.dat')
validate_data_table(ascii.read(fname))
validate_data_table([ascii.read(fname)])
def test_concatenation():
fname0 = get_pkg_data_filename('data/Fake_ipac_sed.dat')
dt0 = ascii.read(fname0)
for sed in [True, False]:
validate_data_table([dt0,data_table],sed=sed)
validate_data_table([data_table,dt0],sed=sed)
validate_data_table([dt0,dt0],sed=sed)
def test_validate_data_types():
data_table2 = data_table.copy()
data_table2['energy'].unit = ''
with pytest.raises(TypeError):
validate_data_table(data_table2)
def test_validate_missing_column():
    """Removing a required column ('energy' or 'flux_error') raises TypeError."""
    no_energy = data_table.copy()
    no_energy.remove_column('energy')
    with pytest.raises(TypeError):
        validate_data_table(no_energy)
    no_flux_error = data_table_sym.copy()
    no_flux_error.remove_column('flux_error')
    with pytest.raises(TypeError):
        validate_data_table(no_flux_error)
def test_validate_string_uls():
    """String 'ul' columns parse to booleans; unrecognized strings raise."""
    from astropy.table import Column
    table = data_table.copy()
    # Swap the upper-limit column for one holding the strings 'False'/'True'.
    table.remove_column('ul')
    n_rows = len(table)
    table.add_column(
        Column(name='ul', dtype=str, data=['False'] * n_rows)
    )
    table['ul'][1] = 'True'
    validated = validate_data_table(table)
    assert np.sum(validated['ul']) == 1
    assert np.sum(~validated['ul']) == n_rows - 1
    # A string that is neither 'True' nor 'False' must be rejected.
    table['ul'][2] = 'foo'
    with pytest.raises(TypeError):
        validate_data_table(table)
def test_validate_cl():
    """Validation rejects a non-numeric 'cl' keyword and defaults it to 0.9.

    Fix: the call inside ``pytest.raises`` previously bound its result to a
    local (``data = validate_data_table(...)``) that could never be used,
    since the call must raise for the test to pass; the dead binding is gone.
    """
    data_table2 = data_table.copy()
    # A non-numeric confidence level must be rejected.
    data_table2.meta['keywords']['cl']['value'] = 'test'
    with pytest.raises(TypeError):
        validate_data_table(data_table2)
    # With 'cl' absent entirely, validation falls back to the 0.9 default.
    data_table2.meta['keywords'].pop('cl')
    data = validate_data_table(data_table2)
    assert np.all(data['cl'] == 0.9)
def test_build_data_table():
    """build_data_table accepts all supported error/energy specifications.

    Fix: the four ``table = build_data_table(...)`` bindings were dead code
    (the name was repeatedly overwritten and never read); the calls only need
    to succeed, so the results are no longer bound.
    """
    ene = np.logspace(-2, 2, 20) * u.TeV
    flux = (ene / (1 * u.TeV)) ** -2 * u.Unit('1/(cm2 s TeV)')
    flux_error_hi = 0.2 * flux
    flux_error_lo = 0.1 * flux
    ul = np.zeros(len(ene))
    ul[0] = 1
    dene = generate_energy_edges(ene)
    # Asymmetric flux errors, with and without an explicit confidence level.
    build_data_table(ene, flux, flux_error_hi=flux_error_hi,
                     flux_error_lo=flux_error_lo, ul=ul)
    build_data_table(ene, flux, flux_error_hi=flux_error_hi,
                     flux_error_lo=flux_error_lo, ul=ul, cl=0.99)
    # Symmetric flux errors with the energy bin width...
    build_data_table(ene, flux, flux_error=flux_error_hi,
                     energy_width=dene[0])
    # ...and with explicit lower/upper bin edges.
    build_data_table(ene, flux, flux_error=flux_error_hi,
                     energy_lo=(ene - dene[0]),
                     energy_hi=(ene + dene[1]))
    # no flux_error
    with pytest.raises(TypeError):
        build_data_table(ene, flux)
    # errors in energy physical type validation
    with pytest.raises(TypeError):
        build_data_table(ene.value, flux, flux_error=flux_error_hi)
    with pytest.raises(TypeError):
        build_data_table(ene.value * u.Unit('erg/(cm2 s)'), flux,
                         flux_error=flux_error_hi)
def test_estimate_B():
    """estimate_B reproduces the reference field strength from the test data."""
    xray_fname = get_pkg_data_filename('data/CrabNebula_Fake_Xray.dat')
    xray_table = ascii.read(xray_fname)
    B = estimate_B(xray_table, data_table)
    assert_allclose(B.to('uG'), 0.4848756912803697 * u.uG)
| [
"numpy.sum",
"astropy.io.ascii.read",
"numpy.logspace",
"astropy.tests.helper.pytest.raises",
"astropy.utils.data.get_pkg_data_filename",
"numpy.all",
"astropy.units.Unit"
] | [((411, 465), 'astropy.utils.data.get_pkg_data_filename', 'get_pkg_data_filename', (['"""data/CrabNebula_HESS_ipac.dat"""'], {}), "('data/CrabNebula_HESS_ipac.dat')\n", (432, 465), False, 'from astropy.utils.data import get_pkg_data_filename\n'), ((479, 496), 'astropy.io.ascii.read', 'ascii.read', (['fname'], {}), '(fname)\n', (489, 496), False, 'from astropy.io import ascii\n'), ((553, 617), 'astropy.utils.data.get_pkg_data_filename', 'get_pkg_data_filename', (['"""data/CrabNebula_HESS_ipac_symmetric.dat"""'], {}), "('data/CrabNebula_HESS_ipac_symmetric.dat')\n", (574, 617), False, 'from astropy.utils.data import get_pkg_data_filename\n'), ((635, 656), 'astropy.io.ascii.read', 'ascii.read', (['fname_sym'], {}), '(fname_sym)\n', (645, 656), False, 'from astropy.io import ascii\n'), ((956, 1003), 'astropy.utils.data.get_pkg_data_filename', 'get_pkg_data_filename', (['"""data/Fake_ipac_sed.dat"""'], {}), "('data/Fake_ipac_sed.dat')\n", (977, 1003), False, 'from astropy.utils.data import get_pkg_data_filename\n'), ((1132, 1179), 'astropy.utils.data.get_pkg_data_filename', 'get_pkg_data_filename', (['"""data/Fake_ipac_sed.dat"""'], {}), "('data/Fake_ipac_sed.dat')\n", (1153, 1179), False, 'from astropy.utils.data import get_pkg_data_filename\n'), ((1190, 1208), 'astropy.io.ascii.read', 'ascii.read', (['fname0'], {}), '(fname0)\n', (1200, 1208), False, 'from astropy.io import ascii\n'), ((2863, 2888), 'numpy.all', 'np.all', (["(data['cl'] == 0.9)"], {}), "(data['cl'] == 0.9)\n", (2869, 2888), True, 'import numpy as np\n'), ((4177, 4231), 'astropy.utils.data.get_pkg_data_filename', 'get_pkg_data_filename', (['"""data/CrabNebula_Fake_Xray.dat"""'], {}), "('data/CrabNebula_Fake_Xray.dat')\n", (4198, 4231), False, 'from astropy.utils.data import get_pkg_data_filename\n'), ((4243, 4260), 'astropy.io.ascii.read', 'ascii.read', (['fname'], {}), '(fname)\n', (4253, 4260), False, 'from astropy.io import ascii\n'), ((877, 894), 'astropy.io.ascii.read', 'ascii.read', 
(['fname'], {}), '(fname)\n', (887, 894), False, 'from astropy.io import ascii\n'), ((1028, 1045), 'astropy.io.ascii.read', 'ascii.read', (['fname'], {}), '(fname)\n', (1038, 1045), False, 'from astropy.io import ascii\n'), ((1509, 1533), 'astropy.tests.helper.pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1522, 1533), False, 'from astropy.tests.helper import pytest\n'), ((1698, 1722), 'astropy.tests.helper.pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1711, 1722), False, 'from astropy.tests.helper import pytest\n'), ((1858, 1882), 'astropy.tests.helper.pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1871, 1882), False, 'from astropy.tests.helper import pytest\n'), ((2307, 2325), 'numpy.sum', 'np.sum', (["data['ul']"], {}), "(data['ul'])\n", (2313, 2325), True, 'import numpy as np\n'), ((2342, 2361), 'numpy.sum', 'np.sum', (["(~data['ul'])"], {}), "(~data['ul'])\n", (2348, 2361), True, 'import numpy as np\n'), ((2455, 2479), 'astropy.tests.helper.pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2468, 2479), False, 'from astropy.tests.helper import pytest\n'), ((2674, 2698), 'astropy.tests.helper.pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2687, 2698), False, 'from astropy.tests.helper import pytest\n'), ((2929, 2951), 'numpy.logspace', 'np.logspace', (['(-2)', '(2)', '(20)'], {}), '(-2, 2, 20)\n', (2940, 2951), True, 'import numpy as np\n'), ((2997, 3020), 'astropy.units.Unit', 'u.Unit', (['"""1/(cm2 s TeV)"""'], {}), "('1/(cm2 s TeV)')\n", (3003, 3020), True, 'import astropy.units as u\n'), ((3767, 3791), 'astropy.tests.helper.pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (3780, 3791), False, 'from astropy.tests.helper import pytest\n'), ((3895, 3919), 'astropy.tests.helper.pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (3908, 3919), False, 'from astropy.tests.helper import pytest\n'), 
((3999, 4023), 'astropy.tests.helper.pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (4012, 4023), False, 'from astropy.tests.helper import pytest\n'), ((1072, 1089), 'astropy.io.ascii.read', 'ascii.read', (['fname'], {}), '(fname)\n', (1082, 1089), False, 'from astropy.io import ascii\n'), ((4060, 4081), 'astropy.units.Unit', 'u.Unit', (['"""erg/(cm2 s)"""'], {}), "('erg/(cm2 s)')\n", (4066, 4081), True, 'import astropy.units as u\n')] |
from __future__ import print_function
import numpy as np
from .lte import *
__all__ = ['initLTE', 'synthLTE']
def initLTE(atmos, lines, wavelengthAxis):
    """
    Initialize the LTE synthesis module.

    Args:
        atmos (float): array of size (ndepth x 7) defining the reference atmosphere.
            Columns: log(tau)  T [K]  vmic [km/s]  vmac [km/s]  B [G]  thetaB [deg]  phiB [deg]
        lines (float): array of size (nlines x 11) defining the spectral lines.
            Columns: lambda0 [A]  Element  ionization state  log(gf)  Elow [cm^-1]
            Lande_up  Lande_low  Jup  Jlow  sigmaABO  alphaABO
        wavelengthAxis (float): array of length nlambda setting the wavelength axis

    Returns:
        None
    """
    # Hand the atmosphere and the line list to the LTE backend (.lte module).
    initAtmos(atmos)
    initLines(lines, wavelengthAxis)
def synthLTE(referenceAtmos, variablesRF=None, responseFunction=False, deltaRT=0.01):
    """
    Synthesize the Stokes profiles for an atmosphere and, optionally, the
    response functions to perturbations of its variables.

    Fixes relative to the original:
      * ``variablesRF == None`` -> ``variablesRF is None`` (an ndarray argument
        would make the ``==`` comparison elementwise and ambiguous in ``if``).
      * Removed the unused ``typicalValues`` local and the dead initial
        ``np.copy(referenceAtmos)`` (re-copied inside the loop before any use).
      * The perturbed continuum no longer clobbers ``cont``, so the returned
        continuum is that of the *reference* atmosphere, not of the last
        perturbed synthesis.

    Args:
        referenceAtmos (float): array of size (ndepth x 7) defining the reference atmosphere. The columns are
        log(tau)   T [K]    vmic [km/s]   vmac [km/s]   B [G]   thetaB [deg]   phiB [deg]
        variablesRF (optional, list): six 0/1 flags selecting the variables for
            which response functions are computed (defaults to all six)
        responseFunction (bool, optional): also return the response functions
        deltaRT (float, optional): relative perturbation for the finite differences

    Returns:
        float: Stokes parameters [4 x nwavelength]
        float: continuum value [nwavelength]
        float: (only when responseFunction) RF array [nVariables x ndepth x 4 x nwavelength]
    """
    logTau = referenceAtmos[:, 0]
    stokes, cont = synthLines(referenceAtmos)

    # Compute the response functions if needed
    if responseFunction:
        nDepth = len(logTau)
        nLambda = len(cont)
        if variablesRF is None:
            variablesRF = [1] * 6
        nVariables = np.sum(variablesRF)
        RF = np.zeros((nVariables, nDepth, 4, nLambda))
        loop = 0
        for indexPar in range(6):
            if variablesRF[indexPar] == 1:
                atmosPerturbed = np.copy(referenceAtmos)
                for i in range(nDepth):
                    # Forward finite difference, one depth point at a time.
                    # NOTE(review): delta is 0 when the reference value is 0,
                    # which would divide by zero below — confirm inputs.
                    delta = deltaRT * referenceAtmos[i, indexPar+1]
                    atmosPerturbed[i, indexPar+1] = referenceAtmos[i, indexPar+1] + delta
                    stokesNew, _ = synthLines(atmosPerturbed)
                    # Restore the depth point before perturbing the next one.
                    atmosPerturbed[i, indexPar+1] = referenceAtmos[i, indexPar+1]
                    RF[loop, i, :, :] = (stokesNew - stokes) / delta
                loop += 1
        return stokes, cont, RF
    else:
        return stokes, cont
| [
"numpy.zeros",
"numpy.sum",
"numpy.copy"
] | [((1957, 1980), 'numpy.copy', 'np.copy', (['referenceAtmos'], {}), '(referenceAtmos)\n', (1964, 1980), True, 'import numpy as np\n'), ((2142, 2161), 'numpy.sum', 'np.sum', (['variablesRF'], {}), '(variablesRF)\n', (2148, 2161), True, 'import numpy as np\n'), ((2176, 2218), 'numpy.zeros', 'np.zeros', (['(nVariables, nDepth, 4, nLambda)'], {}), '((nVariables, nDepth, 4, nLambda))\n', (2184, 2218), True, 'import numpy as np\n'), ((2357, 2380), 'numpy.copy', 'np.copy', (['referenceAtmos'], {}), '(referenceAtmos)\n', (2364, 2380), True, 'import numpy as np\n')] |
from unityagents import UnityEnvironment
import numpy as np
env = UnityEnvironment(file_name='/data/Reacher_Linux_NoVis/Reacher.x86_64')
brain_name = env.brain_names[0]
brain = env.brains[brain_name]
from ddpg_agent import Agent
from collections import deque
import torch
import torch.nn.functional as F
import torch.optim as optim
import time
from workspace_utils import active_session
agent = Agent(state_size=33, action_size=4, random_seed=2)
# Bug fix: `env_info` was referenced here without ever being defined at module
# level, raising NameError on import.  Reset the environment once to obtain it.
env_info = env.reset(train_mode=True)[brain_name]
num_agents = len(env_info.agents)
def ddpg(n_episodes=2000, max_t=1000):
    """Run the DDPG training loop and return the per-episode scores.

    Each episode's score is the mean return over all parallel agents; training
    stops early once the 100-episode rolling average reaches 32, at which point
    the actor and critic weights are saved to disk.
    """
    print("Enter ddpg...\n")
    recent_scores = deque(maxlen=100)   # rolling window for the 100-episode average
    episode_scores = []
    best_score = 0
    best_average_score = 0
    for i_episode in range(1, n_episodes + 1):
        avg_score = 0
        # Reset the environment and read the initial joint state of all agents.
        env_info = env.reset(train_mode=True)[brain_name]
        n_agents = len(env_info.agents)
        states = env_info.vector_observations
        agent_returns = np.zeros(n_agents)
        score = 0
        agent.reset()
        for _ in range(max_t):
            actions = agent.act(states)
            env_info = env.step(actions)[brain_name]
            next_states = env_info.vector_observations
            rewards = env_info.rewards
            dones = env_info.local_done
            # Store the transition and learn, then advance to the next state.
            agent.step(states, actions, rewards, next_states, dones)
            states = next_states
            agent_returns += rewards
            if np.any(dones):
                break
        score = np.mean(agent_returns)
        recent_scores.append(score)
        avg_score = np.mean(recent_scores)
        episode_scores.append(score)
        # Track the best single-episode and best rolling-average scores.
        best_score = max(best_score, score)
        best_average_score = max(best_average_score, avg_score)
        print("Episode:{}, Score:{:.2f}, Best Score:{:.2f}, Average Score:{:.2f}, Best Avg Score:{:.2f}".format(
            i_episode, score, best_score, avg_score, best_average_score))
        if avg_score >= 32:
            torch.save(agent.actor_local.state_dict(), 'actor_solved.pth')
            torch.save(agent.critic_local.state_dict(), 'critic_solved.pth')
            break
    return episode_scores
# Time the full training run inside an active workspace session.
start = time.time()
with active_session():
    scores = ddpg()
end = time.time()
elapsed_minutes = (end - start) / 60
print('\nTotal training time = {:.1f} min'.format(elapsed_minutes))
| [
"workspace_utils.active_session",
"numpy.zeros",
"time.time",
"numpy.any",
"numpy.mean",
"unityagents.UnityEnvironment",
"collections.deque",
"ddpg_agent.Agent"
] | [((67, 137), 'unityagents.UnityEnvironment', 'UnityEnvironment', ([], {'file_name': '"""/data/Reacher_Linux_NoVis/Reacher.x86_64"""'}), "(file_name='/data/Reacher_Linux_NoVis/Reacher.x86_64')\n", (83, 137), False, 'from unityagents import UnityEnvironment\n'), ((399, 449), 'ddpg_agent.Agent', 'Agent', ([], {'state_size': '(33)', 'action_size': '(4)', 'random_seed': '(2)'}), '(state_size=33, action_size=4, random_seed=2)\n', (404, 449), False, 'from ddpg_agent import Agent\n'), ((2648, 2659), 'time.time', 'time.time', ([], {}), '()\n', (2657, 2659), False, 'import time\n'), ((2709, 2720), 'time.time', 'time.time', ([], {}), '()\n', (2718, 2720), False, 'import time\n'), ((577, 594), 'collections.deque', 'deque', ([], {'maxlen': '(100)'}), '(maxlen=100)\n', (582, 594), False, 'from collections import deque\n'), ((2665, 2681), 'workspace_utils.active_session', 'active_session', ([], {}), '()\n', (2679, 2681), False, 'from workspace_utils import active_session\n'), ((1027, 1047), 'numpy.zeros', 'np.zeros', (['num_agents'], {}), '(num_agents)\n', (1035, 1047), True, 'import numpy as np\n'), ((1786, 1808), 'numpy.mean', 'np.mean', (['scores_agents'], {}), '(scores_agents)\n', (1793, 1808), True, 'import numpy as np\n'), ((1874, 1895), 'numpy.mean', 'np.mean', (['scores_deque'], {}), '(scores_deque)\n', (1881, 1895), True, 'import numpy as np\n'), ((1684, 1697), 'numpy.any', 'np.any', (['dones'], {}), '(dones)\n', (1690, 1697), True, 'import numpy as np\n')] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.