code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
# Register the job-board models with the Django admin site so that
# JobListing and Employer records can be managed through the admin UI.
from django.contrib import admin
from .models import JobListing
from .models import Employer
admin.site.register(JobListing)
admin.site.register(Employer)
|
normal
|
{
"blob_id": "a96575d507a91472176c99d4d55e2a3bbf8111d1",
"index": 2707,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nadmin.site.register(JobListing)\nadmin.site.register(Employer)\n",
"step-3": "from django.contrib import admin\nfrom .models import JobListing\nfrom .models import Employer\nadmin.site.register(JobListing)\nadmin.site.register(Employer)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# Generated by Django 3.2.4 on 2021-09-13 17:41
import dataUpload.models
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Initial migration: creates the Task model for the dataUpload app."""

    # First migration for this app, so it depends on nothing.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Task',
            fields=[
                # Surrogate integer primary key for the uploaded-file record.
                ('file_id', models.AutoField(primary_key=True, serialize=False)),
                # Random UUID identifying the processing task; not user-editable.
                ('task_id', models.UUIDField(default=uuid.uuid4, editable=False)),
                # Uploaded file, stored under a per-task directory computed by
                # dataUpload.models.task_directory_path.
                ('file', models.FileField(upload_to=dataUpload.models.task_directory_path)),
                # Filesystem paths filled in later by processing; empty by default.
                ('path_to_tar', models.CharField(default='', max_length=1000)),
                ('path_to_cache', models.CharField(default='', max_length=1000)),
            ],
        ),
    ]
|
normal
|
{
"blob_id": "9cab749b915dbb808ac105caa5287b50729f5fd9",
"index": 111,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Task', fields=[('file_id',\n models.AutoField(primary_key=True, serialize=False)), ('task_id',\n models.UUIDField(default=uuid.uuid4, editable=False)), ('file',\n models.FileField(upload_to=dataUpload.models.task_directory_path)),\n ('path_to_tar', models.CharField(default='', max_length=1000)), (\n 'path_to_cache', models.CharField(default='', max_length=1000))])]\n",
"step-4": "import dataUpload.models\nfrom django.db import migrations, models\nimport uuid\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Task', fields=[('file_id',\n models.AutoField(primary_key=True, serialize=False)), ('task_id',\n models.UUIDField(default=uuid.uuid4, editable=False)), ('file',\n models.FileField(upload_to=dataUpload.models.task_directory_path)),\n ('path_to_tar', models.CharField(default='', max_length=1000)), (\n 'path_to_cache', models.CharField(default='', max_length=1000))])]\n",
"step-5": "# Generated by Django 3.2.4 on 2021-09-13 17:41\n\nimport dataUpload.models\nfrom django.db import migrations, models\nimport uuid\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Task',\n fields=[\n ('file_id', models.AutoField(primary_key=True, serialize=False)),\n ('task_id', models.UUIDField(default=uuid.uuid4, editable=False)),\n ('file', models.FileField(upload_to=dataUpload.models.task_directory_path)),\n ('path_to_tar', models.CharField(default='', max_length=1000)),\n ('path_to_cache', models.CharField(default='', max_length=1000)),\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from splinter import Browser
from time import sleep
from datetime import datetime, timedelta
import os, sys
import urllib
import cv2
import numpy as np
from PIL import Image
import imutils
import csv
class Scraper():
    """Scrapes daily sunspot numbers and sun images from spaceweather.com.

    For each sampled day it loads the site's archive page, extracts the
    reported sunspot number, downloads the sun image, and stores both
    under data/YYYYMMDD/.
    """

    # First Monday of the sampling window; get_days() derives 8 weeks of
    # Mon/Wed/Fri dates from it.
    start_date = datetime(2018, 1, 8)
    url = 'http://spaceweather.com/'

    def scrape(self):
        """Open a Firefox browser and scrape every day from get_days()."""
        self.browser = Browser('firefox')
        self.browser.driver.set_page_load_timeout(60)
        self.browser.visit(self.url)
        for day in self.get_days():
            self.scrape_day(day)

    def scrape_day(self, day):
        """Scrape one archive day: sunspot count and image, saved to disk."""
        # BUG FIX: bare `urllib.urlretrieve` was Python 2 API; in Python 3
        # (this file uses print() calls) it raises AttributeError. The
        # function lives in urllib.request.
        from urllib.request import urlretrieve
        self.browser.select('month', day.strftime('%m'))
        self.browser.select('day', day.strftime('%d'))
        self.browser.select('year', day.strftime('%Y'))
        button = self.browser.find_by_name('view')
        button.click()
        # The fifth .solarWindText element holds "Sunspot number: N ...".
        text = self.browser.find_by_css('.solarWindText')[4].text
        number = int(text.split(' ')[2].strip())
        link = self.browser.find_link_by_partial_href('images{}/'.format(day.strftime('%Y')))['href']
        folder_name = "data/{}{}{}".format(day.strftime('%Y'), day.strftime('%m'), day.strftime('%d'))
        image_name = "{}/image.gif".format(folder_name)
        txt_name = "{}/data.txt".format(folder_name)
        os.mkdir(folder_name)
        urlretrieve(link, image_name)
        # Convert the downloaded GIF to a compressed PNG for later processing.
        img = Image.open(image_name)
        img.save("{}/image.png".format(folder_name), 'png', optimize=True, quality=70)
        # with-block guarantees the file is closed even if the write fails.
        with open(txt_name, 'w') as txt_file:
            txt_file.write(str(number))
        print("Downloaded data for {}, sunspots: {}".format(day.strftime('%m/%d/%Y'), number))

    def get_days(self):
        """Return 24 datetimes: Mon/Wed/Fri for 8 weeks from start_date."""
        days = []
        for i in range(0, 8):
            base = self.start_date + timedelta(days=7 * i)
            days.append(base)                      # Monday
            days.append(base + timedelta(days=2))  # Wednesday
            days.append(base + timedelta(days=4))  # Friday
        return days
class Entry():
    """One day's scraped sunspot data plus OpenCV image processing.

    process() counts visible sunspots in the saved sun image via contour
    detection and tallies them into four horizontal bands of the image.
    """

    folder = None            # data/ subfolder name (YYYYMMDD)
    date = None              # datetime of the observation
    sunspots = -1            # officially reported sunspot number
    image_path = None        # path to the PNG sun image
    counted_sunspots = 0     # sunspots counted by process()
    sections = [0, 0, 0, 0]  # per-band counts, overwritten by process()

    def nothing(self, *arg):
        """No-op callback placeholder (e.g. for OpenCV trackbars)."""
        pass

    def __init__(self, folder, date, sunspots, image_path):
        self.folder = folder
        self.date = date
        self.sunspots = sunspots
        self.image_path = image_path

    def process(self):
        """Detect sunspots in the image and write an annotated side-by-side
        comparison image to out/images/<folder>.png.
        """
        frame = cv2.imread(self.image_path)
        height, width, channels = frame.shape
        frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)
        hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)
        # Reddish hue range that marks sunspots in the source image.
        colorLow = np.array([0, 90, 80])
        colorHigh = np.array([10, 255, 255])
        mask = cv2.inRange(hsv, colorLow, colorHigh)
        result = cv2.bitwise_and(frame, frame, mask=mask)
        image_edged = cv2.Canny(mask, 50, 100)
        image_edged = cv2.dilate(image_edged, None, iterations=1)
        image_edged = cv2.erode(image_edged, None, iterations=1)
        cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        # findContours return shape differs between OpenCV versions.
        cnts = cnts[0] if imutils.is_cv2() else cnts[1]
        image_contours = cv2.bitwise_not(result)
        self.counted_sunspots = 0
        self.sections = [0, 0, 0, 0]
        # BUG FIX: Python 3 `/` yields floats, but OpenCV drawing functions
        # require integer pixel coordinates — use floor division instead.
        section_1_start, section_1_end = 0, height // 4
        section_2_start, section_2_end = height // 4, height // 4 * 2
        section_3_start, section_3_end = height // 4 * 2, height // 4 * 3
        section_4_start, section_4_end = height // 4 * 3, height // 4 * 4
        cv2.line(image_contours, (0, section_1_end), (width, section_1_end), (0, 0, 0), 5)
        cv2.line(image_contours, (0, section_2_end), (width, section_2_end), (0, 0, 0), 10)
        cv2.line(image_contours, (0, section_3_end), (width, section_3_end), (0, 0, 0), 5)
        cv2.circle(image_contours, (width // 2, height // 2), width // 2, (0, 0, 0), 5)
        font = cv2.FONT_HERSHEY_SIMPLEX
        cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50), font, 2, (0, 0, 0), 2, cv2.LINE_AA)
        cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)
        for c in cnts:
            # Ignore tiny contours that are noise, not sunspots.
            if cv2.contourArea(c) < 5:
                continue
            (x, y), radius = cv2.minEnclosingCircle(c)
            x = int(x)
            y = int(y)
            radius = int(radius)
            cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)
            self.counted_sunspots = self.counted_sunspots + 1
            # Tally the spot into whichever horizontal band its center falls in.
            if y >= section_1_start and y <= section_1_end:
                self.sections[0] = self.sections[0] + 1
            elif y >= section_2_start and y <= section_2_end:
                self.sections[1] = self.sections[1] + 1
            elif y >= section_3_start and y <= section_3_end:
                self.sections[2] = self.sections[2] + 1
            elif y >= section_4_start and y <= section_4_end:
                self.sections[3] = self.sections[3] + 1
        print('Counted sunspots: {}'.format(self.counted_sunspots))
        print(self.sections)
        cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)
        cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)
        cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)
        cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)
        # Black out near-white pixels so drawn annotations stay visible.
        colorLow = np.array([0, 0, 90])
        colorHigh = np.array([0, 0, 255])
        mask = cv2.inRange(hsv, colorLow, colorHigh)
        image_contours[mask > 0] = (0, 0, 0)
        # Original frame and annotated frame side by side.
        vis = np.concatenate((frame, image_contours), axis=1)
        cv2.imwrite('out/images/{}.png'.format(self.folder), vis)
class Processor():
    """Loads scraped entries from data/ and writes summary CSVs to out/."""

    entries = []  # class-level default kept for backward compatibility

    def __init__(self):
        # BUG FIX: the original appended to a class-level list, so every
        # Processor instance shared (and kept growing) the same entries.
        # Use a fresh per-instance list instead.
        self.entries = []

    def load(self):
        """Read each data/<YYYYMMDD>/ folder into an Entry and process it."""
        folders = os.listdir("data")
        for folder in folders:
            # The folder name encodes the observation date as YYYYMMDD.
            year = int(folder[:4])
            month = int(folder[4:6])
            day = int(folder[6:8])
            date = datetime(year, month, day)
            image_name = "data/{}/image.png".format(folder)
            txt_name = "data/{}/data.txt".format(folder)
            # with-block guarantees the file is closed even on parse errors.
            with open(txt_name, 'r') as txt_file:
                content = txt_file.readlines()
            number = int(content[0])
            print(folder)
            entry = Entry(folder, date, number, image_name)
            entry.process()
            self.entries.append(entry)
        self.entries.sort(key=lambda x: x.date, reverse=False)

    def compute(self):
        """Print per-section and overall sunspot totals; write the CSVs.

        Assumes load() has populated self.entries (divides by its length).
        """
        for section in range(0, 4):
            total = 0
            for entry in self.entries:
                total += entry.sections[section]
            average = float(total) / float(len(self.entries))
            print('-------[Section {}]-------'.format(section + 1))
            print('Total: {}'.format(total))
            print('Average: {}'.format(average))
        total = 0
        sections_data = [["date", "section_1", "section_2", "section_3", "section_4"]]
        numbers_data = [["date", "reported", "visible"]]
        for entry in self.entries:
            total += entry.counted_sunspots
            sections_data.append([entry.date.strftime("%Y/%m/%d")] + entry.sections)
            numbers_data.append([entry.date.strftime("%Y/%m/%d")] + [entry.sunspots, entry.counted_sunspots])
        average = float(total) / float(len(self.entries))
        print('---------[TOTAL]---------')
        print('Total: {}'.format(total))
        print('Average: {}'.format(average))
        # newline='' prevents the csv module emitting blank rows on Windows;
        # with-blocks replace the manual open/close pairs.
        with open('out/sections.csv', 'w', newline='') as csv_file:
            csv.writer(csv_file).writerows(sections_data)
        with open('out/numbers.csv', 'w', newline='') as csv_file:
            csv.writer(csv_file).writerows(numbers_data)
# Script entry point: scrape all sampled days from spaceweather.com into
# data/, then process the images and write CSV summaries to out/.
scraper = Scraper()
scraper.scrape()
processor = Processor()
processor.load()
processor.compute()
|
normal
|
{
"blob_id": "c55991e738c89ee09dabd79d514e710e0fcbac85",
"index": 422,
"step-1": "<mask token>\n\n\nclass Scraper:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Entry:\n folder = None\n date = None\n sunspots = -1\n image_path = None\n counted_sunspots = 0\n sections = [0, 0, 0, 0]\n\n def nothing(self, *arg):\n pass\n\n def __init__(self, folder, date, sunspots, image_path):\n self.folder = folder\n self.date = date\n self.sunspots = sunspots\n self.image_path = image_path\n\n def process(self):\n frame = cv2.imread(self.image_path)\n height, width, channels = frame.shape\n frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)\n hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)\n colorLow = np.array([0, 90, 80])\n colorHigh = np.array([10, 255, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n result = cv2.bitwise_and(frame, frame, mask=mask)\n image_edged = cv2.Canny(mask, 50, 100)\n image_edged = cv2.dilate(image_edged, None, iterations=1)\n image_edged = cv2.erode(image_edged, None, iterations=1)\n cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n image_contours = cv2.bitwise_not(result)\n self.counted_sunspots = 0\n self.sections = [0, 0, 0, 0]\n section_1_start, section_1_end = 0, height / 4\n section_2_start, section_2_end = height / 4, height / 4 * 2\n section_3_start, section_3_end = height / 4 * 2, height / 4 * 3\n section_4_start, section_4_end = height / 4 * 3, height / 4 * 4\n cv2.line(image_contours, (0, section_1_end), (width, section_1_end),\n (0, 0, 0), 5)\n cv2.line(image_contours, (0, section_2_end), (width, section_2_end),\n (0, 0, 0), 10)\n cv2.line(image_contours, (0, section_3_end), (width, section_3_end),\n (0, 0, 0), 5)\n cv2.circle(image_contours, (width / 2, height / 2), width / 2, (0, \n 0, 0), 5)\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50\n ), font, 2, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 
self.date.strftime('SSN: {}'.format(\n self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)\n for c in cnts:\n if cv2.contourArea(c) < 5:\n continue\n (x, y), radius = cv2.minEnclosingCircle(c)\n x = int(x)\n y = int(y)\n radius = int(radius)\n cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)\n self.counted_sunspots = self.counted_sunspots + 1\n if y >= section_1_start and y <= section_1_end:\n self.sections[0] = self.sections[0] + 1\n elif y >= section_2_start and y <= section_2_end:\n self.sections[1] = self.sections[1] + 1\n elif y >= section_3_start and y <= section_3_end:\n self.sections[2] = self.sections[2] + 1\n elif y >= section_4_start and y <= section_4_end:\n self.sections[3] = self.sections[3] + 1\n print('Counted sunspots: {}'.format(self.counted_sunspots))\n print(self.sections)\n cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]\n ), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]\n ), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]\n ), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]\n ), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n colorLow = np.array([0, 0, 90])\n colorHigh = np.array([0, 0, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n image_contours[mask > 0] = 0, 0, 0\n vis = np.concatenate((frame, image_contours), axis=1)\n cv2.imwrite('out/images/{}.png'.format(self.folder), vis)\n\n\nclass Processor:\n entries = []\n\n def load(self):\n folders = os.listdir('data')\n for folder in folders:\n year = int(folder[:4])\n month = int(folder[4:6])\n day = int(folder[6:8])\n date = datetime(year, month, day)\n image_name = 'data/{}/image.png'.format(folder)\n txt_name = 'data/{}/data.txt'.format(folder)\n txt_file = open(txt_name, 'r')\n content = txt_file.readlines()\n 
txt_file.close()\n number = int(content[0])\n print(folder)\n entry = Entry(folder, date, number, image_name)\n entry.process()\n self.entries.append(entry)\n self.entries.sort(key=lambda x: x.date, reverse=False)\n\n def compute(self):\n for section in range(0, 4):\n total = 0\n for entry in self.entries:\n total += entry.sections[section]\n average = float(total) / float(len(self.entries))\n print('-------[Section {}]-------'.format(section + 1))\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n total = 0\n sections_data = [['date', 'section_1', 'section_2', 'section_3',\n 'section_4']]\n numbers_data = [['date', 'reported', 'visible']]\n for entry in self.entries:\n total += entry.counted_sunspots\n sections_data.append([entry.date.strftime('%Y/%m/%d')] + entry.\n sections)\n numbers_data.append([entry.date.strftime('%Y/%m/%d')] + [entry.\n sunspots, entry.counted_sunspots])\n average = float(total) / float(len(self.entries))\n print('---------[TOTAL]---------')\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n csv_file = open('out/sections.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(sections_data)\n csv_file.close()\n csv_file = open('out/numbers.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(numbers_data)\n csv_file.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Scraper:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def get_days(self):\n days = []\n for i in range(0, 8):\n base = self.start_date + timedelta(days=7 * i)\n first = base\n second = base + timedelta(days=2)\n third = base + timedelta(days=4)\n days.append(first)\n days.append(second)\n days.append(third)\n return days\n\n\nclass Entry:\n folder = None\n date = None\n sunspots = -1\n image_path = None\n counted_sunspots = 0\n sections = [0, 0, 0, 0]\n\n def nothing(self, *arg):\n pass\n\n def __init__(self, folder, date, sunspots, image_path):\n self.folder = folder\n self.date = date\n self.sunspots = sunspots\n self.image_path = image_path\n\n def process(self):\n frame = cv2.imread(self.image_path)\n height, width, channels = frame.shape\n frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)\n hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)\n colorLow = np.array([0, 90, 80])\n colorHigh = np.array([10, 255, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n result = cv2.bitwise_and(frame, frame, mask=mask)\n image_edged = cv2.Canny(mask, 50, 100)\n image_edged = cv2.dilate(image_edged, None, iterations=1)\n image_edged = cv2.erode(image_edged, None, iterations=1)\n cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n image_contours = cv2.bitwise_not(result)\n self.counted_sunspots = 0\n self.sections = [0, 0, 0, 0]\n section_1_start, section_1_end = 0, height / 4\n section_2_start, section_2_end = height / 4, height / 4 * 2\n section_3_start, section_3_end = height / 4 * 2, height / 4 * 3\n section_4_start, section_4_end = height / 4 * 3, height / 4 * 4\n cv2.line(image_contours, (0, section_1_end), (width, section_1_end),\n (0, 0, 0), 5)\n cv2.line(image_contours, (0, section_2_end), (width, section_2_end),\n (0, 0, 0), 10)\n cv2.line(image_contours, (0, section_3_end), (width, section_3_end),\n (0, 0, 0), 5)\n 
cv2.circle(image_contours, (width / 2, height / 2), width / 2, (0, \n 0, 0), 5)\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50\n ), font, 2, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(\n self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)\n for c in cnts:\n if cv2.contourArea(c) < 5:\n continue\n (x, y), radius = cv2.minEnclosingCircle(c)\n x = int(x)\n y = int(y)\n radius = int(radius)\n cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)\n self.counted_sunspots = self.counted_sunspots + 1\n if y >= section_1_start and y <= section_1_end:\n self.sections[0] = self.sections[0] + 1\n elif y >= section_2_start and y <= section_2_end:\n self.sections[1] = self.sections[1] + 1\n elif y >= section_3_start and y <= section_3_end:\n self.sections[2] = self.sections[2] + 1\n elif y >= section_4_start and y <= section_4_end:\n self.sections[3] = self.sections[3] + 1\n print('Counted sunspots: {}'.format(self.counted_sunspots))\n print(self.sections)\n cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]\n ), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]\n ), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]\n ), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]\n ), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n colorLow = np.array([0, 0, 90])\n colorHigh = np.array([0, 0, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n image_contours[mask > 0] = 0, 0, 0\n vis = np.concatenate((frame, image_contours), axis=1)\n cv2.imwrite('out/images/{}.png'.format(self.folder), vis)\n\n\nclass Processor:\n entries = []\n\n def load(self):\n folders = os.listdir('data')\n for folder in folders:\n year = int(folder[:4])\n month = 
int(folder[4:6])\n day = int(folder[6:8])\n date = datetime(year, month, day)\n image_name = 'data/{}/image.png'.format(folder)\n txt_name = 'data/{}/data.txt'.format(folder)\n txt_file = open(txt_name, 'r')\n content = txt_file.readlines()\n txt_file.close()\n number = int(content[0])\n print(folder)\n entry = Entry(folder, date, number, image_name)\n entry.process()\n self.entries.append(entry)\n self.entries.sort(key=lambda x: x.date, reverse=False)\n\n def compute(self):\n for section in range(0, 4):\n total = 0\n for entry in self.entries:\n total += entry.sections[section]\n average = float(total) / float(len(self.entries))\n print('-------[Section {}]-------'.format(section + 1))\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n total = 0\n sections_data = [['date', 'section_1', 'section_2', 'section_3',\n 'section_4']]\n numbers_data = [['date', 'reported', 'visible']]\n for entry in self.entries:\n total += entry.counted_sunspots\n sections_data.append([entry.date.strftime('%Y/%m/%d')] + entry.\n sections)\n numbers_data.append([entry.date.strftime('%Y/%m/%d')] + [entry.\n sunspots, entry.counted_sunspots])\n average = float(total) / float(len(self.entries))\n print('---------[TOTAL]---------')\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n csv_file = open('out/sections.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(sections_data)\n csv_file.close()\n csv_file = open('out/numbers.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(numbers_data)\n csv_file.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Scraper:\n <mask token>\n <mask token>\n <mask token>\n\n def scrape_day(self, day):\n self.browser.select('month', day.strftime('%m'))\n self.browser.select('day', day.strftime('%d'))\n self.browser.select('year', day.strftime('%Y'))\n button = self.browser.find_by_name('view')\n button.click()\n text = self.browser.find_by_css('.solarWindText')[4].text\n number = int(text.split(' ')[2].strip())\n link = self.browser.find_link_by_partial_href('images{}/'.format(\n day.strftime('%Y')))['href']\n folder_name = 'data/{}{}{}'.format(day.strftime('%Y'), day.strftime\n ('%m'), day.strftime('%d'))\n image_name = '{}/image.gif'.format(folder_name)\n txt_name = '{}/data.txt'.format(folder_name)\n os.mkdir(folder_name)\n urllib.urlretrieve(link, image_name)\n img = Image.open(image_name)\n img.save('{}/image.png'.format(folder_name), 'png', optimize=True,\n quality=70)\n txt_file = open(txt_name, 'w')\n txt_file.write(str(number))\n txt_file.close()\n print('Downloaded data for {}, sunspots: {}'.format(day.strftime(\n '%m/%d/%Y'), number))\n\n def get_days(self):\n days = []\n for i in range(0, 8):\n base = self.start_date + timedelta(days=7 * i)\n first = base\n second = base + timedelta(days=2)\n third = base + timedelta(days=4)\n days.append(first)\n days.append(second)\n days.append(third)\n return days\n\n\nclass Entry:\n folder = None\n date = None\n sunspots = -1\n image_path = None\n counted_sunspots = 0\n sections = [0, 0, 0, 0]\n\n def nothing(self, *arg):\n pass\n\n def __init__(self, folder, date, sunspots, image_path):\n self.folder = folder\n self.date = date\n self.sunspots = sunspots\n self.image_path = image_path\n\n def process(self):\n frame = cv2.imread(self.image_path)\n height, width, channels = frame.shape\n frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)\n hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)\n colorLow = np.array([0, 90, 80])\n colorHigh = np.array([10, 255, 255])\n mask = cv2.inRange(hsv, colorLow, 
colorHigh)\n result = cv2.bitwise_and(frame, frame, mask=mask)\n image_edged = cv2.Canny(mask, 50, 100)\n image_edged = cv2.dilate(image_edged, None, iterations=1)\n image_edged = cv2.erode(image_edged, None, iterations=1)\n cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n image_contours = cv2.bitwise_not(result)\n self.counted_sunspots = 0\n self.sections = [0, 0, 0, 0]\n section_1_start, section_1_end = 0, height / 4\n section_2_start, section_2_end = height / 4, height / 4 * 2\n section_3_start, section_3_end = height / 4 * 2, height / 4 * 3\n section_4_start, section_4_end = height / 4 * 3, height / 4 * 4\n cv2.line(image_contours, (0, section_1_end), (width, section_1_end),\n (0, 0, 0), 5)\n cv2.line(image_contours, (0, section_2_end), (width, section_2_end),\n (0, 0, 0), 10)\n cv2.line(image_contours, (0, section_3_end), (width, section_3_end),\n (0, 0, 0), 5)\n cv2.circle(image_contours, (width / 2, height / 2), width / 2, (0, \n 0, 0), 5)\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50\n ), font, 2, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(\n self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)\n for c in cnts:\n if cv2.contourArea(c) < 5:\n continue\n (x, y), radius = cv2.minEnclosingCircle(c)\n x = int(x)\n y = int(y)\n radius = int(radius)\n cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)\n self.counted_sunspots = self.counted_sunspots + 1\n if y >= section_1_start and y <= section_1_end:\n self.sections[0] = self.sections[0] + 1\n elif y >= section_2_start and y <= section_2_end:\n self.sections[1] = self.sections[1] + 1\n elif y >= section_3_start and y <= section_3_end:\n self.sections[2] = self.sections[2] + 1\n elif y >= section_4_start and y <= section_4_end:\n self.sections[3] = self.sections[3] + 1\n print('Counted 
sunspots: {}'.format(self.counted_sunspots))\n print(self.sections)\n cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]\n ), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]\n ), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]\n ), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]\n ), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n colorLow = np.array([0, 0, 90])\n colorHigh = np.array([0, 0, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n image_contours[mask > 0] = 0, 0, 0\n vis = np.concatenate((frame, image_contours), axis=1)\n cv2.imwrite('out/images/{}.png'.format(self.folder), vis)\n\n\nclass Processor:\n entries = []\n\n def load(self):\n folders = os.listdir('data')\n for folder in folders:\n year = int(folder[:4])\n month = int(folder[4:6])\n day = int(folder[6:8])\n date = datetime(year, month, day)\n image_name = 'data/{}/image.png'.format(folder)\n txt_name = 'data/{}/data.txt'.format(folder)\n txt_file = open(txt_name, 'r')\n content = txt_file.readlines()\n txt_file.close()\n number = int(content[0])\n print(folder)\n entry = Entry(folder, date, number, image_name)\n entry.process()\n self.entries.append(entry)\n self.entries.sort(key=lambda x: x.date, reverse=False)\n\n def compute(self):\n for section in range(0, 4):\n total = 0\n for entry in self.entries:\n total += entry.sections[section]\n average = float(total) / float(len(self.entries))\n print('-------[Section {}]-------'.format(section + 1))\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n total = 0\n sections_data = [['date', 'section_1', 'section_2', 'section_3',\n 'section_4']]\n numbers_data = [['date', 'reported', 'visible']]\n for entry in self.entries:\n total += entry.counted_sunspots\n 
sections_data.append([entry.date.strftime('%Y/%m/%d')] + entry.\n sections)\n numbers_data.append([entry.date.strftime('%Y/%m/%d')] + [entry.\n sunspots, entry.counted_sunspots])\n average = float(total) / float(len(self.entries))\n print('---------[TOTAL]---------')\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n csv_file = open('out/sections.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(sections_data)\n csv_file.close()\n csv_file = open('out/numbers.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(numbers_data)\n csv_file.close()\n\n\n<mask token>\n",
"step-4": "from splinter import Browser\nfrom time import sleep\nfrom datetime import datetime, timedelta\nimport os, sys\nimport urllib\nimport cv2\nimport numpy as np\nfrom PIL import Image\nimport imutils\nimport csv\n\n\nclass Scraper:\n start_date = datetime(2018, 1, 8)\n url = 'http://spaceweather.com/'\n\n def scrape(self):\n self.browser = Browser('firefox')\n self.browser.driver.set_page_load_timeout(60)\n self.browser.visit(self.url)\n for day in self.get_days():\n self.scrape_day(day)\n\n def scrape_day(self, day):\n self.browser.select('month', day.strftime('%m'))\n self.browser.select('day', day.strftime('%d'))\n self.browser.select('year', day.strftime('%Y'))\n button = self.browser.find_by_name('view')\n button.click()\n text = self.browser.find_by_css('.solarWindText')[4].text\n number = int(text.split(' ')[2].strip())\n link = self.browser.find_link_by_partial_href('images{}/'.format(\n day.strftime('%Y')))['href']\n folder_name = 'data/{}{}{}'.format(day.strftime('%Y'), day.strftime\n ('%m'), day.strftime('%d'))\n image_name = '{}/image.gif'.format(folder_name)\n txt_name = '{}/data.txt'.format(folder_name)\n os.mkdir(folder_name)\n urllib.urlretrieve(link, image_name)\n img = Image.open(image_name)\n img.save('{}/image.png'.format(folder_name), 'png', optimize=True,\n quality=70)\n txt_file = open(txt_name, 'w')\n txt_file.write(str(number))\n txt_file.close()\n print('Downloaded data for {}, sunspots: {}'.format(day.strftime(\n '%m/%d/%Y'), number))\n\n def get_days(self):\n days = []\n for i in range(0, 8):\n base = self.start_date + timedelta(days=7 * i)\n first = base\n second = base + timedelta(days=2)\n third = base + timedelta(days=4)\n days.append(first)\n days.append(second)\n days.append(third)\n return days\n\n\nclass Entry:\n folder = None\n date = None\n sunspots = -1\n image_path = None\n counted_sunspots = 0\n sections = [0, 0, 0, 0]\n\n def nothing(self, *arg):\n pass\n\n def __init__(self, folder, date, sunspots, image_path):\n 
self.folder = folder\n self.date = date\n self.sunspots = sunspots\n self.image_path = image_path\n\n def process(self):\n frame = cv2.imread(self.image_path)\n height, width, channels = frame.shape\n frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)\n hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)\n colorLow = np.array([0, 90, 80])\n colorHigh = np.array([10, 255, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n result = cv2.bitwise_and(frame, frame, mask=mask)\n image_edged = cv2.Canny(mask, 50, 100)\n image_edged = cv2.dilate(image_edged, None, iterations=1)\n image_edged = cv2.erode(image_edged, None, iterations=1)\n cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n image_contours = cv2.bitwise_not(result)\n self.counted_sunspots = 0\n self.sections = [0, 0, 0, 0]\n section_1_start, section_1_end = 0, height / 4\n section_2_start, section_2_end = height / 4, height / 4 * 2\n section_3_start, section_3_end = height / 4 * 2, height / 4 * 3\n section_4_start, section_4_end = height / 4 * 3, height / 4 * 4\n cv2.line(image_contours, (0, section_1_end), (width, section_1_end),\n (0, 0, 0), 5)\n cv2.line(image_contours, (0, section_2_end), (width, section_2_end),\n (0, 0, 0), 10)\n cv2.line(image_contours, (0, section_3_end), (width, section_3_end),\n (0, 0, 0), 5)\n cv2.circle(image_contours, (width / 2, height / 2), width / 2, (0, \n 0, 0), 5)\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50\n ), font, 2, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(\n self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)\n for c in cnts:\n if cv2.contourArea(c) < 5:\n continue\n (x, y), radius = cv2.minEnclosingCircle(c)\n x = int(x)\n y = int(y)\n radius = int(radius)\n cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)\n self.counted_sunspots = 
self.counted_sunspots + 1\n if y >= section_1_start and y <= section_1_end:\n self.sections[0] = self.sections[0] + 1\n elif y >= section_2_start and y <= section_2_end:\n self.sections[1] = self.sections[1] + 1\n elif y >= section_3_start and y <= section_3_end:\n self.sections[2] = self.sections[2] + 1\n elif y >= section_4_start and y <= section_4_end:\n self.sections[3] = self.sections[3] + 1\n print('Counted sunspots: {}'.format(self.counted_sunspots))\n print(self.sections)\n cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]\n ), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]\n ), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]\n ), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]\n ), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n colorLow = np.array([0, 0, 90])\n colorHigh = np.array([0, 0, 255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n image_contours[mask > 0] = 0, 0, 0\n vis = np.concatenate((frame, image_contours), axis=1)\n cv2.imwrite('out/images/{}.png'.format(self.folder), vis)\n\n\nclass Processor:\n entries = []\n\n def load(self):\n folders = os.listdir('data')\n for folder in folders:\n year = int(folder[:4])\n month = int(folder[4:6])\n day = int(folder[6:8])\n date = datetime(year, month, day)\n image_name = 'data/{}/image.png'.format(folder)\n txt_name = 'data/{}/data.txt'.format(folder)\n txt_file = open(txt_name, 'r')\n content = txt_file.readlines()\n txt_file.close()\n number = int(content[0])\n print(folder)\n entry = Entry(folder, date, number, image_name)\n entry.process()\n self.entries.append(entry)\n self.entries.sort(key=lambda x: x.date, reverse=False)\n\n def compute(self):\n for section in range(0, 4):\n total = 0\n for entry in self.entries:\n total += entry.sections[section]\n average = 
float(total) / float(len(self.entries))\n print('-------[Section {}]-------'.format(section + 1))\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n total = 0\n sections_data = [['date', 'section_1', 'section_2', 'section_3',\n 'section_4']]\n numbers_data = [['date', 'reported', 'visible']]\n for entry in self.entries:\n total += entry.counted_sunspots\n sections_data.append([entry.date.strftime('%Y/%m/%d')] + entry.\n sections)\n numbers_data.append([entry.date.strftime('%Y/%m/%d')] + [entry.\n sunspots, entry.counted_sunspots])\n average = float(total) / float(len(self.entries))\n print('---------[TOTAL]---------')\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n csv_file = open('out/sections.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(sections_data)\n csv_file.close()\n csv_file = open('out/numbers.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(numbers_data)\n csv_file.close()\n\n\nscraper = Scraper()\nscraper.scrape()\nprocessor = Processor()\nprocessor.load()\nprocessor.compute()\n",
"step-5": "from splinter import Browser\nfrom time import sleep\nfrom datetime import datetime, timedelta\nimport os, sys\nimport urllib\nimport cv2\nimport numpy as np\nfrom PIL import Image\nimport imutils\nimport csv\n\nclass Scraper():\n start_date = datetime(2018, 1, 8)\n url = 'http://spaceweather.com/'\n\n def scrape(self):\n self.browser = Browser('firefox')\n self.browser.driver.set_page_load_timeout(60)\n self.browser.visit(self.url)\n for day in self.get_days():\n self.scrape_day(day)\n\n def scrape_day(self, day):\n self.browser.select('month', day.strftime('%m'))\n self.browser.select('day', day.strftime('%d'))\n self.browser.select('year', day.strftime('%Y'))\n button = self.browser.find_by_name('view')\n button.click()\n text = self.browser.find_by_css('.solarWindText')[4].text\n number = int(text.split(' ')[2].strip())\n link = self.browser.find_link_by_partial_href('images{}/'.format(day.strftime('%Y')))['href']\n folder_name = \"data/{}{}{}\".format(day.strftime('%Y'), day.strftime('%m'), day.strftime('%d'))\n image_name = \"{}/image.gif\".format(folder_name)\n txt_name = \"{}/data.txt\".format(folder_name)\n os.mkdir(folder_name)\n urllib.urlretrieve(link, image_name)\n img = Image.open(image_name)\n img.save(\"{}/image.png\".format(folder_name), 'png', optimize=True, quality=70)\n txt_file = open(txt_name, 'w')\n txt_file.write(str(number))\n txt_file.close()\n print(\"Downloaded data for {}, sunspots: {}\".format(day.strftime('%m/%d/%Y'), number))\n\n\n def get_days(self):\n days = []\n for i in range(0, 8):\n base = self.start_date + timedelta(days=7 * i)\n first = base\n second = base + timedelta(days=2)\n third = base + timedelta(days=4)\n days.append(first)\n days.append(second)\n days.append(third)\n return days\n\nclass Entry():\n folder = None\n date = None\n sunspots = -1\n image_path = None\n counted_sunspots = 0\n sections = [0, 0, 0, 0]\n\n def nothing(self, *arg):\n pass\n\n def __init__(self, folder, date, sunspots, image_path):\n 
self.folder = folder\n self.date = date\n self.sunspots = sunspots\n self.image_path = image_path\n\n def process(self):\n frame = cv2.imread(self.image_path)\n height, width, channels = frame.shape\n frameBGR = cv2.GaussianBlur(frame, (1, 1), 0)\n hsv = cv2.cvtColor(frameBGR, cv2.COLOR_BGR2HSV)\n \n colorLow = np.array([0,90,80])\n colorHigh = np.array([10,255,255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n result = cv2.bitwise_and(frame, frame, mask=mask)\n image_edged = cv2.Canny(mask, 50, 100)\n image_edged = cv2.dilate(image_edged, None, iterations=1)\n image_edged = cv2.erode(image_edged, None, iterations=1)\n cnts = cv2.findContours(image_edged.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n image_contours = cv2.bitwise_not(result)\n\n self.counted_sunspots = 0\n self.sections = [0, 0, 0, 0]\n section_1_start, section_1_end = 0, height/4\n section_2_start, section_2_end = height/4, height/4 * 2\n section_3_start, section_3_end = height/4 * 2, height/4 * 3\n section_4_start, section_4_end = height/4 * 3, height/4 * 4\n cv2.line(image_contours, (0, section_1_end), (width, section_1_end), (0, 0, 0), 5)\n cv2.line(image_contours, (0, section_2_end), (width, section_2_end), (0, 0, 0), 10)\n cv2.line(image_contours, (0, section_3_end), (width, section_3_end), (0, 0, 0), 5)\n cv2.circle(image_contours, (width/2, height/2), width/2, (0, 0, 0), 5)\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(image_contours, self.date.strftime('%a %b %d'), (20, 50), font, 2, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, self.date.strftime('SSN: {}'.format(self.sunspots)), (20, 100), font, 1.5, (0, 0, 0), 2, cv2.LINE_AA)\n\n for c in cnts:\n if cv2.contourArea(c) < 5:\n continue\n (x,y),radius = cv2.minEnclosingCircle(c)\n x = int(x)\n y = int(y)\n radius = int(radius)\n cv2.circle(image_contours, (x, y), radius, (100, 100, 255), -1)\n\n self.counted_sunspots = self.counted_sunspots + 1\n if y >= 
section_1_start and y <= section_1_end:\n #cv2.putText(image_contours, '1', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)\n self.sections[0] = self.sections[0] + 1\n elif y >= section_2_start and y <= section_2_end:\n #cv2.putText(image_contours, '2', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)\n self.sections[1] = self.sections[1] + 1\n elif y >= section_3_start and y <= section_3_end:\n #cv2.putText(image_contours, '3', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)\n self.sections[2] = self.sections[2] + 1\n elif y >= section_4_start and y <= section_4_end:\n #cv2.putText(image_contours, '4', (x, y - 10), font, 0.8, (100, 100, 255), 2, cv2.LINE_AA)\n self.sections[3] = self.sections[3] + 1\n print('Counted sunspots: {}'.format(self.counted_sunspots))\n print(self.sections)\n cv2.putText(image_contours, 'Section 1: {}'.format(self.sections[0]), (20, 130), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 2: {}'.format(self.sections[1]), (20, 160), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 3: {}'.format(self.sections[2]), (20, 190), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n cv2.putText(image_contours, 'Section 4: {}'.format(self.sections[3]), (20, 220), font, 1, (0, 0, 0), 2, cv2.LINE_AA)\n\n colorLow = np.array([0,0,90])\n colorHigh = np.array([0,0,255])\n mask = cv2.inRange(hsv, colorLow, colorHigh)\n image_contours[mask > 0] = (0, 0, 0)\n vis = np.concatenate((frame, image_contours), axis=1)\n\n cv2.imwrite('out/images/{}.png'.format(self.folder), vis)\n\nclass Processor():\n entries = []\n \n def load(self):\n folders = os.listdir(\"data\")\n for folder in folders:\n year = int(folder[:4])\n month = int(folder[4:6])\n day = int(folder[6:8])\n date = datetime(year, month, day)\n image_name = \"data/{}/image.png\".format(folder)\n txt_name = \"data/{}/data.txt\".format(folder)\n txt_file = open(txt_name, 'r')\n content = txt_file.readlines()\n txt_file.close()\n number = 
int(content[0])\n print(folder)\n entry = Entry(folder, date, number, image_name)\n entry.process()\n self.entries.append(entry)\n self.entries.sort(key=lambda x: x.date, reverse=False)\n\n def compute(self):\n for section in range(0, 4):\n total = 0\n for entry in self.entries:\n total += entry.sections[section]\n average = float(total) / float(len(self.entries))\n print('-------[Section {}]-------'.format(section + 1))\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n total = 0\n sections_data = [[\"date\", \"section_1\", \"section_2\", \"section_3\", \"section_4\"]]\n numbers_data = [[\"date\", \"reported\", \"visible\"]]\n for entry in self.entries:\n total += entry.counted_sunspots\n sections_data.append([entry.date.strftime(\"%Y/%m/%d\")] + entry.sections)\n numbers_data.append([entry.date.strftime(\"%Y/%m/%d\")] + [entry.sunspots, entry.counted_sunspots])\n average = float(total) / float(len(self.entries))\n print('---------[TOTAL]---------')\n print('Total: {}'.format(total))\n print('Average: {}'.format(average))\n csv_file = open('out/sections.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(sections_data)\n csv_file.close()\n csv_file = open('out/numbers.csv', 'w')\n writer = csv.writer(csv_file)\n writer.writerows(numbers_data)\n csv_file.close()\n\nscraper = Scraper()\nscraper.scrape()\nprocessor = Processor()\nprocessor.load()\nprocessor.compute()",
"step-ids": [
10,
11,
12,
17,
18
]
}
|
[
10,
11,
12,
17,
18
] |
# Exercício Python 20: O mesmo professor do desafio 19 quer sortear a ordem de apresentação de trabalhos dos alunos. Faça um programa que leia o nome dos quatro alunos e mostre a ordem sorteada.
import random
aluno1 = input('Primeiro aluno: ')
aluno2 = input('Segundo aluno: ')
aluno3 = input('Terceiro aluno: ')
aluno4 = input('Quarto aluno: ')
listaAlunos = [aluno1, aluno2, aluno3, aluno4]
# o shuffle embaralha os dados da lista
random.shuffle(listaAlunos)
print('A ordem de apresentação será ', listaAlunos)
|
normal
|
{
"blob_id": "445bb8ad8dadd207a3546f4623de583fc47a2910",
"index": 2180,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nrandom.shuffle(listaAlunos)\nprint('A ordem de apresentação será ', listaAlunos)\n",
"step-3": "<mask token>\naluno1 = input('Primeiro aluno: ')\naluno2 = input('Segundo aluno: ')\naluno3 = input('Terceiro aluno: ')\naluno4 = input('Quarto aluno: ')\nlistaAlunos = [aluno1, aluno2, aluno3, aluno4]\nrandom.shuffle(listaAlunos)\nprint('A ordem de apresentação será ', listaAlunos)\n",
"step-4": "import random\naluno1 = input('Primeiro aluno: ')\naluno2 = input('Segundo aluno: ')\naluno3 = input('Terceiro aluno: ')\naluno4 = input('Quarto aluno: ')\nlistaAlunos = [aluno1, aluno2, aluno3, aluno4]\nrandom.shuffle(listaAlunos)\nprint('A ordem de apresentação será ', listaAlunos)\n",
"step-5": "# Exercício Python 20: O mesmo professor do desafio 19 quer sortear a ordem de apresentação de trabalhos dos alunos. Faça um programa que leia o nome dos quatro alunos e mostre a ordem sorteada.\r\nimport random\r\n\r\naluno1 = input('Primeiro aluno: ')\r\naluno2 = input('Segundo aluno: ')\r\naluno3 = input('Terceiro aluno: ')\r\naluno4 = input('Quarto aluno: ')\r\nlistaAlunos = [aluno1, aluno2, aluno3, aluno4]\r\n# o shuffle embaralha os dados da lista\r\nrandom.shuffle(listaAlunos)\r\nprint('A ordem de apresentação será ', listaAlunos)\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(bytes(inp))
<|reserved_special_token_1|>
inp = int(input())
print(bytes(inp))
|
flexible
|
{
"blob_id": "63a2c8b0c2eba2d5f9f82352196ef2b67d4d63b5",
"index": 3838,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(bytes(inp))\n",
"step-3": "inp = int(input())\nprint(bytes(inp))\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import time,pickle
from CNN_GPU.CNN_C_Wrapper import *
from pathlib import Path
FSIGMOIG = 0
FTANH = 2
FRELU = 4
REQUEST_INPUT = 0
REQUEST_GRAD_INPUT = 1
REQUEST_OUTPUT = 2
REQUEST_WEIGTH = 3
class CNN:
def __init__(self, inputSize, hitLearn=.1, momentum=.9, weigthDecay=.5, multip=1.0):
file = '%s/%s' % (DIR_LIBRARY, 'gpu_function.cl')
file = file.encode('utf-8')
self.cnn = c_Pointer()
clib.createCnnWrapper(c.addressof(self.cnn), c.create_string_buffer(file),
hitLearn, momentum, weigthDecay, multip, inputSize[0], inputSize[1], inputSize[2])
clib.initRandom(time.time_ns())
def __del__(self):
clib.releaseCnnWrapper(c.addressof(self.cnn))
print('end')
def addConvLayer(self, passo, tamanhoFitro, numeroFiltro):
clib.CnnAddConvLayer(self.cnn.p, passo, tamanhoFitro, numeroFiltro)
def addPoolLayer(self, passo, tamanhoFitro):
clib.CnnAddPoolLayer(self.cnn.p, passo, tamanhoFitro)
def addReluLayer(self):
clib.CnnAddReluLayer(self.cnn.p)
def addDropOutLayer(self, pontoAtivacao, seed):
clib.CnnAddDropOutLayer(self.cnn.p, pontoAtivacao, seed)
def addFullConnectLayer(self, saida, funcaoAtivacao):
clib.CnnAddFullConnectLayer(self.cnn.p, saida, funcaoAtivacao)
def predict(self, input):
tinput = self.createInp(*input)
clib.CnnCall(self.cnn.p, tinput)
def learn(self, target):
ttarg = self.targ(*target)
clib.CnnLearn(self.cnn.p, ttarg)
def getData(self, layer, request, nfilter=0):
size = self.getSizeData(layer, request)
if size is None: return None
data = c.c_double * (size[0] * size[1] * size[2])
data = data(0)
err = clib.CnnGetTensorData(self.cnn.p, layer, request, nfilter, data)
if err < 0:
self.lastERROR = err
return None
return list(data)
def getSizeData(self, layer, request):
inx, iny, inz, n = c.c_int(0), c.c_int(0), c.c_int(0), c.c_int(0)
err = clib.CnnGetSize(self.cnn.p, layer, request, c.addressof(inx), c.addressof(iny), c.addressof(inz),
c.addressof(n))
if err < 0:
self.lastERROR = err
return None
return inx.value, iny.value, inz.value, n.value
@property
def output(self):
err = clib.CnnGetTensorData(self.cnn.p, -1, REQUEST_OUTPUT, 0, self.out)
if err < 0:
self.lastERROR = err
return None
return list(self.out)
def compile(self):
if self.error: raise Exception("ERROR")
inx, iny, inz = c.c_int(0), c.c_int(0), c.c_int(0)
err = clib.CnnGetSize(self.cnn.p, 0, REQUEST_INPUT, c.addressof(inx), c.addressof(iny), c.addressof(inz),
c.cast(0, c.c_void_p))
if err != 0: raise Exception('Error when request input size', err)
self.createInp = c.c_double * (inx.value * iny.value * inz.value)
err = clib.CnnGetSize(self.cnn.p, -1, REQUEST_OUTPUT, c.addressof(inx), c.addressof(iny), c.addressof(inz),
c.cast(0, c.c_void_p))
if err != 0: raise Exception('Error when request output size', err)
self.out = c.c_double * (inx.value * iny.value * inz.value)
self.targ = self.out
self.out = self.out(0)
def info(self):
clib.CnnInfo(self.cnn.p)
def save(self, fileName:str):
filedesc = Path(fileName).with_suffix('.cdc')
self.salveCnnDescriptor(filedesc)
fileName = fileName.encode('utf-8')
return clib.CnnSaveInFile(self.cnn.p, c.create_string_buffer(fileName))
@staticmethod
def load(fileName):
self = CNN([2,2,1])
fileName = fileName.encode('utf-8')
clib.CnnLoadByFile(self.cnn.p, c.create_string_buffer(fileName))
self.compile()
return self
@property
def error(self):
return clib.getCnnError(self.cnn.p)
@property
def errorMsg(self):
buff = c.create_string_buffer(''.encode('utf-8'),255)
clib.getCnnErrormsg(self.cnn.p,buff)
return buff.value.decode('utf-8')
def salveCnnDescriptor(self,file):
desc_c = c_Pointer()
clib.generateDescriptor(c.addressof(desc_c),self.cnn.p)
msg = c.cast(desc_c.p,c.c_char_p)
msg = msg.value.decode('utf-8')
clib.freeP(desc_c.p)
desc = eval(msg)
with open(file,'wb') as f:
pickle.dump(desc,f)
# AUXILIAR FUNCTION
def getOutputAsIndexMax(self):
ans = clib.CnnGetIndexMax(self.cnn.p)
return ans
def normalizeVector(self,vector:list,maxOutput,minOutput):
out_TYPE =c.c_double * len(vector)
inp = out_TYPE(*vector)
out = out_TYPE()
clib.normalizeGPU(self.cnn.p,inp,out,len(vector),maxOutput,minOutput)
return list(out)
def normalizeVectorKnowedSpace(self,vector:list,maxInput,minInput,maxOutput,minOutput):
out_TYPE =c.c_double * len(vector)
tmp_inp = out_TYPE(*vector)
tmp_out = out_TYPE(*vector)
clib.normalizeGPUSpaceKnow(self.cnn.p,tmp_inp,tmp_out,len(vector),maxInput,minInput,maxOutput,minOutput)
return list(tmp_out)
def getOutPutAsPPM(self):
p = c_Pointer()
h = c.c_size_t()
w = c.c_size_t()
clib.Py_getCnnOutPutAsPPM(self.cnn.p, c.addressof(p), c.addressof(h), c.addressof(w))
h = h.value
w = w.value
out = (c.c_ubyte*(w*h))()
c.memmove(out,p.p,w*h)
a = bytes(out)
clib.freeP(p.p)
return (h,w,a)
|
normal
|
{
"blob_id": "32db21ed7f57f29260d70513d8c34de53adf12d7",
"index": 5740,
"step-1": "<mask token>\n\n\nclass CNN:\n\n def __init__(self, inputSize, hitLearn=0.1, momentum=0.9, weigthDecay=\n 0.5, multip=1.0):\n file = '%s/%s' % (DIR_LIBRARY, 'gpu_function.cl')\n file = file.encode('utf-8')\n self.cnn = c_Pointer()\n clib.createCnnWrapper(c.addressof(self.cnn), c.create_string_buffer\n (file), hitLearn, momentum, weigthDecay, multip, inputSize[0],\n inputSize[1], inputSize[2])\n clib.initRandom(time.time_ns())\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def learn(self, target):\n ttarg = self.targ(*target)\n clib.CnnLearn(self.cnn.p, ttarg)\n\n def getData(self, layer, request, nfilter=0):\n size = self.getSizeData(layer, request)\n if size is None:\n return None\n data = c.c_double * (size[0] * size[1] * size[2])\n data = data(0)\n err = clib.CnnGetTensorData(self.cnn.p, layer, request, nfilter, data)\n if err < 0:\n self.lastERROR = err\n return None\n return list(data)\n <mask token>\n <mask token>\n <mask token>\n\n def info(self):\n clib.CnnInfo(self.cnn.p)\n\n def save(self, fileName: str):\n filedesc = Path(fileName).with_suffix('.cdc')\n self.salveCnnDescriptor(filedesc)\n fileName = fileName.encode('utf-8')\n return clib.CnnSaveInFile(self.cnn.p, c.create_string_buffer(fileName))\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def normalizeVectorKnowedSpace(self, vector: list, maxInput, minInput,\n maxOutput, minOutput):\n out_TYPE = c.c_double * len(vector)\n tmp_inp = out_TYPE(*vector)\n tmp_out = out_TYPE(*vector)\n clib.normalizeGPUSpaceKnow(self.cnn.p, tmp_inp, tmp_out, len(vector\n ), maxInput, minInput, maxOutput, minOutput)\n return list(tmp_out)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass CNN:\n\n def __init__(self, inputSize, hitLearn=0.1, momentum=0.9, weigthDecay=\n 0.5, multip=1.0):\n file = '%s/%s' % (DIR_LIBRARY, 'gpu_function.cl')\n file = file.encode('utf-8')\n self.cnn = c_Pointer()\n clib.createCnnWrapper(c.addressof(self.cnn), c.create_string_buffer\n (file), hitLearn, momentum, weigthDecay, multip, inputSize[0],\n inputSize[1], inputSize[2])\n clib.initRandom(time.time_ns())\n <mask token>\n <mask token>\n\n def addPoolLayer(self, passo, tamanhoFitro):\n clib.CnnAddPoolLayer(self.cnn.p, passo, tamanhoFitro)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def learn(self, target):\n ttarg = self.targ(*target)\n clib.CnnLearn(self.cnn.p, ttarg)\n\n def getData(self, layer, request, nfilter=0):\n size = self.getSizeData(layer, request)\n if size is None:\n return None\n data = c.c_double * (size[0] * size[1] * size[2])\n data = data(0)\n err = clib.CnnGetTensorData(self.cnn.p, layer, request, nfilter, data)\n if err < 0:\n self.lastERROR = err\n return None\n return list(data)\n <mask token>\n <mask token>\n <mask token>\n\n def info(self):\n clib.CnnInfo(self.cnn.p)\n\n def save(self, fileName: str):\n filedesc = Path(fileName).with_suffix('.cdc')\n self.salveCnnDescriptor(filedesc)\n fileName = fileName.encode('utf-8')\n return clib.CnnSaveInFile(self.cnn.p, c.create_string_buffer(fileName))\n\n @staticmethod\n def load(fileName):\n self = CNN([2, 2, 1])\n fileName = fileName.encode('utf-8')\n clib.CnnLoadByFile(self.cnn.p, c.create_string_buffer(fileName))\n self.compile()\n return self\n\n @property\n def error(self):\n return clib.getCnnError(self.cnn.p)\n <mask token>\n\n def salveCnnDescriptor(self, file):\n desc_c = c_Pointer()\n clib.generateDescriptor(c.addressof(desc_c), self.cnn.p)\n msg = c.cast(desc_c.p, c.c_char_p)\n msg = msg.value.decode('utf-8')\n clib.freeP(desc_c.p)\n desc = eval(msg)\n with open(file, 'wb') as f:\n pickle.dump(desc, f)\n <mask token>\n <mask token>\n\n 
def normalizeVectorKnowedSpace(self, vector: list, maxInput, minInput,\n maxOutput, minOutput):\n out_TYPE = c.c_double * len(vector)\n tmp_inp = out_TYPE(*vector)\n tmp_out = out_TYPE(*vector)\n clib.normalizeGPUSpaceKnow(self.cnn.p, tmp_inp, tmp_out, len(vector\n ), maxInput, minInput, maxOutput, minOutput)\n return list(tmp_out)\n\n def getOutPutAsPPM(self):\n p = c_Pointer()\n h = c.c_size_t()\n w = c.c_size_t()\n clib.Py_getCnnOutPutAsPPM(self.cnn.p, c.addressof(p), c.addressof(h\n ), c.addressof(w))\n h = h.value\n w = w.value\n out = (c.c_ubyte * (w * h))()\n c.memmove(out, p.p, w * h)\n a = bytes(out)\n clib.freeP(p.p)\n return h, w, a\n",
"step-3": "<mask token>\n\n\nclass CNN:\n\n def __init__(self, inputSize, hitLearn=0.1, momentum=0.9, weigthDecay=\n 0.5, multip=1.0):\n file = '%s/%s' % (DIR_LIBRARY, 'gpu_function.cl')\n file = file.encode('utf-8')\n self.cnn = c_Pointer()\n clib.createCnnWrapper(c.addressof(self.cnn), c.create_string_buffer\n (file), hitLearn, momentum, weigthDecay, multip, inputSize[0],\n inputSize[1], inputSize[2])\n clib.initRandom(time.time_ns())\n\n def __del__(self):\n clib.releaseCnnWrapper(c.addressof(self.cnn))\n print('end')\n\n def addConvLayer(self, passo, tamanhoFitro, numeroFiltro):\n clib.CnnAddConvLayer(self.cnn.p, passo, tamanhoFitro, numeroFiltro)\n\n def addPoolLayer(self, passo, tamanhoFitro):\n clib.CnnAddPoolLayer(self.cnn.p, passo, tamanhoFitro)\n\n def addReluLayer(self):\n clib.CnnAddReluLayer(self.cnn.p)\n <mask token>\n <mask token>\n <mask token>\n\n def learn(self, target):\n ttarg = self.targ(*target)\n clib.CnnLearn(self.cnn.p, ttarg)\n\n def getData(self, layer, request, nfilter=0):\n size = self.getSizeData(layer, request)\n if size is None:\n return None\n data = c.c_double * (size[0] * size[1] * size[2])\n data = data(0)\n err = clib.CnnGetTensorData(self.cnn.p, layer, request, nfilter, data)\n if err < 0:\n self.lastERROR = err\n return None\n return list(data)\n\n def getSizeData(self, layer, request):\n inx, iny, inz, n = c.c_int(0), c.c_int(0), c.c_int(0), c.c_int(0)\n err = clib.CnnGetSize(self.cnn.p, layer, request, c.addressof(inx),\n c.addressof(iny), c.addressof(inz), c.addressof(n))\n if err < 0:\n self.lastERROR = err\n return None\n return inx.value, iny.value, inz.value, n.value\n\n @property\n def output(self):\n err = clib.CnnGetTensorData(self.cnn.p, -1, REQUEST_OUTPUT, 0, self.out\n )\n if err < 0:\n self.lastERROR = err\n return None\n return list(self.out)\n\n def compile(self):\n if self.error:\n raise Exception('ERROR')\n inx, iny, inz = c.c_int(0), c.c_int(0), c.c_int(0)\n err = clib.CnnGetSize(self.cnn.p, 0, 
REQUEST_INPUT, c.addressof(inx\n ), c.addressof(iny), c.addressof(inz), c.cast(0, c.c_void_p))\n if err != 0:\n raise Exception('Error when request input size', err)\n self.createInp = c.c_double * (inx.value * iny.value * inz.value)\n err = clib.CnnGetSize(self.cnn.p, -1, REQUEST_OUTPUT, c.addressof(\n inx), c.addressof(iny), c.addressof(inz), c.cast(0, c.c_void_p))\n if err != 0:\n raise Exception('Error when request output size', err)\n self.out = c.c_double * (inx.value * iny.value * inz.value)\n self.targ = self.out\n self.out = self.out(0)\n\n def info(self):\n clib.CnnInfo(self.cnn.p)\n\n def save(self, fileName: str):\n filedesc = Path(fileName).with_suffix('.cdc')\n self.salveCnnDescriptor(filedesc)\n fileName = fileName.encode('utf-8')\n return clib.CnnSaveInFile(self.cnn.p, c.create_string_buffer(fileName))\n\n @staticmethod\n def load(fileName):\n self = CNN([2, 2, 1])\n fileName = fileName.encode('utf-8')\n clib.CnnLoadByFile(self.cnn.p, c.create_string_buffer(fileName))\n self.compile()\n return self\n\n @property\n def error(self):\n return clib.getCnnError(self.cnn.p)\n\n @property\n def errorMsg(self):\n buff = c.create_string_buffer(''.encode('utf-8'), 255)\n clib.getCnnErrormsg(self.cnn.p, buff)\n return buff.value.decode('utf-8')\n\n def salveCnnDescriptor(self, file):\n desc_c = c_Pointer()\n clib.generateDescriptor(c.addressof(desc_c), self.cnn.p)\n msg = c.cast(desc_c.p, c.c_char_p)\n msg = msg.value.decode('utf-8')\n clib.freeP(desc_c.p)\n desc = eval(msg)\n with open(file, 'wb') as f:\n pickle.dump(desc, f)\n\n def getOutputAsIndexMax(self):\n ans = clib.CnnGetIndexMax(self.cnn.p)\n return ans\n <mask token>\n\n def normalizeVectorKnowedSpace(self, vector: list, maxInput, minInput,\n maxOutput, minOutput):\n out_TYPE = c.c_double * len(vector)\n tmp_inp = out_TYPE(*vector)\n tmp_out = out_TYPE(*vector)\n clib.normalizeGPUSpaceKnow(self.cnn.p, tmp_inp, tmp_out, len(vector\n ), maxInput, minInput, maxOutput, minOutput)\n return 
list(tmp_out)\n\n def getOutPutAsPPM(self):\n p = c_Pointer()\n h = c.c_size_t()\n w = c.c_size_t()\n clib.Py_getCnnOutPutAsPPM(self.cnn.p, c.addressof(p), c.addressof(h\n ), c.addressof(w))\n h = h.value\n w = w.value\n out = (c.c_ubyte * (w * h))()\n c.memmove(out, p.p, w * h)\n a = bytes(out)\n clib.freeP(p.p)\n return h, w, a\n",
"step-4": "<mask token>\n\n\nclass CNN:\n\n def __init__(self, inputSize, hitLearn=0.1, momentum=0.9, weigthDecay=\n 0.5, multip=1.0):\n file = '%s/%s' % (DIR_LIBRARY, 'gpu_function.cl')\n file = file.encode('utf-8')\n self.cnn = c_Pointer()\n clib.createCnnWrapper(c.addressof(self.cnn), c.create_string_buffer\n (file), hitLearn, momentum, weigthDecay, multip, inputSize[0],\n inputSize[1], inputSize[2])\n clib.initRandom(time.time_ns())\n\n def __del__(self):\n clib.releaseCnnWrapper(c.addressof(self.cnn))\n print('end')\n\n def addConvLayer(self, passo, tamanhoFitro, numeroFiltro):\n clib.CnnAddConvLayer(self.cnn.p, passo, tamanhoFitro, numeroFiltro)\n\n def addPoolLayer(self, passo, tamanhoFitro):\n clib.CnnAddPoolLayer(self.cnn.p, passo, tamanhoFitro)\n\n def addReluLayer(self):\n clib.CnnAddReluLayer(self.cnn.p)\n <mask token>\n <mask token>\n\n def predict(self, input):\n tinput = self.createInp(*input)\n clib.CnnCall(self.cnn.p, tinput)\n\n def learn(self, target):\n ttarg = self.targ(*target)\n clib.CnnLearn(self.cnn.p, ttarg)\n\n def getData(self, layer, request, nfilter=0):\n size = self.getSizeData(layer, request)\n if size is None:\n return None\n data = c.c_double * (size[0] * size[1] * size[2])\n data = data(0)\n err = clib.CnnGetTensorData(self.cnn.p, layer, request, nfilter, data)\n if err < 0:\n self.lastERROR = err\n return None\n return list(data)\n\n def getSizeData(self, layer, request):\n inx, iny, inz, n = c.c_int(0), c.c_int(0), c.c_int(0), c.c_int(0)\n err = clib.CnnGetSize(self.cnn.p, layer, request, c.addressof(inx),\n c.addressof(iny), c.addressof(inz), c.addressof(n))\n if err < 0:\n self.lastERROR = err\n return None\n return inx.value, iny.value, inz.value, n.value\n\n @property\n def output(self):\n err = clib.CnnGetTensorData(self.cnn.p, -1, REQUEST_OUTPUT, 0, self.out\n )\n if err < 0:\n self.lastERROR = err\n return None\n return list(self.out)\n\n def compile(self):\n if self.error:\n raise Exception('ERROR')\n inx, iny, inz = 
c.c_int(0), c.c_int(0), c.c_int(0)\n err = clib.CnnGetSize(self.cnn.p, 0, REQUEST_INPUT, c.addressof(inx\n ), c.addressof(iny), c.addressof(inz), c.cast(0, c.c_void_p))\n if err != 0:\n raise Exception('Error when request input size', err)\n self.createInp = c.c_double * (inx.value * iny.value * inz.value)\n err = clib.CnnGetSize(self.cnn.p, -1, REQUEST_OUTPUT, c.addressof(\n inx), c.addressof(iny), c.addressof(inz), c.cast(0, c.c_void_p))\n if err != 0:\n raise Exception('Error when request output size', err)\n self.out = c.c_double * (inx.value * iny.value * inz.value)\n self.targ = self.out\n self.out = self.out(0)\n\n def info(self):\n clib.CnnInfo(self.cnn.p)\n\n def save(self, fileName: str):\n filedesc = Path(fileName).with_suffix('.cdc')\n self.salveCnnDescriptor(filedesc)\n fileName = fileName.encode('utf-8')\n return clib.CnnSaveInFile(self.cnn.p, c.create_string_buffer(fileName))\n\n @staticmethod\n def load(fileName):\n self = CNN([2, 2, 1])\n fileName = fileName.encode('utf-8')\n clib.CnnLoadByFile(self.cnn.p, c.create_string_buffer(fileName))\n self.compile()\n return self\n\n @property\n def error(self):\n return clib.getCnnError(self.cnn.p)\n\n @property\n def errorMsg(self):\n buff = c.create_string_buffer(''.encode('utf-8'), 255)\n clib.getCnnErrormsg(self.cnn.p, buff)\n return buff.value.decode('utf-8')\n\n def salveCnnDescriptor(self, file):\n desc_c = c_Pointer()\n clib.generateDescriptor(c.addressof(desc_c), self.cnn.p)\n msg = c.cast(desc_c.p, c.c_char_p)\n msg = msg.value.decode('utf-8')\n clib.freeP(desc_c.p)\n desc = eval(msg)\n with open(file, 'wb') as f:\n pickle.dump(desc, f)\n\n def getOutputAsIndexMax(self):\n ans = clib.CnnGetIndexMax(self.cnn.p)\n return ans\n <mask token>\n\n def normalizeVectorKnowedSpace(self, vector: list, maxInput, minInput,\n maxOutput, minOutput):\n out_TYPE = c.c_double * len(vector)\n tmp_inp = out_TYPE(*vector)\n tmp_out = out_TYPE(*vector)\n clib.normalizeGPUSpaceKnow(self.cnn.p, tmp_inp, tmp_out, 
len(vector\n ), maxInput, minInput, maxOutput, minOutput)\n return list(tmp_out)\n\n def getOutPutAsPPM(self):\n p = c_Pointer()\n h = c.c_size_t()\n w = c.c_size_t()\n clib.Py_getCnnOutPutAsPPM(self.cnn.p, c.addressof(p), c.addressof(h\n ), c.addressof(w))\n h = h.value\n w = w.value\n out = (c.c_ubyte * (w * h))()\n c.memmove(out, p.p, w * h)\n a = bytes(out)\n clib.freeP(p.p)\n return h, w, a\n",
"step-5": "import time,pickle\nfrom CNN_GPU.CNN_C_Wrapper import *\nfrom pathlib import Path\n\nFSIGMOIG = 0\nFTANH = 2\nFRELU = 4\n\nREQUEST_INPUT = 0\nREQUEST_GRAD_INPUT = 1\nREQUEST_OUTPUT = 2\nREQUEST_WEIGTH = 3\n\nclass CNN:\n def __init__(self, inputSize, hitLearn=.1, momentum=.9, weigthDecay=.5, multip=1.0):\n file = '%s/%s' % (DIR_LIBRARY, 'gpu_function.cl')\n file = file.encode('utf-8')\n self.cnn = c_Pointer()\n clib.createCnnWrapper(c.addressof(self.cnn), c.create_string_buffer(file),\n hitLearn, momentum, weigthDecay, multip, inputSize[0], inputSize[1], inputSize[2])\n clib.initRandom(time.time_ns())\n\n def __del__(self):\n clib.releaseCnnWrapper(c.addressof(self.cnn))\n print('end')\n\n def addConvLayer(self, passo, tamanhoFitro, numeroFiltro):\n clib.CnnAddConvLayer(self.cnn.p, passo, tamanhoFitro, numeroFiltro)\n\n def addPoolLayer(self, passo, tamanhoFitro):\n clib.CnnAddPoolLayer(self.cnn.p, passo, tamanhoFitro)\n\n def addReluLayer(self):\n clib.CnnAddReluLayer(self.cnn.p)\n\n def addDropOutLayer(self, pontoAtivacao, seed):\n clib.CnnAddDropOutLayer(self.cnn.p, pontoAtivacao, seed)\n\n def addFullConnectLayer(self, saida, funcaoAtivacao):\n clib.CnnAddFullConnectLayer(self.cnn.p, saida, funcaoAtivacao)\n\n def predict(self, input):\n tinput = self.createInp(*input)\n clib.CnnCall(self.cnn.p, tinput)\n\n def learn(self, target):\n ttarg = self.targ(*target)\n clib.CnnLearn(self.cnn.p, ttarg)\n\n def getData(self, layer, request, nfilter=0):\n size = self.getSizeData(layer, request)\n if size is None: return None\n data = c.c_double * (size[0] * size[1] * size[2])\n data = data(0)\n err = clib.CnnGetTensorData(self.cnn.p, layer, request, nfilter, data)\n if err < 0:\n self.lastERROR = err\n return None\n return list(data)\n\n def getSizeData(self, layer, request):\n inx, iny, inz, n = c.c_int(0), c.c_int(0), c.c_int(0), c.c_int(0)\n err = clib.CnnGetSize(self.cnn.p, layer, request, c.addressof(inx), c.addressof(iny), c.addressof(inz),\n 
c.addressof(n))\n if err < 0:\n self.lastERROR = err\n return None\n return inx.value, iny.value, inz.value, n.value\n\n @property\n def output(self):\n err = clib.CnnGetTensorData(self.cnn.p, -1, REQUEST_OUTPUT, 0, self.out)\n if err < 0:\n self.lastERROR = err\n return None\n return list(self.out)\n\n def compile(self):\n if self.error: raise Exception(\"ERROR\")\n inx, iny, inz = c.c_int(0), c.c_int(0), c.c_int(0)\n err = clib.CnnGetSize(self.cnn.p, 0, REQUEST_INPUT, c.addressof(inx), c.addressof(iny), c.addressof(inz),\n c.cast(0, c.c_void_p))\n if err != 0: raise Exception('Error when request input size', err)\n self.createInp = c.c_double * (inx.value * iny.value * inz.value)\n err = clib.CnnGetSize(self.cnn.p, -1, REQUEST_OUTPUT, c.addressof(inx), c.addressof(iny), c.addressof(inz),\n c.cast(0, c.c_void_p))\n if err != 0: raise Exception('Error when request output size', err)\n self.out = c.c_double * (inx.value * iny.value * inz.value)\n self.targ = self.out\n self.out = self.out(0)\n\n def info(self):\n clib.CnnInfo(self.cnn.p)\n\n def save(self, fileName:str):\n filedesc = Path(fileName).with_suffix('.cdc')\n self.salveCnnDescriptor(filedesc)\n fileName = fileName.encode('utf-8')\n return clib.CnnSaveInFile(self.cnn.p, c.create_string_buffer(fileName))\n\n @staticmethod\n def load(fileName):\n self = CNN([2,2,1])\n fileName = fileName.encode('utf-8')\n clib.CnnLoadByFile(self.cnn.p, c.create_string_buffer(fileName))\n self.compile()\n return self\n @property\n def error(self):\n return clib.getCnnError(self.cnn.p)\n @property\n def errorMsg(self):\n buff = c.create_string_buffer(''.encode('utf-8'),255)\n clib.getCnnErrormsg(self.cnn.p,buff)\n return buff.value.decode('utf-8')\n def salveCnnDescriptor(self,file):\n desc_c = c_Pointer()\n clib.generateDescriptor(c.addressof(desc_c),self.cnn.p)\n msg = c.cast(desc_c.p,c.c_char_p)\n msg = msg.value.decode('utf-8')\n clib.freeP(desc_c.p)\n desc = eval(msg)\n with open(file,'wb') as f:\n pickle.dump(desc,f)\n # 
AUXILIAR FUNCTION\n def getOutputAsIndexMax(self):\n ans = clib.CnnGetIndexMax(self.cnn.p)\n return ans\n def normalizeVector(self,vector:list,maxOutput,minOutput):\n out_TYPE =c.c_double * len(vector)\n inp = out_TYPE(*vector)\n out = out_TYPE()\n clib.normalizeGPU(self.cnn.p,inp,out,len(vector),maxOutput,minOutput)\n return list(out)\n def normalizeVectorKnowedSpace(self,vector:list,maxInput,minInput,maxOutput,minOutput):\n out_TYPE =c.c_double * len(vector)\n tmp_inp = out_TYPE(*vector)\n tmp_out = out_TYPE(*vector)\n clib.normalizeGPUSpaceKnow(self.cnn.p,tmp_inp,tmp_out,len(vector),maxInput,minInput,maxOutput,minOutput)\n return list(tmp_out)\n def getOutPutAsPPM(self):\n p = c_Pointer()\n h = c.c_size_t()\n w = c.c_size_t()\n clib.Py_getCnnOutPutAsPPM(self.cnn.p, c.addressof(p), c.addressof(h), c.addressof(w))\n h = h.value\n w = w.value\n out = (c.c_ubyte*(w*h))()\n c.memmove(out,p.p,w*h)\n a = bytes(out)\n clib.freeP(p.p)\n return (h,w,a)",
"step-ids": [
7,
12,
20,
21,
27
]
}
|
[
7,
12,
20,
21,
27
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if no == rev:
print(f'{no}--->{rev} Input is a palindrome')
else:
print(f'{no}--->{rev} Input is not a palindrome')
<|reserved_special_token_1|>
no = int(input('Enter a number: '))
no = str(no)
rev = no[::-1]
if no == rev:
print(f'{no}--->{rev} Input is a palindrome')
else:
print(f'{no}--->{rev} Input is not a palindrome')
<|reserved_special_token_1|>
no = int(input("Enter a number: "))
no = str(no)
rev = no[::-1]
if no==rev:
print(f"{no}--->{rev} Input is a palindrome")
else:
print(f"{no}--->{rev} Input is not a palindrome")
|
flexible
|
{
"blob_id": "020a41e7d3cc3f5adf3a38a6852dac6037595372",
"index": 2043,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif no == rev:\n print(f'{no}--->{rev} Input is a palindrome')\nelse:\n print(f'{no}--->{rev} Input is not a palindrome')\n",
"step-3": "no = int(input('Enter a number: '))\nno = str(no)\nrev = no[::-1]\nif no == rev:\n print(f'{no}--->{rev} Input is a palindrome')\nelse:\n print(f'{no}--->{rev} Input is not a palindrome')\n",
"step-4": "no = int(input(\"Enter a number: \"))\nno = str(no)\nrev = no[::-1]\nif no==rev:\n print(f\"{no}--->{rev} Input is a palindrome\")\nelse:\n print(f\"{no}--->{rev} Input is not a palindrome\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import re
import random
import requests
from bs4 import BeautifulSoup
import js2py
from fake_useragent import UserAgent
def _get_request_key(session):
res = session.post("https://spys.one/en/socks-proxy-list/")
soup = BeautifulSoup(res.text, 'html.parser')
return soup.find("input", {"name": "xx0"}).get("value")
def _get_proxy_list(session, xx0):
res = session.post("https://spys.one/en/socks-proxy-list/",
data=f"xx0={xx0}&xpp={0}&xf1={0}&xf2={0}&xf4={0}&xf5={2}",
headers={
"Content-Type": "application/x-www-form-urlencoded",
})
soup = BeautifulSoup(res.text, 'html.parser')
js = js2py.EvalJs({"document": {"write": lambda a: a}})
js.execute(soup.select_one("body > script").string)
addrs = soup.select("tr[onmouseover] > td:first-child")
ports = [js.eval(i.find("script").string) for i in addrs]
addrs = [i.get_text() for i in addrs]
ports = [re.sub(r"<[^<]*>", "", i) for i in ports]
return list(map(''.join, zip(addrs, ports)))
class ProxyScrapper:
def __init__(self):
self._proxies = []
def refresh(self):
session = requests.Session()
session.headers["User-Agent"] = UserAgent().random
print("Rotating proxy list")
xx0 = _get_request_key(session)
print(f"Got proxy request key xx0={xx0}")
addrs = _get_proxy_list(session, xx0)
self._proxies = [f"socks5://{i}" for i in addrs]
print(f"Got {len(self._proxies)} proxies")
def random(self):
assert(len(self._proxies) > 0)
return random.choice(self._proxies)
|
normal
|
{
"blob_id": "647dde6e3288ded29336062b78baacc3a92908a7",
"index": 478,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ProxyScrapper:\n\n def __init__(self):\n self._proxies = []\n\n def refresh(self):\n session = requests.Session()\n session.headers['User-Agent'] = UserAgent().random\n print('Rotating proxy list')\n xx0 = _get_request_key(session)\n print(f'Got proxy request key xx0={xx0}')\n addrs = _get_proxy_list(session, xx0)\n self._proxies = [f'socks5://{i}' for i in addrs]\n print(f'Got {len(self._proxies)} proxies')\n\n def random(self):\n assert len(self._proxies) > 0\n return random.choice(self._proxies)\n",
"step-3": "<mask token>\n\n\ndef _get_request_key(session):\n res = session.post('https://spys.one/en/socks-proxy-list/')\n soup = BeautifulSoup(res.text, 'html.parser')\n return soup.find('input', {'name': 'xx0'}).get('value')\n\n\n<mask token>\n\n\nclass ProxyScrapper:\n\n def __init__(self):\n self._proxies = []\n\n def refresh(self):\n session = requests.Session()\n session.headers['User-Agent'] = UserAgent().random\n print('Rotating proxy list')\n xx0 = _get_request_key(session)\n print(f'Got proxy request key xx0={xx0}')\n addrs = _get_proxy_list(session, xx0)\n self._proxies = [f'socks5://{i}' for i in addrs]\n print(f'Got {len(self._proxies)} proxies')\n\n def random(self):\n assert len(self._proxies) > 0\n return random.choice(self._proxies)\n",
"step-4": "import re\nimport random\nimport requests\nfrom bs4 import BeautifulSoup\nimport js2py\nfrom fake_useragent import UserAgent\n\n\ndef _get_request_key(session):\n res = session.post('https://spys.one/en/socks-proxy-list/')\n soup = BeautifulSoup(res.text, 'html.parser')\n return soup.find('input', {'name': 'xx0'}).get('value')\n\n\ndef _get_proxy_list(session, xx0):\n res = session.post('https://spys.one/en/socks-proxy-list/', data=\n f'xx0={xx0}&xpp={0}&xf1={0}&xf2={0}&xf4={0}&xf5={2}', headers={\n 'Content-Type': 'application/x-www-form-urlencoded'})\n soup = BeautifulSoup(res.text, 'html.parser')\n js = js2py.EvalJs({'document': {'write': lambda a: a}})\n js.execute(soup.select_one('body > script').string)\n addrs = soup.select('tr[onmouseover] > td:first-child')\n ports = [js.eval(i.find('script').string) for i in addrs]\n addrs = [i.get_text() for i in addrs]\n ports = [re.sub('<[^<]*>', '', i) for i in ports]\n return list(map(''.join, zip(addrs, ports)))\n\n\nclass ProxyScrapper:\n\n def __init__(self):\n self._proxies = []\n\n def refresh(self):\n session = requests.Session()\n session.headers['User-Agent'] = UserAgent().random\n print('Rotating proxy list')\n xx0 = _get_request_key(session)\n print(f'Got proxy request key xx0={xx0}')\n addrs = _get_proxy_list(session, xx0)\n self._proxies = [f'socks5://{i}' for i in addrs]\n print(f'Got {len(self._proxies)} proxies')\n\n def random(self):\n assert len(self._proxies) > 0\n return random.choice(self._proxies)\n",
"step-5": "import re\nimport random\nimport requests\nfrom bs4 import BeautifulSoup\nimport js2py\nfrom fake_useragent import UserAgent\n\n\ndef _get_request_key(session):\n res = session.post(\"https://spys.one/en/socks-proxy-list/\")\n soup = BeautifulSoup(res.text, 'html.parser')\n return soup.find(\"input\", {\"name\": \"xx0\"}).get(\"value\")\n\n\ndef _get_proxy_list(session, xx0):\n res = session.post(\"https://spys.one/en/socks-proxy-list/\",\n data=f\"xx0={xx0}&xpp={0}&xf1={0}&xf2={0}&xf4={0}&xf5={2}\",\n headers={\n \"Content-Type\": \"application/x-www-form-urlencoded\",\n })\n\n soup = BeautifulSoup(res.text, 'html.parser')\n js = js2py.EvalJs({\"document\": {\"write\": lambda a: a}})\n js.execute(soup.select_one(\"body > script\").string)\n\n addrs = soup.select(\"tr[onmouseover] > td:first-child\")\n ports = [js.eval(i.find(\"script\").string) for i in addrs]\n addrs = [i.get_text() for i in addrs]\n ports = [re.sub(r\"<[^<]*>\", \"\", i) for i in ports]\n\n return list(map(''.join, zip(addrs, ports)))\n\n\nclass ProxyScrapper:\n def __init__(self):\n self._proxies = []\n\n def refresh(self):\n session = requests.Session()\n session.headers[\"User-Agent\"] = UserAgent().random\n print(\"Rotating proxy list\")\n\n xx0 = _get_request_key(session)\n print(f\"Got proxy request key xx0={xx0}\")\n\n addrs = _get_proxy_list(session, xx0)\n self._proxies = [f\"socks5://{i}\" for i in addrs]\n print(f\"Got {len(self._proxies)} proxies\")\n\n def random(self):\n assert(len(self._proxies) > 0)\n return random.choice(self._proxies)\n",
"step-ids": [
0,
4,
5,
7,
8
]
}
|
[
0,
4,
5,
7,
8
] |
<|reserved_special_token_0|>
def get_evaluate_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
n = 18
idx = np.random.permutation(n)
idx = idx + 54
for i in range(n):
X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0
Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_test_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
n = 18
idx = np.random.permutation(n)
idx = idx + 72
for i in range(n):
X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0
Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)
yield X, Y
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_evaluate_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
n = 18
idx = np.random.permutation(n)
idx = idx + 54
for i in range(n):
X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0
Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_test_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
n = 18
idx = np.random.permutation(n)
idx = idx + 72
for i in range(n):
X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0
Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_batches_mono(data_dir):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
X = np.load('/home/yunhan/data_dir/train_x_224.npy')
X = X / 255
Y = np.load('/home/yunhan/data_dir/train_y_224.npy')
return [(X, Y, 32, 0.2)]
def get_test_data_batches(data_dir='/home/yunhan/data_dir'):
for i in range(17):
X = np.load('%s/X_%d.npy' % (data_dir, 3000 * (i + 1))) / 255.0
yield X
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_train_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
n = 53
idx = np.random.permutation(n)
idx = idx + 1
for i in range(n):
X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0
Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_evaluate_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
n = 18
idx = np.random.permutation(n)
idx = idx + 54
for i in range(n):
X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0
Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_test_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
n = 18
idx = np.random.permutation(n)
idx = idx + 72
for i in range(n):
X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0
Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_batches_mono(data_dir):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
X = np.load('/home/yunhan/data_dir/train_x_224.npy')
X = X / 255
Y = np.load('/home/yunhan/data_dir/train_y_224.npy')
return [(X, Y, 32, 0.2)]
def get_test_data_batches(data_dir='/home/yunhan/data_dir'):
for i in range(17):
X = np.load('%s/X_%d.npy' % (data_dir, 3000 * (i + 1))) / 255.0
yield X
<|reserved_special_token_1|>
import numpy as np
def get_train_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
n = 53
idx = np.random.permutation(n)
idx = idx + 1
for i in range(n):
X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0
Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_evaluate_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
n = 18
idx = np.random.permutation(n)
idx = idx + 54
for i in range(n):
X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0
Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_test_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
n = 18
idx = np.random.permutation(n)
idx = idx + 72
for i in range(n):
X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0
Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_batches_mono(data_dir):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
X = np.load('/home/yunhan/data_dir/train_x_224.npy')
X = X / 255
Y = np.load('/home/yunhan/data_dir/train_y_224.npy')
return [(X, Y, 32, 0.2)]
def get_test_data_batches(data_dir='/home/yunhan/data_dir'):
for i in range(17):
X = np.load('%s/X_%d.npy' % (data_dir, 3000 * (i + 1))) / 255.0
yield X
<|reserved_special_token_1|>
import numpy as np
def get_train_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
# todo: read in data that is preoprocessed
# Use batch 1 - 52 as train (60%), 53 - 71 as validation (20%), 72 - 89 as test (20%)
n = 53
idx = np.random.permutation(n)
idx = idx + 1
for i in range(n):
X = np.load("%s/X%d.npy" % (data_dir, idx[i]))/255.
Y = np.load("%s/y%d.npy" % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_evaluate_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
# train 3 valid 1
# Use batch 1 - 53 as train (60%), 54 - 71 as validation (20%), 72 - 89 as test (20%)
n = 18
idx = np.random.permutation(n)
idx = idx + 54
for i in range(n):
X = np.load("%s/X%d.npy" % (data_dir, idx[i]))/255.
Y = np.load("%s/y%d.npy" % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_test_batches(data_dir='/home/yunhan/batchified'):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
# train 3 valid 1
# Use batch 1 - 53 as train (60%), 54 - 71 as validation (20%), 72 - 89 as test (20%)
n = 18
idx = np.random.permutation(n)
idx = idx + 72
for i in range(n):
X = np.load("%s/X%d.npy" % (data_dir, idx[i]))/255.
Y = np.load("%s/y%d.npy" % (data_dir, idx[i])).reshape(-1)
yield X, Y
def get_batches_mono(data_dir):
"""
return a list or generator of (large) ndarrays,
in order to efficiently utilize GPU
"""
X = np.load('/home/yunhan/data_dir/train_x_224.npy')
# X = np.load('train_x_sample.npy')
X = X / 255
# X = np.load('/home/yunhan/data_dir/train_x_224.npy')
Y = np.load('/home/yunhan/data_dir/train_y_224.npy')
# Y = np.load('train_y_sample.npy')
return [(X, Y, 32, 0.2), ]
def get_test_data_batches(data_dir='/home/yunhan/data_dir'):
for i in range(17):
X = np.load("%s/X_%d.npy" % (data_dir, 3000*(i+1)))/255.
yield X
|
flexible
|
{
"blob_id": "c04c38d78144b6f5d3e5af4ebe9ce430e882a367",
"index": 8014,
"step-1": "<mask token>\n\n\ndef get_evaluate_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 54\n for i in range(n):\n X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0\n Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_test_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 72\n for i in range(n):\n X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0\n Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_evaluate_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 54\n for i in range(n):\n X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0\n Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_test_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 72\n for i in range(n):\n X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0\n Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_batches_mono(data_dir):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n X = np.load('/home/yunhan/data_dir/train_x_224.npy')\n X = X / 255\n Y = np.load('/home/yunhan/data_dir/train_y_224.npy')\n return [(X, Y, 32, 0.2)]\n\n\ndef get_test_data_batches(data_dir='/home/yunhan/data_dir'):\n for i in range(17):\n X = np.load('%s/X_%d.npy' % (data_dir, 3000 * (i + 1))) / 255.0\n yield X\n",
"step-3": "<mask token>\n\n\ndef get_train_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n n = 53\n idx = np.random.permutation(n)\n idx = idx + 1\n for i in range(n):\n X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0\n Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_evaluate_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 54\n for i in range(n):\n X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0\n Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_test_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 72\n for i in range(n):\n X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0\n Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_batches_mono(data_dir):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n X = np.load('/home/yunhan/data_dir/train_x_224.npy')\n X = X / 255\n Y = np.load('/home/yunhan/data_dir/train_y_224.npy')\n return [(X, Y, 32, 0.2)]\n\n\ndef get_test_data_batches(data_dir='/home/yunhan/data_dir'):\n for i in range(17):\n X = np.load('%s/X_%d.npy' % (data_dir, 3000 * (i + 1))) / 255.0\n yield X\n",
"step-4": "import numpy as np\n\n\ndef get_train_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n n = 53\n idx = np.random.permutation(n)\n idx = idx + 1\n for i in range(n):\n X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0\n Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_evaluate_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 54\n for i in range(n):\n X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0\n Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_test_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 72\n for i in range(n):\n X = np.load('%s/X%d.npy' % (data_dir, idx[i])) / 255.0\n Y = np.load('%s/y%d.npy' % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_batches_mono(data_dir):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n X = np.load('/home/yunhan/data_dir/train_x_224.npy')\n X = X / 255\n Y = np.load('/home/yunhan/data_dir/train_y_224.npy')\n return [(X, Y, 32, 0.2)]\n\n\ndef get_test_data_batches(data_dir='/home/yunhan/data_dir'):\n for i in range(17):\n X = np.load('%s/X_%d.npy' % (data_dir, 3000 * (i + 1))) / 255.0\n yield X\n",
"step-5": "import numpy as np\n\n\ndef get_train_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n # todo: read in data that is preoprocessed\n # Use batch 1 - 52 as train (60%), 53 - 71 as validation (20%), 72 - 89 as test (20%)\n n = 53\n idx = np.random.permutation(n)\n idx = idx + 1\n for i in range(n):\n X = np.load(\"%s/X%d.npy\" % (data_dir, idx[i]))/255.\n Y = np.load(\"%s/y%d.npy\" % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_evaluate_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n # train 3 valid 1\n # Use batch 1 - 53 as train (60%), 54 - 71 as validation (20%), 72 - 89 as test (20%)\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 54\n for i in range(n):\n X = np.load(\"%s/X%d.npy\" % (data_dir, idx[i]))/255.\n Y = np.load(\"%s/y%d.npy\" % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_test_batches(data_dir='/home/yunhan/batchified'):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n # train 3 valid 1\n # Use batch 1 - 53 as train (60%), 54 - 71 as validation (20%), 72 - 89 as test (20%)\n n = 18\n idx = np.random.permutation(n)\n idx = idx + 72\n for i in range(n):\n X = np.load(\"%s/X%d.npy\" % (data_dir, idx[i]))/255.\n Y = np.load(\"%s/y%d.npy\" % (data_dir, idx[i])).reshape(-1)\n yield X, Y\n\n\ndef get_batches_mono(data_dir):\n \"\"\"\n return a list or generator of (large) ndarrays,\n in order to efficiently utilize GPU\n \"\"\"\n X = np.load('/home/yunhan/data_dir/train_x_224.npy')\n # X = np.load('train_x_sample.npy')\n X = X / 255\n # X = np.load('/home/yunhan/data_dir/train_x_224.npy')\n Y = np.load('/home/yunhan/data_dir/train_y_224.npy')\n # Y = np.load('train_y_sample.npy')\n return [(X, Y, 32, 0.2), ]\n\n\ndef 
get_test_data_batches(data_dir='/home/yunhan/data_dir'):\n for i in range(17):\n X = np.load(\"%s/X_%d.npy\" % (data_dir, 3000*(i+1)))/255.\n yield X\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('Positive:', ', '.join(list(filter(lambda x: int(x) > -1, line_numbers)))
)
print('Negative:', ', '.join(list(filter(lambda x: int(x) < 0, line_numbers))))
print('Even:', ', '.join(list(filter(lambda x: int(x) % 2 == 0, line_numbers)))
)
print('Odd:', ', '.join(list(filter(lambda x: int(x) % 2 != 0, line_numbers))))
<|reserved_special_token_1|>
line_numbers = input().split(', ')
print('Positive:', ', '.join(list(filter(lambda x: int(x) > -1, line_numbers)))
)
print('Negative:', ', '.join(list(filter(lambda x: int(x) < 0, line_numbers))))
print('Even:', ', '.join(list(filter(lambda x: int(x) % 2 == 0, line_numbers)))
)
print('Odd:', ', '.join(list(filter(lambda x: int(x) % 2 != 0, line_numbers))))
<|reserved_special_token_1|>
line_numbers = input().split(", ")
print("Positive:", ", ".join(list(filter((lambda x: int(x) > -1), line_numbers))))
print("Negative:", ", ".join((list(filter((lambda x: int(x) < 0), line_numbers)))))
print("Even:", ", ".join((list(filter((lambda x: int(x) % 2 == 0), line_numbers)))))
print("Odd:", ", ".join((list(filter((lambda x: int(x) % 2 != 0), line_numbers)))))
# # INPUT 1
# 1, -2, 0, 5, 3, 4, -100, -20, 12, 19, -33
# # OUTPUT 1
# Positive: 1, 0, 5, 3, 4, 12, 19
# Negative: -2, -100, -20, -33
# Even: -2, 0, 4, -100, -20, 12
# Odd: 1, 5, 3, 19, -33
|
flexible
|
{
"blob_id": "e4845e5aa949ec523515efc4d7996d647fddabdb",
"index": 7060,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('Positive:', ', '.join(list(filter(lambda x: int(x) > -1, line_numbers)))\n )\nprint('Negative:', ', '.join(list(filter(lambda x: int(x) < 0, line_numbers))))\nprint('Even:', ', '.join(list(filter(lambda x: int(x) % 2 == 0, line_numbers)))\n )\nprint('Odd:', ', '.join(list(filter(lambda x: int(x) % 2 != 0, line_numbers))))\n",
"step-3": "line_numbers = input().split(', ')\nprint('Positive:', ', '.join(list(filter(lambda x: int(x) > -1, line_numbers)))\n )\nprint('Negative:', ', '.join(list(filter(lambda x: int(x) < 0, line_numbers))))\nprint('Even:', ', '.join(list(filter(lambda x: int(x) % 2 == 0, line_numbers)))\n )\nprint('Odd:', ', '.join(list(filter(lambda x: int(x) % 2 != 0, line_numbers))))\n",
"step-4": "line_numbers = input().split(\", \")\nprint(\"Positive:\", \", \".join(list(filter((lambda x: int(x) > -1), line_numbers))))\nprint(\"Negative:\", \", \".join((list(filter((lambda x: int(x) < 0), line_numbers)))))\nprint(\"Even:\", \", \".join((list(filter((lambda x: int(x) % 2 == 0), line_numbers)))))\nprint(\"Odd:\", \", \".join((list(filter((lambda x: int(x) % 2 != 0), line_numbers)))))\n# # INPUT 1\n# 1, -2, 0, 5, 3, 4, -100, -20, 12, 19, -33\n# # OUTPUT 1\n# Positive: 1, 0, 5, 3, 4, 12, 19\n# Negative: -2, -100, -20, -33\n# Even: -2, 0, 4, -100, -20, 12\n# Odd: 1, 5, 3, 19, -33\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('read imafe from file')
<|reserved_special_token_0|>
print('create a window holder for the image')
cv2.namedWindow('Image', cv2.WINDOW_NORMAL)
print('display the image ')
cv2.imshow('Image', img)
print('press a key inside the image to make a copy')
cv2.waitKey(0)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('read imafe from file')
img = cv2.imread('panda.jpg')
print('create a window holder for the image')
cv2.namedWindow('Image', cv2.WINDOW_NORMAL)
print('display the image ')
cv2.imshow('Image', img)
print('press a key inside the image to make a copy')
cv2.waitKey(0)
<|reserved_special_token_1|>
import numpy as np
import cv2
print('read imafe from file')
img = cv2.imread('panda.jpg')
print('create a window holder for the image')
cv2.namedWindow('Image', cv2.WINDOW_NORMAL)
print('display the image ')
cv2.imshow('Image', img)
print('press a key inside the image to make a copy')
cv2.waitKey(0)
<|reserved_special_token_1|>
import numpy as np
import cv2
print("read imafe from file" )
img = cv2.imread("panda.jpg")
print("create a window holder for the image")
cv2.namedWindow("Image",cv2.WINDOW_NORMAL)
print ('display the image ')
cv2.imshow("Image",img)
print ('press a key inside the image to make a copy')
cv2.waitKey(0)
|
flexible
|
{
"blob_id": "7cf6a4b8057280b38572dd92693013724751c47f",
"index": 9502,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('read imafe from file')\n<mask token>\nprint('create a window holder for the image')\ncv2.namedWindow('Image', cv2.WINDOW_NORMAL)\nprint('display the image ')\ncv2.imshow('Image', img)\nprint('press a key inside the image to make a copy')\ncv2.waitKey(0)\n",
"step-3": "<mask token>\nprint('read imafe from file')\nimg = cv2.imread('panda.jpg')\nprint('create a window holder for the image')\ncv2.namedWindow('Image', cv2.WINDOW_NORMAL)\nprint('display the image ')\ncv2.imshow('Image', img)\nprint('press a key inside the image to make a copy')\ncv2.waitKey(0)\n",
"step-4": "import numpy as np\nimport cv2\nprint('read imafe from file')\nimg = cv2.imread('panda.jpg')\nprint('create a window holder for the image')\ncv2.namedWindow('Image', cv2.WINDOW_NORMAL)\nprint('display the image ')\ncv2.imshow('Image', img)\nprint('press a key inside the image to make a copy')\ncv2.waitKey(0)\n",
"step-5": "import numpy as np \nimport cv2\n\n\nprint(\"read imafe from file\" )\nimg = cv2.imread(\"panda.jpg\")\n\nprint(\"create a window holder for the image\")\ncv2.namedWindow(\"Image\",cv2.WINDOW_NORMAL)\n\nprint ('display the image ')\ncv2.imshow(\"Image\",img)\n\nprint ('press a key inside the image to make a copy')\ncv2.waitKey(0)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import requests
def login(username, password):
data = {'login':username,'pwd':password,'lang':''}
r = requests.post('http://dms-pit.htb/seeddms51x/seeddms/op/op.Login.php', data=data, allow_redirects=False)
if r.headers['Location'] == '../out/out.Login.php?msg=Error+signing+in.+User+ID+or+password+incorrect':
return False
return True
# import pdb;pdb.set_trace()
if login("michelle", "michelle"):
print("Login Successfull[+]")
|
normal
|
{
"blob_id": "ae84b449c8919f14954633b14993e6291501bc24",
"index": 1019,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef login(username, password):\n data = {'login': username, 'pwd': password, 'lang': ''}\n r = requests.post('http://dms-pit.htb/seeddms51x/seeddms/op/op.Login.php',\n data=data, allow_redirects=False)\n if (r.headers['Location'] ==\n '../out/out.Login.php?msg=Error+signing+in.+User+ID+or+password+incorrect'\n ):\n return False\n return True\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef login(username, password):\n data = {'login': username, 'pwd': password, 'lang': ''}\n r = requests.post('http://dms-pit.htb/seeddms51x/seeddms/op/op.Login.php',\n data=data, allow_redirects=False)\n if (r.headers['Location'] ==\n '../out/out.Login.php?msg=Error+signing+in.+User+ID+or+password+incorrect'\n ):\n return False\n return True\n\n\nif login('michelle', 'michelle'):\n print('Login Successfull[+]')\n",
"step-4": "import requests\n\n\ndef login(username, password):\n data = {'login': username, 'pwd': password, 'lang': ''}\n r = requests.post('http://dms-pit.htb/seeddms51x/seeddms/op/op.Login.php',\n data=data, allow_redirects=False)\n if (r.headers['Location'] ==\n '../out/out.Login.php?msg=Error+signing+in.+User+ID+or+password+incorrect'\n ):\n return False\n return True\n\n\nif login('michelle', 'michelle'):\n print('Login Successfull[+]')\n",
"step-5": "import requests\n\ndef login(username, password):\n data = {'login':username,'pwd':password,'lang':''}\n r = requests.post('http://dms-pit.htb/seeddms51x/seeddms/op/op.Login.php', data=data, allow_redirects=False)\n if r.headers['Location'] == '../out/out.Login.php?msg=Error+signing+in.+User+ID+or+password+incorrect':\n return False\n return True\n # import pdb;pdb.set_trace()\n\n\nif login(\"michelle\", \"michelle\"):\n print(\"Login Successfull[+]\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class TestConfig(unittest.TestCase):
def test_load_theme(self):
struct = config.Struct()
struct.color_scheme = dict()
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())
expected = {'keyword': 'y'}
self.assertEquals(struct.color_scheme, expected)
defaults = {'name': 'c'}
expected.update(defaults)
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,
defaults)
self.assertEquals(struct.color_scheme, expected)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestConfig(unittest.TestCase):
def test_load_theme(self):
struct = config.Struct()
struct.color_scheme = dict()
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())
expected = {'keyword': 'y'}
self.assertEquals(struct.color_scheme, expected)
defaults = {'name': 'c'}
expected.update(defaults)
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,
defaults)
self.assertEquals(struct.color_scheme, expected)
def test_load_config(self):
struct = config.Struct()
with tempfile.NamedTemporaryFile() as f:
f.write(''.encode('utf8'))
f.write('[keyboard]\nhelp = C-h\n'.encode('utf8'))
f.flush()
config.loadini(struct, f.name)
self.assertEqual(struct.help_key, 'C-h')
self.assertEqual(struct.backspace_key, '')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
TEST_THEME_PATH = os.path.join(os.path.dirname(__file__), 'test.theme')
class TestConfig(unittest.TestCase):
def test_load_theme(self):
struct = config.Struct()
struct.color_scheme = dict()
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())
expected = {'keyword': 'y'}
self.assertEquals(struct.color_scheme, expected)
defaults = {'name': 'c'}
expected.update(defaults)
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,
defaults)
self.assertEquals(struct.color_scheme, expected)
def test_load_config(self):
struct = config.Struct()
with tempfile.NamedTemporaryFile() as f:
f.write(''.encode('utf8'))
f.write('[keyboard]\nhelp = C-h\n'.encode('utf8'))
f.flush()
config.loadini(struct, f.name)
self.assertEqual(struct.help_key, 'C-h')
self.assertEqual(struct.backspace_key, '')
<|reserved_special_token_1|>
import os
import unittest
import tempfile
from bpython import config
TEST_THEME_PATH = os.path.join(os.path.dirname(__file__), 'test.theme')
class TestConfig(unittest.TestCase):
def test_load_theme(self):
struct = config.Struct()
struct.color_scheme = dict()
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())
expected = {'keyword': 'y'}
self.assertEquals(struct.color_scheme, expected)
defaults = {'name': 'c'}
expected.update(defaults)
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,
defaults)
self.assertEquals(struct.color_scheme, expected)
def test_load_config(self):
struct = config.Struct()
with tempfile.NamedTemporaryFile() as f:
f.write(''.encode('utf8'))
f.write('[keyboard]\nhelp = C-h\n'.encode('utf8'))
f.flush()
config.loadini(struct, f.name)
self.assertEqual(struct.help_key, 'C-h')
self.assertEqual(struct.backspace_key, '')
<|reserved_special_token_1|>
import os
import unittest
import tempfile
from bpython import config
TEST_THEME_PATH = os.path.join(os.path.dirname(__file__), "test.theme")
class TestConfig(unittest.TestCase):
def test_load_theme(self):
struct = config.Struct()
struct.color_scheme = dict()
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())
expected = {"keyword": "y"}
self.assertEquals(struct.color_scheme, expected)
defaults = {"name": "c"}
expected.update(defaults)
config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, defaults)
self.assertEquals(struct.color_scheme, expected)
def test_load_config(self):
struct = config.Struct()
with tempfile.NamedTemporaryFile() as f:
f.write(''.encode('utf8'))
f.write('[keyboard]\nhelp = C-h\n'.encode('utf8'))
f.flush()
config.loadini(struct, f.name)
self.assertEqual(struct.help_key, 'C-h')
self.assertEqual(struct.backspace_key, '')
|
flexible
|
{
"blob_id": "d5efbbb6e818e797652f304f3d022e04be245778",
"index": 4931,
"step-1": "<mask token>\n\n\nclass TestConfig(unittest.TestCase):\n\n def test_load_theme(self):\n struct = config.Struct()\n struct.color_scheme = dict()\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())\n expected = {'keyword': 'y'}\n self.assertEquals(struct.color_scheme, expected)\n defaults = {'name': 'c'}\n expected.update(defaults)\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,\n defaults)\n self.assertEquals(struct.color_scheme, expected)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestConfig(unittest.TestCase):\n\n def test_load_theme(self):\n struct = config.Struct()\n struct.color_scheme = dict()\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())\n expected = {'keyword': 'y'}\n self.assertEquals(struct.color_scheme, expected)\n defaults = {'name': 'c'}\n expected.update(defaults)\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,\n defaults)\n self.assertEquals(struct.color_scheme, expected)\n\n def test_load_config(self):\n struct = config.Struct()\n with tempfile.NamedTemporaryFile() as f:\n f.write(''.encode('utf8'))\n f.write('[keyboard]\\nhelp = C-h\\n'.encode('utf8'))\n f.flush()\n config.loadini(struct, f.name)\n self.assertEqual(struct.help_key, 'C-h')\n self.assertEqual(struct.backspace_key, '')\n",
"step-3": "<mask token>\nTEST_THEME_PATH = os.path.join(os.path.dirname(__file__), 'test.theme')\n\n\nclass TestConfig(unittest.TestCase):\n\n def test_load_theme(self):\n struct = config.Struct()\n struct.color_scheme = dict()\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())\n expected = {'keyword': 'y'}\n self.assertEquals(struct.color_scheme, expected)\n defaults = {'name': 'c'}\n expected.update(defaults)\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,\n defaults)\n self.assertEquals(struct.color_scheme, expected)\n\n def test_load_config(self):\n struct = config.Struct()\n with tempfile.NamedTemporaryFile() as f:\n f.write(''.encode('utf8'))\n f.write('[keyboard]\\nhelp = C-h\\n'.encode('utf8'))\n f.flush()\n config.loadini(struct, f.name)\n self.assertEqual(struct.help_key, 'C-h')\n self.assertEqual(struct.backspace_key, '')\n",
"step-4": "import os\nimport unittest\nimport tempfile\nfrom bpython import config\nTEST_THEME_PATH = os.path.join(os.path.dirname(__file__), 'test.theme')\n\n\nclass TestConfig(unittest.TestCase):\n\n def test_load_theme(self):\n struct = config.Struct()\n struct.color_scheme = dict()\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())\n expected = {'keyword': 'y'}\n self.assertEquals(struct.color_scheme, expected)\n defaults = {'name': 'c'}\n expected.update(defaults)\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme,\n defaults)\n self.assertEquals(struct.color_scheme, expected)\n\n def test_load_config(self):\n struct = config.Struct()\n with tempfile.NamedTemporaryFile() as f:\n f.write(''.encode('utf8'))\n f.write('[keyboard]\\nhelp = C-h\\n'.encode('utf8'))\n f.flush()\n config.loadini(struct, f.name)\n self.assertEqual(struct.help_key, 'C-h')\n self.assertEqual(struct.backspace_key, '')\n",
"step-5": "import os\nimport unittest\nimport tempfile\n\nfrom bpython import config\n\nTEST_THEME_PATH = os.path.join(os.path.dirname(__file__), \"test.theme\")\n\nclass TestConfig(unittest.TestCase):\n def test_load_theme(self):\n struct = config.Struct()\n struct.color_scheme = dict()\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, dict())\n expected = {\"keyword\": \"y\"}\n self.assertEquals(struct.color_scheme, expected)\n\n defaults = {\"name\": \"c\"}\n expected.update(defaults)\n config.load_theme(struct, TEST_THEME_PATH, struct.color_scheme, defaults)\n self.assertEquals(struct.color_scheme, expected)\n\n def test_load_config(self):\n struct = config.Struct()\n with tempfile.NamedTemporaryFile() as f:\n f.write(''.encode('utf8'))\n f.write('[keyboard]\\nhelp = C-h\\n'.encode('utf8'))\n f.flush()\n config.loadini(struct, f.name)\n self.assertEqual(struct.help_key, 'C-h')\n self.assertEqual(struct.backspace_key, '')\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#! /bin/env python3
"""
Lane Emden Python interface.
Main routine:
lane_emden_int(dz, n)
"""
import numpy as np
from . import _solver
def test():
"""
A simple test.
"""
n = 3.
dz = 2.**(-14)
_solver.lane(dz,n)
out = _solver.laneout
n = out.ndata
t = out.theta
return t,n
def lane_emden_int(dz = 2.**(-14), n = 3., w = 0.):
"""
Interface to FORTRAN90 Lane-Emden Integrator.
Call:
ndata, data = laneemden.lane_emden_int(dz, n, w)
INPUT:
dz:
step in z, maye use 2**(-14)
n:
polytropic index (use 3.)
w:
rotation parameter(use 0. for non-rot)
w = 2 Omega^2 / (4 pi G rho_c)
OUTPUT:
ndata:
number of last point (starts with 0)
data:
output data in form [0:ndata,0:1]
index 0:
equidistant grid with step size dz starting at 0
index 1:
0: theta(z)
1: d theta(z) / dz
"""
_solver.lane(dz, n, w)
out = _solver.laneout
n = int(out.ndata)
t = out.theta
return n,t[0:n+1,:]
def lane_emden_step(x,y,dx,n,w):
"""
This allows a single call to the rk4 subroutine.
It turns out to be *way* less efficient.
Do not use.
"""
_solver.rk4(x,y[0],y[1],dx,n,w)
out = _solver.rk4out
return np.array([out.z0,out.z1])
if __name__ == '__main__':
t,n = test()
print(t, n)
|
normal
|
{
"blob_id": "10723f703f40b5db2b7c9532cda520b2ae078546",
"index": 2175,
"step-1": "<mask token>\n\n\ndef lane_emden_int(dz=2.0 ** -14, n=3.0, w=0.0):\n \"\"\"\n Interface to FORTRAN90 Lane-Emden Integrator.\n\n Call:\n ndata, data = laneemden.lane_emden_int(dz, n, w)\n\n INPUT:\n dz:\n step in z, maye use 2**(-14)\n n:\n polytropic index (use 3.)\n w:\n rotation parameter(use 0. for non-rot)\n w = 2 Omega^2 / (4 pi G rho_c)\n\n OUTPUT:\n ndata:\n number of last point (starts with 0)\n data:\n output data in form [0:ndata,0:1]\n index 0:\n equidistant grid with step size dz starting at 0\n index 1:\n 0: theta(z)\n 1: d theta(z) / dz\n \"\"\"\n _solver.lane(dz, n, w)\n out = _solver.laneout\n n = int(out.ndata)\n t = out.theta\n return n, t[0:n + 1, :]\n\n\ndef lane_emden_step(x, y, dx, n, w):\n \"\"\"\n This allows a single call to the rk4 subroutine.\n\n It turns out to be *way* less efficient.\n Do not use.\n \"\"\"\n _solver.rk4(x, y[0], y[1], dx, n, w)\n out = _solver.rk4out\n return np.array([out.z0, out.z1])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test():\n \"\"\"\n A simple test.\n \"\"\"\n n = 3.0\n dz = 2.0 ** -14\n _solver.lane(dz, n)\n out = _solver.laneout\n n = out.ndata\n t = out.theta\n return t, n\n\n\ndef lane_emden_int(dz=2.0 ** -14, n=3.0, w=0.0):\n \"\"\"\n Interface to FORTRAN90 Lane-Emden Integrator.\n\n Call:\n ndata, data = laneemden.lane_emden_int(dz, n, w)\n\n INPUT:\n dz:\n step in z, maye use 2**(-14)\n n:\n polytropic index (use 3.)\n w:\n rotation parameter(use 0. for non-rot)\n w = 2 Omega^2 / (4 pi G rho_c)\n\n OUTPUT:\n ndata:\n number of last point (starts with 0)\n data:\n output data in form [0:ndata,0:1]\n index 0:\n equidistant grid with step size dz starting at 0\n index 1:\n 0: theta(z)\n 1: d theta(z) / dz\n \"\"\"\n _solver.lane(dz, n, w)\n out = _solver.laneout\n n = int(out.ndata)\n t = out.theta\n return n, t[0:n + 1, :]\n\n\ndef lane_emden_step(x, y, dx, n, w):\n \"\"\"\n This allows a single call to the rk4 subroutine.\n\n It turns out to be *way* less efficient.\n Do not use.\n \"\"\"\n _solver.rk4(x, y[0], y[1], dx, n, w)\n out = _solver.rk4out\n return np.array([out.z0, out.z1])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test():\n \"\"\"\n A simple test.\n \"\"\"\n n = 3.0\n dz = 2.0 ** -14\n _solver.lane(dz, n)\n out = _solver.laneout\n n = out.ndata\n t = out.theta\n return t, n\n\n\ndef lane_emden_int(dz=2.0 ** -14, n=3.0, w=0.0):\n \"\"\"\n Interface to FORTRAN90 Lane-Emden Integrator.\n\n Call:\n ndata, data = laneemden.lane_emden_int(dz, n, w)\n\n INPUT:\n dz:\n step in z, maye use 2**(-14)\n n:\n polytropic index (use 3.)\n w:\n rotation parameter(use 0. for non-rot)\n w = 2 Omega^2 / (4 pi G rho_c)\n\n OUTPUT:\n ndata:\n number of last point (starts with 0)\n data:\n output data in form [0:ndata,0:1]\n index 0:\n equidistant grid with step size dz starting at 0\n index 1:\n 0: theta(z)\n 1: d theta(z) / dz\n \"\"\"\n _solver.lane(dz, n, w)\n out = _solver.laneout\n n = int(out.ndata)\n t = out.theta\n return n, t[0:n + 1, :]\n\n\ndef lane_emden_step(x, y, dx, n, w):\n \"\"\"\n This allows a single call to the rk4 subroutine.\n\n It turns out to be *way* less efficient.\n Do not use.\n \"\"\"\n _solver.rk4(x, y[0], y[1], dx, n, w)\n out = _solver.rk4out\n return np.array([out.z0, out.z1])\n\n\nif __name__ == '__main__':\n t, n = test()\n print(t, n)\n",
"step-4": "<mask token>\nimport numpy as np\nfrom . import _solver\n\n\ndef test():\n \"\"\"\n A simple test.\n \"\"\"\n n = 3.0\n dz = 2.0 ** -14\n _solver.lane(dz, n)\n out = _solver.laneout\n n = out.ndata\n t = out.theta\n return t, n\n\n\ndef lane_emden_int(dz=2.0 ** -14, n=3.0, w=0.0):\n \"\"\"\n Interface to FORTRAN90 Lane-Emden Integrator.\n\n Call:\n ndata, data = laneemden.lane_emden_int(dz, n, w)\n\n INPUT:\n dz:\n step in z, maye use 2**(-14)\n n:\n polytropic index (use 3.)\n w:\n rotation parameter(use 0. for non-rot)\n w = 2 Omega^2 / (4 pi G rho_c)\n\n OUTPUT:\n ndata:\n number of last point (starts with 0)\n data:\n output data in form [0:ndata,0:1]\n index 0:\n equidistant grid with step size dz starting at 0\n index 1:\n 0: theta(z)\n 1: d theta(z) / dz\n \"\"\"\n _solver.lane(dz, n, w)\n out = _solver.laneout\n n = int(out.ndata)\n t = out.theta\n return n, t[0:n + 1, :]\n\n\ndef lane_emden_step(x, y, dx, n, w):\n \"\"\"\n This allows a single call to the rk4 subroutine.\n\n It turns out to be *way* less efficient.\n Do not use.\n \"\"\"\n _solver.rk4(x, y[0], y[1], dx, n, w)\n out = _solver.rk4out\n return np.array([out.z0, out.z1])\n\n\nif __name__ == '__main__':\n t, n = test()\n print(t, n)\n",
"step-5": "#! /bin/env python3\n\n\"\"\"\nLane Emden Python interface.\n\nMain routine:\n lane_emden_int(dz, n)\n\"\"\"\n\nimport numpy as np\nfrom . import _solver\n\ndef test():\n \"\"\"\n A simple test.\n \"\"\"\n n = 3.\n dz = 2.**(-14)\n _solver.lane(dz,n)\n out = _solver.laneout\n n = out.ndata\n t = out.theta\n return t,n\n\ndef lane_emden_int(dz = 2.**(-14), n = 3., w = 0.):\n \"\"\"\n Interface to FORTRAN90 Lane-Emden Integrator.\n\n Call:\n ndata, data = laneemden.lane_emden_int(dz, n, w)\n\n INPUT:\n dz:\n step in z, maye use 2**(-14)\n n:\n polytropic index (use 3.)\n w:\n rotation parameter(use 0. for non-rot)\n w = 2 Omega^2 / (4 pi G rho_c)\n\n OUTPUT:\n ndata:\n number of last point (starts with 0)\n data:\n output data in form [0:ndata,0:1]\n index 0:\n equidistant grid with step size dz starting at 0\n index 1:\n 0: theta(z)\n 1: d theta(z) / dz\n \"\"\"\n _solver.lane(dz, n, w)\n out = _solver.laneout\n n = int(out.ndata)\n t = out.theta\n return n,t[0:n+1,:]\n\ndef lane_emden_step(x,y,dx,n,w):\n \"\"\"\n This allows a single call to the rk4 subroutine.\n\n It turns out to be *way* less efficient.\n Do not use.\n \"\"\"\n _solver.rk4(x,y[0],y[1],dx,n,w)\n out = _solver.rk4out\n return np.array([out.z0,out.z1])\n\nif __name__ == '__main__':\n t,n = test()\n print(t, n)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def post_build():
git_add_files('PKGBUILD')
git_commit()
update_aur_repo()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def pre_build():
newver = _G.newver.removeprefix('amd-drm-fixes-')
for line in edit_file('PKGBUILD'):
if line.startswith('_tag'):
line = "_tag='amd-drm-fixes-" + newver + "'"
print(line)
newver2 = newver.replace('-', '.')
update_pkgver_and_pkgrel(newver2)
def post_build():
git_add_files('PKGBUILD')
git_commit()
update_aur_repo()
<|reserved_special_token_1|>
from lilaclib import *
def pre_build():
newver = _G.newver.removeprefix('amd-drm-fixes-')
for line in edit_file('PKGBUILD'):
if line.startswith('_tag'):
line = "_tag='amd-drm-fixes-" + newver + "'"
print(line)
newver2 = newver.replace('-', '.')
update_pkgver_and_pkgrel(newver2)
def post_build():
git_add_files('PKGBUILD')
git_commit()
update_aur_repo()
<|reserved_special_token_1|>
from lilaclib import *
def pre_build():
    """Rewrite PKGBUILD's _tag to the newly detected upstream tag and bump pkgver.

    Relies on lilaclib globals (star-imported above): `_G.newver` carries the
    new upstream tag name, e.g. 'amd-drm-fixes-6.1-2023-01-05'.

    NOTE(review): assumes lilaclib's edit_file() yields the PKGBUILD lines and
    captures whatever is print()-ed as the new file content -- confirm against
    lilaclib's documentation.
    """
    # str.removeprefix requires Python 3.9+
    newver = _G.newver.removeprefix('amd-drm-fixes-')

    for line in edit_file('PKGBUILD'):
        if line.startswith('_tag'):
            # pin the _tag variable to the full upstream tag name
            line = "_tag='amd-drm-fixes-" + newver + "'"
        print(line)
    # pkgver must not contain hyphens, so map them to dots
    newver2 = newver.replace("-",".")
    update_pkgver_and_pkgrel(newver2)
def post_build():
    """After a successful build: commit the updated PKGBUILD and push it to AUR.

    Order matters: the files must be staged and committed before the AUR
    repository is updated. All three helpers come from lilaclib.
    """
    git_add_files('PKGBUILD')
    git_commit()
    update_aur_repo()
#if __name__ == '__main__':
# single_main()
|
flexible
|
{
"blob_id": "32eff306444966fab47815fcbae4aefb6769d29b",
"index": 9684,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef post_build():\n git_add_files('PKGBUILD')\n git_commit()\n update_aur_repo()\n",
"step-3": "<mask token>\n\n\ndef pre_build():\n newver = _G.newver.removeprefix('amd-drm-fixes-')\n for line in edit_file('PKGBUILD'):\n if line.startswith('_tag'):\n line = \"_tag='amd-drm-fixes-\" + newver + \"'\"\n print(line)\n newver2 = newver.replace('-', '.')\n update_pkgver_and_pkgrel(newver2)\n\n\ndef post_build():\n git_add_files('PKGBUILD')\n git_commit()\n update_aur_repo()\n",
"step-4": "from lilaclib import *\n\n\ndef pre_build():\n newver = _G.newver.removeprefix('amd-drm-fixes-')\n for line in edit_file('PKGBUILD'):\n if line.startswith('_tag'):\n line = \"_tag='amd-drm-fixes-\" + newver + \"'\"\n print(line)\n newver2 = newver.replace('-', '.')\n update_pkgver_and_pkgrel(newver2)\n\n\ndef post_build():\n git_add_files('PKGBUILD')\n git_commit()\n update_aur_repo()\n",
"step-5": "from lilaclib import *\n\ndef pre_build():\n newver = _G.newver.removeprefix('amd-drm-fixes-')\n\n for line in edit_file('PKGBUILD'):\n if line.startswith('_tag'):\n line = \"_tag='amd-drm-fixes-\" + newver + \"'\"\n print(line)\n newver2 = newver.replace(\"-\",\".\")\n update_pkgver_and_pkgrel(newver2)\n\ndef post_build():\n git_add_files('PKGBUILD')\n git_commit()\n update_aur_repo()\n\n#if __name__ == '__main__':\n# single_main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from base_page import Base_Page
import locators
class Product_Object:
    """Page object for reading a products table.

    NOTE(review): relies on element helpers (get_elements, get_dom_text,
    write) and locator attributes (rows_xpath, cols_relative_xpath,
    cols_header) that are not defined in this class -- presumably supplied
    by Base_Page, although this class does not inherit from it; confirm the
    intended class hierarchy.
    """

    # locators

    def get_all_text(self):
        """Return the table body as a list of rows, each a list of cell strings."""
        rows = []
        for row_num, _row_dom in enumerate(self.get_elements(self.rows_xpath), start=1):
            cell_doms = self.get_elements(self.cols_relative_xpath % row_num)
            rows.append([self.get_dom_text(cell) for cell in cell_doms])
        return rows

    def get_num_rows(self):
        """Return the number of body rows (the header row is not counted)."""
        return len(self.get_elements(self.rows_xpath))

    def get_num_cols(self):
        """Return the number of columns, counted from the header row."""
        return len(self.get_elements(self.cols_header))

    def get_column_text(self, column_name):
        """Return the text within one column. Not implemented yet (returns None)."""
        return None

    def get_column_names(self):
        """Return the column names read from the header row."""
        return [self.get_dom_text(dom) for dom in self.get_elements(self.cols_header)]

    def check_cell_text_present(self, text, column_name='all'):
        """Return True if `text` matches a cell in the given column (or anywhere)."""
        if column_name == 'all':
            table_text = self.get_all_text()
        else:
            table_text = [self.get_column_text(column_name)]
        return any(cell == text for row in table_text for cell in row)

    def check_name_present(self, name):
        """Return True if `name` appears in the 'name' column."""
        return self.check_cell_text_present(name, column_name='name')

    def print_table_text(self):
        """Write the table out: header cells joined with '||', row cells with '|'."""
        self.write('||'.join(self.get_column_names()))
        for row in self.get_all_text():
            self.write('|'.join(row))
|
normal
|
{
"blob_id": "aebc8665a97ab0a71b1d8a920b5cbf2643254883",
"index": 479,
"step-1": "<mask token>\n\n\nclass Product_Object:\n <mask token>\n\n def get_all_text(self):\n \"\"\"Get the text within the table\"\"\"\n table_text = []\n row_doms = self.get_elements(self.rows_xpath)\n for index, row_dom in enumerate(row_doms):\n row_text = []\n cell_doms = self.get_elements(self.cols_relative_xpath % (index +\n 1))\n for cell_dom in cell_doms:\n row_text.append(self.get_dom_text(cell_dom))\n table_text.append(row_text)\n return table_text\n\n def get_num_rows(self):\n \"\"\"Get the total number of rows in the table\"\"\"\n row_doms = self.get_elements(self.rows_xpath)\n return len(row_doms)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def check_name_present(self, name):\n \"\"\"Check if the supplied name is present anywhere in the table\"\"\"\n return self.check_cell_text_present(name, column_name='name')\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Product_Object:\n <mask token>\n\n def get_all_text(self):\n \"\"\"Get the text within the table\"\"\"\n table_text = []\n row_doms = self.get_elements(self.rows_xpath)\n for index, row_dom in enumerate(row_doms):\n row_text = []\n cell_doms = self.get_elements(self.cols_relative_xpath % (index +\n 1))\n for cell_dom in cell_doms:\n row_text.append(self.get_dom_text(cell_dom))\n table_text.append(row_text)\n return table_text\n\n def get_num_rows(self):\n \"\"\"Get the total number of rows in the table\"\"\"\n row_doms = self.get_elements(self.rows_xpath)\n return len(row_doms)\n\n def get_num_cols(self):\n \"\"\"Return the number of columns\"\"\"\n col_doms = self.get_elements(self.cols_header)\n return len(col_doms)\n <mask token>\n <mask token>\n\n def check_cell_text_present(self, text, column_name='all'):\n \"\"\"Check if the text you want is present in a cell\"\"\"\n result_flag = False\n if column_name == 'all':\n table_text = self.get_all_text()\n else:\n table_text = [self.get_column_text(column_name)]\n for row in table_text:\n for col in row:\n if col == text:\n result_flag = True\n break\n if result_flag is True:\n break\n return result_flag\n\n def check_name_present(self, name):\n \"\"\"Check if the supplied name is present anywhere in the table\"\"\"\n return self.check_cell_text_present(name, column_name='name')\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Product_Object:\n <mask token>\n\n def get_all_text(self):\n \"\"\"Get the text within the table\"\"\"\n table_text = []\n row_doms = self.get_elements(self.rows_xpath)\n for index, row_dom in enumerate(row_doms):\n row_text = []\n cell_doms = self.get_elements(self.cols_relative_xpath % (index +\n 1))\n for cell_dom in cell_doms:\n row_text.append(self.get_dom_text(cell_dom))\n table_text.append(row_text)\n return table_text\n\n def get_num_rows(self):\n \"\"\"Get the total number of rows in the table\"\"\"\n row_doms = self.get_elements(self.rows_xpath)\n return len(row_doms)\n\n def get_num_cols(self):\n \"\"\"Return the number of columns\"\"\"\n col_doms = self.get_elements(self.cols_header)\n return len(col_doms)\n\n def get_column_text(self, column_name):\n \"\"\"Get the text within a column\"\"\"\n pass\n <mask token>\n\n def check_cell_text_present(self, text, column_name='all'):\n \"\"\"Check if the text you want is present in a cell\"\"\"\n result_flag = False\n if column_name == 'all':\n table_text = self.get_all_text()\n else:\n table_text = [self.get_column_text(column_name)]\n for row in table_text:\n for col in row:\n if col == text:\n result_flag = True\n break\n if result_flag is True:\n break\n return result_flag\n\n def check_name_present(self, name):\n \"\"\"Check if the supplied name is present anywhere in the table\"\"\"\n return self.check_cell_text_present(name, column_name='name')\n\n def print_table_text(self):\n \"\"\"Print out the table text neatly\"\"\"\n column_names = self.get_column_names()\n table_text = self.get_all_text()\n self.write('||'.join(column_names))\n for row in table_text:\n self.write('|'.join(row))\n",
"step-4": "from base_page import Base_Page\nimport locators\n\n\nclass Product_Object:\n \"\"\"Page Object for the table\"\"\"\n\n def get_all_text(self):\n \"\"\"Get the text within the table\"\"\"\n table_text = []\n row_doms = self.get_elements(self.rows_xpath)\n for index, row_dom in enumerate(row_doms):\n row_text = []\n cell_doms = self.get_elements(self.cols_relative_xpath % (index +\n 1))\n for cell_dom in cell_doms:\n row_text.append(self.get_dom_text(cell_dom))\n table_text.append(row_text)\n return table_text\n\n def get_num_rows(self):\n \"\"\"Get the total number of rows in the table\"\"\"\n row_doms = self.get_elements(self.rows_xpath)\n return len(row_doms)\n\n def get_num_cols(self):\n \"\"\"Return the number of columns\"\"\"\n col_doms = self.get_elements(self.cols_header)\n return len(col_doms)\n\n def get_column_text(self, column_name):\n \"\"\"Get the text within a column\"\"\"\n pass\n\n def get_column_names(self):\n \"\"\"Return a list with the column names\"\"\"\n column_names = []\n col_doms = self.get_elements(self.cols_header)\n for col_dom in col_doms:\n column_names.append(self.get_dom_text(col_dom))\n return column_names\n\n def check_cell_text_present(self, text, column_name='all'):\n \"\"\"Check if the text you want is present in a cell\"\"\"\n result_flag = False\n if column_name == 'all':\n table_text = self.get_all_text()\n else:\n table_text = [self.get_column_text(column_name)]\n for row in table_text:\n for col in row:\n if col == text:\n result_flag = True\n break\n if result_flag is True:\n break\n return result_flag\n\n def check_name_present(self, name):\n \"\"\"Check if the supplied name is present anywhere in the table\"\"\"\n return self.check_cell_text_present(name, column_name='name')\n\n def print_table_text(self):\n \"\"\"Print out the table text neatly\"\"\"\n column_names = self.get_column_names()\n table_text = self.get_all_text()\n self.write('||'.join(column_names))\n for row in table_text:\n 
self.write('|'.join(row))\n",
"step-5": "from base_page import Base_Page\nimport locators\n\n\nclass Product_Object:\n \"Page Object for the table\"\n \n #locators\n\n def get_all_text(self):\n \"Get the text within the table\"\n table_text = []\n row_doms = self.get_elements(self.rows_xpath)\n for index,row_dom in enumerate(row_doms):\n row_text = []\n cell_doms = self.get_elements(self.cols_relative_xpath%(index+1))\n for cell_dom in cell_doms:\n row_text.append(self.get_dom_text(cell_dom))\n table_text.append(row_text)\n\n return table_text\n\n \n def get_num_rows(self):\n \"Get the total number of rows in the table\"\n #NOTE: We do not count the header row\n row_doms = self.get_elements(self.rows_xpath)\n\n return len(row_doms)\n\n\n def get_num_cols(self):\n \"Return the number of columns\"\n #NOTE: We just count the columns in the header row\n col_doms = self.get_elements(self.cols_header)\n\n return len(col_doms)\n\n\n def get_column_text(self,column_name):\n \"Get the text within a column\"\n pass\n\n def get_column_names(self):\n \"Return a list with the column names\"\n column_names = []\n col_doms = self.get_elements(self.cols_header)\n for col_dom in col_doms:\n column_names.append(self.get_dom_text(col_dom))\n\n return column_names\n\n\n def check_cell_text_present(self,text,column_name='all'):\n \"Check if the text you want is present in a cell\"\n result_flag = False\n if column_name == 'all':\n table_text = self.get_all_text()\n else:\n table_text = [self.get_column_text(column_name)]\n for row in table_text:\n for col in row:\n if col == text:\n result_flag = True\n break\n if result_flag is True:\n break\n\n return result_flag\n\n \n def check_name_present(self,name):\n \"Check if the supplied name is present anywhere in the table\"\n return self.check_cell_text_present(name,column_name='name')\n\n\n def print_table_text(self):\n \"Print out the table text neatly\"\n column_names = self.get_column_names()\n table_text = self.get_all_text()\n\n 
self.write('||'.join(column_names))\n for row in table_text:\n self.write('|'.join(row))\n",
"step-ids": [
4,
6,
8,
11,
12
]
}
|
[
4,
6,
8,
11,
12
] |
# *** Exception handling (exceptional events / exceptional situations) ***

# raising (generating) an exception
a=100
b=0

# "division by zero" - an example of an error (this line does not work)
# c=a/b

# the solution - exception handling (catching the exception)
# the "try-except" construct

# try:
#     c = a / b
#     print("Все отлично")
# except:
#     # the code that runs when an exceptional situation occurs goes here
#     # i.e. the "fallback" code
#     print("Что-то пошло не так")
#     c=a/1

# # code that executes after the previous block can go here
# print("Result: ", c)

# handling multiple exceptions

# result=None

# try:
#     var = int(input("Введите число, но не ноль: "))
#     result = 50/var
#     # handling an exception of a specific type (class)
# except ZeroDivisionError: # here the exception type is ZeroDivisionError
#     print("Вы попытались поделить на ноль!")
#     result=50/1
# except ValueError as val_error: # here the exception type is ValueError
#     print(f"По-моему, Вы ввели не число. Инфо: {val_error}")
#     result=0

# # handling the generic (base) exception - catches all exceptions
# except Exception as err:
#     print(f"Что-то пошло не так: {err}")

# print("Result: ", result)

# the "try-except-finally" construct

# try:
#     var=int(input("Введите число: "))
#     c = 100/var
#     print("Полет нормальный!")
# except ZeroDivisionError:
#     c=0
#     print("Попытка деления на ноль")
# finally:
#     # finally runs in any case, even if the program terminates abnormally
#     # i.e. the critically important logic belongs here
#     print("Критически важное действие")

# print("Result", c)

# the "try-except-else-finally" construct
# NOTE(review): a non-numeric input makes int() raise ValueError, which is not
# handled below and will crash the script before `c` is assigned.

try:
    var=int(input("Введите число: "))
    c = 100/var
    print("Полет нормальный!")
except ZeroDivisionError:
    c=0
    print("Попытка деления на ноль")
else:
    # else runs only when no exception was raised
    print("Логика, которая выполняется только если нет исключений")
finally:
    # finally runs in any case, even if the program terminates abnormally
    # i.e. the critically important logic belongs here
    print("Критически важное действие")

print("Result", c)
|
normal
|
{
"blob_id": "bb02ba68eb6629dad364b5f015680e4126e655f3",
"index": 6173,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntry:\n var = int(input('Введите число: '))\n c = 100 / var\n print('Полет нормальный!')\nexcept ZeroDivisionError:\n c = 0\n print('Попытка деления на ноль')\nelse:\n print('Логика, которая выполняется только если нет исключений')\nfinally:\n print('Критически важное действие')\nprint('Result', c)\n",
"step-3": "a = 100\nb = 0\ntry:\n var = int(input('Введите число: '))\n c = 100 / var\n print('Полет нормальный!')\nexcept ZeroDivisionError:\n c = 0\n print('Попытка деления на ноль')\nelse:\n print('Логика, которая выполняется только если нет исключений')\nfinally:\n print('Критически важное действие')\nprint('Result', c)\n",
"step-4": "# *** Обработка исключений (исключительные события, искл. ситуации)***\n\n# генерация исключения\na=100\nb=0\n\n# \"деление на ноль\" - пример ошибки (не рабочий)\n# c=a/b\n\n# решение - обработка исключений (отлов исключения)\n# конструкция \"try-except\"\n\n# try:\n# c = a / b\n# print(\"Все отлично\")\n# except:\n# # тут должен быть код, который срабатывает при исключительных ситуациях\n# # т.е. \"запасной\" код\n# print(\"Что-то пошло не так\")\n# c=a/1\n\n# # тут может быть код который выполняется после предыдущего блока\n# print(\"Result: \", c)\n\n\n# обработка множества исключений\n\n# result=None\n\n# try:\n# var = int(input(\"Введите число, но не ноль: \"))\n# result = 50/var\n# # обработка исключения конкретного типа (класса)\n# except ZeroDivisionError: # в данном примере тип исключения - ZeroDivisionError\n# print(\"Вы попытались поделить на ноль!\")\n# result=50/1\n# except ValueError as val_error: # в данном примере тип исключения - ValueError, \n# print(f\"По-моему, Вы ввели не число. Инфо: {val_error}\")\n# result=0\n\n# # обработка общего (базового) исключения - отлавливает все исключения\n# except Exception as err:\n# print(f\"Что-то пошло не так: {err}\")\n\n# print(\"Result: \", result)\n\n\n# конструкция \"try-except-finally\"\n\n# try:\n# var=int(input(\"Введите число: \"))\n# c = 100/var\n# print(\"Полет нормальный!\")\n# except ZeroDivisionError:\n# c=0\n# print(\"Попытка деления на ноль\")\n# finally:\n# # finally срабатывает в любом случае, даже если программа завершится аварийно\n# # т.е. 
тут должна быть критически важная логика\n# print(\"Критически важное действие\")\n\n# print(\"Result\", c)\n\n# конструкция \"try-except-finally\"\n\ntry:\n var=int(input(\"Введите число: \"))\n c = 100/var\n print(\"Полет нормальный!\")\nexcept ZeroDivisionError:\n c=0\n print(\"Попытка деления на ноль\")\nelse: \n #else срабатывает только тогда, когда нет исключений\n print(\"Логика, которая выполняется только если нет исключений\")\nfinally:\n # finally срабатывает в любом случае, даже если программа завершится аварийно\n # т.е. тут должна быть критически важная логика\n print(\"Критически важное действие\")\n\nprint(\"Result\", c)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#coding: utf-8
#/usr/bin/python
__author__='julia sayapina'
### Use db_reset.py to drop the db and recreate it, then use 'migrate' --> 'createsuperuser' --> 'makemigrations' --> 'migrate' as usual.
### This will create the DB structure as it has to be from django
### Then use test_db_fullfill.py to fullfill the db with test data. if you don't need to create tables manually don't use db_create()
from warnings import filterwarnings
import MySQLdb as db
import os
import shutil
import os
import sys
from subprocess import Popen, PIPE, STDOUT
import uuid
from decimal import *
from datetime import date
from random import randint
# Module-level setup: silence MySQL warnings and name the target database
filterwarnings('ignore', category = db.Warning)  # e.g. "table already exists" warnings from CREATE TABLE IF NOT EXISTS
db_name = 'ved3'  # NOTE(review): the connect() call at the bottom uses 'ved2', not db_name -- confirm which DB is intended
def db_create(): # creates tables manually (doesn't create AO and AB tables)
    """Create the four core tables by hand with raw SQL.

    NOTE(review): does NOT create the Offshores_offshoresassets and
    Offshores_assetsbeneficiaries link tables that db_insert_linktables()
    writes to -- per the module header the full schema normally comes from
    Django's `migrate`, and this function stays unused.

    Relies on the module-level `cur` (cursor) and `conn` (connection).
    """
    # assets
    cur.execute("""
    create table if not exists Offshores_asset (
        id INTEGER PRIMARY KEY AUTO_INCREMENT,
        asset_name VARCHAR(100),
        asset_link VARCHAR(200),
        slug CHAR(200),
        uuid CHAR(36)
    );
    """)
    # offshore companies
    cur.execute("""
    create table if not exists Offshores_offshore (
        id INTEGER PRIMARY KEY AUTO_INCREMENT,
        off_name VARCHAR(50),
        off_jurisdiction VARCHAR(50),
        file VARCHAR(100),
        image VARCHAR(100),
        off_parent VARCHAR(50),
        off_link VARCHAR(300),
        slug VARCHAR(200),
        uuid CHAR(36)
    );
    """)
    # beneficiaries
    cur.execute("""
    create table if not exists Offshores_beneficiary (
        id INTEGER PRIMARY KEY AUTO_INCREMENT,
        ben_name VARCHAR(50),
        ben_lastname VARCHAR(100),
        ben_midname VARCHAR(30),
        ben_holding VARCHAR(70),
        ben_link VARCHAR(300),
        slug VARCHAR(200),
        uuid CHAR(36)
    );
    """)
    # beneficiary <-> offshore link table
    # NOTE(review): bare DECIMAL means DECIMAL(10,0) in MySQL, so fractional
    # shares would be truncated -- confirm the intended precision.
    cur.execute("""
    create table if not exists Offshores_beneficiariesoffshores (
        id INTEGER PRIMARY KEY AUTO_INCREMENT,
        share DECIMAL,
        rel_date DATE,
        source VARCHAR(150),
        link VARCHAR(200),
        beneficiary_id INT,
        offshore_id INT,
        uuid CHAR(36)
    );
    """)
    conn.commit()
    print('tables created')
def db_insert(numrows):
    """Insert `numrows` rows of test data into the asset, offshore and
    beneficiary tables.

    Each row gets a fresh uuid4 hex and a slug derived from the name.
    Relies on the module-level `cur` / `conn` MySQL handles; commits after
    every row so one failed row does not discard the previous ones.
    """
    # range instead of Python-2-only xrange (NameError on Python 3)
    for x in range(numrows):
        num = str(x)

        # Offshores_asset columns
        a_name = 'Asset' + num
        a_link = 'http://somelink/' + a_name
        a_uuid = uuid.uuid4().hex
        a_slug = a_name + '-' + str(a_uuid)

        # Offshores_offshore columns
        o_name = 'Offshore' + num
        o_jur = 'Cyprus'
        o_file = 'offshores/favicon.xcf'
        o_image = 'offshores/favicon.png'
        o_prnt = 'parent' + num
        o_link = 'http://' + o_name + '-' + num + '.com'
        o_uuid = uuid.uuid4().hex
        o_slug = o_name + str(o_uuid)

        # Offshores_beneficiary columns
        b_name = 'Michael' + num
        b_lname = 'Prohorov' + num
        b_mname = 'Dmitrievich' + num
        b_holding = 'Onexim' + num
        b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'
        b_uuid = uuid.uuid4().hex
        b_slug = b_lname + str(b_uuid)

        try:
            # parameterized queries -- values are never interpolated into the SQL
            cur.execute("""INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)""", (a_name, a_link, a_slug, a_uuid))
            cur.execute("""INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)""", (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug, o_uuid))
            cur.execute("""INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid))
            conn.commit()
        except Exception as e:
            print("Exception 1:", type(e), e)
def db_insert_linktables(numrows):
    """Insert `numrows` rows of test data into the three link tables
    (beneficiaries<->offshores, offshores<->assets, assets<->beneficiaries).

    Must be called ONLY after db_insert(): the foreign-key ids are drawn with
    randint(1, numrows), so the referenced base rows have to exist already.
    Relies on the module-level `cur` / `conn` MySQL handles.
    """
    # range instead of Python-2-only xrange (NameError on Python 3)
    for x in range(numrows):
        num = str(x)

        # Offshores_beneficiariesoffshores row
        bo_share = Decimal(x)
        bo_date = date(2016, randint(1, 12), randint(1, 28))
        bo_source = 'source' + num
        bo_link = 'http://bo.ru/' + bo_source + '-' + num
        bo_ben = randint(1, numrows)
        bo_off = randint(1, numrows)
        bo_uuid = uuid.uuid4().hex

        # Offshores_offshoresassets row
        oa_uuid = uuid.uuid4().hex
        oa_share = Decimal(x)
        oa_date = date(2016, randint(1, 12), randint(1, 28))
        oa_source = 'source' + num
        oa_link = 'http://oa.ru/' + oa_source + '-' + num
        oa_asset = randint(1, numrows)
        oa_off = randint(1, numrows)

        # Offshores_assetsbeneficiaries row
        ab_uuid = uuid.uuid4().hex
        ab_share = Decimal(x)
        ab_date = date(2016, randint(1, 12), randint(1, 28))
        ab_source = 'source' + num
        # NOTE(review): reuses oa_source, mirroring the original code --
        # probably meant ab_source; confirm before changing the data shape.
        ab_link = 'http://ab.ru/' + oa_source + '-' + num
        ab_asset = randint(1, numrows)
        ab_ben = randint(1, numrows)

        try:
            # parameterized queries -- values are never interpolated into the SQL
            cur.execute("""INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off, bo_uuid))
            cur.execute("""INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset, oa_off))
            cur.execute("""INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)""", (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset, ab_ben))
            conn.commit()
        except Exception as e:
            print("Exception 1:", type(e), e)
numrows = 20

# Connect, populate, and clean up. The original code called db.rollback() in a
# bare except clause -- rollback() is a *connection* method, not a function of
# the MySQLdb module, so that line raised AttributeError; and conn.commit() /
# conn.close() ran unconditionally even when connect() had failed (NameError).
conn = None
try:
    # FIXME: hard-coded credentials; move to config/environment.
    # NOTE(review): connects to 'ved2' although db_name above is 'ved3' -- confirm.
    conn = db.connect("localhost","root","0013Tau","ved2" )
    cur = conn.cursor()
    # db_create() #<-- to create tables manually uncomment this
    db_insert(numrows)
    db_insert_linktables(numrows) # IMPORTANT! has to be called ONLY after db_insert()!
    conn.commit()
except Exception as e:
    print("Exception 0:", type(e), e)
    if conn is not None:
        conn.rollback()  # undo the partial batch on failure
finally:
    if conn is not None:
        conn.close()  # always release the connection

print('DB fullfilled')
# def main():
# if len(sys.argv) != 2:
# print('usage: python3 db_fullfill.py [numrows]')
# sys.exit(1)
# if len(sys.argv) == 2:
# numrows = sys.argv[1]
# else:
# numrows = 15
# print (numrows)
# return numrows
# sys.exit(1)
# if __name__ == '__main__':
# main()
|
normal
|
{
"blob_id": "3240310653930662dcc4d79646b1a75c2994cda7",
"index": 9063,
"step-1": "<mask token>\n\n\ndef db_create():\n cur.execute(\n \"\"\"\n create table if not exists Offshores_asset (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n asset_name VARCHAR(100),\n asset_link VARCHAR(200),\n slug CHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_offshore (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n off_name VARCHAR(50),\n off_jurisdiction VARCHAR(50),\n file VARCHAR(100),\n image VARCHAR(100),\n off_parent VARCHAR(50),\n off_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiary (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n ben_name VARCHAR(50),\n ben_lastname VARCHAR(100),\n ben_midname VARCHAR(30),\n ben_holding VARCHAR(70),\n ben_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiariesoffshores (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n share DECIMAL,\n rel_date DATE,\n source VARCHAR(150),\n link VARCHAR(200),\n beneficiary_id INT,\n offshore_id INT,\n uuid CHAR(36)\n );\n \"\"\"\n )\n conn.commit()\n print('tables created')\n\n\n<mask token>\n\n\ndef db_insert_linktables(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n bo_share = Decimal(x)\n bo_date = date(2016, randint(1, 12), randint(1, 28))\n bo_source = 'source' + num\n bo_link = 'http://bo.ru/' + bo_source + '-' + num\n bo_ben = randint(1, numrows)\n bo_off = randint(1, numrows)\n bo_uuid = uuid.uuid4().hex\n oa_uuid = uuid.uuid4().hex\n oa_share = Decimal(x)\n oa_date = date(2016, randint(1, 12), randint(1, 28))\n oa_source = 'source' + num\n oa_link = 'http://oa.ru/' + oa_source + '-' + num\n oa_asset = randint(1, numrows)\n oa_off = randint(1, numrows)\n ab_uuid = uuid.uuid4().hex\n ab_share = Decimal(x)\n ab_date = date(2016, randint(1, 12), randint(1, 28))\n ab_source = 'source' + num\n ab_link = 'http://ab.ru/' + oa_source + 
'-' + num\n ab_asset = randint(1, numrows)\n ab_ben = randint(1, numrows)\n try:\n cur.execute(\n 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off,\n bo_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset,\n oa_off))\n cur.execute(\n 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset,\n ab_ben))\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\n<mask token>\n",
"step-2": "<mask token>\nfilterwarnings('ignore', category=db.Warning)\n<mask token>\n\n\ndef db_create():\n cur.execute(\n \"\"\"\n create table if not exists Offshores_asset (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n asset_name VARCHAR(100),\n asset_link VARCHAR(200),\n slug CHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_offshore (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n off_name VARCHAR(50),\n off_jurisdiction VARCHAR(50),\n file VARCHAR(100),\n image VARCHAR(100),\n off_parent VARCHAR(50),\n off_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiary (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n ben_name VARCHAR(50),\n ben_lastname VARCHAR(100),\n ben_midname VARCHAR(30),\n ben_holding VARCHAR(70),\n ben_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiariesoffshores (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n share DECIMAL,\n rel_date DATE,\n source VARCHAR(150),\n link VARCHAR(200),\n beneficiary_id INT,\n offshore_id INT,\n uuid CHAR(36)\n );\n \"\"\"\n )\n conn.commit()\n print('tables created')\n\n\ndef db_insert(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n a_name = 'Asset' + num\n a_link = 'http://somelink/' + a_name\n a_uuid = uuid.uuid4().hex\n a_slug = a_name + '-' + str(a_uuid)\n o_name = 'Offshore' + num\n o_jur = 'Cyprus'\n o_file = 'offshores/favicon.xcf'\n o_image = 'offshores/favicon.png'\n o_prnt = 'parent' + num\n o_link = 'http://' + o_name + '-' + num + '.com'\n o_uuid = uuid.uuid4().hex\n o_slug = o_name + str(o_uuid)\n b_name = 'Michael' + num\n b_lname = 'Prohorov' + num\n b_mname = 'Dmitrievich' + num\n b_holding = 'Onexim' + num\n b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'\n b_uuid = uuid.uuid4().hex\n b_slug = b_lname + str(b_uuid)\n try:\n 
cur.execute(\n 'INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)'\n , (a_name, a_link, a_slug, a_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)'\n , (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug,\n o_uuid))\n cur.execute(\n 'INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid)\n )\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\ndef db_insert_linktables(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n bo_share = Decimal(x)\n bo_date = date(2016, randint(1, 12), randint(1, 28))\n bo_source = 'source' + num\n bo_link = 'http://bo.ru/' + bo_source + '-' + num\n bo_ben = randint(1, numrows)\n bo_off = randint(1, numrows)\n bo_uuid = uuid.uuid4().hex\n oa_uuid = uuid.uuid4().hex\n oa_share = Decimal(x)\n oa_date = date(2016, randint(1, 12), randint(1, 28))\n oa_source = 'source' + num\n oa_link = 'http://oa.ru/' + oa_source + '-' + num\n oa_asset = randint(1, numrows)\n oa_off = randint(1, numrows)\n ab_uuid = uuid.uuid4().hex\n ab_share = Decimal(x)\n ab_date = date(2016, randint(1, 12), randint(1, 28))\n ab_source = 'source' + num\n ab_link = 'http://ab.ru/' + oa_source + '-' + num\n ab_asset = randint(1, numrows)\n ab_ben = randint(1, numrows)\n try:\n cur.execute(\n 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off,\n bo_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset,\n oa_off))\n cur.execute(\n 
'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset,\n ab_ben))\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\n<mask token>\ntry:\n conn = db.connect('localhost', 'root', '0013Tau', 'ved2')\n cur = conn.cursor()\n db_insert(numrows)\n db_insert_linktables(numrows)\nexcept Exception as e:\n print('Exception 0:', type(e), e)\nexcept:\n db.rollback()\nconn.commit()\nconn.close()\nprint('DB fullfilled')\n",
"step-3": "__author__ = 'julia sayapina'\n<mask token>\nfilterwarnings('ignore', category=db.Warning)\ndb_name = 'ved3'\n\n\ndef db_create():\n cur.execute(\n \"\"\"\n create table if not exists Offshores_asset (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n asset_name VARCHAR(100),\n asset_link VARCHAR(200),\n slug CHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_offshore (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n off_name VARCHAR(50),\n off_jurisdiction VARCHAR(50),\n file VARCHAR(100),\n image VARCHAR(100),\n off_parent VARCHAR(50),\n off_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiary (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n ben_name VARCHAR(50),\n ben_lastname VARCHAR(100),\n ben_midname VARCHAR(30),\n ben_holding VARCHAR(70),\n ben_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiariesoffshores (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n share DECIMAL,\n rel_date DATE,\n source VARCHAR(150),\n link VARCHAR(200),\n beneficiary_id INT,\n offshore_id INT,\n uuid CHAR(36)\n );\n \"\"\"\n )\n conn.commit()\n print('tables created')\n\n\ndef db_insert(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n a_name = 'Asset' + num\n a_link = 'http://somelink/' + a_name\n a_uuid = uuid.uuid4().hex\n a_slug = a_name + '-' + str(a_uuid)\n o_name = 'Offshore' + num\n o_jur = 'Cyprus'\n o_file = 'offshores/favicon.xcf'\n o_image = 'offshores/favicon.png'\n o_prnt = 'parent' + num\n o_link = 'http://' + o_name + '-' + num + '.com'\n o_uuid = uuid.uuid4().hex\n o_slug = o_name + str(o_uuid)\n b_name = 'Michael' + num\n b_lname = 'Prohorov' + num\n b_mname = 'Dmitrievich' + num\n b_holding = 'Onexim' + num\n b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'\n b_uuid = uuid.uuid4().hex\n b_slug = 
b_lname + str(b_uuid)\n try:\n cur.execute(\n 'INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)'\n , (a_name, a_link, a_slug, a_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)'\n , (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug,\n o_uuid))\n cur.execute(\n 'INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid)\n )\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\ndef db_insert_linktables(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n bo_share = Decimal(x)\n bo_date = date(2016, randint(1, 12), randint(1, 28))\n bo_source = 'source' + num\n bo_link = 'http://bo.ru/' + bo_source + '-' + num\n bo_ben = randint(1, numrows)\n bo_off = randint(1, numrows)\n bo_uuid = uuid.uuid4().hex\n oa_uuid = uuid.uuid4().hex\n oa_share = Decimal(x)\n oa_date = date(2016, randint(1, 12), randint(1, 28))\n oa_source = 'source' + num\n oa_link = 'http://oa.ru/' + oa_source + '-' + num\n oa_asset = randint(1, numrows)\n oa_off = randint(1, numrows)\n ab_uuid = uuid.uuid4().hex\n ab_share = Decimal(x)\n ab_date = date(2016, randint(1, 12), randint(1, 28))\n ab_source = 'source' + num\n ab_link = 'http://ab.ru/' + oa_source + '-' + num\n ab_asset = randint(1, numrows)\n ab_ben = randint(1, numrows)\n try:\n cur.execute(\n 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off,\n bo_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (oa_uuid, oa_share, oa_date, oa_source, oa_link, 
oa_asset,\n oa_off))\n cur.execute(\n 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset,\n ab_ben))\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\nnumrows = 20\ntry:\n conn = db.connect('localhost', 'root', '0013Tau', 'ved2')\n cur = conn.cursor()\n db_insert(numrows)\n db_insert_linktables(numrows)\nexcept Exception as e:\n print('Exception 0:', type(e), e)\nexcept:\n db.rollback()\nconn.commit()\nconn.close()\nprint('DB fullfilled')\n",
"step-4": "__author__ = 'julia sayapina'\nfrom warnings import filterwarnings\nimport MySQLdb as db\nimport os\nimport shutil\nimport os\nimport sys\nfrom subprocess import Popen, PIPE, STDOUT\nimport uuid\nfrom decimal import *\nfrom datetime import date\nfrom random import randint\nfilterwarnings('ignore', category=db.Warning)\ndb_name = 'ved3'\n\n\ndef db_create():\n cur.execute(\n \"\"\"\n create table if not exists Offshores_asset (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n asset_name VARCHAR(100),\n asset_link VARCHAR(200),\n slug CHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_offshore (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n off_name VARCHAR(50),\n off_jurisdiction VARCHAR(50),\n file VARCHAR(100),\n image VARCHAR(100),\n off_parent VARCHAR(50),\n off_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiary (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n ben_name VARCHAR(50),\n ben_lastname VARCHAR(100),\n ben_midname VARCHAR(30),\n ben_holding VARCHAR(70),\n ben_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\"\n )\n cur.execute(\n \"\"\"\n create table if not exists Offshores_beneficiariesoffshores (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n share DECIMAL,\n rel_date DATE,\n source VARCHAR(150),\n link VARCHAR(200),\n beneficiary_id INT,\n offshore_id INT,\n uuid CHAR(36)\n );\n \"\"\"\n )\n conn.commit()\n print('tables created')\n\n\ndef db_insert(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n a_name = 'Asset' + num\n a_link = 'http://somelink/' + a_name\n a_uuid = uuid.uuid4().hex\n a_slug = a_name + '-' + str(a_uuid)\n o_name = 'Offshore' + num\n o_jur = 'Cyprus'\n o_file = 'offshores/favicon.xcf'\n o_image = 'offshores/favicon.png'\n o_prnt = 'parent' + num\n o_link = 'http://' + o_name + '-' + num + '.com'\n o_uuid = uuid.uuid4().hex\n o_slug = o_name + str(o_uuid)\n 
b_name = 'Michael' + num\n b_lname = 'Prohorov' + num\n b_mname = 'Dmitrievich' + num\n b_holding = 'Onexim' + num\n b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'\n b_uuid = uuid.uuid4().hex\n b_slug = b_lname + str(b_uuid)\n try:\n cur.execute(\n 'INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)'\n , (a_name, a_link, a_slug, a_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)'\n , (o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug,\n o_uuid))\n cur.execute(\n 'INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid)\n )\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\ndef db_insert_linktables(numrows):\n for x in xrange(0, numrows):\n num = str(x)\n bo_share = Decimal(x)\n bo_date = date(2016, randint(1, 12), randint(1, 28))\n bo_source = 'source' + num\n bo_link = 'http://bo.ru/' + bo_source + '-' + num\n bo_ben = randint(1, numrows)\n bo_off = randint(1, numrows)\n bo_uuid = uuid.uuid4().hex\n oa_uuid = uuid.uuid4().hex\n oa_share = Decimal(x)\n oa_date = date(2016, randint(1, 12), randint(1, 28))\n oa_source = 'source' + num\n oa_link = 'http://oa.ru/' + oa_source + '-' + num\n oa_asset = randint(1, numrows)\n oa_off = randint(1, numrows)\n ab_uuid = uuid.uuid4().hex\n ab_share = Decimal(x)\n ab_date = date(2016, randint(1, 12), randint(1, 28))\n ab_source = 'source' + num\n ab_link = 'http://ab.ru/' + oa_source + '-' + num\n ab_asset = randint(1, numrows)\n ab_ben = randint(1, numrows)\n try:\n cur.execute(\n 'INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (bo_share, bo_date, bo_source, bo_link, bo_ben, 
bo_off,\n bo_uuid))\n cur.execute(\n 'INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset,\n oa_off))\n cur.execute(\n 'INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset,\n ab_ben))\n conn.commit()\n except Exception as e:\n print('Exception 1:', type(e), e)\n\n\nnumrows = 20\ntry:\n conn = db.connect('localhost', 'root', '0013Tau', 'ved2')\n cur = conn.cursor()\n db_insert(numrows)\n db_insert_linktables(numrows)\nexcept Exception as e:\n print('Exception 0:', type(e), e)\nexcept:\n db.rollback()\nconn.commit()\nconn.close()\nprint('DB fullfilled')\n",
"step-5": "#coding: utf-8\n#/usr/bin/python\n__author__='julia sayapina'\n\n### Use db_reset.py to drop the db and recreate it, then use 'migrate' --> 'createsuperuser' --> 'makemigrations' --> 'migrate' as usual.\n### This will create the DB structure as it has to be from django\n### Then use test_db_fullfill.py to fullfill the db with test data. if you don't need to create tables manually don't use db_create()\n\nfrom warnings import filterwarnings\nimport MySQLdb as db\nimport os\nimport shutil\nimport os\nimport sys \nfrom subprocess import Popen, PIPE, STDOUT\nimport uuid\nfrom decimal import *\nfrom datetime import date\nfrom random import randint\n\n\n# Создание или открытие файла базы данных и создание схемы\nfilterwarnings('ignore', category = db.Warning)\ndb_name = 'ved3'\n\ndef db_create(): # creates tables manually (doesn't create AO and AB tables)\n cur.execute(\"\"\"\n create table if not exists Offshores_asset (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n asset_name VARCHAR(100),\n asset_link VARCHAR(200),\n slug CHAR(200),\n uuid CHAR(36)\n );\n \"\"\")\n cur.execute(\"\"\"\n create table if not exists Offshores_offshore (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n off_name VARCHAR(50),\n off_jurisdiction VARCHAR(50),\n file VARCHAR(100),\n image VARCHAR(100),\n off_parent VARCHAR(50),\n off_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\")\n cur.execute(\"\"\"\n create table if not exists Offshores_beneficiary (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n ben_name VARCHAR(50),\n ben_lastname VARCHAR(100),\n ben_midname VARCHAR(30),\n ben_holding VARCHAR(70),\n ben_link VARCHAR(300),\n slug VARCHAR(200),\n uuid CHAR(36)\n );\n \"\"\")\n cur.execute(\"\"\"\n create table if not exists Offshores_beneficiariesoffshores (\n id INTEGER PRIMARY KEY AUTO_INCREMENT,\n share DECIMAL,\n rel_date DATE,\n source VARCHAR(150),\n link VARCHAR(200),\n beneficiary_id INT,\n offshore_id INT,\n uuid CHAR(36)\n );\n \"\"\")\n conn.commit()\n 
print('tables created')\n\n\ndef db_insert(numrows):\n # inserts test data into tables\n for x in xrange(0,numrows): #creates test data for tables\n num = str(x)\n a_name = 'Asset' + num\n a_link = 'http://somelink/'+a_name\n a_uuid = uuid.uuid4().hex\n a_slug = a_name + '-' + str(a_uuid)\n o_name = 'Offshore' + num\n o_jur = 'Cyprus'\n o_file = 'offshores/favicon.xcf'\n o_image = 'offshores/favicon.png'\n o_prnt = 'parent' + num\n o_link = 'http://' + o_name + '-' + num + '.com'\n o_uuid = uuid.uuid4().hex\n o_slug = o_name + str(o_uuid)\n b_name = 'Michael' + num\n b_lname = 'Prohorov' + num\n b_mname = 'Dmitrievich' + num\n b_holding = 'Onexim' + num\n b_link = 'http://onexim.ru/' + b_name + b_lname + '-' + num + '.com'\n b_uuid = uuid.uuid4().hex\n b_slug = b_lname + str(b_uuid)\n\n try: #inserts test data to tables via SQL; still produces wierd errors for Beneficiariesoffshores idk why\n cur.execute(\"\"\"INSERT INTO Offshores_asset (asset_name, asset_link, slug, uuid) VALUES (%s,%s,%s,%s)\"\"\",(a_name, a_link, a_slug, a_uuid))\n cur.execute(\"\"\"INSERT INTO Offshores_offshore (off_name, off_jurisdiction, file, image, off_parent, off_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)\"\"\",(o_name, o_jur, o_file, o_image, o_prnt, o_link, o_slug, o_uuid))\n cur.execute(\"\"\"INSERT INTO Offshores_beneficiary (ben_name, ben_lastname, ben_midname, ben_holding, ben_link, slug, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)\"\"\",(b_name, b_lname, b_mname, b_holding, b_link, b_slug, b_uuid))\n conn.commit()\n except Exception as e:\n print (\"Exception 1:\", type(e), e)\n\ndef db_insert_linktables(numrows):\n # inserts test data into linking tables; has to be called after db_insert(), as first basic tables need to be generated to produce links between\n # them using random numbers\n for x in xrange(0,numrows): #creates test data for tables\n num = str(x)\n bo_share = Decimal(x)\n bo_date = date(2016, randint(1, 12), randint(1, 28))\n bo_source = 'source' + num\n bo_link = 
'http://bo.ru/' + bo_source + '-' + num\n bo_ben = randint(1, numrows)\n bo_off = randint(1, numrows)\n bo_uuid = uuid.uuid4().hex\n oa_uuid = uuid.uuid4().hex\n oa_share = Decimal(x)\n oa_date = date(2016, randint(1, 12), randint(1, 28))\n oa_source = 'source' + num\n oa_link = 'http://oa.ru/' + oa_source + '-' + num\n oa_asset = randint(1, numrows)\n oa_off = randint(1, numrows)\n ab_uuid = uuid.uuid4().hex\n ab_share = Decimal(x)\n ab_date = date(2016, randint(1, 12), randint(1, 28))\n ab_source = 'source' + num\n ab_link = 'http://ab.ru/' + oa_source + '-' + num\n ab_asset = randint(1, numrows)\n ab_ben = randint(1, numrows)\n\n try: #inserts test data to tables via SQL; still produces wierd errors for Beneficiariesoffshores idk why\n cur.execute(\"\"\"INSERT INTO Offshores_beneficiariesoffshores (share, rel_date, source, link, beneficiary_id, offshore_id, uuid) VALUES (%s,%s,%s,%s,%s,%s,%s)\"\"\",(bo_share, bo_date, bo_source, bo_link, bo_ben, bo_off, bo_uuid))\n cur.execute(\"\"\"INSERT INTO Offshores_offshoresassets (uuid, share, rel_date, source, link, asset_id, offshore_id) VALUES (%s,%s,%s,%s,%s,%s,%s)\"\"\",(oa_uuid, oa_share, oa_date, oa_source, oa_link, oa_asset, oa_off))\n cur.execute(\"\"\"INSERT INTO Offshores_assetsbeneficiaries (uuid, share, rel_date, source, link, asset_id, beneficiary_id) VALUES (%s,%s,%s,%s,%s,%s,%s)\"\"\",(ab_uuid, ab_share, ab_date, ab_source, ab_link, ab_asset, ab_ben))\n conn.commit()\n except Exception as e:\n print (\"Exception 1:\", type(e), e)\n\nnumrows = 20\ntry:\n conn = db.connect(\"localhost\",\"root\",\"0013Tau\",\"ved2\" )\n cur = conn.cursor()\n # db_create() #<-- to create tables manually uncomment this\n db_insert(numrows)\n db_insert_linktables(numrows) # IMPORTANT! 
has to be called ONLY after db_insert()!\n\nexcept Exception as e:\n print (\"Exception 0:\", type(e), e)\n\nexcept: db.rollback() \n\n\nconn.commit()\nconn.close()\nprint ('DB fullfilled')\n\n\n# def main():\n# if len(sys.argv) != 2:\n# print('usage: python3 db_fullfill.py [numrows]')\n# sys.exit(1)\n\n# if len(sys.argv) == 2: \n# numrows = sys.argv[1]\n\n# else:\n# numrows = 15\n# print (numrows)\n\n# return numrows\n# sys.exit(1)\n\n# if __name__ == '__main__':\n# main()\n\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
def II():
return int(sys.stdin.readline())
def MI():
return map(int, sys.stdin.readline().split())
def LI():
return list(map(int, sys.stdin.readline().split()))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def II():
return int(sys.stdin.readline())
def MI():
return map(int, sys.stdin.readline().split())
def LI():
return list(map(int, sys.stdin.readline().split()))
<|reserved_special_token_0|>
def main():
N, K = MI()
kukan = []
for _ in range(K):
tmp = LI()
kukan.append(tmp)
dp = [0] * (N + 1)
dp[1] = 1
dp_sum = [0] * (N + 1)
dp_sum[1] = 1
for i in range(N + 1):
for k in range(K):
l, r = kukan[k]
pre_l = i - r
pre_r = i - l
if pre_r < 0:
continue
pre_l = max(pre_l, 0)
dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]
dp_sum[i] = dp[i] + dp_sum[i - 1]
dp_sum[i] %= MOD
dp[i] %= MOD
print(dp[-1])
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def II():
return int(sys.stdin.readline())
def MI():
return map(int, sys.stdin.readline().split())
def LI():
return list(map(int, sys.stdin.readline().split()))
MOD = 998244353
def main():
N, K = MI()
kukan = []
for _ in range(K):
tmp = LI()
kukan.append(tmp)
dp = [0] * (N + 1)
dp[1] = 1
dp_sum = [0] * (N + 1)
dp_sum[1] = 1
for i in range(N + 1):
for k in range(K):
l, r = kukan[k]
pre_l = i - r
pre_r = i - l
if pre_r < 0:
continue
pre_l = max(pre_l, 0)
dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]
dp_sum[i] = dp[i] + dp_sum[i - 1]
dp_sum[i] %= MOD
dp[i] %= MOD
print(dp[-1])
main()
<|reserved_special_token_1|>
import sys
import collections as cl
def II():
return int(sys.stdin.readline())
def MI():
return map(int, sys.stdin.readline().split())
def LI():
return list(map(int, sys.stdin.readline().split()))
MOD = 998244353
def main():
N, K = MI()
kukan = []
for _ in range(K):
tmp = LI()
kukan.append(tmp)
dp = [0] * (N + 1)
dp[1] = 1
dp_sum = [0] * (N + 1)
dp_sum[1] = 1
for i in range(N + 1):
for k in range(K):
l, r = kukan[k]
pre_l = i - r
pre_r = i - l
if pre_r < 0:
continue
pre_l = max(pre_l, 0)
dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]
dp_sum[i] = dp[i] + dp_sum[i - 1]
dp_sum[i] %= MOD
dp[i] %= MOD
print(dp[-1])
main()
<|reserved_special_token_1|>
#!/usr/bin/env python3
import sys
import collections as cl
def II(): return int(sys.stdin.readline())
def MI(): return map(int, sys.stdin.readline().split())
def LI(): return list(map(int, sys.stdin.readline().split()))
MOD = 998244353
def main():
N, K = MI()
kukan = []
for _ in range(K):
tmp = LI()
kukan.append(tmp)
dp = [0] * (N + 1)
dp[1] = 1
dp_sum = [0] * (N+1)
dp_sum[1] = 1
for i in range(N+1):
for k in range(K):
l, r = kukan[k]
pre_l = i - r
pre_r = i - l
if pre_r < 0:
continue
pre_l = max(pre_l, 0)
dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]
dp_sum[i] = dp[i] + dp_sum[i-1]
dp_sum[i] %= MOD
dp[i] %= MOD
print(dp[-1])
main()
|
flexible
|
{
"blob_id": "60b70171dededd758e00d6446842355a47b54cc0",
"index": 9700,
"step-1": "<mask token>\n\n\ndef II():\n return int(sys.stdin.readline())\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\ndef LI():\n return list(map(int, sys.stdin.readline().split()))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef II():\n return int(sys.stdin.readline())\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\ndef LI():\n return list(map(int, sys.stdin.readline().split()))\n\n\n<mask token>\n\n\ndef main():\n N, K = MI()\n kukan = []\n for _ in range(K):\n tmp = LI()\n kukan.append(tmp)\n dp = [0] * (N + 1)\n dp[1] = 1\n dp_sum = [0] * (N + 1)\n dp_sum[1] = 1\n for i in range(N + 1):\n for k in range(K):\n l, r = kukan[k]\n pre_l = i - r\n pre_r = i - l\n if pre_r < 0:\n continue\n pre_l = max(pre_l, 0)\n dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]\n dp_sum[i] = dp[i] + dp_sum[i - 1]\n dp_sum[i] %= MOD\n dp[i] %= MOD\n print(dp[-1])\n\n\nmain()\n",
"step-3": "<mask token>\n\n\ndef II():\n return int(sys.stdin.readline())\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\ndef LI():\n return list(map(int, sys.stdin.readline().split()))\n\n\nMOD = 998244353\n\n\ndef main():\n N, K = MI()\n kukan = []\n for _ in range(K):\n tmp = LI()\n kukan.append(tmp)\n dp = [0] * (N + 1)\n dp[1] = 1\n dp_sum = [0] * (N + 1)\n dp_sum[1] = 1\n for i in range(N + 1):\n for k in range(K):\n l, r = kukan[k]\n pre_l = i - r\n pre_r = i - l\n if pre_r < 0:\n continue\n pre_l = max(pre_l, 0)\n dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]\n dp_sum[i] = dp[i] + dp_sum[i - 1]\n dp_sum[i] %= MOD\n dp[i] %= MOD\n print(dp[-1])\n\n\nmain()\n",
"step-4": "import sys\nimport collections as cl\n\n\ndef II():\n return int(sys.stdin.readline())\n\n\ndef MI():\n return map(int, sys.stdin.readline().split())\n\n\ndef LI():\n return list(map(int, sys.stdin.readline().split()))\n\n\nMOD = 998244353\n\n\ndef main():\n N, K = MI()\n kukan = []\n for _ in range(K):\n tmp = LI()\n kukan.append(tmp)\n dp = [0] * (N + 1)\n dp[1] = 1\n dp_sum = [0] * (N + 1)\n dp_sum[1] = 1\n for i in range(N + 1):\n for k in range(K):\n l, r = kukan[k]\n pre_l = i - r\n pre_r = i - l\n if pre_r < 0:\n continue\n pre_l = max(pre_l, 0)\n dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]\n dp_sum[i] = dp[i] + dp_sum[i - 1]\n dp_sum[i] %= MOD\n dp[i] %= MOD\n print(dp[-1])\n\n\nmain()\n",
"step-5": "#!/usr/bin/env python3\nimport sys\nimport collections as cl\n\n\ndef II(): return int(sys.stdin.readline())\n\n\ndef MI(): return map(int, sys.stdin.readline().split())\n\n\ndef LI(): return list(map(int, sys.stdin.readline().split()))\n\n\nMOD = 998244353\n\n\ndef main():\n N, K = MI()\n\n kukan = []\n for _ in range(K):\n tmp = LI()\n kukan.append(tmp)\n\n dp = [0] * (N + 1)\n dp[1] = 1\n dp_sum = [0] * (N+1)\n dp_sum[1] = 1\n\n for i in range(N+1):\n for k in range(K):\n l, r = kukan[k]\n pre_l = i - r\n pre_r = i - l\n if pre_r < 0:\n continue\n pre_l = max(pre_l, 0)\n dp[i] += dp_sum[pre_r] - dp_sum[pre_l - 1]\n\n dp_sum[i] = dp[i] + dp_sum[i-1]\n dp_sum[i] %= MOD\n dp[i] %= MOD\n\n print(dp[-1])\n\n\nmain()\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
#!/usr/bin/env python
# encoding: UTF-8
'''
Script to select current version for a given soft (python, ruby or java).
'''
import os
import re
import sys
import glob
import getopt
# fix input in Python 2 and 3
try:
input = raw_input # pylint: disable=redefined-builtin,invalid-name
except NameError:
pass
class Version(object): # pylint: disable=useless-object-inheritance
'''
Software management class
'''
HELP = '''version [-h] software
Select software version in a menu:
-h To print this help screen.
software Software version to choose.'''
SELECTED = ' *'
def __init__(self, soft):
'''
Constructor that takes software name
'''
self.soft = soft
self.app_dir = os.environ.get('APP_DIR')
if self.app_dir is None:
self.app_dir = '/opt'
self.sudo = True
if os.access(self.app_dir, os.W_OK):
self.sudo = False
self.soft_root = os.path.join(self.app_dir, self.soft)
self.soft_paths = sorted(glob.glob(self.soft_root+'/[0-9]*'))
self.versions = [v[len(self.soft_root)+1:] for v in self.soft_paths]
path = os.path.realpath("%s/current" % self.soft_root)
self.current_version = path[path.rindex(os.path.sep)+1:]
def set_version(self, index):
'''
Set software version by index
'''
sudo = 'sudo ' if self.sudo else ''
old_dir = "current"
if index == -1:
print("Selecting system version")
if os.path.exists(os.path.join(self.soft_root, old_dir)):
os.system("cd %s && %srm %s" % (self.soft_root, sudo, old_dir))
else:
print("Selecting %s version '%s'" %
(self.soft, self.versions[index]))
directory = self.versions[index]
if os.path.exists(os.path.join(self.soft_root, old_dir)):
os.system("cd %s && %srm %s" % (self.soft_root, sudo, old_dir))
os.system("cd %s && %sln -s %s %s" % (self.soft_root, sudo, directory, old_dir))
def ask_version(self):
'''
Prompt user for software version in the list of installed versions
'''
# print version list
print('Please choose a version:')
index = 1
if self.current_version == 'current':
selected = self.SELECTED
else:
selected = ''
print("0: System"+selected)
for version in self.soft_paths:
number = version[len(self.soft_root)+1:]
if number == self.current_version:
selected = self.SELECTED
else:
selected = ''
print(str(index)+': '+str(number)+selected)
index += 1
# ask for the version
chosen = None
maximum = len(self.soft_paths)
while not chosen:
try:
choice = input()
except KeyboardInterrupt:
print("\nUser abort!")
sys.exit(0)
if re.match('\\d+', choice) and int(choice) <= maximum and \
int(choice) >= 0:
index = int(choice) - 1
chosen = True
elif choice == '':
print("Keeping current")
sys.exit(0)
else:
print("Bad version, please choose a number between 0 and %s" %
str(maximum))
# return index in version table
return index
@staticmethod
def run():
'''
Read software name on command line and run version selection
'''
try:
opts, args = getopt.getopt(sys.argv[1:], 'h', ['help'])
except getopt.GetoptError as exception:
print('Error parsing command line: %s' % exception)
print(Version.HELP)
sys.exit(1)
for option, _ in opts:
if option in ('-h', '--help'):
print(Version.HELP)
sys.exit(0)
else:
print("Error parsing command line: Unhandled option '%s'" % option)
print(Version.HELP)
sys.exit(2)
if len(args) != 1:
print("Error parsing command line: You must pass software")
print(Version.HELP)
sys.exit(1)
soft = args[0]
version = Version(soft)
version.set_version(version.ask_version())
if __name__ == '__main__':
Version.run()
|
normal
|
{
"blob_id": "93e8e9fc4f0503dfc3243bef5ab8261a4cdfc296",
"index": 1009,
"step-1": "<mask token>\n\n\nclass Version(object):\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, soft):\n \"\"\"\n Constructor that takes software name\n \"\"\"\n self.soft = soft\n self.app_dir = os.environ.get('APP_DIR')\n if self.app_dir is None:\n self.app_dir = '/opt'\n self.sudo = True\n if os.access(self.app_dir, os.W_OK):\n self.sudo = False\n self.soft_root = os.path.join(self.app_dir, self.soft)\n self.soft_paths = sorted(glob.glob(self.soft_root + '/[0-9]*'))\n self.versions = [v[len(self.soft_root) + 1:] for v in self.soft_paths]\n path = os.path.realpath('%s/current' % self.soft_root)\n self.current_version = path[path.rindex(os.path.sep) + 1:]\n\n def set_version(self, index):\n \"\"\"\n Set software version by index\n \"\"\"\n sudo = 'sudo ' if self.sudo else ''\n old_dir = 'current'\n if index == -1:\n print('Selecting system version')\n if os.path.exists(os.path.join(self.soft_root, old_dir)):\n os.system('cd %s && %srm %s' % (self.soft_root, sudo, old_dir))\n else:\n print(\"Selecting %s version '%s'\" % (self.soft, self.versions[\n index]))\n directory = self.versions[index]\n if os.path.exists(os.path.join(self.soft_root, old_dir)):\n os.system('cd %s && %srm %s' % (self.soft_root, sudo, old_dir))\n os.system('cd %s && %sln -s %s %s' % (self.soft_root, sudo,\n directory, old_dir))\n\n def ask_version(self):\n \"\"\"\n Prompt user for software version in the list of installed versions\n \"\"\"\n print('Please choose a version:')\n index = 1\n if self.current_version == 'current':\n selected = self.SELECTED\n else:\n selected = ''\n print('0: System' + selected)\n for version in self.soft_paths:\n number = version[len(self.soft_root) + 1:]\n if number == self.current_version:\n selected = self.SELECTED\n else:\n selected = ''\n print(str(index) + ': ' + str(number) + selected)\n index += 1\n chosen = None\n maximum = len(self.soft_paths)\n while not chosen:\n try:\n choice = input()\n except KeyboardInterrupt:\n 
print('\\nUser abort!')\n sys.exit(0)\n if re.match('\\\\d+', choice) and int(choice) <= maximum and int(\n choice) >= 0:\n index = int(choice) - 1\n chosen = True\n elif choice == '':\n print('Keeping current')\n sys.exit(0)\n else:\n print(\n 'Bad version, please choose a number between 0 and %s' %\n str(maximum))\n return index\n\n @staticmethod\n def run():\n \"\"\"\n Read software name on command line and run version selection\n \"\"\"\n try:\n opts, args = getopt.getopt(sys.argv[1:], 'h', ['help'])\n except getopt.GetoptError as exception:\n print('Error parsing command line: %s' % exception)\n print(Version.HELP)\n sys.exit(1)\n for option, _ in opts:\n if option in ('-h', '--help'):\n print(Version.HELP)\n sys.exit(0)\n else:\n print(\"Error parsing command line: Unhandled option '%s'\" %\n option)\n print(Version.HELP)\n sys.exit(2)\n if len(args) != 1:\n print('Error parsing command line: You must pass software')\n print(Version.HELP)\n sys.exit(1)\n soft = args[0]\n version = Version(soft)\n version.set_version(version.ask_version())\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Version(object):\n <mask token>\n HELP = \"\"\"version [-h] software\nSelect software version in a menu:\n-h To print this help screen.\nsoftware Software version to choose.\"\"\"\n SELECTED = ' *'\n\n def __init__(self, soft):\n \"\"\"\n Constructor that takes software name\n \"\"\"\n self.soft = soft\n self.app_dir = os.environ.get('APP_DIR')\n if self.app_dir is None:\n self.app_dir = '/opt'\n self.sudo = True\n if os.access(self.app_dir, os.W_OK):\n self.sudo = False\n self.soft_root = os.path.join(self.app_dir, self.soft)\n self.soft_paths = sorted(glob.glob(self.soft_root + '/[0-9]*'))\n self.versions = [v[len(self.soft_root) + 1:] for v in self.soft_paths]\n path = os.path.realpath('%s/current' % self.soft_root)\n self.current_version = path[path.rindex(os.path.sep) + 1:]\n\n def set_version(self, index):\n \"\"\"\n Set software version by index\n \"\"\"\n sudo = 'sudo ' if self.sudo else ''\n old_dir = 'current'\n if index == -1:\n print('Selecting system version')\n if os.path.exists(os.path.join(self.soft_root, old_dir)):\n os.system('cd %s && %srm %s' % (self.soft_root, sudo, old_dir))\n else:\n print(\"Selecting %s version '%s'\" % (self.soft, self.versions[\n index]))\n directory = self.versions[index]\n if os.path.exists(os.path.join(self.soft_root, old_dir)):\n os.system('cd %s && %srm %s' % (self.soft_root, sudo, old_dir))\n os.system('cd %s && %sln -s %s %s' % (self.soft_root, sudo,\n directory, old_dir))\n\n def ask_version(self):\n \"\"\"\n Prompt user for software version in the list of installed versions\n \"\"\"\n print('Please choose a version:')\n index = 1\n if self.current_version == 'current':\n selected = self.SELECTED\n else:\n selected = ''\n print('0: System' + selected)\n for version in self.soft_paths:\n number = version[len(self.soft_root) + 1:]\n if number == self.current_version:\n selected = self.SELECTED\n else:\n selected = ''\n print(str(index) + ': ' + str(number) + selected)\n index += 1\n 
chosen = None\n maximum = len(self.soft_paths)\n while not chosen:\n try:\n choice = input()\n except KeyboardInterrupt:\n print('\\nUser abort!')\n sys.exit(0)\n if re.match('\\\\d+', choice) and int(choice) <= maximum and int(\n choice) >= 0:\n index = int(choice) - 1\n chosen = True\n elif choice == '':\n print('Keeping current')\n sys.exit(0)\n else:\n print(\n 'Bad version, please choose a number between 0 and %s' %\n str(maximum))\n return index\n\n @staticmethod\n def run():\n \"\"\"\n Read software name on command line and run version selection\n \"\"\"\n try:\n opts, args = getopt.getopt(sys.argv[1:], 'h', ['help'])\n except getopt.GetoptError as exception:\n print('Error parsing command line: %s' % exception)\n print(Version.HELP)\n sys.exit(1)\n for option, _ in opts:\n if option in ('-h', '--help'):\n print(Version.HELP)\n sys.exit(0)\n else:\n print(\"Error parsing command line: Unhandled option '%s'\" %\n option)\n print(Version.HELP)\n sys.exit(2)\n if len(args) != 1:\n print('Error parsing command line: You must pass software')\n print(Version.HELP)\n sys.exit(1)\n soft = args[0]\n version = Version(soft)\n version.set_version(version.ask_version())\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Version(object):\n \"\"\"\n Software management class\n \"\"\"\n HELP = \"\"\"version [-h] software\nSelect software version in a menu:\n-h To print this help screen.\nsoftware Software version to choose.\"\"\"\n SELECTED = ' *'\n\n def __init__(self, soft):\n \"\"\"\n Constructor that takes software name\n \"\"\"\n self.soft = soft\n self.app_dir = os.environ.get('APP_DIR')\n if self.app_dir is None:\n self.app_dir = '/opt'\n self.sudo = True\n if os.access(self.app_dir, os.W_OK):\n self.sudo = False\n self.soft_root = os.path.join(self.app_dir, self.soft)\n self.soft_paths = sorted(glob.glob(self.soft_root + '/[0-9]*'))\n self.versions = [v[len(self.soft_root) + 1:] for v in self.soft_paths]\n path = os.path.realpath('%s/current' % self.soft_root)\n self.current_version = path[path.rindex(os.path.sep) + 1:]\n\n def set_version(self, index):\n \"\"\"\n Set software version by index\n \"\"\"\n sudo = 'sudo ' if self.sudo else ''\n old_dir = 'current'\n if index == -1:\n print('Selecting system version')\n if os.path.exists(os.path.join(self.soft_root, old_dir)):\n os.system('cd %s && %srm %s' % (self.soft_root, sudo, old_dir))\n else:\n print(\"Selecting %s version '%s'\" % (self.soft, self.versions[\n index]))\n directory = self.versions[index]\n if os.path.exists(os.path.join(self.soft_root, old_dir)):\n os.system('cd %s && %srm %s' % (self.soft_root, sudo, old_dir))\n os.system('cd %s && %sln -s %s %s' % (self.soft_root, sudo,\n directory, old_dir))\n\n def ask_version(self):\n \"\"\"\n Prompt user for software version in the list of installed versions\n \"\"\"\n print('Please choose a version:')\n index = 1\n if self.current_version == 'current':\n selected = self.SELECTED\n else:\n selected = ''\n print('0: System' + selected)\n for version in self.soft_paths:\n number = version[len(self.soft_root) + 1:]\n if number == self.current_version:\n selected = self.SELECTED\n else:\n selected = ''\n print(str(index) + ': ' + 
str(number) + selected)\n index += 1\n chosen = None\n maximum = len(self.soft_paths)\n while not chosen:\n try:\n choice = input()\n except KeyboardInterrupt:\n print('\\nUser abort!')\n sys.exit(0)\n if re.match('\\\\d+', choice) and int(choice) <= maximum and int(\n choice) >= 0:\n index = int(choice) - 1\n chosen = True\n elif choice == '':\n print('Keeping current')\n sys.exit(0)\n else:\n print(\n 'Bad version, please choose a number between 0 and %s' %\n str(maximum))\n return index\n\n @staticmethod\n def run():\n \"\"\"\n Read software name on command line and run version selection\n \"\"\"\n try:\n opts, args = getopt.getopt(sys.argv[1:], 'h', ['help'])\n except getopt.GetoptError as exception:\n print('Error parsing command line: %s' % exception)\n print(Version.HELP)\n sys.exit(1)\n for option, _ in opts:\n if option in ('-h', '--help'):\n print(Version.HELP)\n sys.exit(0)\n else:\n print(\"Error parsing command line: Unhandled option '%s'\" %\n option)\n print(Version.HELP)\n sys.exit(2)\n if len(args) != 1:\n print('Error parsing command line: You must pass software')\n print(Version.HELP)\n sys.exit(1)\n soft = args[0]\n version = Version(soft)\n version.set_version(version.ask_version())\n\n\n<mask token>\n",
"step-4": "<mask token>\ntry:\n input = raw_input\nexcept NameError:\n pass\n\n\nclass Version(object):\n \"\"\"\n Software management class\n \"\"\"\n HELP = \"\"\"version [-h] software\nSelect software version in a menu:\n-h To print this help screen.\nsoftware Software version to choose.\"\"\"\n SELECTED = ' *'\n\n def __init__(self, soft):\n \"\"\"\n Constructor that takes software name\n \"\"\"\n self.soft = soft\n self.app_dir = os.environ.get('APP_DIR')\n if self.app_dir is None:\n self.app_dir = '/opt'\n self.sudo = True\n if os.access(self.app_dir, os.W_OK):\n self.sudo = False\n self.soft_root = os.path.join(self.app_dir, self.soft)\n self.soft_paths = sorted(glob.glob(self.soft_root + '/[0-9]*'))\n self.versions = [v[len(self.soft_root) + 1:] for v in self.soft_paths]\n path = os.path.realpath('%s/current' % self.soft_root)\n self.current_version = path[path.rindex(os.path.sep) + 1:]\n\n def set_version(self, index):\n \"\"\"\n Set software version by index\n \"\"\"\n sudo = 'sudo ' if self.sudo else ''\n old_dir = 'current'\n if index == -1:\n print('Selecting system version')\n if os.path.exists(os.path.join(self.soft_root, old_dir)):\n os.system('cd %s && %srm %s' % (self.soft_root, sudo, old_dir))\n else:\n print(\"Selecting %s version '%s'\" % (self.soft, self.versions[\n index]))\n directory = self.versions[index]\n if os.path.exists(os.path.join(self.soft_root, old_dir)):\n os.system('cd %s && %srm %s' % (self.soft_root, sudo, old_dir))\n os.system('cd %s && %sln -s %s %s' % (self.soft_root, sudo,\n directory, old_dir))\n\n def ask_version(self):\n \"\"\"\n Prompt user for software version in the list of installed versions\n \"\"\"\n print('Please choose a version:')\n index = 1\n if self.current_version == 'current':\n selected = self.SELECTED\n else:\n selected = ''\n print('0: System' + selected)\n for version in self.soft_paths:\n number = version[len(self.soft_root) + 1:]\n if number == self.current_version:\n selected = self.SELECTED\n 
else:\n selected = ''\n print(str(index) + ': ' + str(number) + selected)\n index += 1\n chosen = None\n maximum = len(self.soft_paths)\n while not chosen:\n try:\n choice = input()\n except KeyboardInterrupt:\n print('\\nUser abort!')\n sys.exit(0)\n if re.match('\\\\d+', choice) and int(choice) <= maximum and int(\n choice) >= 0:\n index = int(choice) - 1\n chosen = True\n elif choice == '':\n print('Keeping current')\n sys.exit(0)\n else:\n print(\n 'Bad version, please choose a number between 0 and %s' %\n str(maximum))\n return index\n\n @staticmethod\n def run():\n \"\"\"\n Read software name on command line and run version selection\n \"\"\"\n try:\n opts, args = getopt.getopt(sys.argv[1:], 'h', ['help'])\n except getopt.GetoptError as exception:\n print('Error parsing command line: %s' % exception)\n print(Version.HELP)\n sys.exit(1)\n for option, _ in opts:\n if option in ('-h', '--help'):\n print(Version.HELP)\n sys.exit(0)\n else:\n print(\"Error parsing command line: Unhandled option '%s'\" %\n option)\n print(Version.HELP)\n sys.exit(2)\n if len(args) != 1:\n print('Error parsing command line: You must pass software')\n print(Version.HELP)\n sys.exit(1)\n soft = args[0]\n version = Version(soft)\n version.set_version(version.ask_version())\n\n\nif __name__ == '__main__':\n Version.run()\n",
"step-5": "#!/usr/bin/env python\n# encoding: UTF-8\n\n'''\nScript to select current version for a given soft (python, ruby or java).\n'''\n\nimport os\nimport re\nimport sys\nimport glob\nimport getopt\n\n\n# fix input in Python 2 and 3\ntry:\n input = raw_input # pylint: disable=redefined-builtin,invalid-name\nexcept NameError:\n pass\n\n\nclass Version(object): # pylint: disable=useless-object-inheritance\n '''\n Software management class\n '''\n\n HELP = '''version [-h] software\nSelect software version in a menu:\n-h To print this help screen.\nsoftware Software version to choose.'''\n SELECTED = ' *'\n\n def __init__(self, soft):\n '''\n Constructor that takes software name\n '''\n self.soft = soft\n self.app_dir = os.environ.get('APP_DIR')\n if self.app_dir is None:\n self.app_dir = '/opt'\n self.sudo = True\n if os.access(self.app_dir, os.W_OK):\n self.sudo = False\n self.soft_root = os.path.join(self.app_dir, self.soft)\n self.soft_paths = sorted(glob.glob(self.soft_root+'/[0-9]*'))\n self.versions = [v[len(self.soft_root)+1:] for v in self.soft_paths]\n path = os.path.realpath(\"%s/current\" % self.soft_root)\n self.current_version = path[path.rindex(os.path.sep)+1:]\n\n def set_version(self, index):\n '''\n Set software version by index\n '''\n sudo = 'sudo ' if self.sudo else ''\n old_dir = \"current\"\n if index == -1:\n print(\"Selecting system version\")\n if os.path.exists(os.path.join(self.soft_root, old_dir)):\n os.system(\"cd %s && %srm %s\" % (self.soft_root, sudo, old_dir))\n else:\n print(\"Selecting %s version '%s'\" %\n (self.soft, self.versions[index]))\n directory = self.versions[index]\n if os.path.exists(os.path.join(self.soft_root, old_dir)):\n os.system(\"cd %s && %srm %s\" % (self.soft_root, sudo, old_dir))\n os.system(\"cd %s && %sln -s %s %s\" % (self.soft_root, sudo, directory, old_dir))\n\n def ask_version(self):\n '''\n Prompt user for software version in the list of installed versions\n '''\n # print version list\n print('Please 
choose a version:')\n index = 1\n if self.current_version == 'current':\n selected = self.SELECTED\n else:\n selected = ''\n print(\"0: System\"+selected)\n for version in self.soft_paths:\n number = version[len(self.soft_root)+1:]\n if number == self.current_version:\n selected = self.SELECTED\n else:\n selected = ''\n print(str(index)+': '+str(number)+selected)\n index += 1\n # ask for the version\n chosen = None\n maximum = len(self.soft_paths)\n while not chosen:\n try:\n choice = input()\n except KeyboardInterrupt:\n print(\"\\nUser abort!\")\n sys.exit(0)\n if re.match('\\\\d+', choice) and int(choice) <= maximum and \\\n int(choice) >= 0:\n index = int(choice) - 1\n chosen = True\n elif choice == '':\n print(\"Keeping current\")\n sys.exit(0)\n else:\n print(\"Bad version, please choose a number between 0 and %s\" %\n str(maximum))\n # return index in version table\n return index\n\n @staticmethod\n def run():\n '''\n Read software name on command line and run version selection\n '''\n try:\n opts, args = getopt.getopt(sys.argv[1:], 'h', ['help'])\n except getopt.GetoptError as exception:\n print('Error parsing command line: %s' % exception)\n print(Version.HELP)\n sys.exit(1)\n for option, _ in opts:\n if option in ('-h', '--help'):\n print(Version.HELP)\n sys.exit(0)\n else:\n print(\"Error parsing command line: Unhandled option '%s'\" % option)\n print(Version.HELP)\n sys.exit(2)\n if len(args) != 1:\n print(\"Error parsing command line: You must pass software\")\n print(Version.HELP)\n sys.exit(1)\n soft = args[0]\n version = Version(soft)\n version.set_version(version.ask_version())\n\n\nif __name__ == '__main__':\n Version.run()\n",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
<|reserved_special_token_0|>
class FactTypeAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner_type', 'name', 'abbreviation',
'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'
]
class FamilyAdmin(RootsMagicModelAdmin):
list_display = ['id', 'father', 'mother', 'child', 'husband_order',
'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',
'mother_label']
class GroupAdmin(RootsMagicModelAdmin):
pass
class LabelAdmin(RootsMagicModelAdmin):
pass
class LinkAncestryAdmin(RootsMagicModelAdmin):
pass
class LinkAdmin(RootsMagicModelAdmin):
list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',
'modified', 'ext_version', 'ext_date', 'status', 'note']
class MediaLinkAdmin(RootsMagicModelAdmin):
list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',
'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',
'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',
'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',
'sort_date']
class MultimediaAdmin(RootsMagicModelAdmin):
list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',
'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']
def pretty_date(self, obj):
return read_and_pprint_date(obj.date)
pretty_date.short_description = 'Date'
class NameAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',
'nickname', 'name_type', 'date', 'sort_date', 'is_primary',
'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',
'death_year']
class PersonAdmin(RootsMagicModelAdmin):
list_display = ['id', 'primary_name', 'sex_short', 'edit_date',
'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',
'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']
class PlaceAdmin(RootsMagicModelAdmin):
list_display = ['id', 'place_type', 'name', 'abbreviation',
'normalized', 'master_place', 'pretty_latlong',
'exact_latituate_longitude', 'note']
raw_id_fields = ['master_place']
readonly_fields = ['pretty_latlong']
class ResearchItemAdmin(RootsMagicModelAdmin):
pass
class ResearchAdmin(RootsMagicModelAdmin):
pass
class RoleAdmin(RootsMagicModelAdmin):
list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']
class SourceAdmin(RootsMagicModelAdmin):
raw_id_fields = ['template']
class SourceTemplateAdmin(RootsMagicModelAdmin):
pass
class UrlAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',
'url', 'note']
class WitnessAdmin(RootsMagicModelAdmin):
list_display = ['id', 'event', 'person', 'witness_order', 'role',
'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class EventAdmin(RootsMagicModelAdmin):
list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',
'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',
'is_private', 'proof', 'status', 'edit_date', 'sentence']
def pretty_date(self, obj):
return read_and_pprint_date(obj.date)
pretty_date.short_description = 'Date'
class ExclusionAdmin(RootsMagicModelAdmin):
pass
class FactTypeAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner_type', 'name', 'abbreviation',
'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'
]
class FamilyAdmin(RootsMagicModelAdmin):
list_display = ['id', 'father', 'mother', 'child', 'husband_order',
'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',
'mother_label']
class GroupAdmin(RootsMagicModelAdmin):
pass
class LabelAdmin(RootsMagicModelAdmin):
pass
class LinkAncestryAdmin(RootsMagicModelAdmin):
pass
class LinkAdmin(RootsMagicModelAdmin):
list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',
'modified', 'ext_version', 'ext_date', 'status', 'note']
class MediaLinkAdmin(RootsMagicModelAdmin):
list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',
'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',
'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',
'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',
'sort_date']
class MultimediaAdmin(RootsMagicModelAdmin):
list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',
'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']
def pretty_date(self, obj):
return read_and_pprint_date(obj.date)
pretty_date.short_description = 'Date'
class NameAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',
'nickname', 'name_type', 'date', 'sort_date', 'is_primary',
'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',
'death_year']
class PersonAdmin(RootsMagicModelAdmin):
list_display = ['id', 'primary_name', 'sex_short', 'edit_date',
'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',
'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']
class PlaceAdmin(RootsMagicModelAdmin):
list_display = ['id', 'place_type', 'name', 'abbreviation',
'normalized', 'master_place', 'pretty_latlong',
'exact_latituate_longitude', 'note']
raw_id_fields = ['master_place']
readonly_fields = ['pretty_latlong']
class ResearchItemAdmin(RootsMagicModelAdmin):
pass
class ResearchAdmin(RootsMagicModelAdmin):
pass
class RoleAdmin(RootsMagicModelAdmin):
list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']
class SourceAdmin(RootsMagicModelAdmin):
raw_id_fields = ['template']
class SourceTemplateAdmin(RootsMagicModelAdmin):
pass
class UrlAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',
'url', 'note']
class WitnessAdmin(RootsMagicModelAdmin):
list_display = ['id', 'event', 'person', 'witness_order', 'role',
'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CitationAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',
'is_private', 'comments', 'actual_text', 'reference_number', 'flags']
class ConfigurationAdmin(RootsMagicModelAdmin):
pass
class EventAdmin(RootsMagicModelAdmin):
list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',
'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',
'is_private', 'proof', 'status', 'edit_date', 'sentence']
def pretty_date(self, obj):
return read_and_pprint_date(obj.date)
pretty_date.short_description = 'Date'
class ExclusionAdmin(RootsMagicModelAdmin):
pass
class FactTypeAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner_type', 'name', 'abbreviation',
'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'
]
class FamilyAdmin(RootsMagicModelAdmin):
list_display = ['id', 'father', 'mother', 'child', 'husband_order',
'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',
'mother_label']
class GroupAdmin(RootsMagicModelAdmin):
pass
class LabelAdmin(RootsMagicModelAdmin):
pass
class LinkAncestryAdmin(RootsMagicModelAdmin):
pass
class LinkAdmin(RootsMagicModelAdmin):
list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',
'modified', 'ext_version', 'ext_date', 'status', 'note']
class MediaLinkAdmin(RootsMagicModelAdmin):
list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',
'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',
'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',
'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',
'sort_date']
class MultimediaAdmin(RootsMagicModelAdmin):
list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',
'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']
def pretty_date(self, obj):
return read_and_pprint_date(obj.date)
pretty_date.short_description = 'Date'
class NameAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',
'nickname', 'name_type', 'date', 'sort_date', 'is_primary',
'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',
'death_year']
class PersonAdmin(RootsMagicModelAdmin):
list_display = ['id', 'primary_name', 'sex_short', 'edit_date',
'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',
'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']
class PlaceAdmin(RootsMagicModelAdmin):
list_display = ['id', 'place_type', 'name', 'abbreviation',
'normalized', 'master_place', 'pretty_latlong',
'exact_latituate_longitude', 'note']
raw_id_fields = ['master_place']
readonly_fields = ['pretty_latlong']
class ResearchItemAdmin(RootsMagicModelAdmin):
pass
class ResearchAdmin(RootsMagicModelAdmin):
pass
class RoleAdmin(RootsMagicModelAdmin):
list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']
class SourceAdmin(RootsMagicModelAdmin):
raw_id_fields = ['template']
class SourceTemplateAdmin(RootsMagicModelAdmin):
pass
class UrlAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',
'url', 'note']
class WitnessAdmin(RootsMagicModelAdmin):
list_display = ['id', 'event', 'person', 'witness_order', 'role',
'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ChildAdmin(RootsMagicModelAdmin):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class CitationAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',
'is_private', 'comments', 'actual_text', 'reference_number', 'flags']
class ConfigurationAdmin(RootsMagicModelAdmin):
pass
class EventAdmin(RootsMagicModelAdmin):
list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',
'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',
'is_private', 'proof', 'status', 'edit_date', 'sentence']
def pretty_date(self, obj):
return read_and_pprint_date(obj.date)
pretty_date.short_description = 'Date'
class ExclusionAdmin(RootsMagicModelAdmin):
pass
class FactTypeAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner_type', 'name', 'abbreviation',
'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'
]
class FamilyAdmin(RootsMagicModelAdmin):
list_display = ['id', 'father', 'mother', 'child', 'husband_order',
'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',
'mother_label']
class GroupAdmin(RootsMagicModelAdmin):
pass
class LabelAdmin(RootsMagicModelAdmin):
pass
class LinkAncestryAdmin(RootsMagicModelAdmin):
pass
class LinkAdmin(RootsMagicModelAdmin):
list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',
'modified', 'ext_version', 'ext_date', 'status', 'note']
class MediaLinkAdmin(RootsMagicModelAdmin):
list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',
'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',
'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',
'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',
'sort_date']
class MultimediaAdmin(RootsMagicModelAdmin):
list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',
'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']
def pretty_date(self, obj):
return read_and_pprint_date(obj.date)
pretty_date.short_description = 'Date'
class NameAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',
'nickname', 'name_type', 'date', 'sort_date', 'is_primary',
'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',
'death_year']
class PersonAdmin(RootsMagicModelAdmin):
list_display = ['id', 'primary_name', 'sex_short', 'edit_date',
'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',
'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']
class PlaceAdmin(RootsMagicModelAdmin):
list_display = ['id', 'place_type', 'name', 'abbreviation',
'normalized', 'master_place', 'pretty_latlong',
'exact_latituate_longitude', 'note']
raw_id_fields = ['master_place']
readonly_fields = ['pretty_latlong']
class ResearchItemAdmin(RootsMagicModelAdmin):
pass
class ResearchAdmin(RootsMagicModelAdmin):
pass
class RoleAdmin(RootsMagicModelAdmin):
list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']
class SourceAdmin(RootsMagicModelAdmin):
raw_id_fields = ['template']
class SourceTemplateAdmin(RootsMagicModelAdmin):
pass
class UrlAdmin(RootsMagicModelAdmin):
list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',
'url', 'note']
class WitnessAdmin(RootsMagicModelAdmin):
list_display = ['id', 'event', 'person', 'witness_order', 'role',
'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from django.contrib import admin
from .models import (AddressLink, Address, Child, Citation,
Configuration, Event, Exclusion, FactType,
Family, Group, Label, LinkAncestry,
Link, MediaLink, Multimedia, Name,
Person, Place, ResearchItem, Research,
Role, Source, SourceTemplate, Url,
Witness)
from . import EXODUS_DB_NAME
from .utils.admin import MultiDBModelAdmin
from .utils.rootsmagic import read_and_pprint_date
class RootsMagicModelAdmin(MultiDBModelAdmin):
using = EXODUS_DB_NAME
class AddressLinkAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"owner_type",
"address",
"owner_id",
"address_number",
"details",
]
class AddressAdmin(RootsMagicModelAdmin):
pass
class ChildAdmin(RootsMagicModelAdmin):
list_display = [
"record_id",
"child",
"family",
"father_relationship",
"mother_relationship",
"child_order",
"is_private",
"father_proof",
"mother_proof",
"note",
]
raw_id_fields = [
'child',
'family',
]
class CitationAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"owner_type",
"source_id",
"owner_id",
"quality",
"is_private",
"comments",
"actual_text",
"reference_number",
"flags",
# "fields",
]
class ConfigurationAdmin(RootsMagicModelAdmin):
pass
class EventAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"event_type",
"owner",
"owner_type",
"owner_id",
"family",
"place",
"site",
# "date",
"pretty_date",
"sort_date",
"is_primary",
"is_private",
"proof",
"status",
"edit_date",
"sentence",
# "details",
# "note",
]
def pretty_date(self, obj):
return read_and_pprint_date(obj.date)
pretty_date.short_description = "Date"
class ExclusionAdmin(RootsMagicModelAdmin):
pass
class FactTypeAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"owner_type",
"name",
"abbreviation",
"gedcom_tag",
"use_value",
"use_date",
"use_place",
"sentence",
"flags",
]
class FamilyAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"father",
"mother",
"child",
"husband_order",
"wife_order",
"is_private",
"proof",
"spouse_label",
"father_label",
"mother_label",
# "note",
]
class GroupAdmin(RootsMagicModelAdmin):
pass
class LabelAdmin(RootsMagicModelAdmin):
pass
class LinkAncestryAdmin(RootsMagicModelAdmin):
pass
class LinkAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"ext_system",
"link_type",
"rootsmagic",
"ext_id",
"modified",
"ext_version",
"ext_date",
"status",
"note",
]
class MediaLinkAdmin(RootsMagicModelAdmin):
list_display = [
"link_id",
"media",
"owner",
"owner_type",
"owner_id",
"is_primary",
"include_1",
"include_2",
"include_3",
"include_4",
"sort_order",
"rectangle_left",
"rectangle_top",
"rectangle_right",
"rectangle_bottom",
"note",
"caption",
"reference_number",
"date",
"sort_date",
# "description",
]
class MultimediaAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"media_type",
"media_path",
"media_file",
"url",
"thumbnail",
"caption",
"reference_number",
# "date",
"pretty_date",
"sort_date",
# "description",
]
def pretty_date(self, obj):
return read_and_pprint_date(obj.date)
pretty_date.short_description = "Date"
class NameAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"owner",
"surname",
"given",
"prefix",
"suffix",
"nickname",
"name_type",
"date",
"sort_date",
"is_primary",
"is_private",
"proof",
"edit_date",
"sentence",
# "note",
"birth_year",
"death_year",
]
class PersonAdmin(RootsMagicModelAdmin):
list_display = [
"id",
'primary_name',
"sex_short",
"edit_date",
"parent",
"spouse",
"color",
"relate_1",
"relate_2",
"flags",
"is_living",
"is_private",
"proof",
"unique_id",
"bookmark",
# "note",
]
class PlaceAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"place_type",
"name",
"abbreviation",
"normalized",
"master_place",
# "latitude",
# "longitude",
"pretty_latlong",
"exact_latituate_longitude",
"note",
]
raw_id_fields = [
"master_place"
]
readonly_fields = [
"pretty_latlong"
]
class ResearchItemAdmin(RootsMagicModelAdmin):
pass
class ResearchAdmin(RootsMagicModelAdmin):
pass
class RoleAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"role_name",
"event_type",
"role_type",
"sentence",
]
class SourceAdmin(RootsMagicModelAdmin):
raw_id_fields = ['template']
class SourceTemplateAdmin(RootsMagicModelAdmin):
pass
class UrlAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"owner_type",
"owner_id",
"link_type",
"name",
"url",
"note",
]
class WitnessAdmin(RootsMagicModelAdmin):
list_display = [
"id",
"event",
"person",
"witness_order",
"role",
"sentence",
"note",
"given",
"surname",
"prefix",
"suffix",
]
admin.site.register(AddressLink, AddressLinkAdmin)
admin.site.register(Address, AddressAdmin)
admin.site.register(Child, ChildAdmin)
admin.site.register(Citation, CitationAdmin)
admin.site.register(Configuration, ConfigurationAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(Exclusion, ExclusionAdmin)
admin.site.register(FactType, FactTypeAdmin)
admin.site.register(Family, FamilyAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Label, LabelAdmin)
admin.site.register(LinkAncestry, LinkAncestryAdmin)
admin.site.register(Link, LinkAdmin)
admin.site.register(MediaLink, MediaLinkAdmin)
admin.site.register(Multimedia, MultimediaAdmin)
admin.site.register(Name, NameAdmin)
admin.site.register(Person, PersonAdmin)
admin.site.register(Place, PlaceAdmin)
admin.site.register(ResearchItem, ResearchItemAdmin)
admin.site.register(Research, ResearchAdmin)
admin.site.register(Role, RoleAdmin)
admin.site.register(Source, SourceAdmin)
admin.site.register(SourceTemplate, SourceTemplateAdmin)
admin.site.register(Url, UrlAdmin)
admin.site.register(Witness, WitnessAdmin)
|
flexible
|
{
"blob_id": "b4d48427dddc7c0240cf05c003cbf7b0163279ee",
"index": 9729,
"step-1": "<mask token>\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 
'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 
'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, 
obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass ChildAdmin(RootsMagicModelAdmin):\n <mask token>\n <mask token>\n\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'source_id', 'owner_id', 'quality',\n 'is_private', 'comments', 'actual_text', 'reference_number', 'flags']\n\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event_type', 'owner', 'owner_type', 'owner_id',\n 'family', 'place', 'site', 'pretty_date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'status', 'edit_date', 'sentence']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'name', 'abbreviation',\n 'gedcom_tag', 'use_value', 'use_date', 'use_place', 'sentence', 'flags'\n ]\n\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'father', 'mother', 'child', 'husband_order',\n 'wife_order', 'is_private', 'proof', 'spouse_label', 'father_label',\n 'mother_label']\n\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'ext_system', 'link_type', 'rootsmagic', 'ext_id',\n 'modified', 'ext_version', 'ext_date', 'status', 'note']\n\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = ['link_id', 'media', 'owner', 'owner_type', 'owner_id',\n 'is_primary', 'include_1', 'include_2', 'include_3', 'include_4',\n 'sort_order', 'rectangle_left', 'rectangle_top', 'rectangle_right',\n 'rectangle_bottom', 'note', 'caption', 'reference_number', 'date',\n 'sort_date']\n\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'media_type', 'media_path', 'media_file', 'url',\n 'thumbnail', 'caption', 
'reference_number', 'pretty_date', 'sort_date']\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = 'Date'\n\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner', 'surname', 'given', 'prefix', 'suffix',\n 'nickname', 'name_type', 'date', 'sort_date', 'is_primary',\n 'is_private', 'proof', 'edit_date', 'sentence', 'birth_year',\n 'death_year']\n\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'primary_name', 'sex_short', 'edit_date',\n 'parent', 'spouse', 'color', 'relate_1', 'relate_2', 'flags',\n 'is_living', 'is_private', 'proof', 'unique_id', 'bookmark']\n\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'place_type', 'name', 'abbreviation',\n 'normalized', 'master_place', 'pretty_latlong',\n 'exact_latituate_longitude', 'note']\n raw_id_fields = ['master_place']\n readonly_fields = ['pretty_latlong']\n\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'role_name', 'event_type', 'role_type', 'sentence']\n\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'owner_type', 'owner_id', 'link_type', 'name',\n 'url', 'note']\n\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = ['id', 'event', 'person', 'witness_order', 'role',\n 'sentence', 'note', 'given', 'surname', 'prefix', 'suffix']\n\n\n<mask token>\n",
"step-5": "from django.contrib import admin\n\nfrom .models import (AddressLink, Address, Child, Citation,\n Configuration, Event, Exclusion, FactType,\n Family, Group, Label, LinkAncestry,\n Link, MediaLink, Multimedia, Name,\n Person, Place, ResearchItem, Research,\n Role, Source, SourceTemplate, Url,\n Witness)\n\nfrom . import EXODUS_DB_NAME\nfrom .utils.admin import MultiDBModelAdmin\nfrom .utils.rootsmagic import read_and_pprint_date\n\n\nclass RootsMagicModelAdmin(MultiDBModelAdmin):\n using = EXODUS_DB_NAME\n\n\nclass AddressLinkAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"owner_type\",\n \"address\",\n \"owner_id\",\n \"address_number\",\n \"details\",\n ]\n\nclass AddressAdmin(RootsMagicModelAdmin):\n pass\n\nclass ChildAdmin(RootsMagicModelAdmin):\n list_display = [\n \"record_id\",\n \"child\",\n \"family\",\n \"father_relationship\",\n \"mother_relationship\",\n \"child_order\",\n \"is_private\",\n \"father_proof\",\n \"mother_proof\",\n \"note\",\n ]\n raw_id_fields = [\n 'child',\n 'family',\n ]\n\nclass CitationAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"owner_type\",\n \"source_id\",\n \"owner_id\",\n \"quality\",\n \"is_private\",\n \"comments\",\n \"actual_text\",\n \"reference_number\",\n \"flags\",\n # \"fields\",\n ]\n\nclass ConfigurationAdmin(RootsMagicModelAdmin):\n pass\n\nclass EventAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"event_type\",\n \"owner\",\n \"owner_type\",\n \"owner_id\",\n \"family\",\n \"place\",\n \"site\",\n # \"date\",\n \"pretty_date\",\n \"sort_date\",\n \"is_primary\",\n \"is_private\",\n \"proof\",\n \"status\",\n \"edit_date\",\n \"sentence\",\n # \"details\",\n # \"note\",\n ]\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = \"Date\"\n\nclass ExclusionAdmin(RootsMagicModelAdmin):\n pass\n\nclass FactTypeAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"owner_type\",\n \"name\",\n 
\"abbreviation\",\n \"gedcom_tag\",\n \"use_value\",\n \"use_date\",\n \"use_place\",\n \"sentence\",\n \"flags\",\n ]\n\nclass FamilyAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"father\",\n \"mother\",\n \"child\",\n \"husband_order\",\n \"wife_order\",\n \"is_private\",\n \"proof\",\n \"spouse_label\",\n \"father_label\",\n \"mother_label\",\n # \"note\",\n ]\n\nclass GroupAdmin(RootsMagicModelAdmin):\n pass\n\nclass LabelAdmin(RootsMagicModelAdmin):\n pass\n\nclass LinkAncestryAdmin(RootsMagicModelAdmin):\n pass\n\nclass LinkAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"ext_system\",\n \"link_type\",\n \"rootsmagic\",\n \"ext_id\",\n \"modified\",\n \"ext_version\",\n \"ext_date\",\n \"status\",\n \"note\",\n ]\n\nclass MediaLinkAdmin(RootsMagicModelAdmin):\n list_display = [\n \"link_id\",\n \"media\",\n \"owner\",\n \"owner_type\",\n \"owner_id\",\n \"is_primary\",\n \"include_1\",\n \"include_2\",\n \"include_3\",\n \"include_4\",\n \"sort_order\",\n \"rectangle_left\",\n \"rectangle_top\",\n \"rectangle_right\",\n \"rectangle_bottom\",\n \"note\",\n \"caption\",\n \"reference_number\",\n \"date\",\n \"sort_date\",\n # \"description\",\n ]\n\nclass MultimediaAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"media_type\",\n \"media_path\",\n \"media_file\",\n \"url\",\n \"thumbnail\",\n \"caption\",\n \"reference_number\",\n # \"date\",\n \"pretty_date\",\n \"sort_date\",\n # \"description\",\n ]\n\n def pretty_date(self, obj):\n return read_and_pprint_date(obj.date)\n pretty_date.short_description = \"Date\"\n\nclass NameAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"owner\",\n \"surname\",\n \"given\",\n \"prefix\",\n \"suffix\",\n \"nickname\",\n \"name_type\",\n \"date\",\n \"sort_date\",\n \"is_primary\",\n \"is_private\",\n \"proof\",\n \"edit_date\",\n \"sentence\",\n # \"note\",\n \"birth_year\",\n \"death_year\",\n ]\n\nclass PersonAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n 
'primary_name',\n \"sex_short\",\n \"edit_date\",\n \"parent\",\n \"spouse\",\n \"color\",\n \"relate_1\",\n \"relate_2\",\n \"flags\",\n \"is_living\",\n \"is_private\",\n \"proof\",\n \"unique_id\",\n \"bookmark\",\n # \"note\",\n ]\n\nclass PlaceAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"place_type\",\n \"name\",\n \"abbreviation\",\n \"normalized\",\n \"master_place\",\n # \"latitude\",\n # \"longitude\",\n \"pretty_latlong\",\n \"exact_latituate_longitude\",\n \"note\",\n ]\n raw_id_fields = [\n \"master_place\"\n ]\n readonly_fields = [\n \"pretty_latlong\"\n ]\n\nclass ResearchItemAdmin(RootsMagicModelAdmin):\n pass\n\nclass ResearchAdmin(RootsMagicModelAdmin):\n pass\n\nclass RoleAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"role_name\",\n \"event_type\",\n \"role_type\",\n \"sentence\",\n ]\n\nclass SourceAdmin(RootsMagicModelAdmin):\n raw_id_fields = ['template']\n\nclass SourceTemplateAdmin(RootsMagicModelAdmin):\n pass\n\nclass UrlAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"owner_type\",\n \"owner_id\",\n \"link_type\",\n \"name\",\n \"url\",\n \"note\",\n ]\n\nclass WitnessAdmin(RootsMagicModelAdmin):\n list_display = [\n \"id\",\n \"event\",\n \"person\",\n \"witness_order\",\n \"role\",\n \"sentence\",\n \"note\",\n \"given\",\n \"surname\",\n \"prefix\",\n \"suffix\",\n ]\n\n\n\nadmin.site.register(AddressLink, AddressLinkAdmin)\nadmin.site.register(Address, AddressAdmin)\nadmin.site.register(Child, ChildAdmin)\nadmin.site.register(Citation, CitationAdmin)\nadmin.site.register(Configuration, ConfigurationAdmin)\nadmin.site.register(Event, EventAdmin)\nadmin.site.register(Exclusion, ExclusionAdmin)\nadmin.site.register(FactType, FactTypeAdmin)\nadmin.site.register(Family, FamilyAdmin)\nadmin.site.register(Group, GroupAdmin)\nadmin.site.register(Label, LabelAdmin)\nadmin.site.register(LinkAncestry, LinkAncestryAdmin)\nadmin.site.register(Link, LinkAdmin)\nadmin.site.register(MediaLink, 
MediaLinkAdmin)\nadmin.site.register(Multimedia, MultimediaAdmin)\nadmin.site.register(Name, NameAdmin)\nadmin.site.register(Person, PersonAdmin)\nadmin.site.register(Place, PlaceAdmin)\nadmin.site.register(ResearchItem, ResearchItemAdmin)\nadmin.site.register(Research, ResearchAdmin)\nadmin.site.register(Role, RoleAdmin)\nadmin.site.register(Source, SourceAdmin)\nadmin.site.register(SourceTemplate, SourceTemplateAdmin)\nadmin.site.register(Url, UrlAdmin)\nadmin.site.register(Witness, WitnessAdmin)\n",
"step-ids": [
31,
35,
38,
39,
48
]
}
|
[
31,
35,
38,
39,
48
] |
#!/usr/bin/env pypy
# Test-case generator: prints one random integer n with randmin <= n <= randmax.
# Usage: script.py RANDMIN RANDMAX SEED
from __future__ import print_function
from __future__ import division
import subprocess  # NOTE(review): subprocess/math/string are unused here — presumably a shared generator template
import random
import math
import sys
import string
randmin = int(sys.argv[1])
randmax = int(sys.argv[2])
random.seed(int(sys.argv[3]))  # fixed seed -> deterministic output per seed
n = random.randint(randmin, randmax)  # inclusive on both ends
print('%d' % n)
|
normal
|
{
"blob_id": "83e1c86095de88692d0116f7e32bd485ab381b29",
"index": 7040,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nrandom.seed(int(sys.argv[3]))\n<mask token>\nprint('%d' % n)\n",
"step-3": "<mask token>\nrandmin = int(sys.argv[1])\nrandmax = int(sys.argv[2])\nrandom.seed(int(sys.argv[3]))\nn = random.randint(randmin, randmax)\nprint('%d' % n)\n",
"step-4": "from __future__ import print_function\nfrom __future__ import division\nimport subprocess\nimport random\nimport math\nimport sys\nimport string\nrandmin = int(sys.argv[1])\nrandmax = int(sys.argv[2])\nrandom.seed(int(sys.argv[3]))\nn = random.randint(randmin, randmax)\nprint('%d' % n)\n",
"step-5": "#!/usr/bin/env pypy\n\nfrom __future__ import print_function\nfrom __future__ import division\nimport subprocess\nimport random\nimport math\nimport sys\nimport string\n\nrandmin = int(sys.argv[1])\nrandmax = int(sys.argv[2])\nrandom.seed(int(sys.argv[3]))\n\nn = random.randint(randmin, randmax)\n\nprint('%d' % n)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def matchuj_wydzial(nazwa):
try:
return Wydzial.objects.get(nazwa__iexact=nazwa.strip())
except Wydzial.DoesNotExist:
pass
def matchuj_tytul(tytul: str, create_if_not_exist=False) ->Tytul:
"""
Dostaje tytuł: pełną nazwę albo skrót
"""
try:
return Tytul.objects.get(nazwa__iexact=tytul)
except (Tytul.DoesNotExist, Tytul.MultipleObjectsReturned):
return Tytul.objects.get(skrot=normalize_tytul_naukowy(tytul))
def matchuj_funkcja_autora(funkcja_autora: str) ->Funkcja_Autora:
funkcja_autora = normalize_funkcja_autora(funkcja_autora)
return Funkcja_Autora.objects.get(Q(nazwa__iexact=funkcja_autora) | Q(
skrot__iexact=funkcja_autora))
def matchuj_grupa_pracownicza(grupa_pracownicza: str) ->Grupa_Pracownicza:
grupa_pracownicza = normalize_grupa_pracownicza(grupa_pracownicza)
return Grupa_Pracownicza.objects.get(nazwa__iexact=grupa_pracownicza)
<|reserved_special_token_0|>
def matchuj_jednostke(nazwa, wydzial=None):
nazwa = normalize_nazwa_jednostki(nazwa)
try:
return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(
skrot__iexact=nazwa))
except Jednostka.DoesNotExist:
if nazwa.endswith('.'):
nazwa = nazwa[:-1].strip()
try:
return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(
skrot__istartswith=nazwa))
except Jednostka.MultipleObjectsReturned as e:
if wydzial is None:
raise e
return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(
skrot__istartswith=nazwa), Q(wydzial__nazwa__iexact=wydzial))
except Jednostka.MultipleObjectsReturned as e:
if wydzial is None:
raise e
return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(
skrot__iexact=nazwa), Q(wydzial__nazwa__iexact=wydzial))
<|reserved_special_token_0|>
def matchuj_zrodlo(s: Union[str, None], issn: Union[str, None]=None, e_issn:
Union[str, None]=None, alt_nazwa=None) ->Union[None, Zrodlo]:
if s is None or str(s) == '':
return
if issn is not None:
try:
return Zrodlo.objects.get(issn=issn)
except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):
pass
if e_issn is not None:
try:
return Zrodlo.objects.get(e_issn=e_issn)
except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):
pass
for elem in (s, alt_nazwa):
if elem is None:
continue
elem = normalize_tytul_zrodla(elem)
try:
return Zrodlo.objects.get(Q(nazwa__iexact=elem) | Q(
skrot__iexact=elem))
except Zrodlo.MultipleObjectsReturned:
pass
except Zrodlo.DoesNotExist:
if elem.endswith('.'):
try:
return Zrodlo.objects.get(Q(nazwa__istartswith=elem[:-1
]) | Q(skrot__istartswith=elem[:-1]))
except Zrodlo.DoesNotExist:
pass
except Zrodlo.MultipleObjectsReturned:
pass
def matchuj_dyscypline(kod, nazwa):
nazwa = normalize_nazwa_dyscypliny(nazwa)
try:
return Dyscyplina_Naukowa.objects.get(nazwa=nazwa)
except Dyscyplina_Naukowa.DoesNotExist:
pass
except Dyscyplina_Naukowa.MultipleObjectsReturned:
pass
kod = normalize_kod_dyscypliny(kod)
try:
return Dyscyplina_Naukowa.objects.get(kod=kod)
except Dyscyplina_Naukowa.DoesNotExist:
pass
except Dyscyplina_Naukowa.MultipleObjectsReturned:
pass
def matchuj_wydawce(nazwa, pbn_uid_id=None, similarity=0.9):
nazwa = normalize_nazwa_wydawcy(nazwa)
try:
return Wydawca.objects.get(nazwa=nazwa, alias_dla_id=None)
except Wydawca.DoesNotExist:
pass
if pbn_uid_id is not None:
try:
return Wydawca.objects.get(pbn_uid_id=pbn_uid_id)
except Wydawca.DoesNotExist:
pass
loose = Wydawca.objects.annotate(similarity=TrigramSimilarity('nazwa',
nazwa)).filter(similarity__gte=similarity).order_by('-similarity')[:5]
if loose.count() > 0 and loose.count() < 2:
return loose.first()
<|reserved_special_token_0|>
def normalize_zrodlo_skrot_for_db_lookup(s):
return s.lower().replace(' ', '').strip().replace('-', '').replace('.', '')
<|reserved_special_token_0|>
def normalize_zrodlo_nazwa_for_db_lookup(s):
    """Flatten a source name for fuzzy DB comparison: lowercase, delete
    every space, then strip any remaining outer whitespace."""
    lowered = s.lower()
    return lowered.replace(" ", "").strip()
<|reserved_special_token_0|>
def matchuj_publikacje(klass: [Wydawnictwo_Zwarte, Wydawnictwo_Ciagle,
    Rekord], title, year, doi=None, public_uri=None, isbn=None, zrodlo=None,
    DEBUG_MATCHOWANIE=False, isbn_matchuj_tylko_nadrzedne=True,
    doi_matchuj_tylko_nadrzedne=True):
    """Match an existing publication record of type ``klass``.

    Matching cascade, strongest signal first: DOI, then title+year+source,
    then ISBN (by default restricted to top-level records, i.e. those with
    no parent publication), then public URI, then title-prefix+year, and
    finally a pure trigram-similarity search within the year.  Each stage
    returns its best candidate as soon as it clears that stage's similarity
    threshold; otherwise the function falls through and returns None.

    NOTE(review): the ``klass`` annotation is a list literal, not a valid
    type hint — presumably Type[Union[...]] was intended; confirm.
    """
    # --- Stage 1: DOI + year, ranked by title similarity. ---
    if doi is not None:
        doi = normalize_doi(doi)
    if doi:
        zapytanie = klass.objects.filter(doi__istartswith=doi, rok=year)
        if doi_matchuj_tylko_nadrzedne:
            # Chapters may share the parent book's DOI; prefer parents.
            if hasattr(klass, 'wydawnictwo_nadrzedne_id'):
                zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)
        res = zapytanie.annotate(podobienstwo=TrigramSimilarity(
            normalized_db_title, title.lower())).order_by('-podobienstwo')[
            :2]
        fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
        if res.exists():
            if res.first(
                ).podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:
                return res.first()
    # --- Stage 2: exact title-prefix + year + source (journal articles). ---
    title = normalize_tytul_publikacji(title)
    title_has_spaces = False
    if title is not None:
        title_has_spaces = title.find(' ') > 0
    # Title-based stages run only for titles long enough to be distinctive.
    if title is not None and (not title_has_spaces and len(title) >=
        TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=
        TITLE_LIMIT_MANY_WORDS):
        if zrodlo is not None and hasattr(klass, 'zrodlo'):
            try:
                return klass.objects.get(tytul_oryginalny__istartswith=
                    title, rok=year, zrodlo=zrodlo)
            except klass.DoesNotExist:
                pass
            except klass.MultipleObjectsReturned:
                # Ambiguity is logged and the cascade continues.
                print(
                    f'PPP ZZZ MultipleObjectsReturned dla title={title} rok={year} zrodlo={zrodlo}'
                    )
    # --- Stage 3: ISBN / e-ISBN, ranked by title similarity. ---
    if isbn is not None and isbn != '' and hasattr(klass, 'isbn') and hasattr(
        klass, 'e_isbn'):
        ni = normalize_isbn(isbn)
        zapytanie = klass.objects.exclude(isbn=None, e_isbn=None).exclude(isbn
            ='', e_isbn='')
        if isbn_matchuj_tylko_nadrzedne:
            # Many chapters carry the book's ISBN: only match records that
            # act as a parent publication for others.
            zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)
            if klass == Rekord:
                # Rekord is a multi-table view keyed by (content_type, pk).
                zapytanie = zapytanie.filter(pk__in=[(ContentType.objects.
                    get_for_model(Wydawnictwo_Zwarte).pk, x) for x in
                    Wydawnictwo_Zwarte.objects.
                    wydawnictwa_nadrzedne_dla_innych()])
            elif klass == Wydawnictwo_Zwarte:
                zapytanie = zapytanie.filter(pk__in=Wydawnictwo_Zwarte.
                    objects.wydawnictwa_nadrzedne_dla_innych())
            else:
                raise NotImplementedError(
                    'Matchowanie po ISBN dla czegoś innego niż wydawnictwo zwarte nie opracowane'
                    )
        res = zapytanie.filter(Q(isbn=ni) | Q(e_isbn=ni)).annotate(podobienstwo
            =TrigramSimilarity(normalized_db_title, title.lower())).order_by(
            '-podobienstwo')[:2]
        fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
        if res.exists():
            if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:
                return res.first()
    # --- Stage 4: public URI (www / public_www), ranked by similarity. ---
    public_uri = normalize_public_uri(public_uri)
    if public_uri:
        res = klass.objects.filter(Q(www=public_uri) | Q(public_www=public_uri)
            ).annotate(podobienstwo=TrigramSimilarity(normalized_db_title,
            title.lower())).order_by('-podobienstwo')[:2]
        fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
        if res.exists():
            if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:
                return res.first()
    # --- Stage 5: title-prefix + year, without requiring a source. ---
    if title is not None and (not title_has_spaces and len(title) >=
        TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=
        TITLE_LIMIT_MANY_WORDS):
        res = klass.objects.filter(tytul_oryginalny__istartswith=title, rok
            =year).annotate(podobienstwo=TrigramSimilarity(
            normalized_db_title, title.lower())).order_by('-podobienstwo')[:2]
        fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
        if res.exists():
            if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:
                return res.first()
    # --- Stage 6: last resort — best trigram match within the year. ---
    res = klass.objects.filter(rok=year).annotate(podobienstwo=
        TrigramSimilarity(normalized_db_title, title.lower())).order_by(
        '-podobienstwo')[:2]
    fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
    if res.exists():
        if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_LOW:
            return res.first()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def matchuj_wydzial(nazwa):
try:
return Wydzial.objects.get(nazwa__iexact=nazwa.strip())
except Wydzial.DoesNotExist:
pass
def matchuj_tytul(tytul: str, create_if_not_exist=False) ->Tytul:
"""
Dostaje tytuł: pełną nazwę albo skrót
"""
try:
return Tytul.objects.get(nazwa__iexact=tytul)
except (Tytul.DoesNotExist, Tytul.MultipleObjectsReturned):
return Tytul.objects.get(skrot=normalize_tytul_naukowy(tytul))
def matchuj_funkcja_autora(funkcja_autora: str) ->Funkcja_Autora:
funkcja_autora = normalize_funkcja_autora(funkcja_autora)
return Funkcja_Autora.objects.get(Q(nazwa__iexact=funkcja_autora) | Q(
skrot__iexact=funkcja_autora))
def matchuj_grupa_pracownicza(grupa_pracownicza: str) ->Grupa_Pracownicza:
grupa_pracownicza = normalize_grupa_pracownicza(grupa_pracownicza)
return Grupa_Pracownicza.objects.get(nazwa__iexact=grupa_pracownicza)
def matchuj_wymiar_etatu(wymiar_etatu: str) ->Wymiar_Etatu:
wymiar_etatu = normalize_wymiar_etatu(wymiar_etatu)
return Wymiar_Etatu.objects.get(nazwa__iexact=wymiar_etatu)
def matchuj_jednostke(nazwa, wydzial=None):
nazwa = normalize_nazwa_jednostki(nazwa)
try:
return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(
skrot__iexact=nazwa))
except Jednostka.DoesNotExist:
if nazwa.endswith('.'):
nazwa = nazwa[:-1].strip()
try:
return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(
skrot__istartswith=nazwa))
except Jednostka.MultipleObjectsReturned as e:
if wydzial is None:
raise e
return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(
skrot__istartswith=nazwa), Q(wydzial__nazwa__iexact=wydzial))
except Jednostka.MultipleObjectsReturned as e:
if wydzial is None:
raise e
return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(
skrot__iexact=nazwa), Q(wydzial__nazwa__iexact=wydzial))
<|reserved_special_token_0|>
def matchuj_zrodlo(s: Union[str, None], issn: Union[str, None]=None, e_issn:
Union[str, None]=None, alt_nazwa=None) ->Union[None, Zrodlo]:
if s is None or str(s) == '':
return
if issn is not None:
try:
return Zrodlo.objects.get(issn=issn)
except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):
pass
if e_issn is not None:
try:
return Zrodlo.objects.get(e_issn=e_issn)
except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):
pass
for elem in (s, alt_nazwa):
if elem is None:
continue
elem = normalize_tytul_zrodla(elem)
try:
return Zrodlo.objects.get(Q(nazwa__iexact=elem) | Q(
skrot__iexact=elem))
except Zrodlo.MultipleObjectsReturned:
pass
except Zrodlo.DoesNotExist:
if elem.endswith('.'):
try:
return Zrodlo.objects.get(Q(nazwa__istartswith=elem[:-1
]) | Q(skrot__istartswith=elem[:-1]))
except Zrodlo.DoesNotExist:
pass
except Zrodlo.MultipleObjectsReturned:
pass
def matchuj_dyscypline(kod, nazwa):
nazwa = normalize_nazwa_dyscypliny(nazwa)
try:
return Dyscyplina_Naukowa.objects.get(nazwa=nazwa)
except Dyscyplina_Naukowa.DoesNotExist:
pass
except Dyscyplina_Naukowa.MultipleObjectsReturned:
pass
kod = normalize_kod_dyscypliny(kod)
try:
return Dyscyplina_Naukowa.objects.get(kod=kod)
except Dyscyplina_Naukowa.DoesNotExist:
pass
except Dyscyplina_Naukowa.MultipleObjectsReturned:
pass
def matchuj_wydawce(nazwa, pbn_uid_id=None, similarity=0.9):
nazwa = normalize_nazwa_wydawcy(nazwa)
try:
return Wydawca.objects.get(nazwa=nazwa, alias_dla_id=None)
except Wydawca.DoesNotExist:
pass
if pbn_uid_id is not None:
try:
return Wydawca.objects.get(pbn_uid_id=pbn_uid_id)
except Wydawca.DoesNotExist:
pass
loose = Wydawca.objects.annotate(similarity=TrigramSimilarity('nazwa',
nazwa)).filter(similarity__gte=similarity).order_by('-similarity')[:5]
if loose.count() > 0 and loose.count() < 2:
return loose.first()
<|reserved_special_token_0|>
def normalize_zrodlo_skrot_for_db_lookup(s):
return s.lower().replace(' ', '').strip().replace('-', '').replace('.', '')
<|reserved_special_token_0|>
def normalize_zrodlo_nazwa_for_db_lookup(s):
return s.lower().replace(' ', '').strip()
<|reserved_special_token_0|>
def matchuj_publikacje(klass: [Wydawnictwo_Zwarte, Wydawnictwo_Ciagle,
Rekord], title, year, doi=None, public_uri=None, isbn=None, zrodlo=None,
DEBUG_MATCHOWANIE=False, isbn_matchuj_tylko_nadrzedne=True,
doi_matchuj_tylko_nadrzedne=True):
if doi is not None:
doi = normalize_doi(doi)
if doi:
zapytanie = klass.objects.filter(doi__istartswith=doi, rok=year)
if doi_matchuj_tylko_nadrzedne:
if hasattr(klass, 'wydawnictwo_nadrzedne_id'):
zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)
res = zapytanie.annotate(podobienstwo=TrigramSimilarity(
normalized_db_title, title.lower())).order_by('-podobienstwo')[
:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first(
).podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:
return res.first()
title = normalize_tytul_publikacji(title)
title_has_spaces = False
if title is not None:
title_has_spaces = title.find(' ') > 0
if title is not None and (not title_has_spaces and len(title) >=
TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=
TITLE_LIMIT_MANY_WORDS):
if zrodlo is not None and hasattr(klass, 'zrodlo'):
try:
return klass.objects.get(tytul_oryginalny__istartswith=
title, rok=year, zrodlo=zrodlo)
except klass.DoesNotExist:
pass
except klass.MultipleObjectsReturned:
print(
f'PPP ZZZ MultipleObjectsReturned dla title={title} rok={year} zrodlo={zrodlo}'
)
if isbn is not None and isbn != '' and hasattr(klass, 'isbn') and hasattr(
klass, 'e_isbn'):
ni = normalize_isbn(isbn)
zapytanie = klass.objects.exclude(isbn=None, e_isbn=None).exclude(isbn
='', e_isbn='')
if isbn_matchuj_tylko_nadrzedne:
zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)
if klass == Rekord:
zapytanie = zapytanie.filter(pk__in=[(ContentType.objects.
get_for_model(Wydawnictwo_Zwarte).pk, x) for x in
Wydawnictwo_Zwarte.objects.
wydawnictwa_nadrzedne_dla_innych()])
elif klass == Wydawnictwo_Zwarte:
zapytanie = zapytanie.filter(pk__in=Wydawnictwo_Zwarte.
objects.wydawnictwa_nadrzedne_dla_innych())
else:
raise NotImplementedError(
'Matchowanie po ISBN dla czegoś innego niż wydawnictwo zwarte nie opracowane'
)
res = zapytanie.filter(Q(isbn=ni) | Q(e_isbn=ni)).annotate(podobienstwo
=TrigramSimilarity(normalized_db_title, title.lower())).order_by(
'-podobienstwo')[:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:
return res.first()
public_uri = normalize_public_uri(public_uri)
if public_uri:
res = klass.objects.filter(Q(www=public_uri) | Q(public_www=public_uri)
).annotate(podobienstwo=TrigramSimilarity(normalized_db_title,
title.lower())).order_by('-podobienstwo')[:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:
return res.first()
if title is not None and (not title_has_spaces and len(title) >=
TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=
TITLE_LIMIT_MANY_WORDS):
res = klass.objects.filter(tytul_oryginalny__istartswith=title, rok
=year).annotate(podobienstwo=TrigramSimilarity(
normalized_db_title, title.lower())).order_by('-podobienstwo')[:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:
return res.first()
res = klass.objects.filter(rok=year).annotate(podobienstwo=
TrigramSimilarity(normalized_db_title, title.lower())).order_by(
'-podobienstwo')[:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_LOW:
return res.first()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def matchuj_wydzial(nazwa):
    """Look up a Wydzial (faculty) by exact, case-insensitive name.

    Returns the matching Wydzial, or None when no faculty has that name.
    MultipleObjectsReturned propagates to the caller.
    """
    cleaned = nazwa.strip()
    try:
        match = Wydzial.objects.get(nazwa__iexact=cleaned)
    except Wydzial.DoesNotExist:
        return None
    return match
def matchuj_tytul(tytul: str, create_if_not_exist=False) ->Tytul:
    """
    Match an academic title given either its full name or its abbreviation.

    First tries a case-insensitive match on the full name; on failure or
    ambiguity falls back to an exact match on the normalized abbreviation,
    which may itself raise Tytul.DoesNotExist.

    NOTE(review): ``create_if_not_exist`` is accepted but never used here —
    confirm whether record creation was meant to be implemented.
    """
    try:
        return Tytul.objects.get(nazwa__iexact=tytul)
    except (Tytul.DoesNotExist, Tytul.MultipleObjectsReturned):
        # Name lookup failed or was ambiguous: retry by abbreviation.
        return Tytul.objects.get(skrot=normalize_tytul_naukowy(tytul))
def matchuj_funkcja_autora(funkcja_autora: str) ->Funkcja_Autora:
    """Return the Funkcja_Autora whose name or abbreviation equals the
    normalized input, case-insensitively; raises on no/multiple matches."""
    needle = normalize_funkcja_autora(funkcja_autora)
    by_name_or_abbrev = Q(nazwa__iexact=needle) | Q(skrot__iexact=needle)
    return Funkcja_Autora.objects.get(by_name_or_abbrev)
def matchuj_grupa_pracownicza(grupa_pracownicza: str) ->Grupa_Pracownicza:
    """Return the Grupa_Pracownicza whose name equals the normalized input,
    case-insensitively; raises on no/multiple matches."""
    normalized = normalize_grupa_pracownicza(grupa_pracownicza)
    return Grupa_Pracownicza.objects.get(nazwa__iexact=normalized)
def matchuj_wymiar_etatu(wymiar_etatu: str) ->Wymiar_Etatu:
    """Return the Wymiar_Etatu (employment fraction) whose name equals the
    normalized input, case-insensitively; raises on no/multiple matches."""
    normalized = normalize_wymiar_etatu(wymiar_etatu)
    return Wymiar_Etatu.objects.get(nazwa__iexact=normalized)
def matchuj_jednostke(nazwa, wydzial=None):
    """Match a Jednostka (organizational unit) by name or abbreviation.

    Lookup order:
      1. exact, case-insensitive match on name or abbreviation;
      2. on no match: drop a trailing dot and retry as a prefix match;
      3. whenever a step is ambiguous, narrow by the faculty name given in
         ``wydzial`` (or re-raise MultipleObjectsReturned when none given).

    Raises Jednostka.DoesNotExist / MultipleObjectsReturned when all
    fallbacks are exhausted.
    """
    nazwa = normalize_nazwa_jednostki(nazwa)
    try:
        return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(
            skrot__iexact=nazwa))
    except Jednostka.DoesNotExist:
        # Abbreviations often end with a dot; strip it and retry as a prefix.
        if nazwa.endswith('.'):
            nazwa = nazwa[:-1].strip()
        try:
            return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(
                skrot__istartswith=nazwa))
        except Jednostka.MultipleObjectsReturned as e:
            if wydzial is None:
                raise e
            # Prefix match ambiguous: disambiguate by faculty name.
            return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(
                skrot__istartswith=nazwa), Q(wydzial__nazwa__iexact=wydzial))
    except Jednostka.MultipleObjectsReturned as e:
        if wydzial is None:
            raise e
        # Exact match ambiguous: disambiguate by faculty name.
        return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(
            skrot__iexact=nazwa), Q(wydzial__nazwa__iexact=wydzial))
def matchuj_autora(imiona: str, nazwisko: str, jednostka: Union[Jednostka,
    None]=None, bpp_id: Union[int, None]=None, pbn_uid_id: Union[str, None]
    =None, system_kadrowy_id: Union[int, None]=None, pbn_id: Union[int,
    None]=None, orcid: Union[str, None]=None, tytul_str: Union[str, None]
    =None):
    """Match an Autor record using progressively weaker identifiers.

    Tried in order: BPP primary key, ORCID, PBN UID, HR-system id, PBN id,
    then name-based matching (first + last name, optionally narrowed by the
    academic-title abbreviation ``tytul_str`` and/or by ``jednostka``).
    Returns the matched Autor or None.
    """
    # 1. Hard identifiers — each one, when present and matching, wins outright.
    if bpp_id is not None:
        try:
            return Autor.objects.get(pk=bpp_id)
        except Autor.DoesNotExist:
            pass
    if orcid:
        try:
            return Autor.objects.get(orcid__iexact=orcid.strip())
        except Autor.DoesNotExist:
            pass
    if pbn_uid_id is not None and pbn_uid_id.strip() != '':
        # filter()/first() rather than get(): duplicate PBN UIDs are
        # tolerated here and the first row wins.
        _qset = Autor.objects.filter(pbn_uid_id=pbn_uid_id)
        if _qset.exists():
            return _qset.first()
    # 2. Numeric ids may arrive as str/int/garbage; coerce or discard them.
    if system_kadrowy_id is not None:
        try:
            int(system_kadrowy_id)
        except (TypeError, ValueError):
            system_kadrowy_id = None
    if system_kadrowy_id is not None:
        try:
            return Autor.objects.get(system_kadrowy_id=system_kadrowy_id)
        except Autor.DoesNotExist:
            pass
    if pbn_id is not None:
        if isinstance(pbn_id, str):
            pbn_id = pbn_id.strip()
        try:
            pbn_id = int(pbn_id)
        except (TypeError, ValueError):
            pbn_id = None
    if pbn_id is not None:
        try:
            return Autor.objects.get(pbn_id=pbn_id)
        except Autor.DoesNotExist:
            pass
    # 3. Name-based matching: last name (current or previous) + first names.
    queries = [Q(Q(nazwisko__iexact=nazwisko.strip()) | Q(
        poprzednie_nazwiska__icontains=nazwisko.strip()), imiona__iexact=
        imiona.strip())]
    if tytul_str:
        # NOTE(review): the broader, title-less query is tried first, so the
        # title-narrowed variant only runs when the broad one fails.
        queries.append(queries[0] & Q(tytul__skrot=tytul_str))
    for qry in queries:
        try:
            return Autor.objects.get(qry)
        except (Autor.DoesNotExist, Autor.MultipleObjectsReturned):
            pass
        try:
            # Ambiguous/absent: retry restricted to the author's current unit.
            return Autor.objects.get(qry & Q(aktualna_jednostka=jednostka))
        except (Autor.MultipleObjectsReturned, Autor.DoesNotExist):
            pass
    # 4. Last resort: search the unit's membership table directly.
    if jednostka:
        queries = [Q(Q(autor__nazwisko__iexact=nazwisko.strip()) | Q(
            autor__poprzednie_nazwiska__icontains=nazwisko.strip()),
            autor__imiona__iexact=imiona.strip())]
        if tytul_str:
            queries.append(queries[0] & Q(autor__tytul__skrot=tytul_str))
        for qry in queries:
            try:
                return jednostka.autor_jednostka_set.get(qry).autor
            except (Autor_Jednostka.MultipleObjectsReturned,
                Autor_Jednostka.DoesNotExist):
                pass
    return None
def matchuj_zrodlo(s: Union[str, None], issn: Union[str, None]=None, e_issn:
    Union[str, None]=None, alt_nazwa=None) ->Union[None, Zrodlo]:
    """Match a Zrodlo (journal) by ISSN, e-ISSN, then by name/abbreviation.

    Name matching is case-insensitive over both ``nazwa`` and ``skrot``;
    when a name ends with a dot, a prefix match without the trailing dot
    is attempted as well. Returns None when no unambiguous match exists.
    """
    if s is None or str(s) == '':
        return
    # Identifier-based lookups first; ambiguity counts as "not found".
    for field, value in (('issn', issn), ('e_issn', e_issn)):
        if value is None:
            continue
        try:
            return Zrodlo.objects.get(**{field: value})
        except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):
            pass
    # Name-based lookups: primary name, then the alternative one.
    for candidate in (s, alt_nazwa):
        if candidate is None:
            continue
        candidate = normalize_tytul_zrodla(candidate)
        try:
            return Zrodlo.objects.get(Q(nazwa__iexact=candidate) | Q(
                skrot__iexact=candidate))
        except Zrodlo.MultipleObjectsReturned:
            continue
        except Zrodlo.DoesNotExist:
            if not candidate.endswith('.'):
                continue
            # "Acta Med." style abbreviation -- retry as a prefix match
            # with the trailing dot stripped.
            prefix = candidate[:-1]
            try:
                return Zrodlo.objects.get(Q(nazwa__istartswith=prefix) | Q(
                    skrot__istartswith=prefix))
            except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):
                pass
def matchuj_dyscypline(kod, nazwa):
    """Match a Dyscyplina_Naukowa by normalized name, then by normalized code.

    Both a missing row and duplicate rows are treated as "no match" for a
    given key, falling through to the next one. Returns the matched
    Dyscyplina_Naukowa or None.
    """
    nazwa = normalize_nazwa_dyscypliny(nazwa)
    try:
        return Dyscyplina_Naukowa.objects.get(nazwa=nazwa)
    except (Dyscyplina_Naukowa.DoesNotExist,
            Dyscyplina_Naukowa.MultipleObjectsReturned):
        pass
    # Name lookup failed -- fall back to the normalized discipline code.
    kod = normalize_kod_dyscypliny(kod)
    try:
        return Dyscyplina_Naukowa.objects.get(kod=kod)
    except (Dyscyplina_Naukowa.DoesNotExist,
            Dyscyplina_Naukowa.MultipleObjectsReturned):
        pass
    return None
def matchuj_wydawce(nazwa, pbn_uid_id=None, similarity=0.9):
    """Match a Wydawca (publisher) by exact name, PBN UID, then fuzzy name.

    Exact matching only considers canonical publishers (``alias_dla_id=None``).
    The fuzzy fallback uses trigram similarity with threshold *similarity*
    and only accepts a result when exactly one candidate qualifies.
    Returns the matched Wydawca or None.
    """
    nazwa = normalize_nazwa_wydawcy(nazwa)
    try:
        return Wydawca.objects.get(nazwa=nazwa, alias_dla_id=None)
    except Wydawca.DoesNotExist:
        pass
    if pbn_uid_id is not None:
        try:
            return Wydawca.objects.get(pbn_uid_id=pbn_uid_id)
        except Wydawca.DoesNotExist:
            pass
    # Fuzzy fallback. Materialize once instead of calling .count() twice
    # (the original issued two COUNT queries just to test "exactly one").
    loose = list(Wydawca.objects.annotate(similarity=TrigramSimilarity(
        'nazwa', nazwa)).filter(similarity__gte=similarity).order_by(
        '-similarity')[:5])
    if len(loose) == 1:
        return loose[0]
    return None
<|reserved_special_token_0|>
def normalize_zrodlo_skrot_for_db_lookup(s):
    """Lowercase *s* and remove spaces, hyphens and dots, trimming whitespace.

    Python-side counterpart of the ``normalized_db_zrodlo_skrot`` DB
    expression, so in-memory and in-database abbreviations compare equal.
    """
    result = s.lower()
    for ch in (' ', '-', '.'):
        result = result.replace(ch, '')
    return result.strip()
<|reserved_special_token_0|>
def normalize_zrodlo_nazwa_for_db_lookup(s):
    """Lowercase *s*, remove every space and trim remaining whitespace.

    Python-side counterpart of the ``normalized_db_zrodlo_nazwa`` DB
    expression, so in-memory and in-database names compare equal.
    """
    return ''.join(s.lower().split(' ')).strip()
<|reserved_special_token_0|>
def matchuj_publikacje(klass: [Wydawnictwo_Zwarte, Wydawnictwo_Ciagle,
    Rekord], title, year, doi=None, public_uri=None, isbn=None, zrodlo=None,
    DEBUG_MATCHOWANIE=False, isbn_matchuj_tylko_nadrzedne=True,
    doi_matchuj_tylko_nadrzedne=True):
    """Match an existing publication of *klass*, trying progressively weaker keys.

    Cascade: DOI (validated with trigram title similarity), exact
    title-prefix + year + zrodlo, ISBN/e-ISBN, public URI, title-prefix +
    year, and finally pure trigram title similarity over all records of
    *year*. Each similarity stage returns only when the best candidate
    clears its threshold. Returns the matched record or None.

    ``isbn_matchuj_tylko_nadrzedne`` / ``doi_matchuj_tylko_nadrzedne``
    restrict those stages to records without a parent publication.
    ``DEBUG_MATCHOWANIE`` is passed to ``fail_if_seq_scan`` to flag
    unindexed query plans.
    """
    # --- stage 1: DOI + similarity check on the normalized title ---
    if doi is not None:
        doi = normalize_doi(doi)
    if doi:
        zapytanie = klass.objects.filter(doi__istartswith=doi, rok=year)
        if doi_matchuj_tylko_nadrzedne:
            if hasattr(klass, 'wydawnictwo_nadrzedne_id'):
                zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)
        res = zapytanie.annotate(podobienstwo=TrigramSimilarity(
            normalized_db_title, title.lower())).order_by('-podobienstwo')[
            :2]
        fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
        if res.exists():
            if res.first(
                ).podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:
                return res.first()
    # Title-based stages only run when the normalized title is long enough
    # (a higher bar for single-word titles) to keep prefix matches meaningful.
    title = normalize_tytul_publikacji(title)
    title_has_spaces = False
    if title is not None:
        title_has_spaces = title.find(' ') > 0
    # --- stage 2: exact title-prefix + year + zrodlo ---
    if title is not None and (not title_has_spaces and len(title) >=
        TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=
        TITLE_LIMIT_MANY_WORDS):
        if zrodlo is not None and hasattr(klass, 'zrodlo'):
            try:
                return klass.objects.get(tytul_oryginalny__istartswith=
                    title, rok=year, zrodlo=zrodlo)
            except klass.DoesNotExist:
                pass
            except klass.MultipleObjectsReturned:
                print(
                    f'PPP ZZZ MultipleObjectsReturned dla title={title} rok={year} zrodlo={zrodlo}'
                    )
    # --- stage 3: ISBN / e-ISBN ---
    # In BPP the same ISBN is entered both on chapters and on their parent
    # volume, so with isbn_matchuj_tylko_nadrzedne the search is limited to
    # records that serve as parents (have no parent themselves).
    if isbn is not None and isbn != '' and hasattr(klass, 'isbn') and hasattr(
        klass, 'e_isbn'):
        ni = normalize_isbn(isbn)
        zapytanie = klass.objects.exclude(isbn=None, e_isbn=None).exclude(isbn
            ='', e_isbn='')
        if isbn_matchuj_tylko_nadrzedne:
            zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)
        if klass == Rekord:
            # Rekord is a cross-model view: its PK pairs the content type
            # with the underlying object's id.
            zapytanie = zapytanie.filter(pk__in=[(ContentType.objects.
                get_for_model(Wydawnictwo_Zwarte).pk, x) for x in
                Wydawnictwo_Zwarte.objects.
                wydawnictwa_nadrzedne_dla_innych()])
        elif klass == Wydawnictwo_Zwarte:
            zapytanie = zapytanie.filter(pk__in=Wydawnictwo_Zwarte.
                objects.wydawnictwa_nadrzedne_dla_innych())
        else:
            raise NotImplementedError(
                'Matchowanie po ISBN dla czegoś innego niż wydawnictwo zwarte nie opracowane'
                )
        res = zapytanie.filter(Q(isbn=ni) | Q(e_isbn=ni)).annotate(podobienstwo
            =TrigramSimilarity(normalized_db_title, title.lower())).order_by(
            '-podobienstwo')[:2]
        fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
        if res.exists():
            if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:
                return res.first()
    # --- stage 4: public URI (www or public_www) ---
    public_uri = normalize_public_uri(public_uri)
    if public_uri:
        res = klass.objects.filter(Q(www=public_uri) | Q(public_www=public_uri)
            ).annotate(podobienstwo=TrigramSimilarity(normalized_db_title,
            title.lower())).order_by('-podobienstwo')[:2]
        fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
        if res.exists():
            if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:
                return res.first()
    # --- stage 5: title-prefix + year, validated by similarity ---
    if title is not None and (not title_has_spaces and len(title) >=
        TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=
        TITLE_LIMIT_MANY_WORDS):
        res = klass.objects.filter(tytul_oryginalny__istartswith=title, rok
            =year).annotate(podobienstwo=TrigramSimilarity(
            normalized_db_title, title.lower())).order_by('-podobienstwo')[:2]
        fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
        if res.exists():
            if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:
                return res.first()
    # --- stage 6: last chance -- similarity only, lower threshold ---
    res = klass.objects.filter(rok=year).annotate(podobienstwo=
        TrigramSimilarity(normalized_db_title, title.lower())).order_by(
        '-podobienstwo')[:2]
    fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
    if res.exists():
        if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_LOW:
            return res.first()
<|reserved_special_token_1|>
from typing import Union
from django.db.models import Q, Value
from django.db.models.functions import Lower, Replace, Trim
from .normalization import normalize_doi, normalize_funkcja_autora, normalize_grupa_pracownicza, normalize_isbn, normalize_kod_dyscypliny, normalize_nazwa_dyscypliny, normalize_nazwa_jednostki, normalize_nazwa_wydawcy, normalize_public_uri, normalize_tytul_naukowy, normalize_tytul_publikacji, normalize_tytul_zrodla, normalize_wymiar_etatu
from django.contrib.contenttypes.models import ContentType
from django.contrib.postgres.search import TrigramSimilarity
from bpp.models import Autor, Autor_Jednostka, Dyscyplina_Naukowa, Funkcja_Autora, Grupa_Pracownicza, Jednostka, Rekord, Tytul, Wydawca, Wydawnictwo_Ciagle, Wydawnictwo_Zwarte, Wydzial, Wymiar_Etatu, Zrodlo
from bpp.util import fail_if_seq_scan
def matchuj_wydzial(nazwa):
    """Return the Wydzial whose name equals *nazwa* case-insensitively, or None."""
    szukana = nazwa.strip()
    try:
        return Wydzial.objects.get(nazwa__iexact=szukana)
    except Wydzial.DoesNotExist:
        return None
def matchuj_tytul(tytul: str, create_if_not_exist=False) ->Tytul:
    """Match a Tytul (academic title) by full name, falling back to abbreviation.

    *tytul* may be either the full name or the abbreviation. The full-name
    lookup is case-insensitive; on a miss (or an ambiguous hit) the value is
    normalized with ``normalize_tytul_naukowy`` and matched against ``skrot``.
    Raises ``Tytul.DoesNotExist`` when neither lookup succeeds.

    NOTE(review): ``create_if_not_exist`` is accepted but never used here --
    confirm whether callers rely on it before removing.
    """
    try:
        return Tytul.objects.get(nazwa__iexact=tytul)
    except (Tytul.DoesNotExist, Tytul.MultipleObjectsReturned):
        return Tytul.objects.get(skrot=normalize_tytul_naukowy(tytul))
def matchuj_funkcja_autora(funkcja_autora: str) ->Funkcja_Autora:
    """Match a Funkcja_Autora by normalized name or abbreviation (case-insensitive).

    Raises ``Funkcja_Autora.DoesNotExist`` when nothing matches.
    """
    needle = normalize_funkcja_autora(funkcja_autora)
    lookup = Q(nazwa__iexact=needle) | Q(skrot__iexact=needle)
    return Funkcja_Autora.objects.get(lookup)
def matchuj_grupa_pracownicza(grupa_pracownicza: str) ->Grupa_Pracownicza:
    """Match a Grupa_Pracownicza by its normalized name (case-insensitive).

    Raises ``Grupa_Pracownicza.DoesNotExist`` when nothing matches.
    """
    znormalizowana = normalize_grupa_pracownicza(grupa_pracownicza)
    return Grupa_Pracownicza.objects.get(nazwa__iexact=znormalizowana)
def matchuj_wymiar_etatu(wymiar_etatu: str) ->Wymiar_Etatu:
    """Match a Wymiar_Etatu by its normalized name (case-insensitive).

    Raises ``Wymiar_Etatu.DoesNotExist`` when nothing matches.
    """
    znormalizowany = normalize_wymiar_etatu(wymiar_etatu)
    return Wymiar_Etatu.objects.get(nazwa__iexact=znormalizowany)
def matchuj_jednostke(nazwa, wydzial=None):
    """Match a Jednostka (unit) by name or abbreviation.

    First an exact case-insensitive match on ``nazwa``/``skrot``; on a miss,
    a prefix match (with any trailing dot stripped). Ambiguous matches are
    retried narrowed to *wydzial* (compared by name) when it is given,
    otherwise ``Jednostka.MultipleObjectsReturned`` propagates.
    ``Jednostka.DoesNotExist`` propagates when nothing matches at all.
    """
    nazwa = normalize_nazwa_jednostki(nazwa)
    try:
        return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(
            skrot__iexact=nazwa))
    except Jednostka.DoesNotExist:
        # "Zakł. Chirurgii." style input -- retry as a prefix match.
        if nazwa.endswith('.'):
            nazwa = nazwa[:-1].strip()
        try:
            return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(
                skrot__istartswith=nazwa))
        except Jednostka.MultipleObjectsReturned as e:
            if wydzial is None:
                raise e
            # Disambiguate the prefix match by the unit's faculty.
            return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(
                skrot__istartswith=nazwa), Q(wydzial__nazwa__iexact=wydzial))
    except Jednostka.MultipleObjectsReturned as e:
        if wydzial is None:
            raise e
        # Disambiguate the exact match by the unit's faculty.
        return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(
            skrot__iexact=nazwa), Q(wydzial__nazwa__iexact=wydzial))
def matchuj_autora(imiona: str, nazwisko: str, jednostka: Union[Jednostka,
None]=None, bpp_id: Union[int, None]=None, pbn_uid_id: Union[str, None]
=None, system_kadrowy_id: Union[int, None]=None, pbn_id: Union[int,
None]=None, orcid: Union[str, None]=None, tytul_str: Union[Tytul, None]
=None):
if bpp_id is not None:
try:
return Autor.objects.get(pk=bpp_id)
except Autor.DoesNotExist:
pass
if orcid:
try:
return Autor.objects.get(orcid__iexact=orcid.strip())
except Autor.DoesNotExist:
pass
if pbn_uid_id is not None and pbn_uid_id.strip() != '':
_qset = Autor.objects.filter(pbn_uid_id=pbn_uid_id)
if _qset.exists():
return _qset.first()
if system_kadrowy_id is not None:
try:
int(system_kadrowy_id)
except (TypeError, ValueError):
system_kadrowy_id = None
if system_kadrowy_id is not None:
try:
return Autor.objects.get(system_kadrowy_id=system_kadrowy_id)
except Autor.DoesNotExist:
pass
if pbn_id is not None:
if isinstance(pbn_id, str):
pbn_id = pbn_id.strip()
try:
pbn_id = int(pbn_id)
except (TypeError, ValueError):
pbn_id = None
if pbn_id is not None:
try:
return Autor.objects.get(pbn_id=pbn_id)
except Autor.DoesNotExist:
pass
queries = [Q(Q(nazwisko__iexact=nazwisko.strip()) | Q(
poprzednie_nazwiska__icontains=nazwisko.strip()), imiona__iexact=
imiona.strip())]
if tytul_str:
queries.append(queries[0] & Q(tytul__skrot=tytul_str))
for qry in queries:
try:
return Autor.objects.get(qry)
except (Autor.DoesNotExist, Autor.MultipleObjectsReturned):
pass
try:
return Autor.objects.get(qry & Q(aktualna_jednostka=jednostka))
except (Autor.MultipleObjectsReturned, Autor.DoesNotExist):
pass
if jednostka:
queries = [Q(Q(autor__nazwisko__iexact=nazwisko.strip()) | Q(
autor__poprzednie_nazwiska__icontains=nazwisko.strip()),
autor__imiona__iexact=imiona.strip())]
if tytul_str:
queries.append(queries[0] & Q(autor__tytul__skrot=tytul_str))
for qry in queries:
try:
return jednostka.autor_jednostka_set.get(qry).autor
except (Autor_Jednostka.MultipleObjectsReturned,
Autor_Jednostka.DoesNotExist):
pass
return None
def matchuj_zrodlo(s: Union[str, None], issn: Union[str, None]=None, e_issn:
Union[str, None]=None, alt_nazwa=None) ->Union[None, Zrodlo]:
if s is None or str(s) == '':
return
if issn is not None:
try:
return Zrodlo.objects.get(issn=issn)
except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):
pass
if e_issn is not None:
try:
return Zrodlo.objects.get(e_issn=e_issn)
except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):
pass
for elem in (s, alt_nazwa):
if elem is None:
continue
elem = normalize_tytul_zrodla(elem)
try:
return Zrodlo.objects.get(Q(nazwa__iexact=elem) | Q(
skrot__iexact=elem))
except Zrodlo.MultipleObjectsReturned:
pass
except Zrodlo.DoesNotExist:
if elem.endswith('.'):
try:
return Zrodlo.objects.get(Q(nazwa__istartswith=elem[:-1
]) | Q(skrot__istartswith=elem[:-1]))
except Zrodlo.DoesNotExist:
pass
except Zrodlo.MultipleObjectsReturned:
pass
def matchuj_dyscypline(kod, nazwa):
nazwa = normalize_nazwa_dyscypliny(nazwa)
try:
return Dyscyplina_Naukowa.objects.get(nazwa=nazwa)
except Dyscyplina_Naukowa.DoesNotExist:
pass
except Dyscyplina_Naukowa.MultipleObjectsReturned:
pass
kod = normalize_kod_dyscypliny(kod)
try:
return Dyscyplina_Naukowa.objects.get(kod=kod)
except Dyscyplina_Naukowa.DoesNotExist:
pass
except Dyscyplina_Naukowa.MultipleObjectsReturned:
pass
def matchuj_wydawce(nazwa, pbn_uid_id=None, similarity=0.9):
nazwa = normalize_nazwa_wydawcy(nazwa)
try:
return Wydawca.objects.get(nazwa=nazwa, alias_dla_id=None)
except Wydawca.DoesNotExist:
pass
if pbn_uid_id is not None:
try:
return Wydawca.objects.get(pbn_uid_id=pbn_uid_id)
except Wydawca.DoesNotExist:
pass
loose = Wydawca.objects.annotate(similarity=TrigramSimilarity('nazwa',
nazwa)).filter(similarity__gte=similarity).order_by('-similarity')[:5]
if loose.count() > 0 and loose.count() < 2:
return loose.first()
TITLE_LIMIT_SINGLE_WORD = 15
TITLE_LIMIT_MANY_WORDS = 25
MATCH_SIMILARITY_THRESHOLD = 0.95
MATCH_SIMILARITY_THRESHOLD_LOW = 0.9
MATCH_SIMILARITY_THRESHOLD_VERY_LOW = 0.8
normalized_db_title = Trim(Replace(Replace(Lower('tytul_oryginalny'), Value
(' [online]'), Value('')), Value(' '), Value(' ')))
normalized_db_zrodlo_skrot = Trim(Replace(Replace(Replace(Lower('skrot'),
Value(' '), Value('')), Value('-'), Value('')), Value('.'), Value('')))
def normalize_zrodlo_skrot_for_db_lookup(s):
return s.lower().replace(' ', '').strip().replace('-', '').replace('.', '')
normalized_db_zrodlo_nazwa = Trim(Replace(Lower('nazwa'), Value(' '), Value
('')))
def normalize_zrodlo_nazwa_for_db_lookup(s):
return s.lower().replace(' ', '').strip()
normalized_db_isbn = Trim(Replace(Lower('isbn'), Value('-'), Value('')))
def matchuj_publikacje(klass: [Wydawnictwo_Zwarte, Wydawnictwo_Ciagle,
Rekord], title, year, doi=None, public_uri=None, isbn=None, zrodlo=None,
DEBUG_MATCHOWANIE=False, isbn_matchuj_tylko_nadrzedne=True,
doi_matchuj_tylko_nadrzedne=True):
if doi is not None:
doi = normalize_doi(doi)
if doi:
zapytanie = klass.objects.filter(doi__istartswith=doi, rok=year)
if doi_matchuj_tylko_nadrzedne:
if hasattr(klass, 'wydawnictwo_nadrzedne_id'):
zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)
res = zapytanie.annotate(podobienstwo=TrigramSimilarity(
normalized_db_title, title.lower())).order_by('-podobienstwo')[
:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first(
).podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:
return res.first()
title = normalize_tytul_publikacji(title)
title_has_spaces = False
if title is not None:
title_has_spaces = title.find(' ') > 0
if title is not None and (not title_has_spaces and len(title) >=
TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=
TITLE_LIMIT_MANY_WORDS):
if zrodlo is not None and hasattr(klass, 'zrodlo'):
try:
return klass.objects.get(tytul_oryginalny__istartswith=
title, rok=year, zrodlo=zrodlo)
except klass.DoesNotExist:
pass
except klass.MultipleObjectsReturned:
print(
f'PPP ZZZ MultipleObjectsReturned dla title={title} rok={year} zrodlo={zrodlo}'
)
if isbn is not None and isbn != '' and hasattr(klass, 'isbn') and hasattr(
klass, 'e_isbn'):
ni = normalize_isbn(isbn)
zapytanie = klass.objects.exclude(isbn=None, e_isbn=None).exclude(isbn
='', e_isbn='')
if isbn_matchuj_tylko_nadrzedne:
zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)
if klass == Rekord:
zapytanie = zapytanie.filter(pk__in=[(ContentType.objects.
get_for_model(Wydawnictwo_Zwarte).pk, x) for x in
Wydawnictwo_Zwarte.objects.
wydawnictwa_nadrzedne_dla_innych()])
elif klass == Wydawnictwo_Zwarte:
zapytanie = zapytanie.filter(pk__in=Wydawnictwo_Zwarte.
objects.wydawnictwa_nadrzedne_dla_innych())
else:
raise NotImplementedError(
'Matchowanie po ISBN dla czegoś innego niż wydawnictwo zwarte nie opracowane'
)
res = zapytanie.filter(Q(isbn=ni) | Q(e_isbn=ni)).annotate(podobienstwo
=TrigramSimilarity(normalized_db_title, title.lower())).order_by(
'-podobienstwo')[:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:
return res.first()
public_uri = normalize_public_uri(public_uri)
if public_uri:
res = klass.objects.filter(Q(www=public_uri) | Q(public_www=public_uri)
).annotate(podobienstwo=TrigramSimilarity(normalized_db_title,
title.lower())).order_by('-podobienstwo')[:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:
return res.first()
if title is not None and (not title_has_spaces and len(title) >=
TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=
TITLE_LIMIT_MANY_WORDS):
res = klass.objects.filter(tytul_oryginalny__istartswith=title, rok
=year).annotate(podobienstwo=TrigramSimilarity(
normalized_db_title, title.lower())).order_by('-podobienstwo')[:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:
return res.first()
res = klass.objects.filter(rok=year).annotate(podobienstwo=
TrigramSimilarity(normalized_db_title, title.lower())).order_by(
'-podobienstwo')[:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_LOW:
return res.first()
<|reserved_special_token_1|>
from typing import Union
from django.db.models import Q, Value
from django.db.models.functions import Lower, Replace, Trim
from .normalization import (
normalize_doi,
normalize_funkcja_autora,
normalize_grupa_pracownicza,
normalize_isbn,
normalize_kod_dyscypliny,
normalize_nazwa_dyscypliny,
normalize_nazwa_jednostki,
normalize_nazwa_wydawcy,
normalize_public_uri,
normalize_tytul_naukowy,
normalize_tytul_publikacji,
normalize_tytul_zrodla,
normalize_wymiar_etatu,
)
from django.contrib.contenttypes.models import ContentType
from django.contrib.postgres.search import TrigramSimilarity
from bpp.models import (
Autor,
Autor_Jednostka,
Dyscyplina_Naukowa,
Funkcja_Autora,
Grupa_Pracownicza,
Jednostka,
Rekord,
Tytul,
Wydawca,
Wydawnictwo_Ciagle,
Wydawnictwo_Zwarte,
Wydzial,
Wymiar_Etatu,
Zrodlo,
)
from bpp.util import fail_if_seq_scan
def matchuj_wydzial(nazwa):
try:
return Wydzial.objects.get(nazwa__iexact=nazwa.strip())
except Wydzial.DoesNotExist:
pass
def matchuj_tytul(tytul: str, create_if_not_exist=False) -> Tytul:
"""
Dostaje tytuł: pełną nazwę albo skrót
"""
try:
return Tytul.objects.get(nazwa__iexact=tytul)
except (Tytul.DoesNotExist, Tytul.MultipleObjectsReturned):
return Tytul.objects.get(skrot=normalize_tytul_naukowy(tytul))
def matchuj_funkcja_autora(funkcja_autora: str) -> Funkcja_Autora:
funkcja_autora = normalize_funkcja_autora(funkcja_autora)
return Funkcja_Autora.objects.get(
Q(nazwa__iexact=funkcja_autora) | Q(skrot__iexact=funkcja_autora)
)
def matchuj_grupa_pracownicza(grupa_pracownicza: str) -> Grupa_Pracownicza:
grupa_pracownicza = normalize_grupa_pracownicza(grupa_pracownicza)
return Grupa_Pracownicza.objects.get(nazwa__iexact=grupa_pracownicza)
def matchuj_wymiar_etatu(wymiar_etatu: str) -> Wymiar_Etatu:
wymiar_etatu = normalize_wymiar_etatu(wymiar_etatu)
return Wymiar_Etatu.objects.get(nazwa__iexact=wymiar_etatu)
def matchuj_jednostke(nazwa, wydzial=None):
nazwa = normalize_nazwa_jednostki(nazwa)
try:
return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(skrot__iexact=nazwa))
except Jednostka.DoesNotExist:
if nazwa.endswith("."):
nazwa = nazwa[:-1].strip()
try:
return Jednostka.objects.get(
Q(nazwa__istartswith=nazwa) | Q(skrot__istartswith=nazwa)
)
except Jednostka.MultipleObjectsReturned as e:
if wydzial is None:
raise e
return Jednostka.objects.get(
Q(nazwa__istartswith=nazwa) | Q(skrot__istartswith=nazwa),
Q(wydzial__nazwa__iexact=wydzial),
)
except Jednostka.MultipleObjectsReturned as e:
if wydzial is None:
raise e
return Jednostka.objects.get(
Q(nazwa__iexact=nazwa) | Q(skrot__iexact=nazwa),
Q(wydzial__nazwa__iexact=wydzial),
)
def matchuj_autora(
imiona: str,
nazwisko: str,
jednostka: Union[Jednostka, None] = None,
bpp_id: Union[int, None] = None,
pbn_uid_id: Union[str, None] = None,
system_kadrowy_id: Union[int, None] = None,
pbn_id: Union[int, None] = None,
orcid: Union[str, None] = None,
tytul_str: Union[Tytul, None] = None,
):
if bpp_id is not None:
try:
return Autor.objects.get(pk=bpp_id)
except Autor.DoesNotExist:
pass
if orcid:
try:
return Autor.objects.get(orcid__iexact=orcid.strip())
except Autor.DoesNotExist:
pass
if pbn_uid_id is not None and pbn_uid_id.strip() != "":
# Może być > 1 autor z takim pbn_uid_id
_qset = Autor.objects.filter(pbn_uid_id=pbn_uid_id)
if _qset.exists():
return _qset.first()
if system_kadrowy_id is not None:
try:
int(system_kadrowy_id)
except (TypeError, ValueError):
system_kadrowy_id = None
if system_kadrowy_id is not None:
try:
return Autor.objects.get(system_kadrowy_id=system_kadrowy_id)
except Autor.DoesNotExist:
pass
if pbn_id is not None:
if isinstance(pbn_id, str):
pbn_id = pbn_id.strip()
try:
pbn_id = int(pbn_id)
except (TypeError, ValueError):
pbn_id = None
if pbn_id is not None:
try:
return Autor.objects.get(pbn_id=pbn_id)
except Autor.DoesNotExist:
pass
queries = [
Q(
Q(nazwisko__iexact=nazwisko.strip())
| Q(poprzednie_nazwiska__icontains=nazwisko.strip()),
imiona__iexact=imiona.strip(),
)
]
if tytul_str:
queries.append(queries[0] & Q(tytul__skrot=tytul_str))
for qry in queries:
try:
return Autor.objects.get(qry)
except (Autor.DoesNotExist, Autor.MultipleObjectsReturned):
pass
try:
return Autor.objects.get(qry & Q(aktualna_jednostka=jednostka))
except (Autor.MultipleObjectsReturned, Autor.DoesNotExist):
pass
# Jesteśmy tutaj. Najwyraźniej poszukiwanie po aktualnej jednostce, imieniu, nazwisku,
# tytule itp nie bardzo się powiodło. Spróbujmy innej strategii -- jednostka jest
# określona, poszukajmy w jej autorach. Wszak nie musi być ta jednostka jednostką
# aktualną...
if jednostka:
queries = [
Q(
Q(autor__nazwisko__iexact=nazwisko.strip())
| Q(autor__poprzednie_nazwiska__icontains=nazwisko.strip()),
autor__imiona__iexact=imiona.strip(),
)
]
if tytul_str:
queries.append(queries[0] & Q(autor__tytul__skrot=tytul_str))
for qry in queries:
try:
return jednostka.autor_jednostka_set.get(qry).autor
except (
Autor_Jednostka.MultipleObjectsReturned,
Autor_Jednostka.DoesNotExist,
):
pass
return None
def matchuj_zrodlo(
s: Union[str, None],
issn: Union[str, None] = None,
e_issn: Union[str, None] = None,
alt_nazwa=None,
) -> Union[None, Zrodlo]:
if s is None or str(s) == "":
return
if issn is not None:
try:
return Zrodlo.objects.get(issn=issn)
except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):
pass
if e_issn is not None:
try:
return Zrodlo.objects.get(e_issn=e_issn)
except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):
pass
for elem in s, alt_nazwa:
if elem is None:
continue
elem = normalize_tytul_zrodla(elem)
try:
return Zrodlo.objects.get(Q(nazwa__iexact=elem) | Q(skrot__iexact=elem))
except Zrodlo.MultipleObjectsReturned:
pass
except Zrodlo.DoesNotExist:
if elem.endswith("."):
try:
return Zrodlo.objects.get(
Q(nazwa__istartswith=elem[:-1])
| Q(skrot__istartswith=elem[:-1])
)
except Zrodlo.DoesNotExist:
pass
except Zrodlo.MultipleObjectsReturned:
pass
def matchuj_dyscypline(kod, nazwa):
nazwa = normalize_nazwa_dyscypliny(nazwa)
try:
return Dyscyplina_Naukowa.objects.get(nazwa=nazwa)
except Dyscyplina_Naukowa.DoesNotExist:
pass
except Dyscyplina_Naukowa.MultipleObjectsReturned:
pass
kod = normalize_kod_dyscypliny(kod)
try:
return Dyscyplina_Naukowa.objects.get(kod=kod)
except Dyscyplina_Naukowa.DoesNotExist:
pass
except Dyscyplina_Naukowa.MultipleObjectsReturned:
pass
def matchuj_wydawce(nazwa, pbn_uid_id=None, similarity=0.9):
nazwa = normalize_nazwa_wydawcy(nazwa)
try:
return Wydawca.objects.get(nazwa=nazwa, alias_dla_id=None)
except Wydawca.DoesNotExist:
pass
if pbn_uid_id is not None:
try:
return Wydawca.objects.get(pbn_uid_id=pbn_uid_id)
except Wydawca.DoesNotExist:
pass
loose = (
Wydawca.objects.annotate(similarity=TrigramSimilarity("nazwa", nazwa))
.filter(similarity__gte=similarity)
.order_by("-similarity")[:5]
)
if loose.count() > 0 and loose.count() < 2:
return loose.first()
TITLE_LIMIT_SINGLE_WORD = 15
TITLE_LIMIT_MANY_WORDS = 25
MATCH_SIMILARITY_THRESHOLD = 0.95
MATCH_SIMILARITY_THRESHOLD_LOW = 0.90
MATCH_SIMILARITY_THRESHOLD_VERY_LOW = 0.80
# Znormalizowany tytuł w bazie danych -- wyrzucony ciąg znaków [online], podwójne
# spacje pozamieniane na pojedyncze, trim całości
normalized_db_title = Trim(
Replace(
Replace(Lower("tytul_oryginalny"), Value(" [online]"), Value("")),
Value(" "),
Value(" "),
)
)
# Znormalizowany skrót nazwy źródła -- wyrzucone spacje i kropki, trim, zmniejszone
# znaki
normalized_db_zrodlo_skrot = Trim(
Replace(
Replace(
Replace(Lower("skrot"), Value(" "), Value("")),
Value("-"),
Value(""),
),
Value("."),
Value(""),
)
)
def normalize_zrodlo_skrot_for_db_lookup(s):
return s.lower().replace(" ", "").strip().replace("-", "").replace(".", "")
# Znormalizowany skrot zrodla do wyszukiwania -- wyrzucone wszystko procz kropek
normalized_db_zrodlo_nazwa = Trim(
Replace(Lower("nazwa"), Value(" "), Value("")),
)
def normalize_zrodlo_nazwa_for_db_lookup(s):
return s.lower().replace(" ", "").strip()
normalized_db_isbn = Trim(Replace(Lower("isbn"), Value("-"), Value("")))
def matchuj_publikacje(
klass: [Wydawnictwo_Zwarte, Wydawnictwo_Ciagle, Rekord],
title,
year,
doi=None,
public_uri=None,
isbn=None,
zrodlo=None,
DEBUG_MATCHOWANIE=False,
isbn_matchuj_tylko_nadrzedne=True,
doi_matchuj_tylko_nadrzedne=True,
):
if doi is not None:
doi = normalize_doi(doi)
if doi:
zapytanie = klass.objects.filter(doi__istartswith=doi, rok=year)
if doi_matchuj_tylko_nadrzedne:
if hasattr(klass, "wydawnictwo_nadrzedne_id"):
zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)
res = zapytanie.annotate(
podobienstwo=TrigramSimilarity(normalized_db_title, title.lower())
).order_by("-podobienstwo")[:2]
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:
return res.first()
title = normalize_tytul_publikacji(title)
title_has_spaces = False
if title is not None:
title_has_spaces = title.find(" ") > 0
if title is not None and (
(not title_has_spaces and len(title) >= TITLE_LIMIT_SINGLE_WORD)
or (title_has_spaces and len(title) >= TITLE_LIMIT_MANY_WORDS)
):
if zrodlo is not None and hasattr(klass, "zrodlo"):
try:
return klass.objects.get(
tytul_oryginalny__istartswith=title, rok=year, zrodlo=zrodlo
)
except klass.DoesNotExist:
pass
except klass.MultipleObjectsReturned:
print(
f"PPP ZZZ MultipleObjectsReturned dla title={title} rok={year} zrodlo={zrodlo}"
)
if (
isbn is not None
and isbn != ""
and hasattr(klass, "isbn")
and hasattr(klass, "e_isbn")
):
ni = normalize_isbn(isbn)
zapytanie = klass.objects.exclude(isbn=None, e_isbn=None).exclude(
isbn="", e_isbn=""
)
if isbn_matchuj_tylko_nadrzedne:
zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)
if klass == Rekord:
zapytanie = zapytanie.filter(
pk__in=[
(ContentType.objects.get_for_model(Wydawnictwo_Zwarte).pk, x)
for x in Wydawnictwo_Zwarte.objects.wydawnictwa_nadrzedne_dla_innych()
]
)
elif klass == Wydawnictwo_Zwarte:
zapytanie = zapytanie.filter(
pk__in=Wydawnictwo_Zwarte.objects.wydawnictwa_nadrzedne_dla_innych()
)
else:
raise NotImplementedError(
"Matchowanie po ISBN dla czegoś innego niż wydawnictwo zwarte nie opracowane"
)
#
# Uwaga uwaga uwaga.
#
# Gdy matchujemy ISBN, to w BPP dochodzi do takiej nieciekawej sytuacji: wpisywany jest
# ISBN zarówno dla rozdziałów jak i dla wydawnictw nadrzędnych.
#
# Zatem, na ten moment, aby usprawnić matchowanie ISBN, jeżeli ustawiona jest flaga
# isbn_matchuj_tylko_nadrzedne, to system bedzie szukał tylko i wyłącznie wśród
# rekordów będących wydawnictwami nadrzędnymi (czyli nie mającymi rekordów podrzędnych)
#
res = (
zapytanie.filter(Q(isbn=ni) | Q(e_isbn=ni))
.annotate(
podobienstwo=TrigramSimilarity(
normalized_db_title,
title.lower(),
)
)
.order_by("-podobienstwo")[:2]
)
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:
return res.first()
public_uri = normalize_public_uri(public_uri)
if public_uri:
res = (
klass.objects.filter(Q(www=public_uri) | Q(public_www=public_uri))
.annotate(
podobienstwo=TrigramSimilarity(normalized_db_title, title.lower())
)
.order_by("-podobienstwo")[:2]
)
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:
return res.first()
if title is not None and (
(not title_has_spaces and len(title) >= TITLE_LIMIT_SINGLE_WORD)
or (title_has_spaces and len(title) >= TITLE_LIMIT_MANY_WORDS)
):
res = (
klass.objects.filter(tytul_oryginalny__istartswith=title, rok=year)
.annotate(
podobienstwo=TrigramSimilarity(normalized_db_title, title.lower())
)
.order_by("-podobienstwo")[:2]
)
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:
return res.first()
# Ostatnia szansa, po podobieństwie, niski próg
res = (
klass.objects.filter(rok=year)
.annotate(
podobienstwo=TrigramSimilarity(normalized_db_title, title.lower())
)
.order_by("-podobienstwo")[:2]
)
fail_if_seq_scan(res, DEBUG_MATCHOWANIE)
if res.exists():
if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_LOW:
return res.first()
|
flexible
|
{
"blob_id": "47025a30d79341ff0819fe87638e35960a5fc87d",
"index": 6446,
"step-1": "<mask token>\n\n\ndef matchuj_wydzial(nazwa):\n try:\n return Wydzial.objects.get(nazwa__iexact=nazwa.strip())\n except Wydzial.DoesNotExist:\n pass\n\n\ndef matchuj_tytul(tytul: str, create_if_not_exist=False) ->Tytul:\n \"\"\"\n Dostaje tytuł: pełną nazwę albo skrót\n \"\"\"\n try:\n return Tytul.objects.get(nazwa__iexact=tytul)\n except (Tytul.DoesNotExist, Tytul.MultipleObjectsReturned):\n return Tytul.objects.get(skrot=normalize_tytul_naukowy(tytul))\n\n\ndef matchuj_funkcja_autora(funkcja_autora: str) ->Funkcja_Autora:\n funkcja_autora = normalize_funkcja_autora(funkcja_autora)\n return Funkcja_Autora.objects.get(Q(nazwa__iexact=funkcja_autora) | Q(\n skrot__iexact=funkcja_autora))\n\n\ndef matchuj_grupa_pracownicza(grupa_pracownicza: str) ->Grupa_Pracownicza:\n grupa_pracownicza = normalize_grupa_pracownicza(grupa_pracownicza)\n return Grupa_Pracownicza.objects.get(nazwa__iexact=grupa_pracownicza)\n\n\n<mask token>\n\n\ndef matchuj_jednostke(nazwa, wydzial=None):\n nazwa = normalize_nazwa_jednostki(nazwa)\n try:\n return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(\n skrot__iexact=nazwa))\n except Jednostka.DoesNotExist:\n if nazwa.endswith('.'):\n nazwa = nazwa[:-1].strip()\n try:\n return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(\n skrot__istartswith=nazwa))\n except Jednostka.MultipleObjectsReturned as e:\n if wydzial is None:\n raise e\n return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(\n skrot__istartswith=nazwa), Q(wydzial__nazwa__iexact=wydzial))\n except Jednostka.MultipleObjectsReturned as e:\n if wydzial is None:\n raise e\n return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(\n skrot__iexact=nazwa), Q(wydzial__nazwa__iexact=wydzial))\n\n\n<mask token>\n\n\ndef matchuj_zrodlo(s: Union[str, None], issn: Union[str, None]=None, e_issn:\n Union[str, None]=None, alt_nazwa=None) ->Union[None, Zrodlo]:\n if s is None or str(s) == '':\n return\n if issn is not None:\n try:\n return 
Zrodlo.objects.get(issn=issn)\n except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):\n pass\n if e_issn is not None:\n try:\n return Zrodlo.objects.get(e_issn=e_issn)\n except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):\n pass\n for elem in (s, alt_nazwa):\n if elem is None:\n continue\n elem = normalize_tytul_zrodla(elem)\n try:\n return Zrodlo.objects.get(Q(nazwa__iexact=elem) | Q(\n skrot__iexact=elem))\n except Zrodlo.MultipleObjectsReturned:\n pass\n except Zrodlo.DoesNotExist:\n if elem.endswith('.'):\n try:\n return Zrodlo.objects.get(Q(nazwa__istartswith=elem[:-1\n ]) | Q(skrot__istartswith=elem[:-1]))\n except Zrodlo.DoesNotExist:\n pass\n except Zrodlo.MultipleObjectsReturned:\n pass\n\n\ndef matchuj_dyscypline(kod, nazwa):\n nazwa = normalize_nazwa_dyscypliny(nazwa)\n try:\n return Dyscyplina_Naukowa.objects.get(nazwa=nazwa)\n except Dyscyplina_Naukowa.DoesNotExist:\n pass\n except Dyscyplina_Naukowa.MultipleObjectsReturned:\n pass\n kod = normalize_kod_dyscypliny(kod)\n try:\n return Dyscyplina_Naukowa.objects.get(kod=kod)\n except Dyscyplina_Naukowa.DoesNotExist:\n pass\n except Dyscyplina_Naukowa.MultipleObjectsReturned:\n pass\n\n\ndef matchuj_wydawce(nazwa, pbn_uid_id=None, similarity=0.9):\n nazwa = normalize_nazwa_wydawcy(nazwa)\n try:\n return Wydawca.objects.get(nazwa=nazwa, alias_dla_id=None)\n except Wydawca.DoesNotExist:\n pass\n if pbn_uid_id is not None:\n try:\n return Wydawca.objects.get(pbn_uid_id=pbn_uid_id)\n except Wydawca.DoesNotExist:\n pass\n loose = Wydawca.objects.annotate(similarity=TrigramSimilarity('nazwa',\n nazwa)).filter(similarity__gte=similarity).order_by('-similarity')[:5]\n if loose.count() > 0 and loose.count() < 2:\n return loose.first()\n\n\n<mask token>\n\n\ndef normalize_zrodlo_skrot_for_db_lookup(s):\n return s.lower().replace(' ', '').strip().replace('-', '').replace('.', '')\n\n\n<mask token>\n\n\ndef normalize_zrodlo_nazwa_for_db_lookup(s):\n return s.lower().replace(' ', 
'').strip()\n\n\n<mask token>\n\n\ndef matchuj_publikacje(klass: [Wydawnictwo_Zwarte, Wydawnictwo_Ciagle,\n Rekord], title, year, doi=None, public_uri=None, isbn=None, zrodlo=None,\n DEBUG_MATCHOWANIE=False, isbn_matchuj_tylko_nadrzedne=True,\n doi_matchuj_tylko_nadrzedne=True):\n if doi is not None:\n doi = normalize_doi(doi)\n if doi:\n zapytanie = klass.objects.filter(doi__istartswith=doi, rok=year)\n if doi_matchuj_tylko_nadrzedne:\n if hasattr(klass, 'wydawnictwo_nadrzedne_id'):\n zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)\n res = zapytanie.annotate(podobienstwo=TrigramSimilarity(\n normalized_db_title, title.lower())).order_by('-podobienstwo')[\n :2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first(\n ).podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:\n return res.first()\n title = normalize_tytul_publikacji(title)\n title_has_spaces = False\n if title is not None:\n title_has_spaces = title.find(' ') > 0\n if title is not None and (not title_has_spaces and len(title) >=\n TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=\n TITLE_LIMIT_MANY_WORDS):\n if zrodlo is not None and hasattr(klass, 'zrodlo'):\n try:\n return klass.objects.get(tytul_oryginalny__istartswith=\n title, rok=year, zrodlo=zrodlo)\n except klass.DoesNotExist:\n pass\n except klass.MultipleObjectsReturned:\n print(\n f'PPP ZZZ MultipleObjectsReturned dla title={title} rok={year} zrodlo={zrodlo}'\n )\n if isbn is not None and isbn != '' and hasattr(klass, 'isbn') and hasattr(\n klass, 'e_isbn'):\n ni = normalize_isbn(isbn)\n zapytanie = klass.objects.exclude(isbn=None, e_isbn=None).exclude(isbn\n ='', e_isbn='')\n if isbn_matchuj_tylko_nadrzedne:\n zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)\n if klass == Rekord:\n zapytanie = zapytanie.filter(pk__in=[(ContentType.objects.\n get_for_model(Wydawnictwo_Zwarte).pk, x) for x in\n Wydawnictwo_Zwarte.objects.\n wydawnictwa_nadrzedne_dla_innych()])\n elif klass == 
Wydawnictwo_Zwarte:\n zapytanie = zapytanie.filter(pk__in=Wydawnictwo_Zwarte.\n objects.wydawnictwa_nadrzedne_dla_innych())\n else:\n raise NotImplementedError(\n 'Matchowanie po ISBN dla czegoś innego niż wydawnictwo zwarte nie opracowane'\n )\n res = zapytanie.filter(Q(isbn=ni) | Q(e_isbn=ni)).annotate(podobienstwo\n =TrigramSimilarity(normalized_db_title, title.lower())).order_by(\n '-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:\n return res.first()\n public_uri = normalize_public_uri(public_uri)\n if public_uri:\n res = klass.objects.filter(Q(www=public_uri) | Q(public_www=public_uri)\n ).annotate(podobienstwo=TrigramSimilarity(normalized_db_title,\n title.lower())).order_by('-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:\n return res.first()\n if title is not None and (not title_has_spaces and len(title) >=\n TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=\n TITLE_LIMIT_MANY_WORDS):\n res = klass.objects.filter(tytul_oryginalny__istartswith=title, rok\n =year).annotate(podobienstwo=TrigramSimilarity(\n normalized_db_title, title.lower())).order_by('-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:\n return res.first()\n res = klass.objects.filter(rok=year).annotate(podobienstwo=\n TrigramSimilarity(normalized_db_title, title.lower())).order_by(\n '-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_LOW:\n return res.first()\n",
"step-2": "<mask token>\n\n\ndef matchuj_wydzial(nazwa):\n try:\n return Wydzial.objects.get(nazwa__iexact=nazwa.strip())\n except Wydzial.DoesNotExist:\n pass\n\n\ndef matchuj_tytul(tytul: str, create_if_not_exist=False) ->Tytul:\n \"\"\"\n Dostaje tytuł: pełną nazwę albo skrót\n \"\"\"\n try:\n return Tytul.objects.get(nazwa__iexact=tytul)\n except (Tytul.DoesNotExist, Tytul.MultipleObjectsReturned):\n return Tytul.objects.get(skrot=normalize_tytul_naukowy(tytul))\n\n\ndef matchuj_funkcja_autora(funkcja_autora: str) ->Funkcja_Autora:\n funkcja_autora = normalize_funkcja_autora(funkcja_autora)\n return Funkcja_Autora.objects.get(Q(nazwa__iexact=funkcja_autora) | Q(\n skrot__iexact=funkcja_autora))\n\n\ndef matchuj_grupa_pracownicza(grupa_pracownicza: str) ->Grupa_Pracownicza:\n grupa_pracownicza = normalize_grupa_pracownicza(grupa_pracownicza)\n return Grupa_Pracownicza.objects.get(nazwa__iexact=grupa_pracownicza)\n\n\ndef matchuj_wymiar_etatu(wymiar_etatu: str) ->Wymiar_Etatu:\n wymiar_etatu = normalize_wymiar_etatu(wymiar_etatu)\n return Wymiar_Etatu.objects.get(nazwa__iexact=wymiar_etatu)\n\n\ndef matchuj_jednostke(nazwa, wydzial=None):\n nazwa = normalize_nazwa_jednostki(nazwa)\n try:\n return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(\n skrot__iexact=nazwa))\n except Jednostka.DoesNotExist:\n if nazwa.endswith('.'):\n nazwa = nazwa[:-1].strip()\n try:\n return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(\n skrot__istartswith=nazwa))\n except Jednostka.MultipleObjectsReturned as e:\n if wydzial is None:\n raise e\n return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(\n skrot__istartswith=nazwa), Q(wydzial__nazwa__iexact=wydzial))\n except Jednostka.MultipleObjectsReturned as e:\n if wydzial is None:\n raise e\n return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(\n skrot__iexact=nazwa), Q(wydzial__nazwa__iexact=wydzial))\n\n\n<mask token>\n\n\ndef matchuj_zrodlo(s: Union[str, None], issn: Union[str, None]=None, e_issn:\n 
Union[str, None]=None, alt_nazwa=None) ->Union[None, Zrodlo]:\n if s is None or str(s) == '':\n return\n if issn is not None:\n try:\n return Zrodlo.objects.get(issn=issn)\n except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):\n pass\n if e_issn is not None:\n try:\n return Zrodlo.objects.get(e_issn=e_issn)\n except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):\n pass\n for elem in (s, alt_nazwa):\n if elem is None:\n continue\n elem = normalize_tytul_zrodla(elem)\n try:\n return Zrodlo.objects.get(Q(nazwa__iexact=elem) | Q(\n skrot__iexact=elem))\n except Zrodlo.MultipleObjectsReturned:\n pass\n except Zrodlo.DoesNotExist:\n if elem.endswith('.'):\n try:\n return Zrodlo.objects.get(Q(nazwa__istartswith=elem[:-1\n ]) | Q(skrot__istartswith=elem[:-1]))\n except Zrodlo.DoesNotExist:\n pass\n except Zrodlo.MultipleObjectsReturned:\n pass\n\n\ndef matchuj_dyscypline(kod, nazwa):\n nazwa = normalize_nazwa_dyscypliny(nazwa)\n try:\n return Dyscyplina_Naukowa.objects.get(nazwa=nazwa)\n except Dyscyplina_Naukowa.DoesNotExist:\n pass\n except Dyscyplina_Naukowa.MultipleObjectsReturned:\n pass\n kod = normalize_kod_dyscypliny(kod)\n try:\n return Dyscyplina_Naukowa.objects.get(kod=kod)\n except Dyscyplina_Naukowa.DoesNotExist:\n pass\n except Dyscyplina_Naukowa.MultipleObjectsReturned:\n pass\n\n\ndef matchuj_wydawce(nazwa, pbn_uid_id=None, similarity=0.9):\n nazwa = normalize_nazwa_wydawcy(nazwa)\n try:\n return Wydawca.objects.get(nazwa=nazwa, alias_dla_id=None)\n except Wydawca.DoesNotExist:\n pass\n if pbn_uid_id is not None:\n try:\n return Wydawca.objects.get(pbn_uid_id=pbn_uid_id)\n except Wydawca.DoesNotExist:\n pass\n loose = Wydawca.objects.annotate(similarity=TrigramSimilarity('nazwa',\n nazwa)).filter(similarity__gte=similarity).order_by('-similarity')[:5]\n if loose.count() > 0 and loose.count() < 2:\n return loose.first()\n\n\n<mask token>\n\n\ndef normalize_zrodlo_skrot_for_db_lookup(s):\n return s.lower().replace(' ', 
'').strip().replace('-', '').replace('.', '')\n\n\n<mask token>\n\n\ndef normalize_zrodlo_nazwa_for_db_lookup(s):\n return s.lower().replace(' ', '').strip()\n\n\n<mask token>\n\n\ndef matchuj_publikacje(klass: [Wydawnictwo_Zwarte, Wydawnictwo_Ciagle,\n Rekord], title, year, doi=None, public_uri=None, isbn=None, zrodlo=None,\n DEBUG_MATCHOWANIE=False, isbn_matchuj_tylko_nadrzedne=True,\n doi_matchuj_tylko_nadrzedne=True):\n if doi is not None:\n doi = normalize_doi(doi)\n if doi:\n zapytanie = klass.objects.filter(doi__istartswith=doi, rok=year)\n if doi_matchuj_tylko_nadrzedne:\n if hasattr(klass, 'wydawnictwo_nadrzedne_id'):\n zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)\n res = zapytanie.annotate(podobienstwo=TrigramSimilarity(\n normalized_db_title, title.lower())).order_by('-podobienstwo')[\n :2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first(\n ).podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:\n return res.first()\n title = normalize_tytul_publikacji(title)\n title_has_spaces = False\n if title is not None:\n title_has_spaces = title.find(' ') > 0\n if title is not None and (not title_has_spaces and len(title) >=\n TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=\n TITLE_LIMIT_MANY_WORDS):\n if zrodlo is not None and hasattr(klass, 'zrodlo'):\n try:\n return klass.objects.get(tytul_oryginalny__istartswith=\n title, rok=year, zrodlo=zrodlo)\n except klass.DoesNotExist:\n pass\n except klass.MultipleObjectsReturned:\n print(\n f'PPP ZZZ MultipleObjectsReturned dla title={title} rok={year} zrodlo={zrodlo}'\n )\n if isbn is not None and isbn != '' and hasattr(klass, 'isbn') and hasattr(\n klass, 'e_isbn'):\n ni = normalize_isbn(isbn)\n zapytanie = klass.objects.exclude(isbn=None, e_isbn=None).exclude(isbn\n ='', e_isbn='')\n if isbn_matchuj_tylko_nadrzedne:\n zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)\n if klass == Rekord:\n zapytanie = zapytanie.filter(pk__in=[(ContentType.objects.\n 
get_for_model(Wydawnictwo_Zwarte).pk, x) for x in\n Wydawnictwo_Zwarte.objects.\n wydawnictwa_nadrzedne_dla_innych()])\n elif klass == Wydawnictwo_Zwarte:\n zapytanie = zapytanie.filter(pk__in=Wydawnictwo_Zwarte.\n objects.wydawnictwa_nadrzedne_dla_innych())\n else:\n raise NotImplementedError(\n 'Matchowanie po ISBN dla czegoś innego niż wydawnictwo zwarte nie opracowane'\n )\n res = zapytanie.filter(Q(isbn=ni) | Q(e_isbn=ni)).annotate(podobienstwo\n =TrigramSimilarity(normalized_db_title, title.lower())).order_by(\n '-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:\n return res.first()\n public_uri = normalize_public_uri(public_uri)\n if public_uri:\n res = klass.objects.filter(Q(www=public_uri) | Q(public_www=public_uri)\n ).annotate(podobienstwo=TrigramSimilarity(normalized_db_title,\n title.lower())).order_by('-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:\n return res.first()\n if title is not None and (not title_has_spaces and len(title) >=\n TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=\n TITLE_LIMIT_MANY_WORDS):\n res = klass.objects.filter(tytul_oryginalny__istartswith=title, rok\n =year).annotate(podobienstwo=TrigramSimilarity(\n normalized_db_title, title.lower())).order_by('-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:\n return res.first()\n res = klass.objects.filter(rok=year).annotate(podobienstwo=\n TrigramSimilarity(normalized_db_title, title.lower())).order_by(\n '-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_LOW:\n return res.first()\n",
"step-3": "<mask token>\n\n\ndef matchuj_wydzial(nazwa):\n try:\n return Wydzial.objects.get(nazwa__iexact=nazwa.strip())\n except Wydzial.DoesNotExist:\n pass\n\n\ndef matchuj_tytul(tytul: str, create_if_not_exist=False) ->Tytul:\n \"\"\"\n Dostaje tytuł: pełną nazwę albo skrót\n \"\"\"\n try:\n return Tytul.objects.get(nazwa__iexact=tytul)\n except (Tytul.DoesNotExist, Tytul.MultipleObjectsReturned):\n return Tytul.objects.get(skrot=normalize_tytul_naukowy(tytul))\n\n\ndef matchuj_funkcja_autora(funkcja_autora: str) ->Funkcja_Autora:\n funkcja_autora = normalize_funkcja_autora(funkcja_autora)\n return Funkcja_Autora.objects.get(Q(nazwa__iexact=funkcja_autora) | Q(\n skrot__iexact=funkcja_autora))\n\n\ndef matchuj_grupa_pracownicza(grupa_pracownicza: str) ->Grupa_Pracownicza:\n grupa_pracownicza = normalize_grupa_pracownicza(grupa_pracownicza)\n return Grupa_Pracownicza.objects.get(nazwa__iexact=grupa_pracownicza)\n\n\ndef matchuj_wymiar_etatu(wymiar_etatu: str) ->Wymiar_Etatu:\n wymiar_etatu = normalize_wymiar_etatu(wymiar_etatu)\n return Wymiar_Etatu.objects.get(nazwa__iexact=wymiar_etatu)\n\n\ndef matchuj_jednostke(nazwa, wydzial=None):\n nazwa = normalize_nazwa_jednostki(nazwa)\n try:\n return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(\n skrot__iexact=nazwa))\n except Jednostka.DoesNotExist:\n if nazwa.endswith('.'):\n nazwa = nazwa[:-1].strip()\n try:\n return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(\n skrot__istartswith=nazwa))\n except Jednostka.MultipleObjectsReturned as e:\n if wydzial is None:\n raise e\n return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(\n skrot__istartswith=nazwa), Q(wydzial__nazwa__iexact=wydzial))\n except Jednostka.MultipleObjectsReturned as e:\n if wydzial is None:\n raise e\n return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(\n skrot__iexact=nazwa), Q(wydzial__nazwa__iexact=wydzial))\n\n\ndef matchuj_autora(imiona: str, nazwisko: str, jednostka: Union[Jednostka,\n None]=None, bpp_id: 
Union[int, None]=None, pbn_uid_id: Union[str, None]\n =None, system_kadrowy_id: Union[int, None]=None, pbn_id: Union[int,\n None]=None, orcid: Union[str, None]=None, tytul_str: Union[Tytul, None]\n =None):\n if bpp_id is not None:\n try:\n return Autor.objects.get(pk=bpp_id)\n except Autor.DoesNotExist:\n pass\n if orcid:\n try:\n return Autor.objects.get(orcid__iexact=orcid.strip())\n except Autor.DoesNotExist:\n pass\n if pbn_uid_id is not None and pbn_uid_id.strip() != '':\n _qset = Autor.objects.filter(pbn_uid_id=pbn_uid_id)\n if _qset.exists():\n return _qset.first()\n if system_kadrowy_id is not None:\n try:\n int(system_kadrowy_id)\n except (TypeError, ValueError):\n system_kadrowy_id = None\n if system_kadrowy_id is not None:\n try:\n return Autor.objects.get(system_kadrowy_id=system_kadrowy_id)\n except Autor.DoesNotExist:\n pass\n if pbn_id is not None:\n if isinstance(pbn_id, str):\n pbn_id = pbn_id.strip()\n try:\n pbn_id = int(pbn_id)\n except (TypeError, ValueError):\n pbn_id = None\n if pbn_id is not None:\n try:\n return Autor.objects.get(pbn_id=pbn_id)\n except Autor.DoesNotExist:\n pass\n queries = [Q(Q(nazwisko__iexact=nazwisko.strip()) | Q(\n poprzednie_nazwiska__icontains=nazwisko.strip()), imiona__iexact=\n imiona.strip())]\n if tytul_str:\n queries.append(queries[0] & Q(tytul__skrot=tytul_str))\n for qry in queries:\n try:\n return Autor.objects.get(qry)\n except (Autor.DoesNotExist, Autor.MultipleObjectsReturned):\n pass\n try:\n return Autor.objects.get(qry & Q(aktualna_jednostka=jednostka))\n except (Autor.MultipleObjectsReturned, Autor.DoesNotExist):\n pass\n if jednostka:\n queries = [Q(Q(autor__nazwisko__iexact=nazwisko.strip()) | Q(\n autor__poprzednie_nazwiska__icontains=nazwisko.strip()),\n autor__imiona__iexact=imiona.strip())]\n if tytul_str:\n queries.append(queries[0] & Q(autor__tytul__skrot=tytul_str))\n for qry in queries:\n try:\n return jednostka.autor_jednostka_set.get(qry).autor\n except 
(Autor_Jednostka.MultipleObjectsReturned,\n Autor_Jednostka.DoesNotExist):\n pass\n return None\n\n\ndef matchuj_zrodlo(s: Union[str, None], issn: Union[str, None]=None, e_issn:\n Union[str, None]=None, alt_nazwa=None) ->Union[None, Zrodlo]:\n if s is None or str(s) == '':\n return\n if issn is not None:\n try:\n return Zrodlo.objects.get(issn=issn)\n except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):\n pass\n if e_issn is not None:\n try:\n return Zrodlo.objects.get(e_issn=e_issn)\n except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):\n pass\n for elem in (s, alt_nazwa):\n if elem is None:\n continue\n elem = normalize_tytul_zrodla(elem)\n try:\n return Zrodlo.objects.get(Q(nazwa__iexact=elem) | Q(\n skrot__iexact=elem))\n except Zrodlo.MultipleObjectsReturned:\n pass\n except Zrodlo.DoesNotExist:\n if elem.endswith('.'):\n try:\n return Zrodlo.objects.get(Q(nazwa__istartswith=elem[:-1\n ]) | Q(skrot__istartswith=elem[:-1]))\n except Zrodlo.DoesNotExist:\n pass\n except Zrodlo.MultipleObjectsReturned:\n pass\n\n\ndef matchuj_dyscypline(kod, nazwa):\n nazwa = normalize_nazwa_dyscypliny(nazwa)\n try:\n return Dyscyplina_Naukowa.objects.get(nazwa=nazwa)\n except Dyscyplina_Naukowa.DoesNotExist:\n pass\n except Dyscyplina_Naukowa.MultipleObjectsReturned:\n pass\n kod = normalize_kod_dyscypliny(kod)\n try:\n return Dyscyplina_Naukowa.objects.get(kod=kod)\n except Dyscyplina_Naukowa.DoesNotExist:\n pass\n except Dyscyplina_Naukowa.MultipleObjectsReturned:\n pass\n\n\ndef matchuj_wydawce(nazwa, pbn_uid_id=None, similarity=0.9):\n nazwa = normalize_nazwa_wydawcy(nazwa)\n try:\n return Wydawca.objects.get(nazwa=nazwa, alias_dla_id=None)\n except Wydawca.DoesNotExist:\n pass\n if pbn_uid_id is not None:\n try:\n return Wydawca.objects.get(pbn_uid_id=pbn_uid_id)\n except Wydawca.DoesNotExist:\n pass\n loose = Wydawca.objects.annotate(similarity=TrigramSimilarity('nazwa',\n nazwa)).filter(similarity__gte=similarity).order_by('-similarity')[:5]\n if 
loose.count() > 0 and loose.count() < 2:\n return loose.first()\n\n\n<mask token>\n\n\ndef normalize_zrodlo_skrot_for_db_lookup(s):\n return s.lower().replace(' ', '').strip().replace('-', '').replace('.', '')\n\n\n<mask token>\n\n\ndef normalize_zrodlo_nazwa_for_db_lookup(s):\n return s.lower().replace(' ', '').strip()\n\n\n<mask token>\n\n\ndef matchuj_publikacje(klass: [Wydawnictwo_Zwarte, Wydawnictwo_Ciagle,\n Rekord], title, year, doi=None, public_uri=None, isbn=None, zrodlo=None,\n DEBUG_MATCHOWANIE=False, isbn_matchuj_tylko_nadrzedne=True,\n doi_matchuj_tylko_nadrzedne=True):\n if doi is not None:\n doi = normalize_doi(doi)\n if doi:\n zapytanie = klass.objects.filter(doi__istartswith=doi, rok=year)\n if doi_matchuj_tylko_nadrzedne:\n if hasattr(klass, 'wydawnictwo_nadrzedne_id'):\n zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)\n res = zapytanie.annotate(podobienstwo=TrigramSimilarity(\n normalized_db_title, title.lower())).order_by('-podobienstwo')[\n :2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first(\n ).podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:\n return res.first()\n title = normalize_tytul_publikacji(title)\n title_has_spaces = False\n if title is not None:\n title_has_spaces = title.find(' ') > 0\n if title is not None and (not title_has_spaces and len(title) >=\n TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=\n TITLE_LIMIT_MANY_WORDS):\n if zrodlo is not None and hasattr(klass, 'zrodlo'):\n try:\n return klass.objects.get(tytul_oryginalny__istartswith=\n title, rok=year, zrodlo=zrodlo)\n except klass.DoesNotExist:\n pass\n except klass.MultipleObjectsReturned:\n print(\n f'PPP ZZZ MultipleObjectsReturned dla title={title} rok={year} zrodlo={zrodlo}'\n )\n if isbn is not None and isbn != '' and hasattr(klass, 'isbn') and hasattr(\n klass, 'e_isbn'):\n ni = normalize_isbn(isbn)\n zapytanie = klass.objects.exclude(isbn=None, e_isbn=None).exclude(isbn\n ='', e_isbn='')\n if 
isbn_matchuj_tylko_nadrzedne:\n zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)\n if klass == Rekord:\n zapytanie = zapytanie.filter(pk__in=[(ContentType.objects.\n get_for_model(Wydawnictwo_Zwarte).pk, x) for x in\n Wydawnictwo_Zwarte.objects.\n wydawnictwa_nadrzedne_dla_innych()])\n elif klass == Wydawnictwo_Zwarte:\n zapytanie = zapytanie.filter(pk__in=Wydawnictwo_Zwarte.\n objects.wydawnictwa_nadrzedne_dla_innych())\n else:\n raise NotImplementedError(\n 'Matchowanie po ISBN dla czegoś innego niż wydawnictwo zwarte nie opracowane'\n )\n res = zapytanie.filter(Q(isbn=ni) | Q(e_isbn=ni)).annotate(podobienstwo\n =TrigramSimilarity(normalized_db_title, title.lower())).order_by(\n '-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:\n return res.first()\n public_uri = normalize_public_uri(public_uri)\n if public_uri:\n res = klass.objects.filter(Q(www=public_uri) | Q(public_www=public_uri)\n ).annotate(podobienstwo=TrigramSimilarity(normalized_db_title,\n title.lower())).order_by('-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:\n return res.first()\n if title is not None and (not title_has_spaces and len(title) >=\n TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=\n TITLE_LIMIT_MANY_WORDS):\n res = klass.objects.filter(tytul_oryginalny__istartswith=title, rok\n =year).annotate(podobienstwo=TrigramSimilarity(\n normalized_db_title, title.lower())).order_by('-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:\n return res.first()\n res = klass.objects.filter(rok=year).annotate(podobienstwo=\n TrigramSimilarity(normalized_db_title, title.lower())).order_by(\n '-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if 
res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_LOW:\n return res.first()\n",
"step-4": "from typing import Union\nfrom django.db.models import Q, Value\nfrom django.db.models.functions import Lower, Replace, Trim\nfrom .normalization import normalize_doi, normalize_funkcja_autora, normalize_grupa_pracownicza, normalize_isbn, normalize_kod_dyscypliny, normalize_nazwa_dyscypliny, normalize_nazwa_jednostki, normalize_nazwa_wydawcy, normalize_public_uri, normalize_tytul_naukowy, normalize_tytul_publikacji, normalize_tytul_zrodla, normalize_wymiar_etatu\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.contrib.postgres.search import TrigramSimilarity\nfrom bpp.models import Autor, Autor_Jednostka, Dyscyplina_Naukowa, Funkcja_Autora, Grupa_Pracownicza, Jednostka, Rekord, Tytul, Wydawca, Wydawnictwo_Ciagle, Wydawnictwo_Zwarte, Wydzial, Wymiar_Etatu, Zrodlo\nfrom bpp.util import fail_if_seq_scan\n\n\ndef matchuj_wydzial(nazwa):\n try:\n return Wydzial.objects.get(nazwa__iexact=nazwa.strip())\n except Wydzial.DoesNotExist:\n pass\n\n\ndef matchuj_tytul(tytul: str, create_if_not_exist=False) ->Tytul:\n \"\"\"\n Dostaje tytuł: pełną nazwę albo skrót\n \"\"\"\n try:\n return Tytul.objects.get(nazwa__iexact=tytul)\n except (Tytul.DoesNotExist, Tytul.MultipleObjectsReturned):\n return Tytul.objects.get(skrot=normalize_tytul_naukowy(tytul))\n\n\ndef matchuj_funkcja_autora(funkcja_autora: str) ->Funkcja_Autora:\n funkcja_autora = normalize_funkcja_autora(funkcja_autora)\n return Funkcja_Autora.objects.get(Q(nazwa__iexact=funkcja_autora) | Q(\n skrot__iexact=funkcja_autora))\n\n\ndef matchuj_grupa_pracownicza(grupa_pracownicza: str) ->Grupa_Pracownicza:\n grupa_pracownicza = normalize_grupa_pracownicza(grupa_pracownicza)\n return Grupa_Pracownicza.objects.get(nazwa__iexact=grupa_pracownicza)\n\n\ndef matchuj_wymiar_etatu(wymiar_etatu: str) ->Wymiar_Etatu:\n wymiar_etatu = normalize_wymiar_etatu(wymiar_etatu)\n return Wymiar_Etatu.objects.get(nazwa__iexact=wymiar_etatu)\n\n\ndef matchuj_jednostke(nazwa, wydzial=None):\n nazwa = 
normalize_nazwa_jednostki(nazwa)\n try:\n return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(\n skrot__iexact=nazwa))\n except Jednostka.DoesNotExist:\n if nazwa.endswith('.'):\n nazwa = nazwa[:-1].strip()\n try:\n return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(\n skrot__istartswith=nazwa))\n except Jednostka.MultipleObjectsReturned as e:\n if wydzial is None:\n raise e\n return Jednostka.objects.get(Q(nazwa__istartswith=nazwa) | Q(\n skrot__istartswith=nazwa), Q(wydzial__nazwa__iexact=wydzial))\n except Jednostka.MultipleObjectsReturned as e:\n if wydzial is None:\n raise e\n return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(\n skrot__iexact=nazwa), Q(wydzial__nazwa__iexact=wydzial))\n\n\ndef matchuj_autora(imiona: str, nazwisko: str, jednostka: Union[Jednostka,\n None]=None, bpp_id: Union[int, None]=None, pbn_uid_id: Union[str, None]\n =None, system_kadrowy_id: Union[int, None]=None, pbn_id: Union[int,\n None]=None, orcid: Union[str, None]=None, tytul_str: Union[Tytul, None]\n =None):\n if bpp_id is not None:\n try:\n return Autor.objects.get(pk=bpp_id)\n except Autor.DoesNotExist:\n pass\n if orcid:\n try:\n return Autor.objects.get(orcid__iexact=orcid.strip())\n except Autor.DoesNotExist:\n pass\n if pbn_uid_id is not None and pbn_uid_id.strip() != '':\n _qset = Autor.objects.filter(pbn_uid_id=pbn_uid_id)\n if _qset.exists():\n return _qset.first()\n if system_kadrowy_id is not None:\n try:\n int(system_kadrowy_id)\n except (TypeError, ValueError):\n system_kadrowy_id = None\n if system_kadrowy_id is not None:\n try:\n return Autor.objects.get(system_kadrowy_id=system_kadrowy_id)\n except Autor.DoesNotExist:\n pass\n if pbn_id is not None:\n if isinstance(pbn_id, str):\n pbn_id = pbn_id.strip()\n try:\n pbn_id = int(pbn_id)\n except (TypeError, ValueError):\n pbn_id = None\n if pbn_id is not None:\n try:\n return Autor.objects.get(pbn_id=pbn_id)\n except Autor.DoesNotExist:\n pass\n queries = [Q(Q(nazwisko__iexact=nazwisko.strip()) | 
Q(\n poprzednie_nazwiska__icontains=nazwisko.strip()), imiona__iexact=\n imiona.strip())]\n if tytul_str:\n queries.append(queries[0] & Q(tytul__skrot=tytul_str))\n for qry in queries:\n try:\n return Autor.objects.get(qry)\n except (Autor.DoesNotExist, Autor.MultipleObjectsReturned):\n pass\n try:\n return Autor.objects.get(qry & Q(aktualna_jednostka=jednostka))\n except (Autor.MultipleObjectsReturned, Autor.DoesNotExist):\n pass\n if jednostka:\n queries = [Q(Q(autor__nazwisko__iexact=nazwisko.strip()) | Q(\n autor__poprzednie_nazwiska__icontains=nazwisko.strip()),\n autor__imiona__iexact=imiona.strip())]\n if tytul_str:\n queries.append(queries[0] & Q(autor__tytul__skrot=tytul_str))\n for qry in queries:\n try:\n return jednostka.autor_jednostka_set.get(qry).autor\n except (Autor_Jednostka.MultipleObjectsReturned,\n Autor_Jednostka.DoesNotExist):\n pass\n return None\n\n\ndef matchuj_zrodlo(s: Union[str, None], issn: Union[str, None]=None, e_issn:\n Union[str, None]=None, alt_nazwa=None) ->Union[None, Zrodlo]:\n if s is None or str(s) == '':\n return\n if issn is not None:\n try:\n return Zrodlo.objects.get(issn=issn)\n except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):\n pass\n if e_issn is not None:\n try:\n return Zrodlo.objects.get(e_issn=e_issn)\n except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):\n pass\n for elem in (s, alt_nazwa):\n if elem is None:\n continue\n elem = normalize_tytul_zrodla(elem)\n try:\n return Zrodlo.objects.get(Q(nazwa__iexact=elem) | Q(\n skrot__iexact=elem))\n except Zrodlo.MultipleObjectsReturned:\n pass\n except Zrodlo.DoesNotExist:\n if elem.endswith('.'):\n try:\n return Zrodlo.objects.get(Q(nazwa__istartswith=elem[:-1\n ]) | Q(skrot__istartswith=elem[:-1]))\n except Zrodlo.DoesNotExist:\n pass\n except Zrodlo.MultipleObjectsReturned:\n pass\n\n\ndef matchuj_dyscypline(kod, nazwa):\n nazwa = normalize_nazwa_dyscypliny(nazwa)\n try:\n return Dyscyplina_Naukowa.objects.get(nazwa=nazwa)\n except 
Dyscyplina_Naukowa.DoesNotExist:\n pass\n except Dyscyplina_Naukowa.MultipleObjectsReturned:\n pass\n kod = normalize_kod_dyscypliny(kod)\n try:\n return Dyscyplina_Naukowa.objects.get(kod=kod)\n except Dyscyplina_Naukowa.DoesNotExist:\n pass\n except Dyscyplina_Naukowa.MultipleObjectsReturned:\n pass\n\n\ndef matchuj_wydawce(nazwa, pbn_uid_id=None, similarity=0.9):\n nazwa = normalize_nazwa_wydawcy(nazwa)\n try:\n return Wydawca.objects.get(nazwa=nazwa, alias_dla_id=None)\n except Wydawca.DoesNotExist:\n pass\n if pbn_uid_id is not None:\n try:\n return Wydawca.objects.get(pbn_uid_id=pbn_uid_id)\n except Wydawca.DoesNotExist:\n pass\n loose = Wydawca.objects.annotate(similarity=TrigramSimilarity('nazwa',\n nazwa)).filter(similarity__gte=similarity).order_by('-similarity')[:5]\n if loose.count() > 0 and loose.count() < 2:\n return loose.first()\n\n\nTITLE_LIMIT_SINGLE_WORD = 15\nTITLE_LIMIT_MANY_WORDS = 25\nMATCH_SIMILARITY_THRESHOLD = 0.95\nMATCH_SIMILARITY_THRESHOLD_LOW = 0.9\nMATCH_SIMILARITY_THRESHOLD_VERY_LOW = 0.8\nnormalized_db_title = Trim(Replace(Replace(Lower('tytul_oryginalny'), Value\n (' [online]'), Value('')), Value(' '), Value(' ')))\nnormalized_db_zrodlo_skrot = Trim(Replace(Replace(Replace(Lower('skrot'),\n Value(' '), Value('')), Value('-'), Value('')), Value('.'), Value('')))\n\n\ndef normalize_zrodlo_skrot_for_db_lookup(s):\n return s.lower().replace(' ', '').strip().replace('-', '').replace('.', '')\n\n\nnormalized_db_zrodlo_nazwa = Trim(Replace(Lower('nazwa'), Value(' '), Value\n ('')))\n\n\ndef normalize_zrodlo_nazwa_for_db_lookup(s):\n return s.lower().replace(' ', '').strip()\n\n\nnormalized_db_isbn = Trim(Replace(Lower('isbn'), Value('-'), Value('')))\n\n\ndef matchuj_publikacje(klass: [Wydawnictwo_Zwarte, Wydawnictwo_Ciagle,\n Rekord], title, year, doi=None, public_uri=None, isbn=None, zrodlo=None,\n DEBUG_MATCHOWANIE=False, isbn_matchuj_tylko_nadrzedne=True,\n doi_matchuj_tylko_nadrzedne=True):\n if doi is not None:\n doi = 
normalize_doi(doi)\n if doi:\n zapytanie = klass.objects.filter(doi__istartswith=doi, rok=year)\n if doi_matchuj_tylko_nadrzedne:\n if hasattr(klass, 'wydawnictwo_nadrzedne_id'):\n zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)\n res = zapytanie.annotate(podobienstwo=TrigramSimilarity(\n normalized_db_title, title.lower())).order_by('-podobienstwo')[\n :2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first(\n ).podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:\n return res.first()\n title = normalize_tytul_publikacji(title)\n title_has_spaces = False\n if title is not None:\n title_has_spaces = title.find(' ') > 0\n if title is not None and (not title_has_spaces and len(title) >=\n TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=\n TITLE_LIMIT_MANY_WORDS):\n if zrodlo is not None and hasattr(klass, 'zrodlo'):\n try:\n return klass.objects.get(tytul_oryginalny__istartswith=\n title, rok=year, zrodlo=zrodlo)\n except klass.DoesNotExist:\n pass\n except klass.MultipleObjectsReturned:\n print(\n f'PPP ZZZ MultipleObjectsReturned dla title={title} rok={year} zrodlo={zrodlo}'\n )\n if isbn is not None and isbn != '' and hasattr(klass, 'isbn') and hasattr(\n klass, 'e_isbn'):\n ni = normalize_isbn(isbn)\n zapytanie = klass.objects.exclude(isbn=None, e_isbn=None).exclude(isbn\n ='', e_isbn='')\n if isbn_matchuj_tylko_nadrzedne:\n zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)\n if klass == Rekord:\n zapytanie = zapytanie.filter(pk__in=[(ContentType.objects.\n get_for_model(Wydawnictwo_Zwarte).pk, x) for x in\n Wydawnictwo_Zwarte.objects.\n wydawnictwa_nadrzedne_dla_innych()])\n elif klass == Wydawnictwo_Zwarte:\n zapytanie = zapytanie.filter(pk__in=Wydawnictwo_Zwarte.\n objects.wydawnictwa_nadrzedne_dla_innych())\n else:\n raise NotImplementedError(\n 'Matchowanie po ISBN dla czegoś innego niż wydawnictwo zwarte nie opracowane'\n )\n res = zapytanie.filter(Q(isbn=ni) | Q(e_isbn=ni)).annotate(podobienstwo\n 
=TrigramSimilarity(normalized_db_title, title.lower())).order_by(\n '-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:\n return res.first()\n public_uri = normalize_public_uri(public_uri)\n if public_uri:\n res = klass.objects.filter(Q(www=public_uri) | Q(public_www=public_uri)\n ).annotate(podobienstwo=TrigramSimilarity(normalized_db_title,\n title.lower())).order_by('-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:\n return res.first()\n if title is not None and (not title_has_spaces and len(title) >=\n TITLE_LIMIT_SINGLE_WORD or title_has_spaces and len(title) >=\n TITLE_LIMIT_MANY_WORDS):\n res = klass.objects.filter(tytul_oryginalny__istartswith=title, rok\n =year).annotate(podobienstwo=TrigramSimilarity(\n normalized_db_title, title.lower())).order_by('-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:\n return res.first()\n res = klass.objects.filter(rok=year).annotate(podobienstwo=\n TrigramSimilarity(normalized_db_title, title.lower())).order_by(\n '-podobienstwo')[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_LOW:\n return res.first()\n",
"step-5": "from typing import Union\n\nfrom django.db.models import Q, Value\nfrom django.db.models.functions import Lower, Replace, Trim\n\nfrom .normalization import (\n normalize_doi,\n normalize_funkcja_autora,\n normalize_grupa_pracownicza,\n normalize_isbn,\n normalize_kod_dyscypliny,\n normalize_nazwa_dyscypliny,\n normalize_nazwa_jednostki,\n normalize_nazwa_wydawcy,\n normalize_public_uri,\n normalize_tytul_naukowy,\n normalize_tytul_publikacji,\n normalize_tytul_zrodla,\n normalize_wymiar_etatu,\n)\n\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.contrib.postgres.search import TrigramSimilarity\n\nfrom bpp.models import (\n Autor,\n Autor_Jednostka,\n Dyscyplina_Naukowa,\n Funkcja_Autora,\n Grupa_Pracownicza,\n Jednostka,\n Rekord,\n Tytul,\n Wydawca,\n Wydawnictwo_Ciagle,\n Wydawnictwo_Zwarte,\n Wydzial,\n Wymiar_Etatu,\n Zrodlo,\n)\nfrom bpp.util import fail_if_seq_scan\n\n\ndef matchuj_wydzial(nazwa):\n try:\n return Wydzial.objects.get(nazwa__iexact=nazwa.strip())\n except Wydzial.DoesNotExist:\n pass\n\n\ndef matchuj_tytul(tytul: str, create_if_not_exist=False) -> Tytul:\n \"\"\"\n Dostaje tytuł: pełną nazwę albo skrót\n \"\"\"\n\n try:\n return Tytul.objects.get(nazwa__iexact=tytul)\n except (Tytul.DoesNotExist, Tytul.MultipleObjectsReturned):\n return Tytul.objects.get(skrot=normalize_tytul_naukowy(tytul))\n\n\ndef matchuj_funkcja_autora(funkcja_autora: str) -> Funkcja_Autora:\n funkcja_autora = normalize_funkcja_autora(funkcja_autora)\n return Funkcja_Autora.objects.get(\n Q(nazwa__iexact=funkcja_autora) | Q(skrot__iexact=funkcja_autora)\n )\n\n\ndef matchuj_grupa_pracownicza(grupa_pracownicza: str) -> Grupa_Pracownicza:\n grupa_pracownicza = normalize_grupa_pracownicza(grupa_pracownicza)\n return Grupa_Pracownicza.objects.get(nazwa__iexact=grupa_pracownicza)\n\n\ndef matchuj_wymiar_etatu(wymiar_etatu: str) -> Wymiar_Etatu:\n wymiar_etatu = normalize_wymiar_etatu(wymiar_etatu)\n return 
Wymiar_Etatu.objects.get(nazwa__iexact=wymiar_etatu)\n\n\ndef matchuj_jednostke(nazwa, wydzial=None):\n nazwa = normalize_nazwa_jednostki(nazwa)\n\n try:\n return Jednostka.objects.get(Q(nazwa__iexact=nazwa) | Q(skrot__iexact=nazwa))\n except Jednostka.DoesNotExist:\n if nazwa.endswith(\".\"):\n nazwa = nazwa[:-1].strip()\n\n try:\n return Jednostka.objects.get(\n Q(nazwa__istartswith=nazwa) | Q(skrot__istartswith=nazwa)\n )\n except Jednostka.MultipleObjectsReturned as e:\n if wydzial is None:\n raise e\n\n return Jednostka.objects.get(\n Q(nazwa__istartswith=nazwa) | Q(skrot__istartswith=nazwa),\n Q(wydzial__nazwa__iexact=wydzial),\n )\n\n except Jednostka.MultipleObjectsReturned as e:\n if wydzial is None:\n raise e\n\n return Jednostka.objects.get(\n Q(nazwa__iexact=nazwa) | Q(skrot__iexact=nazwa),\n Q(wydzial__nazwa__iexact=wydzial),\n )\n\n\ndef matchuj_autora(\n imiona: str,\n nazwisko: str,\n jednostka: Union[Jednostka, None] = None,\n bpp_id: Union[int, None] = None,\n pbn_uid_id: Union[str, None] = None,\n system_kadrowy_id: Union[int, None] = None,\n pbn_id: Union[int, None] = None,\n orcid: Union[str, None] = None,\n tytul_str: Union[Tytul, None] = None,\n):\n if bpp_id is not None:\n try:\n return Autor.objects.get(pk=bpp_id)\n except Autor.DoesNotExist:\n pass\n\n if orcid:\n try:\n return Autor.objects.get(orcid__iexact=orcid.strip())\n except Autor.DoesNotExist:\n pass\n\n if pbn_uid_id is not None and pbn_uid_id.strip() != \"\":\n # Może być > 1 autor z takim pbn_uid_id\n _qset = Autor.objects.filter(pbn_uid_id=pbn_uid_id)\n if _qset.exists():\n return _qset.first()\n\n if system_kadrowy_id is not None:\n try:\n int(system_kadrowy_id)\n except (TypeError, ValueError):\n system_kadrowy_id = None\n\n if system_kadrowy_id is not None:\n try:\n return Autor.objects.get(system_kadrowy_id=system_kadrowy_id)\n except Autor.DoesNotExist:\n pass\n\n if pbn_id is not None:\n if isinstance(pbn_id, str):\n pbn_id = pbn_id.strip()\n\n try:\n pbn_id = 
int(pbn_id)\n except (TypeError, ValueError):\n pbn_id = None\n\n if pbn_id is not None:\n try:\n return Autor.objects.get(pbn_id=pbn_id)\n except Autor.DoesNotExist:\n pass\n\n queries = [\n Q(\n Q(nazwisko__iexact=nazwisko.strip())\n | Q(poprzednie_nazwiska__icontains=nazwisko.strip()),\n imiona__iexact=imiona.strip(),\n )\n ]\n\n if tytul_str:\n queries.append(queries[0] & Q(tytul__skrot=tytul_str))\n\n for qry in queries:\n try:\n return Autor.objects.get(qry)\n except (Autor.DoesNotExist, Autor.MultipleObjectsReturned):\n pass\n\n try:\n return Autor.objects.get(qry & Q(aktualna_jednostka=jednostka))\n except (Autor.MultipleObjectsReturned, Autor.DoesNotExist):\n pass\n\n # Jesteśmy tutaj. Najwyraźniej poszukiwanie po aktualnej jednostce, imieniu, nazwisku,\n # tytule itp nie bardzo się powiodło. Spróbujmy innej strategii -- jednostka jest\n # określona, poszukajmy w jej autorach. Wszak nie musi być ta jednostka jednostką\n # aktualną...\n\n if jednostka:\n\n queries = [\n Q(\n Q(autor__nazwisko__iexact=nazwisko.strip())\n | Q(autor__poprzednie_nazwiska__icontains=nazwisko.strip()),\n autor__imiona__iexact=imiona.strip(),\n )\n ]\n if tytul_str:\n queries.append(queries[0] & Q(autor__tytul__skrot=tytul_str))\n\n for qry in queries:\n try:\n return jednostka.autor_jednostka_set.get(qry).autor\n except (\n Autor_Jednostka.MultipleObjectsReturned,\n Autor_Jednostka.DoesNotExist,\n ):\n pass\n\n return None\n\n\ndef matchuj_zrodlo(\n s: Union[str, None],\n issn: Union[str, None] = None,\n e_issn: Union[str, None] = None,\n alt_nazwa=None,\n) -> Union[None, Zrodlo]:\n if s is None or str(s) == \"\":\n return\n\n if issn is not None:\n try:\n return Zrodlo.objects.get(issn=issn)\n except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):\n pass\n\n if e_issn is not None:\n try:\n return Zrodlo.objects.get(e_issn=e_issn)\n except (Zrodlo.DoesNotExist, Zrodlo.MultipleObjectsReturned):\n pass\n\n for elem in s, alt_nazwa:\n if elem is None:\n continue\n\n elem = 
normalize_tytul_zrodla(elem)\n try:\n return Zrodlo.objects.get(Q(nazwa__iexact=elem) | Q(skrot__iexact=elem))\n except Zrodlo.MultipleObjectsReturned:\n pass\n except Zrodlo.DoesNotExist:\n if elem.endswith(\".\"):\n try:\n return Zrodlo.objects.get(\n Q(nazwa__istartswith=elem[:-1])\n | Q(skrot__istartswith=elem[:-1])\n )\n except Zrodlo.DoesNotExist:\n pass\n except Zrodlo.MultipleObjectsReturned:\n pass\n\n\ndef matchuj_dyscypline(kod, nazwa):\n nazwa = normalize_nazwa_dyscypliny(nazwa)\n try:\n return Dyscyplina_Naukowa.objects.get(nazwa=nazwa)\n except Dyscyplina_Naukowa.DoesNotExist:\n pass\n except Dyscyplina_Naukowa.MultipleObjectsReturned:\n pass\n\n kod = normalize_kod_dyscypliny(kod)\n try:\n return Dyscyplina_Naukowa.objects.get(kod=kod)\n except Dyscyplina_Naukowa.DoesNotExist:\n pass\n except Dyscyplina_Naukowa.MultipleObjectsReturned:\n pass\n\n\ndef matchuj_wydawce(nazwa, pbn_uid_id=None, similarity=0.9):\n nazwa = normalize_nazwa_wydawcy(nazwa)\n try:\n return Wydawca.objects.get(nazwa=nazwa, alias_dla_id=None)\n except Wydawca.DoesNotExist:\n pass\n\n if pbn_uid_id is not None:\n\n try:\n return Wydawca.objects.get(pbn_uid_id=pbn_uid_id)\n except Wydawca.DoesNotExist:\n pass\n\n loose = (\n Wydawca.objects.annotate(similarity=TrigramSimilarity(\"nazwa\", nazwa))\n .filter(similarity__gte=similarity)\n .order_by(\"-similarity\")[:5]\n )\n if loose.count() > 0 and loose.count() < 2:\n return loose.first()\n\n\nTITLE_LIMIT_SINGLE_WORD = 15\nTITLE_LIMIT_MANY_WORDS = 25\n\nMATCH_SIMILARITY_THRESHOLD = 0.95\nMATCH_SIMILARITY_THRESHOLD_LOW = 0.90\nMATCH_SIMILARITY_THRESHOLD_VERY_LOW = 0.80\n\n# Znormalizowany tytuł w bazie danych -- wyrzucony ciąg znaków [online], podwójne\n# spacje pozamieniane na pojedyncze, trim całości\nnormalized_db_title = Trim(\n Replace(\n Replace(Lower(\"tytul_oryginalny\"), Value(\" [online]\"), Value(\"\")),\n Value(\" \"),\n Value(\" \"),\n )\n)\n\n# Znormalizowany skrót nazwy źródła -- wyrzucone spacje i kropki, trim, 
zmniejszone\n# znaki\nnormalized_db_zrodlo_skrot = Trim(\n Replace(\n Replace(\n Replace(Lower(\"skrot\"), Value(\" \"), Value(\"\")),\n Value(\"-\"),\n Value(\"\"),\n ),\n Value(\".\"),\n Value(\"\"),\n )\n)\n\n\ndef normalize_zrodlo_skrot_for_db_lookup(s):\n return s.lower().replace(\" \", \"\").strip().replace(\"-\", \"\").replace(\".\", \"\")\n\n\n# Znormalizowany skrot zrodla do wyszukiwania -- wyrzucone wszystko procz kropek\nnormalized_db_zrodlo_nazwa = Trim(\n Replace(Lower(\"nazwa\"), Value(\" \"), Value(\"\")),\n)\n\n\ndef normalize_zrodlo_nazwa_for_db_lookup(s):\n return s.lower().replace(\" \", \"\").strip()\n\n\nnormalized_db_isbn = Trim(Replace(Lower(\"isbn\"), Value(\"-\"), Value(\"\")))\n\n\ndef matchuj_publikacje(\n klass: [Wydawnictwo_Zwarte, Wydawnictwo_Ciagle, Rekord],\n title,\n year,\n doi=None,\n public_uri=None,\n isbn=None,\n zrodlo=None,\n DEBUG_MATCHOWANIE=False,\n isbn_matchuj_tylko_nadrzedne=True,\n doi_matchuj_tylko_nadrzedne=True,\n):\n\n if doi is not None:\n doi = normalize_doi(doi)\n if doi:\n zapytanie = klass.objects.filter(doi__istartswith=doi, rok=year)\n\n if doi_matchuj_tylko_nadrzedne:\n if hasattr(klass, \"wydawnictwo_nadrzedne_id\"):\n zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)\n\n res = zapytanie.annotate(\n podobienstwo=TrigramSimilarity(normalized_db_title, title.lower())\n ).order_by(\"-podobienstwo\")[:2]\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:\n return res.first()\n\n title = normalize_tytul_publikacji(title)\n\n title_has_spaces = False\n\n if title is not None:\n title_has_spaces = title.find(\" \") > 0\n\n if title is not None and (\n (not title_has_spaces and len(title) >= TITLE_LIMIT_SINGLE_WORD)\n or (title_has_spaces and len(title) >= TITLE_LIMIT_MANY_WORDS)\n ):\n if zrodlo is not None and hasattr(klass, \"zrodlo\"):\n try:\n return klass.objects.get(\n tytul_oryginalny__istartswith=title, rok=year, 
zrodlo=zrodlo\n )\n except klass.DoesNotExist:\n pass\n except klass.MultipleObjectsReturned:\n print(\n f\"PPP ZZZ MultipleObjectsReturned dla title={title} rok={year} zrodlo={zrodlo}\"\n )\n\n if (\n isbn is not None\n and isbn != \"\"\n and hasattr(klass, \"isbn\")\n and hasattr(klass, \"e_isbn\")\n ):\n ni = normalize_isbn(isbn)\n\n zapytanie = klass.objects.exclude(isbn=None, e_isbn=None).exclude(\n isbn=\"\", e_isbn=\"\"\n )\n\n if isbn_matchuj_tylko_nadrzedne:\n zapytanie = zapytanie.filter(wydawnictwo_nadrzedne_id=None)\n\n if klass == Rekord:\n zapytanie = zapytanie.filter(\n pk__in=[\n (ContentType.objects.get_for_model(Wydawnictwo_Zwarte).pk, x)\n for x in Wydawnictwo_Zwarte.objects.wydawnictwa_nadrzedne_dla_innych()\n ]\n )\n elif klass == Wydawnictwo_Zwarte:\n zapytanie = zapytanie.filter(\n pk__in=Wydawnictwo_Zwarte.objects.wydawnictwa_nadrzedne_dla_innych()\n )\n else:\n raise NotImplementedError(\n \"Matchowanie po ISBN dla czegoś innego niż wydawnictwo zwarte nie opracowane\"\n )\n\n #\n # Uwaga uwaga uwaga.\n #\n # Gdy matchujemy ISBN, to w BPP dochodzi do takiej nieciekawej sytuacji: wpisywany jest\n # ISBN zarówno dla rozdziałów jak i dla wydawnictw nadrzędnych.\n #\n # Zatem, na ten moment, aby usprawnić matchowanie ISBN, jeżeli ustawiona jest flaga\n # isbn_matchuj_tylko_nadrzedne, to system bedzie szukał tylko i wyłącznie wśród\n # rekordów będących wydawnictwami nadrzędnymi (czyli nie mającymi rekordów podrzędnych)\n #\n\n res = (\n zapytanie.filter(Q(isbn=ni) | Q(e_isbn=ni))\n .annotate(\n podobienstwo=TrigramSimilarity(\n normalized_db_title,\n title.lower(),\n )\n )\n .order_by(\"-podobienstwo\")[:2]\n )\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_VERY_LOW:\n return res.first()\n\n public_uri = normalize_public_uri(public_uri)\n if public_uri:\n res = (\n klass.objects.filter(Q(www=public_uri) | Q(public_www=public_uri))\n .annotate(\n 
podobienstwo=TrigramSimilarity(normalized_db_title, title.lower())\n )\n .order_by(\"-podobienstwo\")[:2]\n )\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:\n return res.first()\n\n if title is not None and (\n (not title_has_spaces and len(title) >= TITLE_LIMIT_SINGLE_WORD)\n or (title_has_spaces and len(title) >= TITLE_LIMIT_MANY_WORDS)\n ):\n res = (\n klass.objects.filter(tytul_oryginalny__istartswith=title, rok=year)\n .annotate(\n podobienstwo=TrigramSimilarity(normalized_db_title, title.lower())\n )\n .order_by(\"-podobienstwo\")[:2]\n )\n\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD:\n return res.first()\n\n # Ostatnia szansa, po podobieństwie, niski próg\n\n res = (\n klass.objects.filter(rok=year)\n .annotate(\n podobienstwo=TrigramSimilarity(normalized_db_title, title.lower())\n )\n .order_by(\"-podobienstwo\")[:2]\n )\n\n fail_if_seq_scan(res, DEBUG_MATCHOWANIE)\n if res.exists():\n if res.first().podobienstwo >= MATCH_SIMILARITY_THRESHOLD_LOW:\n return res.first()\n",
"step-ids": [
11,
12,
13,
15,
16
]
}
|
[
11,
12,
13,
15,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
b.drop(b.columns[b.columns.str.contains('unnamed', case=False)], axis=1,
inplace=True)
b.to_csv('Cleaned_dataset.csv', index=False)
<|reserved_special_token_0|>
report.show_html()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
b = pd.read_csv('final_cricket_players.csv', low_memory=False)
b = b.replace(to_replace='-', value='')
b = b.replace(to_replace='[]', value='')
b = b.replace(to_replace='{}', value='')
b.drop(b.columns[b.columns.str.contains('unnamed', case=False)], axis=1,
inplace=True)
b.to_csv('Cleaned_dataset.csv', index=False)
report = sv.analyze(b, pairwise_analysis='off')
report.show_html()
<|reserved_special_token_1|>
import pandas as pd
import sweetviz as sv
b = pd.read_csv('final_cricket_players.csv', low_memory=False)
b = b.replace(to_replace='-', value='')
b = b.replace(to_replace='[]', value='')
b = b.replace(to_replace='{}', value='')
b.drop(b.columns[b.columns.str.contains('unnamed', case=False)], axis=1,
inplace=True)
b.to_csv('Cleaned_dataset.csv', index=False)
report = sv.analyze(b, pairwise_analysis='off')
report.show_html()
<|reserved_special_token_1|>
import pandas as pd
import sweetviz as sv
b = pd.read_csv("final_cricket_players.csv", low_memory=False)
b = b.replace(to_replace="-",value="")
b = b.replace(to_replace="[]",value="")
b = b.replace(to_replace="{}",value="")
b.drop(b.columns[b.columns.str.contains('unnamed',case = False)],axis = 1, inplace = True)
b.to_csv('Cleaned_dataset.csv', index=False)
report = sv.analyze(b, pairwise_analysis='off')
report.show_html()
|
flexible
|
{
"blob_id": "f93b7f2939bbee9b0cb5402d3e5f5d6c482d37c4",
"index": 6983,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nb.drop(b.columns[b.columns.str.contains('unnamed', case=False)], axis=1,\n inplace=True)\nb.to_csv('Cleaned_dataset.csv', index=False)\n<mask token>\nreport.show_html()\n",
"step-3": "<mask token>\nb = pd.read_csv('final_cricket_players.csv', low_memory=False)\nb = b.replace(to_replace='-', value='')\nb = b.replace(to_replace='[]', value='')\nb = b.replace(to_replace='{}', value='')\nb.drop(b.columns[b.columns.str.contains('unnamed', case=False)], axis=1,\n inplace=True)\nb.to_csv('Cleaned_dataset.csv', index=False)\nreport = sv.analyze(b, pairwise_analysis='off')\nreport.show_html()\n",
"step-4": "import pandas as pd\nimport sweetviz as sv\nb = pd.read_csv('final_cricket_players.csv', low_memory=False)\nb = b.replace(to_replace='-', value='')\nb = b.replace(to_replace='[]', value='')\nb = b.replace(to_replace='{}', value='')\nb.drop(b.columns[b.columns.str.contains('unnamed', case=False)], axis=1,\n inplace=True)\nb.to_csv('Cleaned_dataset.csv', index=False)\nreport = sv.analyze(b, pairwise_analysis='off')\nreport.show_html()\n",
"step-5": "import pandas as pd\r\nimport sweetviz as sv\r\nb = pd.read_csv(\"final_cricket_players.csv\", low_memory=False)\r\nb = b.replace(to_replace=\"-\",value=\"\")\r\nb = b.replace(to_replace=\"[]\",value=\"\")\r\nb = b.replace(to_replace=\"{}\",value=\"\")\r\n\r\nb.drop(b.columns[b.columns.str.contains('unnamed',case = False)],axis = 1, inplace = True)\r\nb.to_csv('Cleaned_dataset.csv', index=False)\r\nreport = sv.analyze(b, pairwise_analysis='off')\r\nreport.show_html()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for index, elements in enumerate(a):
if elements == 5:
b.append(index)
print(b)
<|reserved_special_token_1|>
a = 5, 1, 3, 5, 3, 1, 0, 9, 5, 3, 8, 6, 5, 7
b = []
for index, elements in enumerate(a):
if elements == 5:
b.append(index)
print(b)
<|reserved_special_token_1|>
a = (5, 1, 3, 5, 3, 1, 0, 9, 5, 3, 8, 6, 5, 7)
b = []
for index, elements in enumerate (a):
if elements == 5:
b.append(index)
print(b)
|
flexible
|
{
"blob_id": "d7876a078af8572e44b4eb16f3ec0898db73724d",
"index": 2118,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor index, elements in enumerate(a):\n if elements == 5:\n b.append(index)\nprint(b)\n",
"step-3": "a = 5, 1, 3, 5, 3, 1, 0, 9, 5, 3, 8, 6, 5, 7\nb = []\nfor index, elements in enumerate(a):\n if elements == 5:\n b.append(index)\nprint(b)\n",
"step-4": "a = (5, 1, 3, 5, 3, 1, 0, 9, 5, 3, 8, 6, 5, 7)\r\nb = []\r\nfor index, elements in enumerate (a):\r\n if elements == 5:\r\n b.append(index)\r\nprint(b)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
c.execute(q)
<|reserved_special_token_0|>
c.execute(q)
<|reserved_special_token_0|>
c.execute(q)
conn.commit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
conn = sqlite3.connect('blog.db')
c = conn.cursor()
q = 'CREATE TABLE users(Username text, Password text, UserID integer)'
c.execute(q)
q = (
'CREATE TABLE blogs(Title text, Content text, BlogID integer, UserID integer)'
)
c.execute(q)
q = (
'CREATE TABLE comments(Content text, CommentID integer, BlogID integer, UserID integer)'
)
c.execute(q)
conn.commit()
<|reserved_special_token_1|>
import sqlite3
conn = sqlite3.connect('blog.db')
c = conn.cursor()
q = 'CREATE TABLE users(Username text, Password text, UserID integer)'
c.execute(q)
q = (
'CREATE TABLE blogs(Title text, Content text, BlogID integer, UserID integer)'
)
c.execute(q)
q = (
'CREATE TABLE comments(Content text, CommentID integer, BlogID integer, UserID integer)'
)
c.execute(q)
conn.commit()
<|reserved_special_token_1|>
import sqlite3
conn = sqlite3.connect("blog.db")
c = conn.cursor()
q = "CREATE TABLE users(Username text, Password text, UserID integer)"
c.execute(q)
q = "CREATE TABLE blogs(Title text, Content text, BlogID integer, UserID integer)"
c.execute(q)
q = "CREATE TABLE comments(Content text, CommentID integer, BlogID integer, UserID integer)"
c.execute(q)
conn.commit()
|
flexible
|
{
"blob_id": "8afaa69d3a20c5e39e6321869f25dbd9020a5b3a",
"index": 2460,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nc.execute(q)\n<mask token>\nc.execute(q)\n<mask token>\nc.execute(q)\nconn.commit()\n",
"step-3": "<mask token>\nconn = sqlite3.connect('blog.db')\nc = conn.cursor()\nq = 'CREATE TABLE users(Username text, Password text, UserID integer)'\nc.execute(q)\nq = (\n 'CREATE TABLE blogs(Title text, Content text, BlogID integer, UserID integer)'\n )\nc.execute(q)\nq = (\n 'CREATE TABLE comments(Content text, CommentID integer, BlogID integer, UserID integer)'\n )\nc.execute(q)\nconn.commit()\n",
"step-4": "import sqlite3\nconn = sqlite3.connect('blog.db')\nc = conn.cursor()\nq = 'CREATE TABLE users(Username text, Password text, UserID integer)'\nc.execute(q)\nq = (\n 'CREATE TABLE blogs(Title text, Content text, BlogID integer, UserID integer)'\n )\nc.execute(q)\nq = (\n 'CREATE TABLE comments(Content text, CommentID integer, BlogID integer, UserID integer)'\n )\nc.execute(q)\nconn.commit()\n",
"step-5": "import sqlite3\n\nconn = sqlite3.connect(\"blog.db\")\n\nc = conn.cursor()\n\nq = \"CREATE TABLE users(Username text, Password text, UserID integer)\"\nc.execute(q)\n\nq = \"CREATE TABLE blogs(Title text, Content text, BlogID integer, UserID integer)\"\nc.execute(q)\n\nq = \"CREATE TABLE comments(Content text, CommentID integer, BlogID integer, UserID integer)\"\nc.execute(q)\n\nconn.commit() \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#Calculadora mediante el terminal
numero1 = 0
numero2 = 0
#Preguntamos los valores
operacion = input("¿Qué operación quiere realizar (Suma / Resta / Division / Multiplicacion)?: ").upper()
numero1 = int(input("Introduzca el valor 1: "))
numero2 = int(input("Introduzca el valor 2: "))
#Realizamos las operaciones
if operacion == "SUMA":
resultado = numero1 + numero2
elif operacion == "RESTA":
resultado = numero1 - numero2
elif operacion == "DIVISION":
resultado = numero1 / numero2
elif operacion == "MULTIPLICACION":
resultado = numero1 * numero2
#Mostramos en pantalla el resultado
print("Resultado : {}".format(resultado))
|
normal
|
{
"blob_id": "5d618acc0962447554807cbb9d3546cd4e0b3572",
"index": 3005,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif operacion == 'SUMA':\n resultado = numero1 + numero2\nelif operacion == 'RESTA':\n resultado = numero1 - numero2\nelif operacion == 'DIVISION':\n resultado = numero1 / numero2\nelif operacion == 'MULTIPLICACION':\n resultado = numero1 * numero2\nprint('Resultado : {}'.format(resultado))\n",
"step-3": "numero1 = 0\nnumero2 = 0\noperacion = input(\n '¿Qué operación quiere realizar (Suma / Resta / Division / Multiplicacion)?: '\n ).upper()\nnumero1 = int(input('Introduzca el valor 1: '))\nnumero2 = int(input('Introduzca el valor 2: '))\nif operacion == 'SUMA':\n resultado = numero1 + numero2\nelif operacion == 'RESTA':\n resultado = numero1 - numero2\nelif operacion == 'DIVISION':\n resultado = numero1 / numero2\nelif operacion == 'MULTIPLICACION':\n resultado = numero1 * numero2\nprint('Resultado : {}'.format(resultado))\n",
"step-4": "#Calculadora mediante el terminal\n\nnumero1 = 0\nnumero2 = 0\n\n\n#Preguntamos los valores\n\noperacion = input(\"¿Qué operación quiere realizar (Suma / Resta / Division / Multiplicacion)?: \").upper()\n\nnumero1 = int(input(\"Introduzca el valor 1: \"))\nnumero2 = int(input(\"Introduzca el valor 2: \"))\n\n\n#Realizamos las operaciones\nif operacion == \"SUMA\":\n resultado = numero1 + numero2\nelif operacion == \"RESTA\":\n resultado = numero1 - numero2\nelif operacion == \"DIVISION\":\n resultado = numero1 / numero2\nelif operacion == \"MULTIPLICACION\":\n resultado = numero1 * numero2\n\n\n#Mostramos en pantalla el resultado\nprint(\"Resultado : {}\".format(resultado))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from .menu import menu
from .create_portfolio import create_portfolio
from .search import search
from .list_assets import list_assets
from .add_transaction import add_transaction
from .stats import stats
from .info import info
|
flexible
|
{
"blob_id": "f2abb7ea3426e37a10e139d83c33011542e0b3d1",
"index": 3863,
"step-1": "<mask token>\n",
"step-2": "from .menu import menu\nfrom .create_portfolio import create_portfolio\nfrom .search import search\nfrom .list_assets import list_assets\nfrom .add_transaction import add_transaction\nfrom .stats import stats\nfrom .info import info\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import tkinter
import webbrowser
ventana = tkinter.Tk()
ventana.geometry("1920x1080")
def test():
webbrowser.open_new_tab('Test.html')
boton1 = tkinter.Button(ventana,text ="WEB", width = 10, height=5, command = test );
boton2 = tkinter.Button(ventana,text ="boton2", width = 10, height=5);
boton3 = tkinter.Button(ventana,text ="boton3", width = 10, height=5);
boton1.grid(row = 3, column = 0)
boton2.grid(row = 4, column = 0)
boton3.grid(row = 5, column = 0)
ventana.mainloop()
|
normal
|
{
"blob_id": "8bf330dc7bee65ac9478722233477ebe5d0286c2",
"index": 1102,
"step-1": "<mask token>\n\n\ndef test():\n webbrowser.open_new_tab('Test.html')\n\n\n<mask token>\n",
"step-2": "<mask token>\nventana.geometry('1920x1080')\n\n\ndef test():\n webbrowser.open_new_tab('Test.html')\n\n\n<mask token>\nboton1.grid(row=3, column=0)\nboton2.grid(row=4, column=0)\nboton3.grid(row=5, column=0)\nventana.mainloop()\n",
"step-3": "<mask token>\nventana = tkinter.Tk()\nventana.geometry('1920x1080')\n\n\ndef test():\n webbrowser.open_new_tab('Test.html')\n\n\nboton1 = tkinter.Button(ventana, text='WEB', width=10, height=5, command=test)\nboton2 = tkinter.Button(ventana, text='boton2', width=10, height=5)\nboton3 = tkinter.Button(ventana, text='boton3', width=10, height=5)\nboton1.grid(row=3, column=0)\nboton2.grid(row=4, column=0)\nboton3.grid(row=5, column=0)\nventana.mainloop()\n",
"step-4": "import tkinter\nimport webbrowser\nventana = tkinter.Tk()\nventana.geometry('1920x1080')\n\n\ndef test():\n webbrowser.open_new_tab('Test.html')\n\n\nboton1 = tkinter.Button(ventana, text='WEB', width=10, height=5, command=test)\nboton2 = tkinter.Button(ventana, text='boton2', width=10, height=5)\nboton3 = tkinter.Button(ventana, text='boton3', width=10, height=5)\nboton1.grid(row=3, column=0)\nboton2.grid(row=4, column=0)\nboton3.grid(row=5, column=0)\nventana.mainloop()\n",
"step-5": "import tkinter\r\nimport webbrowser\r\nventana = tkinter.Tk()\r\nventana.geometry(\"1920x1080\")\r\n\r\ndef test():\r\n webbrowser.open_new_tab('Test.html')\r\n\r\nboton1 = tkinter.Button(ventana,text =\"WEB\", width = 10, height=5, command = test );\r\nboton2 = tkinter.Button(ventana,text =\"boton2\", width = 10, height=5);\r\nboton3 = tkinter.Button(ventana,text =\"boton3\", width = 10, height=5);\r\n\r\n\r\nboton1.grid(row = 3, column = 0)\r\nboton2.grid(row = 4, column = 0)\r\nboton3.grid(row = 5, column = 0)\r\n\r\nventana.mainloop()\r\n\r\n\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from jaqsmds.server.repliers.basic import RegularReplier
from jaqsmds.server.repliers.handlers import JsetHandler, JsdHandler, JsiHandler
from queue import Queue, Empty
from threading import Thread
import logging
class FreeReplier(RegularReplier):
def __init__(self):
super(FreeReplier, self).__init__()
self.jset = JsetHandler()
self.jsd = JsdHandler()
self.jsi = JsiHandler()
self.methods["jset.query"] = self.jset.handle
self.methods["jsd.query"] = self.jsd.handle
self.methods["jsi.query"] = self.jsi.handle
self.input = Queue()
self.output = Queue()
self._running = False
self.thread = Thread(target=self.run)
def run(self):
while self._running or self.input.qsize():
try:
client, message = self.input.get(timeout=2)
except Empty:
continue
result = self.handle(message)
self.output.put([client, result])
def start(self):
self._running = True
self.thread.start()
def stop(self):
self._running = False
self.thread.join()
@property
def unfinished(self):
return self.input.qsize() + self.output.qsize()
def put(self, client, message):
if message.get("method", None) == ".sys.heartbeat":
return self.methods[".sys.heartbeat"](message)
else:
self.input.put([client, message])
logging.debug("queue size | %s", self.input.qsize())
def get_output(self, timeout=0.001):
return self.output.get(timeout=timeout)
|
normal
|
{
"blob_id": "42ebd42801b7d1563c9f204f296afba5fa3c6d3c",
"index": 1592,
"step-1": "<mask token>\n\n\nclass FreeReplier(RegularReplier):\n <mask token>\n\n def run(self):\n while self._running or self.input.qsize():\n try:\n client, message = self.input.get(timeout=2)\n except Empty:\n continue\n result = self.handle(message)\n self.output.put([client, result])\n <mask token>\n <mask token>\n\n @property\n def unfinished(self):\n return self.input.qsize() + self.output.qsize()\n\n def put(self, client, message):\n if message.get('method', None) == '.sys.heartbeat':\n return self.methods['.sys.heartbeat'](message)\n else:\n self.input.put([client, message])\n logging.debug('queue size | %s', self.input.qsize())\n\n def get_output(self, timeout=0.001):\n return self.output.get(timeout=timeout)\n",
"step-2": "<mask token>\n\n\nclass FreeReplier(RegularReplier):\n\n def __init__(self):\n super(FreeReplier, self).__init__()\n self.jset = JsetHandler()\n self.jsd = JsdHandler()\n self.jsi = JsiHandler()\n self.methods['jset.query'] = self.jset.handle\n self.methods['jsd.query'] = self.jsd.handle\n self.methods['jsi.query'] = self.jsi.handle\n self.input = Queue()\n self.output = Queue()\n self._running = False\n self.thread = Thread(target=self.run)\n\n def run(self):\n while self._running or self.input.qsize():\n try:\n client, message = self.input.get(timeout=2)\n except Empty:\n continue\n result = self.handle(message)\n self.output.put([client, result])\n <mask token>\n <mask token>\n\n @property\n def unfinished(self):\n return self.input.qsize() + self.output.qsize()\n\n def put(self, client, message):\n if message.get('method', None) == '.sys.heartbeat':\n return self.methods['.sys.heartbeat'](message)\n else:\n self.input.put([client, message])\n logging.debug('queue size | %s', self.input.qsize())\n\n def get_output(self, timeout=0.001):\n return self.output.get(timeout=timeout)\n",
"step-3": "<mask token>\n\n\nclass FreeReplier(RegularReplier):\n\n def __init__(self):\n super(FreeReplier, self).__init__()\n self.jset = JsetHandler()\n self.jsd = JsdHandler()\n self.jsi = JsiHandler()\n self.methods['jset.query'] = self.jset.handle\n self.methods['jsd.query'] = self.jsd.handle\n self.methods['jsi.query'] = self.jsi.handle\n self.input = Queue()\n self.output = Queue()\n self._running = False\n self.thread = Thread(target=self.run)\n\n def run(self):\n while self._running or self.input.qsize():\n try:\n client, message = self.input.get(timeout=2)\n except Empty:\n continue\n result = self.handle(message)\n self.output.put([client, result])\n <mask token>\n\n def stop(self):\n self._running = False\n self.thread.join()\n\n @property\n def unfinished(self):\n return self.input.qsize() + self.output.qsize()\n\n def put(self, client, message):\n if message.get('method', None) == '.sys.heartbeat':\n return self.methods['.sys.heartbeat'](message)\n else:\n self.input.put([client, message])\n logging.debug('queue size | %s', self.input.qsize())\n\n def get_output(self, timeout=0.001):\n return self.output.get(timeout=timeout)\n",
"step-4": "<mask token>\n\n\nclass FreeReplier(RegularReplier):\n\n def __init__(self):\n super(FreeReplier, self).__init__()\n self.jset = JsetHandler()\n self.jsd = JsdHandler()\n self.jsi = JsiHandler()\n self.methods['jset.query'] = self.jset.handle\n self.methods['jsd.query'] = self.jsd.handle\n self.methods['jsi.query'] = self.jsi.handle\n self.input = Queue()\n self.output = Queue()\n self._running = False\n self.thread = Thread(target=self.run)\n\n def run(self):\n while self._running or self.input.qsize():\n try:\n client, message = self.input.get(timeout=2)\n except Empty:\n continue\n result = self.handle(message)\n self.output.put([client, result])\n\n def start(self):\n self._running = True\n self.thread.start()\n\n def stop(self):\n self._running = False\n self.thread.join()\n\n @property\n def unfinished(self):\n return self.input.qsize() + self.output.qsize()\n\n def put(self, client, message):\n if message.get('method', None) == '.sys.heartbeat':\n return self.methods['.sys.heartbeat'](message)\n else:\n self.input.put([client, message])\n logging.debug('queue size | %s', self.input.qsize())\n\n def get_output(self, timeout=0.001):\n return self.output.get(timeout=timeout)\n",
"step-5": "from jaqsmds.server.repliers.basic import RegularReplier\nfrom jaqsmds.server.repliers.handlers import JsetHandler, JsdHandler, JsiHandler\nfrom queue import Queue, Empty\nfrom threading import Thread\nimport logging\n\n\nclass FreeReplier(RegularReplier):\n\n def __init__(self):\n super(FreeReplier, self).__init__()\n self.jset = JsetHandler()\n self.jsd = JsdHandler()\n self.jsi = JsiHandler()\n self.methods[\"jset.query\"] = self.jset.handle\n self.methods[\"jsd.query\"] = self.jsd.handle\n self.methods[\"jsi.query\"] = self.jsi.handle\n self.input = Queue()\n self.output = Queue()\n self._running = False\n self.thread = Thread(target=self.run)\n\n def run(self):\n while self._running or self.input.qsize():\n try:\n client, message = self.input.get(timeout=2)\n except Empty:\n continue\n \n result = self.handle(message)\n self.output.put([client, result])\n\n def start(self):\n self._running = True\n self.thread.start()\n\n def stop(self):\n self._running = False\n self.thread.join() \n\n @property\n def unfinished(self):\n return self.input.qsize() + self.output.qsize()\n\n def put(self, client, message):\n if message.get(\"method\", None) == \".sys.heartbeat\":\n return self.methods[\".sys.heartbeat\"](message)\n else:\n self.input.put([client, message])\n logging.debug(\"queue size | %s\", self.input.qsize())\n \n def get_output(self, timeout=0.001):\n return self.output.get(timeout=timeout)",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
class product(object):
def __init__(self, item_name, price, weight, brand, status = "for sale"):
self.item_name = item_name
self.price = price
self.weight = weight
self.brand = brand
self.cost = price
self.status = status
self.displayInfo()
def displayInfo(self):
print "Item name:", self.item_name
print "Price:", self.price
print "Weight:", self.weight
print "Brand:", self.brand
print "Cost:", self.cost
print "Status:", self.status
return self
def sell(self):
self.status = "Sold"
return self
def addTax(self, num):
self.cost = self.cost * (1+num)
return self
def Return(self, reason):
if reason == "Defective":
self.cost = 0
self.status = reason
elif reason == "Opened":
self.cost = self.cost * 0.80
self.status = "for sale"
elif reason == "Box":
self.status = "for sale"
return self
print "add items to inv"
product1 = product("Kona Dew", 499, 1.2, "Kona")
product2 = product("Kona Dew Plus", 799, 1.5, "Kona")
product3 = product("Kona Dr.Dew", 999, 1.2, "Kona")
product1.addTax(0.10)
product2.addTax(0.15)
product3.addTax(0.11)
print "add tax"
product1.displayInfo()
product2.displayInfo()
product3.displayInfo()
product1.sell()
product2.sell()
product3.sell()
print "sell items"
product1.displayInfo()
product2.displayInfo()
product3.displayInfo()
product1.Return("Defective")
product2.Return("Box")
product3.Return("Opened")
print "return items"
product1.displayInfo()
product2.displayInfo()
product3.displayInfo()
|
normal
|
{
"blob_id": "303d56c18cce922ace45de1b8e195ebfdd874e23",
"index": 7394,
"step-1": "class product(object):\n def __init__(self, item_name, price, weight, brand, status = \"for sale\"):\n self.item_name = item_name\n self.price = price\n self.weight = weight\n self.brand = brand\n self.cost = price\n self.status = status\n self.displayInfo()\n def displayInfo(self):\n print \"Item name:\", self.item_name\n print \"Price:\", self.price\n print \"Weight:\", self.weight\n print \"Brand:\", self.brand\n print \"Cost:\", self.cost\n print \"Status:\", self.status\n return self\n def sell(self):\n self.status = \"Sold\"\n return self\n def addTax(self, num):\n self.cost = self.cost * (1+num)\n return self\n def Return(self, reason):\n if reason == \"Defective\":\n self.cost = 0\n self.status = reason\n elif reason == \"Opened\":\n self.cost = self.cost * 0.80\n self.status = \"for sale\"\n elif reason == \"Box\":\n self.status = \"for sale\"\n return self\nprint \"add items to inv\"\nproduct1 = product(\"Kona Dew\", 499, 1.2, \"Kona\")\nproduct2 = product(\"Kona Dew Plus\", 799, 1.5, \"Kona\")\nproduct3 = product(\"Kona Dr.Dew\", 999, 1.2, \"Kona\")\nproduct1.addTax(0.10)\nproduct2.addTax(0.15)\nproduct3.addTax(0.11)\nprint \"add tax\"\nproduct1.displayInfo()\nproduct2.displayInfo()\nproduct3.displayInfo()\nproduct1.sell()\nproduct2.sell()\nproduct3.sell()\nprint \"sell items\"\nproduct1.displayInfo()\nproduct2.displayInfo()\nproduct3.displayInfo()\nproduct1.Return(\"Defective\")\nproduct2.Return(\"Box\")\nproduct3.Return(\"Opened\")\nprint \"return items\"\nproduct1.displayInfo()\nproduct2.displayInfo()\nproduct3.displayInfo()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
"""Integration to integrate Keymitt BLE devices with Home Assistant."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any
from microbot import MicroBotApiClient, parse_advertisement_data
from homeassistant.components import bluetooth
from homeassistant.components.bluetooth.passive_update_coordinator import (
PassiveBluetoothDataUpdateCoordinator,
)
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
if TYPE_CHECKING:
from bleak.backends.device import BLEDevice
_LOGGER: logging.Logger = logging.getLogger(__package__)
PLATFORMS: list[str] = [Platform.SWITCH]
class MicroBotDataUpdateCoordinator(PassiveBluetoothDataUpdateCoordinator):
"""Class to manage fetching data from the MicroBot."""
def __init__(
self,
hass: HomeAssistant,
client: MicroBotApiClient,
ble_device: BLEDevice,
) -> None:
"""Initialize."""
self.api: MicroBotApiClient = client
self.data: dict[str, Any] = {}
self.ble_device = ble_device
super().__init__(
hass,
_LOGGER,
ble_device.address,
bluetooth.BluetoothScanningMode.ACTIVE,
)
@callback
def _async_handle_bluetooth_event(
self,
service_info: bluetooth.BluetoothServiceInfoBleak,
change: bluetooth.BluetoothChange,
) -> None:
"""Handle a Bluetooth event."""
if adv := parse_advertisement_data(
service_info.device, service_info.advertisement
):
self.data = adv.data
_LOGGER.debug("%s: MicroBot data: %s", self.ble_device.address, self.data)
self.api.update_from_advertisement(adv)
super()._async_handle_bluetooth_event(service_info, change)
|
normal
|
{
"blob_id": "5509880c30c2e03ca6eb42ad32018c39fb5939ed",
"index": 9955,
"step-1": "<mask token>\n\n\nclass MicroBotDataUpdateCoordinator(PassiveBluetoothDataUpdateCoordinator):\n <mask token>\n\n def __init__(self, hass: HomeAssistant, client: MicroBotApiClient,\n ble_device: BLEDevice) ->None:\n \"\"\"Initialize.\"\"\"\n self.api: MicroBotApiClient = client\n self.data: dict[str, Any] = {}\n self.ble_device = ble_device\n super().__init__(hass, _LOGGER, ble_device.address, bluetooth.\n BluetoothScanningMode.ACTIVE)\n\n @callback\n def _async_handle_bluetooth_event(self, service_info: bluetooth.\n BluetoothServiceInfoBleak, change: bluetooth.BluetoothChange) ->None:\n \"\"\"Handle a Bluetooth event.\"\"\"\n if (adv := parse_advertisement_data(service_info.device,\n service_info.advertisement)):\n self.data = adv.data\n _LOGGER.debug('%s: MicroBot data: %s', self.ble_device.address,\n self.data)\n self.api.update_from_advertisement(adv)\n super()._async_handle_bluetooth_event(service_info, change)\n",
"step-2": "<mask token>\n\n\nclass MicroBotDataUpdateCoordinator(PassiveBluetoothDataUpdateCoordinator):\n \"\"\"Class to manage fetching data from the MicroBot.\"\"\"\n\n def __init__(self, hass: HomeAssistant, client: MicroBotApiClient,\n ble_device: BLEDevice) ->None:\n \"\"\"Initialize.\"\"\"\n self.api: MicroBotApiClient = client\n self.data: dict[str, Any] = {}\n self.ble_device = ble_device\n super().__init__(hass, _LOGGER, ble_device.address, bluetooth.\n BluetoothScanningMode.ACTIVE)\n\n @callback\n def _async_handle_bluetooth_event(self, service_info: bluetooth.\n BluetoothServiceInfoBleak, change: bluetooth.BluetoothChange) ->None:\n \"\"\"Handle a Bluetooth event.\"\"\"\n if (adv := parse_advertisement_data(service_info.device,\n service_info.advertisement)):\n self.data = adv.data\n _LOGGER.debug('%s: MicroBot data: %s', self.ble_device.address,\n self.data)\n self.api.update_from_advertisement(adv)\n super()._async_handle_bluetooth_event(service_info, change)\n",
"step-3": "<mask token>\nif TYPE_CHECKING:\n from bleak.backends.device import BLEDevice\n_LOGGER: logging.Logger = logging.getLogger(__package__)\nPLATFORMS: list[str] = [Platform.SWITCH]\n\n\nclass MicroBotDataUpdateCoordinator(PassiveBluetoothDataUpdateCoordinator):\n \"\"\"Class to manage fetching data from the MicroBot.\"\"\"\n\n def __init__(self, hass: HomeAssistant, client: MicroBotApiClient,\n ble_device: BLEDevice) ->None:\n \"\"\"Initialize.\"\"\"\n self.api: MicroBotApiClient = client\n self.data: dict[str, Any] = {}\n self.ble_device = ble_device\n super().__init__(hass, _LOGGER, ble_device.address, bluetooth.\n BluetoothScanningMode.ACTIVE)\n\n @callback\n def _async_handle_bluetooth_event(self, service_info: bluetooth.\n BluetoothServiceInfoBleak, change: bluetooth.BluetoothChange) ->None:\n \"\"\"Handle a Bluetooth event.\"\"\"\n if (adv := parse_advertisement_data(service_info.device,\n service_info.advertisement)):\n self.data = adv.data\n _LOGGER.debug('%s: MicroBot data: %s', self.ble_device.address,\n self.data)\n self.api.update_from_advertisement(adv)\n super()._async_handle_bluetooth_event(service_info, change)\n",
"step-4": "<mask token>\nfrom __future__ import annotations\nimport logging\nfrom typing import TYPE_CHECKING, Any\nfrom microbot import MicroBotApiClient, parse_advertisement_data\nfrom homeassistant.components import bluetooth\nfrom homeassistant.components.bluetooth.passive_update_coordinator import PassiveBluetoothDataUpdateCoordinator\nfrom homeassistant.const import Platform\nfrom homeassistant.core import HomeAssistant, callback\nif TYPE_CHECKING:\n from bleak.backends.device import BLEDevice\n_LOGGER: logging.Logger = logging.getLogger(__package__)\nPLATFORMS: list[str] = [Platform.SWITCH]\n\n\nclass MicroBotDataUpdateCoordinator(PassiveBluetoothDataUpdateCoordinator):\n \"\"\"Class to manage fetching data from the MicroBot.\"\"\"\n\n def __init__(self, hass: HomeAssistant, client: MicroBotApiClient,\n ble_device: BLEDevice) ->None:\n \"\"\"Initialize.\"\"\"\n self.api: MicroBotApiClient = client\n self.data: dict[str, Any] = {}\n self.ble_device = ble_device\n super().__init__(hass, _LOGGER, ble_device.address, bluetooth.\n BluetoothScanningMode.ACTIVE)\n\n @callback\n def _async_handle_bluetooth_event(self, service_info: bluetooth.\n BluetoothServiceInfoBleak, change: bluetooth.BluetoothChange) ->None:\n \"\"\"Handle a Bluetooth event.\"\"\"\n if (adv := parse_advertisement_data(service_info.device,\n service_info.advertisement)):\n self.data = adv.data\n _LOGGER.debug('%s: MicroBot data: %s', self.ble_device.address,\n self.data)\n self.api.update_from_advertisement(adv)\n super()._async_handle_bluetooth_event(service_info, change)\n",
"step-5": "\"\"\"Integration to integrate Keymitt BLE devices with Home Assistant.\"\"\"\nfrom __future__ import annotations\n\nimport logging\nfrom typing import TYPE_CHECKING, Any\n\nfrom microbot import MicroBotApiClient, parse_advertisement_data\n\nfrom homeassistant.components import bluetooth\nfrom homeassistant.components.bluetooth.passive_update_coordinator import (\n PassiveBluetoothDataUpdateCoordinator,\n)\nfrom homeassistant.const import Platform\nfrom homeassistant.core import HomeAssistant, callback\n\nif TYPE_CHECKING:\n from bleak.backends.device import BLEDevice\n\n_LOGGER: logging.Logger = logging.getLogger(__package__)\nPLATFORMS: list[str] = [Platform.SWITCH]\n\n\nclass MicroBotDataUpdateCoordinator(PassiveBluetoothDataUpdateCoordinator):\n \"\"\"Class to manage fetching data from the MicroBot.\"\"\"\n\n def __init__(\n self,\n hass: HomeAssistant,\n client: MicroBotApiClient,\n ble_device: BLEDevice,\n ) -> None:\n \"\"\"Initialize.\"\"\"\n self.api: MicroBotApiClient = client\n self.data: dict[str, Any] = {}\n self.ble_device = ble_device\n super().__init__(\n hass,\n _LOGGER,\n ble_device.address,\n bluetooth.BluetoothScanningMode.ACTIVE,\n )\n\n @callback\n def _async_handle_bluetooth_event(\n self,\n service_info: bluetooth.BluetoothServiceInfoBleak,\n change: bluetooth.BluetoothChange,\n ) -> None:\n \"\"\"Handle a Bluetooth event.\"\"\"\n if adv := parse_advertisement_data(\n service_info.device, service_info.advertisement\n ):\n self.data = adv.data\n _LOGGER.debug(\"%s: MicroBot data: %s\", self.ble_device.address, self.data)\n self.api.update_from_advertisement(adv)\n super()._async_handle_bluetooth_event(service_info, change)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
class DagsterMessageProps(NamedTuple('_DagsterMessageProps', [(
'orig_message', Optional[str]), ('log_message_id', Optional[str]), (
'log_timestamp', Optional[str]), ('dagster_event', Optional[Any])])):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def step_key(self) ->Optional[str]:
if self.dagster_event is None:
return None
return self.dagster_event.step_key
@property
def event_type_value(self) ->Optional[str]:
if self.dagster_event is None:
return None
return self.dagster_event.event_type_value
class DagsterLoggingMetadata(NamedTuple('_DagsterLoggingMetadata', [(
'run_id', Optional[str]), ('pipeline_name', Optional[str]), (
'pipeline_tags', Dict[str, str]), ('step_key', Optional[str]), (
'solid_name', Optional[str]), ('resource_name', Optional[str]), (
'resource_fn_name', Optional[str])])):
"""Internal class used to represent the context in which a given message was logged (i.e. the
step, pipeline run, resource, etc.)
"""
def __new__(cls, run_id: str=None, pipeline_name: str=None,
pipeline_tags: Dict[str, str]=None, step_key: str=None, solid_name:
str=None, resource_name: str=None, resource_fn_name: str=None):
return super().__new__(cls, run_id=run_id, pipeline_name=
pipeline_name, pipeline_tags=pipeline_tags or {}, step_key=
step_key, solid_name=solid_name, resource_name=resource_name,
resource_fn_name=resource_fn_name)
@property
def log_source(self):
if self.resource_name is None:
return self.pipeline_name or 'system'
return f'resource:{self.resource_name}'
def to_tags(self) ->Dict[str, str]:
return {k: str(v) for k, v in self._asdict().items()}
<|reserved_special_token_0|>
class DagsterLogManager(logging.Logger):
def __init__(self, logging_metadata: DagsterLoggingMetadata, loggers:
List[logging.Logger], handlers: Optional[List[logging.Handler]]=None):
self._logging_metadata = check.inst_param(logging_metadata,
'logging_metadata', DagsterLoggingMetadata)
self._loggers = check.list_param(loggers, 'loggers', of_type=
logging.Logger)
super().__init__(name='dagster', level=logging.DEBUG)
handlers = check.opt_list_param(handlers, 'handlers', of_type=
logging.Handler)
for handler in handlers:
self.addHandler(handler)
@property
def logging_metadata(self) ->DagsterLoggingMetadata:
return self._logging_metadata
@property
def loggers(self) ->List[logging.Logger]:
return self._loggers
def log_dagster_event(self, level: int, msg: str, dagster_event:
'DagsterEvent'):
self.log(level=level, msg=msg, extra={DAGSTER_META_KEY: dagster_event})
def log(self, level, msg, *args, **kwargs):
super().log(coerce_valid_log_level(level), msg, *args, **kwargs)
def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=
False):
extra = extra or {}
dagster_message_props = DagsterMessageProps(orig_message=msg,
dagster_event=extra.get(DAGSTER_META_KEY))
msg = construct_log_string(self.logging_metadata, dagster_message_props
)
meta_dict = {**self.logging_metadata._asdict(), **
dagster_message_props._asdict()}
if meta_dict['step_key'] is None:
meta_dict['step_key'] = dagster_message_props.step_key
extra[DAGSTER_META_KEY] = meta_dict
for logger in self._loggers:
logger.log(level, msg, *args, extra=extra)
super()._log(level, msg, args, exc_info=exc_info, extra=extra,
stack_info=stack_info)
def with_tags(self, **new_tags):
"""Add new tags in "new_tags" to the set of tags attached to this log manager instance, and
return a new DagsterLogManager with the merged set of tags.
Args:
tags (Dict[str,str]): Dictionary of tags
Returns:
DagsterLogManager: a new DagsterLogManager namedtuple with updated tags for the same
run ID and loggers.
"""
return DagsterLogManager(logging_metadata=self.logging_metadata.
_replace(**new_tags), loggers=self._loggers, handlers=self.handlers
)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DagsterMessageProps(NamedTuple('_DagsterMessageProps', [(
'orig_message', Optional[str]), ('log_message_id', Optional[str]), (
'log_timestamp', Optional[str]), ('dagster_event', Optional[Any])])):
<|reserved_special_token_0|>
def __new__(cls, orig_message: str, log_message_id: Optional[str]=None,
log_timestamp: Optional[str]=None, dagster_event: Optional[
'DagsterEvent']=None):
return super().__new__(cls, orig_message=check.str_param(
orig_message, 'orig_message'), log_message_id=check.
opt_str_param(log_message_id, 'log_message_id', default=
make_new_run_id()), log_timestamp=check.opt_str_param(
log_timestamp, 'log_timestamp', default=datetime.datetime.
utcnow().isoformat()), dagster_event=dagster_event)
<|reserved_special_token_0|>
@property
def pid(self) ->Optional[str]:
if self.dagster_event is None or self.dagster_event.pid is None:
return None
return str(self.dagster_event.pid)
@property
def step_key(self) ->Optional[str]:
if self.dagster_event is None:
return None
return self.dagster_event.step_key
@property
def event_type_value(self) ->Optional[str]:
if self.dagster_event is None:
return None
return self.dagster_event.event_type_value
class DagsterLoggingMetadata(NamedTuple('_DagsterLoggingMetadata', [(
'run_id', Optional[str]), ('pipeline_name', Optional[str]), (
'pipeline_tags', Dict[str, str]), ('step_key', Optional[str]), (
'solid_name', Optional[str]), ('resource_name', Optional[str]), (
'resource_fn_name', Optional[str])])):
"""Internal class used to represent the context in which a given message was logged (i.e. the
step, pipeline run, resource, etc.)
"""
def __new__(cls, run_id: str=None, pipeline_name: str=None,
pipeline_tags: Dict[str, str]=None, step_key: str=None, solid_name:
str=None, resource_name: str=None, resource_fn_name: str=None):
return super().__new__(cls, run_id=run_id, pipeline_name=
pipeline_name, pipeline_tags=pipeline_tags or {}, step_key=
step_key, solid_name=solid_name, resource_name=resource_name,
resource_fn_name=resource_fn_name)
@property
def log_source(self):
if self.resource_name is None:
return self.pipeline_name or 'system'
return f'resource:{self.resource_name}'
def to_tags(self) ->Dict[str, str]:
return {k: str(v) for k, v in self._asdict().items()}
<|reserved_special_token_0|>
class DagsterLogManager(logging.Logger):
def __init__(self, logging_metadata: DagsterLoggingMetadata, loggers:
List[logging.Logger], handlers: Optional[List[logging.Handler]]=None):
self._logging_metadata = check.inst_param(logging_metadata,
'logging_metadata', DagsterLoggingMetadata)
self._loggers = check.list_param(loggers, 'loggers', of_type=
logging.Logger)
super().__init__(name='dagster', level=logging.DEBUG)
handlers = check.opt_list_param(handlers, 'handlers', of_type=
logging.Handler)
for handler in handlers:
self.addHandler(handler)
@property
def logging_metadata(self) ->DagsterLoggingMetadata:
return self._logging_metadata
@property
def loggers(self) ->List[logging.Logger]:
return self._loggers
def log_dagster_event(self, level: int, msg: str, dagster_event:
'DagsterEvent'):
self.log(level=level, msg=msg, extra={DAGSTER_META_KEY: dagster_event})
def log(self, level, msg, *args, **kwargs):
super().log(coerce_valid_log_level(level), msg, *args, **kwargs)
def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=
False):
extra = extra or {}
dagster_message_props = DagsterMessageProps(orig_message=msg,
dagster_event=extra.get(DAGSTER_META_KEY))
msg = construct_log_string(self.logging_metadata, dagster_message_props
)
meta_dict = {**self.logging_metadata._asdict(), **
dagster_message_props._asdict()}
if meta_dict['step_key'] is None:
meta_dict['step_key'] = dagster_message_props.step_key
extra[DAGSTER_META_KEY] = meta_dict
for logger in self._loggers:
logger.log(level, msg, *args, extra=extra)
super()._log(level, msg, args, exc_info=exc_info, extra=extra,
stack_info=stack_info)
def with_tags(self, **new_tags):
"""Add new tags in "new_tags" to the set of tags attached to this log manager instance, and
return a new DagsterLogManager with the merged set of tags.
Args:
tags (Dict[str,str]): Dictionary of tags
Returns:
DagsterLogManager: a new DagsterLogManager namedtuple with updated tags for the same
run ID and loggers.
"""
return DagsterLogManager(logging_metadata=self.logging_metadata.
_replace(**new_tags), loggers=self._loggers, handlers=self.handlers
)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DagsterMessageProps(NamedTuple('_DagsterMessageProps', [(
'orig_message', Optional[str]), ('log_message_id', Optional[str]), (
'log_timestamp', Optional[str]), ('dagster_event', Optional[Any])])):
"""Internal class used to represent specific attributes about a logged message"""
def __new__(cls, orig_message: str, log_message_id: Optional[str]=None,
log_timestamp: Optional[str]=None, dagster_event: Optional[
'DagsterEvent']=None):
return super().__new__(cls, orig_message=check.str_param(
orig_message, 'orig_message'), log_message_id=check.
opt_str_param(log_message_id, 'log_message_id', default=
make_new_run_id()), log_timestamp=check.opt_str_param(
log_timestamp, 'log_timestamp', default=datetime.datetime.
utcnow().isoformat()), dagster_event=dagster_event)
@property
def error_str(self) ->Optional[str]:
if self.dagster_event is None:
return None
event_specific_data = self.dagster_event.event_specific_data
if not event_specific_data:
return None
error = getattr(event_specific_data, 'error', None)
if error:
return '\n\n' + getattr(event_specific_data,
'error_display_string', error.to_string())
return None
@property
def pid(self) ->Optional[str]:
if self.dagster_event is None or self.dagster_event.pid is None:
return None
return str(self.dagster_event.pid)
@property
def step_key(self) ->Optional[str]:
if self.dagster_event is None:
return None
return self.dagster_event.step_key
@property
def event_type_value(self) ->Optional[str]:
if self.dagster_event is None:
return None
return self.dagster_event.event_type_value
class DagsterLoggingMetadata(NamedTuple('_DagsterLoggingMetadata', [(
'run_id', Optional[str]), ('pipeline_name', Optional[str]), (
'pipeline_tags', Dict[str, str]), ('step_key', Optional[str]), (
'solid_name', Optional[str]), ('resource_name', Optional[str]), (
'resource_fn_name', Optional[str])])):
"""Internal class used to represent the context in which a given message was logged (i.e. the
step, pipeline run, resource, etc.)
"""
def __new__(cls, run_id: str=None, pipeline_name: str=None,
pipeline_tags: Dict[str, str]=None, step_key: str=None, solid_name:
str=None, resource_name: str=None, resource_fn_name: str=None):
return super().__new__(cls, run_id=run_id, pipeline_name=
pipeline_name, pipeline_tags=pipeline_tags or {}, step_key=
step_key, solid_name=solid_name, resource_name=resource_name,
resource_fn_name=resource_fn_name)
@property
def log_source(self):
if self.resource_name is None:
return self.pipeline_name or 'system'
return f'resource:{self.resource_name}'
def to_tags(self) ->Dict[str, str]:
return {k: str(v) for k, v in self._asdict().items()}
def construct_log_string(logging_metadata: DagsterLoggingMetadata,
message_props: DagsterMessageProps) ->str:
return ' - '.join(filter(None, (logging_metadata.log_source,
logging_metadata.run_id, message_props.pid, logging_metadata.
step_key, message_props.event_type_value, message_props.orig_message))
) + (message_props.error_str or '')
class DagsterLogManager(logging.Logger):
def __init__(self, logging_metadata: DagsterLoggingMetadata, loggers:
List[logging.Logger], handlers: Optional[List[logging.Handler]]=None):
self._logging_metadata = check.inst_param(logging_metadata,
'logging_metadata', DagsterLoggingMetadata)
self._loggers = check.list_param(loggers, 'loggers', of_type=
logging.Logger)
super().__init__(name='dagster', level=logging.DEBUG)
handlers = check.opt_list_param(handlers, 'handlers', of_type=
logging.Handler)
for handler in handlers:
self.addHandler(handler)
@property
def logging_metadata(self) ->DagsterLoggingMetadata:
return self._logging_metadata
@property
def loggers(self) ->List[logging.Logger]:
return self._loggers
def log_dagster_event(self, level: int, msg: str, dagster_event:
'DagsterEvent'):
self.log(level=level, msg=msg, extra={DAGSTER_META_KEY: dagster_event})
def log(self, level, msg, *args, **kwargs):
super().log(coerce_valid_log_level(level), msg, *args, **kwargs)
def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=
False):
extra = extra or {}
dagster_message_props = DagsterMessageProps(orig_message=msg,
dagster_event=extra.get(DAGSTER_META_KEY))
msg = construct_log_string(self.logging_metadata, dagster_message_props
)
meta_dict = {**self.logging_metadata._asdict(), **
dagster_message_props._asdict()}
if meta_dict['step_key'] is None:
meta_dict['step_key'] = dagster_message_props.step_key
extra[DAGSTER_META_KEY] = meta_dict
for logger in self._loggers:
logger.log(level, msg, *args, extra=extra)
super()._log(level, msg, args, exc_info=exc_info, extra=extra,
stack_info=stack_info)
def with_tags(self, **new_tags):
"""Add new tags in "new_tags" to the set of tags attached to this log manager instance, and
return a new DagsterLogManager with the merged set of tags.
Args:
tags (Dict[str,str]): Dictionary of tags
Returns:
DagsterLogManager: a new DagsterLogManager namedtuple with updated tags for the same
run ID and loggers.
"""
return DagsterLogManager(logging_metadata=self.logging_metadata.
_replace(**new_tags), loggers=self._loggers, handlers=self.handlers
)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if TYPE_CHECKING:
from dagster.core.events import DagsterEvent
<|reserved_special_token_0|>
class DagsterMessageProps(NamedTuple('_DagsterMessageProps', [(
    'orig_message', Optional[str]), ('log_message_id', Optional[str]), (
    'log_timestamp', Optional[str]), ('dagster_event', Optional[Any])])):
    """Internal class used to represent specific attributes about a logged message.

    Bundles the original message text, a unique id and timestamp for the log
    record, and (optionally) the structured DagsterEvent that produced it.
    """

    def __new__(cls, orig_message: str, log_message_id: Optional[str]=None,
        log_timestamp: Optional[str]=None, dagster_event: Optional[
        'DagsterEvent']=None):
        # A fresh unique id and a UTC ISO-8601 timestamp are generated here
        # whenever the caller does not supply them.
        return super().__new__(cls, orig_message=check.str_param(
            orig_message, 'orig_message'), log_message_id=check.
            opt_str_param(log_message_id, 'log_message_id', default=
            make_new_run_id()), log_timestamp=check.opt_str_param(
            log_timestamp, 'log_timestamp', default=datetime.datetime.
            utcnow().isoformat()), dagster_event=dagster_event)
    @property
    def error_str(self) ->Optional[str]:
        """Formatted error text from the attached event, or None when absent."""
        if self.dagster_event is None:
            return None
        event_specific_data = self.dagster_event.event_specific_data
        if not event_specific_data:
            return None
        error = getattr(event_specific_data, 'error', None)
        if error:
            # Prefer the event's pre-rendered display string; fall back to
            # the error's own string form.
            return '\n\n' + getattr(event_specific_data,
                'error_display_string', error.to_string())
        return None
    @property
    def pid(self) ->Optional[str]:
        """Producing process id as a string, or None when unavailable."""
        if self.dagster_event is None or self.dagster_event.pid is None:
            return None
        return str(self.dagster_event.pid)
    @property
    def step_key(self) ->Optional[str]:
        """Step key of the attached event, or None when no event is attached."""
        if self.dagster_event is None:
            return None
        return self.dagster_event.step_key
    @property
    def event_type_value(self) ->Optional[str]:
        """String value of the attached event's type, or None when no event is attached."""
        if self.dagster_event is None:
            return None
        return self.dagster_event.event_type_value
class DagsterLoggingMetadata(NamedTuple('_DagsterLoggingMetadata', [(
    'run_id', Optional[str]), ('pipeline_name', Optional[str]), (
    'pipeline_tags', Dict[str, str]), ('step_key', Optional[str]), (
    'solid_name', Optional[str]), ('resource_name', Optional[str]), (
    'resource_fn_name', Optional[str])])):
    """Internal class used to represent the context in which a given message was logged (i.e. the
    step, pipeline run, resource, etc.)
    """

    def __new__(cls, run_id: str=None, pipeline_name: str=None,
        pipeline_tags: Dict[str, str]=None, step_key: str=None, solid_name:
        str=None, resource_name: str=None, resource_fn_name: str=None):
        # Every field defaults to None, except pipeline_tags which always
        # materializes as a dict (never None).
        return super().__new__(cls, run_id=run_id, pipeline_name=
            pipeline_name, pipeline_tags=pipeline_tags or {}, step_key=
            step_key, solid_name=solid_name, resource_name=resource_name,
            resource_fn_name=resource_fn_name)
    @property
    def log_source(self):
        """Label for where the log originated: a resource, the pipeline, or 'system'."""
        if self.resource_name is None:
            return self.pipeline_name or 'system'
        return f'resource:{self.resource_name}'
    def to_tags(self) ->Dict[str, str]:
        """Return every field as a string-valued tag (None becomes 'None')."""
        return {k: str(v) for k, v in self._asdict().items()}
def construct_log_string(logging_metadata: DagsterLoggingMetadata,
    message_props: DagsterMessageProps) ->str:
    """Assemble a single human-readable log line from context and message parts.

    Empty/None segments are skipped; the event's formatted error text (if any)
    is appended at the end.
    """
    segments = (logging_metadata.log_source, logging_metadata.run_id,
        message_props.pid, logging_metadata.step_key, message_props.
        event_type_value, message_props.orig_message)
    line = ' - '.join(part for part in segments if part)
    return line + (message_props.error_str or '')
class DagsterLogManager(logging.Logger):
    """Centralized log manager: a logging.Logger that forwards every record to
    a set of user-provided loggers while attaching Dagster run/step metadata
    to each record under the DAGSTER_META_KEY extra."""

    def __init__(self, logging_metadata: DagsterLoggingMetadata, loggers:
        List[logging.Logger], handlers: Optional[List[logging.Handler]]=None):
        self._logging_metadata = check.inst_param(logging_metadata,
            'logging_metadata', DagsterLoggingMetadata)
        self._loggers = check.list_param(loggers, 'loggers', of_type=
            logging.Logger)
        super().__init__(name='dagster', level=logging.DEBUG)
        handlers = check.opt_list_param(handlers, 'handlers', of_type=
            logging.Handler)
        for handler in handlers:
            self.addHandler(handler)
    @property
    def logging_metadata(self) ->DagsterLoggingMetadata:
        """The run/step/resource context stamped onto every record."""
        return self._logging_metadata
    @property
    def loggers(self) ->List[logging.Logger]:
        """The user-provided loggers every record is forwarded to."""
        return self._loggers
    def log_dagster_event(self, level: int, msg: str, dagster_event:
        'DagsterEvent'):
        """Log a structured DagsterEvent, stashing it in the record's extra dict."""
        self.log(level=level, msg=msg, extra={DAGSTER_META_KEY: dagster_event})
    def log(self, level, msg, *args, **kwargs):
        # Accept string level names (e.g. 'INFO') in addition to ints.
        super().log(coerce_valid_log_level(level), msg, *args, **kwargs)
    def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=
        False):
        """Core logging hook: enrich the record with Dagster metadata, forward
        it to the managed loggers, then emit through this logger's handlers."""
        # Dagster meta information rides along in the record's extra dict.
        extra = extra or {}
        dagster_message_props = DagsterMessageProps(orig_message=msg,
            dagster_event=extra.get(DAGSTER_META_KEY))
        # Convert the message to the preferred single-line format.
        msg = construct_log_string(self.logging_metadata, dagster_message_props
            )
        # Combine all dagster meta information into a single dictionary.
        meta_dict = {**self.logging_metadata._asdict(), **
            dagster_message_props._asdict()}
        # Step-level events can be logged from a pipeline context; in that
        # case pull the step key from the underlying DagsterEvent.
        if meta_dict['step_key'] is None:
            meta_dict['step_key'] = dagster_message_props.step_key
        extra[DAGSTER_META_KEY] = meta_dict
        for logger in self._loggers:
            logger.log(level, msg, *args, extra=extra)
        super()._log(level, msg, args, exc_info=exc_info, extra=extra,
            stack_info=stack_info)
    def with_tags(self, **new_tags):
        """Add new tags in "new_tags" to the set of tags attached to this log manager instance, and
        return a new DagsterLogManager with the merged set of tags.
        Args:
            tags (Dict[str,str]): Dictionary of tags
        Returns:
            DagsterLogManager: a new DagsterLogManager namedtuple with updated tags for the same
            run ID and loggers.
        """
        return DagsterLogManager(logging_metadata=self.logging_metadata.
            _replace(**new_tags), loggers=self._loggers, handlers=self.handlers
            )
<|reserved_special_token_1|>
import datetime
import logging
from typing import TYPE_CHECKING, Any, Dict, List, NamedTuple, Optional
from dagster import check
from dagster.core.utils import coerce_valid_log_level, make_new_run_id
if TYPE_CHECKING:
from dagster.core.events import DagsterEvent
DAGSTER_META_KEY = "dagster_meta"
class DagsterMessageProps(
    NamedTuple(
        "_DagsterMessageProps",
        [
            ("orig_message", Optional[str]),
            ("log_message_id", Optional[str]),
            ("log_timestamp", Optional[str]),
            ("dagster_event", Optional[Any]),
        ],
    )
):
    """Internal class used to represent specific attributes about a logged message"""

    def __new__(
        cls,
        orig_message: str,
        log_message_id: Optional[str] = None,
        log_timestamp: Optional[str] = None,
        dagster_event: Optional["DagsterEvent"] = None,
    ):
        # Fill in a fresh unique id and a UTC ISO-8601 timestamp whenever the
        # caller does not supply them.
        validated_message = check.str_param(orig_message, "orig_message")
        message_id = check.opt_str_param(
            log_message_id, "log_message_id", default=make_new_run_id()
        )
        timestamp = check.opt_str_param(
            log_timestamp,
            "log_timestamp",
            default=datetime.datetime.utcnow().isoformat(),
        )
        return super().__new__(
            cls,
            orig_message=validated_message,
            log_message_id=message_id,
            log_timestamp=timestamp,
            dagster_event=dagster_event,
        )

    @property
    def error_str(self) -> Optional[str]:
        """Formatted error text from the attached event, or None when absent."""
        event = self.dagster_event
        if event is None:
            return None
        specific_data = event.event_specific_data
        if not specific_data:
            return None
        error = getattr(specific_data, "error", None)
        if not error:
            return None
        # Prefer the pre-rendered display string; fall back to the error's
        # own string form.
        display = getattr(specific_data, "error_display_string", error.to_string())
        return "\n\n" + display

    @property
    def pid(self) -> Optional[str]:
        """Producing process id as a string, or None when unavailable."""
        event = self.dagster_event
        if event is None or event.pid is None:
            return None
        return str(event.pid)

    @property
    def step_key(self) -> Optional[str]:
        """Step key of the attached event, if any."""
        event = self.dagster_event
        return None if event is None else event.step_key

    @property
    def event_type_value(self) -> Optional[str]:
        """String value of the attached event's type, if any."""
        event = self.dagster_event
        return None if event is None else event.event_type_value
class DagsterLoggingMetadata(
    NamedTuple(
        "_DagsterLoggingMetadata",
        [
            ("run_id", Optional[str]),
            ("pipeline_name", Optional[str]),
            ("pipeline_tags", Dict[str, str]),
            ("step_key", Optional[str]),
            ("solid_name", Optional[str]),
            ("resource_name", Optional[str]),
            ("resource_fn_name", Optional[str]),
        ],
    )
):
    """Internal class used to represent the context in which a given message was logged (i.e. the
    step, pipeline run, resource, etc.)
    """

    def __new__(
        cls,
        run_id: str = None,
        pipeline_name: str = None,
        pipeline_tags: Dict[str, str] = None,
        step_key: str = None,
        solid_name: str = None,
        resource_name: str = None,
        resource_fn_name: str = None,
    ):
        # pipeline_tags always materializes as a dict, never None.
        tags = pipeline_tags or {}
        return super().__new__(
            cls,
            run_id=run_id,
            pipeline_name=pipeline_name,
            pipeline_tags=tags,
            step_key=step_key,
            solid_name=solid_name,
            resource_name=resource_name,
            resource_fn_name=resource_fn_name,
        )

    @property
    def log_source(self):
        """Label for where the log originated: a resource, the pipeline, or 'system'."""
        if self.resource_name is not None:
            return f"resource:{self.resource_name}"
        return self.pipeline_name or "system"

    def to_tags(self) -> Dict[str, str]:
        """Return every field as a string-valued tag (None becomes 'None')."""
        stringified = {}
        for field_name, value in self._asdict().items():
            stringified[field_name] = str(value)
        return stringified
def construct_log_string(
    logging_metadata: DagsterLoggingMetadata, message_props: DagsterMessageProps
) -> str:
    """Build the single-line textual form of a log message.

    Joins the non-empty context/message segments with ' - ' and appends the
    event's formatted error text (if any) at the end.
    """
    candidate_segments = [
        logging_metadata.log_source,
        logging_metadata.run_id,
        message_props.pid,
        logging_metadata.step_key,
        message_props.event_type_value,
        message_props.orig_message,
    ]
    body = " - ".join(segment for segment in candidate_segments if segment)
    return body + (message_props.error_str or "")
class DagsterLogManager(logging.Logger):
    """Centralized log manager: a logging.Logger that forwards every record to
    a set of user-provided loggers while attaching Dagster run/step metadata
    to each record under the DAGSTER_META_KEY extra."""

    def __init__(
        self,
        logging_metadata: DagsterLoggingMetadata,
        loggers: List[logging.Logger],
        handlers: Optional[List[logging.Handler]] = None,
    ):
        self._logging_metadata = check.inst_param(
            logging_metadata, "logging_metadata", DagsterLoggingMetadata
        )
        self._loggers = check.list_param(loggers, "loggers", of_type=logging.Logger)
        super().__init__(name="dagster", level=logging.DEBUG)
        handlers = check.opt_list_param(handlers, "handlers", of_type=logging.Handler)
        for handler in handlers:
            self.addHandler(handler)
    @property
    def logging_metadata(self) -> DagsterLoggingMetadata:
        """The run/step/resource context stamped onto every record."""
        return self._logging_metadata
    @property
    def loggers(self) -> List[logging.Logger]:
        """The user-provided loggers every record is forwarded to."""
        return self._loggers
    def log_dagster_event(self, level: int, msg: str, dagster_event: "DagsterEvent"):
        """Log a structured DagsterEvent, stashing it in the record's extra dict."""
        self.log(level=level, msg=msg, extra={DAGSTER_META_KEY: dagster_event})
    def log(self, level, msg, *args, **kwargs):
        # allow for string level names (e.g. "INFO") in addition to ints
        super().log(coerce_valid_log_level(level), msg, *args, **kwargs)
    def _log(
        self, level, msg, args, exc_info=None, extra=None, stack_info=False
    ):  # pylint: disable=arguments-differ
        """Core logging hook: enrich the record with Dagster metadata, forward
        it to the managed loggers, then emit through this logger's handlers."""
        # we stash dagster meta information in the extra field
        extra = extra or {}
        dagster_message_props = DagsterMessageProps(
            orig_message=msg, dagster_event=extra.get(DAGSTER_META_KEY)
        )
        # convert the message to our preferred single-line format
        msg = construct_log_string(self.logging_metadata, dagster_message_props)
        # combine all dagster meta information into a single dictionary
        meta_dict = {
            **self.logging_metadata._asdict(),
            **dagster_message_props._asdict(),
        }
        # step-level events can be logged from a pipeline context. for these cases, pull the step
        # key from the underlying DagsterEvent
        if meta_dict["step_key"] is None:
            meta_dict["step_key"] = dagster_message_props.step_key
        extra[DAGSTER_META_KEY] = meta_dict
        for logger in self._loggers:
            logger.log(level, msg, *args, extra=extra)
        super()._log(level, msg, args, exc_info=exc_info, extra=extra, stack_info=stack_info)
    def with_tags(self, **new_tags):
        """Add new tags in "new_tags" to the set of tags attached to this log manager instance, and
        return a new DagsterLogManager with the merged set of tags.
        Args:
            tags (Dict[str,str]): Dictionary of tags
        Returns:
            DagsterLogManager: a new DagsterLogManager namedtuple with updated tags for the same
            run ID and loggers.
        """
        return DagsterLogManager(
            logging_metadata=self.logging_metadata._replace(**new_tags),
            loggers=self._loggers,
            handlers=self.handlers,
        )
|
flexible
|
{
"blob_id": "f900e08c06ae736f5e32ac748e282700f9d0a969",
"index": 7922,
"step-1": "<mask token>\n\n\nclass DagsterMessageProps(NamedTuple('_DagsterMessageProps', [(\n 'orig_message', Optional[str]), ('log_message_id', Optional[str]), (\n 'log_timestamp', Optional[str]), ('dagster_event', Optional[Any])])):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def step_key(self) ->Optional[str]:\n if self.dagster_event is None:\n return None\n return self.dagster_event.step_key\n\n @property\n def event_type_value(self) ->Optional[str]:\n if self.dagster_event is None:\n return None\n return self.dagster_event.event_type_value\n\n\nclass DagsterLoggingMetadata(NamedTuple('_DagsterLoggingMetadata', [(\n 'run_id', Optional[str]), ('pipeline_name', Optional[str]), (\n 'pipeline_tags', Dict[str, str]), ('step_key', Optional[str]), (\n 'solid_name', Optional[str]), ('resource_name', Optional[str]), (\n 'resource_fn_name', Optional[str])])):\n \"\"\"Internal class used to represent the context in which a given message was logged (i.e. the\n step, pipeline run, resource, etc.)\n \"\"\"\n\n def __new__(cls, run_id: str=None, pipeline_name: str=None,\n pipeline_tags: Dict[str, str]=None, step_key: str=None, solid_name:\n str=None, resource_name: str=None, resource_fn_name: str=None):\n return super().__new__(cls, run_id=run_id, pipeline_name=\n pipeline_name, pipeline_tags=pipeline_tags or {}, step_key=\n step_key, solid_name=solid_name, resource_name=resource_name,\n resource_fn_name=resource_fn_name)\n\n @property\n def log_source(self):\n if self.resource_name is None:\n return self.pipeline_name or 'system'\n return f'resource:{self.resource_name}'\n\n def to_tags(self) ->Dict[str, str]:\n return {k: str(v) for k, v in self._asdict().items()}\n\n\n<mask token>\n\n\nclass DagsterLogManager(logging.Logger):\n\n def __init__(self, logging_metadata: DagsterLoggingMetadata, loggers:\n List[logging.Logger], handlers: Optional[List[logging.Handler]]=None):\n self._logging_metadata = check.inst_param(logging_metadata,\n 
'logging_metadata', DagsterLoggingMetadata)\n self._loggers = check.list_param(loggers, 'loggers', of_type=\n logging.Logger)\n super().__init__(name='dagster', level=logging.DEBUG)\n handlers = check.opt_list_param(handlers, 'handlers', of_type=\n logging.Handler)\n for handler in handlers:\n self.addHandler(handler)\n\n @property\n def logging_metadata(self) ->DagsterLoggingMetadata:\n return self._logging_metadata\n\n @property\n def loggers(self) ->List[logging.Logger]:\n return self._loggers\n\n def log_dagster_event(self, level: int, msg: str, dagster_event:\n 'DagsterEvent'):\n self.log(level=level, msg=msg, extra={DAGSTER_META_KEY: dagster_event})\n\n def log(self, level, msg, *args, **kwargs):\n super().log(coerce_valid_log_level(level), msg, *args, **kwargs)\n\n def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=\n False):\n extra = extra or {}\n dagster_message_props = DagsterMessageProps(orig_message=msg,\n dagster_event=extra.get(DAGSTER_META_KEY))\n msg = construct_log_string(self.logging_metadata, dagster_message_props\n )\n meta_dict = {**self.logging_metadata._asdict(), **\n dagster_message_props._asdict()}\n if meta_dict['step_key'] is None:\n meta_dict['step_key'] = dagster_message_props.step_key\n extra[DAGSTER_META_KEY] = meta_dict\n for logger in self._loggers:\n logger.log(level, msg, *args, extra=extra)\n super()._log(level, msg, args, exc_info=exc_info, extra=extra,\n stack_info=stack_info)\n\n def with_tags(self, **new_tags):\n \"\"\"Add new tags in \"new_tags\" to the set of tags attached to this log manager instance, and\n return a new DagsterLogManager with the merged set of tags.\n\n Args:\n tags (Dict[str,str]): Dictionary of tags\n\n Returns:\n DagsterLogManager: a new DagsterLogManager namedtuple with updated tags for the same\n run ID and loggers.\n \"\"\"\n return DagsterLogManager(logging_metadata=self.logging_metadata.\n _replace(**new_tags), loggers=self._loggers, handlers=self.handlers\n )\n",
"step-2": "<mask token>\n\n\nclass DagsterMessageProps(NamedTuple('_DagsterMessageProps', [(\n 'orig_message', Optional[str]), ('log_message_id', Optional[str]), (\n 'log_timestamp', Optional[str]), ('dagster_event', Optional[Any])])):\n <mask token>\n\n def __new__(cls, orig_message: str, log_message_id: Optional[str]=None,\n log_timestamp: Optional[str]=None, dagster_event: Optional[\n 'DagsterEvent']=None):\n return super().__new__(cls, orig_message=check.str_param(\n orig_message, 'orig_message'), log_message_id=check.\n opt_str_param(log_message_id, 'log_message_id', default=\n make_new_run_id()), log_timestamp=check.opt_str_param(\n log_timestamp, 'log_timestamp', default=datetime.datetime.\n utcnow().isoformat()), dagster_event=dagster_event)\n <mask token>\n\n @property\n def pid(self) ->Optional[str]:\n if self.dagster_event is None or self.dagster_event.pid is None:\n return None\n return str(self.dagster_event.pid)\n\n @property\n def step_key(self) ->Optional[str]:\n if self.dagster_event is None:\n return None\n return self.dagster_event.step_key\n\n @property\n def event_type_value(self) ->Optional[str]:\n if self.dagster_event is None:\n return None\n return self.dagster_event.event_type_value\n\n\nclass DagsterLoggingMetadata(NamedTuple('_DagsterLoggingMetadata', [(\n 'run_id', Optional[str]), ('pipeline_name', Optional[str]), (\n 'pipeline_tags', Dict[str, str]), ('step_key', Optional[str]), (\n 'solid_name', Optional[str]), ('resource_name', Optional[str]), (\n 'resource_fn_name', Optional[str])])):\n \"\"\"Internal class used to represent the context in which a given message was logged (i.e. 
the\n step, pipeline run, resource, etc.)\n \"\"\"\n\n def __new__(cls, run_id: str=None, pipeline_name: str=None,\n pipeline_tags: Dict[str, str]=None, step_key: str=None, solid_name:\n str=None, resource_name: str=None, resource_fn_name: str=None):\n return super().__new__(cls, run_id=run_id, pipeline_name=\n pipeline_name, pipeline_tags=pipeline_tags or {}, step_key=\n step_key, solid_name=solid_name, resource_name=resource_name,\n resource_fn_name=resource_fn_name)\n\n @property\n def log_source(self):\n if self.resource_name is None:\n return self.pipeline_name or 'system'\n return f'resource:{self.resource_name}'\n\n def to_tags(self) ->Dict[str, str]:\n return {k: str(v) for k, v in self._asdict().items()}\n\n\n<mask token>\n\n\nclass DagsterLogManager(logging.Logger):\n\n def __init__(self, logging_metadata: DagsterLoggingMetadata, loggers:\n List[logging.Logger], handlers: Optional[List[logging.Handler]]=None):\n self._logging_metadata = check.inst_param(logging_metadata,\n 'logging_metadata', DagsterLoggingMetadata)\n self._loggers = check.list_param(loggers, 'loggers', of_type=\n logging.Logger)\n super().__init__(name='dagster', level=logging.DEBUG)\n handlers = check.opt_list_param(handlers, 'handlers', of_type=\n logging.Handler)\n for handler in handlers:\n self.addHandler(handler)\n\n @property\n def logging_metadata(self) ->DagsterLoggingMetadata:\n return self._logging_metadata\n\n @property\n def loggers(self) ->List[logging.Logger]:\n return self._loggers\n\n def log_dagster_event(self, level: int, msg: str, dagster_event:\n 'DagsterEvent'):\n self.log(level=level, msg=msg, extra={DAGSTER_META_KEY: dagster_event})\n\n def log(self, level, msg, *args, **kwargs):\n super().log(coerce_valid_log_level(level), msg, *args, **kwargs)\n\n def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=\n False):\n extra = extra or {}\n dagster_message_props = DagsterMessageProps(orig_message=msg,\n dagster_event=extra.get(DAGSTER_META_KEY))\n msg 
= construct_log_string(self.logging_metadata, dagster_message_props\n )\n meta_dict = {**self.logging_metadata._asdict(), **\n dagster_message_props._asdict()}\n if meta_dict['step_key'] is None:\n meta_dict['step_key'] = dagster_message_props.step_key\n extra[DAGSTER_META_KEY] = meta_dict\n for logger in self._loggers:\n logger.log(level, msg, *args, extra=extra)\n super()._log(level, msg, args, exc_info=exc_info, extra=extra,\n stack_info=stack_info)\n\n def with_tags(self, **new_tags):\n \"\"\"Add new tags in \"new_tags\" to the set of tags attached to this log manager instance, and\n return a new DagsterLogManager with the merged set of tags.\n\n Args:\n tags (Dict[str,str]): Dictionary of tags\n\n Returns:\n DagsterLogManager: a new DagsterLogManager namedtuple with updated tags for the same\n run ID and loggers.\n \"\"\"\n return DagsterLogManager(logging_metadata=self.logging_metadata.\n _replace(**new_tags), loggers=self._loggers, handlers=self.handlers\n )\n",
"step-3": "<mask token>\n\n\nclass DagsterMessageProps(NamedTuple('_DagsterMessageProps', [(\n 'orig_message', Optional[str]), ('log_message_id', Optional[str]), (\n 'log_timestamp', Optional[str]), ('dagster_event', Optional[Any])])):\n \"\"\"Internal class used to represent specific attributes about a logged message\"\"\"\n\n def __new__(cls, orig_message: str, log_message_id: Optional[str]=None,\n log_timestamp: Optional[str]=None, dagster_event: Optional[\n 'DagsterEvent']=None):\n return super().__new__(cls, orig_message=check.str_param(\n orig_message, 'orig_message'), log_message_id=check.\n opt_str_param(log_message_id, 'log_message_id', default=\n make_new_run_id()), log_timestamp=check.opt_str_param(\n log_timestamp, 'log_timestamp', default=datetime.datetime.\n utcnow().isoformat()), dagster_event=dagster_event)\n\n @property\n def error_str(self) ->Optional[str]:\n if self.dagster_event is None:\n return None\n event_specific_data = self.dagster_event.event_specific_data\n if not event_specific_data:\n return None\n error = getattr(event_specific_data, 'error', None)\n if error:\n return '\\n\\n' + getattr(event_specific_data,\n 'error_display_string', error.to_string())\n return None\n\n @property\n def pid(self) ->Optional[str]:\n if self.dagster_event is None or self.dagster_event.pid is None:\n return None\n return str(self.dagster_event.pid)\n\n @property\n def step_key(self) ->Optional[str]:\n if self.dagster_event is None:\n return None\n return self.dagster_event.step_key\n\n @property\n def event_type_value(self) ->Optional[str]:\n if self.dagster_event is None:\n return None\n return self.dagster_event.event_type_value\n\n\nclass DagsterLoggingMetadata(NamedTuple('_DagsterLoggingMetadata', [(\n 'run_id', Optional[str]), ('pipeline_name', Optional[str]), (\n 'pipeline_tags', Dict[str, str]), ('step_key', Optional[str]), (\n 'solid_name', Optional[str]), ('resource_name', Optional[str]), (\n 'resource_fn_name', Optional[str])])):\n 
\"\"\"Internal class used to represent the context in which a given message was logged (i.e. the\n step, pipeline run, resource, etc.)\n \"\"\"\n\n def __new__(cls, run_id: str=None, pipeline_name: str=None,\n pipeline_tags: Dict[str, str]=None, step_key: str=None, solid_name:\n str=None, resource_name: str=None, resource_fn_name: str=None):\n return super().__new__(cls, run_id=run_id, pipeline_name=\n pipeline_name, pipeline_tags=pipeline_tags or {}, step_key=\n step_key, solid_name=solid_name, resource_name=resource_name,\n resource_fn_name=resource_fn_name)\n\n @property\n def log_source(self):\n if self.resource_name is None:\n return self.pipeline_name or 'system'\n return f'resource:{self.resource_name}'\n\n def to_tags(self) ->Dict[str, str]:\n return {k: str(v) for k, v in self._asdict().items()}\n\n\ndef construct_log_string(logging_metadata: DagsterLoggingMetadata,\n message_props: DagsterMessageProps) ->str:\n return ' - '.join(filter(None, (logging_metadata.log_source,\n logging_metadata.run_id, message_props.pid, logging_metadata.\n step_key, message_props.event_type_value, message_props.orig_message))\n ) + (message_props.error_str or '')\n\n\nclass DagsterLogManager(logging.Logger):\n\n def __init__(self, logging_metadata: DagsterLoggingMetadata, loggers:\n List[logging.Logger], handlers: Optional[List[logging.Handler]]=None):\n self._logging_metadata = check.inst_param(logging_metadata,\n 'logging_metadata', DagsterLoggingMetadata)\n self._loggers = check.list_param(loggers, 'loggers', of_type=\n logging.Logger)\n super().__init__(name='dagster', level=logging.DEBUG)\n handlers = check.opt_list_param(handlers, 'handlers', of_type=\n logging.Handler)\n for handler in handlers:\n self.addHandler(handler)\n\n @property\n def logging_metadata(self) ->DagsterLoggingMetadata:\n return self._logging_metadata\n\n @property\n def loggers(self) ->List[logging.Logger]:\n return self._loggers\n\n def log_dagster_event(self, level: int, msg: str, 
dagster_event:\n 'DagsterEvent'):\n self.log(level=level, msg=msg, extra={DAGSTER_META_KEY: dagster_event})\n\n def log(self, level, msg, *args, **kwargs):\n super().log(coerce_valid_log_level(level), msg, *args, **kwargs)\n\n def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=\n False):\n extra = extra or {}\n dagster_message_props = DagsterMessageProps(orig_message=msg,\n dagster_event=extra.get(DAGSTER_META_KEY))\n msg = construct_log_string(self.logging_metadata, dagster_message_props\n )\n meta_dict = {**self.logging_metadata._asdict(), **\n dagster_message_props._asdict()}\n if meta_dict['step_key'] is None:\n meta_dict['step_key'] = dagster_message_props.step_key\n extra[DAGSTER_META_KEY] = meta_dict\n for logger in self._loggers:\n logger.log(level, msg, *args, extra=extra)\n super()._log(level, msg, args, exc_info=exc_info, extra=extra,\n stack_info=stack_info)\n\n def with_tags(self, **new_tags):\n \"\"\"Add new tags in \"new_tags\" to the set of tags attached to this log manager instance, and\n return a new DagsterLogManager with the merged set of tags.\n\n Args:\n tags (Dict[str,str]): Dictionary of tags\n\n Returns:\n DagsterLogManager: a new DagsterLogManager namedtuple with updated tags for the same\n run ID and loggers.\n \"\"\"\n return DagsterLogManager(logging_metadata=self.logging_metadata.\n _replace(**new_tags), loggers=self._loggers, handlers=self.handlers\n )\n",
"step-4": "<mask token>\nif TYPE_CHECKING:\n from dagster.core.events import DagsterEvent\n<mask token>\n\n\nclass DagsterMessageProps(NamedTuple('_DagsterMessageProps', [(\n 'orig_message', Optional[str]), ('log_message_id', Optional[str]), (\n 'log_timestamp', Optional[str]), ('dagster_event', Optional[Any])])):\n \"\"\"Internal class used to represent specific attributes about a logged message\"\"\"\n\n def __new__(cls, orig_message: str, log_message_id: Optional[str]=None,\n log_timestamp: Optional[str]=None, dagster_event: Optional[\n 'DagsterEvent']=None):\n return super().__new__(cls, orig_message=check.str_param(\n orig_message, 'orig_message'), log_message_id=check.\n opt_str_param(log_message_id, 'log_message_id', default=\n make_new_run_id()), log_timestamp=check.opt_str_param(\n log_timestamp, 'log_timestamp', default=datetime.datetime.\n utcnow().isoformat()), dagster_event=dagster_event)\n\n @property\n def error_str(self) ->Optional[str]:\n if self.dagster_event is None:\n return None\n event_specific_data = self.dagster_event.event_specific_data\n if not event_specific_data:\n return None\n error = getattr(event_specific_data, 'error', None)\n if error:\n return '\\n\\n' + getattr(event_specific_data,\n 'error_display_string', error.to_string())\n return None\n\n @property\n def pid(self) ->Optional[str]:\n if self.dagster_event is None or self.dagster_event.pid is None:\n return None\n return str(self.dagster_event.pid)\n\n @property\n def step_key(self) ->Optional[str]:\n if self.dagster_event is None:\n return None\n return self.dagster_event.step_key\n\n @property\n def event_type_value(self) ->Optional[str]:\n if self.dagster_event is None:\n return None\n return self.dagster_event.event_type_value\n\n\nclass DagsterLoggingMetadata(NamedTuple('_DagsterLoggingMetadata', [(\n 'run_id', Optional[str]), ('pipeline_name', Optional[str]), (\n 'pipeline_tags', Dict[str, str]), ('step_key', Optional[str]), (\n 'solid_name', Optional[str]), 
('resource_name', Optional[str]), (\n 'resource_fn_name', Optional[str])])):\n \"\"\"Internal class used to represent the context in which a given message was logged (i.e. the\n step, pipeline run, resource, etc.)\n \"\"\"\n\n def __new__(cls, run_id: str=None, pipeline_name: str=None,\n pipeline_tags: Dict[str, str]=None, step_key: str=None, solid_name:\n str=None, resource_name: str=None, resource_fn_name: str=None):\n return super().__new__(cls, run_id=run_id, pipeline_name=\n pipeline_name, pipeline_tags=pipeline_tags or {}, step_key=\n step_key, solid_name=solid_name, resource_name=resource_name,\n resource_fn_name=resource_fn_name)\n\n @property\n def log_source(self):\n if self.resource_name is None:\n return self.pipeline_name or 'system'\n return f'resource:{self.resource_name}'\n\n def to_tags(self) ->Dict[str, str]:\n return {k: str(v) for k, v in self._asdict().items()}\n\n\ndef construct_log_string(logging_metadata: DagsterLoggingMetadata,\n message_props: DagsterMessageProps) ->str:\n return ' - '.join(filter(None, (logging_metadata.log_source,\n logging_metadata.run_id, message_props.pid, logging_metadata.\n step_key, message_props.event_type_value, message_props.orig_message))\n ) + (message_props.error_str or '')\n\n\nclass DagsterLogManager(logging.Logger):\n\n def __init__(self, logging_metadata: DagsterLoggingMetadata, loggers:\n List[logging.Logger], handlers: Optional[List[logging.Handler]]=None):\n self._logging_metadata = check.inst_param(logging_metadata,\n 'logging_metadata', DagsterLoggingMetadata)\n self._loggers = check.list_param(loggers, 'loggers', of_type=\n logging.Logger)\n super().__init__(name='dagster', level=logging.DEBUG)\n handlers = check.opt_list_param(handlers, 'handlers', of_type=\n logging.Handler)\n for handler in handlers:\n self.addHandler(handler)\n\n @property\n def logging_metadata(self) ->DagsterLoggingMetadata:\n return self._logging_metadata\n\n @property\n def loggers(self) ->List[logging.Logger]:\n return 
self._loggers\n\n def log_dagster_event(self, level: int, msg: str, dagster_event:\n 'DagsterEvent'):\n self.log(level=level, msg=msg, extra={DAGSTER_META_KEY: dagster_event})\n\n def log(self, level, msg, *args, **kwargs):\n super().log(coerce_valid_log_level(level), msg, *args, **kwargs)\n\n def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=\n False):\n extra = extra or {}\n dagster_message_props = DagsterMessageProps(orig_message=msg,\n dagster_event=extra.get(DAGSTER_META_KEY))\n msg = construct_log_string(self.logging_metadata, dagster_message_props\n )\n meta_dict = {**self.logging_metadata._asdict(), **\n dagster_message_props._asdict()}\n if meta_dict['step_key'] is None:\n meta_dict['step_key'] = dagster_message_props.step_key\n extra[DAGSTER_META_KEY] = meta_dict\n for logger in self._loggers:\n logger.log(level, msg, *args, extra=extra)\n super()._log(level, msg, args, exc_info=exc_info, extra=extra,\n stack_info=stack_info)\n\n def with_tags(self, **new_tags):\n \"\"\"Add new tags in \"new_tags\" to the set of tags attached to this log manager instance, and\n return a new DagsterLogManager with the merged set of tags.\n\n Args:\n tags (Dict[str,str]): Dictionary of tags\n\n Returns:\n DagsterLogManager: a new DagsterLogManager namedtuple with updated tags for the same\n run ID and loggers.\n \"\"\"\n return DagsterLogManager(logging_metadata=self.logging_metadata.\n _replace(**new_tags), loggers=self._loggers, handlers=self.handlers\n )\n",
"step-5": "import datetime\nimport logging\nfrom typing import TYPE_CHECKING, Any, Dict, List, NamedTuple, Optional\n\nfrom dagster import check\nfrom dagster.core.utils import coerce_valid_log_level, make_new_run_id\n\nif TYPE_CHECKING:\n from dagster.core.events import DagsterEvent\n\nDAGSTER_META_KEY = \"dagster_meta\"\n\n\nclass DagsterMessageProps(\n NamedTuple(\n \"_DagsterMessageProps\",\n [\n (\"orig_message\", Optional[str]),\n (\"log_message_id\", Optional[str]),\n (\"log_timestamp\", Optional[str]),\n (\"dagster_event\", Optional[Any]),\n ],\n )\n):\n \"\"\"Internal class used to represent specific attributes about a logged message\"\"\"\n\n def __new__(\n cls,\n orig_message: str,\n log_message_id: Optional[str] = None,\n log_timestamp: Optional[str] = None,\n dagster_event: Optional[\"DagsterEvent\"] = None,\n ):\n return super().__new__(\n cls,\n orig_message=check.str_param(orig_message, \"orig_message\"),\n log_message_id=check.opt_str_param(\n log_message_id, \"log_message_id\", default=make_new_run_id()\n ),\n log_timestamp=check.opt_str_param(\n log_timestamp, \"log_timestamp\", default=datetime.datetime.utcnow().isoformat()\n ),\n dagster_event=dagster_event,\n )\n\n @property\n def error_str(self) -> Optional[str]:\n if self.dagster_event is None:\n return None\n\n event_specific_data = self.dagster_event.event_specific_data\n if not event_specific_data:\n return None\n\n error = getattr(event_specific_data, \"error\", None)\n if error:\n return \"\\n\\n\" + getattr(event_specific_data, \"error_display_string\", error.to_string())\n return None\n\n @property\n def pid(self) -> Optional[str]:\n if self.dagster_event is None or self.dagster_event.pid is None:\n return None\n return str(self.dagster_event.pid)\n\n @property\n def step_key(self) -> Optional[str]:\n if self.dagster_event is None:\n return None\n return self.dagster_event.step_key\n\n @property\n def event_type_value(self) -> Optional[str]:\n if self.dagster_event is None:\n return 
None\n return self.dagster_event.event_type_value\n\n\nclass DagsterLoggingMetadata(\n NamedTuple(\n \"_DagsterLoggingMetadata\",\n [\n (\"run_id\", Optional[str]),\n (\"pipeline_name\", Optional[str]),\n (\"pipeline_tags\", Dict[str, str]),\n (\"step_key\", Optional[str]),\n (\"solid_name\", Optional[str]),\n (\"resource_name\", Optional[str]),\n (\"resource_fn_name\", Optional[str]),\n ],\n )\n):\n \"\"\"Internal class used to represent the context in which a given message was logged (i.e. the\n step, pipeline run, resource, etc.)\n \"\"\"\n\n def __new__(\n cls,\n run_id: str = None,\n pipeline_name: str = None,\n pipeline_tags: Dict[str, str] = None,\n step_key: str = None,\n solid_name: str = None,\n resource_name: str = None,\n resource_fn_name: str = None,\n ):\n return super().__new__(\n cls,\n run_id=run_id,\n pipeline_name=pipeline_name,\n pipeline_tags=pipeline_tags or {},\n step_key=step_key,\n solid_name=solid_name,\n resource_name=resource_name,\n resource_fn_name=resource_fn_name,\n )\n\n @property\n def log_source(self):\n if self.resource_name is None:\n return self.pipeline_name or \"system\"\n return f\"resource:{self.resource_name}\"\n\n def to_tags(self) -> Dict[str, str]:\n # converts all values into strings\n return {k: str(v) for k, v in self._asdict().items()}\n\n\ndef construct_log_string(\n logging_metadata: DagsterLoggingMetadata, message_props: DagsterMessageProps\n) -> str:\n\n return (\n \" - \".join(\n filter(\n None,\n (\n logging_metadata.log_source,\n logging_metadata.run_id,\n message_props.pid,\n logging_metadata.step_key,\n message_props.event_type_value,\n message_props.orig_message,\n ),\n )\n )\n + (message_props.error_str or \"\")\n )\n\n\nclass DagsterLogManager(logging.Logger):\n def __init__(\n self,\n logging_metadata: DagsterLoggingMetadata,\n loggers: List[logging.Logger],\n handlers: Optional[List[logging.Handler]] = None,\n ):\n self._logging_metadata = check.inst_param(\n logging_metadata, \"logging_metadata\", 
DagsterLoggingMetadata\n )\n self._loggers = check.list_param(loggers, \"loggers\", of_type=logging.Logger)\n\n super().__init__(name=\"dagster\", level=logging.DEBUG)\n\n handlers = check.opt_list_param(handlers, \"handlers\", of_type=logging.Handler)\n for handler in handlers:\n self.addHandler(handler)\n\n @property\n def logging_metadata(self) -> DagsterLoggingMetadata:\n return self._logging_metadata\n\n @property\n def loggers(self) -> List[logging.Logger]:\n return self._loggers\n\n def log_dagster_event(self, level: int, msg: str, dagster_event: \"DagsterEvent\"):\n self.log(level=level, msg=msg, extra={DAGSTER_META_KEY: dagster_event})\n\n def log(self, level, msg, *args, **kwargs):\n # allow for string level names\n super().log(coerce_valid_log_level(level), msg, *args, **kwargs)\n\n def _log(\n self, level, msg, args, exc_info=None, extra=None, stack_info=False\n ): # pylint: disable=arguments-differ\n\n # we stash dagster meta information in the extra field\n extra = extra or {}\n\n dagster_message_props = DagsterMessageProps(\n orig_message=msg, dagster_event=extra.get(DAGSTER_META_KEY)\n )\n\n # convert the message to our preferred format\n msg = construct_log_string(self.logging_metadata, dagster_message_props)\n\n # combine all dagster meta information into a single dictionary\n meta_dict = {\n **self.logging_metadata._asdict(),\n **dagster_message_props._asdict(),\n }\n # step-level events can be logged from a pipeline context. 
for these cases, pull the step\n # key from the underlying DagsterEvent\n if meta_dict[\"step_key\"] is None:\n meta_dict[\"step_key\"] = dagster_message_props.step_key\n\n extra[DAGSTER_META_KEY] = meta_dict\n\n for logger in self._loggers:\n logger.log(level, msg, *args, extra=extra)\n\n super()._log(level, msg, args, exc_info=exc_info, extra=extra, stack_info=stack_info)\n\n def with_tags(self, **new_tags):\n \"\"\"Add new tags in \"new_tags\" to the set of tags attached to this log manager instance, and\n return a new DagsterLogManager with the merged set of tags.\n\n Args:\n tags (Dict[str,str]): Dictionary of tags\n\n Returns:\n DagsterLogManager: a new DagsterLogManager namedtuple with updated tags for the same\n run ID and loggers.\n \"\"\"\n return DagsterLogManager(\n logging_metadata=self.logging_metadata._replace(**new_tags),\n loggers=self._loggers,\n handlers=self.handlers,\n )\n",
"step-ids": [
16,
18,
21,
22,
25
]
}
|
[
16,
18,
21,
22,
25
] |
## Filename: name.py
# Author: Marcelo Feitoza Parisi
#
# Description: Report the objects
# on the bucket sorted by name.
#
# ###########################
# # DISCLAIMER - IMPORTANT! #
# ###########################
#
# Stuff found here was built as a
# Proof-Of-Concept or Study material
# and should not be considered
# production ready!
#
# USE WITH CARE!
##
from lib import byte
from google.cloud import storage
from prettytable import PrettyTable
def exec(bucket_id, project_id, reverse_opt):
    """Print a table of every object in *bucket_id*, sorted by object name.

    Args:
        bucket_id: Name of the GCS bucket to list.
        project_id: Project billed for the request (requester-pays buckets).
        reverse_opt: If True, sort names in descending order.
    """
    # Google Cloud Storage Client
    client = storage.Client()
    bucket = client.bucket(bucket_id, user_project=project_id)
    blobs = bucket.list_blobs()

    # Will hold our local list of objects
    blob_list = []

    try:
        for blob in blobs:
            # For each object we'll save name, owner, class, size and date,
            # trimming sub-second precision and UTC offset from the date.
            this_blob = { 'name': blob.name,
                          'owner': blob.owner,
                          'class': blob.storage_class,
                          'size' : blob.size,
                          'date' : str(blob.updated).split('.')[0].split('+')[0]
                        }
            # Append object to our list
            blob_list.append(this_blob)
    except Exception as e:
        print(e)
        exit(1)

    # BUG FIX: sort by each entry's own 'name' key. The previous key
    # (lambda k: blob.name) closed over the loop variable `blob`, so every
    # element produced the same constant key and the list was never sorted.
    sorted_list = sorted(blob_list, key=lambda entry: entry['name'],
                         reverse=reverse_opt)

    # Generating our PrettyTable
    report_table = PrettyTable()
    report_table.field_names = ["NAME", "OWNER", "CLASS", "SIZE", "DATE"]
    report_table.align["NAME"] = "l"
    report_table.align["SIZE"] = "r"
    report_table.align["DATE"] = "r"
    for blob in sorted_list:
        report_table.add_row([blob['name'], blob['owner'], blob['class'],
                              str(byte.convert_size(blob['size'])), blob['date']])

    print(report_table)
|
normal
|
{
"blob_id": "562b2c3567e42699cfd0804a5780af7ede142e13",
"index": 1056,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef exec(bucket_id, project_id, reverse_opt):\n client = storage.Client()\n bucket = client.bucket(bucket_id, user_project=project_id)\n blobs = bucket.list_blobs()\n blob_list = []\n try:\n for blob in blobs:\n this_blob = {'name': blob.name, 'owner': blob.owner, 'class':\n blob.storage_class, 'size': blob.size, 'date': str(blob.\n updated).split('.')[0].split('+')[0]}\n blob_list.append(this_blob)\n except Exception as e:\n print(e)\n exit(1)\n sorted_list = sorted(blob_list, key=lambda k: blob.name, reverse=\n reverse_opt)\n report_table = PrettyTable()\n report_table.field_names = ['NAME', 'OWNER', 'CLASS', 'SIZE', 'DATE']\n report_table.align['NAME'] = 'l'\n report_table.align['SIZE'] = 'r'\n report_table.align['DATE'] = 'r'\n for blob in sorted_list:\n report_table.add_row([blob['name'], blob['owner'], blob['class'],\n str(byte.convert_size(blob['size'])), blob['date']])\n print(report_table)\n",
"step-3": "from lib import byte\nfrom google.cloud import storage\nfrom prettytable import PrettyTable\n\n\ndef exec(bucket_id, project_id, reverse_opt):\n client = storage.Client()\n bucket = client.bucket(bucket_id, user_project=project_id)\n blobs = bucket.list_blobs()\n blob_list = []\n try:\n for blob in blobs:\n this_blob = {'name': blob.name, 'owner': blob.owner, 'class':\n blob.storage_class, 'size': blob.size, 'date': str(blob.\n updated).split('.')[0].split('+')[0]}\n blob_list.append(this_blob)\n except Exception as e:\n print(e)\n exit(1)\n sorted_list = sorted(blob_list, key=lambda k: blob.name, reverse=\n reverse_opt)\n report_table = PrettyTable()\n report_table.field_names = ['NAME', 'OWNER', 'CLASS', 'SIZE', 'DATE']\n report_table.align['NAME'] = 'l'\n report_table.align['SIZE'] = 'r'\n report_table.align['DATE'] = 'r'\n for blob in sorted_list:\n report_table.add_row([blob['name'], blob['owner'], blob['class'],\n str(byte.convert_size(blob['size'])), blob['date']])\n print(report_table)\n",
"step-4": "## Filename: name.py\n # Author: Marcelo Feitoza Parisi\n # \n # Description: Report the objects\n # on the bucket sorted by name.\n # \n # ###########################\n # # DISCLAIMER - IMPORTANT! #\n # ###########################\n # \n # Stuff found here was built as a\n # Proof-Of-Concept or Study material\n # and should not be considered\n # production ready!\n # \n # USE WITH CARE!\n##\nfrom lib import byte\nfrom google.cloud import storage\nfrom prettytable import PrettyTable\n\ndef exec(bucket_id, project_id, reverse_opt):\n\n # Google Cloud Storage Client\n client = storage.Client()\n bucket = client.bucket(bucket_id, user_project=project_id)\n blobs = bucket.list_blobs()\n\n # Will hold our local list of objects\n blob_list = []\n\n try: \n for blob in blobs:\n # For each object we'll save name, owner, class, size and date\n this_blob = { 'name': blob.name,\n 'owner': blob.owner,\n 'class': blob.storage_class,\n 'size' : blob.size,\n 'date' : str(blob.updated).split('.')[0].split('+')[0]\n }\n # Append object to our list\n blob_list.append(this_blob)\n except Exception as e:\n print(e)\n exit(1)\n\n # Sort our object list by name using our reverse_opt\n sorted_list = sorted(blob_list, key=lambda k: blob.name, reverse=reverse_opt)\n\n # Generating our PrettyTable\n report_table = PrettyTable()\n report_table.field_names = [\"NAME\", \"OWNER\", \"CLASS\", \"SIZE\", \"DATE\"]\n report_table.align[\"NAME\"] = \"l\"\n report_table.align[\"SIZE\"] = \"r\"\n report_table.align[\"DATE\"] = \"r\"\n for blob in sorted_list:\n report_table.add_row([blob['name'], blob['owner'], blob['class'], str(byte.convert_size(blob['size'])), blob['date']])\n\n print(report_table)\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 18 13:37:10 2018
@author: ninja1mmm
"""
import os
import numpy as np
import pandas as pd
from sklearn import preprocessing
def file_name(file_dir):
    """Walk *file_dir* and collect the os.walk output into three
    parallel lists: roots, per-root directory lists, per-root file lists."""
    roots, dir_lists, file_lists = [], [], []
    for walk_entry in os.walk(file_dir):
        roots.append(walk_entry[0])
        dir_lists.append(walk_entry[1])
        file_lists.append(walk_entry[2])
    return roots, dir_lists, file_lists
# Root folder holding the raw pickled sensor data.
root = '/home/ninja1mmm/Desktop/phm/data'
root_tmp, dirs_tmp, files_tmp = file_name(root)

# combined_all maps a cycle's last timestamp -> that cycle's DataFrame.
combined_all = {}
# One feature row per wafer-processing cycle.
feature_all = pd.DataFrame(columns=[
    'mean', 'std', 'root amplitude', 'rms', 'max', 'skewness',
    'kurtosis', 'peak factor', 'margin', 'waveform', 'pulse',
    'start_time', 'end_time', 'recipe', 'stage', 'Lot'])

# Read the first data file only (iterate files_tmp[2] to process all files).
file_tmp = files_tmp[2][0]
path_tmp = root_tmp[2] + '/' + file_tmp
df = pd.read_pickle(path_tmp)

# Crucial step: drop rows containing inf/NaN before any scaling.
df = df.replace([np.inf, -np.inf], np.nan).dropna()
df = df.reset_index(drop=True)
df_scaler = preprocessing.MinMaxScaler(feature_range=(0, 1))
#------------------------------------------------------------------------------
def _cycle_features(feature_tmp):
    """Compute 11 time-domain statistics of one min-max-scaled cycle."""
    t1 = np.mean(feature_tmp)                          # mean
    t2 = np.std(feature_tmp)                           # standard deviation
    t3 = np.mean(np.sqrt(np.abs(feature_tmp))) ** 2    # root amplitude
    t4 = np.sqrt(np.mean(feature_tmp ** 2))            # RMS
    t5 = np.max(feature_tmp)                           # max
    n = len(feature_tmp)
    t6 = np.sum((feature_tmp - t1) ** 3) / ((n - 1) * (t2 ** 3))   # skewness
    t7 = np.sum((feature_tmp - t1) ** 4) / ((n - 1) * (t2 ** 4))   # kurtosis
    t8 = t5 / t4                                       # peak factor
    t9 = t5 / t3                                       # margin
    mean_abs = np.sum(np.abs(feature_tmp)) / n
    t10 = t4 / mean_abs                                # waveform
    t11 = t5 / mean_abs                                # pulse
    return [t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11]


def _flush_cycle(rows, lot):
    """Store a finished cycle in combined_all and return its feature row.

    Keyed by the cycle's last timestamp; columns 0-6 are metadata and are
    excluded before re-scaling within the cycle.
    """
    cycle = pd.concat(rows, axis=1).T
    combined_all[cycle.time.iloc[-1]] = cycle
    scaled = df_scaler.fit_transform(cycle.iloc[:, 7:])
    return _cycle_features(scaled) + [cycle.time.iloc[0],
                                      cycle.time.iloc[-1],
                                      cycle.recipe.iloc[0],
                                      cycle.stage.iloc[0], lot]


lot_list = list(set(df.Lot))
# Warn if a Lot in this file was already recorded from another file.
for key in lot_list:
    if key in combined_all:
        print('The Lot %d in %s already existed in %s' % (key, file_tmp,
                                                          combined_all[key]))

# Split the frame into consecutive runs sharing the same Lot value.
list_tmp = []
lot_last = df.Lot[0]
counter = 0
idx = 0
for row_tmp in df.index:
    row = df.iloc[row_tmp, :]
    lot_tmp = row.Lot
    if lot_tmp == lot_last:
        list_tmp.append(row)
        counter += 1
    else:
        feature_all.loc[idx, :] = _flush_cycle(list_tmp, lot_last)
        # Bug fix: keep the row that starts the new lot (it used to be dropped).
        list_tmp = [row]
        idx += 1
        counter = 1
        lot_last = lot_tmp
        print(row_tmp)

# Bug fix: flush the final lot, which the loop above never emitted.
if list_tmp:
    feature_all.loc[idx, :] = _flush_cycle(list_tmp, lot_last)
    idx += 1
#------------------------------------------------------------------------------
|
normal
|
{
"blob_id": "96d5cf948a9b0f622889977e8b26993299bceead",
"index": 770,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef file_name(file_dir):\n root_tmp = []\n dirs_tmp = []\n files_tmp = []\n for root, dirs, files in os.walk(file_dir):\n root_tmp.append(root)\n dirs_tmp.append(dirs)\n files_tmp.append(files)\n return root_tmp, dirs_tmp, files_tmp\n\n\n<mask token>\nfor key in lot_list:\n if key in combined_all.keys():\n print('The Lot %d in %s already existed in %s' % (key, file_tmp,\n combined_all[key]))\n<mask token>\nfor row_tmp in df.index:\n lot_tmp = df.iloc[row_tmp, :].Lot\n if lot_tmp == lot_last:\n list_tmp.append(df.iloc[row_tmp, :])\n counter += 1\n else:\n df_tmp = pd.concat(list_tmp, axis=1)\n combined_all[df_tmp.T.time.iloc[-1]] = df_tmp.T\n feature_tmp = df_tmp.T.iloc[:, 7:]\n feature_tmp = df_scaler.fit_transform(feature_tmp)\n t1 = np.mean(feature_tmp)\n t2 = np.std(feature_tmp)\n t3 = np.mean(np.sqrt(np.abs(feature_tmp))) ** 2\n t4 = np.sqrt(np.mean(feature_tmp ** 2))\n t5 = np.max(feature_tmp)\n t6 = np.sum((feature_tmp - t1) ** 3) / ((len(feature_tmp) - 1) * t2 **\n 3)\n t7 = np.sum((feature_tmp - t1) ** 4) / ((len(feature_tmp) - 1) * t2 **\n 4)\n t8 = t5 / t4\n t9 = t5 / t3\n t10 = t4 / (np.sum(np.abs(feature_tmp)) / len(feature_tmp))\n t11 = t5 / (np.sum(np.abs(feature_tmp)) / len(feature_tmp))\n feature_all.loc[idx, :] = [t1, t2, t3, t4, t5, t6, t7, t8, t9, t10,\n t11, df_tmp.T.time.iloc[0], df_tmp.T.time.iloc[-1], df_tmp.T.\n recipe.iloc[0], df_tmp.T.stage.iloc[0], lot_last]\n list_tmp = []\n idx += 1\n counter = 0\n lot_last = lot_tmp\n print(row_tmp)\n",
"step-3": "<mask token>\n\n\ndef file_name(file_dir):\n root_tmp = []\n dirs_tmp = []\n files_tmp = []\n for root, dirs, files in os.walk(file_dir):\n root_tmp.append(root)\n dirs_tmp.append(dirs)\n files_tmp.append(files)\n return root_tmp, dirs_tmp, files_tmp\n\n\nroot = '/home/ninja1mmm/Desktop/phm/data'\nroot_tmp, dirs_tmp, files_tmp = file_name(root)\ncombined_all = {}\nfeature_all = pd.DataFrame(columns=['mean', 'std', 'root amplitude', 'rms',\n 'max', 'skewness', 'kurtosis', 'peak factor', 'margin', 'waveform',\n 'pulse', 'start_time', 'end_time', 'recipe', 'stage', 'Lot'])\nfile_tmp = files_tmp[2][0]\npath_tmp = root_tmp[2] + '/' + file_tmp\ndf = pd.read_pickle(path_tmp)\ndf = df.replace([np.inf, -np.inf], np.nan).dropna()\ndf = df.reset_index(drop=True)\ndf_scaler = preprocessing.MinMaxScaler(feature_range=(0, 1))\nlot_list = list(set(df.Lot))\nfor key in lot_list:\n if key in combined_all.keys():\n print('The Lot %d in %s already existed in %s' % (key, file_tmp,\n combined_all[key]))\nlist_tmp = []\nlot_last = df.Lot[0]\ncounter = 0\nidx = 0\nfor row_tmp in df.index:\n lot_tmp = df.iloc[row_tmp, :].Lot\n if lot_tmp == lot_last:\n list_tmp.append(df.iloc[row_tmp, :])\n counter += 1\n else:\n df_tmp = pd.concat(list_tmp, axis=1)\n combined_all[df_tmp.T.time.iloc[-1]] = df_tmp.T\n feature_tmp = df_tmp.T.iloc[:, 7:]\n feature_tmp = df_scaler.fit_transform(feature_tmp)\n t1 = np.mean(feature_tmp)\n t2 = np.std(feature_tmp)\n t3 = np.mean(np.sqrt(np.abs(feature_tmp))) ** 2\n t4 = np.sqrt(np.mean(feature_tmp ** 2))\n t5 = np.max(feature_tmp)\n t6 = np.sum((feature_tmp - t1) ** 3) / ((len(feature_tmp) - 1) * t2 **\n 3)\n t7 = np.sum((feature_tmp - t1) ** 4) / ((len(feature_tmp) - 1) * t2 **\n 4)\n t8 = t5 / t4\n t9 = t5 / t3\n t10 = t4 / (np.sum(np.abs(feature_tmp)) / len(feature_tmp))\n t11 = t5 / (np.sum(np.abs(feature_tmp)) / len(feature_tmp))\n feature_all.loc[idx, :] = [t1, t2, t3, t4, t5, t6, t7, t8, t9, t10,\n t11, df_tmp.T.time.iloc[0], 
df_tmp.T.time.iloc[-1], df_tmp.T.\n recipe.iloc[0], df_tmp.T.stage.iloc[0], lot_last]\n list_tmp = []\n idx += 1\n counter = 0\n lot_last = lot_tmp\n print(row_tmp)\n",
"step-4": "<mask token>\nimport os\nimport numpy as np\nimport pandas as pd\nfrom sklearn import preprocessing\n\n\ndef file_name(file_dir):\n root_tmp = []\n dirs_tmp = []\n files_tmp = []\n for root, dirs, files in os.walk(file_dir):\n root_tmp.append(root)\n dirs_tmp.append(dirs)\n files_tmp.append(files)\n return root_tmp, dirs_tmp, files_tmp\n\n\nroot = '/home/ninja1mmm/Desktop/phm/data'\nroot_tmp, dirs_tmp, files_tmp = file_name(root)\ncombined_all = {}\nfeature_all = pd.DataFrame(columns=['mean', 'std', 'root amplitude', 'rms',\n 'max', 'skewness', 'kurtosis', 'peak factor', 'margin', 'waveform',\n 'pulse', 'start_time', 'end_time', 'recipe', 'stage', 'Lot'])\nfile_tmp = files_tmp[2][0]\npath_tmp = root_tmp[2] + '/' + file_tmp\ndf = pd.read_pickle(path_tmp)\ndf = df.replace([np.inf, -np.inf], np.nan).dropna()\ndf = df.reset_index(drop=True)\ndf_scaler = preprocessing.MinMaxScaler(feature_range=(0, 1))\nlot_list = list(set(df.Lot))\nfor key in lot_list:\n if key in combined_all.keys():\n print('The Lot %d in %s already existed in %s' % (key, file_tmp,\n combined_all[key]))\nlist_tmp = []\nlot_last = df.Lot[0]\ncounter = 0\nidx = 0\nfor row_tmp in df.index:\n lot_tmp = df.iloc[row_tmp, :].Lot\n if lot_tmp == lot_last:\n list_tmp.append(df.iloc[row_tmp, :])\n counter += 1\n else:\n df_tmp = pd.concat(list_tmp, axis=1)\n combined_all[df_tmp.T.time.iloc[-1]] = df_tmp.T\n feature_tmp = df_tmp.T.iloc[:, 7:]\n feature_tmp = df_scaler.fit_transform(feature_tmp)\n t1 = np.mean(feature_tmp)\n t2 = np.std(feature_tmp)\n t3 = np.mean(np.sqrt(np.abs(feature_tmp))) ** 2\n t4 = np.sqrt(np.mean(feature_tmp ** 2))\n t5 = np.max(feature_tmp)\n t6 = np.sum((feature_tmp - t1) ** 3) / ((len(feature_tmp) - 1) * t2 **\n 3)\n t7 = np.sum((feature_tmp - t1) ** 4) / ((len(feature_tmp) - 1) * t2 **\n 4)\n t8 = t5 / t4\n t9 = t5 / t3\n t10 = t4 / (np.sum(np.abs(feature_tmp)) / len(feature_tmp))\n t11 = t5 / (np.sum(np.abs(feature_tmp)) / len(feature_tmp))\n feature_all.loc[idx, :] = 
[t1, t2, t3, t4, t5, t6, t7, t8, t9, t10,\n t11, df_tmp.T.time.iloc[0], df_tmp.T.time.iloc[-1], df_tmp.T.\n recipe.iloc[0], df_tmp.T.stage.iloc[0], lot_last]\n list_tmp = []\n idx += 1\n counter = 0\n lot_last = lot_tmp\n print(row_tmp)\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jul 18 13:37:10 2018\n\n@author: ninja1mmm\n\"\"\"\nimport os\nimport numpy as np\nimport pandas as pd\nfrom sklearn import preprocessing\n\ndef file_name(file_dir): \n root_tmp=[]\n dirs_tmp=[]\n files_tmp=[]\n for root, dirs, files in os.walk(file_dir): \n root_tmp.append(root)\n dirs_tmp.append(dirs)\n files_tmp.append(files)\n return root_tmp, dirs_tmp, files_tmp\n \n \nroot = '/home/ninja1mmm/Desktop/phm/data'\nroot_tmp, dirs_tmp, files_tmp = file_name(root)\n\ncombined_all = {}\nfeature_all = pd.DataFrame(columns = ['mean', 'std','root amplitude',\n 'rms','max','skewness','kurtosis',\n 'peak factor','margin','waveform',\n 'pulse','start_time', 'end_time',\n 'recipe', 'stage', 'Lot'])\n#df_check = pd.DataFrame()\n\n# read the first file to test here\nfile_tmp = files_tmp[2][0]\n# iterate through the files if needed\n#for file_tmp in files_tmp[2]:\n \npath_tmp = root_tmp[2]+'/'+file_tmp\ndf = pd.read_pickle(path_tmp)\n#df_tmp = df[df['Lot']==28113]\n#if len(df_tmp)>0:\n# df_tmp = df_tmp.iloc[0,:]\n# df_check = df_check.append(df_tmp)\n #------------------------------------------------------------------------------\n# Crucial step\ndf=df.replace([np.inf, -np.inf], np.nan).dropna()\ndf=df.reset_index(drop=True)\ndf_scaler = preprocessing.MinMaxScaler(feature_range = (0,1))\n\n#------------------------------------------------------------------------------\n \n\n\n\nlot_list = list(set(df.Lot))\n# Check if Lot already existed\nfor key in lot_list:\n if key in combined_all.keys():\n print('The Lot %d in %s already existed in %s' % (key, file_tmp, \n combined_all[key]))\n \n# for tmp in lot_list:\n# combined_all[tmp] = file_tmp\n# Select and save all the wafer processing cycles\nlist_tmp = []\nlot_last = df.Lot[0]\ncounter = 0\nidx = 0\n# Specify the range. 
Here set to 100000 for the ease of test\nfor row_tmp in df.index:\n lot_tmp = df.iloc[row_tmp,:].Lot\n if lot_tmp == lot_last:\n list_tmp.append(df.iloc[row_tmp,:])\n counter += 1\n else:\n df_tmp = pd.concat(list_tmp, axis = 1)\n # lot_last serves as the key, can be changed \n# combined_all[lot_last] = df_tmp.T\n combined_all[df_tmp.T.time.iloc[-1]] = df_tmp.T\n # Calculate mean and save in feature dictionary as an example\n # Normalize the data again because for some parameters we need the local (within cycle) feature\n feature_tmp = df_tmp.T.iloc[:,7:] # Not a correct way, because shutter position also need to be excluded\n feature_tmp = df_scaler.fit_transform(feature_tmp)\n# ------------------------------------------------------------------\n # Add features here. Remember to add new columns when initialzing df\n t1 = np.mean(feature_tmp) \n t2 = np.std(feature_tmp)\n t3 = np.mean(np.sqrt(np.abs(feature_tmp)))**2\n t4 = np.sqrt(np.mean(feature_tmp**2))\n t5 = np.max(feature_tmp)\n t6 = np.sum((feature_tmp-t1)**3)/((len(feature_tmp)-1)*(t2**3))\n t7 = np.sum((feature_tmp-t1)**4)/((len(feature_tmp)-1)*(t2**4))\n t8 = t5/t4\n t9 = t5/t3\n t10 = t4/(np.sum(np.abs(feature_tmp))/len(feature_tmp))\n t11 = t5/(np.sum(np.abs(feature_tmp))/(len(feature_tmp)))\n # Newly added\n \n \n # First order difference\n \n# ---------------------------------------------------------------------\n feature_all.loc[idx,:] = [t1,t2,t3,t4,t5,t6,t7,t8,t9,t10,t11,\n df_tmp.T.time.iloc[0],df_tmp.T.time.iloc[-1],\n df_tmp.T.recipe.iloc[0],df_tmp.T.stage.iloc[0],\n lot_last]\n \n list_tmp = []\n idx += 1\n counter = 0\n lot_last = lot_tmp\n print(row_tmp)\n \n \n \n#------------------------------------------------------------------------------\n\n\n\n\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
# Training hyper-parameters.
learningRateBase = 0.001  # initial learning rate
learningRateDecreaseStep = 80  # step interval for LR decay -- presumably; confirm against the trainer
epochNum = 100  # total training epochs
generateNum = 3  # number of samples to generate
batchSize = 16  # mini-batch size

# Data / model paths.
trainPoems = './data/poems.txt'  # training corpus
checkpointsPath = './model/'  # checkpoint output directory
<|reserved_special_token_1|>
learningRateBase = 0.001
learningRateDecreaseStep = 80
epochNum = 100
generateNum = 3
batchSize = 16
trainPoems = "./data/poems.txt"
checkpointsPath = "./model/"
|
flexible
|
{
"blob_id": "2fb299f5454c251dc1c77c2597ee23bf414c716e",
"index": 4845,
"step-1": "<mask token>\n",
"step-2": "learningRateBase = 0.001\nlearningRateDecreaseStep = 80\nepochNum = 100\ngenerateNum = 3\nbatchSize = 16\ntrainPoems = './data/poems.txt'\ncheckpointsPath = './model/'\n",
"step-3": "learningRateBase = 0.001\nlearningRateDecreaseStep = 80\nepochNum = 100\ngenerateNum = 3\nbatchSize = 16\n\ntrainPoems = \"./data/poems.txt\"\ncheckpointsPath = \"./model/\"",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# coding=utf-8
import functools
import re
import traceback

from pesto_common.config.configer import Configer
from pesto_common.log.logger_factory import LoggerFactory
from pesto_orm.core.base import db_config
from pesto_orm.core.executor import ExecutorFactory
from pesto_orm.core.model import BaseModel
from pesto_orm.core.repository import BaseRepository
from pesto_orm.dialect.base import DefaultDialect
logger = LoggerFactory.get_logger('dialect.mysql.domain')
class MySQLDialect(DefaultDialect):
    """MySQL flavour of the SQL dialect: reports its type and appends
    LIMIT/OFFSET pagination clauses."""

    def get_db_type(self):
        """Identify this dialect as MySQL."""
        return 'mysql'

    def paginate_with(self, sql, page_number, page_size):
        """Append a LIMIT/OFFSET clause for the requested page.

        A single-row request (page 1, size 1) whose SQL already matches
        the dialect's single-select pattern is returned untouched.
        """
        single_row = page_number == 1 and page_size == 1
        if single_row and re.match(DefaultDialect.select_single_pattern,
                                   sql) is not None:
            return sql
        offset = (page_number - 1) * page_size
        return '%s LIMIT %d OFFSET %d' % (sql, page_size, offset)
# Wire up the MySQL executor/dialect only when the app is configured for it.
db_type = Configer.get('db.type')
if db_type == 'mysql':
    import mysql.connector as connector
    # Hand the driver module to the shared db_config; use_pure selects the
    # pure-Python protocol implementation of mysql-connector.
    db_config['target'] = connector
    db_config['use_pure'] = True
    from mysql.connector.conversion import MySQLConverter

    class NumpyMySQLConverter(MySQLConverter):
        ''' A mysql.connector Converter that handles Numpy types '''

        # NumPy scalars are not understood by the driver; coerce them to
        # builtin float/int before they are sent to the server.
        def _float32_to_mysql(self, value):
            return float(value)

        def _float64_to_mysql(self, value):
            return float(value)

        def _int32_to_mysql(self, value):
            return int(value)

        def _int64_to_mysql(self, value):
            return int(value)
    # Install the converter, then build the module-wide executor + dialect
    # that MysqlBaseModel / MysqlBaseRepository hand out.
    db_config['converter_class'] = NumpyMySQLConverter
    mysqlExecutor = ExecutorFactory.get_executor(db_config=db_config)
    mysqlDialect = MySQLDialect()
class MysqlBaseModel(BaseModel):
    """Model base class wired to the module-wide MySQL dialect/executor."""

    def __init__(self, db_name=None, table_name=None, table_alias=None,
                 primary_key='id'):
        super(MysqlBaseModel, self).__init__(db_name, table_name,
                                             table_alias, primary_key)

    def get_dialect(self):
        """Shared MySQL dialect instance."""
        return mysqlDialect

    def get_executor(self):
        """Shared MySQL executor instance."""
        return mysqlExecutor
class MysqlBaseRepository(BaseRepository):
    """Repository base class wired to the module-wide MySQL dialect/executor."""

    def __init__(self, model_class=None):
        super(MysqlBaseRepository, self).__init__(model_class)

    def get_dialect(self):
        """Shared MySQL dialect instance."""
        return mysqlDialect

    def get_executor(self):
        """Shared MySQL executor instance."""
        return mysqlExecutor
def transaction(rollback_exceptions=None):
    """Decorator running the wrapped function inside a DB transaction.

    Commits on success. On an exception, rolls back when the exception's
    class is listed in *rollback_exceptions* (an empty/None list means
    every exception rolls back); otherwise commits anyway. The exception
    is always re-raised and the transaction is always closed.

    Bug fix: the wrapped function's return value is now propagated to the
    caller (it used to be collected into a local list and discarded).

    :param rollback_exceptions: exception classes that trigger a rollback;
        None or [] means any exception does.
    """
    rollback_exceptions = rollback_exceptions or []

    def wrap(func):
        @functools.wraps(func)  # preserve the wrapped function's identity
        def to_do(*args, **kwargs):
            try:
                mysqlExecutor.begin_transaction()
                return_value = func(*args, **kwargs)
                logger.info('Transaction method: ' + func.__name__)
                mysqlExecutor.commit_transaction()
            except Exception as e:
                if len(rollback_exceptions) == 0 or e.__class__ in rollback_exceptions:
                    mysqlExecutor.rollback_transaction()
                    logger.error('Method execute error. method: ' + str(func.__name__) + ', error:' + traceback.format_exc() + ', transaction roll back.')
                else:
                    # Exception not marked for rollback: keep the work done so far.
                    mysqlExecutor.commit_transaction()
                raise  # bare raise preserves the original traceback
            finally:
                mysqlExecutor.close_transaction()
            return return_value

        return to_do

    return wrap
|
normal
|
{
"blob_id": "a68de7555fdab06014fd562e7db29ca2da03f443",
"index": 8240,
"step-1": "<mask token>\n\n\nclass MysqlBaseModel(BaseModel):\n\n def __init__(self, db_name=None, table_name=None, table_alias=None,\n primary_key='id'):\n super(MysqlBaseModel, self).__init__(db_name, table_name,\n table_alias, primary_key)\n <mask token>\n\n def get_executor(self):\n return mysqlExecutor\n\n\nclass MysqlBaseRepository(BaseRepository):\n\n def __init__(self, model_class=None):\n super(MysqlBaseRepository, self).__init__(model_class)\n\n def get_dialect(self):\n return mysqlDialect\n\n def get_executor(self):\n return mysqlExecutor\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MySQLDialect(DefaultDialect):\n\n def get_db_type(self):\n return 'mysql'\n\n def paginate_with(self, sql, page_number, page_size):\n if page_number == 1 and page_size == 1:\n if re.match(DefaultDialect.select_single_pattern, sql) is not None:\n return sql\n offset = page_size * (page_number - 1)\n return '%s LIMIT %d OFFSET %d' % (sql, page_size, offset)\n\n\n<mask token>\n\n\nclass MysqlBaseModel(BaseModel):\n\n def __init__(self, db_name=None, table_name=None, table_alias=None,\n primary_key='id'):\n super(MysqlBaseModel, self).__init__(db_name, table_name,\n table_alias, primary_key)\n\n def get_dialect(self):\n return mysqlDialect\n\n def get_executor(self):\n return mysqlExecutor\n\n\nclass MysqlBaseRepository(BaseRepository):\n\n def __init__(self, model_class=None):\n super(MysqlBaseRepository, self).__init__(model_class)\n\n def get_dialect(self):\n return mysqlDialect\n\n def get_executor(self):\n return mysqlExecutor\n\n\ndef transaction(rollback_exceptions=[]):\n\n def wrap(func):\n\n def handle(result, **kwargs):\n func = kwargs['func']\n args = kwargs['args']\n kwargs = kwargs['kwargs']\n return_value = func(*args, **kwargs)\n logger.info('Transaction method: ' + func.__name__)\n result.append(return_value)\n\n def to_do(*args, **kwargs):\n new_kwargs = {'func': func, 'args': args, 'kwargs': kwargs}\n result = []\n try:\n mysqlExecutor.begin_transaction()\n handle(result, **new_kwargs)\n mysqlExecutor.commit_transaction()\n except Exception as e:\n if len(rollback_exceptions\n ) == 0 or e.__class__ in rollback_exceptions:\n mysqlExecutor.rollback_transaction()\n logger.error('Method execute error. method: ' + str(\n func.__name__) + ', error:' + traceback.format_exc\n () + ', transaction roll back.')\n else:\n mysqlExecutor.commit_transaction()\n raise e\n finally:\n mysqlExecutor.close_transaction()\n return to_do\n return wrap\n",
"step-3": "<mask token>\n\n\nclass MySQLDialect(DefaultDialect):\n\n def get_db_type(self):\n return 'mysql'\n\n def paginate_with(self, sql, page_number, page_size):\n if page_number == 1 and page_size == 1:\n if re.match(DefaultDialect.select_single_pattern, sql) is not None:\n return sql\n offset = page_size * (page_number - 1)\n return '%s LIMIT %d OFFSET %d' % (sql, page_size, offset)\n\n\n<mask token>\nif db_type == 'mysql':\n import mysql.connector as connector\n db_config['target'] = connector\n db_config['use_pure'] = True\n from mysql.connector.conversion import MySQLConverter\n\n\n class NumpyMySQLConverter(MySQLConverter):\n \"\"\" A mysql.connector Converter that handles Numpy types \"\"\"\n\n def _float32_to_mysql(self, value):\n return float(value)\n\n def _float64_to_mysql(self, value):\n return float(value)\n\n def _int32_to_mysql(self, value):\n return int(value)\n\n def _int64_to_mysql(self, value):\n return int(value)\n db_config['converter_class'] = NumpyMySQLConverter\n mysqlExecutor = ExecutorFactory.get_executor(db_config=db_config)\n mysqlDialect = MySQLDialect()\n\n\nclass MysqlBaseModel(BaseModel):\n\n def __init__(self, db_name=None, table_name=None, table_alias=None,\n primary_key='id'):\n super(MysqlBaseModel, self).__init__(db_name, table_name,\n table_alias, primary_key)\n\n def get_dialect(self):\n return mysqlDialect\n\n def get_executor(self):\n return mysqlExecutor\n\n\nclass MysqlBaseRepository(BaseRepository):\n\n def __init__(self, model_class=None):\n super(MysqlBaseRepository, self).__init__(model_class)\n\n def get_dialect(self):\n return mysqlDialect\n\n def get_executor(self):\n return mysqlExecutor\n\n\ndef transaction(rollback_exceptions=[]):\n\n def wrap(func):\n\n def handle(result, **kwargs):\n func = kwargs['func']\n args = kwargs['args']\n kwargs = kwargs['kwargs']\n return_value = func(*args, **kwargs)\n logger.info('Transaction method: ' + func.__name__)\n result.append(return_value)\n\n def to_do(*args, 
**kwargs):\n new_kwargs = {'func': func, 'args': args, 'kwargs': kwargs}\n result = []\n try:\n mysqlExecutor.begin_transaction()\n handle(result, **new_kwargs)\n mysqlExecutor.commit_transaction()\n except Exception as e:\n if len(rollback_exceptions\n ) == 0 or e.__class__ in rollback_exceptions:\n mysqlExecutor.rollback_transaction()\n logger.error('Method execute error. method: ' + str(\n func.__name__) + ', error:' + traceback.format_exc\n () + ', transaction roll back.')\n else:\n mysqlExecutor.commit_transaction()\n raise e\n finally:\n mysqlExecutor.close_transaction()\n return to_do\n return wrap\n",
"step-4": "import re\nimport traceback\nfrom pesto_common.config.configer import Configer\nfrom pesto_common.log.logger_factory import LoggerFactory\nfrom pesto_orm.core.base import db_config\nfrom pesto_orm.core.executor import ExecutorFactory\nfrom pesto_orm.core.model import BaseModel\nfrom pesto_orm.core.repository import BaseRepository\nfrom pesto_orm.dialect.base import DefaultDialect\nlogger = LoggerFactory.get_logger('dialect.mysql.domain')\n\n\nclass MySQLDialect(DefaultDialect):\n\n def get_db_type(self):\n return 'mysql'\n\n def paginate_with(self, sql, page_number, page_size):\n if page_number == 1 and page_size == 1:\n if re.match(DefaultDialect.select_single_pattern, sql) is not None:\n return sql\n offset = page_size * (page_number - 1)\n return '%s LIMIT %d OFFSET %d' % (sql, page_size, offset)\n\n\ndb_type = Configer.get('db.type')\nif db_type == 'mysql':\n import mysql.connector as connector\n db_config['target'] = connector\n db_config['use_pure'] = True\n from mysql.connector.conversion import MySQLConverter\n\n\n class NumpyMySQLConverter(MySQLConverter):\n \"\"\" A mysql.connector Converter that handles Numpy types \"\"\"\n\n def _float32_to_mysql(self, value):\n return float(value)\n\n def _float64_to_mysql(self, value):\n return float(value)\n\n def _int32_to_mysql(self, value):\n return int(value)\n\n def _int64_to_mysql(self, value):\n return int(value)\n db_config['converter_class'] = NumpyMySQLConverter\n mysqlExecutor = ExecutorFactory.get_executor(db_config=db_config)\n mysqlDialect = MySQLDialect()\n\n\nclass MysqlBaseModel(BaseModel):\n\n def __init__(self, db_name=None, table_name=None, table_alias=None,\n primary_key='id'):\n super(MysqlBaseModel, self).__init__(db_name, table_name,\n table_alias, primary_key)\n\n def get_dialect(self):\n return mysqlDialect\n\n def get_executor(self):\n return mysqlExecutor\n\n\nclass MysqlBaseRepository(BaseRepository):\n\n def __init__(self, model_class=None):\n super(MysqlBaseRepository, 
self).__init__(model_class)\n\n def get_dialect(self):\n return mysqlDialect\n\n def get_executor(self):\n return mysqlExecutor\n\n\ndef transaction(rollback_exceptions=[]):\n\n def wrap(func):\n\n def handle(result, **kwargs):\n func = kwargs['func']\n args = kwargs['args']\n kwargs = kwargs['kwargs']\n return_value = func(*args, **kwargs)\n logger.info('Transaction method: ' + func.__name__)\n result.append(return_value)\n\n def to_do(*args, **kwargs):\n new_kwargs = {'func': func, 'args': args, 'kwargs': kwargs}\n result = []\n try:\n mysqlExecutor.begin_transaction()\n handle(result, **new_kwargs)\n mysqlExecutor.commit_transaction()\n except Exception as e:\n if len(rollback_exceptions\n ) == 0 or e.__class__ in rollback_exceptions:\n mysqlExecutor.rollback_transaction()\n logger.error('Method execute error. method: ' + str(\n func.__name__) + ', error:' + traceback.format_exc\n () + ', transaction roll back.')\n else:\n mysqlExecutor.commit_transaction()\n raise e\n finally:\n mysqlExecutor.close_transaction()\n return to_do\n return wrap\n",
"step-5": "# coding=utf-8\nimport re\nimport traceback\n\nfrom pesto_common.config.configer import Configer\nfrom pesto_common.log.logger_factory import LoggerFactory\nfrom pesto_orm.core.base import db_config\nfrom pesto_orm.core.executor import ExecutorFactory\nfrom pesto_orm.core.model import BaseModel\nfrom pesto_orm.core.repository import BaseRepository\nfrom pesto_orm.dialect.base import DefaultDialect\n\nlogger = LoggerFactory.get_logger('dialect.mysql.domain')\n\n\nclass MySQLDialect(DefaultDialect):\n\n def get_db_type(self):\n return 'mysql'\n\n def paginate_with(self, sql, page_number, page_size):\n if page_number == 1 and page_size == 1:\n if re.match(DefaultDialect.select_single_pattern, sql) is not None:\n return sql\n\n offset = page_size * (page_number - 1)\n return '%s LIMIT %d OFFSET %d' % (sql, page_size, offset)\n\n\ndb_type = Configer.get('db.type')\nif db_type == 'mysql':\n import mysql.connector as connector\n\n db_config['target'] = connector\n db_config['use_pure'] = True\n\n from mysql.connector.conversion import MySQLConverter\n\n\n class NumpyMySQLConverter(MySQLConverter):\n ''' A mysql.connector Converter that handles Numpy types '''\n\n def _float32_to_mysql(self, value):\n return float(value)\n\n def _float64_to_mysql(self, value):\n return float(value)\n\n def _int32_to_mysql(self, value):\n return int(value)\n\n def _int64_to_mysql(self, value):\n return int(value)\n\n\n db_config['converter_class'] = NumpyMySQLConverter\n\n mysqlExecutor = ExecutorFactory.get_executor(db_config=db_config)\n\n mysqlDialect = MySQLDialect()\n\n\nclass MysqlBaseModel(BaseModel):\n\n def __init__(self, db_name=None, table_name=None, table_alias=None, primary_key='id'):\n super(MysqlBaseModel, self).__init__(db_name, table_name, table_alias, primary_key)\n\n def get_dialect(self):\n return mysqlDialect\n\n def get_executor(self):\n return mysqlExecutor\n\n\nclass MysqlBaseRepository(BaseRepository):\n\n def __init__(self, model_class=None):\n 
super(MysqlBaseRepository, self).__init__(model_class)\n\n def get_dialect(self):\n return mysqlDialect\n\n def get_executor(self):\n return mysqlExecutor\n\n\ndef transaction(rollback_exceptions=[]):\n def wrap(func):\n def handle(result, **kwargs): # 真实执行原方法.\n func = kwargs['func']\n args = kwargs['args']\n kwargs = kwargs['kwargs']\n return_value = func(*args, **kwargs)\n logger.info('Transaction method: ' + func.__name__)\n result.append(return_value)\n\n def to_do(*args, **kwargs):\n new_kwargs = {'func': func, 'args': args, 'kwargs': kwargs}\n\n result = []\n try:\n mysqlExecutor.begin_transaction()\n handle(result, **new_kwargs)\n mysqlExecutor.commit_transaction()\n except Exception as e:\n\n if len(rollback_exceptions) == 0 or e.__class__ in rollback_exceptions:\n mysqlExecutor.rollback_transaction()\n logger.error('Method execute error. method: ' + str(func.__name__) + ', error:' + traceback.format_exc() + ', transaction roll back.')\n else:\n mysqlExecutor.commit_transaction()\n raise e\n finally:\n mysqlExecutor.close_transaction()\n\n return to_do\n\n return wrap\n",
"step-ids": [
7,
12,
13,
15,
16
]
}
|
[
7,
12,
13,
15,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
'''
=======================================================================
AutoTest Team Source File.
Copyright(C), Changyou.com
-----------------------------------------------------------------------
Created: 2017/3/2 by ChengLongLong
-----------------------------------------------------------------------
Description:
-----------------------------------------------------------------------
History:
2017/3/2
=======================================================================
'''
|
flexible
|
{
"blob_id": "38f7c529cd0a8d85de266c6a932e6c8342aee273",
"index": 4969,
"step-1": "<mask token>\n",
"step-2": "# -*- coding: utf-8 -*-\n'''\n=======================================================================\nAutoTest Team Source File.\nCopyright(C), Changyou.com\n-----------------------------------------------------------------------\nCreated: 2017/3/2 by ChengLongLong\n-----------------------------------------------------------------------\nDescription: \n-----------------------------------------------------------------------\nHistory: \n2017/3/2 \n=======================================================================\n'''",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from can.interfaces.ics_neovi.neovi_bus import NeoViBus
|
normal
|
{
"blob_id": "6025b8d4015572ea1a760c1b4bc7200a1019c802",
"index": 5031,
"step-1": "<mask token>\n",
"step-2": "from can.interfaces.ics_neovi.neovi_bus import NeoViBus\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import datetime
import time
import requests
from config import url
from data import DistrictList
import random
import pymysql
def base_url():
    """Return the service base URL from config.

    The headers dict previously built here was never read by any caller,
    so only the URL is returned.
    """
    return url
# Generate a random numeric string of the given length.
def random_Num(length, string=None):
    """Return *length* random digits appended to an optional prefix.

    Bug fix: the old mutable default argument (``string=[]``) accumulated
    digits across calls, so every parameterless call returned a longer
    string than requested. The prefix list is also copied now, so a
    caller-supplied list is no longer mutated.

    :param length: how many random digits (0-9) to append.
    :param string: optional list of prefix characters, e.g. ['粤', 'B'].
    :return: prefix + digits joined into a single string.
    """
    chars = [] if string is None else list(string)
    for _ in range(length):
        chars.append(str(random.randint(0, 9)))
    return ''.join(chars)
# a = random_Num(9, ['1','3'])
# b = random_Num(6, ['粤','B'])
# c = random_Num(9)
# print(a,b,c)
# Generate an ID-card number and the matching birthday.
def generator():
    """Generate a random mainland-China resident ID number and birthday.

    Builds a 17-digit body (6-digit district code + YYYYMMDD birthday +
    3-digit sequence) and appends the GB 11643 / ISO 7064 MOD 11-2 check
    character.

    Bug fix: remainder 8 previously mapped to check character '5'; the
    standard table is [1, 0, X, 9, 8, 7, 6, 5, 4, 3, 2], so remainder 8
    must map to '4'.

    :return: tuple (id_number_string, birthday_epoch_milliseconds)
    """
    districtcode = DistrictList[random.randint(0, len(DistrictList) - 1)]['code']
    date = datetime.datetime.now() - datetime.timedelta(weeks=random.randint(1, 2350))
    birthDay = date.strftime('%Y%m%d')
    randomNum = str(random.randint(100, 300))
    idnum = districtcode + birthDay + randomNum

    # GB 11643 weights for the 17 body digits, and the MOD 11-2 check table.
    weight = [7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2]
    checkcode = ['1', '0', 'X', '9', '8', '7', '6', '5', '4', '3', '2']
    count = sum(int(digit) * w for digit, w in zip(idnum, weight))
    id = idnum + checkcode[count % 11]

    # Birthday as a millisecond epoch timestamp.
    timstamp = int(time.mktime(date.timetuple()) * 1000)
    return id, timstamp
# Generate one (id-number, birthday-timestamp) pair at import time; the
# two accessors below expose its parts.
a = generator()


def returnId():
    # First element of the pair: the generated ID number string.
    return a[0]


def returnTimestamp():
    # Second element: the birthday as a millisecond epoch timestamp.
    return a[1]
# Shared helper for connecting to and querying the database.
def query_mysql(sql, *params, database="zbcf_injury_test"):
    """Run *sql* with *params* against the test DB and return the first row.

    The connection and cursor are now closed even when execution raises
    (the previous version leaked both on any exception).

    SECURITY NOTE: credentials are hard-coded here; they should be moved
    to configuration / environment variables.

    :param sql: SQL text with %s placeholders.
    :param params: positional parameters bound to the placeholders.
    :param database: schema name, defaults to the injury test schema.
    :return: first result row as a dict, or None when nothing matches.
    """
    conn = pymysql.connect(host="rm-wz97oujls3998784i.mysql.rds.aliyuncs.com",
                           user="testuser", password="testuser@2018",
                           database=database, charset='utf8',
                           cursorclass=pymysql.cursors.DictCursor)
    try:
        with conn.cursor() as cursor:
            cursor.execute(sql, params)
            data = cursor.fetchone()
        return data
    finally:
        conn.close()
# 模拟订单超过48/12小时/7天
# 只需将数据库过期时间设置为当前时间
# def orderTimeOut(operation_id):
# now = datetime.datetime.now()
# now = now.strftime('%Y-%m-%d %H:%M:%S')
# # delta = datetime.timedelta(days=outTime)
# # now = now + delta
# print(now)
# # sql = "UPDATE t_auth_info t SET end_effect_time = str_to_date(\'%s\','%%Y-%%m-%%d %%H:%%i:%%s') WHERE t.operation_id = '%s'"
# sql = "UPDATE t_auth_info t SET end_effect_time = '%s' WHERE t.operation_id = '%s'"
# params = [now, operation_id]
# update_result = query_mysql(sql, *params)
# return update_result
# # return now.strftime('%Y-%m-%d %H:%M:%S')
# # 有什么问题??
# a = orderTimeOut(289)
# print(a)
# Simulate an order exceeding its 48h / 12h / 7-day window:
# simply set the record's DB expiry time to the current time.
def orderTimeOut(order_id, database="zbcf_injury_test"):
    """Force the auth record tied to *order_id* to expire right now.

    Sets t_auth_info.end_effect_time to the current time so the order is
    treated as past its 48h/12h/7-day window.  Returns the number of rows
    updated.
    """
    conn = pymysql.connect(host="rm-wz97oujls3998784i.mysql.rds.aliyuncs.com", user="testuser",
                           password="testuser@2018", database=database, charset='utf8',
                           cursorclass=pymysql.cursors.DictCursor)
    try:
        cursor = conn.cursor()
        try:
            now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            # Parameterized query: the original interpolated values straight
            # into the SQL string, which is injection-prone; pymysql quotes
            # and escapes the %s values itself.
            sql = ("UPDATE t_auth_info t SET end_effect_time = %s "
                   "WHERE t.operation_id = (SELECT id from t_operation where order_id = %s)")
            effectRows = cursor.execute(sql, (now, order_id))
            conn.commit()
            print('make order time out!')
            return effectRows
        finally:
            cursor.close()
    finally:
        conn.close()
# a = orderTimeOut(260)
# print(a)
def sleep(num):
    """Pause execution for *num* seconds (thin wrapper around time.sleep)."""
    time.sleep(num)
# Look up the login code (token) by order_Id
def queryLoginNum(orderId):
    """Return the active login token for the auth record tied to *orderId*.

    Raises TypeError (NoneType subscript) when no matching row exists.
    """
    # %s placeholders are left unquoted: pymysql quotes/escapes values
    # itself, so wrapping them in '...' (as the original did) double-quotes
    # string parameters and breaks the query.
    sql = ("SELECT token from t_auth_info t "
           "where t.operation_id = (SELECT id from t_operation where order_id = %s) "
           "and t.del_flag = '0'")
    query_result = query_mysql(sql, orderId)
    print(orderId)  # debug trace kept from the original
    return query_result['token']
# a = queryLoginNum(418)
# print (a)
# Look up the login code (token) by operation_Id
def opetationId_queryLoginNum(operation_Id):
    """Return the active login token for auth record *operation_Id*.

    NOTE(review): the misspelled name ("opetation") is kept for caller
    compatibility.  Raises TypeError (NoneType subscript) when no matching
    row exists.
    """
    # Unquoted %s placeholder: pymysql quotes/escapes the value itself.
    sql = "SELECT token from t_auth_info where operation_id = %s and del_flag = '0'"
    query_result = query_mysql(sql, operation_Id)
    return query_result['token']
# a = queryLoginNum(290)
# print (a)
# 返回orderId的方法
def queryOrderId():
    """Placeholder for fetching an orderId; not implemented yet (returns None)."""
# 返回operationId的方法
def queryOperationId():
    """Placeholder for fetching an operationId; not implemented yet (returns None)."""
# 查询订单状态
def queryOrderStatus():
    """Placeholder for querying an order's status; not implemented yet (returns None)."""
# # 登录PC端获取token,拼接到headers中
# def setup_hook_token(request):
# #print(request)
# url_path="http://testrenshang.cias.cn/injury/user/pc/login"
# header={"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"}
# payload={"loginName": "haadmin003", "loginPass": "Y2lhczEyMzQ1Ng==", "verifyCode": "tubd"}
# req=requests.post(url=url_path, headers=header, params=payload).json()
# token=req['data']['token']
# request["headers"]['token']=token
# # print(token,'\n', req)
# # print(request)
# # request = {'headers':{'Content-Type': 'application/json;charset=UTF-8', 'method': 'GET', 'url': '$uri', 'token': '$token'}}
# # setup_hook_token(request)
# Log in via the H5 endpoint and obtain a token
def getH5Token(accessCode):
    """Log in to the H5 endpoint with *accessCode* and return the session token.

    Raises requests.exceptions.RequestException on network failure/timeout
    and KeyError if the response lacks data/token.
    """
    url = "http://testrsapp.cias.cn/injury/user/h5/login"
    headers = {"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"}
    data = {"accessCode": accessCode}
    # timeout keeps the test run from hanging forever on a dead endpoint
    req = requests.post(url=url, headers=headers, data=data, timeout=10).json()
    return req['data']['token']
# a = getH5Token('31583310')
# print(a)
# Log in via the Web (PC) endpoint and obtain a token
def getWebToken(accessCode):
    """Log in to the PC endpoint with fixed test credentials and return the token.

    NOTE(review): *accessCode* is accepted but never used -- login is done
    with hard-coded credentials; the parameter is kept for interface
    compatibility with existing callers.
    """
    url = "http://testrsapp.cias.cn/injury/user/pc/login"
    headers = {"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"}
    data = {"loginName": "haadmin003", "loginPass": "Y2lhczEyMzQ1Ng==", "verifyCode": "tubd"}
    # timeout keeps the test run from hanging forever on a dead endpoint
    req = requests.post(url=url, headers=headers, data=data, timeout=10).json()
    return req['data']['token']
# a = getWebToken('31583310')
# print(a)
#
|
normal
|
{
"blob_id": "c55b6fed92a5f4f2961c6f8d5b150b22a5f622e8",
"index": 4520,
"step-1": "<mask token>\n\n\ndef base_url():\n default_request = {'base_url': url, 'headers': {'Content-Type':\n 'application/json;charset=UTF-8'}}\n return default_request['base_url']\n\n\ndef random_Num(length, string=[]):\n for i in range(length):\n y = str(random.randint(0, 9))\n string.append(y)\n string = ''.join(string)\n return string\n\n\n<mask token>\n\n\ndef returnId():\n return a[0]\n\n\ndef returnTimestamp():\n return a[1]\n\n\ndef query_mysql(sql, *params, database='zbcf_injury_test'):\n conn = pymysql.connect(host=\n 'rm-wz97oujls3998784i.mysql.rds.aliyuncs.com', user='testuser',\n password='testuser@2018', database=database, charset='utf8',\n cursorclass=pymysql.cursors.DictCursor)\n cursor = conn.cursor()\n cursor.execute(sql, params)\n data = cursor.fetchone()\n cursor.close()\n conn.close()\n return data\n\n\n<mask token>\n\n\ndef sleep(num):\n time.sleep(num)\n\n\ndef queryLoginNum(orderId):\n sql = (\n \"SELECT token from t_auth_info t where t.operation_id = (SELECT id from t_operation where order_id = '%s') and t.del_flag = '0'\"\n )\n query_result = query_mysql(sql, orderId)\n print(orderId)\n return query_result['token']\n\n\ndef opetationId_queryLoginNum(operation_Id):\n sql = (\n \"SELECT token from t_auth_info where operation_id = '%s' and del_flag = '0'\"\n )\n query_result = query_mysql(sql, operation_Id)\n return query_result['token']\n\n\n<mask token>\n\n\ndef queryOperationId():\n pass\n\n\n<mask token>\n\n\ndef getH5Token(accessCode):\n url = 'http://testrsapp.cias.cn/injury/user/h5/login'\n headers = {'Content-Type':\n 'application/x-www-form-urlencoded; charset=UTF-8'}\n data = {'accessCode': accessCode}\n req = requests.post(url=url, headers=headers, data=data).json()\n return req['data']['token']\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef base_url():\n default_request = {'base_url': url, 'headers': {'Content-Type':\n 'application/json;charset=UTF-8'}}\n return default_request['base_url']\n\n\ndef random_Num(length, string=[]):\n for i in range(length):\n y = str(random.randint(0, 9))\n string.append(y)\n string = ''.join(string)\n return string\n\n\n<mask token>\n\n\ndef returnId():\n return a[0]\n\n\ndef returnTimestamp():\n return a[1]\n\n\ndef query_mysql(sql, *params, database='zbcf_injury_test'):\n conn = pymysql.connect(host=\n 'rm-wz97oujls3998784i.mysql.rds.aliyuncs.com', user='testuser',\n password='testuser@2018', database=database, charset='utf8',\n cursorclass=pymysql.cursors.DictCursor)\n cursor = conn.cursor()\n cursor.execute(sql, params)\n data = cursor.fetchone()\n cursor.close()\n conn.close()\n return data\n\n\ndef orderTimeOut(order_id, database='zbcf_injury_test'):\n conn = pymysql.connect(host=\n 'rm-wz97oujls3998784i.mysql.rds.aliyuncs.com', user='testuser',\n password='testuser@2018', database=database, charset='utf8',\n cursorclass=pymysql.cursors.DictCursor)\n cursor = conn.cursor()\n now = datetime.datetime.now()\n now = now.strftime('%Y-%m-%d %H:%M:%S')\n sql = (\n \"UPDATE t_auth_info t SET end_effect_time = '%s' WHERE t.operation_id = (SELECT id from t_operation where order_id = '%s')\"\n % (now, order_id))\n effectRows = cursor.execute(sql)\n conn.commit()\n print('make order time out!')\n cursor.close()\n conn.close()\n return effectRows\n\n\ndef sleep(num):\n time.sleep(num)\n\n\ndef queryLoginNum(orderId):\n sql = (\n \"SELECT token from t_auth_info t where t.operation_id = (SELECT id from t_operation where order_id = '%s') and t.del_flag = '0'\"\n )\n query_result = query_mysql(sql, orderId)\n print(orderId)\n return query_result['token']\n\n\ndef opetationId_queryLoginNum(operation_Id):\n sql = (\n \"SELECT token from t_auth_info where operation_id = '%s' and del_flag = '0'\"\n )\n query_result = query_mysql(sql, operation_Id)\n 
return query_result['token']\n\n\ndef queryOrderId():\n pass\n\n\ndef queryOperationId():\n pass\n\n\n<mask token>\n\n\ndef getH5Token(accessCode):\n url = 'http://testrsapp.cias.cn/injury/user/h5/login'\n headers = {'Content-Type':\n 'application/x-www-form-urlencoded; charset=UTF-8'}\n data = {'accessCode': accessCode}\n req = requests.post(url=url, headers=headers, data=data).json()\n return req['data']['token']\n\n\ndef getWebToken(accessCode):\n url = 'http://testrsapp.cias.cn/injury/user/pc/login'\n headers = {'Content-Type':\n 'application/x-www-form-urlencoded; charset=UTF-8'}\n data = {'loginName': 'haadmin003', 'loginPass': 'Y2lhczEyMzQ1Ng==',\n 'verifyCode': 'tubd'}\n req = requests.post(url=url, headers=headers, data=data).json()\n return req['data']['token']\n",
"step-3": "<mask token>\n\n\ndef base_url():\n default_request = {'base_url': url, 'headers': {'Content-Type':\n 'application/json;charset=UTF-8'}}\n return default_request['base_url']\n\n\ndef random_Num(length, string=[]):\n for i in range(length):\n y = str(random.randint(0, 9))\n string.append(y)\n string = ''.join(string)\n return string\n\n\ndef generator():\n districtcode = DistrictList[random.randint(0, len(DistrictList) - 1)][\n 'code']\n date = datetime.datetime.now() - datetime.timedelta(weeks=random.\n randint(1, 2350))\n birthDay = date.strftime('%Y%m%d')\n randomNum = str(random.randint(100, 300))\n idnum = districtcode + birthDay + randomNum\n i = 0\n count = 0\n weight = [7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2]\n checkcode = {'0': '1', '1': '0', '2': 'X', '3': '9', '4': '8', '5': '7',\n '6': '6', '7': '5', '8': '5', '9': '3', '10': '2'}\n for i in range(0, len(idnum)):\n count = count + int(idnum[i]) * weight[i]\n id = idnum + checkcode[str(count % 11)]\n timstamp = int(time.mktime(date.timetuple()) * 1000)\n return id, timstamp\n\n\n<mask token>\n\n\ndef returnId():\n return a[0]\n\n\ndef returnTimestamp():\n return a[1]\n\n\ndef query_mysql(sql, *params, database='zbcf_injury_test'):\n conn = pymysql.connect(host=\n 'rm-wz97oujls3998784i.mysql.rds.aliyuncs.com', user='testuser',\n password='testuser@2018', database=database, charset='utf8',\n cursorclass=pymysql.cursors.DictCursor)\n cursor = conn.cursor()\n cursor.execute(sql, params)\n data = cursor.fetchone()\n cursor.close()\n conn.close()\n return data\n\n\ndef orderTimeOut(order_id, database='zbcf_injury_test'):\n conn = pymysql.connect(host=\n 'rm-wz97oujls3998784i.mysql.rds.aliyuncs.com', user='testuser',\n password='testuser@2018', database=database, charset='utf8',\n cursorclass=pymysql.cursors.DictCursor)\n cursor = conn.cursor()\n now = datetime.datetime.now()\n now = now.strftime('%Y-%m-%d %H:%M:%S')\n sql = (\n \"UPDATE t_auth_info t SET end_effect_time = '%s' WHERE 
t.operation_id = (SELECT id from t_operation where order_id = '%s')\"\n % (now, order_id))\n effectRows = cursor.execute(sql)\n conn.commit()\n print('make order time out!')\n cursor.close()\n conn.close()\n return effectRows\n\n\ndef sleep(num):\n time.sleep(num)\n\n\ndef queryLoginNum(orderId):\n sql = (\n \"SELECT token from t_auth_info t where t.operation_id = (SELECT id from t_operation where order_id = '%s') and t.del_flag = '0'\"\n )\n query_result = query_mysql(sql, orderId)\n print(orderId)\n return query_result['token']\n\n\ndef opetationId_queryLoginNum(operation_Id):\n sql = (\n \"SELECT token from t_auth_info where operation_id = '%s' and del_flag = '0'\"\n )\n query_result = query_mysql(sql, operation_Id)\n return query_result['token']\n\n\ndef queryOrderId():\n pass\n\n\ndef queryOperationId():\n pass\n\n\ndef queryOrderStatus():\n pass\n\n\ndef getH5Token(accessCode):\n url = 'http://testrsapp.cias.cn/injury/user/h5/login'\n headers = {'Content-Type':\n 'application/x-www-form-urlencoded; charset=UTF-8'}\n data = {'accessCode': accessCode}\n req = requests.post(url=url, headers=headers, data=data).json()\n return req['data']['token']\n\n\ndef getWebToken(accessCode):\n url = 'http://testrsapp.cias.cn/injury/user/pc/login'\n headers = {'Content-Type':\n 'application/x-www-form-urlencoded; charset=UTF-8'}\n data = {'loginName': 'haadmin003', 'loginPass': 'Y2lhczEyMzQ1Ng==',\n 'verifyCode': 'tubd'}\n req = requests.post(url=url, headers=headers, data=data).json()\n return req['data']['token']\n",
"step-4": "import datetime\nimport time\nimport requests\nfrom config import url\nfrom data import DistrictList\nimport random\nimport pymysql\n\n\ndef base_url():\n default_request = {'base_url': url, 'headers': {'Content-Type':\n 'application/json;charset=UTF-8'}}\n return default_request['base_url']\n\n\ndef random_Num(length, string=[]):\n for i in range(length):\n y = str(random.randint(0, 9))\n string.append(y)\n string = ''.join(string)\n return string\n\n\ndef generator():\n districtcode = DistrictList[random.randint(0, len(DistrictList) - 1)][\n 'code']\n date = datetime.datetime.now() - datetime.timedelta(weeks=random.\n randint(1, 2350))\n birthDay = date.strftime('%Y%m%d')\n randomNum = str(random.randint(100, 300))\n idnum = districtcode + birthDay + randomNum\n i = 0\n count = 0\n weight = [7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2]\n checkcode = {'0': '1', '1': '0', '2': 'X', '3': '9', '4': '8', '5': '7',\n '6': '6', '7': '5', '8': '5', '9': '3', '10': '2'}\n for i in range(0, len(idnum)):\n count = count + int(idnum[i]) * weight[i]\n id = idnum + checkcode[str(count % 11)]\n timstamp = int(time.mktime(date.timetuple()) * 1000)\n return id, timstamp\n\n\na = generator()\n\n\ndef returnId():\n return a[0]\n\n\ndef returnTimestamp():\n return a[1]\n\n\ndef query_mysql(sql, *params, database='zbcf_injury_test'):\n conn = pymysql.connect(host=\n 'rm-wz97oujls3998784i.mysql.rds.aliyuncs.com', user='testuser',\n password='testuser@2018', database=database, charset='utf8',\n cursorclass=pymysql.cursors.DictCursor)\n cursor = conn.cursor()\n cursor.execute(sql, params)\n data = cursor.fetchone()\n cursor.close()\n conn.close()\n return data\n\n\ndef orderTimeOut(order_id, database='zbcf_injury_test'):\n conn = pymysql.connect(host=\n 'rm-wz97oujls3998784i.mysql.rds.aliyuncs.com', user='testuser',\n password='testuser@2018', database=database, charset='utf8',\n cursorclass=pymysql.cursors.DictCursor)\n cursor = conn.cursor()\n now = 
datetime.datetime.now()\n now = now.strftime('%Y-%m-%d %H:%M:%S')\n sql = (\n \"UPDATE t_auth_info t SET end_effect_time = '%s' WHERE t.operation_id = (SELECT id from t_operation where order_id = '%s')\"\n % (now, order_id))\n effectRows = cursor.execute(sql)\n conn.commit()\n print('make order time out!')\n cursor.close()\n conn.close()\n return effectRows\n\n\ndef sleep(num):\n time.sleep(num)\n\n\ndef queryLoginNum(orderId):\n sql = (\n \"SELECT token from t_auth_info t where t.operation_id = (SELECT id from t_operation where order_id = '%s') and t.del_flag = '0'\"\n )\n query_result = query_mysql(sql, orderId)\n print(orderId)\n return query_result['token']\n\n\ndef opetationId_queryLoginNum(operation_Id):\n sql = (\n \"SELECT token from t_auth_info where operation_id = '%s' and del_flag = '0'\"\n )\n query_result = query_mysql(sql, operation_Id)\n return query_result['token']\n\n\ndef queryOrderId():\n pass\n\n\ndef queryOperationId():\n pass\n\n\ndef queryOrderStatus():\n pass\n\n\ndef getH5Token(accessCode):\n url = 'http://testrsapp.cias.cn/injury/user/h5/login'\n headers = {'Content-Type':\n 'application/x-www-form-urlencoded; charset=UTF-8'}\n data = {'accessCode': accessCode}\n req = requests.post(url=url, headers=headers, data=data).json()\n return req['data']['token']\n\n\ndef getWebToken(accessCode):\n url = 'http://testrsapp.cias.cn/injury/user/pc/login'\n headers = {'Content-Type':\n 'application/x-www-form-urlencoded; charset=UTF-8'}\n data = {'loginName': 'haadmin003', 'loginPass': 'Y2lhczEyMzQ1Ng==',\n 'verifyCode': 'tubd'}\n req = requests.post(url=url, headers=headers, data=data).json()\n return req['data']['token']\n",
"step-5": "import datetime\nimport time\n\nimport requests\n\nfrom config import url\nfrom data import DistrictList\nimport random\nimport pymysql\n\ndef base_url():\n default_request = {\n 'base_url': url,\n 'headers': {\n \"Content-Type\": \"application/json;charset=UTF-8\"}\n }\n return default_request['base_url']\n\n\n# 生成一个指定长度的随机数\ndef random_Num(length, string=[]):\n for i in range(length):\n y = str(random.randint(0, 9))\n string.append(y)\n string = ''.join(string)\n return string\n\n# a = random_Num(9, ['1','3'])\n# b = random_Num(6, ['粤','B'])\n# c = random_Num(9)\n# print(a,b,c)\n\n# 生成一个身份证号码,以及对应的生日\ndef generator():\n # 生成身份证号码\n districtcode = DistrictList[random.randint(0, len(DistrictList) - 1)]['code']\n # date = datetime.date.today() - datetime.timedelta(weeks=random.randint(1, 3840))\n date = datetime.datetime.now() - datetime.timedelta(weeks=random.randint(1, 2350))\n birthDay = date.strftime('%Y%m%d')\n randomNum = str(random.randint(100, 300))\n idnum = districtcode + birthDay + randomNum\n i = 0\n count = 0\n weight = [7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2]\n checkcode = {'0': '1', '1': '0', '2': 'X', '3': '9', '4': '8', '5': '7', '6': '6', '7': '5', '8': '5', '9': '3', '10': '2'}\n for i in range(0, len(idnum)):\n count = count + int(idnum[i]) * weight[i]\n id = idnum + checkcode[str(count%11)]\n # 生成生日时间戳\n # timstamp = date.strftime('%Y%m%d%H%M%S')\n # timstamp = datetime.datetime.strptime(date, '%Y%m%d%H%M%S').timestamp()\n timstamp = int(time.mktime(date.timetuple()) * 1000)\n return id, timstamp\n\na = generator()\n\ndef returnId():\n return a[0]\n\ndef returnTimestamp():\n return a[1]\n\n\n\n# 连接数据库公用方法\ndef query_mysql(sql, *params, database=\"zbcf_injury_test\"):\n conn = pymysql.connect(host=\"rm-wz97oujls3998784i.mysql.rds.aliyuncs.com\", user=\"testuser\",\n password=\"testuser@2018\", database=database, charset='utf8',\n cursorclass=pymysql.cursors.DictCursor)\n cursor = conn.cursor()\n cursor.execute(sql, 
params)\n data = cursor.fetchone()\n cursor.close()\n conn.close()\n return data\n\n\n# 模拟订单超过48/12小时/7天\n# 只需将数据库过期时间设置为当前时间\n# def orderTimeOut(operation_id):\n# now = datetime.datetime.now()\n# now = now.strftime('%Y-%m-%d %H:%M:%S')\n# # delta = datetime.timedelta(days=outTime)\n# # now = now + delta\n# print(now)\n# # sql = \"UPDATE t_auth_info t SET end_effect_time = str_to_date(\\'%s\\','%%Y-%%m-%%d %%H:%%i:%%s') WHERE t.operation_id = '%s'\"\n# sql = \"UPDATE t_auth_info t SET end_effect_time = '%s' WHERE t.operation_id = '%s'\"\n# params = [now, operation_id]\n# update_result = query_mysql(sql, *params)\n# return update_result\n# # return now.strftime('%Y-%m-%d %H:%M:%S')\n# # 有什么问题??\n# a = orderTimeOut(289)\n# print(a)\n\n# 模拟订单超过48/12小时/7天\n# 只需将数据库过期时间设置为当前时间\ndef orderTimeOut(order_id, database=\"zbcf_injury_test\"):\n conn = pymysql.connect(host=\"rm-wz97oujls3998784i.mysql.rds.aliyuncs.com\", user=\"testuser\",\n password=\"testuser@2018\", database=database, charset='utf8',\n cursorclass=pymysql.cursors.DictCursor)\n cursor = conn.cursor()\n now = datetime.datetime.now()\n now = now.strftime('%Y-%m-%d %H:%M:%S')\n sql = \"UPDATE t_auth_info t SET end_effect_time = '%s' WHERE t.operation_id = (SELECT id from t_operation where order_id = '%s')\" % (now, order_id)\n effectRows = cursor.execute(sql)\n conn.commit()\n print('make order time out!')\n cursor.close()\n conn.close()\n return effectRows\n# a = orderTimeOut(260)\n# print(a)\ndef sleep(num):\n time.sleep(num)\n\n# 依据order_Id查询登录码\ndef queryLoginNum(orderId):\n sql = \"SELECT token from t_auth_info t where t.operation_id = (SELECT id from t_operation where order_id = '%s') and t.del_flag = '0'\"\n query_result = query_mysql(sql, orderId)\n print(orderId)\n return query_result['token']\n\n# a = queryLoginNum(418)\n# print (a)\n\n\n# 依据operation_Id查询登录码\ndef opetationId_queryLoginNum(operation_Id):\n sql = \"SELECT token from t_auth_info where operation_id = '%s' and del_flag = '0'\"\n 
query_result = query_mysql(sql, operation_Id)\n return query_result['token']\n\n# a = queryLoginNum(290)\n# print (a)\n\n\n# 返回orderId的方法\ndef queryOrderId():\n pass\n\n\n# 返回operationId的方法\ndef queryOperationId():\n pass\n\n# 查询订单状态\ndef queryOrderStatus():\n pass\n\n\n# # 登录PC端获取token,拼接到headers中\n# def setup_hook_token(request):\n# #print(request)\n# url_path=\"http://testrenshang.cias.cn/injury/user/pc/login\"\n# header={\"Content-Type\": \"application/x-www-form-urlencoded; charset=UTF-8\"}\n# payload={\"loginName\": \"haadmin003\", \"loginPass\": \"Y2lhczEyMzQ1Ng==\", \"verifyCode\": \"tubd\"}\n# req=requests.post(url=url_path, headers=header, params=payload).json()\n# token=req['data']['token']\n# request[\"headers\"]['token']=token\n# # print(token,'\\n', req)\n# # print(request)\n# # request = {'headers':{'Content-Type': 'application/json;charset=UTF-8', 'method': 'GET', 'url': '$uri', 'token': '$token'}}\n# # setup_hook_token(request)\n\n# 登录H5端获取token\ndef getH5Token(accessCode):\n url = \"http://testrsapp.cias.cn/injury/user/h5/login\"\n headers = {\"Content-Type\": \"application/x-www-form-urlencoded; charset=UTF-8\"}\n data = {\"accessCode\": accessCode}\n req = requests.post(url=url, headers=headers, data=data).json()\n return req['data']['token']\n\n# a = getH5Token('31583310')\n# print(a)\n\n# 登录Web端获取token\ndef getWebToken(accessCode):\n url = \"http://testrsapp.cias.cn/injury/user/pc/login\"\n headers = {\"Content-Type\": \"application/x-www-form-urlencoded; charset=UTF-8\"}\n data = {\"loginName\": \"haadmin003\", \"loginPass\": \"Y2lhczEyMzQ1Ng==\", \"verifyCode\": \"tubd\"}\n req = requests.post(url=url, headers=headers, data=data).json()\n return req['data']['token']\n\n# a = getWebToken('31583310')\n# print(a)\n\n\n#",
"step-ids": [
10,
13,
15,
17,
18
]
}
|
[
10,
13,
15,
17,
18
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
operations = [migrations.CreateModel(name='TOTPDevice', fields=[('id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=
True, primary_key=True)), ('name', models.CharField(help_text=
'The human-readable name of this device.', max_length=64)), (
'confirmed', models.BooleanField(default=True, help_text=
'Is this device ready for use?')), ('key', models.CharField(default
=django_otp.plugins.otp_totp.models.default_key, help_text=
'A hex-encoded secret key of up to 40 bytes.', max_length=80,
validators=[django_otp.plugins.otp_totp.models.key_validator])), (
'step', models.PositiveSmallIntegerField(default=30, help_text=
'The time step in seconds.')), ('t0', models.BigIntegerField(
default=0, help_text=
'The Unix time at which to begin counting steps.')), ('digits',
models.PositiveSmallIntegerField(default=6, help_text=
'The number of digits to expect in a token.', choices=[(6, 6), (8,
8)])), ('tolerance', models.PositiveSmallIntegerField(default=1,
help_text=
'The number of time steps in the past or future to allow.')), (
'drift', models.SmallIntegerField(default=0, help_text=
'The number of time steps the prover is known to deviate from our clock.'
)), ('last_t', models.BigIntegerField(default=-1, help_text=
'The t value of the latest verified token. The next token must be at a higher time step.'
)), ('user', models.ForeignKey(help_text=
'The user that this device belongs to.', to=settings.
AUTH_USER_MODEL, on_delete=models.CASCADE))], options={'abstract':
False, 'verbose_name': 'TOTP device'}, bases=(models.Model,))]
<|reserved_special_token_1|>
from django.conf import settings
from django.db import migrations, models
import django_otp.plugins.otp_totp.models
class Migration(migrations.Migration):
dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
operations = [migrations.CreateModel(name='TOTPDevice', fields=[('id',
models.AutoField(verbose_name='ID', serialize=False, auto_created=
True, primary_key=True)), ('name', models.CharField(help_text=
'The human-readable name of this device.', max_length=64)), (
'confirmed', models.BooleanField(default=True, help_text=
'Is this device ready for use?')), ('key', models.CharField(default
=django_otp.plugins.otp_totp.models.default_key, help_text=
'A hex-encoded secret key of up to 40 bytes.', max_length=80,
validators=[django_otp.plugins.otp_totp.models.key_validator])), (
'step', models.PositiveSmallIntegerField(default=30, help_text=
'The time step in seconds.')), ('t0', models.BigIntegerField(
default=0, help_text=
'The Unix time at which to begin counting steps.')), ('digits',
models.PositiveSmallIntegerField(default=6, help_text=
'The number of digits to expect in a token.', choices=[(6, 6), (8,
8)])), ('tolerance', models.PositiveSmallIntegerField(default=1,
help_text=
'The number of time steps in the past or future to allow.')), (
'drift', models.SmallIntegerField(default=0, help_text=
'The number of time steps the prover is known to deviate from our clock.'
)), ('last_t', models.BigIntegerField(default=-1, help_text=
'The t value of the latest verified token. The next token must be at a higher time step.'
)), ('user', models.ForeignKey(help_text=
'The user that this device belongs to.', to=settings.
AUTH_USER_MODEL, on_delete=models.CASCADE))], options={'abstract':
False, 'verbose_name': 'TOTP device'}, bases=(models.Model,))]
<|reserved_special_token_1|>
from django.conf import settings
from django.db import migrations, models
import django_otp.plugins.otp_totp.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='TOTPDevice',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text='The human-readable name of this device.', max_length=64)),
('confirmed', models.BooleanField(default=True, help_text='Is this device ready for use?')),
('key', models.CharField(default=django_otp.plugins.otp_totp.models.default_key, help_text='A hex-encoded secret key of up to 40 bytes.', max_length=80, validators=[django_otp.plugins.otp_totp.models.key_validator])),
('step', models.PositiveSmallIntegerField(default=30, help_text='The time step in seconds.')),
('t0', models.BigIntegerField(default=0, help_text='The Unix time at which to begin counting steps.')),
('digits', models.PositiveSmallIntegerField(default=6, help_text='The number of digits to expect in a token.', choices=[(6, 6), (8, 8)])),
('tolerance', models.PositiveSmallIntegerField(default=1, help_text='The number of time steps in the past or future to allow.')),
('drift', models.SmallIntegerField(default=0, help_text='The number of time steps the prover is known to deviate from our clock.')),
('last_t', models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.')),
('user', models.ForeignKey(help_text='The user that this device belongs to.', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
options={
'abstract': False,
'verbose_name': 'TOTP device',
},
bases=(models.Model,),
),
]
|
flexible
|
{
"blob_id": "2e448176a755828e5c7c90e4224102a285098460",
"index": 4852,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]\n operations = [migrations.CreateModel(name='TOTPDevice', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=\n True, primary_key=True)), ('name', models.CharField(help_text=\n 'The human-readable name of this device.', max_length=64)), (\n 'confirmed', models.BooleanField(default=True, help_text=\n 'Is this device ready for use?')), ('key', models.CharField(default\n =django_otp.plugins.otp_totp.models.default_key, help_text=\n 'A hex-encoded secret key of up to 40 bytes.', max_length=80,\n validators=[django_otp.plugins.otp_totp.models.key_validator])), (\n 'step', models.PositiveSmallIntegerField(default=30, help_text=\n 'The time step in seconds.')), ('t0', models.BigIntegerField(\n default=0, help_text=\n 'The Unix time at which to begin counting steps.')), ('digits',\n models.PositiveSmallIntegerField(default=6, help_text=\n 'The number of digits to expect in a token.', choices=[(6, 6), (8, \n 8)])), ('tolerance', models.PositiveSmallIntegerField(default=1,\n help_text=\n 'The number of time steps in the past or future to allow.')), (\n 'drift', models.SmallIntegerField(default=0, help_text=\n 'The number of time steps the prover is known to deviate from our clock.'\n )), ('last_t', models.BigIntegerField(default=-1, help_text=\n 'The t value of the latest verified token. The next token must be at a higher time step.'\n )), ('user', models.ForeignKey(help_text=\n 'The user that this device belongs to.', to=settings.\n AUTH_USER_MODEL, on_delete=models.CASCADE))], options={'abstract': \n False, 'verbose_name': 'TOTP device'}, bases=(models.Model,))]\n",
"step-4": "from django.conf import settings\nfrom django.db import migrations, models\nimport django_otp.plugins.otp_totp.models\n\n\nclass Migration(migrations.Migration):\n dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]\n operations = [migrations.CreateModel(name='TOTPDevice', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=\n True, primary_key=True)), ('name', models.CharField(help_text=\n 'The human-readable name of this device.', max_length=64)), (\n 'confirmed', models.BooleanField(default=True, help_text=\n 'Is this device ready for use?')), ('key', models.CharField(default\n =django_otp.plugins.otp_totp.models.default_key, help_text=\n 'A hex-encoded secret key of up to 40 bytes.', max_length=80,\n validators=[django_otp.plugins.otp_totp.models.key_validator])), (\n 'step', models.PositiveSmallIntegerField(default=30, help_text=\n 'The time step in seconds.')), ('t0', models.BigIntegerField(\n default=0, help_text=\n 'The Unix time at which to begin counting steps.')), ('digits',\n models.PositiveSmallIntegerField(default=6, help_text=\n 'The number of digits to expect in a token.', choices=[(6, 6), (8, \n 8)])), ('tolerance', models.PositiveSmallIntegerField(default=1,\n help_text=\n 'The number of time steps in the past or future to allow.')), (\n 'drift', models.SmallIntegerField(default=0, help_text=\n 'The number of time steps the prover is known to deviate from our clock.'\n )), ('last_t', models.BigIntegerField(default=-1, help_text=\n 'The t value of the latest verified token. The next token must be at a higher time step.'\n )), ('user', models.ForeignKey(help_text=\n 'The user that this device belongs to.', to=settings.\n AUTH_USER_MODEL, on_delete=models.CASCADE))], options={'abstract': \n False, 'verbose_name': 'TOTP device'}, bases=(models.Model,))]\n",
"step-5": "from django.conf import settings\nfrom django.db import migrations, models\n\nimport django_otp.plugins.otp_totp.models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ]\n\n operations = [\n migrations.CreateModel(\n name='TOTPDevice',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('name', models.CharField(help_text='The human-readable name of this device.', max_length=64)),\n ('confirmed', models.BooleanField(default=True, help_text='Is this device ready for use?')),\n ('key', models.CharField(default=django_otp.plugins.otp_totp.models.default_key, help_text='A hex-encoded secret key of up to 40 bytes.', max_length=80, validators=[django_otp.plugins.otp_totp.models.key_validator])),\n ('step', models.PositiveSmallIntegerField(default=30, help_text='The time step in seconds.')),\n ('t0', models.BigIntegerField(default=0, help_text='The Unix time at which to begin counting steps.')),\n ('digits', models.PositiveSmallIntegerField(default=6, help_text='The number of digits to expect in a token.', choices=[(6, 6), (8, 8)])),\n ('tolerance', models.PositiveSmallIntegerField(default=1, help_text='The number of time steps in the past or future to allow.')),\n ('drift', models.SmallIntegerField(default=0, help_text='The number of time steps the prover is known to deviate from our clock.')),\n ('last_t', models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.')),\n ('user', models.ForeignKey(help_text='The user that this device belongs to.', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),\n ],\n options={\n 'abstract': False,\n 'verbose_name': 'TOTP device',\n },\n bases=(models.Model,),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for condition in conditions:
print(condition)
command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]
dir_name = []
for var in defaults:
if var not in condition:
condition[var] = defaults[var]
for var in condition:
while condition[var] in condition:
condition[var] = condition[condition[var]]
command.append('-' + var)
dir_name.append(''.join(var.split('_')))
val = str(condition[var])
command.append(val)
dir_name.append(val)
str_dir_name = '_'.join(dir_name)
if not os.path.exists(str_dir_name):
os.mkdir(str_dir_name)
for i in range(30):
if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):
continue
seed += 1
command[1] = str(seed)
print(command)
os.mkdir(str_dir_name + '/' + str(i))
with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:
infile.write(' '.join(command))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
defaults = {'N': 20, 'K': 3, 'POP_SIZE': 200, 'MUT_RATE': 0.05,
'TOURNAMENT_SIZE': 2, 'SELECTION': 0, 'CHANGE_RATE': 100000, 'MAX_GENS':
5000, 'FILTER_LENGTH': 'POP_SIZE'}
conditions = [{}, {'K': 10}, {'N': 100, 'MUT_RATE': 0.01}, {'MUT_RATE':
0.005}, {'MUT_RATE': 0.1}, {'POP_SIZE': 20}, {'POP_SIZE': 2000}, {
'SELECTION': 1}, {'SELECTION': 1, 'FILTER_LENGTH': 1000}, {
'CHANGE_RATE': 500}, {'CHANGE_RATE': 500, 'CHANGE_TYPE': 1}]
seed = 0
for condition in conditions:
print(condition)
command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]
dir_name = []
for var in defaults:
if var not in condition:
condition[var] = defaults[var]
for var in condition:
while condition[var] in condition:
condition[var] = condition[condition[var]]
command.append('-' + var)
dir_name.append(''.join(var.split('_')))
val = str(condition[var])
command.append(val)
dir_name.append(val)
str_dir_name = '_'.join(dir_name)
if not os.path.exists(str_dir_name):
os.mkdir(str_dir_name)
for i in range(30):
if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):
continue
seed += 1
command[1] = str(seed)
print(command)
os.mkdir(str_dir_name + '/' + str(i))
with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:
infile.write(' '.join(command))
<|reserved_special_token_1|>
import os
# Generate per-replicate run directories and command.sh launch files for the
# nk_oee binary: one directory per parameter condition, with 30 replicate
# subdirectories, each replicate getting a unique random seed.

# Default nk_oee parameter values.  A value may name another parameter (e.g.
# FILTER_LENGTH defaults to whatever POP_SIZE resolves to) and is resolved
# per-condition below.
defaults = {'N': 20, 'K': 3, 'POP_SIZE': 200, 'MUT_RATE': 0.05,
    'TOURNAMENT_SIZE': 2, 'SELECTION': 0, 'CHANGE_RATE': 100000, 'MAX_GENS':
    5000, 'FILTER_LENGTH': 'POP_SIZE'}
# Each dict overrides a subset of the defaults; {} is the all-defaults control.
conditions = [{}, {'K': 10}, {'N': 100, 'MUT_RATE': 0.01}, {'MUT_RATE':
    0.005}, {'MUT_RATE': 0.1}, {'POP_SIZE': 20}, {'POP_SIZE': 2000}, {
    'SELECTION': 1}, {'SELECTION': 1, 'FILTER_LENGTH': 1000}, {
    'CHANGE_RATE': 500}, {'CHANGE_RATE': 500, 'CHANGE_TYPE': 1}]
# Global counter: every newly generated replicate (across all conditions)
# gets a fresh, distinct seed.
seed = 0
for condition in conditions:
    print(condition)
    # command[1] is a placeholder; it is overwritten with str(seed) for each
    # replicate before the command line is written out.
    command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]
    dir_name = []
    # Fill in every parameter the condition does not override (mutates the
    # condition dict in place; appended keys follow the defaults' order).
    for var in defaults:
        if var not in condition:
            condition[var] = defaults[var]
    for var in condition:
        # Resolve indirection: a value that names another parameter is
        # replaced by that parameter's (resolved) value, e.g.
        # FILTER_LENGTH='POP_SIZE' -> the POP_SIZE value.
        while condition[var] in condition:
            condition[var] = condition[condition[var]]
        command.append('-' + var)
        # Strip underscores from the parameter name: '_' is reserved as the
        # separator in the generated directory name.
        dir_name.append(''.join(var.split('_')))
        val = str(condition[var])
        command.append(val)
        dir_name.append(val)
    str_dir_name = '_'.join(dir_name)
    if not os.path.exists(str_dir_name):
        os.mkdir(str_dir_name)
    # 30 replicates per condition; replicates that already have a command.sh
    # are skipped, so the script can be re-run to top up missing ones.
    for i in range(30):
        if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):
            continue
        seed += 1
        command[1] = str(seed)
        print(command)
        os.mkdir(str_dir_name + '/' + str(i))
        with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:
            infile.write(' '.join(command))
<|reserved_special_token_1|>
import os
# Generate per-replicate run directories and command.sh launch files for the
# nk_oee binary: one directory per parameter condition, with 30 replicate
# subdirectories, each replicate getting a unique random seed.

# Previous configuration kept for reference (fixed FILTER_LENGTH of 50):
#defaults = {"N":20, "K":3, "POP_SIZE":200, "MUT_RATE":.05, "TOURNAMENT_SIZE":2, "SELECTION":0, "CHANGE_RATE":100000, "MAX_GENS": 5000, "FILTER_LENGTH":50}
# Default nk_oee parameter values.  A value may name another parameter (e.g.
# FILTER_LENGTH defaults to whatever POP_SIZE resolves to) and is resolved
# per-condition below.
defaults = {"N":20, "K":3, "POP_SIZE":200, "MUT_RATE":.05, "TOURNAMENT_SIZE":2, "SELECTION":0, "CHANGE_RATE":100000, "MAX_GENS": 5000, "FILTER_LENGTH":"POP_SIZE"}
# Each dict overrides a subset of the defaults; {} is the all-defaults control.
conditions = [{},{"K":10}, {"N":100, "MUT_RATE":.01}, {"MUT_RATE":.005}, {"MUT_RATE": .1}, {"POP_SIZE":20}, {"POP_SIZE":2000}, {"SELECTION":1}, {"SELECTION":1, "FILTER_LENGTH":1000}, {"CHANGE_RATE":500}, {"CHANGE_RATE":500, "CHANGE_TYPE":1}]
# Global counter: every newly generated replicate (across all conditions)
# gets a fresh, distinct seed.
seed = 0
for condition in conditions:
    print(condition)
    # command[1] is a placeholder; it is overwritten with str(seed) for each
    # replicate before the command line is written out.
    command = ["./nk_oee -MODES_RESOLUTION 10 -SEED", seed]
    dir_name = []
    # Fill in every parameter the condition does not override (mutates the
    # condition dict in place; appended keys follow the defaults' order).
    for var in defaults:
        if var not in condition:
            condition[var] = defaults[var]
    for var in condition:
        # Resolve indirection: a value that names another parameter is
        # replaced by that parameter's (resolved) value, e.g.
        # FILTER_LENGTH="POP_SIZE" -> the POP_SIZE value.
        while condition[var] in condition:
            condition[var] = condition[condition[var]]
        command.append("-"+var)
        dir_name.append("".join(var.split("_"))) # Underscores in variable names will screw up parsing later
        val = str(condition[var])
        command.append(val)
        dir_name.append(val)
    str_dir_name = "_".join(dir_name)
    if not os.path.exists(str_dir_name):
        os.mkdir(str_dir_name)
    # 30 replicates per condition; replicates that already have a command.sh
    # are skipped, so the script can be re-run to top up missing ones.
    for i in range(30):
        if os.path.exists(str_dir_name+"/"+str(i)+"/command.sh"):
            continue
        seed += 1
        command[1] = str(seed)
        print(command)
        os.mkdir(str_dir_name+"/"+str(i))
        with open(str_dir_name+"/"+str(i)+"/command.sh", "w") as infile:
            infile.write(" ".join(command))
|
flexible
|
{
"blob_id": "a826f33361ec59824f3c4a83d01e94c6b307b0a9",
"index": 9144,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor condition in conditions:\n print(condition)\n command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n command.append('-' + var)\n dir_name.append(''.join(var.split('_')))\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n str_dir_name = '_'.join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n for i in range(30):\n if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name + '/' + str(i))\n with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:\n infile.write(' '.join(command))\n",
"step-3": "<mask token>\ndefaults = {'N': 20, 'K': 3, 'POP_SIZE': 200, 'MUT_RATE': 0.05,\n 'TOURNAMENT_SIZE': 2, 'SELECTION': 0, 'CHANGE_RATE': 100000, 'MAX_GENS':\n 5000, 'FILTER_LENGTH': 'POP_SIZE'}\nconditions = [{}, {'K': 10}, {'N': 100, 'MUT_RATE': 0.01}, {'MUT_RATE': \n 0.005}, {'MUT_RATE': 0.1}, {'POP_SIZE': 20}, {'POP_SIZE': 2000}, {\n 'SELECTION': 1}, {'SELECTION': 1, 'FILTER_LENGTH': 1000}, {\n 'CHANGE_RATE': 500}, {'CHANGE_RATE': 500, 'CHANGE_TYPE': 1}]\nseed = 0\nfor condition in conditions:\n print(condition)\n command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n command.append('-' + var)\n dir_name.append(''.join(var.split('_')))\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n str_dir_name = '_'.join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n for i in range(30):\n if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name + '/' + str(i))\n with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:\n infile.write(' '.join(command))\n",
"step-4": "import os\ndefaults = {'N': 20, 'K': 3, 'POP_SIZE': 200, 'MUT_RATE': 0.05,\n 'TOURNAMENT_SIZE': 2, 'SELECTION': 0, 'CHANGE_RATE': 100000, 'MAX_GENS':\n 5000, 'FILTER_LENGTH': 'POP_SIZE'}\nconditions = [{}, {'K': 10}, {'N': 100, 'MUT_RATE': 0.01}, {'MUT_RATE': \n 0.005}, {'MUT_RATE': 0.1}, {'POP_SIZE': 20}, {'POP_SIZE': 2000}, {\n 'SELECTION': 1}, {'SELECTION': 1, 'FILTER_LENGTH': 1000}, {\n 'CHANGE_RATE': 500}, {'CHANGE_RATE': 500, 'CHANGE_TYPE': 1}]\nseed = 0\nfor condition in conditions:\n print(condition)\n command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n command.append('-' + var)\n dir_name.append(''.join(var.split('_')))\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n str_dir_name = '_'.join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n for i in range(30):\n if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name + '/' + str(i))\n with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:\n infile.write(' '.join(command))\n",
"step-5": "import os\n\n\n#defaults = {\"N\":20, \"K\":3, \"POP_SIZE\":200, \"MUT_RATE\":.05, \"TOURNAMENT_SIZE\":2, \"SELECTION\":0, \"CHANGE_RATE\":100000, \"MAX_GENS\": 5000, \"FILTER_LENGTH\":50}\ndefaults = {\"N\":20, \"K\":3, \"POP_SIZE\":200, \"MUT_RATE\":.05, \"TOURNAMENT_SIZE\":2, \"SELECTION\":0, \"CHANGE_RATE\":100000, \"MAX_GENS\": 5000, \"FILTER_LENGTH\":\"POP_SIZE\"}\nconditions = [{},{\"K\":10}, {\"N\":100, \"MUT_RATE\":.01}, {\"MUT_RATE\":.005}, {\"MUT_RATE\": .1}, {\"POP_SIZE\":20}, {\"POP_SIZE\":2000}, {\"SELECTION\":1}, {\"SELECTION\":1, \"FILTER_LENGTH\":1000}, {\"CHANGE_RATE\":500}, {\"CHANGE_RATE\":500, \"CHANGE_TYPE\":1}]\n\nseed = 0\n\nfor condition in conditions:\n print(condition)\n command = [\"./nk_oee -MODES_RESOLUTION 10 -SEED\", seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n\n command.append(\"-\"+var)\n dir_name.append(\"\".join(var.split(\"_\"))) # Underscores in variable names will screw up parsing later\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n\n \n str_dir_name = \"_\".join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n \n for i in range(30):\n if os.path.exists(str_dir_name+\"/\"+str(i)+\"/command.sh\"):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name+\"/\"+str(i))\n with open(str_dir_name+\"/\"+str(i)+\"/command.sh\", \"w\") as infile:\n infile.write(\" \".join(command))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python3
import sys
import csv
# Streaming mapper: for every per-delivery ("ball") record on stdin, emit
# "batsman,bowler<TAB>dismissed<TAB>1".
for raw_line in sys.stdin:
    fields = raw_line.strip().split(',')
    if fields[0] != 'ball':
        # Skip anything that is not a per-delivery record.
        continue
    batsman = fields[4]
    bowler = fields[6]
    # 'run out', the empty marker '""' and 'retired hurt' presumably do not
    # count as dismissals for the bowler -- emit 0 for those, 1 otherwise.
    dismissed = '0' if fields[9] in ('run out', '""', 'retired hurt') else '1'
    print('%s,%s\t%s\t%s' % (batsman, bowler, dismissed, '1'))
|
normal
|
{
"blob_id": "cfa7dc295c635bbdf707f1e899c4fbf8ea91df9a",
"index": 1209,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor line in infile:\n line = line.strip()\n my_list = line.split(',')\n if my_list[0] != 'ball':\n continue\n batsman = my_list[4]\n bowler = my_list[6]\n if my_list[9] == 'run out' or my_list[9] == '\"\"' or my_list[9\n ] == 'retired hurt':\n dismissed = '0'\n else:\n dismissed = '1'\n print('%s,%s\\t%s\\t%s' % (batsman, bowler, dismissed, '1'))\n",
"step-3": "<mask token>\ninfile = sys.stdin\nfor line in infile:\n line = line.strip()\n my_list = line.split(',')\n if my_list[0] != 'ball':\n continue\n batsman = my_list[4]\n bowler = my_list[6]\n if my_list[9] == 'run out' or my_list[9] == '\"\"' or my_list[9\n ] == 'retired hurt':\n dismissed = '0'\n else:\n dismissed = '1'\n print('%s,%s\\t%s\\t%s' % (batsman, bowler, dismissed, '1'))\n",
"step-4": "import sys\nimport csv\ninfile = sys.stdin\nfor line in infile:\n line = line.strip()\n my_list = line.split(',')\n if my_list[0] != 'ball':\n continue\n batsman = my_list[4]\n bowler = my_list[6]\n if my_list[9] == 'run out' or my_list[9] == '\"\"' or my_list[9\n ] == 'retired hurt':\n dismissed = '0'\n else:\n dismissed = '1'\n print('%s,%s\\t%s\\t%s' % (batsman, bowler, dismissed, '1'))\n",
"step-5": "#!/usr/bin/python3\nimport sys\nimport csv\ninfile = sys.stdin\n\nfor line in infile:\n line = line.strip()\n my_list = line.split(',')\n if my_list[0] != \"ball\":\n continue\n batsman = my_list[4]\n bowler = my_list[6]\n if my_list[9] == 'run out' or my_list[9] == '\"\"' or my_list[9] == \"retired hurt\":\n dismissed = '0'\n else:\n dismissed = '1'\n print('%s,%s\\t%s\\t%s' % (batsman,bowler,dismissed,'1')) \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def predict_batch(test_data, model, batch_size=None):
    """Run *model*'s forward pass on *test_data* and return its raw output."""
    return model.predict(test_data, batch_size=batch_size)
<|reserved_special_token_0|>
def post_predict(test_path, score_path, entity_path, alpha=0.75):
    """Re-score saved candidate lists with a confidence threshold.

    A mention whose best candidate scores below *alpha* is mapped to the
    special 'cui-less' (unlinkable) entity; accuracy is then recomputed
    against the gold labels from *test_path*.  Mis-linked mentions are
    written to ../checkpoints/post_predict_result.txt for inspection.

    Args:
        test_path: gold test file, parsed by load_train_data.
        score_path: per-candidate score dump written by predict_data,
            parsed by load_candidates2.
        entity_path: knowledge-base file, parsed by load_entity.
        alpha: minimum winning score; below it the mention is 'cui-less'.

    Returns:
        float accuracy in [0, 1] (0.0 when the test set is empty).
    """
    candidate_dict = load_candidates2(score_path)
    test_data, all_data = load_train_data(test_path)
    entity_dict, _ = load_entity(entity_path)
    predict_dict = dict()
    for mention, candidates in candidate_dict.items():
        # NOTE(review): a single-candidate mention is accepted unconditionally,
        # i.e. the alpha threshold is never applied to it -- confirm intended.
        if len(candidates) == 1:
            predict_dict[mention] = candidates[0][0], candidates[0][1]
            continue
        # Highest-scoring candidate wins; max() keeps the first on ties,
        # matching the original strict-greater scan.
        e_id, e_name, e_score = max(candidates, key=lambda can: can[2])
        if e_score < alpha:
            e_id, e_name = 'cui-less', 'cui-less'
        predict_dict[mention] = e_id, e_name
    acc_cnt = 0
    wrong_lines = []
    for doc_id, mention, label in all_data:
        # Normalise gold CUI-less labels to canonical lower case.
        if str.lower(label) == 'cui-less':
            label = 'cui-less'
        pred_label, pred_entity_name = predict_dict[mention]
        if pred_label == label:
            acc_cnt += 1
        else:
            entity_name = entity_dict[label][0] if label in entity_dict else 'None'
            wrong_lines.append('\t'.join([doc_id, mention, label,
                entity_name, pred_label, pred_entity_name]))
    with open('../checkpoints/post_predict_result.txt', 'w') as f:
        f.write(''.join(line + '\n' for line in wrong_lines))
    total_cnt = len(all_data)
    # Guard the empty-input case instead of raising ZeroDivisionError.
    accuracy = 1.0 * acc_cnt / total_cnt if total_cnt else 0.0
    return accuracy
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def predict_batch(test_data, model, batch_size=None):
    """Run *model*'s forward pass on *test_data* and return its raw output."""
    return model.predict(test_data, batch_size=batch_size)
def predict_data(test_data, entity_path, model, predict_path, score_path,
    test_path, dataset):
    """Rank candidate entities for every mention and report linking accuracy.

    For each test item the model scores all candidate entities; the
    highest-scoring candidate becomes the prediction.  Mis-linked mentions
    are written to *predict_path* and the full per-candidate score dump to
    *score_path* (consumed later by post_predict).

    Args:
        test_data: iterable of (data, labels, raw_data) triples; data is the
            model-input dict (must contain 'entity_name'), labels the
            candidate entity ids, raw_data = (ground_truth, doc_id, mention).
        entity_path: knowledge-base file, parsed by load_entity.
        model: Keras-style model exposing .predict(data, batch_size=...).
        predict_path: output file for wrongly linked mentions.
        score_path: output file for all candidate scores.
        test_path: gold test file, forwarded to post_predict for 'clef'.
        dataset: dataset name; 'clef' enables threshold post-processing.

    Returns:
        float accuracy (post_predict's accuracy when dataset == 'clef').
    """
    entity_dict, id_map = load_entity(entity_path)
    acc_cnt, total_cnt = 0, 0
    wrong_lines = []  # per-mention error records for later inspection
    score_lines = []  # full candidate score dump, one line per mention
    for data, labels, raw_data in test_data:
        total_cnt += 1
        ground_truth, doc_id, mention = raw_data[0], raw_data[1], raw_data[2]
        raw_entity_list = data['entity_name']
        pred_result = predict_batch(data, model, batch_size=len(labels))
        # Flatten [[score], [score], ...] into a plain list of scalars.
        pred_result = [j for r in pred_result for j in r]
        pred_index = np.argmax(pred_result)
        pred_label = labels[pred_index]
        pred_entity_name = raw_entity_list[pred_index]
        row = [doc_id, mention]
        for index, score in enumerate(pred_result):
            row.extend([labels[index], raw_entity_list[index],
                str(round(score, 4))])
        score_lines.append('\t'.join(row))
        if pred_label == ground_truth:
            acc_cnt += 1
        else:
            # Record the miss together with a readable ground-truth name.
            if ground_truth in id_map:
                ground_truth = id_map[ground_truth]
            if '+' in ground_truth:
                # Composite ids have no single KB entry; keep the id itself.
                ground_name = ground_truth
            else:
                ground_name = entity_dict.get(ground_truth, ('',))[0]
            wrong_lines.append('\t'.join([doc_id, mention, ground_truth,
                ground_name, pred_label, pred_entity_name]))
    # Fix: the original divided by (total_cnt + 1), understating accuracy and
    # disagreeing with post_predict; divide by total_cnt, guarding empty input.
    accuracy = 1.0 * acc_cnt / total_cnt if total_cnt else 0.0
    with open(predict_path, 'w', encoding='utf8') as f:
        f.write(''.join(line + '\n' for line in wrong_lines))
    with open(score_path, 'w', encoding='utf8') as f:
        f.write(''.join(line + '\n' for line in score_lines))
    if dataset == 'clef':
        return post_predict(test_path, score_path, entity_path)
    else:
        return accuracy
def post_predict(test_path, score_path, entity_path, alpha=0.75):
    """Re-score saved candidate lists with a confidence threshold.

    A mention whose best candidate scores below *alpha* is mapped to the
    special 'cui-less' (unlinkable) entity; accuracy is then recomputed
    against the gold labels from *test_path*.  Mis-linked mentions are
    written to ../checkpoints/post_predict_result.txt for inspection.

    Args:
        test_path: gold test file, parsed by load_train_data.
        score_path: per-candidate score dump written by predict_data,
            parsed by load_candidates2.
        entity_path: knowledge-base file, parsed by load_entity.
        alpha: minimum winning score; below it the mention is 'cui-less'.

    Returns:
        float accuracy in [0, 1] (0.0 when the test set is empty).
    """
    candidate_dict = load_candidates2(score_path)
    test_data, all_data = load_train_data(test_path)
    entity_dict, _ = load_entity(entity_path)
    predict_dict = dict()
    for mention, candidates in candidate_dict.items():
        # NOTE(review): a single-candidate mention is accepted unconditionally,
        # i.e. the alpha threshold is never applied to it -- confirm intended.
        if len(candidates) == 1:
            predict_dict[mention] = candidates[0][0], candidates[0][1]
            continue
        # Highest-scoring candidate wins; max() keeps the first on ties,
        # matching the original strict-greater scan.
        e_id, e_name, e_score = max(candidates, key=lambda can: can[2])
        if e_score < alpha:
            e_id, e_name = 'cui-less', 'cui-less'
        predict_dict[mention] = e_id, e_name
    acc_cnt = 0
    wrong_lines = []
    for doc_id, mention, label in all_data:
        # Normalise gold CUI-less labels to canonical lower case.
        if str.lower(label) == 'cui-less':
            label = 'cui-less'
        pred_label, pred_entity_name = predict_dict[mention]
        if pred_label == label:
            acc_cnt += 1
        else:
            entity_name = entity_dict[label][0] if label in entity_dict else 'None'
            wrong_lines.append('\t'.join([doc_id, mention, label,
                entity_name, pred_label, pred_entity_name]))
    with open('../checkpoints/post_predict_result.txt', 'w') as f:
        f.write(''.join(line + '\n' for line in wrong_lines))
    total_cnt = len(all_data)
    # Guard the empty-input case instead of raising ZeroDivisionError.
    accuracy = 1.0 * acc_cnt / total_cnt if total_cnt else 0.0
    return accuracy
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def predict_batch(test_data, model, batch_size=None):
    """Run *model*'s forward pass on *test_data* and return its raw output."""
    return model.predict(test_data, batch_size=batch_size)
def predict_data(test_data, entity_path, model, predict_path, score_path,
    test_path, dataset):
    """Rank candidate entities for every mention and report linking accuracy.

    For each test item the model scores all candidate entities; the
    highest-scoring candidate becomes the prediction.  Mis-linked mentions
    are written to *predict_path* and the full per-candidate score dump to
    *score_path* (consumed later by post_predict).

    Args:
        test_data: iterable of (data, labels, raw_data) triples; data is the
            model-input dict (must contain 'entity_name'), labels the
            candidate entity ids, raw_data = (ground_truth, doc_id, mention).
        entity_path: knowledge-base file, parsed by load_entity.
        model: Keras-style model exposing .predict(data, batch_size=...).
        predict_path: output file for wrongly linked mentions.
        score_path: output file for all candidate scores.
        test_path: gold test file, forwarded to post_predict for 'clef'.
        dataset: dataset name; 'clef' enables threshold post-processing.

    Returns:
        float accuracy (post_predict's accuracy when dataset == 'clef').
    """
    entity_dict, id_map = load_entity(entity_path)
    acc_cnt, total_cnt = 0, 0
    wrong_lines = []  # per-mention error records for later inspection
    score_lines = []  # full candidate score dump, one line per mention
    for data, labels, raw_data in test_data:
        total_cnt += 1
        ground_truth, doc_id, mention = raw_data[0], raw_data[1], raw_data[2]
        raw_entity_list = data['entity_name']
        pred_result = predict_batch(data, model, batch_size=len(labels))
        # Flatten [[score], [score], ...] into a plain list of scalars.
        pred_result = [j for r in pred_result for j in r]
        pred_index = np.argmax(pred_result)
        pred_label = labels[pred_index]
        pred_entity_name = raw_entity_list[pred_index]
        row = [doc_id, mention]
        for index, score in enumerate(pred_result):
            row.extend([labels[index], raw_entity_list[index],
                str(round(score, 4))])
        score_lines.append('\t'.join(row))
        if pred_label == ground_truth:
            acc_cnt += 1
        else:
            # Record the miss together with a readable ground-truth name.
            if ground_truth in id_map:
                ground_truth = id_map[ground_truth]
            if '+' in ground_truth:
                # Composite ids have no single KB entry; keep the id itself.
                ground_name = ground_truth
            else:
                ground_name = entity_dict.get(ground_truth, ('',))[0]
            wrong_lines.append('\t'.join([doc_id, mention, ground_truth,
                ground_name, pred_label, pred_entity_name]))
    # Fix: the original divided by (total_cnt + 1), understating accuracy and
    # disagreeing with post_predict; divide by total_cnt, guarding empty input.
    accuracy = 1.0 * acc_cnt / total_cnt if total_cnt else 0.0
    with open(predict_path, 'w', encoding='utf8') as f:
        f.write(''.join(line + '\n' for line in wrong_lines))
    with open(score_path, 'w', encoding='utf8') as f:
        f.write(''.join(line + '\n' for line in score_lines))
    if dataset == 'clef':
        return post_predict(test_path, score_path, entity_path)
    else:
        return accuracy
def post_predict(test_path, score_path, entity_path, alpha=0.75):
    """Re-score saved candidate lists with a confidence threshold.

    A mention whose best candidate scores below *alpha* is mapped to the
    special 'cui-less' (unlinkable) entity; accuracy is then recomputed
    against the gold labels from *test_path*.  Mis-linked mentions are
    written to ../checkpoints/post_predict_result.txt for inspection.

    Args:
        test_path: gold test file, parsed by load_train_data.
        score_path: per-candidate score dump written by predict_data,
            parsed by load_candidates2.
        entity_path: knowledge-base file, parsed by load_entity.
        alpha: minimum winning score; below it the mention is 'cui-less'.

    Returns:
        float accuracy in [0, 1] (0.0 when the test set is empty).
    """
    candidate_dict = load_candidates2(score_path)
    test_data, all_data = load_train_data(test_path)
    entity_dict, _ = load_entity(entity_path)
    predict_dict = dict()
    for mention, candidates in candidate_dict.items():
        # NOTE(review): a single-candidate mention is accepted unconditionally,
        # i.e. the alpha threshold is never applied to it -- confirm intended.
        if len(candidates) == 1:
            predict_dict[mention] = candidates[0][0], candidates[0][1]
            continue
        # Highest-scoring candidate wins; max() keeps the first on ties,
        # matching the original strict-greater scan.
        e_id, e_name, e_score = max(candidates, key=lambda can: can[2])
        if e_score < alpha:
            e_id, e_name = 'cui-less', 'cui-less'
        predict_dict[mention] = e_id, e_name
    acc_cnt = 0
    wrong_lines = []
    for doc_id, mention, label in all_data:
        # Normalise gold CUI-less labels to canonical lower case.
        if str.lower(label) == 'cui-less':
            label = 'cui-less'
        pred_label, pred_entity_name = predict_dict[mention]
        if pred_label == label:
            acc_cnt += 1
        else:
            entity_name = entity_dict[label][0] if label in entity_dict else 'None'
            wrong_lines.append('\t'.join([doc_id, mention, label,
                entity_name, pred_label, pred_entity_name]))
    with open('../checkpoints/post_predict_result.txt', 'w') as f:
        f.write(''.join(line + '\n' for line in wrong_lines))
    total_cnt = len(all_data)
    # Guard the empty-input case instead of raising ZeroDivisionError.
    accuracy = 1.0 * acc_cnt / total_cnt if total_cnt else 0.0
    return accuracy
# Placeholder entry point: no standalone behaviour is wired up yet.
if __name__ == '__main__':
    flag = 1
<|reserved_special_token_1|>
import numpy as np
from load_data import load_entity, load_candidates2, load_train_data
def predict_batch(test_data, model, batch_size=None):
    """Run *model*'s forward pass on *test_data* and return its raw output."""
    return model.predict(test_data, batch_size=batch_size)
def predict_data(test_data, entity_path, model, predict_path, score_path,
    test_path, dataset):
    """Rank candidate entities for every mention and report linking accuracy.

    For each test item the model scores all candidate entities; the
    highest-scoring candidate becomes the prediction.  Mis-linked mentions
    are written to *predict_path* and the full per-candidate score dump to
    *score_path* (consumed later by post_predict).

    Args:
        test_data: iterable of (data, labels, raw_data) triples; data is the
            model-input dict (must contain 'entity_name'), labels the
            candidate entity ids, raw_data = (ground_truth, doc_id, mention).
        entity_path: knowledge-base file, parsed by load_entity.
        model: Keras-style model exposing .predict(data, batch_size=...).
        predict_path: output file for wrongly linked mentions.
        score_path: output file for all candidate scores.
        test_path: gold test file, forwarded to post_predict for 'clef'.
        dataset: dataset name; 'clef' enables threshold post-processing.

    Returns:
        float accuracy (post_predict's accuracy when dataset == 'clef').
    """
    entity_dict, id_map = load_entity(entity_path)
    acc_cnt, total_cnt = 0, 0
    wrong_lines = []  # per-mention error records for later inspection
    score_lines = []  # full candidate score dump, one line per mention
    for data, labels, raw_data in test_data:
        total_cnt += 1
        ground_truth, doc_id, mention = raw_data[0], raw_data[1], raw_data[2]
        raw_entity_list = data['entity_name']
        pred_result = predict_batch(data, model, batch_size=len(labels))
        # Flatten [[score], [score], ...] into a plain list of scalars.
        pred_result = [j for r in pred_result for j in r]
        pred_index = np.argmax(pred_result)
        pred_label = labels[pred_index]
        pred_entity_name = raw_entity_list[pred_index]
        row = [doc_id, mention]
        for index, score in enumerate(pred_result):
            row.extend([labels[index], raw_entity_list[index],
                str(round(score, 4))])
        score_lines.append('\t'.join(row))
        if pred_label == ground_truth:
            acc_cnt += 1
        else:
            # Record the miss together with a readable ground-truth name.
            if ground_truth in id_map:
                ground_truth = id_map[ground_truth]
            if '+' in ground_truth:
                # Composite ids have no single KB entry; keep the id itself.
                ground_name = ground_truth
            else:
                ground_name = entity_dict.get(ground_truth, ('',))[0]
            wrong_lines.append('\t'.join([doc_id, mention, ground_truth,
                ground_name, pred_label, pred_entity_name]))
    # Fix: the original divided by (total_cnt + 1), understating accuracy and
    # disagreeing with post_predict; divide by total_cnt, guarding empty input.
    accuracy = 1.0 * acc_cnt / total_cnt if total_cnt else 0.0
    with open(predict_path, 'w', encoding='utf8') as f:
        f.write(''.join(line + '\n' for line in wrong_lines))
    with open(score_path, 'w', encoding='utf8') as f:
        f.write(''.join(line + '\n' for line in score_lines))
    if dataset == 'clef':
        return post_predict(test_path, score_path, entity_path)
    else:
        return accuracy
def post_predict(test_path, score_path, entity_path, alpha=0.75):
    """Re-score saved candidate lists with a confidence threshold.

    A mention whose best candidate scores below *alpha* is mapped to the
    special 'cui-less' (unlinkable) entity; accuracy is then recomputed
    against the gold labels from *test_path*.  Mis-linked mentions are
    written to ../checkpoints/post_predict_result.txt for inspection.

    Args:
        test_path: gold test file, parsed by load_train_data.
        score_path: per-candidate score dump written by predict_data,
            parsed by load_candidates2.
        entity_path: knowledge-base file, parsed by load_entity.
        alpha: minimum winning score; below it the mention is 'cui-less'.

    Returns:
        float accuracy in [0, 1] (0.0 when the test set is empty).
    """
    candidate_dict = load_candidates2(score_path)
    test_data, all_data = load_train_data(test_path)
    entity_dict, _ = load_entity(entity_path)
    predict_dict = dict()
    for mention, candidates in candidate_dict.items():
        # NOTE(review): a single-candidate mention is accepted unconditionally,
        # i.e. the alpha threshold is never applied to it -- confirm intended.
        if len(candidates) == 1:
            predict_dict[mention] = candidates[0][0], candidates[0][1]
            continue
        # Highest-scoring candidate wins; max() keeps the first on ties,
        # matching the original strict-greater scan.
        e_id, e_name, e_score = max(candidates, key=lambda can: can[2])
        if e_score < alpha:
            e_id, e_name = 'cui-less', 'cui-less'
        predict_dict[mention] = e_id, e_name
    acc_cnt = 0
    wrong_lines = []
    for doc_id, mention, label in all_data:
        # Normalise gold CUI-less labels to canonical lower case.
        if str.lower(label) == 'cui-less':
            label = 'cui-less'
        pred_label, pred_entity_name = predict_dict[mention]
        if pred_label == label:
            acc_cnt += 1
        else:
            entity_name = entity_dict[label][0] if label in entity_dict else 'None'
            wrong_lines.append('\t'.join([doc_id, mention, label,
                entity_name, pred_label, pred_entity_name]))
    with open('../checkpoints/post_predict_result.txt', 'w') as f:
        f.write(''.join(line + '\n' for line in wrong_lines))
    total_cnt = len(all_data)
    # Guard the empty-input case instead of raising ZeroDivisionError.
    accuracy = 1.0 * acc_cnt / total_cnt if total_cnt else 0.0
    return accuracy
# Placeholder entry point: no standalone behaviour is wired up yet.
if __name__ == '__main__':
    flag = 1
<|reserved_special_token_1|>
import numpy as np
from load_data import load_entity, load_candidates2, load_train_data
def predict_batch(test_data, model, batch_size=None):
    """Run *model*'s forward pass on *test_data* and return its raw output."""
    return model.predict(test_data, batch_size=batch_size)
def predict_data(test_data, entity_path, model, predict_path, score_path,
    test_path, dataset):
    """Rank candidate entities for every mention and report linking accuracy.

    For each test item the model scores all candidate entities; the
    highest-scoring candidate becomes the prediction.  Mis-linked mentions
    are written to *predict_path* and the full per-candidate score dump to
    *score_path* (consumed later by post_predict).

    Args:
        test_data: iterable of (data, labels, raw_data) triples; data is the
            model-input dict (must contain 'entity_name'), labels the
            candidate entity ids, raw_data = (ground_truth, doc_id, mention).
        entity_path: knowledge-base file, parsed by load_entity.
        model: Keras-style model exposing .predict(data, batch_size=...).
        predict_path: output file for wrongly linked mentions.
        score_path: output file for all candidate scores.
        test_path: gold test file, forwarded to post_predict for 'clef'.
        dataset: dataset name; 'clef' enables threshold post-processing.

    Returns:
        float accuracy (post_predict's accuracy when dataset == 'clef').
    """
    entity_dict, id_map = load_entity(entity_path)
    acc_cnt, total_cnt = 0, 0
    wrong_lines = []  # per-mention error records for later inspection
    score_lines = []  # full candidate score dump, one line per mention
    for data, labels, raw_data in test_data:
        total_cnt += 1
        ground_truth, doc_id, mention = raw_data[0], raw_data[1], raw_data[2]
        raw_entity_list = data['entity_name']
        pred_result = predict_batch(data, model, batch_size=len(labels))
        # Flatten [[score], [score], ...] into a plain list of scalars.
        pred_result = [j for r in pred_result for j in r]
        pred_index = np.argmax(pred_result)
        pred_label = labels[pred_index]
        pred_entity_name = raw_entity_list[pred_index]
        row = [doc_id, mention]
        for index, score in enumerate(pred_result):
            row.extend([labels[index], raw_entity_list[index],
                str(round(score, 4))])
        score_lines.append('\t'.join(row))
        if pred_label == ground_truth:
            acc_cnt += 1
        else:
            # Record the miss together with a readable ground-truth name.
            if ground_truth in id_map:
                ground_truth = id_map[ground_truth]
            if '+' in ground_truth:
                # Composite ids have no single KB entry; keep the id itself.
                ground_name = ground_truth
            else:
                ground_name = entity_dict.get(ground_truth, ('',))[0]
            wrong_lines.append('\t'.join([doc_id, mention, ground_truth,
                ground_name, pred_label, pred_entity_name]))
    # Fix: the original divided by (total_cnt + 1), understating accuracy and
    # disagreeing with post_predict; divide by total_cnt, guarding empty input.
    accuracy = 1.0 * acc_cnt / total_cnt if total_cnt else 0.0
    with open(predict_path, 'w', encoding='utf8') as f:
        f.write(''.join(line + '\n' for line in wrong_lines))
    with open(score_path, 'w', encoding='utf8') as f:
        f.write(''.join(line + '\n' for line in score_lines))
    if dataset == 'clef':
        return post_predict(test_path, score_path, entity_path)
    else:
        return accuracy
def post_predict(test_path, score_path, entity_path, alpha=0.75):
    """Re-score saved candidate lists with a confidence threshold.

    A mention whose best candidate scores below *alpha* is mapped to the
    special 'cui-less' (unlinkable) entity; accuracy is then recomputed
    against the gold labels from *test_path*.  Mis-linked mentions are
    written to ../checkpoints/post_predict_result.txt for inspection.

    Args:
        test_path: gold test file, parsed by load_train_data.
        score_path: per-candidate score dump written by predict_data,
            parsed by load_candidates2.
        entity_path: knowledge-base file, parsed by load_entity.
        alpha: minimum winning score; below it the mention is 'cui-less'.

    Returns:
        float accuracy in [0, 1] (0.0 when the test set is empty).
    """
    candidate_dict = load_candidates2(score_path)
    test_data, all_data = load_train_data(test_path)
    entity_dict, _ = load_entity(entity_path)
    predict_dict = dict()
    for mention, candidates in candidate_dict.items():
        # NOTE(review): a single-candidate mention is accepted unconditionally,
        # i.e. the alpha threshold is never applied to it -- confirm intended.
        if len(candidates) == 1:
            predict_dict[mention] = candidates[0][0], candidates[0][1]
            continue
        # Highest-scoring candidate wins; max() keeps the first on ties,
        # matching the original strict-greater scan.
        e_id, e_name, e_score = max(candidates, key=lambda can: can[2])
        if e_score < alpha:
            e_id, e_name = 'cui-less', 'cui-less'
        predict_dict[mention] = e_id, e_name
    acc_cnt = 0
    wrong_lines = []
    for doc_id, mention, label in all_data:
        # Normalise gold CUI-less labels to canonical lower case.
        if str.lower(label) == 'cui-less':
            label = 'cui-less'
        pred_label, pred_entity_name = predict_dict[mention]
        if pred_label == label:
            acc_cnt += 1
        else:
            entity_name = entity_dict[label][0] if label in entity_dict else 'None'
            wrong_lines.append('\t'.join([doc_id, mention, label,
                entity_name, pred_label, pred_entity_name]))
    with open('../checkpoints/post_predict_result.txt', 'w') as f:
        f.write(''.join(line + '\n' for line in wrong_lines))
    total_cnt = len(all_data)
    # Guard the empty-input case instead of raising ZeroDivisionError.
    accuracy = 1.0 * acc_cnt / total_cnt if total_cnt else 0.0
    return accuracy
# Placeholder entry point: no standalone behaviour is wired up yet.
if __name__ == '__main__':
    flag = 1
|
flexible
|
{
"blob_id": "a19616d448da057d5be0af841467a25baaacf5b3",
"index": 9299,
"step-1": "<mask token>\n\n\ndef predict_batch(test_data, model, batch_size=None):\n result = model.predict(test_data, batch_size=batch_size)\n return result\n\n\n<mask token>\n\n\ndef post_predict(test_path, score_path, entity_path, alpha=0.75):\n candidate_dict = load_candidates2(score_path)\n test_data, all_data = load_train_data(test_path)\n entity_dict, _ = load_entity(entity_path)\n acc_cnt, w_l = 0, ''\n predict_dict = dict()\n for mention, candidates in candidate_dict.items():\n if len(candidates) == 1:\n predict_dict[mention] = candidates[0][0], candidates[0][1]\n continue\n max_score, max_can = candidates[0][2], candidates[0]\n for e_id, e_name, e_score in candidates:\n if e_score > max_score:\n max_score = e_score\n max_can = e_id, e_name, e_score\n e_id, e_name, e_score = max_can\n if e_score < alpha:\n e_id, e_name = 'cui-less', 'cui-less'\n predict_dict[mention] = e_id, e_name\n for doc_id, mention, label in all_data:\n if str.lower(label) == 'cui-less':\n label = 'cui-less'\n pred_label, pred_entity_name = predict_dict[mention]\n if pred_label == label:\n acc_cnt += 1\n else:\n entity_name = 'None'\n if label in entity_dict:\n entity_name = entity_dict[label][0]\n w_l += (doc_id + '\\t' + mention + '\\t' + label + '\\t' +\n entity_name + '\\t' + pred_label + '\\t' + pred_entity_name +\n '\\n')\n with open('../checkpoints/post_predict_result.txt', 'w') as f:\n f.write(w_l)\n total_cnt = len(all_data)\n accuracy = 1.0 * acc_cnt / total_cnt\n return accuracy\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef predict_batch(test_data, model, batch_size=None):\n result = model.predict(test_data, batch_size=batch_size)\n return result\n\n\ndef predict_data(test_data, entity_path, model, predict_path, score_path,\n test_path, dataset):\n entity_dict, id_map = load_entity(entity_path)\n acc_cnt, total_cnt = 0, 0\n w_l = ''\n all_score = ''\n for data, labels, raw_data in test_data:\n total_cnt += 1\n groud_truth, doc_id, mention = raw_data[0], raw_data[1], raw_data[2]\n raw_entity_list = data['entity_name']\n pred_result = predict_batch(data, model, batch_size=len(labels))\n pred_result = [j for r in pred_result for j in r]\n pred_index = np.argmax(pred_result)\n pred_label = labels[pred_index]\n pred_entity_name = raw_entity_list[pred_index]\n all_score += doc_id + '\\t' + mention\n for index, score in enumerate(pred_result):\n entity_id = labels[index]\n entity_name = raw_entity_list[index]\n all_score += '\\t' + entity_id + '\\t' + entity_name + '\\t' + str(\n round(score, 4))\n all_score += '\\n'\n if pred_label == groud_truth:\n acc_cnt += 1\n else:\n if groud_truth in id_map:\n groud_truth = id_map[groud_truth]\n ground_name = ''\n if '+' in groud_truth:\n ground_name = groud_truth\n elif groud_truth not in entity_dict:\n ground_name = ground_name\n else:\n ground_name = entity_dict[groud_truth][0]\n w_l += (doc_id + '\\t' + mention + '\\t' + groud_truth + '\\t' +\n ground_name + '\\t' + pred_label + '\\t' + pred_entity_name +\n '\\n')\n accuracy = 1.0 * acc_cnt / (total_cnt + 1)\n with open(predict_path, 'w', encoding='utf8') as f:\n f.write(w_l)\n with open(score_path, 'w', encoding='utf8') as f:\n f.write(all_score)\n if dataset == 'clef':\n return post_predict(test_path, score_path, entity_path)\n else:\n return accuracy\n\n\ndef post_predict(test_path, score_path, entity_path, alpha=0.75):\n candidate_dict = load_candidates2(score_path)\n test_data, all_data = load_train_data(test_path)\n entity_dict, _ = load_entity(entity_path)\n 
acc_cnt, w_l = 0, ''\n predict_dict = dict()\n for mention, candidates in candidate_dict.items():\n if len(candidates) == 1:\n predict_dict[mention] = candidates[0][0], candidates[0][1]\n continue\n max_score, max_can = candidates[0][2], candidates[0]\n for e_id, e_name, e_score in candidates:\n if e_score > max_score:\n max_score = e_score\n max_can = e_id, e_name, e_score\n e_id, e_name, e_score = max_can\n if e_score < alpha:\n e_id, e_name = 'cui-less', 'cui-less'\n predict_dict[mention] = e_id, e_name\n for doc_id, mention, label in all_data:\n if str.lower(label) == 'cui-less':\n label = 'cui-less'\n pred_label, pred_entity_name = predict_dict[mention]\n if pred_label == label:\n acc_cnt += 1\n else:\n entity_name = 'None'\n if label in entity_dict:\n entity_name = entity_dict[label][0]\n w_l += (doc_id + '\\t' + mention + '\\t' + label + '\\t' +\n entity_name + '\\t' + pred_label + '\\t' + pred_entity_name +\n '\\n')\n with open('../checkpoints/post_predict_result.txt', 'w') as f:\n f.write(w_l)\n total_cnt = len(all_data)\n accuracy = 1.0 * acc_cnt / total_cnt\n return accuracy\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef predict_batch(test_data, model, batch_size=None):\n result = model.predict(test_data, batch_size=batch_size)\n return result\n\n\ndef predict_data(test_data, entity_path, model, predict_path, score_path,\n test_path, dataset):\n entity_dict, id_map = load_entity(entity_path)\n acc_cnt, total_cnt = 0, 0\n w_l = ''\n all_score = ''\n for data, labels, raw_data in test_data:\n total_cnt += 1\n groud_truth, doc_id, mention = raw_data[0], raw_data[1], raw_data[2]\n raw_entity_list = data['entity_name']\n pred_result = predict_batch(data, model, batch_size=len(labels))\n pred_result = [j for r in pred_result for j in r]\n pred_index = np.argmax(pred_result)\n pred_label = labels[pred_index]\n pred_entity_name = raw_entity_list[pred_index]\n all_score += doc_id + '\\t' + mention\n for index, score in enumerate(pred_result):\n entity_id = labels[index]\n entity_name = raw_entity_list[index]\n all_score += '\\t' + entity_id + '\\t' + entity_name + '\\t' + str(\n round(score, 4))\n all_score += '\\n'\n if pred_label == groud_truth:\n acc_cnt += 1\n else:\n if groud_truth in id_map:\n groud_truth = id_map[groud_truth]\n ground_name = ''\n if '+' in groud_truth:\n ground_name = groud_truth\n elif groud_truth not in entity_dict:\n ground_name = ground_name\n else:\n ground_name = entity_dict[groud_truth][0]\n w_l += (doc_id + '\\t' + mention + '\\t' + groud_truth + '\\t' +\n ground_name + '\\t' + pred_label + '\\t' + pred_entity_name +\n '\\n')\n accuracy = 1.0 * acc_cnt / (total_cnt + 1)\n with open(predict_path, 'w', encoding='utf8') as f:\n f.write(w_l)\n with open(score_path, 'w', encoding='utf8') as f:\n f.write(all_score)\n if dataset == 'clef':\n return post_predict(test_path, score_path, entity_path)\n else:\n return accuracy\n\n\ndef post_predict(test_path, score_path, entity_path, alpha=0.75):\n candidate_dict = load_candidates2(score_path)\n test_data, all_data = load_train_data(test_path)\n entity_dict, _ = load_entity(entity_path)\n 
acc_cnt, w_l = 0, ''\n predict_dict = dict()\n for mention, candidates in candidate_dict.items():\n if len(candidates) == 1:\n predict_dict[mention] = candidates[0][0], candidates[0][1]\n continue\n max_score, max_can = candidates[0][2], candidates[0]\n for e_id, e_name, e_score in candidates:\n if e_score > max_score:\n max_score = e_score\n max_can = e_id, e_name, e_score\n e_id, e_name, e_score = max_can\n if e_score < alpha:\n e_id, e_name = 'cui-less', 'cui-less'\n predict_dict[mention] = e_id, e_name\n for doc_id, mention, label in all_data:\n if str.lower(label) == 'cui-less':\n label = 'cui-less'\n pred_label, pred_entity_name = predict_dict[mention]\n if pred_label == label:\n acc_cnt += 1\n else:\n entity_name = 'None'\n if label in entity_dict:\n entity_name = entity_dict[label][0]\n w_l += (doc_id + '\\t' + mention + '\\t' + label + '\\t' +\n entity_name + '\\t' + pred_label + '\\t' + pred_entity_name +\n '\\n')\n with open('../checkpoints/post_predict_result.txt', 'w') as f:\n f.write(w_l)\n total_cnt = len(all_data)\n accuracy = 1.0 * acc_cnt / total_cnt\n return accuracy\n\n\nif __name__ == '__main__':\n flag = 1\n",
"step-4": "import numpy as np\nfrom load_data import load_entity, load_candidates2, load_train_data\n\n\ndef predict_batch(test_data, model, batch_size=None):\n result = model.predict(test_data, batch_size=batch_size)\n return result\n\n\ndef predict_data(test_data, entity_path, model, predict_path, score_path,\n test_path, dataset):\n entity_dict, id_map = load_entity(entity_path)\n acc_cnt, total_cnt = 0, 0\n w_l = ''\n all_score = ''\n for data, labels, raw_data in test_data:\n total_cnt += 1\n groud_truth, doc_id, mention = raw_data[0], raw_data[1], raw_data[2]\n raw_entity_list = data['entity_name']\n pred_result = predict_batch(data, model, batch_size=len(labels))\n pred_result = [j for r in pred_result for j in r]\n pred_index = np.argmax(pred_result)\n pred_label = labels[pred_index]\n pred_entity_name = raw_entity_list[pred_index]\n all_score += doc_id + '\\t' + mention\n for index, score in enumerate(pred_result):\n entity_id = labels[index]\n entity_name = raw_entity_list[index]\n all_score += '\\t' + entity_id + '\\t' + entity_name + '\\t' + str(\n round(score, 4))\n all_score += '\\n'\n if pred_label == groud_truth:\n acc_cnt += 1\n else:\n if groud_truth in id_map:\n groud_truth = id_map[groud_truth]\n ground_name = ''\n if '+' in groud_truth:\n ground_name = groud_truth\n elif groud_truth not in entity_dict:\n ground_name = ground_name\n else:\n ground_name = entity_dict[groud_truth][0]\n w_l += (doc_id + '\\t' + mention + '\\t' + groud_truth + '\\t' +\n ground_name + '\\t' + pred_label + '\\t' + pred_entity_name +\n '\\n')\n accuracy = 1.0 * acc_cnt / (total_cnt + 1)\n with open(predict_path, 'w', encoding='utf8') as f:\n f.write(w_l)\n with open(score_path, 'w', encoding='utf8') as f:\n f.write(all_score)\n if dataset == 'clef':\n return post_predict(test_path, score_path, entity_path)\n else:\n return accuracy\n\n\ndef post_predict(test_path, score_path, entity_path, alpha=0.75):\n candidate_dict = load_candidates2(score_path)\n test_data, 
all_data = load_train_data(test_path)\n entity_dict, _ = load_entity(entity_path)\n acc_cnt, w_l = 0, ''\n predict_dict = dict()\n for mention, candidates in candidate_dict.items():\n if len(candidates) == 1:\n predict_dict[mention] = candidates[0][0], candidates[0][1]\n continue\n max_score, max_can = candidates[0][2], candidates[0]\n for e_id, e_name, e_score in candidates:\n if e_score > max_score:\n max_score = e_score\n max_can = e_id, e_name, e_score\n e_id, e_name, e_score = max_can\n if e_score < alpha:\n e_id, e_name = 'cui-less', 'cui-less'\n predict_dict[mention] = e_id, e_name\n for doc_id, mention, label in all_data:\n if str.lower(label) == 'cui-less':\n label = 'cui-less'\n pred_label, pred_entity_name = predict_dict[mention]\n if pred_label == label:\n acc_cnt += 1\n else:\n entity_name = 'None'\n if label in entity_dict:\n entity_name = entity_dict[label][0]\n w_l += (doc_id + '\\t' + mention + '\\t' + label + '\\t' +\n entity_name + '\\t' + pred_label + '\\t' + pred_entity_name +\n '\\n')\n with open('../checkpoints/post_predict_result.txt', 'w') as f:\n f.write(w_l)\n total_cnt = len(all_data)\n accuracy = 1.0 * acc_cnt / total_cnt\n return accuracy\n\n\nif __name__ == '__main__':\n flag = 1\n",
"step-5": "import numpy as np\nfrom load_data import load_entity, load_candidates2, load_train_data\n\n\ndef predict_batch(test_data, model, batch_size=None):\n result = model.predict(test_data, batch_size=batch_size)\n return result\n\n\ndef predict_data(test_data, entity_path, model, predict_path, score_path, test_path, dataset):\n entity_dict, id_map = load_entity(entity_path)\n acc_cnt, total_cnt = 0, 0\n w_l = ''\n all_score = ''\n for data, labels, raw_data in test_data:\n total_cnt += 1\n groud_truth, doc_id, mention = raw_data[0], raw_data[1], raw_data[2]\n\n raw_entity_list = data['entity_name']\n pred_result = predict_batch(data, model, batch_size=len(labels))\n pred_result = [j for r in pred_result for j in r]\n pred_index = np.argmax(pred_result)\n pred_label = labels[pred_index]\n pred_entity_name = raw_entity_list[pred_index]\n\n #all score\n all_score += doc_id + '\\t' + mention\n for index, score in enumerate(pred_result):\n entity_id = labels[index]\n entity_name = raw_entity_list[index]\n all_score += '\\t' + entity_id + '\\t' + entity_name + '\\t' + str(round(score, 4))\n all_score += '\\n'\n\n if pred_label == groud_truth:\n acc_cnt += 1\n else:\n # write wrong results down\n if groud_truth in id_map:\n groud_truth = id_map[groud_truth]\n\n ground_name = ''\n if '+' in groud_truth:\n ground_name = groud_truth\n else:\n if groud_truth not in entity_dict:\n ground_name = ground_name\n else:\n ground_name = entity_dict[groud_truth][0]\n w_l += doc_id + '\\t' + mention + '\\t' + groud_truth + '\\t' + \\\n ground_name + '\\t' + pred_label + '\\t' + pred_entity_name + '\\n'\n\n accuracy = 1.0 * acc_cnt / (total_cnt+1)\n with open(predict_path, 'w', encoding='utf8')as f:\n f.write(w_l)\n\n with open(score_path, 'w', encoding='utf8')as f:\n f.write(all_score)\n\n if dataset == 'clef':\n return post_predict(test_path, score_path, entity_path)\n else:\n return accuracy\n\n\ndef post_predict(test_path, score_path, entity_path, alpha=0.75):\n candidate_dict 
= load_candidates2(score_path)\n test_data, all_data = load_train_data(test_path)\n entity_dict, _ = load_entity(entity_path)\n\n acc_cnt, w_l = 0, ''\n\n predict_dict = dict()\n for mention, candidates in candidate_dict.items():\n if len(candidates) == 1:\n predict_dict[mention] = (candidates[0][0], candidates[0][1])\n continue\n max_score, max_can = candidates[0][2], candidates[0]\n for e_id, e_name, e_score in candidates:\n if e_score > max_score:\n max_score = e_score\n max_can = (e_id, e_name, e_score)\n\n e_id, e_name, e_score = max_can\n if e_score < alpha:\n e_id, e_name = 'cui-less', 'cui-less'\n predict_dict[mention] = (e_id, e_name)\n\n for doc_id, mention, label in all_data:\n if str.lower(label) == 'cui-less':\n label = 'cui-less'\n pred_label, pred_entity_name = predict_dict[mention]\n if pred_label == label:\n acc_cnt += 1\n else:\n entity_name = 'None'\n if label in entity_dict:\n entity_name = entity_dict[label][0]\n w_l += doc_id + '\\t' + mention + '\\t' + label + '\\t' + \\\n entity_name + '\\t' + pred_label + '\\t' + pred_entity_name + '\\n'\n\n with open('../checkpoints/post_predict_result.txt', 'w')as f:\n f.write(w_l)\n\n total_cnt = len(all_data)\n accuracy = 1.0 * acc_cnt / (total_cnt)\n return accuracy\n\n\nif __name__ == '__main__':\n flag = 1",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class Countries(TemplateView):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Countries(TemplateView):
<|reserved_special_token_0|>
def get_context_data(self, **kwargs):
return Countries.objects.all()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Countries(TemplateView):
template_name = 'home.html'
def get_context_data(self, **kwargs):
return Countries.objects.all()
<|reserved_special_token_1|>
from django.shortcuts import render
from django.http import response, HttpResponse, Http404
from django.views.generic import TemplateView
from django.db.models import Q
class Countries(TemplateView):
template_name = 'home.html'
def get_context_data(self, **kwargs):
return Countries.objects.all()
<|reserved_special_token_1|>
from django.shortcuts import render
from django.http import response, HttpResponse, Http404
from django.views.generic import TemplateView
from django.db.models import Q
# Create your views here.
class Countries(TemplateView):
template_name = 'home.html'
def get_context_data(self, **kwargs):
return Countries.objects.all()
|
flexible
|
{
"blob_id": "fd7fe2e4ffaa4de913931e83fd1de40f79b08d98",
"index": 6222,
"step-1": "<mask token>\n\n\nclass Countries(TemplateView):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Countries(TemplateView):\n <mask token>\n\n def get_context_data(self, **kwargs):\n return Countries.objects.all()\n",
"step-3": "<mask token>\n\n\nclass Countries(TemplateView):\n template_name = 'home.html'\n\n def get_context_data(self, **kwargs):\n return Countries.objects.all()\n",
"step-4": "from django.shortcuts import render\nfrom django.http import response, HttpResponse, Http404\nfrom django.views.generic import TemplateView\nfrom django.db.models import Q\n\n\nclass Countries(TemplateView):\n template_name = 'home.html'\n\n def get_context_data(self, **kwargs):\n return Countries.objects.all()\n",
"step-5": "from django.shortcuts import render\nfrom django.http import response, HttpResponse, Http404\nfrom django.views.generic import TemplateView\nfrom django.db.models import Q\n# Create your views here.\n\nclass Countries(TemplateView):\n template_name = 'home.html'\n\n def get_context_data(self, **kwargs):\n return Countries.objects.all()\n\n\n\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
urlpatterns = [url('buy', views.BuyPage, name='BuyPage'), url('sell', views
.SellPage, name='SellPage'), url('', views.TradePage, name='TradePage')]
<|reserved_special_token_1|>
from django.conf.urls import include, url
from . import views
urlpatterns = [url('buy', views.BuyPage, name='BuyPage'), url('sell', views
.SellPage, name='SellPage'), url('', views.TradePage, name='TradePage')]
<|reserved_special_token_1|>
# from django.urls import path,include
from django.conf.urls import include, url
from . import views
urlpatterns = [
url('buy',views.BuyPage,name='BuyPage'),
url('sell',views.SellPage,name='SellPage'),
url('',views.TradePage,name='TradePage'),
]
|
flexible
|
{
"blob_id": "5bbaffb35a89558b5cf0b4364f78d68ff2d69a01",
"index": 5726,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('buy', views.BuyPage, name='BuyPage'), url('sell', views\n .SellPage, name='SellPage'), url('', views.TradePage, name='TradePage')]\n",
"step-3": "from django.conf.urls import include, url\nfrom . import views\nurlpatterns = [url('buy', views.BuyPage, name='BuyPage'), url('sell', views\n .SellPage, name='SellPage'), url('', views.TradePage, name='TradePage')]\n",
"step-4": "# from django.urls import path,include\nfrom django.conf.urls import include, url\n\nfrom . import views\n\nurlpatterns = [\n url('buy',views.BuyPage,name='BuyPage'),\n url('sell',views.SellPage,name='SellPage'),\n url('',views.TradePage,name='TradePage'),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
include: '../ngs.settings.smk'
<|reserved_special_token_0|>
update_config(config_default, config)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
include: '../ngs.settings.smk'
config_default = {'bio.ngs.motif.centipede': {'options': ''}}
update_config(config_default, config)
config = config_default
<|reserved_special_token_1|>
# -*- snakemake -*-
#
# CENTIPEDE: Transcription factor footprinting and binding site prediction
# install.packages("CENTIPEDE", repos="http://R-Forge.R-project.org")
#
# http://centipede.uchicago.edu/
#
include: '../ngs.settings.smk'
config_default = {
'bio.ngs.motif.centipede' : {
'options' : '',
},
}
update_config(config_default, config)
config = config_default
|
flexible
|
{
"blob_id": "4620b52a43f2469ff0350d8ef6548de3a7fe1b55",
"index": 5019,
"step-1": "<mask token>\n",
"step-2": "include: '../ngs.settings.smk'\n<mask token>\nupdate_config(config_default, config)\n<mask token>\n",
"step-3": "include: '../ngs.settings.smk'\nconfig_default = {'bio.ngs.motif.centipede': {'options': ''}}\nupdate_config(config_default, config)\nconfig = config_default\n",
"step-4": "# -*- snakemake -*-\n#\n# CENTIPEDE: Transcription factor footprinting and binding site prediction\n# install.packages(\"CENTIPEDE\", repos=\"http://R-Forge.R-project.org\") \n# \n# http://centipede.uchicago.edu/\n#\ninclude: '../ngs.settings.smk'\n\nconfig_default = {\n 'bio.ngs.motif.centipede' : {\n 'options' : '',\n },\n}\n\nupdate_config(config_default, config)\nconfig = config_default\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def update_delete_product(rowid, id_, name, quantity, cost, qry):
if id_ == '' and name == '' and quantity == '' and cost == '':
return False, ' You Cannot Leave It Empty '
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
try:
quantity = int(quantity)
cost = int(cost)
if qry == 'update':
cur.execute(
f"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}"
)
conn.commit()
return True, ' Product Updated Successfully '
if qry == 'delete':
cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')
conn.commit()
return True, ' Product Deleted Successfully '
conn.commit()
conn.close()
except:
return False, ' Quantity and Cost are Integers '
except:
return False, ' Failed Connecting Database '
<|reserved_special_token_0|>
def added_to_cart(prod_id, qry):
if prod_id == '':
return False, ' Please Enter Product Id ', 1
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
if qry == 'add':
try:
cur.execute(
"""CREATE TABLE cart(
id TEXT,
name TEXT,
quantity INTEGER,
cost INTEGER) """
)
except:
pass
data = cur.execute(f"SELECT * FROM products WHERE id = '{prod_id}'"
).fetchall()
cart_check = cur.execute(
f"SELECT * FROM cart WHERE id = '{prod_id}' ").fetchall()
if len(cart_check) == 0:
cur.execute(
f"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})"
)
conn.commit()
cur.execute(
f"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'"
)
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Added To Cart Successfully ', all_prods
elif len(cart_check) > 0:
cur.execute(
f"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'"
)
conn.commit()
cur.execute(
f"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'"
)
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Added To Cart Successfully ', all_prods
if qry == 'remove':
cart_check = cur.execute(
f"SELECT * FROM cart WHERE id = '{prod_id}' ").fetchall()
if len(cart_check) == 0:
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, " Product Doesn't Exist ", all_prods
elif len(cart_check) > 0:
data = cur.execute(
f"SELECT * FROM products WHERE id = '{prod_id}'").fetchall(
)
cur.execute(
f"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'"
)
conn.commit()
cur.execute(f"DELETE FROM cart WHERE id = '{prod_id}'")
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Deleted Successfully ', all_prods
conn.close()
<|reserved_special_token_0|>
def done_Drp():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
cur.execute('DROP TABLE cart')
conn.commit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def add_product(id_, name, quantity, cost):
if id_ == '' and name == '' and quantity == '' and cost == '':
return False, ' You Cannot Leave It Empty '
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
print(id_, name, quantity, cost)
try:
quantity = int(quantity)
cost = int(cost)
print(id_, name, quantity, cost)
print(type(id_), type(name), type(quantity), type(cost))
check = cur.execute(f"SELECT * FROM products WHERE id = '{id_}'"
).fetchall()
if len(check) > 0:
return False, ' This Product Already Exist Try Updating '
else:
cur.execute('INSERT INTO products VALUES("{}","{}",{},{})'.
format(id_, name, quantity, cost))
conn.commit()
conn.close()
return True, ' Product Added Successfully '
except:
return False, ' Quantity and Cost are Integers '
except:
return False, ' Failed Connecting Database '
def get_product_detail(prod_id):
if prod_id == '':
return False, ' Enter Product Id '
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute(f"SELECT rowid,* FROM products where id='{prod_id}'"
).fetchall()
conn.close()
if len(data) == 0:
return False, " Product Don't Exist "
return True, data
def update_delete_product(rowid, id_, name, quantity, cost, qry):
if id_ == '' and name == '' and quantity == '' and cost == '':
return False, ' You Cannot Leave It Empty '
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
try:
quantity = int(quantity)
cost = int(cost)
if qry == 'update':
cur.execute(
f"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}"
)
conn.commit()
return True, ' Product Updated Successfully '
if qry == 'delete':
cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')
conn.commit()
return True, ' Product Deleted Successfully '
conn.commit()
conn.close()
except:
return False, ' Quantity and Cost are Integers '
except:
return False, ' Failed Connecting Database '
def showProducts_all():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute('SELECT * FROM products').fetchall()
return True, data
def added_to_cart(prod_id, qry):
if prod_id == '':
return False, ' Please Enter Product Id ', 1
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
if qry == 'add':
try:
cur.execute(
"""CREATE TABLE cart(
id TEXT,
name TEXT,
quantity INTEGER,
cost INTEGER) """
)
except:
pass
data = cur.execute(f"SELECT * FROM products WHERE id = '{prod_id}'"
).fetchall()
cart_check = cur.execute(
f"SELECT * FROM cart WHERE id = '{prod_id}' ").fetchall()
if len(cart_check) == 0:
cur.execute(
f"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})"
)
conn.commit()
cur.execute(
f"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'"
)
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Added To Cart Successfully ', all_prods
elif len(cart_check) > 0:
cur.execute(
f"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'"
)
conn.commit()
cur.execute(
f"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'"
)
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Added To Cart Successfully ', all_prods
if qry == 'remove':
cart_check = cur.execute(
f"SELECT * FROM cart WHERE id = '{prod_id}' ").fetchall()
if len(cart_check) == 0:
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, " Product Doesn't Exist ", all_prods
elif len(cart_check) > 0:
data = cur.execute(
f"SELECT * FROM products WHERE id = '{prod_id}'").fetchall(
)
cur.execute(
f"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'"
)
conn.commit()
cur.execute(f"DELETE FROM cart WHERE id = '{prod_id}'")
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Deleted Successfully ', all_prods
conn.close()
<|reserved_special_token_0|>
def done_Drp():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
cur.execute('DROP TABLE cart')
conn.commit()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def verif_admin(username, password):
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
print(username)
print(password)
data = cur.execute('SELECT password FROM admin WHERE username = "{}"'
.format(username)).fetchall()[0][0]
conn.close()
if password == data:
return True
else:
return False
except:
return False
def add_product(id_, name, quantity, cost):
if id_ == '' and name == '' and quantity == '' and cost == '':
return False, ' You Cannot Leave It Empty '
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
print(id_, name, quantity, cost)
try:
quantity = int(quantity)
cost = int(cost)
print(id_, name, quantity, cost)
print(type(id_), type(name), type(quantity), type(cost))
check = cur.execute(f"SELECT * FROM products WHERE id = '{id_}'"
).fetchall()
if len(check) > 0:
return False, ' This Product Already Exist Try Updating '
else:
cur.execute('INSERT INTO products VALUES("{}","{}",{},{})'.
format(id_, name, quantity, cost))
conn.commit()
conn.close()
return True, ' Product Added Successfully '
except:
return False, ' Quantity and Cost are Integers '
except:
return False, ' Failed Connecting Database '
def get_product_detail(prod_id):
if prod_id == '':
return False, ' Enter Product Id '
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute(f"SELECT rowid,* FROM products where id='{prod_id}'"
).fetchall()
conn.close()
if len(data) == 0:
return False, " Product Don't Exist "
return True, data
def update_delete_product(rowid, id_, name, quantity, cost, qry):
if id_ == '' and name == '' and quantity == '' and cost == '':
return False, ' You Cannot Leave It Empty '
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
try:
quantity = int(quantity)
cost = int(cost)
if qry == 'update':
cur.execute(
f"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}"
)
conn.commit()
return True, ' Product Updated Successfully '
if qry == 'delete':
cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')
conn.commit()
return True, ' Product Deleted Successfully '
conn.commit()
conn.close()
except:
return False, ' Quantity and Cost are Integers '
except:
return False, ' Failed Connecting Database '
def showProducts_all():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute('SELECT * FROM products').fetchall()
return True, data
def added_to_cart(prod_id, qry):
if prod_id == '':
return False, ' Please Enter Product Id ', 1
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
if qry == 'add':
try:
cur.execute(
"""CREATE TABLE cart(
id TEXT,
name TEXT,
quantity INTEGER,
cost INTEGER) """
)
except:
pass
data = cur.execute(f"SELECT * FROM products WHERE id = '{prod_id}'"
).fetchall()
cart_check = cur.execute(
f"SELECT * FROM cart WHERE id = '{prod_id}' ").fetchall()
if len(cart_check) == 0:
cur.execute(
f"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})"
)
conn.commit()
cur.execute(
f"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'"
)
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Added To Cart Successfully ', all_prods
elif len(cart_check) > 0:
cur.execute(
f"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'"
)
conn.commit()
cur.execute(
f"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'"
)
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Added To Cart Successfully ', all_prods
if qry == 'remove':
cart_check = cur.execute(
f"SELECT * FROM cart WHERE id = '{prod_id}' ").fetchall()
if len(cart_check) == 0:
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, " Product Doesn't Exist ", all_prods
elif len(cart_check) > 0:
data = cur.execute(
f"SELECT * FROM products WHERE id = '{prod_id}'").fetchall(
)
cur.execute(
f"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'"
)
conn.commit()
cur.execute(f"DELETE FROM cart WHERE id = '{prod_id}'")
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Deleted Successfully ', all_prods
conn.close()
<|reserved_special_token_0|>
def done_Drp():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
cur.execute('DROP TABLE cart')
conn.commit()
<|reserved_special_token_1|>
import sqlite3
def verif_admin(username, password):
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
print(username)
print(password)
data = cur.execute('SELECT password FROM admin WHERE username = "{}"'
.format(username)).fetchall()[0][0]
conn.close()
if password == data:
return True
else:
return False
except:
return False
def add_product(id_, name, quantity, cost):
if id_ == '' and name == '' and quantity == '' and cost == '':
return False, ' You Cannot Leave It Empty '
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
print(id_, name, quantity, cost)
try:
quantity = int(quantity)
cost = int(cost)
print(id_, name, quantity, cost)
print(type(id_), type(name), type(quantity), type(cost))
check = cur.execute(f"SELECT * FROM products WHERE id = '{id_}'"
).fetchall()
if len(check) > 0:
return False, ' This Product Already Exist Try Updating '
else:
cur.execute('INSERT INTO products VALUES("{}","{}",{},{})'.
format(id_, name, quantity, cost))
conn.commit()
conn.close()
return True, ' Product Added Successfully '
except:
return False, ' Quantity and Cost are Integers '
except:
return False, ' Failed Connecting Database '
def get_product_detail(prod_id):
if prod_id == '':
return False, ' Enter Product Id '
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute(f"SELECT rowid,* FROM products where id='{prod_id}'"
).fetchall()
conn.close()
if len(data) == 0:
return False, " Product Don't Exist "
return True, data
def update_delete_product(rowid, id_, name, quantity, cost, qry):
if id_ == '' and name == '' and quantity == '' and cost == '':
return False, ' You Cannot Leave It Empty '
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
try:
quantity = int(quantity)
cost = int(cost)
if qry == 'update':
cur.execute(
f"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}"
)
conn.commit()
return True, ' Product Updated Successfully '
if qry == 'delete':
cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')
conn.commit()
return True, ' Product Deleted Successfully '
conn.commit()
conn.close()
except:
return False, ' Quantity and Cost are Integers '
except:
return False, ' Failed Connecting Database '
def showProducts_all():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute('SELECT * FROM products').fetchall()
return True, data
def added_to_cart(prod_id, qry):
if prod_id == '':
return False, ' Please Enter Product Id ', 1
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
if qry == 'add':
try:
cur.execute(
"""CREATE TABLE cart(
id TEXT,
name TEXT,
quantity INTEGER,
cost INTEGER) """
)
except:
pass
data = cur.execute(f"SELECT * FROM products WHERE id = '{prod_id}'"
).fetchall()
cart_check = cur.execute(
f"SELECT * FROM cart WHERE id = '{prod_id}' ").fetchall()
if len(cart_check) == 0:
cur.execute(
f"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})"
)
conn.commit()
cur.execute(
f"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'"
)
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Added To Cart Successfully ', all_prods
elif len(cart_check) > 0:
cur.execute(
f"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'"
)
conn.commit()
cur.execute(
f"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'"
)
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Added To Cart Successfully ', all_prods
if qry == 'remove':
cart_check = cur.execute(
f"SELECT * FROM cart WHERE id = '{prod_id}' ").fetchall()
if len(cart_check) == 0:
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, " Product Doesn't Exist ", all_prods
elif len(cart_check) > 0:
data = cur.execute(
f"SELECT * FROM products WHERE id = '{prod_id}'").fetchall(
)
cur.execute(
f"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'"
)
conn.commit()
cur.execute(f"DELETE FROM cart WHERE id = '{prod_id}'")
conn.commit()
all_prods = cur.execute('SELECT * FROM cart').fetchall()
return True, ' Product Deleted Successfully ', all_prods
conn.close()
def get_cost():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute('SELECT * FROM cart').fetchall()
cost = 0
for i in data:
cost = cost + i[3]
return cost
def done_Drp():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
cur.execute('DROP TABLE cart')
conn.commit()
<|reserved_special_token_1|>
import sqlite3
# cur.execute('CREATE TABLE admin(username TEXT,password TEXT)')
# conn.commit()
# cur.execute("INSERT INTO admin VALUES('nilesh','nilesh')")
# conn.commit()
def verif_admin(username, password):
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
print(username)
print(password)
data = cur.execute('SELECT password FROM admin WHERE username = "{}"'.format(username)).fetchall()[0][0]
conn.close()
if password == data:
return True
else:
return False
except:
return False
def add_product(id_, name, quantity, cost):
if id_ == '' and name == '' and quantity == '' and cost == '':
return False, " You Cannot Leave It Empty "
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
print(id_, name, quantity, cost)
try:
quantity = int(quantity)
cost = int(cost)
print(id_, name, quantity, cost)
print(type(id_), type(name), type(quantity), type(cost))
check = cur.execute(f"SELECT * FROM products WHERE id = '{id_}'").fetchall()
if len(check) > 0:
return False, " This Product Already Exist Try Updating "
else:
cur.execute('INSERT INTO products VALUES("{}","{}",{},{})'.format(id_, name, quantity, cost))
conn.commit()
conn.close()
return True, " Product Added Successfully "
except:
return False, " Quantity and Cost are Integers "
except:
return False, " Failed Connecting Database "
def get_product_detail(prod_id):
if prod_id == '':
return False, " Enter Product Id "
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute(f"SELECT rowid,* FROM products where id='{prod_id}'").fetchall()
conn.close()
if len(data) == 0:
return False, " Product Don't Exist "
return True, data
def update_delete_product(rowid, id_, name, quantity, cost, qry):
if id_ == '' and name == '' and quantity == '' and cost == '':
return False, " You Cannot Leave It Empty "
try:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
try:
quantity = int(quantity)
cost = int(cost)
if qry == 'update':
cur.execute(
f"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}")
conn.commit()
return True, " Product Updated Successfully "
if qry == "delete":
cur.execute(f"DELETE FROM products WHERE rowid={rowid} ")
conn.commit()
return True, " Product Deleted Successfully "
conn.commit()
conn.close()
except:
return False, " Quantity and Cost are Integers "
except:
return False, " Failed Connecting Database "
def showProducts_all():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute("SELECT * FROM products").fetchall()
return True, data
def added_to_cart(prod_id, qry):
if prod_id == '':
return False, " Please Enter Product Id ",1
else:
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
if qry == "add":
try:
cur.execute("""CREATE TABLE cart(
id TEXT,
name TEXT,
quantity INTEGER,
cost INTEGER) """)
except:
pass
data = cur.execute(f"""SELECT * FROM products WHERE id = '{prod_id}'""").fetchall()
cart_check = cur.execute(f"""SELECT * FROM cart WHERE id = '{prod_id}' """).fetchall()
if len(cart_check) == 0:
cur.execute(f"""INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})""")
conn.commit()
cur.execute(f"""UPDATE products SET quantity = {(data[0][2] - 1)} WHERE id ='{prod_id}'""")
conn.commit()
all_prods = cur.execute("SELECT * FROM cart").fetchall()
return True, " Product Added To Cart Successfully ",all_prods
elif len(cart_check) > 0:
cur.execute(
f"""UPDATE cart SET quantity = {(cart_check[0][2] + 1)},cost={(cart_check[0][3] + data[0][3])} WHERE id ='{prod_id}'""")
conn.commit()
cur.execute(f"""UPDATE products SET quantity = {(data[0][2] - 1)} WHERE id ='{prod_id}'""")
conn.commit()
all_prods = cur.execute("SELECT * FROM cart").fetchall()
return True, " Product Added To Cart Successfully ",all_prods
if qry == "remove":
cart_check = cur.execute(f"""SELECT * FROM cart WHERE id = '{prod_id}' """).fetchall()
if len(cart_check) == 0:
all_prods = cur.execute("SELECT * FROM cart").fetchall()
return True," Product Doesn't Exist ",all_prods
elif len(cart_check) > 0:
data = cur.execute(f"""SELECT * FROM products WHERE id = '{prod_id}'""").fetchall()
cur.execute(f"UPDATE products SET quantity = {(data[0][2]+cart_check[0][2])} WHERE id ='{prod_id}'")
conn.commit()
cur.execute(f"DELETE FROM cart WHERE id = '{prod_id}'")
conn.commit()
all_prods = cur.execute("SELECT * FROM cart").fetchall()
return True," Product Deleted Successfully ",all_prods
conn.close()
def get_cost():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
data = cur.execute("SELECT * FROM cart").fetchall()
cost = 0
for i in data:
cost = cost+i[3]
return cost
def done_Drp():
conn = sqlite3.connect('SuperMarket.db')
cur = conn.cursor()
cur.execute("DROP TABLE cart")
conn.commit()
|
flexible
|
{
"blob_id": "88d0ced41a8f176a8a12bba6406b4162ea6dfc52",
"index": 9308,
"step-1": "<mask token>\n\n\ndef update_delete_product(rowid, id_, name, quantity, cost, qry):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n try:\n quantity = int(quantity)\n cost = int(cost)\n if qry == 'update':\n cur.execute(\n f\"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}\"\n )\n conn.commit()\n return True, ' Product Updated Successfully '\n if qry == 'delete':\n cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')\n conn.commit()\n return True, ' Product Deleted Successfully '\n conn.commit()\n conn.close()\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\n<mask token>\n\n\ndef added_to_cart(prod_id, qry):\n if prod_id == '':\n return False, ' Please Enter Product Id ', 1\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n if qry == 'add':\n try:\n cur.execute(\n \"\"\"CREATE TABLE cart(\n id TEXT,\n name TEXT,\n quantity INTEGER,\n cost INTEGER) \"\"\"\n )\n except:\n pass\n data = cur.execute(f\"SELECT * FROM products WHERE id = '{prod_id}'\"\n ).fetchall()\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n cur.execute(\n f\"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n elif len(cart_check) > 0:\n cur.execute(\n f\"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE 
id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n if qry == 'remove':\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, \" Product Doesn't Exist \", all_prods\n elif len(cart_check) > 0:\n data = cur.execute(\n f\"SELECT * FROM products WHERE id = '{prod_id}'\").fetchall(\n )\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(f\"DELETE FROM cart WHERE id = '{prod_id}'\")\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Deleted Successfully ', all_prods\n conn.close()\n\n\n<mask token>\n\n\ndef done_Drp():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n cur.execute('DROP TABLE cart')\n conn.commit()\n",
"step-2": "<mask token>\n\n\ndef add_product(id_, name, quantity, cost):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(id_, name, quantity, cost)\n try:\n quantity = int(quantity)\n cost = int(cost)\n print(id_, name, quantity, cost)\n print(type(id_), type(name), type(quantity), type(cost))\n check = cur.execute(f\"SELECT * FROM products WHERE id = '{id_}'\"\n ).fetchall()\n if len(check) > 0:\n return False, ' This Product Already Exist Try Updating '\n else:\n cur.execute('INSERT INTO products VALUES(\"{}\",\"{}\",{},{})'.\n format(id_, name, quantity, cost))\n conn.commit()\n conn.close()\n return True, ' Product Added Successfully '\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef get_product_detail(prod_id):\n if prod_id == '':\n return False, ' Enter Product Id '\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(f\"SELECT rowid,* FROM products where id='{prod_id}'\"\n ).fetchall()\n conn.close()\n if len(data) == 0:\n return False, \" Product Don't Exist \"\n return True, data\n\n\ndef update_delete_product(rowid, id_, name, quantity, cost, qry):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n try:\n quantity = int(quantity)\n cost = int(cost)\n if qry == 'update':\n cur.execute(\n f\"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}\"\n )\n conn.commit()\n return True, ' Product Updated Successfully '\n if qry == 'delete':\n cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')\n conn.commit()\n return True, ' Product Deleted Successfully '\n conn.commit()\n conn.close()\n except:\n return False, ' 
Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef showProducts_all():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute('SELECT * FROM products').fetchall()\n return True, data\n\n\ndef added_to_cart(prod_id, qry):\n if prod_id == '':\n return False, ' Please Enter Product Id ', 1\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n if qry == 'add':\n try:\n cur.execute(\n \"\"\"CREATE TABLE cart(\n id TEXT,\n name TEXT,\n quantity INTEGER,\n cost INTEGER) \"\"\"\n )\n except:\n pass\n data = cur.execute(f\"SELECT * FROM products WHERE id = '{prod_id}'\"\n ).fetchall()\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n cur.execute(\n f\"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n elif len(cart_check) > 0:\n cur.execute(\n f\"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n if qry == 'remove':\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, \" Product Doesn't Exist \", all_prods\n elif len(cart_check) > 0:\n data = cur.execute(\n f\"SELECT * FROM products WHERE id = '{prod_id}'\").fetchall(\n )\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] + 
cart_check[0][2]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(f\"DELETE FROM cart WHERE id = '{prod_id}'\")\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Deleted Successfully ', all_prods\n conn.close()\n\n\n<mask token>\n\n\ndef done_Drp():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n cur.execute('DROP TABLE cart')\n conn.commit()\n",
"step-3": "<mask token>\n\n\ndef verif_admin(username, password):\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(username)\n print(password)\n data = cur.execute('SELECT password FROM admin WHERE username = \"{}\"'\n .format(username)).fetchall()[0][0]\n conn.close()\n if password == data:\n return True\n else:\n return False\n except:\n return False\n\n\ndef add_product(id_, name, quantity, cost):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(id_, name, quantity, cost)\n try:\n quantity = int(quantity)\n cost = int(cost)\n print(id_, name, quantity, cost)\n print(type(id_), type(name), type(quantity), type(cost))\n check = cur.execute(f\"SELECT * FROM products WHERE id = '{id_}'\"\n ).fetchall()\n if len(check) > 0:\n return False, ' This Product Already Exist Try Updating '\n else:\n cur.execute('INSERT INTO products VALUES(\"{}\",\"{}\",{},{})'.\n format(id_, name, quantity, cost))\n conn.commit()\n conn.close()\n return True, ' Product Added Successfully '\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef get_product_detail(prod_id):\n if prod_id == '':\n return False, ' Enter Product Id '\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(f\"SELECT rowid,* FROM products where id='{prod_id}'\"\n ).fetchall()\n conn.close()\n if len(data) == 0:\n return False, \" Product Don't Exist \"\n return True, data\n\n\ndef update_delete_product(rowid, id_, name, quantity, cost, qry):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n try:\n quantity = int(quantity)\n cost = int(cost)\n if qry == 'update':\n cur.execute(\n f\"UPDATE products SET id 
= '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}\"\n )\n conn.commit()\n return True, ' Product Updated Successfully '\n if qry == 'delete':\n cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')\n conn.commit()\n return True, ' Product Deleted Successfully '\n conn.commit()\n conn.close()\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef showProducts_all():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute('SELECT * FROM products').fetchall()\n return True, data\n\n\ndef added_to_cart(prod_id, qry):\n if prod_id == '':\n return False, ' Please Enter Product Id ', 1\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n if qry == 'add':\n try:\n cur.execute(\n \"\"\"CREATE TABLE cart(\n id TEXT,\n name TEXT,\n quantity INTEGER,\n cost INTEGER) \"\"\"\n )\n except:\n pass\n data = cur.execute(f\"SELECT * FROM products WHERE id = '{prod_id}'\"\n ).fetchall()\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n cur.execute(\n f\"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n elif len(cart_check) > 0:\n cur.execute(\n f\"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n if qry == 'remove':\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE 
id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, \" Product Doesn't Exist \", all_prods\n elif len(cart_check) > 0:\n data = cur.execute(\n f\"SELECT * FROM products WHERE id = '{prod_id}'\").fetchall(\n )\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(f\"DELETE FROM cart WHERE id = '{prod_id}'\")\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Deleted Successfully ', all_prods\n conn.close()\n\n\n<mask token>\n\n\ndef done_Drp():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n cur.execute('DROP TABLE cart')\n conn.commit()\n",
"step-4": "import sqlite3\n\n\ndef verif_admin(username, password):\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(username)\n print(password)\n data = cur.execute('SELECT password FROM admin WHERE username = \"{}\"'\n .format(username)).fetchall()[0][0]\n conn.close()\n if password == data:\n return True\n else:\n return False\n except:\n return False\n\n\ndef add_product(id_, name, quantity, cost):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(id_, name, quantity, cost)\n try:\n quantity = int(quantity)\n cost = int(cost)\n print(id_, name, quantity, cost)\n print(type(id_), type(name), type(quantity), type(cost))\n check = cur.execute(f\"SELECT * FROM products WHERE id = '{id_}'\"\n ).fetchall()\n if len(check) > 0:\n return False, ' This Product Already Exist Try Updating '\n else:\n cur.execute('INSERT INTO products VALUES(\"{}\",\"{}\",{},{})'.\n format(id_, name, quantity, cost))\n conn.commit()\n conn.close()\n return True, ' Product Added Successfully '\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef get_product_detail(prod_id):\n if prod_id == '':\n return False, ' Enter Product Id '\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(f\"SELECT rowid,* FROM products where id='{prod_id}'\"\n ).fetchall()\n conn.close()\n if len(data) == 0:\n return False, \" Product Don't Exist \"\n return True, data\n\n\ndef update_delete_product(rowid, id_, name, quantity, cost, qry):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, ' You Cannot Leave It Empty '\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n try:\n quantity = int(quantity)\n cost = int(cost)\n if qry == 'update':\n cur.execute(\n f\"UPDATE products SET 
id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}\"\n )\n conn.commit()\n return True, ' Product Updated Successfully '\n if qry == 'delete':\n cur.execute(f'DELETE FROM products WHERE rowid={rowid} ')\n conn.commit()\n return True, ' Product Deleted Successfully '\n conn.commit()\n conn.close()\n except:\n return False, ' Quantity and Cost are Integers '\n except:\n return False, ' Failed Connecting Database '\n\n\ndef showProducts_all():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute('SELECT * FROM products').fetchall()\n return True, data\n\n\ndef added_to_cart(prod_id, qry):\n if prod_id == '':\n return False, ' Please Enter Product Id ', 1\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n if qry == 'add':\n try:\n cur.execute(\n \"\"\"CREATE TABLE cart(\n id TEXT,\n name TEXT,\n quantity INTEGER,\n cost INTEGER) \"\"\"\n )\n except:\n pass\n data = cur.execute(f\"SELECT * FROM products WHERE id = '{prod_id}'\"\n ).fetchall()\n cart_check = cur.execute(\n f\"SELECT * FROM cart WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n cur.execute(\n f\"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n elif len(cart_check) > 0:\n cur.execute(\n f\"UPDATE cart SET quantity = {cart_check[0][2] + 1},cost={cart_check[0][3] + data[0][3]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] - 1} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Added To Cart Successfully ', all_prods\n if qry == 'remove':\n cart_check = cur.execute(\n f\"SELECT * FROM cart 
WHERE id = '{prod_id}' \").fetchall()\n if len(cart_check) == 0:\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, \" Product Doesn't Exist \", all_prods\n elif len(cart_check) > 0:\n data = cur.execute(\n f\"SELECT * FROM products WHERE id = '{prod_id}'\").fetchall(\n )\n cur.execute(\n f\"UPDATE products SET quantity = {data[0][2] + cart_check[0][2]} WHERE id ='{prod_id}'\"\n )\n conn.commit()\n cur.execute(f\"DELETE FROM cart WHERE id = '{prod_id}'\")\n conn.commit()\n all_prods = cur.execute('SELECT * FROM cart').fetchall()\n return True, ' Product Deleted Successfully ', all_prods\n conn.close()\n\n\ndef get_cost():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute('SELECT * FROM cart').fetchall()\n cost = 0\n for i in data:\n cost = cost + i[3]\n return cost\n\n\ndef done_Drp():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n cur.execute('DROP TABLE cart')\n conn.commit()\n",
"step-5": "import sqlite3\n\n\n# cur.execute('CREATE TABLE admin(username TEXT,password TEXT)')\n# conn.commit()\n# cur.execute(\"INSERT INTO admin VALUES('nilesh','nilesh')\")\n# conn.commit()\n\ndef verif_admin(username, password):\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(username)\n print(password)\n data = cur.execute('SELECT password FROM admin WHERE username = \"{}\"'.format(username)).fetchall()[0][0]\n\n conn.close()\n if password == data:\n return True\n else:\n return False\n except:\n return False\n\n\ndef add_product(id_, name, quantity, cost):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, \" You Cannot Leave It Empty \"\n try:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n print(id_, name, quantity, cost)\n try:\n quantity = int(quantity)\n cost = int(cost)\n print(id_, name, quantity, cost)\n print(type(id_), type(name), type(quantity), type(cost))\n check = cur.execute(f\"SELECT * FROM products WHERE id = '{id_}'\").fetchall()\n if len(check) > 0:\n return False, \" This Product Already Exist Try Updating \"\n else:\n cur.execute('INSERT INTO products VALUES(\"{}\",\"{}\",{},{})'.format(id_, name, quantity, cost))\n conn.commit()\n conn.close()\n return True, \" Product Added Successfully \"\n except:\n\n return False, \" Quantity and Cost are Integers \"\n\n except:\n\n return False, \" Failed Connecting Database \"\n\n\ndef get_product_detail(prod_id):\n if prod_id == '':\n return False, \" Enter Product Id \"\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(f\"SELECT rowid,* FROM products where id='{prod_id}'\").fetchall()\n conn.close()\n if len(data) == 0:\n return False, \" Product Don't Exist \"\n return True, data\n\n\ndef update_delete_product(rowid, id_, name, quantity, cost, qry):\n if id_ == '' and name == '' and quantity == '' and cost == '':\n return False, \" You Cannot Leave It Empty \"\n try:\n 
conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n try:\n quantity = int(quantity)\n cost = int(cost)\n if qry == 'update':\n cur.execute(\n f\"UPDATE products SET id = '{id_}',name='{name}',quantity = {quantity},cost={cost} WHERE rowid = {rowid}\")\n conn.commit()\n return True, \" Product Updated Successfully \"\n if qry == \"delete\":\n cur.execute(f\"DELETE FROM products WHERE rowid={rowid} \")\n conn.commit()\n return True, \" Product Deleted Successfully \"\n conn.commit()\n conn.close()\n\n except:\n\n return False, \" Quantity and Cost are Integers \"\n except:\n return False, \" Failed Connecting Database \"\n\n\ndef showProducts_all():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(\"SELECT * FROM products\").fetchall()\n return True, data\n\n\ndef added_to_cart(prod_id, qry):\n if prod_id == '':\n return False, \" Please Enter Product Id \",1\n else:\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n if qry == \"add\":\n try:\n cur.execute(\"\"\"CREATE TABLE cart(\n id TEXT,\n name TEXT,\n quantity INTEGER,\n cost INTEGER) \"\"\")\n except:\n pass\n\n data = cur.execute(f\"\"\"SELECT * FROM products WHERE id = '{prod_id}'\"\"\").fetchall()\n cart_check = cur.execute(f\"\"\"SELECT * FROM cart WHERE id = '{prod_id}' \"\"\").fetchall()\n if len(cart_check) == 0:\n cur.execute(f\"\"\"INSERT INTO cart VALUES('{data[0][0]}','{data[0][1]}',1,{data[0][3]})\"\"\")\n conn.commit()\n cur.execute(f\"\"\"UPDATE products SET quantity = {(data[0][2] - 1)} WHERE id ='{prod_id}'\"\"\")\n conn.commit()\n all_prods = cur.execute(\"SELECT * FROM cart\").fetchall()\n return True, \" Product Added To Cart Successfully \",all_prods\n\n elif len(cart_check) > 0:\n cur.execute(\n f\"\"\"UPDATE cart SET quantity = {(cart_check[0][2] + 1)},cost={(cart_check[0][3] + data[0][3])} WHERE id ='{prod_id}'\"\"\")\n conn.commit()\n cur.execute(f\"\"\"UPDATE products SET quantity = {(data[0][2] - 1)} WHERE id 
='{prod_id}'\"\"\")\n conn.commit()\n all_prods = cur.execute(\"SELECT * FROM cart\").fetchall()\n return True, \" Product Added To Cart Successfully \",all_prods\n\n\n if qry == \"remove\":\n\n cart_check = cur.execute(f\"\"\"SELECT * FROM cart WHERE id = '{prod_id}' \"\"\").fetchall()\n if len(cart_check) == 0:\n all_prods = cur.execute(\"SELECT * FROM cart\").fetchall()\n return True,\" Product Doesn't Exist \",all_prods\n elif len(cart_check) > 0:\n data = cur.execute(f\"\"\"SELECT * FROM products WHERE id = '{prod_id}'\"\"\").fetchall()\n cur.execute(f\"UPDATE products SET quantity = {(data[0][2]+cart_check[0][2])} WHERE id ='{prod_id}'\")\n conn.commit()\n cur.execute(f\"DELETE FROM cart WHERE id = '{prod_id}'\")\n conn.commit()\n all_prods = cur.execute(\"SELECT * FROM cart\").fetchall()\n return True,\" Product Deleted Successfully \",all_prods\n\n conn.close()\n\n\ndef get_cost():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n data = cur.execute(\"SELECT * FROM cart\").fetchall()\n cost = 0\n for i in data:\n cost = cost+i[3]\n return cost\n\n\ndef done_Drp():\n conn = sqlite3.connect('SuperMarket.db')\n cur = conn.cursor()\n cur.execute(\"DROP TABLE cart\")\n conn.commit()\n\n",
"step-ids": [
3,
6,
7,
9,
10
]
}
|
[
3,
6,
7,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def notification(message: str):
"""
Display notification to the desktop
Task:
1. show() -> it will generate a complete new pop
2. update() -> it will update the payload part of same notification pop-up, not issuing any new one.
Usage : python <filename.py> typeObj:str value:int objective:str
typeObj: RAM/SWAP/NORMAL
value: current usage of RAM or SWAP (for NORMAL, the value = 0)
objective: show/update
"""
notify2.init('notifywhenLOAD')
notifyObj = notify2.Notification('Emergency Alert!', message)
notifyObj.set_timeout(12000)
return notifyObj
def main():
a = notification(f'{sys.argv[1]} exceeds {sys.argv[2]}')
if sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'update':
a.update(f'{sys.argv[1]} Alert!! Warning for death')
a.set_urgency(2)
a.show()
elif sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'show':
a.set_timeout(10000)
a.set_urgency(1)
a.show()
elif sys.argv[1] == 'NORMAL':
a.update('ChiLLax!!! Nothing to worry about')
a.set_urgency(0)
a.show()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def notification(message: str):
"""
Display notification to the desktop
Task:
1. show() -> it will generate a complete new pop
2. update() -> it will update the payload part of same notification pop-up, not issuing any new one.
Usage : python <filename.py> typeObj:str value:int objective:str
typeObj: RAM/SWAP/NORMAL
value: current usage of RAM or SWAP (for NORMAL, the value = 0)
objective: show/update
"""
notify2.init('notifywhenLOAD')
notifyObj = notify2.Notification('Emergency Alert!', message)
notifyObj.set_timeout(12000)
return notifyObj
def main():
a = notification(f'{sys.argv[1]} exceeds {sys.argv[2]}')
if sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'update':
a.update(f'{sys.argv[1]} Alert!! Warning for death')
a.set_urgency(2)
a.show()
elif sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'show':
a.set_timeout(10000)
a.set_urgency(1)
a.show()
elif sys.argv[1] == 'NORMAL':
a.update('ChiLLax!!! Nothing to worry about')
a.set_urgency(0)
a.show()
main()
<|reserved_special_token_1|>
import sys
import notify2
import subprocess
from time import sleep
def notification(message: str):
"""
Display notification to the desktop
Task:
1. show() -> it will generate a complete new pop
2. update() -> it will update the payload part of same notification pop-up, not issuing any new one.
Usage : python <filename.py> typeObj:str value:int objective:str
typeObj: RAM/SWAP/NORMAL
value: current usage of RAM or SWAP (for NORMAL, the value = 0)
objective: show/update
"""
notify2.init('notifywhenLOAD')
notifyObj = notify2.Notification('Emergency Alert!', message)
notifyObj.set_timeout(12000)
return notifyObj
def main():
a = notification(f'{sys.argv[1]} exceeds {sys.argv[2]}')
if sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'update':
a.update(f'{sys.argv[1]} Alert!! Warning for death')
a.set_urgency(2)
a.show()
elif sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'show':
a.set_timeout(10000)
a.set_urgency(1)
a.show()
elif sys.argv[1] == 'NORMAL':
a.update('ChiLLax!!! Nothing to worry about')
a.set_urgency(0)
a.show()
main()
<|reserved_special_token_1|>
#!/bin/python
import sys
import notify2
import subprocess
from time import sleep
def notification(message: str):
"""
Display notification to the desktop
Task:
1. show() -> it will generate a complete new pop
2. update() -> it will update the payload part of same notification pop-up, not issuing any new one.
Usage : python <filename.py> typeObj:str value:int objective:str
typeObj: RAM/SWAP/NORMAL
value: current usage of RAM or SWAP (for NORMAL, the value = 0)
objective: show/update
"""
# initialize the notification
notify2.init("notifywhenLOAD")
notifyObj = notify2.Notification("Emergency Alert!", message)
notifyObj.set_timeout(12000)
return notifyObj
def main():
a = notification(f"{sys.argv[1]} exceeds {sys.argv[2]}")
if sys.argv[1] in ["RAM", "SWAP"] and sys.argv[3] == "update":
a.update(f"{sys.argv[1]} Alert!! Warning for death")
# a.update('river')
a.set_urgency(2)
a.show()
elif sys.argv[1] in ["RAM", "SWAP"] and sys.argv[3] == "show":
a.set_timeout(10000)
a.set_urgency(1)
a.show()
elif sys.argv[1] == "NORMAL":
a.update("ChiLLax!!! Nothing to worry about")
a.set_urgency(0)
a.show()
main()
|
flexible
|
{
"blob_id": "8a7904881d936a3cb421ed5550856b600894fcee",
"index": 5397,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef notification(message: str):\n \"\"\"\n Display notification to the desktop\n Task:\n 1. show() -> it will generate a complete new pop\n 2. update() -> it will update the payload part of same notification pop-up, not issuing any new one.\n Usage : python <filename.py> typeObj:str value:int objective:str\n typeObj: RAM/SWAP/NORMAL\n value: current usage of RAM or SWAP (for NORMAL, the value = 0)\n objective: show/update \n \"\"\"\n notify2.init('notifywhenLOAD')\n notifyObj = notify2.Notification('Emergency Alert!', message)\n notifyObj.set_timeout(12000)\n return notifyObj\n\n\ndef main():\n a = notification(f'{sys.argv[1]} exceeds {sys.argv[2]}')\n if sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'update':\n a.update(f'{sys.argv[1]} Alert!! Warning for death')\n a.set_urgency(2)\n a.show()\n elif sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'show':\n a.set_timeout(10000)\n a.set_urgency(1)\n a.show()\n elif sys.argv[1] == 'NORMAL':\n a.update('ChiLLax!!! Nothing to worry about')\n a.set_urgency(0)\n a.show()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef notification(message: str):\n \"\"\"\n Display notification to the desktop\n Task:\n 1. show() -> it will generate a complete new pop\n 2. update() -> it will update the payload part of same notification pop-up, not issuing any new one.\n Usage : python <filename.py> typeObj:str value:int objective:str\n typeObj: RAM/SWAP/NORMAL\n value: current usage of RAM or SWAP (for NORMAL, the value = 0)\n objective: show/update \n \"\"\"\n notify2.init('notifywhenLOAD')\n notifyObj = notify2.Notification('Emergency Alert!', message)\n notifyObj.set_timeout(12000)\n return notifyObj\n\n\ndef main():\n a = notification(f'{sys.argv[1]} exceeds {sys.argv[2]}')\n if sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'update':\n a.update(f'{sys.argv[1]} Alert!! Warning for death')\n a.set_urgency(2)\n a.show()\n elif sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'show':\n a.set_timeout(10000)\n a.set_urgency(1)\n a.show()\n elif sys.argv[1] == 'NORMAL':\n a.update('ChiLLax!!! Nothing to worry about')\n a.set_urgency(0)\n a.show()\n\n\nmain()\n",
"step-4": "import sys\nimport notify2\nimport subprocess\nfrom time import sleep\n\n\ndef notification(message: str):\n \"\"\"\n Display notification to the desktop\n Task:\n 1. show() -> it will generate a complete new pop\n 2. update() -> it will update the payload part of same notification pop-up, not issuing any new one.\n Usage : python <filename.py> typeObj:str value:int objective:str\n typeObj: RAM/SWAP/NORMAL\n value: current usage of RAM or SWAP (for NORMAL, the value = 0)\n objective: show/update \n \"\"\"\n notify2.init('notifywhenLOAD')\n notifyObj = notify2.Notification('Emergency Alert!', message)\n notifyObj.set_timeout(12000)\n return notifyObj\n\n\ndef main():\n a = notification(f'{sys.argv[1]} exceeds {sys.argv[2]}')\n if sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'update':\n a.update(f'{sys.argv[1]} Alert!! Warning for death')\n a.set_urgency(2)\n a.show()\n elif sys.argv[1] in ['RAM', 'SWAP'] and sys.argv[3] == 'show':\n a.set_timeout(10000)\n a.set_urgency(1)\n a.show()\n elif sys.argv[1] == 'NORMAL':\n a.update('ChiLLax!!! Nothing to worry about')\n a.set_urgency(0)\n a.show()\n\n\nmain()\n",
"step-5": "#!/bin/python\nimport sys\nimport notify2\nimport subprocess\nfrom time import sleep\n\n\ndef notification(message: str):\n \"\"\"\n Display notification to the desktop\n Task:\n 1. show() -> it will generate a complete new pop\n 2. update() -> it will update the payload part of same notification pop-up, not issuing any new one.\n Usage : python <filename.py> typeObj:str value:int objective:str\n typeObj: RAM/SWAP/NORMAL\n value: current usage of RAM or SWAP (for NORMAL, the value = 0)\n objective: show/update \n \"\"\"\n # initialize the notification\n notify2.init(\"notifywhenLOAD\")\n notifyObj = notify2.Notification(\"Emergency Alert!\", message)\n notifyObj.set_timeout(12000)\n return notifyObj\n\n\ndef main():\n a = notification(f\"{sys.argv[1]} exceeds {sys.argv[2]}\")\n if sys.argv[1] in [\"RAM\", \"SWAP\"] and sys.argv[3] == \"update\":\n a.update(f\"{sys.argv[1]} Alert!! Warning for death\")\n # a.update('river')\n a.set_urgency(2)\n a.show()\n elif sys.argv[1] in [\"RAM\", \"SWAP\"] and sys.argv[3] == \"show\":\n a.set_timeout(10000)\n a.set_urgency(1)\n a.show()\n elif sys.argv[1] == \"NORMAL\":\n a.update(\"ChiLLax!!! Nothing to worry about\")\n a.set_urgency(0)\n a.show()\n\n\nmain()\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def download(url):
print('Downloading ', url)
userAgent = (
'Mozilla/5.0 (Linux; U; Android 10; zh-cn; MI 9 Build/QKQ1.190825.002) AppleWebKit/533.1 (KHTML, like Gecko) Version/5.0 Mobile Safari/533.1'
)
userAgent = (
'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Mobile Safari/537.36'
)
AcceptLanguage = 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7'
AcceptEncoding = 'gzip, deflate'
Accept = (
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
)
Cookie = (
'JSESSIONID=A58B0B1DC96828832B92EE91D9E92605.7; tuNQaYE2WCOr80S=O43ziCfC7BLZm.F5edsUL84qX_T8DekwZhjFvL0AXMCYWDFH2_2qqyIQwdLwjfJb; tuNQaYE2WCOr80T=4zC94ZgkJ7NBDRsPXe.HrtFd3tXcvwudE41SSD4iUqL2TMsVQSF_QZ8LinHlNDmqOg_SeNEwr7NLRVyTJ7tG81Q310tSQQPTX0GJJDgefw7pPhWCn2BTVLKZ.MM_8iydxo1hNiKsmf7t9C5h3dn5b0DwZgfFZIzR1Ji4dsQdfhFkYTG5rdPQUPR5Y9.SG8jXjtXLxhv98Jx9DkyPYf2HWMJSWhjZlSe1sjjzACwcCozHaqBCvc_6F9mVCbKTdW44GKor91iD_VU2yaig6LwIHC5lVS0hSMTZQVlYPRJiQPf9AdA'
)
http = urllib3.PoolManager(num_pools=5, headers={'User-Agent':
userAgent, 'Accept - Language': AcceptLanguage, 'Accept-Encoding':
AcceptEncoding, 'Accept': Accept, 'Proxy-Connection': 'keep-alive',
'Cache-Control': 'max-age=0', 'Cookie': Cookie})
r = http.request('GET', url)
print(r.status)
html = r.data.decode()
return html
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def download(url):
print('Downloading ', url)
userAgent = (
'Mozilla/5.0 (Linux; U; Android 10; zh-cn; MI 9 Build/QKQ1.190825.002) AppleWebKit/533.1 (KHTML, like Gecko) Version/5.0 Mobile Safari/533.1'
)
userAgent = (
'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Mobile Safari/537.36'
)
AcceptLanguage = 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7'
AcceptEncoding = 'gzip, deflate'
Accept = (
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
)
Cookie = (
'JSESSIONID=A58B0B1DC96828832B92EE91D9E92605.7; tuNQaYE2WCOr80S=O43ziCfC7BLZm.F5edsUL84qX_T8DekwZhjFvL0AXMCYWDFH2_2qqyIQwdLwjfJb; tuNQaYE2WCOr80T=4zC94ZgkJ7NBDRsPXe.HrtFd3tXcvwudE41SSD4iUqL2TMsVQSF_QZ8LinHlNDmqOg_SeNEwr7NLRVyTJ7tG81Q310tSQQPTX0GJJDgefw7pPhWCn2BTVLKZ.MM_8iydxo1hNiKsmf7t9C5h3dn5b0DwZgfFZIzR1Ji4dsQdfhFkYTG5rdPQUPR5Y9.SG8jXjtXLxhv98Jx9DkyPYf2HWMJSWhjZlSe1sjjzACwcCozHaqBCvc_6F9mVCbKTdW44GKor91iD_VU2yaig6LwIHC5lVS0hSMTZQVlYPRJiQPf9AdA'
)
http = urllib3.PoolManager(num_pools=5, headers={'User-Agent':
userAgent, 'Accept - Language': AcceptLanguage, 'Accept-Encoding':
AcceptEncoding, 'Accept': Accept, 'Proxy-Connection': 'keep-alive',
'Cache-Control': 'max-age=0', 'Cookie': Cookie})
r = http.request('GET', url)
print(r.status)
html = r.data.decode()
return html
if __name__ == '__main__':
demoURL = (
'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex=1&pageSize=1500'
)
demoDetailUrl = (
'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK=109228'
)
demoDetailUrl = (
'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK='
)
for i in range(1, 10):
demoURL = (
'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex='
+ str(i) + '&pageSize=1500')
ss = download(demoURL)
print(ss)
data = json.loads(ss)
for item in data:
searchK = item['ID']
print(item['CONTENT'])
detailInfoJson = download(demoDetailUrl + str(searchK))
detailInfo = json.loads(detailInfoJson)
detailJson = '{'
for detail in detailInfo:
if detail['NAME'] != '注':
detailJson = detailJson + '"' + detail['NAME'
] + '":"' + detail['CONTENT'] + '",'
detailJson = detailJson[:-1]
detailJson = detailJson + '}'
print(detailJson)
detailData = json.loads(detailJson)
<|reserved_special_token_1|>
import urllib3
import json
def download(url):
print('Downloading ', url)
userAgent = (
'Mozilla/5.0 (Linux; U; Android 10; zh-cn; MI 9 Build/QKQ1.190825.002) AppleWebKit/533.1 (KHTML, like Gecko) Version/5.0 Mobile Safari/533.1'
)
userAgent = (
'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Mobile Safari/537.36'
)
AcceptLanguage = 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7'
AcceptEncoding = 'gzip, deflate'
Accept = (
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
)
Cookie = (
'JSESSIONID=A58B0B1DC96828832B92EE91D9E92605.7; tuNQaYE2WCOr80S=O43ziCfC7BLZm.F5edsUL84qX_T8DekwZhjFvL0AXMCYWDFH2_2qqyIQwdLwjfJb; tuNQaYE2WCOr80T=4zC94ZgkJ7NBDRsPXe.HrtFd3tXcvwudE41SSD4iUqL2TMsVQSF_QZ8LinHlNDmqOg_SeNEwr7NLRVyTJ7tG81Q310tSQQPTX0GJJDgefw7pPhWCn2BTVLKZ.MM_8iydxo1hNiKsmf7t9C5h3dn5b0DwZgfFZIzR1Ji4dsQdfhFkYTG5rdPQUPR5Y9.SG8jXjtXLxhv98Jx9DkyPYf2HWMJSWhjZlSe1sjjzACwcCozHaqBCvc_6F9mVCbKTdW44GKor91iD_VU2yaig6LwIHC5lVS0hSMTZQVlYPRJiQPf9AdA'
)
http = urllib3.PoolManager(num_pools=5, headers={'User-Agent':
userAgent, 'Accept - Language': AcceptLanguage, 'Accept-Encoding':
AcceptEncoding, 'Accept': Accept, 'Proxy-Connection': 'keep-alive',
'Cache-Control': 'max-age=0', 'Cookie': Cookie})
r = http.request('GET', url)
print(r.status)
html = r.data.decode()
return html
if __name__ == '__main__':
demoURL = (
'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex=1&pageSize=1500'
)
demoDetailUrl = (
'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK=109228'
)
demoDetailUrl = (
'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK='
)
for i in range(1, 10):
demoURL = (
'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex='
+ str(i) + '&pageSize=1500')
ss = download(demoURL)
print(ss)
data = json.loads(ss)
for item in data:
searchK = item['ID']
print(item['CONTENT'])
detailInfoJson = download(demoDetailUrl + str(searchK))
detailInfo = json.loads(detailInfoJson)
detailJson = '{'
for detail in detailInfo:
if detail['NAME'] != '注':
detailJson = detailJson + '"' + detail['NAME'
] + '":"' + detail['CONTENT'] + '",'
detailJson = detailJson[:-1]
detailJson = detailJson + '}'
print(detailJson)
detailData = json.loads(detailJson)
<|reserved_special_token_1|>
import urllib3
import json
def download(url):
print('Downloading ', url)
userAgent = 'Mozilla/5.0 (Linux; U; Android 10; zh-cn; MI 9 Build/QKQ1.190825.002) AppleWebKit/533.1 (KHTML, like Gecko) Version/5.0 Mobile Safari/533.1'
userAgent = 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Mobile Safari/537.36'
AcceptLanguage ='zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7'
AcceptEncoding= 'gzip, deflate'
Accept = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
Cookie = 'JSESSIONID=A58B0B1DC96828832B92EE91D9E92605.7; tuNQaYE2WCOr80S=O43ziCfC7BLZm.F5edsUL84qX_T8DekwZhjFvL0AXMCYWDFH2_2qqyIQwdLwjfJb; tuNQaYE2WCOr80T=4zC94ZgkJ7NBDRsPXe.HrtFd3tXcvwudE41SSD4iUqL2TMsVQSF_QZ8LinHlNDmqOg_SeNEwr7NLRVyTJ7tG81Q310tSQQPTX0GJJDgefw7pPhWCn2BTVLKZ.MM_8iydxo1hNiKsmf7t9C5h3dn5b0DwZgfFZIzR1Ji4dsQdfhFkYTG5rdPQUPR5Y9.SG8jXjtXLxhv98Jx9DkyPYf2HWMJSWhjZlSe1sjjzACwcCozHaqBCvc_6F9mVCbKTdW44GKor91iD_VU2yaig6LwIHC5lVS0hSMTZQVlYPRJiQPf9AdA'
http = urllib3.PoolManager(num_pools=5, headers={'User-Agent': userAgent,'Accept - Language': AcceptLanguage,
'Accept-Encoding': AcceptEncoding ,'Accept':Accept,
'Proxy-Connection': 'keep-alive',
'Cache-Control': 'max-age=0',
'Cookie':Cookie})
r = http.request('GET', url)
print(r.status)
html = r.data.decode()
return html
if __name__ == '__main__':
demoURL = 'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex=1&pageSize=1500'
demoDetailUrl = 'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK=109228'
demoDetailUrl = 'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK='
for i in range(1,10):
demoURL = 'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex='+str(i)+'&pageSize=1500'
ss = download(demoURL)
print(ss)
data = json.loads(ss)
for item in data:
# searchK = item['COUNT']
searchK = item['ID']
print(item['CONTENT'])
detailInfoJson = download(demoDetailUrl + str(searchK))
detailInfo = json.loads(detailInfoJson)
detailJson = '{'
for detail in detailInfo:
if detail['NAME'] != '注':
detailJson = detailJson + '"' + detail['NAME'] + '":"' + detail['CONTENT'] + '",'
detailJson = detailJson[:-1]
detailJson = detailJson + '}'
print(detailJson)
detailData = json.loads(detailJson)
# print(item['CONTENT'])
|
flexible
|
{
"blob_id": "9d302ff2de8280bd8786794cdd533107d2a458bc",
"index": 5611,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef download(url):\n print('Downloading ', url)\n userAgent = (\n 'Mozilla/5.0 (Linux; U; Android 10; zh-cn; MI 9 Build/QKQ1.190825.002) AppleWebKit/533.1 (KHTML, like Gecko) Version/5.0 Mobile Safari/533.1'\n )\n userAgent = (\n 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Mobile Safari/537.36'\n )\n AcceptLanguage = 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7'\n AcceptEncoding = 'gzip, deflate'\n Accept = (\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'\n )\n Cookie = (\n 'JSESSIONID=A58B0B1DC96828832B92EE91D9E92605.7; tuNQaYE2WCOr80S=O43ziCfC7BLZm.F5edsUL84qX_T8DekwZhjFvL0AXMCYWDFH2_2qqyIQwdLwjfJb; tuNQaYE2WCOr80T=4zC94ZgkJ7NBDRsPXe.HrtFd3tXcvwudE41SSD4iUqL2TMsVQSF_QZ8LinHlNDmqOg_SeNEwr7NLRVyTJ7tG81Q310tSQQPTX0GJJDgefw7pPhWCn2BTVLKZ.MM_8iydxo1hNiKsmf7t9C5h3dn5b0DwZgfFZIzR1Ji4dsQdfhFkYTG5rdPQUPR5Y9.SG8jXjtXLxhv98Jx9DkyPYf2HWMJSWhjZlSe1sjjzACwcCozHaqBCvc_6F9mVCbKTdW44GKor91iD_VU2yaig6LwIHC5lVS0hSMTZQVlYPRJiQPf9AdA'\n )\n http = urllib3.PoolManager(num_pools=5, headers={'User-Agent':\n userAgent, 'Accept - Language': AcceptLanguage, 'Accept-Encoding':\n AcceptEncoding, 'Accept': Accept, 'Proxy-Connection': 'keep-alive',\n 'Cache-Control': 'max-age=0', 'Cookie': Cookie})\n r = http.request('GET', url)\n print(r.status)\n html = r.data.decode()\n return html\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef download(url):\n print('Downloading ', url)\n userAgent = (\n 'Mozilla/5.0 (Linux; U; Android 10; zh-cn; MI 9 Build/QKQ1.190825.002) AppleWebKit/533.1 (KHTML, like Gecko) Version/5.0 Mobile Safari/533.1'\n )\n userAgent = (\n 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Mobile Safari/537.36'\n )\n AcceptLanguage = 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7'\n AcceptEncoding = 'gzip, deflate'\n Accept = (\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'\n )\n Cookie = (\n 'JSESSIONID=A58B0B1DC96828832B92EE91D9E92605.7; tuNQaYE2WCOr80S=O43ziCfC7BLZm.F5edsUL84qX_T8DekwZhjFvL0AXMCYWDFH2_2qqyIQwdLwjfJb; tuNQaYE2WCOr80T=4zC94ZgkJ7NBDRsPXe.HrtFd3tXcvwudE41SSD4iUqL2TMsVQSF_QZ8LinHlNDmqOg_SeNEwr7NLRVyTJ7tG81Q310tSQQPTX0GJJDgefw7pPhWCn2BTVLKZ.MM_8iydxo1hNiKsmf7t9C5h3dn5b0DwZgfFZIzR1Ji4dsQdfhFkYTG5rdPQUPR5Y9.SG8jXjtXLxhv98Jx9DkyPYf2HWMJSWhjZlSe1sjjzACwcCozHaqBCvc_6F9mVCbKTdW44GKor91iD_VU2yaig6LwIHC5lVS0hSMTZQVlYPRJiQPf9AdA'\n )\n http = urllib3.PoolManager(num_pools=5, headers={'User-Agent':\n userAgent, 'Accept - Language': AcceptLanguage, 'Accept-Encoding':\n AcceptEncoding, 'Accept': Accept, 'Proxy-Connection': 'keep-alive',\n 'Cache-Control': 'max-age=0', 'Cookie': Cookie})\n r = http.request('GET', url)\n print(r.status)\n html = r.data.decode()\n return html\n\n\nif __name__ == '__main__':\n demoURL = (\n 'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex=1&pageSize=1500'\n )\n demoDetailUrl = (\n 'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK=109228'\n )\n demoDetailUrl = (\n 'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK='\n )\n for i in range(1, 10):\n demoURL = (\n 'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex='\n + str(i) + 
'&pageSize=1500')\n ss = download(demoURL)\n print(ss)\n data = json.loads(ss)\n for item in data:\n searchK = item['ID']\n print(item['CONTENT'])\n detailInfoJson = download(demoDetailUrl + str(searchK))\n detailInfo = json.loads(detailInfoJson)\n detailJson = '{'\n for detail in detailInfo:\n if detail['NAME'] != '注':\n detailJson = detailJson + '\"' + detail['NAME'\n ] + '\":\"' + detail['CONTENT'] + '\",'\n detailJson = detailJson[:-1]\n detailJson = detailJson + '}'\n print(detailJson)\n detailData = json.loads(detailJson)\n",
"step-4": "import urllib3\nimport json\n\n\ndef download(url):\n print('Downloading ', url)\n userAgent = (\n 'Mozilla/5.0 (Linux; U; Android 10; zh-cn; MI 9 Build/QKQ1.190825.002) AppleWebKit/533.1 (KHTML, like Gecko) Version/5.0 Mobile Safari/533.1'\n )\n userAgent = (\n 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Mobile Safari/537.36'\n )\n AcceptLanguage = 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7'\n AcceptEncoding = 'gzip, deflate'\n Accept = (\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'\n )\n Cookie = (\n 'JSESSIONID=A58B0B1DC96828832B92EE91D9E92605.7; tuNQaYE2WCOr80S=O43ziCfC7BLZm.F5edsUL84qX_T8DekwZhjFvL0AXMCYWDFH2_2qqyIQwdLwjfJb; tuNQaYE2WCOr80T=4zC94ZgkJ7NBDRsPXe.HrtFd3tXcvwudE41SSD4iUqL2TMsVQSF_QZ8LinHlNDmqOg_SeNEwr7NLRVyTJ7tG81Q310tSQQPTX0GJJDgefw7pPhWCn2BTVLKZ.MM_8iydxo1hNiKsmf7t9C5h3dn5b0DwZgfFZIzR1Ji4dsQdfhFkYTG5rdPQUPR5Y9.SG8jXjtXLxhv98Jx9DkyPYf2HWMJSWhjZlSe1sjjzACwcCozHaqBCvc_6F9mVCbKTdW44GKor91iD_VU2yaig6LwIHC5lVS0hSMTZQVlYPRJiQPf9AdA'\n )\n http = urllib3.PoolManager(num_pools=5, headers={'User-Agent':\n userAgent, 'Accept - Language': AcceptLanguage, 'Accept-Encoding':\n AcceptEncoding, 'Accept': Accept, 'Proxy-Connection': 'keep-alive',\n 'Cache-Control': 'max-age=0', 'Cookie': Cookie})\n r = http.request('GET', url)\n print(r.status)\n html = r.data.decode()\n return html\n\n\nif __name__ == '__main__':\n demoURL = (\n 'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex=1&pageSize=1500'\n )\n demoDetailUrl = (\n 'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK=109228'\n )\n demoDetailUrl = (\n 'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK='\n )\n for i in range(1, 10):\n demoURL = (\n 'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex='\n + str(i) + 
'&pageSize=1500')\n ss = download(demoURL)\n print(ss)\n data = json.loads(ss)\n for item in data:\n searchK = item['ID']\n print(item['CONTENT'])\n detailInfoJson = download(demoDetailUrl + str(searchK))\n detailInfo = json.loads(detailInfoJson)\n detailJson = '{'\n for detail in detailInfo:\n if detail['NAME'] != '注':\n detailJson = detailJson + '\"' + detail['NAME'\n ] + '\":\"' + detail['CONTENT'] + '\",'\n detailJson = detailJson[:-1]\n detailJson = detailJson + '}'\n print(detailJson)\n detailData = json.loads(detailJson)\n",
"step-5": "import urllib3\nimport json\ndef download(url):\n print('Downloading ', url)\n userAgent = 'Mozilla/5.0 (Linux; U; Android 10; zh-cn; MI 9 Build/QKQ1.190825.002) AppleWebKit/533.1 (KHTML, like Gecko) Version/5.0 Mobile Safari/533.1'\n userAgent = 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Mobile Safari/537.36'\n AcceptLanguage ='zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7'\n AcceptEncoding= 'gzip, deflate'\n Accept = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'\n\n Cookie = 'JSESSIONID=A58B0B1DC96828832B92EE91D9E92605.7; tuNQaYE2WCOr80S=O43ziCfC7BLZm.F5edsUL84qX_T8DekwZhjFvL0AXMCYWDFH2_2qqyIQwdLwjfJb; tuNQaYE2WCOr80T=4zC94ZgkJ7NBDRsPXe.HrtFd3tXcvwudE41SSD4iUqL2TMsVQSF_QZ8LinHlNDmqOg_SeNEwr7NLRVyTJ7tG81Q310tSQQPTX0GJJDgefw7pPhWCn2BTVLKZ.MM_8iydxo1hNiKsmf7t9C5h3dn5b0DwZgfFZIzR1Ji4dsQdfhFkYTG5rdPQUPR5Y9.SG8jXjtXLxhv98Jx9DkyPYf2HWMJSWhjZlSe1sjjzACwcCozHaqBCvc_6F9mVCbKTdW44GKor91iD_VU2yaig6LwIHC5lVS0hSMTZQVlYPRJiQPf9AdA'\n\n http = urllib3.PoolManager(num_pools=5, headers={'User-Agent': userAgent,'Accept - Language': AcceptLanguage,\n 'Accept-Encoding': AcceptEncoding ,'Accept':Accept,\n 'Proxy-Connection': 'keep-alive',\n 'Cache-Control': 'max-age=0',\n 'Cookie':Cookie})\n r = http.request('GET', url)\n print(r.status)\n html = r.data.decode()\n return html\n\n\nif __name__ == '__main__':\n demoURL = 'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex=1&pageSize=1500'\n demoDetailUrl = 'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK=109228'\n demoDetailUrl = 'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK='\n\n for i in range(1,10):\n demoURL = 'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex='+str(i)+'&pageSize=1500'\n ss = download(demoURL)\n\n print(ss)\n data = 
json.loads(ss)\n for item in data:\n # searchK = item['COUNT']\n searchK = item['ID']\n print(item['CONTENT'])\n detailInfoJson = download(demoDetailUrl + str(searchK))\n detailInfo = json.loads(detailInfoJson)\n detailJson = '{'\n for detail in detailInfo:\n if detail['NAME'] != '注':\n detailJson = detailJson + '\"' + detail['NAME'] + '\":\"' + detail['CONTENT'] + '\",'\n detailJson = detailJson[:-1]\n detailJson = detailJson + '}'\n print(detailJson)\n detailData = json.loads(detailJson)\n # print(item['CONTENT'])\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Generated by Django 3.2.4 on 2021-06-18 01:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('eCom', '0014_auto_20210617_1503'),
]
operations = [
migrations.RemoveField(
model_name='order',
name='items',
),
migrations.AddField(
model_name='order',
name='items',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='eCom.orderitem'),
),
]
|
normal
|
{
"blob_id": "ef57f0dfea261f022ced36ef9e27a07d63c21026",
"index": 2156,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('eCom', '0014_auto_20210617_1503')]\n operations = [migrations.RemoveField(model_name='order', name='items'),\n migrations.AddField(model_name='order', name='items', field=models.\n ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='eCom.orderitem'))]\n",
"step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('eCom', '0014_auto_20210617_1503')]\n operations = [migrations.RemoveField(model_name='order', name='items'),\n migrations.AddField(model_name='order', name='items', field=models.\n ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='eCom.orderitem'))]\n",
"step-5": "# Generated by Django 3.2.4 on 2021-06-18 01:20\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('eCom', '0014_auto_20210617_1503'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='order',\n name='items',\n ),\n migrations.AddField(\n model_name='order',\n name='items',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='eCom.orderitem'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class ListItem:
"""A custom object that stores four pieces of data representing each
entry in the todo list. Contains the text of the todo list entry,
the priority of the entry, the group code (NYI), and the visibility of
the entry"""
def __init__(self, text, priority, group, visible):
self.text = text
self.priority = priority
self.group = group
self.visible = visible
<|reserved_special_token_0|>
def check_priority_overlap(priority_to_check, todo_list):
"""The purpose of this function is to check if the user's priority
number input overlaps with a priority number already in the list,
and if it does, prompts the user whether they want to keep it, change
it, or move everything in the list that has a larger priority value up
by one.
:param priority_to_check: the number to check for overlap with
:param todo_list: the list of ListItem objects to check in
:returns the priority value, either changed or the original input"""
overlap = False
for item in todo_list:
if item.priority == priority_to_check:
overlap = True
if overlap:
answer = 0
while answer > 3 or answer < 1:
answer = clean_input(
"""The priority number you entered overlaps with another entry's priority. Enter:
1 to change priority number
2 to leave as is with overlap
3 to push all priority numbers below this entry down by 1"""
)
if answer > 3 or answer < 1:
print('Invalid Option Selected\nPlease Try Again')
if answer == 1:
priority_to_check = check_priority_overlap(int(clean_input(
'New Priority:')), todo_list)
elif answer == 3:
cascade_list(priority_to_check, todo_list)
return priority_to_check
<|reserved_special_token_0|>
def clean_input(prompt='Error'):
"""The purpose of this function is to prompt the user for a numerical
input and only accept a numerical input, rejects no input and text input.
:param prompt: the prompt the user sees, default is Error
:returns the user input as a float"""
text = True
phrase = '0'
while text:
phrase = input(prompt + '\n')
try:
float(phrase)
text = False
except ValueError:
print('Error: Non-Numeric Entry Detected')
return float(phrase)
<|reserved_special_token_0|>
def add_item(todo_list):
"""The purpose of this function is to prompt the user for the two
fields of necessary information to make a new entry in the todo list,
the item name and priority, checking if the priority overlaps with an
existing entry in the todo list.
:param todo_list: the list of ListItem objects to add a new ListItem
object to
:returns nothing"""
text = input('Please enter the name of the new item\n')
priority = check_priority_overlap(int(clean_input(
'Please enter the priority of this item')), todo_list)
group = 0
visible = True
todo_list.insert(0, ListItem(text, priority, group, visible))
return
<|reserved_special_token_0|>
def mark_complete(todo_list):
"""The purpose of this function is to mark a selectedListItem object as
hidden and not to be printed unless specified, apart from selecting items.
:param todo_list: the list of ListItem objects to modify
:returns nothing"""
item = select_item(todo_list,
"""Please enter the item number you wish to Mark Completed and hide from the list
Enter a negative number or zero to cancel"""
)
if item >= 0:
todo_list[item].visible = False
return
<|reserved_special_token_0|>
def check_list_status(todo_list):
"""The purpose of this function is to check whether there are visible
items in the list, the entire list is hidden, or the list contains no
more ListItem objects
:param todo_list: the list of ListItem objects to check
:returns which condition using integer codes"""
if len(todo_list) == 0:
state = 1
else:
state = 2
for item_index in range(len(todo_list)):
if todo_list[item_index].visible:
state = 0
return state
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ListItem:
"""A custom object that stores four pieces of data representing each
entry in the todo list. Contains the text of the todo list entry,
the priority of the entry, the group code (NYI), and the visibility of
the entry"""
def __init__(self, text, priority, group, visible):
self.text = text
self.priority = priority
self.group = group
self.visible = visible
<|reserved_special_token_0|>
def check_priority_overlap(priority_to_check, todo_list):
"""The purpose of this function is to check if the user's priority
number input overlaps with a priority number already in the list,
and if it does, prompts the user whether they want to keep it, change
it, or move everything in the list that has a larger priority value up
by one.
:param priority_to_check: the number to check for overlap with
:param todo_list: the list of ListItem objects to check in
:returns the priority value, either changed or the original input"""
overlap = False
for item in todo_list:
if item.priority == priority_to_check:
overlap = True
if overlap:
answer = 0
while answer > 3 or answer < 1:
answer = clean_input(
"""The priority number you entered overlaps with another entry's priority. Enter:
1 to change priority number
2 to leave as is with overlap
3 to push all priority numbers below this entry down by 1"""
)
if answer > 3 or answer < 1:
print('Invalid Option Selected\nPlease Try Again')
if answer == 1:
priority_to_check = check_priority_overlap(int(clean_input(
'New Priority:')), todo_list)
elif answer == 3:
cascade_list(priority_to_check, todo_list)
return priority_to_check
<|reserved_special_token_0|>
def clean_input(prompt='Error'):
"""The purpose of this function is to prompt the user for a numerical
input and only accept a numerical input, rejects no input and text input.
:param prompt: the prompt the user sees, default is Error
:returns the user input as a float"""
text = True
phrase = '0'
while text:
phrase = input(prompt + '\n')
try:
float(phrase)
text = False
except ValueError:
print('Error: Non-Numeric Entry Detected')
return float(phrase)
<|reserved_special_token_0|>
def save_list(todo_list, save_location):
"""The purpose of this function is to save a list of ListItem objects to a
specified location in a .txt file with the first line of the document
being an explanation of the file format being used.
:param todo_list: the list of ListItem objects to save to the save file
:param save_location: the location to create or overwrite the save file
:returns nothing"""
data_file_w = open(save_location, 'w')
data_file_w.write(
"""Warning: The Todo-List Program will not be able to load this save file if it is incorrectly modified. Modify at your own risk. The structure is Entry Text, Entry Priority as a number, Entry Group as a number (Not Yet Utilized, but necessary), and Entry Visibility as a boolean, each on a separate line, a single line gap in between, and the very first line is skipped
"""
)
for item in todo_list:
data_file_w.write('{0}\n{1}\n{2}\n{3}\n\n'.format(item.text, str(
item.priority), str(item.group), str(item.visible)))
data_file_w.close()
return
def add_item(todo_list):
"""The purpose of this function is to prompt the user for the two
fields of necessary information to make a new entry in the todo list,
the item name and priority, checking if the priority overlaps with an
existing entry in the todo list.
:param todo_list: the list of ListItem objects to add a new ListItem
object to
:returns nothing"""
text = input('Please enter the name of the new item\n')
priority = check_priority_overlap(int(clean_input(
'Please enter the priority of this item')), todo_list)
group = 0
visible = True
todo_list.insert(0, ListItem(text, priority, group, visible))
return
<|reserved_special_token_0|>
def remove_item(todo_list):
"""The purpose of this function is to delete a ListItem object from a
list of ListItem objects by prompting the user for the index and
verifying they want to delete the item.
:param todo_list: the list of ListItem objects from which to remove
one object
:returns nothing"""
item = select_item(todo_list,
"""Please enter the item number you wish to remove
Enter a negative number or zero to cancel"""
)
if item >= 0:
todo_list.pop(item)
return
def mark_complete(todo_list):
"""The purpose of this function is to mark a selectedListItem object as
hidden and not to be printed unless specified, apart from selecting items.
:param todo_list: the list of ListItem objects to modify
:returns nothing"""
item = select_item(todo_list,
"""Please enter the item number you wish to Mark Completed and hide from the list
Enter a negative number or zero to cancel"""
)
if item >= 0:
todo_list[item].visible = False
return
def edit_item(todo_list):
"""The purpose of this function is to edit a ListItem object in the
list of ListItem objects, changing either the name or priority
:param todo_list: the list of ListItem objects that gets one object
modified
:returns nothing"""
item = select_item(todo_list,
"""Please enter the item number you wish to edit
Enter a negative number or zero to cancel"""
)
if item >= 0:
while True:
value = clean_input(
"""Which value would you like to edit? Enter:
1 for the Item Text (Currently: {0})
2 for the Item Priority (Currently: {1})
3 to Cancel and Exit"""
.format(todo_list[item].text, str(todo_list[item].priority)))
if value == 1:
print('The Current Text is: {0}'.format(todo_list[item].text))
todo_list[item].text = input('New Text:\n')
elif value == 2:
print('The Current Priority is: {0}'.format(str(todo_list[
item].priority)))
todo_list[item].priority = check_priority_overlap(int(
clean_input('New Priority:')), todo_list)
elif value == 3:
break
else:
print('Invalid Input - Please Try Again')
return
def check_list_status(todo_list):
"""The purpose of this function is to check whether there are visible
items in the list, the entire list is hidden, or the list contains no
more ListItem objects
:param todo_list: the list of ListItem objects to check
:returns which condition using integer codes"""
if len(todo_list) == 0:
state = 1
else:
state = 2
for item_index in range(len(todo_list)):
if todo_list[item_index].visible:
state = 0
return state
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ListItem:
"""A custom object that stores four pieces of data representing each
entry in the todo list. Contains the text of the todo list entry,
the priority of the entry, the group code (NYI), and the visibility of
the entry"""
def __init__(self, text, priority, group, visible):
self.text = text
self.priority = priority
self.group = group
self.visible = visible
def concept_demonstration():
"""The purpose of this function is to prompt the user for numbers and
strings and manipulate them to demonstrate programming fluency with
string and integer operations.
:returns nothing"""
number = clean_input('Please enter a positive number')
number2 = clean_input('Please enter a number')
while number2 == 0:
print('Error: Cannot Divide by 0')
number2 = clean_input('Please enter a different number')
color = input('Please enter a color\n')
thing = input('Please enter a thing\n')
thing2 = thing + ' '
location = input('Please enter a location\n')
print(str(number) + ' raised to the power of ' + str(number2) + ' is ' +
str(number ** number2))
print('{0} multiplied by {1} is {2}'.format(str(number), str(number2),
str(number * number2)))
print('{0} divided by {1} is {2}'.format(str(number), str(number2), str
(number / number2)))
print('The remainder from dividing {0} by {1} is {2}'.format(str(
number), str(number2), str(number % number2)))
print('{0} divided by {1} rounded down is {2}'.format(str(number), str(
number2), str(number // number2)))
print('{0} plus {1} is {2}'.format(str(number), str(number2), str(
number + number2)))
print('{0} minus {1} is {2}'.format(str(number), str(number2), str(
number - number2)))
if number > 1:
print("The {0} at {1} yelled '{2}'".format(color + ' ' + thing,
location, thing2 * int(number - 1) + thing))
elif number < 0:
print(
"""The {0} at {1} yelled '{2}'
You entered a negative number when a positive number was requested, so you made the {3} mute. Good Job."""
.format(color + ' ' + thing, location, thing2 * int(number), thing)
)
else:
print("The {0} at {1} yelled '{2}'".format(color + ' ' + thing,
location, thing * int(number)))
return
<|reserved_special_token_0|>
def check_priority_overlap(priority_to_check, todo_list):
    """Check whether ``priority_to_check`` collides with the priority of
    an existing entry.  On a collision the user chooses to re-enter the
    priority, keep the overlap, or shift lower-priority entries down.

    :param priority_to_check: the candidate priority number
    :param todo_list: the list of ListItem objects to check against
    :returns the resolved priority value"""
    if not any(entry.priority == priority_to_check for entry in todo_list):
        return priority_to_check  # no collision, keep as entered
    choice = 0
    while choice < 1 or choice > 3:
        choice = clean_input(
            'The priority number you entered overlaps with another '
            "entry's priority. Enter:\n1 to change priority number\n2 to "
            'leave as is with overlap\n3 to push all priority numbers '
            'below this entry down by 1')
        if choice < 1 or choice > 3:
            print('Invalid Option Selected\nPlease Try Again')
    if choice == 1:
        priority_to_check = check_priority_overlap(
            int(clean_input('New Priority:')), todo_list)
    elif choice == 3:
        cascade_list(priority_to_check, todo_list)
    return priority_to_check
<|reserved_special_token_0|>
def clean_input(prompt='Error'):
    """Repeatedly prompt until the user types something that parses as a
    number; text and empty entries are rejected with an error message.

    :param prompt: the text shown to the user, default is Error
    :returns the user's entry as a float"""
    while True:
        entry = input(prompt + '\n')
        try:
            return float(entry)
        except ValueError:
            print('Error: Non-Numeric Entry Detected')
<|reserved_special_token_0|>
def save_list(todo_list, save_location):
    """Write ``todo_list`` out to ``save_location`` as a .txt file.

    The first line documents the format; each entry then occupies four
    lines (text, priority, group, visibility) followed by a blank line.

    :param todo_list: the list of ListItem objects to save
    :param save_location: the path of the save file to create/overwrite
    :returns nothing"""
    # 'with' guarantees the handle is closed even if a write fails,
    # which the old open()/close() pair did not.
    with open(save_location, 'w') as data_file_w:
        data_file_w.write(
            'Warning: The Todo-List Program will not be able to load this '
            'save file if it is incorrectly modified. Modify at your own '
            'risk. The structure is Entry Text, Entry Priority as a number, '
            'Entry Group as a number (Not Yet Utilized, but necessary), and '
            'Entry Visibility as a boolean, each on a separate line, a '
            'single line gap in between, and the very first line is '
            'skipped\n')
        for item in todo_list:
            data_file_w.write('{0}\n{1}\n{2}\n{3}\n\n'.format(
                item.text, str(item.priority), str(item.group),
                str(item.visible)))
    return
def add_item(todo_list):
    """Prompt the user for a name and a priority and insert the new
    entry at the front of the todo list, resolving any priority clash.

    :param todo_list: the list of ListItem objects to extend
    :returns nothing"""
    name = input('Please enter the name of the new item\n')
    new_priority = check_priority_overlap(
        int(clean_input('Please enter the priority of this item')),
        todo_list)
    # Grouping is not yet implemented, so every entry joins group 0.
    todo_list.insert(0, ListItem(name, new_priority, 0, True))
    return
<|reserved_special_token_0|>
def remove_item(todo_list):
    """Ask the user which entry to delete and remove it from the list;
    a zero or negative selection cancels.

    :param todo_list: the list of ListItem objects to shrink
    :returns nothing"""
    chosen = select_item(
        todo_list,
        'Please enter the item number you wish to remove\nEnter a '
        'negative number or zero to cancel')
    if chosen < 0:
        return  # user cancelled
    todo_list.pop(chosen)
    return
def mark_complete(todo_list):
    """Ask the user which entry to mark complete and hide it from the
    normal listing; a zero or negative selection cancels.

    :param todo_list: the list of ListItem objects to modify
    :returns nothing"""
    chosen = select_item(
        todo_list,
        'Please enter the item number you wish to Mark Completed and hide '
        'from the list\nEnter a negative number or zero to cancel')
    if chosen < 0:
        return  # user cancelled
    todo_list[chosen].visible = False
    return
def edit_item(todo_list):
    """Let the user pick an entry and repeatedly edit its text or its
    priority until they choose to exit; a zero or negative selection at
    the picker cancels immediately.

    :param todo_list: the list of ListItem objects holding the entry
    :returns nothing"""
    chosen = select_item(
        todo_list,
        'Please enter the item number you wish to edit\nEnter a negative '
        'number or zero to cancel')
    if chosen < 0:
        return  # user cancelled
    entry = todo_list[chosen]  # alias: edits reach the list in place
    while True:
        field = clean_input(
            'Which value would you like to edit? Enter:\n1 for the Item '
            'Text (Currently: {0})\n2 for the Item Priority (Currently: '
            '{1})\n3 to Cancel and Exit'.format(entry.text,
                                                str(entry.priority)))
        if field == 1:
            print('The Current Text is: {0}'.format(entry.text))
            entry.text = input('New Text:\n')
        elif field == 2:
            print('The Current Priority is: {0}'.format(str(entry.priority)))
            entry.priority = check_priority_overlap(
                int(clean_input('New Priority:')), todo_list)
        elif field == 3:
            break
        else:
            print('Invalid Input - Please Try Again')
    return
def check_list_status(todo_list):
    """Classify the current state of the todo list.

    :param todo_list: the list of ListItem objects to inspect
    :returns 0 if at least one item is visible, 1 if the list is empty,
        2 if every item is hidden"""
    if not todo_list:
        return 1  # nothing stored at all
    if any(item.visible for item in todo_list):
        return 0  # something to show
    return 2  # entries exist but all are hidden
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ListItem:
    """One entry in the todo list.

    Attributes:
        text: the display text of the entry.
        priority: sort key; 1 is the highest priority.
        group: group code (grouping feature not yet implemented).
        visible: False once the entry is marked complete/hidden.
    """

    def __init__(self, text: str, priority: int, group: int, visible: bool):
        self.text = text
        self.priority = priority
        self.group = group
        self.visible = visible
def concept_demonstration():
    """Prompt the user for two numbers and a few words, then print a set
    of arithmetic results and a silly shouting sentence to demonstrate
    string and integer operations.

    :returns nothing"""
    base = clean_input('Please enter a positive number')
    second = clean_input('Please enter a number')
    # The division demos below would crash on a zero divisor.
    while second == 0:
        print('Error: Cannot Divide by 0')
        second = clean_input('Please enter a different number')
    color = input('Please enter a color\n')
    thing = input('Please enter a thing\n')
    spaced_thing = thing + ' '  # trailing space keeps repeats separated
    location = input('Please enter a location\n')
    print(f'{base} raised to the power of {second} is {base ** second}')
    print(f'{base} multiplied by {second} is {base * second}')
    print(f'{base} divided by {second} is {base / second}')
    print(f'The remainder from dividing {base} by {second} is '
          f'{base % second}')
    print(f'{base} divided by {second} rounded down is {base // second}')
    print(f'{base} plus {second} is {base + second}')
    print(f'{base} minus {second} is {base - second}')
    noun = color + ' ' + thing
    if base > 1:
        # Repeat the word int(base) times, separated by single spaces.
        shout = spaced_thing * int(base - 1) + thing
        print(f"The {noun} at {location} yelled '{shout}'")
    elif base < 0:
        # A negative repeat count yields an empty shout.
        shout = spaced_thing * int(base)
        print(f"The {noun} at {location} yelled '{shout}'\n"
              f'You entered a negative number when a positive number was '
              f'requested, so you made the {thing} mute. Good Job.')
    else:
        # base is 0 or 1 after int() truncation: at most one repeat.
        shout = thing * int(base)
        print(f"The {noun} at {location} yelled '{shout}'")
    return
def cascade_list(priority_to_cascade_from, todo_list):
    """Shift every entry at or below the given priority down one slot,
    opening a hole so a new entry can take that priority number.

    :param priority_to_cascade_from: priority at which the hole is opened
    :param todo_list: the list of ListItem objects to renumber"""
    for entry in todo_list:
        if entry.priority < priority_to_cascade_from:
            continue  # entries above the hole keep their numbers
        entry.priority += 1
    return
def check_priority_overlap(priority_to_check, todo_list):
    """Check whether ``priority_to_check`` collides with the priority of
    an existing entry.  On a collision the user chooses to re-enter the
    priority, keep the overlap, or shift lower-priority entries down.

    :param priority_to_check: the candidate priority number
    :param todo_list: the list of ListItem objects to check against
    :returns the resolved priority value"""
    if not any(entry.priority == priority_to_check for entry in todo_list):
        return priority_to_check  # no collision, keep as entered
    choice = 0
    while choice < 1 or choice > 3:
        choice = clean_input(
            'The priority number you entered overlaps with another '
            "entry's priority. Enter:\n1 to change priority number\n2 to "
            'leave as is with overlap\n3 to push all priority numbers '
            'below this entry down by 1')
        if choice < 1 or choice > 3:
            print('Invalid Option Selected\nPlease Try Again')
    if choice == 1:
        priority_to_check = check_priority_overlap(
            int(clean_input('New Priority:')), todo_list)
    elif choice == 3:
        cascade_list(priority_to_check, todo_list)
    return priority_to_check
def sorting(list_object):
    """Key function for ``list.sort``: order ListItem objects by their
    ``priority`` attribute.

    :param list_object: one ListItem object
    :returns the object's priority number"""
    return list_object.priority
<|reserved_special_token_0|>
def clean_input(prompt='Error'):
    """Repeatedly prompt until the user types something that parses as a
    number; text and empty entries are rejected with an error message.

    :param prompt: the text shown to the user, default is Error
    :returns the user's entry as a float"""
    while True:
        entry = input(prompt + '\n')
        try:
            return float(entry)
        except ValueError:
            print('Error: Non-Numeric Entry Detected')
def load_from_file(save_location):
    """Read the save file back into a list of ListItem objects.

    The first line of the file is a format banner and is skipped; every
    entry then occupies four data lines (text, priority, group,
    visibility) followed by one blank separator line.  If the file is
    corrupt the user may erase it and start over, or exit the program.

    :param save_location: path of the .txt save file
    :returns the reconstructed list of ListItem objects"""
    todo = []
    try:
        # 'with' closes the handle on every path; the old code leaked it
        # when the user chose to quit from the error prompt below.
        with open(save_location, 'r') as data_file_r:
            field = 1  # which of the four record fields comes next
            text, priority, group = '', -1, 2
            for line_number, raw_line in enumerate(data_file_r, start=1):
                if (line_number - 1) % 5 == 0:
                    # The banner line and the blank line after each record.
                    field = 1
                    continue
                # rstrip('\n') instead of dropping the last character:
                # the old slice ate a real character when the final line
                # had no trailing newline.
                value = raw_line.rstrip('\n')
                if field == 1:
                    text = value
                    field = 2
                elif field == 2:
                    priority = int(value)
                    field = 3
                elif field == 3:
                    group = int(value)
                    field = 4
                elif field == 4:
                    visible = value != 'False'
                    todo.insert(0, ListItem(text, priority, group, visible))
                    field = 1
    except ValueError:
        print('An error has occurred trying to load the file')
        result = int(clean_input(
            'Please enter a 2 to overwrite the current save file and start '
            'over or any other number to exit the program'))
        if result == 2:
            # Random confirmation digit so a double-tap of 2 cannot wipe
            # the data by accident.
            key = random.randint(2, 9)
            if key == 2:
                key = 1  # 2 already means "overwrite"; avoid reusing it
            result2 = int(clean_input(
                'Are you sure you want to delete all of your saved data\n'
                'Enter {0} to proceed, or anything else to '
                'cancel'.format(str(key))))
            if result2 == key:
                # Bug fix: the old code truncated the hard-coded path
                # 'C:Item_List.txt' instead of the file it failed to load.
                with open(save_location, 'w'):
                    pass
                print('Save Data Erased')
                return []
            print('Program Exiting')
            quit(1)
        else:
            print('Program Exiting')
            quit(1)
    return todo
def save_list(todo_list, save_location):
    """Write ``todo_list`` out to ``save_location`` as a .txt file.

    The first line documents the format; each entry then occupies four
    lines (text, priority, group, visibility) followed by a blank line.

    :param todo_list: the list of ListItem objects to save
    :param save_location: the path of the save file to create/overwrite
    :returns nothing"""
    # 'with' guarantees the handle is closed even if a write fails,
    # which the old open()/close() pair did not.
    with open(save_location, 'w') as data_file_w:
        data_file_w.write(
            'Warning: The Todo-List Program will not be able to load this '
            'save file if it is incorrectly modified. Modify at your own '
            'risk. The structure is Entry Text, Entry Priority as a number, '
            'Entry Group as a number (Not Yet Utilized, but necessary), and '
            'Entry Visibility as a boolean, each on a separate line, a '
            'single line gap in between, and the very first line is '
            'skipped\n')
        for item in todo_list:
            data_file_w.write('{0}\n{1}\n{2}\n{3}\n\n'.format(
                item.text, str(item.priority), str(item.group),
                str(item.visible)))
    return
def add_item(todo_list):
    """Prompt the user for a name and a priority and insert the new
    entry at the front of the todo list, resolving any priority clash.

    :param todo_list: the list of ListItem objects to extend
    :returns nothing"""
    name = input('Please enter the name of the new item\n')
    new_priority = check_priority_overlap(
        int(clean_input('Please enter the priority of this item')),
        todo_list)
    # Grouping is not yet implemented, so every entry joins group 0.
    todo_list.insert(0, ListItem(name, new_priority, 0, True))
    return
def select_item(todo_list, prompt='Error'):
    """Print every entry (hidden ones wrapped in ``~ ... ~``) with a
    1-based number and ask the user to pick one.  Re-prompts while the
    choice is larger than the list; zero or a negative number is the
    caller's cancel signal and is returned as a negative index.

    :param todo_list: the list of ListItem objects to choose from
    :param prompt: the question shown to the user, default is Error
    :returns the chosen entry's 0-based index (negative => cancelled)"""
    while True:
        # enumerate replaces the old hand-maintained counter variable.
        for number, item in enumerate(todo_list, start=1):
            if item.visible:
                print(number, item.text, sep='\t')
            else:
                print(number, '~ {0} ~'.format(item.text), sep='\t')
        index = int(clean_input(prompt))
        if index <= len(todo_list):
            return index - 1
        print('Invalid Input: Number is too big')
def remove_item(todo_list):
    """Ask the user which entry to delete and remove it from the list;
    a zero or negative selection cancels.

    :param todo_list: the list of ListItem objects to shrink
    :returns nothing"""
    chosen = select_item(
        todo_list,
        'Please enter the item number you wish to remove\nEnter a '
        'negative number or zero to cancel')
    if chosen < 0:
        return  # user cancelled
    todo_list.pop(chosen)
    return
def mark_complete(todo_list):
    """Ask the user which entry to mark complete and hide it from the
    normal listing; a zero or negative selection cancels.

    :param todo_list: the list of ListItem objects to modify
    :returns nothing"""
    chosen = select_item(
        todo_list,
        'Please enter the item number you wish to Mark Completed and hide '
        'from the list\nEnter a negative number or zero to cancel')
    if chosen < 0:
        return  # user cancelled
    todo_list[chosen].visible = False
    return
def edit_item(todo_list):
    """Let the user pick an entry and repeatedly edit its text or its
    priority until they choose to exit; a zero or negative selection at
    the picker cancels immediately.

    :param todo_list: the list of ListItem objects holding the entry
    :returns nothing"""
    chosen = select_item(
        todo_list,
        'Please enter the item number you wish to edit\nEnter a negative '
        'number or zero to cancel')
    if chosen < 0:
        return  # user cancelled
    entry = todo_list[chosen]  # alias: edits reach the list in place
    while True:
        field = clean_input(
            'Which value would you like to edit? Enter:\n1 for the Item '
            'Text (Currently: {0})\n2 for the Item Priority (Currently: '
            '{1})\n3 to Cancel and Exit'.format(entry.text,
                                                str(entry.priority)))
        if field == 1:
            print('The Current Text is: {0}'.format(entry.text))
            entry.text = input('New Text:\n')
        elif field == 2:
            print('The Current Priority is: {0}'.format(str(entry.priority)))
            entry.priority = check_priority_overlap(
                int(clean_input('New Priority:')), todo_list)
        elif field == 3:
            break
        else:
            print('Invalid Input - Please Try Again')
    return
def check_list_status(todo_list):
    """Classify the current state of the todo list.

    :param todo_list: the list of ListItem objects to inspect
    :returns 0 if at least one item is visible, 1 if the list is empty,
        2 if every item is hidden"""
    if not todo_list:
        return 1  # nothing stored at all
    if any(item.visible for item in todo_list):
        return 0  # something to show
    return 2  # entries exist but all are hidden
def menu_loop(todo_list, save_file_location):
    """Run the interactive menu until the user chooses Exit (6).

    Each pass prints the list (unless the previous entry was invalid),
    draws a divider, classifies the list state, offers only the commands
    that make sense for that state, and dispatches the user's choice.

    :param todo_list: the list of ListItem objects to display or modify
    :param save_file_location: where the .txt save file is located for saving
    :returns nothing"""
    show_hidden = False  # when True, completed items are shown too
    selection = 0  # last menu choice; 6 terminates the loop
    invalid_input = False  # suppresses re-printing after a bad choice
    while selection != 6:
        if invalid_input:
            invalid_input = False
        else:
            # to_save=True: every pass also persists the list to disk.
            print_list(save_file_location, todo_list, True, show_hidden)
        divider(137 + 17)
        # 0: has visible items, 1: empty list, 2: all items hidden.
        list_status = check_list_status(todo_list)
        if list_status == 0:
            selection = int(clean_input(
                """Please enter: 1 for Add Item, 2 for Remove Item, 3 for Edit Item, 4 for Mark Item Complete, 5 for Toggle Hidden, and 6 for Exit, 7 for Concept Demonstration
"""
                ))
        elif list_status == 1:
            selection = int(clean_input(
                """Please enter: 1 for Add Item, and 6 for Exit, 7 for Concept Demonstration
"""
                ))
        else:
            selection = int(clean_input(
                """Please enter: 1 for Add Item, 5 for Toggle Hidden, and 6 for Exit, 7 for Concept Demonstration
"""
                ))
        print('')
        if selection == 1:
            add_item(todo_list)
        elif selection == 2:
            # Removing needs a visible item to point at.
            if list_status == 0:
                remove_item(todo_list)
            elif list_status == 2:
                print(
                    'Invalid Command: The Todo List has no visible items to remove'
                    )
            else:
                print('Invalid Command: The Todo List has no items to remove')
        elif selection == 3:
            if list_status == 0:
                edit_item(todo_list)
            elif list_status == 2:
                print(
                    'Invalid Command: The Todo List has no visible items to edit'
                    )
            else:
                print('Invalid Command: The Todo List has no items to edit')
        elif selection == 4:
            if list_status == 0:
                mark_complete(todo_list)
            elif list_status == 2:
                print(
                    'Invalid Command: The Todo List has no visible items to mark complete'
                    )
            else:
                print(
                    'Invalid Command: The Todo List has no items to mark complete'
                    )
        elif selection == 5:
            # Toggle whether completed (hidden) items are displayed.
            if list_status == 0 or list_status == 2:
                if show_hidden:
                    print('No longer showing hidden items')
                    show_hidden = False
                else:
                    print('Now showing hidden items')
                    show_hidden = True
            else:
                print(
                    'Invalid Command: The Todo List has no items to show or hide'
                    )
        elif selection == 6:
            print('Now Closing')
        elif selection == 7:
            concept_demonstration()
        else:
            invalid_input = True
            print('Invalid Input\nPlease Try Again')
def main():
    """Ensure the save file exists, load it, greet the user, and hand
    control to the interactive menu loop until the program exits.

    :returns nothing"""
    save_file_location = 'Item_List.txt'
    # Append mode creates the file if it is missing without truncating
    # existing data; 'with' closes the handle immediately.
    with open(save_file_location, 'a'):
        pass
    loaded_list = load_from_file(save_file_location)
    print('Welcome to the To-Do List - Version: 0.1.2')
    divider(42)
    menu_loop(loaded_list, save_file_location)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
"""This program displays a customizable list of items by priority value,
with priority 1 being the highest. Allows the user to add, edit,
mark complete, show completed (hidden), and remove items. Stores the list of
items in a .txt file located where this program's main.py file is. All
changes are automatically saved to the .txt file. Also includes a fun
technical knowledge demonstration using numbers and text responses. The
program will create a new save file if none exists, and prompts for save
file overwrite if data cannot be read successfully. Menu navigation is
accomplished through numeric inputs due to the text-only interface and
tedium of typing out each word accurately and repeatedly."""
__author__ = 'Jordan Kooyman'
# 1/26/21 - 4/15/2021 To-Do List Program - Integration Project for COP 1500
# Spring 2021
# Configurable settings saved to a separate file (?)
# Ability to load a different data or config file (?)
# Color code items by group (?)
# Add a basic calculator to meet math (and string?) command requirements (?)
# TODO: Implement a group system that shows all groups combined, just one
# group, or all categorized by group, and group names - be able to change
# group names (new function) - all functions support groups (individual or
# combined)
import random
# Random number generation used as random verification number when
# overwriting the save file in the event of a failure to load from the save
# file
class ListItem:
    """One entry in the todo list (a small record-style class).

    Attributes:
        text: the display text of the entry.
        priority: sort key; 1 is the highest priority.
        group: group code (grouping feature not yet implemented).
        visible: False once the entry is marked complete/hidden.
    """

    def __init__(self, text: str, priority: int, group: int, visible: bool):  # From w3schools.com
        self.text = text
        self.priority = priority
        self.group = group
        self.visible = visible
def concept_demonstration():
    """Prompt the user for two numbers and a few words, then print a set
    of arithmetic results and a silly shouting sentence to demonstrate
    string and integer operations.

    :returns nothing"""
    base = clean_input('Please enter a positive number')
    second = clean_input('Please enter a number')
    # The division demos below would crash on a zero divisor.
    while second == 0:
        print('Error: Cannot Divide by 0')
        second = clean_input('Please enter a different number')
    color = input('Please enter a color\n')
    thing = input('Please enter a thing\n')
    spaced_thing = thing + ' '  # trailing space keeps repeats separated
    location = input('Please enter a location\n')
    print(f'{base} raised to the power of {second} is {base ** second}')
    print(f'{base} multiplied by {second} is {base * second}')
    print(f'{base} divided by {second} is {base / second}')
    print(f'The remainder from dividing {base} by {second} is '
          f'{base % second}')
    print(f'{base} divided by {second} rounded down is {base // second}')
    print(f'{base} plus {second} is {base + second}')
    print(f'{base} minus {second} is {base - second}')
    noun = color + ' ' + thing
    if base > 1:
        # Repeat the word int(base) times, separated by single spaces.
        shout = spaced_thing * int(base - 1) + thing
        print(f"The {noun} at {location} yelled '{shout}'")
    elif base < 0:
        # A negative repeat count yields an empty shout.
        shout = spaced_thing * int(base)
        print(f"The {noun} at {location} yelled '{shout}'\n"
              f'You entered a negative number when a positive number was '
              f'requested, so you made the {thing} mute. Good Job.')
    else:
        # base is 0 or 1 after int() truncation: at most one repeat.
        shout = thing * int(base)
        print(f"The {noun} at {location} yelled '{shout}'")
    return
def cascade_list(priority_to_cascade_from, todo_list):
    """Shift every entry at or below the given priority down one slot,
    opening a hole so a new entry can take that priority number.

    :param priority_to_cascade_from: priority at which the hole is opened
    :param todo_list: the list of ListItem objects to renumber"""
    for entry in todo_list:
        if entry.priority < priority_to_cascade_from:
            continue  # entries above the hole keep their numbers
        entry.priority += 1
    return
def check_priority_overlap(priority_to_check, todo_list):
    """Check whether ``priority_to_check`` collides with the priority of
    an existing entry.  On a collision the user chooses to re-enter the
    priority, keep the overlap, or shift lower-priority entries down.

    :param priority_to_check: the candidate priority number
    :param todo_list: the list of ListItem objects to check against
    :returns the resolved priority value"""
    if not any(entry.priority == priority_to_check for entry in todo_list):
        return priority_to_check  # no collision, keep as entered
    choice = 0
    while choice < 1 or choice > 3:
        choice = clean_input(
            'The priority number you entered overlaps with another '
            "entry's priority. Enter:\n1 to change priority number\n2 to "
            'leave as is with overlap\n3 to push all priority numbers '
            'below this entry down by 1')
        if choice < 1 or choice > 3:
            print('Invalid Option Selected\nPlease Try Again')
    if choice == 1:
        priority_to_check = check_priority_overlap(
            int(clean_input('New Priority:')), todo_list)
    elif choice == 3:
        cascade_list(priority_to_check, todo_list)
    return priority_to_check
def sorting(list_object):
    """Key function for ``list.sort``: order ListItem objects by their
    ``priority`` attribute.

    :param list_object: one ListItem object
    :returns the object's priority number"""
    return list_object.priority
def print_list(save_file_location, my_list, to_save=False, show_hidden=False):
    """Print the todo list in priority order, optionally including
    hidden (completed) entries, and optionally save the list afterwards.

    :param save_file_location: path of the .txt save file
    :param my_list: the list of ListItem objects to display (sorted in
        place by priority)
    :param to_save: when True, write the list back to the save file,
        default is false
    :param show_hidden: when True, also display hidden entries marked
        with ``~``, default is false
    :returns nothing"""
    my_list.sort(key=lambda entry: entry.priority)  # priority 1 first
    print('To-Do:')
    for entry in my_list:
        if entry.visible and not show_hidden:
            print(entry.priority, entry.text, sep='.\t')
        elif show_hidden:
            if entry.visible:
                print(entry.priority, entry.text, sep='.\t')
            else:
                # Hidden entries are flagged with a tilde.
                print('{0}.~\t{1}'.format(entry.priority, entry.text))
    if to_save:
        save_list(my_list, save_file_location)
    return
def divider(size=100):
    """Print a horizontal rule of ``size`` dashes followed by a newline.

    :param size: how many dashes to print, default is 100
    :returns nothing"""
    # One write instead of the old per-character print loop.
    print('-' * size)
    return
def clean_input(prompt='Error'):
    """Repeatedly prompt until the user types something that parses as a
    number; text and empty entries are rejected with an error message.

    :param prompt: the text shown to the user, default is Error
    :returns the user's entry as a float"""
    while True:
        entry = input(prompt + '\n')
        try:
            # float() accepts negatives and decimals; anything else
            # raises ValueError and the user is asked again.
            return float(entry)
        except ValueError:
            print('Error: Non-Numeric Entry Detected')
def load_from_file(save_location):
    """Read the save file back into a list of ListItem objects.

    The first line of the file is a format banner and is skipped; every
    entry then occupies four data lines (text, priority, group,
    visibility) followed by one blank separator line.  If the file is
    corrupt the user may erase it and start over, or exit the program.

    :param save_location: path of the .txt save file
    :returns the reconstructed list of ListItem objects"""
    todo = []
    try:
        # 'with' closes the handle on every path; the old code leaked it
        # when the user chose to quit from the error prompt below.
        with open(save_location, 'r') as data_file_r:
            field = 1  # which of the four record fields comes next
            text, priority, group = '', -1, 2
            for line_number, raw_line in enumerate(data_file_r, start=1):
                if (line_number - 1) % 5 == 0:
                    # The banner line and the blank line after each record.
                    field = 1
                    continue
                # rstrip('\n') instead of dropping the last character:
                # the old slice ate a real character when the final line
                # had no trailing newline.
                value = raw_line.rstrip('\n')
                if field == 1:
                    text = value
                    field = 2
                elif field == 2:
                    priority = int(value)
                    field = 3
                elif field == 3:
                    group = int(value)
                    field = 4
                elif field == 4:
                    visible = value != 'False'
                    todo.insert(0, ListItem(text, priority, group, visible))
                    field = 1
    except ValueError:
        print('An error has occurred trying to load the file')
        result = int(clean_input(
            'Please enter a 2 to overwrite the current save file and start '
            'over or any other number to exit the program'))
        if result == 2:
            # Random confirmation digit so a double-tap of 2 cannot wipe
            # the data by accident.
            key = random.randint(2, 9)
            if key == 2:
                key = 1  # 2 already means "overwrite"; avoid reusing it
            result2 = int(clean_input(
                'Are you sure you want to delete all of your saved data\n'
                'Enter {0} to proceed, or anything else to '
                'cancel'.format(str(key))))
            if result2 == key:
                # Bug fix: the old code truncated the hard-coded path
                # 'C:Item_List.txt' instead of the file it failed to load.
                with open(save_location, 'w'):
                    pass
                print('Save Data Erased')
                return []
            print('Program Exiting')
            quit(1)
        else:
            print('Program Exiting')
            quit(1)
    return todo
def save_list(todo_list, save_location):
    """Save a list of ListItem objects to a .txt save file.

    Overwrites the file at save_location. The first line is a human-readable
    warning about the format; each entry then occupies four lines (text,
    priority, group, visibility) followed by one blank line, matching the
    layout read back by load_from_file.

    :param todo_list: the list of ListItem objects to save to the save file
    :param save_location: the location to create or overwrite the save file
    :returns nothing"""
    # Context manager guarantees the file handle is closed even if a write
    # raises part-way through (the original open()/close() pair did not).
    with open(save_location, "w") as data_file_w:
        data_file_w.write("Warning: The Todo-List Program will not be able to "
                          "load this save file if it is incorrectly modified. "
                          "Modify at your own risk. The structure is Entry "
                          "Text, Entry Priority as a number, Entry Group as a "
                          "number (Not Yet Utilized, but necessary), and Entry "
                          "Visibility as a boolean, each on a separate line, a "
                          "single line gap in between, and the "
                          "very first line is skipped\n")
        for item in todo_list:
            data_file_w.write("{0}\n{1}\n{2}\n{3}\n\n".format(item.text,
                                                              str(item.priority),
                                                              str(item.group),
                                                              str(item.visible)))
    return
def add_item(todo_list):
    """Prompt the user for the name and priority of a new entry and insert
    the resulting ListItem at the front of the todo list.

    The chosen priority is checked for overlap with existing entries. The
    group system is not yet implemented, so every entry is stored in group 0
    and starts out visible.

    :param todo_list: the list of ListItem objects to add a new ListItem
        object to
    :returns nothing"""
    name = input("Please enter the name of the new item\n")
    chosen_priority = check_priority_overlap(
        int(clean_input("Please enter the priority of this item")), todo_list)
    # Group number prompt is disabled until the group feature exists;
    # every new entry is visible by default.
    todo_list.insert(0, ListItem(name, chosen_priority, 0, True))
    return
def select_item(todo_list, prompt='Error'):
    """Print every entry in the todo list with a one-based selection number
    and ask the user which one to act on. Hidden entries are still listed,
    wrapped in ~ ~ markers, so they remain selectable.

    :param todo_list: the list of ListItem objects to display
    :param prompt: the prompt to display to the user, default is Error
    :returns the selected index converted to zero-based form (user input
        minus one); may be negative, which callers treat as a cancel"""
    while True:
        position = 1  # one-based number shown next to each entry
        for entry in todo_list:
            if entry.visible:
                print(position, entry.text, sep='\t')
            else:
                print(position, "~ {0} ~".format(entry.text), sep='\t')
            position += 1
        choice = int(clean_input(prompt))
        # Anything below the running count (including zero and negatives,
        # which mean "cancel") is accepted; only too-large numbers retry.
        if choice < position:
            return choice - 1
        print("Invalid Input: Number is too big")
def remove_item(todo_list):
    """Ask the user which entry to delete and remove it from the list.

    :param todo_list: the list of ListItem objects from which to remove
        one object
    :returns nothing"""
    chosen = select_item(todo_list, "Please enter the item number you wish "
                                    "to remove\nEnter a negative number or "
                                    "zero to cancel")
    # select_item returns a zero-based index, so anything below zero means
    # the user cancelled.
    if chosen < 0:
        return
    todo_list.pop(chosen)
    return
def mark_complete(todo_list):
    """Ask the user which entry to mark as done and hide it from normal
    printing. The entry is kept in the list; only its visibility flag is
    cleared, so it can still be selected later.

    :param todo_list: the list of ListItem objects to modify
    :returns nothing"""
    chosen = select_item(todo_list,
                         "Please enter the item number you wish to Mark "
                         "Completed and hide from the list\nEnter a negative "
                         "number or zero to cancel")
    if chosen < 0:
        return  # non-positive selection means the user cancelled
    todo_list[chosen].visible = False
    return
def edit_item(todo_list):
    """Ask the user which entry to edit, then repeatedly offer a menu to
    change its text or priority until the user chooses to exit.

    :param todo_list: the list of ListItem objects that gets one object
        modified
    :returns nothing"""
    chosen = select_item(todo_list, "Please enter the item number you wish "
                                    "to edit\nEnter a negative number or "
                                    "zero to cancel")
    if chosen < 0:
        return  # the user cancelled the selection
    entry = todo_list[chosen]  # alias: mutations apply to the list item
    done = False
    while not done:
        choice = clean_input("Which value would you like to edit? Enter:\n1"
                             " for the Item Text (Currently: {0})\n2 for "
                             "the Item Priority (Currently: {1})\n3 to "
                             "Cancel and Exit".format(entry.text,
                                                      str(entry.priority)))
        if choice == 1:  # rename the entry
            print("The Current Text is: {0}".format(entry.text))
            entry.text = input("New Text:\n")
        elif choice == 2:  # re-prioritize, checking for overlaps
            print("The Current Priority is: {0}".format(str(entry.priority)))
            entry.priority = check_priority_overlap(
                int(clean_input("New Priority:")), todo_list)
        elif choice == 3:  # leave the edit menu
            done = True
        else:
            print("Invalid Input - Please Try Again")
    return
def check_list_status(todo_list):
    """Report the overall visibility state of the todo list.

    :param todo_list: the list of ListItem objects to check
    :returns an integer status code:
        0 - at least one item is visible,
        1 - the list is empty,
        2 - the list has items but every one of them is hidden"""
    if not todo_list:
        return 1  # Empty List
    # any() short-circuits at the first visible item instead of scanning
    # the whole list the way the original index loop did.
    if any(item.visible for item in todo_list):
        return 0  # Neither empty nor entirely hidden
    return 2  # Entirely Hidden List
def menu_loop(todo_list, save_file_location):
    """Repeatedly display the todo list and the command menu until the user
    chooses option 6 (Exit).

    The menu offered depends on the list state from check_list_status():
    a populated list gets all commands, an empty list only Add/Exit/Demo,
    and an entirely hidden list adds Toggle Hidden. Invalid selections skip
    the re-print of the list on the next pass.

    NOTE(review): save_list is never called here; print_list receives
    save_file_location and presumably persists the list each pass — confirm
    against print_list's definition.

    :param todo_list: the list of ListItem objects to display or modify
    :param save_file_location: where the .txt save file is located for saving
    :returns nothing"""
    show_hidden = False  # whether hidden entries are included in the printout
    selection = 0  # last menu choice; 6 terminates the loop
    invalid_input = False  # suppresses one re-print after a bad selection
    while selection != 6:
        if invalid_input:
            invalid_input = False
        else:
            print_list(save_file_location, todo_list, True, show_hidden)
            divider(137 + 17)  # Length of prompt statement below
        list_status = check_list_status(todo_list)
        if list_status == 0:  # Visible items exist - offer the full menu
            selection = int(clean_input("Please enter: 1 for Add Item, 2 for "
                                        "Remove Item, 3 for Edit Item, "
                                        "4 for Mark Item Complete, "
                                        "5 for Toggle Hidden, and 6 for "
                                        "Exit, 7 for Concept "
                                        "Demonstration\n"))
        elif list_status == 1:  # Empty List - No Remove, Edit, Mark, or Toggle
            selection = int(clean_input("Please enter: 1 for Add Item, and 6 "
                                        "for Exit, 7 for Concept "
                                        "Demonstration\n"))
        else:  # Entirely Hidden List - only Add, Toggle, Exit, Demo apply
            selection = int(clean_input("Please enter: 1 for Add Item, 5 for "
                                        "Toggle Hidden, and 6 for Exit, "
                                        "7 for Concept Demonstration\n"))
        # clean_input guarantees a numeric value; int() truncates any
        # decimal so it can be compared against the menu codes below
        print("")  # Blank line separating the user's input from the
        # program's response
        if selection == 1:  # Add Item
            add_item(todo_list)
        elif selection == 2:  # Remove Item - only valid when something is
            # visible to remove
            if list_status == 0:
                remove_item(todo_list)
            elif list_status == 2:
                print("Invalid Command: The Todo List has no visible items "
                      "to remove")
            else:
                print("Invalid Command: The Todo List has no items to remove")
        elif selection == 3:  # Edit Item - only valid when something is
            # visible to edit
            if list_status == 0:
                edit_item(todo_list)
            elif list_status == 2:
                print("Invalid Command: The Todo List has no visible items "
                      "to edit")
            else:
                print("Invalid Command: The Todo List has no items to edit")
        elif selection == 4:  # Mark Item Complete - only valid when something
            # is visible to hide
            if list_status == 0:
                mark_complete(todo_list)
            elif list_status == 2:
                print("Invalid Command: The Todo List has no visible items "
                      "to mark complete")
            else:
                print("Invalid Command: The Todo List has no items to mark "
                      "complete")
        elif selection == 5:  # Toggle Hidden - flip whether hidden entries
            # are printed
            if list_status == 0 or list_status == 2:
                if show_hidden:
                    print("No longer showing hidden items")
                    show_hidden = False
                else:
                    print("Now showing hidden items")
                    show_hidden = True
            else:
                print("Invalid Command: The Todo List has no items to show or "
                      "hide")
        elif selection == 6:  # Exit Program - loop condition ends the loop
            print("Now Closing")
        elif selection == 7:  # Extra section to demonstrate proficiency with
            # topics covered in class - Sprint 1
            concept_demonstration()
        else:
            invalid_input = True
            print("Invalid Input\nPlease Try Again")
def main():
    """Ensure the save file exists, load it into memory, print a welcome
    banner, and hand control to the interactive menu loop until the program
    is closed.

    :returns nothing"""
    save_file_location = "Item_List.txt"
    # Touch the file via append mode inside a context manager: an existing
    # file is left untouched, a missing one is created, and the handle is
    # always closed (the original used a bare open()/close() pair).
    with open(save_file_location, "a"):
        pass
    loaded_list = load_from_file(save_file_location)
    print("Welcome to the To-Do List - Version: 0.1.2")
    divider(42)  # Length of welcome statement above
    menu_loop(loaded_list, save_file_location)
# Run the program only when this file is executed directly, not when it is
# imported as a module.
if __name__ == "__main__":
    main()
|
flexible
|
{
"blob_id": "168a12e6653a0526f29c163913def50147481154",
"index": 632,
"step-1": "<mask token>\n\n\nclass ListItem:\n \"\"\"A custom object that stores four pieces of data representing each\n entry in the todo list. Contains the text of the todo list entry,\n the priority of the entry, the group code (NYI), and the visibility of\n the entry\"\"\"\n\n def __init__(self, text, priority, group, visible):\n self.text = text\n self.priority = priority\n self.group = group\n self.visible = visible\n\n\n<mask token>\n\n\ndef check_priority_overlap(priority_to_check, todo_list):\n \"\"\"The purpose of this function is to check if the user's priority\n number input overlaps with a priority number already in the list,\n and if it does, prompts the user whether they want to keep it, change\n it, or move everything in the list that has a larger priority value up\n by one.\n :param priority_to_check: the number to check for overlap with\n :param todo_list: the list of ListItem objects to check in\n :returns the priority value, either changed or the original input\"\"\"\n overlap = False\n for item in todo_list:\n if item.priority == priority_to_check:\n overlap = True\n if overlap:\n answer = 0\n while answer > 3 or answer < 1:\n answer = clean_input(\n \"\"\"The priority number you entered overlaps with another entry's priority. 
Enter:\n1 to change priority number\n2 to leave as is with overlap\n3 to push all priority numbers below this entry down by 1\"\"\"\n )\n if answer > 3 or answer < 1:\n print('Invalid Option Selected\\nPlease Try Again')\n if answer == 1:\n priority_to_check = check_priority_overlap(int(clean_input(\n 'New Priority:')), todo_list)\n elif answer == 3:\n cascade_list(priority_to_check, todo_list)\n return priority_to_check\n\n\n<mask token>\n\n\ndef clean_input(prompt='Error'):\n \"\"\"The purpose of this function is to prompt the user for a numerical\n input and only accept a numerical input, rejects no input and text input.\n :param prompt: the prompt the user sees, default is Error\n :returns the user input as a float\"\"\"\n text = True\n phrase = '0'\n while text:\n phrase = input(prompt + '\\n')\n try:\n float(phrase)\n text = False\n except ValueError:\n print('Error: Non-Numeric Entry Detected')\n return float(phrase)\n\n\n<mask token>\n\n\ndef add_item(todo_list):\n \"\"\"The purpose of this function is to prompt the user for the two\n fields of necessary information to make a new entry in the todo list,\n the item name and priority, checking if the priority overlaps with an\n existing entry in the todo list.\n :param todo_list: the list of ListItem objects to add a new ListItem\n object to\n :returns nothing\"\"\"\n text = input('Please enter the name of the new item\\n')\n priority = check_priority_overlap(int(clean_input(\n 'Please enter the priority of this item')), todo_list)\n group = 0\n visible = True\n todo_list.insert(0, ListItem(text, priority, group, visible))\n return\n\n\n<mask token>\n\n\ndef mark_complete(todo_list):\n \"\"\"The purpose of this function is to mark a selectedListItem object as\n hidden and not to be printed unless specified, apart from selecting items.\n :param todo_list: the list of ListItem objects to modify\n :returns nothing\"\"\"\n item = select_item(todo_list,\n \"\"\"Please enter the item number you wish to Mark 
Completed and hide from the list\nEnter a negative number or zero to cancel\"\"\"\n )\n if item >= 0:\n todo_list[item].visible = False\n return\n\n\n<mask token>\n\n\ndef check_list_status(todo_list):\n \"\"\"The purpose of this function is to check whether there are visible\n items in the list, the entire list is hidden, or the list contains no\n more ListItem objects\n :param todo_list: the list of ListItem objects to check\n :returns which condition using integer codes\"\"\"\n if len(todo_list) == 0:\n state = 1\n else:\n state = 2\n for item_index in range(len(todo_list)):\n if todo_list[item_index].visible:\n state = 0\n return state\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ListItem:\n \"\"\"A custom object that stores four pieces of data representing each\n entry in the todo list. Contains the text of the todo list entry,\n the priority of the entry, the group code (NYI), and the visibility of\n the entry\"\"\"\n\n def __init__(self, text, priority, group, visible):\n self.text = text\n self.priority = priority\n self.group = group\n self.visible = visible\n\n\n<mask token>\n\n\ndef check_priority_overlap(priority_to_check, todo_list):\n \"\"\"The purpose of this function is to check if the user's priority\n number input overlaps with a priority number already in the list,\n and if it does, prompts the user whether they want to keep it, change\n it, or move everything in the list that has a larger priority value up\n by one.\n :param priority_to_check: the number to check for overlap with\n :param todo_list: the list of ListItem objects to check in\n :returns the priority value, either changed or the original input\"\"\"\n overlap = False\n for item in todo_list:\n if item.priority == priority_to_check:\n overlap = True\n if overlap:\n answer = 0\n while answer > 3 or answer < 1:\n answer = clean_input(\n \"\"\"The priority number you entered overlaps with another entry's priority. 
Enter:\n1 to change priority number\n2 to leave as is with overlap\n3 to push all priority numbers below this entry down by 1\"\"\"\n )\n if answer > 3 or answer < 1:\n print('Invalid Option Selected\\nPlease Try Again')\n if answer == 1:\n priority_to_check = check_priority_overlap(int(clean_input(\n 'New Priority:')), todo_list)\n elif answer == 3:\n cascade_list(priority_to_check, todo_list)\n return priority_to_check\n\n\n<mask token>\n\n\ndef clean_input(prompt='Error'):\n \"\"\"The purpose of this function is to prompt the user for a numerical\n input and only accept a numerical input, rejects no input and text input.\n :param prompt: the prompt the user sees, default is Error\n :returns the user input as a float\"\"\"\n text = True\n phrase = '0'\n while text:\n phrase = input(prompt + '\\n')\n try:\n float(phrase)\n text = False\n except ValueError:\n print('Error: Non-Numeric Entry Detected')\n return float(phrase)\n\n\n<mask token>\n\n\ndef save_list(todo_list, save_location):\n \"\"\"The purpose of this function is to save a list of ListItem objects to a\n specified location in a .txt file with the first line of the document\n being an explanation of the file format being used.\n :param todo_list: the list of ListItem objects to save to the save file\n :param save_location: the location to create or overwrite the save file\n :returns nothing\"\"\"\n data_file_w = open(save_location, 'w')\n data_file_w.write(\n \"\"\"Warning: The Todo-List Program will not be able to load this save file if it is incorrectly modified. Modify at your own risk. 
The structure is Entry Text, Entry Priority as a number, Entry Group as a number (Not Yet Utilized, but necessary), and Entry Visibility as a boolean, each on a separate line, a single line gap in between, and the very first line is skipped\n\"\"\"\n )\n for item in todo_list:\n data_file_w.write('{0}\\n{1}\\n{2}\\n{3}\\n\\n'.format(item.text, str(\n item.priority), str(item.group), str(item.visible)))\n data_file_w.close()\n return\n\n\ndef add_item(todo_list):\n \"\"\"The purpose of this function is to prompt the user for the two\n fields of necessary information to make a new entry in the todo list,\n the item name and priority, checking if the priority overlaps with an\n existing entry in the todo list.\n :param todo_list: the list of ListItem objects to add a new ListItem\n object to\n :returns nothing\"\"\"\n text = input('Please enter the name of the new item\\n')\n priority = check_priority_overlap(int(clean_input(\n 'Please enter the priority of this item')), todo_list)\n group = 0\n visible = True\n todo_list.insert(0, ListItem(text, priority, group, visible))\n return\n\n\n<mask token>\n\n\ndef remove_item(todo_list):\n \"\"\"The purpose of this function is to delete a ListItem object from a\n list of ListItem objects by prompting the user for the index and\n verifying they want to delete the item.\n :param todo_list: the list of ListItem objects from which to remove\n one object\n :returns nothing\"\"\"\n item = select_item(todo_list,\n \"\"\"Please enter the item number you wish to remove\nEnter a negative number or zero to cancel\"\"\"\n )\n if item >= 0:\n todo_list.pop(item)\n return\n\n\ndef mark_complete(todo_list):\n \"\"\"The purpose of this function is to mark a selectedListItem object as\n hidden and not to be printed unless specified, apart from selecting items.\n :param todo_list: the list of ListItem objects to modify\n :returns nothing\"\"\"\n item = select_item(todo_list,\n \"\"\"Please enter the item number you wish to Mark Completed and 
hide from the list\nEnter a negative number or zero to cancel\"\"\"\n )\n if item >= 0:\n todo_list[item].visible = False\n return\n\n\ndef edit_item(todo_list):\n \"\"\"The purpose of this function is to edit a ListItem object in the\n list of ListItem objects, changing either the name or priority\n :param todo_list: the list of ListItem objects that gets one object\n modified\n :returns nothing\"\"\"\n item = select_item(todo_list,\n \"\"\"Please enter the item number you wish to edit\nEnter a negative number or zero to cancel\"\"\"\n )\n if item >= 0:\n while True:\n value = clean_input(\n \"\"\"Which value would you like to edit? Enter:\n1 for the Item Text (Currently: {0})\n2 for the Item Priority (Currently: {1})\n3 to Cancel and Exit\"\"\"\n .format(todo_list[item].text, str(todo_list[item].priority)))\n if value == 1:\n print('The Current Text is: {0}'.format(todo_list[item].text))\n todo_list[item].text = input('New Text:\\n')\n elif value == 2:\n print('The Current Priority is: {0}'.format(str(todo_list[\n item].priority)))\n todo_list[item].priority = check_priority_overlap(int(\n clean_input('New Priority:')), todo_list)\n elif value == 3:\n break\n else:\n print('Invalid Input - Please Try Again')\n return\n\n\ndef check_list_status(todo_list):\n \"\"\"The purpose of this function is to check whether there are visible\n items in the list, the entire list is hidden, or the list contains no\n more ListItem objects\n :param todo_list: the list of ListItem objects to check\n :returns which condition using integer codes\"\"\"\n if len(todo_list) == 0:\n state = 1\n else:\n state = 2\n for item_index in range(len(todo_list)):\n if todo_list[item_index].visible:\n state = 0\n return state\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ListItem:\n \"\"\"A custom object that stores four pieces of data representing each\n entry in the todo list. Contains the text of the todo list entry,\n the priority of the entry, the group code (NYI), and the visibility of\n the entry\"\"\"\n\n def __init__(self, text, priority, group, visible):\n self.text = text\n self.priority = priority\n self.group = group\n self.visible = visible\n\n\ndef concept_demonstration():\n \"\"\"The purpose of this function is to prompt the user for numbers and\n strings and manipulate them to demonstrate programming fluency with\n string and integer operations.\n :returns nothing\"\"\"\n number = clean_input('Please enter a positive number')\n number2 = clean_input('Please enter a number')\n while number2 == 0:\n print('Error: Cannot Divide by 0')\n number2 = clean_input('Please enter a different number')\n color = input('Please enter a color\\n')\n thing = input('Please enter a thing\\n')\n thing2 = thing + ' '\n location = input('Please enter a location\\n')\n print(str(number) + ' raised to the power of ' + str(number2) + ' is ' +\n str(number ** number2))\n print('{0} multiplied by {1} is {2}'.format(str(number), str(number2),\n str(number * number2)))\n print('{0} divided by {1} is {2}'.format(str(number), str(number2), str\n (number / number2)))\n print('The remainder from dividing {0} by {1} is {2}'.format(str(\n number), str(number2), str(number % number2)))\n print('{0} divided by {1} rounded down is {2}'.format(str(number), str(\n number2), str(number // number2)))\n print('{0} plus {1} is {2}'.format(str(number), str(number2), str(\n number + number2)))\n print('{0} minus {1} is {2}'.format(str(number), str(number2), str(\n number - number2)))\n if number > 1:\n print(\"The {0} at {1} yelled '{2}'\".format(color + ' ' + thing,\n location, thing2 * int(number - 1) + thing))\n elif number < 0:\n print(\n \"\"\"The {0} at {1} yelled '{2}'\nYou entered a negative number when a positive 
number was requested, so you made the {3} mute. Good Job.\"\"\"\n .format(color + ' ' + thing, location, thing2 * int(number), thing)\n )\n else:\n print(\"The {0} at {1} yelled '{2}'\".format(color + ' ' + thing,\n location, thing * int(number)))\n return\n\n\n<mask token>\n\n\ndef check_priority_overlap(priority_to_check, todo_list):\n \"\"\"The purpose of this function is to check if the user's priority\n number input overlaps with a priority number already in the list,\n and if it does, prompts the user whether they want to keep it, change\n it, or move everything in the list that has a larger priority value up\n by one.\n :param priority_to_check: the number to check for overlap with\n :param todo_list: the list of ListItem objects to check in\n :returns the priority value, either changed or the original input\"\"\"\n overlap = False\n for item in todo_list:\n if item.priority == priority_to_check:\n overlap = True\n if overlap:\n answer = 0\n while answer > 3 or answer < 1:\n answer = clean_input(\n \"\"\"The priority number you entered overlaps with another entry's priority. 
Enter:\n1 to change priority number\n2 to leave as is with overlap\n3 to push all priority numbers below this entry down by 1\"\"\"\n )\n if answer > 3 or answer < 1:\n print('Invalid Option Selected\\nPlease Try Again')\n if answer == 1:\n priority_to_check = check_priority_overlap(int(clean_input(\n 'New Priority:')), todo_list)\n elif answer == 3:\n cascade_list(priority_to_check, todo_list)\n return priority_to_check\n\n\n<mask token>\n\n\ndef clean_input(prompt='Error'):\n \"\"\"The purpose of this function is to prompt the user for a numerical\n input and only accept a numerical input, rejects no input and text input.\n :param prompt: the prompt the user sees, default is Error\n :returns the user input as a float\"\"\"\n text = True\n phrase = '0'\n while text:\n phrase = input(prompt + '\\n')\n try:\n float(phrase)\n text = False\n except ValueError:\n print('Error: Non-Numeric Entry Detected')\n return float(phrase)\n\n\n<mask token>\n\n\ndef save_list(todo_list, save_location):\n \"\"\"The purpose of this function is to save a list of ListItem objects to a\n specified location in a .txt file with the first line of the document\n being an explanation of the file format being used.\n :param todo_list: the list of ListItem objects to save to the save file\n :param save_location: the location to create or overwrite the save file\n :returns nothing\"\"\"\n data_file_w = open(save_location, 'w')\n data_file_w.write(\n \"\"\"Warning: The Todo-List Program will not be able to load this save file if it is incorrectly modified. Modify at your own risk. 
The structure is Entry Text, Entry Priority as a number, Entry Group as a number (Not Yet Utilized, but necessary), and Entry Visibility as a boolean, each on a separate line, a single line gap in between, and the very first line is skipped\n\"\"\"\n )\n for item in todo_list:\n data_file_w.write('{0}\\n{1}\\n{2}\\n{3}\\n\\n'.format(item.text, str(\n item.priority), str(item.group), str(item.visible)))\n data_file_w.close()\n return\n\n\ndef add_item(todo_list):\n \"\"\"The purpose of this function is to prompt the user for the two\n fields of necessary information to make a new entry in the todo list,\n the item name and priority, checking if the priority overlaps with an\n existing entry in the todo list.\n :param todo_list: the list of ListItem objects to add a new ListItem\n object to\n :returns nothing\"\"\"\n text = input('Please enter the name of the new item\\n')\n priority = check_priority_overlap(int(clean_input(\n 'Please enter the priority of this item')), todo_list)\n group = 0\n visible = True\n todo_list.insert(0, ListItem(text, priority, group, visible))\n return\n\n\n<mask token>\n\n\ndef remove_item(todo_list):\n \"\"\"The purpose of this function is to delete a ListItem object from a\n list of ListItem objects by prompting the user for the index and\n verifying they want to delete the item.\n :param todo_list: the list of ListItem objects from which to remove\n one object\n :returns nothing\"\"\"\n item = select_item(todo_list,\n \"\"\"Please enter the item number you wish to remove\nEnter a negative number or zero to cancel\"\"\"\n )\n if item >= 0:\n todo_list.pop(item)\n return\n\n\ndef mark_complete(todo_list):\n \"\"\"The purpose of this function is to mark a selectedListItem object as\n hidden and not to be printed unless specified, apart from selecting items.\n :param todo_list: the list of ListItem objects to modify\n :returns nothing\"\"\"\n item = select_item(todo_list,\n \"\"\"Please enter the item number you wish to Mark Completed and 
hide from the list\nEnter a negative number or zero to cancel\"\"\"\n )\n if item >= 0:\n todo_list[item].visible = False\n return\n\n\ndef edit_item(todo_list):\n \"\"\"The purpose of this function is to edit a ListItem object in the\n list of ListItem objects, changing either the name or priority\n :param todo_list: the list of ListItem objects that gets one object\n modified\n :returns nothing\"\"\"\n item = select_item(todo_list,\n \"\"\"Please enter the item number you wish to edit\nEnter a negative number or zero to cancel\"\"\"\n )\n if item >= 0:\n while True:\n value = clean_input(\n \"\"\"Which value would you like to edit? Enter:\n1 for the Item Text (Currently: {0})\n2 for the Item Priority (Currently: {1})\n3 to Cancel and Exit\"\"\"\n .format(todo_list[item].text, str(todo_list[item].priority)))\n if value == 1:\n print('The Current Text is: {0}'.format(todo_list[item].text))\n todo_list[item].text = input('New Text:\\n')\n elif value == 2:\n print('The Current Priority is: {0}'.format(str(todo_list[\n item].priority)))\n todo_list[item].priority = check_priority_overlap(int(\n clean_input('New Priority:')), todo_list)\n elif value == 3:\n break\n else:\n print('Invalid Input - Please Try Again')\n return\n\n\ndef check_list_status(todo_list):\n \"\"\"The purpose of this function is to check whether there are visible\n items in the list, the entire list is hidden, or the list contains no\n more ListItem objects\n :param todo_list: the list of ListItem objects to check\n :returns which condition using integer codes\"\"\"\n if len(todo_list) == 0:\n state = 1\n else:\n state = 2\n for item_index in range(len(todo_list)):\n if todo_list[item_index].visible:\n state = 0\n return state\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass ListItem:\n \"\"\"A custom object that stores four pieces of data representing each\n entry in the todo list. Contains the text of the todo list entry,\n the priority of the entry, the group code (NYI), and the visibility of\n the entry\"\"\"\n\n def __init__(self, text, priority, group, visible):\n self.text = text\n self.priority = priority\n self.group = group\n self.visible = visible\n\n\ndef concept_demonstration():\n \"\"\"The purpose of this function is to prompt the user for numbers and\n strings and manipulate them to demonstrate programming fluency with\n string and integer operations.\n :returns nothing\"\"\"\n number = clean_input('Please enter a positive number')\n number2 = clean_input('Please enter a number')\n while number2 == 0:\n print('Error: Cannot Divide by 0')\n number2 = clean_input('Please enter a different number')\n color = input('Please enter a color\\n')\n thing = input('Please enter a thing\\n')\n thing2 = thing + ' '\n location = input('Please enter a location\\n')\n print(str(number) + ' raised to the power of ' + str(number2) + ' is ' +\n str(number ** number2))\n print('{0} multiplied by {1} is {2}'.format(str(number), str(number2),\n str(number * number2)))\n print('{0} divided by {1} is {2}'.format(str(number), str(number2), str\n (number / number2)))\n print('The remainder from dividing {0} by {1} is {2}'.format(str(\n number), str(number2), str(number % number2)))\n print('{0} divided by {1} rounded down is {2}'.format(str(number), str(\n number2), str(number // number2)))\n print('{0} plus {1} is {2}'.format(str(number), str(number2), str(\n number + number2)))\n print('{0} minus {1} is {2}'.format(str(number), str(number2), str(\n number - number2)))\n if number > 1:\n print(\"The {0} at {1} yelled '{2}'\".format(color + ' ' + thing,\n location, thing2 * int(number - 1) + thing))\n elif number < 0:\n print(\n \"\"\"The {0} at {1} yelled '{2}'\nYou entered a negative number when a positive 
number was requested, so you made the {3} mute. Good Job.\"\"\"\n .format(color + ' ' + thing, location, thing2 * int(number), thing)\n )\n else:\n print(\"The {0} at {1} yelled '{2}'\".format(color + ' ' + thing,\n location, thing * int(number)))\n return\n\n\ndef cascade_list(priority_to_cascade_from, todo_list):\n \"\"\"The purpose of this function is to decrement the priority number of\n every item in the provided todo list greater than the priority number\n provided.\n :param priority_to_cascade_from: the number that is inserted by moving\n everything equal to or greater than up by one\n :param todo_list: the list of ListItem objects to check in\"\"\"\n for item in todo_list:\n if item.priority >= priority_to_cascade_from:\n item.priority += 1\n return\n\n\ndef check_priority_overlap(priority_to_check, todo_list):\n \"\"\"The purpose of this function is to check if the user's priority\n number input overlaps with a priority number already in the list,\n and if it does, prompts the user whether they want to keep it, change\n it, or move everything in the list that has a larger priority value up\n by one.\n :param priority_to_check: the number to check for overlap with\n :param todo_list: the list of ListItem objects to check in\n :returns the priority value, either changed or the original input\"\"\"\n overlap = False\n for item in todo_list:\n if item.priority == priority_to_check:\n overlap = True\n if overlap:\n answer = 0\n while answer > 3 or answer < 1:\n answer = clean_input(\n \"\"\"The priority number you entered overlaps with another entry's priority. 
Enter:\n1 to change priority number\n2 to leave as is with overlap\n3 to push all priority numbers below this entry down by 1\"\"\"\n )\n if answer > 3 or answer < 1:\n print('Invalid Option Selected\\nPlease Try Again')\n if answer == 1:\n priority_to_check = check_priority_overlap(int(clean_input(\n 'New Priority:')), todo_list)\n elif answer == 3:\n cascade_list(priority_to_check, todo_list)\n return priority_to_check\n\n\ndef sorting(list_object):\n \"\"\"The purpose of this function is to take in a ListItem custom object\n and return the priority value stored in it to be used in sorting.\n :param list_object: one ListItem object\n :returns the priority value stored in the ListItem object\"\"\"\n return list_object.priority\n\n\n<mask token>\n\n\ndef clean_input(prompt='Error'):\n \"\"\"The purpose of this function is to prompt the user for a numerical\n input and only accept a numerical input, rejects no input and text input.\n :param prompt: the prompt the user sees, default is Error\n :returns the user input as a float\"\"\"\n text = True\n phrase = '0'\n while text:\n phrase = input(prompt + '\\n')\n try:\n float(phrase)\n text = False\n except ValueError:\n print('Error: Non-Numeric Entry Detected')\n return float(phrase)\n\n\ndef load_from_file(save_location):\n \"\"\"The purpose of this function is to open the .txt save file and read\n the contents into memory in the form of a list of custom ListItem\n objects.\n :param save_location: the location the save file is stored in\n :returns a list of ListItem objects that is populated with the data from\n the save file\"\"\"\n data_file_r = open(save_location, 'r')\n list_item = ['Text', -1, 2, True]\n todo = []\n temp = 1\n line_counter = 1\n try:\n for item in data_file_r:\n if (line_counter - 1) % 5 != 0 and line_counter > 0:\n cleaned_item = ''\n for character_index in range(len(item)):\n if character_index != len(item) - 1:\n cleaned_item += item[character_index]\n if temp == 1:\n list_item[0] = 
cleaned_item\n temp = 2\n elif temp == 2:\n list_item[1] = int(cleaned_item)\n temp = 3\n elif temp == 3:\n list_item[2] = int(cleaned_item)\n temp = 4\n elif temp == 4:\n if cleaned_item == 'False':\n list_item[3] = False\n else:\n list_item[3] = True\n todo.insert(0, ListItem(list_item[0], list_item[1],\n list_item[2], list_item[3]))\n temp = 1\n else:\n temp = 1\n line_counter += 1\n except ValueError:\n print('An error has occurred trying to load the file')\n result = int(clean_input(\n 'Please enter a 2 to overwrite the current save file and start over or any other number to exit the program'\n ))\n if result == 2:\n key = random.randint(2, 9)\n if key == 2:\n key = 1\n result2 = int(clean_input(\n \"\"\"Are you sure you want to delete all of your saved data\nEnter {0} to proceed, or anything else to cancel\"\"\"\n .format(str(key))))\n if result2 == key:\n data_file_w = open('C:Item_List.txt', 'w')\n data_file_w.close()\n todo = []\n print('Save Data Erased')\n return todo\n else:\n print('Program Exiting')\n quit(1)\n else:\n print('Program Exiting')\n quit(1)\n data_file_r.close()\n return todo\n\n\ndef save_list(todo_list, save_location):\n \"\"\"The purpose of this function is to save a list of ListItem objects to a\n specified location in a .txt file with the first line of the document\n being an explanation of the file format being used.\n :param todo_list: the list of ListItem objects to save to the save file\n :param save_location: the location to create or overwrite the save file\n :returns nothing\"\"\"\n data_file_w = open(save_location, 'w')\n data_file_w.write(\n \"\"\"Warning: The Todo-List Program will not be able to load this save file if it is incorrectly modified. Modify at your own risk. 
The structure is Entry Text, Entry Priority as a number, Entry Group as a number (Not Yet Utilized, but necessary), and Entry Visibility as a boolean, each on a separate line, a single line gap in between, and the very first line is skipped\n\"\"\"\n )\n for item in todo_list:\n data_file_w.write('{0}\\n{1}\\n{2}\\n{3}\\n\\n'.format(item.text, str(\n item.priority), str(item.group), str(item.visible)))\n data_file_w.close()\n return\n\n\ndef add_item(todo_list):\n \"\"\"The purpose of this function is to prompt the user for the two\n fields of necessary information to make a new entry in the todo list,\n the item name and priority, checking if the priority overlaps with an\n existing entry in the todo list.\n :param todo_list: the list of ListItem objects to add a new ListItem\n object to\n :returns nothing\"\"\"\n text = input('Please enter the name of the new item\\n')\n priority = check_priority_overlap(int(clean_input(\n 'Please enter the priority of this item')), todo_list)\n group = 0\n visible = True\n todo_list.insert(0, ListItem(text, priority, group, visible))\n return\n\n\ndef select_item(todo_list, prompt='Error'):\n \"\"\"The purpose of this function is to display a list of all items in the\n todo list and number each individually to allow the user to select an\n item to modify or delete. 
The available numbers may\n skip some if some items are hidden\n :param todo_list: the list of ListItem objects to display\n :param prompt: the prompt to display to the user, default is Error\n :returns the user selected item's index in a computer friendly form (\n starting at 0 instead of 1)\"\"\"\n valid = False\n index = 0\n while not valid:\n counter = 1\n for item in todo_list:\n if item.visible:\n print(counter, item.text, sep='\\t')\n else:\n print(counter, '~ {0} ~'.format(item.text), sep='\\t')\n counter += 1\n index = int(clean_input(prompt))\n if index < counter:\n valid = True\n else:\n print('Invalid Input: Number is too big')\n return index - 1\n\n\ndef remove_item(todo_list):\n \"\"\"The purpose of this function is to delete a ListItem object from a\n list of ListItem objects by prompting the user for the index and\n verifying they want to delete the item.\n :param todo_list: the list of ListItem objects from which to remove\n one object\n :returns nothing\"\"\"\n item = select_item(todo_list,\n \"\"\"Please enter the item number you wish to remove\nEnter a negative number or zero to cancel\"\"\"\n )\n if item >= 0:\n todo_list.pop(item)\n return\n\n\ndef mark_complete(todo_list):\n \"\"\"The purpose of this function is to mark a selectedListItem object as\n hidden and not to be printed unless specified, apart from selecting items.\n :param todo_list: the list of ListItem objects to modify\n :returns nothing\"\"\"\n item = select_item(todo_list,\n \"\"\"Please enter the item number you wish to Mark Completed and hide from the list\nEnter a negative number or zero to cancel\"\"\"\n )\n if item >= 0:\n todo_list[item].visible = False\n return\n\n\ndef edit_item(todo_list):\n \"\"\"The purpose of this function is to edit a ListItem object in the\n list of ListItem objects, changing either the name or priority\n :param todo_list: the list of ListItem objects that gets one object\n modified\n :returns nothing\"\"\"\n item = select_item(todo_list,\n 
\"\"\"Please enter the item number you wish to edit\nEnter a negative number or zero to cancel\"\"\"\n )\n if item >= 0:\n while True:\n value = clean_input(\n \"\"\"Which value would you like to edit? Enter:\n1 for the Item Text (Currently: {0})\n2 for the Item Priority (Currently: {1})\n3 to Cancel and Exit\"\"\"\n .format(todo_list[item].text, str(todo_list[item].priority)))\n if value == 1:\n print('The Current Text is: {0}'.format(todo_list[item].text))\n todo_list[item].text = input('New Text:\\n')\n elif value == 2:\n print('The Current Priority is: {0}'.format(str(todo_list[\n item].priority)))\n todo_list[item].priority = check_priority_overlap(int(\n clean_input('New Priority:')), todo_list)\n elif value == 3:\n break\n else:\n print('Invalid Input - Please Try Again')\n return\n\n\ndef check_list_status(todo_list):\n \"\"\"The purpose of this function is to check whether there are visible\n items in the list, the entire list is hidden, or the list contains no\n more ListItem objects\n :param todo_list: the list of ListItem objects to check\n :returns which condition using integer codes\"\"\"\n if len(todo_list) == 0:\n state = 1\n else:\n state = 2\n for item_index in range(len(todo_list)):\n if todo_list[item_index].visible:\n state = 0\n return state\n\n\ndef menu_loop(todo_list, save_file_location):\n \"\"\"The purpose of this function is to repeatedly display the todo list\n and user prompts menu until the program is closed\n :param todo_list: the list of ListItem objects to display or modify\n :param save_file_location: where the .txt save file is located for saving\n :returns nothing\"\"\"\n show_hidden = False\n selection = 0\n invalid_input = False\n while selection != 6:\n if invalid_input:\n invalid_input = False\n else:\n print_list(save_file_location, todo_list, True, show_hidden)\n divider(137 + 17)\n list_status = check_list_status(todo_list)\n if list_status == 0:\n selection = int(clean_input(\n \"\"\"Please enter: 1 for Add Item, 2 for 
Remove Item, 3 for Edit Item, 4 for Mark Item Complete, 5 for Toggle Hidden, and 6 for Exit, 7 for Concept Demonstration\n\"\"\"\n ))\n elif list_status == 1:\n selection = int(clean_input(\n \"\"\"Please enter: 1 for Add Item, and 6 for Exit, 7 for Concept Demonstration\n\"\"\"\n ))\n else:\n selection = int(clean_input(\n \"\"\"Please enter: 1 for Add Item, 5 for Toggle Hidden, and 6 for Exit, 7 for Concept Demonstration\n\"\"\"\n ))\n print('')\n if selection == 1:\n add_item(todo_list)\n elif selection == 2:\n if list_status == 0:\n remove_item(todo_list)\n elif list_status == 2:\n print(\n 'Invalid Command: The Todo List has no visible items to remove'\n )\n else:\n print('Invalid Command: The Todo List has no items to remove')\n elif selection == 3:\n if list_status == 0:\n edit_item(todo_list)\n elif list_status == 2:\n print(\n 'Invalid Command: The Todo List has no visible items to edit'\n )\n else:\n print('Invalid Command: The Todo List has no items to edit')\n elif selection == 4:\n if list_status == 0:\n mark_complete(todo_list)\n elif list_status == 2:\n print(\n 'Invalid Command: The Todo List has no visible items to mark complete'\n )\n else:\n print(\n 'Invalid Command: The Todo List has no items to mark complete'\n )\n elif selection == 5:\n if list_status == 0 or list_status == 2:\n if show_hidden:\n print('No longer showing hidden items')\n show_hidden = False\n else:\n print('Now showing hidden items')\n show_hidden = True\n else:\n print(\n 'Invalid Command: The Todo List has no items to show or hide'\n )\n elif selection == 6:\n print('Now Closing')\n elif selection == 7:\n concept_demonstration()\n else:\n invalid_input = True\n print('Invalid Input\\nPlease Try Again')\n\n\ndef main():\n \"\"\"The purpose of this function is to ensure the save file exists at the\n specified save file location, load the save file into memory, display a\n welcome message with a divider, then start the menu loop until the\n program is closed\n :returns 
nothing\"\"\"\n save_file_location = 'Item_List.txt'\n data_file_a = open(save_file_location, 'a')\n data_file_a.close()\n loaded_list = load_from_file(save_file_location)\n print('Welcome to the To-Do List - Version: 0.1.2')\n divider(42)\n menu_loop(loaded_list, save_file_location)\n\n\n<mask token>\n",
"step-5": "\"\"\"This program displays a customizable list of items by priority value,\r\nwith priority 1 being the highest. Allows the user to add, edit,\r\nmark complete, show completed (hidden), and remove items. Stores the list of\r\nitems in a .txt file located where this program's main.py file is. All\r\nchanges are automatically saved to the .txt file. Also includes a fun\r\ntechnical knowledge demonstration using numbers and text responses. The\r\nprogram will create a new save file if none exists, and prompts for save\r\nfile overwrite if data cannot be read successfully. Menu navigation is\r\naccomplished through numeric inputs due to the text-only interface and\r\ntedium of typing out each word accurately and repeatedly.\"\"\"\r\n__author__ = 'Jordan Kooyman'\r\n\r\n# 1/26/21 - 4/15/2021 To-Do List Program - Integration Project for COP 1500\r\n# Spring 2021\r\n# Configurable settings saved to a separate file (?)\r\n# Ability to load a different data or config file (?)\r\n# Color code items by group (?)\r\n# Add a basic calculator to meet math (and string?) command requirements (?)\r\n\r\n# TODO: Implement a group system that shows all groups combined, just one\r\n# group, or all categorized by group, and group names - be able to change\r\n# group names (new function) - all functions support groups (individual or\r\n# combined)\r\n\r\nimport random\r\n\r\n\r\n# Random number generation used as random verification number when\r\n# overwriting the save file in the event of a failure to load from the save\r\n# file\r\n\r\n\r\nclass ListItem: # Create a class object that will store the data for each\r\n # entry in the list (custom variable)\r\n \"\"\"A custom object that stores four pieces of data representing each\r\n entry in the todo list. 
Contains the text of the todo list entry,\r\n the priority of the entry, the group code (NYI), and the visibility of\r\n the entry\"\"\"\r\n\r\n def __init__(self, text, priority, group, visible): # From w3schools.com\r\n self.text = text\r\n self.priority = priority\r\n self.group = group\r\n self.visible = visible\r\n\r\n\r\ndef concept_demonstration():\r\n \"\"\"The purpose of this function is to prompt the user for numbers and\r\n strings and manipulate them to demonstrate programming fluency with\r\n string and integer operations.\r\n :returns nothing\"\"\"\r\n number = clean_input(\"Please enter a positive number\")\r\n number2 = clean_input(\"Please enter a number\")\r\n while number2 == 0: # Rejects a 0 if it is input as the second number\r\n print(\"Error: Cannot Divide by 0\")\r\n number2 = clean_input(\"Please enter a different number\")\r\n color = input(\"Please enter a color\\n\")\r\n thing = input(\"Please enter a thing\\n\")\r\n thing2 = thing + ' ' # Adding space so that when thing is repeated, it\r\n # has a space in between\r\n # Raise the first number to the second number\r\n location = input(\"Please enter a location\\n\")\r\n print(str(number) + \" raised to the power of \" + str(number2) + \" is \" +\r\n str(number ** number2))\r\n # Multiply the two numbers\r\n print(\"{0} multiplied by {1} is {2}\".format(str(number), str(number2),\r\n str(number * number2)))\r\n # Divide the first number by the second number\r\n print(\"{0} divided by {1} is {2}\".format(str(number), str(number2),\r\n str(number / number2)))\r\n # Find the modulus of the two numbers\r\n print(\"The remainder from dividing {0} by {1} is {2}\".format(str(number),\r\n str(number2),\r\n str(number %\r\n number2))\r\n )\r\n # Divide the first number by the second and round it down (floor it)\r\n print(\"{0} divided by {1} rounded down is {2}\".format(str(number),\r\n str(number2),\r\n str(number // number2\r\n )))\r\n # Add the two numbers\r\n print(\"{0} plus {1} is 
{2}\".format(str(number), str(number2),\r\n str(number + number2)))\r\n # Subtract the second number from the first number\r\n print(\"{0} minus {1} is {2}\".format(str(number), str(number2),\r\n str(number - number2)))\r\n if number > 1: # if the first number entered is greater than 1\r\n print(\"The {0} at {1} yelled '{2}'\".format(color + ' ' + thing,\r\n location, thing2 *\r\n int(number - 1) + thing))\r\n # Combine two strings with + (no added space), repeat a string x\r\n # number of times with * (must use an integer) (I have the minus 1\r\n # and + thing to get the spacing to look proper and still repeat\r\n # number amount of times) -if a negative number is used when\r\n # multiplying a string, it does nothing (but does not crash) - but\r\n # it is still handled in the other statement with some added user\r\n # shaming\r\n elif number < 0: # if the first number entered is negative\r\n print(\"The {0} at {1} yelled '{2}'\\nYou entered a negative number \"\r\n \"when a positive number was requested, so you made the {3} \"\r\n \"mute. 
Good Job.\".format(color + ' ' + thing, location, thing2 *\r\n int(number), thing))\r\n # Same as above, expect that it will print nothing in the yelled\r\n # section if the first number entered is negative\r\n else: # if the first number entered is 0 or 1 (because of the int()\r\n # function removing a decimal)\r\n print(\"The {0} at {1} yelled '{2}'\".format(color + ' ' + thing,\r\n location, thing *\r\n int(number)))\r\n # this is to prevent errant spaces or showing the phrase too many times\r\n return\r\n\r\n\r\ndef cascade_list(priority_to_cascade_from, todo_list):\r\n \"\"\"The purpose of this function is to decrement the priority number of\r\n every item in the provided todo list greater than the priority number\r\n provided.\r\n :param priority_to_cascade_from: the number that is inserted by moving\r\n everything equal to or greater than up by one\r\n :param todo_list: the list of ListItem objects to check in\"\"\"\r\n for item in todo_list:\r\n if item.priority >= priority_to_cascade_from:\r\n item.priority += 1\r\n return\r\n\r\n\r\ndef check_priority_overlap(priority_to_check, todo_list):\r\n \"\"\"The purpose of this function is to check if the user's priority\r\n number input overlaps with a priority number already in the list,\r\n and if it does, prompts the user whether they want to keep it, change\r\n it, or move everything in the list that has a larger priority value up\r\n by one.\r\n :param priority_to_check: the number to check for overlap with\r\n :param todo_list: the list of ListItem objects to check in\r\n :returns the priority value, either changed or the original input\"\"\"\r\n overlap = False\r\n for item in todo_list:\r\n if item.priority == priority_to_check:\r\n overlap = True\r\n if overlap:\r\n answer = 0\r\n while answer > 3 or answer < 1:\r\n answer = clean_input(\"The priority number you entered overlaps \"\r\n \"with another entry's priority. 
Enter:\\n1 to \"\r\n \"change priority number\\n2 to leave as is \"\r\n \"with overlap\\n3 to push all priority numbers\"\r\n \" below this entry down by 1\")\r\n if answer > 3 or answer < 1:\r\n print(\"Invalid Option Selected\\nPlease Try Again\")\r\n if answer == 1:\r\n priority_to_check = check_priority_overlap(\r\n int(clean_input(\"New Priority:\")), todo_list)\r\n # change the priority value input\r\n elif answer == 3:\r\n cascade_list(priority_to_check, todo_list)\r\n return priority_to_check\r\n\r\n\r\ndef sorting(list_object): # Takes in a ListItem object and returns the\r\n # priority value - from w3schools.com\r\n \"\"\"The purpose of this function is to take in a ListItem custom object\r\n and return the priority value stored in it to be used in sorting.\r\n :param list_object: one ListItem object\r\n :returns the priority value stored in the ListItem object\"\"\"\r\n return list_object.priority\r\n\r\n\r\ndef print_list(save_file_location, my_list, to_save=False, show_hidden=False):\r\n # Prints out the To-Do list from the common list variable and saves list\r\n # to the .txt file\r\n \"\"\"The purpose of this function is to take in the location of the save\r\n file, the todo list variable, whether or not to save, and whether or not\r\n to show hidden and print out the todo list variable, skipping items\r\n marked as hidden unless it is told to show hidden, and saving the todo\r\n list to the file in the save file location if it is told to save.\r\n :param save_file_location: the file path to get to the .txt save file\r\n :param my_list: the list of ListItem objects to check in\r\n :param to_save: whether or not to save the list of items to the file,\r\n default\r\n is false\r\n :param show_hidden: whether or not to display the hidden list items,\r\n default\r\n it false\r\n :returns nothing\"\"\"\r\n my_list.sort(key=sorting) # Uses a custom function to be able to get the\r\n # right value to sort by\r\n print(\"To-Do:\")\r\n for item_index in 
my_list: # The range needs to be the length of the list\r\n # being printed\r\n if item_index.visible and not show_hidden: # Only print visible items\r\n # if show hidden is false\r\n print(item_index.priority, item_index.text, sep='.\\t')\r\n elif show_hidden: # Print everything is show hidden is trues\r\n if item_index.visible:\r\n print(item_index.priority, item_index.text, sep='.\\t')\r\n else:\r\n print(\"{0}.~\\t{1}\".format(item_index.priority, item_index.text)\r\n )\r\n # Indicate hidden items\r\n # Printing the item priority with a dot, then the item, with a tab\r\n # separating them\r\n if to_save:\r\n save_list(my_list, save_file_location)\r\n return\r\n\r\n\r\ndef divider(size=100): # Draws a dividing line to go between sections\r\n # (default 100 characters long)\r\n \"\"\"The purpose of this function is to print a dashed line across the\r\n screen with a specified length.\r\n :param size: how many characters long the line should be, default is 100\r\n :returns nothing\"\"\"\r\n for i in range(size):\r\n print('-', end='') # Prints out a single dash, no newline afterwards\r\n # (the end= sets the last character to blank\r\n print('') # Print out a newline (using the default ending of a print\r\n # statement being a newline\r\n return\r\n\r\n\r\ndef clean_input(prompt='Error'): # A special input function that will reject a\r\n # user's input of text when a number is requested -- if no prompt is\r\n # specified in the program, it will display \"Error\"\r\n \"\"\"The purpose of this function is to prompt the user for a numerical\r\n input and only accept a numerical input, rejects no input and text input.\r\n :param prompt: the prompt the user sees, default is Error\r\n :returns the user input as a float\"\"\"\r\n text = True\r\n phrase = '0'\r\n while text:\r\n phrase = input(prompt + '\\n')\r\n try: # Adapted from an example in the ThinkPython textbook (15.7) -\r\n # Checks whether the input is a number, positive or negative. 
If\r\n # not, rejects the input and user gets to try again\r\n float(phrase)\r\n text = False\r\n except ValueError:\r\n print(\"Error: Non-Numeric Entry Detected\")\r\n # if phrase.isnumeric(): # Checks for a positive number (negative\r\n # rejected as well as text) - replaced with superior form from textbook\r\n # example\r\n # return float(phrase) # Return the number the user entered\r\n # else:\r\n # print(\"Error: Non-Numeric Entry Detected\")\r\n return float(phrase) # Return the number the user entered\r\n\r\n\r\ndef load_from_file(save_location): # This is a function for readability -\r\n # opens txt file in read mode and loads it\r\n \"\"\"The purpose of this function is to open the .txt save file and read\r\n the contents into memory in the form of a list of custom ListItem\r\n objects.\r\n :param save_location: the location the save file is stored in\r\n :returns a list of ListItem objects that is populated with the data from\r\n the save file\"\"\"\r\n # into an array (list) of ListItem variables\r\n data_file_r = open(save_location, \"r\") # Open txt file in read mode\r\n list_item = [\"Text\", -1, 2, True] # Item, Item Priority, group, is visible\r\n todo = [] # make a list of lists\r\n temp = 1 # Temporary counter variable to reconstruct lists from .txt file\r\n line_counter = 1\r\n try:\r\n for item in data_file_r: # loop through each line in the file, one at\r\n # a time - from w3schools.com\r\n if (line_counter - 1) % 5 != 0 and line_counter > 0:\r\n cleaned_item = \"\"\r\n for character_index in range(len(\r\n item)): # Loop through each character in the extracted\r\n # string\r\n if character_index != len(\r\n item) - 1: # if it is not the last character, add\r\n # it to the cleaned string\r\n cleaned_item += item[character_index]\r\n # Add every character to a\r\n # but \\n\r\n if temp == 1: # Item Text\r\n list_item[0] = cleaned_item\r\n temp = 2\r\n elif temp == 2: # Item Priority\r\n list_item[1] = int(cleaned_item)\r\n temp = 3\r\n elif 
temp == 3: # Item Group\r\n list_item[2] = int(cleaned_item)\r\n temp = 4\r\n elif temp == 4: # Is Visible\r\n if cleaned_item == \"False\":\r\n list_item[3] = False\r\n else: # Assume the item is visible if the text is not\r\n # False\r\n list_item[3] = True\r\n todo.insert(0, ListItem(list_item[0], list_item[1],\r\n list_item[2], list_item[3]))\r\n temp = 1\r\n else: # If some error occurred and a condition outside of the\r\n # possible four is met, restart\r\n temp = 1\r\n line_counter += 1\r\n except ValueError:\r\n print(\"An error has occurred trying to load the file\")\r\n result = int(clean_input(\r\n \"Please enter a 2 to overwrite the current save file and start \"\r\n \"over or any other number to exit the program\"))\r\n if result == 2:\r\n key = random.randint(2, 9) # Generate a random integer between 2\r\n # and 9 to be used as a second dynamic check\r\n if key == 2:\r\n key = 1 # If the random number is 2, set it to one so that\r\n # the same number (2) cannot be used as the verification number\r\n result2 = int(clean_input(\"Are you sure you want to delete all \"\r\n \"of your saved data\\nEnter {0} to \"\r\n \"proceed, or anything else to \"\r\n \"cancel\".format(str(key))))\r\n if result2 == key:\r\n data_file_w = open(\"C:Item_List.txt\", \"w\")\r\n data_file_w.close()\r\n todo = []\r\n print(\"Save Data Erased\")\r\n return todo # Return an empty list if file load failed\r\n else:\r\n print(\"Program Exiting\")\r\n quit(1)\r\n else:\r\n print(\"Program Exiting\")\r\n quit(1) # Exit the program with the exit code of 1\r\n data_file_r.close()\r\n # All the list functions above referenced from w3schools.com What is\r\n # happening above: Opening the file, initializing a list to hold all\r\n # four pieces of data, then after pulling the data from the file and\r\n # storing in the list, it is copied (not referenced) into my main list\r\n # of ListItem objects\r\n return todo\r\n\r\n\r\ndef save_list(todo_list, save_location):\r\n \"\"\"The purpose of 
this function is to save a list of ListItem objects to a\r\n specified location in a .txt file with the first line of the document\r\n being an explanation of the file format being used.\r\n :param todo_list: the list of ListItem objects to save to the save file\r\n :param save_location: the location to create or overwrite the save file\r\n :returns nothing\"\"\"\r\n data_file_w = open(save_location,\r\n \"w\") # open the save file and clear the data from it\r\n data_file_w.write(\"Warning: The Todo-List Program will not be able to \"\r\n \"load this save file if it is incorrectly modified. \"\r\n \"Modify at your own risk. The structure is Entry \"\r\n \"Text, Entry Priority as a number, Entry Group as a \"\r\n \"number (Not Yet Utilized, but necessary), and Entry \"\r\n \"Visibility as a boolean, each on a separate line, a \"\r\n \"single line gap in between, and the \"\r\n \"very first line is skipped\\n\")\r\n for item in todo_list:\r\n data_file_w.write(\"{0}\\n{1}\\n{2}\\n{3}\\n\\n\".format(item.text,\r\n str(item.priority),\r\n str(item.group),\r\n str(item.visible)))\r\n data_file_w.close()\r\n return\r\n\r\n\r\ndef add_item(todo_list):\r\n \"\"\"The purpose of this function is to prompt the user for the two\r\n fields of necessary information to make a new entry in the todo list,\r\n the item name and priority, checking if the priority overlaps with an\r\n existing entry in the todo list.\r\n :param todo_list: the list of ListItem objects to add a new ListItem\r\n object to\r\n :returns nothing\"\"\"\r\n text = input(\"Please enter the name of the new item\\n\")\r\n priority = check_priority_overlap(\r\n int(clean_input(\"Please enter the priority of this item\")), todo_list)\r\n # group = int(clean_input(\"Please enter the group number of this item\"))\r\n group = 0 # Set the group value to zero, group system NYI\r\n visible = True\r\n todo_list.insert(0, ListItem(text, priority, group, visible)) # Join\r\n # the inputs to be added to the overall list\r\n 
return\r\n\r\n\r\ndef select_item(todo_list, prompt='Error'): # Ask the user\r\n # which item from the list is to be modified\r\n \"\"\"The purpose of this function is to display a list of all items in the\r\n todo list and number each individually to allow the user to select an\r\n item to modify or delete. The available numbers may\r\n skip some if some items are hidden\r\n :param todo_list: the list of ListItem objects to display\r\n :param prompt: the prompt to display to the user, default is Error\r\n :returns the user selected item's index in a computer friendly form (\r\n starting at 0 instead of 1)\"\"\"\r\n valid = False\r\n index = 0\r\n while not valid:\r\n counter = 1 # counter for index printing\r\n for item in todo_list: # The range needs to be the length of the list\r\n # being printed\r\n if item.visible:\r\n print(counter, item.text, sep='\\t')\r\n else:\r\n print(counter, \"~ {0} ~\".format(item.text), sep='\\t')\r\n counter += 1\r\n # Printing the item number, then the item, with a tab separating\r\n # them\r\n index = int(clean_input(prompt))\r\n if index < counter:\r\n valid = True\r\n else:\r\n print(\"Invalid Input: Number is too big\")\r\n return index - 1\r\n\r\n\r\ndef remove_item(todo_list):\r\n \"\"\"The purpose of this function is to delete a ListItem object from a\r\n list of ListItem objects by prompting the user for the index and\r\n verifying they want to delete the item.\r\n :param todo_list: the list of ListItem objects from which to remove\r\n one object\r\n :returns nothing\"\"\"\r\n item = select_item(todo_list, \"Please enter the item number you wish to \"\r\n \"remove\\nEnter a negative number or zero \"\r\n \"to cancel\")\r\n if item >= 0: # 0, not 1 because the index returned is shifted to be\r\n # computer friendly\r\n todo_list.pop(item)\r\n return\r\n\r\n\r\ndef mark_complete(todo_list):\r\n \"\"\"The purpose of this function is to mark a selectedListItem object as\r\n hidden and not to be printed unless specified, apart 
from selecting items.\r\n :param todo_list: the list of ListItem objects to modify\r\n :returns nothing\"\"\"\r\n item = select_item(todo_list, \"Please enter the item number you wish to \"\r\n \"Mark Completed and hide from the \"\r\n \"list\\nEnter a negative number or zero to \"\r\n \"cancel\")\r\n if item >= 0:\r\n todo_list[item].visible = False\r\n return\r\n\r\n\r\ndef edit_item(todo_list):\r\n \"\"\"The purpose of this function is to edit a ListItem object in the\r\n list of ListItem objects, changing either the name or priority\r\n :param todo_list: the list of ListItem objects that gets one object\r\n modified\r\n :returns nothing\"\"\"\r\n item = select_item(todo_list, \"Please enter the item number you wish to \"\r\n \"edit\\nEnter a negative number or zero to \"\r\n \"cancel\")\r\n if item >= 0:\r\n while True:\r\n value = clean_input(\"Which value would you like to edit? Enter:\\n1\"\r\n \" for the Item Text (Currently: {0})\\n2 for \"\r\n \"the Item Priority (Currently: {1})\\n3 to \"\r\n \"Cancel and Exit\".format(todo_list[item].text,\r\n str(todo_list[item].\r\n priority)))\r\n if value == 1: # Item Text Change\r\n print(\"The Current Text is: {0}\".format(todo_list[item].text))\r\n todo_list[item].text = input(\"New Text:\\n\")\r\n elif value == 2: # Item Priority Change\r\n print(\"The Current Priority is: {0}\".format(str(todo_list[item]\r\n .priority)))\r\n todo_list[item].priority = check_priority_overlap(\r\n int(clean_input(\"New Priority:\")), todo_list)\r\n # elif value == 3: # Item Group Change\r\n # print(f\"The Current Group is: {todo_list[item].group}\")\r\n # todo_list[item].group = int(clean_input(\"New Group Number:\"))\r\n elif value == 3: # Exit Changing Menu\r\n break\r\n else:\r\n print(\"Invalid Input - Please Try Again\")\r\n return\r\n\r\n\r\ndef check_list_status(todo_list): # Checks if the list is completely hidden\r\n # (2), completely empty (1), or neither (0)\r\n \"\"\"The purpose of this function is to check whether 
there are visible\r\n items in the list, the entire list is hidden, or the list contains no\r\n more ListItem objects\r\n :param todo_list: the list of ListItem objects to check\r\n :returns which condition using integer codes\"\"\"\r\n if len(todo_list) == 0:\r\n state = 1 # Empty List\r\n else:\r\n state = 2 # Entirely Hidden List\r\n for item_index in range(len(todo_list)):\r\n if todo_list[item_index].visible: # If an item is visible, then\r\n # they are not all hidden\r\n state = 0 # Neither\r\n return state\r\n\r\n\r\ndef menu_loop(todo_list, save_file_location):\r\n \"\"\"The purpose of this function is to repeatedly display the todo list\r\n and user prompts menu until the program is closed\r\n :param todo_list: the list of ListItem objects to display or modify\r\n :param save_file_location: where the .txt save file is located for saving\r\n :returns nothing\"\"\"\r\n show_hidden = False\r\n selection = 0\r\n invalid_input = False\r\n while selection != 6:\r\n if invalid_input:\r\n invalid_input = False\r\n else:\r\n print_list(save_file_location, todo_list, True, show_hidden)\r\n divider(137 + 17) # Length of prompt statement below\r\n list_status = check_list_status(todo_list)\r\n if list_status == 0: # No Issues\r\n selection = int(clean_input(\"Please enter: 1 for Add Item, 2 for \"\r\n \"Remove Item, 3 for Edit Item, \"\r\n \"4 for Mark Item Complete, \"\r\n \"5 for Toggle Hidden, and 6 for \"\r\n \"Exit, 7 for Concept \"\r\n \"Demonstration\\n\"))\r\n elif list_status == 1: # Empty List - No Remove, Edit, Mark, or Toggle\r\n selection = int(clean_input(\"Please enter: 1 for Add Item, and 6 \"\r\n \"for Exit, 7 for Concept \"\r\n \"Demonstration\\n\"))\r\n else: # Entirely Hidden List\r\n selection = int(clean_input(\"Please enter: 1 for Add Item, 5 for \"\r\n \"Toggle Hidden, and 6 for Exit, \"\r\n \"7 for Concept Demonstration\\n\"))\r\n # Uses the clean_input function above to get a number from the\r\n # user, converting it to an int so a decimal 
won't return an\r\n # invalid input in the following steps\r\n print(\"\") # Blank Print statement to add an extra blank line after\r\n # user input before displaying response\r\n if selection == 1: # Add Item - modify the list variable, then save\r\n # to file\r\n add_item(todo_list)\r\n elif selection == 2: # Remove Item - modify the list variable, then\r\n # save to file\r\n if list_status == 0:\r\n remove_item(todo_list)\r\n elif list_status == 2:\r\n print(\"Invalid Command: The Todo List has no visible items \"\r\n \"to remove\")\r\n else:\r\n print(\"Invalid Command: The Todo List has no items to remove\")\r\n elif selection == 3: # Edit Item - modify the list variable, then save\r\n # to file\r\n if list_status == 0:\r\n edit_item(todo_list)\r\n elif list_status == 2:\r\n print(\"Invalid Command: The Todo List has no visible items \"\r\n \"to edit\")\r\n else:\r\n print(\"Invalid Command: The Todo List has no items to edit\")\r\n elif selection == 4: # Mark Item Complete - modify the list variable,\r\n # then save to file\r\n if list_status == 0:\r\n mark_complete(todo_list)\r\n elif list_status == 2:\r\n print(\"Invalid Command: The Todo List has no visible items \"\r\n \"to mark complete\")\r\n else:\r\n print(\"Invalid Command: The Todo List has no items to mark \"\r\n \"complete\")\r\n elif selection == 5: # Show Hidden - modify the list variable, then\r\n # save to file\r\n if list_status == 0 or list_status == 2:\r\n if show_hidden:\r\n print(\"No longer showing hidden items\")\r\n show_hidden = False\r\n else:\r\n print(\"Now showing hidden items\")\r\n show_hidden = True\r\n else:\r\n print(\"Invalid Command: The Todo List has no items to show or \"\r\n \"hide\")\r\n elif selection == 6: # Exit Program\r\n print(\"Now Closing\")\r\n elif selection == 7: # Extra section to demonstrate proficiency with\r\n # topics covered in class - Sprint 1\r\n concept_demonstration()\r\n else:\r\n invalid_input = True\r\n print(\"Invalid Input\\nPlease Try 
Again\")\r\n\r\n\r\ndef main():\r\n \"\"\"The purpose of this function is to ensure the save file exists at the\r\n specified save file location, load the save file into memory, display a\r\n welcome message with a divider, then start the menu loop until the\r\n program is closed\r\n :returns nothing\"\"\"\r\n save_file_location = \"Item_List.txt\"\r\n data_file_a = open(save_file_location, \"a\") # Opens ItemList.txt which\r\n # is accessible in the file variable, in append mode (using this so that\r\n # if the file exists, nothing happens, but if it does not exist, it gets\r\n # created from w3schools.com\r\n data_file_a.close() # Close the file, I now know it exists\r\n loaded_list = load_from_file(save_file_location)\r\n print(\"Welcome to the To-Do List - Version: 0.1.2\")\r\n divider(42) # Length of welcome statement above\r\n menu_loop(loaded_list, save_file_location)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n",
"step-ids": [
8,
11,
12,
18,
24
]
}
|
[
8,
11,
12,
18,
24
] |
<|reserved_special_token_0|>
class PlSqlLexer(Lexer):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PlSqlLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)]
T__0 = 1
A_LETTER = 2
ADD = 3
AFTER = 4
AGENT = 5
AGGREGATE = 6
ALL = 7
ALTER = 8
ANALYZE = 9
AND = 10
ANY = 11
ARRAY = 12
AS = 13
ASSUME = 14
ASSERT = 15
ASC = 16
ASSOCIATE = 17
AT = 18
ATTRIBUTE = 19
AUDIT = 20
AUTHID = 21
AUTO = 22
AUTOMATIC = 23
AUTONOMOUS_TRANSACTION = 24
BATCH = 25
BEFORE = 26
BEGIN = 27
BETWEEN = 28
BFILE = 29
BINARY_DOUBLE = 30
BINARY_FLOAT = 31
BINARY_INTEGER = 32
BLOB = 33
BLOCK = 34
BODY = 35
BOOLEAN = 36
BOTH = 37
BREADTH = 38
BULK = 39
BY = 40
BYTE = 41
C_LETTER = 42
CACHE = 43
CALL = 44
CANONICAL = 45
CASCADE = 46
CASE = 47
CAST = 48
CHAR = 49
CHAR_CS = 50
CHARACTER = 51
CHECK = 52
CHR = 53
CLOB = 54
CLOSE = 55
CLUSTER = 56
COLLECT = 57
COLUMNS = 58
COMMENT = 59
COMMIT = 60
COMMITTED = 61
COMPATIBILITY = 62
COMPILE = 63
COMPOUND = 64
CONNECT = 65
CONNECT_BY_ROOT = 66
CONSTANT = 67
CONSTRAINT = 68
CONSTRAINTS = 69
CONSTRUCTOR = 70
CONTENT = 71
CONTEXT = 72
CONTINUE = 73
CONVERT = 74
CORRUPT_XID = 75
CORRUPT_XID_ALL = 76
COST = 77
COUNT = 78
CREATE = 79
CROSS = 80
CUBE = 81
CURRENT = 82
CURRENT_USER = 83
CURSOR = 84
CUSTOMDATUM = 85
CYCLE = 86
DATA = 87
DATABASE = 88
DATE = 89
DAY = 90
DB_ROLE_CHANGE = 91
DBTIMEZONE = 92
DDL = 93
DEBUG = 94
DEC = 95
DECIMAL = 96
DECLARE = 97
DECOMPOSE = 98
DECREMENT = 99
DEFAULT = 100
DEFAULTS = 101
DEFERRED = 102
DEFINER = 103
DELETE = 104
DEPTH = 105
DESC = 106
DETERMINISTIC = 107
DIMENSION = 108
DISABLE = 109
DISASSOCIATE = 110
DISTINCT = 111
DOCUMENT = 112
DOUBLE = 113
DROP = 114
DSINTERVAL_UNCONSTRAINED = 115
EACH = 116
ELEMENT = 117
ELSE = 118
ELSIF = 119
EMPTY = 120
ENABLE = 121
ENCODING = 122
END = 123
ENTITYESCAPING = 124
ERR = 125
ERRORS = 126
ESCAPE = 127
EVALNAME = 128
EXCEPT = 129
EXCEPTION = 130
EXCEPTION_INIT = 131
EXCEPTIONS = 132
EXCLUDE = 133
EXCLUSIVE = 134
EXECUTE = 135
EXISTS = 136
EXIT = 137
EXPLAIN = 138
EXTERNAL = 139
EXTRACT = 140
FAILURE = 141
FALSE = 142
FETCH = 143
FINAL = 144
FIRST = 145
FIRST_VALUE = 146
FLOAT = 147
FOLLOWING = 148
FOLLOWS = 149
FOR = 150
FORALL = 151
FORCE = 152
FROM = 153
FULL = 154
FUNCTION = 155
GOTO = 156
GRANT = 157
GROUP = 158
GROUPING = 159
HASH = 160
HAVING = 161
HIDE = 162
HOUR = 163
IF = 164
IGNORE = 165
IMMEDIATE = 166
IN = 167
INCLUDE = 168
INCLUDING = 169
INCREMENT = 170
INDENT = 171
INDEX = 172
INDEXED = 173
INDICATOR = 174
INDICES = 175
INFINITE = 176
INLINE = 177
INNER = 178
INOUT = 179
INSERT = 180
INSTANTIABLE = 181
INSTEAD = 182
INT = 183
INTEGER = 184
INTERSECT = 185
INTERVAL = 186
INTO = 187
INVALIDATE = 188
IS = 189
ISOLATION = 190
ITERATE = 191
JAVA = 192
JOIN = 193
KEEP = 194
LANGUAGE = 195
LAST = 196
LAST_VALUE = 197
LEADING = 198
LEFT = 199
LEVEL = 200
LIBRARY = 201
LIKE = 202
LIKE2 = 203
LIKE4 = 204
LIKEC = 205
LIMIT = 206
LOCAL = 207
LOCK = 208
LOCKED = 209
LOG = 210
LOGOFF = 211
LOGON = 212
LONG = 213
LOOP = 214
MAIN = 215
MAP = 216
MATCHED = 217
MAXVALUE = 218
MEASURES = 219
MEMBER = 220
MERGE = 221
MINUS = 222
MINUTE = 223
MINVALUE = 224
MLSLABEL = 225
MODE = 226
MODEL = 227
MODIFY = 228
MONTH = 229
MULTISET = 230
NAME = 231
NAN = 232
NATURAL = 233
NATURALN = 234
NAV = 235
NCHAR = 236
NCHAR_CS = 237
NCLOB = 238
NESTED = 239
NEW = 240
NO = 241
NOAUDIT = 242
NOCACHE = 243
NOCOPY = 244
NOCYCLE = 245
NOENTITYESCAPING = 246
NOMAXVALUE = 247
NOMINVALUE = 248
NONE = 249
NOORDER = 250
NOSCHEMACHECK = 251
NOT = 252
NOWAIT = 253
NULL = 254
NULLS = 255
NUMBER = 256
NUMERIC = 257
NVARCHAR2 = 258
OBJECT = 259
OF = 260
OFF = 261
OID = 262
OLD = 263
ON = 264
ONLY = 265
OPEN = 266
OPTION = 267
OR = 268
ORADATA = 269
ORDER = 270
ORDINALITY = 271
OSERROR = 272
OUT = 273
OUTER = 274
OVER = 275
OVERRIDING = 276
PACKAGE = 277
PARALLEL_ENABLE = 278
PARAMETERS = 279
PARENT = 280
PARTITION = 281
PASSING = 282
PATH = 283
PERCENT_ROWTYPE = 284
PERCENT_TYPE = 285
PIPELINED = 286
PIVOT = 287
PLAN = 288
PLS_INTEGER = 289
POSITIVE = 290
POSITIVEN = 291
PRAGMA = 292
PRECEDING = 293
PRECISION = 294
PRESENT = 295
PRIOR = 296
PROCEDURE = 297
RAISE = 298
RANGE = 299
RAW = 300
READ = 301
REAL = 302
RECORD = 303
REF = 304
REFERENCE = 305
REFERENCING = 306
REJECT = 307
RELIES_ON = 308
RENAME = 309
REPLACE = 310
RESPECT = 311
RESTRICT_REFERENCES = 312
RESULT = 313
RESULT_CACHE = 314
RETURN = 315
RETURNING = 316
REUSE = 317
REVERSE = 318
REVOKE = 319
RIGHT = 320
ROLLBACK = 321
ROLLUP = 322
ROW = 323
ROWID = 324
ROWS = 325
RULES = 326
SAMPLE = 327
SAVE = 328
SAVEPOINT = 329
SCHEMA = 330
SCHEMACHECK = 331
SCN = 332
SEARCH = 333
SECOND = 334
SEED = 335
SEGMENT = 336
SELECT = 337
SELF = 338
SEQUENCE = 339
SEQUENTIAL = 340
SERIALIZABLE = 341
SERIALLY_REUSABLE = 342
SERVERERROR = 343
SESSIONTIMEZONE = 344
SET = 345
SETS = 346
SETTINGS = 347
SHARE = 348
SHOW = 349
SHUTDOWN = 350
SIBLINGS = 351
SIGNTYPE = 352
SIMPLE_INTEGER = 353
SINGLE = 354
SIZE = 355
SKIP_ = 356
SMALLINT = 357
SNAPSHOT = 358
SOME = 359
SPECIFICATION = 360
SQLDATA = 361
SQLERROR = 362
STANDALONE = 363
START = 364
STARTUP = 365
STATEMENT = 366
STATEMENT_ID = 367
STATIC = 368
STATISTICS = 369
STRING = 370
SUBMULTISET = 371
SUBPARTITION = 372
SUBSTITUTABLE = 373
SUBTYPE = 374
SUCCESS = 375
SUSPEND = 376
TABLE = 377
THE = 378
THEN = 379
TIME = 380
TIMESTAMP = 381
TIMESTAMP_LTZ_UNCONSTRAINED = 382
TIMESTAMP_TZ_UNCONSTRAINED = 383
TIMESTAMP_UNCONSTRAINED = 384
TIMEZONE_ABBR = 385
TIMEZONE_HOUR = 386
TIMEZONE_MINUTE = 387
TIMEZONE_REGION = 388
TO = 389
TRAILING = 390
TRANSACTION = 391
TRANSLATE = 392
TREAT = 393
TRIGGER = 394
TRIM = 395
TRUE = 396
TRUNCATE = 397
TYPE = 398
UNBOUNDED = 399
UNDER = 400
UNION = 401
UNIQUE = 402
UNLIMITED = 403
UNPIVOT = 404
UNTIL = 405
UPDATE = 406
UPDATED = 407
UPSERT = 408
UROWID = 409
USE = 410
USING = 411
VALIDATE = 412
VALUE = 413
VALUES = 414
VARCHAR = 415
VARCHAR2 = 416
VARIABLE = 417
VARRAY = 418
VARYING = 419
VERSION = 420
VERSIONS = 421
WAIT = 422
WARNING = 423
WELLFORMED = 424
WHEN = 425
WHENEVER = 426
WHERE = 427
WHILE = 428
WITH = 429
WITHIN = 430
WORK = 431
WRITE = 432
XML = 433
XMLAGG = 434
XMLATTRIBUTES = 435
XMLCAST = 436
XMLCOLATTVAL = 437
XMLELEMENT = 438
XMLEXISTS = 439
XMLFOREST = 440
XMLNAMESPACES = 441
XMLPARSE = 442
XMLPI = 443
XMLQUERY = 444
XMLROOT = 445
XMLSERIALIZE = 446
XMLTABLE = 447
YEAR = 448
YES = 449
YMINTERVAL_UNCONSTRAINED = 450
ZONE = 451
PREDICTION = 452
PREDICTION_BOUNDS = 453
PREDICTION_COST = 454
PREDICTION_DETAILS = 455
PREDICTION_PROBABILITY = 456
PREDICTION_SET = 457
CUME_DIST = 458
DENSE_RANK = 459
LISTAGG = 460
PERCENT_RANK = 461
PERCENTILE_CONT = 462
PERCENTILE_DISC = 463
RANK = 464
AVG = 465
CORR = 466
LAG = 467
LEAD = 468
MAX = 469
MEDIAN = 470
MIN = 471
NTILE = 472
RATIO_TO_REPORT = 473
ROW_NUMBER = 474
SUM = 475
VARIANCE = 476
REGR_ = 477
STDDEV = 478
VAR_ = 479
COVAR_ = 480
NATIONAL_CHAR_STRING_LIT = 481
BIT_STRING_LIT = 482
HEX_STRING_LIT = 483
DOUBLE_PERIOD = 484
PERIOD = 485
UNSIGNED_INTEGER = 486
APPROXIMATE_NUM_LIT = 487
CHAR_STRING = 488
DELIMITED_ID = 489
PERCENT = 490
AMPERSAND = 491
LEFT_PAREN = 492
RIGHT_PAREN = 493
DOUBLE_ASTERISK = 494
ASTERISK = 495
PLUS_SIGN = 496
MINUS_SIGN = 497
COMMA = 498
SOLIDUS = 499
AT_SIGN = 500
ASSIGN_OP = 501
BINDVAR = 502
COLON = 503
SEMICOLON = 504
LESS_THAN_OR_EQUALS_OP = 505
LESS_THAN_OP = 506
GREATER_THAN_OR_EQUALS_OP = 507
NOT_EQUAL_OP = 508
CARRET_OPERATOR_PART = 509
TILDE_OPERATOR_PART = 510
EXCLAMATION_OPERATOR_PART = 511
GREATER_THAN_OP = 512
CONCATENATION_OP = 513
VERTICAL_BAR = 514
EQUALS_OP = 515
LEFT_BRACKET = 516
RIGHT_BRACKET = 517
INTRODUCER = 518
SPACES = 519
SINGLE_LINE_COMMENT = 520
MULTI_LINE_COMMENT = 521
PROMPT = 522
REGULAR_ID = 523
ZV = 524
channelNames = [u'DEFAULT_TOKEN_CHANNEL', u'HIDDEN']
modeNames = ['DEFAULT_MODE']
literalNames = ['<INVALID>', "'..'", "'.'", "'%'", "'&'", "'('", "')'",
"'**'", "'*'", "'+'", "'-'", "','", "'/'", "'@'", "':='", "':'",
"';'", "'<='", "'<'", "'>='", "'^'", "'~'", "'!'", "'>'", "'||'",
"'|'", "'='", "'['", "']'", "'_'", "'@!'"]
symbolicNames = ['<INVALID>', 'A_LETTER', 'ADD', 'AFTER', 'AGENT',
'AGGREGATE', 'ALL', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS',
'ASSUME', 'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT',
'AUTHID', 'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH',
'BEFORE', 'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE',
'BINARY_FLOAT', 'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY',
'BOOLEAN', 'BOTH', 'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER',
'CACHE', 'CALL', 'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR',
'CHAR_CS', 'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER',
'COLLECT', 'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED',
'COMPATIBILITY', 'COMPILE', 'COMPOUND', 'CONNECT',
'CONNECT_BY_ROOT', 'CONSTANT', 'CONSTRAINT', 'CONSTRAINTS',
'CONSTRUCTOR', 'CONTENT', 'CONTEXT', 'CONTINUE', 'CONVERT',
'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST', 'COUNT', 'CREATE',
'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER', 'CURSOR', 'CUSTOMDATUM',
'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY', 'DB_ROLE_CHANGE',
'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL', 'DECLARE',
'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS', 'DEFERRED',
'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC', 'DIMENSION',
'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT', 'DOUBLE', 'DROP',
'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT', 'ELSE', 'ELSIF',
'EMPTY', 'ENABLE', 'ENCODING', 'END', 'ENTITYESCAPING', 'ERR',
'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT', 'EXCEPTION',
'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE', 'EXECUTE',
'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FAILURE',
'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE', 'FLOAT',
'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM', 'FULL',
'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH', 'HAVING',
'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN', 'INCLUDE',
'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED', 'INDICATOR',
'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT', 'INSERT',
'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',
'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',
'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',
'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',
'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',
'LONG', 'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',
'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',
'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',
'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',
'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',
'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',
'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',
'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',
'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',
'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',
'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',
'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',
'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',
'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',
'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',
'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',
'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',
'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',
'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',
'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',
'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',
'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',
'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',
'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',
'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',
'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',
'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 'STRING',
'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',
'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',
'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',
'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 'TIMEZONE_HOUR',
'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',
'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',
'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',
'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',
'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',
'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',
'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',
'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',
'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',
'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',
'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',
'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',
'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',
'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',
'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',
'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',
'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',
'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',
'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',
'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',
'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'DELIMITED_ID', 'PERCENT',
'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN', 'DOUBLE_ASTERISK',
'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA', 'SOLIDUS',
'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',
'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',
'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',
'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',
'GREATER_THAN_OP', 'CONCATENATION_OP', 'VERTICAL_BAR', 'EQUALS_OP',
'LEFT_BRACKET', 'RIGHT_BRACKET', 'INTRODUCER', 'SPACES',
'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'REGULAR_ID',
'ZV']
ruleNames = ['T__0', 'A_LETTER', 'ADD', 'AFTER', 'AGENT', 'AGGREGATE',
'ALL', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS', 'ASSUME',
'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT', 'AUTHID',
'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH', 'BEFORE',
'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE', 'BINARY_FLOAT',
'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY', 'BOOLEAN', 'BOTH',
'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER', 'CACHE', 'CALL',
'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR', 'CHAR_CS',
'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER', 'COLLECT',
'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED', 'COMPATIBILITY',
'COMPILE', 'COMPOUND', 'CONNECT', 'CONNECT_BY_ROOT', 'CONSTANT',
'CONSTRAINT', 'CONSTRAINTS', 'CONSTRUCTOR', 'CONTENT', 'CONTEXT',
'CONTINUE', 'CONVERT', 'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST',
'COUNT', 'CREATE', 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER',
'CURSOR', 'CUSTOMDATUM', 'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY',
'DB_ROLE_CHANGE', 'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL',
'DECLARE', 'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS',
'DEFERRED', 'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC',
'DIMENSION', 'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT',
'DOUBLE', 'DROP', 'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT',
'ELSE', 'ELSIF', 'EMPTY', 'ENABLE', 'ENCODING', 'END',
'ENTITYESCAPING', 'ERR', 'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT',
'EXCEPTION', 'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE',
'EXECUTE', 'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT',
'FAILURE', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE',
'FLOAT', 'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM',
'FULL', 'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH',
'HAVING', 'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN',
'INCLUDE', 'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED',
'INDICATOR', 'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT',
'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',
'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',
'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',
'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',
'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',
'LONG', 'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',
'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',
'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',
'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',
'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',
'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',
'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',
'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',
'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',
'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',
'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',
'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',
'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',
'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',
'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',
'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',
'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',
'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',
'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',
'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',
'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',
'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',
'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',
'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',
'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',
'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',
'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 'STRING',
'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',
'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',
'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',
'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 'TIMEZONE_HOUR',
'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',
'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',
'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',
'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',
'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',
'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',
'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',
'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',
'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',
'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',
'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',
'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',
'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',
'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',
'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',
'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',
'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',
'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',
'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',
'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',
'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'CHAR_STRING_PERL', 'QUOTE',
'QS_ANGLE', 'QS_BRACE', 'QS_BRACK', 'QS_PAREN', 'QS_OTHER_CH',
'DELIMITED_ID', 'PERCENT', 'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN',
'DOUBLE_ASTERISK', 'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA',
'SOLIDUS', 'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',
'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',
'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',
'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',
'GREATER_THAN_OP', 'QUESTION_MARK', 'CONCATENATION_OP',
'VERTICAL_BAR', 'EQUALS_OP', 'LEFT_BRACKET', 'RIGHT_BRACKET',
'INTRODUCER', 'SPACES', 'SIMPLE_LETTER',
'UNSIGNED_INTEGER_FRAGMENT', 'FLOAT_FRAGMENT',
'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'NEWLINE',
'SPACE', 'REGULAR_ID', 'ZV', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U',
'V', 'W', 'X', 'Y', 'Z']
grammarFileName = 'PlSql.g4'
def __init__(self, input=None, output: TextIO=sys.stdout):
    """Initialize the generated lexer and attach its ATN simulator.

    ``input`` is the character stream to tokenize; ``output`` receives
    runtime diagnostics (defaults to stdout).
    """
    super().__init__(input, output)
    # Fail fast if linked against an incompatible ANTLR runtime version.
    self.checkVersion('4.7.2')
    simulator = LexerATNSimulator(
        self,
        self.atn,
        self.decisionsToDFA,
        PredictionContextCache(),
    )
    self._interp = simulator
    # This generated lexer defines no embedded actions or predicates.
    self._actions = None
    self._predicates = None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def serializedATN():
with StringIO() as buf:
buf.write('\x03悋Ꜫ脳맭䅼㯧瞆奤\x02Ȏ')
buf.write(
'ᓗ\x08\x01\x04\x02\t\x02\x04\x03\t\x03\x04\x04\t\x04\x04\x05\t\x05\x04\x06\t\x06\x04\x07'
)
buf.write(
'\t\x07\x04\x08\t\x08\x04\t\t\t\x04\n\t\n\x04\x0b\t\x0b\x04\x0c\t\x0c\x04\r\t\r'
)
buf.write(
'\x04\x0e\t\x0e\x04\x0f\t\x0f\x04\x10\t\x10\x04\x11\t\x11\x04\x12\t\x12\x04\x13'
)
buf.write(
'\t\x13\x04\x14\t\x14\x04\x15\t\x15\x04\x16\t\x16\x04\x17\t\x17\x04\x18\t\x18'
)
buf.write(
'\x04\x19\t\x19\x04\x1a\t\x1a\x04\x1b\t\x1b\x04\x1c\t\x1c\x04\x1d\t\x1d\x04\x1e'
)
buf.write(
'\t\x1e\x04\x1f\t\x1f\x04 \t \x04!\t!\x04"\t"\x04#\t#\x04$\t$\x04%\t%'
)
buf.write(
"\x04&\t&\x04'\t'\x04(\t(\x04)\t)\x04*\t*\x04+\t+\x04,\t,\x04-\t-\x04."
)
buf.write('\t.\x04/\t/\x040\t0\x041\t1\x042\t2\x043\t3\x044')
buf.write('\t4\x045\t5\x046\t6\x047\t7\x048\t8\x049\t9\x04:\t:')
buf.write(
'\x04;\t;\x04<\t<\x04=\t=\x04>\t>\x04?\t?\x04@\t@\x04A\tA\x04B\tB\x04C\t'
)
buf.write(
'C\x04D\tD\x04E\tE\x04F\tF\x04G\tG\x04H\tH\x04I\tI\x04J\tJ\x04K\tK\x04L\t'
)
buf.write(
'L\x04M\tM\x04N\tN\x04O\tO\x04P\tP\x04Q\tQ\x04R\tR\x04S\tS\x04T\tT\x04U\t'
)
buf.write(
'U\x04V\tV\x04W\tW\x04X\tX\x04Y\tY\x04Z\tZ\x04[\t[\x04\\\t\\\x04]\t]\x04'
)
buf.write(
'^\t^\x04_\t_\x04`\t`\x04a\ta\x04b\tb\x04c\tc\x04d\td\x04e\te\x04f\tf\x04'
)
buf.write(
'g\tg\x04h\th\x04i\ti\x04j\tj\x04k\tk\x04l\tl\x04m\tm\x04n\tn\x04o\to\x04'
)
buf.write(
'p\tp\x04q\tq\x04r\tr\x04s\ts\x04t\tt\x04u\tu\x04v\tv\x04w\tw\x04x\tx\x04'
)
buf.write(
'y\ty\x04z\tz\x04{\t{\x04|\t|\x04}\t}\x04~\t~\x04\x7f\t\x7f\x04\x80'
)
buf.write('\t\x80\x04\x81\t\x81\x04\x82\t\x82\x04\x83\t\x83')
buf.write('\x04\x84\t\x84\x04\x85\t\x85\x04\x86\t\x86\x04\x87')
buf.write('\t\x87\x04\x88\t\x88\x04\x89\t\x89\x04\x8a\t\x8a')
buf.write('\x04\x8b\t\x8b\x04\x8c\t\x8c\x04\x8d\t\x8d\x04\x8e')
buf.write('\t\x8e\x04\x8f\t\x8f\x04\x90\t\x90\x04\x91\t\x91')
buf.write('\x04\x92\t\x92\x04\x93\t\x93\x04\x94\t\x94\x04\x95')
buf.write('\t\x95\x04\x96\t\x96\x04\x97\t\x97\x04\x98\t\x98')
buf.write('\x04\x99\t\x99\x04\x9a\t\x9a\x04\x9b\t\x9b\x04\x9c')
buf.write('\t\x9c\x04\x9d\t\x9d\x04\x9e\t\x9e\x04\x9f\t\x9f')
buf.write('\x04\xa0\t\xa0\x04¡\t¡\x04¢\t¢\x04£')
buf.write('\t£\x04¤\t¤\x04¥\t¥\x04¦\t¦')
buf.write('\x04§\t§\x04¨\t¨\x04©\t©\x04ª')
buf.write('\tª\x04«\t«\x04¬\t¬\x04\xad\t\xad')
buf.write('\x04®\t®\x04¯\t¯\x04°\t°\x04±')
buf.write('\t±\x04²\t²\x04³\t³\x04´\t´')
buf.write('\x04µ\tµ\x04¶\t¶\x04·\t·\x04¸')
buf.write('\t¸\x04¹\t¹\x04º\tº\x04»\t»')
buf.write('\x04¼\t¼\x04½\t½\x04¾\t¾\x04¿')
buf.write('\t¿\x04À\tÀ\x04Á\tÁ\x04Â\tÂ')
buf.write('\x04Ã\tÃ\x04Ä\tÄ\x04Å\tÅ\x04Æ')
buf.write('\tÆ\x04Ç\tÇ\x04È\tÈ\x04É\tÉ')
buf.write('\x04Ê\tÊ\x04Ë\tË\x04Ì\tÌ\x04Í')
buf.write('\tÍ\x04Î\tÎ\x04Ï\tÏ\x04Ð\tÐ')
buf.write('\x04Ñ\tÑ\x04Ò\tÒ\x04Ó\tÓ\x04Ô')
buf.write('\tÔ\x04Õ\tÕ\x04Ö\tÖ\x04×\t×')
buf.write('\x04Ø\tØ\x04Ù\tÙ\x04Ú\tÚ\x04Û')
buf.write('\tÛ\x04Ü\tÜ\x04Ý\tÝ\x04Þ\tÞ')
buf.write('\x04ß\tß\x04à\tà\x04á\tá\x04â')
buf.write('\tâ\x04ã\tã\x04ä\tä\x04å\tå')
buf.write('\x04æ\tæ\x04ç\tç\x04è\tè\x04é')
buf.write('\té\x04ê\tê\x04ë\të\x04ì\tì')
buf.write('\x04í\tí\x04î\tî\x04ï\tï\x04ð')
buf.write('\tð\x04ñ\tñ\x04ò\tò\x04ó\tó')
buf.write('\x04ô\tô\x04õ\tõ\x04ö\tö\x04÷')
buf.write('\t÷\x04ø\tø\x04ù\tù\x04ú\tú')
buf.write('\x04û\tû\x04ü\tü\x04ý\tý\x04þ')
buf.write('\tþ\x04ÿ\tÿ\x04Ā\tĀ\x04ā\tā')
buf.write('\x04Ă\tĂ\x04ă\tă\x04Ą\tĄ\x04ą')
buf.write('\tą\x04Ć\tĆ\x04ć\tć\x04Ĉ\tĈ')
buf.write('\x04ĉ\tĉ\x04Ċ\tĊ\x04ċ\tċ\x04Č')
buf.write('\tČ\x04č\tč\x04Ď\tĎ\x04ď\tď')
buf.write('\x04Đ\tĐ\x04đ\tđ\x04Ē\tĒ\x04ē')
buf.write('\tē\x04Ĕ\tĔ\x04ĕ\tĕ\x04Ė\tĖ')
buf.write('\x04ė\tė\x04Ę\tĘ\x04ę\tę\x04Ě')
buf.write('\tĚ\x04ě\tě\x04Ĝ\tĜ\x04ĝ\tĝ')
buf.write('\x04Ğ\tĞ\x04ğ\tğ\x04Ġ\tĠ\x04ġ')
buf.write('\tġ\x04Ģ\tĢ\x04ģ\tģ\x04Ĥ\tĤ')
buf.write('\x04ĥ\tĥ\x04Ħ\tĦ\x04ħ\tħ\x04Ĩ')
buf.write('\tĨ\x04ĩ\tĩ\x04Ī\tĪ\x04ī\tī')
buf.write('\x04Ĭ\tĬ\x04ĭ\tĭ\x04Į\tĮ\x04į')
buf.write('\tį\x04İ\tİ\x04ı\tı\x04IJ\tIJ')
buf.write('\x04ij\tij\x04Ĵ\tĴ\x04ĵ\tĵ\x04Ķ')
buf.write('\tĶ\x04ķ\tķ\x04ĸ\tĸ\x04Ĺ\tĹ')
buf.write('\x04ĺ\tĺ\x04Ļ\tĻ\x04ļ\tļ\x04Ľ')
buf.write('\tĽ\x04ľ\tľ\x04Ŀ\tĿ\x04ŀ\tŀ')
buf.write('\x04Ł\tŁ\x04ł\tł\x04Ń\tŃ\x04ń')
buf.write('\tń\x04Ņ\tŅ\x04ņ\tņ\x04Ň\tŇ')
buf.write('\x04ň\tň\x04ʼn\tʼn\x04Ŋ\tŊ\x04ŋ')
buf.write('\tŋ\x04Ō\tŌ\x04ō\tō\x04Ŏ\tŎ')
buf.write('\x04ŏ\tŏ\x04Ő\tŐ\x04ő\tő\x04Œ')
buf.write('\tŒ\x04œ\tœ\x04Ŕ\tŔ\x04ŕ\tŕ')
buf.write('\x04Ŗ\tŖ\x04ŗ\tŗ\x04Ř\tŘ\x04ř')
buf.write('\tř\x04Ś\tŚ\x04ś\tś\x04Ŝ\tŜ')
buf.write('\x04ŝ\tŝ\x04Ş\tŞ\x04ş\tş\x04Š')
buf.write('\tŠ\x04š\tš\x04Ţ\tŢ\x04ţ\tţ')
buf.write('\x04Ť\tŤ\x04ť\tť\x04Ŧ\tŦ\x04ŧ')
buf.write('\tŧ\x04Ũ\tŨ\x04ũ\tũ\x04Ū\tŪ')
buf.write('\x04ū\tū\x04Ŭ\tŬ\x04ŭ\tŭ\x04Ů')
buf.write('\tŮ\x04ů\tů\x04Ű\tŰ\x04ű\tű')
buf.write('\x04Ų\tŲ\x04ų\tų\x04Ŵ\tŴ\x04ŵ')
buf.write('\tŵ\x04Ŷ\tŶ\x04ŷ\tŷ\x04Ÿ\tŸ')
buf.write('\x04Ź\tŹ\x04ź\tź\x04Ż\tŻ\x04ż')
buf.write('\tż\x04Ž\tŽ\x04ž\tž\x04ſ\tſ')
buf.write('\x04ƀ\tƀ\x04Ɓ\tƁ\x04Ƃ\tƂ\x04ƃ')
buf.write('\tƃ\x04Ƅ\tƄ\x04ƅ\tƅ\x04Ɔ\tƆ')
buf.write('\x04Ƈ\tƇ\x04ƈ\tƈ\x04Ɖ\tƉ\x04Ɗ')
buf.write('\tƊ\x04Ƌ\tƋ\x04ƌ\tƌ\x04ƍ\tƍ')
buf.write('\x04Ǝ\tƎ\x04Ə\tƏ\x04Ɛ\tƐ\x04Ƒ')
buf.write('\tƑ\x04ƒ\tƒ\x04Ɠ\tƓ\x04Ɣ\tƔ')
buf.write('\x04ƕ\tƕ\x04Ɩ\tƖ\x04Ɨ\tƗ\x04Ƙ')
buf.write('\tƘ\x04ƙ\tƙ\x04ƚ\tƚ\x04ƛ\tƛ')
buf.write('\x04Ɯ\tƜ\x04Ɲ\tƝ\x04ƞ\tƞ\x04Ɵ')
buf.write('\tƟ\x04Ơ\tƠ\x04ơ\tơ\x04Ƣ\tƢ')
buf.write('\x04ƣ\tƣ\x04Ƥ\tƤ\x04ƥ\tƥ\x04Ʀ')
buf.write('\tƦ\x04Ƨ\tƧ\x04ƨ\tƨ\x04Ʃ\tƩ')
buf.write('\x04ƪ\tƪ\x04ƫ\tƫ\x04Ƭ\tƬ\x04ƭ')
buf.write('\tƭ\x04Ʈ\tƮ\x04Ư\tƯ\x04ư\tư')
buf.write('\x04Ʊ\tƱ\x04Ʋ\tƲ\x04Ƴ\tƳ\x04ƴ')
buf.write('\tƴ\x04Ƶ\tƵ\x04ƶ\tƶ\x04Ʒ\tƷ')
buf.write('\x04Ƹ\tƸ\x04ƹ\tƹ\x04ƺ\tƺ\x04ƻ')
buf.write('\tƻ\x04Ƽ\tƼ\x04ƽ\tƽ\x04ƾ\tƾ')
buf.write('\x04ƿ\tƿ\x04ǀ\tǀ\x04ǁ\tǁ\x04ǂ')
buf.write('\tǂ\x04ǃ\tǃ\x04DŽ\tDŽ\x04Dž\tDž')
buf.write('\x04dž\tdž\x04LJ\tLJ\x04Lj\tLj\x04lj')
buf.write('\tlj\x04NJ\tNJ\x04Nj\tNj\x04nj\tnj')
buf.write('\x04Ǎ\tǍ\x04ǎ\tǎ\x04Ǐ\tǏ\x04ǐ')
buf.write('\tǐ\x04Ǒ\tǑ\x04ǒ\tǒ\x04Ǔ\tǓ')
buf.write('\x04ǔ\tǔ\x04Ǖ\tǕ\x04ǖ\tǖ\x04Ǘ')
buf.write('\tǗ\x04ǘ\tǘ\x04Ǚ\tǙ\x04ǚ\tǚ')
buf.write('\x04Ǜ\tǛ\x04ǜ\tǜ\x04ǝ\tǝ\x04Ǟ')
buf.write('\tǞ\x04ǟ\tǟ\x04Ǡ\tǠ\x04ǡ\tǡ')
buf.write('\x04Ǣ\tǢ\x04ǣ\tǣ\x04Ǥ\tǤ\x04ǥ')
buf.write('\tǥ\x04Ǧ\tǦ\x04ǧ\tǧ\x04Ǩ\tǨ')
buf.write('\x04ǩ\tǩ\x04Ǫ\tǪ\x04ǫ\tǫ\x04Ǭ')
buf.write('\tǬ\x04ǭ\tǭ\x04Ǯ\tǮ\x04ǯ\tǯ')
buf.write('\x04ǰ\tǰ\x04DZ\tDZ\x04Dz\tDz\x04dz')
buf.write('\tdz\x04Ǵ\tǴ\x04ǵ\tǵ\x04Ƕ\tǶ')
buf.write('\x04Ƿ\tǷ\x04Ǹ\tǸ\x04ǹ\tǹ\x04Ǻ')
buf.write('\tǺ\x04ǻ\tǻ\x04Ǽ\tǼ\x04ǽ\tǽ')
buf.write('\x04Ǿ\tǾ\x04ǿ\tǿ\x04Ȁ\tȀ\x04ȁ')
buf.write('\tȁ\x04Ȃ\tȂ\x04ȃ\tȃ\x04Ȅ\tȄ')
buf.write('\x04ȅ\tȅ\x04Ȇ\tȆ\x04ȇ\tȇ\x04Ȉ')
buf.write('\tȈ\x04ȉ\tȉ\x04Ȋ\tȊ\x04ȋ\tȋ')
buf.write('\x04Ȍ\tȌ\x04ȍ\tȍ\x04Ȏ\tȎ\x04ȏ')
buf.write('\tȏ\x04Ȑ\tȐ\x04ȑ\tȑ\x04Ȓ\tȒ')
buf.write('\x04ȓ\tȓ\x04Ȕ\tȔ\x04ȕ\tȕ\x04Ȗ')
buf.write('\tȖ\x04ȗ\tȗ\x04Ș\tȘ\x04ș\tș')
buf.write('\x04Ț\tȚ\x04ț\tț\x04Ȝ\tȜ\x04ȝ')
buf.write('\tȝ\x04Ȟ\tȞ\x04ȟ\tȟ\x04Ƞ\tȠ')
buf.write('\x04ȡ\tȡ\x04Ȣ\tȢ\x04ȣ\tȣ\x04Ȥ')
buf.write('\tȤ\x04ȥ\tȥ\x04Ȧ\tȦ\x04ȧ\tȧ')
buf.write('\x04Ȩ\tȨ\x04ȩ\tȩ\x04Ȫ\tȪ\x04ȫ')
buf.write('\tȫ\x04Ȭ\tȬ\x04ȭ\tȭ\x04Ȯ\tȮ')
buf.write('\x04ȯ\tȯ\x04Ȱ\tȰ\x04ȱ\tȱ\x04Ȳ')
buf.write('\tȲ\x04ȳ\tȳ\x04ȴ\tȴ\x03\x02\x03\x02\x03\x02\x03')
buf.write(
'\x03\x03\x03\x03\x04\x03\x04\x03\x04\x03\x04\x03\x05\x03\x05\x03\x05\x03\x05\x03\x05\x03\x05\x03\x06\x03\x06'
)
buf.write(
'\x03\x06\x03\x06\x03\x06\x03\x06\x03\x07\x03\x07\x03\x07\x03\x07\x03\x07\x03\x07\x03\x07\x03\x07\x03\x07\x03'
)
buf.write("""
""")
buf.write("""
""")
buf.write(
'\x0c\x03\r\x03\r\x03\r\x03\r\x03\r\x03\r\x03\x0e\x03\x0e\x03\x0e\x03\x0f\x03\x0f\x03'
)
buf.write(
'\x0f\x03\x0f\x03\x0f\x03\x0f\x03\x0f\x03\x10\x03\x10\x03\x10\x03\x10\x03\x10\x03\x10'
)
buf.write(
'\x03\x10\x03\x11\x03\x11\x03\x11\x03\x11\x03\x12\x03\x12\x03\x12\x03\x12\x03\x12\x03\x12'
)
buf.write(
'\x03\x12\x03\x12\x03\x12\x03\x12\x03\x13\x03\x13\x03\x13\x03\x14\x03\x14\x03\x14\x03\x14'
)
buf.write(
'\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15'
)
buf.write(
'\x03\x15\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x17\x03\x17\x03\x17'
)
buf.write(
'\x03\x17\x03\x17\x03\x18\x03\x18\x03\x18\x03\x18\x03\x18\x03\x18\x03\x18\x03\x18\x03\x18'
)
buf.write(
'\x03\x18\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19'
)
buf.write(
'\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19'
)
buf.write(
'\x03\x19\x03\x19\x03\x1a\x03\x1a\x03\x1a\x03\x1a\x03\x1a\x03\x1a\x03\x1b\x03\x1b\x03\x1b'
)
buf.write(
'\x03\x1b\x03\x1b\x03\x1b\x03\x1b\x03\x1c\x03\x1c\x03\x1c\x03\x1c\x03\x1c\x03\x1c\x03\x1d'
)
buf.write(
'\x03\x1d\x03\x1d\x03\x1d\x03\x1d\x03\x1d\x03\x1d\x03\x1d\x03\x1e\x03\x1e\x03\x1e\x03\x1e'
)
buf.write(
'\x03\x1e\x03\x1e\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f'
)
buf.write(
'\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03 \x03 \x03 \x03 \x03 \x03 \x03 \x03 \x03 \x03'
)
buf.write(
' \x03 \x03 \x03 \x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03'
)
buf.write(
'!\x03"\x03"\x03"\x03"\x03"\x03#\x03#\x03#\x03#\x03#\x03#\x03$\x03$\x03$\x03$\x03'
)
buf.write(
"$\x03%\x03%\x03%\x03%\x03%\x03%\x03%\x03%\x03&\x03&\x03&\x03&\x03&\x03'\x03'\x03'\x03"
)
buf.write(
"'\x03'\x03'\x03'\x03'\x03(\x03(\x03(\x03(\x03(\x03)\x03)\x03)\x03*\x03*\x03*\x03"
)
buf.write(
'*\x03*\x03+\x03+\x03,\x03,\x03,\x03,\x03,\x03,\x03-\x03-\x03-\x03-\x03-\x03.\x03.\x03.\x03'
)
buf.write(
'.\x03.\x03.\x03.\x03.\x03.\x03.\x03/\x03/\x03/\x03/\x03/\x03/\x03/\x03/\x030\x030'
)
buf.write('\x030\x030\x030\x031\x031\x031\x031\x031\x032\x032\x032')
buf.write('\x032\x032\x033\x033\x033\x033\x033\x033\x033\x033\x034')
buf.write('\x034\x034\x034\x034\x034\x034\x034\x034\x034\x035\x035')
buf.write('\x035\x035\x035\x035\x036\x036\x036\x036\x037\x037\x037')
buf.write(
'\x037\x037\x038\x038\x038\x038\x038\x038\x039\x039\x039\x039\x039\x039\x039\x039\x03'
)
buf.write(
':\x03:\x03:\x03:\x03:\x03:\x03:\x03:\x03;\x03;\x03;\x03;\x03;\x03;\x03;\x03;\x03<\x03<\x03'
)
buf.write(
'<\x03<\x03<\x03<\x03<\x03<\x03=\x03=\x03=\x03=\x03=\x03=\x03=\x03>\x03>\x03>\x03>\x03>\x03'
)
buf.write(
'>\x03>\x03>\x03>\x03>\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03'
)
buf.write(
'?\x03@\x03@\x03@\x03@\x03@\x03@\x03@\x03@\x03A\x03A\x03A\x03A\x03A\x03A\x03A\x03A\x03A\x03'
)
buf.write(
'B\x03B\x03B\x03B\x03B\x03B\x03B\x03B\x03C\x03C\x03C\x03C\x03C\x03C\x03C\x03C\x03C\x03C\x03'
)
buf.write(
'C\x03C\x03C\x03C\x03C\x03C\x03D\x03D\x03D\x03D\x03D\x03D\x03D\x03D\x03D\x03E\x03E\x03E\x03'
)
buf.write(
'E\x03E\x03E\x03E\x03E\x03E\x03E\x03E\x03F\x03F\x03F\x03F\x03F\x03F\x03F\x03F\x03F\x03F\x03'
)
buf.write(
'F\x03F\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03H\x03H\x03H\x03H\x03'
)
buf.write(
'H\x03H\x03H\x03H\x03I\x03I\x03I\x03I\x03I\x03I\x03I\x03I\x03J\x03J\x03J\x03J\x03J\x03J\x03'
)
buf.write(
'J\x03J\x03J\x03K\x03K\x03K\x03K\x03K\x03K\x03K\x03K\x03L\x03L\x03L\x03L\x03L\x03L\x03L\x03'
)
buf.write(
'L\x03L\x03L\x03L\x03L\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03'
)
buf.write(
'M\x03M\x03M\x03N\x03N\x03N\x03N\x03N\x03O\x03O\x03O\x03O\x03O\x03O\x03P\x03P\x03P\x03P\x03'
)
buf.write(
'P\x03P\x03P\x03Q\x03Q\x03Q\x03Q\x03Q\x03Q\x03R\x03R\x03R\x03R\x03R\x03S\x03S\x03S\x03S\x03'
)
buf.write(
'S\x03S\x03S\x03S\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03U\x03'
)
buf.write(
'U\x03U\x03U\x03U\x03U\x03U\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03'
)
buf.write(
'W\x03W\x03W\x03W\x03W\x03W\x03X\x03X\x03X\x03X\x03X\x03Y\x03Y\x03Y\x03Y\x03Y\x03Y\x03Y\x03'
)
buf.write(
'Y\x03Y\x03Z\x03Z\x03Z\x03Z\x03Z\x03[\x03[\x03[\x03[\x03\\\x03\\\x03\\\x03\\\x03\\\x03'
)
buf.write(
'\\\x03\\\x03\\\x03\\\x03\\\x03\\\x03\\\x03\\\x03\\\x03\\\x03]\x03]\x03]\x03]\x03]'
)
buf.write(
'\x03]\x03]\x03]\x03]\x03]\x03]\x03^\x03^\x03^\x03^\x03_\x03_\x03_\x03_\x03_\x03_\x03`\x03'
)
buf.write(
'`\x03`\x03`\x03a\x03a\x03a\x03a\x03a\x03a\x03a\x03a\x03b\x03b\x03b\x03b\x03b\x03b\x03b\x03'
)
buf.write(
'b\x03c\x03c\x03c\x03c\x03c\x03c\x03c\x03c\x03c\x03c\x03d\x03d\x03d\x03d\x03d\x03d\x03d\x03'
)
buf.write(
'd\x03d\x03d\x03e\x03e\x03e\x03e\x03e\x03e\x03e\x03e\x03f\x03f\x03f\x03f\x03f\x03f\x03f\x03'
)
buf.write(
'f\x03f\x03g\x03g\x03g\x03g\x03g\x03g\x03g\x03g\x03g\x03h\x03h\x03h\x03h\x03h\x03h\x03h\x03'
)
buf.write(
'h\x03i\x03i\x03i\x03i\x03i\x03i\x03i\x03j\x03j\x03j\x03j\x03j\x03j\x03k\x03k\x03k\x03k\x03'
)
buf.write(
'k\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03m\x03m\x03m\x03'
)
buf.write(
'm\x03m\x03m\x03m\x03m\x03m\x03m\x03n\x03n\x03n\x03n\x03n\x03n\x03n\x03n\x03o\x03o\x03o\x03'
)
buf.write(
'o\x03o\x03o\x03o\x03o\x03o\x03o\x03o\x03o\x03o\x03p\x03p\x03p\x03p\x03p\x03p\x03p\x03p\x03'
)
buf.write(
'p\x03q\x03q\x03q\x03q\x03q\x03q\x03q\x03q\x03q\x03r\x03r\x03r\x03r\x03r\x03r\x03r\x03s\x03'
)
buf.write(
's\x03s\x03s\x03s\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03'
)
buf.write(
't\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03u\x03u\x03u\x03u\x03u\x03v\x03v\x03'
)
buf.write(
'v\x03v\x03v\x03v\x03v\x03v\x03w\x03w\x03w\x03w\x03w\x03x\x03x\x03x\x03x\x03x\x03x\x03y\x03'
)
buf.write(
'y\x03y\x03y\x03y\x03y\x03z\x03z\x03z\x03z\x03z\x03z\x03z\x03{\x03{\x03{\x03{\x03{\x03{\x03'
)
buf.write(
'{\x03{\x03{\x03|\x03|\x03|\x03|\x03}\x03}\x03}\x03}\x03}\x03}\x03}\x03}\x03}\x03}\x03}\x03'
)
buf.write(
'}\x03}\x03}\x03}\x03~\x03~\x03~\x03~\x03\x7f\x03\x7f\x03\x7f\x03\x7f\x03\x7f\x03'
)
buf.write(
'\x7f\x03\x7f\x03\x80\x03\x80\x03\x80\x03\x80\x03\x80\x03\x80')
buf.write('\x03\x80\x03\x81\x03\x81\x03\x81\x03\x81\x03\x81\x03\x81')
buf.write('\x03\x81\x03\x81\x03\x81\x03\x82\x03\x82\x03\x82\x03\x82')
buf.write('\x03\x82\x03\x82\x03\x82\x03\x83\x03\x83\x03\x83\x03\x83')
buf.write('\x03\x83\x03\x83\x03\x83\x03\x83\x03\x83\x03\x83\x03\x84')
buf.write('\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84')
buf.write('\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84')
buf.write('\x03\x85\x03\x85\x03\x85\x03\x85\x03\x85\x03\x85\x03\x85')
buf.write('\x03\x85\x03\x85\x03\x85\x03\x85\x03\x86\x03\x86\x03\x86')
buf.write('\x03\x86\x03\x86\x03\x86\x03\x86\x03\x86\x03\x87\x03\x87')
buf.write('\x03\x87\x03\x87\x03\x87\x03\x87\x03\x87\x03\x87\x03\x87')
buf.write('\x03\x87\x03\x88\x03\x88\x03\x88\x03\x88\x03\x88\x03\x88')
buf.write('\x03\x88\x03\x88\x03\x89\x03\x89\x03\x89\x03\x89\x03\x89')
buf.write('\x03\x89\x03\x89\x03\x8a\x03\x8a\x03\x8a\x03\x8a\x03\x8a')
buf.write('\x03\x8b\x03\x8b\x03\x8b\x03\x8b\x03\x8b\x03\x8b\x03\x8b')
buf.write('\x03\x8b\x03\x8c\x03\x8c\x03\x8c\x03\x8c\x03\x8c\x03\x8c')
buf.write('\x03\x8c\x03\x8c\x03\x8c\x03\x8d\x03\x8d\x03\x8d\x03\x8d')
buf.write('\x03\x8d\x03\x8d\x03\x8d\x03\x8d\x03\x8e\x03\x8e\x03\x8e')
buf.write('\x03\x8e\x03\x8e\x03\x8e\x03\x8e\x03\x8e\x03\x8f\x03\x8f')
buf.write('\x03\x8f\x03\x8f\x03\x8f\x03\x8f\x03\x90\x03\x90\x03\x90')
buf.write('\x03\x90\x03\x90\x03\x90\x03\x91\x03\x91\x03\x91\x03\x91')
buf.write('\x03\x91\x03\x91\x03\x92\x03\x92\x03\x92\x03\x92\x03\x92')
buf.write('\x03\x92\x03\x93\x03\x93\x03\x93\x03\x93\x03\x93\x03\x93')
buf.write('\x03\x93\x03\x93\x03\x93\x03\x93\x03\x93\x03\x93\x03\x94')
buf.write('\x03\x94\x03\x94\x03\x94\x03\x94\x03\x94\x03\x95\x03\x95')
buf.write('\x03\x95\x03\x95\x03\x95\x03\x95\x03\x95\x03\x95\x03\x95')
buf.write('\x03\x95\x03\x96\x03\x96\x03\x96\x03\x96\x03\x96\x03\x96')
buf.write('\x03\x96\x03\x96\x03\x97\x03\x97\x03\x97\x03\x97\x03\x98')
buf.write('\x03\x98\x03\x98\x03\x98\x03\x98\x03\x98\x03\x98\x03\x99')
buf.write('\x03\x99\x03\x99\x03\x99\x03\x99\x03\x99\x03\x9a\x03\x9a')
buf.write('\x03\x9a\x03\x9a\x03\x9a\x03\x9b\x03\x9b\x03\x9b\x03\x9b')
buf.write('\x03\x9b\x03\x9c\x03\x9c\x03\x9c\x03\x9c\x03\x9c\x03\x9c')
buf.write('\x03\x9c\x03\x9c\x03\x9c\x03\x9d\x03\x9d\x03\x9d\x03\x9d')
buf.write('\x03\x9d\x03\x9e\x03\x9e\x03\x9e\x03\x9e\x03\x9e\x03\x9e')
buf.write('\x03\x9f\x03\x9f\x03\x9f\x03\x9f\x03\x9f\x03\x9f\x03\xa0')
buf.write('\x03\xa0\x03\xa0\x03\xa0\x03\xa0\x03\xa0\x03\xa0\x03\xa0')
buf.write('\x03\xa0\x03¡\x03¡\x03¡\x03¡\x03¡\x03¢')
buf.write('\x03¢\x03¢\x03¢\x03¢\x03¢\x03¢\x03£')
buf.write('\x03£\x03£\x03£\x03£\x03¤\x03¤\x03¤')
buf.write('\x03¤\x03¤\x03¥\x03¥\x03¥\x03¦\x03¦')
buf.write('\x03¦\x03¦\x03¦\x03¦\x03¦\x03§\x03§')
buf.write('\x03§\x03§\x03§\x03§\x03§\x03§\x03§')
buf.write('\x03§\x03¨\x03¨\x03¨\x03©\x03©\x03©')
buf.write('\x03©\x03©\x03©\x03©\x03©\x03ª\x03ª')
buf.write('\x03ª\x03ª\x03ª\x03ª\x03ª\x03ª\x03ª')
buf.write('\x03ª\x03«\x03«\x03«\x03«\x03«\x03«')
buf.write('\x03«\x03«\x03«\x03«\x03¬\x03¬\x03¬')
buf.write('\x03¬\x03¬\x03¬\x03¬\x03\xad\x03\xad\x03\xad')
buf.write('\x03\xad\x03\xad\x03\xad\x03®\x03®\x03®\x03®')
buf.write('\x03®\x03®\x03®\x03®\x03¯\x03¯\x03¯')
buf.write('\x03¯\x03¯\x03¯\x03¯\x03¯\x03¯\x03¯')
buf.write('\x03°\x03°\x03°\x03°\x03°\x03°\x03°')
buf.write('\x03°\x03±\x03±\x03±\x03±\x03±\x03±')
buf.write('\x03±\x03±\x03±\x03²\x03²\x03²\x03²')
buf.write('\x03²\x03²\x03²\x03³\x03³\x03³\x03³')
buf.write('\x03³\x03³\x03´\x03´\x03´\x03´\x03´')
buf.write('\x03´\x03µ\x03µ\x03µ\x03µ\x03µ\x03µ')
buf.write('\x03µ\x03¶\x03¶\x03¶\x03¶\x03¶\x03¶')
buf.write('\x03¶\x03¶\x03¶\x03¶\x03¶\x03¶\x03¶')
buf.write('\x03·\x03·\x03·\x03·\x03·\x03·\x03·')
buf.write('\x03·\x03¸\x03¸\x03¸\x03¸\x03¹\x03¹')
buf.write('\x03¹\x03¹\x03¹\x03¹\x03¹\x03¹\x03º')
buf.write('\x03º\x03º\x03º\x03º\x03º\x03º\x03º')
buf.write('\x03º\x03º\x03»\x03»\x03»\x03»\x03»')
buf.write('\x03»\x03»\x03»\x03»\x03¼\x03¼\x03¼')
buf.write('\x03¼\x03¼\x03½\x03½\x03½\x03½\x03½')
buf.write('\x03½\x03½\x03½\x03½\x03½\x03½\x03¾')
buf.write('\x03¾\x03¾\x03¿\x03¿\x03¿\x03¿\x03¿')
buf.write('\x03¿\x03¿\x03¿\x03¿\x03¿\x03À\x03À')
buf.write('\x03À\x03À\x03À\x03À\x03À\x03À\x03Á')
buf.write('\x03Á\x03Á\x03Á\x03Á\x03Â\x03Â\x03Â')
buf.write('\x03Â\x03Â\x03Ã\x03Ã\x03Ã\x03Ã\x03Ã')
buf.write('\x03Ä\x03Ä\x03Ä\x03Ä\x03Ä\x03Ä\x03Ä')
buf.write('\x03Ä\x03Ä\x03Å\x03Å\x03Å\x03Å\x03Å')
buf.write('\x03Æ\x03Æ\x03Æ\x03Æ\x03Æ\x03Æ\x03Æ')
buf.write('\x03Æ\x03Æ\x03Æ\x03Æ\x03Ç\x03Ç\x03Ç')
buf.write('\x03Ç\x03Ç\x03Ç\x03Ç\x03Ç\x03È\x03È')
buf.write('\x03È\x03È\x03È\x03É\x03É\x03É\x03É')
buf.write('\x03É\x03É\x03Ê\x03Ê\x03Ê\x03Ê\x03Ê')
buf.write('\x03Ê\x03Ê\x03Ê\x03Ë\x03Ë\x03Ë\x03Ë')
buf.write('\x03Ë\x03Ì\x03Ì\x03Ì\x03Ì\x03Ì\x03Ì')
buf.write('\x03Í\x03Í\x03Í\x03Í\x03Í\x03Í\x03Î')
buf.write('\x03Î\x03Î\x03Î\x03Î\x03Î\x03Ï\x03Ï')
buf.write('\x03Ï\x03Ï\x03Ï\x03Ï\x03Ð\x03Ð\x03Ð')
buf.write('\x03Ð\x03Ð\x03Ð\x03Ñ\x03Ñ\x03Ñ\x03Ñ')
buf.write('\x03Ñ\x03Ò\x03Ò\x03Ò\x03Ò\x03Ò\x03Ò')
buf.write('\x03Ò\x03Ó\x03Ó\x03Ó\x03Ó\x03Ô\x03Ô')
buf.write('\x03Ô\x03Ô\x03Ô\x03Ô\x03Ô\x03Õ\x03Õ')
buf.write('\x03Õ\x03Õ\x03Õ\x03Õ\x03Ö\x03Ö\x03Ö')
buf.write('\x03Ö\x03Ö\x03×\x03×\x03×\x03×\x03×')
buf.write('\x03Ø\x03Ø\x03Ø\x03Ø\x03Ø\x03Ù\x03Ù')
buf.write('\x03Ù\x03Ù\x03Ú\x03Ú\x03Ú\x03Ú\x03Ú')
buf.write('\x03Ú\x03Ú\x03Ú\x03Û\x03Û\x03Û\x03Û')
buf.write('\x03Û\x03Û\x03Û\x03Û\x03Û\x03Ü\x03Ü')
buf.write('\x03Ü\x03Ü\x03Ü\x03Ü\x03Ü\x03Ü\x03Ü')
buf.write('\x03Ý\x03Ý\x03Ý\x03Ý\x03Ý\x03Ý\x03Ý')
buf.write('\x03Þ\x03Þ\x03Þ\x03Þ\x03Þ\x03Þ\x03ß')
buf.write('\x03ß\x03ß\x03ß\x03ß\x03ß\x03à\x03à')
buf.write('\x03à\x03à\x03à\x03à\x03à\x03á\x03á')
buf.write('\x03á\x03á\x03á\x03á\x03á\x03á\x03á')
buf.write('\x03â\x03â\x03â\x03â\x03â\x03â\x03â')
buf.write('\x03â\x03â\x03ã\x03ã\x03ã\x03ã\x03ã')
buf.write('\x03ä\x03ä\x03ä\x03ä\x03ä\x03ä\x03å')
buf.write('\x03å\x03å\x03å\x03å\x03å\x03å\x03æ')
buf.write('\x03æ\x03æ\x03æ\x03æ\x03æ\x03ç\x03ç')
buf.write('\x03ç\x03ç\x03ç\x03ç\x03ç\x03ç\x03ç')
buf.write('\x03è\x03è\x03è\x03è\x03è\x03é\x03é')
buf.write('\x03é\x03é\x03ê\x03ê\x03ê\x03ê\x03ê')
buf.write('\x03ê\x03ê\x03ê\x03ë\x03ë\x03ë\x03ë')
buf.write('\x03ë\x03ë\x03ë\x03ë\x03ë\x03ì\x03ì')
buf.write('\x03ì\x03ì\x03í\x03í\x03í\x03í\x03í')
buf.write('\x03í\x03î\x03î\x03î\x03î\x03î\x03î')
buf.write('\x03î\x03î\x03î\x03ï\x03ï\x03ï\x03ï')
buf.write('\x03ï\x03ï\x03ð\x03ð\x03ð\x03ð\x03ð')
buf.write('\x03ð\x03ð\x03ñ\x03ñ\x03ñ\x03ñ\x03ò')
buf.write('\x03ò\x03ò\x03ó\x03ó\x03ó\x03ó\x03ó')
buf.write('\x03ó\x03ó\x03ó\x03ô\x03ô\x03ô\x03ô')
buf.write('\x03ô\x03ô\x03ô\x03ô\x03õ\x03õ\x03õ')
buf.write('\x03õ\x03õ\x03õ\x03õ\x03ö\x03ö\x03ö')
buf.write('\x03ö\x03ö\x03ö\x03ö\x03ö\x03÷\x03÷')
buf.write('\x03÷\x03÷\x03÷\x03÷\x03÷\x03÷\x03÷')
buf.write('\x03÷\x03÷\x03÷\x03÷\x03÷\x03÷\x03÷')
buf.write('\x03÷\x03ø\x03ø\x03ø\x03ø\x03ø\x03ø')
buf.write('\x03ø\x03ø\x03ø\x03ø\x03ø\x03ù\x03ù')
buf.write('\x03ù\x03ù\x03ù\x03ù\x03ù\x03ù\x03ù')
buf.write('\x03ù\x03ù\x03ú\x03ú\x03ú\x03ú\x03ú')
buf.write('\x03û\x03û\x03û\x03û\x03û\x03û\x03û')
buf.write('\x03û\x03ü\x03ü\x03ü\x03ü\x03ü\x03ü')
buf.write('\x03ü\x03ü\x03ü\x03ü\x03ü\x03ü\x03ü')
buf.write('\x03ü\x03ý\x03ý\x03ý\x03ý\x03þ\x03þ')
buf.write('\x03þ\x03þ\x03þ\x03þ\x03þ\x03ÿ\x03ÿ')
buf.write('\x03ÿ\x03ÿ\x03ÿ\x03Ā\x03Ā\x03Ā\x03Ā')
buf.write('\x03Ā\x03Ā\x03ā\x03ā\x03ā\x03ā\x03ā')
buf.write('\x03ā\x03ā\x03Ă\x03Ă\x03Ă\x03Ă\x03Ă')
buf.write('\x03Ă\x03Ă\x03Ă\x03ă\x03ă\x03ă\x03ă')
buf.write('\x03ă\x03ă\x03ă\x03ă\x03ă\x03ă\x03Ą')
buf.write('\x03Ą\x03Ą\x03Ą\x03Ą\x03Ą\x03Ą\x03ą')
buf.write('\x03ą\x03ą\x03Ć\x03Ć\x03Ć\x03Ć\x03ć')
buf.write('\x03ć\x03ć\x03ć\x03Ĉ\x03Ĉ\x03Ĉ\x03Ĉ')
buf.write('\x03ĉ\x03ĉ\x03ĉ\x03Ċ\x03Ċ\x03Ċ\x03Ċ')
buf.write('\x03Ċ\x03ċ\x03ċ\x03ċ\x03ċ\x03ċ\x03Č')
buf.write('\x03Č\x03Č\x03Č\x03Č\x03Č\x03Č\x03č')
buf.write('\x03č\x03č\x03Ď\x03Ď\x03Ď\x03Ď\x03Ď')
buf.write('\x03Ď\x03Ď\x03Ď\x03ď\x03ď\x03ď\x03ď')
buf.write('\x03ď\x03ď\x03Đ\x03Đ\x03Đ\x03Đ\x03Đ')
buf.write('\x03Đ\x03Đ\x03Đ\x03Đ\x03Đ\x03Đ\x03đ')
buf.write('\x03đ\x03đ\x03đ\x03đ\x03đ\x03đ\x03đ')
buf.write('\x03Ē\x03Ē\x03Ē\x03Ē\x03ē\x03ē\x03ē')
buf.write('\x03ē\x03ē\x03ē\x03Ĕ\x03Ĕ\x03Ĕ\x03Ĕ')
buf.write('\x03Ĕ\x03ĕ\x03ĕ\x03ĕ\x03ĕ\x03ĕ\x03ĕ')
buf.write('\x03ĕ\x03ĕ\x03ĕ\x03ĕ\x03ĕ\x03Ė\x03Ė')
buf.write('\x03Ė\x03Ė\x03Ė\x03Ė\x03Ė\x03Ė\x03ė')
buf.write('\x03ė\x03ė\x03ė\x03ė\x03ė\x03ė\x03ė')
buf.write('\x03ė\x03ė\x03ė\x03ė\x03ė\x03ė\x03ė')
buf.write('\x03ė\x03Ę\x03Ę\x03Ę\x03Ę\x03Ę\x03Ę')
buf.write('\x03Ę\x03Ę\x03Ę\x03Ę\x03Ę\x03ę\x03ę')
buf.write('\x03ę\x03ę\x03ę\x03ę\x03ę\x03Ě\x03Ě')
buf.write('\x03Ě\x03Ě\x03Ě\x03Ě\x03Ě\x03Ě\x03Ě')
buf.write('\x03Ě\x03ě\x03ě\x03ě\x03ě\x03ě\x03ě')
buf.write('\x03ě\x03ě\x03Ĝ\x03Ĝ\x03Ĝ\x03Ĝ\x03Ĝ')
buf.write('\x03ĝ\x03ĝ\x03ĝ\x03ĝ\x03ĝ\x03ĝ\x03ĝ')
buf.write('\x03ĝ\x03ĝ\x03Ğ\x03Ğ\x03Ğ\x03Ğ\x03Ğ')
buf.write('\x03Ğ\x03ğ\x03ğ\x03ğ\x03ğ\x03ğ\x03ğ')
buf.write('\x03ğ\x03ğ\x03ğ\x03ğ\x03Ġ\x03Ġ\x03Ġ')
buf.write('\x03Ġ\x03Ġ\x03Ġ\x03ġ\x03ġ\x03ġ\x03ġ')
buf.write('\x03ġ\x03Ģ\x03Ģ\x03Ģ\x03Ģ\x03Ģ\x03Ģ')
buf.write('\x03Ģ\x03Ģ\x03Ģ\x03Ģ\x03Ģ\x03Ģ\x03ģ')
buf.write('\x03ģ\x03ģ\x03ģ\x03ģ\x03ģ\x03ģ\x03ģ')
buf.write('\x03ģ\x03Ĥ\x03Ĥ\x03Ĥ\x03Ĥ\x03Ĥ\x03Ĥ')
buf.write('\x03Ĥ\x03Ĥ\x03Ĥ\x03Ĥ\x03ĥ\x03ĥ\x03ĥ')
buf.write('\x03ĥ\x03ĥ\x03ĥ\x03ĥ\x03Ħ\x03Ħ\x03Ħ')
buf.write('\x03Ħ\x03Ħ\x03Ħ\x03Ħ\x03Ħ\x03Ħ\x03Ħ')
buf.write('\x03ħ\x03ħ\x03ħ\x03ħ\x03ħ\x03ħ\x03ħ')
buf.write('\x03ħ\x03ħ\x03ħ\x03Ĩ\x03Ĩ\x03Ĩ\x03Ĩ')
buf.write('\x03Ĩ\x03Ĩ\x03Ĩ\x03Ĩ\x03ĩ\x03ĩ\x03ĩ')
buf.write('\x03ĩ\x03ĩ\x03ĩ\x03Ī\x03Ī\x03Ī\x03Ī')
buf.write('\x03Ī\x03Ī\x03Ī\x03Ī\x03Ī\x03Ī\x03ī')
buf.write('\x03ī\x03ī\x03ī\x03ī\x03ī\x03Ĭ\x03Ĭ')
buf.write('\x03Ĭ\x03Ĭ\x03Ĭ\x03Ĭ\x03ĭ\x03ĭ\x03ĭ')
buf.write('\x03ĭ\x03Į\x03Į\x03Į\x03Į\x03Į\x03į')
buf.write('\x03į\x03į\x03į\x03į\x03İ\x03İ\x03İ')
buf.write('\x03İ\x03İ\x03İ\x03İ\x03ı\x03ı\x03ı')
buf.write('\x03ı\x03IJ\x03IJ\x03IJ\x03IJ\x03IJ\x03IJ')
buf.write('\x03IJ\x03IJ\x03IJ\x03IJ\x03ij\x03ij\x03ij')
buf.write('\x03ij\x03ij\x03ij\x03ij\x03ij\x03ij\x03ij')
buf.write('\x03ij\x03ij\x03Ĵ\x03Ĵ\x03Ĵ\x03Ĵ\x03Ĵ')
buf.write('\x03Ĵ\x03Ĵ\x03ĵ\x03ĵ\x03ĵ\x03ĵ\x03ĵ')
buf.write('\x03ĵ\x03ĵ\x03ĵ\x03ĵ\x03ĵ\x03Ķ\x03Ķ')
buf.write('\x03Ķ\x03Ķ\x03Ķ\x03Ķ\x03Ķ\x03ķ\x03ķ')
buf.write('\x03ķ\x03ķ\x03ķ\x03ķ\x03ķ\x03ķ\x03ĸ')
buf.write('\x03ĸ\x03ĸ\x03ĸ\x03ĸ\x03ĸ\x03ĸ\x03ĸ')
buf.write('\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ')
buf.write('\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ')
buf.write('\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03ĺ')
buf.write('\x03ĺ\x03ĺ\x03ĺ\x03ĺ\x03ĺ\x03ĺ\x03Ļ')
buf.write('\x03Ļ\x03Ļ\x03Ļ\x03Ļ\x03Ļ\x03Ļ\x03Ļ')
buf.write('\x03Ļ\x03Ļ\x03Ļ\x03Ļ\x03Ļ\x03ļ\x03ļ')
buf.write('\x03ļ\x03ļ\x03ļ\x03ļ\x03ļ\x03Ľ\x03Ľ')
buf.write('\x03Ľ\x03Ľ\x03Ľ\x03Ľ\x03Ľ\x03Ľ\x03Ľ')
buf.write('\x03Ľ\x03ľ\x03ľ\x03ľ\x03ľ\x03ľ\x03ľ')
buf.write('\x03Ŀ\x03Ŀ\x03Ŀ\x03Ŀ\x03Ŀ\x03Ŀ\x03Ŀ')
buf.write('\x03Ŀ\x03ŀ\x03ŀ\x03ŀ\x03ŀ\x03ŀ\x03ŀ')
buf.write('\x03ŀ\x03Ł\x03Ł\x03Ł\x03Ł\x03Ł\x03Ł')
buf.write('\x03ł\x03ł\x03ł\x03ł\x03ł\x03ł\x03ł')
buf.write('\x03ł\x03ł\x03Ń\x03Ń\x03Ń\x03Ń\x03Ń')
buf.write('\x03Ń\x03Ń\x03ń\x03ń\x03ń\x03ń\x03Ņ')
buf.write('\x03Ņ\x03Ņ\x03Ņ\x03Ņ\x03Ņ\x03ņ\x03ņ')
buf.write('\x03ņ\x03ņ\x03ņ\x03Ň\x03Ň\x03Ň\x03Ň')
buf.write('\x03Ň\x03Ň\x03ň\x03ň\x03ň\x03ň\x03ň')
buf.write('\x03ň\x03ň\x03ʼn\x03ʼn\x03ʼn\x03ʼn\x03ʼn')
buf.write('\x03Ŋ\x03Ŋ\x03Ŋ\x03Ŋ\x03Ŋ\x03Ŋ\x03Ŋ')
buf.write('\x03Ŋ\x03Ŋ\x03Ŋ\x03ŋ\x03ŋ\x03ŋ\x03ŋ')
buf.write('\x03ŋ\x03ŋ\x03ŋ\x03Ō\x03Ō\x03Ō\x03Ō')
buf.write('\x03Ō\x03Ō\x03Ō\x03Ō\x03Ō\x03Ō\x03Ō')
buf.write('\x03Ō\x03ō\x03ō\x03ō\x03ō\x03Ŏ\x03Ŏ')
buf.write('\x03Ŏ\x03Ŏ\x03Ŏ\x03Ŏ\x03Ŏ\x03ŏ\x03ŏ')
buf.write('\x03ŏ\x03ŏ\x03ŏ\x03ŏ\x03ŏ\x03Ő\x03Ő')
buf.write('\x03Ő\x03Ő\x03Ő\x03ő\x03ő\x03ő\x03ő')
buf.write('\x03ő\x03ő\x03ő\x03ő\x03Œ\x03Œ\x03Œ')
buf.write('\x03Œ\x03Œ\x03Œ\x03Œ\x03œ\x03œ\x03œ')
buf.write('\x03œ\x03œ\x03Ŕ\x03Ŕ\x03Ŕ\x03Ŕ\x03Ŕ')
buf.write('\x03Ŕ\x03Ŕ\x03Ŕ\x03Ŕ\x03ŕ\x03ŕ\x03ŕ')
buf.write('\x03ŕ\x03ŕ\x03ŕ\x03ŕ\x03ŕ\x03ŕ\x03ŕ')
buf.write('\x03ŕ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ')
buf.write('\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ')
buf.write('\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ')
buf.write('\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ')
buf.write('\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03Ř\x03Ř\x03Ř')
buf.write('\x03Ř\x03Ř\x03Ř\x03Ř\x03Ř\x03Ř\x03Ř')
buf.write('\x03Ř\x03Ř\x03ř\x03ř\x03ř\x03ř\x03ř')
buf.write('\x03ř\x03ř\x03ř\x03ř\x03ř\x03ř\x03ř')
buf.write('\x03ř\x03ř\x03ř\x03ř\x03Ś\x03Ś\x03Ś')
buf.write('\x03Ś\x03ś\x03ś\x03ś\x03ś\x03ś\x03Ŝ')
buf.write('\x03Ŝ\x03Ŝ\x03Ŝ\x03Ŝ\x03Ŝ\x03Ŝ\x03Ŝ')
buf.write('\x03Ŝ\x03ŝ\x03ŝ\x03ŝ\x03ŝ\x03ŝ\x03ŝ')
buf.write('\x03Ş\x03Ş\x03Ş\x03Ş\x03Ş\x03ş\x03ş')
buf.write('\x03ş\x03ş\x03ş\x03ş\x03ş\x03ş\x03ş')
buf.write('\x03Š\x03Š\x03Š\x03Š\x03Š\x03Š\x03Š')
buf.write('\x03Š\x03Š\x03š\x03š\x03š\x03š\x03š')
buf.write('\x03š\x03š\x03š\x03š\x03Ţ\x03Ţ\x03Ţ')
buf.write('\x03Ţ\x03Ţ\x03Ţ\x03Ţ\x03Ţ\x03Ţ\x03Ţ')
buf.write('\x03Ţ\x03Ţ\x03Ţ\x03Ţ\x03Ţ\x03ţ\x03ţ')
buf.write('\x03ţ\x03ţ\x03ţ\x03ţ\x03ţ\x03Ť\x03Ť')
buf.write('\x03Ť\x03Ť\x03Ť\x03ť\x03ť\x03ť\x03ť')
buf.write('\x03ť\x03Ŧ\x03Ŧ\x03Ŧ\x03Ŧ\x03Ŧ\x03Ŧ')
buf.write('\x03Ŧ\x03Ŧ\x03Ŧ\x03ŧ\x03ŧ\x03ŧ\x03ŧ')
buf.write('\x03ŧ\x03ŧ\x03ŧ\x03ŧ\x03ŧ\x03Ũ\x03Ũ')
buf.write('\x03Ũ\x03Ũ\x03Ũ\x03ũ\x03ũ\x03ũ\x03ũ')
buf.write('\x03ũ\x03ũ\x03ũ\x03ũ\x03ũ\x03ũ\x03ũ')
buf.write('\x03ũ\x03ũ\x03ũ\x03Ū\x03Ū\x03Ū\x03Ū')
buf.write('\x03Ū\x03Ū\x03Ū\x03Ū\x03ū\x03ū\x03ū')
buf.write('\x03ū\x03ū\x03ū\x03ū\x03ū\x03ū\x03Ŭ')
buf.write('\x03Ŭ\x03Ŭ\x03Ŭ\x03Ŭ\x03Ŭ\x03Ŭ\x03Ŭ')
buf.write('\x03Ŭ\x03Ŭ\x03Ŭ\x03ŭ\x03ŭ\x03ŭ\x03ŭ')
buf.write('\x03ŭ\x03ŭ\x03Ů\x03Ů\x03Ů\x03Ů\x03Ů')
buf.write('\x03Ů\x03Ů\x03Ů\x03ů\x03ů\x03ů\x03ů')
buf.write('\x03ů\x03ů\x03ů\x03ů\x03ů\x03ů\x03Ű')
buf.write('\x03Ű\x03Ű\x03Ű\x03Ű\x03Ű\x03Ű\x03Ű')
buf.write('\x03Ű\x03Ű\x03Ű\x03Ű\x03Ű\x03ű\x03ű')
buf.write('\x03ű\x03ű\x03ű\x03ű\x03ű\x03Ų\x03Ų')
buf.write('\x03Ų\x03Ų\x03Ų\x03Ų\x03Ų\x03Ų\x03Ų')
buf.write('\x03Ų\x03Ų\x03ų\x03ų\x03ų\x03ų\x03ų')
buf.write('\x03ų\x03ų\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ')
buf.write('\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ')
buf.write('\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03ŵ')
buf.write('\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03Ŷ')
buf.write('\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ')
buf.write('\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03ŷ')
buf.write('\x03ŷ\x03ŷ\x03ŷ\x03ŷ\x03ŷ\x03ŷ\x03ŷ')
buf.write('\x03Ÿ\x03Ÿ\x03Ÿ\x03Ÿ\x03Ÿ\x03Ÿ\x03Ÿ')
buf.write('\x03Ÿ\x03Ź\x03Ź\x03Ź\x03Ź\x03Ź\x03Ź')
buf.write('\x03Ź\x03Ź\x03ź\x03ź\x03ź\x03ź\x03ź')
buf.write('\x03ź\x03Ż\x03Ż\x03Ż\x03Ż\x03ż\x03ż')
buf.write('\x03ż\x03ż\x03ż\x03Ž\x03Ž\x03Ž\x03Ž')
buf.write('\x03Ž\x03ž\x03ž\x03ž\x03ž\x03ž\x03ž')
buf.write('\x03ž\x03ž\x03ž\x03ž\x03ſ\x03ſ\x03ſ')
buf.write('\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ')
buf.write('\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ')
buf.write('\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ')
buf.write('\x03ſ\x03ſ\x03ſ\x03ſ\x03ƀ\x03ƀ\x03ƀ')
buf.write('\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ')
buf.write('\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ')
buf.write('\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ')
buf.write('\x03ƀ\x03ƀ\x03ƀ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ')
buf.write('\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ')
buf.write('\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ')
buf.write('\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ƃ')
buf.write('\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ')
buf.write('\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03ƃ')
buf.write('\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03ƃ')
buf.write('\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03Ƅ')
buf.write('\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ')
buf.write('\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ')
buf.write('\x03Ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ')
buf.write('\x03ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ')
buf.write('\x03ƅ\x03ƅ\x03ƅ\x03Ɔ\x03Ɔ\x03Ɔ\x03Ƈ')
buf.write('\x03Ƈ\x03Ƈ\x03Ƈ\x03Ƈ\x03Ƈ\x03Ƈ\x03Ƈ')
buf.write('\x03Ƈ\x03ƈ\x03ƈ\x03ƈ\x03ƈ\x03ƈ\x03ƈ')
buf.write('\x03ƈ\x03ƈ\x03ƈ\x03ƈ\x03ƈ\x03ƈ\x03Ɖ')
buf.write('\x03Ɖ\x03Ɖ\x03Ɖ\x03Ɖ\x03Ɖ\x03Ɖ\x03Ɖ')
buf.write('\x03Ɖ\x03Ɖ\x03Ɗ\x03Ɗ\x03Ɗ\x03Ɗ\x03Ɗ')
buf.write('\x03Ɗ\x03Ƌ\x03Ƌ\x03Ƌ\x03Ƌ\x03Ƌ\x03Ƌ')
buf.write('\x03Ƌ\x03Ƌ\x03ƌ\x03ƌ\x03ƌ\x03ƌ\x03ƌ')
buf.write('\x03ƍ\x03ƍ\x03ƍ\x03ƍ\x03ƍ\x03Ǝ\x03Ǝ')
buf.write('\x03Ǝ\x03Ǝ\x03Ǝ\x03Ǝ\x03Ǝ\x03Ǝ\x03Ǝ')
buf.write('\x03Ə\x03Ə\x03Ə\x03Ə\x03Ə\x03Ɛ\x03Ɛ')
buf.write('\x03Ɛ\x03Ɛ\x03Ɛ\x03Ɛ\x03Ɛ\x03Ɛ\x03Ɛ')
buf.write('\x03Ɛ\x03Ƒ\x03Ƒ\x03Ƒ\x03Ƒ\x03Ƒ\x03Ƒ')
buf.write('\x03ƒ\x03ƒ\x03ƒ\x03ƒ\x03ƒ\x03ƒ\x03Ɠ')
buf.write('\x03Ɠ\x03Ɠ\x03Ɠ\x03Ɠ\x03Ɠ\x03Ɠ\x03Ɣ')
buf.write('\x03Ɣ\x03Ɣ\x03Ɣ\x03Ɣ\x03Ɣ\x03Ɣ\x03Ɣ')
buf.write('\x03Ɣ\x03Ɣ\x03ƕ\x03ƕ\x03ƕ\x03ƕ\x03ƕ')
buf.write('\x03ƕ\x03ƕ\x03ƕ\x03Ɩ\x03Ɩ\x03Ɩ\x03Ɩ')
buf.write('\x03Ɩ\x03Ɩ\x03Ɨ\x03Ɨ\x03Ɨ\x03Ɨ\x03Ɨ')
buf.write('\x03Ɨ\x03Ɨ\x03Ƙ\x03Ƙ\x03Ƙ\x03Ƙ\x03Ƙ')
buf.write('\x03Ƙ\x03Ƙ\x03Ƙ\x03ƙ\x03ƙ\x03ƙ\x03ƙ')
buf.write('\x03ƙ\x03ƙ\x03ƙ\x03ƚ\x03ƚ\x03ƚ\x03ƚ')
buf.write('\x03ƚ\x03ƚ\x03ƚ\x03ƛ\x03ƛ\x03ƛ\x03ƛ')
buf.write('\x03Ɯ\x03Ɯ\x03Ɯ\x03Ɯ\x03Ɯ\x03Ɯ\x03Ɲ')
buf.write('\x03Ɲ\x03Ɲ\x03Ɲ\x03Ɲ\x03Ɲ\x03Ɲ\x03Ɲ')
buf.write('\x03Ɲ\x03ƞ\x03ƞ\x03ƞ\x03ƞ\x03ƞ\x03ƞ')
buf.write('\x03Ɵ\x03Ɵ\x03Ɵ\x03Ɵ\x03Ɵ\x03Ɵ\x03Ɵ')
buf.write('\x03Ơ\x03Ơ\x03Ơ\x03Ơ\x03Ơ\x03Ơ\x03Ơ')
buf.write('\x03Ơ\x03ơ\x03ơ\x03ơ\x03ơ\x03ơ\x03ơ')
buf.write('\x03ơ\x03ơ\x03ơ\x03Ƣ\x03Ƣ\x03Ƣ\x03Ƣ')
buf.write('\x03Ƣ\x03Ƣ\x03Ƣ\x03Ƣ\x03Ƣ\x03ƣ\x03ƣ')
buf.write('\x03ƣ\x03ƣ\x03ƣ\x03ƣ\x03ƣ\x03Ƥ\x03Ƥ')
buf.write('\x03Ƥ\x03Ƥ\x03Ƥ\x03Ƥ\x03Ƥ\x03Ƥ\x03ƥ')
buf.write('\x03ƥ\x03ƥ\x03ƥ\x03ƥ\x03ƥ\x03ƥ\x03ƥ')
buf.write('\x03Ʀ\x03Ʀ\x03Ʀ\x03Ʀ\x03Ʀ\x03Ʀ\x03Ʀ')
buf.write('\x03Ʀ\x03Ʀ\x03Ƨ\x03Ƨ\x03Ƨ\x03Ƨ\x03Ƨ')
buf.write('\x03ƨ\x03ƨ\x03ƨ\x03ƨ\x03ƨ\x03ƨ\x03ƨ')
buf.write('\x03ƨ\x03Ʃ\x03Ʃ\x03Ʃ\x03Ʃ\x03Ʃ\x03Ʃ')
buf.write('\x03Ʃ\x03Ʃ\x03Ʃ\x03Ʃ\x03Ʃ\x03ƪ\x03ƪ')
buf.write('\x03ƪ\x03ƪ\x03ƪ\x03ƫ\x03ƫ\x03ƫ\x03ƫ')
buf.write('\x03ƫ\x03ƫ\x03ƫ\x03ƫ\x03ƫ\x03Ƭ\x03Ƭ')
buf.write('\x03Ƭ\x03Ƭ\x03Ƭ\x03Ƭ\x03ƭ\x03ƭ\x03ƭ')
buf.write('\x03ƭ\x03ƭ\x03ƭ\x03Ʈ\x03Ʈ\x03Ʈ\x03Ʈ')
buf.write('\x03Ʈ\x03Ư\x03Ư\x03Ư\x03Ư\x03Ư\x03Ư')
buf.write('\x03Ư\x03ư\x03ư\x03ư\x03ư\x03ư\x03Ʊ')
buf.write('\x03Ʊ\x03Ʊ\x03Ʊ\x03Ʊ\x03Ʊ\x03Ʋ\x03Ʋ')
buf.write('\x03Ʋ\x03Ʋ\x03Ƴ\x03Ƴ\x03Ƴ\x03Ƴ\x03Ƴ')
buf.write('\x03Ƴ\x03Ƴ\x03ƴ\x03ƴ\x03ƴ\x03ƴ\x03ƴ')
buf.write('\x03ƴ\x03ƴ\x03ƴ\x03ƴ\x03ƴ\x03ƴ\x03ƴ')
buf.write('\x03ƴ\x03ƴ\x03Ƶ\x03Ƶ\x03Ƶ\x03Ƶ\x03Ƶ')
buf.write('\x03Ƶ\x03Ƶ\x03Ƶ\x03ƶ\x03ƶ\x03ƶ\x03ƶ')
buf.write('\x03ƶ\x03ƶ\x03ƶ\x03ƶ\x03ƶ\x03ƶ\x03ƶ')
buf.write('\x03ƶ\x03ƶ\x03Ʒ\x03Ʒ\x03Ʒ\x03Ʒ\x03Ʒ')
buf.write('\x03Ʒ\x03Ʒ\x03Ʒ\x03Ʒ\x03Ʒ\x03Ʒ\x03Ƹ')
buf.write('\x03Ƹ\x03Ƹ\x03Ƹ\x03Ƹ\x03Ƹ\x03Ƹ\x03Ƹ')
buf.write('\x03Ƹ\x03Ƹ\x03ƹ\x03ƹ\x03ƹ\x03ƹ\x03ƹ')
buf.write('\x03ƹ\x03ƹ\x03ƹ\x03ƹ\x03ƹ\x03ƺ\x03ƺ')
buf.write('\x03ƺ\x03ƺ\x03ƺ\x03ƺ\x03ƺ\x03ƺ\x03ƺ')
buf.write('\x03ƺ\x03ƺ\x03ƺ\x03ƺ\x03ƺ\x03ƻ\x03ƻ')
buf.write('\x03ƻ\x03ƻ\x03ƻ\x03ƻ\x03ƻ\x03ƻ\x03ƻ')
buf.write('\x03Ƽ\x03Ƽ\x03Ƽ\x03Ƽ\x03Ƽ\x03Ƽ\x03ƽ')
buf.write('\x03ƽ\x03ƽ\x03ƽ\x03ƽ\x03ƽ\x03ƽ\x03ƽ')
buf.write('\x03ƽ\x03ƾ\x03ƾ\x03ƾ\x03ƾ\x03ƾ\x03ƾ')
buf.write('\x03ƾ\x03ƾ\x03ƿ\x03ƿ\x03ƿ\x03ƿ\x03ƿ')
buf.write('\x03ƿ\x03ƿ\x03ƿ\x03ƿ\x03ƿ\x03ƿ\x03ƿ')
buf.write('\x03ƿ\x03ǀ\x03ǀ\x03ǀ\x03ǀ\x03ǀ\x03ǀ')
buf.write('\x03ǀ\x03ǀ\x03ǀ\x03ǁ\x03ǁ\x03ǁ\x03ǁ')
buf.write('\x03ǁ\x03ǂ\x03ǂ\x03ǂ\x03ǂ\x03ǃ\x03ǃ')
buf.write('\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ')
buf.write('\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ')
buf.write('\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ')
buf.write('\x03ǃ\x03ǃ\x03DŽ\x03DŽ\x03DŽ\x03DŽ\x03DŽ')
buf.write('\x03Dž\x03Dž\x03Dž\x03Dž\x03Dž\x03Dž\x03Dž')
buf.write('\x03Dž\x03Dž\x03Dž\x03Dž\x03dž\x03dž\x03dž')
buf.write('\x03dž\x03dž\x03dž\x03dž\x03dž\x03dž\x03dž')
buf.write('\x03dž\x03dž\x03dž\x03dž\x03dž\x03dž\x03dž')
buf.write('\x03dž\x03LJ\x03LJ\x03LJ\x03LJ\x03LJ\x03LJ')
buf.write('\x03LJ\x03LJ\x03LJ\x03LJ\x03LJ\x03LJ\x03LJ')
buf.write('\x03LJ\x03LJ\x03LJ\x03Lj\x03Lj\x03Lj\x03Lj')
buf.write('\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj')
buf.write('\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj')
buf.write('\x03Lj\x03lj\x03lj\x03lj\x03lj\x03lj\x03lj')
buf.write('\x03lj\x03lj\x03lj\x03lj\x03lj\x03lj\x03lj')
buf.write('\x03lj\x03lj\x03lj\x03lj\x03lj\x03lj\x03lj')
buf.write('\x03lj\x03lj\x03lj\x03NJ\x03NJ\x03NJ\x03NJ')
buf.write('\x03NJ\x03NJ\x03NJ\x03NJ\x03NJ\x03NJ\x03NJ')
buf.write('\x03NJ\x03NJ\x03NJ\x03NJ\x03Nj\x03Nj\x03Nj')
buf.write('\x03Nj\x03Nj\x03Nj\x03Nj\x03Nj\x03Nj\x03Nj')
buf.write('\x03nj\x03nj\x03nj\x03nj\x03nj\x03nj\x03nj')
buf.write('\x03nj\x03nj\x03nj\x03nj\x03Ǎ\x03Ǎ\x03Ǎ')
buf.write('\x03Ǎ\x03Ǎ\x03Ǎ\x03Ǎ\x03Ǎ\x03ǎ\x03ǎ')
buf.write('\x03ǎ\x03ǎ\x03ǎ\x03ǎ\x03ǎ\x03ǎ\x03ǎ')
buf.write('\x03ǎ\x03ǎ\x03ǎ\x03ǎ\x03Ǐ\x03Ǐ\x03Ǐ')
buf.write('\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ')
buf.write('\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03ǐ')
buf.write('\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ')
buf.write('\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ')
buf.write('\x03ǐ\x03Ǒ\x03Ǒ\x03Ǒ\x03Ǒ\x03Ǒ\x03ǒ')
buf.write('\x03ǒ\x03ǒ\x03ǒ\x03Ǔ\x03Ǔ\x03Ǔ\x03Ǔ')
buf.write('\x03Ǔ\x03ǔ\x03ǔ\x03ǔ\x03ǔ\x03Ǖ\x03Ǖ')
buf.write('\x03Ǖ\x03Ǖ\x03Ǖ\x03ǖ\x03ǖ\x03ǖ\x03ǖ')
buf.write('\x03Ǘ\x03Ǘ\x03Ǘ\x03Ǘ\x03Ǘ\x03Ǘ\x03Ǘ')
buf.write('\x03ǘ\x03ǘ\x03ǘ\x03ǘ\x03Ǚ\x03Ǚ\x03Ǚ')
buf.write('\x03Ǚ\x03Ǚ\x03Ǚ\x03ǚ\x03ǚ\x03ǚ\x03ǚ')
buf.write('\x03ǚ\x03ǚ\x03ǚ\x03ǚ\x03ǚ\x03ǚ\x03ǚ')
buf.write('\x03ǚ\x03ǚ\x03ǚ\x03ǚ\x03ǚ\x03Ǜ\x03Ǜ')
buf.write('\x03Ǜ\x03Ǜ\x03Ǜ\x03Ǜ\x03Ǜ\x03Ǜ\x03Ǜ')
buf.write('\x03Ǜ\x03Ǜ\x03ǜ\x03ǜ\x03ǜ\x03ǜ\x03ǝ')
buf.write('\x03ǝ\x03ǝ\x03ǝ\x03ǝ\x03ǝ\x03ǝ\x03ǝ')
buf.write('\x03ǝ\x03Ǟ\x03Ǟ\x03Ǟ\x03Ǟ\x03Ǟ\x03Ǟ')
buf.write('\x03ǟ\x03ǟ\x03ǟ\x03ǟ\x03ǟ\x03ǟ\x03ǟ')
buf.write('\x03Ǡ\x03Ǡ\x03Ǡ\x03Ǡ\x03Ǡ\x03ǡ\x03ǡ')
buf.write('\x03ǡ\x03ǡ\x03ǡ\x03ǡ\x03ǡ\x03Ǣ\x03Ǣ')
buf.write('\x03Ǣ\x03Ǣ\x03Ǣ\x03Ǣ\x07Ǣ፨\nǢ')
buf.write('\x0cǢ\x0eǢ፫\x0bǢ\x03Ǣ\x03Ǣ\x03ǣ')
buf.write('\x03ǣ\x03ǣ\x07ǣ፲\nǣ\x0cǣ\x0eǣ')
buf.write('፵\x0bǣ\x03ǣ\x06ǣ፸\nǣ\rǣ')
buf.write('\x0eǣ፹\x03Ǥ\x03Ǥ\x03Ǥ\x07Ǥ\u137f')
buf.write('\nǤ\x0cǤ\x0eǤᎂ\x0bǤ\x03Ǥ\x06Ǥ')
buf.write('ᎅ\nǤ\rǤ\x0eǤᎆ\x03ǥ\x03ǥ')
buf.write('\x03ǥ\x03Ǧ\x03Ǧ\x03ǧ\x03ǧ\x03Ǩ\x03Ǩ')
buf.write('\x03Ǩ\x05Ǩ᎓\nǨ\x03Ǩ\x03Ǩ\x05Ǩ')
buf.write('᎗\nǨ\x05Ǩ᎙\nǨ\x03Ǩ\x03Ǩ\x05')
buf.write('Ǩ\u139d\nǨ\x03ǩ\x03ǩ\x03ǩ\x03ǩ\x03')
buf.write('ǩ\x07ǩᎤ\nǩ\x0cǩ\x0eǩᎧ\x0b')
buf.write('ǩ\x03ǩ\x03ǩ\x03Ǫ\x03Ǫ\x03Ǫ\x03Ǫ')
buf.write('\x03Ǫ\x05ǪᎰ\nǪ\x03Ǫ\x03Ǫ\x03ǫ')
buf.write('\x03ǫ\x03Ǭ\x03Ǭ\x03Ǭ\x07ǬᎹ\nǬ')
buf.write('\x0cǬ\x0eǬᎼ\x0bǬ\x03Ǭ\x03Ǭ\x03Ǭ')
buf.write('\x03ǭ\x03ǭ\x03ǭ\x07ǭᏄ\nǭ\x0cǭ')
buf.write('\x0eǭᏇ\x0bǭ\x03ǭ\x03ǭ\x03ǭ\x03Ǯ')
buf.write('\x03Ǯ\x03Ǯ\x07ǮᏏ\nǮ\x0cǮ\x0eǮ')
buf.write('Ꮢ\x0bǮ\x03Ǯ\x03Ǯ\x03Ǯ\x03ǯ\x03ǯ')
buf.write('\x03ǯ\x07ǯᏚ\nǯ\x0cǯ\x0eǯᏝ')
buf.write('\x0bǯ\x03ǯ\x03ǯ\x03ǯ\x03ǰ\x03ǰ\x03DZ')
buf.write('\x03DZ\x03DZ\x03DZ\x06DZᏨ\nDZ\rDZ')
buf.write('\x0eDZᏩ\x03DZ\x03DZ\x03Dz\x03Dz\x03dz')
buf.write('\x03dz\x03Ǵ\x03Ǵ\x03ǵ\x03ǵ\x03Ƕ\x03Ƕ')
buf.write('\x03Ƕ\x03Ƿ\x03Ƿ\x03Ǹ\x03Ǹ\x03ǹ\x03ǹ')
buf.write('\x03Ǻ\x03Ǻ\x03ǻ\x03ǻ\x03Ǽ\x03Ǽ\x03ǽ')
buf.write('\x03ǽ\x03ǽ\x03Ǿ\x03Ǿ\x03Ǿ\x03Ǿ\x07Ǿ')
buf.write('ᐌ\nǾ\x0cǾ\x0eǾᐏ\x0bǾ\x03Ǿ')
buf.write('\x03Ǿ\x03Ǿ\x03Ǿ\x03Ǿ\x05Ǿᐖ\nǾ')
buf.write('\x03ǿ\x03ǿ\x03Ȁ\x03Ȁ\x03ȁ\x03ȁ\x03ȁ')
buf.write('\x03Ȃ\x03Ȃ\x03ȃ\x03ȃ\x03ȃ\x03Ȅ\x03Ȅ')
buf.write('\x03Ȅ\x03Ȅ\x03Ȅ\x03Ȅ\x03Ȅ\x03Ȅ\x05Ȅ')
buf.write('ᐬ\nȄ\x03ȅ\x03ȅ\x03Ȇ\x03Ȇ\x03ȇ')
buf.write('\x03ȇ\x03Ȉ\x03Ȉ\x03ȉ\x03ȉ\x03Ȋ\x03Ȋ')
buf.write('\x03Ȋ\x03ȋ\x03ȋ\x03Ȍ\x03Ȍ\x03ȍ\x03ȍ')
buf.write('\x03Ȏ\x03Ȏ\x03ȏ\x03ȏ\x03Ȑ\x06Ȑᑆ')
buf.write('\nȐ\rȐ\x0eȐᑇ\x03Ȑ\x03Ȑ\x03ȑ')
buf.write('\x03ȑ\x03Ȓ\x06Ȓᑏ\nȒ\rȒ\x0eȒ')
buf.write('ᑐ\x03ȓ\x07ȓᑔ\nȓ\x0cȓ\x0eȓ')
buf.write('ᑗ\x0bȓ\x03ȓ\x05ȓᑚ\nȓ\x03ȓ')
buf.write('\x06ȓᑝ\nȓ\rȓ\x0eȓᑞ\x03Ȕ')
buf.write('\x03Ȕ\x03Ȕ\x03Ȕ\x07Ȕᑥ\nȔ\x0cȔ')
buf.write('\x0eȔᑨ\x0bȔ\x03Ȕ\x03Ȕ\x05Ȕᑬ')
buf.write('\nȔ\x03Ȕ\x03Ȕ\x03ȕ\x03ȕ\x03ȕ\x03ȕ')
buf.write('\x07ȕᑴ\nȕ\x0cȕ\x0eȕᑷ\x0bȕ')
buf.write('\x03ȕ\x03ȕ\x03ȕ\x03ȕ\x03ȕ\x03Ȗ\x03Ȗ')
buf.write('\x03Ȗ\x03Ȗ\x03Ȗ\x03Ȗ\x03Ȗ\x03Ȗ\x03Ȗ')
buf.write('\x07Ȗᒇ\nȖ\x0cȖ\x0eȖᒊ\x0bȖ')
buf.write('\x03Ȗ\x03Ȗ\x05Ȗᒎ\nȖ\x03ȗ\x05ȗ')
buf.write('ᒑ\nȗ\x03ȗ\x03ȗ\x03Ș\x03Ș\x03ș')
buf.write('\x03ș\x03ș\x07șᒚ\nș\x0cș\x0eș')
buf.write('ᒝ\x0bș\x03Ț\x03Ț\x03Ț\x03Ț\x03Ț')
buf.write('\x03ț\x03ț\x03Ȝ\x03Ȝ\x03ȝ\x03ȝ\x03Ȟ')
buf.write('\x03Ȟ\x03ȟ\x03ȟ\x03Ƞ\x03Ƞ\x03ȡ\x03ȡ')
buf.write('\x03Ȣ\x03Ȣ\x03ȣ\x03ȣ\x03Ȥ\x03Ȥ\x03ȥ')
buf.write('\x03ȥ\x03Ȧ\x03Ȧ\x03ȧ\x03ȧ\x03Ȩ\x03Ȩ')
buf.write('\x03ȩ\x03ȩ\x03Ȫ\x03Ȫ\x03ȫ\x03ȫ\x03Ȭ')
buf.write('\x03Ȭ\x03ȭ\x03ȭ\x03Ȯ\x03Ȯ\x03ȯ\x03ȯ')
buf.write('\x03Ȱ\x03Ȱ\x03ȱ\x03ȱ\x03Ȳ\x03Ȳ\x03ȳ')
buf.write('\x03ȳ\x03ȴ\x03ȴ\x07ᎺᏅᏐᏛᑵ')
buf.write(
'\x02ȵ\x03\x03\x05\x04\x07\x05\t\x06\x0b\x07\r\x08\x0f\t\x11\n\x13\x0b\x15\x0c'
)
buf.write(
"\x17\r\x19\x0e\x1b\x0f\x1d\x10\x1f\x11!\x12#\x13%\x14'\x15)\x16+\x17"
)
buf.write('-\x18/\x191\x1a3\x1b5\x1c7\x1d9\x1e;\x1f= ?!A"C#E$G%')
buf.write("I&K'M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i6k7")
buf.write('m8o9q:s;u<w=y>{?}@\x7fA\x81B\x83C\x85D\x87E\x89')
buf.write('F\x8bG\x8dH\x8fI\x91J\x93K\x95L\x97M\x99')
buf.write('N\x9bO\x9dP\x9fQ¡R£S¥T§U©')
buf.write('V«W\xadX¯Y±Z³[µ\\·]¹')
buf.write('^»_½`¿aÁbÃcÅdÇeÉ')
buf.write('fËgÍhÏiÑjÓkÕl×mÙ')
buf.write('nÛoÝpßqárãsåtçué')
buf.write('vëwíxïyñzó{õ|÷}ù')
buf.write('~û\x7fý\x80ÿ\x81ā\x82ă')
buf.write('\x83ą\x84ć\x85ĉ\x86ċ\x87')
buf.write('č\x88ď\x89đ\x8aē\x8bĕ')
buf.write('\x8cė\x8dę\x8eě\x8fĝ\x90')
buf.write('ğ\x91ġ\x92ģ\x93ĥ\x94ħ')
buf.write('\x95ĩ\x96ī\x97ĭ\x98į\x99')
buf.write('ı\x9aij\x9bĵ\x9cķ\x9dĹ')
buf.write('\x9eĻ\x9fĽ\xa0Ŀ¡Ł¢')
buf.write('Ń£Ņ¤Ň¥ʼn¦ŋ')
buf.write('§ō¨ŏ©őªœ«')
buf.write('ŕ¬ŗ\xadř®ś¯ŝ')
buf.write('°ş±š²ţ³ť´')
buf.write('ŧµũ¶ū·ŭ¸ů')
buf.write('¹űºų»ŵ¼ŷ½')
buf.write('Ź¾Ż¿ŽÀſÁƁ')
buf.write('ÂƃÃƅÄƇÅƉÆ')
buf.write('ƋÇƍÈƏÉƑÊƓ')
buf.write('ËƕÌƗÍƙÎƛÏ')
buf.write('ƝÐƟÑơÒƣÓƥ')
buf.write('ÔƧÕƩÖƫ×ƭØ')
buf.write('ƯÙƱÚƳÛƵÜƷ')
buf.write('ÝƹÞƻßƽàƿá')
buf.write('ǁâǃãDžäLJålj')
buf.write('æNjçǍèǏéǑê')
buf.write('ǓëǕìǗíǙîǛ')
buf.write('ïǝðǟñǡòǣó')
buf.write('ǥôǧõǩöǫ÷ǭ')
buf.write('øǯùDZúdzûǵü')
buf.write('ǷýǹþǻÿǽĀǿ')
buf.write('āȁĂȃăȅĄȇą')
buf.write('ȉĆȋćȍĈȏĉȑ')
buf.write('ĊȓċȕČȗčșĎ')
buf.write('țďȝĐȟđȡĒȣ')
buf.write('ēȥĔȧĕȩĖȫė')
buf.write('ȭĘȯęȱĚȳěȵ')
buf.write('ĜȷĝȹĞȻğȽĠ')
buf.write('ȿġɁĢɃģɅĤɇ')
buf.write('ĥɉĦɋħɍĨɏĩ')
buf.write('ɑĪɓīɕĬɗĭə')
buf.write('ĮɛįɝİɟıɡIJ')
buf.write('ɣijɥĴɧĵɩĶɫ')
buf.write('ķɭĸɯĹɱĺɳĻ')
buf.write('ɵļɷĽɹľɻĿɽ')
buf.write('ŀɿŁʁłʃŃʅń')
buf.write('ʇŅʉņʋŇʍňʏ')
buf.write('ʼnʑŊʓŋʕŌʗō')
buf.write('ʙŎʛŏʝŐʟőʡ')
buf.write('ŒʣœʥŔʧŕʩŖ')
buf.write('ʫŗʭŘʯřʱŚʳ')
buf.write('śʵŜʷŝʹŞʻş')
buf.write('ʽŠʿšˁŢ˃ţ˅')
buf.write('ŤˇťˉŦˋŧˍŨ')
buf.write('ˏũˑŪ˓ū˕Ŭ˗')
buf.write('ŭ˙ٲů˝Ű˟ű')
buf.write('ˡŲˣų˥Ŵ˧ŵ˩')
buf.write('Ŷ˫ŷ˭Ÿ˯Ź˱ź')
buf.write('˳Ż˵ż˷Ž˹ž˻')
buf.write('ſ˽ƀ˿Ɓ́Ƃ̃ƃ')
buf.write('̅Ƅ̇ƅ̉Ɔ̋Ƈ̍')
buf.write('ƈ̏Ɖ̑Ɗ̓Ƌ̕ƌ')
buf.write('̗ƍ̙Ǝ̛Ə̝Ɛ̟')
buf.write('Ƒ̡ƒ̣Ɠ̥Ɣ̧ƕ')
buf.write('̩Ɩ̫Ɨ̭Ƙ̯ƙ̱')
buf.write('ƚ̳ƛ̵Ɯ̷Ɲ̹ƞ')
buf.write('̻Ɵ̽Ơ̿ớƢ̓')
buf.write('ƣͅƤ͇ƥ͉Ʀ͋Ƨ')
buf.write('͍ƨ͏Ʃ͑ƪ͓ƫ͕')
buf.write('Ƭ͗ƭ͙Ʈ͛Ư͝ư')
buf.write('͟Ʊ͡ƲͣƳͥƴͧ')
buf.write('ƵͩƶͫƷͭƸͯƹ')
buf.write('ͱƺͳƻ͵Ƽͷƽ\u0379')
buf.write('ƾͻƿͽǀͿǁ\u0381ǂ')
buf.write('\u0383ǃ΅DŽ·DžΉdž\u038b')
buf.write('LJ\u038dLjΏljΑNJΓNj')
buf.write('ΕnjΗǍΙǎΛǏΝ')
buf.write('ǐΟǑΡǒΣǓΥǔ')
buf.write('ΧǕΩǖΫǗέǘί')
buf.write('ǙαǚγǛεǜηǝ')
buf.write('ιǞλǟνǠοǡρ')
buf.write('ǢσǣυǤχǥωǦ')
buf.write('ϋǧύǨϏǩϑǪϓ')
buf.write('\x02ϕ\x02ϗ\x02ϙ\x02ϛ\x02ϝ\x02ϟ\x02ϡ')
buf.write('ǫϣǬϥǭϧǮϩǯ')
buf.write('ϫǰϭDZϯDzϱdzϳ')
buf.write('ǴϵǵϷǶϹǷϻǸ')
buf.write('ϽǹϿǺЁǻЃǼЅ')
buf.write('ǽЇǾЉǿЋȀЍȁ')
buf.write('ЏȂБ\x02ГȃЕȄЗȅ')
buf.write('ЙȆЛȇНȈПȉС')
buf.write('\x02У\x02Х\x02ЧȊЩȋЫȌ')
buf.write('Э\x02Я\x02бȍгȎе\x02з')
buf.write('\x02й\x02л\x02н\x02п\x02с\x02у\x02х')
buf.write('\x02ч\x02щ\x02ы\x02э\x02я\x02ё\x02ѓ')
buf.write('\x02ѕ\x02ї\x02љ\x02ћ\x02ѝ\x02џ\x02ѡ')
buf.write(
"\x02ѣ\x02ѥ\x02ѧ\x02\x03\x02'\x05\x02\x0c\x0c\x0f\x0f))\x05\x022")
buf.write(
';CHch\x04\x02GGgg\x04\x02--//\t\x02\x0b\x0c\x0f\x0f""**>>]]}}\x05\x02\x0c'
)
buf.write(
'\x0c\x0f\x0f$$\x04\x022;aa\x05\x02\x0b\x0c\x0f\x0f""\x04\x02C\\c|\x04\x02\x0c'
)
buf.write(
'\x0c\x0f\x0f\x04\x02\x0b\x0b""\x05\x02%&2;aa\x04\x02CCcc\x04\x02DDdd\x04\x02'
)
buf.write(
'EEee\x04\x02FFff\x04\x02HHhh\x04\x02IIii\x04\x02JJjj\x04\x02KKkk\x04\x02LLll\x04'
)
buf.write(
'\x02MMmm\x04\x02NNnn\x04\x02OOoo\x04\x02PPpp\x04\x02QQqq\x04\x02RRrr\x04\x02SSs'
)
buf.write(
's\x04\x02TTtt\x04\x02UUuu\x04\x02VVvv\x04\x02WWww\x04\x02XXxx\x04\x02YYyy\x04\x02'
)
buf.write(
'ZZzz\x04\x02[[{{\x04\x02\\\\||\x02ᓝ\x02\x03\x03\x02\x02\x02\x02\x05\x03\x02\x02\x02'
)
buf.write(
'\x02\x07\x03\x02\x02\x02\x02\t\x03\x02\x02\x02\x02\x0b\x03\x02\x02\x02\x02\r\x03\x02\x02\x02\x02\x0f'
)
buf.write(
'\x03\x02\x02\x02\x02\x11\x03\x02\x02\x02\x02\x13\x03\x02\x02\x02\x02\x15\x03\x02\x02\x02\x02\x17\x03'
)
buf.write(
'\x02\x02\x02\x02\x19\x03\x02\x02\x02\x02\x1b\x03\x02\x02\x02\x02\x1d\x03\x02\x02\x02\x02\x1f\x03\x02'
)
buf.write(
"\x02\x02\x02!\x03\x02\x02\x02\x02#\x03\x02\x02\x02\x02%\x03\x02\x02\x02\x02'\x03\x02\x02\x02\x02)\x03"
)
buf.write(
'\x02\x02\x02\x02+\x03\x02\x02\x02\x02-\x03\x02\x02\x02\x02/\x03\x02\x02\x02\x021\x03\x02\x02\x02\x02'
)
buf.write(
'3\x03\x02\x02\x02\x025\x03\x02\x02\x02\x027\x03\x02\x02\x02\x029\x03\x02\x02\x02\x02;\x03'
)
buf.write(
'\x02\x02\x02\x02=\x03\x02\x02\x02\x02?\x03\x02\x02\x02\x02A\x03\x02\x02\x02\x02C\x03\x02\x02\x02\x02E'
)
buf.write(
'\x03\x02\x02\x02\x02G\x03\x02\x02\x02\x02I\x03\x02\x02\x02\x02K\x03\x02\x02\x02\x02M\x03\x02\x02\x02\x02'
)
buf.write(
'O\x03\x02\x02\x02\x02Q\x03\x02\x02\x02\x02S\x03\x02\x02\x02\x02U\x03\x02\x02\x02\x02W\x03\x02\x02\x02'
)
buf.write(
'\x02Y\x03\x02\x02\x02\x02[\x03\x02\x02\x02\x02]\x03\x02\x02\x02\x02_\x03\x02\x02\x02\x02a\x03\x02\x02'
)
buf.write(
'\x02\x02c\x03\x02\x02\x02\x02e\x03\x02\x02\x02\x02g\x03\x02\x02\x02\x02i\x03\x02\x02\x02\x02k\x03\x02'
)
buf.write(
'\x02\x02\x02m\x03\x02\x02\x02\x02o\x03\x02\x02\x02\x02q\x03\x02\x02\x02\x02s\x03\x02\x02\x02\x02u\x03'
)
buf.write(
'\x02\x02\x02\x02w\x03\x02\x02\x02\x02y\x03\x02\x02\x02\x02{\x03\x02\x02\x02\x02}\x03\x02\x02\x02\x02\x7f'
)
buf.write(
'\x03\x02\x02\x02\x02\x81\x03\x02\x02\x02\x02\x83\x03\x02\x02\x02\x02\x85\x03\x02\x02'
)
buf.write(
'\x02\x02\x87\x03\x02\x02\x02\x02\x89\x03\x02\x02\x02\x02\x8b\x03\x02\x02\x02\x02\x8d'
)
buf.write(
'\x03\x02\x02\x02\x02\x8f\x03\x02\x02\x02\x02\x91\x03\x02\x02\x02\x02\x93\x03\x02\x02'
)
buf.write(
'\x02\x02\x95\x03\x02\x02\x02\x02\x97\x03\x02\x02\x02\x02\x99\x03\x02\x02\x02\x02\x9b'
)
buf.write(
'\x03\x02\x02\x02\x02\x9d\x03\x02\x02\x02\x02\x9f\x03\x02\x02\x02\x02¡\x03\x02\x02'
)
buf.write(
'\x02\x02£\x03\x02\x02\x02\x02¥\x03\x02\x02\x02\x02§\x03\x02\x02\x02\x02©'
)
buf.write(
'\x03\x02\x02\x02\x02«\x03\x02\x02\x02\x02\xad\x03\x02\x02\x02\x02¯\x03\x02\x02'
)
buf.write(
'\x02\x02±\x03\x02\x02\x02\x02³\x03\x02\x02\x02\x02µ\x03\x02\x02\x02\x02·'
)
buf.write(
'\x03\x02\x02\x02\x02¹\x03\x02\x02\x02\x02»\x03\x02\x02\x02\x02½\x03\x02\x02'
)
buf.write(
'\x02\x02¿\x03\x02\x02\x02\x02Á\x03\x02\x02\x02\x02Ã\x03\x02\x02\x02\x02Å'
)
buf.write(
'\x03\x02\x02\x02\x02Ç\x03\x02\x02\x02\x02É\x03\x02\x02\x02\x02Ë\x03\x02\x02'
)
buf.write(
'\x02\x02Í\x03\x02\x02\x02\x02Ï\x03\x02\x02\x02\x02Ñ\x03\x02\x02\x02\x02Ó'
)
buf.write(
'\x03\x02\x02\x02\x02Õ\x03\x02\x02\x02\x02×\x03\x02\x02\x02\x02Ù\x03\x02\x02'
)
buf.write(
'\x02\x02Û\x03\x02\x02\x02\x02Ý\x03\x02\x02\x02\x02ß\x03\x02\x02\x02\x02á'
)
buf.write(
'\x03\x02\x02\x02\x02ã\x03\x02\x02\x02\x02å\x03\x02\x02\x02\x02ç\x03\x02\x02'
)
buf.write(
'\x02\x02é\x03\x02\x02\x02\x02ë\x03\x02\x02\x02\x02í\x03\x02\x02\x02\x02ï'
)
buf.write(
'\x03\x02\x02\x02\x02ñ\x03\x02\x02\x02\x02ó\x03\x02\x02\x02\x02õ\x03\x02\x02'
)
buf.write(
'\x02\x02÷\x03\x02\x02\x02\x02ù\x03\x02\x02\x02\x02û\x03\x02\x02\x02\x02ý'
)
buf.write(
'\x03\x02\x02\x02\x02ÿ\x03\x02\x02\x02\x02ā\x03\x02\x02\x02\x02ă\x03\x02\x02'
)
buf.write(
'\x02\x02ą\x03\x02\x02\x02\x02ć\x03\x02\x02\x02\x02ĉ\x03\x02\x02\x02\x02ċ'
)
buf.write(
'\x03\x02\x02\x02\x02č\x03\x02\x02\x02\x02ď\x03\x02\x02\x02\x02đ\x03\x02\x02'
)
buf.write(
'\x02\x02ē\x03\x02\x02\x02\x02ĕ\x03\x02\x02\x02\x02ė\x03\x02\x02\x02\x02ę'
)
buf.write(
'\x03\x02\x02\x02\x02ě\x03\x02\x02\x02\x02ĝ\x03\x02\x02\x02\x02ğ\x03\x02\x02'
)
buf.write(
'\x02\x02ġ\x03\x02\x02\x02\x02ģ\x03\x02\x02\x02\x02ĥ\x03\x02\x02\x02\x02ħ'
)
buf.write(
'\x03\x02\x02\x02\x02ĩ\x03\x02\x02\x02\x02ī\x03\x02\x02\x02\x02ĭ\x03\x02\x02'
)
buf.write(
'\x02\x02į\x03\x02\x02\x02\x02ı\x03\x02\x02\x02\x02ij\x03\x02\x02\x02\x02ĵ'
)
buf.write(
'\x03\x02\x02\x02\x02ķ\x03\x02\x02\x02\x02Ĺ\x03\x02\x02\x02\x02Ļ\x03\x02\x02'
)
buf.write(
'\x02\x02Ľ\x03\x02\x02\x02\x02Ŀ\x03\x02\x02\x02\x02Ł\x03\x02\x02\x02\x02Ń'
)
buf.write(
'\x03\x02\x02\x02\x02Ņ\x03\x02\x02\x02\x02Ň\x03\x02\x02\x02\x02ʼn\x03\x02\x02'
)
buf.write(
'\x02\x02ŋ\x03\x02\x02\x02\x02ō\x03\x02\x02\x02\x02ŏ\x03\x02\x02\x02\x02ő'
)
buf.write(
'\x03\x02\x02\x02\x02œ\x03\x02\x02\x02\x02ŕ\x03\x02\x02\x02\x02ŗ\x03\x02\x02'
)
buf.write(
'\x02\x02ř\x03\x02\x02\x02\x02ś\x03\x02\x02\x02\x02ŝ\x03\x02\x02\x02\x02ş'
)
buf.write(
'\x03\x02\x02\x02\x02š\x03\x02\x02\x02\x02ţ\x03\x02\x02\x02\x02ť\x03\x02\x02'
)
buf.write(
'\x02\x02ŧ\x03\x02\x02\x02\x02ũ\x03\x02\x02\x02\x02ū\x03\x02\x02\x02\x02ŭ'
)
buf.write(
'\x03\x02\x02\x02\x02ů\x03\x02\x02\x02\x02ű\x03\x02\x02\x02\x02ų\x03\x02\x02'
)
buf.write(
'\x02\x02ŵ\x03\x02\x02\x02\x02ŷ\x03\x02\x02\x02\x02Ź\x03\x02\x02\x02\x02Ż'
)
buf.write(
'\x03\x02\x02\x02\x02Ž\x03\x02\x02\x02\x02ſ\x03\x02\x02\x02\x02Ɓ\x03\x02\x02'
)
buf.write(
'\x02\x02ƃ\x03\x02\x02\x02\x02ƅ\x03\x02\x02\x02\x02Ƈ\x03\x02\x02\x02\x02Ɖ'
)
buf.write(
'\x03\x02\x02\x02\x02Ƌ\x03\x02\x02\x02\x02ƍ\x03\x02\x02\x02\x02Ə\x03\x02\x02'
)
buf.write(
'\x02\x02Ƒ\x03\x02\x02\x02\x02Ɠ\x03\x02\x02\x02\x02ƕ\x03\x02\x02\x02\x02Ɨ'
)
buf.write(
'\x03\x02\x02\x02\x02ƙ\x03\x02\x02\x02\x02ƛ\x03\x02\x02\x02\x02Ɲ\x03\x02\x02'
)
buf.write(
'\x02\x02Ɵ\x03\x02\x02\x02\x02ơ\x03\x02\x02\x02\x02ƣ\x03\x02\x02\x02\x02ƥ'
)
buf.write(
'\x03\x02\x02\x02\x02Ƨ\x03\x02\x02\x02\x02Ʃ\x03\x02\x02\x02\x02ƫ\x03\x02\x02'
)
buf.write(
'\x02\x02ƭ\x03\x02\x02\x02\x02Ư\x03\x02\x02\x02\x02Ʊ\x03\x02\x02\x02\x02Ƴ'
)
buf.write(
'\x03\x02\x02\x02\x02Ƶ\x03\x02\x02\x02\x02Ʒ\x03\x02\x02\x02\x02ƹ\x03\x02\x02'
)
buf.write(
'\x02\x02ƻ\x03\x02\x02\x02\x02ƽ\x03\x02\x02\x02\x02ƿ\x03\x02\x02\x02\x02ǁ'
)
buf.write(
'\x03\x02\x02\x02\x02ǃ\x03\x02\x02\x02\x02Dž\x03\x02\x02\x02\x02LJ\x03\x02\x02'
)
buf.write(
'\x02\x02lj\x03\x02\x02\x02\x02Nj\x03\x02\x02\x02\x02Ǎ\x03\x02\x02\x02\x02Ǐ'
)
buf.write(
'\x03\x02\x02\x02\x02Ǒ\x03\x02\x02\x02\x02Ǔ\x03\x02\x02\x02\x02Ǖ\x03\x02\x02'
)
buf.write(
'\x02\x02Ǘ\x03\x02\x02\x02\x02Ǚ\x03\x02\x02\x02\x02Ǜ\x03\x02\x02\x02\x02ǝ'
)
buf.write(
'\x03\x02\x02\x02\x02ǟ\x03\x02\x02\x02\x02ǡ\x03\x02\x02\x02\x02ǣ\x03\x02\x02'
)
buf.write(
'\x02\x02ǥ\x03\x02\x02\x02\x02ǧ\x03\x02\x02\x02\x02ǩ\x03\x02\x02\x02\x02ǫ'
)
buf.write(
'\x03\x02\x02\x02\x02ǭ\x03\x02\x02\x02\x02ǯ\x03\x02\x02\x02\x02DZ\x03\x02\x02'
)
buf.write(
'\x02\x02dz\x03\x02\x02\x02\x02ǵ\x03\x02\x02\x02\x02Ƿ\x03\x02\x02\x02\x02ǹ'
)
buf.write(
'\x03\x02\x02\x02\x02ǻ\x03\x02\x02\x02\x02ǽ\x03\x02\x02\x02\x02ǿ\x03\x02\x02'
)
buf.write(
'\x02\x02ȁ\x03\x02\x02\x02\x02ȃ\x03\x02\x02\x02\x02ȅ\x03\x02\x02\x02\x02ȇ'
)
buf.write(
'\x03\x02\x02\x02\x02ȉ\x03\x02\x02\x02\x02ȋ\x03\x02\x02\x02\x02ȍ\x03\x02\x02'
)
buf.write(
'\x02\x02ȏ\x03\x02\x02\x02\x02ȑ\x03\x02\x02\x02\x02ȓ\x03\x02\x02\x02\x02ȕ'
)
buf.write(
'\x03\x02\x02\x02\x02ȗ\x03\x02\x02\x02\x02ș\x03\x02\x02\x02\x02ț\x03\x02\x02'
)
buf.write(
'\x02\x02ȝ\x03\x02\x02\x02\x02ȟ\x03\x02\x02\x02\x02ȡ\x03\x02\x02\x02\x02ȣ'
)
buf.write(
'\x03\x02\x02\x02\x02ȥ\x03\x02\x02\x02\x02ȧ\x03\x02\x02\x02\x02ȩ\x03\x02\x02'
)
buf.write(
'\x02\x02ȫ\x03\x02\x02\x02\x02ȭ\x03\x02\x02\x02\x02ȯ\x03\x02\x02\x02\x02ȱ'
)
buf.write(
'\x03\x02\x02\x02\x02ȳ\x03\x02\x02\x02\x02ȵ\x03\x02\x02\x02\x02ȷ\x03\x02\x02'
)
buf.write(
'\x02\x02ȹ\x03\x02\x02\x02\x02Ȼ\x03\x02\x02\x02\x02Ƚ\x03\x02\x02\x02\x02ȿ'
)
buf.write(
'\x03\x02\x02\x02\x02Ɂ\x03\x02\x02\x02\x02Ƀ\x03\x02\x02\x02\x02Ʌ\x03\x02\x02'
)
buf.write(
'\x02\x02ɇ\x03\x02\x02\x02\x02ɉ\x03\x02\x02\x02\x02ɋ\x03\x02\x02\x02\x02ɍ'
)
buf.write(
'\x03\x02\x02\x02\x02ɏ\x03\x02\x02\x02\x02ɑ\x03\x02\x02\x02\x02ɓ\x03\x02\x02'
)
buf.write(
'\x02\x02ɕ\x03\x02\x02\x02\x02ɗ\x03\x02\x02\x02\x02ə\x03\x02\x02\x02\x02ɛ'
)
buf.write(
'\x03\x02\x02\x02\x02ɝ\x03\x02\x02\x02\x02ɟ\x03\x02\x02\x02\x02ɡ\x03\x02\x02'
)
buf.write(
'\x02\x02ɣ\x03\x02\x02\x02\x02ɥ\x03\x02\x02\x02\x02ɧ\x03\x02\x02\x02\x02ɩ'
)
buf.write(
'\x03\x02\x02\x02\x02ɫ\x03\x02\x02\x02\x02ɭ\x03\x02\x02\x02\x02ɯ\x03\x02\x02'
)
buf.write(
'\x02\x02ɱ\x03\x02\x02\x02\x02ɳ\x03\x02\x02\x02\x02ɵ\x03\x02\x02\x02\x02ɷ'
)
buf.write(
'\x03\x02\x02\x02\x02ɹ\x03\x02\x02\x02\x02ɻ\x03\x02\x02\x02\x02ɽ\x03\x02\x02'
)
buf.write(
'\x02\x02ɿ\x03\x02\x02\x02\x02ʁ\x03\x02\x02\x02\x02ʃ\x03\x02\x02\x02\x02ʅ'
)
buf.write(
'\x03\x02\x02\x02\x02ʇ\x03\x02\x02\x02\x02ʉ\x03\x02\x02\x02\x02ʋ\x03\x02\x02'
)
buf.write(
'\x02\x02ʍ\x03\x02\x02\x02\x02ʏ\x03\x02\x02\x02\x02ʑ\x03\x02\x02\x02\x02ʓ'
)
buf.write(
'\x03\x02\x02\x02\x02ʕ\x03\x02\x02\x02\x02ʗ\x03\x02\x02\x02\x02ʙ\x03\x02\x02'
)
buf.write(
'\x02\x02ʛ\x03\x02\x02\x02\x02ʝ\x03\x02\x02\x02\x02ʟ\x03\x02\x02\x02\x02ʡ'
)
buf.write(
'\x03\x02\x02\x02\x02ʣ\x03\x02\x02\x02\x02ʥ\x03\x02\x02\x02\x02ʧ\x03\x02\x02'
)
buf.write(
'\x02\x02ʩ\x03\x02\x02\x02\x02ʫ\x03\x02\x02\x02\x02ʭ\x03\x02\x02\x02\x02ʯ'
)
buf.write(
'\x03\x02\x02\x02\x02ʱ\x03\x02\x02\x02\x02ʳ\x03\x02\x02\x02\x02ʵ\x03\x02\x02'
)
buf.write(
'\x02\x02ʷ\x03\x02\x02\x02\x02ʹ\x03\x02\x02\x02\x02ʻ\x03\x02\x02\x02\x02ʽ'
)
buf.write(
'\x03\x02\x02\x02\x02ʿ\x03\x02\x02\x02\x02ˁ\x03\x02\x02\x02\x02˃\x03\x02\x02'
)
buf.write(
'\x02\x02˅\x03\x02\x02\x02\x02ˇ\x03\x02\x02\x02\x02ˉ\x03\x02\x02\x02\x02ˋ'
)
buf.write(
'\x03\x02\x02\x02\x02ˍ\x03\x02\x02\x02\x02ˏ\x03\x02\x02\x02\x02ˑ\x03\x02\x02'
)
buf.write(
'\x02\x02˓\x03\x02\x02\x02\x02˕\x03\x02\x02\x02\x02˗\x03\x02\x02\x02\x02˙'
)
buf.write(
'\x03\x02\x02\x02\x02˛\x03\x02\x02\x02\x02˝\x03\x02\x02\x02\x02˟\x03\x02\x02'
)
buf.write(
'\x02\x02ˡ\x03\x02\x02\x02\x02ˣ\x03\x02\x02\x02\x02˥\x03\x02\x02\x02\x02˧'
)
buf.write(
'\x03\x02\x02\x02\x02˩\x03\x02\x02\x02\x02˫\x03\x02\x02\x02\x02˭\x03\x02\x02'
)
buf.write(
'\x02\x02˯\x03\x02\x02\x02\x02˱\x03\x02\x02\x02\x02˳\x03\x02\x02\x02\x02˵'
)
buf.write(
'\x03\x02\x02\x02\x02˷\x03\x02\x02\x02\x02˹\x03\x02\x02\x02\x02˻\x03\x02\x02'
)
buf.write(
'\x02\x02˽\x03\x02\x02\x02\x02˿\x03\x02\x02\x02\x02́\x03\x02\x02\x02\x02̃'
)
buf.write(
'\x03\x02\x02\x02\x02̅\x03\x02\x02\x02\x02̇\x03\x02\x02\x02\x02̉\x03\x02\x02'
)
buf.write(
'\x02\x02̋\x03\x02\x02\x02\x02̍\x03\x02\x02\x02\x02̏\x03\x02\x02\x02\x02̑'
)
buf.write(
'\x03\x02\x02\x02\x02̓\x03\x02\x02\x02\x02̕\x03\x02\x02\x02\x02̗\x03\x02\x02'
)
buf.write(
'\x02\x02̙\x03\x02\x02\x02\x02̛\x03\x02\x02\x02\x02̝\x03\x02\x02\x02\x02̟'
)
buf.write(
'\x03\x02\x02\x02\x02̡\x03\x02\x02\x02\x02̣\x03\x02\x02\x02\x02̥\x03\x02\x02'
)
buf.write(
'\x02\x02̧\x03\x02\x02\x02\x02̩\x03\x02\x02\x02\x02̫\x03\x02\x02\x02\x02̭'
)
buf.write(
'\x03\x02\x02\x02\x02̯\x03\x02\x02\x02\x02̱\x03\x02\x02\x02\x02̳\x03\x02\x02'
)
buf.write(
'\x02\x02̵\x03\x02\x02\x02\x02̷\x03\x02\x02\x02\x02̹\x03\x02\x02\x02\x02̻'
)
buf.write(
'\x03\x02\x02\x02\x02̽\x03\x02\x02\x02\x02̿\x03\x02\x02\x02\x02́\x03\x02\x02'
)
buf.write(
'\x02\x02̓\x03\x02\x02\x02\x02ͅ\x03\x02\x02\x02\x02͇\x03\x02\x02\x02\x02͉'
)
buf.write(
'\x03\x02\x02\x02\x02͋\x03\x02\x02\x02\x02͍\x03\x02\x02\x02\x02͏\x03\x02\x02'
)
buf.write(
'\x02\x02͑\x03\x02\x02\x02\x02͓\x03\x02\x02\x02\x02͕\x03\x02\x02\x02\x02͗'
)
buf.write(
'\x03\x02\x02\x02\x02͙\x03\x02\x02\x02\x02͛\x03\x02\x02\x02\x02͝\x03\x02\x02'
)
buf.write(
'\x02\x02͟\x03\x02\x02\x02\x02͡\x03\x02\x02\x02\x02ͣ\x03\x02\x02\x02\x02ͥ'
)
buf.write(
'\x03\x02\x02\x02\x02ͧ\x03\x02\x02\x02\x02ͩ\x03\x02\x02\x02\x02ͫ\x03\x02\x02'
)
buf.write(
'\x02\x02ͭ\x03\x02\x02\x02\x02ͯ\x03\x02\x02\x02\x02ͱ\x03\x02\x02\x02\x02ͳ'
)
buf.write(
'\x03\x02\x02\x02\x02͵\x03\x02\x02\x02\x02ͷ\x03\x02\x02\x02\x02\u0379\x03\x02\x02'
)
buf.write(
'\x02\x02ͻ\x03\x02\x02\x02\x02ͽ\x03\x02\x02\x02\x02Ϳ\x03\x02\x02\x02\x02\u0381'
)
buf.write(
'\x03\x02\x02\x02\x02\u0383\x03\x02\x02\x02\x02΅\x03\x02\x02\x02\x02·\x03\x02\x02'
)
buf.write(
'\x02\x02Ή\x03\x02\x02\x02\x02\u038b\x03\x02\x02\x02\x02\u038d\x03\x02\x02\x02\x02Ώ'
)
buf.write(
'\x03\x02\x02\x02\x02Α\x03\x02\x02\x02\x02Γ\x03\x02\x02\x02\x02Ε\x03\x02\x02'
)
buf.write(
'\x02\x02Η\x03\x02\x02\x02\x02Ι\x03\x02\x02\x02\x02Λ\x03\x02\x02\x02\x02Ν'
)
buf.write(
'\x03\x02\x02\x02\x02Ο\x03\x02\x02\x02\x02Ρ\x03\x02\x02\x02\x02Σ\x03\x02\x02'
)
buf.write(
'\x02\x02Υ\x03\x02\x02\x02\x02Χ\x03\x02\x02\x02\x02Ω\x03\x02\x02\x02\x02Ϋ'
)
buf.write(
'\x03\x02\x02\x02\x02έ\x03\x02\x02\x02\x02ί\x03\x02\x02\x02\x02α\x03\x02\x02'
)
buf.write(
'\x02\x02γ\x03\x02\x02\x02\x02ε\x03\x02\x02\x02\x02η\x03\x02\x02\x02\x02ι'
)
buf.write(
'\x03\x02\x02\x02\x02λ\x03\x02\x02\x02\x02ν\x03\x02\x02\x02\x02ο\x03\x02\x02'
)
buf.write(
'\x02\x02ρ\x03\x02\x02\x02\x02σ\x03\x02\x02\x02\x02υ\x03\x02\x02\x02\x02χ'
)
buf.write(
'\x03\x02\x02\x02\x02ω\x03\x02\x02\x02\x02ϋ\x03\x02\x02\x02\x02ύ\x03\x02\x02'
)
buf.write(
'\x02\x02Ϗ\x03\x02\x02\x02\x02ϑ\x03\x02\x02\x02\x02ϓ\x03\x02\x02\x02\x02ϡ'
)
buf.write(
'\x03\x02\x02\x02\x02ϣ\x03\x02\x02\x02\x02ϥ\x03\x02\x02\x02\x02ϧ\x03\x02\x02'
)
buf.write(
'\x02\x02ϩ\x03\x02\x02\x02\x02ϫ\x03\x02\x02\x02\x02ϭ\x03\x02\x02\x02\x02ϯ'
)
buf.write(
'\x03\x02\x02\x02\x02ϱ\x03\x02\x02\x02\x02ϳ\x03\x02\x02\x02\x02ϵ\x03\x02\x02'
)
buf.write(
'\x02\x02Ϸ\x03\x02\x02\x02\x02Ϲ\x03\x02\x02\x02\x02ϻ\x03\x02\x02\x02\x02Ͻ'
)
buf.write(
'\x03\x02\x02\x02\x02Ͽ\x03\x02\x02\x02\x02Ё\x03\x02\x02\x02\x02Ѓ\x03\x02\x02'
)
buf.write(
'\x02\x02Ѕ\x03\x02\x02\x02\x02Ї\x03\x02\x02\x02\x02Љ\x03\x02\x02\x02\x02Ћ'
)
buf.write(
'\x03\x02\x02\x02\x02Ѝ\x03\x02\x02\x02\x02Џ\x03\x02\x02\x02\x02Г\x03\x02\x02'
)
buf.write(
'\x02\x02Е\x03\x02\x02\x02\x02З\x03\x02\x02\x02\x02Й\x03\x02\x02\x02\x02Л'
)
buf.write(
'\x03\x02\x02\x02\x02Н\x03\x02\x02\x02\x02П\x03\x02\x02\x02\x02Ч\x03\x02\x02'
)
buf.write(
'\x02\x02Щ\x03\x02\x02\x02\x02Ы\x03\x02\x02\x02\x02б\x03\x02\x02\x02\x02г'
)
buf.write(
'\x03\x02\x02\x02\x03ѩ\x03\x02\x02\x02\x05Ѭ\x03\x02\x02\x02\x07Ѯ\x03\x02\x02'
)
buf.write(
'\x02\tѲ\x03\x02\x02\x02\x0bѸ\x03\x02\x02\x02\rѾ\x03\x02\x02\x02\x0f'
)
buf.write(
'҈\x03\x02\x02\x02\x11Ҍ\x03\x02\x02\x02\x13Ғ\x03\x02\x02\x02\x15Қ')
buf.write(
'\x03\x02\x02\x02\x17Ҟ\x03\x02\x02\x02\x19Ң\x03\x02\x02\x02\x1bҨ\x03'
)
buf.write(
'\x02\x02\x02\x1dҫ\x03\x02\x02\x02\x1fҲ\x03\x02\x02\x02!ҹ\x03\x02\x02'
)
buf.write(
"\x02#ҽ\x03\x02\x02\x02%Ӈ\x03\x02\x02\x02'ӊ\x03\x02\x02\x02)Ӕ")
buf.write(
'\x03\x02\x02\x02+Ӛ\x03\x02\x02\x02-ӡ\x03\x02\x02\x02/Ӧ\x03\x02\x02\x02'
)
buf.write('1Ӱ\x03\x02\x02\x023ԇ\x03\x02\x02\x025ԍ\x03\x02\x02\x027')
buf.write('Ԕ\x03\x02\x02\x029Ԛ\x03\x02\x02\x02;Ԣ\x03\x02\x02\x02=Ԩ\x03'
)
buf.write(
'\x02\x02\x02?Զ\x03\x02\x02\x02AՃ\x03\x02\x02\x02CՒ\x03\x02\x02\x02E\u0557'
)
buf.write(
'\x03\x02\x02\x02G՝\x03\x02\x02\x02Iբ\x03\x02\x02\x02Kժ\x03\x02\x02\x02'
)
buf.write(
'Mկ\x03\x02\x02\x02Oշ\x03\x02\x02\x02Qռ\x03\x02\x02\x02Sտ\x03')
buf.write(
'\x02\x02\x02Uք\x03\x02\x02\x02Wֆ\x03\x02\x02\x02Y\u058c\x03\x02\x02\x02[֑'
)
buf.write(
'\x03\x02\x02\x02]֛\x03\x02\x02\x02_֣\x03\x02\x02\x02a֨\x03\x02\x02\x02'
)
buf.write(
'c֭\x03\x02\x02\x02eֲ\x03\x02\x02\x02gֺ\x03\x02\x02\x02iׄ\x03')
buf.write(
'\x02\x02\x02k\u05ca\x03\x02\x02\x02m\u05ce\x03\x02\x02\x02oד\x03\x02\x02\x02qי'
)
buf.write(
'\x03\x02\x02\x02sס\x03\x02\x02\x02uש\x03\x02\x02\x02wױ\x03\x02\x02\x02'
)
buf.write(
'y\u05f9\x03\x02\x02\x02{\u0600\x03\x02\x02\x02}؊\x03\x02\x02\x02\x7fؘ'
)
buf.write(
'\x03\x02\x02\x02\x81ؠ\x03\x02\x02\x02\x83ة\x03\x02\x02\x02\x85')
buf.write('ر\x03\x02\x02\x02\x87ف\x03\x02\x02\x02\x89ي\x03\x02\x02\x02'
)
buf.write('\x8bٕ\x03\x02\x02\x02\x8d١\x03\x02\x02\x02\x8f٭\x03')
buf.write('\x02\x02\x02\x91ٵ\x03\x02\x02\x02\x93ٽ\x03\x02\x02\x02\x95چ'
)
buf.write(
'\x03\x02\x02\x02\x97ڎ\x03\x02\x02\x02\x99ښ\x03\x02\x02\x02\x9b')
buf.write('ڪ\x03\x02\x02\x02\x9dگ\x03\x02\x02\x02\x9fڵ\x03\x02\x02\x02'
)
buf.write('¡ڼ\x03\x02\x02\x02£ۂ\x03\x02\x02\x02¥ۇ\x03')
buf.write('\x02\x02\x02§ۏ\x03\x02\x02\x02©ۜ\x03\x02\x02\x02«ۣ')
buf.write('\x03\x02\x02\x02\xadۯ\x03\x02\x02\x02¯۵\x03\x02\x02\x02±')
buf.write('ۺ\x03\x02\x02\x02³܃\x03\x02\x02\x02µ܈\x03\x02\x02\x02')
buf.write('·܌\x03\x02\x02\x02¹ܛ\x03\x02\x02\x02»ܦ\x03')
buf.write('\x02\x02\x02½ܪ\x03\x02\x02\x02¿ܰ\x03\x02\x02\x02Áܴ')
buf.write('\x03\x02\x02\x02Ãܼ\x03\x02\x02\x02Å݄\x03\x02\x02\x02Ç')
buf.write('ݎ\x03\x02\x02\x02Éݘ\x03\x02\x02\x02Ëݠ\x03\x02\x02\x02')
buf.write('Íݩ\x03\x02\x02\x02Ïݲ\x03\x02\x02\x02Ñݺ\x03')
buf.write('\x02\x02\x02Óށ\x03\x02\x02\x02Õއ\x03\x02\x02\x02×ތ')
buf.write('\x03\x02\x02\x02Ùޚ\x03\x02\x02\x02Ûޤ\x03\x02\x02\x02Ý')
buf.write('ެ\x03\x02\x02\x02ß\u07b9\x03\x02\x02\x02á߂\x03\x02\x02\x02')
buf.write('ãߋ\x03\x02\x02\x02åߒ\x03\x02\x02\x02çߗ\x03')
buf.write('\x02\x02\x02é߰\x03\x02\x02\x02ëߵ\x03\x02\x02\x02í߽')
buf.write('\x03\x02\x02\x02ïࠂ\x03\x02\x02\x02ñࠈ\x03\x02\x02\x02ó')
buf.write('ࠎ\x03\x02\x02\x02õࠕ\x03\x02\x02\x02÷ࠞ\x03\x02\x02\x02')
buf.write('ùࠢ\x03\x02\x02\x02û࠱\x03\x02\x02\x02ý࠵\x03')
buf.write('\x02\x02\x02ÿ࠼\x03\x02\x02\x02āࡃ\x03\x02\x02\x02ăࡌ')
buf.write('\x03\x02\x02\x02ąࡓ\x03\x02\x02\x02ć\u085d\x03\x02\x02\x02ĉ')
buf.write('\u086c\x03\x02\x02\x02ċࡷ\x03\x02\x02\x02čࡿ\x03\x02\x02\x02')
buf.write('ďࢉ\x03\x02\x02\x02đ\u0891\x03\x02\x02\x02ē࢘\x03')
buf.write('\x02\x02\x02ĕ࢝\x03\x02\x02\x02ėࢥ\x03\x02\x02\x02ęࢮ')
buf.write('\x03\x02\x02\x02ěࢶ\x03\x02\x02\x02ĝࢾ\x03\x02\x02\x02ğ')
buf.write('ࣄ\x03\x02\x02\x02ġ࣊\x03\x02\x02\x02ģ࣐\x03\x02\x02\x02')
buf.write('ĥࣖ\x03\x02\x02\x02ħ\u08e2\x03\x02\x02\x02ĩࣨ\x03')
buf.write('\x02\x02\x02īࣲ\x03\x02\x02\x02ĭࣺ\x03\x02\x02\x02įࣾ')
buf.write('\x03\x02\x02\x02ıअ\x03\x02\x02\x02ijऋ\x03\x02\x02\x02ĵ')
buf.write('ऐ\x03\x02\x02\x02ķक\x03\x02\x02\x02Ĺञ\x03\x02\x02\x02')
buf.write('Ļण\x03\x02\x02\x02Ľऩ\x03\x02\x02\x02Ŀय\x03')
buf.write('\x02\x02\x02Łस\x03\x02\x02\x02Ńऽ\x03\x02\x02\x02Ņॄ')
buf.write('\x03\x02\x02\x02Ňॉ\x03\x02\x02\x02ʼnॎ\x03\x02\x02\x02ŋ')
buf.write('॑\x03\x02\x02\x02ōक़\x03\x02\x02\x02ŏॢ\x03\x02\x02\x02')
buf.write('ő॥\x03\x02\x02\x02œ७\x03\x02\x02\x02ŕॷ\x03')
buf.write('\x02\x02\x02ŗঁ\x03\x02\x02\x02řঈ\x03\x02\x02\x02ś\u098e')
buf.write('\x03\x02\x02\x02ŝখ\x03\x02\x02\x02şঠ\x03\x02\x02\x02š')
buf.write('ন\x03\x02\x02\x02ţ\u09b1\x03\x02\x02\x02ťস\x03\x02\x02\x02')
buf.write('ŧা\x03\x02\x02\x02ũৄ\x03\x02\x02\x02ūো\x03')
buf.write(
'\x02\x02\x02ŭ\u09d8\x03\x02\x02\x02ůৠ\x03\x02\x02\x02ű\u09e4')
buf.write('\x03\x02\x02\x02ų৬\x03\x02\x02\x02ŵ৶\x03\x02\x02\x02ŷ')
buf.write(
'\u09ff\x03\x02\x02\x02Ź\u0a04\x03\x02\x02\x02Żਏ\x03\x02\x02\x02')
buf.write('Ž\u0a12\x03\x02\x02\x02ſਜ\x03\x02\x02\x02Ɓਤ\x03')
buf.write('\x02\x02\x02ƃ\u0a29\x03\x02\x02\x02ƅਮ\x03\x02\x02\x02Ƈਲ਼')
buf.write('\x03\x02\x02\x02Ɖ਼\x03\x02\x02\x02Ƌੁ\x03\x02\x02\x02ƍ')
buf.write('ੌ\x03\x02\x02\x02Ə\u0a54\x03\x02\x02\x02Ƒਖ਼\x03\x02\x02\x02')
buf.write('Ɠ\u0a5f\x03\x02\x02\x02ƕ੧\x03\x02\x02\x02Ɨ੬\x03')
buf.write(
'\x02\x02\x02ƙੲ\x03\x02\x02\x02ƛ\u0a78\x03\x02\x02\x02Ɲ\u0a7e')
buf.write('\x03\x02\x02\x02Ɵ\u0a84\x03\x02\x02\x02ơઊ\x03\x02\x02\x02ƣ')
buf.write('એ\x03\x02\x02\x02ƥખ\x03\x02\x02\x02Ƨચ\x03\x02\x02\x02')
buf.write('Ʃડ\x03\x02\x02\x02ƫધ\x03\x02\x02\x02ƭબ\x03')
buf.write(
'\x02\x02\x02Ư\u0ab1\x03\x02\x02\x02Ʊશ\x03\x02\x02\x02Ƴ\u0aba')
buf.write('\x03\x02\x02\x02Ƶૂ\x03\x02\x02\x02Ʒો\x03\x02\x02\x02ƹ')
buf.write(
'\u0ad4\x03\x02\x02\x02ƻ\u0adb\x03\x02\x02\x02ƽૡ\x03\x02\x02\x02')
buf.write('ƿ૧\x03\x02\x02\x02ǁ૮\x03\x02\x02\x02ǃ\u0af7\x03')
buf.write('\x02\x02\x02Dž\u0b00\x03\x02\x02\x02LJଅ\x03\x02\x02\x02ljଋ')
buf.write('\x03\x02\x02\x02Nj\u0b12\x03\x02\x02\x02Ǎଘ\x03\x02\x02\x02Ǐ')
buf.write('ଡ\x03\x02\x02\x02Ǒଦ\x03\x02\x02\x02Ǔପ\x03\x02\x02\x02')
buf.write('Ǖଲ\x03\x02\x02\x02Ǘ\u0b3b\x03\x02\x02\x02Ǚି\x03')
buf.write(
'\x02\x02\x02Ǜ\u0b45\x03\x02\x02\x02ǝ\u0b4e\x03\x02\x02\x02ǟ\u0b54'
)
buf.write('\x03\x02\x02\x02ǡ\u0b5b\x03\x02\x02\x02ǣୟ\x03\x02\x02\x02ǥ')
buf.write('ୢ\x03\x02\x02\x02ǧ୪\x03\x02\x02\x02ǩ୲\x03\x02\x02\x02')
buf.write('ǫ\u0b79\x03\x02\x02\x02ǭ\u0b81\x03\x02\x02\x02ǯஒ\x03')
buf.write(
'\x02\x02\x02DZ\u0b9d\x03\x02\x02\x02dzந\x03\x02\x02\x02ǵ\u0bad')
buf.write('\x03\x02\x02\x02Ƿவ\x03\x02\x02\x02ǹ\u0bc3\x03\x02\x02\x02ǻ')
buf.write(
'ே\x03\x02\x02\x02ǽ\u0bce\x03\x02\x02\x02ǿ\u0bd3\x03\x02\x02\x02')
buf.write('ȁ\u0bd9\x03\x02\x02\x02ȃ\u0be0\x03\x02\x02\x02ȅ௨\x03')
buf.write('\x02\x02\x02ȇ௲\x03\x02\x02\x02ȉ௹\x03\x02\x02\x02ȋ\u0bfc')
buf.write('\x03\x02\x02\x02ȍఀ\x03\x02\x02\x02ȏఄ\x03\x02\x02\x02ȑ')
buf.write('ఈ\x03\x02\x02\x02ȓఋ\x03\x02\x02\x02ȕఐ\x03\x02\x02\x02')
buf.write('ȗక\x03\x02\x02\x02șజ\x03\x02\x02\x02țట\x03')
buf.write('\x02\x02\x02ȝధ\x03\x02\x02\x02ȟభ\x03\x02\x02\x02ȡస')
buf.write('\x03\x02\x02\x02ȣీ\x03\x02\x02\x02ȥౄ\x03\x02\x02\x02ȧ')
buf.write('ొ\x03\x02\x02\x02ȩ\u0c4f\x03\x02\x02\x02ȫౚ\x03\x02\x02\x02')
buf.write('ȭౢ\x03\x02\x02\x02ȯ\u0c72\x03\x02\x02\x02ȱ౽\x03')
buf.write('\x02\x02\x02ȳ಄\x03\x02\x02\x02ȵಎ\x03\x02\x02\x02ȷಖ')
buf.write('\x03\x02\x02\x02ȹಛ\x03\x02\x02\x02Ȼತ\x03\x02\x02\x02Ƚ')
buf.write(
'ಪ\x03\x02\x02\x02ȿ\u0cb4\x03\x02\x02\x02Ɂ\u0cba\x03\x02\x02\x02')
buf.write('Ƀಿ\x03\x02\x02\x02Ʌೋ\x03\x02\x02\x02ɇ\u0cd4\x03')
buf.write('\x02\x02\x02ɉೞ\x03\x02\x02\x02ɋ\u0ce5\x03\x02\x02\x02ɍ೯')
buf.write('\x03\x02\x02\x02ɏ\u0cf9\x03\x02\x02\x02ɑഁ\x03\x02\x02\x02ɓ')
buf.write('ഇ\x03\x02\x02\x02ɕ\u0d11\x03\x02\x02\x02ɗഗ\x03\x02\x02\x02')
buf.write('əഝ\x03\x02\x02\x02ɛഡ\x03\x02\x02\x02ɝദ\x03')
buf.write('\x02\x02\x02ɟഫ\x03\x02\x02\x02ɡല\x03\x02\x02\x02ɣശ')
buf.write('\x03\x02\x02\x02ɥീ\x03\x02\x02\x02ɧൌ\x03\x02\x02\x02ɩ')
buf.write(
'\u0d53\x03\x02\x02\x02ɫ൝\x03\x02\x02\x02ɭ\u0d64\x03\x02\x02\x02')
buf.write('ɯ൬\x03\x02\x02\x02ɱ൴\x03\x02\x02\x02ɳඈ\x03')
buf.write('\x02\x02\x02ɵඏ\x03\x02\x02\x02ɷග\x03\x02\x02\x02ɹඣ')
buf.write('\x03\x02\x02\x02ɻත\x03\x02\x02\x02ɽඳ\x03\x02\x02\x02ɿ')
buf.write('ර\x03\x02\x02\x02ʁෂ\x03\x02\x02\x02ʃ\u0dc8\x03\x02\x02\x02')
buf.write('ʅෑ\x03\x02\x02\x02ʇෘ\x03\x02\x02\x02ʉො\x03')
buf.write('\x02\x02\x02ʋ\u0de2\x03\x02\x02\x02ʍ෧\x03\x02\x02\x02ʏ෭')
buf.write('\x03\x02\x02\x02ʑ෴\x03\x02\x02\x02ʓ\u0df9\x03\x02\x02\x02ʕ')
buf.write('ฃ\x03\x02\x02\x02ʗช\x03\x02\x02\x02ʙถ\x03\x02\x02\x02')
buf.write('ʛบ\x03\x02\x02\x02ʝม\x03\x02\x02\x02ʟศ\x03')
buf.write('\x02\x02\x02ʡอ\x03\x02\x02\x02ʣี\x03\x02\x02\x02ʥ\u0e3c')
buf.write('\x03\x02\x02\x02ʧแ\x03\x02\x02\x02ʩ๊\x03\x02\x02\x02ʫ')
buf.write(
'๕\x03\x02\x02\x02ʭ\u0e62\x03\x02\x02\x02ʯ\u0e74\x03\x02\x02\x02')
buf.write('ʱ\u0e80\x03\x02\x02\x02ʳຐ\x03\x02\x02\x02ʵດ\x03')
buf.write('\x02\x02\x02ʷນ\x03\x02\x02\x02ʹຢ\x03\x02\x02\x02ʻຨ')
buf.write('\x03\x02\x02\x02ʽອ\x03\x02\x02\x02ʿຶ\x03\x02\x02\x02ˁ')
buf.write('\u0ebf\x03\x02\x02\x02˃່\x03\x02\x02\x02˅໗\x03\x02\x02\x02')
buf.write('ˇໞ\x03\x02\x02\x02ˉ\u0ee3\x03\x02\x02\x02ˋ\u0ee8\x03')
buf.write(
'\x02\x02\x02ˍ\u0ef1\x03\x02\x02\x02ˏ\u0efa\x03\x02\x02\x02ˑ\u0eff'
)
buf.write('\x03\x02\x02\x02˓།\x03\x02\x02\x02˕༕\x03\x02\x02\x02˗')
buf.write('༞\x03\x02\x02\x02˙༩\x03\x02\x02\x02˛༯\x03\x02\x02\x02')
buf.write('˝༷\x03\x02\x02\x02˟ཁ\x03\x02\x02\x02ˡཎ\x03')
buf.write('\x02\x02\x02ˣཕ\x03\x02\x02\x02˥འ\x03\x02\x02\x02˧ཧ')
buf.write('\x03\x02\x02\x02˩ཱི\x03\x02\x02\x02˫ྀ\x03\x02\x02\x02˭')
buf.write('ྎ\x03\x02\x02\x02˯ྖ\x03\x02\x02\x02˱ྞ\x03\x02\x02\x02')
buf.write('˳ྦ\x03\x02\x02\x02˵ྫྷ\x03\x02\x02\x02˷ྰ\x03')
buf.write('\x02\x02\x02˹ྵ\x03\x02\x02\x02˻ྺ\x03\x02\x02\x02˽࿄')
buf.write(
'\x03\x02\x02\x02˿\u0fe0\x03\x02\x02\x02́\u0ffb\x03\x02\x02\x02̃')
buf.write('ဓ\x03\x02\x02\x02̅အ\x03\x02\x02\x02̇ု\x03\x02\x02\x02')
buf.write('̉ဿ\x03\x02\x02\x02̋၏\x03\x02\x02\x02̍ၒ\x03')
buf.write('\x02\x02\x02̏ၛ\x03\x02\x02\x02̑ၧ\x03\x02\x02\x02̓ၱ')
buf.write('\x03\x02\x02\x02̕ၷ\x03\x02\x02\x02̗ၿ\x03\x02\x02\x02̙')
buf.write('ႄ\x03\x02\x02\x02̛ႉ\x03\x02\x02\x02̝႒\x03\x02\x02\x02')
buf.write('̟႗\x03\x02\x02\x02̡Ⴁ\x03\x02\x02\x02̣Ⴇ\x03')
buf.write('\x02\x02\x02̥Ⴍ\x03\x02\x02\x02̧Ⴔ\x03\x02\x02\x02̩Ⴞ')
buf.write(
'\x03\x02\x02\x02̫\u10c6\x03\x02\x02\x02̭\u10cc\x03\x02\x02\x02̯')
buf.write('დ\x03\x02\x02\x02̱მ\x03\x02\x02\x02̳ტ\x03\x02\x02\x02')
buf.write('̵ჩ\x03\x02\x02\x02̷ჭ\x03\x02\x02\x02̹ჳ\x03')
buf.write('\x02\x02\x02̻ჼ\x03\x02\x02\x02̽ᄂ\x03\x02\x02\x02̿ᄉ')
buf.write('\x03\x02\x02\x02́ᄑ\x03\x02\x02\x02̓ᄚ\x03\x02\x02\x02ͅ')
buf.write('ᄣ\x03\x02\x02\x02͇ᄪ\x03\x02\x02\x02͉ᄲ\x03\x02\x02\x02')
buf.write('͋ᄺ\x03\x02\x02\x02͍ᅃ\x03\x02\x02\x02͏ᅈ\x03')
buf.write('\x02\x02\x02͑ᅐ\x03\x02\x02\x02͓ᅛ\x03\x02\x02\x02͕ᅠ')
buf.write('\x03\x02\x02\x02͗ᅩ\x03\x02\x02\x02͙ᅯ\x03\x02\x02\x02͛')
buf.write('ᅵ\x03\x02\x02\x02͝ᅺ\x03\x02\x02\x02͟ᆁ\x03\x02\x02\x02')
buf.write('͡ᆆ\x03\x02\x02\x02ͣᆌ\x03\x02\x02\x02ͥᆐ\x03')
buf.write('\x02\x02\x02ͧᆗ\x03\x02\x02\x02ͩᆥ\x03\x02\x02\x02ͫᆭ')
buf.write('\x03\x02\x02\x02ͭᆺ\x03\x02\x02\x02ͯᇅ\x03\x02\x02\x02ͱ')
buf.write('ᇏ\x03\x02\x02\x02ͳᇙ\x03\x02\x02\x02͵ᇧ\x03\x02\x02\x02')
buf.write('ͷᇰ\x03\x02\x02\x02\u0379ᇶ\x03\x02\x02\x02ͻᇿ\x03')
buf.write('\x02\x02\x02ͽሇ\x03\x02\x02\x02Ϳሔ\x03\x02\x02\x02\u0381ም')
buf.write('\x03\x02\x02\x02\u0383ሢ\x03\x02\x02\x02΅ሦ\x03\x02\x02\x02·')
buf.write(
'ሿ\x03\x02\x02\x02Ήቄ\x03\x02\x02\x02\u038b\u124f\x03\x02\x02\x02')
buf.write('\u038dቡ\x03\x02\x02\x02Ώቱ\x03\x02\x02\x02Αኄ\x03')
buf.write('\x02\x02\x02Γኛ\x03\x02\x02\x02Εኪ\x03\x02\x02\x02Ηኴ')
buf.write(
'\x03\x02\x02\x02Ι\u12bf\x03\x02\x02\x02Λ\u12c7\x03\x02\x02\x02Ν')
buf.write('ዔ\x03\x02\x02\x02Οዤ\x03\x02\x02\x02Ρዴ\x03\x02\x02\x02')
buf.write('Σዹ\x03\x02\x02\x02Υዽ\x03\x02\x02\x02Χጂ\x03')
buf.write('\x02\x02\x02Ωጆ\x03\x02\x02\x02Ϋጋ\x03\x02\x02\x02έጏ')
buf.write('\x03\x02\x02\x02ί\u1316\x03\x02\x02\x02αጚ\x03\x02\x02\x02γ')
buf.write('ጠ\x03\x02\x02\x02εጰ\x03\x02\x02\x02ηጻ\x03\x02\x02\x02')
buf.write('ιጿ\x03\x02\x02\x02λፈ\x03\x02\x02\x02νፎ\x03')
buf.write('\x02\x02\x02οፕ\x03\x02\x02\x02ρፚ\x03\x02\x02\x02σ፡')
buf.write('\x03\x02\x02\x02υ፮\x03\x02\x02\x02χ፻\x03\x02\x02\x02ω')
buf.write('ᎈ\x03\x02\x02\x02ϋᎋ\x03\x02\x02\x02ύᎍ\x03\x02\x02\x02')
buf.write('Ϗᎏ\x03\x02\x02\x02ϑ\u139e\x03\x02\x02\x02ϓᎪ\x03')
buf.write('\x02\x02\x02ϕᎳ\x03\x02\x02\x02ϗᎵ\x03\x02\x02\x02ϙᏀ')
buf.write('\x03\x02\x02\x02ϛᏋ\x03\x02\x02\x02ϝᏖ\x03\x02\x02\x02ϟ')
buf.write('Ꮱ\x03\x02\x02\x02ϡᏣ\x03\x02\x02\x02ϣᏭ\x03\x02\x02\x02')
buf.write('ϥᏯ\x03\x02\x02\x02ϧᏱ\x03\x02\x02\x02ϩᏳ\x03')
buf.write('\x02\x02\x02ϫᏵ\x03\x02\x02\x02ϭᏸ\x03\x02\x02\x02ϯᏺ')
buf.write('\x03\x02\x02\x02ϱᏼ\x03\x02\x02\x02ϳ\u13fe\x03\x02\x02\x02ϵ')
buf.write('᐀\x03\x02\x02\x02Ϸᐂ\x03\x02\x02\x02Ϲᐄ\x03\x02\x02\x02')
buf.write('ϻᐕ\x03\x02\x02\x02Ͻᐗ\x03\x02\x02\x02Ͽᐙ\x03')
buf.write('\x02\x02\x02Ёᐛ\x03\x02\x02\x02Ѓᐞ\x03\x02\x02\x02Ѕᐠ')
buf.write('\x03\x02\x02\x02Їᐫ\x03\x02\x02\x02Љᐭ\x03\x02\x02\x02Ћ')
buf.write('ᐯ\x03\x02\x02\x02Ѝᐱ\x03\x02\x02\x02Џᐳ\x03\x02\x02\x02')
buf.write('Бᐵ\x03\x02\x02\x02Гᐷ\x03\x02\x02\x02Еᐺ\x03')
buf.write('\x02\x02\x02Зᐼ\x03\x02\x02\x02Йᐾ\x03\x02\x02\x02Лᑀ')
buf.write('\x03\x02\x02\x02Нᑂ\x03\x02\x02\x02Пᑅ\x03\x02\x02\x02С')
buf.write('ᑋ\x03\x02\x02\x02Уᑎ\x03\x02\x02\x02Хᑕ\x03\x02\x02\x02')
buf.write('Чᑠ\x03\x02\x02\x02Щᑯ\x03\x02\x02\x02Ыᑽ\x03')
buf.write('\x02\x02\x02Эᒐ\x03\x02\x02\x02Яᒔ\x03\x02\x02\x02бᒖ')
buf.write('\x03\x02\x02\x02гᒞ\x03\x02\x02\x02еᒣ\x03\x02\x02\x02з')
buf.write('ᒥ\x03\x02\x02\x02йᒧ\x03\x02\x02\x02лᒩ\x03\x02\x02\x02')
buf.write('нᒫ\x03\x02\x02\x02пᒭ\x03\x02\x02\x02сᒯ\x03')
buf.write('\x02\x02\x02уᒱ\x03\x02\x02\x02хᒳ\x03\x02\x02\x02чᒵ')
buf.write('\x03\x02\x02\x02щᒷ\x03\x02\x02\x02ыᒹ\x03\x02\x02\x02э')
buf.write('ᒻ\x03\x02\x02\x02яᒽ\x03\x02\x02\x02ёᒿ\x03\x02\x02\x02')
buf.write('ѓᓁ\x03\x02\x02\x02ѕᓃ\x03\x02\x02\x02їᓅ\x03')
buf.write('\x02\x02\x02љᓇ\x03\x02\x02\x02ћᓉ\x03\x02\x02\x02ѝᓋ')
buf.write('\x03\x02\x02\x02џᓍ\x03\x02\x02\x02ѡᓏ\x03\x02\x02\x02ѣ')
buf.write('ᓑ\x03\x02\x02\x02ѥᓓ\x03\x02\x02\x02ѧᓕ\x03\x02\x02\x02')
buf.write('ѩѪ\x070\x02\x02Ѫѫ\x070\x02\x02ѫ\x04\x03\x02')
buf.write('\x02\x02Ѭѭ\x05еț\x02ѭ\x06\x03\x02\x02\x02Ѯ')
buf.write('ѯ\x05еț\x02ѯѰ\x05лȞ\x02Ѱ')
buf.write('ѱ\x05лȞ\x02ѱ\x08\x03\x02\x02\x02Ѳѳ\x05е')
buf.write('ț\x02ѳѴ\x05пȠ\x02Ѵѵ\x05ћ')
buf.write('Ȯ\x02ѵѶ\x05нȟ\x02Ѷѷ\x05ї')
buf.write('Ȭ\x02ѷ\n\x03\x02\x02\x02Ѹѹ\x05еț\x02ѹ')
buf.write('Ѻ\x05сȡ\x02Ѻѻ\x05нȟ\x02ѻ')
buf.write('Ѽ\x05яȨ\x02Ѽѽ\x05ћȮ\x02ѽ')
buf.write('\x0c\x03\x02\x02\x02Ѿѿ\x05еț\x02ѿҀ\x05с')
buf.write('ȡ\x02Ҁҁ\x05сȡ\x02ҁ҂\x05ї')
buf.write('Ȭ\x02҂҃\x05нȟ\x02҃҄\x05с')
buf.write('ȡ\x02҄҅\x05еț\x02҅҆\x05ћ')
buf.write('Ȯ\x02҆҇\x05нȟ\x02҇\x0e\x03\x02\x02\x02')
buf.write('҈҉\x05еț\x02҉Ҋ\x05ыȦ')
buf.write('\x02Ҋҋ\x05ыȦ\x02ҋ\x10\x03\x02\x02\x02Ҍ')
buf.write('ҍ\x05еț\x02ҍҎ\x05ыȦ\x02Ҏ')
buf.write('ҏ\x05ћȮ\x02ҏҐ\x05нȟ\x02Ґ')
buf.write('ґ\x05їȬ\x02ґ\x12\x03\x02\x02\x02Ғғ\x05')
buf.write('еț\x02ғҔ\x05яȨ\x02Ҕҕ')
buf.write('\x05еț\x02ҕҖ\x05ыȦ\x02Җҗ')
buf.write('\x05ѥȳ\x02җҘ\x05ѧȴ\x02Ҙҙ')
buf.write('\x05нȟ\x02ҙ\x14\x03\x02\x02\x02Ққ\x05е')
buf.write('ț\x02қҜ\x05яȨ\x02Ҝҝ\x05л')
buf.write('Ȟ\x02ҝ\x16\x03\x02\x02\x02Ҟҟ\x05еț\x02')
buf.write('ҟҠ\x05яȨ\x02Ҡҡ\x05ѥȳ')
buf.write('\x02ҡ\x18\x03\x02\x02\x02Ңң\x05еț\x02ң')
buf.write('Ҥ\x05їȬ\x02Ҥҥ\x05їȬ\x02ҥ')
buf.write('Ҧ\x05еț\x02Ҧҧ\x05ѥȳ\x02ҧ')
buf.write('\x1a\x03\x02\x02\x02Ҩҩ\x05еț\x02ҩҪ\x05')
buf.write('љȭ\x02Ҫ\x1c\x03\x02\x02\x02ҫҬ\x05еț')
buf.write('\x02Ҭҭ\x05љȭ\x02ҭҮ\x05љȭ')
buf.write('\x02Үү\x05ѝȯ\x02үҰ\x05эȧ')
buf.write('\x02Ұұ\x05нȟ\x02ұ\x1e\x03\x02\x02\x02Ҳ')
buf.write('ҳ\x05еț\x02ҳҴ\x05љȭ\x02Ҵ')
buf.write('ҵ\x05љȭ\x02ҵҶ\x05нȟ\x02Ҷ')
buf.write('ҷ\x05їȬ\x02ҷҸ\x05ћȮ\x02Ҹ')
buf.write(' \x03\x02\x02\x02ҹҺ\x05еț\x02Һһ\x05љ')
buf.write('ȭ\x02һҼ\x05йȝ\x02Ҽ"\x03\x02\x02\x02ҽ')
buf.write('Ҿ\x05еț\x02Ҿҿ\x05љȭ\x02ҿ')
buf.write('Ӏ\x05љȭ\x02ӀӁ\x05ёȩ\x02Ӂ')
buf.write('ӂ\x05йȝ\x02ӂӃ\x05хȣ\x02Ӄ')
buf.write('ӄ\x05еț\x02ӄӅ\x05ћȮ\x02Ӆ')
buf.write('ӆ\x05нȟ\x02ӆ$\x03\x02\x02\x02Ӈӈ\x05е')
buf.write('ț\x02ӈӉ\x05ћȮ\x02Ӊ&\x03\x02\x02\x02ӊ')
buf.write('Ӌ\x05еț\x02Ӌӌ\x05ћȮ\x02ӌ')
buf.write('Ӎ\x05ћȮ\x02Ӎӎ\x05їȬ\x02ӎ')
buf.write('ӏ\x05хȣ\x02ӏӐ\x05зȜ\x02Ӑ')
buf.write('ӑ\x05ѝȯ\x02ӑӒ\x05ћȮ\x02Ӓ')
buf.write('ӓ\x05нȟ\x02ӓ(\x03\x02\x02\x02Ӕӕ\x05е')
buf.write('ț\x02ӕӖ\x05ѝȯ\x02Ӗӗ\x05л')
buf.write('Ȟ\x02ӗӘ\x05хȣ\x02Әә\x05ћ')
buf.write('Ȯ\x02ә*\x03\x02\x02\x02Ӛӛ\x05еț\x02ӛ')
buf.write('Ӝ\x05ѝȯ\x02Ӝӝ\x05ћȮ\x02ӝ')
buf.write('Ӟ\x05уȢ\x02Ӟӟ\x05хȣ\x02ӟ')
buf.write('Ӡ\x05лȞ\x02Ӡ,\x03\x02\x02\x02ӡӢ\x05е')
buf.write('ț\x02Ӣӣ\x05ѝȯ\x02ӣӤ\x05ћ')
buf.write('Ȯ\x02Ӥӥ\x05ёȩ\x02ӥ.\x03\x02\x02\x02Ӧ')
buf.write('ӧ\x05еț\x02ӧӨ\x05ѝȯ\x02Ө')
buf.write('ө\x05ћȮ\x02өӪ\x05ёȩ\x02Ӫ')
buf.write('ӫ\x05эȧ\x02ӫӬ\x05еț\x02Ӭ')
buf.write('ӭ\x05ћȮ\x02ӭӮ\x05хȣ\x02Ӯ')
buf.write('ӯ\x05йȝ\x02ӯ0\x03\x02\x02\x02Ӱӱ\x05')
buf.write('еț\x02ӱӲ\x05ѝȯ\x02Ӳӳ')
buf.write('\x05ћȮ\x02ӳӴ\x05ёȩ\x02Ӵӵ')
buf.write('\x05яȨ\x02ӵӶ\x05ёȩ\x02Ӷӷ')
buf.write('\x05эȧ\x02ӷӸ\x05ёȩ\x02Ӹӹ')
buf.write('\x05ѝȯ\x02ӹӺ\x05љȭ\x02Ӻӻ')
buf.write('\x07a\x02\x02ӻӼ\x05ћȮ\x02Ӽӽ\x05ї')
buf.write('Ȭ\x02ӽӾ\x05еț\x02Ӿӿ\x05я')
buf.write('Ȩ\x02ӿԀ\x05љȭ\x02Ԁԁ\x05е')
buf.write('ț\x02ԁԂ\x05йȝ\x02Ԃԃ\x05ћ')
buf.write('Ȯ\x02ԃԄ\x05хȣ\x02Ԅԅ\x05ё')
buf.write('ȩ\x02ԅԆ\x05яȨ\x02Ԇ2\x03\x02\x02\x02')
buf.write('ԇԈ\x05зȜ\x02Ԉԉ\x05еț')
buf.write('\x02ԉԊ\x05ћȮ\x02Ԋԋ\x05йȝ')
buf.write('\x02ԋԌ\x05уȢ\x02Ԍ4\x03\x02\x02\x02ԍ')
buf.write('Ԏ\x05зȜ\x02Ԏԏ\x05нȟ\x02ԏ')
buf.write('Ԑ\x05пȠ\x02Ԑԑ\x05ёȩ\x02ԑ')
buf.write('Ԓ\x05їȬ\x02Ԓԓ\x05нȟ\x02ԓ')
buf.write('6\x03\x02\x02\x02Ԕԕ\x05зȜ\x02ԕԖ\x05')
buf.write('нȟ\x02Ԗԗ\x05сȡ\x02ԗԘ')
buf.write('\x05хȣ\x02Ԙԙ\x05яȨ\x02ԙ8\x03')
buf.write('\x02\x02\x02Ԛԛ\x05зȜ\x02ԛԜ\x05н')
buf.write('ȟ\x02Ԝԝ\x05ћȮ\x02ԝԞ\x05ѡ')
buf.write('ȱ\x02Ԟԟ\x05нȟ\x02ԟԠ\x05н')
buf.write('ȟ\x02Ԡԡ\x05яȨ\x02ԡ:\x03\x02\x02\x02Ԣ')
buf.write('ԣ\x05зȜ\x02ԣԤ\x05пȠ\x02Ԥ')
buf.write('ԥ\x05хȣ\x02ԥԦ\x05ыȦ\x02Ԧ')
buf.write('ԧ\x05нȟ\x02ԧ<\x03\x02\x02\x02Ԩԩ\x05з')
buf.write('Ȝ\x02ԩԪ\x05хȣ\x02Ԫԫ\x05я')
buf.write('Ȩ\x02ԫԬ\x05еț\x02Ԭԭ\x05ї')
buf.write('Ȭ\x02ԭԮ\x05ѥȳ\x02Ԯԯ\x07a\x02')
buf.write('\x02ԯ\u0530\x05лȞ\x02\u0530Ա\x05ёȩ')
buf.write('\x02ԱԲ\x05ѝȯ\x02ԲԳ\x05зȜ')
buf.write('\x02ԳԴ\x05ыȦ\x02ԴԵ\x05нȟ')
buf.write('\x02Ե>\x03\x02\x02\x02ԶԷ\x05зȜ\x02ԷԸ')
buf.write('\x05хȣ\x02ԸԹ\x05яȨ\x02ԹԺ')
buf.write('\x05еț\x02ԺԻ\x05їȬ\x02ԻԼ')
buf.write('\x05ѥȳ\x02ԼԽ\x07a\x02\x02ԽԾ\x05п')
buf.write('Ƞ\x02ԾԿ\x05ыȦ\x02ԿՀ\x05ё')
buf.write('ȩ\x02ՀՁ\x05еț\x02ՁՂ\x05ћ')
buf.write('Ȯ\x02Ղ@\x03\x02\x02\x02ՃՄ\x05зȜ\x02Մ')
buf.write('Յ\x05хȣ\x02ՅՆ\x05яȨ\x02Ն')
buf.write('Շ\x05еț\x02ՇՈ\x05їȬ\x02Ո')
buf.write('Չ\x05ѥȳ\x02ՉՊ\x07a\x02\x02ՊՋ')
buf.write('\x05хȣ\x02ՋՌ\x05яȨ\x02ՌՍ')
buf.write('\x05ћȮ\x02ՍՎ\x05нȟ\x02ՎՏ')
buf.write('\x05сȡ\x02ՏՐ\x05нȟ\x02ՐՑ')
buf.write('\x05їȬ\x02ՑB\x03\x02\x02\x02ՒՓ\x05зȜ')
buf.write('\x02ՓՔ\x05ыȦ\x02ՔՕ\x05ёȩ')
buf.write('\x02ՕՖ\x05зȜ\x02ՖD\x03\x02\x02\x02\u0557\u0558')
buf.write('\x05зȜ\x02\u0558ՙ\x05ыȦ\x02ՙ՚')
buf.write('\x05ёȩ\x02՚՛\x05йȝ\x02՛՜')
buf.write('\x05щȥ\x02՜F\x03\x02\x02\x02՝՞\x05зȜ')
buf.write('\x02՞՟\x05ёȩ\x02՟ՠ\x05лȞ')
buf.write('\x02ՠա\x05ѥȳ\x02աH\x03\x02\x02\x02բգ')
buf.write('\x05зȜ\x02գդ\x05ёȩ\x02դե')
buf.write('\x05ёȩ\x02եզ\x05ыȦ\x02զէ')
buf.write('\x05нȟ\x02էը\x05еț\x02ըթ')
buf.write('\x05яȨ\x02թJ\x03\x02\x02\x02ժի\x05зȜ')
buf.write('\x02իլ\x05ёȩ\x02լխ\x05ћȮ')
buf.write('\x02խծ\x05уȢ\x02ծL\x03\x02\x02\x02կհ')
buf.write('\x05зȜ\x02հձ\x05їȬ\x02ձղ')
buf.write('\x05нȟ\x02ղճ\x05еț\x02ճմ')
buf.write('\x05лȞ\x02մյ\x05ћȮ\x02յն')
buf.write('\x05уȢ\x02նN\x03\x02\x02\x02շո\x05зȜ')
buf.write('\x02ոչ\x05ѝȯ\x02չպ\x05ыȦ')
buf.write('\x02պջ\x05щȥ\x02ջP\x03\x02\x02\x02ռս')
buf.write('\x05зȜ\x02սվ\x05ѥȳ\x02վR\x03')
buf.write('\x02\x02\x02տր\x05зȜ\x02րց\x05ѥ')
buf.write('ȳ\x02ցւ\x05ћȮ\x02ւփ\x05н')
buf.write('ȟ\x02փT\x03\x02\x02\x02քօ\x05йȝ\x02օ')
buf.write('V\x03\x02\x02\x02ֆև\x05йȝ\x02ևֈ\x05е')
buf.write('ț\x02ֈ։\x05йȝ\x02։֊\x05у')
buf.write('Ȣ\x02֊\u058b\x05нȟ\x02\u058bX\x03\x02\x02\x02\u058c')
buf.write('֍\x05йȝ\x02֍֎\x05еț\x02֎')
buf.write('֏\x05ыȦ\x02֏\u0590\x05ыȦ\x02\u0590')
buf.write('Z\x03\x02\x02\x02֑֒\x05йȝ\x02֒֓\x05е')
buf.write('ț\x02֓֔\x05яȨ\x02֔֕\x05ё')
buf.write('ȩ\x02֖֕\x05яȨ\x02֖֗\x05х')
buf.write('ȣ\x02֗֘\x05йȝ\x02֘֙\x05е')
buf.write('ț\x02֚֙\x05ыȦ\x02֚\\\x03\x02\x02\x02֛')
buf.write('֜\x05йȝ\x02֜֝\x05еț\x02֝')
buf.write('֞\x05љȭ\x02֞֟\x05йȝ\x02֟')
buf.write('֠\x05еț\x02֠֡\x05лȞ\x02֡')
buf.write('֢\x05нȟ\x02֢^\x03\x02\x02\x02֣֤\x05й')
buf.write('ȝ\x02֤֥\x05еț\x02֥֦\x05љ')
buf.write('ȭ\x02֦֧\x05нȟ\x02֧`\x03\x02\x02\x02֨')
buf.write('֩\x05йȝ\x02֪֩\x05еț\x02֪')
buf.write('֫\x05љȭ\x02֫֬\x05ћȮ\x02֬')
buf.write('b\x03\x02\x02\x02֭֮\x05йȝ\x02֮֯\x05у')
buf.write('Ȣ\x02ְ֯\x05еț\x02ְֱ\x05ї')
buf.write('Ȭ\x02ֱd\x03\x02\x02\x02ֲֳ\x05йȝ\x02ֳ')
buf.write('ִ\x05уȢ\x02ִֵ\x05еț\x02ֵ')
buf.write('ֶ\x05їȬ\x02ֶַ\x07a\x02\x02ַָ')
buf.write('\x05йȝ\x02ָֹ\x05љȭ\x02ֹf\x03')
buf.write('\x02\x02\x02ֺֻ\x05йȝ\x02ֻּ\x05у')
buf.write('Ȣ\x02ּֽ\x05еț\x02ֽ־\x05ї')
buf.write('Ȭ\x02־ֿ\x05еț\x02ֿ׀\x05й')
buf.write('ȝ\x02׀ׁ\x05ћȮ\x02ׁׂ\x05н')
buf.write('ȟ\x02ׂ׃\x05їȬ\x02׃h\x03\x02\x02\x02ׄ')
buf.write('ׅ\x05йȝ\x02ׅ׆\x05уȢ\x02׆')
buf.write('ׇ\x05нȟ\x02ׇ\u05c8\x05йȝ\x02\u05c8')
buf.write('\u05c9\x05щȥ\x02\u05c9j\x03\x02\x02\x02\u05ca\u05cb\x05й')
buf.write('ȝ\x02\u05cb\u05cc\x05уȢ\x02\u05cc\u05cd\x05ї')
buf.write('Ȭ\x02\u05cdl\x03\x02\x02\x02\u05ce\u05cf\x05йȝ\x02\u05cf')
buf.write('א\x05ыȦ\x02אב\x05ёȩ\x02ב')
buf.write('ג\x05зȜ\x02גn\x03\x02\x02\x02דה\x05й')
buf.write('ȝ\x02הו\x05ыȦ\x02וז\x05ё')
buf.write('ȩ\x02זח\x05љȭ\x02חט\x05н')
buf.write('ȟ\x02טp\x03\x02\x02\x02יך\x05йȝ\x02ך')
buf.write('כ\x05ыȦ\x02כל\x05ѝȯ\x02ל')
buf.write('ם\x05љȭ\x02םמ\x05ћȮ\x02מ')
buf.write('ן\x05нȟ\x02ןנ\x05їȬ\x02נ')
buf.write('r\x03\x02\x02\x02סע\x05йȝ\x02עף\x05ё')
buf.write('ȩ\x02ףפ\x05ыȦ\x02פץ\x05ы')
buf.write('Ȧ\x02ץצ\x05нȟ\x02צק\x05й')
buf.write('ȝ\x02קר\x05ћȮ\x02רt\x03\x02\x02\x02ש')
buf.write('ת\x05йȝ\x02ת\u05eb\x05ёȩ\x02\u05eb')
buf.write('\u05ec\x05ыȦ\x02\u05ec\u05ed\x05ѝȯ\x02\u05ed')
buf.write('\u05ee\x05эȧ\x02\u05eeׯ\x05яȨ\x02ׯ')
buf.write('װ\x05љȭ\x02װv\x03\x02\x02\x02ױײ\x05й')
buf.write('ȝ\x02ײ׳\x05ёȩ\x02׳״\x05э')
buf.write('ȧ\x02״\u05f5\x05эȧ\x02\u05f5\u05f6\x05н')
buf.write('ȟ\x02\u05f6\u05f7\x05яȨ\x02\u05f7\u05f8\x05ћ')
buf.write('Ȯ\x02\u05f8x\x03\x02\x02\x02\u05f9\u05fa\x05йȝ\x02\u05fa')
buf.write('\u05fb\x05ёȩ\x02\u05fb\u05fc\x05эȧ\x02\u05fc')
buf.write('\u05fd\x05эȧ\x02\u05fd\u05fe\x05хȣ\x02\u05fe')
buf.write('\u05ff\x05ћȮ\x02\u05ffz\x03\x02\x02\x02\u0600\u0601\x05й')
buf.write('ȝ\x02\u0601\u0602\x05ёȩ\x02\u0602\u0603\x05э')
buf.write('ȧ\x02\u0603\u0604\x05эȧ\x02\u0604\u0605\x05х')
buf.write('ȣ\x02\u0605؆\x05ћȮ\x02؆؇\x05ћ')
buf.write('Ȯ\x02؇؈\x05нȟ\x02؈؉\x05л')
buf.write('Ȟ\x02؉|\x03\x02\x02\x02؊؋\x05йȝ\x02؋')
buf.write('،\x05ёȩ\x02،؍\x05эȧ\x02؍')
buf.write('؎\x05ѓȪ\x02؎؏\x05еț\x02؏')
buf.write('ؐ\x05ћȮ\x02ؐؑ\x05хȣ\x02ؑ')
buf.write('ؒ\x05зȜ\x02ؒؓ\x05хȣ\x02ؓ')
buf.write('ؔ\x05ыȦ\x02ؔؕ\x05хȣ\x02ؕ')
buf.write('ؖ\x05ћȮ\x02ؖؗ\x05ѥȳ\x02ؗ')
buf.write('~\x03\x02\x02\x02ؘؙ\x05йȝ\x02ؙؚ\x05ё')
buf.write('ȩ\x02ؚ؛\x05эȧ\x02؛\u061c\x05ѓ')
buf.write('Ȫ\x02\u061c؝\x05хȣ\x02؝؞\x05ы')
buf.write('Ȧ\x02؞؟\x05нȟ\x02؟\x80\x03\x02\x02')
buf.write('\x02ؠء\x05йȝ\x02ءآ\x05ёȩ')
buf.write('\x02آأ\x05эȧ\x02أؤ\x05ѓȪ')
buf.write('\x02ؤإ\x05ёȩ\x02إئ\x05ѝȯ')
buf.write('\x02ئا\x05яȨ\x02اب\x05лȞ')
buf.write('\x02ب\x82\x03\x02\x02\x02ةت\x05йȝ\x02ت')
buf.write('ث\x05ёȩ\x02ثج\x05яȨ\x02ج')
buf.write('ح\x05яȨ\x02حخ\x05нȟ\x02خ')
buf.write('د\x05йȝ\x02دذ\x05ћȮ\x02ذ')
buf.write('\x84\x03\x02\x02\x02رز\x05йȝ\x02زس')
buf.write('\x05ёȩ\x02سش\x05яȨ\x02شص')
buf.write('\x05яȨ\x02صض\x05нȟ\x02ضط')
buf.write('\x05йȝ\x02طظ\x05ћȮ\x02ظع')
buf.write('\x07a\x02\x02عغ\x05зȜ\x02غػ\x05ѥ')
buf.write('ȳ\x02ػؼ\x07a\x02\x02ؼؽ\x05їȬ')
buf.write('\x02ؽؾ\x05ёȩ\x02ؾؿ\x05ёȩ')
buf.write('\x02ؿـ\x05ћȮ\x02ـ\x86\x03\x02\x02\x02ف')
buf.write('ق\x05йȝ\x02قك\x05ёȩ\x02ك')
buf.write('ل\x05яȨ\x02لم\x05љȭ\x02م')
buf.write('ن\x05ћȮ\x02نه\x05еț\x02ه')
buf.write('و\x05яȨ\x02وى\x05ћȮ\x02ى')
buf.write('\x88\x03\x02\x02\x02يً\x05йȝ\x02ًٌ')
buf.write('\x05ёȩ\x02ٌٍ\x05яȨ\x02ٍَ')
buf.write('\x05љȭ\x02َُ\x05ћȮ\x02ُِ')
buf.write('\x05їȬ\x02ِّ\x05еț\x02ّْ')
buf.write('\x05хȣ\x02ْٓ\x05яȨ\x02ٓٔ')
buf.write('\x05ћȮ\x02ٔ\x8a\x03\x02\x02\x02ٕٖ\x05й')
buf.write('ȝ\x02ٖٗ\x05ёȩ\x02ٗ٘\x05я')
buf.write('Ȩ\x02٘ٙ\x05љȭ\x02ٙٚ\x05ћ')
buf.write('Ȯ\x02ٚٛ\x05їȬ\x02ٜٛ\x05е')
buf.write('ț\x02ٜٝ\x05хȣ\x02ٝٞ\x05я')
buf.write('Ȩ\x02ٟٞ\x05ћȮ\x02ٟ٠\x05љ')
buf.write('ȭ\x02٠\x8c\x03\x02\x02\x02١٢\x05йȝ')
buf.write('\x02٢٣\x05ёȩ\x02٣٤\x05яȨ')
buf.write('\x02٤٥\x05љȭ\x02٥٦\x05ћȮ')
buf.write('\x02٦٧\x05їȬ\x02٧٨\x05ѝȯ')
buf.write('\x02٨٩\x05йȝ\x02٩٪\x05ћȮ')
buf.write('\x02٪٫\x05ёȩ\x02٫٬\x05їȬ')
buf.write('\x02٬\x8e\x03\x02\x02\x02٭ٮ\x05йȝ\x02ٮ')
buf.write('ٯ\x05ёȩ\x02ٯٰ\x05яȨ\x02ٰ')
buf.write('ٱ\x05ћȮ\x02ٱٲ\x05нȟ\x02ٲ')
buf.write('ٳ\x05яȨ\x02ٳٴ\x05ћȮ\x02ٴ')
buf.write('\x90\x03\x02\x02\x02ٵٶ\x05йȝ\x02ٶٷ')
buf.write('\x05ёȩ\x02ٷٸ\x05яȨ\x02ٸٹ')
buf.write('\x05ћȮ\x02ٹٺ\x05нȟ\x02ٺٻ')
buf.write('\x05ѣȲ\x02ٻټ\x05ћȮ\x02ټ\x92')
buf.write('\x03\x02\x02\x02ٽپ\x05йȝ\x02پٿ\x05ё')
buf.write('ȩ\x02ٿڀ\x05яȨ\x02ڀځ\x05ћ')
buf.write('Ȯ\x02ځڂ\x05хȣ\x02ڂڃ\x05я')
buf.write('Ȩ\x02ڃڄ\x05ѝȯ\x02ڄڅ\x05н')
buf.write('ȟ\x02څ\x94\x03\x02\x02\x02چڇ\x05йȝ')
buf.write('\x02ڇڈ\x05ёȩ\x02ڈډ\x05яȨ')
buf.write('\x02ډڊ\x05џȰ\x02ڊڋ\x05нȟ')
buf.write('\x02ڋڌ\x05їȬ\x02ڌڍ\x05ћȮ')
buf.write('\x02ڍ\x96\x03\x02\x02\x02ڎڏ\x05йȝ\x02ڏ')
buf.write('ڐ\x05ёȩ\x02ڐڑ\x05їȬ\x02ڑ')
buf.write('ڒ\x05їȬ\x02ڒړ\x05ѝȯ\x02ړ')
buf.write('ڔ\x05ѓȪ\x02ڔڕ\x05ћȮ\x02ڕ')
buf.write('ږ\x07a\x02\x02ږڗ\x05ѣȲ\x02ڗژ')
buf.write('\x05хȣ\x02ژڙ\x05лȞ\x02ڙ\x98')
buf.write('\x03\x02\x02\x02ښڛ\x05йȝ\x02ڛڜ\x05ё')
buf.write('ȩ\x02ڜڝ\x05їȬ\x02ڝڞ\x05ї')
buf.write('Ȭ\x02ڞڟ\x05ѝȯ\x02ڟڠ\x05ѓ')
buf.write('Ȫ\x02ڠڡ\x05ћȮ\x02ڡڢ\x07a\x02')
buf.write('\x02ڢڣ\x05ѣȲ\x02ڣڤ\x05хȣ')
buf.write('\x02ڤڥ\x05лȞ\x02ڥڦ\x07a\x02\x02ڦ')
buf.write('ڧ\x05еț\x02ڧڨ\x05ыȦ\x02ڨ')
buf.write('ک\x05ыȦ\x02ک\x9a\x03\x02\x02\x02ڪګ')
buf.write('\x05йȝ\x02ګڬ\x05ёȩ\x02ڬڭ')
buf.write('\x05љȭ\x02ڭڮ\x05ћȮ\x02ڮ\x9c')
buf.write('\x03\x02\x02\x02گڰ\x05йȝ\x02ڰڱ\x05ё')
buf.write('ȩ\x02ڱڲ\x05ѝȯ\x02ڲڳ\x05я')
buf.write('Ȩ\x02ڳڴ\x05ћȮ\x02ڴ\x9e\x03\x02\x02')
buf.write('\x02ڵڶ\x05йȝ\x02ڶڷ\x05їȬ')
buf.write('\x02ڷڸ\x05нȟ\x02ڸڹ\x05еț')
buf.write('\x02ڹں\x05ћȮ\x02ںڻ\x05нȟ')
buf.write('\x02ڻ\xa0\x03\x02\x02\x02ڼڽ\x05йȝ\x02ڽ')
buf.write('ھ\x05їȬ\x02ھڿ\x05ёȩ\x02ڿ')
buf.write('ۀ\x05љȭ\x02ۀہ\x05љȭ\x02ہ')
buf.write('¢\x03\x02\x02\x02ۂۃ\x05йȝ\x02ۃۄ')
buf.write('\x05ѝȯ\x02ۄۅ\x05зȜ\x02ۅۆ')
buf.write('\x05нȟ\x02ۆ¤\x03\x02\x02\x02ۇۈ\x05й')
buf.write('ȝ\x02ۈۉ\x05ѝȯ\x02ۉۊ\x05ї')
buf.write('Ȭ\x02ۊۋ\x05їȬ\x02ۋی\x05н')
buf.write('ȟ\x02یۍ\x05яȨ\x02ۍێ\x05ћ')
buf.write('Ȯ\x02ێ¦\x03\x02\x02\x02ۏې\x05йȝ')
buf.write('\x02ېۑ\x05ѝȯ\x02ۑے\x05їȬ')
buf.write('\x02ےۓ\x05їȬ\x02ۓ۔\x05нȟ')
buf.write('\x02۔ە\x05яȨ\x02ەۖ\x05ћȮ')
buf.write('\x02ۖۗ\x07a\x02\x02ۗۘ\x05ѝȯ\x02ۘ')
buf.write('ۙ\x05љȭ\x02ۙۚ\x05нȟ\x02ۚ')
buf.write('ۛ\x05їȬ\x02ۛ¨\x03\x02\x02\x02ۜ\u06dd')
buf.write('\x05йȝ\x02\u06dd۞\x05ѝȯ\x02۞۟')
buf.write('\x05їȬ\x02۟۠\x05љȭ\x02۠ۡ')
buf.write('\x05ёȩ\x02ۡۢ\x05їȬ\x02ۢª')
buf.write('\x03\x02\x02\x02ۣۤ\x05йȝ\x02ۤۥ\x05ѝ')
buf.write('ȯ\x02ۥۦ\x05љȭ\x02ۦۧ\x05ћ')
buf.write('Ȯ\x02ۧۨ\x05ёȩ\x02ۨ۩\x05э')
buf.write('ȧ\x02۩۪\x05лȞ\x02۪۫\x05е')
buf.write('ț\x02۫۬\x05ћȮ\x02ۭ۬\x05ѝ')
buf.write('ȯ\x02ۭۮ\x05эȧ\x02ۮ¬\x03\x02\x02')
buf.write('\x02ۯ۰\x05йȝ\x02۰۱\x05ѥȳ')
buf.write('\x02۱۲\x05йȝ\x02۲۳\x05ыȦ')
buf.write('\x02۳۴\x05нȟ\x02۴®\x03\x02\x02\x02۵')
buf.write('۶\x05лȞ\x02۶۷\x05еț\x02۷')
buf.write('۸\x05ћȮ\x02۸۹\x05еț\x02۹')
buf.write('°\x03\x02\x02\x02ۺۻ\x05лȞ\x02ۻۼ')
buf.write('\x05еț\x02ۼ۽\x05ћȮ\x02۽۾')
buf.write('\x05еț\x02۾ۿ\x05зȜ\x02ۿ܀')
buf.write('\x05еț\x02܀܁\x05љȭ\x02܁܂')
buf.write('\x05нȟ\x02܂²\x03\x02\x02\x02܃܄\x05л')
buf.write('Ȟ\x02܄܅\x05еț\x02܅܆\x05ћ')
buf.write('Ȯ\x02܆܇\x05нȟ\x02܇´\x03\x02\x02')
buf.write('\x02܈܉\x05лȞ\x02܉܊\x05еț')
buf.write('\x02܊܋\x05ѥȳ\x02܋¶\x03\x02\x02\x02܌')
buf.write('܍\x05лȞ\x02܍\u070e\x05зȜ\x02\u070e')
buf.write('\u070f\x07a\x02\x02\u070fܐ\x05їȬ\x02ܐܑ')
buf.write('\x05ёȩ\x02ܑܒ\x05ыȦ\x02ܒܓ')
buf.write('\x05нȟ\x02ܓܔ\x07a\x02\x02ܔܕ\x05й')
buf.write('ȝ\x02ܕܖ\x05уȢ\x02ܖܗ\x05е')
buf.write('ț\x02ܗܘ\x05яȨ\x02ܘܙ\x05с')
buf.write('ȡ\x02ܙܚ\x05нȟ\x02ܚ¸\x03\x02\x02')
buf.write('\x02ܛܜ\x05лȞ\x02ܜܝ\x05зȜ')
buf.write('\x02ܝܞ\x05ћȮ\x02ܞܟ\x05хȣ')
buf.write('\x02ܟܠ\x05эȧ\x02ܠܡ\x05нȟ')
buf.write('\x02ܡܢ\x05ѧȴ\x02ܢܣ\x05ёȩ')
buf.write('\x02ܣܤ\x05яȨ\x02ܤܥ\x05нȟ')
buf.write('\x02ܥº\x03\x02\x02\x02ܦܧ\x05лȞ\x02ܧ')
buf.write('ܨ\x05лȞ\x02ܨܩ\x05ыȦ\x02ܩ')
buf.write('¼\x03\x02\x02\x02ܪܫ\x05лȞ\x02ܫܬ')
buf.write('\x05нȟ\x02ܬܭ\x05зȜ\x02ܭܮ')
buf.write('\x05ѝȯ\x02ܮܯ\x05сȡ\x02ܯ¾')
buf.write('\x03\x02\x02\x02ܱܰ\x05лȞ\x02ܱܲ\x05н')
buf.write('ȟ\x02ܲܳ\x05йȝ\x02ܳÀ\x03\x02\x02')
buf.write('\x02ܴܵ\x05лȞ\x02ܵܶ\x05нȟ')
buf.write('\x02ܷܶ\x05йȝ\x02ܷܸ\x05хȣ')
buf.write('\x02ܸܹ\x05эȧ\x02ܹܺ\x05еț')
buf.write('\x02ܻܺ\x05ыȦ\x02ܻÂ\x03\x02\x02\x02ܼ')
buf.write('ܽ\x05лȞ\x02ܾܽ\x05нȟ\x02ܾ')
buf.write('ܿ\x05йȝ\x02ܿ݀\x05ыȦ\x02݀')
buf.write('݁\x05еț\x02݂݁\x05їȬ\x02݂')
buf.write('݃\x05нȟ\x02݃Ä\x03\x02\x02\x02݄݅')
buf.write('\x05лȞ\x02݆݅\x05нȟ\x02݆݇')
buf.write('\x05йȝ\x02݈݇\x05ёȩ\x02݈݉')
buf.write('\x05эȧ\x02݉݊\x05ѓȪ\x02݊\u074b')
buf.write('\x05ёȩ\x02\u074b\u074c\x05љȭ\x02\u074cݍ')
buf.write('\x05нȟ\x02ݍÆ\x03\x02\x02\x02ݎݏ\x05л')
buf.write('Ȟ\x02ݏݐ\x05нȟ\x02ݐݑ\x05й')
buf.write('ȝ\x02ݑݒ\x05їȬ\x02ݒݓ\x05н')
buf.write('ȟ\x02ݓݔ\x05эȧ\x02ݔݕ\x05н')
buf.write('ȟ\x02ݕݖ\x05яȨ\x02ݖݗ\x05ћ')
buf.write('Ȯ\x02ݗÈ\x03\x02\x02\x02ݘݙ\x05лȞ')
buf.write('\x02ݙݚ\x05нȟ\x02ݚݛ\x05пȠ')
buf.write('\x02ݛݜ\x05еț\x02ݜݝ\x05ѝȯ')
buf.write('\x02ݝݞ\x05ыȦ\x02ݞݟ\x05ћȮ')
buf.write('\x02ݟÊ\x03\x02\x02\x02ݠݡ\x05лȞ\x02ݡ')
buf.write('ݢ\x05нȟ\x02ݢݣ\x05пȠ\x02ݣ')
buf.write('ݤ\x05еț\x02ݤݥ\x05ѝȯ\x02ݥ')
buf.write('ݦ\x05ыȦ\x02ݦݧ\x05ћȮ\x02ݧ')
buf.write('ݨ\x05љȭ\x02ݨÌ\x03\x02\x02\x02ݩݪ')
buf.write('\x05лȞ\x02ݪݫ\x05нȟ\x02ݫݬ')
buf.write('\x05пȠ\x02ݬݭ\x05нȟ\x02ݭݮ')
buf.write('\x05їȬ\x02ݮݯ\x05їȬ\x02ݯݰ')
buf.write('\x05нȟ\x02ݰݱ\x05лȞ\x02ݱÎ')
buf.write('\x03\x02\x02\x02ݲݳ\x05лȞ\x02ݳݴ\x05н')
buf.write('ȟ\x02ݴݵ\x05пȠ\x02ݵݶ\x05х')
buf.write('ȣ\x02ݶݷ\x05яȨ\x02ݷݸ\x05н')
buf.write('ȟ\x02ݸݹ\x05їȬ\x02ݹÐ\x03\x02\x02')
buf.write('\x02ݺݻ\x05лȞ\x02ݻݼ\x05нȟ')
buf.write('\x02ݼݽ\x05ыȦ\x02ݽݾ\x05нȟ')
buf.write('\x02ݾݿ\x05ћȮ\x02ݿހ\x05нȟ')
buf.write('\x02ހÒ\x03\x02\x02\x02ށނ\x05лȞ\x02ނ')
buf.write('ރ\x05нȟ\x02ރބ\x05ѓȪ\x02ބ')
buf.write('ޅ\x05ћȮ\x02ޅކ\x05уȢ\x02ކ')
buf.write('Ô\x03\x02\x02\x02އވ\x05лȞ\x02ވމ')
buf.write('\x05нȟ\x02މފ\x05љȭ\x02ފދ')
buf.write('\x05йȝ\x02ދÖ\x03\x02\x02\x02ތލ\x05л')
buf.write('Ȟ\x02ލގ\x05нȟ\x02ގޏ\x05ћ')
buf.write('Ȯ\x02ޏސ\x05нȟ\x02ސޑ\x05ї')
buf.write('Ȭ\x02ޑޒ\x05эȧ\x02ޒޓ\x05х')
buf.write('ȣ\x02ޓޔ\x05яȨ\x02ޔޕ\x05х')
buf.write('ȣ\x02ޕޖ\x05љȭ\x02ޖޗ\x05ћ')
buf.write('Ȯ\x02ޗޘ\x05хȣ\x02ޘޙ\x05й')
buf.write('ȝ\x02ޙØ\x03\x02\x02\x02ޚޛ\x05лȞ')
buf.write('\x02ޛޜ\x05хȣ\x02ޜޝ\x05эȧ')
buf.write('\x02ޝޞ\x05нȟ\x02ޞޟ\x05яȨ')
buf.write('\x02ޟޠ\x05љȭ\x02ޠޡ\x05хȣ')
buf.write('\x02ޡޢ\x05ёȩ\x02ޢޣ\x05яȨ')
buf.write('\x02ޣÚ\x03\x02\x02\x02ޤޥ\x05лȞ\x02ޥ')
buf.write('ަ\x05хȣ\x02ަާ\x05љȭ\x02ާ')
buf.write('ި\x05еț\x02ިީ\x05зȜ\x02ީ')
buf.write('ު\x05ыȦ\x02ުޫ\x05нȟ\x02ޫ')
buf.write('Ü\x03\x02\x02\x02ެޭ\x05лȞ\x02ޭޮ')
buf.write('\x05хȣ\x02ޮޯ\x05љȭ\x02ޯް')
buf.write('\x05еț\x02ްޱ\x05љȭ\x02ޱ\u07b2')
buf.write('\x05љȭ\x02\u07b2\u07b3\x05ёȩ\x02\u07b3\u07b4')
buf.write('\x05йȝ\x02\u07b4\u07b5\x05хȣ\x02\u07b5\u07b6')
buf.write('\x05еț\x02\u07b6\u07b7\x05ћȮ\x02\u07b7\u07b8')
buf.write('\x05нȟ\x02\u07b8Þ\x03\x02\x02\x02\u07b9\u07ba\x05л')
buf.write('Ȟ\x02\u07ba\u07bb\x05хȣ\x02\u07bb\u07bc\x05љ')
buf.write('ȭ\x02\u07bc\u07bd\x05ћȮ\x02\u07bd\u07be\x05х')
buf.write('ȣ\x02\u07be\u07bf\x05яȨ\x02\u07bf߀\x05й')
buf.write('ȝ\x02߀߁\x05ћȮ\x02߁à\x03\x02\x02')
buf.write('\x02߂߃\x05лȞ\x02߃߄\x05ёȩ')
buf.write('\x02߄߅\x05йȝ\x02߅߆\x05ѝȯ')
buf.write('\x02߆߇\x05эȧ\x02߇߈\x05нȟ')
buf.write('\x02߈߉\x05яȨ\x02߉ߊ\x05ћȮ')
buf.write('\x02ߊâ\x03\x02\x02\x02ߋߌ\x05лȞ\x02ߌ')
buf.write('ߍ\x05ёȩ\x02ߍߎ\x05ѝȯ\x02ߎ')
buf.write('ߏ\x05зȜ\x02ߏߐ\x05ыȦ\x02ߐ')
buf.write('ߑ\x05нȟ\x02ߑä\x03\x02\x02\x02ߒߓ')
buf.write('\x05лȞ\x02ߓߔ\x05їȬ\x02ߔߕ')
buf.write('\x05ёȩ\x02ߕߖ\x05ѓȪ\x02ߖæ')
buf.write('\x03\x02\x02\x02ߗߘ\x05лȞ\x02ߘߙ\x05љ')
buf.write('ȭ\x02ߙߚ\x05хȣ\x02ߚߛ\x05я')
buf.write('Ȩ\x02ߛߜ\x05ћȮ\x02ߜߝ\x05н')
buf.write('ȟ\x02ߝߞ\x05їȬ\x02ߞߟ\x05џ')
buf.write('Ȱ\x02ߟߠ\x05еț\x02ߠߡ\x05ы')
buf.write('Ȧ\x02ߡߢ\x07a\x02\x02ߢߣ\x05ѝȯ')
buf.write('\x02ߣߤ\x05яȨ\x02ߤߥ\x05йȝ')
buf.write('\x02ߥߦ\x05ёȩ\x02ߦߧ\x05яȨ')
buf.write('\x02ߧߨ\x05љȭ\x02ߨߩ\x05ћȮ')
buf.write('\x02ߩߪ\x05їȬ\x02ߪ߫\x05еț')
buf.write('\x02߫߬\x05хȣ\x02߬߭\x05яȨ')
buf.write('\x02߭߮\x05нȟ\x02߮߯\x05лȞ')
buf.write('\x02߯è\x03\x02\x02\x02߰߱\x05нȟ\x02߱')
buf.write('߲\x05еț\x02߲߳\x05йȝ\x02߳')
buf.write('ߴ\x05уȢ\x02ߴê\x03\x02\x02\x02ߵ߶')
buf.write('\x05нȟ\x02߶߷\x05ыȦ\x02߷߸')
buf.write('\x05нȟ\x02߸߹\x05эȧ\x02߹ߺ')
buf.write('\x05нȟ\x02ߺ\u07fb\x05яȨ\x02\u07fb\u07fc')
buf.write('\x05ћȮ\x02\u07fcì\x03\x02\x02\x02߽߾\x05н')
buf.write('ȟ\x02߾߿\x05ыȦ\x02߿ࠀ\x05љ')
buf.write('ȭ\x02ࠀࠁ\x05нȟ\x02ࠁî\x03\x02\x02')
buf.write('\x02ࠂࠃ\x05нȟ\x02ࠃࠄ\x05ыȦ')
buf.write('\x02ࠄࠅ\x05љȭ\x02ࠅࠆ\x05хȣ')
buf.write('\x02ࠆࠇ\x05пȠ\x02ࠇð\x03\x02\x02\x02ࠈ')
buf.write('ࠉ\x05нȟ\x02ࠉࠊ\x05эȧ\x02ࠊ')
buf.write('ࠋ\x05ѓȪ\x02ࠋࠌ\x05ћȮ\x02ࠌ')
buf.write('ࠍ\x05ѥȳ\x02ࠍò\x03\x02\x02\x02ࠎࠏ')
buf.write('\x05нȟ\x02ࠏࠐ\x05яȨ\x02ࠐࠑ')
buf.write('\x05еț\x02ࠑࠒ\x05зȜ\x02ࠒࠓ')
buf.write('\x05ыȦ\x02ࠓࠔ\x05нȟ\x02ࠔô')
buf.write('\x03\x02\x02\x02ࠕࠖ\x05нȟ\x02ࠖࠗ\x05я')
buf.write('Ȩ\x02ࠗ࠘\x05йȝ\x02࠘࠙\x05ё')
buf.write('ȩ\x02࠙ࠚ\x05лȞ\x02ࠚࠛ\x05х')
buf.write('ȣ\x02ࠛࠜ\x05яȨ\x02ࠜࠝ\x05с')
buf.write('ȡ\x02ࠝö\x03\x02\x02\x02ࠞࠟ\x05нȟ')
buf.write('\x02ࠟࠠ\x05яȨ\x02ࠠࠡ\x05лȞ')
buf.write('\x02ࠡø\x03\x02\x02\x02ࠢࠣ\x05нȟ\x02ࠣ')
buf.write('ࠤ\x05яȨ\x02ࠤࠥ\x05ћȮ\x02ࠥ')
buf.write('ࠦ\x05хȣ\x02ࠦࠧ\x05ћȮ\x02ࠧ')
buf.write('ࠨ\x05ѥȳ\x02ࠨࠩ\x05нȟ\x02ࠩ')
buf.write('ࠪ\x05љȭ\x02ࠪࠫ\x05йȝ\x02ࠫ')
buf.write('ࠬ\x05еț\x02ࠬ࠭\x05ѓȪ\x02࠭')
buf.write('\u082e\x05хȣ\x02\u082e\u082f\x05яȨ\x02\u082f')
buf.write('࠰\x05сȡ\x02࠰ú\x03\x02\x02\x02࠱࠲')
buf.write('\x05нȟ\x02࠲࠳\x05їȬ\x02࠳࠴')
buf.write('\x05їȬ\x02࠴ü\x03\x02\x02\x02࠵࠶\x05н')
buf.write('ȟ\x02࠶࠷\x05їȬ\x02࠷࠸\x05ї')
buf.write('Ȭ\x02࠸࠹\x05ёȩ\x02࠹࠺\x05ї')
buf.write('Ȭ\x02࠺࠻\x05љȭ\x02࠻þ\x03\x02\x02')
buf.write('\x02࠼࠽\x05нȟ\x02࠽࠾\x05љȭ')
buf.write('\x02࠾\u083f\x05йȝ\x02\u083fࡀ\x05еț')
buf.write('\x02ࡀࡁ\x05ѓȪ\x02ࡁࡂ\x05нȟ')
buf.write('\x02ࡂĀ\x03\x02\x02\x02ࡃࡄ\x05нȟ\x02ࡄ')
buf.write('ࡅ\x05џȰ\x02ࡅࡆ\x05еț\x02ࡆ')
buf.write('ࡇ\x05ыȦ\x02ࡇࡈ\x05яȨ\x02ࡈ')
buf.write('ࡉ\x05еț\x02ࡉࡊ\x05эȧ\x02ࡊ')
buf.write('ࡋ\x05нȟ\x02ࡋĂ\x03\x02\x02\x02ࡌࡍ')
buf.write('\x05нȟ\x02ࡍࡎ\x05ѣȲ\x02ࡎࡏ')
buf.write('\x05йȝ\x02ࡏࡐ\x05нȟ\x02ࡐࡑ')
buf.write('\x05ѓȪ\x02ࡑࡒ\x05ћȮ\x02ࡒĄ')
buf.write('\x03\x02\x02\x02ࡓࡔ\x05нȟ\x02ࡔࡕ\x05ѣ')
buf.write('Ȳ\x02ࡕࡖ\x05йȝ\x02ࡖࡗ\x05н')
buf.write('ȟ\x02ࡗࡘ\x05ѓȪ\x02ࡘ࡙\x05ћ')
buf.write('Ȯ\x02࡙࡚\x05хȣ\x02࡚࡛\x05ё')
buf.write('ȩ\x02࡛\u085c\x05яȨ\x02\u085cĆ\x03\x02\x02')
buf.write('\x02\u085d࡞\x05нȟ\x02࡞\u085f\x05ѣȲ')
buf.write('\x02\u085fࡠ\x05йȝ\x02ࡠࡡ\x05нȟ')
buf.write('\x02ࡡࡢ\x05ѓȪ\x02ࡢࡣ\x05ћȮ')
buf.write('\x02ࡣࡤ\x05хȣ\x02ࡤࡥ\x05ёȩ')
buf.write('\x02ࡥࡦ\x05яȨ\x02ࡦࡧ\x07a\x02\x02ࡧ')
buf.write('ࡨ\x05хȣ\x02ࡨࡩ\x05яȨ\x02ࡩ')
buf.write('ࡪ\x05хȣ\x02ࡪ\u086b\x05ћȮ\x02\u086b')
buf.write('Ĉ\x03\x02\x02\x02\u086c\u086d\x05нȟ\x02\u086d\u086e')
buf.write('\x05ѣȲ\x02\u086e\u086f\x05йȝ\x02\u086fࡰ')
buf.write('\x05нȟ\x02ࡰࡱ\x05ѓȪ\x02ࡱࡲ')
buf.write('\x05ћȮ\x02ࡲࡳ\x05хȣ\x02ࡳࡴ')
buf.write('\x05ёȩ\x02ࡴࡵ\x05яȨ\x02ࡵࡶ')
buf.write('\x05љȭ\x02ࡶĊ\x03\x02\x02\x02ࡷࡸ\x05н')
buf.write('ȟ\x02ࡸࡹ\x05ѣȲ\x02ࡹࡺ\x05й')
buf.write('ȝ\x02ࡺࡻ\x05ыȦ\x02ࡻࡼ\x05ѝ')
buf.write('ȯ\x02ࡼࡽ\x05лȞ\x02ࡽࡾ\x05н')
buf.write('ȟ\x02ࡾČ\x03\x02\x02\x02ࡿࢀ\x05нȟ')
buf.write('\x02ࢀࢁ\x05ѣȲ\x02ࢁࢂ\x05йȝ')
buf.write('\x02ࢂࢃ\x05ыȦ\x02ࢃࢄ\x05ѝȯ')
buf.write('\x02ࢄࢅ\x05љȭ\x02ࢅࢆ\x05хȣ')
buf.write('\x02ࢆࢇ\x05џȰ\x02ࢇ࢈\x05нȟ')
buf.write('\x02࢈Ď\x03\x02\x02\x02ࢉࢊ\x05нȟ\x02ࢊ')
buf.write('ࢋ\x05ѣȲ\x02ࢋࢌ\x05нȟ\x02ࢌ')
buf.write('ࢍ\x05йȝ\x02ࢍࢎ\x05ѝȯ\x02ࢎ')
buf.write('\u088f\x05ћȮ\x02\u088f\u0890\x05нȟ\x02\u0890')
buf.write('Đ\x03\x02\x02\x02\u0891\u0892\x05нȟ\x02\u0892\u0893')
buf.write('\x05ѣȲ\x02\u0893\u0894\x05хȣ\x02\u0894\u0895')
buf.write('\x05љȭ\x02\u0895\u0896\x05ћȮ\x02\u0896\u0897')
buf.write('\x05љȭ\x02\u0897Ē\x03\x02\x02\x02࢙࢘\x05н')
buf.write('ȟ\x02࢙࢚\x05ѣȲ\x02࢚࢛\x05х')
buf.write('ȣ\x02࢛࢜\x05ћȮ\x02࢜Ĕ\x03\x02\x02')
buf.write('\x02࢝࢞\x05нȟ\x02࢞࢟\x05ѣȲ')
buf.write('\x02࢟ࢠ\x05ѓȪ\x02ࢠࢡ\x05ыȦ')
buf.write('\x02ࢡࢢ\x05еț\x02ࢢࢣ\x05хȣ')
buf.write('\x02ࢣࢤ\x05яȨ\x02ࢤĖ\x03\x02\x02\x02ࢥ')
buf.write('ࢦ\x05нȟ\x02ࢦࢧ\x05ѣȲ\x02ࢧ')
buf.write('ࢨ\x05ћȮ\x02ࢨࢩ\x05нȟ\x02ࢩ')
buf.write('ࢪ\x05їȬ\x02ࢪࢫ\x05яȨ\x02ࢫ')
buf.write('ࢬ\x05еț\x02ࢬࢭ\x05ыȦ\x02ࢭ')
buf.write('Ę\x03\x02\x02\x02ࢮࢯ\x05нȟ\x02ࢯࢰ')
buf.write('\x05ѣȲ\x02ࢰࢱ\x05ћȮ\x02ࢱࢲ')
buf.write('\x05їȬ\x02ࢲࢳ\x05еț\x02ࢳࢴ')
buf.write('\x05йȝ\x02ࢴࢵ\x05ћȮ\x02ࢵĚ')
buf.write('\x03\x02\x02\x02ࢶࢷ\x05пȠ\x02ࢷࢸ\x05е')
buf.write('ț\x02ࢸࢹ\x05хȣ\x02ࢹࢺ\x05ы')
buf.write('Ȧ\x02ࢺࢻ\x05ѝȯ\x02ࢻࢼ\x05ї')
buf.write('Ȭ\x02ࢼࢽ\x05нȟ\x02ࢽĜ\x03\x02\x02')
buf.write('\x02ࢾࢿ\x05пȠ\x02ࢿࣀ\x05еț')
buf.write('\x02ࣀࣁ\x05ыȦ\x02ࣁࣂ\x05љȭ')
buf.write('\x02ࣂࣃ\x05нȟ\x02ࣃĞ\x03\x02\x02\x02ࣄ')
buf.write('ࣅ\x05пȠ\x02ࣅࣆ\x05нȟ\x02ࣆ')
buf.write('ࣇ\x05ћȮ\x02ࣇࣈ\x05йȝ\x02ࣈ')
buf.write('ࣉ\x05уȢ\x02ࣉĠ\x03\x02\x02\x02࣊࣋')
buf.write('\x05пȠ\x02࣋࣌\x05хȣ\x02࣌࣍')
buf.write('\x05яȨ\x02࣍࣎\x05еț\x02࣏࣎')
buf.write('\x05ыȦ\x02࣏Ģ\x03\x02\x02\x02࣐࣑\x05п')
buf.write('Ƞ\x02࣑࣒\x05хȣ\x02࣒࣓\x05ї')
buf.write('Ȭ\x02࣓ࣔ\x05љȭ\x02ࣔࣕ\x05ћ')
buf.write('Ȯ\x02ࣕĤ\x03\x02\x02\x02ࣖࣗ\x05пȠ')
buf.write('\x02ࣗࣘ\x05хȣ\x02ࣘࣙ\x05їȬ')
buf.write('\x02ࣙࣚ\x05љȭ\x02ࣚࣛ\x05ћȮ')
buf.write('\x02ࣛࣜ\x07a\x02\x02ࣜࣝ\x05џȰ\x02ࣝ')
buf.write('ࣞ\x05еț\x02ࣞࣟ\x05ыȦ\x02ࣟ')
buf.write('࣠\x05ѝȯ\x02࣠࣡\x05нȟ\x02࣡')
buf.write('Ħ\x03\x02\x02\x02\u08e2ࣣ\x05пȠ\x02ࣣࣤ')
buf.write('\x05ыȦ\x02ࣤࣥ\x05ёȩ\x02ࣦࣥ')
buf.write('\x05еț\x02ࣦࣧ\x05ћȮ\x02ࣧĨ')
buf.write('\x03\x02\x02\x02ࣩࣨ\x05пȠ\x02ࣩ࣪\x05ё')
buf.write('ȩ\x02࣪࣫\x05ыȦ\x02࣫࣬\x05ы')
buf.write('Ȧ\x02࣭࣬\x05ёȩ\x02࣭࣮\x05ѡ')
buf.write('ȱ\x02࣮࣯\x05хȣ\x02ࣰ࣯\x05я')
buf.write('Ȩ\x02ࣰࣱ\x05сȡ\x02ࣱĪ\x03\x02\x02')
buf.write('\x02ࣲࣳ\x05пȠ\x02ࣳࣴ\x05ёȩ')
buf.write('\x02ࣴࣵ\x05ыȦ\x02ࣶࣵ\x05ыȦ')
buf.write('\x02ࣶࣷ\x05ёȩ\x02ࣷࣸ\x05ѡȱ')
buf.write('\x02ࣹࣸ\x05љȭ\x02ࣹĬ\x03\x02\x02\x02ࣺ')
buf.write('ࣻ\x05пȠ\x02ࣻࣼ\x05ёȩ\x02ࣼ')
buf.write('ࣽ\x05їȬ\x02ࣽĮ\x03\x02\x02\x02ࣾࣿ')
buf.write('\x05пȠ\x02ࣿऀ\x05ёȩ\x02ऀँ')
buf.write('\x05їȬ\x02ँं\x05еț\x02ंः')
buf.write('\x05ыȦ\x02ःऄ\x05ыȦ\x02ऄİ')
buf.write('\x03\x02\x02\x02अआ\x05пȠ\x02आइ\x05ё')
buf.write('ȩ\x02इई\x05їȬ\x02ईउ\x05й')
buf.write('ȝ\x02उऊ\x05нȟ\x02ऊIJ\x03\x02\x02')
buf.write('\x02ऋऌ\x05пȠ\x02ऌऍ\x05їȬ')
buf.write('\x02ऍऎ\x05ёȩ\x02ऎए\x05эȧ')
buf.write('\x02एĴ\x03\x02\x02\x02ऐऑ\x05пȠ\x02ऑ')
buf.write('ऒ\x05ѝȯ\x02ऒओ\x05ыȦ\x02ओ')
buf.write('औ\x05ыȦ\x02औĶ\x03\x02\x02\x02कख')
buf.write('\x05пȠ\x02खग\x05ѝȯ\x02गघ')
buf.write('\x05яȨ\x02घङ\x05йȝ\x02ङच')
buf.write('\x05ћȮ\x02चछ\x05хȣ\x02छज')
buf.write('\x05ёȩ\x02जझ\x05яȨ\x02झĸ')
buf.write('\x03\x02\x02\x02ञट\x05сȡ\x02टठ\x05ё')
buf.write('ȩ\x02ठड\x05ћȮ\x02डढ\x05ё')
buf.write('ȩ\x02ढĺ\x03\x02\x02\x02णत\x05сȡ')
buf.write('\x02तथ\x05їȬ\x02थद\x05еț')
buf.write('\x02दध\x05яȨ\x02धन\x05ћȮ')
buf.write('\x02नļ\x03\x02\x02\x02ऩप\x05сȡ\x02प')
buf.write('फ\x05їȬ\x02फब\x05ёȩ\x02ब')
buf.write('भ\x05ѝȯ\x02भम\x05ѓȪ\x02म')
buf.write('ľ\x03\x02\x02\x02यर\x05сȡ\x02रऱ')
buf.write('\x05їȬ\x02ऱल\x05ёȩ\x02लळ')
buf.write('\x05ѝȯ\x02ळऴ\x05ѓȪ\x02ऴव')
buf.write('\x05хȣ\x02वश\x05яȨ\x02शष')
buf.write('\x05сȡ\x02षŀ\x03\x02\x02\x02सह\x05у')
buf.write('Ȣ\x02हऺ\x05еț\x02ऺऻ\x05љ')
buf.write('ȭ\x02ऻ़\x05уȢ\x02़ł\x03\x02\x02')
buf.write('\x02ऽा\x05уȢ\x02ाि\x05еț')
buf.write('\x02िी\x05џȰ\x02ीु\x05хȣ')
buf.write('\x02ुू\x05яȨ\x02ूृ\x05сȡ')
buf.write('\x02ृń\x03\x02\x02\x02ॄॅ\x05уȢ\x02ॅ')
buf.write('ॆ\x05хȣ\x02ॆे\x05лȞ\x02े')
buf.write('ै\x05нȟ\x02ैņ\x03\x02\x02\x02ॉॊ')
buf.write('\x05уȢ\x02ॊो\x05ёȩ\x02ोौ')
buf.write('\x05ѝȯ\x02ौ्\x05їȬ\x02्ň')
buf.write('\x03\x02\x02\x02ॎॏ\x05хȣ\x02ॏॐ\x05п')
buf.write('Ƞ\x02ॐŊ\x03\x02\x02\x02॒॑\x05хȣ')
buf.write('\x02॒॓\x05сȡ\x02॓॔\x05яȨ')
buf.write('\x02॔ॕ\x05ёȩ\x02ॕॖ\x05їȬ')
buf.write('\x02ॖॗ\x05нȟ\x02ॗŌ\x03\x02\x02\x02क़')
buf.write('ख़\x05хȣ\x02ख़ग़\x05эȧ\x02ग़')
buf.write('ज़\x05эȧ\x02ज़ड़\x05нȟ\x02ड़')
buf.write('ढ़\x05лȞ\x02ढ़फ़\x05хȣ\x02फ़')
buf.write('य़\x05еț\x02य़ॠ\x05ћȮ\x02ॠ')
buf.write('ॡ\x05нȟ\x02ॡŎ\x03\x02\x02\x02ॢॣ')
buf.write('\x05хȣ\x02ॣ।\x05яȨ\x02।Ő')
buf.write('\x03\x02\x02\x02॥०\x05хȣ\x02०१\x05я')
buf.write('Ȩ\x02१२\x05йȝ\x02२३\x05ы')
buf.write('Ȧ\x02३४\x05ѝȯ\x02४५\x05л')
buf.write('Ȟ\x02५६\x05нȟ\x02६Œ\x03\x02\x02')
buf.write('\x02७८\x05хȣ\x02८९\x05яȨ')
buf.write('\x02९॰\x05йȝ\x02॰ॱ\x05ыȦ')
buf.write('\x02ॱॲ\x05ѝȯ\x02ॲॳ\x05лȞ')
buf.write('\x02ॳॴ\x05хȣ\x02ॴॵ\x05яȨ')
buf.write('\x02ॵॶ\x05сȡ\x02ॶŔ\x03\x02\x02\x02ॷ')
buf.write('ॸ\x05хȣ\x02ॸॹ\x05яȨ\x02ॹ')
buf.write('ॺ\x05йȝ\x02ॺॻ\x05їȬ\x02ॻ')
buf.write('ॼ\x05нȟ\x02ॼॽ\x05эȧ\x02ॽ')
buf.write('ॾ\x05нȟ\x02ॾॿ\x05яȨ\x02ॿ')
buf.write('ঀ\x05ћȮ\x02ঀŖ\x03\x02\x02\x02ঁং')
buf.write('\x05хȣ\x02ংঃ\x05яȨ\x02ঃ\u0984')
buf.write('\x05лȞ\x02\u0984অ\x05нȟ\x02অআ')
buf.write('\x05яȨ\x02আই\x05ћȮ\x02ইŘ')
buf.write('\x03\x02\x02\x02ঈউ\x05хȣ\x02উঊ\x05я')
buf.write('Ȩ\x02ঊঋ\x05лȞ\x02ঋঌ\x05н')
buf.write('ȟ\x02ঌ\u098d\x05ѣȲ\x02\u098dŚ\x03\x02\x02')
buf.write('\x02\u098eএ\x05хȣ\x02এঐ\x05яȨ')
buf.write('\x02ঐ\u0991\x05лȞ\x02\u0991\u0992\x05нȟ')
buf.write('\x02\u0992ও\x05ѣȲ\x02ওঔ\x05нȟ')
buf.write('\x02ঔক\x05лȞ\x02কŜ\x03\x02\x02\x02খ')
buf.write('গ\x05хȣ\x02গঘ\x05яȨ\x02ঘ')
buf.write('ঙ\x05лȞ\x02ঙচ\x05хȣ\x02চ')
buf.write('ছ\x05йȝ\x02ছজ\x05еț\x02জ')
buf.write('ঝ\x05ћȮ\x02ঝঞ\x05ёȩ\x02ঞ')
buf.write('ট\x05їȬ\x02টŞ\x03\x02\x02\x02ঠড')
buf.write('\x05хȣ\x02ডঢ\x05яȨ\x02ঢণ')
buf.write('\x05лȞ\x02ণত\x05хȣ\x02তথ')
buf.write('\x05йȝ\x02থদ\x05нȟ\x02দধ')
buf.write('\x05љȭ\x02ধŠ\x03\x02\x02\x02ন\u09a9\x05х')
buf.write('ȣ\x02\u09a9প\x05яȨ\x02পফ\x05п')
buf.write('Ƞ\x02ফব\x05хȣ\x02বভ\x05я')
buf.write('Ȩ\x02ভম\x05хȣ\x02ময\x05ћ')
buf.write('Ȯ\x02যর\x05нȟ\x02রŢ\x03\x02\x02')
buf.write('\x02\u09b1ল\x05хȣ\x02ল\u09b3\x05яȨ')
buf.write('\x02\u09b3\u09b4\x05ыȦ\x02\u09b4\u09b5\x05хȣ')
buf.write('\x02\u09b5শ\x05яȨ\x02শষ\x05нȟ')
buf.write('\x02ষŤ\x03\x02\x02\x02সহ\x05хȣ\x02হ')
buf.write('\u09ba\x05яȨ\x02\u09ba\u09bb\x05яȨ\x02\u09bb')
buf.write('়\x05нȟ\x02়ঽ\x05їȬ\x02ঽ')
buf.write('Ŧ\x03\x02\x02\x02াি\x05хȣ\x02িী')
buf.write('\x05яȨ\x02ীু\x05ёȩ\x02ুূ')
buf.write('\x05ѝȯ\x02ূৃ\x05ћȮ\x02ৃŨ')
buf.write('\x03\x02\x02\x02ৄ\u09c5\x05хȣ\x02\u09c5\u09c6\x05я')
buf.write('Ȩ\x02\u09c6ে\x05љȭ\x02েৈ\x05н')
buf.write('ȟ\x02ৈ\u09c9\x05їȬ\x02\u09c9\u09ca\x05ћ')
buf.write('Ȯ\x02\u09caŪ\x03\x02\x02\x02োৌ\x05хȣ')
buf.write('\x02ৌ্\x05яȨ\x02্ৎ\x05љȭ')
buf.write('\x02ৎ\u09cf\x05ћȮ\x02\u09cf\u09d0\x05еț')
buf.write('\x02\u09d0\u09d1\x05яȨ\x02\u09d1\u09d2\x05ћȮ')
buf.write('\x02\u09d2\u09d3\x05хȣ\x02\u09d3\u09d4\x05еț')
buf.write('\x02\u09d4\u09d5\x05зȜ\x02\u09d5\u09d6\x05ыȦ')
buf.write('\x02\u09d6ৗ\x05нȟ\x02ৗŬ\x03\x02\x02\x02\u09d8')
buf.write('\u09d9\x05хȣ\x02\u09d9\u09da\x05яȨ\x02\u09da')
buf.write('\u09db\x05љȭ\x02\u09dbড়\x05ћȮ\x02ড়')
buf.write('ঢ়\x05нȟ\x02ঢ়\u09de\x05еț\x02\u09de')
buf.write('য়\x05лȞ\x02য়Ů\x03\x02\x02\x02ৠৡ')
buf.write('\x05хȣ\x02ৡৢ\x05яȨ\x02ৢৣ')
buf.write('\x05ћȮ\x02ৣŰ\x03\x02\x02\x02\u09e4\u09e5\x05х')
buf.write('ȣ\x02\u09e5০\x05яȨ\x02০১\x05ћ')
buf.write('Ȯ\x02১২\x05нȟ\x02২৩\x05с')
buf.write('ȡ\x02৩৪\x05нȟ\x02৪৫\x05ї')
buf.write('Ȭ\x02৫Ų\x03\x02\x02\x02৬৭\x05хȣ')
buf.write('\x02৭৮\x05яȨ\x02৮৯\x05ћȮ')
buf.write('\x02৯ৰ\x05нȟ\x02ৰৱ\x05їȬ')
buf.write('\x02ৱ৲\x05љȭ\x02৲৳\x05нȟ')
buf.write('\x02৳৴\x05йȝ\x02৴৵\x05ћȮ')
buf.write('\x02৵Ŵ\x03\x02\x02\x02৶৷\x05хȣ\x02৷')
buf.write('৸\x05яȨ\x02৸৹\x05ћȮ\x02৹')
buf.write('৺\x05нȟ\x02৺৻\x05їȬ\x02৻')
buf.write('ৼ\x05џȰ\x02ৼ৽\x05еț\x02৽')
buf.write('৾\x05ыȦ\x02৾Ŷ\x03\x02\x02\x02\u09ff\u0a00')
buf.write('\x05хȣ\x02\u0a00ਁ\x05яȨ\x02ਁਂ')
buf.write('\x05ћȮ\x02ਂਃ\x05ёȩ\x02ਃŸ')
buf.write('\x03\x02\x02\x02\u0a04ਅ\x05хȣ\x02ਅਆ\x05я')
buf.write('Ȩ\x02ਆਇ\x05џȰ\x02ਇਈ\x05е')
buf.write('ț\x02ਈਉ\x05ыȦ\x02ਉਊ\x05х')
buf.write('ȣ\x02ਊ\u0a0b\x05лȞ\x02\u0a0b\u0a0c\x05е')
buf.write('ț\x02\u0a0c\u0a0d\x05ћȮ\x02\u0a0d\u0a0e\x05н')
buf.write('ȟ\x02\u0a0eź\x03\x02\x02\x02ਏਐ\x05хȣ')
buf.write('\x02ਐ\u0a11\x05љȭ\x02\u0a11ż\x03\x02\x02\x02\u0a12')
buf.write('ਓ\x05хȣ\x02ਓਔ\x05љȭ\x02ਔ')
buf.write('ਕ\x05ёȩ\x02ਕਖ\x05ыȦ\x02ਖ')
buf.write('ਗ\x05еț\x02ਗਘ\x05ћȮ\x02ਘ')
buf.write('ਙ\x05хȣ\x02ਙਚ\x05ёȩ\x02ਚ')
buf.write('ਛ\x05яȨ\x02ਛž\x03\x02\x02\x02ਜਝ')
buf.write('\x05хȣ\x02ਝਞ\x05ћȮ\x02ਞਟ')
buf.write('\x05нȟ\x02ਟਠ\x05їȬ\x02ਠਡ')
buf.write('\x05еț\x02ਡਢ\x05ћȮ\x02ਢਣ')
buf.write('\x05нȟ\x02ਣƀ\x03\x02\x02\x02ਤਥ\x05ч')
buf.write('Ȥ\x02ਥਦ\x05еț\x02ਦਧ\x05џ')
buf.write('Ȱ\x02ਧਨ\x05еț\x02ਨƂ\x03\x02\x02')
buf.write('\x02\u0a29ਪ\x05чȤ\x02ਪਫ\x05ёȩ')
buf.write('\x02ਫਬ\x05хȣ\x02ਬਭ\x05яȨ')
buf.write('\x02ਭƄ\x03\x02\x02\x02ਮਯ\x05щȥ\x02ਯ')
buf.write('ਰ\x05нȟ\x02ਰ\u0a31\x05нȟ\x02\u0a31')
buf.write('ਲ\x05ѓȪ\x02ਲƆ\x03\x02\x02\x02ਲ਼\u0a34')
buf.write('\x05ыȦ\x02\u0a34ਵ\x05еț\x02ਵਸ਼')
buf.write('\x05яȨ\x02ਸ਼\u0a37\x05сȡ\x02\u0a37ਸ')
buf.write('\x05ѝȯ\x02ਸਹ\x05еț\x02ਹ\u0a3a')
buf.write('\x05сȡ\x02\u0a3a\u0a3b\x05нȟ\x02\u0a3bƈ')
buf.write('\x03\x02\x02\x02਼\u0a3d\x05ыȦ\x02\u0a3dਾ\x05е')
buf.write('ț\x02ਾਿ\x05љȭ\x02ਿੀ\x05ћ')
buf.write('Ȯ\x02ੀƊ\x03\x02\x02\x02ੁੂ\x05ыȦ')
buf.write('\x02ੂ\u0a43\x05еț\x02\u0a43\u0a44\x05љȭ')
buf.write('\x02\u0a44\u0a45\x05ћȮ\x02\u0a45\u0a46\x07a\x02\x02\u0a46')
buf.write('ੇ\x05џȰ\x02ੇੈ\x05еț\x02ੈ')
buf.write('\u0a49\x05ыȦ\x02\u0a49\u0a4a\x05ѝȯ\x02\u0a4a')
buf.write('ੋ\x05нȟ\x02ੋƌ\x03\x02\x02\x02ੌ੍')
buf.write('\x05ыȦ\x02੍\u0a4e\x05нȟ\x02\u0a4e\u0a4f')
buf.write('\x05еț\x02\u0a4f\u0a50\x05лȞ\x02\u0a50ੑ')
buf.write('\x05хȣ\x02ੑ\u0a52\x05яȨ\x02\u0a52\u0a53')
buf.write('\x05сȡ\x02\u0a53Ǝ\x03\x02\x02\x02\u0a54\u0a55\x05ы')
buf.write('Ȧ\x02\u0a55\u0a56\x05нȟ\x02\u0a56\u0a57\x05п')
buf.write('Ƞ\x02\u0a57\u0a58\x05ћȮ\x02\u0a58Ɛ\x03\x02\x02')
buf.write('\x02ਖ਼ਗ਼\x05ыȦ\x02ਗ਼ਜ਼\x05нȟ')
buf.write('\x02ਜ਼ੜ\x05џȰ\x02ੜ\u0a5d\x05нȟ')
buf.write('\x02\u0a5dਫ਼\x05ыȦ\x02ਫ਼ƒ\x03\x02\x02\x02\u0a5f')
buf.write('\u0a60\x05ыȦ\x02\u0a60\u0a61\x05хȣ\x02\u0a61')
buf.write('\u0a62\x05зȜ\x02\u0a62\u0a63\x05їȬ\x02\u0a63')
buf.write('\u0a64\x05еț\x02\u0a64\u0a65\x05їȬ\x02\u0a65')
buf.write('੦\x05ѥȳ\x02੦Ɣ\x03\x02\x02\x02੧੨')
buf.write('\x05ыȦ\x02੨੩\x05хȣ\x02੩੪')
buf.write('\x05щȥ\x02੪੫\x05нȟ\x02੫Ɩ')
buf.write('\x03\x02\x02\x02੬੭\x05ыȦ\x02੭੮\x05х')
buf.write('ȣ\x02੮੯\x05щȥ\x02੯ੰ\x05н')
buf.write('ȟ\x02ੰੱ\x074\x02\x02ੱƘ\x03\x02\x02\x02ੲ')
buf.write('ੳ\x05ыȦ\x02ੳੴ\x05хȣ\x02ੴ')
buf.write('ੵ\x05щȥ\x02ੵ੶\x05нȟ\x02੶')
buf.write('\u0a77\x076\x02\x02\u0a77ƚ\x03\x02\x02\x02\u0a78\u0a79\x05ы'
)
buf.write('Ȧ\x02\u0a79\u0a7a\x05хȣ\x02\u0a7a\u0a7b\x05щ')
buf.write('ȥ\x02\u0a7b\u0a7c\x05нȟ\x02\u0a7c\u0a7d\x05й')
buf.write('ȝ\x02\u0a7dƜ\x03\x02\x02\x02\u0a7e\u0a7f\x05ыȦ')
buf.write('\x02\u0a7f\u0a80\x05хȣ\x02\u0a80ઁ\x05эȧ')
buf.write('\x02ઁં\x05хȣ\x02ંઃ\x05ћȮ')
buf.write('\x02ઃƞ\x03\x02\x02\x02\u0a84અ\x05ыȦ\x02અ')
buf.write('આ\x05ёȩ\x02આઇ\x05йȝ\x02ઇ')
buf.write('ઈ\x05еț\x02ઈઉ\x05ыȦ\x02ઉ')
buf.write('Ơ\x03\x02\x02\x02ઊઋ\x05ыȦ\x02ઋઌ')
buf.write('\x05ёȩ\x02ઌઍ\x05йȝ\x02ઍ\u0a8e')
buf.write('\x05щȥ\x02\u0a8eƢ\x03\x02\x02\x02એઐ\x05ы')
buf.write('Ȧ\x02ઐઑ\x05ёȩ\x02ઑ\u0a92\x05й')
buf.write('ȝ\x02\u0a92ઓ\x05щȥ\x02ઓઔ\x05н')
buf.write('ȟ\x02ઔક\x05лȞ\x02કƤ\x03\x02\x02')
buf.write('\x02ખગ\x05ыȦ\x02ગઘ\x05ёȩ')
buf.write('\x02ઘઙ\x05сȡ\x02ઙƦ\x03\x02\x02\x02ચ')
buf.write('છ\x05ыȦ\x02છજ\x05ёȩ\x02જ')
buf.write('ઝ\x05сȡ\x02ઝઞ\x05ёȩ\x02ઞ')
buf.write('ટ\x05пȠ\x02ટઠ\x05пȠ\x02ઠ')
buf.write('ƨ\x03\x02\x02\x02ડઢ\x05ыȦ\x02ઢણ')
buf.write('\x05ёȩ\x02ણત\x05сȡ\x02તથ')
buf.write('\x05ёȩ\x02થદ\x05яȨ\x02દƪ')
buf.write('\x03\x02\x02\x02ધન\x05ыȦ\x02ન\u0aa9\x05ё')
buf.write('ȩ\x02\u0aa9પ\x05яȨ\x02પફ\x05с')
buf.write('ȡ\x02ફƬ\x03\x02\x02\x02બભ\x05ыȦ')
buf.write('\x02ભમ\x05ёȩ\x02મય\x05ёȩ')
buf.write('\x02યર\x05ѓȪ\x02રƮ\x03\x02\x02\x02\u0ab1')
buf.write('લ\x05эȧ\x02લળ\x05еț\x02ળ')
buf.write('\u0ab4\x05хȣ\x02\u0ab4વ\x05яȨ\x02વ')
buf.write('ư\x03\x02\x02\x02શષ\x05эȧ\x02ષસ')
buf.write('\x05еț\x02સહ\x05ѓȪ\x02હƲ')
buf.write('\x03\x02\x02\x02\u0aba\u0abb\x05эȧ\x02\u0abb઼\x05е')
buf.write('ț\x02઼ઽ\x05ћȮ\x02ઽા\x05й')
buf.write('ȝ\x02ાિ\x05уȢ\x02િી\x05н')
buf.write('ȟ\x02ીુ\x05лȞ\x02ુƴ\x03\x02\x02')
buf.write('\x02ૂૃ\x05эȧ\x02ૃૄ\x05еț')
buf.write('\x02ૄૅ\x05ѣȲ\x02ૅ\u0ac6\x05џȰ')
buf.write('\x02\u0ac6ે\x05еț\x02ેૈ\x05ыȦ')
buf.write('\x02ૈૉ\x05ѝȯ\x02ૉ\u0aca\x05нȟ')
buf.write('\x02\u0acaƶ\x03\x02\x02\x02ોૌ\x05эȧ\x02ૌ')
buf.write('્\x05нȟ\x02્\u0ace\x05еț\x02\u0ace')
buf.write('\u0acf\x05љȭ\x02\u0acfૐ\x05ѝȯ\x02ૐ')
buf.write('\u0ad1\x05їȬ\x02\u0ad1\u0ad2\x05нȟ\x02\u0ad2')
buf.write('\u0ad3\x05љȭ\x02\u0ad3Ƹ\x03\x02\x02\x02\u0ad4\u0ad5')
buf.write('\x05эȧ\x02\u0ad5\u0ad6\x05нȟ\x02\u0ad6\u0ad7')
buf.write('\x05эȧ\x02\u0ad7\u0ad8\x05зȜ\x02\u0ad8\u0ad9')
buf.write('\x05нȟ\x02\u0ad9\u0ada\x05їȬ\x02\u0adaƺ')
buf.write('\x03\x02\x02\x02\u0adb\u0adc\x05эȧ\x02\u0adc\u0add\x05н')
buf.write('ȟ\x02\u0add\u0ade\x05їȬ\x02\u0ade\u0adf\x05с')
buf.write('ȡ\x02\u0adfૠ\x05нȟ\x02ૠƼ\x03\x02\x02')
buf.write('\x02ૡૢ\x05эȧ\x02ૢૣ\x05хȣ')
buf.write('\x02ૣ\u0ae4\x05яȨ\x02\u0ae4\u0ae5\x05ѝȯ')
buf.write('\x02\u0ae5૦\x05љȭ\x02૦ƾ\x03\x02\x02\x02૧')
buf.write('૨\x05эȧ\x02૨૩\x05хȣ\x02૩')
buf.write('૪\x05яȨ\x02૪૫\x05ѝȯ\x02૫')
buf.write('૬\x05ћȮ\x02૬૭\x05нȟ\x02૭')
buf.write('ǀ\x03\x02\x02\x02૮૯\x05эȧ\x02૯૰')
buf.write('\x05хȣ\x02૰૱\x05яȨ\x02૱\u0af2')
buf.write('\x05џȰ\x02\u0af2\u0af3\x05еț\x02\u0af3\u0af4')
buf.write('\x05ыȦ\x02\u0af4\u0af5\x05ѝȯ\x02\u0af5\u0af6')
buf.write('\x05нȟ\x02\u0af6ǂ\x03\x02\x02\x02\u0af7\u0af8\x05э')
buf.write('ȧ\x02\u0af8ૹ\x05ыȦ\x02ૹૺ\x05љ')
buf.write('ȭ\x02ૺૻ\x05ыȦ\x02ૻૼ\x05е')
buf.write('ț\x02ૼ૽\x05зȜ\x02૽૾\x05н')
buf.write('ȟ\x02૾૿\x05ыȦ\x02૿DŽ\x03\x02\x02')
buf.write('\x02\u0b00ଁ\x05эȧ\x02ଁଂ\x05ёȩ')
buf.write('\x02ଂଃ\x05лȞ\x02ଃ\u0b04\x05нȟ')
buf.write('\x02\u0b04dž\x03\x02\x02\x02ଅଆ\x05эȧ\x02ଆ')
buf.write('ଇ\x05ёȩ\x02ଇଈ\x05лȞ\x02ଈ')
buf.write('ଉ\x05нȟ\x02ଉଊ\x05ыȦ\x02ଊ')
buf.write('Lj\x03\x02\x02\x02ଋଌ\x05эȧ\x02ଌ\u0b0d')
buf.write('\x05ёȩ\x02\u0b0d\u0b0e\x05лȞ\x02\u0b0eଏ')
buf.write('\x05хȣ\x02ଏଐ\x05пȠ\x02ଐ\u0b11')
buf.write('\x05ѥȳ\x02\u0b11NJ\x03\x02\x02\x02\u0b12ଓ\x05э')
buf.write('ȧ\x02ଓଔ\x05ёȩ\x02ଔକ\x05я')
buf.write('Ȩ\x02କଖ\x05ћȮ\x02ଖଗ\x05у')
buf.write('Ȣ\x02ଗnj\x03\x02\x02\x02ଘଙ\x05эȧ')
buf.write('\x02ଙଚ\x05ѝȯ\x02ଚଛ\x05ыȦ')
buf.write('\x02ଛଜ\x05ћȮ\x02ଜଝ\x05хȣ')
buf.write('\x02ଝଞ\x05љȭ\x02ଞଟ\x05нȟ')
buf.write('\x02ଟଠ\x05ћȮ\x02ଠǎ\x03\x02\x02\x02ଡ')
buf.write('ଢ\x05яȨ\x02ଢଣ\x05еț\x02ଣ')
buf.write('ତ\x05эȧ\x02ତଥ\x05нȟ\x02ଥ')
buf.write('ǐ\x03\x02\x02\x02ଦଧ\x05яȨ\x02ଧନ')
buf.write('\x05еț\x02ନ\u0b29\x05яȨ\x02\u0b29ǒ')
buf.write('\x03\x02\x02\x02ପଫ\x05яȨ\x02ଫବ\x05е')
buf.write('ț\x02ବଭ\x05ћȮ\x02ଭମ\x05ѝ')
buf.write('ȯ\x02ମଯ\x05їȬ\x02ଯର\x05е')
buf.write('ț\x02ର\u0b31\x05ыȦ\x02\u0b31ǔ\x03\x02\x02')
buf.write('\x02ଲଳ\x05яȨ\x02ଳ\u0b34\x05еț')
buf.write('\x02\u0b34ଵ\x05ћȮ\x02ଵଶ\x05ѝȯ')
buf.write('\x02ଶଷ\x05їȬ\x02ଷସ\x05еț')
buf.write('\x02ସହ\x05ыȦ\x02ହ\u0b3a\x05яȨ')
buf.write('\x02\u0b3aǖ\x03\x02\x02\x02\u0b3b଼\x05яȨ\x02଼')
buf.write('ଽ\x05еț\x02ଽା\x05џȰ\x02ା')
buf.write('ǘ\x03\x02\x02\x02ିୀ\x05яȨ\x02ୀୁ')
buf.write('\x05йȝ\x02ୁୂ\x05уȢ\x02ୂୃ')
buf.write('\x05еț\x02ୃୄ\x05їȬ\x02ୄǚ')
buf.write('\x03\x02\x02\x02\u0b45\u0b46\x05яȨ\x02\u0b46େ\x05й')
buf.write('ȝ\x02େୈ\x05уȢ\x02ୈ\u0b49\x05е')
buf.write('ț\x02\u0b49\u0b4a\x05їȬ\x02\u0b4aୋ\x07a\x02')
buf.write('\x02ୋୌ\x05йȝ\x02ୌ୍\x05љȭ')
buf.write('\x02୍ǜ\x03\x02\x02\x02\u0b4e\u0b4f\x05яȨ\x02\u0b4f')
buf.write('\u0b50\x05йȝ\x02\u0b50\u0b51\x05ыȦ\x02\u0b51')
buf.write('\u0b52\x05ёȩ\x02\u0b52\u0b53\x05зȜ\x02\u0b53')
buf.write('Ǟ\x03\x02\x02\x02\u0b54୕\x05яȨ\x02୕ୖ')
buf.write('\x05нȟ\x02ୖୗ\x05љȭ\x02ୗ\u0b58')
buf.write('\x05ћȮ\x02\u0b58\u0b59\x05нȟ\x02\u0b59\u0b5a')
buf.write('\x05лȞ\x02\u0b5aǠ\x03\x02\x02\x02\u0b5bଡ଼\x05я')
buf.write('Ȩ\x02ଡ଼ଢ଼\x05нȟ\x02ଢ଼\u0b5e\x05ѡ')
buf.write('ȱ\x02\u0b5eǢ\x03\x02\x02\x02ୟୠ\x05яȨ')
buf.write('\x02ୠୡ\x05ёȩ\x02ୡǤ\x03\x02\x02\x02ୢ')
buf.write('ୣ\x05яȨ\x02ୣ\u0b64\x05ёȩ\x02\u0b64')
buf.write('\u0b65\x05еț\x02\u0b65୦\x05ѝȯ\x02୦')
buf.write('୧\x05лȞ\x02୧୨\x05хȣ\x02୨')
buf.write('୩\x05ћȮ\x02୩Ǧ\x03\x02\x02\x02୪୫')
buf.write('\x05яȨ\x02୫୬\x05ёȩ\x02୬୭')
buf.write('\x05йȝ\x02୭୮\x05еț\x02୮୯')
buf.write('\x05йȝ\x02୯୰\x05уȢ\x02୰ୱ')
buf.write('\x05нȟ\x02ୱǨ\x03\x02\x02\x02୲୳\x05я')
buf.write('Ȩ\x02୳୴\x05ёȩ\x02୴୵\x05й')
buf.write('ȝ\x02୵୶\x05ёȩ\x02୶୷\x05ѓ')
buf.write('Ȫ\x02୷\u0b78\x05ѥȳ\x02\u0b78Ǫ\x03\x02\x02')
buf.write('\x02\u0b79\u0b7a\x05яȨ\x02\u0b7a\u0b7b\x05ёȩ')
buf.write('\x02\u0b7b\u0b7c\x05йȝ\x02\u0b7c\u0b7d\x05ѥȳ')
buf.write('\x02\u0b7d\u0b7e\x05йȝ\x02\u0b7e\u0b7f\x05ыȦ')
buf.write('\x02\u0b7f\u0b80\x05нȟ\x02\u0b80Ǭ\x03\x02\x02\x02\u0b81')
buf.write('ஂ\x05яȨ\x02ஂஃ\x05ёȩ\x02ஃ')
buf.write('\u0b84\x05нȟ\x02\u0b84அ\x05яȨ\x02அ')
buf.write('ஆ\x05ћȮ\x02ஆஇ\x05хȣ\x02இ')
buf.write('ஈ\x05ћȮ\x02ஈஉ\x05ѥȳ\x02உ')
buf.write('ஊ\x05нȟ\x02ஊ\u0b8b\x05љȭ\x02\u0b8b')
buf.write('\u0b8c\x05йȝ\x02\u0b8c\u0b8d\x05еț\x02\u0b8d')
buf.write('எ\x05ѓȪ\x02எஏ\x05хȣ\x02ஏ')
buf.write('ஐ\x05яȨ\x02ஐ\u0b91\x05сȡ\x02\u0b91')
buf.write('Ǯ\x03\x02\x02\x02ஒஓ\x05яȨ\x02ஓஔ')
buf.write('\x05ёȩ\x02ஔக\x05эȧ\x02க\u0b96')
buf.write('\x05еț\x02\u0b96\u0b97\x05ѣȲ\x02\u0b97\u0b98')
buf.write('\x05џȰ\x02\u0b98ங\x05еț\x02ஙச')
buf.write('\x05ыȦ\x02ச\u0b9b\x05ѝȯ\x02\u0b9bஜ')
buf.write('\x05нȟ\x02ஜǰ\x03\x02\x02\x02\u0b9dஞ\x05я')
buf.write('Ȩ\x02ஞட\x05ёȩ\x02ட\u0ba0\x05э')
buf.write('ȧ\x02\u0ba0\u0ba1\x05хȣ\x02\u0ba1\u0ba2\x05я')
buf.write('Ȩ\x02\u0ba2ண\x05џȰ\x02ணத\x05е')
buf.write('ț\x02த\u0ba5\x05ыȦ\x02\u0ba5\u0ba6\x05ѝ')
buf.write('ȯ\x02\u0ba6\u0ba7\x05нȟ\x02\u0ba7Dz\x03\x02\x02')
buf.write('\x02நன\x05яȨ\x02னப\x05ёȩ')
buf.write('\x02ப\u0bab\x05яȨ\x02\u0bab\u0bac\x05нȟ')
buf.write('\x02\u0bacǴ\x03\x02\x02\x02\u0badம\x05яȨ\x02ம')
buf.write('ய\x05ёȩ\x02யர\x05ёȩ\x02ர')
buf.write('ற\x05їȬ\x02றல\x05лȞ\x02ல')
buf.write('ள\x05нȟ\x02ளழ\x05їȬ\x02ழ')
buf.write('Ƕ\x03\x02\x02\x02வஶ\x05яȨ\x02ஶஷ')
buf.write('\x05ёȩ\x02ஷஸ\x05љȭ\x02ஸஹ')
buf.write('\x05йȝ\x02ஹ\u0bba\x05уȢ\x02\u0bba\u0bbb')
buf.write('\x05нȟ\x02\u0bbb\u0bbc\x05эȧ\x02\u0bbc\u0bbd')
buf.write('\x05еț\x02\u0bbdா\x05йȝ\x02ாி')
buf.write('\x05уȢ\x02ிீ\x05нȟ\x02ீு')
buf.write('\x05йȝ\x02ுூ\x05щȥ\x02ூǸ')
buf.write('\x03\x02\x02\x02\u0bc3\u0bc4\x05яȨ\x02\u0bc4\u0bc5\x05ё')
buf.write('ȩ\x02\u0bc5ெ\x05ћȮ\x02ெǺ\x03\x02\x02')
buf.write('\x02ேை\x05яȨ\x02ை\u0bc9\x05ёȩ')
buf.write('\x02\u0bc9ொ\x05ѡȱ\x02ொோ\x05еț')
buf.write('\x02ோௌ\x05хȣ\x02ௌ்\x05ћȮ')
buf.write('\x02்Ǽ\x03\x02\x02\x02\u0bce\u0bcf\x05яȨ\x02\u0bcf')
buf.write('ௐ\x05ѝȯ\x02ௐ\u0bd1\x05ыȦ\x02\u0bd1')
buf.write('\u0bd2\x05ыȦ\x02\u0bd2Ǿ\x03\x02\x02\x02\u0bd3\u0bd4')
buf.write('\x05яȨ\x02\u0bd4\u0bd5\x05ѝȯ\x02\u0bd5\u0bd6')
buf.write('\x05ыȦ\x02\u0bd6ௗ\x05ыȦ\x02ௗ\u0bd8')
buf.write('\x05љȭ\x02\u0bd8Ȁ\x03\x02\x02\x02\u0bd9\u0bda\x05я')
buf.write('Ȩ\x02\u0bda\u0bdb\x05ѝȯ\x02\u0bdb\u0bdc\x05э')
buf.write('ȧ\x02\u0bdc\u0bdd\x05зȜ\x02\u0bdd\u0bde\x05н')
buf.write('ȟ\x02\u0bde\u0bdf\x05їȬ\x02\u0bdfȂ\x03\x02\x02')
buf.write('\x02\u0be0\u0be1\x05яȨ\x02\u0be1\u0be2\x05ѝȯ')
buf.write('\x02\u0be2\u0be3\x05эȧ\x02\u0be3\u0be4\x05нȟ')
buf.write('\x02\u0be4\u0be5\x05їȬ\x02\u0be5௦\x05хȣ')
buf.write('\x02௦௧\x05йȝ\x02௧Ȅ\x03\x02\x02\x02௨')
buf.write('௩\x05яȨ\x02௩௪\x05џȰ\x02௪')
buf.write('௫\x05еț\x02௫௬\x05їȬ\x02௬')
buf.write('௭\x05йȝ\x02௭௮\x05уȢ\x02௮')
buf.write('௯\x05еț\x02௯௰\x05їȬ\x02௰')
buf.write('௱\x074\x02\x02௱Ȇ\x03\x02\x02\x02௲௳\x05ё')
buf.write('ȩ\x02௳௴\x05зȜ\x02௴௵\x05ч')
buf.write('Ȥ\x02௵௶\x05нȟ\x02௶௷\x05й')
buf.write('ȝ\x02௷௸\x05ћȮ\x02௸Ȉ\x03\x02\x02')
buf.write('\x02௹௺\x05ёȩ\x02௺\u0bfb\x05пȠ')
buf.write('\x02\u0bfbȊ\x03\x02\x02\x02\u0bfc\u0bfd\x05ёȩ\x02\u0bfd')
buf.write('\u0bfe\x05пȠ\x02\u0bfe\u0bff\x05пȠ\x02\u0bff')
buf.write('Ȍ\x03\x02\x02\x02ఀఁ\x05ёȩ\x02ఁం')
buf.write('\x05хȣ\x02ంః\x05лȞ\x02ఃȎ')
buf.write('\x03\x02\x02\x02ఄఅ\x05ёȩ\x02అఆ\x05ы')
buf.write('Ȧ\x02ఆఇ\x05лȞ\x02ఇȐ\x03\x02\x02')
buf.write('\x02ఈఉ\x05ёȩ\x02ఉఊ\x05яȨ')
buf.write('\x02ఊȒ\x03\x02\x02\x02ఋఌ\x05ёȩ\x02ఌ')
buf.write('\u0c0d\x05яȨ\x02\u0c0dఎ\x05ыȦ\x02ఎ')
buf.write('ఏ\x05ѥȳ\x02ఏȔ\x03\x02\x02\x02ఐ\u0c11')
buf.write('\x05ёȩ\x02\u0c11ఒ\x05ѓȪ\x02ఒఓ')
buf.write('\x05нȟ\x02ఓఔ\x05яȨ\x02ఔȖ')
buf.write('\x03\x02\x02\x02కఖ\x05ёȩ\x02ఖగ\x05ѓ')
buf.write('Ȫ\x02గఘ\x05ћȮ\x02ఘఙ\x05х')
buf.write('ȣ\x02ఙచ\x05ёȩ\x02చఛ\x05я')
buf.write('Ȩ\x02ఛȘ\x03\x02\x02\x02జఝ\x05ёȩ')
buf.write('\x02ఝఞ\x05їȬ\x02ఞȚ\x03\x02\x02\x02ట')
buf.write('ఠ\x05ёȩ\x02ఠడ\x05їȬ\x02డ')
buf.write('ఢ\x05еț\x02ఢణ\x05лȞ\x02ణ')
buf.write('త\x05еț\x02తథ\x05ћȮ\x02థ')
buf.write('ద\x05еț\x02దȜ\x03\x02\x02\x02ధన')
buf.write('\x05ёȩ\x02న\u0c29\x05їȬ\x02\u0c29ప')
buf.write('\x05лȞ\x02పఫ\x05нȟ\x02ఫబ')
buf.write('\x05їȬ\x02బȞ\x03\x02\x02\x02భమ\x05ё')
buf.write('ȩ\x02మయ\x05їȬ\x02యర\x05л')
buf.write('Ȟ\x02రఱ\x05хȣ\x02ఱల\x05я')
buf.write('Ȩ\x02లళ\x05еț\x02ళఴ\x05ы')
buf.write('Ȧ\x02ఴవ\x05хȣ\x02వశ\x05ћ')
buf.write('Ȯ\x02శష\x05ѥȳ\x02షȠ\x03\x02\x02')
buf.write('\x02సహ\x05ёȩ\x02హ\u0c3a\x05љȭ')
buf.write('\x02\u0c3a\u0c3b\x05нȟ\x02\u0c3b఼\x05їȬ')
buf.write('\x02఼ఽ\x05їȬ\x02ఽా\x05ёȩ')
buf.write('\x02ాి\x05їȬ\x02ిȢ\x03\x02\x02\x02ీ')
buf.write('ు\x05ёȩ\x02ుూ\x05ѝȯ\x02ూ')
buf.write('ృ\x05ћȮ\x02ృȤ\x03\x02\x02\x02ౄ\u0c45')
buf.write('\x05ёȩ\x02\u0c45ె\x05ѝȯ\x02ెే')
buf.write('\x05ћȮ\x02ేై\x05нȟ\x02ై\u0c49')
buf.write('\x05їȬ\x02\u0c49Ȧ\x03\x02\x02\x02ొో\x05ё')
buf.write('ȩ\x02ోౌ\x05џȰ\x02ౌ్\x05н')
buf.write('ȟ\x02్\u0c4e\x05їȬ\x02\u0c4eȨ\x03\x02\x02')
buf.write('\x02\u0c4f\u0c50\x05ёȩ\x02\u0c50\u0c51\x05џȰ')
buf.write('\x02\u0c51\u0c52\x05нȟ\x02\u0c52\u0c53\x05їȬ')
buf.write('\x02\u0c53\u0c54\x05їȬ\x02\u0c54ౕ\x05хȣ')
buf.write('\x02ౕౖ\x05лȞ\x02ౖ\u0c57\x05хȣ')
buf.write('\x02\u0c57ౘ\x05яȨ\x02ౘౙ\x05сȡ')
buf.write('\x02ౙȪ\x03\x02\x02\x02ౚ\u0c5b\x05ѓȪ\x02\u0c5b')
buf.write('\u0c5c\x05еț\x02\u0c5cౝ\x05йȝ\x02ౝ')
buf.write('\u0c5e\x05щȥ\x02\u0c5e\u0c5f\x05еț\x02\u0c5f')
buf.write('ౠ\x05сȡ\x02ౠౡ\x05нȟ\x02ౡ')
buf.write('Ȭ\x03\x02\x02\x02ౢౣ\x05ѓȪ\x02ౣ\u0c64')
buf.write('\x05еț\x02\u0c64\u0c65\x05їȬ\x02\u0c65౦')
buf.write('\x05еț\x02౦౧\x05ыȦ\x02౧౨')
buf.write('\x05ыȦ\x02౨౩\x05нȟ\x02౩౪')
buf.write('\x05ыȦ\x02౪౫\x07a\x02\x02౫౬\x05н')
buf.write('ȟ\x02౬౭\x05яȨ\x02౭౮\x05е')
buf.write('ț\x02౮౯\x05зȜ\x02౯\u0c70\x05ы')
buf.write('Ȧ\x02\u0c70\u0c71\x05нȟ\x02\u0c71Ȯ\x03\x02\x02')
buf.write('\x02\u0c72\u0c73\x05ѓȪ\x02\u0c73\u0c74\x05еț')
buf.write('\x02\u0c74\u0c75\x05їȬ\x02\u0c75\u0c76\x05еț')
buf.write('\x02\u0c76౷\x05эȧ\x02౷౸\x05нȟ')
buf.write('\x02౸౹\x05ћȮ\x02౹౺\x05нȟ')
buf.write('\x02౺౻\x05їȬ\x02౻౼\x05љȭ')
buf.write('\x02౼Ȱ\x03\x02\x02\x02౽౾\x05ѓȪ\x02౾')
buf.write('౿\x05еț\x02౿ಀ\x05їȬ\x02ಀ')
buf.write('ಁ\x05нȟ\x02ಁಂ\x05яȨ\x02ಂ')
buf.write('ಃ\x05ћȮ\x02ಃȲ\x03\x02\x02\x02಄ಅ')
buf.write('\x05ѓȪ\x02ಅಆ\x05еț\x02ಆಇ')
buf.write('\x05їȬ\x02ಇಈ\x05ћȮ\x02ಈಉ')
buf.write('\x05хȣ\x02ಉಊ\x05ћȮ\x02ಊಋ')
buf.write('\x05хȣ\x02ಋಌ\x05ёȩ\x02ಌ\u0c8d')
buf.write('\x05яȨ\x02\u0c8dȴ\x03\x02\x02\x02ಎಏ\x05ѓ')
buf.write('Ȫ\x02ಏಐ\x05еț\x02ಐ\u0c91\x05љ')
buf.write('ȭ\x02\u0c91ಒ\x05љȭ\x02ಒಓ\x05х')
buf.write('ȣ\x02ಓಔ\x05яȨ\x02ಔಕ\x05с')
buf.write('ȡ\x02ಕȶ\x03\x02\x02\x02ಖಗ\x05ѓȪ')
buf.write('\x02ಗಘ\x05еț\x02ಘಙ\x05ћȮ')
buf.write('\x02ಙಚ\x05уȢ\x02ಚȸ\x03\x02\x02\x02ಛ')
buf.write("ಜ\x07'\x02\x02ಜಝ\x05їȬ\x02ಝಞ")
buf.write('\x05ёȩ\x02ಞಟ\x05ѡȱ\x02ಟಠ')
buf.write('\x05ћȮ\x02ಠಡ\x05ѥȳ\x02ಡಢ')
buf.write('\x05ѓȪ\x02ಢಣ\x05нȟ\x02ಣȺ')
buf.write("\x03\x02\x02\x02ತಥ\x07'\x02\x02ಥದ\x05ћȮ")
buf.write('\x02ದಧ\x05ѥȳ\x02ಧನ\x05ѓȪ')
buf.write('\x02ನ\u0ca9\x05нȟ\x02\u0ca9ȼ\x03\x02\x02\x02ಪ')
buf.write('ಫ\x05ѓȪ\x02ಫಬ\x05хȣ\x02ಬ')
buf.write('ಭ\x05ѓȪ\x02ಭಮ\x05нȟ\x02ಮ')
buf.write('ಯ\x05ыȦ\x02ಯರ\x05хȣ\x02ರ')
buf.write('ಱ\x05яȨ\x02ಱಲ\x05нȟ\x02ಲ')
buf.write('ಳ\x05лȞ\x02ಳȾ\x03\x02\x02\x02\u0cb4ವ')
buf.write('\x05ѓȪ\x02ವಶ\x05хȣ\x02ಶಷ')
buf.write('\x05џȰ\x02ಷಸ\x05ёȩ\x02ಸಹ')
buf.write('\x05ћȮ\x02ಹɀ\x03\x02\x02\x02\u0cba\u0cbb\x05ѓ')
buf.write('Ȫ\x02\u0cbb಼\x05ыȦ\x02಼ಽ\x05е')
buf.write('ț\x02ಽಾ\x05яȨ\x02ಾɂ\x03\x02\x02')
buf.write('\x02ಿೀ\x05ѓȪ\x02ೀು\x05ыȦ')
buf.write('\x02ುೂ\x05љȭ\x02ೂೃ\x07a\x02\x02ೃ')
buf.write('ೄ\x05хȣ\x02ೄ\u0cc5\x05яȨ\x02\u0cc5')
buf.write('ೆ\x05ћȮ\x02ೆೇ\x05нȟ\x02ೇ')
buf.write('ೈ\x05сȡ\x02ೈ\u0cc9\x05нȟ\x02\u0cc9')
buf.write('ೊ\x05їȬ\x02ೊɄ\x03\x02\x02\x02ೋೌ')
buf.write('\x05ѓȪ\x02ೌ್\x05ёȩ\x02್\u0cce')
buf.write('\x05љȭ\x02\u0cce\u0ccf\x05хȣ\x02\u0ccf\u0cd0')
buf.write('\x05ћȮ\x02\u0cd0\u0cd1\x05хȣ\x02\u0cd1\u0cd2')
buf.write('\x05џȰ\x02\u0cd2\u0cd3\x05нȟ\x02\u0cd3Ɇ')
buf.write('\x03\x02\x02\x02\u0cd4ೕ\x05ѓȪ\x02ೕೖ\x05ё')
buf.write('ȩ\x02ೖ\u0cd7\x05љȭ\x02\u0cd7\u0cd8\x05х')
buf.write('ȣ\x02\u0cd8\u0cd9\x05ћȮ\x02\u0cd9\u0cda\x05х')
buf.write('ȣ\x02\u0cda\u0cdb\x05џȰ\x02\u0cdb\u0cdc\x05н')
buf.write('ȟ\x02\u0cdcೝ\x05яȨ\x02ೝɈ\x03\x02\x02')
buf.write('\x02ೞ\u0cdf\x05ѓȪ\x02\u0cdfೠ\x05їȬ')
buf.write('\x02ೠೡ\x05еț\x02ೡೢ\x05сȡ')
buf.write('\x02ೢೣ\x05эȧ\x02ೣ\u0ce4\x05еț')
buf.write('\x02\u0ce4Ɋ\x03\x02\x02\x02\u0ce5೦\x05ѓȪ\x02೦')
buf.write('೧\x05їȬ\x02೧೨\x05нȟ\x02೨')
buf.write('೩\x05йȝ\x02೩೪\x05нȟ\x02೪')
buf.write('೫\x05лȞ\x02೫೬\x05хȣ\x02೬')
buf.write('೭\x05яȨ\x02೭೮\x05сȡ\x02೮')
buf.write('Ɍ\x03\x02\x02\x02೯\u0cf0\x05ѓȪ\x02\u0cf0ೱ')
buf.write('\x05їȬ\x02ೱೲ\x05нȟ\x02ೲ\u0cf3')
buf.write('\x05йȝ\x02\u0cf3\u0cf4\x05хȣ\x02\u0cf4\u0cf5')
buf.write('\x05љȭ\x02\u0cf5\u0cf6\x05хȣ\x02\u0cf6\u0cf7')
buf.write('\x05ёȩ\x02\u0cf7\u0cf8\x05яȨ\x02\u0cf8Ɏ')
buf.write('\x03\x02\x02\x02\u0cf9\u0cfa\x05ѓȪ\x02\u0cfa\u0cfb\x05ї')
buf.write('Ȭ\x02\u0cfb\u0cfc\x05нȟ\x02\u0cfc\u0cfd\x05љ')
buf.write('ȭ\x02\u0cfd\u0cfe\x05нȟ\x02\u0cfe\u0cff\x05я')
buf.write('Ȩ\x02\u0cffഀ\x05ћȮ\x02ഀɐ\x03\x02\x02')
buf.write('\x02ഁം\x05ѓȪ\x02ംഃ\x05їȬ')
buf.write('\x02ഃഄ\x05хȣ\x02ഄഅ\x05ёȩ')
buf.write('\x02അആ\x05їȬ\x02ആɒ\x03\x02\x02\x02ഇ')
buf.write('ഈ\x05ѓȪ\x02ഈഉ\x05їȬ\x02ഉ')
buf.write('ഊ\x05ёȩ\x02ഊഋ\x05йȝ\x02ഋ')
buf.write('ഌ\x05нȟ\x02ഌ\u0d0d\x05лȞ\x02\u0d0d')
buf.write('എ\x05ѝȯ\x02എഏ\x05їȬ\x02ഏ')
buf.write('ഐ\x05нȟ\x02ഐɔ\x03\x02\x02\x02\u0d11ഒ')
buf.write('\x05їȬ\x02ഒഓ\x05еț\x02ഓഔ')
buf.write('\x05хȣ\x02ഔക\x05љȭ\x02കഖ')
buf.write('\x05нȟ\x02ഖɖ\x03\x02\x02\x02ഗഘ\x05ї')
buf.write('Ȭ\x02ഘങ\x05еț\x02ങച\x05я')
buf.write('Ȩ\x02ചഛ\x05сȡ\x02ഛജ\x05н')
buf.write('ȟ\x02ജɘ\x03\x02\x02\x02ഝഞ\x05їȬ')
buf.write('\x02ഞട\x05еț\x02ടഠ\x05ѡȱ')
buf.write('\x02ഠɚ\x03\x02\x02\x02ഡഢ\x05їȬ\x02ഢ')
buf.write('ണ\x05нȟ\x02ണത\x05еț\x02ത')
buf.write('ഥ\x05лȞ\x02ഥɜ\x03\x02\x02\x02ദധ')
buf.write('\x05їȬ\x02ധന\x05нȟ\x02നഩ')
buf.write('\x05еț\x02ഩപ\x05ыȦ\x02പɞ')
buf.write('\x03\x02\x02\x02ഫബ\x05їȬ\x02ബഭ\x05н')
buf.write('ȟ\x02ഭമ\x05йȝ\x02മയ\x05ё')
buf.write('ȩ\x02യര\x05їȬ\x02രറ\x05л')
buf.write('Ȟ\x02റɠ\x03\x02\x02\x02ലള\x05їȬ')
buf.write('\x02ളഴ\x05нȟ\x02ഴവ\x05пȠ')
buf.write('\x02വɢ\x03\x02\x02\x02ശഷ\x05їȬ\x02ഷ')
buf.write('സ\x05нȟ\x02സഹ\x05пȠ\x02ഹ')
buf.write('ഺ\x05нȟ\x02ഺ഻\x05їȬ\x02഻')
buf.write('഼\x05нȟ\x02഼ഽ\x05яȨ\x02ഽ')
buf.write('ാ\x05йȝ\x02ാി\x05нȟ\x02ി')
buf.write('ɤ\x03\x02\x02\x02ീു\x05їȬ\x02ുൂ')
buf.write('\x05нȟ\x02ൂൃ\x05пȠ\x02ൃൄ')
buf.write('\x05нȟ\x02ൄ\u0d45\x05їȬ\x02\u0d45െ')
buf.write('\x05нȟ\x02െേ\x05яȨ\x02േൈ')
buf.write('\x05йȝ\x02ൈ\u0d49\x05хȣ\x02\u0d49ൊ')
buf.write('\x05яȨ\x02ൊോ\x05сȡ\x02ോɦ')
buf.write('\x03\x02\x02\x02ൌ്\x05їȬ\x02്ൎ\x05н')
buf.write('ȟ\x02ൎ൏\x05чȤ\x02൏\u0d50\x05н')
buf.write('ȟ\x02\u0d50\u0d51\x05йȝ\x02\u0d51\u0d52\x05ћ')
buf.write('Ȯ\x02\u0d52ɨ\x03\x02\x02\x02\u0d53ൔ\x05їȬ')
buf.write('\x02ൔൕ\x05нȟ\x02ൕൖ\x05ыȦ')
buf.write('\x02ൖൗ\x05хȣ\x02ൗ൘\x05нȟ')
buf.write('\x02൘൙\x05љȭ\x02൙൚\x07a\x02\x02൚')
buf.write('൛\x05ёȩ\x02൛൜\x05яȨ\x02൜')
buf.write('ɪ\x03\x02\x02\x02൝൞\x05їȬ\x02൞ൟ')
buf.write('\x05нȟ\x02ൟൠ\x05яȨ\x02ൠൡ')
buf.write('\x05еț\x02ൡൢ\x05эȧ\x02ൢൣ')
buf.write('\x05нȟ\x02ൣɬ\x03\x02\x02\x02\u0d64\u0d65\x05ї')
buf.write('Ȭ\x02\u0d65൦\x05нȟ\x02൦൧\x05ѓ')
buf.write('Ȫ\x02൧൨\x05ыȦ\x02൨൩\x05е')
buf.write('ț\x02൩൪\x05йȝ\x02൪൫\x05н')
buf.write('ȟ\x02൫ɮ\x03\x02\x02\x02൬൭\x05їȬ')
buf.write('\x02൭൮\x05нȟ\x02൮൯\x05љȭ')
buf.write('\x02൯൰\x05ѓȪ\x02൰൱\x05нȟ')
buf.write('\x02൱൲\x05йȝ\x02൲൳\x05ћȮ')
buf.write('\x02൳ɰ\x03\x02\x02\x02൴൵\x05їȬ\x02൵')
buf.write('൶\x05нȟ\x02൶൷\x05љȭ\x02൷')
buf.write('൸\x05ћȮ\x02൸൹\x05їȬ\x02൹')
buf.write('ൺ\x05хȣ\x02ൺൻ\x05йȝ\x02ൻ')
buf.write('ർ\x05ћȮ\x02ർൽ\x07a\x02\x02ൽൾ')
buf.write('\x05їȬ\x02ൾൿ\x05нȟ\x02ൿ\u0d80')
buf.write('\x05пȠ\x02\u0d80ඁ\x05нȟ\x02ඁං')
buf.write('\x05їȬ\x02ංඃ\x05нȟ\x02ඃ\u0d84')
buf.write('\x05яȨ\x02\u0d84අ\x05йȝ\x02අආ')
buf.write('\x05нȟ\x02ආඇ\x05љȭ\x02ඇɲ')
buf.write('\x03\x02\x02\x02ඈඉ\x05їȬ\x02ඉඊ\x05н')
buf.write('ȟ\x02ඊඋ\x05љȭ\x02උඌ\x05ѝ')
buf.write('ȯ\x02ඌඍ\x05ыȦ\x02ඍඎ\x05ћ')
buf.write('Ȯ\x02ඎɴ\x03\x02\x02\x02ඏඐ\x05їȬ')
buf.write('\x02ඐඑ\x05нȟ\x02එඒ\x05љȭ')
buf.write('\x02ඒඓ\x05ѝȯ\x02ඓඔ\x05ыȦ')
buf.write('\x02ඔඕ\x05ћȮ\x02ඕඖ\x07a\x02\x02ඖ')
buf.write('\u0d97\x05йȝ\x02\u0d97\u0d98\x05еț\x02\u0d98')
buf.write('\u0d99\x05йȝ\x02\u0d99ක\x05уȢ\x02ක')
buf.write('ඛ\x05нȟ\x02ඛɶ\x03\x02\x02\x02ගඝ')
buf.write('\x05їȬ\x02ඝඞ\x05нȟ\x02ඞඟ')
buf.write('\x05ћȮ\x02ඟච\x05ѝȯ\x02චඡ')
buf.write('\x05їȬ\x02ඡජ\x05яȨ\x02ජɸ')
buf.write('\x03\x02\x02\x02ඣඤ\x05їȬ\x02ඤඥ\x05н')
buf.write('ȟ\x02ඥඦ\x05ћȮ\x02ඦට\x05ѝ')
buf.write('ȯ\x02ටඨ\x05їȬ\x02ඨඩ\x05я')
buf.write('Ȩ\x02ඩඪ\x05хȣ\x02ඪණ\x05я')
buf.write('Ȩ\x02ණඬ\x05сȡ\x02ඬɺ\x03\x02\x02')
buf.write('\x02තථ\x05їȬ\x02ථද\x05нȟ')
buf.write('\x02දධ\x05ѝȯ\x02ධන\x05љȭ')
buf.write('\x02න\u0db2\x05нȟ\x02\u0db2ɼ\x03\x02\x02\x02ඳ')
buf.write('ප\x05їȬ\x02පඵ\x05нȟ\x02ඵ')
buf.write('බ\x05џȰ\x02බභ\x05нȟ\x02භ')
buf.write('ම\x05їȬ\x02මඹ\x05љȭ\x02ඹ')
buf.write('ය\x05нȟ\x02යɾ\x03\x02\x02\x02ර\u0dbc')
buf.write('\x05їȬ\x02\u0dbcල\x05нȟ\x02ල\u0dbe')
buf.write('\x05џȰ\x02\u0dbe\u0dbf\x05ёȩ\x02\u0dbfව')
buf.write('\x05щȥ\x02වශ\x05нȟ\x02ශʀ')
buf.write('\x03\x02\x02\x02ෂස\x05їȬ\x02සහ\x05х')
buf.write('ȣ\x02හළ\x05сȡ\x02ළෆ\x05у')
buf.write('Ȣ\x02ෆ\u0dc7\x05ћȮ\x02\u0dc7ʂ\x03\x02\x02')
buf.write('\x02\u0dc8\u0dc9\x05їȬ\x02\u0dc9්\x05ёȩ')
buf.write('\x02්\u0dcb\x05ыȦ\x02\u0dcb\u0dcc\x05ыȦ')
buf.write('\x02\u0dcc\u0dcd\x05зȜ\x02\u0dcd\u0dce\x05еț')
buf.write('\x02\u0dceා\x05йȝ\x02ාැ\x05щȥ')
buf.write('\x02ැʄ\x03\x02\x02\x02ෑි\x05їȬ\x02ි')
buf.write('ී\x05ёȩ\x02ීු\x05ыȦ\x02ු')
buf.write('\u0dd5\x05ыȦ\x02\u0dd5ූ\x05ѝȯ\x02ූ')
buf.write('\u0dd7\x05ѓȪ\x02\u0dd7ʆ\x03\x02\x02\x02ෘෙ')
buf.write('\x05їȬ\x02ෙේ\x05ёȩ\x02ේෛ')
buf.write('\x05ѡȱ\x02ෛʈ\x03\x02\x02\x02ොෝ\x05ї')
buf.write('Ȭ\x02ෝෞ\x05ёȩ\x02ෞෟ\x05ѡ')
buf.write('ȱ\x02ෟ\u0de0\x05хȣ\x02\u0de0\u0de1\x05л')
buf.write('Ȟ\x02\u0de1ʊ\x03\x02\x02\x02\u0de2\u0de3\x05їȬ')
buf.write('\x02\u0de3\u0de4\x05ёȩ\x02\u0de4\u0de5\x05ѡȱ')
buf.write('\x02\u0de5෦\x05љȭ\x02෦ʌ\x03\x02\x02\x02෧')
buf.write('෨\x05їȬ\x02෨෩\x05ѝȯ\x02෩')
buf.write('෪\x05ыȦ\x02෪෫\x05нȟ\x02෫')
buf.write('෬\x05љȭ\x02෬ʎ\x03\x02\x02\x02෭෮')
buf.write('\x05љȭ\x02෮෯\x05еț\x02෯\u0df0')
buf.write('\x05эȧ\x02\u0df0\u0df1\x05ѓȪ\x02\u0df1ෲ')
buf.write('\x05ыȦ\x02ෲෳ\x05нȟ\x02ෳʐ')
buf.write('\x03\x02\x02\x02෴\u0df5\x05љȭ\x02\u0df5\u0df6\x05е')
buf.write('ț\x02\u0df6\u0df7\x05џȰ\x02\u0df7\u0df8\x05н')
buf.write('ȟ\x02\u0df8ʒ\x03\x02\x02\x02\u0df9\u0dfa\x05љȭ')
buf.write('\x02\u0dfa\u0dfb\x05еț\x02\u0dfb\u0dfc\x05џȰ')
buf.write('\x02\u0dfc\u0dfd\x05нȟ\x02\u0dfd\u0dfe\x05ѓȪ')
buf.write('\x02\u0dfe\u0dff\x05ёȩ\x02\u0dff\u0e00\x05хȣ')
buf.write('\x02\u0e00ก\x05яȨ\x02กข\x05ћȮ')
buf.write('\x02ขʔ\x03\x02\x02\x02ฃค\x05љȭ\x02ค')
buf.write('ฅ\x05йȝ\x02ฅฆ\x05уȢ\x02ฆ')
buf.write('ง\x05нȟ\x02งจ\x05эȧ\x02จ')
buf.write('ฉ\x05еț\x02ฉʖ\x03\x02\x02\x02ชซ')
buf.write('\x05љȭ\x02ซฌ\x05йȝ\x02ฌญ')
buf.write('\x05уȢ\x02ญฎ\x05нȟ\x02ฎฏ')
buf.write('\x05эȧ\x02ฏฐ\x05еț\x02ฐฑ')
buf.write('\x05йȝ\x02ฑฒ\x05уȢ\x02ฒณ')
buf.write('\x05нȟ\x02ณด\x05йȝ\x02ดต')
buf.write('\x05щȥ\x02ตʘ\x03\x02\x02\x02ถท\x05љ')
buf.write('ȭ\x02ทธ\x05йȝ\x02ธน\x05я')
buf.write('Ȩ\x02นʚ\x03\x02\x02\x02บป\x05љȭ')
buf.write('\x02ปผ\x05нȟ\x02ผฝ\x05еț')
buf.write('\x02ฝพ\x05їȬ\x02พฟ\x05йȝ')
buf.write('\x02ฟภ\x05уȢ\x02ภʜ\x03\x02\x02\x02ม')
buf.write('ย\x05љȭ\x02ยร\x05нȟ\x02ร')
buf.write('ฤ\x05йȝ\x02ฤล\x05ёȩ\x02ล')
buf.write('ฦ\x05яȨ\x02ฦว\x05лȞ\x02ว')
buf.write('ʞ\x03\x02\x02\x02ศษ\x05љȭ\x02ษส')
buf.write('\x05нȟ\x02สห\x05нȟ\x02หฬ')
buf.write('\x05лȞ\x02ฬʠ\x03\x02\x02\x02อฮ\x05љ')
buf.write('ȭ\x02ฮฯ\x05нȟ\x02ฯะ\x05с')
buf.write('ȡ\x02ะั\x05эȧ\x02ัา\x05н')
buf.write('ȟ\x02าำ\x05яȨ\x02ำิ\x05ћ')
buf.write('Ȯ\x02ิʢ\x03\x02\x02\x02ีึ\x05љȭ')
buf.write('\x02ึื\x05нȟ\x02ืุ\x05ыȦ')
buf.write('\x02ุู\x05нȟ\x02ฺู\x05йȝ')
buf.write('\x02ฺ\u0e3b\x05ћȮ\x02\u0e3bʤ\x03\x02\x02\x02\u0e3c')
buf.write('\u0e3d\x05љȭ\x02\u0e3d\u0e3e\x05нȟ\x02\u0e3e')
buf.write('฿\x05ыȦ\x02฿เ\x05пȠ\x02เ')
buf.write('ʦ\x03\x02\x02\x02แโ\x05љȭ\x02โใ')
buf.write('\x05нȟ\x02ใไ\x05ѕȫ\x02ไๅ')
buf.write('\x05ѝȯ\x02ๅๆ\x05нȟ\x02ๆ็')
buf.write('\x05яȨ\x02็่\x05йȝ\x02่้')
buf.write('\x05нȟ\x02้ʨ\x03\x02\x02\x02๊๋\x05љ')
buf.write('ȭ\x02๋์\x05нȟ\x02์ํ\x05ѕ')
buf.write('ȫ\x02ํ๎\x05ѝȯ\x02๎๏\x05н')
buf.write('ȟ\x02๏๐\x05яȨ\x02๐๑\x05ћ')
buf.write('Ȯ\x02๑๒\x05хȣ\x02๒๓\x05е')
buf.write('ț\x02๓๔\x05ыȦ\x02๔ʪ\x03\x02\x02')
buf.write('\x02๕๖\x05љȭ\x02๖๗\x05нȟ')
buf.write('\x02๗๘\x05їȬ\x02๘๙\x05хȣ')
buf.write('\x02๙๚\x05еț\x02๚๛\x05ыȦ')
buf.write('\x02๛\u0e5c\x05хȣ\x02\u0e5c\u0e5d\x05ѧȴ')
buf.write('\x02\u0e5d\u0e5e\x05еț\x02\u0e5e\u0e5f\x05зȜ')
buf.write('\x02\u0e5f\u0e60\x05ыȦ\x02\u0e60\u0e61\x05нȟ')
buf.write('\x02\u0e61ʬ\x03\x02\x02\x02\u0e62\u0e63\x05љȭ\x02\u0e63')
buf.write('\u0e64\x05нȟ\x02\u0e64\u0e65\x05їȬ\x02\u0e65')
buf.write('\u0e66\x05хȣ\x02\u0e66\u0e67\x05еț\x02\u0e67')
buf.write('\u0e68\x05ыȦ\x02\u0e68\u0e69\x05ыȦ\x02\u0e69')
buf.write('\u0e6a\x05ѥȳ\x02\u0e6a\u0e6b\x07a\x02\x02\u0e6b\u0e6c')
buf.write('\x05їȬ\x02\u0e6c\u0e6d\x05нȟ\x02\u0e6d\u0e6e')
buf.write('\x05ѝȯ\x02\u0e6e\u0e6f\x05љȭ\x02\u0e6f\u0e70')
buf.write('\x05еț\x02\u0e70\u0e71\x05зȜ\x02\u0e71\u0e72')
buf.write('\x05ыȦ\x02\u0e72\u0e73\x05нȟ\x02\u0e73ʮ')
buf.write('\x03\x02\x02\x02\u0e74\u0e75\x05љȭ\x02\u0e75\u0e76\x05н')
buf.write('ȟ\x02\u0e76\u0e77\x05їȬ\x02\u0e77\u0e78\x05џ')
buf.write('Ȱ\x02\u0e78\u0e79\x05нȟ\x02\u0e79\u0e7a\x05ї')
buf.write('Ȭ\x02\u0e7a\u0e7b\x05нȟ\x02\u0e7b\u0e7c\x05ї')
buf.write('Ȭ\x02\u0e7c\u0e7d\x05їȬ\x02\u0e7d\u0e7e\x05ё')
buf.write('ȩ\x02\u0e7e\u0e7f\x05їȬ\x02\u0e7fʰ\x03\x02\x02')
buf.write('\x02\u0e80ກ\x05љȭ\x02ກຂ\x05нȟ')
buf.write('\x02ຂ\u0e83\x05љȭ\x02\u0e83ຄ\x05љȭ')
buf.write('\x02ຄ\u0e85\x05хȣ\x02\u0e85ຆ\x05ёȩ')
buf.write('\x02ຆງ\x05яȨ\x02ງຈ\x05ћȮ')
buf.write('\x02ຈຉ\x05хȣ\x02ຉຊ\x05эȧ')
buf.write('\x02ຊ\u0e8b\x05нȟ\x02\u0e8bຌ\x05ѧȴ')
buf.write('\x02ຌຍ\x05ёȩ\x02ຍຎ\x05яȨ')
buf.write('\x02ຎຏ\x05нȟ\x02ຏʲ\x03\x02\x02\x02ຐ')
buf.write('ຑ\x05љȭ\x02ຑຒ\x05нȟ\x02ຒ')
buf.write('ຓ\x05ћȮ\x02ຓʴ\x03\x02\x02\x02ດຕ')
buf.write('\x05љȭ\x02ຕຖ\x05нȟ\x02ຖທ')
buf.write('\x05ћȮ\x02ທຘ\x05љȭ\x02ຘʶ')
buf.write('\x03\x02\x02\x02ນບ\x05љȭ\x02ບປ\x05н')
buf.write('ȟ\x02ປຜ\x05ћȮ\x02ຜຝ\x05ћ')
buf.write('Ȯ\x02ຝພ\x05хȣ\x02ພຟ\x05я')
buf.write('Ȩ\x02ຟຠ\x05сȡ\x02ຠມ\x05љ')
buf.write('ȭ\x02ມʸ\x03\x02\x02\x02ຢຣ\x05љȭ')
buf.write('\x02ຣ\u0ea4\x05уȢ\x02\u0ea4ລ\x05еț')
buf.write('\x02ລ\u0ea6\x05їȬ\x02\u0ea6ວ\x05нȟ')
buf.write('\x02ວʺ\x03\x02\x02\x02ຨຩ\x05љȭ\x02ຩ')
buf.write('ສ\x05уȢ\x02ສຫ\x05ёȩ\x02ຫ')
buf.write('ຬ\x05ѡȱ\x02ຬʼ\x03\x02\x02\x02ອຮ')
buf.write('\x05љȭ\x02ຮຯ\x05уȢ\x02ຯະ')
buf.write('\x05ѝȯ\x02ະັ\x05ћȮ\x02ັາ')
buf.write('\x05лȞ\x02າຳ\x05ёȩ\x02ຳິ')
buf.write('\x05ѡȱ\x02ິີ\x05яȨ\x02ີʾ')
buf.write('\x03\x02\x02\x02ຶື\x05љȭ\x02ືຸ\x05х')
buf.write('ȣ\x02ຸູ\x05зȜ\x02຺ູ\x05ы')
buf.write('Ȧ\x02຺ົ\x05хȣ\x02ົຼ\x05я')
buf.write('Ȩ\x02ຼຽ\x05сȡ\x02ຽ\u0ebe\x05љ')
buf.write('ȭ\x02\u0ebeˀ\x03\x02\x02\x02\u0ebfເ\x05љȭ')
buf.write('\x02ເແ\x05хȣ\x02ແໂ\x05сȡ')
buf.write('\x02ໂໃ\x05яȨ\x02ໃໄ\x05ћȮ')
buf.write('\x02ໄ\u0ec5\x05ѥȳ\x02\u0ec5ໆ\x05ѓȪ')
buf.write('\x02ໆ\u0ec7\x05нȟ\x02\u0ec7˂\x03\x02\x02\x02່')
buf.write('້\x05љȭ\x02້໊\x05хȣ\x02໊')
buf.write('໋\x05эȧ\x02໋໌\x05ѓȪ\x02໌')
buf.write('ໍ\x05ыȦ\x02ໍ\u0ece\x05нȟ\x02\u0ece')
buf.write('\u0ecf\x07a\x02\x02\u0ecf໐\x05хȣ\x02໐໑')
buf.write('\x05яȨ\x02໑໒\x05ћȮ\x02໒໓')
buf.write('\x05нȟ\x02໓໔\x05сȡ\x02໔໕')
buf.write('\x05нȟ\x02໕໖\x05їȬ\x02໖˄')
buf.write('\x03\x02\x02\x02໗໘\x05љȭ\x02໘໙\x05х')
buf.write('ȣ\x02໙\u0eda\x05яȨ\x02\u0eda\u0edb\x05с')
buf.write('ȡ\x02\u0edbໜ\x05ыȦ\x02ໜໝ\x05н')
buf.write('ȟ\x02ໝˆ\x03\x02\x02\x02ໞໟ\x05љȭ')
buf.write('\x02ໟ\u0ee0\x05хȣ\x02\u0ee0\u0ee1\x05ѧȴ')
buf.write('\x02\u0ee1\u0ee2\x05нȟ\x02\u0ee2ˈ\x03\x02\x02\x02\u0ee3')
buf.write('\u0ee4\x05љȭ\x02\u0ee4\u0ee5\x05щȥ\x02\u0ee5')
buf.write('\u0ee6\x05хȣ\x02\u0ee6\u0ee7\x05ѓȪ\x02\u0ee7')
buf.write('ˊ\x03\x02\x02\x02\u0ee8\u0ee9\x05љȭ\x02\u0ee9\u0eea')
buf.write('\x05эȧ\x02\u0eea\u0eeb\x05еț\x02\u0eeb\u0eec')
buf.write('\x05ыȦ\x02\u0eec\u0eed\x05ыȦ\x02\u0eed\u0eee')
buf.write('\x05хȣ\x02\u0eee\u0eef\x05яȨ\x02\u0eef\u0ef0')
buf.write('\x05ћȮ\x02\u0ef0ˌ\x03\x02\x02\x02\u0ef1\u0ef2\x05љ')
buf.write('ȭ\x02\u0ef2\u0ef3\x05яȨ\x02\u0ef3\u0ef4\x05е')
buf.write('ț\x02\u0ef4\u0ef5\x05ѓȪ\x02\u0ef5\u0ef6\x05љ')
buf.write('ȭ\x02\u0ef6\u0ef7\x05уȢ\x02\u0ef7\u0ef8\x05ё')
buf.write('ȩ\x02\u0ef8\u0ef9\x05ћȮ\x02\u0ef9ˎ\x03\x02\x02')
buf.write('\x02\u0efa\u0efb\x05љȭ\x02\u0efb\u0efc\x05ёȩ')
buf.write('\x02\u0efc\u0efd\x05эȧ\x02\u0efd\u0efe\x05нȟ')
buf.write('\x02\u0efeː\x03\x02\x02\x02\u0effༀ\x05љȭ\x02ༀ')
buf.write('༁\x05ѓȪ\x02༁༂\x05нȟ\x02༂')
buf.write('༃\x05йȝ\x02༃༄\x05хȣ\x02༄')
buf.write('༅\x05пȠ\x02༅༆\x05хȣ\x02༆')
buf.write('༇\x05йȝ\x02༇༈\x05еț\x02༈')
buf.write('༉\x05ћȮ\x02༉༊\x05хȣ\x02༊')
buf.write('་\x05ёȩ\x02་༌\x05яȨ\x02༌')
buf.write('˒\x03\x02\x02\x02།༎\x05љȭ\x02༎༏')
buf.write('\x05ѕȫ\x02༏༐\x05ыȦ\x02༐༑')
buf.write('\x05лȞ\x02༑༒\x05еț\x02༒༓')
buf.write('\x05ћȮ\x02༓༔\x05еț\x02༔˔')
buf.write('\x03\x02\x02\x02༕༖\x05љȭ\x02༖༗\x05ѕ')
buf.write('ȫ\x02༗༘\x05ыȦ\x02༘༙\x05н')
buf.write('ȟ\x02༙༚\x05їȬ\x02༚༛\x05ї')
buf.write('Ȭ\x02༛༜\x05ёȩ\x02༜༝\x05ї')
buf.write('Ȭ\x02༝˖\x03\x02\x02\x02༞༟\x05љȭ')
buf.write('\x02༟༠\x05ћȮ\x02༠༡\x05еț')
buf.write('\x02༡༢\x05яȨ\x02༢༣\x05лȞ')
buf.write('\x02༣༤\x05еț\x02༤༥\x05ыȦ')
buf.write('\x02༥༦\x05ёȩ\x02༦༧\x05яȨ')
buf.write('\x02༧༨\x05нȟ\x02༨˘\x03\x02\x02\x02༩')
buf.write('༪\x05љȭ\x02༪༫\x05ћȮ\x02༫')
buf.write('༬\x05еț\x02༬༭\x05їȬ\x02༭')
buf.write('༮\x05ћȮ\x02༮˚\x03\x02\x02\x02༯༰')
buf.write('\x05љȭ\x02༰༱\x05ћȮ\x02༱༲')
buf.write('\x05еț\x02༲༳\x05їȬ\x02༳༴')
buf.write('\x05ћȮ\x02༴༵\x05ѝȯ\x02༵༶')
buf.write('\x05ѓȪ\x02༶˜\x03\x02\x02\x02༷༸\x05љ')
buf.write('ȭ\x02༸༹\x05ћȮ\x02༹༺\x05е')
buf.write('ț\x02༺༻\x05ћȮ\x02༻༼\x05н')
buf.write('ȟ\x02༼༽\x05эȧ\x02༽༾\x05н')
buf.write('ȟ\x02༾༿\x05яȨ\x02༿ཀ\x05ћ')
buf.write('Ȯ\x02ཀ˞\x03\x02\x02\x02ཁག\x05љȭ')
buf.write('\x02གགྷ\x05ћȮ\x02གྷང\x05еț')
buf.write('\x02ངཅ\x05ћȮ\x02ཅཆ\x05нȟ')
buf.write('\x02ཆཇ\x05эȧ\x02ཇ\u0f48\x05нȟ')
buf.write('\x02\u0f48ཉ\x05яȨ\x02ཉཊ\x05ћȮ')
buf.write('\x02ཊཋ\x07a\x02\x02ཋཌ\x05хȣ\x02ཌ')
buf.write('ཌྷ\x05лȞ\x02ཌྷˠ\x03\x02\x02\x02ཎཏ')
buf.write('\x05љȭ\x02ཏཐ\x05ћȮ\x02ཐད')
buf.write('\x05еț\x02དདྷ\x05ћȮ\x02དྷན')
buf.write('\x05хȣ\x02ནཔ\x05йȝ\x02པˢ')
buf.write('\x03\x02\x02\x02ཕབ\x05љȭ\x02བབྷ\x05ћ')
buf.write('Ȯ\x02བྷམ\x05еț\x02མཙ\x05ћ')
buf.write('Ȯ\x02ཙཚ\x05хȣ\x02ཚཛ\x05љ')
buf.write('ȭ\x02ཛཛྷ\x05ћȮ\x02ཛྷཝ\x05х')
buf.write('ȣ\x02ཝཞ\x05йȝ\x02ཞཟ\x05љ')
buf.write('ȭ\x02ཟˤ\x03\x02\x02\x02འཡ\x05љȭ')
buf.write('\x02ཡར\x05ћȮ\x02རལ\x05їȬ')
buf.write('\x02ལཤ\x05хȣ\x02ཤཥ\x05яȨ')
buf.write('\x02ཥས\x05сȡ\x02ས˦\x03\x02\x02\x02ཧ')
buf.write('ཨ\x05љȭ\x02ཨཀྵ\x05ѝȯ\x02ཀྵ')
buf.write('ཪ\x05зȜ\x02ཪཫ\x05эȧ\x02ཫ')
buf.write('ཬ\x05ѝȯ\x02ཬ\u0f6d\x05ыȦ\x02\u0f6d')
buf.write('\u0f6e\x05ћȮ\x02\u0f6e\u0f6f\x05хȣ\x02\u0f6f')
buf.write('\u0f70\x05љȭ\x02\u0f70ཱ\x05нȟ\x02ཱ')
buf.write('ི\x05ћȮ\x02ི˨\x03\x02\x02\x02ཱིུ')
buf.write('\x05љȭ\x02ཱུུ\x05ѝȯ\x02ཱུྲྀ')
buf.write('\x05зȜ\x02ྲྀཷ\x05ѓȪ\x02ཷླྀ')
buf.write('\x05еț\x02ླྀཹ\x05їȬ\x02ཹེ')
buf.write('\x05ћȮ\x02ེཻ\x05хȣ\x02ཻོ')
buf.write('\x05ћȮ\x02ོཽ\x05хȣ\x02ཽཾ')
buf.write('\x05ёȩ\x02ཾཿ\x05яȨ\x02ཿ˪')
buf.write('\x03\x02\x02\x02ཱྀྀ\x05љȭ\x02ཱྀྂ\x05ѝ')
buf.write('ȯ\x02ྂྃ\x05зȜ\x02྄ྃ\x05љ')
buf.write('ȭ\x02྄྅\x05ћȮ\x02྅྆\x05х')
buf.write('ȣ\x02྆྇\x05ћȮ\x02྇ྈ\x05ѝ')
buf.write('ȯ\x02ྈྉ\x05ћȮ\x02ྉྊ\x05е')
buf.write('ț\x02ྊྋ\x05зȜ\x02ྋྌ\x05ы')
buf.write('Ȧ\x02ྌྍ\x05нȟ\x02ྍˬ\x03\x02\x02')
buf.write('\x02ྎྏ\x05љȭ\x02ྏྐ\x05ѝȯ')
buf.write('\x02ྐྑ\x05зȜ\x02ྑྒ\x05ћȮ')
buf.write('\x02ྒྒྷ\x05ѥȳ\x02ྒྷྔ\x05ѓȪ')
buf.write('\x02ྔྕ\x05нȟ\x02ྕˮ\x03\x02\x02\x02ྖ')
buf.write('ྗ\x05љȭ\x02ྗ\u0f98\x05ѝȯ\x02\u0f98')
buf.write('ྙ\x05йȝ\x02ྙྚ\x05йȝ\x02ྚ')
buf.write('ྛ\x05нȟ\x02ྛྜ\x05љȭ\x02ྜ')
buf.write('ྜྷ\x05љȭ\x02ྜྷ˰\x03\x02\x02\x02ྞྟ')
buf.write('\x05љȭ\x02ྟྠ\x05ѝȯ\x02ྠྡ')
buf.write('\x05љȭ\x02ྡྡྷ\x05ѓȪ\x02ྡྷྣ')
buf.write('\x05нȟ\x02ྣྤ\x05яȨ\x02ྤྥ')
buf.write('\x05лȞ\x02ྥ˲\x03\x02\x02\x02ྦྦྷ\x05ћ')
buf.write('Ȯ\x02ྦྷྨ\x05еț\x02ྨྩ\x05з')
buf.write('Ȝ\x02ྩྪ\x05ыȦ\x02ྪྫ\x05н')
buf.write('ȟ\x02ྫ˴\x03\x02\x02\x02ྫྷྭ\x05ћȮ')
buf.write('\x02ྭྮ\x05уȢ\x02ྮྯ\x05нȟ')
buf.write('\x02ྯ˶\x03\x02\x02\x02ྰྱ\x05ћȮ\x02ྱ')
buf.write('ྲ\x05уȢ\x02ྲླ\x05нȟ\x02ླ')
buf.write('ྴ\x05яȨ\x02ྴ˸\x03\x02\x02\x02ྵྶ')
buf.write('\x05ћȮ\x02ྶྷ\x05хȣ\x02ྷྸ')
buf.write('\x05эȧ\x02ྸྐྵ\x05нȟ\x02ྐྵ˺')
buf.write('\x03\x02\x02\x02ྺྻ\x05ћȮ\x02ྻྼ\x05х')
buf.write('ȣ\x02ྼ\u0fbd\x05эȧ\x02\u0fbd྾\x05н')
buf.write('ȟ\x02྾྿\x05љȭ\x02྿࿀\x05ћ')
buf.write('Ȯ\x02࿀࿁\x05еț\x02࿁࿂\x05э')
buf.write('ȧ\x02࿂࿃\x05ѓȪ\x02࿃˼\x03\x02\x02')
buf.write('\x02࿄࿅\x05ћȮ\x02࿅࿆\x05хȣ')
buf.write('\x02࿆࿇\x05эȧ\x02࿇࿈\x05нȟ')
buf.write('\x02࿈࿉\x05љȭ\x02࿉࿊\x05ћȮ')
buf.write('\x02࿊࿋\x05еț\x02࿋࿌\x05эȧ')
buf.write('\x02࿌\u0fcd\x05ѓȪ\x02\u0fcd࿎\x07a\x02\x02࿎')
buf.write('࿏\x05ыȦ\x02࿏࿐\x05ћȮ\x02࿐')
buf.write('࿑\x05ѧȴ\x02࿑࿒\x07a\x02\x02࿒࿓')
buf.write('\x05ѝȯ\x02࿓࿔\x05яȨ\x02࿔࿕')
buf.write('\x05йȝ\x02࿕࿖\x05ёȩ\x02࿖࿗')
buf.write('\x05яȨ\x02࿗࿘\x05љȭ\x02࿘࿙')
buf.write('\x05ћȮ\x02࿙࿚\x05їȬ\x02࿚\u0fdb')
buf.write('\x05еț\x02\u0fdb\u0fdc\x05хȣ\x02\u0fdc\u0fdd')
buf.write('\x05яȨ\x02\u0fdd\u0fde\x05нȟ\x02\u0fde\u0fdf')
buf.write('\x05лȞ\x02\u0fdf˾\x03\x02\x02\x02\u0fe0\u0fe1\x05ћ')
buf.write('Ȯ\x02\u0fe1\u0fe2\x05хȣ\x02\u0fe2\u0fe3\x05э')
buf.write('ȧ\x02\u0fe3\u0fe4\x05нȟ\x02\u0fe4\u0fe5\x05љ')
buf.write('ȭ\x02\u0fe5\u0fe6\x05ћȮ\x02\u0fe6\u0fe7\x05е')
buf.write('ț\x02\u0fe7\u0fe8\x05эȧ\x02\u0fe8\u0fe9\x05ѓ')
buf.write('Ȫ\x02\u0fe9\u0fea\x07a\x02\x02\u0fea\u0feb\x05ћȮ')
buf.write('\x02\u0feb\u0fec\x05ѧȴ\x02\u0fec\u0fed\x07a\x02\x02\u0fed')
buf.write('\u0fee\x05ѝȯ\x02\u0fee\u0fef\x05яȨ\x02\u0fef')
buf.write('\u0ff0\x05йȝ\x02\u0ff0\u0ff1\x05ёȩ\x02\u0ff1')
buf.write('\u0ff2\x05яȨ\x02\u0ff2\u0ff3\x05љȭ\x02\u0ff3')
buf.write('\u0ff4\x05ћȮ\x02\u0ff4\u0ff5\x05їȬ\x02\u0ff5')
buf.write('\u0ff6\x05еț\x02\u0ff6\u0ff7\x05хȣ\x02\u0ff7')
buf.write('\u0ff8\x05яȨ\x02\u0ff8\u0ff9\x05нȟ\x02\u0ff9')
buf.write('\u0ffa\x05лȞ\x02\u0ffà\x03\x02\x02\x02\u0ffb\u0ffc')
buf.write('\x05ћȮ\x02\u0ffc\u0ffd\x05хȣ\x02\u0ffd\u0ffe')
buf.write('\x05эȧ\x02\u0ffe\u0fff\x05нȟ\x02\u0fffက')
buf.write('\x05љȭ\x02ကခ\x05ћȮ\x02ခဂ')
buf.write('\x05еț\x02ဂဃ\x05эȧ\x02ဃင')
buf.write('\x05ѓȪ\x02ငစ\x07a\x02\x02စဆ\x05ѝ')
buf.write('ȯ\x02ဆဇ\x05яȨ\x02ဇဈ\x05й')
buf.write('ȝ\x02ဈဉ\x05ёȩ\x02ဉည\x05я')
buf.write('Ȩ\x02ညဋ\x05љȭ\x02ဋဌ\x05ћ')
buf.write('Ȯ\x02ဌဍ\x05їȬ\x02ဍဎ\x05е')
buf.write('ț\x02ဎဏ\x05хȣ\x02ဏတ\x05я')
buf.write('Ȩ\x02တထ\x05нȟ\x02ထဒ\x05л')
buf.write('Ȟ\x02ဒ̂\x03\x02\x02\x02ဓန\x05ћȮ')
buf.write('\x02နပ\x05хȣ\x02ပဖ\x05эȧ')
buf.write('\x02ဖဗ\x05нȟ\x02ဗဘ\x05ѧȴ')
buf.write('\x02ဘမ\x05ёȩ\x02မယ\x05яȨ')
buf.write('\x02ယရ\x05нȟ\x02ရလ\x07a\x02\x02လ')
buf.write('ဝ\x05еț\x02ဝသ\x05зȜ\x02သ')
buf.write('ဟ\x05зȜ\x02ဟဠ\x05їȬ\x02ဠ')
buf.write('̄\x03\x02\x02\x02အဢ\x05ћȮ\x02ဢဣ')
buf.write('\x05хȣ\x02ဣဤ\x05эȧ\x02ဤဥ')
buf.write('\x05нȟ\x02ဥဦ\x05ѧȴ\x02ဦဧ')
buf.write('\x05ёȩ\x02ဧဨ\x05яȨ\x02ဨဩ')
buf.write('\x05нȟ\x02ဩဪ\x07a\x02\x02ဪါ\x05у')
buf.write('Ȣ\x02ါာ\x05ёȩ\x02ာိ\x05ѝ')
buf.write('ȯ\x02ိီ\x05їȬ\x02ီ̆\x03\x02\x02')
buf.write('\x02ုူ\x05ћȮ\x02ူေ\x05хȣ')
buf.write('\x02ေဲ\x05эȧ\x02ဲဳ\x05нȟ')
buf.write('\x02ဳဴ\x05ѧȴ\x02ဴဵ\x05ёȩ')
buf.write('\x02ဵံ\x05яȨ\x02ံ့\x05нȟ')
buf.write('\x02့း\x07a\x02\x02း္\x05эȧ\x02္')
buf.write('်\x05хȣ\x02်ျ\x05яȨ\x02ျ')
buf.write('ြ\x05ѝȯ\x02ြွ\x05ћȮ\x02ွ')
buf.write('ှ\x05нȟ\x02ှ̈\x03\x02\x02\x02ဿ၀')
buf.write('\x05ћȮ\x02၀၁\x05хȣ\x02၁၂')
buf.write('\x05эȧ\x02၂၃\x05нȟ\x02၃၄')
buf.write('\x05ѧȴ\x02၄၅\x05ёȩ\x02၅၆')
buf.write('\x05яȨ\x02၆၇\x05нȟ\x02၇၈')
buf.write('\x07a\x02\x02၈၉\x05їȬ\x02၉၊\x05н')
buf.write('ȟ\x02၊။\x05сȡ\x02။၌\x05х')
buf.write('ȣ\x02၌၍\x05ёȩ\x02၍၎\x05я')
buf.write('Ȩ\x02၎̊\x03\x02\x02\x02၏ၐ\x05ћȮ')
buf.write('\x02ၐၑ\x05ёȩ\x02ၑ̌\x03\x02\x02\x02ၒ')
buf.write('ၓ\x05ћȮ\x02ၓၔ\x05їȬ\x02ၔ')
buf.write('ၕ\x05еț\x02ၕၖ\x05хȣ\x02ၖ')
buf.write('ၗ\x05ыȦ\x02ၗၘ\x05хȣ\x02ၘ')
buf.write('ၙ\x05яȨ\x02ၙၚ\x05сȡ\x02ၚ')
buf.write('̎\x03\x02\x02\x02ၛၜ\x05ћȮ\x02ၜၝ')
buf.write('\x05їȬ\x02ၝၞ\x05еț\x02ၞၟ')
buf.write('\x05яȨ\x02ၟၠ\x05љȭ\x02ၠၡ')
buf.write('\x05еț\x02ၡၢ\x05йȝ\x02ၢၣ')
buf.write('\x05ћȮ\x02ၣၤ\x05хȣ\x02ၤၥ')
buf.write('\x05ёȩ\x02ၥၦ\x05яȨ\x02ၦ̐')
buf.write('\x03\x02\x02\x02ၧၨ\x05ћȮ\x02ၨၩ\x05ї')
buf.write('Ȭ\x02ၩၪ\x05еț\x02ၪၫ\x05я')
buf.write('Ȩ\x02ၫၬ\x05љȭ\x02ၬၭ\x05ы')
buf.write('Ȧ\x02ၭၮ\x05еț\x02ၮၯ\x05ћ')
buf.write('Ȯ\x02ၯၰ\x05нȟ\x02ၰ̒\x03\x02\x02')
buf.write('\x02ၱၲ\x05ћȮ\x02ၲၳ\x05їȬ')
buf.write('\x02ၳၴ\x05нȟ\x02ၴၵ\x05еț')
buf.write('\x02ၵၶ\x05ћȮ\x02ၶ̔\x03\x02\x02\x02ၷ')
buf.write('ၸ\x05ћȮ\x02ၸၹ\x05їȬ\x02ၹ')
buf.write('ၺ\x05хȣ\x02ၺၻ\x05сȡ\x02ၻ')
buf.write('ၼ\x05сȡ\x02ၼၽ\x05нȟ\x02ၽ')
buf.write('ၾ\x05їȬ\x02ၾ̖\x03\x02\x02\x02ၿႀ')
buf.write('\x05ћȮ\x02ႀႁ\x05їȬ\x02ႁႂ')
buf.write('\x05хȣ\x02ႂႃ\x05эȧ\x02ႃ̘')
buf.write('\x03\x02\x02\x02ႄႅ\x05ћȮ\x02ႅႆ\x05ї')
buf.write('Ȭ\x02ႆႇ\x05ѝȯ\x02ႇႈ\x05н')
buf.write('ȟ\x02ႈ̚\x03\x02\x02\x02ႉႊ\x05ћȮ')
buf.write('\x02ႊႋ\x05їȬ\x02ႋႌ\x05ѝȯ')
buf.write('\x02ႌႍ\x05яȨ\x02ႍႎ\x05йȝ')
buf.write('\x02ႎႏ\x05еț\x02ႏ႐\x05ћȮ')
buf.write('\x02႐႑\x05нȟ\x02႑̜\x03\x02\x02\x02႒')
buf.write('႓\x05ћȮ\x02႓႔\x05ѥȳ\x02႔')
buf.write('႕\x05ѓȪ\x02႕႖\x05нȟ\x02႖')
buf.write('̞\x03\x02\x02\x02႗႘\x05ѝȯ\x02႘႙')
buf.write('\x05яȨ\x02႙ႚ\x05зȜ\x02ႚႛ')
buf.write('\x05ёȩ\x02ႛႜ\x05ѝȯ\x02ႜႝ')
buf.write('\x05яȨ\x02ႝ႞\x05лȞ\x02႞႟')
buf.write('\x05нȟ\x02႟Ⴀ\x05лȞ\x02Ⴀ̠')
buf.write('\x03\x02\x02\x02ႡႢ\x05ѝȯ\x02ႢႣ\x05я')
buf.write('Ȩ\x02ႣႤ\x05лȞ\x02ႤႥ\x05н')
buf.write('ȟ\x02ႥႦ\x05їȬ\x02Ⴆ̢\x03\x02\x02')
buf.write('\x02ႧႨ\x05ѝȯ\x02ႨႩ\x05яȨ')
buf.write('\x02ႩႪ\x05хȣ\x02ႪႫ\x05ёȩ')
buf.write('\x02ႫႬ\x05яȨ\x02Ⴌ̤\x03\x02\x02\x02Ⴍ')
buf.write('Ⴎ\x05ѝȯ\x02ႮႯ\x05яȨ\x02Ⴏ')
buf.write('Ⴐ\x05хȣ\x02ႰႱ\x05ѕȫ\x02Ⴑ')
buf.write('Ⴒ\x05ѝȯ\x02ႲႳ\x05нȟ\x02Ⴓ')
buf.write('̦\x03\x02\x02\x02ႴႵ\x05ѝȯ\x02ႵႶ')
buf.write('\x05яȨ\x02ႶႷ\x05ыȦ\x02ႷႸ')
buf.write('\x05хȣ\x02ႸႹ\x05эȧ\x02ႹႺ')
buf.write('\x05хȣ\x02ႺႻ\x05ћȮ\x02ႻႼ')
buf.write('\x05нȟ\x02ႼႽ\x05лȞ\x02Ⴝ̨')
buf.write('\x03\x02\x02\x02ႾႿ\x05ѝȯ\x02ႿჀ\x05я')
buf.write('Ȩ\x02ჀჁ\x05ѓȪ\x02ჁჂ\x05х')
buf.write('ȣ\x02ჂჃ\x05џȰ\x02ჃჄ\x05ё')
buf.write('ȩ\x02ჄჅ\x05ћȮ\x02Ⴥ̪\x03\x02\x02')
buf.write('\x02\u10c6Ⴧ\x05ѝȯ\x02Ⴧ\u10c8\x05яȨ')
buf.write('\x02\u10c8\u10c9\x05ћȮ\x02\u10c9\u10ca\x05хȣ')
buf.write('\x02\u10ca\u10cb\x05ыȦ\x02\u10cb̬\x03\x02\x02\x02\u10cc')
buf.write('Ⴭ\x05ѝȯ\x02Ⴭ\u10ce\x05ѓȪ\x02\u10ce')
buf.write('\u10cf\x05лȞ\x02\u10cfა\x05еț\x02ა')
buf.write('ბ\x05ћȮ\x02ბგ\x05нȟ\x02გ')
buf.write('̮\x03\x02\x02\x02დე\x05ѝȯ\x02ევ')
buf.write('\x05ѓȪ\x02ვზ\x05лȞ\x02ზთ')
buf.write('\x05еț\x02თი\x05ћȮ\x02იკ')
buf.write('\x05нȟ\x02კლ\x05лȞ\x02ლ̰')
buf.write('\x03\x02\x02\x02მნ\x05ѝȯ\x02ნო\x05ѓ')
buf.write('Ȫ\x02ოპ\x05љȭ\x02პჟ\x05н')
buf.write('ȟ\x02ჟრ\x05їȬ\x02რს\x05ћ')
buf.write('Ȯ\x02ს̲\x03\x02\x02\x02ტუ\x05ѝȯ')
buf.write('\x02უფ\x05їȬ\x02ფქ\x05ёȩ')
buf.write('\x02ქღ\x05ѡȱ\x02ღყ\x05хȣ')
buf.write('\x02ყშ\x05лȞ\x02შ̴\x03\x02\x02\x02ჩ')
buf.write('ც\x05ѝȯ\x02ცძ\x05љȭ\x02ძ')
buf.write('წ\x05нȟ\x02წ̶\x03\x02\x02\x02ჭხ')
buf.write('\x05ѝȯ\x02ხჯ\x05љȭ\x02ჯჰ')
buf.write('\x05хȣ\x02ჰჱ\x05яȨ\x02ჱჲ')
buf.write('\x05сȡ\x02ჲ̸\x03\x02\x02\x02ჳჴ\x05џ')
buf.write('Ȱ\x02ჴჵ\x05еț\x02ჵჶ\x05ы')
buf.write('Ȧ\x02ჶჷ\x05хȣ\x02ჷჸ\x05л')
buf.write('Ȟ\x02ჸჹ\x05еț\x02ჹჺ\x05ћ')
buf.write('Ȯ\x02ჺ჻\x05нȟ\x02჻̺\x03\x02\x02')
buf.write('\x02ჼჽ\x05џȰ\x02ჽჾ\x05еț')
buf.write('\x02ჾჿ\x05ыȦ\x02ჿᄀ\x05ѝȯ')
buf.write('\x02ᄀᄁ\x05нȟ\x02ᄁ̼\x03\x02\x02\x02ᄂ')
buf.write('ᄃ\x05џȰ\x02ᄃᄄ\x05еț\x02ᄄ')
buf.write('ᄅ\x05ыȦ\x02ᄅᄆ\x05ѝȯ\x02ᄆ')
buf.write('ᄇ\x05нȟ\x02ᄇᄈ\x05љȭ\x02ᄈ')
buf.write('̾\x03\x02\x02\x02ᄉᄊ\x05џȰ\x02ᄊᄋ')
buf.write('\x05еț\x02ᄋᄌ\x05їȬ\x02ᄌᄍ')
buf.write('\x05йȝ\x02ᄍᄎ\x05уȢ\x02ᄎᄏ')
buf.write('\x05еț\x02ᄏᄐ\x05їȬ\x02ᄐ̀')
buf.write('\x03\x02\x02\x02ᄑᄒ\x05џȰ\x02ᄒᄓ\x05е')
buf.write('ț\x02ᄓᄔ\x05їȬ\x02ᄔᄕ\x05й')
buf.write('ȝ\x02ᄕᄖ\x05уȢ\x02ᄖᄗ\x05е')
buf.write('ț\x02ᄗᄘ\x05їȬ\x02ᄘᄙ\x074')
buf.write('\x02\x02ᄙ͂\x03\x02\x02\x02ᄚᄛ\x05џȰ\x02ᄛ')
buf.write('ᄜ\x05еț\x02ᄜᄝ\x05їȬ\x02ᄝ')
buf.write('ᄞ\x05хȣ\x02ᄞᄟ\x05еț\x02ᄟ')
buf.write('ᄠ\x05зȜ\x02ᄠᄡ\x05ыȦ\x02ᄡ')
buf.write('ᄢ\x05нȟ\x02ᄢ̈́\x03\x02\x02\x02ᄣᄤ')
buf.write('\x05џȰ\x02ᄤᄥ\x05еț\x02ᄥᄦ')
buf.write('\x05їȬ\x02ᄦᄧ\x05їȬ\x02ᄧᄨ')
buf.write('\x05еț\x02ᄨᄩ\x05ѥȳ\x02ᄩ͆')
buf.write('\x03\x02\x02\x02ᄪᄫ\x05џȰ\x02ᄫᄬ\x05е')
buf.write('ț\x02ᄬᄭ\x05їȬ\x02ᄭᄮ\x05ѥ')
buf.write('ȳ\x02ᄮᄯ\x05хȣ\x02ᄯᄰ\x05я')
buf.write('Ȩ\x02ᄰᄱ\x05сȡ\x02ᄱ͈\x03\x02\x02')
buf.write('\x02ᄲᄳ\x05џȰ\x02ᄳᄴ\x05нȟ')
buf.write('\x02ᄴᄵ\x05їȬ\x02ᄵᄶ\x05љȭ')
buf.write('\x02ᄶᄷ\x05хȣ\x02ᄷᄸ\x05ёȩ')
buf.write('\x02ᄸᄹ\x05яȨ\x02ᄹ͊\x03\x02\x02\x02ᄺ')
buf.write('ᄻ\x05џȰ\x02ᄻᄼ\x05нȟ\x02ᄼ')
buf.write('ᄽ\x05їȬ\x02ᄽᄾ\x05љȭ\x02ᄾ')
buf.write('ᄿ\x05хȣ\x02ᄿᅀ\x05ёȩ\x02ᅀ')
buf.write('ᅁ\x05яȨ\x02ᅁᅂ\x05љȭ\x02ᅂ')
buf.write('͌\x03\x02\x02\x02ᅃᅄ\x05ѡȱ\x02ᅄᅅ')
buf.write('\x05еț\x02ᅅᅆ\x05хȣ\x02ᅆᅇ')
buf.write('\x05ћȮ\x02ᅇ͎\x03\x02\x02\x02ᅈᅉ\x05ѡ')
buf.write('ȱ\x02ᅉᅊ\x05еț\x02ᅊᅋ\x05ї')
buf.write('Ȭ\x02ᅋᅌ\x05яȨ\x02ᅌᅍ\x05х')
buf.write('ȣ\x02ᅍᅎ\x05яȨ\x02ᅎᅏ\x05с')
buf.write('ȡ\x02ᅏ͐\x03\x02\x02\x02ᅐᅑ\x05ѡȱ')
buf.write('\x02ᅑᅒ\x05нȟ\x02ᅒᅓ\x05ыȦ')
buf.write('\x02ᅓᅔ\x05ыȦ\x02ᅔᅕ\x05пȠ')
buf.write('\x02ᅕᅖ\x05ёȩ\x02ᅖᅗ\x05їȬ')
buf.write('\x02ᅗᅘ\x05эȧ\x02ᅘᅙ\x05нȟ')
buf.write('\x02ᅙᅚ\x05лȞ\x02ᅚ͒\x03\x02\x02\x02ᅛ')
buf.write('ᅜ\x05ѡȱ\x02ᅜᅝ\x05уȢ\x02ᅝ')
buf.write('ᅞ\x05нȟ\x02ᅞᅟ\x05яȨ\x02ᅟ')
buf.write('͔\x03\x02\x02\x02ᅠᅡ\x05ѡȱ\x02ᅡᅢ')
buf.write('\x05уȢ\x02ᅢᅣ\x05нȟ\x02ᅣᅤ')
buf.write('\x05яȨ\x02ᅤᅥ\x05нȟ\x02ᅥᅦ')
buf.write('\x05џȰ\x02ᅦᅧ\x05нȟ\x02ᅧᅨ')
buf.write('\x05їȬ\x02ᅨ͖\x03\x02\x02\x02ᅩᅪ\x05ѡ')
buf.write('ȱ\x02ᅪᅫ\x05уȢ\x02ᅫᅬ\x05н')
buf.write('ȟ\x02ᅬᅭ\x05їȬ\x02ᅭᅮ\x05н')
buf.write('ȟ\x02ᅮ͘\x03\x02\x02\x02ᅯᅰ\x05ѡȱ')
buf.write('\x02ᅰᅱ\x05уȢ\x02ᅱᅲ\x05хȣ')
buf.write('\x02ᅲᅳ\x05ыȦ\x02ᅳᅴ\x05нȟ')
buf.write('\x02ᅴ͚\x03\x02\x02\x02ᅵᅶ\x05ѡȱ\x02ᅶ')
buf.write('ᅷ\x05хȣ\x02ᅷᅸ\x05ћȮ\x02ᅸ')
buf.write('ᅹ\x05уȢ\x02ᅹ͜\x03\x02\x02\x02ᅺᅻ')
buf.write('\x05ѡȱ\x02ᅻᅼ\x05хȣ\x02ᅼᅽ')
buf.write('\x05ћȮ\x02ᅽᅾ\x05уȢ\x02ᅾᅿ')
buf.write('\x05хȣ\x02ᅿᆀ\x05яȨ\x02ᆀ͞')
buf.write('\x03\x02\x02\x02ᆁᆂ\x05ѡȱ\x02ᆂᆃ\x05ё')
buf.write('ȩ\x02ᆃᆄ\x05їȬ\x02ᆄᆅ\x05щ')
buf.write('ȥ\x02ᆅ͠\x03\x02\x02\x02ᆆᆇ\x05ѡȱ')
buf.write('\x02ᆇᆈ\x05їȬ\x02ᆈᆉ\x05хȣ')
buf.write('\x02ᆉᆊ\x05ћȮ\x02ᆊᆋ\x05нȟ')
buf.write('\x02ᆋ͢\x03\x02\x02\x02ᆌᆍ\x05ѣȲ\x02ᆍ')
buf.write('ᆎ\x05эȧ\x02ᆎᆏ\x05ыȦ\x02ᆏ')
buf.write('ͤ\x03\x02\x02\x02ᆐᆑ\x05ѣȲ\x02ᆑᆒ')
buf.write('\x05эȧ\x02ᆒᆓ\x05ыȦ\x02ᆓᆔ')
buf.write('\x05еț\x02ᆔᆕ\x05сȡ\x02ᆕᆖ')
buf.write('\x05сȡ\x02ᆖͦ\x03\x02\x02\x02ᆗᆘ\x05ѣ')
buf.write('Ȳ\x02ᆘᆙ\x05эȧ\x02ᆙᆚ\x05ы')
buf.write('Ȧ\x02ᆚᆛ\x05еț\x02ᆛᆜ\x05ћ')
buf.write('Ȯ\x02ᆜᆝ\x05ћȮ\x02ᆝᆞ\x05ї')
buf.write('Ȭ\x02ᆞᆟ\x05хȣ\x02ᆟᆠ\x05з')
buf.write('Ȝ\x02ᆠᆡ\x05ѝȯ\x02ᆡᆢ\x05ћ')
buf.write('Ȯ\x02ᆢᆣ\x05нȟ\x02ᆣᆤ\x05љ')
buf.write('ȭ\x02ᆤͨ\x03\x02\x02\x02ᆥᆦ\x05ѣȲ')
buf.write('\x02ᆦᆧ\x05эȧ\x02ᆧᆨ\x05ыȦ')
buf.write('\x02ᆨᆩ\x05йȝ\x02ᆩᆪ\x05еț')
buf.write('\x02ᆪᆫ\x05љȭ\x02ᆫᆬ\x05ћȮ')
buf.write('\x02ᆬͪ\x03\x02\x02\x02ᆭᆮ\x05ѣȲ\x02ᆮ')
buf.write('ᆯ\x05эȧ\x02ᆯᆰ\x05ыȦ\x02ᆰ')
buf.write('ᆱ\x05йȝ\x02ᆱᆲ\x05ёȩ\x02ᆲ')
buf.write('ᆳ\x05ыȦ\x02ᆳᆴ\x05еț\x02ᆴ')
buf.write('ᆵ\x05ћȮ\x02ᆵᆶ\x05ћȮ\x02ᆶ')
buf.write('ᆷ\x05џȰ\x02ᆷᆸ\x05еț\x02ᆸ')
buf.write('ᆹ\x05ыȦ\x02ᆹͬ\x03\x02\x02\x02ᆺᆻ')
buf.write('\x05ѣȲ\x02ᆻᆼ\x05эȧ\x02ᆼᆽ')
buf.write('\x05ыȦ\x02ᆽᆾ\x05нȟ\x02ᆾᆿ')
buf.write('\x05ыȦ\x02ᆿᇀ\x05нȟ\x02ᇀᇁ')
buf.write('\x05эȧ\x02ᇁᇂ\x05нȟ\x02ᇂᇃ')
buf.write('\x05яȨ\x02ᇃᇄ\x05ћȮ\x02ᇄͮ')
buf.write('\x03\x02\x02\x02ᇅᇆ\x05ѣȲ\x02ᇆᇇ\x05э')
buf.write('ȧ\x02ᇇᇈ\x05ыȦ\x02ᇈᇉ\x05н')
buf.write('ȟ\x02ᇉᇊ\x05ѣȲ\x02ᇊᇋ\x05х')
buf.write('ȣ\x02ᇋᇌ\x05љȭ\x02ᇌᇍ\x05ћ')
buf.write('Ȯ\x02ᇍᇎ\x05љȭ\x02ᇎͰ\x03\x02\x02')
buf.write('\x02ᇏᇐ\x05ѣȲ\x02ᇐᇑ\x05эȧ')
buf.write('\x02ᇑᇒ\x05ыȦ\x02ᇒᇓ\x05пȠ')
buf.write('\x02ᇓᇔ\x05ёȩ\x02ᇔᇕ\x05їȬ')
buf.write('\x02ᇕᇖ\x05нȟ\x02ᇖᇗ\x05љȭ')
buf.write('\x02ᇗᇘ\x05ћȮ\x02ᇘͲ\x03\x02\x02\x02ᇙ')
buf.write('ᇚ\x05ѣȲ\x02ᇚᇛ\x05эȧ\x02ᇛ')
buf.write('ᇜ\x05ыȦ\x02ᇜᇝ\x05яȨ\x02ᇝ')
buf.write('ᇞ\x05еț\x02ᇞᇟ\x05эȧ\x02ᇟ')
buf.write('ᇠ\x05нȟ\x02ᇠᇡ\x05љȭ\x02ᇡ')
buf.write('ᇢ\x05ѓȪ\x02ᇢᇣ\x05еț\x02ᇣ')
buf.write('ᇤ\x05йȝ\x02ᇤᇥ\x05нȟ\x02ᇥ')
buf.write('ᇦ\x05љȭ\x02ᇦʹ\x03\x02\x02\x02ᇧᇨ')
buf.write('\x05ѣȲ\x02ᇨᇩ\x05эȧ\x02ᇩᇪ')
buf.write('\x05ыȦ\x02ᇪᇫ\x05ѓȪ\x02ᇫᇬ')
buf.write('\x05еț\x02ᇬᇭ\x05їȬ\x02ᇭᇮ')
buf.write('\x05љȭ\x02ᇮᇯ\x05нȟ\x02ᇯͶ')
buf.write('\x03\x02\x02\x02ᇰᇱ\x05ѣȲ\x02ᇱᇲ\x05э')
buf.write('ȧ\x02ᇲᇳ\x05ыȦ\x02ᇳᇴ\x05ѓ')
buf.write('Ȫ\x02ᇴᇵ\x05хȣ\x02ᇵ\u0378\x03\x02\x02')
buf.write('\x02ᇶᇷ\x05ѣȲ\x02ᇷᇸ\x05эȧ')
buf.write('\x02ᇸᇹ\x05ыȦ\x02ᇹᇺ\x05ѕȫ')
buf.write('\x02ᇺᇻ\x05ѝȯ\x02ᇻᇼ\x05нȟ')
buf.write('\x02ᇼᇽ\x05їȬ\x02ᇽᇾ\x05ѥȳ')
buf.write('\x02ᇾͺ\x03\x02\x02\x02ᇿሀ\x05ѣȲ\x02ሀ')
buf.write('ሁ\x05эȧ\x02ሁሂ\x05ыȦ\x02ሂ')
buf.write('ሃ\x05їȬ\x02ሃሄ\x05ёȩ\x02ሄ')
buf.write('ህ\x05ёȩ\x02ህሆ\x05ћȮ\x02ሆ')
buf.write('ͼ\x03\x02\x02\x02ሇለ\x05ѣȲ\x02ለሉ')
buf.write('\x05эȧ\x02ሉሊ\x05ыȦ\x02ሊላ')
buf.write('\x05љȭ\x02ላሌ\x05нȟ\x02ሌል')
buf.write('\x05їȬ\x02ልሎ\x05хȣ\x02ሎሏ')
buf.write('\x05еț\x02ሏሐ\x05ыȦ\x02ሐሑ')
buf.write('\x05хȣ\x02ሑሒ\x05ѧȴ\x02ሒሓ')
buf.write('\x05нȟ\x02ሓ;\x03\x02\x02\x02ሔሕ\x05ѣ')
buf.write('Ȳ\x02ሕሖ\x05эȧ\x02ሖሗ\x05ы')
buf.write('Ȧ\x02ሗመ\x05ћȮ\x02መሙ\x05е')
buf.write('ț\x02ሙሚ\x05зȜ\x02ሚማ\x05ы')
buf.write('Ȧ\x02ማሜ\x05нȟ\x02ሜ\u0380\x03\x02\x02')
buf.write('\x02ምሞ\x05ѥȳ\x02ሞሟ\x05нȟ')
buf.write('\x02ሟሠ\x05еț\x02ሠሡ\x05їȬ')
buf.write('\x02ሡ\u0382\x03\x02\x02\x02ሢሣ\x05ѥȳ\x02ሣ')
buf.write('ሤ\x05нȟ\x02ሤሥ\x05љȭ\x02ሥ')
buf.write('΄\x03\x02\x02\x02ሦሧ\x05ѥȳ\x02ሧረ')
buf.write('\x05эȧ\x02ረሩ\x05хȣ\x02ሩሪ')
buf.write('\x05яȨ\x02ሪራ\x05ћȮ\x02ራሬ')
buf.write('\x05нȟ\x02ሬር\x05їȬ\x02ርሮ')
buf.write('\x05џȰ\x02ሮሯ\x05еț\x02ሯሰ')
buf.write('\x05ыȦ\x02ሰሱ\x07a\x02\x02ሱሲ\x05ѝ')
buf.write('ȯ\x02ሲሳ\x05яȨ\x02ሳሴ\x05й')
buf.write('ȝ\x02ሴስ\x05ёȩ\x02ስሶ\x05я')
buf.write('Ȩ\x02ሶሷ\x05љȭ\x02ሷሸ\x05ћ')
buf.write('Ȯ\x02ሸሹ\x05їȬ\x02ሹሺ\x05е')
buf.write('ț\x02ሺሻ\x05хȣ\x02ሻሼ\x05я')
buf.write('Ȩ\x02ሼሽ\x05нȟ\x02ሽሾ\x05л')
buf.write('Ȟ\x02ሾΆ\x03\x02\x02\x02ሿቀ\x05ѧȴ')
buf.write('\x02ቀቁ\x05ёȩ\x02ቁቂ\x05яȨ')
buf.write('\x02ቂቃ\x05нȟ\x02ቃΈ\x03\x02\x02\x02ቄ')
buf.write('ቅ\x05ѓȪ\x02ቅቆ\x05їȬ\x02ቆ')
buf.write('ቇ\x05нȟ\x02ቇቈ\x05лȞ\x02ቈ')
buf.write('\u1249\x05хȣ\x02\u1249ቊ\x05йȝ\x02ቊ')
buf.write('ቋ\x05ћȮ\x02ቋቌ\x05хȣ\x02ቌ')
buf.write('ቍ\x05ёȩ\x02ቍ\u124e\x05яȨ\x02\u124e')
buf.write('Ί\x03\x02\x02\x02\u124fቐ\x05ѓȪ\x02ቐቑ')
buf.write('\x05їȬ\x02ቑቒ\x05нȟ\x02ቒቓ')
buf.write('\x05лȞ\x02ቓቔ\x05хȣ\x02ቔቕ')
buf.write('\x05йȝ\x02ቕቖ\x05ћȮ\x02ቖ\u1257')
buf.write('\x05хȣ\x02\u1257ቘ\x05ёȩ\x02ቘ\u1259')
buf.write('\x05яȨ\x02\u1259ቚ\x07a\x02\x02ቚቛ\x05з')
buf.write('Ȝ\x02ቛቜ\x05ёȩ\x02ቜቝ\x05ѝ')
buf.write('ȯ\x02ቝ\u125e\x05яȨ\x02\u125e\u125f\x05л')
buf.write('Ȟ\x02\u125fበ\x05љȭ\x02በΌ\x03\x02\x02')
buf.write('\x02ቡቢ\x05ѓȪ\x02ቢባ\x05їȬ')
buf.write('\x02ባቤ\x05нȟ\x02ቤብ\x05лȞ')
buf.write('\x02ብቦ\x05хȣ\x02ቦቧ\x05йȝ')
buf.write('\x02ቧቨ\x05ћȮ\x02ቨቩ\x05хȣ')
buf.write('\x02ቩቪ\x05ёȩ\x02ቪቫ\x05яȨ')
buf.write('\x02ቫቬ\x07a\x02\x02ቬቭ\x05йȝ\x02ቭ')
buf.write('ቮ\x05ёȩ\x02ቮቯ\x05љȭ\x02ቯ')
buf.write('ተ\x05ћȮ\x02ተΎ\x03\x02\x02\x02ቱቲ')
buf.write('\x05ѓȪ\x02ቲታ\x05їȬ\x02ታቴ')
buf.write('\x05нȟ\x02ቴት\x05лȞ\x02ትቶ')
buf.write('\x05хȣ\x02ቶቷ\x05йȝ\x02ቷቸ')
buf.write('\x05ћȮ\x02ቸቹ\x05хȣ\x02ቹቺ')
buf.write('\x05ёȩ\x02ቺቻ\x05яȨ\x02ቻቼ')
buf.write('\x07a\x02\x02ቼች\x05лȞ\x02ችቾ\x05н')
buf.write('ȟ\x02ቾቿ\x05ћȮ\x02ቿኀ\x05е')
buf.write('ț\x02ኀኁ\x05хȣ\x02ኁኂ\x05ы')
buf.write('Ȧ\x02ኂኃ\x05љȭ\x02ኃΐ\x03\x02\x02')
buf.write('\x02ኄኅ\x05ѓȪ\x02ኅኆ\x05їȬ')
buf.write('\x02ኆኇ\x05нȟ\x02ኇኈ\x05лȞ')
buf.write('\x02ኈ\u1289\x05хȣ\x02\u1289ኊ\x05йȝ')
buf.write('\x02ኊኋ\x05ћȮ\x02ኋኌ\x05хȣ')
buf.write('\x02ኌኍ\x05ёȩ\x02ኍ\u128e\x05яȨ')
buf.write('\x02\u128e\u128f\x07a\x02\x02\u128fነ\x05ѓȪ\x02ነ')
buf.write('ኑ\x05їȬ\x02ኑኒ\x05ёȩ\x02ኒ')
buf.write('ና\x05зȜ\x02ናኔ\x05еț\x02ኔ')
buf.write('ን\x05зȜ\x02ንኖ\x05хȣ\x02ኖ')
buf.write('ኗ\x05ыȦ\x02ኗኘ\x05хȣ\x02ኘ')
buf.write('ኙ\x05ћȮ\x02ኙኚ\x05ѥȳ\x02ኚ')
buf.write('Β\x03\x02\x02\x02ኛኜ\x05ѓȪ\x02ኜኝ')
buf.write('\x05їȬ\x02ኝኞ\x05нȟ\x02ኞኟ')
buf.write('\x05лȞ\x02ኟአ\x05хȣ\x02አኡ')
buf.write('\x05йȝ\x02ኡኢ\x05ћȮ\x02ኢኣ')
buf.write('\x05хȣ\x02ኣኤ\x05ёȩ\x02ኤእ')
buf.write('\x05яȨ\x02እኦ\x07a\x02\x02ኦኧ\x05љ')
buf.write('ȭ\x02ኧከ\x05нȟ\x02ከኩ\x05ћ')
buf.write('Ȯ\x02ኩΔ\x03\x02\x02\x02ኪካ\x05йȝ')
buf.write('\x02ካኬ\x05ѝȯ\x02ኬክ\x05эȧ')
buf.write('\x02ክኮ\x05нȟ\x02ኮኯ\x07a\x02\x02ኯ')
buf.write('ኰ\x05лȞ\x02ኰ\u12b1\x05хȣ\x02\u12b1')
buf.write('ኲ\x05љȭ\x02ኲኳ\x05ћȮ\x02ኳ')
buf.write('Ζ\x03\x02\x02\x02ኴኵ\x05лȞ\x02ኵ\u12b6')
buf.write('\x05нȟ\x02\u12b6\u12b7\x05яȨ\x02\u12b7ኸ')
buf.write('\x05љȭ\x02ኸኹ\x05нȟ\x02ኹኺ')
buf.write('\x07a\x02\x02ኺኻ\x05їȬ\x02ኻኼ\x05е')
buf.write('ț\x02ኼኽ\x05яȨ\x02ኽኾ\x05щ')
buf.write('ȥ\x02ኾΘ\x03\x02\x02\x02\u12bfዀ\x05ыȦ')
buf.write('\x02ዀ\u12c1\x05хȣ\x02\u12c1ዂ\x05љȭ')
buf.write('\x02ዂዃ\x05ћȮ\x02ዃዄ\x05еț')
buf.write('\x02ዄዅ\x05сȡ\x02ዅ\u12c6\x05сȡ')
buf.write('\x02\u12c6Κ\x03\x02\x02\x02\u12c7ወ\x05ѓȪ\x02ወ')
buf.write('ዉ\x05нȟ\x02ዉዊ\x05їȬ\x02ዊ')
buf.write('ዋ\x05йȝ\x02ዋዌ\x05нȟ\x02ዌ')
buf.write('ው\x05яȨ\x02ውዎ\x05ћȮ\x02ዎ')
buf.write('ዏ\x07a\x02\x02ዏዐ\x05їȬ\x02ዐዑ')
buf.write('\x05еț\x02ዑዒ\x05яȨ\x02ዒዓ')
buf.write('\x05щȥ\x02ዓΜ\x03\x02\x02\x02ዔዕ\x05ѓ')
buf.write('Ȫ\x02ዕዖ\x05нȟ\x02ዖ\u12d7\x05ї')
buf.write('Ȭ\x02\u12d7ዘ\x05йȝ\x02ዘዙ\x05н')
buf.write('ȟ\x02ዙዚ\x05яȨ\x02ዚዛ\x05ћ')
buf.write('Ȯ\x02ዛዜ\x05хȣ\x02ዜዝ\x05ы')
buf.write('Ȧ\x02ዝዞ\x05нȟ\x02ዞዟ\x07a\x02')
buf.write('\x02ዟዠ\x05йȝ\x02ዠዡ\x05ёȩ')
buf.write('\x02ዡዢ\x05яȨ\x02ዢዣ\x05ћȮ')
buf.write('\x02ዣΞ\x03\x02\x02\x02ዤዥ\x05ѓȪ\x02ዥ')
buf.write('ዦ\x05нȟ\x02ዦዧ\x05їȬ\x02ዧ')
buf.write('የ\x05йȝ\x02የዩ\x05нȟ\x02ዩ')
buf.write('ዪ\x05яȨ\x02ዪያ\x05ћȮ\x02ያ')
buf.write('ዬ\x05хȣ\x02ዬይ\x05ыȦ\x02ይ')
buf.write('ዮ\x05нȟ\x02ዮዯ\x07a\x02\x02ዯደ')
buf.write('\x05лȞ\x02ደዱ\x05хȣ\x02ዱዲ')
buf.write('\x05љȭ\x02ዲዳ\x05йȝ\x02ዳΠ')
buf.write('\x03\x02\x02\x02ዴድ\x05їȬ\x02ድዶ\x05е')
buf.write('ț\x02ዶዷ\x05яȨ\x02ዷዸ\x05щ')
buf.write('ȥ\x02ዸ\u03a2\x03\x02\x02\x02ዹዺ\x05еț')
buf.write('\x02ዺዻ\x05џȰ\x02ዻዼ\x05сȡ')
buf.write('\x02ዼΤ\x03\x02\x02\x02ዽዾ\x05йȝ\x02ዾ')
buf.write('ዿ\x05ёȩ\x02ዿጀ\x05їȬ\x02ጀ')
buf.write('ጁ\x05їȬ\x02ጁΦ\x03\x02\x02\x02ጂጃ')
buf.write('\x05ыȦ\x02ጃጄ\x05еț\x02ጄጅ')
buf.write('\x05сȡ\x02ጅΨ\x03\x02\x02\x02ጆጇ\x05ы')
buf.write('Ȧ\x02ጇገ\x05нȟ\x02ገጉ\x05е')
buf.write('ț\x02ጉጊ\x05лȞ\x02ጊΪ\x03\x02\x02')
buf.write('\x02ጋጌ\x05эȧ\x02ጌግ\x05еț')
buf.write('\x02ግጎ\x05ѣȲ\x02ጎά\x03\x02\x02\x02ጏ')
buf.write('ጐ\x05эȧ\x02ጐ\u1311\x05нȟ\x02\u1311')
buf.write('ጒ\x05лȞ\x02ጒጓ\x05хȣ\x02ጓ')
buf.write('ጔ\x05еț\x02ጔጕ\x05яȨ\x02ጕ')
buf.write('ή\x03\x02\x02\x02\u1316\u1317\x05эȧ\x02\u1317ጘ')
buf.write('\x05хȣ\x02ጘጙ\x05яȨ\x02ጙΰ')
buf.write('\x03\x02\x02\x02ጚጛ\x05яȨ\x02ጛጜ\x05ћ')
buf.write('Ȯ\x02ጜጝ\x05хȣ\x02ጝጞ\x05ы')
buf.write('Ȧ\x02ጞጟ\x05нȟ\x02ጟβ\x03\x02\x02')
buf.write('\x02ጠጡ\x05їȬ\x02ጡጢ\x05еț')
buf.write('\x02ጢጣ\x05ћȮ\x02ጣጤ\x05хȣ')
buf.write('\x02ጤጥ\x05ёȩ\x02ጥጦ\x07a\x02\x02ጦ')
buf.write('ጧ\x05ћȮ\x02ጧጨ\x05ёȩ\x02ጨ')
buf.write('ጩ\x07a\x02\x02ጩጪ\x05їȬ\x02ጪጫ')
buf.write('\x05нȟ\x02ጫጬ\x05ѓȪ\x02ጬጭ')
buf.write('\x05ёȩ\x02ጭጮ\x05їȬ\x02ጮጯ')
buf.write('\x05ћȮ\x02ጯδ\x03\x02\x02\x02ጰጱ\x05ї')
buf.write('Ȭ\x02ጱጲ\x05ёȩ\x02ጲጳ\x05ѡ')
buf.write('ȱ\x02ጳጴ\x07a\x02\x02ጴጵ\x05яȨ')
buf.write('\x02ጵጶ\x05ѝȯ\x02ጶጷ\x05эȧ')
buf.write('\x02ጷጸ\x05зȜ\x02ጸጹ\x05нȟ')
buf.write('\x02ጹጺ\x05їȬ\x02ጺζ\x03\x02\x02\x02ጻ')
buf.write('ጼ\x05љȭ\x02ጼጽ\x05ѝȯ\x02ጽ')
buf.write('ጾ\x05эȧ\x02ጾθ\x03\x02\x02\x02ጿፀ')
buf.write('\x05џȰ\x02ፀፁ\x05еț\x02ፁፂ')
buf.write('\x05їȬ\x02ፂፃ\x05хȣ\x02ፃፄ')
buf.write('\x05еț\x02ፄፅ\x05яȨ\x02ፅፆ')
buf.write('\x05йȝ\x02ፆፇ\x05нȟ\x02ፇκ')
buf.write('\x03\x02\x02\x02ፈፉ\x05їȬ\x02ፉፊ\x05н')
buf.write('ȟ\x02ፊፋ\x05сȡ\x02ፋፌ\x05ї')
buf.write('Ȭ\x02ፌፍ\x07a\x02\x02ፍμ\x03\x02\x02\x02ፎ')
buf.write('ፏ\x05љȭ\x02ፏፐ\x05ћȮ\x02ፐ')
buf.write('ፑ\x05лȞ\x02ፑፒ\x05лȞ\x02ፒ')
buf.write('ፓ\x05нȟ\x02ፓፔ\x05џȰ\x02ፔ')
buf.write('ξ\x03\x02\x02\x02ፕፖ\x05џȰ\x02ፖፗ')
buf.write('\x05еț\x02ፗፘ\x05їȬ\x02ፘፙ')
buf.write('\x07a\x02\x02ፙπ\x03\x02\x02\x02ፚ\u135b\x05йȝ')
buf.write('\x02\u135b\u135c\x05ёȩ\x02\u135c፝\x05џȰ')
buf.write('\x02፝፞\x05еț\x02፞፟\x05їȬ')
buf.write('\x02፟፠\x07a\x02\x02፠ς\x03\x02\x02\x02፡።')
buf.write('\x05яȨ\x02።፩\x07)\x02\x02፣፨\n\x02\x02')
buf.write('\x02፤፥\x07)\x02\x02፥፨\x07)\x02\x02፦፨\x05')
buf.write('Эȗ\x02፧፣\x03\x02\x02\x02፧፤\x03\x02\x02\x02')
buf.write('፧፦\x03\x02\x02\x02፨፫\x03\x02\x02\x02፩፧\x03')
buf.write('\x02\x02\x02፩፪\x03\x02\x02\x02፪፬\x03\x02\x02\x02፫፩')
buf.write('\x03\x02\x02\x02፬፭\x07)\x02\x02፭τ\x03\x02\x02\x02፮')
buf.write('፷\x05зȜ\x02፯፳\x07)\x02\x02፰፲')
buf.write('\x0423\x02፱፰\x03\x02\x02\x02፲፵\x03\x02\x02\x02፳')
buf.write('፱\x03\x02\x02\x02፳፴\x03\x02\x02\x02፴፶\x03\x02\x02\x02')
buf.write('፵፳\x03\x02\x02\x02፶፸\x07)\x02\x02፷፯\x03')
buf.write('\x02\x02\x02፸፹\x03\x02\x02\x02፹፷\x03\x02\x02\x02፹፺')
buf.write('\x03\x02\x02\x02፺φ\x03\x02\x02\x02፻ᎄ\x05ѣȲ')
buf.write('\x02፼ᎀ\x07)\x02\x02\u137d\u137f\t\x03\x02\x02\u137e\u137d')
buf.write(
'\x03\x02\x02\x02\u137fᎂ\x03\x02\x02\x02ᎀ\u137e\x03\x02\x02\x02ᎀ')
buf.write('ᎁ\x03\x02\x02\x02ᎁᎃ\x03\x02\x02\x02ᎂᎀ\x03\x02\x02\x02')
buf.write('ᎃᎅ\x07)\x02\x02ᎄ፼\x03\x02\x02\x02ᎅᎆ\x03')
buf.write('\x02\x02\x02ᎆᎄ\x03\x02\x02\x02ᎆᎇ\x03\x02\x02\x02ᎇψ')
buf.write('\x03\x02\x02\x02ᎈᎉ\x070\x02\x02ᎉᎊ\x070\x02\x02ᎊ')
buf.write('ϊ\x03\x02\x02\x02ᎋᎌ\x070\x02\x02ᎌό\x03\x02\x02')
buf.write('\x02ᎍᎎ\x05УȒ\x02ᎎώ\x03\x02\x02\x02ᎏ')
buf.write('᎘\x05Хȓ\x02᎐᎒\t\x04\x02\x02᎑᎓')
buf.write('\t\x05\x02\x02᎒᎑\x03\x02\x02\x02᎒᎓\x03\x02\x02\x02᎓')
buf.write('᎖\x03\x02\x02\x02᎔᎗\x05Хȓ\x02᎕᎗')
buf.write('\x05УȒ\x02᎖᎔\x03\x02\x02\x02᎖᎕\x03\x02\x02')
buf.write('\x02᎗᎙\x03\x02\x02\x02᎘᎐\x03\x02\x02\x02᎘᎙')
buf.write('\x03\x02\x02\x02᎙\u139c\x03\x02\x02\x02\u139a\u139d\x05лȞ')
buf.write(
'\x02\u139b\u139d\x05пȠ\x02\u139c\u139a\x03\x02\x02\x02\u139c')
buf.write(
'\u139b\x03\x02\x02\x02\u139c\u139d\x03\x02\x02\x02\u139dϐ\x03\x02\x02\x02'
)
buf.write('\u139eᎥ\x07)\x02\x02\u139fᎤ\n\x02\x02\x02ᎠᎡ\x07')
buf.write(')\x02\x02ᎡᎤ\x07)\x02\x02ᎢᎤ\x05Эȗ\x02Ꭳ')
buf.write('\u139f\x03\x02\x02\x02ᎣᎠ\x03\x02\x02\x02ᎣᎢ\x03\x02\x02\x02')
buf.write('ᎤᎧ\x03\x02\x02\x02ᎥᎣ\x03\x02\x02\x02ᎥᎦ\x03')
buf.write('\x02\x02\x02ᎦᎨ\x03\x02\x02\x02ᎧᎥ\x03\x02\x02\x02ᎨᎩ')
buf.write('\x07)\x02\x02Ꭹϒ\x03\x02\x02\x02ᎪᎯ\x05ѕȫ')
buf.write('\x02ᎫᎰ\x05ϗǬ\x02ᎬᎰ\x05ϙǭ')
buf.write('\x02ᎭᎰ\x05ϛǮ\x02ᎮᎰ\x05ϝǯ')
buf.write('\x02ᎯᎫ\x03\x02\x02\x02ᎯᎬ\x03\x02\x02\x02ᎯᎭ')
buf.write('\x03\x02\x02\x02ᎯᎮ\x03\x02\x02\x02ᎰᎱ\x03\x02\x02\x02Ꮁ')
buf.write('Ꮂ\x08Ǫ\x02\x02Ꮂϔ\x03\x02\x02\x02ᎳᎴ\x07)')
buf.write('\x02\x02Ꮄϖ\x03\x02\x02\x02ᎵᎶ\x05ϕǫ\x02Ꮆ')
buf.write('Ꮊ\x07>\x02\x02ᎷᎹ\x0b\x02\x02\x02ᎸᎷ\x03\x02\x02\x02')
buf.write('ᎹᎼ\x03\x02\x02\x02ᎺᎻ\x03\x02\x02\x02ᎺᎸ\x03')
buf.write('\x02\x02\x02ᎻᎽ\x03\x02\x02\x02ᎼᎺ\x03\x02\x02\x02ᎽᎾ')
buf.write('\x07@\x02\x02ᎾᎿ\x05ϕǫ\x02ᎿϘ\x03\x02\x02')
buf.write('\x02ᏀᏁ\x05ϕǫ\x02ᏁᏅ\x07}\x02\x02Ꮒ')
buf.write('Ꮔ\x0b\x02\x02\x02ᏃᏂ\x03\x02\x02\x02ᏄᏇ\x03\x02\x02')
buf.write('\x02ᏅᏆ\x03\x02\x02\x02ᏅᏃ\x03\x02\x02\x02ᏆᏈ')
buf.write('\x03\x02\x02\x02ᏇᏅ\x03\x02\x02\x02ᏈᏉ\x07\x7f\x02\x02Ꮙ')
buf.write('Ꮚ\x05ϕǫ\x02ᏊϚ\x03\x02\x02\x02ᏋᏌ')
buf.write('\x05ϕǫ\x02ᏌᏐ\x07]\x02\x02ᏍᏏ\x0b\x02\x02')
buf.write('\x02ᏎᏍ\x03\x02\x02\x02ᏏᏒ\x03\x02\x02\x02ᏐᏑ')
buf.write('\x03\x02\x02\x02ᏐᏎ\x03\x02\x02\x02ᏑᏓ\x03\x02\x02\x02Ꮢ')
buf.write('Ꮠ\x03\x02\x02\x02ᏓᏔ\x07_\x02\x02ᏔᏕ\x05ϕ')
buf.write('ǫ\x02ᏕϜ\x03\x02\x02\x02ᏖᏗ\x05ϕǫ')
buf.write('\x02ᏗᏛ\x07*\x02\x02ᏘᏚ\x0b\x02\x02\x02ᏙᏘ')
buf.write('\x03\x02\x02\x02ᏚᏝ\x03\x02\x02\x02ᏛᏜ\x03\x02\x02\x02Ꮫ')
buf.write('Ꮩ\x03\x02\x02\x02ᏜᏞ\x03\x02\x02\x02ᏝᏛ\x03\x02\x02\x02')
buf.write('ᏞᏟ\x07+\x02\x02ᏟᏠ\x05ϕǫ\x02Ꮰ')
buf.write('Ϟ\x03\x02\x02\x02ᏡᏢ\n\x06\x02\x02ᏢϠ\x03\x02\x02\x02')
buf.write('ᏣᏧ\x07$\x02\x02ᏤᏨ\n\x07\x02\x02ᏥᏦ\x07')
buf.write('$\x02\x02ᏦᏨ\x07$\x02\x02ᏧᏤ\x03\x02\x02\x02ᏧᏥ')
buf.write('\x03\x02\x02\x02ᏨᏩ\x03\x02\x02\x02ᏩᏧ\x03\x02\x02\x02Ꮹ')
buf.write('Ꮺ\x03\x02\x02\x02ᏪᏫ\x03\x02\x02\x02ᏫᏬ\x07$\x02\x02')
buf.write("ᏬϢ\x03\x02\x02\x02ᏭᏮ\x07'\x02\x02ᏮϤ\x03")
buf.write('\x02\x02\x02ᏯᏰ\x07(\x02\x02ᏰϦ\x03\x02\x02\x02ᏱᏲ')
buf.write('\x07*\x02\x02ᏲϨ\x03\x02\x02\x02ᏳᏴ\x07+\x02\x02ᏴϪ')
buf.write(
'\x03\x02\x02\x02Ᏽ\u13f6\x07,\x02\x02\u13f6\u13f7\x07,\x02\x02\u13f7Ϭ'
)
buf.write('\x03\x02\x02\x02ᏸᏹ\x07,\x02\x02ᏹϮ\x03\x02\x02\x02ᏺ')
buf.write('ᏻ\x07-\x02\x02ᏻϰ\x03\x02\x02\x02ᏼᏽ\x07/\x02\x02ᏽ')
buf.write(
'ϲ\x03\x02\x02\x02\u13fe\u13ff\x07.\x02\x02\u13ffϴ\x03\x02\x02\x02'
)
buf.write('᐀ᐁ\x071\x02\x02ᐁ϶\x03\x02\x02\x02ᐂᐃ')
buf.write('\x07B\x02\x02ᐃϸ\x03\x02\x02\x02ᐄᐅ\x07<\x02\x02ᐅᐆ')
buf.write('\x07?\x02\x02ᐆϺ\x03\x02\x02\x02ᐇᐈ\x07<\x02\x02ᐈᐍ')
buf.write('\x05Сȑ\x02ᐉᐌ\x05Сȑ\x02ᐊᐌ')
buf.write('\t\x08\x02\x02ᐋᐉ\x03\x02\x02\x02ᐋᐊ\x03\x02\x02\x02ᐌ')
buf.write('ᐏ\x03\x02\x02\x02ᐍᐋ\x03\x02\x02\x02ᐍᐎ\x03\x02\x02\x02')
buf.write('ᐎᐖ\x03\x02\x02\x02ᐏᐍ\x03\x02\x02\x02ᐐᐑ\x07')
buf.write('<\x02\x02ᐑᐖ\x05ϡDZ\x02ᐒᐓ\x07<\x02\x02ᐓ')
buf.write('ᐖ\x05ύǧ\x02ᐔᐖ\x05Бȉ\x02ᐕ')
buf.write('ᐇ\x03\x02\x02\x02ᐕᐐ\x03\x02\x02\x02ᐕᐒ\x03\x02\x02\x02')
buf.write('ᐕᐔ\x03\x02\x02\x02ᐖϼ\x03\x02\x02\x02ᐗᐘ\x07')
buf.write('<\x02\x02ᐘϾ\x03\x02\x02\x02ᐙᐚ\x07=\x02\x02ᐚЀ')
buf.write('\x03\x02\x02\x02ᐛᐜ\x07>\x02\x02ᐜᐝ\x07?\x02\x02ᐝЂ')
buf.write('\x03\x02\x02\x02ᐞᐟ\x07>\x02\x02ᐟЄ\x03\x02\x02\x02ᐠ')
buf.write('ᐡ\x07@\x02\x02ᐡᐢ\x07?\x02\x02ᐢІ\x03\x02\x02\x02ᐣ')
buf.write('ᐤ\x07#\x02\x02ᐤᐬ\x07?\x02\x02ᐥᐦ\x07>\x02\x02ᐦ')
buf.write('ᐬ\x07@\x02\x02ᐧᐨ\x07`\x02\x02ᐨᐬ\x07?\x02\x02ᐩ')
buf.write('ᐪ\x07\x80\x02\x02ᐪᐬ\x07?\x02\x02ᐫᐣ\x03\x02')
buf.write('\x02\x02ᐫᐥ\x03\x02\x02\x02ᐫᐧ\x03\x02\x02\x02ᐫᐩ')
buf.write('\x03\x02\x02\x02ᐬЈ\x03\x02\x02\x02ᐭᐮ\x07`\x02\x02ᐮ')
buf.write('Њ\x03\x02\x02\x02ᐯᐰ\x07\x80\x02\x02ᐰЌ\x03\x02')
buf.write('\x02\x02ᐱᐲ\x07#\x02\x02ᐲЎ\x03\x02\x02\x02ᐳᐴ')
buf.write('\x07@\x02\x02ᐴА\x03\x02\x02\x02ᐵᐶ\x07A\x02\x02ᐶВ')
buf.write('\x03\x02\x02\x02ᐷᐸ\x07~\x02\x02ᐸᐹ\x07~\x02\x02ᐹД')
buf.write('\x03\x02\x02\x02ᐺᐻ\x07~\x02\x02ᐻЖ\x03\x02\x02\x02ᐼ')
buf.write('ᐽ\x07?\x02\x02ᐽИ\x03\x02\x02\x02ᐾᐿ\x07]\x02\x02ᐿ')
buf.write('К\x03\x02\x02\x02ᑀᑁ\x07_\x02\x02ᑁМ\x03\x02\x02\x02')
buf.write('ᑂᑃ\x07a\x02\x02ᑃО\x03\x02\x02\x02ᑄᑆ\t')
buf.write('\t\x02\x02ᑅᑄ\x03\x02\x02\x02ᑆᑇ\x03\x02\x02\x02ᑇᑅ')
buf.write('\x03\x02\x02\x02ᑇᑈ\x03\x02\x02\x02ᑈᑉ\x03\x02\x02\x02ᑉ')
buf.write('ᑊ\x08Ȑ\x03\x02ᑊР\x03\x02\x02\x02ᑋᑌ\t\n')
buf.write('\x02\x02ᑌТ\x03\x02\x02\x02ᑍᑏ\x042;\x02ᑎᑍ')
buf.write('\x03\x02\x02\x02ᑏᑐ\x03\x02\x02\x02ᑐᑎ\x03\x02\x02\x02ᑐ')
buf.write('ᑑ\x03\x02\x02\x02ᑑФ\x03\x02\x02\x02ᑒᑔ\x05ύ')
buf.write('ǧ\x02ᑓᑒ\x03\x02\x02\x02ᑔᑗ\x03\x02\x02\x02ᑕ')
buf.write('ᑓ\x03\x02\x02\x02ᑕᑖ\x03\x02\x02\x02ᑖᑙ\x03\x02\x02\x02')
buf.write('ᑗᑕ\x03\x02\x02\x02ᑘᑚ\x070\x02\x02ᑙᑘ')
buf.write('\x03\x02\x02\x02ᑙᑚ\x03\x02\x02\x02ᑚᑜ\x03\x02\x02\x02ᑛ')
buf.write('ᑝ\x05ύǧ\x02ᑜᑛ\x03\x02\x02\x02ᑝᑞ')
buf.write('\x03\x02\x02\x02ᑞᑜ\x03\x02\x02\x02ᑞᑟ\x03\x02\x02\x02ᑟ')
buf.write('Ц\x03\x02\x02\x02ᑠᑡ\x07/\x02\x02ᑡᑢ\x07/\x02\x02ᑢ')
buf.write('ᑦ\x03\x02\x02\x02ᑣᑥ\n\x0b\x02\x02ᑤᑣ\x03\x02\x02')
buf.write('\x02ᑥᑨ\x03\x02\x02\x02ᑦᑤ\x03\x02\x02\x02ᑦᑧ')
buf.write('\x03\x02\x02\x02ᑧᑫ\x03\x02\x02\x02ᑨᑦ\x03\x02\x02\x02ᑩ')
buf.write('ᑬ\x05Эȗ\x02ᑪᑬ\x07\x02\x02\x03ᑫᑩ')
buf.write('\x03\x02\x02\x02ᑫᑪ\x03\x02\x02\x02ᑬᑭ\x03\x02\x02\x02ᑭ')
buf.write('ᑮ\x08Ȕ\x04\x02ᑮШ\x03\x02\x02\x02ᑯᑰ\x071')
buf.write('\x02\x02ᑰᑱ\x07,\x02\x02ᑱᑵ\x03\x02\x02\x02ᑲᑴ')
buf.write('\x0b\x02\x02\x02ᑳᑲ\x03\x02\x02\x02ᑴᑷ\x03\x02\x02\x02ᑵ')
buf.write('ᑶ\x03\x02\x02\x02ᑵᑳ\x03\x02\x02\x02ᑶᑸ\x03\x02\x02\x02')
buf.write('ᑷᑵ\x03\x02\x02\x02ᑸᑹ\x07,\x02\x02ᑹᑺ\x07')
buf.write('1\x02\x02ᑺᑻ\x03\x02\x02\x02ᑻᑼ\x08ȕ\x04\x02ᑼ')
buf.write('Ъ\x03\x02\x02\x02ᑽᑾ\x07r\x02\x02ᑾᑿ\x07t\x02\x02ᑿ')
buf.write('ᒀ\x07q\x02\x02ᒀᒁ\x07o\x02\x02ᒁᒂ\x07r\x02\x02ᒂ')
buf.write('ᒃ\x07v\x02\x02ᒃᒄ\x03\x02\x02\x02ᒄᒈ\x05Я')
buf.write('Ș\x02ᒅᒇ\n\x0b\x02\x02ᒆᒅ\x03\x02\x02\x02ᒇ')
buf.write('ᒊ\x03\x02\x02\x02ᒈᒆ\x03\x02\x02\x02ᒈᒉ\x03\x02\x02\x02')
buf.write('ᒉᒍ\x03\x02\x02\x02ᒊᒈ\x03\x02\x02\x02ᒋᒎ\x05')
buf.write('Эȗ\x02ᒌᒎ\x07\x02\x02\x03ᒍᒋ\x03\x02\x02\x02')
buf.write('ᒍᒌ\x03\x02\x02\x02ᒎЬ\x03\x02\x02\x02ᒏᒑ\x07')
buf.write('\x0f\x02\x02ᒐᒏ\x03\x02\x02\x02ᒐᒑ\x03\x02\x02\x02ᒑ')
buf.write('ᒒ\x03\x02\x02\x02ᒒᒓ\x07\x0c\x02\x02ᒓЮ\x03\x02\x02\x02')
buf.write('ᒔᒕ\t\x0c\x02\x02ᒕа\x03\x02\x02\x02ᒖᒛ\x05')
buf.write('Сȑ\x02ᒗᒚ\x05Сȑ\x02ᒘᒚ')
buf.write('\t\r\x02\x02ᒙᒗ\x03\x02\x02\x02ᒙᒘ\x03\x02\x02\x02ᒚ')
buf.write('ᒝ\x03\x02\x02\x02ᒛᒙ\x03\x02\x02\x02ᒛᒜ\x03\x02\x02\x02')
buf.write('ᒜв\x03\x02\x02\x02ᒝᒛ\x03\x02\x02\x02ᒞᒟ\x07')
buf.write('B\x02\x02ᒟᒠ\x07#\x02\x02ᒠᒡ\x03\x02\x02\x02ᒡᒢ')
buf.write('\x08Ț\x04\x02ᒢд\x03\x02\x02\x02ᒣᒤ\t\x0e\x02\x02')
buf.write('ᒤж\x03\x02\x02\x02ᒥᒦ\t\x0f\x02\x02ᒦи')
buf.write('\x03\x02\x02\x02ᒧᒨ\t\x10\x02\x02ᒨк\x03\x02\x02\x02ᒩ')
buf.write('ᒪ\t\x11\x02\x02ᒪм\x03\x02\x02\x02ᒫᒬ\t\x04\x02')
buf.write('\x02ᒬо\x03\x02\x02\x02ᒭᒮ\t\x12\x02\x02ᒮр')
buf.write('\x03\x02\x02\x02ᒯᒰ\t\x13\x02\x02ᒰт\x03\x02\x02\x02ᒱ')
buf.write('ᒲ\t\x14\x02\x02ᒲф\x03\x02\x02\x02ᒳᒴ\t\x15\x02')
buf.write('\x02ᒴц\x03\x02\x02\x02ᒵᒶ\t\x16\x02\x02ᒶш')
buf.write('\x03\x02\x02\x02ᒷᒸ\t\x17\x02\x02ᒸъ\x03\x02\x02\x02ᒹ')
buf.write('ᒺ\t\x18\x02\x02ᒺь\x03\x02\x02\x02ᒻᒼ\t\x19\x02')
buf.write('\x02ᒼю\x03\x02\x02\x02ᒽᒾ\t\x1a\x02\x02ᒾѐ')
buf.write('\x03\x02\x02\x02ᒿᓀ\t\x1b\x02\x02ᓀђ\x03\x02\x02\x02ᓁ')
buf.write('ᓂ\t\x1c\x02\x02ᓂє\x03\x02\x02\x02ᓃᓄ\t\x1d\x02')
buf.write('\x02ᓄі\x03\x02\x02\x02ᓅᓆ\t\x1e\x02\x02ᓆј')
buf.write('\x03\x02\x02\x02ᓇᓈ\t\x1f\x02\x02ᓈњ\x03\x02\x02\x02ᓉ')
buf.write('ᓊ\t \x02\x02ᓊќ\x03\x02\x02\x02ᓋᓌ\t!\x02\x02ᓌ')
buf.write('ў\x03\x02\x02\x02ᓍᓎ\t"\x02\x02ᓎѠ\x03\x02\x02\x02')
buf.write('ᓏᓐ\t#\x02\x02ᓐѢ\x03\x02\x02\x02ᓑᓒ\t')
buf.write('$\x02\x02ᓒѤ\x03\x02\x02\x02ᓓᓔ\t%\x02\x02ᓔѦ')
buf.write("\x03\x02\x02\x02ᓕᓖ\t&\x02\x02ᓖѨ\x03\x02\x02\x02'\x02፧")
buf.write('፩፳፹ᎀᎆ᎒᎖᎘\u139c')
buf.write('ᎣᎥᎯᎺᏅᏐᏛᏧᏩ')
buf.write('ᐋᐍᐕᐫᑇᑐᑕᑙᑞ')
buf.write('ᑦᑫᑵᒈᒍᒐᒙᒛ\x05\tǪ')
buf.write('\x02\x08\x02\x02\x02\x03\x02')
return buf.getvalue()
class PlSqlLexer(Lexer):
    """ANTLR-generated lexer for Oracle PL/SQL (grammar file: PlSql.g4).

    NOTE(review): this class is machine-generated by the ANTLR tool
    (runtime version 4.7.2, see ``checkVersion`` below). Do not edit by
    hand — regenerate from the ``PlSql.g4`` grammar instead.
    """

    # Lexer ATN (augmented transition network) rebuilt from the serialized
    # form emitted by the ANTLR code generator (see serializedATN()).
    atn = ATNDeserializer().deserialize(serializedATN())

    # One DFA cache per ATN decision state, used by adaptive prediction.
    decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)]

    # Token type constants. The numeric values are fixed by the grammar and
    # must match the indices encoded in the serialized ATN above.
    T__0 = 1
    A_LETTER = 2
    ADD = 3
    AFTER = 4
    AGENT = 5
    AGGREGATE = 6
    ALL = 7
    ALTER = 8
    ANALYZE = 9
    AND = 10
    ANY = 11
    ARRAY = 12
    AS = 13
    ASSUME = 14
    ASSERT = 15
    ASC = 16
    ASSOCIATE = 17
    AT = 18
    ATTRIBUTE = 19
    AUDIT = 20
    AUTHID = 21
    AUTO = 22
    AUTOMATIC = 23
    AUTONOMOUS_TRANSACTION = 24
    BATCH = 25
    BEFORE = 26
    BEGIN = 27
    BETWEEN = 28
    BFILE = 29
    BINARY_DOUBLE = 30
    BINARY_FLOAT = 31
    BINARY_INTEGER = 32
    BLOB = 33
    BLOCK = 34
    BODY = 35
    BOOLEAN = 36
    BOTH = 37
    BREADTH = 38
    BULK = 39
    BY = 40
    BYTE = 41
    C_LETTER = 42
    CACHE = 43
    CALL = 44
    CANONICAL = 45
    CASCADE = 46
    CASE = 47
    CAST = 48
    CHAR = 49
    CHAR_CS = 50
    CHARACTER = 51
    CHECK = 52
    CHR = 53
    CLOB = 54
    CLOSE = 55
    CLUSTER = 56
    COLLECT = 57
    COLUMNS = 58
    COMMENT = 59
    COMMIT = 60
    COMMITTED = 61
    COMPATIBILITY = 62
    COMPILE = 63
    COMPOUND = 64
    CONNECT = 65
    CONNECT_BY_ROOT = 66
    CONSTANT = 67
    CONSTRAINT = 68
    CONSTRAINTS = 69
    CONSTRUCTOR = 70
    CONTENT = 71
    CONTEXT = 72
    CONTINUE = 73
    CONVERT = 74
    CORRUPT_XID = 75
    CORRUPT_XID_ALL = 76
    COST = 77
    COUNT = 78
    CREATE = 79
    CROSS = 80
    CUBE = 81
    CURRENT = 82
    CURRENT_USER = 83
    CURSOR = 84
    CUSTOMDATUM = 85
    CYCLE = 86
    DATA = 87
    DATABASE = 88
    DATE = 89
    DAY = 90
    DB_ROLE_CHANGE = 91
    DBTIMEZONE = 92
    DDL = 93
    DEBUG = 94
    DEC = 95
    DECIMAL = 96
    DECLARE = 97
    DECOMPOSE = 98
    DECREMENT = 99
    DEFAULT = 100
    DEFAULTS = 101
    DEFERRED = 102
    DEFINER = 103
    DELETE = 104
    DEPTH = 105
    DESC = 106
    DETERMINISTIC = 107
    DIMENSION = 108
    DISABLE = 109
    DISASSOCIATE = 110
    DISTINCT = 111
    DOCUMENT = 112
    DOUBLE = 113
    DROP = 114
    DSINTERVAL_UNCONSTRAINED = 115
    EACH = 116
    ELEMENT = 117
    ELSE = 118
    ELSIF = 119
    EMPTY = 120
    ENABLE = 121
    ENCODING = 122
    END = 123
    ENTITYESCAPING = 124
    ERR = 125
    ERRORS = 126
    ESCAPE = 127
    EVALNAME = 128
    EXCEPT = 129
    EXCEPTION = 130
    EXCEPTION_INIT = 131
    EXCEPTIONS = 132
    EXCLUDE = 133
    EXCLUSIVE = 134
    EXECUTE = 135
    EXISTS = 136
    EXIT = 137
    EXPLAIN = 138
    EXTERNAL = 139
    EXTRACT = 140
    FAILURE = 141
    FALSE = 142
    FETCH = 143
    FINAL = 144
    FIRST = 145
    FIRST_VALUE = 146
    FLOAT = 147
    FOLLOWING = 148
    FOLLOWS = 149
    FOR = 150
    FORALL = 151
    FORCE = 152
    FROM = 153
    FULL = 154
    FUNCTION = 155
    GOTO = 156
    GRANT = 157
    GROUP = 158
    GROUPING = 159
    HASH = 160
    HAVING = 161
    HIDE = 162
    HOUR = 163
    IF = 164
    IGNORE = 165
    IMMEDIATE = 166
    IN = 167
    INCLUDE = 168
    INCLUDING = 169
    INCREMENT = 170
    INDENT = 171
    INDEX = 172
    INDEXED = 173
    INDICATOR = 174
    INDICES = 175
    INFINITE = 176
    INLINE = 177
    INNER = 178
    INOUT = 179
    INSERT = 180
    INSTANTIABLE = 181
    INSTEAD = 182
    INT = 183
    INTEGER = 184
    INTERSECT = 185
    INTERVAL = 186
    INTO = 187
    INVALIDATE = 188
    IS = 189
    ISOLATION = 190
    ITERATE = 191
    JAVA = 192
    JOIN = 193
    KEEP = 194
    LANGUAGE = 195
    LAST = 196
    LAST_VALUE = 197
    LEADING = 198
    LEFT = 199
    LEVEL = 200
    LIBRARY = 201
    LIKE = 202
    LIKE2 = 203
    LIKE4 = 204
    LIKEC = 205
    LIMIT = 206
    LOCAL = 207
    LOCK = 208
    LOCKED = 209
    LOG = 210
    LOGOFF = 211
    LOGON = 212
    LONG = 213
    LOOP = 214
    MAIN = 215
    MAP = 216
    MATCHED = 217
    MAXVALUE = 218
    MEASURES = 219
    MEMBER = 220
    MERGE = 221
    MINUS = 222
    MINUTE = 223
    MINVALUE = 224
    MLSLABEL = 225
    MODE = 226
    MODEL = 227
    MODIFY = 228
    MONTH = 229
    MULTISET = 230
    NAME = 231
    NAN = 232
    NATURAL = 233
    NATURALN = 234
    NAV = 235
    NCHAR = 236
    NCHAR_CS = 237
    NCLOB = 238
    NESTED = 239
    NEW = 240
    NO = 241
    NOAUDIT = 242
    NOCACHE = 243
    NOCOPY = 244
    NOCYCLE = 245
    NOENTITYESCAPING = 246
    NOMAXVALUE = 247
    NOMINVALUE = 248
    NONE = 249
    NOORDER = 250
    NOSCHEMACHECK = 251
    NOT = 252
    NOWAIT = 253
    NULL = 254
    NULLS = 255
    NUMBER = 256
    NUMERIC = 257
    NVARCHAR2 = 258
    OBJECT = 259
    OF = 260
    OFF = 261
    OID = 262
    OLD = 263
    ON = 264
    ONLY = 265
    OPEN = 266
    OPTION = 267
    OR = 268
    ORADATA = 269
    ORDER = 270
    ORDINALITY = 271
    OSERROR = 272
    OUT = 273
    OUTER = 274
    OVER = 275
    OVERRIDING = 276
    PACKAGE = 277
    PARALLEL_ENABLE = 278
    PARAMETERS = 279
    PARENT = 280
    PARTITION = 281
    PASSING = 282
    PATH = 283
    PERCENT_ROWTYPE = 284
    PERCENT_TYPE = 285
    PIPELINED = 286
    PIVOT = 287
    PLAN = 288
    PLS_INTEGER = 289
    POSITIVE = 290
    POSITIVEN = 291
    PRAGMA = 292
    PRECEDING = 293
    PRECISION = 294
    PRESENT = 295
    PRIOR = 296
    PROCEDURE = 297
    RAISE = 298
    RANGE = 299
    RAW = 300
    READ = 301
    REAL = 302
    RECORD = 303
    REF = 304
    REFERENCE = 305
    REFERENCING = 306
    REJECT = 307
    RELIES_ON = 308
    RENAME = 309
    REPLACE = 310
    RESPECT = 311
    RESTRICT_REFERENCES = 312
    RESULT = 313
    RESULT_CACHE = 314
    RETURN = 315
    RETURNING = 316
    REUSE = 317
    REVERSE = 318
    REVOKE = 319
    RIGHT = 320
    ROLLBACK = 321
    ROLLUP = 322
    ROW = 323
    ROWID = 324
    ROWS = 325
    RULES = 326
    SAMPLE = 327
    SAVE = 328
    SAVEPOINT = 329
    SCHEMA = 330
    SCHEMACHECK = 331
    SCN = 332
    SEARCH = 333
    SECOND = 334
    SEED = 335
    SEGMENT = 336
    SELECT = 337
    SELF = 338
    SEQUENCE = 339
    SEQUENTIAL = 340
    SERIALIZABLE = 341
    SERIALLY_REUSABLE = 342
    SERVERERROR = 343
    SESSIONTIMEZONE = 344
    SET = 345
    SETS = 346
    SETTINGS = 347
    SHARE = 348
    SHOW = 349
    SHUTDOWN = 350
    SIBLINGS = 351
    SIGNTYPE = 352
    SIMPLE_INTEGER = 353
    SINGLE = 354
    SIZE = 355
    SKIP_ = 356
    SMALLINT = 357
    SNAPSHOT = 358
    SOME = 359
    SPECIFICATION = 360
    SQLDATA = 361
    SQLERROR = 362
    STANDALONE = 363
    START = 364
    STARTUP = 365
    STATEMENT = 366
    STATEMENT_ID = 367
    STATIC = 368
    STATISTICS = 369
    STRING = 370
    SUBMULTISET = 371
    SUBPARTITION = 372
    SUBSTITUTABLE = 373
    SUBTYPE = 374
    SUCCESS = 375
    SUSPEND = 376
    TABLE = 377
    THE = 378
    THEN = 379
    TIME = 380
    TIMESTAMP = 381
    TIMESTAMP_LTZ_UNCONSTRAINED = 382
    TIMESTAMP_TZ_UNCONSTRAINED = 383
    TIMESTAMP_UNCONSTRAINED = 384
    TIMEZONE_ABBR = 385
    TIMEZONE_HOUR = 386
    TIMEZONE_MINUTE = 387
    TIMEZONE_REGION = 388
    TO = 389
    TRAILING = 390
    TRANSACTION = 391
    TRANSLATE = 392
    TREAT = 393
    TRIGGER = 394
    TRIM = 395
    TRUE = 396
    TRUNCATE = 397
    TYPE = 398
    UNBOUNDED = 399
    UNDER = 400
    UNION = 401
    UNIQUE = 402
    UNLIMITED = 403
    UNPIVOT = 404
    UNTIL = 405
    UPDATE = 406
    UPDATED = 407
    UPSERT = 408
    UROWID = 409
    USE = 410
    USING = 411
    VALIDATE = 412
    VALUE = 413
    VALUES = 414
    VARCHAR = 415
    VARCHAR2 = 416
    VARIABLE = 417
    VARRAY = 418
    VARYING = 419
    VERSION = 420
    VERSIONS = 421
    WAIT = 422
    WARNING = 423
    WELLFORMED = 424
    WHEN = 425
    WHENEVER = 426
    WHERE = 427
    WHILE = 428
    WITH = 429
    WITHIN = 430
    WORK = 431
    WRITE = 432
    XML = 433
    XMLAGG = 434
    XMLATTRIBUTES = 435
    XMLCAST = 436
    XMLCOLATTVAL = 437
    XMLELEMENT = 438
    XMLEXISTS = 439
    XMLFOREST = 440
    XMLNAMESPACES = 441
    XMLPARSE = 442
    XMLPI = 443
    XMLQUERY = 444
    XMLROOT = 445
    XMLSERIALIZE = 446
    XMLTABLE = 447
    YEAR = 448
    YES = 449
    YMINTERVAL_UNCONSTRAINED = 450
    ZONE = 451
    PREDICTION = 452
    PREDICTION_BOUNDS = 453
    PREDICTION_COST = 454
    PREDICTION_DETAILS = 455
    PREDICTION_PROBABILITY = 456
    PREDICTION_SET = 457
    CUME_DIST = 458
    DENSE_RANK = 459
    LISTAGG = 460
    PERCENT_RANK = 461
    PERCENTILE_CONT = 462
    PERCENTILE_DISC = 463
    RANK = 464
    AVG = 465
    CORR = 466
    LAG = 467
    LEAD = 468
    MAX = 469
    MEDIAN = 470
    MIN = 471
    NTILE = 472
    RATIO_TO_REPORT = 473
    ROW_NUMBER = 474
    SUM = 475
    VARIANCE = 476
    REGR_ = 477
    STDDEV = 478
    VAR_ = 479
    COVAR_ = 480
    NATIONAL_CHAR_STRING_LIT = 481
    BIT_STRING_LIT = 482
    HEX_STRING_LIT = 483
    DOUBLE_PERIOD = 484
    PERIOD = 485
    UNSIGNED_INTEGER = 486
    APPROXIMATE_NUM_LIT = 487
    CHAR_STRING = 488
    DELIMITED_ID = 489
    PERCENT = 490
    AMPERSAND = 491
    LEFT_PAREN = 492
    RIGHT_PAREN = 493
    DOUBLE_ASTERISK = 494
    ASTERISK = 495
    PLUS_SIGN = 496
    MINUS_SIGN = 497
    COMMA = 498
    SOLIDUS = 499
    AT_SIGN = 500
    ASSIGN_OP = 501
    BINDVAR = 502
    COLON = 503
    SEMICOLON = 504
    LESS_THAN_OR_EQUALS_OP = 505
    LESS_THAN_OP = 506
    GREATER_THAN_OR_EQUALS_OP = 507
    NOT_EQUAL_OP = 508
    CARRET_OPERATOR_PART = 509
    TILDE_OPERATOR_PART = 510
    EXCLAMATION_OPERATOR_PART = 511
    GREATER_THAN_OP = 512
    CONCATENATION_OP = 513
    VERTICAL_BAR = 514
    EQUALS_OP = 515
    LEFT_BRACKET = 516
    RIGHT_BRACKET = 517
    INTRODUCER = 518
    SPACES = 519
    SINGLE_LINE_COMMENT = 520
    MULTI_LINE_COMMENT = 521
    PROMPT = 522
    REGULAR_ID = 523
    ZV = 524

    # Token channel names; SPACES/comments are routed to HIDDEN (see the
    # channel actions encoded in the serialized ATN).
    channelNames = [u'DEFAULT_TOKEN_CHANNEL', u'HIDDEN']

    # This lexer defines no extra modes.
    modeNames = ['DEFAULT_MODE']

    # Display names for tokens with a fixed literal spelling (operators and
    # punctuation), indexed by token type.
    literalNames = ['<INVALID>', "'..'", "'.'", "'%'", "'&'", "'('", "')'",
        "'**'", "'*'", "'+'", "'-'", "','", "'/'", "'@'", "':='", "':'",
        "';'", "'<='", "'<'", "'>='", "'^'", "'~'", "'!'", "'>'", "'||'",
        "'|'", "'='", "'['", "']'", "'_'", "'@!'"]

    # Symbolic token names, indexed by token type (position i corresponds to
    # the token constant with value i above).
    symbolicNames = ['<INVALID>', 'A_LETTER', 'ADD', 'AFTER', 'AGENT',
        'AGGREGATE', 'ALL', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS',
        'ASSUME', 'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT',
        'AUTHID', 'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH',
        'BEFORE', 'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE',
        'BINARY_FLOAT', 'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY',
        'BOOLEAN', 'BOTH', 'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER',
        'CACHE', 'CALL', 'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR',
        'CHAR_CS', 'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER',
        'COLLECT', 'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED',
        'COMPATIBILITY', 'COMPILE', 'COMPOUND', 'CONNECT',
        'CONNECT_BY_ROOT', 'CONSTANT', 'CONSTRAINT', 'CONSTRAINTS',
        'CONSTRUCTOR', 'CONTENT', 'CONTEXT', 'CONTINUE', 'CONVERT',
        'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST', 'COUNT', 'CREATE',
        'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER', 'CURSOR', 'CUSTOMDATUM',
        'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY', 'DB_ROLE_CHANGE',
        'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL', 'DECLARE',
        'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS', 'DEFERRED',
        'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC', 'DIMENSION',
        'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT', 'DOUBLE', 'DROP',
        'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT', 'ELSE', 'ELSIF',
        'EMPTY', 'ENABLE', 'ENCODING', 'END', 'ENTITYESCAPING', 'ERR',
        'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT', 'EXCEPTION',
        'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE', 'EXECUTE',
        'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FAILURE',
        'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE', 'FLOAT',
        'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM', 'FULL',
        'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH', 'HAVING',
        'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN', 'INCLUDE',
        'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED', 'INDICATOR',
        'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT', 'INSERT',
        'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',
        'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',
        'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',
        'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',
        'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',
        'LONG', 'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',
        'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',
        'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',
        'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',
        'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',
        'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',
        'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',
        'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',
        'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',
        'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',
        'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',
        'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',
        'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',
        'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',
        'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',
        'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',
        'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',
        'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',
        'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',
        'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',
        'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',
        'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',
        'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',
        'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',
        'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',
        'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',
        'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 'STRING',
        'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',
        'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',
        'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',
        'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 'TIMEZONE_HOUR',
        'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',
        'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',
        'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',
        'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',
        'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',
        'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',
        'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',
        'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',
        'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',
        'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',
        'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',
        'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',
        'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',
        'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',
        'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',
        'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',
        'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',
        'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',
        'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',
        'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',
        'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'DELIMITED_ID', 'PERCENT',
        'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN', 'DOUBLE_ASTERISK',
        'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA', 'SOLIDUS',
        'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',
        'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',
        'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',
        'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',
        'GREATER_THAN_OP', 'CONCATENATION_OP', 'VERTICAL_BAR', 'EQUALS_OP',
        'LEFT_BRACKET', 'RIGHT_BRACKET', 'INTRODUCER', 'SPACES',
        'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'REGULAR_ID',
        'ZV']

    # Grammar rule names in ATN order. This list is longer than
    # symbolicNames because it also includes fragment rules (e.g. QUOTE,
    # SIMPLE_LETTER, and the per-letter A..Z fragments) that never become
    # tokens themselves.
    ruleNames = ['T__0', 'A_LETTER', 'ADD', 'AFTER', 'AGENT', 'AGGREGATE',
        'ALL', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS', 'ASSUME',
        'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT', 'AUTHID',
        'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH', 'BEFORE',
        'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE', 'BINARY_FLOAT',
        'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY', 'BOOLEAN', 'BOTH',
        'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER', 'CACHE', 'CALL',
        'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR', 'CHAR_CS',
        'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER', 'COLLECT',
        'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED', 'COMPATIBILITY',
        'COMPILE', 'COMPOUND', 'CONNECT', 'CONNECT_BY_ROOT', 'CONSTANT',
        'CONSTRAINT', 'CONSTRAINTS', 'CONSTRUCTOR', 'CONTENT', 'CONTEXT',
        'CONTINUE', 'CONVERT', 'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST',
        'COUNT', 'CREATE', 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER',
        'CURSOR', 'CUSTOMDATUM', 'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY',
        'DB_ROLE_CHANGE', 'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL',
        'DECLARE', 'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS',
        'DEFERRED', 'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC',
        'DIMENSION', 'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT',
        'DOUBLE', 'DROP', 'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT',
        'ELSE', 'ELSIF', 'EMPTY', 'ENABLE', 'ENCODING', 'END',
        'ENTITYESCAPING', 'ERR', 'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT',
        'EXCEPTION', 'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE',
        'EXECUTE', 'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT',
        'FAILURE', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE',
        'FLOAT', 'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM',
        'FULL', 'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH',
        'HAVING', 'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN',
        'INCLUDE', 'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED',
        'INDICATOR', 'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT',
        'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',
        'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',
        'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',
        'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',
        'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',
        'LONG', 'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',
        'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',
        'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',
        'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',
        'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',
        'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',
        'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',
        'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',
        'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',
        'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',
        'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',
        'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',
        'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',
        'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',
        'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',
        'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',
        'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',
        'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',
        'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',
        'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',
        'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',
        'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',
        'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',
        'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',
        'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',
        'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',
        'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 'STRING',
        'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',
        'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',
        'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',
        'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 'TIMEZONE_HOUR',
        'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',
        'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',
        'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',
        'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',
        'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',
        'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',
        'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',
        'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',
        'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',
        'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',
        'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',
        'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',
        'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',
        'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',
        'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',
        'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',
        'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',
        'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',
        'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',
        'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',
        'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'CHAR_STRING_PERL', 'QUOTE',
        'QS_ANGLE', 'QS_BRACE', 'QS_BRACK', 'QS_PAREN', 'QS_OTHER_CH',
        'DELIMITED_ID', 'PERCENT', 'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN',
        'DOUBLE_ASTERISK', 'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA',
        'SOLIDUS', 'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',
        'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',
        'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',
        'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',
        'GREATER_THAN_OP', 'QUESTION_MARK', 'CONCATENATION_OP',
        'VERTICAL_BAR', 'EQUALS_OP', 'LEFT_BRACKET', 'RIGHT_BRACKET',
        'INTRODUCER', 'SPACES', 'SIMPLE_LETTER',
        'UNSIGNED_INTEGER_FRAGMENT', 'FLOAT_FRAGMENT',
        'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'NEWLINE',
        'SPACE', 'REGULAR_ID', 'ZV', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
        'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U',
        'V', 'W', 'X', 'Y', 'Z']

    # Source grammar this lexer was generated from.
    grammarFileName = 'PlSql.g4'

    def __init__(self, input=None, output: TextIO=sys.stdout):
        """Create the lexer.

        :param input: optional antlr4 InputStream holding the text to lex.
        :param output: stream used by the runtime for error output.
        """
        super().__init__(input, output)
        # Fail fast if the installed antlr4 runtime does not match the
        # tool version that generated this file.
        self.checkVersion('4.7.2')
        self._interp = LexerATNSimulator(self, self.atn, self.
            decisionsToDFA, PredictionContextCache())
        self._actions = None
        self._predicates = None
<|reserved_special_token_1|>
from antlr4 import *
from io import StringIO
# Fix: 'from typing.io import TextIO' — the typing.io pseudo-module was
# deprecated since Python 3.8 and removed in Python 3.13; TextIO lives in
# typing itself.
from typing import TextIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write('\x03悋Ꜫ脳맭䅼㯧瞆奤\x02Ȏ')
buf.write(
'ᓗ\x08\x01\x04\x02\t\x02\x04\x03\t\x03\x04\x04\t\x04\x04\x05\t\x05\x04\x06\t\x06\x04\x07'
)
buf.write(
'\t\x07\x04\x08\t\x08\x04\t\t\t\x04\n\t\n\x04\x0b\t\x0b\x04\x0c\t\x0c\x04\r\t\r'
)
buf.write(
'\x04\x0e\t\x0e\x04\x0f\t\x0f\x04\x10\t\x10\x04\x11\t\x11\x04\x12\t\x12\x04\x13'
)
buf.write(
'\t\x13\x04\x14\t\x14\x04\x15\t\x15\x04\x16\t\x16\x04\x17\t\x17\x04\x18\t\x18'
)
buf.write(
'\x04\x19\t\x19\x04\x1a\t\x1a\x04\x1b\t\x1b\x04\x1c\t\x1c\x04\x1d\t\x1d\x04\x1e'
)
buf.write(
'\t\x1e\x04\x1f\t\x1f\x04 \t \x04!\t!\x04"\t"\x04#\t#\x04$\t$\x04%\t%'
)
buf.write(
"\x04&\t&\x04'\t'\x04(\t(\x04)\t)\x04*\t*\x04+\t+\x04,\t,\x04-\t-\x04."
)
buf.write('\t.\x04/\t/\x040\t0\x041\t1\x042\t2\x043\t3\x044')
buf.write('\t4\x045\t5\x046\t6\x047\t7\x048\t8\x049\t9\x04:\t:')
buf.write(
'\x04;\t;\x04<\t<\x04=\t=\x04>\t>\x04?\t?\x04@\t@\x04A\tA\x04B\tB\x04C\t'
)
buf.write(
'C\x04D\tD\x04E\tE\x04F\tF\x04G\tG\x04H\tH\x04I\tI\x04J\tJ\x04K\tK\x04L\t'
)
buf.write(
'L\x04M\tM\x04N\tN\x04O\tO\x04P\tP\x04Q\tQ\x04R\tR\x04S\tS\x04T\tT\x04U\t'
)
buf.write(
'U\x04V\tV\x04W\tW\x04X\tX\x04Y\tY\x04Z\tZ\x04[\t[\x04\\\t\\\x04]\t]\x04'
)
buf.write(
'^\t^\x04_\t_\x04`\t`\x04a\ta\x04b\tb\x04c\tc\x04d\td\x04e\te\x04f\tf\x04'
)
buf.write(
'g\tg\x04h\th\x04i\ti\x04j\tj\x04k\tk\x04l\tl\x04m\tm\x04n\tn\x04o\to\x04'
)
buf.write(
'p\tp\x04q\tq\x04r\tr\x04s\ts\x04t\tt\x04u\tu\x04v\tv\x04w\tw\x04x\tx\x04'
)
buf.write(
'y\ty\x04z\tz\x04{\t{\x04|\t|\x04}\t}\x04~\t~\x04\x7f\t\x7f\x04\x80'
)
buf.write('\t\x80\x04\x81\t\x81\x04\x82\t\x82\x04\x83\t\x83')
buf.write('\x04\x84\t\x84\x04\x85\t\x85\x04\x86\t\x86\x04\x87')
buf.write('\t\x87\x04\x88\t\x88\x04\x89\t\x89\x04\x8a\t\x8a')
buf.write('\x04\x8b\t\x8b\x04\x8c\t\x8c\x04\x8d\t\x8d\x04\x8e')
buf.write('\t\x8e\x04\x8f\t\x8f\x04\x90\t\x90\x04\x91\t\x91')
buf.write('\x04\x92\t\x92\x04\x93\t\x93\x04\x94\t\x94\x04\x95')
buf.write('\t\x95\x04\x96\t\x96\x04\x97\t\x97\x04\x98\t\x98')
buf.write('\x04\x99\t\x99\x04\x9a\t\x9a\x04\x9b\t\x9b\x04\x9c')
buf.write('\t\x9c\x04\x9d\t\x9d\x04\x9e\t\x9e\x04\x9f\t\x9f')
buf.write('\x04\xa0\t\xa0\x04¡\t¡\x04¢\t¢\x04£')
buf.write('\t£\x04¤\t¤\x04¥\t¥\x04¦\t¦')
buf.write('\x04§\t§\x04¨\t¨\x04©\t©\x04ª')
buf.write('\tª\x04«\t«\x04¬\t¬\x04\xad\t\xad')
buf.write('\x04®\t®\x04¯\t¯\x04°\t°\x04±')
buf.write('\t±\x04²\t²\x04³\t³\x04´\t´')
buf.write('\x04µ\tµ\x04¶\t¶\x04·\t·\x04¸')
buf.write('\t¸\x04¹\t¹\x04º\tº\x04»\t»')
buf.write('\x04¼\t¼\x04½\t½\x04¾\t¾\x04¿')
buf.write('\t¿\x04À\tÀ\x04Á\tÁ\x04Â\tÂ')
buf.write('\x04Ã\tÃ\x04Ä\tÄ\x04Å\tÅ\x04Æ')
buf.write('\tÆ\x04Ç\tÇ\x04È\tÈ\x04É\tÉ')
buf.write('\x04Ê\tÊ\x04Ë\tË\x04Ì\tÌ\x04Í')
buf.write('\tÍ\x04Î\tÎ\x04Ï\tÏ\x04Ð\tÐ')
buf.write('\x04Ñ\tÑ\x04Ò\tÒ\x04Ó\tÓ\x04Ô')
buf.write('\tÔ\x04Õ\tÕ\x04Ö\tÖ\x04×\t×')
buf.write('\x04Ø\tØ\x04Ù\tÙ\x04Ú\tÚ\x04Û')
buf.write('\tÛ\x04Ü\tÜ\x04Ý\tÝ\x04Þ\tÞ')
buf.write('\x04ß\tß\x04à\tà\x04á\tá\x04â')
buf.write('\tâ\x04ã\tã\x04ä\tä\x04å\tå')
buf.write('\x04æ\tæ\x04ç\tç\x04è\tè\x04é')
buf.write('\té\x04ê\tê\x04ë\të\x04ì\tì')
buf.write('\x04í\tí\x04î\tî\x04ï\tï\x04ð')
buf.write('\tð\x04ñ\tñ\x04ò\tò\x04ó\tó')
buf.write('\x04ô\tô\x04õ\tõ\x04ö\tö\x04÷')
buf.write('\t÷\x04ø\tø\x04ù\tù\x04ú\tú')
buf.write('\x04û\tû\x04ü\tü\x04ý\tý\x04þ')
buf.write('\tþ\x04ÿ\tÿ\x04Ā\tĀ\x04ā\tā')
buf.write('\x04Ă\tĂ\x04ă\tă\x04Ą\tĄ\x04ą')
buf.write('\tą\x04Ć\tĆ\x04ć\tć\x04Ĉ\tĈ')
buf.write('\x04ĉ\tĉ\x04Ċ\tĊ\x04ċ\tċ\x04Č')
buf.write('\tČ\x04č\tč\x04Ď\tĎ\x04ď\tď')
buf.write('\x04Đ\tĐ\x04đ\tđ\x04Ē\tĒ\x04ē')
buf.write('\tē\x04Ĕ\tĔ\x04ĕ\tĕ\x04Ė\tĖ')
buf.write('\x04ė\tė\x04Ę\tĘ\x04ę\tę\x04Ě')
buf.write('\tĚ\x04ě\tě\x04Ĝ\tĜ\x04ĝ\tĝ')
buf.write('\x04Ğ\tĞ\x04ğ\tğ\x04Ġ\tĠ\x04ġ')
buf.write('\tġ\x04Ģ\tĢ\x04ģ\tģ\x04Ĥ\tĤ')
buf.write('\x04ĥ\tĥ\x04Ħ\tĦ\x04ħ\tħ\x04Ĩ')
buf.write('\tĨ\x04ĩ\tĩ\x04Ī\tĪ\x04ī\tī')
buf.write('\x04Ĭ\tĬ\x04ĭ\tĭ\x04Į\tĮ\x04į')
buf.write('\tį\x04İ\tİ\x04ı\tı\x04IJ\tIJ')
buf.write('\x04ij\tij\x04Ĵ\tĴ\x04ĵ\tĵ\x04Ķ')
buf.write('\tĶ\x04ķ\tķ\x04ĸ\tĸ\x04Ĺ\tĹ')
buf.write('\x04ĺ\tĺ\x04Ļ\tĻ\x04ļ\tļ\x04Ľ')
buf.write('\tĽ\x04ľ\tľ\x04Ŀ\tĿ\x04ŀ\tŀ')
buf.write('\x04Ł\tŁ\x04ł\tł\x04Ń\tŃ\x04ń')
buf.write('\tń\x04Ņ\tŅ\x04ņ\tņ\x04Ň\tŇ')
buf.write('\x04ň\tň\x04ʼn\tʼn\x04Ŋ\tŊ\x04ŋ')
buf.write('\tŋ\x04Ō\tŌ\x04ō\tō\x04Ŏ\tŎ')
buf.write('\x04ŏ\tŏ\x04Ő\tŐ\x04ő\tő\x04Œ')
buf.write('\tŒ\x04œ\tœ\x04Ŕ\tŔ\x04ŕ\tŕ')
buf.write('\x04Ŗ\tŖ\x04ŗ\tŗ\x04Ř\tŘ\x04ř')
buf.write('\tř\x04Ś\tŚ\x04ś\tś\x04Ŝ\tŜ')
buf.write('\x04ŝ\tŝ\x04Ş\tŞ\x04ş\tş\x04Š')
buf.write('\tŠ\x04š\tš\x04Ţ\tŢ\x04ţ\tţ')
buf.write('\x04Ť\tŤ\x04ť\tť\x04Ŧ\tŦ\x04ŧ')
buf.write('\tŧ\x04Ũ\tŨ\x04ũ\tũ\x04Ū\tŪ')
buf.write('\x04ū\tū\x04Ŭ\tŬ\x04ŭ\tŭ\x04Ů')
buf.write('\tŮ\x04ů\tů\x04Ű\tŰ\x04ű\tű')
buf.write('\x04Ų\tŲ\x04ų\tų\x04Ŵ\tŴ\x04ŵ')
buf.write('\tŵ\x04Ŷ\tŶ\x04ŷ\tŷ\x04Ÿ\tŸ')
buf.write('\x04Ź\tŹ\x04ź\tź\x04Ż\tŻ\x04ż')
buf.write('\tż\x04Ž\tŽ\x04ž\tž\x04ſ\tſ')
buf.write('\x04ƀ\tƀ\x04Ɓ\tƁ\x04Ƃ\tƂ\x04ƃ')
buf.write('\tƃ\x04Ƅ\tƄ\x04ƅ\tƅ\x04Ɔ\tƆ')
buf.write('\x04Ƈ\tƇ\x04ƈ\tƈ\x04Ɖ\tƉ\x04Ɗ')
buf.write('\tƊ\x04Ƌ\tƋ\x04ƌ\tƌ\x04ƍ\tƍ')
buf.write('\x04Ǝ\tƎ\x04Ə\tƏ\x04Ɛ\tƐ\x04Ƒ')
buf.write('\tƑ\x04ƒ\tƒ\x04Ɠ\tƓ\x04Ɣ\tƔ')
buf.write('\x04ƕ\tƕ\x04Ɩ\tƖ\x04Ɨ\tƗ\x04Ƙ')
buf.write('\tƘ\x04ƙ\tƙ\x04ƚ\tƚ\x04ƛ\tƛ')
buf.write('\x04Ɯ\tƜ\x04Ɲ\tƝ\x04ƞ\tƞ\x04Ɵ')
buf.write('\tƟ\x04Ơ\tƠ\x04ơ\tơ\x04Ƣ\tƢ')
buf.write('\x04ƣ\tƣ\x04Ƥ\tƤ\x04ƥ\tƥ\x04Ʀ')
buf.write('\tƦ\x04Ƨ\tƧ\x04ƨ\tƨ\x04Ʃ\tƩ')
buf.write('\x04ƪ\tƪ\x04ƫ\tƫ\x04Ƭ\tƬ\x04ƭ')
buf.write('\tƭ\x04Ʈ\tƮ\x04Ư\tƯ\x04ư\tư')
buf.write('\x04Ʊ\tƱ\x04Ʋ\tƲ\x04Ƴ\tƳ\x04ƴ')
buf.write('\tƴ\x04Ƶ\tƵ\x04ƶ\tƶ\x04Ʒ\tƷ')
buf.write('\x04Ƹ\tƸ\x04ƹ\tƹ\x04ƺ\tƺ\x04ƻ')
buf.write('\tƻ\x04Ƽ\tƼ\x04ƽ\tƽ\x04ƾ\tƾ')
buf.write('\x04ƿ\tƿ\x04ǀ\tǀ\x04ǁ\tǁ\x04ǂ')
buf.write('\tǂ\x04ǃ\tǃ\x04DŽ\tDŽ\x04Dž\tDž')
buf.write('\x04dž\tdž\x04LJ\tLJ\x04Lj\tLj\x04lj')
buf.write('\tlj\x04NJ\tNJ\x04Nj\tNj\x04nj\tnj')
buf.write('\x04Ǎ\tǍ\x04ǎ\tǎ\x04Ǐ\tǏ\x04ǐ')
buf.write('\tǐ\x04Ǒ\tǑ\x04ǒ\tǒ\x04Ǔ\tǓ')
buf.write('\x04ǔ\tǔ\x04Ǖ\tǕ\x04ǖ\tǖ\x04Ǘ')
buf.write('\tǗ\x04ǘ\tǘ\x04Ǚ\tǙ\x04ǚ\tǚ')
buf.write('\x04Ǜ\tǛ\x04ǜ\tǜ\x04ǝ\tǝ\x04Ǟ')
buf.write('\tǞ\x04ǟ\tǟ\x04Ǡ\tǠ\x04ǡ\tǡ')
buf.write('\x04Ǣ\tǢ\x04ǣ\tǣ\x04Ǥ\tǤ\x04ǥ')
buf.write('\tǥ\x04Ǧ\tǦ\x04ǧ\tǧ\x04Ǩ\tǨ')
buf.write('\x04ǩ\tǩ\x04Ǫ\tǪ\x04ǫ\tǫ\x04Ǭ')
buf.write('\tǬ\x04ǭ\tǭ\x04Ǯ\tǮ\x04ǯ\tǯ')
buf.write('\x04ǰ\tǰ\x04DZ\tDZ\x04Dz\tDz\x04dz')
buf.write('\tdz\x04Ǵ\tǴ\x04ǵ\tǵ\x04Ƕ\tǶ')
buf.write('\x04Ƿ\tǷ\x04Ǹ\tǸ\x04ǹ\tǹ\x04Ǻ')
buf.write('\tǺ\x04ǻ\tǻ\x04Ǽ\tǼ\x04ǽ\tǽ')
buf.write('\x04Ǿ\tǾ\x04ǿ\tǿ\x04Ȁ\tȀ\x04ȁ')
buf.write('\tȁ\x04Ȃ\tȂ\x04ȃ\tȃ\x04Ȅ\tȄ')
buf.write('\x04ȅ\tȅ\x04Ȇ\tȆ\x04ȇ\tȇ\x04Ȉ')
buf.write('\tȈ\x04ȉ\tȉ\x04Ȋ\tȊ\x04ȋ\tȋ')
buf.write('\x04Ȍ\tȌ\x04ȍ\tȍ\x04Ȏ\tȎ\x04ȏ')
buf.write('\tȏ\x04Ȑ\tȐ\x04ȑ\tȑ\x04Ȓ\tȒ')
buf.write('\x04ȓ\tȓ\x04Ȕ\tȔ\x04ȕ\tȕ\x04Ȗ')
buf.write('\tȖ\x04ȗ\tȗ\x04Ș\tȘ\x04ș\tș')
buf.write('\x04Ț\tȚ\x04ț\tț\x04Ȝ\tȜ\x04ȝ')
buf.write('\tȝ\x04Ȟ\tȞ\x04ȟ\tȟ\x04Ƞ\tȠ')
buf.write('\x04ȡ\tȡ\x04Ȣ\tȢ\x04ȣ\tȣ\x04Ȥ')
buf.write('\tȤ\x04ȥ\tȥ\x04Ȧ\tȦ\x04ȧ\tȧ')
buf.write('\x04Ȩ\tȨ\x04ȩ\tȩ\x04Ȫ\tȪ\x04ȫ')
buf.write('\tȫ\x04Ȭ\tȬ\x04ȭ\tȭ\x04Ȯ\tȮ')
buf.write('\x04ȯ\tȯ\x04Ȱ\tȰ\x04ȱ\tȱ\x04Ȳ')
buf.write('\tȲ\x04ȳ\tȳ\x04ȴ\tȴ\x03\x02\x03\x02\x03\x02\x03')
buf.write(
'\x03\x03\x03\x03\x04\x03\x04\x03\x04\x03\x04\x03\x05\x03\x05\x03\x05\x03\x05\x03\x05\x03\x05\x03\x06\x03\x06'
)
buf.write(
'\x03\x06\x03\x06\x03\x06\x03\x06\x03\x07\x03\x07\x03\x07\x03\x07\x03\x07\x03\x07\x03\x07\x03\x07\x03\x07\x03'
)
buf.write("""
""")
buf.write("""
""")
buf.write(
'\x0c\x03\r\x03\r\x03\r\x03\r\x03\r\x03\r\x03\x0e\x03\x0e\x03\x0e\x03\x0f\x03\x0f\x03'
)
buf.write(
'\x0f\x03\x0f\x03\x0f\x03\x0f\x03\x0f\x03\x10\x03\x10\x03\x10\x03\x10\x03\x10\x03\x10'
)
buf.write(
'\x03\x10\x03\x11\x03\x11\x03\x11\x03\x11\x03\x12\x03\x12\x03\x12\x03\x12\x03\x12\x03\x12'
)
buf.write(
'\x03\x12\x03\x12\x03\x12\x03\x12\x03\x13\x03\x13\x03\x13\x03\x14\x03\x14\x03\x14\x03\x14'
)
buf.write(
'\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15'
)
buf.write(
'\x03\x15\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x17\x03\x17\x03\x17'
)
buf.write(
'\x03\x17\x03\x17\x03\x18\x03\x18\x03\x18\x03\x18\x03\x18\x03\x18\x03\x18\x03\x18\x03\x18'
)
buf.write(
'\x03\x18\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19'
)
buf.write(
'\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19\x03\x19'
)
buf.write(
'\x03\x19\x03\x19\x03\x1a\x03\x1a\x03\x1a\x03\x1a\x03\x1a\x03\x1a\x03\x1b\x03\x1b\x03\x1b'
)
buf.write(
'\x03\x1b\x03\x1b\x03\x1b\x03\x1b\x03\x1c\x03\x1c\x03\x1c\x03\x1c\x03\x1c\x03\x1c\x03\x1d'
)
buf.write(
'\x03\x1d\x03\x1d\x03\x1d\x03\x1d\x03\x1d\x03\x1d\x03\x1d\x03\x1e\x03\x1e\x03\x1e\x03\x1e'
)
buf.write(
'\x03\x1e\x03\x1e\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f'
)
buf.write(
'\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03\x1f\x03 \x03 \x03 \x03 \x03 \x03 \x03 \x03 \x03 \x03'
)
buf.write(
' \x03 \x03 \x03 \x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03!\x03'
)
buf.write(
'!\x03"\x03"\x03"\x03"\x03"\x03#\x03#\x03#\x03#\x03#\x03#\x03$\x03$\x03$\x03$\x03'
)
buf.write(
"$\x03%\x03%\x03%\x03%\x03%\x03%\x03%\x03%\x03&\x03&\x03&\x03&\x03&\x03'\x03'\x03'\x03"
)
buf.write(
"'\x03'\x03'\x03'\x03'\x03(\x03(\x03(\x03(\x03(\x03)\x03)\x03)\x03*\x03*\x03*\x03"
)
buf.write(
'*\x03*\x03+\x03+\x03,\x03,\x03,\x03,\x03,\x03,\x03-\x03-\x03-\x03-\x03-\x03.\x03.\x03.\x03'
)
buf.write(
'.\x03.\x03.\x03.\x03.\x03.\x03.\x03/\x03/\x03/\x03/\x03/\x03/\x03/\x03/\x030\x030'
)
buf.write('\x030\x030\x030\x031\x031\x031\x031\x031\x032\x032\x032')
buf.write('\x032\x032\x033\x033\x033\x033\x033\x033\x033\x033\x034')
buf.write('\x034\x034\x034\x034\x034\x034\x034\x034\x034\x035\x035')
buf.write('\x035\x035\x035\x035\x036\x036\x036\x036\x037\x037\x037')
buf.write(
'\x037\x037\x038\x038\x038\x038\x038\x038\x039\x039\x039\x039\x039\x039\x039\x039\x03'
)
buf.write(
':\x03:\x03:\x03:\x03:\x03:\x03:\x03:\x03;\x03;\x03;\x03;\x03;\x03;\x03;\x03;\x03<\x03<\x03'
)
buf.write(
'<\x03<\x03<\x03<\x03<\x03<\x03=\x03=\x03=\x03=\x03=\x03=\x03=\x03>\x03>\x03>\x03>\x03>\x03'
)
buf.write(
'>\x03>\x03>\x03>\x03>\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03?\x03'
)
buf.write(
'?\x03@\x03@\x03@\x03@\x03@\x03@\x03@\x03@\x03A\x03A\x03A\x03A\x03A\x03A\x03A\x03A\x03A\x03'
)
buf.write(
'B\x03B\x03B\x03B\x03B\x03B\x03B\x03B\x03C\x03C\x03C\x03C\x03C\x03C\x03C\x03C\x03C\x03C\x03'
)
buf.write(
'C\x03C\x03C\x03C\x03C\x03C\x03D\x03D\x03D\x03D\x03D\x03D\x03D\x03D\x03D\x03E\x03E\x03E\x03'
)
buf.write(
'E\x03E\x03E\x03E\x03E\x03E\x03E\x03E\x03F\x03F\x03F\x03F\x03F\x03F\x03F\x03F\x03F\x03F\x03'
)
buf.write(
'F\x03F\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03G\x03H\x03H\x03H\x03H\x03'
)
buf.write(
'H\x03H\x03H\x03H\x03I\x03I\x03I\x03I\x03I\x03I\x03I\x03I\x03J\x03J\x03J\x03J\x03J\x03J\x03'
)
buf.write(
'J\x03J\x03J\x03K\x03K\x03K\x03K\x03K\x03K\x03K\x03K\x03L\x03L\x03L\x03L\x03L\x03L\x03L\x03'
)
buf.write(
'L\x03L\x03L\x03L\x03L\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03M\x03'
)
buf.write(
'M\x03M\x03M\x03N\x03N\x03N\x03N\x03N\x03O\x03O\x03O\x03O\x03O\x03O\x03P\x03P\x03P\x03P\x03'
)
buf.write(
'P\x03P\x03P\x03Q\x03Q\x03Q\x03Q\x03Q\x03Q\x03R\x03R\x03R\x03R\x03R\x03S\x03S\x03S\x03S\x03'
)
buf.write(
'S\x03S\x03S\x03S\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03T\x03U\x03'
)
buf.write(
'U\x03U\x03U\x03U\x03U\x03U\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03V\x03'
)
buf.write(
'W\x03W\x03W\x03W\x03W\x03W\x03X\x03X\x03X\x03X\x03X\x03Y\x03Y\x03Y\x03Y\x03Y\x03Y\x03Y\x03'
)
buf.write(
'Y\x03Y\x03Z\x03Z\x03Z\x03Z\x03Z\x03[\x03[\x03[\x03[\x03\\\x03\\\x03\\\x03\\\x03\\\x03'
)
buf.write(
'\\\x03\\\x03\\\x03\\\x03\\\x03\\\x03\\\x03\\\x03\\\x03\\\x03]\x03]\x03]\x03]\x03]'
)
buf.write(
'\x03]\x03]\x03]\x03]\x03]\x03]\x03^\x03^\x03^\x03^\x03_\x03_\x03_\x03_\x03_\x03_\x03`\x03'
)
buf.write(
'`\x03`\x03`\x03a\x03a\x03a\x03a\x03a\x03a\x03a\x03a\x03b\x03b\x03b\x03b\x03b\x03b\x03b\x03'
)
buf.write(
'b\x03c\x03c\x03c\x03c\x03c\x03c\x03c\x03c\x03c\x03c\x03d\x03d\x03d\x03d\x03d\x03d\x03d\x03'
)
buf.write(
'd\x03d\x03d\x03e\x03e\x03e\x03e\x03e\x03e\x03e\x03e\x03f\x03f\x03f\x03f\x03f\x03f\x03f\x03'
)
buf.write(
'f\x03f\x03g\x03g\x03g\x03g\x03g\x03g\x03g\x03g\x03g\x03h\x03h\x03h\x03h\x03h\x03h\x03h\x03'
)
buf.write(
'h\x03i\x03i\x03i\x03i\x03i\x03i\x03i\x03j\x03j\x03j\x03j\x03j\x03j\x03k\x03k\x03k\x03k\x03'
)
buf.write(
'k\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03l\x03m\x03m\x03m\x03'
)
buf.write(
'm\x03m\x03m\x03m\x03m\x03m\x03m\x03n\x03n\x03n\x03n\x03n\x03n\x03n\x03n\x03o\x03o\x03o\x03'
)
buf.write(
'o\x03o\x03o\x03o\x03o\x03o\x03o\x03o\x03o\x03o\x03p\x03p\x03p\x03p\x03p\x03p\x03p\x03p\x03'
)
buf.write(
'p\x03q\x03q\x03q\x03q\x03q\x03q\x03q\x03q\x03q\x03r\x03r\x03r\x03r\x03r\x03r\x03r\x03s\x03'
)
buf.write(
's\x03s\x03s\x03s\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03'
)
buf.write(
't\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03t\x03u\x03u\x03u\x03u\x03u\x03v\x03v\x03'
)
buf.write(
'v\x03v\x03v\x03v\x03v\x03v\x03w\x03w\x03w\x03w\x03w\x03x\x03x\x03x\x03x\x03x\x03x\x03y\x03'
)
buf.write(
'y\x03y\x03y\x03y\x03y\x03z\x03z\x03z\x03z\x03z\x03z\x03z\x03{\x03{\x03{\x03{\x03{\x03{\x03'
)
buf.write(
'{\x03{\x03{\x03|\x03|\x03|\x03|\x03}\x03}\x03}\x03}\x03}\x03}\x03}\x03}\x03}\x03}\x03}\x03'
)
buf.write(
'}\x03}\x03}\x03}\x03~\x03~\x03~\x03~\x03\x7f\x03\x7f\x03\x7f\x03\x7f\x03\x7f\x03'
)
buf.write(
'\x7f\x03\x7f\x03\x80\x03\x80\x03\x80\x03\x80\x03\x80\x03\x80')
buf.write('\x03\x80\x03\x81\x03\x81\x03\x81\x03\x81\x03\x81\x03\x81')
buf.write('\x03\x81\x03\x81\x03\x81\x03\x82\x03\x82\x03\x82\x03\x82')
buf.write('\x03\x82\x03\x82\x03\x82\x03\x83\x03\x83\x03\x83\x03\x83')
buf.write('\x03\x83\x03\x83\x03\x83\x03\x83\x03\x83\x03\x83\x03\x84')
buf.write('\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84')
buf.write('\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84\x03\x84')
buf.write('\x03\x85\x03\x85\x03\x85\x03\x85\x03\x85\x03\x85\x03\x85')
buf.write('\x03\x85\x03\x85\x03\x85\x03\x85\x03\x86\x03\x86\x03\x86')
buf.write('\x03\x86\x03\x86\x03\x86\x03\x86\x03\x86\x03\x87\x03\x87')
buf.write('\x03\x87\x03\x87\x03\x87\x03\x87\x03\x87\x03\x87\x03\x87')
buf.write('\x03\x87\x03\x88\x03\x88\x03\x88\x03\x88\x03\x88\x03\x88')
buf.write('\x03\x88\x03\x88\x03\x89\x03\x89\x03\x89\x03\x89\x03\x89')
buf.write('\x03\x89\x03\x89\x03\x8a\x03\x8a\x03\x8a\x03\x8a\x03\x8a')
buf.write('\x03\x8b\x03\x8b\x03\x8b\x03\x8b\x03\x8b\x03\x8b\x03\x8b')
buf.write('\x03\x8b\x03\x8c\x03\x8c\x03\x8c\x03\x8c\x03\x8c\x03\x8c')
buf.write('\x03\x8c\x03\x8c\x03\x8c\x03\x8d\x03\x8d\x03\x8d\x03\x8d')
buf.write('\x03\x8d\x03\x8d\x03\x8d\x03\x8d\x03\x8e\x03\x8e\x03\x8e')
buf.write('\x03\x8e\x03\x8e\x03\x8e\x03\x8e\x03\x8e\x03\x8f\x03\x8f')
buf.write('\x03\x8f\x03\x8f\x03\x8f\x03\x8f\x03\x90\x03\x90\x03\x90')
buf.write('\x03\x90\x03\x90\x03\x90\x03\x91\x03\x91\x03\x91\x03\x91')
buf.write('\x03\x91\x03\x91\x03\x92\x03\x92\x03\x92\x03\x92\x03\x92')
buf.write('\x03\x92\x03\x93\x03\x93\x03\x93\x03\x93\x03\x93\x03\x93')
buf.write('\x03\x93\x03\x93\x03\x93\x03\x93\x03\x93\x03\x93\x03\x94')
buf.write('\x03\x94\x03\x94\x03\x94\x03\x94\x03\x94\x03\x95\x03\x95')
buf.write('\x03\x95\x03\x95\x03\x95\x03\x95\x03\x95\x03\x95\x03\x95')
buf.write('\x03\x95\x03\x96\x03\x96\x03\x96\x03\x96\x03\x96\x03\x96')
buf.write('\x03\x96\x03\x96\x03\x97\x03\x97\x03\x97\x03\x97\x03\x98')
buf.write('\x03\x98\x03\x98\x03\x98\x03\x98\x03\x98\x03\x98\x03\x99')
buf.write('\x03\x99\x03\x99\x03\x99\x03\x99\x03\x99\x03\x9a\x03\x9a')
buf.write('\x03\x9a\x03\x9a\x03\x9a\x03\x9b\x03\x9b\x03\x9b\x03\x9b')
buf.write('\x03\x9b\x03\x9c\x03\x9c\x03\x9c\x03\x9c\x03\x9c\x03\x9c')
buf.write('\x03\x9c\x03\x9c\x03\x9c\x03\x9d\x03\x9d\x03\x9d\x03\x9d')
buf.write('\x03\x9d\x03\x9e\x03\x9e\x03\x9e\x03\x9e\x03\x9e\x03\x9e')
buf.write('\x03\x9f\x03\x9f\x03\x9f\x03\x9f\x03\x9f\x03\x9f\x03\xa0')
buf.write('\x03\xa0\x03\xa0\x03\xa0\x03\xa0\x03\xa0\x03\xa0\x03\xa0')
buf.write('\x03\xa0\x03¡\x03¡\x03¡\x03¡\x03¡\x03¢')
buf.write('\x03¢\x03¢\x03¢\x03¢\x03¢\x03¢\x03£')
buf.write('\x03£\x03£\x03£\x03£\x03¤\x03¤\x03¤')
buf.write('\x03¤\x03¤\x03¥\x03¥\x03¥\x03¦\x03¦')
buf.write('\x03¦\x03¦\x03¦\x03¦\x03¦\x03§\x03§')
buf.write('\x03§\x03§\x03§\x03§\x03§\x03§\x03§')
buf.write('\x03§\x03¨\x03¨\x03¨\x03©\x03©\x03©')
buf.write('\x03©\x03©\x03©\x03©\x03©\x03ª\x03ª')
buf.write('\x03ª\x03ª\x03ª\x03ª\x03ª\x03ª\x03ª')
buf.write('\x03ª\x03«\x03«\x03«\x03«\x03«\x03«')
buf.write('\x03«\x03«\x03«\x03«\x03¬\x03¬\x03¬')
buf.write('\x03¬\x03¬\x03¬\x03¬\x03\xad\x03\xad\x03\xad')
buf.write('\x03\xad\x03\xad\x03\xad\x03®\x03®\x03®\x03®')
buf.write('\x03®\x03®\x03®\x03®\x03¯\x03¯\x03¯')
buf.write('\x03¯\x03¯\x03¯\x03¯\x03¯\x03¯\x03¯')
buf.write('\x03°\x03°\x03°\x03°\x03°\x03°\x03°')
buf.write('\x03°\x03±\x03±\x03±\x03±\x03±\x03±')
buf.write('\x03±\x03±\x03±\x03²\x03²\x03²\x03²')
buf.write('\x03²\x03²\x03²\x03³\x03³\x03³\x03³')
buf.write('\x03³\x03³\x03´\x03´\x03´\x03´\x03´')
buf.write('\x03´\x03µ\x03µ\x03µ\x03µ\x03µ\x03µ')
buf.write('\x03µ\x03¶\x03¶\x03¶\x03¶\x03¶\x03¶')
buf.write('\x03¶\x03¶\x03¶\x03¶\x03¶\x03¶\x03¶')
buf.write('\x03·\x03·\x03·\x03·\x03·\x03·\x03·')
buf.write('\x03·\x03¸\x03¸\x03¸\x03¸\x03¹\x03¹')
buf.write('\x03¹\x03¹\x03¹\x03¹\x03¹\x03¹\x03º')
buf.write('\x03º\x03º\x03º\x03º\x03º\x03º\x03º')
buf.write('\x03º\x03º\x03»\x03»\x03»\x03»\x03»')
buf.write('\x03»\x03»\x03»\x03»\x03¼\x03¼\x03¼')
buf.write('\x03¼\x03¼\x03½\x03½\x03½\x03½\x03½')
buf.write('\x03½\x03½\x03½\x03½\x03½\x03½\x03¾')
buf.write('\x03¾\x03¾\x03¿\x03¿\x03¿\x03¿\x03¿')
buf.write('\x03¿\x03¿\x03¿\x03¿\x03¿\x03À\x03À')
buf.write('\x03À\x03À\x03À\x03À\x03À\x03À\x03Á')
buf.write('\x03Á\x03Á\x03Á\x03Á\x03Â\x03Â\x03Â')
buf.write('\x03Â\x03Â\x03Ã\x03Ã\x03Ã\x03Ã\x03Ã')
buf.write('\x03Ä\x03Ä\x03Ä\x03Ä\x03Ä\x03Ä\x03Ä')
buf.write('\x03Ä\x03Ä\x03Å\x03Å\x03Å\x03Å\x03Å')
buf.write('\x03Æ\x03Æ\x03Æ\x03Æ\x03Æ\x03Æ\x03Æ')
buf.write('\x03Æ\x03Æ\x03Æ\x03Æ\x03Ç\x03Ç\x03Ç')
buf.write('\x03Ç\x03Ç\x03Ç\x03Ç\x03Ç\x03È\x03È')
buf.write('\x03È\x03È\x03È\x03É\x03É\x03É\x03É')
buf.write('\x03É\x03É\x03Ê\x03Ê\x03Ê\x03Ê\x03Ê')
buf.write('\x03Ê\x03Ê\x03Ê\x03Ë\x03Ë\x03Ë\x03Ë')
buf.write('\x03Ë\x03Ì\x03Ì\x03Ì\x03Ì\x03Ì\x03Ì')
buf.write('\x03Í\x03Í\x03Í\x03Í\x03Í\x03Í\x03Î')
buf.write('\x03Î\x03Î\x03Î\x03Î\x03Î\x03Ï\x03Ï')
buf.write('\x03Ï\x03Ï\x03Ï\x03Ï\x03Ð\x03Ð\x03Ð')
buf.write('\x03Ð\x03Ð\x03Ð\x03Ñ\x03Ñ\x03Ñ\x03Ñ')
buf.write('\x03Ñ\x03Ò\x03Ò\x03Ò\x03Ò\x03Ò\x03Ò')
buf.write('\x03Ò\x03Ó\x03Ó\x03Ó\x03Ó\x03Ô\x03Ô')
buf.write('\x03Ô\x03Ô\x03Ô\x03Ô\x03Ô\x03Õ\x03Õ')
buf.write('\x03Õ\x03Õ\x03Õ\x03Õ\x03Ö\x03Ö\x03Ö')
buf.write('\x03Ö\x03Ö\x03×\x03×\x03×\x03×\x03×')
buf.write('\x03Ø\x03Ø\x03Ø\x03Ø\x03Ø\x03Ù\x03Ù')
buf.write('\x03Ù\x03Ù\x03Ú\x03Ú\x03Ú\x03Ú\x03Ú')
buf.write('\x03Ú\x03Ú\x03Ú\x03Û\x03Û\x03Û\x03Û')
buf.write('\x03Û\x03Û\x03Û\x03Û\x03Û\x03Ü\x03Ü')
buf.write('\x03Ü\x03Ü\x03Ü\x03Ü\x03Ü\x03Ü\x03Ü')
buf.write('\x03Ý\x03Ý\x03Ý\x03Ý\x03Ý\x03Ý\x03Ý')
buf.write('\x03Þ\x03Þ\x03Þ\x03Þ\x03Þ\x03Þ\x03ß')
buf.write('\x03ß\x03ß\x03ß\x03ß\x03ß\x03à\x03à')
buf.write('\x03à\x03à\x03à\x03à\x03à\x03á\x03á')
buf.write('\x03á\x03á\x03á\x03á\x03á\x03á\x03á')
buf.write('\x03â\x03â\x03â\x03â\x03â\x03â\x03â')
buf.write('\x03â\x03â\x03ã\x03ã\x03ã\x03ã\x03ã')
buf.write('\x03ä\x03ä\x03ä\x03ä\x03ä\x03ä\x03å')
buf.write('\x03å\x03å\x03å\x03å\x03å\x03å\x03æ')
buf.write('\x03æ\x03æ\x03æ\x03æ\x03æ\x03ç\x03ç')
buf.write('\x03ç\x03ç\x03ç\x03ç\x03ç\x03ç\x03ç')
buf.write('\x03è\x03è\x03è\x03è\x03è\x03é\x03é')
buf.write('\x03é\x03é\x03ê\x03ê\x03ê\x03ê\x03ê')
buf.write('\x03ê\x03ê\x03ê\x03ë\x03ë\x03ë\x03ë')
buf.write('\x03ë\x03ë\x03ë\x03ë\x03ë\x03ì\x03ì')
buf.write('\x03ì\x03ì\x03í\x03í\x03í\x03í\x03í')
buf.write('\x03í\x03î\x03î\x03î\x03î\x03î\x03î')
buf.write('\x03î\x03î\x03î\x03ï\x03ï\x03ï\x03ï')
buf.write('\x03ï\x03ï\x03ð\x03ð\x03ð\x03ð\x03ð')
buf.write('\x03ð\x03ð\x03ñ\x03ñ\x03ñ\x03ñ\x03ò')
buf.write('\x03ò\x03ò\x03ó\x03ó\x03ó\x03ó\x03ó')
buf.write('\x03ó\x03ó\x03ó\x03ô\x03ô\x03ô\x03ô')
buf.write('\x03ô\x03ô\x03ô\x03ô\x03õ\x03õ\x03õ')
buf.write('\x03õ\x03õ\x03õ\x03õ\x03ö\x03ö\x03ö')
buf.write('\x03ö\x03ö\x03ö\x03ö\x03ö\x03÷\x03÷')
buf.write('\x03÷\x03÷\x03÷\x03÷\x03÷\x03÷\x03÷')
buf.write('\x03÷\x03÷\x03÷\x03÷\x03÷\x03÷\x03÷')
buf.write('\x03÷\x03ø\x03ø\x03ø\x03ø\x03ø\x03ø')
buf.write('\x03ø\x03ø\x03ø\x03ø\x03ø\x03ù\x03ù')
buf.write('\x03ù\x03ù\x03ù\x03ù\x03ù\x03ù\x03ù')
buf.write('\x03ù\x03ù\x03ú\x03ú\x03ú\x03ú\x03ú')
buf.write('\x03û\x03û\x03û\x03û\x03û\x03û\x03û')
buf.write('\x03û\x03ü\x03ü\x03ü\x03ü\x03ü\x03ü')
buf.write('\x03ü\x03ü\x03ü\x03ü\x03ü\x03ü\x03ü')
buf.write('\x03ü\x03ý\x03ý\x03ý\x03ý\x03þ\x03þ')
buf.write('\x03þ\x03þ\x03þ\x03þ\x03þ\x03ÿ\x03ÿ')
buf.write('\x03ÿ\x03ÿ\x03ÿ\x03Ā\x03Ā\x03Ā\x03Ā')
buf.write('\x03Ā\x03Ā\x03ā\x03ā\x03ā\x03ā\x03ā')
buf.write('\x03ā\x03ā\x03Ă\x03Ă\x03Ă\x03Ă\x03Ă')
buf.write('\x03Ă\x03Ă\x03Ă\x03ă\x03ă\x03ă\x03ă')
buf.write('\x03ă\x03ă\x03ă\x03ă\x03ă\x03ă\x03Ą')
buf.write('\x03Ą\x03Ą\x03Ą\x03Ą\x03Ą\x03Ą\x03ą')
buf.write('\x03ą\x03ą\x03Ć\x03Ć\x03Ć\x03Ć\x03ć')
buf.write('\x03ć\x03ć\x03ć\x03Ĉ\x03Ĉ\x03Ĉ\x03Ĉ')
buf.write('\x03ĉ\x03ĉ\x03ĉ\x03Ċ\x03Ċ\x03Ċ\x03Ċ')
buf.write('\x03Ċ\x03ċ\x03ċ\x03ċ\x03ċ\x03ċ\x03Č')
buf.write('\x03Č\x03Č\x03Č\x03Č\x03Č\x03Č\x03č')
buf.write('\x03č\x03č\x03Ď\x03Ď\x03Ď\x03Ď\x03Ď')
buf.write('\x03Ď\x03Ď\x03Ď\x03ď\x03ď\x03ď\x03ď')
buf.write('\x03ď\x03ď\x03Đ\x03Đ\x03Đ\x03Đ\x03Đ')
buf.write('\x03Đ\x03Đ\x03Đ\x03Đ\x03Đ\x03Đ\x03đ')
buf.write('\x03đ\x03đ\x03đ\x03đ\x03đ\x03đ\x03đ')
buf.write('\x03Ē\x03Ē\x03Ē\x03Ē\x03ē\x03ē\x03ē')
buf.write('\x03ē\x03ē\x03ē\x03Ĕ\x03Ĕ\x03Ĕ\x03Ĕ')
buf.write('\x03Ĕ\x03ĕ\x03ĕ\x03ĕ\x03ĕ\x03ĕ\x03ĕ')
buf.write('\x03ĕ\x03ĕ\x03ĕ\x03ĕ\x03ĕ\x03Ė\x03Ė')
buf.write('\x03Ė\x03Ė\x03Ė\x03Ė\x03Ė\x03Ė\x03ė')
buf.write('\x03ė\x03ė\x03ė\x03ė\x03ė\x03ė\x03ė')
buf.write('\x03ė\x03ė\x03ė\x03ė\x03ė\x03ė\x03ė')
buf.write('\x03ė\x03Ę\x03Ę\x03Ę\x03Ę\x03Ę\x03Ę')
buf.write('\x03Ę\x03Ę\x03Ę\x03Ę\x03Ę\x03ę\x03ę')
buf.write('\x03ę\x03ę\x03ę\x03ę\x03ę\x03Ě\x03Ě')
buf.write('\x03Ě\x03Ě\x03Ě\x03Ě\x03Ě\x03Ě\x03Ě')
buf.write('\x03Ě\x03ě\x03ě\x03ě\x03ě\x03ě\x03ě')
buf.write('\x03ě\x03ě\x03Ĝ\x03Ĝ\x03Ĝ\x03Ĝ\x03Ĝ')
buf.write('\x03ĝ\x03ĝ\x03ĝ\x03ĝ\x03ĝ\x03ĝ\x03ĝ')
buf.write('\x03ĝ\x03ĝ\x03Ğ\x03Ğ\x03Ğ\x03Ğ\x03Ğ')
buf.write('\x03Ğ\x03ğ\x03ğ\x03ğ\x03ğ\x03ğ\x03ğ')
buf.write('\x03ğ\x03ğ\x03ğ\x03ğ\x03Ġ\x03Ġ\x03Ġ')
buf.write('\x03Ġ\x03Ġ\x03Ġ\x03ġ\x03ġ\x03ġ\x03ġ')
buf.write('\x03ġ\x03Ģ\x03Ģ\x03Ģ\x03Ģ\x03Ģ\x03Ģ')
buf.write('\x03Ģ\x03Ģ\x03Ģ\x03Ģ\x03Ģ\x03Ģ\x03ģ')
buf.write('\x03ģ\x03ģ\x03ģ\x03ģ\x03ģ\x03ģ\x03ģ')
buf.write('\x03ģ\x03Ĥ\x03Ĥ\x03Ĥ\x03Ĥ\x03Ĥ\x03Ĥ')
buf.write('\x03Ĥ\x03Ĥ\x03Ĥ\x03Ĥ\x03ĥ\x03ĥ\x03ĥ')
buf.write('\x03ĥ\x03ĥ\x03ĥ\x03ĥ\x03Ħ\x03Ħ\x03Ħ')
buf.write('\x03Ħ\x03Ħ\x03Ħ\x03Ħ\x03Ħ\x03Ħ\x03Ħ')
buf.write('\x03ħ\x03ħ\x03ħ\x03ħ\x03ħ\x03ħ\x03ħ')
buf.write('\x03ħ\x03ħ\x03ħ\x03Ĩ\x03Ĩ\x03Ĩ\x03Ĩ')
buf.write('\x03Ĩ\x03Ĩ\x03Ĩ\x03Ĩ\x03ĩ\x03ĩ\x03ĩ')
buf.write('\x03ĩ\x03ĩ\x03ĩ\x03Ī\x03Ī\x03Ī\x03Ī')
buf.write('\x03Ī\x03Ī\x03Ī\x03Ī\x03Ī\x03Ī\x03ī')
buf.write('\x03ī\x03ī\x03ī\x03ī\x03ī\x03Ĭ\x03Ĭ')
buf.write('\x03Ĭ\x03Ĭ\x03Ĭ\x03Ĭ\x03ĭ\x03ĭ\x03ĭ')
buf.write('\x03ĭ\x03Į\x03Į\x03Į\x03Į\x03Į\x03į')
buf.write('\x03į\x03į\x03į\x03į\x03İ\x03İ\x03İ')
buf.write('\x03İ\x03İ\x03İ\x03İ\x03ı\x03ı\x03ı')
buf.write('\x03ı\x03IJ\x03IJ\x03IJ\x03IJ\x03IJ\x03IJ')
buf.write('\x03IJ\x03IJ\x03IJ\x03IJ\x03ij\x03ij\x03ij')
buf.write('\x03ij\x03ij\x03ij\x03ij\x03ij\x03ij\x03ij')
buf.write('\x03ij\x03ij\x03Ĵ\x03Ĵ\x03Ĵ\x03Ĵ\x03Ĵ')
buf.write('\x03Ĵ\x03Ĵ\x03ĵ\x03ĵ\x03ĵ\x03ĵ\x03ĵ')
buf.write('\x03ĵ\x03ĵ\x03ĵ\x03ĵ\x03ĵ\x03Ķ\x03Ķ')
buf.write('\x03Ķ\x03Ķ\x03Ķ\x03Ķ\x03Ķ\x03ķ\x03ķ')
buf.write('\x03ķ\x03ķ\x03ķ\x03ķ\x03ķ\x03ķ\x03ĸ')
buf.write('\x03ĸ\x03ĸ\x03ĸ\x03ĸ\x03ĸ\x03ĸ\x03ĸ')
buf.write('\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ')
buf.write('\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ')
buf.write('\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03Ĺ\x03ĺ')
buf.write('\x03ĺ\x03ĺ\x03ĺ\x03ĺ\x03ĺ\x03ĺ\x03Ļ')
buf.write('\x03Ļ\x03Ļ\x03Ļ\x03Ļ\x03Ļ\x03Ļ\x03Ļ')
buf.write('\x03Ļ\x03Ļ\x03Ļ\x03Ļ\x03Ļ\x03ļ\x03ļ')
buf.write('\x03ļ\x03ļ\x03ļ\x03ļ\x03ļ\x03Ľ\x03Ľ')
buf.write('\x03Ľ\x03Ľ\x03Ľ\x03Ľ\x03Ľ\x03Ľ\x03Ľ')
buf.write('\x03Ľ\x03ľ\x03ľ\x03ľ\x03ľ\x03ľ\x03ľ')
buf.write('\x03Ŀ\x03Ŀ\x03Ŀ\x03Ŀ\x03Ŀ\x03Ŀ\x03Ŀ')
buf.write('\x03Ŀ\x03ŀ\x03ŀ\x03ŀ\x03ŀ\x03ŀ\x03ŀ')
buf.write('\x03ŀ\x03Ł\x03Ł\x03Ł\x03Ł\x03Ł\x03Ł')
buf.write('\x03ł\x03ł\x03ł\x03ł\x03ł\x03ł\x03ł')
buf.write('\x03ł\x03ł\x03Ń\x03Ń\x03Ń\x03Ń\x03Ń')
buf.write('\x03Ń\x03Ń\x03ń\x03ń\x03ń\x03ń\x03Ņ')
buf.write('\x03Ņ\x03Ņ\x03Ņ\x03Ņ\x03Ņ\x03ņ\x03ņ')
buf.write('\x03ņ\x03ņ\x03ņ\x03Ň\x03Ň\x03Ň\x03Ň')
buf.write('\x03Ň\x03Ň\x03ň\x03ň\x03ň\x03ň\x03ň')
buf.write('\x03ň\x03ň\x03ʼn\x03ʼn\x03ʼn\x03ʼn\x03ʼn')
buf.write('\x03Ŋ\x03Ŋ\x03Ŋ\x03Ŋ\x03Ŋ\x03Ŋ\x03Ŋ')
buf.write('\x03Ŋ\x03Ŋ\x03Ŋ\x03ŋ\x03ŋ\x03ŋ\x03ŋ')
buf.write('\x03ŋ\x03ŋ\x03ŋ\x03Ō\x03Ō\x03Ō\x03Ō')
buf.write('\x03Ō\x03Ō\x03Ō\x03Ō\x03Ō\x03Ō\x03Ō')
buf.write('\x03Ō\x03ō\x03ō\x03ō\x03ō\x03Ŏ\x03Ŏ')
buf.write('\x03Ŏ\x03Ŏ\x03Ŏ\x03Ŏ\x03Ŏ\x03ŏ\x03ŏ')
buf.write('\x03ŏ\x03ŏ\x03ŏ\x03ŏ\x03ŏ\x03Ő\x03Ő')
buf.write('\x03Ő\x03Ő\x03Ő\x03ő\x03ő\x03ő\x03ő')
buf.write('\x03ő\x03ő\x03ő\x03ő\x03Œ\x03Œ\x03Œ')
buf.write('\x03Œ\x03Œ\x03Œ\x03Œ\x03œ\x03œ\x03œ')
buf.write('\x03œ\x03œ\x03Ŕ\x03Ŕ\x03Ŕ\x03Ŕ\x03Ŕ')
buf.write('\x03Ŕ\x03Ŕ\x03Ŕ\x03Ŕ\x03ŕ\x03ŕ\x03ŕ')
buf.write('\x03ŕ\x03ŕ\x03ŕ\x03ŕ\x03ŕ\x03ŕ\x03ŕ')
buf.write('\x03ŕ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ')
buf.write('\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ\x03Ŗ')
buf.write('\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ')
buf.write('\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03ŗ')
buf.write('\x03ŗ\x03ŗ\x03ŗ\x03ŗ\x03Ř\x03Ř\x03Ř')
buf.write('\x03Ř\x03Ř\x03Ř\x03Ř\x03Ř\x03Ř\x03Ř')
buf.write('\x03Ř\x03Ř\x03ř\x03ř\x03ř\x03ř\x03ř')
buf.write('\x03ř\x03ř\x03ř\x03ř\x03ř\x03ř\x03ř')
buf.write('\x03ř\x03ř\x03ř\x03ř\x03Ś\x03Ś\x03Ś')
buf.write('\x03Ś\x03ś\x03ś\x03ś\x03ś\x03ś\x03Ŝ')
buf.write('\x03Ŝ\x03Ŝ\x03Ŝ\x03Ŝ\x03Ŝ\x03Ŝ\x03Ŝ')
buf.write('\x03Ŝ\x03ŝ\x03ŝ\x03ŝ\x03ŝ\x03ŝ\x03ŝ')
buf.write('\x03Ş\x03Ş\x03Ş\x03Ş\x03Ş\x03ş\x03ş')
buf.write('\x03ş\x03ş\x03ş\x03ş\x03ş\x03ş\x03ş')
buf.write('\x03Š\x03Š\x03Š\x03Š\x03Š\x03Š\x03Š')
buf.write('\x03Š\x03Š\x03š\x03š\x03š\x03š\x03š')
buf.write('\x03š\x03š\x03š\x03š\x03Ţ\x03Ţ\x03Ţ')
buf.write('\x03Ţ\x03Ţ\x03Ţ\x03Ţ\x03Ţ\x03Ţ\x03Ţ')
buf.write('\x03Ţ\x03Ţ\x03Ţ\x03Ţ\x03Ţ\x03ţ\x03ţ')
buf.write('\x03ţ\x03ţ\x03ţ\x03ţ\x03ţ\x03Ť\x03Ť')
buf.write('\x03Ť\x03Ť\x03Ť\x03ť\x03ť\x03ť\x03ť')
buf.write('\x03ť\x03Ŧ\x03Ŧ\x03Ŧ\x03Ŧ\x03Ŧ\x03Ŧ')
buf.write('\x03Ŧ\x03Ŧ\x03Ŧ\x03ŧ\x03ŧ\x03ŧ\x03ŧ')
buf.write('\x03ŧ\x03ŧ\x03ŧ\x03ŧ\x03ŧ\x03Ũ\x03Ũ')
buf.write('\x03Ũ\x03Ũ\x03Ũ\x03ũ\x03ũ\x03ũ\x03ũ')
buf.write('\x03ũ\x03ũ\x03ũ\x03ũ\x03ũ\x03ũ\x03ũ')
buf.write('\x03ũ\x03ũ\x03ũ\x03Ū\x03Ū\x03Ū\x03Ū')
buf.write('\x03Ū\x03Ū\x03Ū\x03Ū\x03ū\x03ū\x03ū')
buf.write('\x03ū\x03ū\x03ū\x03ū\x03ū\x03ū\x03Ŭ')
buf.write('\x03Ŭ\x03Ŭ\x03Ŭ\x03Ŭ\x03Ŭ\x03Ŭ\x03Ŭ')
buf.write('\x03Ŭ\x03Ŭ\x03Ŭ\x03ŭ\x03ŭ\x03ŭ\x03ŭ')
buf.write('\x03ŭ\x03ŭ\x03Ů\x03Ů\x03Ů\x03Ů\x03Ů')
buf.write('\x03Ů\x03Ů\x03Ů\x03ů\x03ů\x03ů\x03ů')
buf.write('\x03ů\x03ů\x03ů\x03ů\x03ů\x03ů\x03Ű')
buf.write('\x03Ű\x03Ű\x03Ű\x03Ű\x03Ű\x03Ű\x03Ű')
buf.write('\x03Ű\x03Ű\x03Ű\x03Ű\x03Ű\x03ű\x03ű')
buf.write('\x03ű\x03ű\x03ű\x03ű\x03ű\x03Ų\x03Ų')
buf.write('\x03Ų\x03Ų\x03Ų\x03Ų\x03Ų\x03Ų\x03Ų')
buf.write('\x03Ų\x03Ų\x03ų\x03ų\x03ų\x03ų\x03ų')
buf.write('\x03ų\x03ų\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ')
buf.write('\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ\x03Ŵ')
buf.write('\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03ŵ')
buf.write('\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03ŵ\x03Ŷ')
buf.write('\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ')
buf.write('\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03Ŷ\x03ŷ')
buf.write('\x03ŷ\x03ŷ\x03ŷ\x03ŷ\x03ŷ\x03ŷ\x03ŷ')
buf.write('\x03Ÿ\x03Ÿ\x03Ÿ\x03Ÿ\x03Ÿ\x03Ÿ\x03Ÿ')
buf.write('\x03Ÿ\x03Ź\x03Ź\x03Ź\x03Ź\x03Ź\x03Ź')
buf.write('\x03Ź\x03Ź\x03ź\x03ź\x03ź\x03ź\x03ź')
buf.write('\x03ź\x03Ż\x03Ż\x03Ż\x03Ż\x03ż\x03ż')
buf.write('\x03ż\x03ż\x03ż\x03Ž\x03Ž\x03Ž\x03Ž')
buf.write('\x03Ž\x03ž\x03ž\x03ž\x03ž\x03ž\x03ž')
buf.write('\x03ž\x03ž\x03ž\x03ž\x03ſ\x03ſ\x03ſ')
buf.write('\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ')
buf.write('\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ')
buf.write('\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ\x03ſ')
buf.write('\x03ſ\x03ſ\x03ſ\x03ſ\x03ƀ\x03ƀ\x03ƀ')
buf.write('\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ')
buf.write('\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ')
buf.write('\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ\x03ƀ')
buf.write('\x03ƀ\x03ƀ\x03ƀ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ')
buf.write('\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ')
buf.write('\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ')
buf.write('\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ɓ\x03Ƃ')
buf.write('\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ')
buf.write('\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03Ƃ\x03ƃ')
buf.write('\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03ƃ')
buf.write('\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03ƃ\x03Ƅ')
buf.write('\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ')
buf.write('\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ\x03Ƅ')
buf.write('\x03Ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ')
buf.write('\x03ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ\x03ƅ')
buf.write('\x03ƅ\x03ƅ\x03ƅ\x03Ɔ\x03Ɔ\x03Ɔ\x03Ƈ')
buf.write('\x03Ƈ\x03Ƈ\x03Ƈ\x03Ƈ\x03Ƈ\x03Ƈ\x03Ƈ')
buf.write('\x03Ƈ\x03ƈ\x03ƈ\x03ƈ\x03ƈ\x03ƈ\x03ƈ')
buf.write('\x03ƈ\x03ƈ\x03ƈ\x03ƈ\x03ƈ\x03ƈ\x03Ɖ')
buf.write('\x03Ɖ\x03Ɖ\x03Ɖ\x03Ɖ\x03Ɖ\x03Ɖ\x03Ɖ')
buf.write('\x03Ɖ\x03Ɖ\x03Ɗ\x03Ɗ\x03Ɗ\x03Ɗ\x03Ɗ')
buf.write('\x03Ɗ\x03Ƌ\x03Ƌ\x03Ƌ\x03Ƌ\x03Ƌ\x03Ƌ')
buf.write('\x03Ƌ\x03Ƌ\x03ƌ\x03ƌ\x03ƌ\x03ƌ\x03ƌ')
buf.write('\x03ƍ\x03ƍ\x03ƍ\x03ƍ\x03ƍ\x03Ǝ\x03Ǝ')
buf.write('\x03Ǝ\x03Ǝ\x03Ǝ\x03Ǝ\x03Ǝ\x03Ǝ\x03Ǝ')
buf.write('\x03Ə\x03Ə\x03Ə\x03Ə\x03Ə\x03Ɛ\x03Ɛ')
buf.write('\x03Ɛ\x03Ɛ\x03Ɛ\x03Ɛ\x03Ɛ\x03Ɛ\x03Ɛ')
buf.write('\x03Ɛ\x03Ƒ\x03Ƒ\x03Ƒ\x03Ƒ\x03Ƒ\x03Ƒ')
buf.write('\x03ƒ\x03ƒ\x03ƒ\x03ƒ\x03ƒ\x03ƒ\x03Ɠ')
buf.write('\x03Ɠ\x03Ɠ\x03Ɠ\x03Ɠ\x03Ɠ\x03Ɠ\x03Ɣ')
buf.write('\x03Ɣ\x03Ɣ\x03Ɣ\x03Ɣ\x03Ɣ\x03Ɣ\x03Ɣ')
buf.write('\x03Ɣ\x03Ɣ\x03ƕ\x03ƕ\x03ƕ\x03ƕ\x03ƕ')
buf.write('\x03ƕ\x03ƕ\x03ƕ\x03Ɩ\x03Ɩ\x03Ɩ\x03Ɩ')
buf.write('\x03Ɩ\x03Ɩ\x03Ɨ\x03Ɨ\x03Ɨ\x03Ɨ\x03Ɨ')
buf.write('\x03Ɨ\x03Ɨ\x03Ƙ\x03Ƙ\x03Ƙ\x03Ƙ\x03Ƙ')
buf.write('\x03Ƙ\x03Ƙ\x03Ƙ\x03ƙ\x03ƙ\x03ƙ\x03ƙ')
buf.write('\x03ƙ\x03ƙ\x03ƙ\x03ƚ\x03ƚ\x03ƚ\x03ƚ')
buf.write('\x03ƚ\x03ƚ\x03ƚ\x03ƛ\x03ƛ\x03ƛ\x03ƛ')
buf.write('\x03Ɯ\x03Ɯ\x03Ɯ\x03Ɯ\x03Ɯ\x03Ɯ\x03Ɲ')
buf.write('\x03Ɲ\x03Ɲ\x03Ɲ\x03Ɲ\x03Ɲ\x03Ɲ\x03Ɲ')
buf.write('\x03Ɲ\x03ƞ\x03ƞ\x03ƞ\x03ƞ\x03ƞ\x03ƞ')
buf.write('\x03Ɵ\x03Ɵ\x03Ɵ\x03Ɵ\x03Ɵ\x03Ɵ\x03Ɵ')
buf.write('\x03Ơ\x03Ơ\x03Ơ\x03Ơ\x03Ơ\x03Ơ\x03Ơ')
buf.write('\x03Ơ\x03ơ\x03ơ\x03ơ\x03ơ\x03ơ\x03ơ')
buf.write('\x03ơ\x03ơ\x03ơ\x03Ƣ\x03Ƣ\x03Ƣ\x03Ƣ')
buf.write('\x03Ƣ\x03Ƣ\x03Ƣ\x03Ƣ\x03Ƣ\x03ƣ\x03ƣ')
buf.write('\x03ƣ\x03ƣ\x03ƣ\x03ƣ\x03ƣ\x03Ƥ\x03Ƥ')
buf.write('\x03Ƥ\x03Ƥ\x03Ƥ\x03Ƥ\x03Ƥ\x03Ƥ\x03ƥ')
buf.write('\x03ƥ\x03ƥ\x03ƥ\x03ƥ\x03ƥ\x03ƥ\x03ƥ')
buf.write('\x03Ʀ\x03Ʀ\x03Ʀ\x03Ʀ\x03Ʀ\x03Ʀ\x03Ʀ')
buf.write('\x03Ʀ\x03Ʀ\x03Ƨ\x03Ƨ\x03Ƨ\x03Ƨ\x03Ƨ')
buf.write('\x03ƨ\x03ƨ\x03ƨ\x03ƨ\x03ƨ\x03ƨ\x03ƨ')
buf.write('\x03ƨ\x03Ʃ\x03Ʃ\x03Ʃ\x03Ʃ\x03Ʃ\x03Ʃ')
buf.write('\x03Ʃ\x03Ʃ\x03Ʃ\x03Ʃ\x03Ʃ\x03ƪ\x03ƪ')
buf.write('\x03ƪ\x03ƪ\x03ƪ\x03ƫ\x03ƫ\x03ƫ\x03ƫ')
buf.write('\x03ƫ\x03ƫ\x03ƫ\x03ƫ\x03ƫ\x03Ƭ\x03Ƭ')
buf.write('\x03Ƭ\x03Ƭ\x03Ƭ\x03Ƭ\x03ƭ\x03ƭ\x03ƭ')
buf.write('\x03ƭ\x03ƭ\x03ƭ\x03Ʈ\x03Ʈ\x03Ʈ\x03Ʈ')
buf.write('\x03Ʈ\x03Ư\x03Ư\x03Ư\x03Ư\x03Ư\x03Ư')
buf.write('\x03Ư\x03ư\x03ư\x03ư\x03ư\x03ư\x03Ʊ')
buf.write('\x03Ʊ\x03Ʊ\x03Ʊ\x03Ʊ\x03Ʊ\x03Ʋ\x03Ʋ')
buf.write('\x03Ʋ\x03Ʋ\x03Ƴ\x03Ƴ\x03Ƴ\x03Ƴ\x03Ƴ')
buf.write('\x03Ƴ\x03Ƴ\x03ƴ\x03ƴ\x03ƴ\x03ƴ\x03ƴ')
buf.write('\x03ƴ\x03ƴ\x03ƴ\x03ƴ\x03ƴ\x03ƴ\x03ƴ')
buf.write('\x03ƴ\x03ƴ\x03Ƶ\x03Ƶ\x03Ƶ\x03Ƶ\x03Ƶ')
buf.write('\x03Ƶ\x03Ƶ\x03Ƶ\x03ƶ\x03ƶ\x03ƶ\x03ƶ')
buf.write('\x03ƶ\x03ƶ\x03ƶ\x03ƶ\x03ƶ\x03ƶ\x03ƶ')
buf.write('\x03ƶ\x03ƶ\x03Ʒ\x03Ʒ\x03Ʒ\x03Ʒ\x03Ʒ')
buf.write('\x03Ʒ\x03Ʒ\x03Ʒ\x03Ʒ\x03Ʒ\x03Ʒ\x03Ƹ')
buf.write('\x03Ƹ\x03Ƹ\x03Ƹ\x03Ƹ\x03Ƹ\x03Ƹ\x03Ƹ')
buf.write('\x03Ƹ\x03Ƹ\x03ƹ\x03ƹ\x03ƹ\x03ƹ\x03ƹ')
buf.write('\x03ƹ\x03ƹ\x03ƹ\x03ƹ\x03ƹ\x03ƺ\x03ƺ')
buf.write('\x03ƺ\x03ƺ\x03ƺ\x03ƺ\x03ƺ\x03ƺ\x03ƺ')
buf.write('\x03ƺ\x03ƺ\x03ƺ\x03ƺ\x03ƺ\x03ƻ\x03ƻ')
buf.write('\x03ƻ\x03ƻ\x03ƻ\x03ƻ\x03ƻ\x03ƻ\x03ƻ')
buf.write('\x03Ƽ\x03Ƽ\x03Ƽ\x03Ƽ\x03Ƽ\x03Ƽ\x03ƽ')
buf.write('\x03ƽ\x03ƽ\x03ƽ\x03ƽ\x03ƽ\x03ƽ\x03ƽ')
buf.write('\x03ƽ\x03ƾ\x03ƾ\x03ƾ\x03ƾ\x03ƾ\x03ƾ')
buf.write('\x03ƾ\x03ƾ\x03ƿ\x03ƿ\x03ƿ\x03ƿ\x03ƿ')
buf.write('\x03ƿ\x03ƿ\x03ƿ\x03ƿ\x03ƿ\x03ƿ\x03ƿ')
buf.write('\x03ƿ\x03ǀ\x03ǀ\x03ǀ\x03ǀ\x03ǀ\x03ǀ')
buf.write('\x03ǀ\x03ǀ\x03ǀ\x03ǁ\x03ǁ\x03ǁ\x03ǁ')
buf.write('\x03ǁ\x03ǂ\x03ǂ\x03ǂ\x03ǂ\x03ǃ\x03ǃ')
buf.write('\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ')
buf.write('\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ')
buf.write('\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ\x03ǃ')
buf.write('\x03ǃ\x03ǃ\x03DŽ\x03DŽ\x03DŽ\x03DŽ\x03DŽ')
buf.write('\x03Dž\x03Dž\x03Dž\x03Dž\x03Dž\x03Dž\x03Dž')
buf.write('\x03Dž\x03Dž\x03Dž\x03Dž\x03dž\x03dž\x03dž')
buf.write('\x03dž\x03dž\x03dž\x03dž\x03dž\x03dž\x03dž')
buf.write('\x03dž\x03dž\x03dž\x03dž\x03dž\x03dž\x03dž')
buf.write('\x03dž\x03LJ\x03LJ\x03LJ\x03LJ\x03LJ\x03LJ')
buf.write('\x03LJ\x03LJ\x03LJ\x03LJ\x03LJ\x03LJ\x03LJ')
buf.write('\x03LJ\x03LJ\x03LJ\x03Lj\x03Lj\x03Lj\x03Lj')
buf.write('\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj')
buf.write('\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj\x03Lj')
buf.write('\x03Lj\x03lj\x03lj\x03lj\x03lj\x03lj\x03lj')
buf.write('\x03lj\x03lj\x03lj\x03lj\x03lj\x03lj\x03lj')
buf.write('\x03lj\x03lj\x03lj\x03lj\x03lj\x03lj\x03lj')
buf.write('\x03lj\x03lj\x03lj\x03NJ\x03NJ\x03NJ\x03NJ')
buf.write('\x03NJ\x03NJ\x03NJ\x03NJ\x03NJ\x03NJ\x03NJ')
buf.write('\x03NJ\x03NJ\x03NJ\x03NJ\x03Nj\x03Nj\x03Nj')
buf.write('\x03Nj\x03Nj\x03Nj\x03Nj\x03Nj\x03Nj\x03Nj')
buf.write('\x03nj\x03nj\x03nj\x03nj\x03nj\x03nj\x03nj')
buf.write('\x03nj\x03nj\x03nj\x03nj\x03Ǎ\x03Ǎ\x03Ǎ')
buf.write('\x03Ǎ\x03Ǎ\x03Ǎ\x03Ǎ\x03Ǎ\x03ǎ\x03ǎ')
buf.write('\x03ǎ\x03ǎ\x03ǎ\x03ǎ\x03ǎ\x03ǎ\x03ǎ')
buf.write('\x03ǎ\x03ǎ\x03ǎ\x03ǎ\x03Ǐ\x03Ǐ\x03Ǐ')
buf.write('\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ')
buf.write('\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03Ǐ\x03ǐ')
buf.write('\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ')
buf.write('\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ\x03ǐ')
buf.write('\x03ǐ\x03Ǒ\x03Ǒ\x03Ǒ\x03Ǒ\x03Ǒ\x03ǒ')
buf.write('\x03ǒ\x03ǒ\x03ǒ\x03Ǔ\x03Ǔ\x03Ǔ\x03Ǔ')
buf.write('\x03Ǔ\x03ǔ\x03ǔ\x03ǔ\x03ǔ\x03Ǖ\x03Ǖ')
buf.write('\x03Ǖ\x03Ǖ\x03Ǖ\x03ǖ\x03ǖ\x03ǖ\x03ǖ')
buf.write('\x03Ǘ\x03Ǘ\x03Ǘ\x03Ǘ\x03Ǘ\x03Ǘ\x03Ǘ')
buf.write('\x03ǘ\x03ǘ\x03ǘ\x03ǘ\x03Ǚ\x03Ǚ\x03Ǚ')
buf.write('\x03Ǚ\x03Ǚ\x03Ǚ\x03ǚ\x03ǚ\x03ǚ\x03ǚ')
buf.write('\x03ǚ\x03ǚ\x03ǚ\x03ǚ\x03ǚ\x03ǚ\x03ǚ')
buf.write('\x03ǚ\x03ǚ\x03ǚ\x03ǚ\x03ǚ\x03Ǜ\x03Ǜ')
buf.write('\x03Ǜ\x03Ǜ\x03Ǜ\x03Ǜ\x03Ǜ\x03Ǜ\x03Ǜ')
buf.write('\x03Ǜ\x03Ǜ\x03ǜ\x03ǜ\x03ǜ\x03ǜ\x03ǝ')
buf.write('\x03ǝ\x03ǝ\x03ǝ\x03ǝ\x03ǝ\x03ǝ\x03ǝ')
buf.write('\x03ǝ\x03Ǟ\x03Ǟ\x03Ǟ\x03Ǟ\x03Ǟ\x03Ǟ')
buf.write('\x03ǟ\x03ǟ\x03ǟ\x03ǟ\x03ǟ\x03ǟ\x03ǟ')
buf.write('\x03Ǡ\x03Ǡ\x03Ǡ\x03Ǡ\x03Ǡ\x03ǡ\x03ǡ')
buf.write('\x03ǡ\x03ǡ\x03ǡ\x03ǡ\x03ǡ\x03Ǣ\x03Ǣ')
buf.write('\x03Ǣ\x03Ǣ\x03Ǣ\x03Ǣ\x07Ǣ፨\nǢ')
buf.write('\x0cǢ\x0eǢ፫\x0bǢ\x03Ǣ\x03Ǣ\x03ǣ')
buf.write('\x03ǣ\x03ǣ\x07ǣ፲\nǣ\x0cǣ\x0eǣ')
buf.write('፵\x0bǣ\x03ǣ\x06ǣ፸\nǣ\rǣ')
buf.write('\x0eǣ፹\x03Ǥ\x03Ǥ\x03Ǥ\x07Ǥ\u137f')
buf.write('\nǤ\x0cǤ\x0eǤᎂ\x0bǤ\x03Ǥ\x06Ǥ')
buf.write('ᎅ\nǤ\rǤ\x0eǤᎆ\x03ǥ\x03ǥ')
buf.write('\x03ǥ\x03Ǧ\x03Ǧ\x03ǧ\x03ǧ\x03Ǩ\x03Ǩ')
buf.write('\x03Ǩ\x05Ǩ᎓\nǨ\x03Ǩ\x03Ǩ\x05Ǩ')
buf.write('᎗\nǨ\x05Ǩ᎙\nǨ\x03Ǩ\x03Ǩ\x05')
buf.write('Ǩ\u139d\nǨ\x03ǩ\x03ǩ\x03ǩ\x03ǩ\x03')
buf.write('ǩ\x07ǩᎤ\nǩ\x0cǩ\x0eǩᎧ\x0b')
buf.write('ǩ\x03ǩ\x03ǩ\x03Ǫ\x03Ǫ\x03Ǫ\x03Ǫ')
buf.write('\x03Ǫ\x05ǪᎰ\nǪ\x03Ǫ\x03Ǫ\x03ǫ')
buf.write('\x03ǫ\x03Ǭ\x03Ǭ\x03Ǭ\x07ǬᎹ\nǬ')
buf.write('\x0cǬ\x0eǬᎼ\x0bǬ\x03Ǭ\x03Ǭ\x03Ǭ')
buf.write('\x03ǭ\x03ǭ\x03ǭ\x07ǭᏄ\nǭ\x0cǭ')
buf.write('\x0eǭᏇ\x0bǭ\x03ǭ\x03ǭ\x03ǭ\x03Ǯ')
buf.write('\x03Ǯ\x03Ǯ\x07ǮᏏ\nǮ\x0cǮ\x0eǮ')
buf.write('Ꮢ\x0bǮ\x03Ǯ\x03Ǯ\x03Ǯ\x03ǯ\x03ǯ')
buf.write('\x03ǯ\x07ǯᏚ\nǯ\x0cǯ\x0eǯᏝ')
buf.write('\x0bǯ\x03ǯ\x03ǯ\x03ǯ\x03ǰ\x03ǰ\x03DZ')
buf.write('\x03DZ\x03DZ\x03DZ\x06DZᏨ\nDZ\rDZ')
buf.write('\x0eDZᏩ\x03DZ\x03DZ\x03Dz\x03Dz\x03dz')
buf.write('\x03dz\x03Ǵ\x03Ǵ\x03ǵ\x03ǵ\x03Ƕ\x03Ƕ')
buf.write('\x03Ƕ\x03Ƿ\x03Ƿ\x03Ǹ\x03Ǹ\x03ǹ\x03ǹ')
buf.write('\x03Ǻ\x03Ǻ\x03ǻ\x03ǻ\x03Ǽ\x03Ǽ\x03ǽ')
buf.write('\x03ǽ\x03ǽ\x03Ǿ\x03Ǿ\x03Ǿ\x03Ǿ\x07Ǿ')
buf.write('ᐌ\nǾ\x0cǾ\x0eǾᐏ\x0bǾ\x03Ǿ')
buf.write('\x03Ǿ\x03Ǿ\x03Ǿ\x03Ǿ\x05Ǿᐖ\nǾ')
buf.write('\x03ǿ\x03ǿ\x03Ȁ\x03Ȁ\x03ȁ\x03ȁ\x03ȁ')
buf.write('\x03Ȃ\x03Ȃ\x03ȃ\x03ȃ\x03ȃ\x03Ȅ\x03Ȅ')
buf.write('\x03Ȅ\x03Ȅ\x03Ȅ\x03Ȅ\x03Ȅ\x03Ȅ\x05Ȅ')
buf.write('ᐬ\nȄ\x03ȅ\x03ȅ\x03Ȇ\x03Ȇ\x03ȇ')
buf.write('\x03ȇ\x03Ȉ\x03Ȉ\x03ȉ\x03ȉ\x03Ȋ\x03Ȋ')
buf.write('\x03Ȋ\x03ȋ\x03ȋ\x03Ȍ\x03Ȍ\x03ȍ\x03ȍ')
buf.write('\x03Ȏ\x03Ȏ\x03ȏ\x03ȏ\x03Ȑ\x06Ȑᑆ')
buf.write('\nȐ\rȐ\x0eȐᑇ\x03Ȑ\x03Ȑ\x03ȑ')
buf.write('\x03ȑ\x03Ȓ\x06Ȓᑏ\nȒ\rȒ\x0eȒ')
buf.write('ᑐ\x03ȓ\x07ȓᑔ\nȓ\x0cȓ\x0eȓ')
buf.write('ᑗ\x0bȓ\x03ȓ\x05ȓᑚ\nȓ\x03ȓ')
buf.write('\x06ȓᑝ\nȓ\rȓ\x0eȓᑞ\x03Ȕ')
buf.write('\x03Ȕ\x03Ȕ\x03Ȕ\x07Ȕᑥ\nȔ\x0cȔ')
buf.write('\x0eȔᑨ\x0bȔ\x03Ȕ\x03Ȕ\x05Ȕᑬ')
buf.write('\nȔ\x03Ȕ\x03Ȕ\x03ȕ\x03ȕ\x03ȕ\x03ȕ')
buf.write('\x07ȕᑴ\nȕ\x0cȕ\x0eȕᑷ\x0bȕ')
buf.write('\x03ȕ\x03ȕ\x03ȕ\x03ȕ\x03ȕ\x03Ȗ\x03Ȗ')
buf.write('\x03Ȗ\x03Ȗ\x03Ȗ\x03Ȗ\x03Ȗ\x03Ȗ\x03Ȗ')
buf.write('\x07Ȗᒇ\nȖ\x0cȖ\x0eȖᒊ\x0bȖ')
buf.write('\x03Ȗ\x03Ȗ\x05Ȗᒎ\nȖ\x03ȗ\x05ȗ')
buf.write('ᒑ\nȗ\x03ȗ\x03ȗ\x03Ș\x03Ș\x03ș')
buf.write('\x03ș\x03ș\x07șᒚ\nș\x0cș\x0eș')
buf.write('ᒝ\x0bș\x03Ț\x03Ț\x03Ț\x03Ț\x03Ț')
buf.write('\x03ț\x03ț\x03Ȝ\x03Ȝ\x03ȝ\x03ȝ\x03Ȟ')
buf.write('\x03Ȟ\x03ȟ\x03ȟ\x03Ƞ\x03Ƞ\x03ȡ\x03ȡ')
buf.write('\x03Ȣ\x03Ȣ\x03ȣ\x03ȣ\x03Ȥ\x03Ȥ\x03ȥ')
buf.write('\x03ȥ\x03Ȧ\x03Ȧ\x03ȧ\x03ȧ\x03Ȩ\x03Ȩ')
buf.write('\x03ȩ\x03ȩ\x03Ȫ\x03Ȫ\x03ȫ\x03ȫ\x03Ȭ')
buf.write('\x03Ȭ\x03ȭ\x03ȭ\x03Ȯ\x03Ȯ\x03ȯ\x03ȯ')
buf.write('\x03Ȱ\x03Ȱ\x03ȱ\x03ȱ\x03Ȳ\x03Ȳ\x03ȳ')
buf.write('\x03ȳ\x03ȴ\x03ȴ\x07ᎺᏅᏐᏛᑵ')
buf.write(
'\x02ȵ\x03\x03\x05\x04\x07\x05\t\x06\x0b\x07\r\x08\x0f\t\x11\n\x13\x0b\x15\x0c'
)
buf.write(
"\x17\r\x19\x0e\x1b\x0f\x1d\x10\x1f\x11!\x12#\x13%\x14'\x15)\x16+\x17"
)
buf.write('-\x18/\x191\x1a3\x1b5\x1c7\x1d9\x1e;\x1f= ?!A"C#E$G%')
buf.write("I&K'M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i6k7")
buf.write('m8o9q:s;u<w=y>{?}@\x7fA\x81B\x83C\x85D\x87E\x89')
buf.write('F\x8bG\x8dH\x8fI\x91J\x93K\x95L\x97M\x99')
buf.write('N\x9bO\x9dP\x9fQ¡R£S¥T§U©')
buf.write('V«W\xadX¯Y±Z³[µ\\·]¹')
buf.write('^»_½`¿aÁbÃcÅdÇeÉ')
buf.write('fËgÍhÏiÑjÓkÕl×mÙ')
buf.write('nÛoÝpßqárãsåtçué')
buf.write('vëwíxïyñzó{õ|÷}ù')
buf.write('~û\x7fý\x80ÿ\x81ā\x82ă')
buf.write('\x83ą\x84ć\x85ĉ\x86ċ\x87')
buf.write('č\x88ď\x89đ\x8aē\x8bĕ')
buf.write('\x8cė\x8dę\x8eě\x8fĝ\x90')
buf.write('ğ\x91ġ\x92ģ\x93ĥ\x94ħ')
buf.write('\x95ĩ\x96ī\x97ĭ\x98į\x99')
buf.write('ı\x9aij\x9bĵ\x9cķ\x9dĹ')
buf.write('\x9eĻ\x9fĽ\xa0Ŀ¡Ł¢')
buf.write('Ń£Ņ¤Ň¥ʼn¦ŋ')
buf.write('§ō¨ŏ©őªœ«')
buf.write('ŕ¬ŗ\xadř®ś¯ŝ')
buf.write('°ş±š²ţ³ť´')
buf.write('ŧµũ¶ū·ŭ¸ů')
buf.write('¹űºų»ŵ¼ŷ½')
buf.write('Ź¾Ż¿ŽÀſÁƁ')
buf.write('ÂƃÃƅÄƇÅƉÆ')
buf.write('ƋÇƍÈƏÉƑÊƓ')
buf.write('ËƕÌƗÍƙÎƛÏ')
buf.write('ƝÐƟÑơÒƣÓƥ')
buf.write('ÔƧÕƩÖƫ×ƭØ')
buf.write('ƯÙƱÚƳÛƵÜƷ')
buf.write('ÝƹÞƻßƽàƿá')
buf.write('ǁâǃãDžäLJålj')
buf.write('æNjçǍèǏéǑê')
buf.write('ǓëǕìǗíǙîǛ')
buf.write('ïǝðǟñǡòǣó')
buf.write('ǥôǧõǩöǫ÷ǭ')
buf.write('øǯùDZúdzûǵü')
buf.write('ǷýǹþǻÿǽĀǿ')
buf.write('āȁĂȃăȅĄȇą')
buf.write('ȉĆȋćȍĈȏĉȑ')
buf.write('ĊȓċȕČȗčșĎ')
buf.write('țďȝĐȟđȡĒȣ')
buf.write('ēȥĔȧĕȩĖȫė')
buf.write('ȭĘȯęȱĚȳěȵ')
buf.write('ĜȷĝȹĞȻğȽĠ')
buf.write('ȿġɁĢɃģɅĤɇ')
buf.write('ĥɉĦɋħɍĨɏĩ')
buf.write('ɑĪɓīɕĬɗĭə')
buf.write('ĮɛįɝİɟıɡIJ')
buf.write('ɣijɥĴɧĵɩĶɫ')
buf.write('ķɭĸɯĹɱĺɳĻ')
buf.write('ɵļɷĽɹľɻĿɽ')
buf.write('ŀɿŁʁłʃŃʅń')
buf.write('ʇŅʉņʋŇʍňʏ')
buf.write('ʼnʑŊʓŋʕŌʗō')
buf.write('ʙŎʛŏʝŐʟőʡ')
buf.write('ŒʣœʥŔʧŕʩŖ')
buf.write('ʫŗʭŘʯřʱŚʳ')
buf.write('śʵŜʷŝʹŞʻş')
buf.write('ʽŠʿšˁŢ˃ţ˅')
buf.write('ŤˇťˉŦˋŧˍŨ')
buf.write('ˏũˑŪ˓ū˕Ŭ˗')
buf.write('ŭ˙ٲů˝Ű˟ű')
buf.write('ˡŲˣų˥Ŵ˧ŵ˩')
buf.write('Ŷ˫ŷ˭Ÿ˯Ź˱ź')
buf.write('˳Ż˵ż˷Ž˹ž˻')
buf.write('ſ˽ƀ˿Ɓ́Ƃ̃ƃ')
buf.write('̅Ƅ̇ƅ̉Ɔ̋Ƈ̍')
buf.write('ƈ̏Ɖ̑Ɗ̓Ƌ̕ƌ')
buf.write('̗ƍ̙Ǝ̛Ə̝Ɛ̟')
buf.write('Ƒ̡ƒ̣Ɠ̥Ɣ̧ƕ')
buf.write('̩Ɩ̫Ɨ̭Ƙ̯ƙ̱')
buf.write('ƚ̳ƛ̵Ɯ̷Ɲ̹ƞ')
buf.write('̻Ɵ̽Ơ̿ớƢ̓')
buf.write('ƣͅƤ͇ƥ͉Ʀ͋Ƨ')
buf.write('͍ƨ͏Ʃ͑ƪ͓ƫ͕')
buf.write('Ƭ͗ƭ͙Ʈ͛Ư͝ư')
buf.write('͟Ʊ͡ƲͣƳͥƴͧ')
buf.write('ƵͩƶͫƷͭƸͯƹ')
buf.write('ͱƺͳƻ͵Ƽͷƽ\u0379')
buf.write('ƾͻƿͽǀͿǁ\u0381ǂ')
buf.write('\u0383ǃ΅DŽ·DžΉdž\u038b')
buf.write('LJ\u038dLjΏljΑNJΓNj')
buf.write('ΕnjΗǍΙǎΛǏΝ')
buf.write('ǐΟǑΡǒΣǓΥǔ')
buf.write('ΧǕΩǖΫǗέǘί')
buf.write('ǙαǚγǛεǜηǝ')
buf.write('ιǞλǟνǠοǡρ')
buf.write('ǢσǣυǤχǥωǦ')
buf.write('ϋǧύǨϏǩϑǪϓ')
buf.write('\x02ϕ\x02ϗ\x02ϙ\x02ϛ\x02ϝ\x02ϟ\x02ϡ')
buf.write('ǫϣǬϥǭϧǮϩǯ')
buf.write('ϫǰϭDZϯDzϱdzϳ')
buf.write('ǴϵǵϷǶϹǷϻǸ')
buf.write('ϽǹϿǺЁǻЃǼЅ')
buf.write('ǽЇǾЉǿЋȀЍȁ')
buf.write('ЏȂБ\x02ГȃЕȄЗȅ')
buf.write('ЙȆЛȇНȈПȉС')
buf.write('\x02У\x02Х\x02ЧȊЩȋЫȌ')
buf.write('Э\x02Я\x02бȍгȎе\x02з')
buf.write('\x02й\x02л\x02н\x02п\x02с\x02у\x02х')
buf.write('\x02ч\x02щ\x02ы\x02э\x02я\x02ё\x02ѓ')
buf.write('\x02ѕ\x02ї\x02љ\x02ћ\x02ѝ\x02џ\x02ѡ')
buf.write(
"\x02ѣ\x02ѥ\x02ѧ\x02\x03\x02'\x05\x02\x0c\x0c\x0f\x0f))\x05\x022")
buf.write(
';CHch\x04\x02GGgg\x04\x02--//\t\x02\x0b\x0c\x0f\x0f""**>>]]}}\x05\x02\x0c'
)
buf.write(
'\x0c\x0f\x0f$$\x04\x022;aa\x05\x02\x0b\x0c\x0f\x0f""\x04\x02C\\c|\x04\x02\x0c'
)
buf.write(
'\x0c\x0f\x0f\x04\x02\x0b\x0b""\x05\x02%&2;aa\x04\x02CCcc\x04\x02DDdd\x04\x02'
)
buf.write(
'EEee\x04\x02FFff\x04\x02HHhh\x04\x02IIii\x04\x02JJjj\x04\x02KKkk\x04\x02LLll\x04'
)
buf.write(
'\x02MMmm\x04\x02NNnn\x04\x02OOoo\x04\x02PPpp\x04\x02QQqq\x04\x02RRrr\x04\x02SSs'
)
buf.write(
's\x04\x02TTtt\x04\x02UUuu\x04\x02VVvv\x04\x02WWww\x04\x02XXxx\x04\x02YYyy\x04\x02'
)
buf.write(
'ZZzz\x04\x02[[{{\x04\x02\\\\||\x02ᓝ\x02\x03\x03\x02\x02\x02\x02\x05\x03\x02\x02\x02'
)
buf.write(
'\x02\x07\x03\x02\x02\x02\x02\t\x03\x02\x02\x02\x02\x0b\x03\x02\x02\x02\x02\r\x03\x02\x02\x02\x02\x0f'
)
buf.write(
'\x03\x02\x02\x02\x02\x11\x03\x02\x02\x02\x02\x13\x03\x02\x02\x02\x02\x15\x03\x02\x02\x02\x02\x17\x03'
)
buf.write(
'\x02\x02\x02\x02\x19\x03\x02\x02\x02\x02\x1b\x03\x02\x02\x02\x02\x1d\x03\x02\x02\x02\x02\x1f\x03\x02'
)
buf.write(
"\x02\x02\x02!\x03\x02\x02\x02\x02#\x03\x02\x02\x02\x02%\x03\x02\x02\x02\x02'\x03\x02\x02\x02\x02)\x03"
)
buf.write(
'\x02\x02\x02\x02+\x03\x02\x02\x02\x02-\x03\x02\x02\x02\x02/\x03\x02\x02\x02\x021\x03\x02\x02\x02\x02'
)
buf.write(
'3\x03\x02\x02\x02\x025\x03\x02\x02\x02\x027\x03\x02\x02\x02\x029\x03\x02\x02\x02\x02;\x03'
)
buf.write(
'\x02\x02\x02\x02=\x03\x02\x02\x02\x02?\x03\x02\x02\x02\x02A\x03\x02\x02\x02\x02C\x03\x02\x02\x02\x02E'
)
buf.write(
'\x03\x02\x02\x02\x02G\x03\x02\x02\x02\x02I\x03\x02\x02\x02\x02K\x03\x02\x02\x02\x02M\x03\x02\x02\x02\x02'
)
buf.write(
'O\x03\x02\x02\x02\x02Q\x03\x02\x02\x02\x02S\x03\x02\x02\x02\x02U\x03\x02\x02\x02\x02W\x03\x02\x02\x02'
)
buf.write(
'\x02Y\x03\x02\x02\x02\x02[\x03\x02\x02\x02\x02]\x03\x02\x02\x02\x02_\x03\x02\x02\x02\x02a\x03\x02\x02'
)
buf.write(
'\x02\x02c\x03\x02\x02\x02\x02e\x03\x02\x02\x02\x02g\x03\x02\x02\x02\x02i\x03\x02\x02\x02\x02k\x03\x02'
)
buf.write(
'\x02\x02\x02m\x03\x02\x02\x02\x02o\x03\x02\x02\x02\x02q\x03\x02\x02\x02\x02s\x03\x02\x02\x02\x02u\x03'
)
buf.write(
'\x02\x02\x02\x02w\x03\x02\x02\x02\x02y\x03\x02\x02\x02\x02{\x03\x02\x02\x02\x02}\x03\x02\x02\x02\x02\x7f'
)
buf.write(
'\x03\x02\x02\x02\x02\x81\x03\x02\x02\x02\x02\x83\x03\x02\x02\x02\x02\x85\x03\x02\x02'
)
buf.write(
'\x02\x02\x87\x03\x02\x02\x02\x02\x89\x03\x02\x02\x02\x02\x8b\x03\x02\x02\x02\x02\x8d'
)
buf.write(
'\x03\x02\x02\x02\x02\x8f\x03\x02\x02\x02\x02\x91\x03\x02\x02\x02\x02\x93\x03\x02\x02'
)
buf.write(
'\x02\x02\x95\x03\x02\x02\x02\x02\x97\x03\x02\x02\x02\x02\x99\x03\x02\x02\x02\x02\x9b'
)
buf.write(
'\x03\x02\x02\x02\x02\x9d\x03\x02\x02\x02\x02\x9f\x03\x02\x02\x02\x02¡\x03\x02\x02'
)
buf.write(
'\x02\x02£\x03\x02\x02\x02\x02¥\x03\x02\x02\x02\x02§\x03\x02\x02\x02\x02©'
)
buf.write(
'\x03\x02\x02\x02\x02«\x03\x02\x02\x02\x02\xad\x03\x02\x02\x02\x02¯\x03\x02\x02'
)
buf.write(
'\x02\x02±\x03\x02\x02\x02\x02³\x03\x02\x02\x02\x02µ\x03\x02\x02\x02\x02·'
)
buf.write(
'\x03\x02\x02\x02\x02¹\x03\x02\x02\x02\x02»\x03\x02\x02\x02\x02½\x03\x02\x02'
)
buf.write(
'\x02\x02¿\x03\x02\x02\x02\x02Á\x03\x02\x02\x02\x02Ã\x03\x02\x02\x02\x02Å'
)
buf.write(
'\x03\x02\x02\x02\x02Ç\x03\x02\x02\x02\x02É\x03\x02\x02\x02\x02Ë\x03\x02\x02'
)
buf.write(
'\x02\x02Í\x03\x02\x02\x02\x02Ï\x03\x02\x02\x02\x02Ñ\x03\x02\x02\x02\x02Ó'
)
buf.write(
'\x03\x02\x02\x02\x02Õ\x03\x02\x02\x02\x02×\x03\x02\x02\x02\x02Ù\x03\x02\x02'
)
buf.write(
'\x02\x02Û\x03\x02\x02\x02\x02Ý\x03\x02\x02\x02\x02ß\x03\x02\x02\x02\x02á'
)
buf.write(
'\x03\x02\x02\x02\x02ã\x03\x02\x02\x02\x02å\x03\x02\x02\x02\x02ç\x03\x02\x02'
)
buf.write(
'\x02\x02é\x03\x02\x02\x02\x02ë\x03\x02\x02\x02\x02í\x03\x02\x02\x02\x02ï'
)
buf.write(
'\x03\x02\x02\x02\x02ñ\x03\x02\x02\x02\x02ó\x03\x02\x02\x02\x02õ\x03\x02\x02'
)
buf.write(
'\x02\x02÷\x03\x02\x02\x02\x02ù\x03\x02\x02\x02\x02û\x03\x02\x02\x02\x02ý'
)
buf.write(
'\x03\x02\x02\x02\x02ÿ\x03\x02\x02\x02\x02ā\x03\x02\x02\x02\x02ă\x03\x02\x02'
)
buf.write(
'\x02\x02ą\x03\x02\x02\x02\x02ć\x03\x02\x02\x02\x02ĉ\x03\x02\x02\x02\x02ċ'
)
buf.write(
'\x03\x02\x02\x02\x02č\x03\x02\x02\x02\x02ď\x03\x02\x02\x02\x02đ\x03\x02\x02'
)
buf.write(
'\x02\x02ē\x03\x02\x02\x02\x02ĕ\x03\x02\x02\x02\x02ė\x03\x02\x02\x02\x02ę'
)
buf.write(
'\x03\x02\x02\x02\x02ě\x03\x02\x02\x02\x02ĝ\x03\x02\x02\x02\x02ğ\x03\x02\x02'
)
buf.write(
'\x02\x02ġ\x03\x02\x02\x02\x02ģ\x03\x02\x02\x02\x02ĥ\x03\x02\x02\x02\x02ħ'
)
buf.write(
'\x03\x02\x02\x02\x02ĩ\x03\x02\x02\x02\x02ī\x03\x02\x02\x02\x02ĭ\x03\x02\x02'
)
buf.write(
'\x02\x02į\x03\x02\x02\x02\x02ı\x03\x02\x02\x02\x02ij\x03\x02\x02\x02\x02ĵ'
)
buf.write(
'\x03\x02\x02\x02\x02ķ\x03\x02\x02\x02\x02Ĺ\x03\x02\x02\x02\x02Ļ\x03\x02\x02'
)
buf.write(
'\x02\x02Ľ\x03\x02\x02\x02\x02Ŀ\x03\x02\x02\x02\x02Ł\x03\x02\x02\x02\x02Ń'
)
buf.write(
'\x03\x02\x02\x02\x02Ņ\x03\x02\x02\x02\x02Ň\x03\x02\x02\x02\x02ʼn\x03\x02\x02'
)
buf.write(
'\x02\x02ŋ\x03\x02\x02\x02\x02ō\x03\x02\x02\x02\x02ŏ\x03\x02\x02\x02\x02ő'
)
buf.write(
'\x03\x02\x02\x02\x02œ\x03\x02\x02\x02\x02ŕ\x03\x02\x02\x02\x02ŗ\x03\x02\x02'
)
buf.write(
'\x02\x02ř\x03\x02\x02\x02\x02ś\x03\x02\x02\x02\x02ŝ\x03\x02\x02\x02\x02ş'
)
buf.write(
'\x03\x02\x02\x02\x02š\x03\x02\x02\x02\x02ţ\x03\x02\x02\x02\x02ť\x03\x02\x02'
)
buf.write(
'\x02\x02ŧ\x03\x02\x02\x02\x02ũ\x03\x02\x02\x02\x02ū\x03\x02\x02\x02\x02ŭ'
)
buf.write(
'\x03\x02\x02\x02\x02ů\x03\x02\x02\x02\x02ű\x03\x02\x02\x02\x02ų\x03\x02\x02'
)
buf.write(
'\x02\x02ŵ\x03\x02\x02\x02\x02ŷ\x03\x02\x02\x02\x02Ź\x03\x02\x02\x02\x02Ż'
)
buf.write(
'\x03\x02\x02\x02\x02Ž\x03\x02\x02\x02\x02ſ\x03\x02\x02\x02\x02Ɓ\x03\x02\x02'
)
buf.write(
'\x02\x02ƃ\x03\x02\x02\x02\x02ƅ\x03\x02\x02\x02\x02Ƈ\x03\x02\x02\x02\x02Ɖ'
)
buf.write(
'\x03\x02\x02\x02\x02Ƌ\x03\x02\x02\x02\x02ƍ\x03\x02\x02\x02\x02Ə\x03\x02\x02'
)
buf.write(
'\x02\x02Ƒ\x03\x02\x02\x02\x02Ɠ\x03\x02\x02\x02\x02ƕ\x03\x02\x02\x02\x02Ɨ'
)
buf.write(
'\x03\x02\x02\x02\x02ƙ\x03\x02\x02\x02\x02ƛ\x03\x02\x02\x02\x02Ɲ\x03\x02\x02'
)
buf.write(
'\x02\x02Ɵ\x03\x02\x02\x02\x02ơ\x03\x02\x02\x02\x02ƣ\x03\x02\x02\x02\x02ƥ'
)
buf.write(
'\x03\x02\x02\x02\x02Ƨ\x03\x02\x02\x02\x02Ʃ\x03\x02\x02\x02\x02ƫ\x03\x02\x02'
)
buf.write(
'\x02\x02ƭ\x03\x02\x02\x02\x02Ư\x03\x02\x02\x02\x02Ʊ\x03\x02\x02\x02\x02Ƴ'
)
buf.write(
'\x03\x02\x02\x02\x02Ƶ\x03\x02\x02\x02\x02Ʒ\x03\x02\x02\x02\x02ƹ\x03\x02\x02'
)
buf.write(
'\x02\x02ƻ\x03\x02\x02\x02\x02ƽ\x03\x02\x02\x02\x02ƿ\x03\x02\x02\x02\x02ǁ'
)
buf.write(
'\x03\x02\x02\x02\x02ǃ\x03\x02\x02\x02\x02Dž\x03\x02\x02\x02\x02LJ\x03\x02\x02'
)
buf.write(
'\x02\x02lj\x03\x02\x02\x02\x02Nj\x03\x02\x02\x02\x02Ǎ\x03\x02\x02\x02\x02Ǐ'
)
buf.write(
'\x03\x02\x02\x02\x02Ǒ\x03\x02\x02\x02\x02Ǔ\x03\x02\x02\x02\x02Ǖ\x03\x02\x02'
)
buf.write(
'\x02\x02Ǘ\x03\x02\x02\x02\x02Ǚ\x03\x02\x02\x02\x02Ǜ\x03\x02\x02\x02\x02ǝ'
)
buf.write(
'\x03\x02\x02\x02\x02ǟ\x03\x02\x02\x02\x02ǡ\x03\x02\x02\x02\x02ǣ\x03\x02\x02'
)
buf.write(
'\x02\x02ǥ\x03\x02\x02\x02\x02ǧ\x03\x02\x02\x02\x02ǩ\x03\x02\x02\x02\x02ǫ'
)
buf.write(
'\x03\x02\x02\x02\x02ǭ\x03\x02\x02\x02\x02ǯ\x03\x02\x02\x02\x02DZ\x03\x02\x02'
)
buf.write(
'\x02\x02dz\x03\x02\x02\x02\x02ǵ\x03\x02\x02\x02\x02Ƿ\x03\x02\x02\x02\x02ǹ'
)
buf.write(
'\x03\x02\x02\x02\x02ǻ\x03\x02\x02\x02\x02ǽ\x03\x02\x02\x02\x02ǿ\x03\x02\x02'
)
buf.write(
'\x02\x02ȁ\x03\x02\x02\x02\x02ȃ\x03\x02\x02\x02\x02ȅ\x03\x02\x02\x02\x02ȇ'
)
buf.write(
'\x03\x02\x02\x02\x02ȉ\x03\x02\x02\x02\x02ȋ\x03\x02\x02\x02\x02ȍ\x03\x02\x02'
)
buf.write(
'\x02\x02ȏ\x03\x02\x02\x02\x02ȑ\x03\x02\x02\x02\x02ȓ\x03\x02\x02\x02\x02ȕ'
)
buf.write(
'\x03\x02\x02\x02\x02ȗ\x03\x02\x02\x02\x02ș\x03\x02\x02\x02\x02ț\x03\x02\x02'
)
buf.write(
'\x02\x02ȝ\x03\x02\x02\x02\x02ȟ\x03\x02\x02\x02\x02ȡ\x03\x02\x02\x02\x02ȣ'
)
buf.write(
'\x03\x02\x02\x02\x02ȥ\x03\x02\x02\x02\x02ȧ\x03\x02\x02\x02\x02ȩ\x03\x02\x02'
)
buf.write(
'\x02\x02ȫ\x03\x02\x02\x02\x02ȭ\x03\x02\x02\x02\x02ȯ\x03\x02\x02\x02\x02ȱ'
)
buf.write(
'\x03\x02\x02\x02\x02ȳ\x03\x02\x02\x02\x02ȵ\x03\x02\x02\x02\x02ȷ\x03\x02\x02'
)
buf.write(
'\x02\x02ȹ\x03\x02\x02\x02\x02Ȼ\x03\x02\x02\x02\x02Ƚ\x03\x02\x02\x02\x02ȿ'
)
buf.write(
'\x03\x02\x02\x02\x02Ɂ\x03\x02\x02\x02\x02Ƀ\x03\x02\x02\x02\x02Ʌ\x03\x02\x02'
)
buf.write(
'\x02\x02ɇ\x03\x02\x02\x02\x02ɉ\x03\x02\x02\x02\x02ɋ\x03\x02\x02\x02\x02ɍ'
)
buf.write(
'\x03\x02\x02\x02\x02ɏ\x03\x02\x02\x02\x02ɑ\x03\x02\x02\x02\x02ɓ\x03\x02\x02'
)
buf.write(
'\x02\x02ɕ\x03\x02\x02\x02\x02ɗ\x03\x02\x02\x02\x02ə\x03\x02\x02\x02\x02ɛ'
)
buf.write(
'\x03\x02\x02\x02\x02ɝ\x03\x02\x02\x02\x02ɟ\x03\x02\x02\x02\x02ɡ\x03\x02\x02'
)
buf.write(
'\x02\x02ɣ\x03\x02\x02\x02\x02ɥ\x03\x02\x02\x02\x02ɧ\x03\x02\x02\x02\x02ɩ'
)
buf.write(
'\x03\x02\x02\x02\x02ɫ\x03\x02\x02\x02\x02ɭ\x03\x02\x02\x02\x02ɯ\x03\x02\x02'
)
buf.write(
'\x02\x02ɱ\x03\x02\x02\x02\x02ɳ\x03\x02\x02\x02\x02ɵ\x03\x02\x02\x02\x02ɷ'
)
buf.write(
'\x03\x02\x02\x02\x02ɹ\x03\x02\x02\x02\x02ɻ\x03\x02\x02\x02\x02ɽ\x03\x02\x02'
)
buf.write(
'\x02\x02ɿ\x03\x02\x02\x02\x02ʁ\x03\x02\x02\x02\x02ʃ\x03\x02\x02\x02\x02ʅ'
)
buf.write(
'\x03\x02\x02\x02\x02ʇ\x03\x02\x02\x02\x02ʉ\x03\x02\x02\x02\x02ʋ\x03\x02\x02'
)
buf.write(
'\x02\x02ʍ\x03\x02\x02\x02\x02ʏ\x03\x02\x02\x02\x02ʑ\x03\x02\x02\x02\x02ʓ'
)
buf.write(
'\x03\x02\x02\x02\x02ʕ\x03\x02\x02\x02\x02ʗ\x03\x02\x02\x02\x02ʙ\x03\x02\x02'
)
buf.write(
'\x02\x02ʛ\x03\x02\x02\x02\x02ʝ\x03\x02\x02\x02\x02ʟ\x03\x02\x02\x02\x02ʡ'
)
buf.write(
'\x03\x02\x02\x02\x02ʣ\x03\x02\x02\x02\x02ʥ\x03\x02\x02\x02\x02ʧ\x03\x02\x02'
)
buf.write(
'\x02\x02ʩ\x03\x02\x02\x02\x02ʫ\x03\x02\x02\x02\x02ʭ\x03\x02\x02\x02\x02ʯ'
)
buf.write(
'\x03\x02\x02\x02\x02ʱ\x03\x02\x02\x02\x02ʳ\x03\x02\x02\x02\x02ʵ\x03\x02\x02'
)
buf.write(
'\x02\x02ʷ\x03\x02\x02\x02\x02ʹ\x03\x02\x02\x02\x02ʻ\x03\x02\x02\x02\x02ʽ'
)
buf.write(
'\x03\x02\x02\x02\x02ʿ\x03\x02\x02\x02\x02ˁ\x03\x02\x02\x02\x02˃\x03\x02\x02'
)
buf.write(
'\x02\x02˅\x03\x02\x02\x02\x02ˇ\x03\x02\x02\x02\x02ˉ\x03\x02\x02\x02\x02ˋ'
)
buf.write(
'\x03\x02\x02\x02\x02ˍ\x03\x02\x02\x02\x02ˏ\x03\x02\x02\x02\x02ˑ\x03\x02\x02'
)
buf.write(
'\x02\x02˓\x03\x02\x02\x02\x02˕\x03\x02\x02\x02\x02˗\x03\x02\x02\x02\x02˙'
)
buf.write(
'\x03\x02\x02\x02\x02˛\x03\x02\x02\x02\x02˝\x03\x02\x02\x02\x02˟\x03\x02\x02'
)
buf.write(
'\x02\x02ˡ\x03\x02\x02\x02\x02ˣ\x03\x02\x02\x02\x02˥\x03\x02\x02\x02\x02˧'
)
buf.write(
'\x03\x02\x02\x02\x02˩\x03\x02\x02\x02\x02˫\x03\x02\x02\x02\x02˭\x03\x02\x02'
)
buf.write(
'\x02\x02˯\x03\x02\x02\x02\x02˱\x03\x02\x02\x02\x02˳\x03\x02\x02\x02\x02˵'
)
buf.write(
'\x03\x02\x02\x02\x02˷\x03\x02\x02\x02\x02˹\x03\x02\x02\x02\x02˻\x03\x02\x02'
)
buf.write(
'\x02\x02˽\x03\x02\x02\x02\x02˿\x03\x02\x02\x02\x02́\x03\x02\x02\x02\x02̃'
)
buf.write(
'\x03\x02\x02\x02\x02̅\x03\x02\x02\x02\x02̇\x03\x02\x02\x02\x02̉\x03\x02\x02'
)
buf.write(
'\x02\x02̋\x03\x02\x02\x02\x02̍\x03\x02\x02\x02\x02̏\x03\x02\x02\x02\x02̑'
)
buf.write(
'\x03\x02\x02\x02\x02̓\x03\x02\x02\x02\x02̕\x03\x02\x02\x02\x02̗\x03\x02\x02'
)
buf.write(
'\x02\x02̙\x03\x02\x02\x02\x02̛\x03\x02\x02\x02\x02̝\x03\x02\x02\x02\x02̟'
)
buf.write(
'\x03\x02\x02\x02\x02̡\x03\x02\x02\x02\x02̣\x03\x02\x02\x02\x02̥\x03\x02\x02'
)
buf.write(
'\x02\x02̧\x03\x02\x02\x02\x02̩\x03\x02\x02\x02\x02̫\x03\x02\x02\x02\x02̭'
)
buf.write(
'\x03\x02\x02\x02\x02̯\x03\x02\x02\x02\x02̱\x03\x02\x02\x02\x02̳\x03\x02\x02'
)
buf.write(
'\x02\x02̵\x03\x02\x02\x02\x02̷\x03\x02\x02\x02\x02̹\x03\x02\x02\x02\x02̻'
)
buf.write(
'\x03\x02\x02\x02\x02̽\x03\x02\x02\x02\x02̿\x03\x02\x02\x02\x02́\x03\x02\x02'
)
buf.write(
'\x02\x02̓\x03\x02\x02\x02\x02ͅ\x03\x02\x02\x02\x02͇\x03\x02\x02\x02\x02͉'
)
buf.write(
'\x03\x02\x02\x02\x02͋\x03\x02\x02\x02\x02͍\x03\x02\x02\x02\x02͏\x03\x02\x02'
)
buf.write(
'\x02\x02͑\x03\x02\x02\x02\x02͓\x03\x02\x02\x02\x02͕\x03\x02\x02\x02\x02͗'
)
buf.write(
'\x03\x02\x02\x02\x02͙\x03\x02\x02\x02\x02͛\x03\x02\x02\x02\x02͝\x03\x02\x02'
)
buf.write(
'\x02\x02͟\x03\x02\x02\x02\x02͡\x03\x02\x02\x02\x02ͣ\x03\x02\x02\x02\x02ͥ'
)
buf.write(
'\x03\x02\x02\x02\x02ͧ\x03\x02\x02\x02\x02ͩ\x03\x02\x02\x02\x02ͫ\x03\x02\x02'
)
buf.write(
'\x02\x02ͭ\x03\x02\x02\x02\x02ͯ\x03\x02\x02\x02\x02ͱ\x03\x02\x02\x02\x02ͳ'
)
buf.write(
'\x03\x02\x02\x02\x02͵\x03\x02\x02\x02\x02ͷ\x03\x02\x02\x02\x02\u0379\x03\x02\x02'
)
buf.write(
'\x02\x02ͻ\x03\x02\x02\x02\x02ͽ\x03\x02\x02\x02\x02Ϳ\x03\x02\x02\x02\x02\u0381'
)
buf.write(
'\x03\x02\x02\x02\x02\u0383\x03\x02\x02\x02\x02΅\x03\x02\x02\x02\x02·\x03\x02\x02'
)
buf.write(
'\x02\x02Ή\x03\x02\x02\x02\x02\u038b\x03\x02\x02\x02\x02\u038d\x03\x02\x02\x02\x02Ώ'
)
buf.write(
'\x03\x02\x02\x02\x02Α\x03\x02\x02\x02\x02Γ\x03\x02\x02\x02\x02Ε\x03\x02\x02'
)
buf.write(
'\x02\x02Η\x03\x02\x02\x02\x02Ι\x03\x02\x02\x02\x02Λ\x03\x02\x02\x02\x02Ν'
)
buf.write(
'\x03\x02\x02\x02\x02Ο\x03\x02\x02\x02\x02Ρ\x03\x02\x02\x02\x02Σ\x03\x02\x02'
)
buf.write(
'\x02\x02Υ\x03\x02\x02\x02\x02Χ\x03\x02\x02\x02\x02Ω\x03\x02\x02\x02\x02Ϋ'
)
buf.write(
'\x03\x02\x02\x02\x02έ\x03\x02\x02\x02\x02ί\x03\x02\x02\x02\x02α\x03\x02\x02'
)
buf.write(
'\x02\x02γ\x03\x02\x02\x02\x02ε\x03\x02\x02\x02\x02η\x03\x02\x02\x02\x02ι'
)
buf.write(
'\x03\x02\x02\x02\x02λ\x03\x02\x02\x02\x02ν\x03\x02\x02\x02\x02ο\x03\x02\x02'
)
buf.write(
'\x02\x02ρ\x03\x02\x02\x02\x02σ\x03\x02\x02\x02\x02υ\x03\x02\x02\x02\x02χ'
)
buf.write(
'\x03\x02\x02\x02\x02ω\x03\x02\x02\x02\x02ϋ\x03\x02\x02\x02\x02ύ\x03\x02\x02'
)
buf.write(
'\x02\x02Ϗ\x03\x02\x02\x02\x02ϑ\x03\x02\x02\x02\x02ϓ\x03\x02\x02\x02\x02ϡ'
)
buf.write(
'\x03\x02\x02\x02\x02ϣ\x03\x02\x02\x02\x02ϥ\x03\x02\x02\x02\x02ϧ\x03\x02\x02'
)
buf.write(
'\x02\x02ϩ\x03\x02\x02\x02\x02ϫ\x03\x02\x02\x02\x02ϭ\x03\x02\x02\x02\x02ϯ'
)
buf.write(
'\x03\x02\x02\x02\x02ϱ\x03\x02\x02\x02\x02ϳ\x03\x02\x02\x02\x02ϵ\x03\x02\x02'
)
buf.write(
'\x02\x02Ϸ\x03\x02\x02\x02\x02Ϲ\x03\x02\x02\x02\x02ϻ\x03\x02\x02\x02\x02Ͻ'
)
buf.write(
'\x03\x02\x02\x02\x02Ͽ\x03\x02\x02\x02\x02Ё\x03\x02\x02\x02\x02Ѓ\x03\x02\x02'
)
buf.write(
'\x02\x02Ѕ\x03\x02\x02\x02\x02Ї\x03\x02\x02\x02\x02Љ\x03\x02\x02\x02\x02Ћ'
)
buf.write(
'\x03\x02\x02\x02\x02Ѝ\x03\x02\x02\x02\x02Џ\x03\x02\x02\x02\x02Г\x03\x02\x02'
)
buf.write(
'\x02\x02Е\x03\x02\x02\x02\x02З\x03\x02\x02\x02\x02Й\x03\x02\x02\x02\x02Л'
)
buf.write(
'\x03\x02\x02\x02\x02Н\x03\x02\x02\x02\x02П\x03\x02\x02\x02\x02Ч\x03\x02\x02'
)
buf.write(
'\x02\x02Щ\x03\x02\x02\x02\x02Ы\x03\x02\x02\x02\x02б\x03\x02\x02\x02\x02г'
)
buf.write(
'\x03\x02\x02\x02\x03ѩ\x03\x02\x02\x02\x05Ѭ\x03\x02\x02\x02\x07Ѯ\x03\x02\x02'
)
buf.write(
'\x02\tѲ\x03\x02\x02\x02\x0bѸ\x03\x02\x02\x02\rѾ\x03\x02\x02\x02\x0f'
)
buf.write(
'҈\x03\x02\x02\x02\x11Ҍ\x03\x02\x02\x02\x13Ғ\x03\x02\x02\x02\x15Қ')
buf.write(
'\x03\x02\x02\x02\x17Ҟ\x03\x02\x02\x02\x19Ң\x03\x02\x02\x02\x1bҨ\x03'
)
buf.write(
'\x02\x02\x02\x1dҫ\x03\x02\x02\x02\x1fҲ\x03\x02\x02\x02!ҹ\x03\x02\x02'
)
buf.write(
"\x02#ҽ\x03\x02\x02\x02%Ӈ\x03\x02\x02\x02'ӊ\x03\x02\x02\x02)Ӕ")
buf.write(
'\x03\x02\x02\x02+Ӛ\x03\x02\x02\x02-ӡ\x03\x02\x02\x02/Ӧ\x03\x02\x02\x02'
)
buf.write('1Ӱ\x03\x02\x02\x023ԇ\x03\x02\x02\x025ԍ\x03\x02\x02\x027')
buf.write('Ԕ\x03\x02\x02\x029Ԛ\x03\x02\x02\x02;Ԣ\x03\x02\x02\x02=Ԩ\x03'
)
buf.write(
'\x02\x02\x02?Զ\x03\x02\x02\x02AՃ\x03\x02\x02\x02CՒ\x03\x02\x02\x02E\u0557'
)
buf.write(
'\x03\x02\x02\x02G՝\x03\x02\x02\x02Iբ\x03\x02\x02\x02Kժ\x03\x02\x02\x02'
)
buf.write(
'Mկ\x03\x02\x02\x02Oշ\x03\x02\x02\x02Qռ\x03\x02\x02\x02Sտ\x03')
buf.write(
'\x02\x02\x02Uք\x03\x02\x02\x02Wֆ\x03\x02\x02\x02Y\u058c\x03\x02\x02\x02[֑'
)
buf.write(
'\x03\x02\x02\x02]֛\x03\x02\x02\x02_֣\x03\x02\x02\x02a֨\x03\x02\x02\x02'
)
buf.write(
'c֭\x03\x02\x02\x02eֲ\x03\x02\x02\x02gֺ\x03\x02\x02\x02iׄ\x03')
buf.write(
'\x02\x02\x02k\u05ca\x03\x02\x02\x02m\u05ce\x03\x02\x02\x02oד\x03\x02\x02\x02qי'
)
buf.write(
'\x03\x02\x02\x02sס\x03\x02\x02\x02uש\x03\x02\x02\x02wױ\x03\x02\x02\x02'
)
buf.write(
'y\u05f9\x03\x02\x02\x02{\u0600\x03\x02\x02\x02}؊\x03\x02\x02\x02\x7fؘ'
)
buf.write(
'\x03\x02\x02\x02\x81ؠ\x03\x02\x02\x02\x83ة\x03\x02\x02\x02\x85')
buf.write('ر\x03\x02\x02\x02\x87ف\x03\x02\x02\x02\x89ي\x03\x02\x02\x02'
)
buf.write('\x8bٕ\x03\x02\x02\x02\x8d١\x03\x02\x02\x02\x8f٭\x03')
buf.write('\x02\x02\x02\x91ٵ\x03\x02\x02\x02\x93ٽ\x03\x02\x02\x02\x95چ'
)
buf.write(
'\x03\x02\x02\x02\x97ڎ\x03\x02\x02\x02\x99ښ\x03\x02\x02\x02\x9b')
buf.write('ڪ\x03\x02\x02\x02\x9dگ\x03\x02\x02\x02\x9fڵ\x03\x02\x02\x02'
)
buf.write('¡ڼ\x03\x02\x02\x02£ۂ\x03\x02\x02\x02¥ۇ\x03')
buf.write('\x02\x02\x02§ۏ\x03\x02\x02\x02©ۜ\x03\x02\x02\x02«ۣ')
buf.write('\x03\x02\x02\x02\xadۯ\x03\x02\x02\x02¯۵\x03\x02\x02\x02±')
buf.write('ۺ\x03\x02\x02\x02³܃\x03\x02\x02\x02µ܈\x03\x02\x02\x02')
buf.write('·܌\x03\x02\x02\x02¹ܛ\x03\x02\x02\x02»ܦ\x03')
buf.write('\x02\x02\x02½ܪ\x03\x02\x02\x02¿ܰ\x03\x02\x02\x02Áܴ')
buf.write('\x03\x02\x02\x02Ãܼ\x03\x02\x02\x02Å݄\x03\x02\x02\x02Ç')
buf.write('ݎ\x03\x02\x02\x02Éݘ\x03\x02\x02\x02Ëݠ\x03\x02\x02\x02')
buf.write('Íݩ\x03\x02\x02\x02Ïݲ\x03\x02\x02\x02Ñݺ\x03')
buf.write('\x02\x02\x02Óށ\x03\x02\x02\x02Õއ\x03\x02\x02\x02×ތ')
buf.write('\x03\x02\x02\x02Ùޚ\x03\x02\x02\x02Ûޤ\x03\x02\x02\x02Ý')
buf.write('ެ\x03\x02\x02\x02ß\u07b9\x03\x02\x02\x02á߂\x03\x02\x02\x02')
buf.write('ãߋ\x03\x02\x02\x02åߒ\x03\x02\x02\x02çߗ\x03')
buf.write('\x02\x02\x02é߰\x03\x02\x02\x02ëߵ\x03\x02\x02\x02í߽')
buf.write('\x03\x02\x02\x02ïࠂ\x03\x02\x02\x02ñࠈ\x03\x02\x02\x02ó')
buf.write('ࠎ\x03\x02\x02\x02õࠕ\x03\x02\x02\x02÷ࠞ\x03\x02\x02\x02')
buf.write('ùࠢ\x03\x02\x02\x02û࠱\x03\x02\x02\x02ý࠵\x03')
buf.write('\x02\x02\x02ÿ࠼\x03\x02\x02\x02āࡃ\x03\x02\x02\x02ăࡌ')
buf.write('\x03\x02\x02\x02ąࡓ\x03\x02\x02\x02ć\u085d\x03\x02\x02\x02ĉ')
buf.write('\u086c\x03\x02\x02\x02ċࡷ\x03\x02\x02\x02čࡿ\x03\x02\x02\x02')
buf.write('ďࢉ\x03\x02\x02\x02đ\u0891\x03\x02\x02\x02ē࢘\x03')
buf.write('\x02\x02\x02ĕ࢝\x03\x02\x02\x02ėࢥ\x03\x02\x02\x02ęࢮ')
buf.write('\x03\x02\x02\x02ěࢶ\x03\x02\x02\x02ĝࢾ\x03\x02\x02\x02ğ')
buf.write('ࣄ\x03\x02\x02\x02ġ࣊\x03\x02\x02\x02ģ࣐\x03\x02\x02\x02')
buf.write('ĥࣖ\x03\x02\x02\x02ħ\u08e2\x03\x02\x02\x02ĩࣨ\x03')
buf.write('\x02\x02\x02īࣲ\x03\x02\x02\x02ĭࣺ\x03\x02\x02\x02įࣾ')
buf.write('\x03\x02\x02\x02ıअ\x03\x02\x02\x02ijऋ\x03\x02\x02\x02ĵ')
buf.write('ऐ\x03\x02\x02\x02ķक\x03\x02\x02\x02Ĺञ\x03\x02\x02\x02')
buf.write('Ļण\x03\x02\x02\x02Ľऩ\x03\x02\x02\x02Ŀय\x03')
buf.write('\x02\x02\x02Łस\x03\x02\x02\x02Ńऽ\x03\x02\x02\x02Ņॄ')
buf.write('\x03\x02\x02\x02Ňॉ\x03\x02\x02\x02ʼnॎ\x03\x02\x02\x02ŋ')
buf.write('॑\x03\x02\x02\x02ōक़\x03\x02\x02\x02ŏॢ\x03\x02\x02\x02')
buf.write('ő॥\x03\x02\x02\x02œ७\x03\x02\x02\x02ŕॷ\x03')
buf.write('\x02\x02\x02ŗঁ\x03\x02\x02\x02řঈ\x03\x02\x02\x02ś\u098e')
buf.write('\x03\x02\x02\x02ŝখ\x03\x02\x02\x02şঠ\x03\x02\x02\x02š')
buf.write('ন\x03\x02\x02\x02ţ\u09b1\x03\x02\x02\x02ťস\x03\x02\x02\x02')
buf.write('ŧা\x03\x02\x02\x02ũৄ\x03\x02\x02\x02ūো\x03')
buf.write(
'\x02\x02\x02ŭ\u09d8\x03\x02\x02\x02ůৠ\x03\x02\x02\x02ű\u09e4')
buf.write('\x03\x02\x02\x02ų৬\x03\x02\x02\x02ŵ৶\x03\x02\x02\x02ŷ')
buf.write(
'\u09ff\x03\x02\x02\x02Ź\u0a04\x03\x02\x02\x02Żਏ\x03\x02\x02\x02')
buf.write('Ž\u0a12\x03\x02\x02\x02ſਜ\x03\x02\x02\x02Ɓਤ\x03')
buf.write('\x02\x02\x02ƃ\u0a29\x03\x02\x02\x02ƅਮ\x03\x02\x02\x02Ƈਲ਼')
buf.write('\x03\x02\x02\x02Ɖ਼\x03\x02\x02\x02Ƌੁ\x03\x02\x02\x02ƍ')
buf.write('ੌ\x03\x02\x02\x02Ə\u0a54\x03\x02\x02\x02Ƒਖ਼\x03\x02\x02\x02')
buf.write('Ɠ\u0a5f\x03\x02\x02\x02ƕ੧\x03\x02\x02\x02Ɨ੬\x03')
buf.write(
'\x02\x02\x02ƙੲ\x03\x02\x02\x02ƛ\u0a78\x03\x02\x02\x02Ɲ\u0a7e')
buf.write('\x03\x02\x02\x02Ɵ\u0a84\x03\x02\x02\x02ơઊ\x03\x02\x02\x02ƣ')
buf.write('એ\x03\x02\x02\x02ƥખ\x03\x02\x02\x02Ƨચ\x03\x02\x02\x02')
buf.write('Ʃડ\x03\x02\x02\x02ƫધ\x03\x02\x02\x02ƭબ\x03')
buf.write(
'\x02\x02\x02Ư\u0ab1\x03\x02\x02\x02Ʊશ\x03\x02\x02\x02Ƴ\u0aba')
buf.write('\x03\x02\x02\x02Ƶૂ\x03\x02\x02\x02Ʒો\x03\x02\x02\x02ƹ')
buf.write(
'\u0ad4\x03\x02\x02\x02ƻ\u0adb\x03\x02\x02\x02ƽૡ\x03\x02\x02\x02')
buf.write('ƿ૧\x03\x02\x02\x02ǁ૮\x03\x02\x02\x02ǃ\u0af7\x03')
buf.write('\x02\x02\x02Dž\u0b00\x03\x02\x02\x02LJଅ\x03\x02\x02\x02ljଋ')
buf.write('\x03\x02\x02\x02Nj\u0b12\x03\x02\x02\x02Ǎଘ\x03\x02\x02\x02Ǐ')
buf.write('ଡ\x03\x02\x02\x02Ǒଦ\x03\x02\x02\x02Ǔପ\x03\x02\x02\x02')
buf.write('Ǖଲ\x03\x02\x02\x02Ǘ\u0b3b\x03\x02\x02\x02Ǚି\x03')
buf.write(
'\x02\x02\x02Ǜ\u0b45\x03\x02\x02\x02ǝ\u0b4e\x03\x02\x02\x02ǟ\u0b54'
)
buf.write('\x03\x02\x02\x02ǡ\u0b5b\x03\x02\x02\x02ǣୟ\x03\x02\x02\x02ǥ')
buf.write('ୢ\x03\x02\x02\x02ǧ୪\x03\x02\x02\x02ǩ୲\x03\x02\x02\x02')
buf.write('ǫ\u0b79\x03\x02\x02\x02ǭ\u0b81\x03\x02\x02\x02ǯஒ\x03')
buf.write(
'\x02\x02\x02DZ\u0b9d\x03\x02\x02\x02dzந\x03\x02\x02\x02ǵ\u0bad')
buf.write('\x03\x02\x02\x02Ƿவ\x03\x02\x02\x02ǹ\u0bc3\x03\x02\x02\x02ǻ')
buf.write(
'ே\x03\x02\x02\x02ǽ\u0bce\x03\x02\x02\x02ǿ\u0bd3\x03\x02\x02\x02')
buf.write('ȁ\u0bd9\x03\x02\x02\x02ȃ\u0be0\x03\x02\x02\x02ȅ௨\x03')
buf.write('\x02\x02\x02ȇ௲\x03\x02\x02\x02ȉ௹\x03\x02\x02\x02ȋ\u0bfc')
buf.write('\x03\x02\x02\x02ȍఀ\x03\x02\x02\x02ȏఄ\x03\x02\x02\x02ȑ')
buf.write('ఈ\x03\x02\x02\x02ȓఋ\x03\x02\x02\x02ȕఐ\x03\x02\x02\x02')
buf.write('ȗక\x03\x02\x02\x02șజ\x03\x02\x02\x02țట\x03')
buf.write('\x02\x02\x02ȝధ\x03\x02\x02\x02ȟభ\x03\x02\x02\x02ȡస')
buf.write('\x03\x02\x02\x02ȣీ\x03\x02\x02\x02ȥౄ\x03\x02\x02\x02ȧ')
buf.write('ొ\x03\x02\x02\x02ȩ\u0c4f\x03\x02\x02\x02ȫౚ\x03\x02\x02\x02')
buf.write('ȭౢ\x03\x02\x02\x02ȯ\u0c72\x03\x02\x02\x02ȱ౽\x03')
buf.write('\x02\x02\x02ȳ಄\x03\x02\x02\x02ȵಎ\x03\x02\x02\x02ȷಖ')
buf.write('\x03\x02\x02\x02ȹಛ\x03\x02\x02\x02Ȼತ\x03\x02\x02\x02Ƚ')
buf.write(
'ಪ\x03\x02\x02\x02ȿ\u0cb4\x03\x02\x02\x02Ɂ\u0cba\x03\x02\x02\x02')
buf.write('Ƀಿ\x03\x02\x02\x02Ʌೋ\x03\x02\x02\x02ɇ\u0cd4\x03')
buf.write('\x02\x02\x02ɉೞ\x03\x02\x02\x02ɋ\u0ce5\x03\x02\x02\x02ɍ೯')
buf.write('\x03\x02\x02\x02ɏ\u0cf9\x03\x02\x02\x02ɑഁ\x03\x02\x02\x02ɓ')
buf.write('ഇ\x03\x02\x02\x02ɕ\u0d11\x03\x02\x02\x02ɗഗ\x03\x02\x02\x02')
buf.write('əഝ\x03\x02\x02\x02ɛഡ\x03\x02\x02\x02ɝദ\x03')
buf.write('\x02\x02\x02ɟഫ\x03\x02\x02\x02ɡല\x03\x02\x02\x02ɣശ')
buf.write('\x03\x02\x02\x02ɥീ\x03\x02\x02\x02ɧൌ\x03\x02\x02\x02ɩ')
buf.write(
'\u0d53\x03\x02\x02\x02ɫ൝\x03\x02\x02\x02ɭ\u0d64\x03\x02\x02\x02')
buf.write('ɯ൬\x03\x02\x02\x02ɱ൴\x03\x02\x02\x02ɳඈ\x03')
buf.write('\x02\x02\x02ɵඏ\x03\x02\x02\x02ɷග\x03\x02\x02\x02ɹඣ')
buf.write('\x03\x02\x02\x02ɻත\x03\x02\x02\x02ɽඳ\x03\x02\x02\x02ɿ')
buf.write('ර\x03\x02\x02\x02ʁෂ\x03\x02\x02\x02ʃ\u0dc8\x03\x02\x02\x02')
buf.write('ʅෑ\x03\x02\x02\x02ʇෘ\x03\x02\x02\x02ʉො\x03')
buf.write('\x02\x02\x02ʋ\u0de2\x03\x02\x02\x02ʍ෧\x03\x02\x02\x02ʏ෭')
buf.write('\x03\x02\x02\x02ʑ෴\x03\x02\x02\x02ʓ\u0df9\x03\x02\x02\x02ʕ')
buf.write('ฃ\x03\x02\x02\x02ʗช\x03\x02\x02\x02ʙถ\x03\x02\x02\x02')
buf.write('ʛบ\x03\x02\x02\x02ʝม\x03\x02\x02\x02ʟศ\x03')
buf.write('\x02\x02\x02ʡอ\x03\x02\x02\x02ʣี\x03\x02\x02\x02ʥ\u0e3c')
buf.write('\x03\x02\x02\x02ʧแ\x03\x02\x02\x02ʩ๊\x03\x02\x02\x02ʫ')
buf.write(
'๕\x03\x02\x02\x02ʭ\u0e62\x03\x02\x02\x02ʯ\u0e74\x03\x02\x02\x02')
buf.write('ʱ\u0e80\x03\x02\x02\x02ʳຐ\x03\x02\x02\x02ʵດ\x03')
buf.write('\x02\x02\x02ʷນ\x03\x02\x02\x02ʹຢ\x03\x02\x02\x02ʻຨ')
buf.write('\x03\x02\x02\x02ʽອ\x03\x02\x02\x02ʿຶ\x03\x02\x02\x02ˁ')
buf.write('\u0ebf\x03\x02\x02\x02˃່\x03\x02\x02\x02˅໗\x03\x02\x02\x02')
buf.write('ˇໞ\x03\x02\x02\x02ˉ\u0ee3\x03\x02\x02\x02ˋ\u0ee8\x03')
buf.write(
'\x02\x02\x02ˍ\u0ef1\x03\x02\x02\x02ˏ\u0efa\x03\x02\x02\x02ˑ\u0eff'
)
buf.write('\x03\x02\x02\x02˓།\x03\x02\x02\x02˕༕\x03\x02\x02\x02˗')
buf.write('༞\x03\x02\x02\x02˙༩\x03\x02\x02\x02˛༯\x03\x02\x02\x02')
buf.write('˝༷\x03\x02\x02\x02˟ཁ\x03\x02\x02\x02ˡཎ\x03')
buf.write('\x02\x02\x02ˣཕ\x03\x02\x02\x02˥འ\x03\x02\x02\x02˧ཧ')
buf.write('\x03\x02\x02\x02˩ཱི\x03\x02\x02\x02˫ྀ\x03\x02\x02\x02˭')
buf.write('ྎ\x03\x02\x02\x02˯ྖ\x03\x02\x02\x02˱ྞ\x03\x02\x02\x02')
buf.write('˳ྦ\x03\x02\x02\x02˵ྫྷ\x03\x02\x02\x02˷ྰ\x03')
buf.write('\x02\x02\x02˹ྵ\x03\x02\x02\x02˻ྺ\x03\x02\x02\x02˽࿄')
buf.write(
'\x03\x02\x02\x02˿\u0fe0\x03\x02\x02\x02́\u0ffb\x03\x02\x02\x02̃')
buf.write('ဓ\x03\x02\x02\x02̅အ\x03\x02\x02\x02̇ု\x03\x02\x02\x02')
buf.write('̉ဿ\x03\x02\x02\x02̋၏\x03\x02\x02\x02̍ၒ\x03')
buf.write('\x02\x02\x02̏ၛ\x03\x02\x02\x02̑ၧ\x03\x02\x02\x02̓ၱ')
buf.write('\x03\x02\x02\x02̕ၷ\x03\x02\x02\x02̗ၿ\x03\x02\x02\x02̙')
buf.write('ႄ\x03\x02\x02\x02̛ႉ\x03\x02\x02\x02̝႒\x03\x02\x02\x02')
buf.write('̟႗\x03\x02\x02\x02̡Ⴁ\x03\x02\x02\x02̣Ⴇ\x03')
buf.write('\x02\x02\x02̥Ⴍ\x03\x02\x02\x02̧Ⴔ\x03\x02\x02\x02̩Ⴞ')
buf.write(
'\x03\x02\x02\x02̫\u10c6\x03\x02\x02\x02̭\u10cc\x03\x02\x02\x02̯')
buf.write('დ\x03\x02\x02\x02̱მ\x03\x02\x02\x02̳ტ\x03\x02\x02\x02')
buf.write('̵ჩ\x03\x02\x02\x02̷ჭ\x03\x02\x02\x02̹ჳ\x03')
buf.write('\x02\x02\x02̻ჼ\x03\x02\x02\x02̽ᄂ\x03\x02\x02\x02̿ᄉ')
buf.write('\x03\x02\x02\x02́ᄑ\x03\x02\x02\x02̓ᄚ\x03\x02\x02\x02ͅ')
buf.write('ᄣ\x03\x02\x02\x02͇ᄪ\x03\x02\x02\x02͉ᄲ\x03\x02\x02\x02')
buf.write('͋ᄺ\x03\x02\x02\x02͍ᅃ\x03\x02\x02\x02͏ᅈ\x03')
buf.write('\x02\x02\x02͑ᅐ\x03\x02\x02\x02͓ᅛ\x03\x02\x02\x02͕ᅠ')
buf.write('\x03\x02\x02\x02͗ᅩ\x03\x02\x02\x02͙ᅯ\x03\x02\x02\x02͛')
buf.write('ᅵ\x03\x02\x02\x02͝ᅺ\x03\x02\x02\x02͟ᆁ\x03\x02\x02\x02')
buf.write('͡ᆆ\x03\x02\x02\x02ͣᆌ\x03\x02\x02\x02ͥᆐ\x03')
buf.write('\x02\x02\x02ͧᆗ\x03\x02\x02\x02ͩᆥ\x03\x02\x02\x02ͫᆭ')
buf.write('\x03\x02\x02\x02ͭᆺ\x03\x02\x02\x02ͯᇅ\x03\x02\x02\x02ͱ')
buf.write('ᇏ\x03\x02\x02\x02ͳᇙ\x03\x02\x02\x02͵ᇧ\x03\x02\x02\x02')
buf.write('ͷᇰ\x03\x02\x02\x02\u0379ᇶ\x03\x02\x02\x02ͻᇿ\x03')
buf.write('\x02\x02\x02ͽሇ\x03\x02\x02\x02Ϳሔ\x03\x02\x02\x02\u0381ም')
buf.write('\x03\x02\x02\x02\u0383ሢ\x03\x02\x02\x02΅ሦ\x03\x02\x02\x02·')
buf.write(
'ሿ\x03\x02\x02\x02Ήቄ\x03\x02\x02\x02\u038b\u124f\x03\x02\x02\x02')
buf.write('\u038dቡ\x03\x02\x02\x02Ώቱ\x03\x02\x02\x02Αኄ\x03')
buf.write('\x02\x02\x02Γኛ\x03\x02\x02\x02Εኪ\x03\x02\x02\x02Ηኴ')
buf.write(
'\x03\x02\x02\x02Ι\u12bf\x03\x02\x02\x02Λ\u12c7\x03\x02\x02\x02Ν')
buf.write('ዔ\x03\x02\x02\x02Οዤ\x03\x02\x02\x02Ρዴ\x03\x02\x02\x02')
buf.write('Σዹ\x03\x02\x02\x02Υዽ\x03\x02\x02\x02Χጂ\x03')
buf.write('\x02\x02\x02Ωጆ\x03\x02\x02\x02Ϋጋ\x03\x02\x02\x02έጏ')
buf.write('\x03\x02\x02\x02ί\u1316\x03\x02\x02\x02αጚ\x03\x02\x02\x02γ')
buf.write('ጠ\x03\x02\x02\x02εጰ\x03\x02\x02\x02ηጻ\x03\x02\x02\x02')
buf.write('ιጿ\x03\x02\x02\x02λፈ\x03\x02\x02\x02νፎ\x03')
buf.write('\x02\x02\x02οፕ\x03\x02\x02\x02ρፚ\x03\x02\x02\x02σ፡')
buf.write('\x03\x02\x02\x02υ፮\x03\x02\x02\x02χ፻\x03\x02\x02\x02ω')
buf.write('ᎈ\x03\x02\x02\x02ϋᎋ\x03\x02\x02\x02ύᎍ\x03\x02\x02\x02')
buf.write('Ϗᎏ\x03\x02\x02\x02ϑ\u139e\x03\x02\x02\x02ϓᎪ\x03')
buf.write('\x02\x02\x02ϕᎳ\x03\x02\x02\x02ϗᎵ\x03\x02\x02\x02ϙᏀ')
buf.write('\x03\x02\x02\x02ϛᏋ\x03\x02\x02\x02ϝᏖ\x03\x02\x02\x02ϟ')
buf.write('Ꮱ\x03\x02\x02\x02ϡᏣ\x03\x02\x02\x02ϣᏭ\x03\x02\x02\x02')
buf.write('ϥᏯ\x03\x02\x02\x02ϧᏱ\x03\x02\x02\x02ϩᏳ\x03')
buf.write('\x02\x02\x02ϫᏵ\x03\x02\x02\x02ϭᏸ\x03\x02\x02\x02ϯᏺ')
buf.write('\x03\x02\x02\x02ϱᏼ\x03\x02\x02\x02ϳ\u13fe\x03\x02\x02\x02ϵ')
buf.write('᐀\x03\x02\x02\x02Ϸᐂ\x03\x02\x02\x02Ϲᐄ\x03\x02\x02\x02')
buf.write('ϻᐕ\x03\x02\x02\x02Ͻᐗ\x03\x02\x02\x02Ͽᐙ\x03')
buf.write('\x02\x02\x02Ёᐛ\x03\x02\x02\x02Ѓᐞ\x03\x02\x02\x02Ѕᐠ')
buf.write('\x03\x02\x02\x02Їᐫ\x03\x02\x02\x02Љᐭ\x03\x02\x02\x02Ћ')
buf.write('ᐯ\x03\x02\x02\x02Ѝᐱ\x03\x02\x02\x02Џᐳ\x03\x02\x02\x02')
buf.write('Бᐵ\x03\x02\x02\x02Гᐷ\x03\x02\x02\x02Еᐺ\x03')
buf.write('\x02\x02\x02Зᐼ\x03\x02\x02\x02Йᐾ\x03\x02\x02\x02Лᑀ')
buf.write('\x03\x02\x02\x02Нᑂ\x03\x02\x02\x02Пᑅ\x03\x02\x02\x02С')
buf.write('ᑋ\x03\x02\x02\x02Уᑎ\x03\x02\x02\x02Хᑕ\x03\x02\x02\x02')
buf.write('Чᑠ\x03\x02\x02\x02Щᑯ\x03\x02\x02\x02Ыᑽ\x03')
buf.write('\x02\x02\x02Эᒐ\x03\x02\x02\x02Яᒔ\x03\x02\x02\x02бᒖ')
buf.write('\x03\x02\x02\x02гᒞ\x03\x02\x02\x02еᒣ\x03\x02\x02\x02з')
buf.write('ᒥ\x03\x02\x02\x02йᒧ\x03\x02\x02\x02лᒩ\x03\x02\x02\x02')
buf.write('нᒫ\x03\x02\x02\x02пᒭ\x03\x02\x02\x02сᒯ\x03')
buf.write('\x02\x02\x02уᒱ\x03\x02\x02\x02хᒳ\x03\x02\x02\x02чᒵ')
buf.write('\x03\x02\x02\x02щᒷ\x03\x02\x02\x02ыᒹ\x03\x02\x02\x02э')
buf.write('ᒻ\x03\x02\x02\x02яᒽ\x03\x02\x02\x02ёᒿ\x03\x02\x02\x02')
buf.write('ѓᓁ\x03\x02\x02\x02ѕᓃ\x03\x02\x02\x02їᓅ\x03')
buf.write('\x02\x02\x02љᓇ\x03\x02\x02\x02ћᓉ\x03\x02\x02\x02ѝᓋ')
buf.write('\x03\x02\x02\x02џᓍ\x03\x02\x02\x02ѡᓏ\x03\x02\x02\x02ѣ')
buf.write('ᓑ\x03\x02\x02\x02ѥᓓ\x03\x02\x02\x02ѧᓕ\x03\x02\x02\x02')
buf.write('ѩѪ\x070\x02\x02Ѫѫ\x070\x02\x02ѫ\x04\x03\x02')
buf.write('\x02\x02Ѭѭ\x05еț\x02ѭ\x06\x03\x02\x02\x02Ѯ')
buf.write('ѯ\x05еț\x02ѯѰ\x05лȞ\x02Ѱ')
buf.write('ѱ\x05лȞ\x02ѱ\x08\x03\x02\x02\x02Ѳѳ\x05е')
buf.write('ț\x02ѳѴ\x05пȠ\x02Ѵѵ\x05ћ')
buf.write('Ȯ\x02ѵѶ\x05нȟ\x02Ѷѷ\x05ї')
buf.write('Ȭ\x02ѷ\n\x03\x02\x02\x02Ѹѹ\x05еț\x02ѹ')
buf.write('Ѻ\x05сȡ\x02Ѻѻ\x05нȟ\x02ѻ')
buf.write('Ѽ\x05яȨ\x02Ѽѽ\x05ћȮ\x02ѽ')
buf.write('\x0c\x03\x02\x02\x02Ѿѿ\x05еț\x02ѿҀ\x05с')
buf.write('ȡ\x02Ҁҁ\x05сȡ\x02ҁ҂\x05ї')
buf.write('Ȭ\x02҂҃\x05нȟ\x02҃҄\x05с')
buf.write('ȡ\x02҄҅\x05еț\x02҅҆\x05ћ')
buf.write('Ȯ\x02҆҇\x05нȟ\x02҇\x0e\x03\x02\x02\x02')
buf.write('҈҉\x05еț\x02҉Ҋ\x05ыȦ')
buf.write('\x02Ҋҋ\x05ыȦ\x02ҋ\x10\x03\x02\x02\x02Ҍ')
buf.write('ҍ\x05еț\x02ҍҎ\x05ыȦ\x02Ҏ')
buf.write('ҏ\x05ћȮ\x02ҏҐ\x05нȟ\x02Ґ')
buf.write('ґ\x05їȬ\x02ґ\x12\x03\x02\x02\x02Ғғ\x05')
buf.write('еț\x02ғҔ\x05яȨ\x02Ҕҕ')
buf.write('\x05еț\x02ҕҖ\x05ыȦ\x02Җҗ')
buf.write('\x05ѥȳ\x02җҘ\x05ѧȴ\x02Ҙҙ')
buf.write('\x05нȟ\x02ҙ\x14\x03\x02\x02\x02Ққ\x05е')
buf.write('ț\x02қҜ\x05яȨ\x02Ҝҝ\x05л')
buf.write('Ȟ\x02ҝ\x16\x03\x02\x02\x02Ҟҟ\x05еț\x02')
buf.write('ҟҠ\x05яȨ\x02Ҡҡ\x05ѥȳ')
buf.write('\x02ҡ\x18\x03\x02\x02\x02Ңң\x05еț\x02ң')
buf.write('Ҥ\x05їȬ\x02Ҥҥ\x05їȬ\x02ҥ')
buf.write('Ҧ\x05еț\x02Ҧҧ\x05ѥȳ\x02ҧ')
buf.write('\x1a\x03\x02\x02\x02Ҩҩ\x05еț\x02ҩҪ\x05')
buf.write('љȭ\x02Ҫ\x1c\x03\x02\x02\x02ҫҬ\x05еț')
buf.write('\x02Ҭҭ\x05љȭ\x02ҭҮ\x05љȭ')
buf.write('\x02Үү\x05ѝȯ\x02үҰ\x05эȧ')
buf.write('\x02Ұұ\x05нȟ\x02ұ\x1e\x03\x02\x02\x02Ҳ')
buf.write('ҳ\x05еț\x02ҳҴ\x05љȭ\x02Ҵ')
buf.write('ҵ\x05љȭ\x02ҵҶ\x05нȟ\x02Ҷ')
buf.write('ҷ\x05їȬ\x02ҷҸ\x05ћȮ\x02Ҹ')
buf.write(' \x03\x02\x02\x02ҹҺ\x05еț\x02Һһ\x05љ')
buf.write('ȭ\x02һҼ\x05йȝ\x02Ҽ"\x03\x02\x02\x02ҽ')
buf.write('Ҿ\x05еț\x02Ҿҿ\x05љȭ\x02ҿ')
buf.write('Ӏ\x05љȭ\x02ӀӁ\x05ёȩ\x02Ӂ')
buf.write('ӂ\x05йȝ\x02ӂӃ\x05хȣ\x02Ӄ')
buf.write('ӄ\x05еț\x02ӄӅ\x05ћȮ\x02Ӆ')
buf.write('ӆ\x05нȟ\x02ӆ$\x03\x02\x02\x02Ӈӈ\x05е')
buf.write('ț\x02ӈӉ\x05ћȮ\x02Ӊ&\x03\x02\x02\x02ӊ')
buf.write('Ӌ\x05еț\x02Ӌӌ\x05ћȮ\x02ӌ')
buf.write('Ӎ\x05ћȮ\x02Ӎӎ\x05їȬ\x02ӎ')
buf.write('ӏ\x05хȣ\x02ӏӐ\x05зȜ\x02Ӑ')
buf.write('ӑ\x05ѝȯ\x02ӑӒ\x05ћȮ\x02Ӓ')
buf.write('ӓ\x05нȟ\x02ӓ(\x03\x02\x02\x02Ӕӕ\x05е')
buf.write('ț\x02ӕӖ\x05ѝȯ\x02Ӗӗ\x05л')
buf.write('Ȟ\x02ӗӘ\x05хȣ\x02Әә\x05ћ')
buf.write('Ȯ\x02ә*\x03\x02\x02\x02Ӛӛ\x05еț\x02ӛ')
buf.write('Ӝ\x05ѝȯ\x02Ӝӝ\x05ћȮ\x02ӝ')
buf.write('Ӟ\x05уȢ\x02Ӟӟ\x05хȣ\x02ӟ')
buf.write('Ӡ\x05лȞ\x02Ӡ,\x03\x02\x02\x02ӡӢ\x05е')
buf.write('ț\x02Ӣӣ\x05ѝȯ\x02ӣӤ\x05ћ')
buf.write('Ȯ\x02Ӥӥ\x05ёȩ\x02ӥ.\x03\x02\x02\x02Ӧ')
buf.write('ӧ\x05еț\x02ӧӨ\x05ѝȯ\x02Ө')
buf.write('ө\x05ћȮ\x02өӪ\x05ёȩ\x02Ӫ')
buf.write('ӫ\x05эȧ\x02ӫӬ\x05еț\x02Ӭ')
buf.write('ӭ\x05ћȮ\x02ӭӮ\x05хȣ\x02Ӯ')
buf.write('ӯ\x05йȝ\x02ӯ0\x03\x02\x02\x02Ӱӱ\x05')
buf.write('еț\x02ӱӲ\x05ѝȯ\x02Ӳӳ')
buf.write('\x05ћȮ\x02ӳӴ\x05ёȩ\x02Ӵӵ')
buf.write('\x05яȨ\x02ӵӶ\x05ёȩ\x02Ӷӷ')
buf.write('\x05эȧ\x02ӷӸ\x05ёȩ\x02Ӹӹ')
buf.write('\x05ѝȯ\x02ӹӺ\x05љȭ\x02Ӻӻ')
buf.write('\x07a\x02\x02ӻӼ\x05ћȮ\x02Ӽӽ\x05ї')
buf.write('Ȭ\x02ӽӾ\x05еț\x02Ӿӿ\x05я')
buf.write('Ȩ\x02ӿԀ\x05љȭ\x02Ԁԁ\x05е')
buf.write('ț\x02ԁԂ\x05йȝ\x02Ԃԃ\x05ћ')
buf.write('Ȯ\x02ԃԄ\x05хȣ\x02Ԅԅ\x05ё')
buf.write('ȩ\x02ԅԆ\x05яȨ\x02Ԇ2\x03\x02\x02\x02')
buf.write('ԇԈ\x05зȜ\x02Ԉԉ\x05еț')
buf.write('\x02ԉԊ\x05ћȮ\x02Ԋԋ\x05йȝ')
buf.write('\x02ԋԌ\x05уȢ\x02Ԍ4\x03\x02\x02\x02ԍ')
buf.write('Ԏ\x05зȜ\x02Ԏԏ\x05нȟ\x02ԏ')
buf.write('Ԑ\x05пȠ\x02Ԑԑ\x05ёȩ\x02ԑ')
buf.write('Ԓ\x05їȬ\x02Ԓԓ\x05нȟ\x02ԓ')
buf.write('6\x03\x02\x02\x02Ԕԕ\x05зȜ\x02ԕԖ\x05')
buf.write('нȟ\x02Ԗԗ\x05сȡ\x02ԗԘ')
buf.write('\x05хȣ\x02Ԙԙ\x05яȨ\x02ԙ8\x03')
buf.write('\x02\x02\x02Ԛԛ\x05зȜ\x02ԛԜ\x05н')
buf.write('ȟ\x02Ԝԝ\x05ћȮ\x02ԝԞ\x05ѡ')
buf.write('ȱ\x02Ԟԟ\x05нȟ\x02ԟԠ\x05н')
buf.write('ȟ\x02Ԡԡ\x05яȨ\x02ԡ:\x03\x02\x02\x02Ԣ')
buf.write('ԣ\x05зȜ\x02ԣԤ\x05пȠ\x02Ԥ')
buf.write('ԥ\x05хȣ\x02ԥԦ\x05ыȦ\x02Ԧ')
buf.write('ԧ\x05нȟ\x02ԧ<\x03\x02\x02\x02Ԩԩ\x05з')
buf.write('Ȝ\x02ԩԪ\x05хȣ\x02Ԫԫ\x05я')
buf.write('Ȩ\x02ԫԬ\x05еț\x02Ԭԭ\x05ї')
buf.write('Ȭ\x02ԭԮ\x05ѥȳ\x02Ԯԯ\x07a\x02')
buf.write('\x02ԯ\u0530\x05лȞ\x02\u0530Ա\x05ёȩ')
buf.write('\x02ԱԲ\x05ѝȯ\x02ԲԳ\x05зȜ')
buf.write('\x02ԳԴ\x05ыȦ\x02ԴԵ\x05нȟ')
buf.write('\x02Ե>\x03\x02\x02\x02ԶԷ\x05зȜ\x02ԷԸ')
buf.write('\x05хȣ\x02ԸԹ\x05яȨ\x02ԹԺ')
buf.write('\x05еț\x02ԺԻ\x05їȬ\x02ԻԼ')
buf.write('\x05ѥȳ\x02ԼԽ\x07a\x02\x02ԽԾ\x05п')
buf.write('Ƞ\x02ԾԿ\x05ыȦ\x02ԿՀ\x05ё')
buf.write('ȩ\x02ՀՁ\x05еț\x02ՁՂ\x05ћ')
buf.write('Ȯ\x02Ղ@\x03\x02\x02\x02ՃՄ\x05зȜ\x02Մ')
buf.write('Յ\x05хȣ\x02ՅՆ\x05яȨ\x02Ն')
buf.write('Շ\x05еț\x02ՇՈ\x05їȬ\x02Ո')
buf.write('Չ\x05ѥȳ\x02ՉՊ\x07a\x02\x02ՊՋ')
buf.write('\x05хȣ\x02ՋՌ\x05яȨ\x02ՌՍ')
buf.write('\x05ћȮ\x02ՍՎ\x05нȟ\x02ՎՏ')
buf.write('\x05сȡ\x02ՏՐ\x05нȟ\x02ՐՑ')
buf.write('\x05їȬ\x02ՑB\x03\x02\x02\x02ՒՓ\x05зȜ')
buf.write('\x02ՓՔ\x05ыȦ\x02ՔՕ\x05ёȩ')
buf.write('\x02ՕՖ\x05зȜ\x02ՖD\x03\x02\x02\x02\u0557\u0558')
buf.write('\x05зȜ\x02\u0558ՙ\x05ыȦ\x02ՙ՚')
buf.write('\x05ёȩ\x02՚՛\x05йȝ\x02՛՜')
buf.write('\x05щȥ\x02՜F\x03\x02\x02\x02՝՞\x05зȜ')
buf.write('\x02՞՟\x05ёȩ\x02՟ՠ\x05лȞ')
buf.write('\x02ՠա\x05ѥȳ\x02աH\x03\x02\x02\x02բգ')
buf.write('\x05зȜ\x02գդ\x05ёȩ\x02դե')
buf.write('\x05ёȩ\x02եզ\x05ыȦ\x02զէ')
buf.write('\x05нȟ\x02էը\x05еț\x02ըթ')
buf.write('\x05яȨ\x02թJ\x03\x02\x02\x02ժի\x05зȜ')
buf.write('\x02իլ\x05ёȩ\x02լխ\x05ћȮ')
buf.write('\x02խծ\x05уȢ\x02ծL\x03\x02\x02\x02կհ')
buf.write('\x05зȜ\x02հձ\x05їȬ\x02ձղ')
buf.write('\x05нȟ\x02ղճ\x05еț\x02ճմ')
buf.write('\x05лȞ\x02մյ\x05ћȮ\x02յն')
buf.write('\x05уȢ\x02նN\x03\x02\x02\x02շո\x05зȜ')
buf.write('\x02ոչ\x05ѝȯ\x02չպ\x05ыȦ')
buf.write('\x02պջ\x05щȥ\x02ջP\x03\x02\x02\x02ռս')
buf.write('\x05зȜ\x02սվ\x05ѥȳ\x02վR\x03')
buf.write('\x02\x02\x02տր\x05зȜ\x02րց\x05ѥ')
buf.write('ȳ\x02ցւ\x05ћȮ\x02ւփ\x05н')
buf.write('ȟ\x02փT\x03\x02\x02\x02քօ\x05йȝ\x02օ')
buf.write('V\x03\x02\x02\x02ֆև\x05йȝ\x02ևֈ\x05е')
buf.write('ț\x02ֈ։\x05йȝ\x02։֊\x05у')
buf.write('Ȣ\x02֊\u058b\x05нȟ\x02\u058bX\x03\x02\x02\x02\u058c')
buf.write('֍\x05йȝ\x02֍֎\x05еț\x02֎')
buf.write('֏\x05ыȦ\x02֏\u0590\x05ыȦ\x02\u0590')
buf.write('Z\x03\x02\x02\x02֑֒\x05йȝ\x02֒֓\x05е')
buf.write('ț\x02֓֔\x05яȨ\x02֔֕\x05ё')
buf.write('ȩ\x02֖֕\x05яȨ\x02֖֗\x05х')
buf.write('ȣ\x02֗֘\x05йȝ\x02֘֙\x05е')
buf.write('ț\x02֚֙\x05ыȦ\x02֚\\\x03\x02\x02\x02֛')
buf.write('֜\x05йȝ\x02֜֝\x05еț\x02֝')
buf.write('֞\x05љȭ\x02֞֟\x05йȝ\x02֟')
buf.write('֠\x05еț\x02֠֡\x05лȞ\x02֡')
buf.write('֢\x05нȟ\x02֢^\x03\x02\x02\x02֣֤\x05й')
buf.write('ȝ\x02֤֥\x05еț\x02֥֦\x05љ')
buf.write('ȭ\x02֦֧\x05нȟ\x02֧`\x03\x02\x02\x02֨')
buf.write('֩\x05йȝ\x02֪֩\x05еț\x02֪')
buf.write('֫\x05љȭ\x02֫֬\x05ћȮ\x02֬')
buf.write('b\x03\x02\x02\x02֭֮\x05йȝ\x02֮֯\x05у')
buf.write('Ȣ\x02ְ֯\x05еț\x02ְֱ\x05ї')
buf.write('Ȭ\x02ֱd\x03\x02\x02\x02ֲֳ\x05йȝ\x02ֳ')
buf.write('ִ\x05уȢ\x02ִֵ\x05еț\x02ֵ')
buf.write('ֶ\x05їȬ\x02ֶַ\x07a\x02\x02ַָ')
buf.write('\x05йȝ\x02ָֹ\x05љȭ\x02ֹf\x03')
buf.write('\x02\x02\x02ֺֻ\x05йȝ\x02ֻּ\x05у')
buf.write('Ȣ\x02ּֽ\x05еț\x02ֽ־\x05ї')
buf.write('Ȭ\x02־ֿ\x05еț\x02ֿ׀\x05й')
buf.write('ȝ\x02׀ׁ\x05ћȮ\x02ׁׂ\x05н')
buf.write('ȟ\x02ׂ׃\x05їȬ\x02׃h\x03\x02\x02\x02ׄ')
buf.write('ׅ\x05йȝ\x02ׅ׆\x05уȢ\x02׆')
buf.write('ׇ\x05нȟ\x02ׇ\u05c8\x05йȝ\x02\u05c8')
buf.write('\u05c9\x05щȥ\x02\u05c9j\x03\x02\x02\x02\u05ca\u05cb\x05й')
buf.write('ȝ\x02\u05cb\u05cc\x05уȢ\x02\u05cc\u05cd\x05ї')
buf.write('Ȭ\x02\u05cdl\x03\x02\x02\x02\u05ce\u05cf\x05йȝ\x02\u05cf')
buf.write('א\x05ыȦ\x02אב\x05ёȩ\x02ב')
buf.write('ג\x05зȜ\x02גn\x03\x02\x02\x02דה\x05й')
buf.write('ȝ\x02הו\x05ыȦ\x02וז\x05ё')
buf.write('ȩ\x02זח\x05љȭ\x02חט\x05н')
buf.write('ȟ\x02טp\x03\x02\x02\x02יך\x05йȝ\x02ך')
buf.write('כ\x05ыȦ\x02כל\x05ѝȯ\x02ל')
buf.write('ם\x05љȭ\x02םמ\x05ћȮ\x02מ')
buf.write('ן\x05нȟ\x02ןנ\x05їȬ\x02נ')
buf.write('r\x03\x02\x02\x02סע\x05йȝ\x02עף\x05ё')
buf.write('ȩ\x02ףפ\x05ыȦ\x02פץ\x05ы')
buf.write('Ȧ\x02ץצ\x05нȟ\x02צק\x05й')
buf.write('ȝ\x02קר\x05ћȮ\x02רt\x03\x02\x02\x02ש')
buf.write('ת\x05йȝ\x02ת\u05eb\x05ёȩ\x02\u05eb')
buf.write('\u05ec\x05ыȦ\x02\u05ec\u05ed\x05ѝȯ\x02\u05ed')
buf.write('\u05ee\x05эȧ\x02\u05eeׯ\x05яȨ\x02ׯ')
buf.write('װ\x05љȭ\x02װv\x03\x02\x02\x02ױײ\x05й')
buf.write('ȝ\x02ײ׳\x05ёȩ\x02׳״\x05э')
buf.write('ȧ\x02״\u05f5\x05эȧ\x02\u05f5\u05f6\x05н')
buf.write('ȟ\x02\u05f6\u05f7\x05яȨ\x02\u05f7\u05f8\x05ћ')
buf.write('Ȯ\x02\u05f8x\x03\x02\x02\x02\u05f9\u05fa\x05йȝ\x02\u05fa')
buf.write('\u05fb\x05ёȩ\x02\u05fb\u05fc\x05эȧ\x02\u05fc')
buf.write('\u05fd\x05эȧ\x02\u05fd\u05fe\x05хȣ\x02\u05fe')
buf.write('\u05ff\x05ћȮ\x02\u05ffz\x03\x02\x02\x02\u0600\u0601\x05й')
buf.write('ȝ\x02\u0601\u0602\x05ёȩ\x02\u0602\u0603\x05э')
buf.write('ȧ\x02\u0603\u0604\x05эȧ\x02\u0604\u0605\x05х')
buf.write('ȣ\x02\u0605؆\x05ћȮ\x02؆؇\x05ћ')
buf.write('Ȯ\x02؇؈\x05нȟ\x02؈؉\x05л')
buf.write('Ȟ\x02؉|\x03\x02\x02\x02؊؋\x05йȝ\x02؋')
buf.write('،\x05ёȩ\x02،؍\x05эȧ\x02؍')
buf.write('؎\x05ѓȪ\x02؎؏\x05еț\x02؏')
buf.write('ؐ\x05ћȮ\x02ؐؑ\x05хȣ\x02ؑ')
buf.write('ؒ\x05зȜ\x02ؒؓ\x05хȣ\x02ؓ')
buf.write('ؔ\x05ыȦ\x02ؔؕ\x05хȣ\x02ؕ')
buf.write('ؖ\x05ћȮ\x02ؖؗ\x05ѥȳ\x02ؗ')
buf.write('~\x03\x02\x02\x02ؘؙ\x05йȝ\x02ؙؚ\x05ё')
buf.write('ȩ\x02ؚ؛\x05эȧ\x02؛\u061c\x05ѓ')
buf.write('Ȫ\x02\u061c؝\x05хȣ\x02؝؞\x05ы')
buf.write('Ȧ\x02؞؟\x05нȟ\x02؟\x80\x03\x02\x02')
buf.write('\x02ؠء\x05йȝ\x02ءآ\x05ёȩ')
buf.write('\x02آأ\x05эȧ\x02أؤ\x05ѓȪ')
buf.write('\x02ؤإ\x05ёȩ\x02إئ\x05ѝȯ')
buf.write('\x02ئا\x05яȨ\x02اب\x05лȞ')
buf.write('\x02ب\x82\x03\x02\x02\x02ةت\x05йȝ\x02ت')
buf.write('ث\x05ёȩ\x02ثج\x05яȨ\x02ج')
buf.write('ح\x05яȨ\x02حخ\x05нȟ\x02خ')
buf.write('د\x05йȝ\x02دذ\x05ћȮ\x02ذ')
buf.write('\x84\x03\x02\x02\x02رز\x05йȝ\x02زس')
buf.write('\x05ёȩ\x02سش\x05яȨ\x02شص')
buf.write('\x05яȨ\x02صض\x05нȟ\x02ضط')
buf.write('\x05йȝ\x02طظ\x05ћȮ\x02ظع')
buf.write('\x07a\x02\x02عغ\x05зȜ\x02غػ\x05ѥ')
buf.write('ȳ\x02ػؼ\x07a\x02\x02ؼؽ\x05їȬ')
buf.write('\x02ؽؾ\x05ёȩ\x02ؾؿ\x05ёȩ')
buf.write('\x02ؿـ\x05ћȮ\x02ـ\x86\x03\x02\x02\x02ف')
buf.write('ق\x05йȝ\x02قك\x05ёȩ\x02ك')
buf.write('ل\x05яȨ\x02لم\x05љȭ\x02م')
buf.write('ن\x05ћȮ\x02نه\x05еț\x02ه')
buf.write('و\x05яȨ\x02وى\x05ћȮ\x02ى')
buf.write('\x88\x03\x02\x02\x02يً\x05йȝ\x02ًٌ')
buf.write('\x05ёȩ\x02ٌٍ\x05яȨ\x02ٍَ')
buf.write('\x05љȭ\x02َُ\x05ћȮ\x02ُِ')
buf.write('\x05їȬ\x02ِّ\x05еț\x02ّْ')
buf.write('\x05хȣ\x02ْٓ\x05яȨ\x02ٓٔ')
buf.write('\x05ћȮ\x02ٔ\x8a\x03\x02\x02\x02ٕٖ\x05й')
buf.write('ȝ\x02ٖٗ\x05ёȩ\x02ٗ٘\x05я')
buf.write('Ȩ\x02٘ٙ\x05љȭ\x02ٙٚ\x05ћ')
buf.write('Ȯ\x02ٚٛ\x05їȬ\x02ٜٛ\x05е')
buf.write('ț\x02ٜٝ\x05хȣ\x02ٝٞ\x05я')
buf.write('Ȩ\x02ٟٞ\x05ћȮ\x02ٟ٠\x05љ')
buf.write('ȭ\x02٠\x8c\x03\x02\x02\x02١٢\x05йȝ')
buf.write('\x02٢٣\x05ёȩ\x02٣٤\x05яȨ')
buf.write('\x02٤٥\x05љȭ\x02٥٦\x05ћȮ')
buf.write('\x02٦٧\x05їȬ\x02٧٨\x05ѝȯ')
buf.write('\x02٨٩\x05йȝ\x02٩٪\x05ћȮ')
buf.write('\x02٪٫\x05ёȩ\x02٫٬\x05їȬ')
buf.write('\x02٬\x8e\x03\x02\x02\x02٭ٮ\x05йȝ\x02ٮ')
buf.write('ٯ\x05ёȩ\x02ٯٰ\x05яȨ\x02ٰ')
buf.write('ٱ\x05ћȮ\x02ٱٲ\x05нȟ\x02ٲ')
buf.write('ٳ\x05яȨ\x02ٳٴ\x05ћȮ\x02ٴ')
buf.write('\x90\x03\x02\x02\x02ٵٶ\x05йȝ\x02ٶٷ')
buf.write('\x05ёȩ\x02ٷٸ\x05яȨ\x02ٸٹ')
buf.write('\x05ћȮ\x02ٹٺ\x05нȟ\x02ٺٻ')
buf.write('\x05ѣȲ\x02ٻټ\x05ћȮ\x02ټ\x92')
buf.write('\x03\x02\x02\x02ٽپ\x05йȝ\x02پٿ\x05ё')
buf.write('ȩ\x02ٿڀ\x05яȨ\x02ڀځ\x05ћ')
buf.write('Ȯ\x02ځڂ\x05хȣ\x02ڂڃ\x05я')
buf.write('Ȩ\x02ڃڄ\x05ѝȯ\x02ڄڅ\x05н')
buf.write('ȟ\x02څ\x94\x03\x02\x02\x02چڇ\x05йȝ')
buf.write('\x02ڇڈ\x05ёȩ\x02ڈډ\x05яȨ')
buf.write('\x02ډڊ\x05џȰ\x02ڊڋ\x05нȟ')
buf.write('\x02ڋڌ\x05їȬ\x02ڌڍ\x05ћȮ')
buf.write('\x02ڍ\x96\x03\x02\x02\x02ڎڏ\x05йȝ\x02ڏ')
buf.write('ڐ\x05ёȩ\x02ڐڑ\x05їȬ\x02ڑ')
buf.write('ڒ\x05їȬ\x02ڒړ\x05ѝȯ\x02ړ')
buf.write('ڔ\x05ѓȪ\x02ڔڕ\x05ћȮ\x02ڕ')
buf.write('ږ\x07a\x02\x02ږڗ\x05ѣȲ\x02ڗژ')
buf.write('\x05хȣ\x02ژڙ\x05лȞ\x02ڙ\x98')
buf.write('\x03\x02\x02\x02ښڛ\x05йȝ\x02ڛڜ\x05ё')
buf.write('ȩ\x02ڜڝ\x05їȬ\x02ڝڞ\x05ї')
buf.write('Ȭ\x02ڞڟ\x05ѝȯ\x02ڟڠ\x05ѓ')
buf.write('Ȫ\x02ڠڡ\x05ћȮ\x02ڡڢ\x07a\x02')
buf.write('\x02ڢڣ\x05ѣȲ\x02ڣڤ\x05хȣ')
buf.write('\x02ڤڥ\x05лȞ\x02ڥڦ\x07a\x02\x02ڦ')
buf.write('ڧ\x05еț\x02ڧڨ\x05ыȦ\x02ڨ')
buf.write('ک\x05ыȦ\x02ک\x9a\x03\x02\x02\x02ڪګ')
buf.write('\x05йȝ\x02ګڬ\x05ёȩ\x02ڬڭ')
buf.write('\x05љȭ\x02ڭڮ\x05ћȮ\x02ڮ\x9c')
buf.write('\x03\x02\x02\x02گڰ\x05йȝ\x02ڰڱ\x05ё')
buf.write('ȩ\x02ڱڲ\x05ѝȯ\x02ڲڳ\x05я')
buf.write('Ȩ\x02ڳڴ\x05ћȮ\x02ڴ\x9e\x03\x02\x02')
buf.write('\x02ڵڶ\x05йȝ\x02ڶڷ\x05їȬ')
buf.write('\x02ڷڸ\x05нȟ\x02ڸڹ\x05еț')
buf.write('\x02ڹں\x05ћȮ\x02ںڻ\x05нȟ')
buf.write('\x02ڻ\xa0\x03\x02\x02\x02ڼڽ\x05йȝ\x02ڽ')
buf.write('ھ\x05їȬ\x02ھڿ\x05ёȩ\x02ڿ')
buf.write('ۀ\x05љȭ\x02ۀہ\x05љȭ\x02ہ')
buf.write('¢\x03\x02\x02\x02ۂۃ\x05йȝ\x02ۃۄ')
buf.write('\x05ѝȯ\x02ۄۅ\x05зȜ\x02ۅۆ')
buf.write('\x05нȟ\x02ۆ¤\x03\x02\x02\x02ۇۈ\x05й')
buf.write('ȝ\x02ۈۉ\x05ѝȯ\x02ۉۊ\x05ї')
buf.write('Ȭ\x02ۊۋ\x05їȬ\x02ۋی\x05н')
buf.write('ȟ\x02یۍ\x05яȨ\x02ۍێ\x05ћ')
buf.write('Ȯ\x02ێ¦\x03\x02\x02\x02ۏې\x05йȝ')
buf.write('\x02ېۑ\x05ѝȯ\x02ۑے\x05їȬ')
buf.write('\x02ےۓ\x05їȬ\x02ۓ۔\x05нȟ')
buf.write('\x02۔ە\x05яȨ\x02ەۖ\x05ћȮ')
buf.write('\x02ۖۗ\x07a\x02\x02ۗۘ\x05ѝȯ\x02ۘ')
buf.write('ۙ\x05љȭ\x02ۙۚ\x05нȟ\x02ۚ')
buf.write('ۛ\x05їȬ\x02ۛ¨\x03\x02\x02\x02ۜ\u06dd')
buf.write('\x05йȝ\x02\u06dd۞\x05ѝȯ\x02۞۟')
buf.write('\x05їȬ\x02۟۠\x05љȭ\x02۠ۡ')
buf.write('\x05ёȩ\x02ۡۢ\x05їȬ\x02ۢª')
buf.write('\x03\x02\x02\x02ۣۤ\x05йȝ\x02ۤۥ\x05ѝ')
buf.write('ȯ\x02ۥۦ\x05љȭ\x02ۦۧ\x05ћ')
buf.write('Ȯ\x02ۧۨ\x05ёȩ\x02ۨ۩\x05э')
buf.write('ȧ\x02۩۪\x05лȞ\x02۪۫\x05е')
buf.write('ț\x02۫۬\x05ћȮ\x02ۭ۬\x05ѝ')
buf.write('ȯ\x02ۭۮ\x05эȧ\x02ۮ¬\x03\x02\x02')
buf.write('\x02ۯ۰\x05йȝ\x02۰۱\x05ѥȳ')
buf.write('\x02۱۲\x05йȝ\x02۲۳\x05ыȦ')
buf.write('\x02۳۴\x05нȟ\x02۴®\x03\x02\x02\x02۵')
buf.write('۶\x05лȞ\x02۶۷\x05еț\x02۷')
buf.write('۸\x05ћȮ\x02۸۹\x05еț\x02۹')
buf.write('°\x03\x02\x02\x02ۺۻ\x05лȞ\x02ۻۼ')
buf.write('\x05еț\x02ۼ۽\x05ћȮ\x02۽۾')
buf.write('\x05еț\x02۾ۿ\x05зȜ\x02ۿ܀')
buf.write('\x05еț\x02܀܁\x05љȭ\x02܁܂')
buf.write('\x05нȟ\x02܂²\x03\x02\x02\x02܃܄\x05л')
buf.write('Ȟ\x02܄܅\x05еț\x02܅܆\x05ћ')
buf.write('Ȯ\x02܆܇\x05нȟ\x02܇´\x03\x02\x02')
buf.write('\x02܈܉\x05лȞ\x02܉܊\x05еț')
buf.write('\x02܊܋\x05ѥȳ\x02܋¶\x03\x02\x02\x02܌')
buf.write('܍\x05лȞ\x02܍\u070e\x05зȜ\x02\u070e')
buf.write('\u070f\x07a\x02\x02\u070fܐ\x05їȬ\x02ܐܑ')
buf.write('\x05ёȩ\x02ܑܒ\x05ыȦ\x02ܒܓ')
buf.write('\x05нȟ\x02ܓܔ\x07a\x02\x02ܔܕ\x05й')
buf.write('ȝ\x02ܕܖ\x05уȢ\x02ܖܗ\x05е')
buf.write('ț\x02ܗܘ\x05яȨ\x02ܘܙ\x05с')
buf.write('ȡ\x02ܙܚ\x05нȟ\x02ܚ¸\x03\x02\x02')
buf.write('\x02ܛܜ\x05лȞ\x02ܜܝ\x05зȜ')
buf.write('\x02ܝܞ\x05ћȮ\x02ܞܟ\x05хȣ')
buf.write('\x02ܟܠ\x05эȧ\x02ܠܡ\x05нȟ')
buf.write('\x02ܡܢ\x05ѧȴ\x02ܢܣ\x05ёȩ')
buf.write('\x02ܣܤ\x05яȨ\x02ܤܥ\x05нȟ')
buf.write('\x02ܥº\x03\x02\x02\x02ܦܧ\x05лȞ\x02ܧ')
buf.write('ܨ\x05лȞ\x02ܨܩ\x05ыȦ\x02ܩ')
buf.write('¼\x03\x02\x02\x02ܪܫ\x05лȞ\x02ܫܬ')
buf.write('\x05нȟ\x02ܬܭ\x05зȜ\x02ܭܮ')
buf.write('\x05ѝȯ\x02ܮܯ\x05сȡ\x02ܯ¾')
buf.write('\x03\x02\x02\x02ܱܰ\x05лȞ\x02ܱܲ\x05н')
buf.write('ȟ\x02ܲܳ\x05йȝ\x02ܳÀ\x03\x02\x02')
buf.write('\x02ܴܵ\x05лȞ\x02ܵܶ\x05нȟ')
buf.write('\x02ܷܶ\x05йȝ\x02ܷܸ\x05хȣ')
buf.write('\x02ܸܹ\x05эȧ\x02ܹܺ\x05еț')
buf.write('\x02ܻܺ\x05ыȦ\x02ܻÂ\x03\x02\x02\x02ܼ')
buf.write('ܽ\x05лȞ\x02ܾܽ\x05нȟ\x02ܾ')
buf.write('ܿ\x05йȝ\x02ܿ݀\x05ыȦ\x02݀')
buf.write('݁\x05еț\x02݂݁\x05їȬ\x02݂')
buf.write('݃\x05нȟ\x02݃Ä\x03\x02\x02\x02݄݅')
buf.write('\x05лȞ\x02݆݅\x05нȟ\x02݆݇')
buf.write('\x05йȝ\x02݈݇\x05ёȩ\x02݈݉')
buf.write('\x05эȧ\x02݉݊\x05ѓȪ\x02݊\u074b')
buf.write('\x05ёȩ\x02\u074b\u074c\x05љȭ\x02\u074cݍ')
buf.write('\x05нȟ\x02ݍÆ\x03\x02\x02\x02ݎݏ\x05л')
buf.write('Ȟ\x02ݏݐ\x05нȟ\x02ݐݑ\x05й')
buf.write('ȝ\x02ݑݒ\x05їȬ\x02ݒݓ\x05н')
buf.write('ȟ\x02ݓݔ\x05эȧ\x02ݔݕ\x05н')
buf.write('ȟ\x02ݕݖ\x05яȨ\x02ݖݗ\x05ћ')
buf.write('Ȯ\x02ݗÈ\x03\x02\x02\x02ݘݙ\x05лȞ')
buf.write('\x02ݙݚ\x05нȟ\x02ݚݛ\x05пȠ')
buf.write('\x02ݛݜ\x05еț\x02ݜݝ\x05ѝȯ')
buf.write('\x02ݝݞ\x05ыȦ\x02ݞݟ\x05ћȮ')
buf.write('\x02ݟÊ\x03\x02\x02\x02ݠݡ\x05лȞ\x02ݡ')
buf.write('ݢ\x05нȟ\x02ݢݣ\x05пȠ\x02ݣ')
buf.write('ݤ\x05еț\x02ݤݥ\x05ѝȯ\x02ݥ')
buf.write('ݦ\x05ыȦ\x02ݦݧ\x05ћȮ\x02ݧ')
buf.write('ݨ\x05љȭ\x02ݨÌ\x03\x02\x02\x02ݩݪ')
buf.write('\x05лȞ\x02ݪݫ\x05нȟ\x02ݫݬ')
buf.write('\x05пȠ\x02ݬݭ\x05нȟ\x02ݭݮ')
buf.write('\x05їȬ\x02ݮݯ\x05їȬ\x02ݯݰ')
buf.write('\x05нȟ\x02ݰݱ\x05лȞ\x02ݱÎ')
buf.write('\x03\x02\x02\x02ݲݳ\x05лȞ\x02ݳݴ\x05н')
buf.write('ȟ\x02ݴݵ\x05пȠ\x02ݵݶ\x05х')
buf.write('ȣ\x02ݶݷ\x05яȨ\x02ݷݸ\x05н')
buf.write('ȟ\x02ݸݹ\x05їȬ\x02ݹÐ\x03\x02\x02')
buf.write('\x02ݺݻ\x05лȞ\x02ݻݼ\x05нȟ')
buf.write('\x02ݼݽ\x05ыȦ\x02ݽݾ\x05нȟ')
buf.write('\x02ݾݿ\x05ћȮ\x02ݿހ\x05нȟ')
buf.write('\x02ހÒ\x03\x02\x02\x02ށނ\x05лȞ\x02ނ')
buf.write('ރ\x05нȟ\x02ރބ\x05ѓȪ\x02ބ')
buf.write('ޅ\x05ћȮ\x02ޅކ\x05уȢ\x02ކ')
buf.write('Ô\x03\x02\x02\x02އވ\x05лȞ\x02ވމ')
buf.write('\x05нȟ\x02މފ\x05љȭ\x02ފދ')
buf.write('\x05йȝ\x02ދÖ\x03\x02\x02\x02ތލ\x05л')
buf.write('Ȟ\x02ލގ\x05нȟ\x02ގޏ\x05ћ')
buf.write('Ȯ\x02ޏސ\x05нȟ\x02ސޑ\x05ї')
buf.write('Ȭ\x02ޑޒ\x05эȧ\x02ޒޓ\x05х')
buf.write('ȣ\x02ޓޔ\x05яȨ\x02ޔޕ\x05х')
buf.write('ȣ\x02ޕޖ\x05љȭ\x02ޖޗ\x05ћ')
buf.write('Ȯ\x02ޗޘ\x05хȣ\x02ޘޙ\x05й')
buf.write('ȝ\x02ޙØ\x03\x02\x02\x02ޚޛ\x05лȞ')
buf.write('\x02ޛޜ\x05хȣ\x02ޜޝ\x05эȧ')
buf.write('\x02ޝޞ\x05нȟ\x02ޞޟ\x05яȨ')
buf.write('\x02ޟޠ\x05љȭ\x02ޠޡ\x05хȣ')
buf.write('\x02ޡޢ\x05ёȩ\x02ޢޣ\x05яȨ')
buf.write('\x02ޣÚ\x03\x02\x02\x02ޤޥ\x05лȞ\x02ޥ')
buf.write('ަ\x05хȣ\x02ަާ\x05љȭ\x02ާ')
buf.write('ި\x05еț\x02ިީ\x05зȜ\x02ީ')
buf.write('ު\x05ыȦ\x02ުޫ\x05нȟ\x02ޫ')
buf.write('Ü\x03\x02\x02\x02ެޭ\x05лȞ\x02ޭޮ')
buf.write('\x05хȣ\x02ޮޯ\x05љȭ\x02ޯް')
buf.write('\x05еț\x02ްޱ\x05љȭ\x02ޱ\u07b2')
buf.write('\x05љȭ\x02\u07b2\u07b3\x05ёȩ\x02\u07b3\u07b4')
buf.write('\x05йȝ\x02\u07b4\u07b5\x05хȣ\x02\u07b5\u07b6')
buf.write('\x05еț\x02\u07b6\u07b7\x05ћȮ\x02\u07b7\u07b8')
buf.write('\x05нȟ\x02\u07b8Þ\x03\x02\x02\x02\u07b9\u07ba\x05л')
buf.write('Ȟ\x02\u07ba\u07bb\x05хȣ\x02\u07bb\u07bc\x05љ')
buf.write('ȭ\x02\u07bc\u07bd\x05ћȮ\x02\u07bd\u07be\x05х')
buf.write('ȣ\x02\u07be\u07bf\x05яȨ\x02\u07bf߀\x05й')
buf.write('ȝ\x02߀߁\x05ћȮ\x02߁à\x03\x02\x02')
buf.write('\x02߂߃\x05лȞ\x02߃߄\x05ёȩ')
buf.write('\x02߄߅\x05йȝ\x02߅߆\x05ѝȯ')
buf.write('\x02߆߇\x05эȧ\x02߇߈\x05нȟ')
buf.write('\x02߈߉\x05яȨ\x02߉ߊ\x05ћȮ')
buf.write('\x02ߊâ\x03\x02\x02\x02ߋߌ\x05лȞ\x02ߌ')
buf.write('ߍ\x05ёȩ\x02ߍߎ\x05ѝȯ\x02ߎ')
buf.write('ߏ\x05зȜ\x02ߏߐ\x05ыȦ\x02ߐ')
buf.write('ߑ\x05нȟ\x02ߑä\x03\x02\x02\x02ߒߓ')
buf.write('\x05лȞ\x02ߓߔ\x05їȬ\x02ߔߕ')
buf.write('\x05ёȩ\x02ߕߖ\x05ѓȪ\x02ߖæ')
buf.write('\x03\x02\x02\x02ߗߘ\x05лȞ\x02ߘߙ\x05љ')
buf.write('ȭ\x02ߙߚ\x05хȣ\x02ߚߛ\x05я')
buf.write('Ȩ\x02ߛߜ\x05ћȮ\x02ߜߝ\x05н')
buf.write('ȟ\x02ߝߞ\x05їȬ\x02ߞߟ\x05џ')
buf.write('Ȱ\x02ߟߠ\x05еț\x02ߠߡ\x05ы')
buf.write('Ȧ\x02ߡߢ\x07a\x02\x02ߢߣ\x05ѝȯ')
buf.write('\x02ߣߤ\x05яȨ\x02ߤߥ\x05йȝ')
buf.write('\x02ߥߦ\x05ёȩ\x02ߦߧ\x05яȨ')
buf.write('\x02ߧߨ\x05љȭ\x02ߨߩ\x05ћȮ')
buf.write('\x02ߩߪ\x05їȬ\x02ߪ߫\x05еț')
buf.write('\x02߫߬\x05хȣ\x02߬߭\x05яȨ')
buf.write('\x02߭߮\x05нȟ\x02߮߯\x05лȞ')
buf.write('\x02߯è\x03\x02\x02\x02߰߱\x05нȟ\x02߱')
buf.write('߲\x05еț\x02߲߳\x05йȝ\x02߳')
buf.write('ߴ\x05уȢ\x02ߴê\x03\x02\x02\x02ߵ߶')
buf.write('\x05нȟ\x02߶߷\x05ыȦ\x02߷߸')
buf.write('\x05нȟ\x02߸߹\x05эȧ\x02߹ߺ')
buf.write('\x05нȟ\x02ߺ\u07fb\x05яȨ\x02\u07fb\u07fc')
buf.write('\x05ћȮ\x02\u07fcì\x03\x02\x02\x02߽߾\x05н')
buf.write('ȟ\x02߾߿\x05ыȦ\x02߿ࠀ\x05љ')
buf.write('ȭ\x02ࠀࠁ\x05нȟ\x02ࠁî\x03\x02\x02')
buf.write('\x02ࠂࠃ\x05нȟ\x02ࠃࠄ\x05ыȦ')
buf.write('\x02ࠄࠅ\x05љȭ\x02ࠅࠆ\x05хȣ')
buf.write('\x02ࠆࠇ\x05пȠ\x02ࠇð\x03\x02\x02\x02ࠈ')
buf.write('ࠉ\x05нȟ\x02ࠉࠊ\x05эȧ\x02ࠊ')
buf.write('ࠋ\x05ѓȪ\x02ࠋࠌ\x05ћȮ\x02ࠌ')
buf.write('ࠍ\x05ѥȳ\x02ࠍò\x03\x02\x02\x02ࠎࠏ')
buf.write('\x05нȟ\x02ࠏࠐ\x05яȨ\x02ࠐࠑ')
buf.write('\x05еț\x02ࠑࠒ\x05зȜ\x02ࠒࠓ')
buf.write('\x05ыȦ\x02ࠓࠔ\x05нȟ\x02ࠔô')
buf.write('\x03\x02\x02\x02ࠕࠖ\x05нȟ\x02ࠖࠗ\x05я')
buf.write('Ȩ\x02ࠗ࠘\x05йȝ\x02࠘࠙\x05ё')
buf.write('ȩ\x02࠙ࠚ\x05лȞ\x02ࠚࠛ\x05х')
buf.write('ȣ\x02ࠛࠜ\x05яȨ\x02ࠜࠝ\x05с')
buf.write('ȡ\x02ࠝö\x03\x02\x02\x02ࠞࠟ\x05нȟ')
buf.write('\x02ࠟࠠ\x05яȨ\x02ࠠࠡ\x05лȞ')
buf.write('\x02ࠡø\x03\x02\x02\x02ࠢࠣ\x05нȟ\x02ࠣ')
buf.write('ࠤ\x05яȨ\x02ࠤࠥ\x05ћȮ\x02ࠥ')
buf.write('ࠦ\x05хȣ\x02ࠦࠧ\x05ћȮ\x02ࠧ')
buf.write('ࠨ\x05ѥȳ\x02ࠨࠩ\x05нȟ\x02ࠩ')
buf.write('ࠪ\x05љȭ\x02ࠪࠫ\x05йȝ\x02ࠫ')
buf.write('ࠬ\x05еț\x02ࠬ࠭\x05ѓȪ\x02࠭')
buf.write('\u082e\x05хȣ\x02\u082e\u082f\x05яȨ\x02\u082f')
buf.write('࠰\x05сȡ\x02࠰ú\x03\x02\x02\x02࠱࠲')
buf.write('\x05нȟ\x02࠲࠳\x05їȬ\x02࠳࠴')
buf.write('\x05їȬ\x02࠴ü\x03\x02\x02\x02࠵࠶\x05н')
buf.write('ȟ\x02࠶࠷\x05їȬ\x02࠷࠸\x05ї')
buf.write('Ȭ\x02࠸࠹\x05ёȩ\x02࠹࠺\x05ї')
buf.write('Ȭ\x02࠺࠻\x05љȭ\x02࠻þ\x03\x02\x02')
buf.write('\x02࠼࠽\x05нȟ\x02࠽࠾\x05љȭ')
buf.write('\x02࠾\u083f\x05йȝ\x02\u083fࡀ\x05еț')
buf.write('\x02ࡀࡁ\x05ѓȪ\x02ࡁࡂ\x05нȟ')
buf.write('\x02ࡂĀ\x03\x02\x02\x02ࡃࡄ\x05нȟ\x02ࡄ')
buf.write('ࡅ\x05џȰ\x02ࡅࡆ\x05еț\x02ࡆ')
buf.write('ࡇ\x05ыȦ\x02ࡇࡈ\x05яȨ\x02ࡈ')
buf.write('ࡉ\x05еț\x02ࡉࡊ\x05эȧ\x02ࡊ')
buf.write('ࡋ\x05нȟ\x02ࡋĂ\x03\x02\x02\x02ࡌࡍ')
buf.write('\x05нȟ\x02ࡍࡎ\x05ѣȲ\x02ࡎࡏ')
buf.write('\x05йȝ\x02ࡏࡐ\x05нȟ\x02ࡐࡑ')
buf.write('\x05ѓȪ\x02ࡑࡒ\x05ћȮ\x02ࡒĄ')
buf.write('\x03\x02\x02\x02ࡓࡔ\x05нȟ\x02ࡔࡕ\x05ѣ')
buf.write('Ȳ\x02ࡕࡖ\x05йȝ\x02ࡖࡗ\x05н')
buf.write('ȟ\x02ࡗࡘ\x05ѓȪ\x02ࡘ࡙\x05ћ')
buf.write('Ȯ\x02࡙࡚\x05хȣ\x02࡚࡛\x05ё')
buf.write('ȩ\x02࡛\u085c\x05яȨ\x02\u085cĆ\x03\x02\x02')
buf.write('\x02\u085d࡞\x05нȟ\x02࡞\u085f\x05ѣȲ')
buf.write('\x02\u085fࡠ\x05йȝ\x02ࡠࡡ\x05нȟ')
buf.write('\x02ࡡࡢ\x05ѓȪ\x02ࡢࡣ\x05ћȮ')
buf.write('\x02ࡣࡤ\x05хȣ\x02ࡤࡥ\x05ёȩ')
buf.write('\x02ࡥࡦ\x05яȨ\x02ࡦࡧ\x07a\x02\x02ࡧ')
buf.write('ࡨ\x05хȣ\x02ࡨࡩ\x05яȨ\x02ࡩ')
buf.write('ࡪ\x05хȣ\x02ࡪ\u086b\x05ћȮ\x02\u086b')
buf.write('Ĉ\x03\x02\x02\x02\u086c\u086d\x05нȟ\x02\u086d\u086e')
buf.write('\x05ѣȲ\x02\u086e\u086f\x05йȝ\x02\u086fࡰ')
buf.write('\x05нȟ\x02ࡰࡱ\x05ѓȪ\x02ࡱࡲ')
buf.write('\x05ћȮ\x02ࡲࡳ\x05хȣ\x02ࡳࡴ')
buf.write('\x05ёȩ\x02ࡴࡵ\x05яȨ\x02ࡵࡶ')
buf.write('\x05љȭ\x02ࡶĊ\x03\x02\x02\x02ࡷࡸ\x05н')
buf.write('ȟ\x02ࡸࡹ\x05ѣȲ\x02ࡹࡺ\x05й')
buf.write('ȝ\x02ࡺࡻ\x05ыȦ\x02ࡻࡼ\x05ѝ')
buf.write('ȯ\x02ࡼࡽ\x05лȞ\x02ࡽࡾ\x05н')
buf.write('ȟ\x02ࡾČ\x03\x02\x02\x02ࡿࢀ\x05нȟ')
buf.write('\x02ࢀࢁ\x05ѣȲ\x02ࢁࢂ\x05йȝ')
buf.write('\x02ࢂࢃ\x05ыȦ\x02ࢃࢄ\x05ѝȯ')
buf.write('\x02ࢄࢅ\x05љȭ\x02ࢅࢆ\x05хȣ')
buf.write('\x02ࢆࢇ\x05џȰ\x02ࢇ࢈\x05нȟ')
buf.write('\x02࢈Ď\x03\x02\x02\x02ࢉࢊ\x05нȟ\x02ࢊ')
buf.write('ࢋ\x05ѣȲ\x02ࢋࢌ\x05нȟ\x02ࢌ')
buf.write('ࢍ\x05йȝ\x02ࢍࢎ\x05ѝȯ\x02ࢎ')
buf.write('\u088f\x05ћȮ\x02\u088f\u0890\x05нȟ\x02\u0890')
buf.write('Đ\x03\x02\x02\x02\u0891\u0892\x05нȟ\x02\u0892\u0893')
buf.write('\x05ѣȲ\x02\u0893\u0894\x05хȣ\x02\u0894\u0895')
buf.write('\x05љȭ\x02\u0895\u0896\x05ћȮ\x02\u0896\u0897')
buf.write('\x05љȭ\x02\u0897Ē\x03\x02\x02\x02࢙࢘\x05н')
buf.write('ȟ\x02࢙࢚\x05ѣȲ\x02࢚࢛\x05х')
buf.write('ȣ\x02࢛࢜\x05ћȮ\x02࢜Ĕ\x03\x02\x02')
buf.write('\x02࢝࢞\x05нȟ\x02࢞࢟\x05ѣȲ')
buf.write('\x02࢟ࢠ\x05ѓȪ\x02ࢠࢡ\x05ыȦ')
buf.write('\x02ࢡࢢ\x05еț\x02ࢢࢣ\x05хȣ')
buf.write('\x02ࢣࢤ\x05яȨ\x02ࢤĖ\x03\x02\x02\x02ࢥ')
buf.write('ࢦ\x05нȟ\x02ࢦࢧ\x05ѣȲ\x02ࢧ')
buf.write('ࢨ\x05ћȮ\x02ࢨࢩ\x05нȟ\x02ࢩ')
buf.write('ࢪ\x05їȬ\x02ࢪࢫ\x05яȨ\x02ࢫ')
buf.write('ࢬ\x05еț\x02ࢬࢭ\x05ыȦ\x02ࢭ')
buf.write('Ę\x03\x02\x02\x02ࢮࢯ\x05нȟ\x02ࢯࢰ')
buf.write('\x05ѣȲ\x02ࢰࢱ\x05ћȮ\x02ࢱࢲ')
buf.write('\x05їȬ\x02ࢲࢳ\x05еț\x02ࢳࢴ')
buf.write('\x05йȝ\x02ࢴࢵ\x05ћȮ\x02ࢵĚ')
buf.write('\x03\x02\x02\x02ࢶࢷ\x05пȠ\x02ࢷࢸ\x05е')
buf.write('ț\x02ࢸࢹ\x05хȣ\x02ࢹࢺ\x05ы')
buf.write('Ȧ\x02ࢺࢻ\x05ѝȯ\x02ࢻࢼ\x05ї')
buf.write('Ȭ\x02ࢼࢽ\x05нȟ\x02ࢽĜ\x03\x02\x02')
buf.write('\x02ࢾࢿ\x05пȠ\x02ࢿࣀ\x05еț')
buf.write('\x02ࣀࣁ\x05ыȦ\x02ࣁࣂ\x05љȭ')
buf.write('\x02ࣂࣃ\x05нȟ\x02ࣃĞ\x03\x02\x02\x02ࣄ')
buf.write('ࣅ\x05пȠ\x02ࣅࣆ\x05нȟ\x02ࣆ')
buf.write('ࣇ\x05ћȮ\x02ࣇࣈ\x05йȝ\x02ࣈ')
buf.write('ࣉ\x05уȢ\x02ࣉĠ\x03\x02\x02\x02࣊࣋')
buf.write('\x05пȠ\x02࣋࣌\x05хȣ\x02࣌࣍')
buf.write('\x05яȨ\x02࣍࣎\x05еț\x02࣏࣎')
buf.write('\x05ыȦ\x02࣏Ģ\x03\x02\x02\x02࣐࣑\x05п')
buf.write('Ƞ\x02࣑࣒\x05хȣ\x02࣒࣓\x05ї')
buf.write('Ȭ\x02࣓ࣔ\x05љȭ\x02ࣔࣕ\x05ћ')
buf.write('Ȯ\x02ࣕĤ\x03\x02\x02\x02ࣖࣗ\x05пȠ')
buf.write('\x02ࣗࣘ\x05хȣ\x02ࣘࣙ\x05їȬ')
buf.write('\x02ࣙࣚ\x05љȭ\x02ࣚࣛ\x05ћȮ')
buf.write('\x02ࣛࣜ\x07a\x02\x02ࣜࣝ\x05џȰ\x02ࣝ')
buf.write('ࣞ\x05еț\x02ࣞࣟ\x05ыȦ\x02ࣟ')
buf.write('࣠\x05ѝȯ\x02࣠࣡\x05нȟ\x02࣡')
buf.write('Ħ\x03\x02\x02\x02\u08e2ࣣ\x05пȠ\x02ࣣࣤ')
buf.write('\x05ыȦ\x02ࣤࣥ\x05ёȩ\x02ࣦࣥ')
buf.write('\x05еț\x02ࣦࣧ\x05ћȮ\x02ࣧĨ')
buf.write('\x03\x02\x02\x02ࣩࣨ\x05пȠ\x02ࣩ࣪\x05ё')
buf.write('ȩ\x02࣪࣫\x05ыȦ\x02࣫࣬\x05ы')
buf.write('Ȧ\x02࣭࣬\x05ёȩ\x02࣭࣮\x05ѡ')
buf.write('ȱ\x02࣮࣯\x05хȣ\x02ࣰ࣯\x05я')
buf.write('Ȩ\x02ࣰࣱ\x05сȡ\x02ࣱĪ\x03\x02\x02')
buf.write('\x02ࣲࣳ\x05пȠ\x02ࣳࣴ\x05ёȩ')
buf.write('\x02ࣴࣵ\x05ыȦ\x02ࣶࣵ\x05ыȦ')
buf.write('\x02ࣶࣷ\x05ёȩ\x02ࣷࣸ\x05ѡȱ')
buf.write('\x02ࣹࣸ\x05љȭ\x02ࣹĬ\x03\x02\x02\x02ࣺ')
buf.write('ࣻ\x05пȠ\x02ࣻࣼ\x05ёȩ\x02ࣼ')
buf.write('ࣽ\x05їȬ\x02ࣽĮ\x03\x02\x02\x02ࣾࣿ')
buf.write('\x05пȠ\x02ࣿऀ\x05ёȩ\x02ऀँ')
buf.write('\x05їȬ\x02ँं\x05еț\x02ंः')
buf.write('\x05ыȦ\x02ःऄ\x05ыȦ\x02ऄİ')
buf.write('\x03\x02\x02\x02अआ\x05пȠ\x02आइ\x05ё')
buf.write('ȩ\x02इई\x05їȬ\x02ईउ\x05й')
buf.write('ȝ\x02उऊ\x05нȟ\x02ऊIJ\x03\x02\x02')
buf.write('\x02ऋऌ\x05пȠ\x02ऌऍ\x05їȬ')
buf.write('\x02ऍऎ\x05ёȩ\x02ऎए\x05эȧ')
buf.write('\x02एĴ\x03\x02\x02\x02ऐऑ\x05пȠ\x02ऑ')
buf.write('ऒ\x05ѝȯ\x02ऒओ\x05ыȦ\x02ओ')
buf.write('औ\x05ыȦ\x02औĶ\x03\x02\x02\x02कख')
buf.write('\x05пȠ\x02खग\x05ѝȯ\x02गघ')
buf.write('\x05яȨ\x02घङ\x05йȝ\x02ङच')
buf.write('\x05ћȮ\x02चछ\x05хȣ\x02छज')
buf.write('\x05ёȩ\x02जझ\x05яȨ\x02झĸ')
buf.write('\x03\x02\x02\x02ञट\x05сȡ\x02टठ\x05ё')
buf.write('ȩ\x02ठड\x05ћȮ\x02डढ\x05ё')
buf.write('ȩ\x02ढĺ\x03\x02\x02\x02णत\x05сȡ')
buf.write('\x02तथ\x05їȬ\x02थद\x05еț')
buf.write('\x02दध\x05яȨ\x02धन\x05ћȮ')
buf.write('\x02नļ\x03\x02\x02\x02ऩप\x05сȡ\x02प')
buf.write('फ\x05їȬ\x02फब\x05ёȩ\x02ब')
buf.write('भ\x05ѝȯ\x02भम\x05ѓȪ\x02म')
buf.write('ľ\x03\x02\x02\x02यर\x05сȡ\x02रऱ')
buf.write('\x05їȬ\x02ऱल\x05ёȩ\x02लळ')
buf.write('\x05ѝȯ\x02ळऴ\x05ѓȪ\x02ऴव')
buf.write('\x05хȣ\x02वश\x05яȨ\x02शष')
buf.write('\x05сȡ\x02षŀ\x03\x02\x02\x02सह\x05у')
buf.write('Ȣ\x02हऺ\x05еț\x02ऺऻ\x05љ')
buf.write('ȭ\x02ऻ़\x05уȢ\x02़ł\x03\x02\x02')
buf.write('\x02ऽा\x05уȢ\x02ाि\x05еț')
buf.write('\x02िी\x05џȰ\x02ीु\x05хȣ')
buf.write('\x02ुू\x05яȨ\x02ूृ\x05сȡ')
buf.write('\x02ृń\x03\x02\x02\x02ॄॅ\x05уȢ\x02ॅ')
buf.write('ॆ\x05хȣ\x02ॆे\x05лȞ\x02े')
buf.write('ै\x05нȟ\x02ैņ\x03\x02\x02\x02ॉॊ')
buf.write('\x05уȢ\x02ॊो\x05ёȩ\x02ोौ')
buf.write('\x05ѝȯ\x02ौ्\x05їȬ\x02्ň')
buf.write('\x03\x02\x02\x02ॎॏ\x05хȣ\x02ॏॐ\x05п')
buf.write('Ƞ\x02ॐŊ\x03\x02\x02\x02॒॑\x05хȣ')
buf.write('\x02॒॓\x05сȡ\x02॓॔\x05яȨ')
buf.write('\x02॔ॕ\x05ёȩ\x02ॕॖ\x05їȬ')
buf.write('\x02ॖॗ\x05нȟ\x02ॗŌ\x03\x02\x02\x02क़')
buf.write('ख़\x05хȣ\x02ख़ग़\x05эȧ\x02ग़')
buf.write('ज़\x05эȧ\x02ज़ड़\x05нȟ\x02ड़')
buf.write('ढ़\x05лȞ\x02ढ़फ़\x05хȣ\x02फ़')
buf.write('य़\x05еț\x02य़ॠ\x05ћȮ\x02ॠ')
buf.write('ॡ\x05нȟ\x02ॡŎ\x03\x02\x02\x02ॢॣ')
buf.write('\x05хȣ\x02ॣ।\x05яȨ\x02।Ő')
buf.write('\x03\x02\x02\x02॥०\x05хȣ\x02०१\x05я')
buf.write('Ȩ\x02१२\x05йȝ\x02२३\x05ы')
buf.write('Ȧ\x02३४\x05ѝȯ\x02४५\x05л')
buf.write('Ȟ\x02५६\x05нȟ\x02६Œ\x03\x02\x02')
buf.write('\x02७८\x05хȣ\x02८९\x05яȨ')
buf.write('\x02९॰\x05йȝ\x02॰ॱ\x05ыȦ')
buf.write('\x02ॱॲ\x05ѝȯ\x02ॲॳ\x05лȞ')
buf.write('\x02ॳॴ\x05хȣ\x02ॴॵ\x05яȨ')
buf.write('\x02ॵॶ\x05сȡ\x02ॶŔ\x03\x02\x02\x02ॷ')
buf.write('ॸ\x05хȣ\x02ॸॹ\x05яȨ\x02ॹ')
buf.write('ॺ\x05йȝ\x02ॺॻ\x05їȬ\x02ॻ')
buf.write('ॼ\x05нȟ\x02ॼॽ\x05эȧ\x02ॽ')
buf.write('ॾ\x05нȟ\x02ॾॿ\x05яȨ\x02ॿ')
buf.write('ঀ\x05ћȮ\x02ঀŖ\x03\x02\x02\x02ঁং')
buf.write('\x05хȣ\x02ংঃ\x05яȨ\x02ঃ\u0984')
buf.write('\x05лȞ\x02\u0984অ\x05нȟ\x02অআ')
buf.write('\x05яȨ\x02আই\x05ћȮ\x02ইŘ')
buf.write('\x03\x02\x02\x02ঈউ\x05хȣ\x02উঊ\x05я')
buf.write('Ȩ\x02ঊঋ\x05лȞ\x02ঋঌ\x05н')
buf.write('ȟ\x02ঌ\u098d\x05ѣȲ\x02\u098dŚ\x03\x02\x02')
buf.write('\x02\u098eএ\x05хȣ\x02এঐ\x05яȨ')
buf.write('\x02ঐ\u0991\x05лȞ\x02\u0991\u0992\x05нȟ')
buf.write('\x02\u0992ও\x05ѣȲ\x02ওঔ\x05нȟ')
buf.write('\x02ঔক\x05лȞ\x02কŜ\x03\x02\x02\x02খ')
buf.write('গ\x05хȣ\x02গঘ\x05яȨ\x02ঘ')
buf.write('ঙ\x05лȞ\x02ঙচ\x05хȣ\x02চ')
buf.write('ছ\x05йȝ\x02ছজ\x05еț\x02জ')
buf.write('ঝ\x05ћȮ\x02ঝঞ\x05ёȩ\x02ঞ')
buf.write('ট\x05їȬ\x02টŞ\x03\x02\x02\x02ঠড')
buf.write('\x05хȣ\x02ডঢ\x05яȨ\x02ঢণ')
buf.write('\x05лȞ\x02ণত\x05хȣ\x02তথ')
buf.write('\x05йȝ\x02থদ\x05нȟ\x02দধ')
buf.write('\x05љȭ\x02ধŠ\x03\x02\x02\x02ন\u09a9\x05х')
buf.write('ȣ\x02\u09a9প\x05яȨ\x02পফ\x05п')
buf.write('Ƞ\x02ফব\x05хȣ\x02বভ\x05я')
buf.write('Ȩ\x02ভম\x05хȣ\x02ময\x05ћ')
buf.write('Ȯ\x02যর\x05нȟ\x02রŢ\x03\x02\x02')
buf.write('\x02\u09b1ল\x05хȣ\x02ল\u09b3\x05яȨ')
buf.write('\x02\u09b3\u09b4\x05ыȦ\x02\u09b4\u09b5\x05хȣ')
buf.write('\x02\u09b5শ\x05яȨ\x02শষ\x05нȟ')
buf.write('\x02ষŤ\x03\x02\x02\x02সহ\x05хȣ\x02হ')
buf.write('\u09ba\x05яȨ\x02\u09ba\u09bb\x05яȨ\x02\u09bb')
buf.write('়\x05нȟ\x02়ঽ\x05їȬ\x02ঽ')
buf.write('Ŧ\x03\x02\x02\x02াি\x05хȣ\x02িী')
buf.write('\x05яȨ\x02ীু\x05ёȩ\x02ুূ')
buf.write('\x05ѝȯ\x02ূৃ\x05ћȮ\x02ৃŨ')
buf.write('\x03\x02\x02\x02ৄ\u09c5\x05хȣ\x02\u09c5\u09c6\x05я')
buf.write('Ȩ\x02\u09c6ে\x05љȭ\x02েৈ\x05н')
buf.write('ȟ\x02ৈ\u09c9\x05їȬ\x02\u09c9\u09ca\x05ћ')
buf.write('Ȯ\x02\u09caŪ\x03\x02\x02\x02োৌ\x05хȣ')
buf.write('\x02ৌ্\x05яȨ\x02্ৎ\x05љȭ')
buf.write('\x02ৎ\u09cf\x05ћȮ\x02\u09cf\u09d0\x05еț')
buf.write('\x02\u09d0\u09d1\x05яȨ\x02\u09d1\u09d2\x05ћȮ')
buf.write('\x02\u09d2\u09d3\x05хȣ\x02\u09d3\u09d4\x05еț')
buf.write('\x02\u09d4\u09d5\x05зȜ\x02\u09d5\u09d6\x05ыȦ')
buf.write('\x02\u09d6ৗ\x05нȟ\x02ৗŬ\x03\x02\x02\x02\u09d8')
buf.write('\u09d9\x05хȣ\x02\u09d9\u09da\x05яȨ\x02\u09da')
buf.write('\u09db\x05љȭ\x02\u09dbড়\x05ћȮ\x02ড়')
buf.write('ঢ়\x05нȟ\x02ঢ়\u09de\x05еț\x02\u09de')
buf.write('য়\x05лȞ\x02য়Ů\x03\x02\x02\x02ৠৡ')
buf.write('\x05хȣ\x02ৡৢ\x05яȨ\x02ৢৣ')
buf.write('\x05ћȮ\x02ৣŰ\x03\x02\x02\x02\u09e4\u09e5\x05х')
buf.write('ȣ\x02\u09e5০\x05яȨ\x02০১\x05ћ')
buf.write('Ȯ\x02১২\x05нȟ\x02২৩\x05с')
buf.write('ȡ\x02৩৪\x05нȟ\x02৪৫\x05ї')
buf.write('Ȭ\x02৫Ų\x03\x02\x02\x02৬৭\x05хȣ')
buf.write('\x02৭৮\x05яȨ\x02৮৯\x05ћȮ')
buf.write('\x02৯ৰ\x05нȟ\x02ৰৱ\x05їȬ')
buf.write('\x02ৱ৲\x05љȭ\x02৲৳\x05нȟ')
buf.write('\x02৳৴\x05йȝ\x02৴৵\x05ћȮ')
buf.write('\x02৵Ŵ\x03\x02\x02\x02৶৷\x05хȣ\x02৷')
buf.write('৸\x05яȨ\x02৸৹\x05ћȮ\x02৹')
buf.write('৺\x05нȟ\x02৺৻\x05їȬ\x02৻')
buf.write('ৼ\x05џȰ\x02ৼ৽\x05еț\x02৽')
buf.write('৾\x05ыȦ\x02৾Ŷ\x03\x02\x02\x02\u09ff\u0a00')
buf.write('\x05хȣ\x02\u0a00ਁ\x05яȨ\x02ਁਂ')
buf.write('\x05ћȮ\x02ਂਃ\x05ёȩ\x02ਃŸ')
buf.write('\x03\x02\x02\x02\u0a04ਅ\x05хȣ\x02ਅਆ\x05я')
buf.write('Ȩ\x02ਆਇ\x05џȰ\x02ਇਈ\x05е')
buf.write('ț\x02ਈਉ\x05ыȦ\x02ਉਊ\x05х')
buf.write('ȣ\x02ਊ\u0a0b\x05лȞ\x02\u0a0b\u0a0c\x05е')
buf.write('ț\x02\u0a0c\u0a0d\x05ћȮ\x02\u0a0d\u0a0e\x05н')
buf.write('ȟ\x02\u0a0eź\x03\x02\x02\x02ਏਐ\x05хȣ')
buf.write('\x02ਐ\u0a11\x05љȭ\x02\u0a11ż\x03\x02\x02\x02\u0a12')
buf.write('ਓ\x05хȣ\x02ਓਔ\x05љȭ\x02ਔ')
buf.write('ਕ\x05ёȩ\x02ਕਖ\x05ыȦ\x02ਖ')
buf.write('ਗ\x05еț\x02ਗਘ\x05ћȮ\x02ਘ')
buf.write('ਙ\x05хȣ\x02ਙਚ\x05ёȩ\x02ਚ')
buf.write('ਛ\x05яȨ\x02ਛž\x03\x02\x02\x02ਜਝ')
buf.write('\x05хȣ\x02ਝਞ\x05ћȮ\x02ਞਟ')
buf.write('\x05нȟ\x02ਟਠ\x05їȬ\x02ਠਡ')
buf.write('\x05еț\x02ਡਢ\x05ћȮ\x02ਢਣ')
buf.write('\x05нȟ\x02ਣƀ\x03\x02\x02\x02ਤਥ\x05ч')
buf.write('Ȥ\x02ਥਦ\x05еț\x02ਦਧ\x05џ')
buf.write('Ȱ\x02ਧਨ\x05еț\x02ਨƂ\x03\x02\x02')
buf.write('\x02\u0a29ਪ\x05чȤ\x02ਪਫ\x05ёȩ')
buf.write('\x02ਫਬ\x05хȣ\x02ਬਭ\x05яȨ')
buf.write('\x02ਭƄ\x03\x02\x02\x02ਮਯ\x05щȥ\x02ਯ')
buf.write('ਰ\x05нȟ\x02ਰ\u0a31\x05нȟ\x02\u0a31')
buf.write('ਲ\x05ѓȪ\x02ਲƆ\x03\x02\x02\x02ਲ਼\u0a34')
buf.write('\x05ыȦ\x02\u0a34ਵ\x05еț\x02ਵਸ਼')
buf.write('\x05яȨ\x02ਸ਼\u0a37\x05сȡ\x02\u0a37ਸ')
buf.write('\x05ѝȯ\x02ਸਹ\x05еț\x02ਹ\u0a3a')
buf.write('\x05сȡ\x02\u0a3a\u0a3b\x05нȟ\x02\u0a3bƈ')
buf.write('\x03\x02\x02\x02਼\u0a3d\x05ыȦ\x02\u0a3dਾ\x05е')
buf.write('ț\x02ਾਿ\x05љȭ\x02ਿੀ\x05ћ')
buf.write('Ȯ\x02ੀƊ\x03\x02\x02\x02ੁੂ\x05ыȦ')
buf.write('\x02ੂ\u0a43\x05еț\x02\u0a43\u0a44\x05љȭ')
buf.write('\x02\u0a44\u0a45\x05ћȮ\x02\u0a45\u0a46\x07a\x02\x02\u0a46')
buf.write('ੇ\x05џȰ\x02ੇੈ\x05еț\x02ੈ')
buf.write('\u0a49\x05ыȦ\x02\u0a49\u0a4a\x05ѝȯ\x02\u0a4a')
buf.write('ੋ\x05нȟ\x02ੋƌ\x03\x02\x02\x02ੌ੍')
buf.write('\x05ыȦ\x02੍\u0a4e\x05нȟ\x02\u0a4e\u0a4f')
buf.write('\x05еț\x02\u0a4f\u0a50\x05лȞ\x02\u0a50ੑ')
buf.write('\x05хȣ\x02ੑ\u0a52\x05яȨ\x02\u0a52\u0a53')
buf.write('\x05сȡ\x02\u0a53Ǝ\x03\x02\x02\x02\u0a54\u0a55\x05ы')
buf.write('Ȧ\x02\u0a55\u0a56\x05нȟ\x02\u0a56\u0a57\x05п')
buf.write('Ƞ\x02\u0a57\u0a58\x05ћȮ\x02\u0a58Ɛ\x03\x02\x02')
buf.write('\x02ਖ਼ਗ਼\x05ыȦ\x02ਗ਼ਜ਼\x05нȟ')
buf.write('\x02ਜ਼ੜ\x05џȰ\x02ੜ\u0a5d\x05нȟ')
buf.write('\x02\u0a5dਫ਼\x05ыȦ\x02ਫ਼ƒ\x03\x02\x02\x02\u0a5f')
buf.write('\u0a60\x05ыȦ\x02\u0a60\u0a61\x05хȣ\x02\u0a61')
buf.write('\u0a62\x05зȜ\x02\u0a62\u0a63\x05їȬ\x02\u0a63')
buf.write('\u0a64\x05еț\x02\u0a64\u0a65\x05їȬ\x02\u0a65')
buf.write('੦\x05ѥȳ\x02੦Ɣ\x03\x02\x02\x02੧੨')
buf.write('\x05ыȦ\x02੨੩\x05хȣ\x02੩੪')
buf.write('\x05щȥ\x02੪੫\x05нȟ\x02੫Ɩ')
buf.write('\x03\x02\x02\x02੬੭\x05ыȦ\x02੭੮\x05х')
buf.write('ȣ\x02੮੯\x05щȥ\x02੯ੰ\x05н')
buf.write('ȟ\x02ੰੱ\x074\x02\x02ੱƘ\x03\x02\x02\x02ੲ')
buf.write('ੳ\x05ыȦ\x02ੳੴ\x05хȣ\x02ੴ')
buf.write('ੵ\x05щȥ\x02ੵ੶\x05нȟ\x02੶')
buf.write('\u0a77\x076\x02\x02\u0a77ƚ\x03\x02\x02\x02\u0a78\u0a79\x05ы'
)
buf.write('Ȧ\x02\u0a79\u0a7a\x05хȣ\x02\u0a7a\u0a7b\x05щ')
buf.write('ȥ\x02\u0a7b\u0a7c\x05нȟ\x02\u0a7c\u0a7d\x05й')
buf.write('ȝ\x02\u0a7dƜ\x03\x02\x02\x02\u0a7e\u0a7f\x05ыȦ')
buf.write('\x02\u0a7f\u0a80\x05хȣ\x02\u0a80ઁ\x05эȧ')
buf.write('\x02ઁં\x05хȣ\x02ંઃ\x05ћȮ')
buf.write('\x02ઃƞ\x03\x02\x02\x02\u0a84અ\x05ыȦ\x02અ')
buf.write('આ\x05ёȩ\x02આઇ\x05йȝ\x02ઇ')
buf.write('ઈ\x05еț\x02ઈઉ\x05ыȦ\x02ઉ')
buf.write('Ơ\x03\x02\x02\x02ઊઋ\x05ыȦ\x02ઋઌ')
buf.write('\x05ёȩ\x02ઌઍ\x05йȝ\x02ઍ\u0a8e')
buf.write('\x05щȥ\x02\u0a8eƢ\x03\x02\x02\x02એઐ\x05ы')
buf.write('Ȧ\x02ઐઑ\x05ёȩ\x02ઑ\u0a92\x05й')
buf.write('ȝ\x02\u0a92ઓ\x05щȥ\x02ઓઔ\x05н')
buf.write('ȟ\x02ઔક\x05лȞ\x02કƤ\x03\x02\x02')
buf.write('\x02ખગ\x05ыȦ\x02ગઘ\x05ёȩ')
buf.write('\x02ઘઙ\x05сȡ\x02ઙƦ\x03\x02\x02\x02ચ')
buf.write('છ\x05ыȦ\x02છજ\x05ёȩ\x02જ')
buf.write('ઝ\x05сȡ\x02ઝઞ\x05ёȩ\x02ઞ')
buf.write('ટ\x05пȠ\x02ટઠ\x05пȠ\x02ઠ')
buf.write('ƨ\x03\x02\x02\x02ડઢ\x05ыȦ\x02ઢણ')
buf.write('\x05ёȩ\x02ણત\x05сȡ\x02તથ')
buf.write('\x05ёȩ\x02થદ\x05яȨ\x02દƪ')
buf.write('\x03\x02\x02\x02ધન\x05ыȦ\x02ન\u0aa9\x05ё')
buf.write('ȩ\x02\u0aa9પ\x05яȨ\x02પફ\x05с')
buf.write('ȡ\x02ફƬ\x03\x02\x02\x02બભ\x05ыȦ')
buf.write('\x02ભમ\x05ёȩ\x02મય\x05ёȩ')
buf.write('\x02યર\x05ѓȪ\x02રƮ\x03\x02\x02\x02\u0ab1')
buf.write('લ\x05эȧ\x02લળ\x05еț\x02ળ')
buf.write('\u0ab4\x05хȣ\x02\u0ab4વ\x05яȨ\x02વ')
buf.write('ư\x03\x02\x02\x02શષ\x05эȧ\x02ષસ')
buf.write('\x05еț\x02સહ\x05ѓȪ\x02હƲ')
buf.write('\x03\x02\x02\x02\u0aba\u0abb\x05эȧ\x02\u0abb઼\x05е')
buf.write('ț\x02઼ઽ\x05ћȮ\x02ઽા\x05й')
buf.write('ȝ\x02ાિ\x05уȢ\x02િી\x05н')
buf.write('ȟ\x02ીુ\x05лȞ\x02ુƴ\x03\x02\x02')
buf.write('\x02ૂૃ\x05эȧ\x02ૃૄ\x05еț')
buf.write('\x02ૄૅ\x05ѣȲ\x02ૅ\u0ac6\x05џȰ')
buf.write('\x02\u0ac6ે\x05еț\x02ેૈ\x05ыȦ')
buf.write('\x02ૈૉ\x05ѝȯ\x02ૉ\u0aca\x05нȟ')
buf.write('\x02\u0acaƶ\x03\x02\x02\x02ોૌ\x05эȧ\x02ૌ')
buf.write('્\x05нȟ\x02્\u0ace\x05еț\x02\u0ace')
buf.write('\u0acf\x05љȭ\x02\u0acfૐ\x05ѝȯ\x02ૐ')
buf.write('\u0ad1\x05їȬ\x02\u0ad1\u0ad2\x05нȟ\x02\u0ad2')
buf.write('\u0ad3\x05љȭ\x02\u0ad3Ƹ\x03\x02\x02\x02\u0ad4\u0ad5')
buf.write('\x05эȧ\x02\u0ad5\u0ad6\x05нȟ\x02\u0ad6\u0ad7')
buf.write('\x05эȧ\x02\u0ad7\u0ad8\x05зȜ\x02\u0ad8\u0ad9')
buf.write('\x05нȟ\x02\u0ad9\u0ada\x05їȬ\x02\u0adaƺ')
buf.write('\x03\x02\x02\x02\u0adb\u0adc\x05эȧ\x02\u0adc\u0add\x05н')
buf.write('ȟ\x02\u0add\u0ade\x05їȬ\x02\u0ade\u0adf\x05с')
buf.write('ȡ\x02\u0adfૠ\x05нȟ\x02ૠƼ\x03\x02\x02')
buf.write('\x02ૡૢ\x05эȧ\x02ૢૣ\x05хȣ')
buf.write('\x02ૣ\u0ae4\x05яȨ\x02\u0ae4\u0ae5\x05ѝȯ')
buf.write('\x02\u0ae5૦\x05љȭ\x02૦ƾ\x03\x02\x02\x02૧')
buf.write('૨\x05эȧ\x02૨૩\x05хȣ\x02૩')
buf.write('૪\x05яȨ\x02૪૫\x05ѝȯ\x02૫')
buf.write('૬\x05ћȮ\x02૬૭\x05нȟ\x02૭')
buf.write('ǀ\x03\x02\x02\x02૮૯\x05эȧ\x02૯૰')
buf.write('\x05хȣ\x02૰૱\x05яȨ\x02૱\u0af2')
buf.write('\x05џȰ\x02\u0af2\u0af3\x05еț\x02\u0af3\u0af4')
buf.write('\x05ыȦ\x02\u0af4\u0af5\x05ѝȯ\x02\u0af5\u0af6')
buf.write('\x05нȟ\x02\u0af6ǂ\x03\x02\x02\x02\u0af7\u0af8\x05э')
buf.write('ȧ\x02\u0af8ૹ\x05ыȦ\x02ૹૺ\x05љ')
buf.write('ȭ\x02ૺૻ\x05ыȦ\x02ૻૼ\x05е')
buf.write('ț\x02ૼ૽\x05зȜ\x02૽૾\x05н')
buf.write('ȟ\x02૾૿\x05ыȦ\x02૿DŽ\x03\x02\x02')
buf.write('\x02\u0b00ଁ\x05эȧ\x02ଁଂ\x05ёȩ')
buf.write('\x02ଂଃ\x05лȞ\x02ଃ\u0b04\x05нȟ')
buf.write('\x02\u0b04dž\x03\x02\x02\x02ଅଆ\x05эȧ\x02ଆ')
buf.write('ଇ\x05ёȩ\x02ଇଈ\x05лȞ\x02ଈ')
buf.write('ଉ\x05нȟ\x02ଉଊ\x05ыȦ\x02ଊ')
buf.write('Lj\x03\x02\x02\x02ଋଌ\x05эȧ\x02ଌ\u0b0d')
buf.write('\x05ёȩ\x02\u0b0d\u0b0e\x05лȞ\x02\u0b0eଏ')
buf.write('\x05хȣ\x02ଏଐ\x05пȠ\x02ଐ\u0b11')
buf.write('\x05ѥȳ\x02\u0b11NJ\x03\x02\x02\x02\u0b12ଓ\x05э')
buf.write('ȧ\x02ଓଔ\x05ёȩ\x02ଔକ\x05я')
buf.write('Ȩ\x02କଖ\x05ћȮ\x02ଖଗ\x05у')
buf.write('Ȣ\x02ଗnj\x03\x02\x02\x02ଘଙ\x05эȧ')
buf.write('\x02ଙଚ\x05ѝȯ\x02ଚଛ\x05ыȦ')
buf.write('\x02ଛଜ\x05ћȮ\x02ଜଝ\x05хȣ')
buf.write('\x02ଝଞ\x05љȭ\x02ଞଟ\x05нȟ')
buf.write('\x02ଟଠ\x05ћȮ\x02ଠǎ\x03\x02\x02\x02ଡ')
buf.write('ଢ\x05яȨ\x02ଢଣ\x05еț\x02ଣ')
buf.write('ତ\x05эȧ\x02ତଥ\x05нȟ\x02ଥ')
buf.write('ǐ\x03\x02\x02\x02ଦଧ\x05яȨ\x02ଧନ')
buf.write('\x05еț\x02ନ\u0b29\x05яȨ\x02\u0b29ǒ')
buf.write('\x03\x02\x02\x02ପଫ\x05яȨ\x02ଫବ\x05е')
buf.write('ț\x02ବଭ\x05ћȮ\x02ଭମ\x05ѝ')
buf.write('ȯ\x02ମଯ\x05їȬ\x02ଯର\x05е')
buf.write('ț\x02ର\u0b31\x05ыȦ\x02\u0b31ǔ\x03\x02\x02')
buf.write('\x02ଲଳ\x05яȨ\x02ଳ\u0b34\x05еț')
buf.write('\x02\u0b34ଵ\x05ћȮ\x02ଵଶ\x05ѝȯ')
buf.write('\x02ଶଷ\x05їȬ\x02ଷସ\x05еț')
buf.write('\x02ସହ\x05ыȦ\x02ହ\u0b3a\x05яȨ')
buf.write('\x02\u0b3aǖ\x03\x02\x02\x02\u0b3b଼\x05яȨ\x02଼')
buf.write('ଽ\x05еț\x02ଽା\x05џȰ\x02ା')
buf.write('ǘ\x03\x02\x02\x02ିୀ\x05яȨ\x02ୀୁ')
buf.write('\x05йȝ\x02ୁୂ\x05уȢ\x02ୂୃ')
buf.write('\x05еț\x02ୃୄ\x05їȬ\x02ୄǚ')
buf.write('\x03\x02\x02\x02\u0b45\u0b46\x05яȨ\x02\u0b46େ\x05й')
buf.write('ȝ\x02େୈ\x05уȢ\x02ୈ\u0b49\x05е')
buf.write('ț\x02\u0b49\u0b4a\x05їȬ\x02\u0b4aୋ\x07a\x02')
buf.write('\x02ୋୌ\x05йȝ\x02ୌ୍\x05љȭ')
buf.write('\x02୍ǜ\x03\x02\x02\x02\u0b4e\u0b4f\x05яȨ\x02\u0b4f')
buf.write('\u0b50\x05йȝ\x02\u0b50\u0b51\x05ыȦ\x02\u0b51')
buf.write('\u0b52\x05ёȩ\x02\u0b52\u0b53\x05зȜ\x02\u0b53')
buf.write('Ǟ\x03\x02\x02\x02\u0b54୕\x05яȨ\x02୕ୖ')
buf.write('\x05нȟ\x02ୖୗ\x05љȭ\x02ୗ\u0b58')
buf.write('\x05ћȮ\x02\u0b58\u0b59\x05нȟ\x02\u0b59\u0b5a')
buf.write('\x05лȞ\x02\u0b5aǠ\x03\x02\x02\x02\u0b5bଡ଼\x05я')
buf.write('Ȩ\x02ଡ଼ଢ଼\x05нȟ\x02ଢ଼\u0b5e\x05ѡ')
buf.write('ȱ\x02\u0b5eǢ\x03\x02\x02\x02ୟୠ\x05яȨ')
buf.write('\x02ୠୡ\x05ёȩ\x02ୡǤ\x03\x02\x02\x02ୢ')
buf.write('ୣ\x05яȨ\x02ୣ\u0b64\x05ёȩ\x02\u0b64')
buf.write('\u0b65\x05еț\x02\u0b65୦\x05ѝȯ\x02୦')
buf.write('୧\x05лȞ\x02୧୨\x05хȣ\x02୨')
buf.write('୩\x05ћȮ\x02୩Ǧ\x03\x02\x02\x02୪୫')
buf.write('\x05яȨ\x02୫୬\x05ёȩ\x02୬୭')
buf.write('\x05йȝ\x02୭୮\x05еț\x02୮୯')
buf.write('\x05йȝ\x02୯୰\x05уȢ\x02୰ୱ')
buf.write('\x05нȟ\x02ୱǨ\x03\x02\x02\x02୲୳\x05я')
buf.write('Ȩ\x02୳୴\x05ёȩ\x02୴୵\x05й')
buf.write('ȝ\x02୵୶\x05ёȩ\x02୶୷\x05ѓ')
buf.write('Ȫ\x02୷\u0b78\x05ѥȳ\x02\u0b78Ǫ\x03\x02\x02')
buf.write('\x02\u0b79\u0b7a\x05яȨ\x02\u0b7a\u0b7b\x05ёȩ')
buf.write('\x02\u0b7b\u0b7c\x05йȝ\x02\u0b7c\u0b7d\x05ѥȳ')
buf.write('\x02\u0b7d\u0b7e\x05йȝ\x02\u0b7e\u0b7f\x05ыȦ')
buf.write('\x02\u0b7f\u0b80\x05нȟ\x02\u0b80Ǭ\x03\x02\x02\x02\u0b81')
buf.write('ஂ\x05яȨ\x02ஂஃ\x05ёȩ\x02ஃ')
buf.write('\u0b84\x05нȟ\x02\u0b84அ\x05яȨ\x02அ')
buf.write('ஆ\x05ћȮ\x02ஆஇ\x05хȣ\x02இ')
buf.write('ஈ\x05ћȮ\x02ஈஉ\x05ѥȳ\x02உ')
buf.write('ஊ\x05нȟ\x02ஊ\u0b8b\x05љȭ\x02\u0b8b')
buf.write('\u0b8c\x05йȝ\x02\u0b8c\u0b8d\x05еț\x02\u0b8d')
buf.write('எ\x05ѓȪ\x02எஏ\x05хȣ\x02ஏ')
buf.write('ஐ\x05яȨ\x02ஐ\u0b91\x05сȡ\x02\u0b91')
buf.write('Ǯ\x03\x02\x02\x02ஒஓ\x05яȨ\x02ஓஔ')
buf.write('\x05ёȩ\x02ஔக\x05эȧ\x02க\u0b96')
buf.write('\x05еț\x02\u0b96\u0b97\x05ѣȲ\x02\u0b97\u0b98')
buf.write('\x05џȰ\x02\u0b98ங\x05еț\x02ஙச')
buf.write('\x05ыȦ\x02ச\u0b9b\x05ѝȯ\x02\u0b9bஜ')
buf.write('\x05нȟ\x02ஜǰ\x03\x02\x02\x02\u0b9dஞ\x05я')
buf.write('Ȩ\x02ஞட\x05ёȩ\x02ட\u0ba0\x05э')
buf.write('ȧ\x02\u0ba0\u0ba1\x05хȣ\x02\u0ba1\u0ba2\x05я')
buf.write('Ȩ\x02\u0ba2ண\x05џȰ\x02ணத\x05е')
buf.write('ț\x02த\u0ba5\x05ыȦ\x02\u0ba5\u0ba6\x05ѝ')
buf.write('ȯ\x02\u0ba6\u0ba7\x05нȟ\x02\u0ba7Dz\x03\x02\x02')
buf.write('\x02நன\x05яȨ\x02னப\x05ёȩ')
buf.write('\x02ப\u0bab\x05яȨ\x02\u0bab\u0bac\x05нȟ')
buf.write('\x02\u0bacǴ\x03\x02\x02\x02\u0badம\x05яȨ\x02ம')
buf.write('ய\x05ёȩ\x02யர\x05ёȩ\x02ர')
buf.write('ற\x05їȬ\x02றல\x05лȞ\x02ல')
buf.write('ள\x05нȟ\x02ளழ\x05їȬ\x02ழ')
buf.write('Ƕ\x03\x02\x02\x02வஶ\x05яȨ\x02ஶஷ')
buf.write('\x05ёȩ\x02ஷஸ\x05љȭ\x02ஸஹ')
buf.write('\x05йȝ\x02ஹ\u0bba\x05уȢ\x02\u0bba\u0bbb')
buf.write('\x05нȟ\x02\u0bbb\u0bbc\x05эȧ\x02\u0bbc\u0bbd')
buf.write('\x05еț\x02\u0bbdா\x05йȝ\x02ாி')
buf.write('\x05уȢ\x02ிீ\x05нȟ\x02ீு')
buf.write('\x05йȝ\x02ுூ\x05щȥ\x02ூǸ')
buf.write('\x03\x02\x02\x02\u0bc3\u0bc4\x05яȨ\x02\u0bc4\u0bc5\x05ё')
buf.write('ȩ\x02\u0bc5ெ\x05ћȮ\x02ெǺ\x03\x02\x02')
buf.write('\x02ேை\x05яȨ\x02ை\u0bc9\x05ёȩ')
buf.write('\x02\u0bc9ொ\x05ѡȱ\x02ொோ\x05еț')
buf.write('\x02ோௌ\x05хȣ\x02ௌ்\x05ћȮ')
buf.write('\x02்Ǽ\x03\x02\x02\x02\u0bce\u0bcf\x05яȨ\x02\u0bcf')
buf.write('ௐ\x05ѝȯ\x02ௐ\u0bd1\x05ыȦ\x02\u0bd1')
buf.write('\u0bd2\x05ыȦ\x02\u0bd2Ǿ\x03\x02\x02\x02\u0bd3\u0bd4')
buf.write('\x05яȨ\x02\u0bd4\u0bd5\x05ѝȯ\x02\u0bd5\u0bd6')
buf.write('\x05ыȦ\x02\u0bd6ௗ\x05ыȦ\x02ௗ\u0bd8')
buf.write('\x05љȭ\x02\u0bd8Ȁ\x03\x02\x02\x02\u0bd9\u0bda\x05я')
buf.write('Ȩ\x02\u0bda\u0bdb\x05ѝȯ\x02\u0bdb\u0bdc\x05э')
buf.write('ȧ\x02\u0bdc\u0bdd\x05зȜ\x02\u0bdd\u0bde\x05н')
buf.write('ȟ\x02\u0bde\u0bdf\x05їȬ\x02\u0bdfȂ\x03\x02\x02')
buf.write('\x02\u0be0\u0be1\x05яȨ\x02\u0be1\u0be2\x05ѝȯ')
buf.write('\x02\u0be2\u0be3\x05эȧ\x02\u0be3\u0be4\x05нȟ')
buf.write('\x02\u0be4\u0be5\x05їȬ\x02\u0be5௦\x05хȣ')
buf.write('\x02௦௧\x05йȝ\x02௧Ȅ\x03\x02\x02\x02௨')
buf.write('௩\x05яȨ\x02௩௪\x05џȰ\x02௪')
buf.write('௫\x05еț\x02௫௬\x05їȬ\x02௬')
buf.write('௭\x05йȝ\x02௭௮\x05уȢ\x02௮')
buf.write('௯\x05еț\x02௯௰\x05їȬ\x02௰')
buf.write('௱\x074\x02\x02௱Ȇ\x03\x02\x02\x02௲௳\x05ё')
buf.write('ȩ\x02௳௴\x05зȜ\x02௴௵\x05ч')
buf.write('Ȥ\x02௵௶\x05нȟ\x02௶௷\x05й')
buf.write('ȝ\x02௷௸\x05ћȮ\x02௸Ȉ\x03\x02\x02')
buf.write('\x02௹௺\x05ёȩ\x02௺\u0bfb\x05пȠ')
buf.write('\x02\u0bfbȊ\x03\x02\x02\x02\u0bfc\u0bfd\x05ёȩ\x02\u0bfd')
buf.write('\u0bfe\x05пȠ\x02\u0bfe\u0bff\x05пȠ\x02\u0bff')
buf.write('Ȍ\x03\x02\x02\x02ఀఁ\x05ёȩ\x02ఁం')
buf.write('\x05хȣ\x02ంః\x05лȞ\x02ఃȎ')
buf.write('\x03\x02\x02\x02ఄఅ\x05ёȩ\x02అఆ\x05ы')
buf.write('Ȧ\x02ఆఇ\x05лȞ\x02ఇȐ\x03\x02\x02')
buf.write('\x02ఈఉ\x05ёȩ\x02ఉఊ\x05яȨ')
buf.write('\x02ఊȒ\x03\x02\x02\x02ఋఌ\x05ёȩ\x02ఌ')
buf.write('\u0c0d\x05яȨ\x02\u0c0dఎ\x05ыȦ\x02ఎ')
buf.write('ఏ\x05ѥȳ\x02ఏȔ\x03\x02\x02\x02ఐ\u0c11')
buf.write('\x05ёȩ\x02\u0c11ఒ\x05ѓȪ\x02ఒఓ')
buf.write('\x05нȟ\x02ఓఔ\x05яȨ\x02ఔȖ')
buf.write('\x03\x02\x02\x02కఖ\x05ёȩ\x02ఖగ\x05ѓ')
buf.write('Ȫ\x02గఘ\x05ћȮ\x02ఘఙ\x05х')
buf.write('ȣ\x02ఙచ\x05ёȩ\x02చఛ\x05я')
buf.write('Ȩ\x02ఛȘ\x03\x02\x02\x02జఝ\x05ёȩ')
buf.write('\x02ఝఞ\x05їȬ\x02ఞȚ\x03\x02\x02\x02ట')
buf.write('ఠ\x05ёȩ\x02ఠడ\x05їȬ\x02డ')
buf.write('ఢ\x05еț\x02ఢణ\x05лȞ\x02ణ')
buf.write('త\x05еț\x02తథ\x05ћȮ\x02థ')
buf.write('ద\x05еț\x02దȜ\x03\x02\x02\x02ధన')
buf.write('\x05ёȩ\x02న\u0c29\x05їȬ\x02\u0c29ప')
buf.write('\x05лȞ\x02పఫ\x05нȟ\x02ఫబ')
buf.write('\x05їȬ\x02బȞ\x03\x02\x02\x02భమ\x05ё')
buf.write('ȩ\x02మయ\x05їȬ\x02యర\x05л')
buf.write('Ȟ\x02రఱ\x05хȣ\x02ఱల\x05я')
buf.write('Ȩ\x02లళ\x05еț\x02ళఴ\x05ы')
buf.write('Ȧ\x02ఴవ\x05хȣ\x02వశ\x05ћ')
buf.write('Ȯ\x02శష\x05ѥȳ\x02షȠ\x03\x02\x02')
buf.write('\x02సహ\x05ёȩ\x02హ\u0c3a\x05љȭ')
buf.write('\x02\u0c3a\u0c3b\x05нȟ\x02\u0c3b఼\x05їȬ')
buf.write('\x02఼ఽ\x05їȬ\x02ఽా\x05ёȩ')
buf.write('\x02ాి\x05їȬ\x02ిȢ\x03\x02\x02\x02ీ')
buf.write('ు\x05ёȩ\x02ుూ\x05ѝȯ\x02ూ')
buf.write('ృ\x05ћȮ\x02ృȤ\x03\x02\x02\x02ౄ\u0c45')
buf.write('\x05ёȩ\x02\u0c45ె\x05ѝȯ\x02ెే')
buf.write('\x05ћȮ\x02ేై\x05нȟ\x02ై\u0c49')
buf.write('\x05їȬ\x02\u0c49Ȧ\x03\x02\x02\x02ొో\x05ё')
buf.write('ȩ\x02ోౌ\x05џȰ\x02ౌ్\x05н')
buf.write('ȟ\x02్\u0c4e\x05їȬ\x02\u0c4eȨ\x03\x02\x02')
buf.write('\x02\u0c4f\u0c50\x05ёȩ\x02\u0c50\u0c51\x05џȰ')
buf.write('\x02\u0c51\u0c52\x05нȟ\x02\u0c52\u0c53\x05їȬ')
buf.write('\x02\u0c53\u0c54\x05їȬ\x02\u0c54ౕ\x05хȣ')
buf.write('\x02ౕౖ\x05лȞ\x02ౖ\u0c57\x05хȣ')
buf.write('\x02\u0c57ౘ\x05яȨ\x02ౘౙ\x05сȡ')
buf.write('\x02ౙȪ\x03\x02\x02\x02ౚ\u0c5b\x05ѓȪ\x02\u0c5b')
buf.write('\u0c5c\x05еț\x02\u0c5cౝ\x05йȝ\x02ౝ')
buf.write('\u0c5e\x05щȥ\x02\u0c5e\u0c5f\x05еț\x02\u0c5f')
buf.write('ౠ\x05сȡ\x02ౠౡ\x05нȟ\x02ౡ')
buf.write('Ȭ\x03\x02\x02\x02ౢౣ\x05ѓȪ\x02ౣ\u0c64')
buf.write('\x05еț\x02\u0c64\u0c65\x05їȬ\x02\u0c65౦')
buf.write('\x05еț\x02౦౧\x05ыȦ\x02౧౨')
buf.write('\x05ыȦ\x02౨౩\x05нȟ\x02౩౪')
buf.write('\x05ыȦ\x02౪౫\x07a\x02\x02౫౬\x05н')
buf.write('ȟ\x02౬౭\x05яȨ\x02౭౮\x05е')
buf.write('ț\x02౮౯\x05зȜ\x02౯\u0c70\x05ы')
buf.write('Ȧ\x02\u0c70\u0c71\x05нȟ\x02\u0c71Ȯ\x03\x02\x02')
buf.write('\x02\u0c72\u0c73\x05ѓȪ\x02\u0c73\u0c74\x05еț')
buf.write('\x02\u0c74\u0c75\x05їȬ\x02\u0c75\u0c76\x05еț')
buf.write('\x02\u0c76౷\x05эȧ\x02౷౸\x05нȟ')
buf.write('\x02౸౹\x05ћȮ\x02౹౺\x05нȟ')
buf.write('\x02౺౻\x05їȬ\x02౻౼\x05љȭ')
buf.write('\x02౼Ȱ\x03\x02\x02\x02౽౾\x05ѓȪ\x02౾')
buf.write('౿\x05еț\x02౿ಀ\x05їȬ\x02ಀ')
buf.write('ಁ\x05нȟ\x02ಁಂ\x05яȨ\x02ಂ')
buf.write('ಃ\x05ћȮ\x02ಃȲ\x03\x02\x02\x02಄ಅ')
buf.write('\x05ѓȪ\x02ಅಆ\x05еț\x02ಆಇ')
buf.write('\x05їȬ\x02ಇಈ\x05ћȮ\x02ಈಉ')
buf.write('\x05хȣ\x02ಉಊ\x05ћȮ\x02ಊಋ')
buf.write('\x05хȣ\x02ಋಌ\x05ёȩ\x02ಌ\u0c8d')
buf.write('\x05яȨ\x02\u0c8dȴ\x03\x02\x02\x02ಎಏ\x05ѓ')
buf.write('Ȫ\x02ಏಐ\x05еț\x02ಐ\u0c91\x05љ')
buf.write('ȭ\x02\u0c91ಒ\x05љȭ\x02ಒಓ\x05х')
buf.write('ȣ\x02ಓಔ\x05яȨ\x02ಔಕ\x05с')
buf.write('ȡ\x02ಕȶ\x03\x02\x02\x02ಖಗ\x05ѓȪ')
buf.write('\x02ಗಘ\x05еț\x02ಘಙ\x05ћȮ')
buf.write('\x02ಙಚ\x05уȢ\x02ಚȸ\x03\x02\x02\x02ಛ')
buf.write("ಜ\x07'\x02\x02ಜಝ\x05їȬ\x02ಝಞ")
buf.write('\x05ёȩ\x02ಞಟ\x05ѡȱ\x02ಟಠ')
buf.write('\x05ћȮ\x02ಠಡ\x05ѥȳ\x02ಡಢ')
buf.write('\x05ѓȪ\x02ಢಣ\x05нȟ\x02ಣȺ')
buf.write("\x03\x02\x02\x02ತಥ\x07'\x02\x02ಥದ\x05ћȮ")
buf.write('\x02ದಧ\x05ѥȳ\x02ಧನ\x05ѓȪ')
buf.write('\x02ನ\u0ca9\x05нȟ\x02\u0ca9ȼ\x03\x02\x02\x02ಪ')
buf.write('ಫ\x05ѓȪ\x02ಫಬ\x05хȣ\x02ಬ')
buf.write('ಭ\x05ѓȪ\x02ಭಮ\x05нȟ\x02ಮ')
buf.write('ಯ\x05ыȦ\x02ಯರ\x05хȣ\x02ರ')
buf.write('ಱ\x05яȨ\x02ಱಲ\x05нȟ\x02ಲ')
buf.write('ಳ\x05лȞ\x02ಳȾ\x03\x02\x02\x02\u0cb4ವ')
buf.write('\x05ѓȪ\x02ವಶ\x05хȣ\x02ಶಷ')
buf.write('\x05џȰ\x02ಷಸ\x05ёȩ\x02ಸಹ')
buf.write('\x05ћȮ\x02ಹɀ\x03\x02\x02\x02\u0cba\u0cbb\x05ѓ')
buf.write('Ȫ\x02\u0cbb಼\x05ыȦ\x02಼ಽ\x05е')
buf.write('ț\x02ಽಾ\x05яȨ\x02ಾɂ\x03\x02\x02')
buf.write('\x02ಿೀ\x05ѓȪ\x02ೀು\x05ыȦ')
buf.write('\x02ುೂ\x05љȭ\x02ೂೃ\x07a\x02\x02ೃ')
buf.write('ೄ\x05хȣ\x02ೄ\u0cc5\x05яȨ\x02\u0cc5')
buf.write('ೆ\x05ћȮ\x02ೆೇ\x05нȟ\x02ೇ')
buf.write('ೈ\x05сȡ\x02ೈ\u0cc9\x05нȟ\x02\u0cc9')
buf.write('ೊ\x05їȬ\x02ೊɄ\x03\x02\x02\x02ೋೌ')
buf.write('\x05ѓȪ\x02ೌ್\x05ёȩ\x02್\u0cce')
buf.write('\x05љȭ\x02\u0cce\u0ccf\x05хȣ\x02\u0ccf\u0cd0')
buf.write('\x05ћȮ\x02\u0cd0\u0cd1\x05хȣ\x02\u0cd1\u0cd2')
buf.write('\x05џȰ\x02\u0cd2\u0cd3\x05нȟ\x02\u0cd3Ɇ')
buf.write('\x03\x02\x02\x02\u0cd4ೕ\x05ѓȪ\x02ೕೖ\x05ё')
buf.write('ȩ\x02ೖ\u0cd7\x05љȭ\x02\u0cd7\u0cd8\x05х')
buf.write('ȣ\x02\u0cd8\u0cd9\x05ћȮ\x02\u0cd9\u0cda\x05х')
buf.write('ȣ\x02\u0cda\u0cdb\x05џȰ\x02\u0cdb\u0cdc\x05н')
buf.write('ȟ\x02\u0cdcೝ\x05яȨ\x02ೝɈ\x03\x02\x02')
buf.write('\x02ೞ\u0cdf\x05ѓȪ\x02\u0cdfೠ\x05їȬ')
buf.write('\x02ೠೡ\x05еț\x02ೡೢ\x05сȡ')
buf.write('\x02ೢೣ\x05эȧ\x02ೣ\u0ce4\x05еț')
buf.write('\x02\u0ce4Ɋ\x03\x02\x02\x02\u0ce5೦\x05ѓȪ\x02೦')
buf.write('೧\x05їȬ\x02೧೨\x05нȟ\x02೨')
buf.write('೩\x05йȝ\x02೩೪\x05нȟ\x02೪')
buf.write('೫\x05лȞ\x02೫೬\x05хȣ\x02೬')
buf.write('೭\x05яȨ\x02೭೮\x05сȡ\x02೮')
buf.write('Ɍ\x03\x02\x02\x02೯\u0cf0\x05ѓȪ\x02\u0cf0ೱ')
buf.write('\x05їȬ\x02ೱೲ\x05нȟ\x02ೲ\u0cf3')
buf.write('\x05йȝ\x02\u0cf3\u0cf4\x05хȣ\x02\u0cf4\u0cf5')
buf.write('\x05љȭ\x02\u0cf5\u0cf6\x05хȣ\x02\u0cf6\u0cf7')
buf.write('\x05ёȩ\x02\u0cf7\u0cf8\x05яȨ\x02\u0cf8Ɏ')
buf.write('\x03\x02\x02\x02\u0cf9\u0cfa\x05ѓȪ\x02\u0cfa\u0cfb\x05ї')
buf.write('Ȭ\x02\u0cfb\u0cfc\x05нȟ\x02\u0cfc\u0cfd\x05љ')
buf.write('ȭ\x02\u0cfd\u0cfe\x05нȟ\x02\u0cfe\u0cff\x05я')
buf.write('Ȩ\x02\u0cffഀ\x05ћȮ\x02ഀɐ\x03\x02\x02')
buf.write('\x02ഁം\x05ѓȪ\x02ംഃ\x05їȬ')
buf.write('\x02ഃഄ\x05хȣ\x02ഄഅ\x05ёȩ')
buf.write('\x02അആ\x05їȬ\x02ആɒ\x03\x02\x02\x02ഇ')
buf.write('ഈ\x05ѓȪ\x02ഈഉ\x05їȬ\x02ഉ')
buf.write('ഊ\x05ёȩ\x02ഊഋ\x05йȝ\x02ഋ')
buf.write('ഌ\x05нȟ\x02ഌ\u0d0d\x05лȞ\x02\u0d0d')
buf.write('എ\x05ѝȯ\x02എഏ\x05їȬ\x02ഏ')
buf.write('ഐ\x05нȟ\x02ഐɔ\x03\x02\x02\x02\u0d11ഒ')
buf.write('\x05їȬ\x02ഒഓ\x05еț\x02ഓഔ')
buf.write('\x05хȣ\x02ഔക\x05љȭ\x02കഖ')
buf.write('\x05нȟ\x02ഖɖ\x03\x02\x02\x02ഗഘ\x05ї')
buf.write('Ȭ\x02ഘങ\x05еț\x02ങച\x05я')
buf.write('Ȩ\x02ചഛ\x05сȡ\x02ഛജ\x05н')
buf.write('ȟ\x02ജɘ\x03\x02\x02\x02ഝഞ\x05їȬ')
buf.write('\x02ഞട\x05еț\x02ടഠ\x05ѡȱ')
buf.write('\x02ഠɚ\x03\x02\x02\x02ഡഢ\x05їȬ\x02ഢ')
buf.write('ണ\x05нȟ\x02ണത\x05еț\x02ത')
buf.write('ഥ\x05лȞ\x02ഥɜ\x03\x02\x02\x02ദധ')
buf.write('\x05їȬ\x02ധന\x05нȟ\x02നഩ')
buf.write('\x05еț\x02ഩപ\x05ыȦ\x02പɞ')
buf.write('\x03\x02\x02\x02ഫബ\x05їȬ\x02ബഭ\x05н')
buf.write('ȟ\x02ഭമ\x05йȝ\x02മയ\x05ё')
buf.write('ȩ\x02യര\x05їȬ\x02രറ\x05л')
buf.write('Ȟ\x02റɠ\x03\x02\x02\x02ലള\x05їȬ')
buf.write('\x02ളഴ\x05нȟ\x02ഴവ\x05пȠ')
buf.write('\x02വɢ\x03\x02\x02\x02ശഷ\x05їȬ\x02ഷ')
buf.write('സ\x05нȟ\x02സഹ\x05пȠ\x02ഹ')
buf.write('ഺ\x05нȟ\x02ഺ഻\x05їȬ\x02഻')
buf.write('഼\x05нȟ\x02഼ഽ\x05яȨ\x02ഽ')
buf.write('ാ\x05йȝ\x02ാി\x05нȟ\x02ി')
buf.write('ɤ\x03\x02\x02\x02ീു\x05їȬ\x02ുൂ')
buf.write('\x05нȟ\x02ൂൃ\x05пȠ\x02ൃൄ')
buf.write('\x05нȟ\x02ൄ\u0d45\x05їȬ\x02\u0d45െ')
buf.write('\x05нȟ\x02െേ\x05яȨ\x02േൈ')
buf.write('\x05йȝ\x02ൈ\u0d49\x05хȣ\x02\u0d49ൊ')
buf.write('\x05яȨ\x02ൊോ\x05сȡ\x02ോɦ')
buf.write('\x03\x02\x02\x02ൌ്\x05їȬ\x02്ൎ\x05н')
buf.write('ȟ\x02ൎ൏\x05чȤ\x02൏\u0d50\x05н')
buf.write('ȟ\x02\u0d50\u0d51\x05йȝ\x02\u0d51\u0d52\x05ћ')
buf.write('Ȯ\x02\u0d52ɨ\x03\x02\x02\x02\u0d53ൔ\x05їȬ')
buf.write('\x02ൔൕ\x05нȟ\x02ൕൖ\x05ыȦ')
buf.write('\x02ൖൗ\x05хȣ\x02ൗ൘\x05нȟ')
buf.write('\x02൘൙\x05љȭ\x02൙൚\x07a\x02\x02൚')
buf.write('൛\x05ёȩ\x02൛൜\x05яȨ\x02൜')
buf.write('ɪ\x03\x02\x02\x02൝൞\x05їȬ\x02൞ൟ')
buf.write('\x05нȟ\x02ൟൠ\x05яȨ\x02ൠൡ')
buf.write('\x05еț\x02ൡൢ\x05эȧ\x02ൢൣ')
buf.write('\x05нȟ\x02ൣɬ\x03\x02\x02\x02\u0d64\u0d65\x05ї')
buf.write('Ȭ\x02\u0d65൦\x05нȟ\x02൦൧\x05ѓ')
buf.write('Ȫ\x02൧൨\x05ыȦ\x02൨൩\x05е')
buf.write('ț\x02൩൪\x05йȝ\x02൪൫\x05н')
buf.write('ȟ\x02൫ɮ\x03\x02\x02\x02൬൭\x05їȬ')
buf.write('\x02൭൮\x05нȟ\x02൮൯\x05љȭ')
buf.write('\x02൯൰\x05ѓȪ\x02൰൱\x05нȟ')
buf.write('\x02൱൲\x05йȝ\x02൲൳\x05ћȮ')
buf.write('\x02൳ɰ\x03\x02\x02\x02൴൵\x05їȬ\x02൵')
buf.write('൶\x05нȟ\x02൶൷\x05љȭ\x02൷')
buf.write('൸\x05ћȮ\x02൸൹\x05їȬ\x02൹')
buf.write('ൺ\x05хȣ\x02ൺൻ\x05йȝ\x02ൻ')
buf.write('ർ\x05ћȮ\x02ർൽ\x07a\x02\x02ൽൾ')
buf.write('\x05їȬ\x02ൾൿ\x05нȟ\x02ൿ\u0d80')
buf.write('\x05пȠ\x02\u0d80ඁ\x05нȟ\x02ඁං')
buf.write('\x05їȬ\x02ංඃ\x05нȟ\x02ඃ\u0d84')
buf.write('\x05яȨ\x02\u0d84අ\x05йȝ\x02අආ')
buf.write('\x05нȟ\x02ආඇ\x05љȭ\x02ඇɲ')
buf.write('\x03\x02\x02\x02ඈඉ\x05їȬ\x02ඉඊ\x05н')
buf.write('ȟ\x02ඊඋ\x05љȭ\x02උඌ\x05ѝ')
buf.write('ȯ\x02ඌඍ\x05ыȦ\x02ඍඎ\x05ћ')
buf.write('Ȯ\x02ඎɴ\x03\x02\x02\x02ඏඐ\x05їȬ')
buf.write('\x02ඐඑ\x05нȟ\x02එඒ\x05љȭ')
buf.write('\x02ඒඓ\x05ѝȯ\x02ඓඔ\x05ыȦ')
buf.write('\x02ඔඕ\x05ћȮ\x02ඕඖ\x07a\x02\x02ඖ')
buf.write('\u0d97\x05йȝ\x02\u0d97\u0d98\x05еț\x02\u0d98')
buf.write('\u0d99\x05йȝ\x02\u0d99ක\x05уȢ\x02ක')
buf.write('ඛ\x05нȟ\x02ඛɶ\x03\x02\x02\x02ගඝ')
buf.write('\x05їȬ\x02ඝඞ\x05нȟ\x02ඞඟ')
buf.write('\x05ћȮ\x02ඟච\x05ѝȯ\x02චඡ')
buf.write('\x05їȬ\x02ඡජ\x05яȨ\x02ජɸ')
buf.write('\x03\x02\x02\x02ඣඤ\x05їȬ\x02ඤඥ\x05н')
buf.write('ȟ\x02ඥඦ\x05ћȮ\x02ඦට\x05ѝ')
buf.write('ȯ\x02ටඨ\x05їȬ\x02ඨඩ\x05я')
buf.write('Ȩ\x02ඩඪ\x05хȣ\x02ඪණ\x05я')
buf.write('Ȩ\x02ණඬ\x05сȡ\x02ඬɺ\x03\x02\x02')
buf.write('\x02තථ\x05їȬ\x02ථද\x05нȟ')
buf.write('\x02දධ\x05ѝȯ\x02ධන\x05љȭ')
buf.write('\x02න\u0db2\x05нȟ\x02\u0db2ɼ\x03\x02\x02\x02ඳ')
buf.write('ප\x05їȬ\x02පඵ\x05нȟ\x02ඵ')
buf.write('බ\x05џȰ\x02බභ\x05нȟ\x02භ')
buf.write('ම\x05їȬ\x02මඹ\x05љȭ\x02ඹ')
buf.write('ය\x05нȟ\x02යɾ\x03\x02\x02\x02ර\u0dbc')
buf.write('\x05їȬ\x02\u0dbcල\x05нȟ\x02ල\u0dbe')
buf.write('\x05џȰ\x02\u0dbe\u0dbf\x05ёȩ\x02\u0dbfව')
buf.write('\x05щȥ\x02වශ\x05нȟ\x02ශʀ')
buf.write('\x03\x02\x02\x02ෂස\x05їȬ\x02සහ\x05х')
buf.write('ȣ\x02හළ\x05сȡ\x02ළෆ\x05у')
buf.write('Ȣ\x02ෆ\u0dc7\x05ћȮ\x02\u0dc7ʂ\x03\x02\x02')
buf.write('\x02\u0dc8\u0dc9\x05їȬ\x02\u0dc9්\x05ёȩ')
buf.write('\x02්\u0dcb\x05ыȦ\x02\u0dcb\u0dcc\x05ыȦ')
buf.write('\x02\u0dcc\u0dcd\x05зȜ\x02\u0dcd\u0dce\x05еț')
buf.write('\x02\u0dceා\x05йȝ\x02ාැ\x05щȥ')
buf.write('\x02ැʄ\x03\x02\x02\x02ෑි\x05їȬ\x02ි')
buf.write('ී\x05ёȩ\x02ීු\x05ыȦ\x02ු')
buf.write('\u0dd5\x05ыȦ\x02\u0dd5ූ\x05ѝȯ\x02ූ')
buf.write('\u0dd7\x05ѓȪ\x02\u0dd7ʆ\x03\x02\x02\x02ෘෙ')
buf.write('\x05їȬ\x02ෙේ\x05ёȩ\x02ේෛ')
buf.write('\x05ѡȱ\x02ෛʈ\x03\x02\x02\x02ොෝ\x05ї')
buf.write('Ȭ\x02ෝෞ\x05ёȩ\x02ෞෟ\x05ѡ')
buf.write('ȱ\x02ෟ\u0de0\x05хȣ\x02\u0de0\u0de1\x05л')
buf.write('Ȟ\x02\u0de1ʊ\x03\x02\x02\x02\u0de2\u0de3\x05їȬ')
buf.write('\x02\u0de3\u0de4\x05ёȩ\x02\u0de4\u0de5\x05ѡȱ')
buf.write('\x02\u0de5෦\x05љȭ\x02෦ʌ\x03\x02\x02\x02෧')
buf.write('෨\x05їȬ\x02෨෩\x05ѝȯ\x02෩')
buf.write('෪\x05ыȦ\x02෪෫\x05нȟ\x02෫')
buf.write('෬\x05љȭ\x02෬ʎ\x03\x02\x02\x02෭෮')
buf.write('\x05љȭ\x02෮෯\x05еț\x02෯\u0df0')
buf.write('\x05эȧ\x02\u0df0\u0df1\x05ѓȪ\x02\u0df1ෲ')
buf.write('\x05ыȦ\x02ෲෳ\x05нȟ\x02ෳʐ')
buf.write('\x03\x02\x02\x02෴\u0df5\x05љȭ\x02\u0df5\u0df6\x05е')
buf.write('ț\x02\u0df6\u0df7\x05џȰ\x02\u0df7\u0df8\x05н')
buf.write('ȟ\x02\u0df8ʒ\x03\x02\x02\x02\u0df9\u0dfa\x05љȭ')
buf.write('\x02\u0dfa\u0dfb\x05еț\x02\u0dfb\u0dfc\x05џȰ')
buf.write('\x02\u0dfc\u0dfd\x05нȟ\x02\u0dfd\u0dfe\x05ѓȪ')
buf.write('\x02\u0dfe\u0dff\x05ёȩ\x02\u0dff\u0e00\x05хȣ')
buf.write('\x02\u0e00ก\x05яȨ\x02กข\x05ћȮ')
buf.write('\x02ขʔ\x03\x02\x02\x02ฃค\x05љȭ\x02ค')
buf.write('ฅ\x05йȝ\x02ฅฆ\x05уȢ\x02ฆ')
buf.write('ง\x05нȟ\x02งจ\x05эȧ\x02จ')
buf.write('ฉ\x05еț\x02ฉʖ\x03\x02\x02\x02ชซ')
buf.write('\x05љȭ\x02ซฌ\x05йȝ\x02ฌญ')
buf.write('\x05уȢ\x02ญฎ\x05нȟ\x02ฎฏ')
buf.write('\x05эȧ\x02ฏฐ\x05еț\x02ฐฑ')
buf.write('\x05йȝ\x02ฑฒ\x05уȢ\x02ฒณ')
buf.write('\x05нȟ\x02ณด\x05йȝ\x02ดต')
buf.write('\x05щȥ\x02ตʘ\x03\x02\x02\x02ถท\x05љ')
buf.write('ȭ\x02ทธ\x05йȝ\x02ธน\x05я')
buf.write('Ȩ\x02นʚ\x03\x02\x02\x02บป\x05љȭ')
buf.write('\x02ปผ\x05нȟ\x02ผฝ\x05еț')
buf.write('\x02ฝพ\x05їȬ\x02พฟ\x05йȝ')
buf.write('\x02ฟภ\x05уȢ\x02ภʜ\x03\x02\x02\x02ม')
buf.write('ย\x05љȭ\x02ยร\x05нȟ\x02ร')
buf.write('ฤ\x05йȝ\x02ฤล\x05ёȩ\x02ล')
buf.write('ฦ\x05яȨ\x02ฦว\x05лȞ\x02ว')
buf.write('ʞ\x03\x02\x02\x02ศษ\x05љȭ\x02ษส')
buf.write('\x05нȟ\x02สห\x05нȟ\x02หฬ')
buf.write('\x05лȞ\x02ฬʠ\x03\x02\x02\x02อฮ\x05љ')
buf.write('ȭ\x02ฮฯ\x05нȟ\x02ฯะ\x05с')
buf.write('ȡ\x02ะั\x05эȧ\x02ัา\x05н')
buf.write('ȟ\x02าำ\x05яȨ\x02ำิ\x05ћ')
buf.write('Ȯ\x02ิʢ\x03\x02\x02\x02ีึ\x05љȭ')
buf.write('\x02ึื\x05нȟ\x02ืุ\x05ыȦ')
buf.write('\x02ุู\x05нȟ\x02ฺู\x05йȝ')
buf.write('\x02ฺ\u0e3b\x05ћȮ\x02\u0e3bʤ\x03\x02\x02\x02\u0e3c')
buf.write('\u0e3d\x05љȭ\x02\u0e3d\u0e3e\x05нȟ\x02\u0e3e')
buf.write('฿\x05ыȦ\x02฿เ\x05пȠ\x02เ')
buf.write('ʦ\x03\x02\x02\x02แโ\x05љȭ\x02โใ')
buf.write('\x05нȟ\x02ใไ\x05ѕȫ\x02ไๅ')
buf.write('\x05ѝȯ\x02ๅๆ\x05нȟ\x02ๆ็')
buf.write('\x05яȨ\x02็่\x05йȝ\x02่้')
buf.write('\x05нȟ\x02้ʨ\x03\x02\x02\x02๊๋\x05љ')
buf.write('ȭ\x02๋์\x05нȟ\x02์ํ\x05ѕ')
buf.write('ȫ\x02ํ๎\x05ѝȯ\x02๎๏\x05н')
buf.write('ȟ\x02๏๐\x05яȨ\x02๐๑\x05ћ')
buf.write('Ȯ\x02๑๒\x05хȣ\x02๒๓\x05е')
buf.write('ț\x02๓๔\x05ыȦ\x02๔ʪ\x03\x02\x02')
buf.write('\x02๕๖\x05љȭ\x02๖๗\x05нȟ')
buf.write('\x02๗๘\x05їȬ\x02๘๙\x05хȣ')
buf.write('\x02๙๚\x05еț\x02๚๛\x05ыȦ')
buf.write('\x02๛\u0e5c\x05хȣ\x02\u0e5c\u0e5d\x05ѧȴ')
buf.write('\x02\u0e5d\u0e5e\x05еț\x02\u0e5e\u0e5f\x05зȜ')
buf.write('\x02\u0e5f\u0e60\x05ыȦ\x02\u0e60\u0e61\x05нȟ')
buf.write('\x02\u0e61ʬ\x03\x02\x02\x02\u0e62\u0e63\x05љȭ\x02\u0e63')
buf.write('\u0e64\x05нȟ\x02\u0e64\u0e65\x05їȬ\x02\u0e65')
buf.write('\u0e66\x05хȣ\x02\u0e66\u0e67\x05еț\x02\u0e67')
buf.write('\u0e68\x05ыȦ\x02\u0e68\u0e69\x05ыȦ\x02\u0e69')
buf.write('\u0e6a\x05ѥȳ\x02\u0e6a\u0e6b\x07a\x02\x02\u0e6b\u0e6c')
buf.write('\x05їȬ\x02\u0e6c\u0e6d\x05нȟ\x02\u0e6d\u0e6e')
buf.write('\x05ѝȯ\x02\u0e6e\u0e6f\x05љȭ\x02\u0e6f\u0e70')
buf.write('\x05еț\x02\u0e70\u0e71\x05зȜ\x02\u0e71\u0e72')
buf.write('\x05ыȦ\x02\u0e72\u0e73\x05нȟ\x02\u0e73ʮ')
buf.write('\x03\x02\x02\x02\u0e74\u0e75\x05љȭ\x02\u0e75\u0e76\x05н')
buf.write('ȟ\x02\u0e76\u0e77\x05їȬ\x02\u0e77\u0e78\x05џ')
buf.write('Ȱ\x02\u0e78\u0e79\x05нȟ\x02\u0e79\u0e7a\x05ї')
buf.write('Ȭ\x02\u0e7a\u0e7b\x05нȟ\x02\u0e7b\u0e7c\x05ї')
buf.write('Ȭ\x02\u0e7c\u0e7d\x05їȬ\x02\u0e7d\u0e7e\x05ё')
buf.write('ȩ\x02\u0e7e\u0e7f\x05їȬ\x02\u0e7fʰ\x03\x02\x02')
buf.write('\x02\u0e80ກ\x05љȭ\x02ກຂ\x05нȟ')
buf.write('\x02ຂ\u0e83\x05љȭ\x02\u0e83ຄ\x05љȭ')
buf.write('\x02ຄ\u0e85\x05хȣ\x02\u0e85ຆ\x05ёȩ')
buf.write('\x02ຆງ\x05яȨ\x02ງຈ\x05ћȮ')
buf.write('\x02ຈຉ\x05хȣ\x02ຉຊ\x05эȧ')
buf.write('\x02ຊ\u0e8b\x05нȟ\x02\u0e8bຌ\x05ѧȴ')
buf.write('\x02ຌຍ\x05ёȩ\x02ຍຎ\x05яȨ')
buf.write('\x02ຎຏ\x05нȟ\x02ຏʲ\x03\x02\x02\x02ຐ')
buf.write('ຑ\x05љȭ\x02ຑຒ\x05нȟ\x02ຒ')
buf.write('ຓ\x05ћȮ\x02ຓʴ\x03\x02\x02\x02ດຕ')
buf.write('\x05љȭ\x02ຕຖ\x05нȟ\x02ຖທ')
buf.write('\x05ћȮ\x02ທຘ\x05љȭ\x02ຘʶ')
buf.write('\x03\x02\x02\x02ນບ\x05љȭ\x02ບປ\x05н')
buf.write('ȟ\x02ປຜ\x05ћȮ\x02ຜຝ\x05ћ')
buf.write('Ȯ\x02ຝພ\x05хȣ\x02ພຟ\x05я')
buf.write('Ȩ\x02ຟຠ\x05сȡ\x02ຠມ\x05љ')
buf.write('ȭ\x02ມʸ\x03\x02\x02\x02ຢຣ\x05љȭ')
buf.write('\x02ຣ\u0ea4\x05уȢ\x02\u0ea4ລ\x05еț')
buf.write('\x02ລ\u0ea6\x05їȬ\x02\u0ea6ວ\x05нȟ')
buf.write('\x02ວʺ\x03\x02\x02\x02ຨຩ\x05љȭ\x02ຩ')
buf.write('ສ\x05уȢ\x02ສຫ\x05ёȩ\x02ຫ')
buf.write('ຬ\x05ѡȱ\x02ຬʼ\x03\x02\x02\x02ອຮ')
buf.write('\x05љȭ\x02ຮຯ\x05уȢ\x02ຯະ')
buf.write('\x05ѝȯ\x02ະັ\x05ћȮ\x02ັາ')
buf.write('\x05лȞ\x02າຳ\x05ёȩ\x02ຳິ')
buf.write('\x05ѡȱ\x02ິີ\x05яȨ\x02ີʾ')
buf.write('\x03\x02\x02\x02ຶື\x05љȭ\x02ືຸ\x05х')
buf.write('ȣ\x02ຸູ\x05зȜ\x02຺ູ\x05ы')
buf.write('Ȧ\x02຺ົ\x05хȣ\x02ົຼ\x05я')
buf.write('Ȩ\x02ຼຽ\x05сȡ\x02ຽ\u0ebe\x05љ')
buf.write('ȭ\x02\u0ebeˀ\x03\x02\x02\x02\u0ebfເ\x05љȭ')
buf.write('\x02ເແ\x05хȣ\x02ແໂ\x05сȡ')
buf.write('\x02ໂໃ\x05яȨ\x02ໃໄ\x05ћȮ')
buf.write('\x02ໄ\u0ec5\x05ѥȳ\x02\u0ec5ໆ\x05ѓȪ')
buf.write('\x02ໆ\u0ec7\x05нȟ\x02\u0ec7˂\x03\x02\x02\x02່')
buf.write('້\x05љȭ\x02້໊\x05хȣ\x02໊')
buf.write('໋\x05эȧ\x02໋໌\x05ѓȪ\x02໌')
buf.write('ໍ\x05ыȦ\x02ໍ\u0ece\x05нȟ\x02\u0ece')
buf.write('\u0ecf\x07a\x02\x02\u0ecf໐\x05хȣ\x02໐໑')
buf.write('\x05яȨ\x02໑໒\x05ћȮ\x02໒໓')
buf.write('\x05нȟ\x02໓໔\x05сȡ\x02໔໕')
buf.write('\x05нȟ\x02໕໖\x05їȬ\x02໖˄')
buf.write('\x03\x02\x02\x02໗໘\x05љȭ\x02໘໙\x05х')
buf.write('ȣ\x02໙\u0eda\x05яȨ\x02\u0eda\u0edb\x05с')
buf.write('ȡ\x02\u0edbໜ\x05ыȦ\x02ໜໝ\x05н')
buf.write('ȟ\x02ໝˆ\x03\x02\x02\x02ໞໟ\x05љȭ')
buf.write('\x02ໟ\u0ee0\x05хȣ\x02\u0ee0\u0ee1\x05ѧȴ')
buf.write('\x02\u0ee1\u0ee2\x05нȟ\x02\u0ee2ˈ\x03\x02\x02\x02\u0ee3')
buf.write('\u0ee4\x05љȭ\x02\u0ee4\u0ee5\x05щȥ\x02\u0ee5')
buf.write('\u0ee6\x05хȣ\x02\u0ee6\u0ee7\x05ѓȪ\x02\u0ee7')
buf.write('ˊ\x03\x02\x02\x02\u0ee8\u0ee9\x05љȭ\x02\u0ee9\u0eea')
buf.write('\x05эȧ\x02\u0eea\u0eeb\x05еț\x02\u0eeb\u0eec')
buf.write('\x05ыȦ\x02\u0eec\u0eed\x05ыȦ\x02\u0eed\u0eee')
buf.write('\x05хȣ\x02\u0eee\u0eef\x05яȨ\x02\u0eef\u0ef0')
buf.write('\x05ћȮ\x02\u0ef0ˌ\x03\x02\x02\x02\u0ef1\u0ef2\x05љ')
buf.write('ȭ\x02\u0ef2\u0ef3\x05яȨ\x02\u0ef3\u0ef4\x05е')
buf.write('ț\x02\u0ef4\u0ef5\x05ѓȪ\x02\u0ef5\u0ef6\x05љ')
buf.write('ȭ\x02\u0ef6\u0ef7\x05уȢ\x02\u0ef7\u0ef8\x05ё')
buf.write('ȩ\x02\u0ef8\u0ef9\x05ћȮ\x02\u0ef9ˎ\x03\x02\x02')
buf.write('\x02\u0efa\u0efb\x05љȭ\x02\u0efb\u0efc\x05ёȩ')
buf.write('\x02\u0efc\u0efd\x05эȧ\x02\u0efd\u0efe\x05нȟ')
buf.write('\x02\u0efeː\x03\x02\x02\x02\u0effༀ\x05љȭ\x02ༀ')
buf.write('༁\x05ѓȪ\x02༁༂\x05нȟ\x02༂')
buf.write('༃\x05йȝ\x02༃༄\x05хȣ\x02༄')
buf.write('༅\x05пȠ\x02༅༆\x05хȣ\x02༆')
buf.write('༇\x05йȝ\x02༇༈\x05еț\x02༈')
buf.write('༉\x05ћȮ\x02༉༊\x05хȣ\x02༊')
buf.write('་\x05ёȩ\x02་༌\x05яȨ\x02༌')
buf.write('˒\x03\x02\x02\x02།༎\x05љȭ\x02༎༏')
buf.write('\x05ѕȫ\x02༏༐\x05ыȦ\x02༐༑')
buf.write('\x05лȞ\x02༑༒\x05еț\x02༒༓')
buf.write('\x05ћȮ\x02༓༔\x05еț\x02༔˔')
buf.write('\x03\x02\x02\x02༕༖\x05љȭ\x02༖༗\x05ѕ')
buf.write('ȫ\x02༗༘\x05ыȦ\x02༘༙\x05н')
buf.write('ȟ\x02༙༚\x05їȬ\x02༚༛\x05ї')
buf.write('Ȭ\x02༛༜\x05ёȩ\x02༜༝\x05ї')
buf.write('Ȭ\x02༝˖\x03\x02\x02\x02༞༟\x05љȭ')
buf.write('\x02༟༠\x05ћȮ\x02༠༡\x05еț')
buf.write('\x02༡༢\x05яȨ\x02༢༣\x05лȞ')
buf.write('\x02༣༤\x05еț\x02༤༥\x05ыȦ')
buf.write('\x02༥༦\x05ёȩ\x02༦༧\x05яȨ')
buf.write('\x02༧༨\x05нȟ\x02༨˘\x03\x02\x02\x02༩')
buf.write('༪\x05љȭ\x02༪༫\x05ћȮ\x02༫')
buf.write('༬\x05еț\x02༬༭\x05їȬ\x02༭')
buf.write('༮\x05ћȮ\x02༮˚\x03\x02\x02\x02༯༰')
buf.write('\x05љȭ\x02༰༱\x05ћȮ\x02༱༲')
buf.write('\x05еț\x02༲༳\x05їȬ\x02༳༴')
buf.write('\x05ћȮ\x02༴༵\x05ѝȯ\x02༵༶')
buf.write('\x05ѓȪ\x02༶˜\x03\x02\x02\x02༷༸\x05љ')
buf.write('ȭ\x02༸༹\x05ћȮ\x02༹༺\x05е')
buf.write('ț\x02༺༻\x05ћȮ\x02༻༼\x05н')
buf.write('ȟ\x02༼༽\x05эȧ\x02༽༾\x05н')
buf.write('ȟ\x02༾༿\x05яȨ\x02༿ཀ\x05ћ')
buf.write('Ȯ\x02ཀ˞\x03\x02\x02\x02ཁག\x05љȭ')
buf.write('\x02གགྷ\x05ћȮ\x02གྷང\x05еț')
buf.write('\x02ངཅ\x05ћȮ\x02ཅཆ\x05нȟ')
buf.write('\x02ཆཇ\x05эȧ\x02ཇ\u0f48\x05нȟ')
buf.write('\x02\u0f48ཉ\x05яȨ\x02ཉཊ\x05ћȮ')
buf.write('\x02ཊཋ\x07a\x02\x02ཋཌ\x05хȣ\x02ཌ')
buf.write('ཌྷ\x05лȞ\x02ཌྷˠ\x03\x02\x02\x02ཎཏ')
buf.write('\x05љȭ\x02ཏཐ\x05ћȮ\x02ཐད')
buf.write('\x05еț\x02དདྷ\x05ћȮ\x02དྷན')
buf.write('\x05хȣ\x02ནཔ\x05йȝ\x02པˢ')
buf.write('\x03\x02\x02\x02ཕབ\x05љȭ\x02བབྷ\x05ћ')
buf.write('Ȯ\x02བྷམ\x05еț\x02མཙ\x05ћ')
buf.write('Ȯ\x02ཙཚ\x05хȣ\x02ཚཛ\x05љ')
buf.write('ȭ\x02ཛཛྷ\x05ћȮ\x02ཛྷཝ\x05х')
buf.write('ȣ\x02ཝཞ\x05йȝ\x02ཞཟ\x05љ')
buf.write('ȭ\x02ཟˤ\x03\x02\x02\x02འཡ\x05љȭ')
buf.write('\x02ཡར\x05ћȮ\x02རལ\x05їȬ')
buf.write('\x02ལཤ\x05хȣ\x02ཤཥ\x05яȨ')
buf.write('\x02ཥས\x05сȡ\x02ས˦\x03\x02\x02\x02ཧ')
buf.write('ཨ\x05љȭ\x02ཨཀྵ\x05ѝȯ\x02ཀྵ')
buf.write('ཪ\x05зȜ\x02ཪཫ\x05эȧ\x02ཫ')
buf.write('ཬ\x05ѝȯ\x02ཬ\u0f6d\x05ыȦ\x02\u0f6d')
buf.write('\u0f6e\x05ћȮ\x02\u0f6e\u0f6f\x05хȣ\x02\u0f6f')
buf.write('\u0f70\x05љȭ\x02\u0f70ཱ\x05нȟ\x02ཱ')
buf.write('ི\x05ћȮ\x02ི˨\x03\x02\x02\x02ཱིུ')
buf.write('\x05љȭ\x02ཱུུ\x05ѝȯ\x02ཱུྲྀ')
buf.write('\x05зȜ\x02ྲྀཷ\x05ѓȪ\x02ཷླྀ')
buf.write('\x05еț\x02ླྀཹ\x05їȬ\x02ཹེ')
buf.write('\x05ћȮ\x02ེཻ\x05хȣ\x02ཻོ')
buf.write('\x05ћȮ\x02ོཽ\x05хȣ\x02ཽཾ')
buf.write('\x05ёȩ\x02ཾཿ\x05яȨ\x02ཿ˪')
buf.write('\x03\x02\x02\x02ཱྀྀ\x05љȭ\x02ཱྀྂ\x05ѝ')
buf.write('ȯ\x02ྂྃ\x05зȜ\x02྄ྃ\x05љ')
buf.write('ȭ\x02྄྅\x05ћȮ\x02྅྆\x05х')
buf.write('ȣ\x02྆྇\x05ћȮ\x02྇ྈ\x05ѝ')
buf.write('ȯ\x02ྈྉ\x05ћȮ\x02ྉྊ\x05е')
buf.write('ț\x02ྊྋ\x05зȜ\x02ྋྌ\x05ы')
buf.write('Ȧ\x02ྌྍ\x05нȟ\x02ྍˬ\x03\x02\x02')
buf.write('\x02ྎྏ\x05љȭ\x02ྏྐ\x05ѝȯ')
buf.write('\x02ྐྑ\x05зȜ\x02ྑྒ\x05ћȮ')
buf.write('\x02ྒྒྷ\x05ѥȳ\x02ྒྷྔ\x05ѓȪ')
buf.write('\x02ྔྕ\x05нȟ\x02ྕˮ\x03\x02\x02\x02ྖ')
buf.write('ྗ\x05љȭ\x02ྗ\u0f98\x05ѝȯ\x02\u0f98')
buf.write('ྙ\x05йȝ\x02ྙྚ\x05йȝ\x02ྚ')
buf.write('ྛ\x05нȟ\x02ྛྜ\x05љȭ\x02ྜ')
buf.write('ྜྷ\x05љȭ\x02ྜྷ˰\x03\x02\x02\x02ྞྟ')
buf.write('\x05љȭ\x02ྟྠ\x05ѝȯ\x02ྠྡ')
buf.write('\x05љȭ\x02ྡྡྷ\x05ѓȪ\x02ྡྷྣ')
buf.write('\x05нȟ\x02ྣྤ\x05яȨ\x02ྤྥ')
buf.write('\x05лȞ\x02ྥ˲\x03\x02\x02\x02ྦྦྷ\x05ћ')
buf.write('Ȯ\x02ྦྷྨ\x05еț\x02ྨྩ\x05з')
buf.write('Ȝ\x02ྩྪ\x05ыȦ\x02ྪྫ\x05н')
buf.write('ȟ\x02ྫ˴\x03\x02\x02\x02ྫྷྭ\x05ћȮ')
buf.write('\x02ྭྮ\x05уȢ\x02ྮྯ\x05нȟ')
buf.write('\x02ྯ˶\x03\x02\x02\x02ྰྱ\x05ћȮ\x02ྱ')
buf.write('ྲ\x05уȢ\x02ྲླ\x05нȟ\x02ླ')
buf.write('ྴ\x05яȨ\x02ྴ˸\x03\x02\x02\x02ྵྶ')
buf.write('\x05ћȮ\x02ྶྷ\x05хȣ\x02ྷྸ')
buf.write('\x05эȧ\x02ྸྐྵ\x05нȟ\x02ྐྵ˺')
buf.write('\x03\x02\x02\x02ྺྻ\x05ћȮ\x02ྻྼ\x05х')
buf.write('ȣ\x02ྼ\u0fbd\x05эȧ\x02\u0fbd྾\x05н')
buf.write('ȟ\x02྾྿\x05љȭ\x02྿࿀\x05ћ')
buf.write('Ȯ\x02࿀࿁\x05еț\x02࿁࿂\x05э')
buf.write('ȧ\x02࿂࿃\x05ѓȪ\x02࿃˼\x03\x02\x02')
buf.write('\x02࿄࿅\x05ћȮ\x02࿅࿆\x05хȣ')
buf.write('\x02࿆࿇\x05эȧ\x02࿇࿈\x05нȟ')
buf.write('\x02࿈࿉\x05љȭ\x02࿉࿊\x05ћȮ')
buf.write('\x02࿊࿋\x05еț\x02࿋࿌\x05эȧ')
buf.write('\x02࿌\u0fcd\x05ѓȪ\x02\u0fcd࿎\x07a\x02\x02࿎')
buf.write('࿏\x05ыȦ\x02࿏࿐\x05ћȮ\x02࿐')
buf.write('࿑\x05ѧȴ\x02࿑࿒\x07a\x02\x02࿒࿓')
buf.write('\x05ѝȯ\x02࿓࿔\x05яȨ\x02࿔࿕')
buf.write('\x05йȝ\x02࿕࿖\x05ёȩ\x02࿖࿗')
buf.write('\x05яȨ\x02࿗࿘\x05љȭ\x02࿘࿙')
buf.write('\x05ћȮ\x02࿙࿚\x05їȬ\x02࿚\u0fdb')
buf.write('\x05еț\x02\u0fdb\u0fdc\x05хȣ\x02\u0fdc\u0fdd')
buf.write('\x05яȨ\x02\u0fdd\u0fde\x05нȟ\x02\u0fde\u0fdf')
buf.write('\x05лȞ\x02\u0fdf˾\x03\x02\x02\x02\u0fe0\u0fe1\x05ћ')
buf.write('Ȯ\x02\u0fe1\u0fe2\x05хȣ\x02\u0fe2\u0fe3\x05э')
buf.write('ȧ\x02\u0fe3\u0fe4\x05нȟ\x02\u0fe4\u0fe5\x05љ')
buf.write('ȭ\x02\u0fe5\u0fe6\x05ћȮ\x02\u0fe6\u0fe7\x05е')
buf.write('ț\x02\u0fe7\u0fe8\x05эȧ\x02\u0fe8\u0fe9\x05ѓ')
buf.write('Ȫ\x02\u0fe9\u0fea\x07a\x02\x02\u0fea\u0feb\x05ћȮ')
buf.write('\x02\u0feb\u0fec\x05ѧȴ\x02\u0fec\u0fed\x07a\x02\x02\u0fed')
buf.write('\u0fee\x05ѝȯ\x02\u0fee\u0fef\x05яȨ\x02\u0fef')
buf.write('\u0ff0\x05йȝ\x02\u0ff0\u0ff1\x05ёȩ\x02\u0ff1')
buf.write('\u0ff2\x05яȨ\x02\u0ff2\u0ff3\x05љȭ\x02\u0ff3')
buf.write('\u0ff4\x05ћȮ\x02\u0ff4\u0ff5\x05їȬ\x02\u0ff5')
buf.write('\u0ff6\x05еț\x02\u0ff6\u0ff7\x05хȣ\x02\u0ff7')
buf.write('\u0ff8\x05яȨ\x02\u0ff8\u0ff9\x05нȟ\x02\u0ff9')
buf.write('\u0ffa\x05лȞ\x02\u0ffà\x03\x02\x02\x02\u0ffb\u0ffc')
buf.write('\x05ћȮ\x02\u0ffc\u0ffd\x05хȣ\x02\u0ffd\u0ffe')
buf.write('\x05эȧ\x02\u0ffe\u0fff\x05нȟ\x02\u0fffက')
buf.write('\x05љȭ\x02ကခ\x05ћȮ\x02ခဂ')
buf.write('\x05еț\x02ဂဃ\x05эȧ\x02ဃင')
buf.write('\x05ѓȪ\x02ငစ\x07a\x02\x02စဆ\x05ѝ')
buf.write('ȯ\x02ဆဇ\x05яȨ\x02ဇဈ\x05й')
buf.write('ȝ\x02ဈဉ\x05ёȩ\x02ဉည\x05я')
buf.write('Ȩ\x02ညဋ\x05љȭ\x02ဋဌ\x05ћ')
buf.write('Ȯ\x02ဌဍ\x05їȬ\x02ဍဎ\x05е')
buf.write('ț\x02ဎဏ\x05хȣ\x02ဏတ\x05я')
buf.write('Ȩ\x02တထ\x05нȟ\x02ထဒ\x05л')
buf.write('Ȟ\x02ဒ̂\x03\x02\x02\x02ဓန\x05ћȮ')
buf.write('\x02နပ\x05хȣ\x02ပဖ\x05эȧ')
buf.write('\x02ဖဗ\x05нȟ\x02ဗဘ\x05ѧȴ')
buf.write('\x02ဘမ\x05ёȩ\x02မယ\x05яȨ')
buf.write('\x02ယရ\x05нȟ\x02ရလ\x07a\x02\x02လ')
buf.write('ဝ\x05еț\x02ဝသ\x05зȜ\x02သ')
buf.write('ဟ\x05зȜ\x02ဟဠ\x05їȬ\x02ဠ')
buf.write('̄\x03\x02\x02\x02အဢ\x05ћȮ\x02ဢဣ')
buf.write('\x05хȣ\x02ဣဤ\x05эȧ\x02ဤဥ')
buf.write('\x05нȟ\x02ဥဦ\x05ѧȴ\x02ဦဧ')
buf.write('\x05ёȩ\x02ဧဨ\x05яȨ\x02ဨဩ')
buf.write('\x05нȟ\x02ဩဪ\x07a\x02\x02ဪါ\x05у')
buf.write('Ȣ\x02ါာ\x05ёȩ\x02ာိ\x05ѝ')
buf.write('ȯ\x02ိီ\x05їȬ\x02ီ̆\x03\x02\x02')
buf.write('\x02ုူ\x05ћȮ\x02ူေ\x05хȣ')
buf.write('\x02ေဲ\x05эȧ\x02ဲဳ\x05нȟ')
buf.write('\x02ဳဴ\x05ѧȴ\x02ဴဵ\x05ёȩ')
buf.write('\x02ဵံ\x05яȨ\x02ံ့\x05нȟ')
buf.write('\x02့း\x07a\x02\x02း္\x05эȧ\x02္')
buf.write('်\x05хȣ\x02်ျ\x05яȨ\x02ျ')
buf.write('ြ\x05ѝȯ\x02ြွ\x05ћȮ\x02ွ')
buf.write('ှ\x05нȟ\x02ှ̈\x03\x02\x02\x02ဿ၀')
buf.write('\x05ћȮ\x02၀၁\x05хȣ\x02၁၂')
buf.write('\x05эȧ\x02၂၃\x05нȟ\x02၃၄')
buf.write('\x05ѧȴ\x02၄၅\x05ёȩ\x02၅၆')
buf.write('\x05яȨ\x02၆၇\x05нȟ\x02၇၈')
buf.write('\x07a\x02\x02၈၉\x05їȬ\x02၉၊\x05н')
buf.write('ȟ\x02၊။\x05сȡ\x02။၌\x05х')
buf.write('ȣ\x02၌၍\x05ёȩ\x02၍၎\x05я')
buf.write('Ȩ\x02၎̊\x03\x02\x02\x02၏ၐ\x05ћȮ')
buf.write('\x02ၐၑ\x05ёȩ\x02ၑ̌\x03\x02\x02\x02ၒ')
buf.write('ၓ\x05ћȮ\x02ၓၔ\x05їȬ\x02ၔ')
buf.write('ၕ\x05еț\x02ၕၖ\x05хȣ\x02ၖ')
buf.write('ၗ\x05ыȦ\x02ၗၘ\x05хȣ\x02ၘ')
buf.write('ၙ\x05яȨ\x02ၙၚ\x05сȡ\x02ၚ')
buf.write('̎\x03\x02\x02\x02ၛၜ\x05ћȮ\x02ၜၝ')
buf.write('\x05їȬ\x02ၝၞ\x05еț\x02ၞၟ')
buf.write('\x05яȨ\x02ၟၠ\x05љȭ\x02ၠၡ')
buf.write('\x05еț\x02ၡၢ\x05йȝ\x02ၢၣ')
buf.write('\x05ћȮ\x02ၣၤ\x05хȣ\x02ၤၥ')
buf.write('\x05ёȩ\x02ၥၦ\x05яȨ\x02ၦ̐')
buf.write('\x03\x02\x02\x02ၧၨ\x05ћȮ\x02ၨၩ\x05ї')
buf.write('Ȭ\x02ၩၪ\x05еț\x02ၪၫ\x05я')
buf.write('Ȩ\x02ၫၬ\x05љȭ\x02ၬၭ\x05ы')
buf.write('Ȧ\x02ၭၮ\x05еț\x02ၮၯ\x05ћ')
buf.write('Ȯ\x02ၯၰ\x05нȟ\x02ၰ̒\x03\x02\x02')
buf.write('\x02ၱၲ\x05ћȮ\x02ၲၳ\x05їȬ')
buf.write('\x02ၳၴ\x05нȟ\x02ၴၵ\x05еț')
buf.write('\x02ၵၶ\x05ћȮ\x02ၶ̔\x03\x02\x02\x02ၷ')
buf.write('ၸ\x05ћȮ\x02ၸၹ\x05їȬ\x02ၹ')
buf.write('ၺ\x05хȣ\x02ၺၻ\x05сȡ\x02ၻ')
buf.write('ၼ\x05сȡ\x02ၼၽ\x05нȟ\x02ၽ')
buf.write('ၾ\x05їȬ\x02ၾ̖\x03\x02\x02\x02ၿႀ')
buf.write('\x05ћȮ\x02ႀႁ\x05їȬ\x02ႁႂ')
buf.write('\x05хȣ\x02ႂႃ\x05эȧ\x02ႃ̘')
buf.write('\x03\x02\x02\x02ႄႅ\x05ћȮ\x02ႅႆ\x05ї')
buf.write('Ȭ\x02ႆႇ\x05ѝȯ\x02ႇႈ\x05н')
buf.write('ȟ\x02ႈ̚\x03\x02\x02\x02ႉႊ\x05ћȮ')
buf.write('\x02ႊႋ\x05їȬ\x02ႋႌ\x05ѝȯ')
buf.write('\x02ႌႍ\x05яȨ\x02ႍႎ\x05йȝ')
buf.write('\x02ႎႏ\x05еț\x02ႏ႐\x05ћȮ')
buf.write('\x02႐႑\x05нȟ\x02႑̜\x03\x02\x02\x02႒')
buf.write('႓\x05ћȮ\x02႓႔\x05ѥȳ\x02႔')
buf.write('႕\x05ѓȪ\x02႕႖\x05нȟ\x02႖')
buf.write('̞\x03\x02\x02\x02႗႘\x05ѝȯ\x02႘႙')
buf.write('\x05яȨ\x02႙ႚ\x05зȜ\x02ႚႛ')
buf.write('\x05ёȩ\x02ႛႜ\x05ѝȯ\x02ႜႝ')
buf.write('\x05яȨ\x02ႝ႞\x05лȞ\x02႞႟')
buf.write('\x05нȟ\x02႟Ⴀ\x05лȞ\x02Ⴀ̠')
buf.write('\x03\x02\x02\x02ႡႢ\x05ѝȯ\x02ႢႣ\x05я')
buf.write('Ȩ\x02ႣႤ\x05лȞ\x02ႤႥ\x05н')
buf.write('ȟ\x02ႥႦ\x05їȬ\x02Ⴆ̢\x03\x02\x02')
buf.write('\x02ႧႨ\x05ѝȯ\x02ႨႩ\x05яȨ')
buf.write('\x02ႩႪ\x05хȣ\x02ႪႫ\x05ёȩ')
buf.write('\x02ႫႬ\x05яȨ\x02Ⴌ̤\x03\x02\x02\x02Ⴍ')
buf.write('Ⴎ\x05ѝȯ\x02ႮႯ\x05яȨ\x02Ⴏ')
buf.write('Ⴐ\x05хȣ\x02ႰႱ\x05ѕȫ\x02Ⴑ')
buf.write('Ⴒ\x05ѝȯ\x02ႲႳ\x05нȟ\x02Ⴓ')
buf.write('̦\x03\x02\x02\x02ႴႵ\x05ѝȯ\x02ႵႶ')
buf.write('\x05яȨ\x02ႶႷ\x05ыȦ\x02ႷႸ')
buf.write('\x05хȣ\x02ႸႹ\x05эȧ\x02ႹႺ')
buf.write('\x05хȣ\x02ႺႻ\x05ћȮ\x02ႻႼ')
buf.write('\x05нȟ\x02ႼႽ\x05лȞ\x02Ⴝ̨')
buf.write('\x03\x02\x02\x02ႾႿ\x05ѝȯ\x02ႿჀ\x05я')
buf.write('Ȩ\x02ჀჁ\x05ѓȪ\x02ჁჂ\x05х')
buf.write('ȣ\x02ჂჃ\x05џȰ\x02ჃჄ\x05ё')
buf.write('ȩ\x02ჄჅ\x05ћȮ\x02Ⴥ̪\x03\x02\x02')
buf.write('\x02\u10c6Ⴧ\x05ѝȯ\x02Ⴧ\u10c8\x05яȨ')
buf.write('\x02\u10c8\u10c9\x05ћȮ\x02\u10c9\u10ca\x05хȣ')
buf.write('\x02\u10ca\u10cb\x05ыȦ\x02\u10cb̬\x03\x02\x02\x02\u10cc')
buf.write('Ⴭ\x05ѝȯ\x02Ⴭ\u10ce\x05ѓȪ\x02\u10ce')
buf.write('\u10cf\x05лȞ\x02\u10cfა\x05еț\x02ა')
buf.write('ბ\x05ћȮ\x02ბგ\x05нȟ\x02გ')
buf.write('̮\x03\x02\x02\x02დე\x05ѝȯ\x02ევ')
buf.write('\x05ѓȪ\x02ვზ\x05лȞ\x02ზთ')
buf.write('\x05еț\x02თი\x05ћȮ\x02იკ')
buf.write('\x05нȟ\x02კლ\x05лȞ\x02ლ̰')
buf.write('\x03\x02\x02\x02მნ\x05ѝȯ\x02ნო\x05ѓ')
buf.write('Ȫ\x02ოპ\x05љȭ\x02პჟ\x05н')
buf.write('ȟ\x02ჟრ\x05їȬ\x02რს\x05ћ')
buf.write('Ȯ\x02ს̲\x03\x02\x02\x02ტუ\x05ѝȯ')
buf.write('\x02უფ\x05їȬ\x02ფქ\x05ёȩ')
buf.write('\x02ქღ\x05ѡȱ\x02ღყ\x05хȣ')
buf.write('\x02ყშ\x05лȞ\x02შ̴\x03\x02\x02\x02ჩ')
buf.write('ც\x05ѝȯ\x02ცძ\x05љȭ\x02ძ')
buf.write('წ\x05нȟ\x02წ̶\x03\x02\x02\x02ჭხ')
buf.write('\x05ѝȯ\x02ხჯ\x05љȭ\x02ჯჰ')
buf.write('\x05хȣ\x02ჰჱ\x05яȨ\x02ჱჲ')
buf.write('\x05сȡ\x02ჲ̸\x03\x02\x02\x02ჳჴ\x05џ')
buf.write('Ȱ\x02ჴჵ\x05еț\x02ჵჶ\x05ы')
buf.write('Ȧ\x02ჶჷ\x05хȣ\x02ჷჸ\x05л')
buf.write('Ȟ\x02ჸჹ\x05еț\x02ჹჺ\x05ћ')
buf.write('Ȯ\x02ჺ჻\x05нȟ\x02჻̺\x03\x02\x02')
buf.write('\x02ჼჽ\x05џȰ\x02ჽჾ\x05еț')
buf.write('\x02ჾჿ\x05ыȦ\x02ჿᄀ\x05ѝȯ')
buf.write('\x02ᄀᄁ\x05нȟ\x02ᄁ̼\x03\x02\x02\x02ᄂ')
buf.write('ᄃ\x05џȰ\x02ᄃᄄ\x05еț\x02ᄄ')
buf.write('ᄅ\x05ыȦ\x02ᄅᄆ\x05ѝȯ\x02ᄆ')
buf.write('ᄇ\x05нȟ\x02ᄇᄈ\x05љȭ\x02ᄈ')
buf.write('̾\x03\x02\x02\x02ᄉᄊ\x05џȰ\x02ᄊᄋ')
buf.write('\x05еț\x02ᄋᄌ\x05їȬ\x02ᄌᄍ')
buf.write('\x05йȝ\x02ᄍᄎ\x05уȢ\x02ᄎᄏ')
buf.write('\x05еț\x02ᄏᄐ\x05їȬ\x02ᄐ̀')
buf.write('\x03\x02\x02\x02ᄑᄒ\x05џȰ\x02ᄒᄓ\x05е')
buf.write('ț\x02ᄓᄔ\x05їȬ\x02ᄔᄕ\x05й')
buf.write('ȝ\x02ᄕᄖ\x05уȢ\x02ᄖᄗ\x05е')
buf.write('ț\x02ᄗᄘ\x05їȬ\x02ᄘᄙ\x074')
buf.write('\x02\x02ᄙ͂\x03\x02\x02\x02ᄚᄛ\x05џȰ\x02ᄛ')
buf.write('ᄜ\x05еț\x02ᄜᄝ\x05їȬ\x02ᄝ')
buf.write('ᄞ\x05хȣ\x02ᄞᄟ\x05еț\x02ᄟ')
buf.write('ᄠ\x05зȜ\x02ᄠᄡ\x05ыȦ\x02ᄡ')
buf.write('ᄢ\x05нȟ\x02ᄢ̈́\x03\x02\x02\x02ᄣᄤ')
buf.write('\x05џȰ\x02ᄤᄥ\x05еț\x02ᄥᄦ')
buf.write('\x05їȬ\x02ᄦᄧ\x05їȬ\x02ᄧᄨ')
buf.write('\x05еț\x02ᄨᄩ\x05ѥȳ\x02ᄩ͆')
buf.write('\x03\x02\x02\x02ᄪᄫ\x05џȰ\x02ᄫᄬ\x05е')
buf.write('ț\x02ᄬᄭ\x05їȬ\x02ᄭᄮ\x05ѥ')
buf.write('ȳ\x02ᄮᄯ\x05хȣ\x02ᄯᄰ\x05я')
buf.write('Ȩ\x02ᄰᄱ\x05сȡ\x02ᄱ͈\x03\x02\x02')
buf.write('\x02ᄲᄳ\x05џȰ\x02ᄳᄴ\x05нȟ')
buf.write('\x02ᄴᄵ\x05їȬ\x02ᄵᄶ\x05љȭ')
buf.write('\x02ᄶᄷ\x05хȣ\x02ᄷᄸ\x05ёȩ')
buf.write('\x02ᄸᄹ\x05яȨ\x02ᄹ͊\x03\x02\x02\x02ᄺ')
buf.write('ᄻ\x05џȰ\x02ᄻᄼ\x05нȟ\x02ᄼ')
buf.write('ᄽ\x05їȬ\x02ᄽᄾ\x05љȭ\x02ᄾ')
buf.write('ᄿ\x05хȣ\x02ᄿᅀ\x05ёȩ\x02ᅀ')
buf.write('ᅁ\x05яȨ\x02ᅁᅂ\x05љȭ\x02ᅂ')
buf.write('͌\x03\x02\x02\x02ᅃᅄ\x05ѡȱ\x02ᅄᅅ')
buf.write('\x05еț\x02ᅅᅆ\x05хȣ\x02ᅆᅇ')
buf.write('\x05ћȮ\x02ᅇ͎\x03\x02\x02\x02ᅈᅉ\x05ѡ')
buf.write('ȱ\x02ᅉᅊ\x05еț\x02ᅊᅋ\x05ї')
buf.write('Ȭ\x02ᅋᅌ\x05яȨ\x02ᅌᅍ\x05х')
buf.write('ȣ\x02ᅍᅎ\x05яȨ\x02ᅎᅏ\x05с')
buf.write('ȡ\x02ᅏ͐\x03\x02\x02\x02ᅐᅑ\x05ѡȱ')
buf.write('\x02ᅑᅒ\x05нȟ\x02ᅒᅓ\x05ыȦ')
buf.write('\x02ᅓᅔ\x05ыȦ\x02ᅔᅕ\x05пȠ')
buf.write('\x02ᅕᅖ\x05ёȩ\x02ᅖᅗ\x05їȬ')
buf.write('\x02ᅗᅘ\x05эȧ\x02ᅘᅙ\x05нȟ')
buf.write('\x02ᅙᅚ\x05лȞ\x02ᅚ͒\x03\x02\x02\x02ᅛ')
buf.write('ᅜ\x05ѡȱ\x02ᅜᅝ\x05уȢ\x02ᅝ')
buf.write('ᅞ\x05нȟ\x02ᅞᅟ\x05яȨ\x02ᅟ')
buf.write('͔\x03\x02\x02\x02ᅠᅡ\x05ѡȱ\x02ᅡᅢ')
buf.write('\x05уȢ\x02ᅢᅣ\x05нȟ\x02ᅣᅤ')
buf.write('\x05яȨ\x02ᅤᅥ\x05нȟ\x02ᅥᅦ')
buf.write('\x05џȰ\x02ᅦᅧ\x05нȟ\x02ᅧᅨ')
buf.write('\x05їȬ\x02ᅨ͖\x03\x02\x02\x02ᅩᅪ\x05ѡ')
buf.write('ȱ\x02ᅪᅫ\x05уȢ\x02ᅫᅬ\x05н')
buf.write('ȟ\x02ᅬᅭ\x05їȬ\x02ᅭᅮ\x05н')
buf.write('ȟ\x02ᅮ͘\x03\x02\x02\x02ᅯᅰ\x05ѡȱ')
buf.write('\x02ᅰᅱ\x05уȢ\x02ᅱᅲ\x05хȣ')
buf.write('\x02ᅲᅳ\x05ыȦ\x02ᅳᅴ\x05нȟ')
buf.write('\x02ᅴ͚\x03\x02\x02\x02ᅵᅶ\x05ѡȱ\x02ᅶ')
buf.write('ᅷ\x05хȣ\x02ᅷᅸ\x05ћȮ\x02ᅸ')
buf.write('ᅹ\x05уȢ\x02ᅹ͜\x03\x02\x02\x02ᅺᅻ')
buf.write('\x05ѡȱ\x02ᅻᅼ\x05хȣ\x02ᅼᅽ')
buf.write('\x05ћȮ\x02ᅽᅾ\x05уȢ\x02ᅾᅿ')
buf.write('\x05хȣ\x02ᅿᆀ\x05яȨ\x02ᆀ͞')
buf.write('\x03\x02\x02\x02ᆁᆂ\x05ѡȱ\x02ᆂᆃ\x05ё')
buf.write('ȩ\x02ᆃᆄ\x05їȬ\x02ᆄᆅ\x05щ')
buf.write('ȥ\x02ᆅ͠\x03\x02\x02\x02ᆆᆇ\x05ѡȱ')
buf.write('\x02ᆇᆈ\x05їȬ\x02ᆈᆉ\x05хȣ')
buf.write('\x02ᆉᆊ\x05ћȮ\x02ᆊᆋ\x05нȟ')
buf.write('\x02ᆋ͢\x03\x02\x02\x02ᆌᆍ\x05ѣȲ\x02ᆍ')
buf.write('ᆎ\x05эȧ\x02ᆎᆏ\x05ыȦ\x02ᆏ')
buf.write('ͤ\x03\x02\x02\x02ᆐᆑ\x05ѣȲ\x02ᆑᆒ')
buf.write('\x05эȧ\x02ᆒᆓ\x05ыȦ\x02ᆓᆔ')
buf.write('\x05еț\x02ᆔᆕ\x05сȡ\x02ᆕᆖ')
buf.write('\x05сȡ\x02ᆖͦ\x03\x02\x02\x02ᆗᆘ\x05ѣ')
buf.write('Ȳ\x02ᆘᆙ\x05эȧ\x02ᆙᆚ\x05ы')
buf.write('Ȧ\x02ᆚᆛ\x05еț\x02ᆛᆜ\x05ћ')
buf.write('Ȯ\x02ᆜᆝ\x05ћȮ\x02ᆝᆞ\x05ї')
buf.write('Ȭ\x02ᆞᆟ\x05хȣ\x02ᆟᆠ\x05з')
buf.write('Ȝ\x02ᆠᆡ\x05ѝȯ\x02ᆡᆢ\x05ћ')
buf.write('Ȯ\x02ᆢᆣ\x05нȟ\x02ᆣᆤ\x05љ')
buf.write('ȭ\x02ᆤͨ\x03\x02\x02\x02ᆥᆦ\x05ѣȲ')
buf.write('\x02ᆦᆧ\x05эȧ\x02ᆧᆨ\x05ыȦ')
buf.write('\x02ᆨᆩ\x05йȝ\x02ᆩᆪ\x05еț')
buf.write('\x02ᆪᆫ\x05љȭ\x02ᆫᆬ\x05ћȮ')
buf.write('\x02ᆬͪ\x03\x02\x02\x02ᆭᆮ\x05ѣȲ\x02ᆮ')
buf.write('ᆯ\x05эȧ\x02ᆯᆰ\x05ыȦ\x02ᆰ')
buf.write('ᆱ\x05йȝ\x02ᆱᆲ\x05ёȩ\x02ᆲ')
buf.write('ᆳ\x05ыȦ\x02ᆳᆴ\x05еț\x02ᆴ')
buf.write('ᆵ\x05ћȮ\x02ᆵᆶ\x05ћȮ\x02ᆶ')
buf.write('ᆷ\x05џȰ\x02ᆷᆸ\x05еț\x02ᆸ')
buf.write('ᆹ\x05ыȦ\x02ᆹͬ\x03\x02\x02\x02ᆺᆻ')
buf.write('\x05ѣȲ\x02ᆻᆼ\x05эȧ\x02ᆼᆽ')
buf.write('\x05ыȦ\x02ᆽᆾ\x05нȟ\x02ᆾᆿ')
buf.write('\x05ыȦ\x02ᆿᇀ\x05нȟ\x02ᇀᇁ')
buf.write('\x05эȧ\x02ᇁᇂ\x05нȟ\x02ᇂᇃ')
buf.write('\x05яȨ\x02ᇃᇄ\x05ћȮ\x02ᇄͮ')
buf.write('\x03\x02\x02\x02ᇅᇆ\x05ѣȲ\x02ᇆᇇ\x05э')
buf.write('ȧ\x02ᇇᇈ\x05ыȦ\x02ᇈᇉ\x05н')
buf.write('ȟ\x02ᇉᇊ\x05ѣȲ\x02ᇊᇋ\x05х')
buf.write('ȣ\x02ᇋᇌ\x05љȭ\x02ᇌᇍ\x05ћ')
buf.write('Ȯ\x02ᇍᇎ\x05љȭ\x02ᇎͰ\x03\x02\x02')
buf.write('\x02ᇏᇐ\x05ѣȲ\x02ᇐᇑ\x05эȧ')
buf.write('\x02ᇑᇒ\x05ыȦ\x02ᇒᇓ\x05пȠ')
buf.write('\x02ᇓᇔ\x05ёȩ\x02ᇔᇕ\x05їȬ')
buf.write('\x02ᇕᇖ\x05нȟ\x02ᇖᇗ\x05љȭ')
buf.write('\x02ᇗᇘ\x05ћȮ\x02ᇘͲ\x03\x02\x02\x02ᇙ')
buf.write('ᇚ\x05ѣȲ\x02ᇚᇛ\x05эȧ\x02ᇛ')
buf.write('ᇜ\x05ыȦ\x02ᇜᇝ\x05яȨ\x02ᇝ')
buf.write('ᇞ\x05еț\x02ᇞᇟ\x05эȧ\x02ᇟ')
buf.write('ᇠ\x05нȟ\x02ᇠᇡ\x05љȭ\x02ᇡ')
buf.write('ᇢ\x05ѓȪ\x02ᇢᇣ\x05еț\x02ᇣ')
buf.write('ᇤ\x05йȝ\x02ᇤᇥ\x05нȟ\x02ᇥ')
buf.write('ᇦ\x05љȭ\x02ᇦʹ\x03\x02\x02\x02ᇧᇨ')
buf.write('\x05ѣȲ\x02ᇨᇩ\x05эȧ\x02ᇩᇪ')
buf.write('\x05ыȦ\x02ᇪᇫ\x05ѓȪ\x02ᇫᇬ')
buf.write('\x05еț\x02ᇬᇭ\x05їȬ\x02ᇭᇮ')
buf.write('\x05љȭ\x02ᇮᇯ\x05нȟ\x02ᇯͶ')
buf.write('\x03\x02\x02\x02ᇰᇱ\x05ѣȲ\x02ᇱᇲ\x05э')
buf.write('ȧ\x02ᇲᇳ\x05ыȦ\x02ᇳᇴ\x05ѓ')
buf.write('Ȫ\x02ᇴᇵ\x05хȣ\x02ᇵ\u0378\x03\x02\x02')
buf.write('\x02ᇶᇷ\x05ѣȲ\x02ᇷᇸ\x05эȧ')
buf.write('\x02ᇸᇹ\x05ыȦ\x02ᇹᇺ\x05ѕȫ')
buf.write('\x02ᇺᇻ\x05ѝȯ\x02ᇻᇼ\x05нȟ')
buf.write('\x02ᇼᇽ\x05їȬ\x02ᇽᇾ\x05ѥȳ')
buf.write('\x02ᇾͺ\x03\x02\x02\x02ᇿሀ\x05ѣȲ\x02ሀ')
buf.write('ሁ\x05эȧ\x02ሁሂ\x05ыȦ\x02ሂ')
buf.write('ሃ\x05їȬ\x02ሃሄ\x05ёȩ\x02ሄ')
buf.write('ህ\x05ёȩ\x02ህሆ\x05ћȮ\x02ሆ')
buf.write('ͼ\x03\x02\x02\x02ሇለ\x05ѣȲ\x02ለሉ')
buf.write('\x05эȧ\x02ሉሊ\x05ыȦ\x02ሊላ')
buf.write('\x05љȭ\x02ላሌ\x05нȟ\x02ሌል')
buf.write('\x05їȬ\x02ልሎ\x05хȣ\x02ሎሏ')
buf.write('\x05еț\x02ሏሐ\x05ыȦ\x02ሐሑ')
buf.write('\x05хȣ\x02ሑሒ\x05ѧȴ\x02ሒሓ')
buf.write('\x05нȟ\x02ሓ;\x03\x02\x02\x02ሔሕ\x05ѣ')
buf.write('Ȳ\x02ሕሖ\x05эȧ\x02ሖሗ\x05ы')
buf.write('Ȧ\x02ሗመ\x05ћȮ\x02መሙ\x05е')
buf.write('ț\x02ሙሚ\x05зȜ\x02ሚማ\x05ы')
buf.write('Ȧ\x02ማሜ\x05нȟ\x02ሜ\u0380\x03\x02\x02')
buf.write('\x02ምሞ\x05ѥȳ\x02ሞሟ\x05нȟ')
buf.write('\x02ሟሠ\x05еț\x02ሠሡ\x05їȬ')
buf.write('\x02ሡ\u0382\x03\x02\x02\x02ሢሣ\x05ѥȳ\x02ሣ')
buf.write('ሤ\x05нȟ\x02ሤሥ\x05љȭ\x02ሥ')
buf.write('΄\x03\x02\x02\x02ሦሧ\x05ѥȳ\x02ሧረ')
buf.write('\x05эȧ\x02ረሩ\x05хȣ\x02ሩሪ')
buf.write('\x05яȨ\x02ሪራ\x05ћȮ\x02ራሬ')
buf.write('\x05нȟ\x02ሬር\x05їȬ\x02ርሮ')
buf.write('\x05џȰ\x02ሮሯ\x05еț\x02ሯሰ')
buf.write('\x05ыȦ\x02ሰሱ\x07a\x02\x02ሱሲ\x05ѝ')
buf.write('ȯ\x02ሲሳ\x05яȨ\x02ሳሴ\x05й')
buf.write('ȝ\x02ሴስ\x05ёȩ\x02ስሶ\x05я')
buf.write('Ȩ\x02ሶሷ\x05љȭ\x02ሷሸ\x05ћ')
buf.write('Ȯ\x02ሸሹ\x05їȬ\x02ሹሺ\x05е')
buf.write('ț\x02ሺሻ\x05хȣ\x02ሻሼ\x05я')
buf.write('Ȩ\x02ሼሽ\x05нȟ\x02ሽሾ\x05л')
buf.write('Ȟ\x02ሾΆ\x03\x02\x02\x02ሿቀ\x05ѧȴ')
buf.write('\x02ቀቁ\x05ёȩ\x02ቁቂ\x05яȨ')
buf.write('\x02ቂቃ\x05нȟ\x02ቃΈ\x03\x02\x02\x02ቄ')
buf.write('ቅ\x05ѓȪ\x02ቅቆ\x05їȬ\x02ቆ')
buf.write('ቇ\x05нȟ\x02ቇቈ\x05лȞ\x02ቈ')
buf.write('\u1249\x05хȣ\x02\u1249ቊ\x05йȝ\x02ቊ')
buf.write('ቋ\x05ћȮ\x02ቋቌ\x05хȣ\x02ቌ')
buf.write('ቍ\x05ёȩ\x02ቍ\u124e\x05яȨ\x02\u124e')
buf.write('Ί\x03\x02\x02\x02\u124fቐ\x05ѓȪ\x02ቐቑ')
buf.write('\x05їȬ\x02ቑቒ\x05нȟ\x02ቒቓ')
buf.write('\x05лȞ\x02ቓቔ\x05хȣ\x02ቔቕ')
buf.write('\x05йȝ\x02ቕቖ\x05ћȮ\x02ቖ\u1257')
buf.write('\x05хȣ\x02\u1257ቘ\x05ёȩ\x02ቘ\u1259')
buf.write('\x05яȨ\x02\u1259ቚ\x07a\x02\x02ቚቛ\x05з')
buf.write('Ȝ\x02ቛቜ\x05ёȩ\x02ቜቝ\x05ѝ')
buf.write('ȯ\x02ቝ\u125e\x05яȨ\x02\u125e\u125f\x05л')
buf.write('Ȟ\x02\u125fበ\x05љȭ\x02በΌ\x03\x02\x02')
buf.write('\x02ቡቢ\x05ѓȪ\x02ቢባ\x05їȬ')
buf.write('\x02ባቤ\x05нȟ\x02ቤብ\x05лȞ')
buf.write('\x02ብቦ\x05хȣ\x02ቦቧ\x05йȝ')
buf.write('\x02ቧቨ\x05ћȮ\x02ቨቩ\x05хȣ')
buf.write('\x02ቩቪ\x05ёȩ\x02ቪቫ\x05яȨ')
buf.write('\x02ቫቬ\x07a\x02\x02ቬቭ\x05йȝ\x02ቭ')
buf.write('ቮ\x05ёȩ\x02ቮቯ\x05љȭ\x02ቯ')
buf.write('ተ\x05ћȮ\x02ተΎ\x03\x02\x02\x02ቱቲ')
buf.write('\x05ѓȪ\x02ቲታ\x05їȬ\x02ታቴ')
buf.write('\x05нȟ\x02ቴት\x05лȞ\x02ትቶ')
buf.write('\x05хȣ\x02ቶቷ\x05йȝ\x02ቷቸ')
buf.write('\x05ћȮ\x02ቸቹ\x05хȣ\x02ቹቺ')
buf.write('\x05ёȩ\x02ቺቻ\x05яȨ\x02ቻቼ')
buf.write('\x07a\x02\x02ቼች\x05лȞ\x02ችቾ\x05н')
buf.write('ȟ\x02ቾቿ\x05ћȮ\x02ቿኀ\x05е')
buf.write('ț\x02ኀኁ\x05хȣ\x02ኁኂ\x05ы')
buf.write('Ȧ\x02ኂኃ\x05љȭ\x02ኃΐ\x03\x02\x02')
buf.write('\x02ኄኅ\x05ѓȪ\x02ኅኆ\x05їȬ')
buf.write('\x02ኆኇ\x05нȟ\x02ኇኈ\x05лȞ')
buf.write('\x02ኈ\u1289\x05хȣ\x02\u1289ኊ\x05йȝ')
buf.write('\x02ኊኋ\x05ћȮ\x02ኋኌ\x05хȣ')
buf.write('\x02ኌኍ\x05ёȩ\x02ኍ\u128e\x05яȨ')
buf.write('\x02\u128e\u128f\x07a\x02\x02\u128fነ\x05ѓȪ\x02ነ')
buf.write('ኑ\x05їȬ\x02ኑኒ\x05ёȩ\x02ኒ')
buf.write('ና\x05зȜ\x02ናኔ\x05еț\x02ኔ')
buf.write('ን\x05зȜ\x02ንኖ\x05хȣ\x02ኖ')
buf.write('ኗ\x05ыȦ\x02ኗኘ\x05хȣ\x02ኘ')
buf.write('ኙ\x05ћȮ\x02ኙኚ\x05ѥȳ\x02ኚ')
buf.write('Β\x03\x02\x02\x02ኛኜ\x05ѓȪ\x02ኜኝ')
buf.write('\x05їȬ\x02ኝኞ\x05нȟ\x02ኞኟ')
buf.write('\x05лȞ\x02ኟአ\x05хȣ\x02አኡ')
buf.write('\x05йȝ\x02ኡኢ\x05ћȮ\x02ኢኣ')
buf.write('\x05хȣ\x02ኣኤ\x05ёȩ\x02ኤእ')
buf.write('\x05яȨ\x02እኦ\x07a\x02\x02ኦኧ\x05љ')
buf.write('ȭ\x02ኧከ\x05нȟ\x02ከኩ\x05ћ')
buf.write('Ȯ\x02ኩΔ\x03\x02\x02\x02ኪካ\x05йȝ')
buf.write('\x02ካኬ\x05ѝȯ\x02ኬክ\x05эȧ')
buf.write('\x02ክኮ\x05нȟ\x02ኮኯ\x07a\x02\x02ኯ')
buf.write('ኰ\x05лȞ\x02ኰ\u12b1\x05хȣ\x02\u12b1')
buf.write('ኲ\x05љȭ\x02ኲኳ\x05ћȮ\x02ኳ')
buf.write('Ζ\x03\x02\x02\x02ኴኵ\x05лȞ\x02ኵ\u12b6')
buf.write('\x05нȟ\x02\u12b6\u12b7\x05яȨ\x02\u12b7ኸ')
buf.write('\x05љȭ\x02ኸኹ\x05нȟ\x02ኹኺ')
buf.write('\x07a\x02\x02ኺኻ\x05їȬ\x02ኻኼ\x05е')
buf.write('ț\x02ኼኽ\x05яȨ\x02ኽኾ\x05щ')
buf.write('ȥ\x02ኾΘ\x03\x02\x02\x02\u12bfዀ\x05ыȦ')
buf.write('\x02ዀ\u12c1\x05хȣ\x02\u12c1ዂ\x05љȭ')
buf.write('\x02ዂዃ\x05ћȮ\x02ዃዄ\x05еț')
buf.write('\x02ዄዅ\x05сȡ\x02ዅ\u12c6\x05сȡ')
buf.write('\x02\u12c6Κ\x03\x02\x02\x02\u12c7ወ\x05ѓȪ\x02ወ')
buf.write('ዉ\x05нȟ\x02ዉዊ\x05їȬ\x02ዊ')
buf.write('ዋ\x05йȝ\x02ዋዌ\x05нȟ\x02ዌ')
buf.write('ው\x05яȨ\x02ውዎ\x05ћȮ\x02ዎ')
buf.write('ዏ\x07a\x02\x02ዏዐ\x05їȬ\x02ዐዑ')
buf.write('\x05еț\x02ዑዒ\x05яȨ\x02ዒዓ')
buf.write('\x05щȥ\x02ዓΜ\x03\x02\x02\x02ዔዕ\x05ѓ')
buf.write('Ȫ\x02ዕዖ\x05нȟ\x02ዖ\u12d7\x05ї')
buf.write('Ȭ\x02\u12d7ዘ\x05йȝ\x02ዘዙ\x05н')
buf.write('ȟ\x02ዙዚ\x05яȨ\x02ዚዛ\x05ћ')
buf.write('Ȯ\x02ዛዜ\x05хȣ\x02ዜዝ\x05ы')
buf.write('Ȧ\x02ዝዞ\x05нȟ\x02ዞዟ\x07a\x02')
buf.write('\x02ዟዠ\x05йȝ\x02ዠዡ\x05ёȩ')
buf.write('\x02ዡዢ\x05яȨ\x02ዢዣ\x05ћȮ')
buf.write('\x02ዣΞ\x03\x02\x02\x02ዤዥ\x05ѓȪ\x02ዥ')
buf.write('ዦ\x05нȟ\x02ዦዧ\x05їȬ\x02ዧ')
buf.write('የ\x05йȝ\x02የዩ\x05нȟ\x02ዩ')
buf.write('ዪ\x05яȨ\x02ዪያ\x05ћȮ\x02ያ')
buf.write('ዬ\x05хȣ\x02ዬይ\x05ыȦ\x02ይ')
buf.write('ዮ\x05нȟ\x02ዮዯ\x07a\x02\x02ዯደ')
buf.write('\x05лȞ\x02ደዱ\x05хȣ\x02ዱዲ')
buf.write('\x05љȭ\x02ዲዳ\x05йȝ\x02ዳΠ')
buf.write('\x03\x02\x02\x02ዴድ\x05їȬ\x02ድዶ\x05е')
buf.write('ț\x02ዶዷ\x05яȨ\x02ዷዸ\x05щ')
buf.write('ȥ\x02ዸ\u03a2\x03\x02\x02\x02ዹዺ\x05еț')
buf.write('\x02ዺዻ\x05џȰ\x02ዻዼ\x05сȡ')
buf.write('\x02ዼΤ\x03\x02\x02\x02ዽዾ\x05йȝ\x02ዾ')
buf.write('ዿ\x05ёȩ\x02ዿጀ\x05їȬ\x02ጀ')
buf.write('ጁ\x05їȬ\x02ጁΦ\x03\x02\x02\x02ጂጃ')
buf.write('\x05ыȦ\x02ጃጄ\x05еț\x02ጄጅ')
buf.write('\x05сȡ\x02ጅΨ\x03\x02\x02\x02ጆጇ\x05ы')
buf.write('Ȧ\x02ጇገ\x05нȟ\x02ገጉ\x05е')
buf.write('ț\x02ጉጊ\x05лȞ\x02ጊΪ\x03\x02\x02')
buf.write('\x02ጋጌ\x05эȧ\x02ጌግ\x05еț')
buf.write('\x02ግጎ\x05ѣȲ\x02ጎά\x03\x02\x02\x02ጏ')
buf.write('ጐ\x05эȧ\x02ጐ\u1311\x05нȟ\x02\u1311')
buf.write('ጒ\x05лȞ\x02ጒጓ\x05хȣ\x02ጓ')
buf.write('ጔ\x05еț\x02ጔጕ\x05яȨ\x02ጕ')
buf.write('ή\x03\x02\x02\x02\u1316\u1317\x05эȧ\x02\u1317ጘ')
buf.write('\x05хȣ\x02ጘጙ\x05яȨ\x02ጙΰ')
buf.write('\x03\x02\x02\x02ጚጛ\x05яȨ\x02ጛጜ\x05ћ')
buf.write('Ȯ\x02ጜጝ\x05хȣ\x02ጝጞ\x05ы')
buf.write('Ȧ\x02ጞጟ\x05нȟ\x02ጟβ\x03\x02\x02')
buf.write('\x02ጠጡ\x05їȬ\x02ጡጢ\x05еț')
buf.write('\x02ጢጣ\x05ћȮ\x02ጣጤ\x05хȣ')
buf.write('\x02ጤጥ\x05ёȩ\x02ጥጦ\x07a\x02\x02ጦ')
buf.write('ጧ\x05ћȮ\x02ጧጨ\x05ёȩ\x02ጨ')
buf.write('ጩ\x07a\x02\x02ጩጪ\x05їȬ\x02ጪጫ')
buf.write('\x05нȟ\x02ጫጬ\x05ѓȪ\x02ጬጭ')
buf.write('\x05ёȩ\x02ጭጮ\x05їȬ\x02ጮጯ')
buf.write('\x05ћȮ\x02ጯδ\x03\x02\x02\x02ጰጱ\x05ї')
buf.write('Ȭ\x02ጱጲ\x05ёȩ\x02ጲጳ\x05ѡ')
buf.write('ȱ\x02ጳጴ\x07a\x02\x02ጴጵ\x05яȨ')
buf.write('\x02ጵጶ\x05ѝȯ\x02ጶጷ\x05эȧ')
buf.write('\x02ጷጸ\x05зȜ\x02ጸጹ\x05нȟ')
buf.write('\x02ጹጺ\x05їȬ\x02ጺζ\x03\x02\x02\x02ጻ')
buf.write('ጼ\x05љȭ\x02ጼጽ\x05ѝȯ\x02ጽ')
buf.write('ጾ\x05эȧ\x02ጾθ\x03\x02\x02\x02ጿፀ')
buf.write('\x05џȰ\x02ፀፁ\x05еț\x02ፁፂ')
buf.write('\x05їȬ\x02ፂፃ\x05хȣ\x02ፃፄ')
buf.write('\x05еț\x02ፄፅ\x05яȨ\x02ፅፆ')
buf.write('\x05йȝ\x02ፆፇ\x05нȟ\x02ፇκ')
buf.write('\x03\x02\x02\x02ፈፉ\x05їȬ\x02ፉፊ\x05н')
buf.write('ȟ\x02ፊፋ\x05сȡ\x02ፋፌ\x05ї')
buf.write('Ȭ\x02ፌፍ\x07a\x02\x02ፍμ\x03\x02\x02\x02ፎ')
buf.write('ፏ\x05љȭ\x02ፏፐ\x05ћȮ\x02ፐ')
buf.write('ፑ\x05лȞ\x02ፑፒ\x05лȞ\x02ፒ')
buf.write('ፓ\x05нȟ\x02ፓፔ\x05џȰ\x02ፔ')
buf.write('ξ\x03\x02\x02\x02ፕፖ\x05џȰ\x02ፖፗ')
buf.write('\x05еț\x02ፗፘ\x05їȬ\x02ፘፙ')
buf.write('\x07a\x02\x02ፙπ\x03\x02\x02\x02ፚ\u135b\x05йȝ')
buf.write('\x02\u135b\u135c\x05ёȩ\x02\u135c፝\x05џȰ')
buf.write('\x02፝፞\x05еț\x02፞፟\x05їȬ')
buf.write('\x02፟፠\x07a\x02\x02፠ς\x03\x02\x02\x02፡።')
buf.write('\x05яȨ\x02።፩\x07)\x02\x02፣፨\n\x02\x02')
buf.write('\x02፤፥\x07)\x02\x02፥፨\x07)\x02\x02፦፨\x05')
buf.write('Эȗ\x02፧፣\x03\x02\x02\x02፧፤\x03\x02\x02\x02')
buf.write('፧፦\x03\x02\x02\x02፨፫\x03\x02\x02\x02፩፧\x03')
buf.write('\x02\x02\x02፩፪\x03\x02\x02\x02፪፬\x03\x02\x02\x02፫፩')
buf.write('\x03\x02\x02\x02፬፭\x07)\x02\x02፭τ\x03\x02\x02\x02፮')
buf.write('፷\x05зȜ\x02፯፳\x07)\x02\x02፰፲')
buf.write('\x0423\x02፱፰\x03\x02\x02\x02፲፵\x03\x02\x02\x02፳')
buf.write('፱\x03\x02\x02\x02፳፴\x03\x02\x02\x02፴፶\x03\x02\x02\x02')
buf.write('፵፳\x03\x02\x02\x02፶፸\x07)\x02\x02፷፯\x03')
buf.write('\x02\x02\x02፸፹\x03\x02\x02\x02፹፷\x03\x02\x02\x02፹፺')
buf.write('\x03\x02\x02\x02፺φ\x03\x02\x02\x02፻ᎄ\x05ѣȲ')
buf.write('\x02፼ᎀ\x07)\x02\x02\u137d\u137f\t\x03\x02\x02\u137e\u137d')
buf.write(
'\x03\x02\x02\x02\u137fᎂ\x03\x02\x02\x02ᎀ\u137e\x03\x02\x02\x02ᎀ')
buf.write('ᎁ\x03\x02\x02\x02ᎁᎃ\x03\x02\x02\x02ᎂᎀ\x03\x02\x02\x02')
buf.write('ᎃᎅ\x07)\x02\x02ᎄ፼\x03\x02\x02\x02ᎅᎆ\x03')
buf.write('\x02\x02\x02ᎆᎄ\x03\x02\x02\x02ᎆᎇ\x03\x02\x02\x02ᎇψ')
buf.write('\x03\x02\x02\x02ᎈᎉ\x070\x02\x02ᎉᎊ\x070\x02\x02ᎊ')
buf.write('ϊ\x03\x02\x02\x02ᎋᎌ\x070\x02\x02ᎌό\x03\x02\x02')
buf.write('\x02ᎍᎎ\x05УȒ\x02ᎎώ\x03\x02\x02\x02ᎏ')
buf.write('᎘\x05Хȓ\x02᎐᎒\t\x04\x02\x02᎑᎓')
buf.write('\t\x05\x02\x02᎒᎑\x03\x02\x02\x02᎒᎓\x03\x02\x02\x02᎓')
buf.write('᎖\x03\x02\x02\x02᎔᎗\x05Хȓ\x02᎕᎗')
buf.write('\x05УȒ\x02᎖᎔\x03\x02\x02\x02᎖᎕\x03\x02\x02')
buf.write('\x02᎗᎙\x03\x02\x02\x02᎘᎐\x03\x02\x02\x02᎘᎙')
buf.write('\x03\x02\x02\x02᎙\u139c\x03\x02\x02\x02\u139a\u139d\x05лȞ')
buf.write(
'\x02\u139b\u139d\x05пȠ\x02\u139c\u139a\x03\x02\x02\x02\u139c')
buf.write(
'\u139b\x03\x02\x02\x02\u139c\u139d\x03\x02\x02\x02\u139dϐ\x03\x02\x02\x02'
)
buf.write('\u139eᎥ\x07)\x02\x02\u139fᎤ\n\x02\x02\x02ᎠᎡ\x07')
buf.write(')\x02\x02ᎡᎤ\x07)\x02\x02ᎢᎤ\x05Эȗ\x02Ꭳ')
buf.write('\u139f\x03\x02\x02\x02ᎣᎠ\x03\x02\x02\x02ᎣᎢ\x03\x02\x02\x02')
buf.write('ᎤᎧ\x03\x02\x02\x02ᎥᎣ\x03\x02\x02\x02ᎥᎦ\x03')
buf.write('\x02\x02\x02ᎦᎨ\x03\x02\x02\x02ᎧᎥ\x03\x02\x02\x02ᎨᎩ')
buf.write('\x07)\x02\x02Ꭹϒ\x03\x02\x02\x02ᎪᎯ\x05ѕȫ')
buf.write('\x02ᎫᎰ\x05ϗǬ\x02ᎬᎰ\x05ϙǭ')
buf.write('\x02ᎭᎰ\x05ϛǮ\x02ᎮᎰ\x05ϝǯ')
buf.write('\x02ᎯᎫ\x03\x02\x02\x02ᎯᎬ\x03\x02\x02\x02ᎯᎭ')
buf.write('\x03\x02\x02\x02ᎯᎮ\x03\x02\x02\x02ᎰᎱ\x03\x02\x02\x02Ꮁ')
buf.write('Ꮂ\x08Ǫ\x02\x02Ꮂϔ\x03\x02\x02\x02ᎳᎴ\x07)')
buf.write('\x02\x02Ꮄϖ\x03\x02\x02\x02ᎵᎶ\x05ϕǫ\x02Ꮆ')
buf.write('Ꮊ\x07>\x02\x02ᎷᎹ\x0b\x02\x02\x02ᎸᎷ\x03\x02\x02\x02')
buf.write('ᎹᎼ\x03\x02\x02\x02ᎺᎻ\x03\x02\x02\x02ᎺᎸ\x03')
buf.write('\x02\x02\x02ᎻᎽ\x03\x02\x02\x02ᎼᎺ\x03\x02\x02\x02ᎽᎾ')
buf.write('\x07@\x02\x02ᎾᎿ\x05ϕǫ\x02ᎿϘ\x03\x02\x02')
buf.write('\x02ᏀᏁ\x05ϕǫ\x02ᏁᏅ\x07}\x02\x02Ꮒ')
buf.write('Ꮔ\x0b\x02\x02\x02ᏃᏂ\x03\x02\x02\x02ᏄᏇ\x03\x02\x02')
buf.write('\x02ᏅᏆ\x03\x02\x02\x02ᏅᏃ\x03\x02\x02\x02ᏆᏈ')
buf.write('\x03\x02\x02\x02ᏇᏅ\x03\x02\x02\x02ᏈᏉ\x07\x7f\x02\x02Ꮙ')
buf.write('Ꮚ\x05ϕǫ\x02ᏊϚ\x03\x02\x02\x02ᏋᏌ')
buf.write('\x05ϕǫ\x02ᏌᏐ\x07]\x02\x02ᏍᏏ\x0b\x02\x02')
buf.write('\x02ᏎᏍ\x03\x02\x02\x02ᏏᏒ\x03\x02\x02\x02ᏐᏑ')
buf.write('\x03\x02\x02\x02ᏐᏎ\x03\x02\x02\x02ᏑᏓ\x03\x02\x02\x02Ꮢ')
buf.write('Ꮠ\x03\x02\x02\x02ᏓᏔ\x07_\x02\x02ᏔᏕ\x05ϕ')
buf.write('ǫ\x02ᏕϜ\x03\x02\x02\x02ᏖᏗ\x05ϕǫ')
buf.write('\x02ᏗᏛ\x07*\x02\x02ᏘᏚ\x0b\x02\x02\x02ᏙᏘ')
buf.write('\x03\x02\x02\x02ᏚᏝ\x03\x02\x02\x02ᏛᏜ\x03\x02\x02\x02Ꮫ')
buf.write('Ꮩ\x03\x02\x02\x02ᏜᏞ\x03\x02\x02\x02ᏝᏛ\x03\x02\x02\x02')
buf.write('ᏞᏟ\x07+\x02\x02ᏟᏠ\x05ϕǫ\x02Ꮰ')
buf.write('Ϟ\x03\x02\x02\x02ᏡᏢ\n\x06\x02\x02ᏢϠ\x03\x02\x02\x02')
buf.write('ᏣᏧ\x07$\x02\x02ᏤᏨ\n\x07\x02\x02ᏥᏦ\x07')
buf.write('$\x02\x02ᏦᏨ\x07$\x02\x02ᏧᏤ\x03\x02\x02\x02ᏧᏥ')
buf.write('\x03\x02\x02\x02ᏨᏩ\x03\x02\x02\x02ᏩᏧ\x03\x02\x02\x02Ꮹ')
buf.write('Ꮺ\x03\x02\x02\x02ᏪᏫ\x03\x02\x02\x02ᏫᏬ\x07$\x02\x02')
buf.write("ᏬϢ\x03\x02\x02\x02ᏭᏮ\x07'\x02\x02ᏮϤ\x03")
buf.write('\x02\x02\x02ᏯᏰ\x07(\x02\x02ᏰϦ\x03\x02\x02\x02ᏱᏲ')
buf.write('\x07*\x02\x02ᏲϨ\x03\x02\x02\x02ᏳᏴ\x07+\x02\x02ᏴϪ')
buf.write(
'\x03\x02\x02\x02Ᏽ\u13f6\x07,\x02\x02\u13f6\u13f7\x07,\x02\x02\u13f7Ϭ'
)
buf.write('\x03\x02\x02\x02ᏸᏹ\x07,\x02\x02ᏹϮ\x03\x02\x02\x02ᏺ')
buf.write('ᏻ\x07-\x02\x02ᏻϰ\x03\x02\x02\x02ᏼᏽ\x07/\x02\x02ᏽ')
buf.write(
'ϲ\x03\x02\x02\x02\u13fe\u13ff\x07.\x02\x02\u13ffϴ\x03\x02\x02\x02'
)
buf.write('᐀ᐁ\x071\x02\x02ᐁ϶\x03\x02\x02\x02ᐂᐃ')
buf.write('\x07B\x02\x02ᐃϸ\x03\x02\x02\x02ᐄᐅ\x07<\x02\x02ᐅᐆ')
buf.write('\x07?\x02\x02ᐆϺ\x03\x02\x02\x02ᐇᐈ\x07<\x02\x02ᐈᐍ')
buf.write('\x05Сȑ\x02ᐉᐌ\x05Сȑ\x02ᐊᐌ')
buf.write('\t\x08\x02\x02ᐋᐉ\x03\x02\x02\x02ᐋᐊ\x03\x02\x02\x02ᐌ')
buf.write('ᐏ\x03\x02\x02\x02ᐍᐋ\x03\x02\x02\x02ᐍᐎ\x03\x02\x02\x02')
buf.write('ᐎᐖ\x03\x02\x02\x02ᐏᐍ\x03\x02\x02\x02ᐐᐑ\x07')
buf.write('<\x02\x02ᐑᐖ\x05ϡDZ\x02ᐒᐓ\x07<\x02\x02ᐓ')
buf.write('ᐖ\x05ύǧ\x02ᐔᐖ\x05Бȉ\x02ᐕ')
buf.write('ᐇ\x03\x02\x02\x02ᐕᐐ\x03\x02\x02\x02ᐕᐒ\x03\x02\x02\x02')
buf.write('ᐕᐔ\x03\x02\x02\x02ᐖϼ\x03\x02\x02\x02ᐗᐘ\x07')
buf.write('<\x02\x02ᐘϾ\x03\x02\x02\x02ᐙᐚ\x07=\x02\x02ᐚЀ')
buf.write('\x03\x02\x02\x02ᐛᐜ\x07>\x02\x02ᐜᐝ\x07?\x02\x02ᐝЂ')
buf.write('\x03\x02\x02\x02ᐞᐟ\x07>\x02\x02ᐟЄ\x03\x02\x02\x02ᐠ')
buf.write('ᐡ\x07@\x02\x02ᐡᐢ\x07?\x02\x02ᐢІ\x03\x02\x02\x02ᐣ')
buf.write('ᐤ\x07#\x02\x02ᐤᐬ\x07?\x02\x02ᐥᐦ\x07>\x02\x02ᐦ')
buf.write('ᐬ\x07@\x02\x02ᐧᐨ\x07`\x02\x02ᐨᐬ\x07?\x02\x02ᐩ')
buf.write('ᐪ\x07\x80\x02\x02ᐪᐬ\x07?\x02\x02ᐫᐣ\x03\x02')
buf.write('\x02\x02ᐫᐥ\x03\x02\x02\x02ᐫᐧ\x03\x02\x02\x02ᐫᐩ')
buf.write('\x03\x02\x02\x02ᐬЈ\x03\x02\x02\x02ᐭᐮ\x07`\x02\x02ᐮ')
buf.write('Њ\x03\x02\x02\x02ᐯᐰ\x07\x80\x02\x02ᐰЌ\x03\x02')
buf.write('\x02\x02ᐱᐲ\x07#\x02\x02ᐲЎ\x03\x02\x02\x02ᐳᐴ')
buf.write('\x07@\x02\x02ᐴА\x03\x02\x02\x02ᐵᐶ\x07A\x02\x02ᐶВ')
buf.write('\x03\x02\x02\x02ᐷᐸ\x07~\x02\x02ᐸᐹ\x07~\x02\x02ᐹД')
buf.write('\x03\x02\x02\x02ᐺᐻ\x07~\x02\x02ᐻЖ\x03\x02\x02\x02ᐼ')
buf.write('ᐽ\x07?\x02\x02ᐽИ\x03\x02\x02\x02ᐾᐿ\x07]\x02\x02ᐿ')
buf.write('К\x03\x02\x02\x02ᑀᑁ\x07_\x02\x02ᑁМ\x03\x02\x02\x02')
buf.write('ᑂᑃ\x07a\x02\x02ᑃО\x03\x02\x02\x02ᑄᑆ\t')
buf.write('\t\x02\x02ᑅᑄ\x03\x02\x02\x02ᑆᑇ\x03\x02\x02\x02ᑇᑅ')
buf.write('\x03\x02\x02\x02ᑇᑈ\x03\x02\x02\x02ᑈᑉ\x03\x02\x02\x02ᑉ')
buf.write('ᑊ\x08Ȑ\x03\x02ᑊР\x03\x02\x02\x02ᑋᑌ\t\n')
buf.write('\x02\x02ᑌТ\x03\x02\x02\x02ᑍᑏ\x042;\x02ᑎᑍ')
buf.write('\x03\x02\x02\x02ᑏᑐ\x03\x02\x02\x02ᑐᑎ\x03\x02\x02\x02ᑐ')
buf.write('ᑑ\x03\x02\x02\x02ᑑФ\x03\x02\x02\x02ᑒᑔ\x05ύ')
buf.write('ǧ\x02ᑓᑒ\x03\x02\x02\x02ᑔᑗ\x03\x02\x02\x02ᑕ')
buf.write('ᑓ\x03\x02\x02\x02ᑕᑖ\x03\x02\x02\x02ᑖᑙ\x03\x02\x02\x02')
buf.write('ᑗᑕ\x03\x02\x02\x02ᑘᑚ\x070\x02\x02ᑙᑘ')
buf.write('\x03\x02\x02\x02ᑙᑚ\x03\x02\x02\x02ᑚᑜ\x03\x02\x02\x02ᑛ')
buf.write('ᑝ\x05ύǧ\x02ᑜᑛ\x03\x02\x02\x02ᑝᑞ')
buf.write('\x03\x02\x02\x02ᑞᑜ\x03\x02\x02\x02ᑞᑟ\x03\x02\x02\x02ᑟ')
buf.write('Ц\x03\x02\x02\x02ᑠᑡ\x07/\x02\x02ᑡᑢ\x07/\x02\x02ᑢ')
buf.write('ᑦ\x03\x02\x02\x02ᑣᑥ\n\x0b\x02\x02ᑤᑣ\x03\x02\x02')
buf.write('\x02ᑥᑨ\x03\x02\x02\x02ᑦᑤ\x03\x02\x02\x02ᑦᑧ')
buf.write('\x03\x02\x02\x02ᑧᑫ\x03\x02\x02\x02ᑨᑦ\x03\x02\x02\x02ᑩ')
buf.write('ᑬ\x05Эȗ\x02ᑪᑬ\x07\x02\x02\x03ᑫᑩ')
buf.write('\x03\x02\x02\x02ᑫᑪ\x03\x02\x02\x02ᑬᑭ\x03\x02\x02\x02ᑭ')
buf.write('ᑮ\x08Ȕ\x04\x02ᑮШ\x03\x02\x02\x02ᑯᑰ\x071')
buf.write('\x02\x02ᑰᑱ\x07,\x02\x02ᑱᑵ\x03\x02\x02\x02ᑲᑴ')
buf.write('\x0b\x02\x02\x02ᑳᑲ\x03\x02\x02\x02ᑴᑷ\x03\x02\x02\x02ᑵ')
buf.write('ᑶ\x03\x02\x02\x02ᑵᑳ\x03\x02\x02\x02ᑶᑸ\x03\x02\x02\x02')
buf.write('ᑷᑵ\x03\x02\x02\x02ᑸᑹ\x07,\x02\x02ᑹᑺ\x07')
buf.write('1\x02\x02ᑺᑻ\x03\x02\x02\x02ᑻᑼ\x08ȕ\x04\x02ᑼ')
buf.write('Ъ\x03\x02\x02\x02ᑽᑾ\x07r\x02\x02ᑾᑿ\x07t\x02\x02ᑿ')
buf.write('ᒀ\x07q\x02\x02ᒀᒁ\x07o\x02\x02ᒁᒂ\x07r\x02\x02ᒂ')
buf.write('ᒃ\x07v\x02\x02ᒃᒄ\x03\x02\x02\x02ᒄᒈ\x05Я')
buf.write('Ș\x02ᒅᒇ\n\x0b\x02\x02ᒆᒅ\x03\x02\x02\x02ᒇ')
buf.write('ᒊ\x03\x02\x02\x02ᒈᒆ\x03\x02\x02\x02ᒈᒉ\x03\x02\x02\x02')
buf.write('ᒉᒍ\x03\x02\x02\x02ᒊᒈ\x03\x02\x02\x02ᒋᒎ\x05')
buf.write('Эȗ\x02ᒌᒎ\x07\x02\x02\x03ᒍᒋ\x03\x02\x02\x02')
buf.write('ᒍᒌ\x03\x02\x02\x02ᒎЬ\x03\x02\x02\x02ᒏᒑ\x07')
buf.write('\x0f\x02\x02ᒐᒏ\x03\x02\x02\x02ᒐᒑ\x03\x02\x02\x02ᒑ')
buf.write('ᒒ\x03\x02\x02\x02ᒒᒓ\x07\x0c\x02\x02ᒓЮ\x03\x02\x02\x02')
buf.write('ᒔᒕ\t\x0c\x02\x02ᒕа\x03\x02\x02\x02ᒖᒛ\x05')
buf.write('Сȑ\x02ᒗᒚ\x05Сȑ\x02ᒘᒚ')
buf.write('\t\r\x02\x02ᒙᒗ\x03\x02\x02\x02ᒙᒘ\x03\x02\x02\x02ᒚ')
buf.write('ᒝ\x03\x02\x02\x02ᒛᒙ\x03\x02\x02\x02ᒛᒜ\x03\x02\x02\x02')
buf.write('ᒜв\x03\x02\x02\x02ᒝᒛ\x03\x02\x02\x02ᒞᒟ\x07')
buf.write('B\x02\x02ᒟᒠ\x07#\x02\x02ᒠᒡ\x03\x02\x02\x02ᒡᒢ')
buf.write('\x08Ț\x04\x02ᒢд\x03\x02\x02\x02ᒣᒤ\t\x0e\x02\x02')
buf.write('ᒤж\x03\x02\x02\x02ᒥᒦ\t\x0f\x02\x02ᒦи')
buf.write('\x03\x02\x02\x02ᒧᒨ\t\x10\x02\x02ᒨк\x03\x02\x02\x02ᒩ')
buf.write('ᒪ\t\x11\x02\x02ᒪм\x03\x02\x02\x02ᒫᒬ\t\x04\x02')
buf.write('\x02ᒬо\x03\x02\x02\x02ᒭᒮ\t\x12\x02\x02ᒮр')
buf.write('\x03\x02\x02\x02ᒯᒰ\t\x13\x02\x02ᒰт\x03\x02\x02\x02ᒱ')
buf.write('ᒲ\t\x14\x02\x02ᒲф\x03\x02\x02\x02ᒳᒴ\t\x15\x02')
buf.write('\x02ᒴц\x03\x02\x02\x02ᒵᒶ\t\x16\x02\x02ᒶш')
buf.write('\x03\x02\x02\x02ᒷᒸ\t\x17\x02\x02ᒸъ\x03\x02\x02\x02ᒹ')
buf.write('ᒺ\t\x18\x02\x02ᒺь\x03\x02\x02\x02ᒻᒼ\t\x19\x02')
buf.write('\x02ᒼю\x03\x02\x02\x02ᒽᒾ\t\x1a\x02\x02ᒾѐ')
buf.write('\x03\x02\x02\x02ᒿᓀ\t\x1b\x02\x02ᓀђ\x03\x02\x02\x02ᓁ')
buf.write('ᓂ\t\x1c\x02\x02ᓂє\x03\x02\x02\x02ᓃᓄ\t\x1d\x02')
buf.write('\x02ᓄі\x03\x02\x02\x02ᓅᓆ\t\x1e\x02\x02ᓆј')
buf.write('\x03\x02\x02\x02ᓇᓈ\t\x1f\x02\x02ᓈњ\x03\x02\x02\x02ᓉ')
buf.write('ᓊ\t \x02\x02ᓊќ\x03\x02\x02\x02ᓋᓌ\t!\x02\x02ᓌ')
buf.write('ў\x03\x02\x02\x02ᓍᓎ\t"\x02\x02ᓎѠ\x03\x02\x02\x02')
buf.write('ᓏᓐ\t#\x02\x02ᓐѢ\x03\x02\x02\x02ᓑᓒ\t')
buf.write('$\x02\x02ᓒѤ\x03\x02\x02\x02ᓓᓔ\t%\x02\x02ᓔѦ')
buf.write("\x03\x02\x02\x02ᓕᓖ\t&\x02\x02ᓖѨ\x03\x02\x02\x02'\x02፧")
buf.write('፩፳፹ᎀᎆ᎒᎖᎘\u139c')
buf.write('ᎣᎥᎯᎺᏅᏐᏛᏧᏩ')
buf.write('ᐋᐍᐕᐫᑇᑐᑕᑙᑞ')
buf.write('ᑦᑫᑵᒈᒍᒐᒙᒛ\x05\tǪ')
buf.write('\x02\x08\x02\x02\x02\x03\x02')
return buf.getvalue()
class PlSqlLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)]
T__0 = 1
A_LETTER = 2
ADD = 3
AFTER = 4
AGENT = 5
AGGREGATE = 6
ALL = 7
ALTER = 8
ANALYZE = 9
AND = 10
ANY = 11
ARRAY = 12
AS = 13
ASSUME = 14
ASSERT = 15
ASC = 16
ASSOCIATE = 17
AT = 18
ATTRIBUTE = 19
AUDIT = 20
AUTHID = 21
AUTO = 22
AUTOMATIC = 23
AUTONOMOUS_TRANSACTION = 24
BATCH = 25
BEFORE = 26
BEGIN = 27
BETWEEN = 28
BFILE = 29
BINARY_DOUBLE = 30
BINARY_FLOAT = 31
BINARY_INTEGER = 32
BLOB = 33
BLOCK = 34
BODY = 35
BOOLEAN = 36
BOTH = 37
BREADTH = 38
BULK = 39
BY = 40
BYTE = 41
C_LETTER = 42
CACHE = 43
CALL = 44
CANONICAL = 45
CASCADE = 46
CASE = 47
CAST = 48
CHAR = 49
CHAR_CS = 50
CHARACTER = 51
CHECK = 52
CHR = 53
CLOB = 54
CLOSE = 55
CLUSTER = 56
COLLECT = 57
COLUMNS = 58
COMMENT = 59
COMMIT = 60
COMMITTED = 61
COMPATIBILITY = 62
COMPILE = 63
COMPOUND = 64
CONNECT = 65
CONNECT_BY_ROOT = 66
CONSTANT = 67
CONSTRAINT = 68
CONSTRAINTS = 69
CONSTRUCTOR = 70
CONTENT = 71
CONTEXT = 72
CONTINUE = 73
CONVERT = 74
CORRUPT_XID = 75
CORRUPT_XID_ALL = 76
COST = 77
COUNT = 78
CREATE = 79
CROSS = 80
CUBE = 81
CURRENT = 82
CURRENT_USER = 83
CURSOR = 84
CUSTOMDATUM = 85
CYCLE = 86
DATA = 87
DATABASE = 88
DATE = 89
DAY = 90
DB_ROLE_CHANGE = 91
DBTIMEZONE = 92
DDL = 93
DEBUG = 94
DEC = 95
DECIMAL = 96
DECLARE = 97
DECOMPOSE = 98
DECREMENT = 99
DEFAULT = 100
DEFAULTS = 101
DEFERRED = 102
DEFINER = 103
DELETE = 104
DEPTH = 105
DESC = 106
DETERMINISTIC = 107
DIMENSION = 108
DISABLE = 109
DISASSOCIATE = 110
DISTINCT = 111
DOCUMENT = 112
DOUBLE = 113
DROP = 114
DSINTERVAL_UNCONSTRAINED = 115
EACH = 116
ELEMENT = 117
ELSE = 118
ELSIF = 119
EMPTY = 120
ENABLE = 121
ENCODING = 122
END = 123
ENTITYESCAPING = 124
ERR = 125
ERRORS = 126
ESCAPE = 127
EVALNAME = 128
EXCEPT = 129
EXCEPTION = 130
EXCEPTION_INIT = 131
EXCEPTIONS = 132
EXCLUDE = 133
EXCLUSIVE = 134
EXECUTE = 135
EXISTS = 136
EXIT = 137
EXPLAIN = 138
EXTERNAL = 139
EXTRACT = 140
FAILURE = 141
FALSE = 142
FETCH = 143
FINAL = 144
FIRST = 145
FIRST_VALUE = 146
FLOAT = 147
FOLLOWING = 148
FOLLOWS = 149
FOR = 150
FORALL = 151
FORCE = 152
FROM = 153
FULL = 154
FUNCTION = 155
GOTO = 156
GRANT = 157
GROUP = 158
GROUPING = 159
HASH = 160
HAVING = 161
HIDE = 162
HOUR = 163
IF = 164
IGNORE = 165
IMMEDIATE = 166
IN = 167
INCLUDE = 168
INCLUDING = 169
INCREMENT = 170
INDENT = 171
INDEX = 172
INDEXED = 173
INDICATOR = 174
INDICES = 175
INFINITE = 176
INLINE = 177
INNER = 178
INOUT = 179
INSERT = 180
INSTANTIABLE = 181
INSTEAD = 182
INT = 183
INTEGER = 184
INTERSECT = 185
INTERVAL = 186
INTO = 187
INVALIDATE = 188
IS = 189
ISOLATION = 190
ITERATE = 191
JAVA = 192
JOIN = 193
KEEP = 194
LANGUAGE = 195
LAST = 196
LAST_VALUE = 197
LEADING = 198
LEFT = 199
LEVEL = 200
LIBRARY = 201
LIKE = 202
LIKE2 = 203
LIKE4 = 204
LIKEC = 205
LIMIT = 206
LOCAL = 207
LOCK = 208
LOCKED = 209
LOG = 210
LOGOFF = 211
LOGON = 212
LONG = 213
LOOP = 214
MAIN = 215
MAP = 216
MATCHED = 217
MAXVALUE = 218
MEASURES = 219
MEMBER = 220
MERGE = 221
MINUS = 222
MINUTE = 223
MINVALUE = 224
MLSLABEL = 225
MODE = 226
MODEL = 227
MODIFY = 228
MONTH = 229
MULTISET = 230
NAME = 231
NAN = 232
NATURAL = 233
NATURALN = 234
NAV = 235
NCHAR = 236
NCHAR_CS = 237
NCLOB = 238
NESTED = 239
NEW = 240
NO = 241
NOAUDIT = 242
NOCACHE = 243
NOCOPY = 244
NOCYCLE = 245
NOENTITYESCAPING = 246
NOMAXVALUE = 247
NOMINVALUE = 248
NONE = 249
NOORDER = 250
NOSCHEMACHECK = 251
NOT = 252
NOWAIT = 253
NULL = 254
NULLS = 255
NUMBER = 256
NUMERIC = 257
NVARCHAR2 = 258
OBJECT = 259
OF = 260
OFF = 261
OID = 262
OLD = 263
ON = 264
ONLY = 265
OPEN = 266
OPTION = 267
OR = 268
ORADATA = 269
ORDER = 270
ORDINALITY = 271
OSERROR = 272
OUT = 273
OUTER = 274
OVER = 275
OVERRIDING = 276
PACKAGE = 277
PARALLEL_ENABLE = 278
PARAMETERS = 279
PARENT = 280
PARTITION = 281
PASSING = 282
PATH = 283
PERCENT_ROWTYPE = 284
PERCENT_TYPE = 285
PIPELINED = 286
PIVOT = 287
PLAN = 288
PLS_INTEGER = 289
POSITIVE = 290
POSITIVEN = 291
PRAGMA = 292
PRECEDING = 293
PRECISION = 294
PRESENT = 295
PRIOR = 296
PROCEDURE = 297
RAISE = 298
RANGE = 299
RAW = 300
READ = 301
REAL = 302
RECORD = 303
REF = 304
REFERENCE = 305
REFERENCING = 306
REJECT = 307
RELIES_ON = 308
RENAME = 309
REPLACE = 310
RESPECT = 311
RESTRICT_REFERENCES = 312
RESULT = 313
RESULT_CACHE = 314
RETURN = 315
RETURNING = 316
REUSE = 317
REVERSE = 318
REVOKE = 319
RIGHT = 320
ROLLBACK = 321
ROLLUP = 322
ROW = 323
ROWID = 324
ROWS = 325
RULES = 326
SAMPLE = 327
SAVE = 328
SAVEPOINT = 329
SCHEMA = 330
SCHEMACHECK = 331
SCN = 332
SEARCH = 333
SECOND = 334
SEED = 335
SEGMENT = 336
SELECT = 337
SELF = 338
SEQUENCE = 339
SEQUENTIAL = 340
SERIALIZABLE = 341
SERIALLY_REUSABLE = 342
SERVERERROR = 343
SESSIONTIMEZONE = 344
SET = 345
SETS = 346
SETTINGS = 347
SHARE = 348
SHOW = 349
SHUTDOWN = 350
SIBLINGS = 351
SIGNTYPE = 352
SIMPLE_INTEGER = 353
SINGLE = 354
SIZE = 355
SKIP_ = 356
SMALLINT = 357
SNAPSHOT = 358
SOME = 359
SPECIFICATION = 360
SQLDATA = 361
SQLERROR = 362
STANDALONE = 363
START = 364
STARTUP = 365
STATEMENT = 366
STATEMENT_ID = 367
STATIC = 368
STATISTICS = 369
STRING = 370
SUBMULTISET = 371
SUBPARTITION = 372
SUBSTITUTABLE = 373
SUBTYPE = 374
SUCCESS = 375
SUSPEND = 376
TABLE = 377
THE = 378
THEN = 379
TIME = 380
TIMESTAMP = 381
TIMESTAMP_LTZ_UNCONSTRAINED = 382
TIMESTAMP_TZ_UNCONSTRAINED = 383
TIMESTAMP_UNCONSTRAINED = 384
TIMEZONE_ABBR = 385
TIMEZONE_HOUR = 386
TIMEZONE_MINUTE = 387
TIMEZONE_REGION = 388
TO = 389
TRAILING = 390
TRANSACTION = 391
TRANSLATE = 392
TREAT = 393
TRIGGER = 394
TRIM = 395
TRUE = 396
TRUNCATE = 397
TYPE = 398
UNBOUNDED = 399
UNDER = 400
UNION = 401
UNIQUE = 402
UNLIMITED = 403
UNPIVOT = 404
UNTIL = 405
UPDATE = 406
UPDATED = 407
UPSERT = 408
UROWID = 409
USE = 410
USING = 411
VALIDATE = 412
VALUE = 413
VALUES = 414
VARCHAR = 415
VARCHAR2 = 416
VARIABLE = 417
VARRAY = 418
VARYING = 419
VERSION = 420
VERSIONS = 421
WAIT = 422
WARNING = 423
WELLFORMED = 424
WHEN = 425
WHENEVER = 426
WHERE = 427
WHILE = 428
WITH = 429
WITHIN = 430
WORK = 431
WRITE = 432
XML = 433
XMLAGG = 434
XMLATTRIBUTES = 435
XMLCAST = 436
XMLCOLATTVAL = 437
XMLELEMENT = 438
XMLEXISTS = 439
XMLFOREST = 440
XMLNAMESPACES = 441
XMLPARSE = 442
XMLPI = 443
XMLQUERY = 444
XMLROOT = 445
XMLSERIALIZE = 446
XMLTABLE = 447
YEAR = 448
YES = 449
YMINTERVAL_UNCONSTRAINED = 450
ZONE = 451
PREDICTION = 452
PREDICTION_BOUNDS = 453
PREDICTION_COST = 454
PREDICTION_DETAILS = 455
PREDICTION_PROBABILITY = 456
PREDICTION_SET = 457
CUME_DIST = 458
DENSE_RANK = 459
LISTAGG = 460
PERCENT_RANK = 461
PERCENTILE_CONT = 462
PERCENTILE_DISC = 463
RANK = 464
AVG = 465
CORR = 466
LAG = 467
LEAD = 468
MAX = 469
MEDIAN = 470
MIN = 471
NTILE = 472
RATIO_TO_REPORT = 473
ROW_NUMBER = 474
SUM = 475
VARIANCE = 476
REGR_ = 477
STDDEV = 478
VAR_ = 479
COVAR_ = 480
NATIONAL_CHAR_STRING_LIT = 481
BIT_STRING_LIT = 482
HEX_STRING_LIT = 483
DOUBLE_PERIOD = 484
PERIOD = 485
UNSIGNED_INTEGER = 486
APPROXIMATE_NUM_LIT = 487
CHAR_STRING = 488
DELIMITED_ID = 489
PERCENT = 490
AMPERSAND = 491
LEFT_PAREN = 492
RIGHT_PAREN = 493
DOUBLE_ASTERISK = 494
ASTERISK = 495
PLUS_SIGN = 496
MINUS_SIGN = 497
COMMA = 498
SOLIDUS = 499
AT_SIGN = 500
ASSIGN_OP = 501
BINDVAR = 502
COLON = 503
SEMICOLON = 504
LESS_THAN_OR_EQUALS_OP = 505
LESS_THAN_OP = 506
GREATER_THAN_OR_EQUALS_OP = 507
NOT_EQUAL_OP = 508
CARRET_OPERATOR_PART = 509
TILDE_OPERATOR_PART = 510
EXCLAMATION_OPERATOR_PART = 511
GREATER_THAN_OP = 512
CONCATENATION_OP = 513
VERTICAL_BAR = 514
EQUALS_OP = 515
LEFT_BRACKET = 516
RIGHT_BRACKET = 517
INTRODUCER = 518
SPACES = 519
SINGLE_LINE_COMMENT = 520
MULTI_LINE_COMMENT = 521
PROMPT = 522
REGULAR_ID = 523
ZV = 524
# Token channels this lexer emits on: ANTLR's default channel plus the
# hidden channel (whitespace/comments the parser skips).
channelNames = [u'DEFAULT_TOKEN_CHANNEL', u'HIDDEN']
# Lexer modes; this grammar uses only the default mode.
modeNames = ['DEFAULT_MODE']
# Fixed literal spellings of tokens, as emitted by ANTLR; '<INVALID>'
# fills the slot for token types that have no single fixed literal.
literalNames = ['<INVALID>', "'..'", "'.'", "'%'", "'&'", "'('", "')'",
    "'**'", "'*'", "'+'", "'-'", "','", "'/'", "'@'", "':='", "':'",
    "';'", "'<='", "'<'", "'>='", "'^'", "'~'", "'!'", "'>'", "'||'",
    "'|'", "'='", "'['", "']'", "'_'", "'@!'"]
symbolicNames = ['<INVALID>', 'A_LETTER', 'ADD', 'AFTER', 'AGENT',
'AGGREGATE', 'ALL', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS',
'ASSUME', 'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT',
'AUTHID', 'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH',
'BEFORE', 'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE',
'BINARY_FLOAT', 'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY',
'BOOLEAN', 'BOTH', 'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER',
'CACHE', 'CALL', 'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR',
'CHAR_CS', 'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER',
'COLLECT', 'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED',
'COMPATIBILITY', 'COMPILE', 'COMPOUND', 'CONNECT',
'CONNECT_BY_ROOT', 'CONSTANT', 'CONSTRAINT', 'CONSTRAINTS',
'CONSTRUCTOR', 'CONTENT', 'CONTEXT', 'CONTINUE', 'CONVERT',
'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST', 'COUNT', 'CREATE',
'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER', 'CURSOR', 'CUSTOMDATUM',
'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY', 'DB_ROLE_CHANGE',
'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL', 'DECLARE',
'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS', 'DEFERRED',
'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC', 'DIMENSION',
'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT', 'DOUBLE', 'DROP',
'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT', 'ELSE', 'ELSIF',
'EMPTY', 'ENABLE', 'ENCODING', 'END', 'ENTITYESCAPING', 'ERR',
'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT', 'EXCEPTION',
'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE', 'EXECUTE',
'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FAILURE',
'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE', 'FLOAT',
'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM', 'FULL',
'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH', 'HAVING',
'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN', 'INCLUDE',
'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED', 'INDICATOR',
'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT', 'INSERT',
'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',
'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',
'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',
'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',
'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',
'LONG', 'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',
'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',
'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',
'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',
'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',
'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',
'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',
'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',
'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',
'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',
'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',
'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',
'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',
'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',
'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',
'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',
'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',
'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',
'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',
'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',
'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',
'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',
'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',
'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',
'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',
'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',
'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 'STRING',
'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',
'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',
'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',
'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 'TIMEZONE_HOUR',
'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',
'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',
'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',
'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',
'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',
'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',
'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',
'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',
'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',
'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',
'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',
'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',
'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',
'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',
'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',
'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',
'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',
'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',
'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',
'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',
'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'DELIMITED_ID', 'PERCENT',
'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN', 'DOUBLE_ASTERISK',
'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA', 'SOLIDUS',
'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',
'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',
'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',
'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',
'GREATER_THAN_OP', 'CONCATENATION_OP', 'VERTICAL_BAR', 'EQUALS_OP',
'LEFT_BRACKET', 'RIGHT_BRACKET', 'INTRODUCER', 'SPACES',
'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'REGULAR_ID',
'ZV']
ruleNames = ['T__0', 'A_LETTER', 'ADD', 'AFTER', 'AGENT', 'AGGREGATE',
'ALL', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS', 'ASSUME',
'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT', 'AUTHID',
'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH', 'BEFORE',
'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE', 'BINARY_FLOAT',
'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY', 'BOOLEAN', 'BOTH',
'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER', 'CACHE', 'CALL',
'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR', 'CHAR_CS',
'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER', 'COLLECT',
'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED', 'COMPATIBILITY',
'COMPILE', 'COMPOUND', 'CONNECT', 'CONNECT_BY_ROOT', 'CONSTANT',
'CONSTRAINT', 'CONSTRAINTS', 'CONSTRUCTOR', 'CONTENT', 'CONTEXT',
'CONTINUE', 'CONVERT', 'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST',
'COUNT', 'CREATE', 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER',
'CURSOR', 'CUSTOMDATUM', 'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY',
'DB_ROLE_CHANGE', 'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL',
'DECLARE', 'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS',
'DEFERRED', 'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC',
'DIMENSION', 'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT',
'DOUBLE', 'DROP', 'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT',
'ELSE', 'ELSIF', 'EMPTY', 'ENABLE', 'ENCODING', 'END',
'ENTITYESCAPING', 'ERR', 'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT',
'EXCEPTION', 'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE',
'EXECUTE', 'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT',
'FAILURE', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE',
'FLOAT', 'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM',
'FULL', 'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH',
'HAVING', 'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN',
'INCLUDE', 'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED',
'INDICATOR', 'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT',
'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',
'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',
'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',
'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',
'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',
'LONG', 'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',
'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',
'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',
'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',
'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',
'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',
'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',
'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',
'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',
'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',
'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',
'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',
'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',
'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',
'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',
'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',
'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',
'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',
'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',
'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',
'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',
'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',
'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',
'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',
'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',
'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',
'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 'STRING',
'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',
'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',
'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',
'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 'TIMEZONE_HOUR',
'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',
'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',
'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',
'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',
'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',
'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',
'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',
'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',
'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',
'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',
'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',
'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',
'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',
'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',
'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',
'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',
'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',
'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',
'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',
'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',
'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'CHAR_STRING_PERL', 'QUOTE',
'QS_ANGLE', 'QS_BRACE', 'QS_BRACK', 'QS_PAREN', 'QS_OTHER_CH',
'DELIMITED_ID', 'PERCENT', 'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN',
'DOUBLE_ASTERISK', 'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA',
'SOLIDUS', 'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',
'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',
'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',
'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',
'GREATER_THAN_OP', 'QUESTION_MARK', 'CONCATENATION_OP',
'VERTICAL_BAR', 'EQUALS_OP', 'LEFT_BRACKET', 'RIGHT_BRACKET',
'INTRODUCER', 'SPACES', 'SIMPLE_LETTER',
'UNSIGNED_INTEGER_FRAGMENT', 'FLOAT_FRAGMENT',
'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'NEWLINE',
'SPACE', 'REGULAR_ID', 'ZV', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U',
'V', 'W', 'X', 'Y', 'Z']
# Grammar file this lexer was generated from; edit the grammar and
# regenerate rather than modifying this file by hand.
grammarFileName = 'PlSql.g4'
def __init__(self, input=None, output: TextIO=sys.stdout):
        """Create the generated PL/SQL lexer.

        :param input: character stream to tokenize; defaults to None
            (presumably attached later via the ANTLR runtime — confirm
            against the antlr4 ``Lexer`` base class).
        :param output: stream the runtime uses for diagnostic output;
            defaults to ``sys.stdout``.
        """
        super().__init__(input, output)
        # Fail fast if the installed ANTLR runtime does not match the
        # tool version ('4.7.2') this lexer was generated with.
        self.checkVersion('4.7.2')
        # ATN simulator that performs the actual tokenization, backed by
        # this lexer's serialized ATN and shared DFA/prediction caches.
        self._interp = LexerATNSimulator(self, self.atn, self.
            decisionsToDFA, PredictionContextCache())
        # No custom lexer actions or semantic predicates are registered.
        self._actions = None
        self._predicates = None
<|reserved_special_token_1|>
# Generated from /home/mridul/PycharmProjects/BTP_2k18-19/PlSql.g4 by ANTLR 4.7.2
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\u020e")
buf.write("\u14d7\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.")
buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64")
buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:")
buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t")
buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t")
buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t")
buf.write("U\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4")
buf.write("^\t^\4_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4")
buf.write("g\tg\4h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4")
buf.write("p\tp\4q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4w\tw\4x\tx\4")
buf.write("y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080")
buf.write("\t\u0080\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083")
buf.write("\4\u0084\t\u0084\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087")
buf.write("\t\u0087\4\u0088\t\u0088\4\u0089\t\u0089\4\u008a\t\u008a")
buf.write("\4\u008b\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d\4\u008e")
buf.write("\t\u008e\4\u008f\t\u008f\4\u0090\t\u0090\4\u0091\t\u0091")
buf.write("\4\u0092\t\u0092\4\u0093\t\u0093\4\u0094\t\u0094\4\u0095")
buf.write("\t\u0095\4\u0096\t\u0096\4\u0097\t\u0097\4\u0098\t\u0098")
buf.write("\4\u0099\t\u0099\4\u009a\t\u009a\4\u009b\t\u009b\4\u009c")
buf.write("\t\u009c\4\u009d\t\u009d\4\u009e\t\u009e\4\u009f\t\u009f")
buf.write("\4\u00a0\t\u00a0\4\u00a1\t\u00a1\4\u00a2\t\u00a2\4\u00a3")
buf.write("\t\u00a3\4\u00a4\t\u00a4\4\u00a5\t\u00a5\4\u00a6\t\u00a6")
buf.write("\4\u00a7\t\u00a7\4\u00a8\t\u00a8\4\u00a9\t\u00a9\4\u00aa")
buf.write("\t\u00aa\4\u00ab\t\u00ab\4\u00ac\t\u00ac\4\u00ad\t\u00ad")
buf.write("\4\u00ae\t\u00ae\4\u00af\t\u00af\4\u00b0\t\u00b0\4\u00b1")
buf.write("\t\u00b1\4\u00b2\t\u00b2\4\u00b3\t\u00b3\4\u00b4\t\u00b4")
buf.write("\4\u00b5\t\u00b5\4\u00b6\t\u00b6\4\u00b7\t\u00b7\4\u00b8")
buf.write("\t\u00b8\4\u00b9\t\u00b9\4\u00ba\t\u00ba\4\u00bb\t\u00bb")
buf.write("\4\u00bc\t\u00bc\4\u00bd\t\u00bd\4\u00be\t\u00be\4\u00bf")
buf.write("\t\u00bf\4\u00c0\t\u00c0\4\u00c1\t\u00c1\4\u00c2\t\u00c2")
buf.write("\4\u00c3\t\u00c3\4\u00c4\t\u00c4\4\u00c5\t\u00c5\4\u00c6")
buf.write("\t\u00c6\4\u00c7\t\u00c7\4\u00c8\t\u00c8\4\u00c9\t\u00c9")
buf.write("\4\u00ca\t\u00ca\4\u00cb\t\u00cb\4\u00cc\t\u00cc\4\u00cd")
buf.write("\t\u00cd\4\u00ce\t\u00ce\4\u00cf\t\u00cf\4\u00d0\t\u00d0")
buf.write("\4\u00d1\t\u00d1\4\u00d2\t\u00d2\4\u00d3\t\u00d3\4\u00d4")
buf.write("\t\u00d4\4\u00d5\t\u00d5\4\u00d6\t\u00d6\4\u00d7\t\u00d7")
buf.write("\4\u00d8\t\u00d8\4\u00d9\t\u00d9\4\u00da\t\u00da\4\u00db")
buf.write("\t\u00db\4\u00dc\t\u00dc\4\u00dd\t\u00dd\4\u00de\t\u00de")
buf.write("\4\u00df\t\u00df\4\u00e0\t\u00e0\4\u00e1\t\u00e1\4\u00e2")
buf.write("\t\u00e2\4\u00e3\t\u00e3\4\u00e4\t\u00e4\4\u00e5\t\u00e5")
buf.write("\4\u00e6\t\u00e6\4\u00e7\t\u00e7\4\u00e8\t\u00e8\4\u00e9")
buf.write("\t\u00e9\4\u00ea\t\u00ea\4\u00eb\t\u00eb\4\u00ec\t\u00ec")
buf.write("\4\u00ed\t\u00ed\4\u00ee\t\u00ee\4\u00ef\t\u00ef\4\u00f0")
buf.write("\t\u00f0\4\u00f1\t\u00f1\4\u00f2\t\u00f2\4\u00f3\t\u00f3")
buf.write("\4\u00f4\t\u00f4\4\u00f5\t\u00f5\4\u00f6\t\u00f6\4\u00f7")
buf.write("\t\u00f7\4\u00f8\t\u00f8\4\u00f9\t\u00f9\4\u00fa\t\u00fa")
buf.write("\4\u00fb\t\u00fb\4\u00fc\t\u00fc\4\u00fd\t\u00fd\4\u00fe")
buf.write("\t\u00fe\4\u00ff\t\u00ff\4\u0100\t\u0100\4\u0101\t\u0101")
buf.write("\4\u0102\t\u0102\4\u0103\t\u0103\4\u0104\t\u0104\4\u0105")
buf.write("\t\u0105\4\u0106\t\u0106\4\u0107\t\u0107\4\u0108\t\u0108")
buf.write("\4\u0109\t\u0109\4\u010a\t\u010a\4\u010b\t\u010b\4\u010c")
buf.write("\t\u010c\4\u010d\t\u010d\4\u010e\t\u010e\4\u010f\t\u010f")
buf.write("\4\u0110\t\u0110\4\u0111\t\u0111\4\u0112\t\u0112\4\u0113")
buf.write("\t\u0113\4\u0114\t\u0114\4\u0115\t\u0115\4\u0116\t\u0116")
buf.write("\4\u0117\t\u0117\4\u0118\t\u0118\4\u0119\t\u0119\4\u011a")
buf.write("\t\u011a\4\u011b\t\u011b\4\u011c\t\u011c\4\u011d\t\u011d")
buf.write("\4\u011e\t\u011e\4\u011f\t\u011f\4\u0120\t\u0120\4\u0121")
buf.write("\t\u0121\4\u0122\t\u0122\4\u0123\t\u0123\4\u0124\t\u0124")
buf.write("\4\u0125\t\u0125\4\u0126\t\u0126\4\u0127\t\u0127\4\u0128")
buf.write("\t\u0128\4\u0129\t\u0129\4\u012a\t\u012a\4\u012b\t\u012b")
buf.write("\4\u012c\t\u012c\4\u012d\t\u012d\4\u012e\t\u012e\4\u012f")
buf.write("\t\u012f\4\u0130\t\u0130\4\u0131\t\u0131\4\u0132\t\u0132")
buf.write("\4\u0133\t\u0133\4\u0134\t\u0134\4\u0135\t\u0135\4\u0136")
buf.write("\t\u0136\4\u0137\t\u0137\4\u0138\t\u0138\4\u0139\t\u0139")
buf.write("\4\u013a\t\u013a\4\u013b\t\u013b\4\u013c\t\u013c\4\u013d")
buf.write("\t\u013d\4\u013e\t\u013e\4\u013f\t\u013f\4\u0140\t\u0140")
buf.write("\4\u0141\t\u0141\4\u0142\t\u0142\4\u0143\t\u0143\4\u0144")
buf.write("\t\u0144\4\u0145\t\u0145\4\u0146\t\u0146\4\u0147\t\u0147")
buf.write("\4\u0148\t\u0148\4\u0149\t\u0149\4\u014a\t\u014a\4\u014b")
buf.write("\t\u014b\4\u014c\t\u014c\4\u014d\t\u014d\4\u014e\t\u014e")
buf.write("\4\u014f\t\u014f\4\u0150\t\u0150\4\u0151\t\u0151\4\u0152")
buf.write("\t\u0152\4\u0153\t\u0153\4\u0154\t\u0154\4\u0155\t\u0155")
buf.write("\4\u0156\t\u0156\4\u0157\t\u0157\4\u0158\t\u0158\4\u0159")
buf.write("\t\u0159\4\u015a\t\u015a\4\u015b\t\u015b\4\u015c\t\u015c")
buf.write("\4\u015d\t\u015d\4\u015e\t\u015e\4\u015f\t\u015f\4\u0160")
buf.write("\t\u0160\4\u0161\t\u0161\4\u0162\t\u0162\4\u0163\t\u0163")
buf.write("\4\u0164\t\u0164\4\u0165\t\u0165\4\u0166\t\u0166\4\u0167")
buf.write("\t\u0167\4\u0168\t\u0168\4\u0169\t\u0169\4\u016a\t\u016a")
buf.write("\4\u016b\t\u016b\4\u016c\t\u016c\4\u016d\t\u016d\4\u016e")
buf.write("\t\u016e\4\u016f\t\u016f\4\u0170\t\u0170\4\u0171\t\u0171")
buf.write("\4\u0172\t\u0172\4\u0173\t\u0173\4\u0174\t\u0174\4\u0175")
buf.write("\t\u0175\4\u0176\t\u0176\4\u0177\t\u0177\4\u0178\t\u0178")
buf.write("\4\u0179\t\u0179\4\u017a\t\u017a\4\u017b\t\u017b\4\u017c")
buf.write("\t\u017c\4\u017d\t\u017d\4\u017e\t\u017e\4\u017f\t\u017f")
buf.write("\4\u0180\t\u0180\4\u0181\t\u0181\4\u0182\t\u0182\4\u0183")
buf.write("\t\u0183\4\u0184\t\u0184\4\u0185\t\u0185\4\u0186\t\u0186")
buf.write("\4\u0187\t\u0187\4\u0188\t\u0188\4\u0189\t\u0189\4\u018a")
buf.write("\t\u018a\4\u018b\t\u018b\4\u018c\t\u018c\4\u018d\t\u018d")
buf.write("\4\u018e\t\u018e\4\u018f\t\u018f\4\u0190\t\u0190\4\u0191")
buf.write("\t\u0191\4\u0192\t\u0192\4\u0193\t\u0193\4\u0194\t\u0194")
buf.write("\4\u0195\t\u0195\4\u0196\t\u0196\4\u0197\t\u0197\4\u0198")
buf.write("\t\u0198\4\u0199\t\u0199\4\u019a\t\u019a\4\u019b\t\u019b")
buf.write("\4\u019c\t\u019c\4\u019d\t\u019d\4\u019e\t\u019e\4\u019f")
buf.write("\t\u019f\4\u01a0\t\u01a0\4\u01a1\t\u01a1\4\u01a2\t\u01a2")
buf.write("\4\u01a3\t\u01a3\4\u01a4\t\u01a4\4\u01a5\t\u01a5\4\u01a6")
buf.write("\t\u01a6\4\u01a7\t\u01a7\4\u01a8\t\u01a8\4\u01a9\t\u01a9")
buf.write("\4\u01aa\t\u01aa\4\u01ab\t\u01ab\4\u01ac\t\u01ac\4\u01ad")
buf.write("\t\u01ad\4\u01ae\t\u01ae\4\u01af\t\u01af\4\u01b0\t\u01b0")
buf.write("\4\u01b1\t\u01b1\4\u01b2\t\u01b2\4\u01b3\t\u01b3\4\u01b4")
buf.write("\t\u01b4\4\u01b5\t\u01b5\4\u01b6\t\u01b6\4\u01b7\t\u01b7")
buf.write("\4\u01b8\t\u01b8\4\u01b9\t\u01b9\4\u01ba\t\u01ba\4\u01bb")
buf.write("\t\u01bb\4\u01bc\t\u01bc\4\u01bd\t\u01bd\4\u01be\t\u01be")
buf.write("\4\u01bf\t\u01bf\4\u01c0\t\u01c0\4\u01c1\t\u01c1\4\u01c2")
buf.write("\t\u01c2\4\u01c3\t\u01c3\4\u01c4\t\u01c4\4\u01c5\t\u01c5")
buf.write("\4\u01c6\t\u01c6\4\u01c7\t\u01c7\4\u01c8\t\u01c8\4\u01c9")
buf.write("\t\u01c9\4\u01ca\t\u01ca\4\u01cb\t\u01cb\4\u01cc\t\u01cc")
buf.write("\4\u01cd\t\u01cd\4\u01ce\t\u01ce\4\u01cf\t\u01cf\4\u01d0")
buf.write("\t\u01d0\4\u01d1\t\u01d1\4\u01d2\t\u01d2\4\u01d3\t\u01d3")
buf.write("\4\u01d4\t\u01d4\4\u01d5\t\u01d5\4\u01d6\t\u01d6\4\u01d7")
buf.write("\t\u01d7\4\u01d8\t\u01d8\4\u01d9\t\u01d9\4\u01da\t\u01da")
buf.write("\4\u01db\t\u01db\4\u01dc\t\u01dc\4\u01dd\t\u01dd\4\u01de")
buf.write("\t\u01de\4\u01df\t\u01df\4\u01e0\t\u01e0\4\u01e1\t\u01e1")
buf.write("\4\u01e2\t\u01e2\4\u01e3\t\u01e3\4\u01e4\t\u01e4\4\u01e5")
buf.write("\t\u01e5\4\u01e6\t\u01e6\4\u01e7\t\u01e7\4\u01e8\t\u01e8")
buf.write("\4\u01e9\t\u01e9\4\u01ea\t\u01ea\4\u01eb\t\u01eb\4\u01ec")
buf.write("\t\u01ec\4\u01ed\t\u01ed\4\u01ee\t\u01ee\4\u01ef\t\u01ef")
buf.write("\4\u01f0\t\u01f0\4\u01f1\t\u01f1\4\u01f2\t\u01f2\4\u01f3")
buf.write("\t\u01f3\4\u01f4\t\u01f4\4\u01f5\t\u01f5\4\u01f6\t\u01f6")
buf.write("\4\u01f7\t\u01f7\4\u01f8\t\u01f8\4\u01f9\t\u01f9\4\u01fa")
buf.write("\t\u01fa\4\u01fb\t\u01fb\4\u01fc\t\u01fc\4\u01fd\t\u01fd")
buf.write("\4\u01fe\t\u01fe\4\u01ff\t\u01ff\4\u0200\t\u0200\4\u0201")
buf.write("\t\u0201\4\u0202\t\u0202\4\u0203\t\u0203\4\u0204\t\u0204")
buf.write("\4\u0205\t\u0205\4\u0206\t\u0206\4\u0207\t\u0207\4\u0208")
buf.write("\t\u0208\4\u0209\t\u0209\4\u020a\t\u020a\4\u020b\t\u020b")
buf.write("\4\u020c\t\u020c\4\u020d\t\u020d\4\u020e\t\u020e\4\u020f")
buf.write("\t\u020f\4\u0210\t\u0210\4\u0211\t\u0211\4\u0212\t\u0212")
buf.write("\4\u0213\t\u0213\4\u0214\t\u0214\4\u0215\t\u0215\4\u0216")
buf.write("\t\u0216\4\u0217\t\u0217\4\u0218\t\u0218\4\u0219\t\u0219")
buf.write("\4\u021a\t\u021a\4\u021b\t\u021b\4\u021c\t\u021c\4\u021d")
buf.write("\t\u021d\4\u021e\t\u021e\4\u021f\t\u021f\4\u0220\t\u0220")
buf.write("\4\u0221\t\u0221\4\u0222\t\u0222\4\u0223\t\u0223\4\u0224")
buf.write("\t\u0224\4\u0225\t\u0225\4\u0226\t\u0226\4\u0227\t\u0227")
buf.write("\4\u0228\t\u0228\4\u0229\t\u0229\4\u022a\t\u022a\4\u022b")
buf.write("\t\u022b\4\u022c\t\u022c\4\u022d\t\u022d\4\u022e\t\u022e")
buf.write("\4\u022f\t\u022f\4\u0230\t\u0230\4\u0231\t\u0231\4\u0232")
buf.write("\t\u0232\4\u0233\t\u0233\4\u0234\t\u0234\3\2\3\2\3\2\3")
buf.write("\3\3\3\3\4\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\6\3\6")
buf.write("\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3")
buf.write("\7\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3\n")
buf.write("\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\f\3\f\3\f\3")
buf.write("\f\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3")
buf.write("\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20")
buf.write("\3\20\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22")
buf.write("\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\24\3\24\3\24\3\24")
buf.write("\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25")
buf.write("\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27")
buf.write("\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30")
buf.write("\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31")
buf.write("\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31")
buf.write("\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33")
buf.write("\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\35")
buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36")
buf.write("\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37")
buf.write("\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3 \3")
buf.write(" \3 \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3")
buf.write("!\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3")
buf.write("$\3%\3%\3%\3%\3%\3%\3%\3%\3&\3&\3&\3&\3&\3\'\3\'\3\'\3")
buf.write("\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3)\3)\3)\3*\3*\3*\3")
buf.write("*\3*\3+\3+\3,\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3.\3.\3.\3")
buf.write(".\3.\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3/\3/\3/\3\60\3\60")
buf.write("\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62")
buf.write("\3\62\3\62\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\64")
buf.write("\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65")
buf.write("\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\67\3\67\3\67")
buf.write("\3\67\3\67\38\38\38\38\38\38\39\39\39\39\39\39\39\39\3")
buf.write(":\3:\3:\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3;\3;\3<\3<\3")
buf.write("<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3=\3=\3>\3>\3>\3>\3>\3")
buf.write(">\3>\3>\3>\3>\3?\3?\3?\3?\3?\3?\3?\3?\3?\3?\3?\3?\3?\3")
buf.write("?\3@\3@\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3A\3A\3A\3A\3")
buf.write("B\3B\3B\3B\3B\3B\3B\3B\3C\3C\3C\3C\3C\3C\3C\3C\3C\3C\3")
buf.write("C\3C\3C\3C\3C\3C\3D\3D\3D\3D\3D\3D\3D\3D\3D\3E\3E\3E\3")
buf.write("E\3E\3E\3E\3E\3E\3E\3E\3F\3F\3F\3F\3F\3F\3F\3F\3F\3F\3")
buf.write("F\3F\3G\3G\3G\3G\3G\3G\3G\3G\3G\3G\3G\3G\3H\3H\3H\3H\3")
buf.write("H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3J\3J\3")
buf.write("J\3J\3J\3K\3K\3K\3K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3L\3L\3")
buf.write("L\3L\3L\3L\3L\3M\3M\3M\3M\3M\3M\3M\3M\3M\3M\3M\3M\3M\3")
buf.write("M\3M\3M\3N\3N\3N\3N\3N\3O\3O\3O\3O\3O\3O\3P\3P\3P\3P\3")
buf.write("P\3P\3P\3Q\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\3S\3S\3S\3S\3")
buf.write("S\3S\3S\3S\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3T\3U\3")
buf.write("U\3U\3U\3U\3U\3U\3V\3V\3V\3V\3V\3V\3V\3V\3V\3V\3V\3V\3")
buf.write("W\3W\3W\3W\3W\3W\3X\3X\3X\3X\3X\3Y\3Y\3Y\3Y\3Y\3Y\3Y\3")
buf.write("Y\3Y\3Z\3Z\3Z\3Z\3Z\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3")
buf.write("\\\3\\\3\\\3\\\3\\\3\\\3\\\3\\\3\\\3\\\3]\3]\3]\3]\3]")
buf.write("\3]\3]\3]\3]\3]\3]\3^\3^\3^\3^\3_\3_\3_\3_\3_\3_\3`\3")
buf.write("`\3`\3`\3a\3a\3a\3a\3a\3a\3a\3a\3b\3b\3b\3b\3b\3b\3b\3")
buf.write("b\3c\3c\3c\3c\3c\3c\3c\3c\3c\3c\3d\3d\3d\3d\3d\3d\3d\3")
buf.write("d\3d\3d\3e\3e\3e\3e\3e\3e\3e\3e\3f\3f\3f\3f\3f\3f\3f\3")
buf.write("f\3f\3g\3g\3g\3g\3g\3g\3g\3g\3g\3h\3h\3h\3h\3h\3h\3h\3")
buf.write("h\3i\3i\3i\3i\3i\3i\3i\3j\3j\3j\3j\3j\3j\3k\3k\3k\3k\3")
buf.write("k\3l\3l\3l\3l\3l\3l\3l\3l\3l\3l\3l\3l\3l\3l\3m\3m\3m\3")
buf.write("m\3m\3m\3m\3m\3m\3m\3n\3n\3n\3n\3n\3n\3n\3n\3o\3o\3o\3")
buf.write("o\3o\3o\3o\3o\3o\3o\3o\3o\3o\3p\3p\3p\3p\3p\3p\3p\3p\3")
buf.write("p\3q\3q\3q\3q\3q\3q\3q\3q\3q\3r\3r\3r\3r\3r\3r\3r\3s\3")
buf.write("s\3s\3s\3s\3t\3t\3t\3t\3t\3t\3t\3t\3t\3t\3t\3t\3t\3t\3")
buf.write("t\3t\3t\3t\3t\3t\3t\3t\3t\3t\3t\3u\3u\3u\3u\3u\3v\3v\3")
buf.write("v\3v\3v\3v\3v\3v\3w\3w\3w\3w\3w\3x\3x\3x\3x\3x\3x\3y\3")
buf.write("y\3y\3y\3y\3y\3z\3z\3z\3z\3z\3z\3z\3{\3{\3{\3{\3{\3{\3")
buf.write("{\3{\3{\3|\3|\3|\3|\3}\3}\3}\3}\3}\3}\3}\3}\3}\3}\3}\3")
buf.write("}\3}\3}\3}\3~\3~\3~\3~\3\177\3\177\3\177\3\177\3\177\3")
buf.write("\177\3\177\3\u0080\3\u0080\3\u0080\3\u0080\3\u0080\3\u0080")
buf.write("\3\u0080\3\u0081\3\u0081\3\u0081\3\u0081\3\u0081\3\u0081")
buf.write("\3\u0081\3\u0081\3\u0081\3\u0082\3\u0082\3\u0082\3\u0082")
buf.write("\3\u0082\3\u0082\3\u0082\3\u0083\3\u0083\3\u0083\3\u0083")
buf.write("\3\u0083\3\u0083\3\u0083\3\u0083\3\u0083\3\u0083\3\u0084")
buf.write("\3\u0084\3\u0084\3\u0084\3\u0084\3\u0084\3\u0084\3\u0084")
buf.write("\3\u0084\3\u0084\3\u0084\3\u0084\3\u0084\3\u0084\3\u0084")
buf.write("\3\u0085\3\u0085\3\u0085\3\u0085\3\u0085\3\u0085\3\u0085")
buf.write("\3\u0085\3\u0085\3\u0085\3\u0085\3\u0086\3\u0086\3\u0086")
buf.write("\3\u0086\3\u0086\3\u0086\3\u0086\3\u0086\3\u0087\3\u0087")
buf.write("\3\u0087\3\u0087\3\u0087\3\u0087\3\u0087\3\u0087\3\u0087")
buf.write("\3\u0087\3\u0088\3\u0088\3\u0088\3\u0088\3\u0088\3\u0088")
buf.write("\3\u0088\3\u0088\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089")
buf.write("\3\u0089\3\u0089\3\u008a\3\u008a\3\u008a\3\u008a\3\u008a")
buf.write("\3\u008b\3\u008b\3\u008b\3\u008b\3\u008b\3\u008b\3\u008b")
buf.write("\3\u008b\3\u008c\3\u008c\3\u008c\3\u008c\3\u008c\3\u008c")
buf.write("\3\u008c\3\u008c\3\u008c\3\u008d\3\u008d\3\u008d\3\u008d")
buf.write("\3\u008d\3\u008d\3\u008d\3\u008d\3\u008e\3\u008e\3\u008e")
buf.write("\3\u008e\3\u008e\3\u008e\3\u008e\3\u008e\3\u008f\3\u008f")
buf.write("\3\u008f\3\u008f\3\u008f\3\u008f\3\u0090\3\u0090\3\u0090")
buf.write("\3\u0090\3\u0090\3\u0090\3\u0091\3\u0091\3\u0091\3\u0091")
buf.write("\3\u0091\3\u0091\3\u0092\3\u0092\3\u0092\3\u0092\3\u0092")
buf.write("\3\u0092\3\u0093\3\u0093\3\u0093\3\u0093\3\u0093\3\u0093")
buf.write("\3\u0093\3\u0093\3\u0093\3\u0093\3\u0093\3\u0093\3\u0094")
buf.write("\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0095\3\u0095")
buf.write("\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095")
buf.write("\3\u0095\3\u0096\3\u0096\3\u0096\3\u0096\3\u0096\3\u0096")
buf.write("\3\u0096\3\u0096\3\u0097\3\u0097\3\u0097\3\u0097\3\u0098")
buf.write("\3\u0098\3\u0098\3\u0098\3\u0098\3\u0098\3\u0098\3\u0099")
buf.write("\3\u0099\3\u0099\3\u0099\3\u0099\3\u0099\3\u009a\3\u009a")
buf.write("\3\u009a\3\u009a\3\u009a\3\u009b\3\u009b\3\u009b\3\u009b")
buf.write("\3\u009b\3\u009c\3\u009c\3\u009c\3\u009c\3\u009c\3\u009c")
buf.write("\3\u009c\3\u009c\3\u009c\3\u009d\3\u009d\3\u009d\3\u009d")
buf.write("\3\u009d\3\u009e\3\u009e\3\u009e\3\u009e\3\u009e\3\u009e")
buf.write("\3\u009f\3\u009f\3\u009f\3\u009f\3\u009f\3\u009f\3\u00a0")
buf.write("\3\u00a0\3\u00a0\3\u00a0\3\u00a0\3\u00a0\3\u00a0\3\u00a0")
buf.write("\3\u00a0\3\u00a1\3\u00a1\3\u00a1\3\u00a1\3\u00a1\3\u00a2")
buf.write("\3\u00a2\3\u00a2\3\u00a2\3\u00a2\3\u00a2\3\u00a2\3\u00a3")
buf.write("\3\u00a3\3\u00a3\3\u00a3\3\u00a3\3\u00a4\3\u00a4\3\u00a4")
buf.write("\3\u00a4\3\u00a4\3\u00a5\3\u00a5\3\u00a5\3\u00a6\3\u00a6")
buf.write("\3\u00a6\3\u00a6\3\u00a6\3\u00a6\3\u00a6\3\u00a7\3\u00a7")
buf.write("\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a7")
buf.write("\3\u00a7\3\u00a8\3\u00a8\3\u00a8\3\u00a9\3\u00a9\3\u00a9")
buf.write("\3\u00a9\3\u00a9\3\u00a9\3\u00a9\3\u00a9\3\u00aa\3\u00aa")
buf.write("\3\u00aa\3\u00aa\3\u00aa\3\u00aa\3\u00aa\3\u00aa\3\u00aa")
buf.write("\3\u00aa\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ab")
buf.write("\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ac\3\u00ac\3\u00ac")
buf.write("\3\u00ac\3\u00ac\3\u00ac\3\u00ac\3\u00ad\3\u00ad\3\u00ad")
buf.write("\3\u00ad\3\u00ad\3\u00ad\3\u00ae\3\u00ae\3\u00ae\3\u00ae")
buf.write("\3\u00ae\3\u00ae\3\u00ae\3\u00ae\3\u00af\3\u00af\3\u00af")
buf.write("\3\u00af\3\u00af\3\u00af\3\u00af\3\u00af\3\u00af\3\u00af")
buf.write("\3\u00b0\3\u00b0\3\u00b0\3\u00b0\3\u00b0\3\u00b0\3\u00b0")
buf.write("\3\u00b0\3\u00b1\3\u00b1\3\u00b1\3\u00b1\3\u00b1\3\u00b1")
buf.write("\3\u00b1\3\u00b1\3\u00b1\3\u00b2\3\u00b2\3\u00b2\3\u00b2")
buf.write("\3\u00b2\3\u00b2\3\u00b2\3\u00b3\3\u00b3\3\u00b3\3\u00b3")
buf.write("\3\u00b3\3\u00b3\3\u00b4\3\u00b4\3\u00b4\3\u00b4\3\u00b4")
buf.write("\3\u00b4\3\u00b5\3\u00b5\3\u00b5\3\u00b5\3\u00b5\3\u00b5")
buf.write("\3\u00b5\3\u00b6\3\u00b6\3\u00b6\3\u00b6\3\u00b6\3\u00b6")
buf.write("\3\u00b6\3\u00b6\3\u00b6\3\u00b6\3\u00b6\3\u00b6\3\u00b6")
buf.write("\3\u00b7\3\u00b7\3\u00b7\3\u00b7\3\u00b7\3\u00b7\3\u00b7")
buf.write("\3\u00b7\3\u00b8\3\u00b8\3\u00b8\3\u00b8\3\u00b9\3\u00b9")
buf.write("\3\u00b9\3\u00b9\3\u00b9\3\u00b9\3\u00b9\3\u00b9\3\u00ba")
buf.write("\3\u00ba\3\u00ba\3\u00ba\3\u00ba\3\u00ba\3\u00ba\3\u00ba")
buf.write("\3\u00ba\3\u00ba\3\u00bb\3\u00bb\3\u00bb\3\u00bb\3\u00bb")
buf.write("\3\u00bb\3\u00bb\3\u00bb\3\u00bb\3\u00bc\3\u00bc\3\u00bc")
buf.write("\3\u00bc\3\u00bc\3\u00bd\3\u00bd\3\u00bd\3\u00bd\3\u00bd")
buf.write("\3\u00bd\3\u00bd\3\u00bd\3\u00bd\3\u00bd\3\u00bd\3\u00be")
buf.write("\3\u00be\3\u00be\3\u00bf\3\u00bf\3\u00bf\3\u00bf\3\u00bf")
buf.write("\3\u00bf\3\u00bf\3\u00bf\3\u00bf\3\u00bf\3\u00c0\3\u00c0")
buf.write("\3\u00c0\3\u00c0\3\u00c0\3\u00c0\3\u00c0\3\u00c0\3\u00c1")
buf.write("\3\u00c1\3\u00c1\3\u00c1\3\u00c1\3\u00c2\3\u00c2\3\u00c2")
buf.write("\3\u00c2\3\u00c2\3\u00c3\3\u00c3\3\u00c3\3\u00c3\3\u00c3")
buf.write("\3\u00c4\3\u00c4\3\u00c4\3\u00c4\3\u00c4\3\u00c4\3\u00c4")
buf.write("\3\u00c4\3\u00c4\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5")
buf.write("\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6")
buf.write("\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c7\3\u00c7\3\u00c7")
buf.write("\3\u00c7\3\u00c7\3\u00c7\3\u00c7\3\u00c7\3\u00c8\3\u00c8")
buf.write("\3\u00c8\3\u00c8\3\u00c8\3\u00c9\3\u00c9\3\u00c9\3\u00c9")
buf.write("\3\u00c9\3\u00c9\3\u00ca\3\u00ca\3\u00ca\3\u00ca\3\u00ca")
buf.write("\3\u00ca\3\u00ca\3\u00ca\3\u00cb\3\u00cb\3\u00cb\3\u00cb")
buf.write("\3\u00cb\3\u00cc\3\u00cc\3\u00cc\3\u00cc\3\u00cc\3\u00cc")
buf.write("\3\u00cd\3\u00cd\3\u00cd\3\u00cd\3\u00cd\3\u00cd\3\u00ce")
buf.write("\3\u00ce\3\u00ce\3\u00ce\3\u00ce\3\u00ce\3\u00cf\3\u00cf")
buf.write("\3\u00cf\3\u00cf\3\u00cf\3\u00cf\3\u00d0\3\u00d0\3\u00d0")
buf.write("\3\u00d0\3\u00d0\3\u00d0\3\u00d1\3\u00d1\3\u00d1\3\u00d1")
buf.write("\3\u00d1\3\u00d2\3\u00d2\3\u00d2\3\u00d2\3\u00d2\3\u00d2")
buf.write("\3\u00d2\3\u00d3\3\u00d3\3\u00d3\3\u00d3\3\u00d4\3\u00d4")
buf.write("\3\u00d4\3\u00d4\3\u00d4\3\u00d4\3\u00d4\3\u00d5\3\u00d5")
buf.write("\3\u00d5\3\u00d5\3\u00d5\3\u00d5\3\u00d6\3\u00d6\3\u00d6")
buf.write("\3\u00d6\3\u00d6\3\u00d7\3\u00d7\3\u00d7\3\u00d7\3\u00d7")
buf.write("\3\u00d8\3\u00d8\3\u00d8\3\u00d8\3\u00d8\3\u00d9\3\u00d9")
buf.write("\3\u00d9\3\u00d9\3\u00da\3\u00da\3\u00da\3\u00da\3\u00da")
buf.write("\3\u00da\3\u00da\3\u00da\3\u00db\3\u00db\3\u00db\3\u00db")
buf.write("\3\u00db\3\u00db\3\u00db\3\u00db\3\u00db\3\u00dc\3\u00dc")
buf.write("\3\u00dc\3\u00dc\3\u00dc\3\u00dc\3\u00dc\3\u00dc\3\u00dc")
buf.write("\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd")
buf.write("\3\u00de\3\u00de\3\u00de\3\u00de\3\u00de\3\u00de\3\u00df")
buf.write("\3\u00df\3\u00df\3\u00df\3\u00df\3\u00df\3\u00e0\3\u00e0")
buf.write("\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e1\3\u00e1")
buf.write("\3\u00e1\3\u00e1\3\u00e1\3\u00e1\3\u00e1\3\u00e1\3\u00e1")
buf.write("\3\u00e2\3\u00e2\3\u00e2\3\u00e2\3\u00e2\3\u00e2\3\u00e2")
buf.write("\3\u00e2\3\u00e2\3\u00e3\3\u00e3\3\u00e3\3\u00e3\3\u00e3")
buf.write("\3\u00e4\3\u00e4\3\u00e4\3\u00e4\3\u00e4\3\u00e4\3\u00e5")
buf.write("\3\u00e5\3\u00e5\3\u00e5\3\u00e5\3\u00e5\3\u00e5\3\u00e6")
buf.write("\3\u00e6\3\u00e6\3\u00e6\3\u00e6\3\u00e6\3\u00e7\3\u00e7")
buf.write("\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e7")
buf.write("\3\u00e8\3\u00e8\3\u00e8\3\u00e8\3\u00e8\3\u00e9\3\u00e9")
buf.write("\3\u00e9\3\u00e9\3\u00ea\3\u00ea\3\u00ea\3\u00ea\3\u00ea")
buf.write("\3\u00ea\3\u00ea\3\u00ea\3\u00eb\3\u00eb\3\u00eb\3\u00eb")
buf.write("\3\u00eb\3\u00eb\3\u00eb\3\u00eb\3\u00eb\3\u00ec\3\u00ec")
buf.write("\3\u00ec\3\u00ec\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed")
buf.write("\3\u00ed\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee")
buf.write("\3\u00ee\3\u00ee\3\u00ee\3\u00ef\3\u00ef\3\u00ef\3\u00ef")
buf.write("\3\u00ef\3\u00ef\3\u00f0\3\u00f0\3\u00f0\3\u00f0\3\u00f0")
buf.write("\3\u00f0\3\u00f0\3\u00f1\3\u00f1\3\u00f1\3\u00f1\3\u00f2")
buf.write("\3\u00f2\3\u00f2\3\u00f3\3\u00f3\3\u00f3\3\u00f3\3\u00f3")
buf.write("\3\u00f3\3\u00f3\3\u00f3\3\u00f4\3\u00f4\3\u00f4\3\u00f4")
buf.write("\3\u00f4\3\u00f4\3\u00f4\3\u00f4\3\u00f5\3\u00f5\3\u00f5")
buf.write("\3\u00f5\3\u00f5\3\u00f5\3\u00f5\3\u00f6\3\u00f6\3\u00f6")
buf.write("\3\u00f6\3\u00f6\3\u00f6\3\u00f6\3\u00f6\3\u00f7\3\u00f7")
buf.write("\3\u00f7\3\u00f7\3\u00f7\3\u00f7\3\u00f7\3\u00f7\3\u00f7")
buf.write("\3\u00f7\3\u00f7\3\u00f7\3\u00f7\3\u00f7\3\u00f7\3\u00f7")
buf.write("\3\u00f7\3\u00f8\3\u00f8\3\u00f8\3\u00f8\3\u00f8\3\u00f8")
buf.write("\3\u00f8\3\u00f8\3\u00f8\3\u00f8\3\u00f8\3\u00f9\3\u00f9")
buf.write("\3\u00f9\3\u00f9\3\u00f9\3\u00f9\3\u00f9\3\u00f9\3\u00f9")
buf.write("\3\u00f9\3\u00f9\3\u00fa\3\u00fa\3\u00fa\3\u00fa\3\u00fa")
buf.write("\3\u00fb\3\u00fb\3\u00fb\3\u00fb\3\u00fb\3\u00fb\3\u00fb")
buf.write("\3\u00fb\3\u00fc\3\u00fc\3\u00fc\3\u00fc\3\u00fc\3\u00fc")
buf.write("\3\u00fc\3\u00fc\3\u00fc\3\u00fc\3\u00fc\3\u00fc\3\u00fc")
buf.write("\3\u00fc\3\u00fd\3\u00fd\3\u00fd\3\u00fd\3\u00fe\3\u00fe")
buf.write("\3\u00fe\3\u00fe\3\u00fe\3\u00fe\3\u00fe\3\u00ff\3\u00ff")
buf.write("\3\u00ff\3\u00ff\3\u00ff\3\u0100\3\u0100\3\u0100\3\u0100")
buf.write("\3\u0100\3\u0100\3\u0101\3\u0101\3\u0101\3\u0101\3\u0101")
buf.write("\3\u0101\3\u0101\3\u0102\3\u0102\3\u0102\3\u0102\3\u0102")
buf.write("\3\u0102\3\u0102\3\u0102\3\u0103\3\u0103\3\u0103\3\u0103")
buf.write("\3\u0103\3\u0103\3\u0103\3\u0103\3\u0103\3\u0103\3\u0104")
buf.write("\3\u0104\3\u0104\3\u0104\3\u0104\3\u0104\3\u0104\3\u0105")
buf.write("\3\u0105\3\u0105\3\u0106\3\u0106\3\u0106\3\u0106\3\u0107")
buf.write("\3\u0107\3\u0107\3\u0107\3\u0108\3\u0108\3\u0108\3\u0108")
buf.write("\3\u0109\3\u0109\3\u0109\3\u010a\3\u010a\3\u010a\3\u010a")
buf.write("\3\u010a\3\u010b\3\u010b\3\u010b\3\u010b\3\u010b\3\u010c")
buf.write("\3\u010c\3\u010c\3\u010c\3\u010c\3\u010c\3\u010c\3\u010d")
buf.write("\3\u010d\3\u010d\3\u010e\3\u010e\3\u010e\3\u010e\3\u010e")
buf.write("\3\u010e\3\u010e\3\u010e\3\u010f\3\u010f\3\u010f\3\u010f")
buf.write("\3\u010f\3\u010f\3\u0110\3\u0110\3\u0110\3\u0110\3\u0110")
buf.write("\3\u0110\3\u0110\3\u0110\3\u0110\3\u0110\3\u0110\3\u0111")
buf.write("\3\u0111\3\u0111\3\u0111\3\u0111\3\u0111\3\u0111\3\u0111")
buf.write("\3\u0112\3\u0112\3\u0112\3\u0112\3\u0113\3\u0113\3\u0113")
buf.write("\3\u0113\3\u0113\3\u0113\3\u0114\3\u0114\3\u0114\3\u0114")
buf.write("\3\u0114\3\u0115\3\u0115\3\u0115\3\u0115\3\u0115\3\u0115")
buf.write("\3\u0115\3\u0115\3\u0115\3\u0115\3\u0115\3\u0116\3\u0116")
buf.write("\3\u0116\3\u0116\3\u0116\3\u0116\3\u0116\3\u0116\3\u0117")
buf.write("\3\u0117\3\u0117\3\u0117\3\u0117\3\u0117\3\u0117\3\u0117")
buf.write("\3\u0117\3\u0117\3\u0117\3\u0117\3\u0117\3\u0117\3\u0117")
buf.write("\3\u0117\3\u0118\3\u0118\3\u0118\3\u0118\3\u0118\3\u0118")
buf.write("\3\u0118\3\u0118\3\u0118\3\u0118\3\u0118\3\u0119\3\u0119")
buf.write("\3\u0119\3\u0119\3\u0119\3\u0119\3\u0119\3\u011a\3\u011a")
buf.write("\3\u011a\3\u011a\3\u011a\3\u011a\3\u011a\3\u011a\3\u011a")
buf.write("\3\u011a\3\u011b\3\u011b\3\u011b\3\u011b\3\u011b\3\u011b")
buf.write("\3\u011b\3\u011b\3\u011c\3\u011c\3\u011c\3\u011c\3\u011c")
buf.write("\3\u011d\3\u011d\3\u011d\3\u011d\3\u011d\3\u011d\3\u011d")
buf.write("\3\u011d\3\u011d\3\u011e\3\u011e\3\u011e\3\u011e\3\u011e")
buf.write("\3\u011e\3\u011f\3\u011f\3\u011f\3\u011f\3\u011f\3\u011f")
buf.write("\3\u011f\3\u011f\3\u011f\3\u011f\3\u0120\3\u0120\3\u0120")
buf.write("\3\u0120\3\u0120\3\u0120\3\u0121\3\u0121\3\u0121\3\u0121")
buf.write("\3\u0121\3\u0122\3\u0122\3\u0122\3\u0122\3\u0122\3\u0122")
buf.write("\3\u0122\3\u0122\3\u0122\3\u0122\3\u0122\3\u0122\3\u0123")
buf.write("\3\u0123\3\u0123\3\u0123\3\u0123\3\u0123\3\u0123\3\u0123")
buf.write("\3\u0123\3\u0124\3\u0124\3\u0124\3\u0124\3\u0124\3\u0124")
buf.write("\3\u0124\3\u0124\3\u0124\3\u0124\3\u0125\3\u0125\3\u0125")
buf.write("\3\u0125\3\u0125\3\u0125\3\u0125\3\u0126\3\u0126\3\u0126")
buf.write("\3\u0126\3\u0126\3\u0126\3\u0126\3\u0126\3\u0126\3\u0126")
buf.write("\3\u0127\3\u0127\3\u0127\3\u0127\3\u0127\3\u0127\3\u0127")
buf.write("\3\u0127\3\u0127\3\u0127\3\u0128\3\u0128\3\u0128\3\u0128")
buf.write("\3\u0128\3\u0128\3\u0128\3\u0128\3\u0129\3\u0129\3\u0129")
buf.write("\3\u0129\3\u0129\3\u0129\3\u012a\3\u012a\3\u012a\3\u012a")
buf.write("\3\u012a\3\u012a\3\u012a\3\u012a\3\u012a\3\u012a\3\u012b")
buf.write("\3\u012b\3\u012b\3\u012b\3\u012b\3\u012b\3\u012c\3\u012c")
buf.write("\3\u012c\3\u012c\3\u012c\3\u012c\3\u012d\3\u012d\3\u012d")
buf.write("\3\u012d\3\u012e\3\u012e\3\u012e\3\u012e\3\u012e\3\u012f")
buf.write("\3\u012f\3\u012f\3\u012f\3\u012f\3\u0130\3\u0130\3\u0130")
buf.write("\3\u0130\3\u0130\3\u0130\3\u0130\3\u0131\3\u0131\3\u0131")
buf.write("\3\u0131\3\u0132\3\u0132\3\u0132\3\u0132\3\u0132\3\u0132")
buf.write("\3\u0132\3\u0132\3\u0132\3\u0132\3\u0133\3\u0133\3\u0133")
buf.write("\3\u0133\3\u0133\3\u0133\3\u0133\3\u0133\3\u0133\3\u0133")
buf.write("\3\u0133\3\u0133\3\u0134\3\u0134\3\u0134\3\u0134\3\u0134")
buf.write("\3\u0134\3\u0134\3\u0135\3\u0135\3\u0135\3\u0135\3\u0135")
buf.write("\3\u0135\3\u0135\3\u0135\3\u0135\3\u0135\3\u0136\3\u0136")
buf.write("\3\u0136\3\u0136\3\u0136\3\u0136\3\u0136\3\u0137\3\u0137")
buf.write("\3\u0137\3\u0137\3\u0137\3\u0137\3\u0137\3\u0137\3\u0138")
buf.write("\3\u0138\3\u0138\3\u0138\3\u0138\3\u0138\3\u0138\3\u0138")
buf.write("\3\u0139\3\u0139\3\u0139\3\u0139\3\u0139\3\u0139\3\u0139")
buf.write("\3\u0139\3\u0139\3\u0139\3\u0139\3\u0139\3\u0139\3\u0139")
buf.write("\3\u0139\3\u0139\3\u0139\3\u0139\3\u0139\3\u0139\3\u013a")
buf.write("\3\u013a\3\u013a\3\u013a\3\u013a\3\u013a\3\u013a\3\u013b")
buf.write("\3\u013b\3\u013b\3\u013b\3\u013b\3\u013b\3\u013b\3\u013b")
buf.write("\3\u013b\3\u013b\3\u013b\3\u013b\3\u013b\3\u013c\3\u013c")
buf.write("\3\u013c\3\u013c\3\u013c\3\u013c\3\u013c\3\u013d\3\u013d")
buf.write("\3\u013d\3\u013d\3\u013d\3\u013d\3\u013d\3\u013d\3\u013d")
buf.write("\3\u013d\3\u013e\3\u013e\3\u013e\3\u013e\3\u013e\3\u013e")
buf.write("\3\u013f\3\u013f\3\u013f\3\u013f\3\u013f\3\u013f\3\u013f")
buf.write("\3\u013f\3\u0140\3\u0140\3\u0140\3\u0140\3\u0140\3\u0140")
buf.write("\3\u0140\3\u0141\3\u0141\3\u0141\3\u0141\3\u0141\3\u0141")
buf.write("\3\u0142\3\u0142\3\u0142\3\u0142\3\u0142\3\u0142\3\u0142")
buf.write("\3\u0142\3\u0142\3\u0143\3\u0143\3\u0143\3\u0143\3\u0143")
buf.write("\3\u0143\3\u0143\3\u0144\3\u0144\3\u0144\3\u0144\3\u0145")
buf.write("\3\u0145\3\u0145\3\u0145\3\u0145\3\u0145\3\u0146\3\u0146")
buf.write("\3\u0146\3\u0146\3\u0146\3\u0147\3\u0147\3\u0147\3\u0147")
buf.write("\3\u0147\3\u0147\3\u0148\3\u0148\3\u0148\3\u0148\3\u0148")
buf.write("\3\u0148\3\u0148\3\u0149\3\u0149\3\u0149\3\u0149\3\u0149")
buf.write("\3\u014a\3\u014a\3\u014a\3\u014a\3\u014a\3\u014a\3\u014a")
buf.write("\3\u014a\3\u014a\3\u014a\3\u014b\3\u014b\3\u014b\3\u014b")
buf.write("\3\u014b\3\u014b\3\u014b\3\u014c\3\u014c\3\u014c\3\u014c")
buf.write("\3\u014c\3\u014c\3\u014c\3\u014c\3\u014c\3\u014c\3\u014c")
buf.write("\3\u014c\3\u014d\3\u014d\3\u014d\3\u014d\3\u014e\3\u014e")
buf.write("\3\u014e\3\u014e\3\u014e\3\u014e\3\u014e\3\u014f\3\u014f")
buf.write("\3\u014f\3\u014f\3\u014f\3\u014f\3\u014f\3\u0150\3\u0150")
buf.write("\3\u0150\3\u0150\3\u0150\3\u0151\3\u0151\3\u0151\3\u0151")
buf.write("\3\u0151\3\u0151\3\u0151\3\u0151\3\u0152\3\u0152\3\u0152")
buf.write("\3\u0152\3\u0152\3\u0152\3\u0152\3\u0153\3\u0153\3\u0153")
buf.write("\3\u0153\3\u0153\3\u0154\3\u0154\3\u0154\3\u0154\3\u0154")
buf.write("\3\u0154\3\u0154\3\u0154\3\u0154\3\u0155\3\u0155\3\u0155")
buf.write("\3\u0155\3\u0155\3\u0155\3\u0155\3\u0155\3\u0155\3\u0155")
buf.write("\3\u0155\3\u0156\3\u0156\3\u0156\3\u0156\3\u0156\3\u0156")
buf.write("\3\u0156\3\u0156\3\u0156\3\u0156\3\u0156\3\u0156\3\u0156")
buf.write("\3\u0157\3\u0157\3\u0157\3\u0157\3\u0157\3\u0157\3\u0157")
buf.write("\3\u0157\3\u0157\3\u0157\3\u0157\3\u0157\3\u0157\3\u0157")
buf.write("\3\u0157\3\u0157\3\u0157\3\u0157\3\u0158\3\u0158\3\u0158")
buf.write("\3\u0158\3\u0158\3\u0158\3\u0158\3\u0158\3\u0158\3\u0158")
buf.write("\3\u0158\3\u0158\3\u0159\3\u0159\3\u0159\3\u0159\3\u0159")
buf.write("\3\u0159\3\u0159\3\u0159\3\u0159\3\u0159\3\u0159\3\u0159")
buf.write("\3\u0159\3\u0159\3\u0159\3\u0159\3\u015a\3\u015a\3\u015a")
buf.write("\3\u015a\3\u015b\3\u015b\3\u015b\3\u015b\3\u015b\3\u015c")
buf.write("\3\u015c\3\u015c\3\u015c\3\u015c\3\u015c\3\u015c\3\u015c")
buf.write("\3\u015c\3\u015d\3\u015d\3\u015d\3\u015d\3\u015d\3\u015d")
buf.write("\3\u015e\3\u015e\3\u015e\3\u015e\3\u015e\3\u015f\3\u015f")
buf.write("\3\u015f\3\u015f\3\u015f\3\u015f\3\u015f\3\u015f\3\u015f")
buf.write("\3\u0160\3\u0160\3\u0160\3\u0160\3\u0160\3\u0160\3\u0160")
buf.write("\3\u0160\3\u0160\3\u0161\3\u0161\3\u0161\3\u0161\3\u0161")
buf.write("\3\u0161\3\u0161\3\u0161\3\u0161\3\u0162\3\u0162\3\u0162")
buf.write("\3\u0162\3\u0162\3\u0162\3\u0162\3\u0162\3\u0162\3\u0162")
buf.write("\3\u0162\3\u0162\3\u0162\3\u0162\3\u0162\3\u0163\3\u0163")
buf.write("\3\u0163\3\u0163\3\u0163\3\u0163\3\u0163\3\u0164\3\u0164")
buf.write("\3\u0164\3\u0164\3\u0164\3\u0165\3\u0165\3\u0165\3\u0165")
buf.write("\3\u0165\3\u0166\3\u0166\3\u0166\3\u0166\3\u0166\3\u0166")
buf.write("\3\u0166\3\u0166\3\u0166\3\u0167\3\u0167\3\u0167\3\u0167")
buf.write("\3\u0167\3\u0167\3\u0167\3\u0167\3\u0167\3\u0168\3\u0168")
buf.write("\3\u0168\3\u0168\3\u0168\3\u0169\3\u0169\3\u0169\3\u0169")
buf.write("\3\u0169\3\u0169\3\u0169\3\u0169\3\u0169\3\u0169\3\u0169")
buf.write("\3\u0169\3\u0169\3\u0169\3\u016a\3\u016a\3\u016a\3\u016a")
buf.write("\3\u016a\3\u016a\3\u016a\3\u016a\3\u016b\3\u016b\3\u016b")
buf.write("\3\u016b\3\u016b\3\u016b\3\u016b\3\u016b\3\u016b\3\u016c")
buf.write("\3\u016c\3\u016c\3\u016c\3\u016c\3\u016c\3\u016c\3\u016c")
buf.write("\3\u016c\3\u016c\3\u016c\3\u016d\3\u016d\3\u016d\3\u016d")
buf.write("\3\u016d\3\u016d\3\u016e\3\u016e\3\u016e\3\u016e\3\u016e")
buf.write("\3\u016e\3\u016e\3\u016e\3\u016f\3\u016f\3\u016f\3\u016f")
buf.write("\3\u016f\3\u016f\3\u016f\3\u016f\3\u016f\3\u016f\3\u0170")
buf.write("\3\u0170\3\u0170\3\u0170\3\u0170\3\u0170\3\u0170\3\u0170")
buf.write("\3\u0170\3\u0170\3\u0170\3\u0170\3\u0170\3\u0171\3\u0171")
buf.write("\3\u0171\3\u0171\3\u0171\3\u0171\3\u0171\3\u0172\3\u0172")
buf.write("\3\u0172\3\u0172\3\u0172\3\u0172\3\u0172\3\u0172\3\u0172")
buf.write("\3\u0172\3\u0172\3\u0173\3\u0173\3\u0173\3\u0173\3\u0173")
buf.write("\3\u0173\3\u0173\3\u0174\3\u0174\3\u0174\3\u0174\3\u0174")
buf.write("\3\u0174\3\u0174\3\u0174\3\u0174\3\u0174\3\u0174\3\u0174")
buf.write("\3\u0175\3\u0175\3\u0175\3\u0175\3\u0175\3\u0175\3\u0175")
buf.write("\3\u0175\3\u0175\3\u0175\3\u0175\3\u0175\3\u0175\3\u0176")
buf.write("\3\u0176\3\u0176\3\u0176\3\u0176\3\u0176\3\u0176\3\u0176")
buf.write("\3\u0176\3\u0176\3\u0176\3\u0176\3\u0176\3\u0176\3\u0177")
buf.write("\3\u0177\3\u0177\3\u0177\3\u0177\3\u0177\3\u0177\3\u0177")
buf.write("\3\u0178\3\u0178\3\u0178\3\u0178\3\u0178\3\u0178\3\u0178")
buf.write("\3\u0178\3\u0179\3\u0179\3\u0179\3\u0179\3\u0179\3\u0179")
buf.write("\3\u0179\3\u0179\3\u017a\3\u017a\3\u017a\3\u017a\3\u017a")
buf.write("\3\u017a\3\u017b\3\u017b\3\u017b\3\u017b\3\u017c\3\u017c")
buf.write("\3\u017c\3\u017c\3\u017c\3\u017d\3\u017d\3\u017d\3\u017d")
buf.write("\3\u017d\3\u017e\3\u017e\3\u017e\3\u017e\3\u017e\3\u017e")
buf.write("\3\u017e\3\u017e\3\u017e\3\u017e\3\u017f\3\u017f\3\u017f")
buf.write("\3\u017f\3\u017f\3\u017f\3\u017f\3\u017f\3\u017f\3\u017f")
buf.write("\3\u017f\3\u017f\3\u017f\3\u017f\3\u017f\3\u017f\3\u017f")
buf.write("\3\u017f\3\u017f\3\u017f\3\u017f\3\u017f\3\u017f\3\u017f")
buf.write("\3\u017f\3\u017f\3\u017f\3\u017f\3\u0180\3\u0180\3\u0180")
buf.write("\3\u0180\3\u0180\3\u0180\3\u0180\3\u0180\3\u0180\3\u0180")
buf.write("\3\u0180\3\u0180\3\u0180\3\u0180\3\u0180\3\u0180\3\u0180")
buf.write("\3\u0180\3\u0180\3\u0180\3\u0180\3\u0180\3\u0180\3\u0180")
buf.write("\3\u0180\3\u0180\3\u0180\3\u0181\3\u0181\3\u0181\3\u0181")
buf.write("\3\u0181\3\u0181\3\u0181\3\u0181\3\u0181\3\u0181\3\u0181")
buf.write("\3\u0181\3\u0181\3\u0181\3\u0181\3\u0181\3\u0181\3\u0181")
buf.write("\3\u0181\3\u0181\3\u0181\3\u0181\3\u0181\3\u0181\3\u0182")
buf.write("\3\u0182\3\u0182\3\u0182\3\u0182\3\u0182\3\u0182\3\u0182")
buf.write("\3\u0182\3\u0182\3\u0182\3\u0182\3\u0182\3\u0182\3\u0183")
buf.write("\3\u0183\3\u0183\3\u0183\3\u0183\3\u0183\3\u0183\3\u0183")
buf.write("\3\u0183\3\u0183\3\u0183\3\u0183\3\u0183\3\u0183\3\u0184")
buf.write("\3\u0184\3\u0184\3\u0184\3\u0184\3\u0184\3\u0184\3\u0184")
buf.write("\3\u0184\3\u0184\3\u0184\3\u0184\3\u0184\3\u0184\3\u0184")
buf.write("\3\u0184\3\u0185\3\u0185\3\u0185\3\u0185\3\u0185\3\u0185")
buf.write("\3\u0185\3\u0185\3\u0185\3\u0185\3\u0185\3\u0185\3\u0185")
buf.write("\3\u0185\3\u0185\3\u0185\3\u0186\3\u0186\3\u0186\3\u0187")
buf.write("\3\u0187\3\u0187\3\u0187\3\u0187\3\u0187\3\u0187\3\u0187")
buf.write("\3\u0187\3\u0188\3\u0188\3\u0188\3\u0188\3\u0188\3\u0188")
buf.write("\3\u0188\3\u0188\3\u0188\3\u0188\3\u0188\3\u0188\3\u0189")
buf.write("\3\u0189\3\u0189\3\u0189\3\u0189\3\u0189\3\u0189\3\u0189")
buf.write("\3\u0189\3\u0189\3\u018a\3\u018a\3\u018a\3\u018a\3\u018a")
buf.write("\3\u018a\3\u018b\3\u018b\3\u018b\3\u018b\3\u018b\3\u018b")
buf.write("\3\u018b\3\u018b\3\u018c\3\u018c\3\u018c\3\u018c\3\u018c")
buf.write("\3\u018d\3\u018d\3\u018d\3\u018d\3\u018d\3\u018e\3\u018e")
buf.write("\3\u018e\3\u018e\3\u018e\3\u018e\3\u018e\3\u018e\3\u018e")
buf.write("\3\u018f\3\u018f\3\u018f\3\u018f\3\u018f\3\u0190\3\u0190")
buf.write("\3\u0190\3\u0190\3\u0190\3\u0190\3\u0190\3\u0190\3\u0190")
buf.write("\3\u0190\3\u0191\3\u0191\3\u0191\3\u0191\3\u0191\3\u0191")
buf.write("\3\u0192\3\u0192\3\u0192\3\u0192\3\u0192\3\u0192\3\u0193")
buf.write("\3\u0193\3\u0193\3\u0193\3\u0193\3\u0193\3\u0193\3\u0194")
buf.write("\3\u0194\3\u0194\3\u0194\3\u0194\3\u0194\3\u0194\3\u0194")
buf.write("\3\u0194\3\u0194\3\u0195\3\u0195\3\u0195\3\u0195\3\u0195")
buf.write("\3\u0195\3\u0195\3\u0195\3\u0196\3\u0196\3\u0196\3\u0196")
buf.write("\3\u0196\3\u0196\3\u0197\3\u0197\3\u0197\3\u0197\3\u0197")
buf.write("\3\u0197\3\u0197\3\u0198\3\u0198\3\u0198\3\u0198\3\u0198")
buf.write("\3\u0198\3\u0198\3\u0198\3\u0199\3\u0199\3\u0199\3\u0199")
buf.write("\3\u0199\3\u0199\3\u0199\3\u019a\3\u019a\3\u019a\3\u019a")
buf.write("\3\u019a\3\u019a\3\u019a\3\u019b\3\u019b\3\u019b\3\u019b")
buf.write("\3\u019c\3\u019c\3\u019c\3\u019c\3\u019c\3\u019c\3\u019d")
buf.write("\3\u019d\3\u019d\3\u019d\3\u019d\3\u019d\3\u019d\3\u019d")
buf.write("\3\u019d\3\u019e\3\u019e\3\u019e\3\u019e\3\u019e\3\u019e")
buf.write("\3\u019f\3\u019f\3\u019f\3\u019f\3\u019f\3\u019f\3\u019f")
buf.write("\3\u01a0\3\u01a0\3\u01a0\3\u01a0\3\u01a0\3\u01a0\3\u01a0")
buf.write("\3\u01a0\3\u01a1\3\u01a1\3\u01a1\3\u01a1\3\u01a1\3\u01a1")
buf.write("\3\u01a1\3\u01a1\3\u01a1\3\u01a2\3\u01a2\3\u01a2\3\u01a2")
buf.write("\3\u01a2\3\u01a2\3\u01a2\3\u01a2\3\u01a2\3\u01a3\3\u01a3")
buf.write("\3\u01a3\3\u01a3\3\u01a3\3\u01a3\3\u01a3\3\u01a4\3\u01a4")
buf.write("\3\u01a4\3\u01a4\3\u01a4\3\u01a4\3\u01a4\3\u01a4\3\u01a5")
buf.write("\3\u01a5\3\u01a5\3\u01a5\3\u01a5\3\u01a5\3\u01a5\3\u01a5")
buf.write("\3\u01a6\3\u01a6\3\u01a6\3\u01a6\3\u01a6\3\u01a6\3\u01a6")
buf.write("\3\u01a6\3\u01a6\3\u01a7\3\u01a7\3\u01a7\3\u01a7\3\u01a7")
buf.write("\3\u01a8\3\u01a8\3\u01a8\3\u01a8\3\u01a8\3\u01a8\3\u01a8")
buf.write("\3\u01a8\3\u01a9\3\u01a9\3\u01a9\3\u01a9\3\u01a9\3\u01a9")
buf.write("\3\u01a9\3\u01a9\3\u01a9\3\u01a9\3\u01a9\3\u01aa\3\u01aa")
buf.write("\3\u01aa\3\u01aa\3\u01aa\3\u01ab\3\u01ab\3\u01ab\3\u01ab")
buf.write("\3\u01ab\3\u01ab\3\u01ab\3\u01ab\3\u01ab\3\u01ac\3\u01ac")
buf.write("\3\u01ac\3\u01ac\3\u01ac\3\u01ac\3\u01ad\3\u01ad\3\u01ad")
buf.write("\3\u01ad\3\u01ad\3\u01ad\3\u01ae\3\u01ae\3\u01ae\3\u01ae")
buf.write("\3\u01ae\3\u01af\3\u01af\3\u01af\3\u01af\3\u01af\3\u01af")
buf.write("\3\u01af\3\u01b0\3\u01b0\3\u01b0\3\u01b0\3\u01b0\3\u01b1")
buf.write("\3\u01b1\3\u01b1\3\u01b1\3\u01b1\3\u01b1\3\u01b2\3\u01b2")
buf.write("\3\u01b2\3\u01b2\3\u01b3\3\u01b3\3\u01b3\3\u01b3\3\u01b3")
buf.write("\3\u01b3\3\u01b3\3\u01b4\3\u01b4\3\u01b4\3\u01b4\3\u01b4")
buf.write("\3\u01b4\3\u01b4\3\u01b4\3\u01b4\3\u01b4\3\u01b4\3\u01b4")
buf.write("\3\u01b4\3\u01b4\3\u01b5\3\u01b5\3\u01b5\3\u01b5\3\u01b5")
buf.write("\3\u01b5\3\u01b5\3\u01b5\3\u01b6\3\u01b6\3\u01b6\3\u01b6")
buf.write("\3\u01b6\3\u01b6\3\u01b6\3\u01b6\3\u01b6\3\u01b6\3\u01b6")
buf.write("\3\u01b6\3\u01b6\3\u01b7\3\u01b7\3\u01b7\3\u01b7\3\u01b7")
buf.write("\3\u01b7\3\u01b7\3\u01b7\3\u01b7\3\u01b7\3\u01b7\3\u01b8")
buf.write("\3\u01b8\3\u01b8\3\u01b8\3\u01b8\3\u01b8\3\u01b8\3\u01b8")
buf.write("\3\u01b8\3\u01b8\3\u01b9\3\u01b9\3\u01b9\3\u01b9\3\u01b9")
buf.write("\3\u01b9\3\u01b9\3\u01b9\3\u01b9\3\u01b9\3\u01ba\3\u01ba")
buf.write("\3\u01ba\3\u01ba\3\u01ba\3\u01ba\3\u01ba\3\u01ba\3\u01ba")
buf.write("\3\u01ba\3\u01ba\3\u01ba\3\u01ba\3\u01ba\3\u01bb\3\u01bb")
buf.write("\3\u01bb\3\u01bb\3\u01bb\3\u01bb\3\u01bb\3\u01bb\3\u01bb")
buf.write("\3\u01bc\3\u01bc\3\u01bc\3\u01bc\3\u01bc\3\u01bc\3\u01bd")
buf.write("\3\u01bd\3\u01bd\3\u01bd\3\u01bd\3\u01bd\3\u01bd\3\u01bd")
buf.write("\3\u01bd\3\u01be\3\u01be\3\u01be\3\u01be\3\u01be\3\u01be")
buf.write("\3\u01be\3\u01be\3\u01bf\3\u01bf\3\u01bf\3\u01bf\3\u01bf")
buf.write("\3\u01bf\3\u01bf\3\u01bf\3\u01bf\3\u01bf\3\u01bf\3\u01bf")
buf.write("\3\u01bf\3\u01c0\3\u01c0\3\u01c0\3\u01c0\3\u01c0\3\u01c0")
buf.write("\3\u01c0\3\u01c0\3\u01c0\3\u01c1\3\u01c1\3\u01c1\3\u01c1")
buf.write("\3\u01c1\3\u01c2\3\u01c2\3\u01c2\3\u01c2\3\u01c3\3\u01c3")
buf.write("\3\u01c3\3\u01c3\3\u01c3\3\u01c3\3\u01c3\3\u01c3\3\u01c3")
buf.write("\3\u01c3\3\u01c3\3\u01c3\3\u01c3\3\u01c3\3\u01c3\3\u01c3")
buf.write("\3\u01c3\3\u01c3\3\u01c3\3\u01c3\3\u01c3\3\u01c3\3\u01c3")
buf.write("\3\u01c3\3\u01c3\3\u01c4\3\u01c4\3\u01c4\3\u01c4\3\u01c4")
buf.write("\3\u01c5\3\u01c5\3\u01c5\3\u01c5\3\u01c5\3\u01c5\3\u01c5")
buf.write("\3\u01c5\3\u01c5\3\u01c5\3\u01c5\3\u01c6\3\u01c6\3\u01c6")
buf.write("\3\u01c6\3\u01c6\3\u01c6\3\u01c6\3\u01c6\3\u01c6\3\u01c6")
buf.write("\3\u01c6\3\u01c6\3\u01c6\3\u01c6\3\u01c6\3\u01c6\3\u01c6")
buf.write("\3\u01c6\3\u01c7\3\u01c7\3\u01c7\3\u01c7\3\u01c7\3\u01c7")
buf.write("\3\u01c7\3\u01c7\3\u01c7\3\u01c7\3\u01c7\3\u01c7\3\u01c7")
buf.write("\3\u01c7\3\u01c7\3\u01c7\3\u01c8\3\u01c8\3\u01c8\3\u01c8")
buf.write("\3\u01c8\3\u01c8\3\u01c8\3\u01c8\3\u01c8\3\u01c8\3\u01c8")
buf.write("\3\u01c8\3\u01c8\3\u01c8\3\u01c8\3\u01c8\3\u01c8\3\u01c8")
buf.write("\3\u01c8\3\u01c9\3\u01c9\3\u01c9\3\u01c9\3\u01c9\3\u01c9")
buf.write("\3\u01c9\3\u01c9\3\u01c9\3\u01c9\3\u01c9\3\u01c9\3\u01c9")
buf.write("\3\u01c9\3\u01c9\3\u01c9\3\u01c9\3\u01c9\3\u01c9\3\u01c9")
buf.write("\3\u01c9\3\u01c9\3\u01c9\3\u01ca\3\u01ca\3\u01ca\3\u01ca")
buf.write("\3\u01ca\3\u01ca\3\u01ca\3\u01ca\3\u01ca\3\u01ca\3\u01ca")
buf.write("\3\u01ca\3\u01ca\3\u01ca\3\u01ca\3\u01cb\3\u01cb\3\u01cb")
buf.write("\3\u01cb\3\u01cb\3\u01cb\3\u01cb\3\u01cb\3\u01cb\3\u01cb")
buf.write("\3\u01cc\3\u01cc\3\u01cc\3\u01cc\3\u01cc\3\u01cc\3\u01cc")
buf.write("\3\u01cc\3\u01cc\3\u01cc\3\u01cc\3\u01cd\3\u01cd\3\u01cd")
buf.write("\3\u01cd\3\u01cd\3\u01cd\3\u01cd\3\u01cd\3\u01ce\3\u01ce")
buf.write("\3\u01ce\3\u01ce\3\u01ce\3\u01ce\3\u01ce\3\u01ce\3\u01ce")
buf.write("\3\u01ce\3\u01ce\3\u01ce\3\u01ce\3\u01cf\3\u01cf\3\u01cf")
buf.write("\3\u01cf\3\u01cf\3\u01cf\3\u01cf\3\u01cf\3\u01cf\3\u01cf")
buf.write("\3\u01cf\3\u01cf\3\u01cf\3\u01cf\3\u01cf\3\u01cf\3\u01d0")
buf.write("\3\u01d0\3\u01d0\3\u01d0\3\u01d0\3\u01d0\3\u01d0\3\u01d0")
buf.write("\3\u01d0\3\u01d0\3\u01d0\3\u01d0\3\u01d0\3\u01d0\3\u01d0")
buf.write("\3\u01d0\3\u01d1\3\u01d1\3\u01d1\3\u01d1\3\u01d1\3\u01d2")
buf.write("\3\u01d2\3\u01d2\3\u01d2\3\u01d3\3\u01d3\3\u01d3\3\u01d3")
buf.write("\3\u01d3\3\u01d4\3\u01d4\3\u01d4\3\u01d4\3\u01d5\3\u01d5")
buf.write("\3\u01d5\3\u01d5\3\u01d5\3\u01d6\3\u01d6\3\u01d6\3\u01d6")
buf.write("\3\u01d7\3\u01d7\3\u01d7\3\u01d7\3\u01d7\3\u01d7\3\u01d7")
buf.write("\3\u01d8\3\u01d8\3\u01d8\3\u01d8\3\u01d9\3\u01d9\3\u01d9")
buf.write("\3\u01d9\3\u01d9\3\u01d9\3\u01da\3\u01da\3\u01da\3\u01da")
buf.write("\3\u01da\3\u01da\3\u01da\3\u01da\3\u01da\3\u01da\3\u01da")
buf.write("\3\u01da\3\u01da\3\u01da\3\u01da\3\u01da\3\u01db\3\u01db")
buf.write("\3\u01db\3\u01db\3\u01db\3\u01db\3\u01db\3\u01db\3\u01db")
buf.write("\3\u01db\3\u01db\3\u01dc\3\u01dc\3\u01dc\3\u01dc\3\u01dd")
buf.write("\3\u01dd\3\u01dd\3\u01dd\3\u01dd\3\u01dd\3\u01dd\3\u01dd")
buf.write("\3\u01dd\3\u01de\3\u01de\3\u01de\3\u01de\3\u01de\3\u01de")
buf.write("\3\u01df\3\u01df\3\u01df\3\u01df\3\u01df\3\u01df\3\u01df")
buf.write("\3\u01e0\3\u01e0\3\u01e0\3\u01e0\3\u01e0\3\u01e1\3\u01e1")
buf.write("\3\u01e1\3\u01e1\3\u01e1\3\u01e1\3\u01e1\3\u01e2\3\u01e2")
buf.write("\3\u01e2\3\u01e2\3\u01e2\3\u01e2\7\u01e2\u1368\n\u01e2")
buf.write("\f\u01e2\16\u01e2\u136b\13\u01e2\3\u01e2\3\u01e2\3\u01e3")
buf.write("\3\u01e3\3\u01e3\7\u01e3\u1372\n\u01e3\f\u01e3\16\u01e3")
buf.write("\u1375\13\u01e3\3\u01e3\6\u01e3\u1378\n\u01e3\r\u01e3")
buf.write("\16\u01e3\u1379\3\u01e4\3\u01e4\3\u01e4\7\u01e4\u137f")
buf.write("\n\u01e4\f\u01e4\16\u01e4\u1382\13\u01e4\3\u01e4\6\u01e4")
buf.write("\u1385\n\u01e4\r\u01e4\16\u01e4\u1386\3\u01e5\3\u01e5")
buf.write("\3\u01e5\3\u01e6\3\u01e6\3\u01e7\3\u01e7\3\u01e8\3\u01e8")
buf.write("\3\u01e8\5\u01e8\u1393\n\u01e8\3\u01e8\3\u01e8\5\u01e8")
buf.write("\u1397\n\u01e8\5\u01e8\u1399\n\u01e8\3\u01e8\3\u01e8\5")
buf.write("\u01e8\u139d\n\u01e8\3\u01e9\3\u01e9\3\u01e9\3\u01e9\3")
buf.write("\u01e9\7\u01e9\u13a4\n\u01e9\f\u01e9\16\u01e9\u13a7\13")
buf.write("\u01e9\3\u01e9\3\u01e9\3\u01ea\3\u01ea\3\u01ea\3\u01ea")
buf.write("\3\u01ea\5\u01ea\u13b0\n\u01ea\3\u01ea\3\u01ea\3\u01eb")
buf.write("\3\u01eb\3\u01ec\3\u01ec\3\u01ec\7\u01ec\u13b9\n\u01ec")
buf.write("\f\u01ec\16\u01ec\u13bc\13\u01ec\3\u01ec\3\u01ec\3\u01ec")
buf.write("\3\u01ed\3\u01ed\3\u01ed\7\u01ed\u13c4\n\u01ed\f\u01ed")
buf.write("\16\u01ed\u13c7\13\u01ed\3\u01ed\3\u01ed\3\u01ed\3\u01ee")
buf.write("\3\u01ee\3\u01ee\7\u01ee\u13cf\n\u01ee\f\u01ee\16\u01ee")
buf.write("\u13d2\13\u01ee\3\u01ee\3\u01ee\3\u01ee\3\u01ef\3\u01ef")
buf.write("\3\u01ef\7\u01ef\u13da\n\u01ef\f\u01ef\16\u01ef\u13dd")
buf.write("\13\u01ef\3\u01ef\3\u01ef\3\u01ef\3\u01f0\3\u01f0\3\u01f1")
buf.write("\3\u01f1\3\u01f1\3\u01f1\6\u01f1\u13e8\n\u01f1\r\u01f1")
buf.write("\16\u01f1\u13e9\3\u01f1\3\u01f1\3\u01f2\3\u01f2\3\u01f3")
buf.write("\3\u01f3\3\u01f4\3\u01f4\3\u01f5\3\u01f5\3\u01f6\3\u01f6")
buf.write("\3\u01f6\3\u01f7\3\u01f7\3\u01f8\3\u01f8\3\u01f9\3\u01f9")
buf.write("\3\u01fa\3\u01fa\3\u01fb\3\u01fb\3\u01fc\3\u01fc\3\u01fd")
buf.write("\3\u01fd\3\u01fd\3\u01fe\3\u01fe\3\u01fe\3\u01fe\7\u01fe")
buf.write("\u140c\n\u01fe\f\u01fe\16\u01fe\u140f\13\u01fe\3\u01fe")
buf.write("\3\u01fe\3\u01fe\3\u01fe\3\u01fe\5\u01fe\u1416\n\u01fe")
buf.write("\3\u01ff\3\u01ff\3\u0200\3\u0200\3\u0201\3\u0201\3\u0201")
buf.write("\3\u0202\3\u0202\3\u0203\3\u0203\3\u0203\3\u0204\3\u0204")
buf.write("\3\u0204\3\u0204\3\u0204\3\u0204\3\u0204\3\u0204\5\u0204")
buf.write("\u142c\n\u0204\3\u0205\3\u0205\3\u0206\3\u0206\3\u0207")
buf.write("\3\u0207\3\u0208\3\u0208\3\u0209\3\u0209\3\u020a\3\u020a")
buf.write("\3\u020a\3\u020b\3\u020b\3\u020c\3\u020c\3\u020d\3\u020d")
buf.write("\3\u020e\3\u020e\3\u020f\3\u020f\3\u0210\6\u0210\u1446")
buf.write("\n\u0210\r\u0210\16\u0210\u1447\3\u0210\3\u0210\3\u0211")
buf.write("\3\u0211\3\u0212\6\u0212\u144f\n\u0212\r\u0212\16\u0212")
buf.write("\u1450\3\u0213\7\u0213\u1454\n\u0213\f\u0213\16\u0213")
buf.write("\u1457\13\u0213\3\u0213\5\u0213\u145a\n\u0213\3\u0213")
buf.write("\6\u0213\u145d\n\u0213\r\u0213\16\u0213\u145e\3\u0214")
buf.write("\3\u0214\3\u0214\3\u0214\7\u0214\u1465\n\u0214\f\u0214")
buf.write("\16\u0214\u1468\13\u0214\3\u0214\3\u0214\5\u0214\u146c")
buf.write("\n\u0214\3\u0214\3\u0214\3\u0215\3\u0215\3\u0215\3\u0215")
buf.write("\7\u0215\u1474\n\u0215\f\u0215\16\u0215\u1477\13\u0215")
buf.write("\3\u0215\3\u0215\3\u0215\3\u0215\3\u0215\3\u0216\3\u0216")
buf.write("\3\u0216\3\u0216\3\u0216\3\u0216\3\u0216\3\u0216\3\u0216")
buf.write("\7\u0216\u1487\n\u0216\f\u0216\16\u0216\u148a\13\u0216")
buf.write("\3\u0216\3\u0216\5\u0216\u148e\n\u0216\3\u0217\5\u0217")
buf.write("\u1491\n\u0217\3\u0217\3\u0217\3\u0218\3\u0218\3\u0219")
buf.write("\3\u0219\3\u0219\7\u0219\u149a\n\u0219\f\u0219\16\u0219")
buf.write("\u149d\13\u0219\3\u021a\3\u021a\3\u021a\3\u021a\3\u021a")
buf.write("\3\u021b\3\u021b\3\u021c\3\u021c\3\u021d\3\u021d\3\u021e")
buf.write("\3\u021e\3\u021f\3\u021f\3\u0220\3\u0220\3\u0221\3\u0221")
buf.write("\3\u0222\3\u0222\3\u0223\3\u0223\3\u0224\3\u0224\3\u0225")
buf.write("\3\u0225\3\u0226\3\u0226\3\u0227\3\u0227\3\u0228\3\u0228")
buf.write("\3\u0229\3\u0229\3\u022a\3\u022a\3\u022b\3\u022b\3\u022c")
buf.write("\3\u022c\3\u022d\3\u022d\3\u022e\3\u022e\3\u022f\3\u022f")
buf.write("\3\u0230\3\u0230\3\u0231\3\u0231\3\u0232\3\u0232\3\u0233")
buf.write("\3\u0233\3\u0234\3\u0234\7\u13ba\u13c5\u13d0\u13db\u1475")
buf.write("\2\u0235\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f")
buf.write("\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27")
buf.write("-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%")
buf.write("I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67")
buf.write("m8o9q:s;u<w=y>{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089")
buf.write("F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095L\u0097M\u0099")
buf.write("N\u009bO\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9")
buf.write("V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9")
buf.write("^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9")
buf.write("f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3k\u00d5l\u00d7m\u00d9")
buf.write("n\u00dbo\u00ddp\u00dfq\u00e1r\u00e3s\u00e5t\u00e7u\u00e9")
buf.write("v\u00ebw\u00edx\u00efy\u00f1z\u00f3{\u00f5|\u00f7}\u00f9")
buf.write("~\u00fb\177\u00fd\u0080\u00ff\u0081\u0101\u0082\u0103")
buf.write("\u0083\u0105\u0084\u0107\u0085\u0109\u0086\u010b\u0087")
buf.write("\u010d\u0088\u010f\u0089\u0111\u008a\u0113\u008b\u0115")
buf.write("\u008c\u0117\u008d\u0119\u008e\u011b\u008f\u011d\u0090")
buf.write("\u011f\u0091\u0121\u0092\u0123\u0093\u0125\u0094\u0127")
buf.write("\u0095\u0129\u0096\u012b\u0097\u012d\u0098\u012f\u0099")
buf.write("\u0131\u009a\u0133\u009b\u0135\u009c\u0137\u009d\u0139")
buf.write("\u009e\u013b\u009f\u013d\u00a0\u013f\u00a1\u0141\u00a2")
buf.write("\u0143\u00a3\u0145\u00a4\u0147\u00a5\u0149\u00a6\u014b")
buf.write("\u00a7\u014d\u00a8\u014f\u00a9\u0151\u00aa\u0153\u00ab")
buf.write("\u0155\u00ac\u0157\u00ad\u0159\u00ae\u015b\u00af\u015d")
buf.write("\u00b0\u015f\u00b1\u0161\u00b2\u0163\u00b3\u0165\u00b4")
buf.write("\u0167\u00b5\u0169\u00b6\u016b\u00b7\u016d\u00b8\u016f")
buf.write("\u00b9\u0171\u00ba\u0173\u00bb\u0175\u00bc\u0177\u00bd")
buf.write("\u0179\u00be\u017b\u00bf\u017d\u00c0\u017f\u00c1\u0181")
buf.write("\u00c2\u0183\u00c3\u0185\u00c4\u0187\u00c5\u0189\u00c6")
buf.write("\u018b\u00c7\u018d\u00c8\u018f\u00c9\u0191\u00ca\u0193")
buf.write("\u00cb\u0195\u00cc\u0197\u00cd\u0199\u00ce\u019b\u00cf")
buf.write("\u019d\u00d0\u019f\u00d1\u01a1\u00d2\u01a3\u00d3\u01a5")
buf.write("\u00d4\u01a7\u00d5\u01a9\u00d6\u01ab\u00d7\u01ad\u00d8")
buf.write("\u01af\u00d9\u01b1\u00da\u01b3\u00db\u01b5\u00dc\u01b7")
buf.write("\u00dd\u01b9\u00de\u01bb\u00df\u01bd\u00e0\u01bf\u00e1")
buf.write("\u01c1\u00e2\u01c3\u00e3\u01c5\u00e4\u01c7\u00e5\u01c9")
buf.write("\u00e6\u01cb\u00e7\u01cd\u00e8\u01cf\u00e9\u01d1\u00ea")
buf.write("\u01d3\u00eb\u01d5\u00ec\u01d7\u00ed\u01d9\u00ee\u01db")
buf.write("\u00ef\u01dd\u00f0\u01df\u00f1\u01e1\u00f2\u01e3\u00f3")
buf.write("\u01e5\u00f4\u01e7\u00f5\u01e9\u00f6\u01eb\u00f7\u01ed")
buf.write("\u00f8\u01ef\u00f9\u01f1\u00fa\u01f3\u00fb\u01f5\u00fc")
buf.write("\u01f7\u00fd\u01f9\u00fe\u01fb\u00ff\u01fd\u0100\u01ff")
buf.write("\u0101\u0201\u0102\u0203\u0103\u0205\u0104\u0207\u0105")
buf.write("\u0209\u0106\u020b\u0107\u020d\u0108\u020f\u0109\u0211")
buf.write("\u010a\u0213\u010b\u0215\u010c\u0217\u010d\u0219\u010e")
buf.write("\u021b\u010f\u021d\u0110\u021f\u0111\u0221\u0112\u0223")
buf.write("\u0113\u0225\u0114\u0227\u0115\u0229\u0116\u022b\u0117")
buf.write("\u022d\u0118\u022f\u0119\u0231\u011a\u0233\u011b\u0235")
buf.write("\u011c\u0237\u011d\u0239\u011e\u023b\u011f\u023d\u0120")
buf.write("\u023f\u0121\u0241\u0122\u0243\u0123\u0245\u0124\u0247")
buf.write("\u0125\u0249\u0126\u024b\u0127\u024d\u0128\u024f\u0129")
buf.write("\u0251\u012a\u0253\u012b\u0255\u012c\u0257\u012d\u0259")
buf.write("\u012e\u025b\u012f\u025d\u0130\u025f\u0131\u0261\u0132")
buf.write("\u0263\u0133\u0265\u0134\u0267\u0135\u0269\u0136\u026b")
buf.write("\u0137\u026d\u0138\u026f\u0139\u0271\u013a\u0273\u013b")
buf.write("\u0275\u013c\u0277\u013d\u0279\u013e\u027b\u013f\u027d")
buf.write("\u0140\u027f\u0141\u0281\u0142\u0283\u0143\u0285\u0144")
buf.write("\u0287\u0145\u0289\u0146\u028b\u0147\u028d\u0148\u028f")
buf.write("\u0149\u0291\u014a\u0293\u014b\u0295\u014c\u0297\u014d")
buf.write("\u0299\u014e\u029b\u014f\u029d\u0150\u029f\u0151\u02a1")
buf.write("\u0152\u02a3\u0153\u02a5\u0154\u02a7\u0155\u02a9\u0156")
buf.write("\u02ab\u0157\u02ad\u0158\u02af\u0159\u02b1\u015a\u02b3")
buf.write("\u015b\u02b5\u015c\u02b7\u015d\u02b9\u015e\u02bb\u015f")
buf.write("\u02bd\u0160\u02bf\u0161\u02c1\u0162\u02c3\u0163\u02c5")
buf.write("\u0164\u02c7\u0165\u02c9\u0166\u02cb\u0167\u02cd\u0168")
buf.write("\u02cf\u0169\u02d1\u016a\u02d3\u016b\u02d5\u016c\u02d7")
buf.write("\u016d\u02d9\u016e\u02db\u016f\u02dd\u0170\u02df\u0171")
buf.write("\u02e1\u0172\u02e3\u0173\u02e5\u0174\u02e7\u0175\u02e9")
buf.write("\u0176\u02eb\u0177\u02ed\u0178\u02ef\u0179\u02f1\u017a")
buf.write("\u02f3\u017b\u02f5\u017c\u02f7\u017d\u02f9\u017e\u02fb")
buf.write("\u017f\u02fd\u0180\u02ff\u0181\u0301\u0182\u0303\u0183")
buf.write("\u0305\u0184\u0307\u0185\u0309\u0186\u030b\u0187\u030d")
buf.write("\u0188\u030f\u0189\u0311\u018a\u0313\u018b\u0315\u018c")
buf.write("\u0317\u018d\u0319\u018e\u031b\u018f\u031d\u0190\u031f")
buf.write("\u0191\u0321\u0192\u0323\u0193\u0325\u0194\u0327\u0195")
buf.write("\u0329\u0196\u032b\u0197\u032d\u0198\u032f\u0199\u0331")
buf.write("\u019a\u0333\u019b\u0335\u019c\u0337\u019d\u0339\u019e")
buf.write("\u033b\u019f\u033d\u01a0\u033f\u01a1\u0341\u01a2\u0343")
buf.write("\u01a3\u0345\u01a4\u0347\u01a5\u0349\u01a6\u034b\u01a7")
buf.write("\u034d\u01a8\u034f\u01a9\u0351\u01aa\u0353\u01ab\u0355")
buf.write("\u01ac\u0357\u01ad\u0359\u01ae\u035b\u01af\u035d\u01b0")
buf.write("\u035f\u01b1\u0361\u01b2\u0363\u01b3\u0365\u01b4\u0367")
buf.write("\u01b5\u0369\u01b6\u036b\u01b7\u036d\u01b8\u036f\u01b9")
buf.write("\u0371\u01ba\u0373\u01bb\u0375\u01bc\u0377\u01bd\u0379")
buf.write("\u01be\u037b\u01bf\u037d\u01c0\u037f\u01c1\u0381\u01c2")
buf.write("\u0383\u01c3\u0385\u01c4\u0387\u01c5\u0389\u01c6\u038b")
buf.write("\u01c7\u038d\u01c8\u038f\u01c9\u0391\u01ca\u0393\u01cb")
buf.write("\u0395\u01cc\u0397\u01cd\u0399\u01ce\u039b\u01cf\u039d")
buf.write("\u01d0\u039f\u01d1\u03a1\u01d2\u03a3\u01d3\u03a5\u01d4")
buf.write("\u03a7\u01d5\u03a9\u01d6\u03ab\u01d7\u03ad\u01d8\u03af")
buf.write("\u01d9\u03b1\u01da\u03b3\u01db\u03b5\u01dc\u03b7\u01dd")
buf.write("\u03b9\u01de\u03bb\u01df\u03bd\u01e0\u03bf\u01e1\u03c1")
buf.write("\u01e2\u03c3\u01e3\u03c5\u01e4\u03c7\u01e5\u03c9\u01e6")
buf.write("\u03cb\u01e7\u03cd\u01e8\u03cf\u01e9\u03d1\u01ea\u03d3")
buf.write("\2\u03d5\2\u03d7\2\u03d9\2\u03db\2\u03dd\2\u03df\2\u03e1")
buf.write("\u01eb\u03e3\u01ec\u03e5\u01ed\u03e7\u01ee\u03e9\u01ef")
buf.write("\u03eb\u01f0\u03ed\u01f1\u03ef\u01f2\u03f1\u01f3\u03f3")
buf.write("\u01f4\u03f5\u01f5\u03f7\u01f6\u03f9\u01f7\u03fb\u01f8")
buf.write("\u03fd\u01f9\u03ff\u01fa\u0401\u01fb\u0403\u01fc\u0405")
buf.write("\u01fd\u0407\u01fe\u0409\u01ff\u040b\u0200\u040d\u0201")
buf.write("\u040f\u0202\u0411\2\u0413\u0203\u0415\u0204\u0417\u0205")
buf.write("\u0419\u0206\u041b\u0207\u041d\u0208\u041f\u0209\u0421")
buf.write("\2\u0423\2\u0425\2\u0427\u020a\u0429\u020b\u042b\u020c")
buf.write("\u042d\2\u042f\2\u0431\u020d\u0433\u020e\u0435\2\u0437")
buf.write("\2\u0439\2\u043b\2\u043d\2\u043f\2\u0441\2\u0443\2\u0445")
buf.write("\2\u0447\2\u0449\2\u044b\2\u044d\2\u044f\2\u0451\2\u0453")
buf.write("\2\u0455\2\u0457\2\u0459\2\u045b\2\u045d\2\u045f\2\u0461")
buf.write("\2\u0463\2\u0465\2\u0467\2\3\2\'\5\2\f\f\17\17))\5\2\62")
buf.write(";CHch\4\2GGgg\4\2--//\t\2\13\f\17\17\"\"**>>]]}}\5\2\f")
buf.write("\f\17\17$$\4\2\62;aa\5\2\13\f\17\17\"\"\4\2C\\c|\4\2\f")
buf.write("\f\17\17\4\2\13\13\"\"\5\2%&\62;aa\4\2CCcc\4\2DDdd\4\2")
buf.write("EEee\4\2FFff\4\2HHhh\4\2IIii\4\2JJjj\4\2KKkk\4\2LLll\4")
buf.write("\2MMmm\4\2NNnn\4\2OOoo\4\2PPpp\4\2QQqq\4\2RRrr\4\2SSs")
buf.write("s\4\2TTtt\4\2UUuu\4\2VVvv\4\2WWww\4\2XXxx\4\2YYyy\4\2")
buf.write("ZZzz\4\2[[{{\4\2\\\\||\2\u14dd\2\3\3\2\2\2\2\5\3\2\2\2")
buf.write("\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17")
buf.write("\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3")
buf.write("\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2")
buf.write("\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3")
buf.write("\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2")
buf.write("\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3")
buf.write("\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E")
buf.write("\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2")
buf.write("O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2")
buf.write("\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2")
buf.write("\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2")
buf.write("\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3")
buf.write("\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177")
buf.write("\3\2\2\2\2\u0081\3\2\2\2\2\u0083\3\2\2\2\2\u0085\3\2\2")
buf.write("\2\2\u0087\3\2\2\2\2\u0089\3\2\2\2\2\u008b\3\2\2\2\2\u008d")
buf.write("\3\2\2\2\2\u008f\3\2\2\2\2\u0091\3\2\2\2\2\u0093\3\2\2")
buf.write("\2\2\u0095\3\2\2\2\2\u0097\3\2\2\2\2\u0099\3\2\2\2\2\u009b")
buf.write("\3\2\2\2\2\u009d\3\2\2\2\2\u009f\3\2\2\2\2\u00a1\3\2\2")
buf.write("\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2\2\2\u00a7\3\2\2\2\2\u00a9")
buf.write("\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad\3\2\2\2\2\u00af\3\2\2")
buf.write("\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2\2\2\u00b5\3\2\2\2\2\u00b7")
buf.write("\3\2\2\2\2\u00b9\3\2\2\2\2\u00bb\3\2\2\2\2\u00bd\3\2\2")
buf.write("\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2\2\2\u00c5")
buf.write("\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb\3\2\2")
buf.write("\2\2\u00cd\3\2\2\2\2\u00cf\3\2\2\2\2\u00d1\3\2\2\2\2\u00d3")
buf.write("\3\2\2\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9\3\2\2")
buf.write("\2\2\u00db\3\2\2\2\2\u00dd\3\2\2\2\2\u00df\3\2\2\2\2\u00e1")
buf.write("\3\2\2\2\2\u00e3\3\2\2\2\2\u00e5\3\2\2\2\2\u00e7\3\2\2")
buf.write("\2\2\u00e9\3\2\2\2\2\u00eb\3\2\2\2\2\u00ed\3\2\2\2\2\u00ef")
buf.write("\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3\3\2\2\2\2\u00f5\3\2\2")
buf.write("\2\2\u00f7\3\2\2\2\2\u00f9\3\2\2\2\2\u00fb\3\2\2\2\2\u00fd")
buf.write("\3\2\2\2\2\u00ff\3\2\2\2\2\u0101\3\2\2\2\2\u0103\3\2\2")
buf.write("\2\2\u0105\3\2\2\2\2\u0107\3\2\2\2\2\u0109\3\2\2\2\2\u010b")
buf.write("\3\2\2\2\2\u010d\3\2\2\2\2\u010f\3\2\2\2\2\u0111\3\2\2")
buf.write("\2\2\u0113\3\2\2\2\2\u0115\3\2\2\2\2\u0117\3\2\2\2\2\u0119")
buf.write("\3\2\2\2\2\u011b\3\2\2\2\2\u011d\3\2\2\2\2\u011f\3\2\2")
buf.write("\2\2\u0121\3\2\2\2\2\u0123\3\2\2\2\2\u0125\3\2\2\2\2\u0127")
buf.write("\3\2\2\2\2\u0129\3\2\2\2\2\u012b\3\2\2\2\2\u012d\3\2\2")
buf.write("\2\2\u012f\3\2\2\2\2\u0131\3\2\2\2\2\u0133\3\2\2\2\2\u0135")
buf.write("\3\2\2\2\2\u0137\3\2\2\2\2\u0139\3\2\2\2\2\u013b\3\2\2")
buf.write("\2\2\u013d\3\2\2\2\2\u013f\3\2\2\2\2\u0141\3\2\2\2\2\u0143")
buf.write("\3\2\2\2\2\u0145\3\2\2\2\2\u0147\3\2\2\2\2\u0149\3\2\2")
buf.write("\2\2\u014b\3\2\2\2\2\u014d\3\2\2\2\2\u014f\3\2\2\2\2\u0151")
buf.write("\3\2\2\2\2\u0153\3\2\2\2\2\u0155\3\2\2\2\2\u0157\3\2\2")
buf.write("\2\2\u0159\3\2\2\2\2\u015b\3\2\2\2\2\u015d\3\2\2\2\2\u015f")
buf.write("\3\2\2\2\2\u0161\3\2\2\2\2\u0163\3\2\2\2\2\u0165\3\2\2")
buf.write("\2\2\u0167\3\2\2\2\2\u0169\3\2\2\2\2\u016b\3\2\2\2\2\u016d")
buf.write("\3\2\2\2\2\u016f\3\2\2\2\2\u0171\3\2\2\2\2\u0173\3\2\2")
buf.write("\2\2\u0175\3\2\2\2\2\u0177\3\2\2\2\2\u0179\3\2\2\2\2\u017b")
buf.write("\3\2\2\2\2\u017d\3\2\2\2\2\u017f\3\2\2\2\2\u0181\3\2\2")
buf.write("\2\2\u0183\3\2\2\2\2\u0185\3\2\2\2\2\u0187\3\2\2\2\2\u0189")
buf.write("\3\2\2\2\2\u018b\3\2\2\2\2\u018d\3\2\2\2\2\u018f\3\2\2")
buf.write("\2\2\u0191\3\2\2\2\2\u0193\3\2\2\2\2\u0195\3\2\2\2\2\u0197")
buf.write("\3\2\2\2\2\u0199\3\2\2\2\2\u019b\3\2\2\2\2\u019d\3\2\2")
buf.write("\2\2\u019f\3\2\2\2\2\u01a1\3\2\2\2\2\u01a3\3\2\2\2\2\u01a5")
buf.write("\3\2\2\2\2\u01a7\3\2\2\2\2\u01a9\3\2\2\2\2\u01ab\3\2\2")
buf.write("\2\2\u01ad\3\2\2\2\2\u01af\3\2\2\2\2\u01b1\3\2\2\2\2\u01b3")
buf.write("\3\2\2\2\2\u01b5\3\2\2\2\2\u01b7\3\2\2\2\2\u01b9\3\2\2")
buf.write("\2\2\u01bb\3\2\2\2\2\u01bd\3\2\2\2\2\u01bf\3\2\2\2\2\u01c1")
buf.write("\3\2\2\2\2\u01c3\3\2\2\2\2\u01c5\3\2\2\2\2\u01c7\3\2\2")
buf.write("\2\2\u01c9\3\2\2\2\2\u01cb\3\2\2\2\2\u01cd\3\2\2\2\2\u01cf")
buf.write("\3\2\2\2\2\u01d1\3\2\2\2\2\u01d3\3\2\2\2\2\u01d5\3\2\2")
buf.write("\2\2\u01d7\3\2\2\2\2\u01d9\3\2\2\2\2\u01db\3\2\2\2\2\u01dd")
buf.write("\3\2\2\2\2\u01df\3\2\2\2\2\u01e1\3\2\2\2\2\u01e3\3\2\2")
buf.write("\2\2\u01e5\3\2\2\2\2\u01e7\3\2\2\2\2\u01e9\3\2\2\2\2\u01eb")
buf.write("\3\2\2\2\2\u01ed\3\2\2\2\2\u01ef\3\2\2\2\2\u01f1\3\2\2")
buf.write("\2\2\u01f3\3\2\2\2\2\u01f5\3\2\2\2\2\u01f7\3\2\2\2\2\u01f9")
buf.write("\3\2\2\2\2\u01fb\3\2\2\2\2\u01fd\3\2\2\2\2\u01ff\3\2\2")
buf.write("\2\2\u0201\3\2\2\2\2\u0203\3\2\2\2\2\u0205\3\2\2\2\2\u0207")
buf.write("\3\2\2\2\2\u0209\3\2\2\2\2\u020b\3\2\2\2\2\u020d\3\2\2")
buf.write("\2\2\u020f\3\2\2\2\2\u0211\3\2\2\2\2\u0213\3\2\2\2\2\u0215")
buf.write("\3\2\2\2\2\u0217\3\2\2\2\2\u0219\3\2\2\2\2\u021b\3\2\2")
buf.write("\2\2\u021d\3\2\2\2\2\u021f\3\2\2\2\2\u0221\3\2\2\2\2\u0223")
buf.write("\3\2\2\2\2\u0225\3\2\2\2\2\u0227\3\2\2\2\2\u0229\3\2\2")
buf.write("\2\2\u022b\3\2\2\2\2\u022d\3\2\2\2\2\u022f\3\2\2\2\2\u0231")
buf.write("\3\2\2\2\2\u0233\3\2\2\2\2\u0235\3\2\2\2\2\u0237\3\2\2")
buf.write("\2\2\u0239\3\2\2\2\2\u023b\3\2\2\2\2\u023d\3\2\2\2\2\u023f")
buf.write("\3\2\2\2\2\u0241\3\2\2\2\2\u0243\3\2\2\2\2\u0245\3\2\2")
buf.write("\2\2\u0247\3\2\2\2\2\u0249\3\2\2\2\2\u024b\3\2\2\2\2\u024d")
buf.write("\3\2\2\2\2\u024f\3\2\2\2\2\u0251\3\2\2\2\2\u0253\3\2\2")
buf.write("\2\2\u0255\3\2\2\2\2\u0257\3\2\2\2\2\u0259\3\2\2\2\2\u025b")
buf.write("\3\2\2\2\2\u025d\3\2\2\2\2\u025f\3\2\2\2\2\u0261\3\2\2")
buf.write("\2\2\u0263\3\2\2\2\2\u0265\3\2\2\2\2\u0267\3\2\2\2\2\u0269")
buf.write("\3\2\2\2\2\u026b\3\2\2\2\2\u026d\3\2\2\2\2\u026f\3\2\2")
buf.write("\2\2\u0271\3\2\2\2\2\u0273\3\2\2\2\2\u0275\3\2\2\2\2\u0277")
buf.write("\3\2\2\2\2\u0279\3\2\2\2\2\u027b\3\2\2\2\2\u027d\3\2\2")
buf.write("\2\2\u027f\3\2\2\2\2\u0281\3\2\2\2\2\u0283\3\2\2\2\2\u0285")
buf.write("\3\2\2\2\2\u0287\3\2\2\2\2\u0289\3\2\2\2\2\u028b\3\2\2")
buf.write("\2\2\u028d\3\2\2\2\2\u028f\3\2\2\2\2\u0291\3\2\2\2\2\u0293")
buf.write("\3\2\2\2\2\u0295\3\2\2\2\2\u0297\3\2\2\2\2\u0299\3\2\2")
buf.write("\2\2\u029b\3\2\2\2\2\u029d\3\2\2\2\2\u029f\3\2\2\2\2\u02a1")
buf.write("\3\2\2\2\2\u02a3\3\2\2\2\2\u02a5\3\2\2\2\2\u02a7\3\2\2")
buf.write("\2\2\u02a9\3\2\2\2\2\u02ab\3\2\2\2\2\u02ad\3\2\2\2\2\u02af")
buf.write("\3\2\2\2\2\u02b1\3\2\2\2\2\u02b3\3\2\2\2\2\u02b5\3\2\2")
buf.write("\2\2\u02b7\3\2\2\2\2\u02b9\3\2\2\2\2\u02bb\3\2\2\2\2\u02bd")
buf.write("\3\2\2\2\2\u02bf\3\2\2\2\2\u02c1\3\2\2\2\2\u02c3\3\2\2")
buf.write("\2\2\u02c5\3\2\2\2\2\u02c7\3\2\2\2\2\u02c9\3\2\2\2\2\u02cb")
buf.write("\3\2\2\2\2\u02cd\3\2\2\2\2\u02cf\3\2\2\2\2\u02d1\3\2\2")
buf.write("\2\2\u02d3\3\2\2\2\2\u02d5\3\2\2\2\2\u02d7\3\2\2\2\2\u02d9")
buf.write("\3\2\2\2\2\u02db\3\2\2\2\2\u02dd\3\2\2\2\2\u02df\3\2\2")
buf.write("\2\2\u02e1\3\2\2\2\2\u02e3\3\2\2\2\2\u02e5\3\2\2\2\2\u02e7")
buf.write("\3\2\2\2\2\u02e9\3\2\2\2\2\u02eb\3\2\2\2\2\u02ed\3\2\2")
buf.write("\2\2\u02ef\3\2\2\2\2\u02f1\3\2\2\2\2\u02f3\3\2\2\2\2\u02f5")
buf.write("\3\2\2\2\2\u02f7\3\2\2\2\2\u02f9\3\2\2\2\2\u02fb\3\2\2")
buf.write("\2\2\u02fd\3\2\2\2\2\u02ff\3\2\2\2\2\u0301\3\2\2\2\2\u0303")
buf.write("\3\2\2\2\2\u0305\3\2\2\2\2\u0307\3\2\2\2\2\u0309\3\2\2")
buf.write("\2\2\u030b\3\2\2\2\2\u030d\3\2\2\2\2\u030f\3\2\2\2\2\u0311")
buf.write("\3\2\2\2\2\u0313\3\2\2\2\2\u0315\3\2\2\2\2\u0317\3\2\2")
buf.write("\2\2\u0319\3\2\2\2\2\u031b\3\2\2\2\2\u031d\3\2\2\2\2\u031f")
buf.write("\3\2\2\2\2\u0321\3\2\2\2\2\u0323\3\2\2\2\2\u0325\3\2\2")
buf.write("\2\2\u0327\3\2\2\2\2\u0329\3\2\2\2\2\u032b\3\2\2\2\2\u032d")
buf.write("\3\2\2\2\2\u032f\3\2\2\2\2\u0331\3\2\2\2\2\u0333\3\2\2")
buf.write("\2\2\u0335\3\2\2\2\2\u0337\3\2\2\2\2\u0339\3\2\2\2\2\u033b")
buf.write("\3\2\2\2\2\u033d\3\2\2\2\2\u033f\3\2\2\2\2\u0341\3\2\2")
buf.write("\2\2\u0343\3\2\2\2\2\u0345\3\2\2\2\2\u0347\3\2\2\2\2\u0349")
buf.write("\3\2\2\2\2\u034b\3\2\2\2\2\u034d\3\2\2\2\2\u034f\3\2\2")
buf.write("\2\2\u0351\3\2\2\2\2\u0353\3\2\2\2\2\u0355\3\2\2\2\2\u0357")
buf.write("\3\2\2\2\2\u0359\3\2\2\2\2\u035b\3\2\2\2\2\u035d\3\2\2")
buf.write("\2\2\u035f\3\2\2\2\2\u0361\3\2\2\2\2\u0363\3\2\2\2\2\u0365")
buf.write("\3\2\2\2\2\u0367\3\2\2\2\2\u0369\3\2\2\2\2\u036b\3\2\2")
buf.write("\2\2\u036d\3\2\2\2\2\u036f\3\2\2\2\2\u0371\3\2\2\2\2\u0373")
buf.write("\3\2\2\2\2\u0375\3\2\2\2\2\u0377\3\2\2\2\2\u0379\3\2\2")
buf.write("\2\2\u037b\3\2\2\2\2\u037d\3\2\2\2\2\u037f\3\2\2\2\2\u0381")
buf.write("\3\2\2\2\2\u0383\3\2\2\2\2\u0385\3\2\2\2\2\u0387\3\2\2")
buf.write("\2\2\u0389\3\2\2\2\2\u038b\3\2\2\2\2\u038d\3\2\2\2\2\u038f")
buf.write("\3\2\2\2\2\u0391\3\2\2\2\2\u0393\3\2\2\2\2\u0395\3\2\2")
buf.write("\2\2\u0397\3\2\2\2\2\u0399\3\2\2\2\2\u039b\3\2\2\2\2\u039d")
buf.write("\3\2\2\2\2\u039f\3\2\2\2\2\u03a1\3\2\2\2\2\u03a3\3\2\2")
buf.write("\2\2\u03a5\3\2\2\2\2\u03a7\3\2\2\2\2\u03a9\3\2\2\2\2\u03ab")
buf.write("\3\2\2\2\2\u03ad\3\2\2\2\2\u03af\3\2\2\2\2\u03b1\3\2\2")
buf.write("\2\2\u03b3\3\2\2\2\2\u03b5\3\2\2\2\2\u03b7\3\2\2\2\2\u03b9")
buf.write("\3\2\2\2\2\u03bb\3\2\2\2\2\u03bd\3\2\2\2\2\u03bf\3\2\2")
buf.write("\2\2\u03c1\3\2\2\2\2\u03c3\3\2\2\2\2\u03c5\3\2\2\2\2\u03c7")
buf.write("\3\2\2\2\2\u03c9\3\2\2\2\2\u03cb\3\2\2\2\2\u03cd\3\2\2")
buf.write("\2\2\u03cf\3\2\2\2\2\u03d1\3\2\2\2\2\u03d3\3\2\2\2\2\u03e1")
buf.write("\3\2\2\2\2\u03e3\3\2\2\2\2\u03e5\3\2\2\2\2\u03e7\3\2\2")
buf.write("\2\2\u03e9\3\2\2\2\2\u03eb\3\2\2\2\2\u03ed\3\2\2\2\2\u03ef")
buf.write("\3\2\2\2\2\u03f1\3\2\2\2\2\u03f3\3\2\2\2\2\u03f5\3\2\2")
buf.write("\2\2\u03f7\3\2\2\2\2\u03f9\3\2\2\2\2\u03fb\3\2\2\2\2\u03fd")
buf.write("\3\2\2\2\2\u03ff\3\2\2\2\2\u0401\3\2\2\2\2\u0403\3\2\2")
buf.write("\2\2\u0405\3\2\2\2\2\u0407\3\2\2\2\2\u0409\3\2\2\2\2\u040b")
buf.write("\3\2\2\2\2\u040d\3\2\2\2\2\u040f\3\2\2\2\2\u0413\3\2\2")
buf.write("\2\2\u0415\3\2\2\2\2\u0417\3\2\2\2\2\u0419\3\2\2\2\2\u041b")
buf.write("\3\2\2\2\2\u041d\3\2\2\2\2\u041f\3\2\2\2\2\u0427\3\2\2")
buf.write("\2\2\u0429\3\2\2\2\2\u042b\3\2\2\2\2\u0431\3\2\2\2\2\u0433")
buf.write("\3\2\2\2\3\u0469\3\2\2\2\5\u046c\3\2\2\2\7\u046e\3\2\2")
buf.write("\2\t\u0472\3\2\2\2\13\u0478\3\2\2\2\r\u047e\3\2\2\2\17")
buf.write("\u0488\3\2\2\2\21\u048c\3\2\2\2\23\u0492\3\2\2\2\25\u049a")
buf.write("\3\2\2\2\27\u049e\3\2\2\2\31\u04a2\3\2\2\2\33\u04a8\3")
buf.write("\2\2\2\35\u04ab\3\2\2\2\37\u04b2\3\2\2\2!\u04b9\3\2\2")
buf.write("\2#\u04bd\3\2\2\2%\u04c7\3\2\2\2\'\u04ca\3\2\2\2)\u04d4")
buf.write("\3\2\2\2+\u04da\3\2\2\2-\u04e1\3\2\2\2/\u04e6\3\2\2\2")
buf.write("\61\u04f0\3\2\2\2\63\u0507\3\2\2\2\65\u050d\3\2\2\2\67")
buf.write("\u0514\3\2\2\29\u051a\3\2\2\2;\u0522\3\2\2\2=\u0528\3")
buf.write("\2\2\2?\u0536\3\2\2\2A\u0543\3\2\2\2C\u0552\3\2\2\2E\u0557")
buf.write("\3\2\2\2G\u055d\3\2\2\2I\u0562\3\2\2\2K\u056a\3\2\2\2")
buf.write("M\u056f\3\2\2\2O\u0577\3\2\2\2Q\u057c\3\2\2\2S\u057f\3")
buf.write("\2\2\2U\u0584\3\2\2\2W\u0586\3\2\2\2Y\u058c\3\2\2\2[\u0591")
buf.write("\3\2\2\2]\u059b\3\2\2\2_\u05a3\3\2\2\2a\u05a8\3\2\2\2")
buf.write("c\u05ad\3\2\2\2e\u05b2\3\2\2\2g\u05ba\3\2\2\2i\u05c4\3")
buf.write("\2\2\2k\u05ca\3\2\2\2m\u05ce\3\2\2\2o\u05d3\3\2\2\2q\u05d9")
buf.write("\3\2\2\2s\u05e1\3\2\2\2u\u05e9\3\2\2\2w\u05f1\3\2\2\2")
buf.write("y\u05f9\3\2\2\2{\u0600\3\2\2\2}\u060a\3\2\2\2\177\u0618")
buf.write("\3\2\2\2\u0081\u0620\3\2\2\2\u0083\u0629\3\2\2\2\u0085")
buf.write("\u0631\3\2\2\2\u0087\u0641\3\2\2\2\u0089\u064a\3\2\2\2")
buf.write("\u008b\u0655\3\2\2\2\u008d\u0661\3\2\2\2\u008f\u066d\3")
buf.write("\2\2\2\u0091\u0675\3\2\2\2\u0093\u067d\3\2\2\2\u0095\u0686")
buf.write("\3\2\2\2\u0097\u068e\3\2\2\2\u0099\u069a\3\2\2\2\u009b")
buf.write("\u06aa\3\2\2\2\u009d\u06af\3\2\2\2\u009f\u06b5\3\2\2\2")
buf.write("\u00a1\u06bc\3\2\2\2\u00a3\u06c2\3\2\2\2\u00a5\u06c7\3")
buf.write("\2\2\2\u00a7\u06cf\3\2\2\2\u00a9\u06dc\3\2\2\2\u00ab\u06e3")
buf.write("\3\2\2\2\u00ad\u06ef\3\2\2\2\u00af\u06f5\3\2\2\2\u00b1")
buf.write("\u06fa\3\2\2\2\u00b3\u0703\3\2\2\2\u00b5\u0708\3\2\2\2")
buf.write("\u00b7\u070c\3\2\2\2\u00b9\u071b\3\2\2\2\u00bb\u0726\3")
buf.write("\2\2\2\u00bd\u072a\3\2\2\2\u00bf\u0730\3\2\2\2\u00c1\u0734")
buf.write("\3\2\2\2\u00c3\u073c\3\2\2\2\u00c5\u0744\3\2\2\2\u00c7")
buf.write("\u074e\3\2\2\2\u00c9\u0758\3\2\2\2\u00cb\u0760\3\2\2\2")
buf.write("\u00cd\u0769\3\2\2\2\u00cf\u0772\3\2\2\2\u00d1\u077a\3")
buf.write("\2\2\2\u00d3\u0781\3\2\2\2\u00d5\u0787\3\2\2\2\u00d7\u078c")
buf.write("\3\2\2\2\u00d9\u079a\3\2\2\2\u00db\u07a4\3\2\2\2\u00dd")
buf.write("\u07ac\3\2\2\2\u00df\u07b9\3\2\2\2\u00e1\u07c2\3\2\2\2")
buf.write("\u00e3\u07cb\3\2\2\2\u00e5\u07d2\3\2\2\2\u00e7\u07d7\3")
buf.write("\2\2\2\u00e9\u07f0\3\2\2\2\u00eb\u07f5\3\2\2\2\u00ed\u07fd")
buf.write("\3\2\2\2\u00ef\u0802\3\2\2\2\u00f1\u0808\3\2\2\2\u00f3")
buf.write("\u080e\3\2\2\2\u00f5\u0815\3\2\2\2\u00f7\u081e\3\2\2\2")
buf.write("\u00f9\u0822\3\2\2\2\u00fb\u0831\3\2\2\2\u00fd\u0835\3")
buf.write("\2\2\2\u00ff\u083c\3\2\2\2\u0101\u0843\3\2\2\2\u0103\u084c")
buf.write("\3\2\2\2\u0105\u0853\3\2\2\2\u0107\u085d\3\2\2\2\u0109")
buf.write("\u086c\3\2\2\2\u010b\u0877\3\2\2\2\u010d\u087f\3\2\2\2")
buf.write("\u010f\u0889\3\2\2\2\u0111\u0891\3\2\2\2\u0113\u0898\3")
buf.write("\2\2\2\u0115\u089d\3\2\2\2\u0117\u08a5\3\2\2\2\u0119\u08ae")
buf.write("\3\2\2\2\u011b\u08b6\3\2\2\2\u011d\u08be\3\2\2\2\u011f")
buf.write("\u08c4\3\2\2\2\u0121\u08ca\3\2\2\2\u0123\u08d0\3\2\2\2")
buf.write("\u0125\u08d6\3\2\2\2\u0127\u08e2\3\2\2\2\u0129\u08e8\3")
buf.write("\2\2\2\u012b\u08f2\3\2\2\2\u012d\u08fa\3\2\2\2\u012f\u08fe")
buf.write("\3\2\2\2\u0131\u0905\3\2\2\2\u0133\u090b\3\2\2\2\u0135")
buf.write("\u0910\3\2\2\2\u0137\u0915\3\2\2\2\u0139\u091e\3\2\2\2")
buf.write("\u013b\u0923\3\2\2\2\u013d\u0929\3\2\2\2\u013f\u092f\3")
buf.write("\2\2\2\u0141\u0938\3\2\2\2\u0143\u093d\3\2\2\2\u0145\u0944")
buf.write("\3\2\2\2\u0147\u0949\3\2\2\2\u0149\u094e\3\2\2\2\u014b")
buf.write("\u0951\3\2\2\2\u014d\u0958\3\2\2\2\u014f\u0962\3\2\2\2")
buf.write("\u0151\u0965\3\2\2\2\u0153\u096d\3\2\2\2\u0155\u0977\3")
buf.write("\2\2\2\u0157\u0981\3\2\2\2\u0159\u0988\3\2\2\2\u015b\u098e")
buf.write("\3\2\2\2\u015d\u0996\3\2\2\2\u015f\u09a0\3\2\2\2\u0161")
buf.write("\u09a8\3\2\2\2\u0163\u09b1\3\2\2\2\u0165\u09b8\3\2\2\2")
buf.write("\u0167\u09be\3\2\2\2\u0169\u09c4\3\2\2\2\u016b\u09cb\3")
buf.write("\2\2\2\u016d\u09d8\3\2\2\2\u016f\u09e0\3\2\2\2\u0171\u09e4")
buf.write("\3\2\2\2\u0173\u09ec\3\2\2\2\u0175\u09f6\3\2\2\2\u0177")
buf.write("\u09ff\3\2\2\2\u0179\u0a04\3\2\2\2\u017b\u0a0f\3\2\2\2")
buf.write("\u017d\u0a12\3\2\2\2\u017f\u0a1c\3\2\2\2\u0181\u0a24\3")
buf.write("\2\2\2\u0183\u0a29\3\2\2\2\u0185\u0a2e\3\2\2\2\u0187\u0a33")
buf.write("\3\2\2\2\u0189\u0a3c\3\2\2\2\u018b\u0a41\3\2\2\2\u018d")
buf.write("\u0a4c\3\2\2\2\u018f\u0a54\3\2\2\2\u0191\u0a59\3\2\2\2")
buf.write("\u0193\u0a5f\3\2\2\2\u0195\u0a67\3\2\2\2\u0197\u0a6c\3")
buf.write("\2\2\2\u0199\u0a72\3\2\2\2\u019b\u0a78\3\2\2\2\u019d\u0a7e")
buf.write("\3\2\2\2\u019f\u0a84\3\2\2\2\u01a1\u0a8a\3\2\2\2\u01a3")
buf.write("\u0a8f\3\2\2\2\u01a5\u0a96\3\2\2\2\u01a7\u0a9a\3\2\2\2")
buf.write("\u01a9\u0aa1\3\2\2\2\u01ab\u0aa7\3\2\2\2\u01ad\u0aac\3")
buf.write("\2\2\2\u01af\u0ab1\3\2\2\2\u01b1\u0ab6\3\2\2\2\u01b3\u0aba")
buf.write("\3\2\2\2\u01b5\u0ac2\3\2\2\2\u01b7\u0acb\3\2\2\2\u01b9")
buf.write("\u0ad4\3\2\2\2\u01bb\u0adb\3\2\2\2\u01bd\u0ae1\3\2\2\2")
buf.write("\u01bf\u0ae7\3\2\2\2\u01c1\u0aee\3\2\2\2\u01c3\u0af7\3")
buf.write("\2\2\2\u01c5\u0b00\3\2\2\2\u01c7\u0b05\3\2\2\2\u01c9\u0b0b")
buf.write("\3\2\2\2\u01cb\u0b12\3\2\2\2\u01cd\u0b18\3\2\2\2\u01cf")
buf.write("\u0b21\3\2\2\2\u01d1\u0b26\3\2\2\2\u01d3\u0b2a\3\2\2\2")
buf.write("\u01d5\u0b32\3\2\2\2\u01d7\u0b3b\3\2\2\2\u01d9\u0b3f\3")
buf.write("\2\2\2\u01db\u0b45\3\2\2\2\u01dd\u0b4e\3\2\2\2\u01df\u0b54")
buf.write("\3\2\2\2\u01e1\u0b5b\3\2\2\2\u01e3\u0b5f\3\2\2\2\u01e5")
buf.write("\u0b62\3\2\2\2\u01e7\u0b6a\3\2\2\2\u01e9\u0b72\3\2\2\2")
buf.write("\u01eb\u0b79\3\2\2\2\u01ed\u0b81\3\2\2\2\u01ef\u0b92\3")
buf.write("\2\2\2\u01f1\u0b9d\3\2\2\2\u01f3\u0ba8\3\2\2\2\u01f5\u0bad")
buf.write("\3\2\2\2\u01f7\u0bb5\3\2\2\2\u01f9\u0bc3\3\2\2\2\u01fb")
buf.write("\u0bc7\3\2\2\2\u01fd\u0bce\3\2\2\2\u01ff\u0bd3\3\2\2\2")
buf.write("\u0201\u0bd9\3\2\2\2\u0203\u0be0\3\2\2\2\u0205\u0be8\3")
buf.write("\2\2\2\u0207\u0bf2\3\2\2\2\u0209\u0bf9\3\2\2\2\u020b\u0bfc")
buf.write("\3\2\2\2\u020d\u0c00\3\2\2\2\u020f\u0c04\3\2\2\2\u0211")
buf.write("\u0c08\3\2\2\2\u0213\u0c0b\3\2\2\2\u0215\u0c10\3\2\2\2")
buf.write("\u0217\u0c15\3\2\2\2\u0219\u0c1c\3\2\2\2\u021b\u0c1f\3")
buf.write("\2\2\2\u021d\u0c27\3\2\2\2\u021f\u0c2d\3\2\2\2\u0221\u0c38")
buf.write("\3\2\2\2\u0223\u0c40\3\2\2\2\u0225\u0c44\3\2\2\2\u0227")
buf.write("\u0c4a\3\2\2\2\u0229\u0c4f\3\2\2\2\u022b\u0c5a\3\2\2\2")
buf.write("\u022d\u0c62\3\2\2\2\u022f\u0c72\3\2\2\2\u0231\u0c7d\3")
buf.write("\2\2\2\u0233\u0c84\3\2\2\2\u0235\u0c8e\3\2\2\2\u0237\u0c96")
buf.write("\3\2\2\2\u0239\u0c9b\3\2\2\2\u023b\u0ca4\3\2\2\2\u023d")
buf.write("\u0caa\3\2\2\2\u023f\u0cb4\3\2\2\2\u0241\u0cba\3\2\2\2")
buf.write("\u0243\u0cbf\3\2\2\2\u0245\u0ccb\3\2\2\2\u0247\u0cd4\3")
buf.write("\2\2\2\u0249\u0cde\3\2\2\2\u024b\u0ce5\3\2\2\2\u024d\u0cef")
buf.write("\3\2\2\2\u024f\u0cf9\3\2\2\2\u0251\u0d01\3\2\2\2\u0253")
buf.write("\u0d07\3\2\2\2\u0255\u0d11\3\2\2\2\u0257\u0d17\3\2\2\2")
buf.write("\u0259\u0d1d\3\2\2\2\u025b\u0d21\3\2\2\2\u025d\u0d26\3")
buf.write("\2\2\2\u025f\u0d2b\3\2\2\2\u0261\u0d32\3\2\2\2\u0263\u0d36")
buf.write("\3\2\2\2\u0265\u0d40\3\2\2\2\u0267\u0d4c\3\2\2\2\u0269")
buf.write("\u0d53\3\2\2\2\u026b\u0d5d\3\2\2\2\u026d\u0d64\3\2\2\2")
buf.write("\u026f\u0d6c\3\2\2\2\u0271\u0d74\3\2\2\2\u0273\u0d88\3")
buf.write("\2\2\2\u0275\u0d8f\3\2\2\2\u0277\u0d9c\3\2\2\2\u0279\u0da3")
buf.write("\3\2\2\2\u027b\u0dad\3\2\2\2\u027d\u0db3\3\2\2\2\u027f")
buf.write("\u0dbb\3\2\2\2\u0281\u0dc2\3\2\2\2\u0283\u0dc8\3\2\2\2")
buf.write("\u0285\u0dd1\3\2\2\2\u0287\u0dd8\3\2\2\2\u0289\u0ddc\3")
buf.write("\2\2\2\u028b\u0de2\3\2\2\2\u028d\u0de7\3\2\2\2\u028f\u0ded")
buf.write("\3\2\2\2\u0291\u0df4\3\2\2\2\u0293\u0df9\3\2\2\2\u0295")
buf.write("\u0e03\3\2\2\2\u0297\u0e0a\3\2\2\2\u0299\u0e16\3\2\2\2")
buf.write("\u029b\u0e1a\3\2\2\2\u029d\u0e21\3\2\2\2\u029f\u0e28\3")
buf.write("\2\2\2\u02a1\u0e2d\3\2\2\2\u02a3\u0e35\3\2\2\2\u02a5\u0e3c")
buf.write("\3\2\2\2\u02a7\u0e41\3\2\2\2\u02a9\u0e4a\3\2\2\2\u02ab")
buf.write("\u0e55\3\2\2\2\u02ad\u0e62\3\2\2\2\u02af\u0e74\3\2\2\2")
buf.write("\u02b1\u0e80\3\2\2\2\u02b3\u0e90\3\2\2\2\u02b5\u0e94\3")
buf.write("\2\2\2\u02b7\u0e99\3\2\2\2\u02b9\u0ea2\3\2\2\2\u02bb\u0ea8")
buf.write("\3\2\2\2\u02bd\u0ead\3\2\2\2\u02bf\u0eb6\3\2\2\2\u02c1")
buf.write("\u0ebf\3\2\2\2\u02c3\u0ec8\3\2\2\2\u02c5\u0ed7\3\2\2\2")
buf.write("\u02c7\u0ede\3\2\2\2\u02c9\u0ee3\3\2\2\2\u02cb\u0ee8\3")
buf.write("\2\2\2\u02cd\u0ef1\3\2\2\2\u02cf\u0efa\3\2\2\2\u02d1\u0eff")
buf.write("\3\2\2\2\u02d3\u0f0d\3\2\2\2\u02d5\u0f15\3\2\2\2\u02d7")
buf.write("\u0f1e\3\2\2\2\u02d9\u0f29\3\2\2\2\u02db\u0f2f\3\2\2\2")
buf.write("\u02dd\u0f37\3\2\2\2\u02df\u0f41\3\2\2\2\u02e1\u0f4e\3")
buf.write("\2\2\2\u02e3\u0f55\3\2\2\2\u02e5\u0f60\3\2\2\2\u02e7\u0f67")
buf.write("\3\2\2\2\u02e9\u0f73\3\2\2\2\u02eb\u0f80\3\2\2\2\u02ed")
buf.write("\u0f8e\3\2\2\2\u02ef\u0f96\3\2\2\2\u02f1\u0f9e\3\2\2\2")
buf.write("\u02f3\u0fa6\3\2\2\2\u02f5\u0fac\3\2\2\2\u02f7\u0fb0\3")
buf.write("\2\2\2\u02f9\u0fb5\3\2\2\2\u02fb\u0fba\3\2\2\2\u02fd\u0fc4")
buf.write("\3\2\2\2\u02ff\u0fe0\3\2\2\2\u0301\u0ffb\3\2\2\2\u0303")
buf.write("\u1013\3\2\2\2\u0305\u1021\3\2\2\2\u0307\u102f\3\2\2\2")
buf.write("\u0309\u103f\3\2\2\2\u030b\u104f\3\2\2\2\u030d\u1052\3")
buf.write("\2\2\2\u030f\u105b\3\2\2\2\u0311\u1067\3\2\2\2\u0313\u1071")
buf.write("\3\2\2\2\u0315\u1077\3\2\2\2\u0317\u107f\3\2\2\2\u0319")
buf.write("\u1084\3\2\2\2\u031b\u1089\3\2\2\2\u031d\u1092\3\2\2\2")
buf.write("\u031f\u1097\3\2\2\2\u0321\u10a1\3\2\2\2\u0323\u10a7\3")
buf.write("\2\2\2\u0325\u10ad\3\2\2\2\u0327\u10b4\3\2\2\2\u0329\u10be")
buf.write("\3\2\2\2\u032b\u10c6\3\2\2\2\u032d\u10cc\3\2\2\2\u032f")
buf.write("\u10d3\3\2\2\2\u0331\u10db\3\2\2\2\u0333\u10e2\3\2\2\2")
buf.write("\u0335\u10e9\3\2\2\2\u0337\u10ed\3\2\2\2\u0339\u10f3\3")
buf.write("\2\2\2\u033b\u10fc\3\2\2\2\u033d\u1102\3\2\2\2\u033f\u1109")
buf.write("\3\2\2\2\u0341\u1111\3\2\2\2\u0343\u111a\3\2\2\2\u0345")
buf.write("\u1123\3\2\2\2\u0347\u112a\3\2\2\2\u0349\u1132\3\2\2\2")
buf.write("\u034b\u113a\3\2\2\2\u034d\u1143\3\2\2\2\u034f\u1148\3")
buf.write("\2\2\2\u0351\u1150\3\2\2\2\u0353\u115b\3\2\2\2\u0355\u1160")
buf.write("\3\2\2\2\u0357\u1169\3\2\2\2\u0359\u116f\3\2\2\2\u035b")
buf.write("\u1175\3\2\2\2\u035d\u117a\3\2\2\2\u035f\u1181\3\2\2\2")
buf.write("\u0361\u1186\3\2\2\2\u0363\u118c\3\2\2\2\u0365\u1190\3")
buf.write("\2\2\2\u0367\u1197\3\2\2\2\u0369\u11a5\3\2\2\2\u036b\u11ad")
buf.write("\3\2\2\2\u036d\u11ba\3\2\2\2\u036f\u11c5\3\2\2\2\u0371")
buf.write("\u11cf\3\2\2\2\u0373\u11d9\3\2\2\2\u0375\u11e7\3\2\2\2")
buf.write("\u0377\u11f0\3\2\2\2\u0379\u11f6\3\2\2\2\u037b\u11ff\3")
buf.write("\2\2\2\u037d\u1207\3\2\2\2\u037f\u1214\3\2\2\2\u0381\u121d")
buf.write("\3\2\2\2\u0383\u1222\3\2\2\2\u0385\u1226\3\2\2\2\u0387")
buf.write("\u123f\3\2\2\2\u0389\u1244\3\2\2\2\u038b\u124f\3\2\2\2")
buf.write("\u038d\u1261\3\2\2\2\u038f\u1271\3\2\2\2\u0391\u1284\3")
buf.write("\2\2\2\u0393\u129b\3\2\2\2\u0395\u12aa\3\2\2\2\u0397\u12b4")
buf.write("\3\2\2\2\u0399\u12bf\3\2\2\2\u039b\u12c7\3\2\2\2\u039d")
buf.write("\u12d4\3\2\2\2\u039f\u12e4\3\2\2\2\u03a1\u12f4\3\2\2\2")
buf.write("\u03a3\u12f9\3\2\2\2\u03a5\u12fd\3\2\2\2\u03a7\u1302\3")
buf.write("\2\2\2\u03a9\u1306\3\2\2\2\u03ab\u130b\3\2\2\2\u03ad\u130f")
buf.write("\3\2\2\2\u03af\u1316\3\2\2\2\u03b1\u131a\3\2\2\2\u03b3")
buf.write("\u1320\3\2\2\2\u03b5\u1330\3\2\2\2\u03b7\u133b\3\2\2\2")
buf.write("\u03b9\u133f\3\2\2\2\u03bb\u1348\3\2\2\2\u03bd\u134e\3")
buf.write("\2\2\2\u03bf\u1355\3\2\2\2\u03c1\u135a\3\2\2\2\u03c3\u1361")
buf.write("\3\2\2\2\u03c5\u136e\3\2\2\2\u03c7\u137b\3\2\2\2\u03c9")
buf.write("\u1388\3\2\2\2\u03cb\u138b\3\2\2\2\u03cd\u138d\3\2\2\2")
buf.write("\u03cf\u138f\3\2\2\2\u03d1\u139e\3\2\2\2\u03d3\u13aa\3")
buf.write("\2\2\2\u03d5\u13b3\3\2\2\2\u03d7\u13b5\3\2\2\2\u03d9\u13c0")
buf.write("\3\2\2\2\u03db\u13cb\3\2\2\2\u03dd\u13d6\3\2\2\2\u03df")
buf.write("\u13e1\3\2\2\2\u03e1\u13e3\3\2\2\2\u03e3\u13ed\3\2\2\2")
buf.write("\u03e5\u13ef\3\2\2\2\u03e7\u13f1\3\2\2\2\u03e9\u13f3\3")
buf.write("\2\2\2\u03eb\u13f5\3\2\2\2\u03ed\u13f8\3\2\2\2\u03ef\u13fa")
buf.write("\3\2\2\2\u03f1\u13fc\3\2\2\2\u03f3\u13fe\3\2\2\2\u03f5")
buf.write("\u1400\3\2\2\2\u03f7\u1402\3\2\2\2\u03f9\u1404\3\2\2\2")
buf.write("\u03fb\u1415\3\2\2\2\u03fd\u1417\3\2\2\2\u03ff\u1419\3")
buf.write("\2\2\2\u0401\u141b\3\2\2\2\u0403\u141e\3\2\2\2\u0405\u1420")
buf.write("\3\2\2\2\u0407\u142b\3\2\2\2\u0409\u142d\3\2\2\2\u040b")
buf.write("\u142f\3\2\2\2\u040d\u1431\3\2\2\2\u040f\u1433\3\2\2\2")
buf.write("\u0411\u1435\3\2\2\2\u0413\u1437\3\2\2\2\u0415\u143a\3")
buf.write("\2\2\2\u0417\u143c\3\2\2\2\u0419\u143e\3\2\2\2\u041b\u1440")
buf.write("\3\2\2\2\u041d\u1442\3\2\2\2\u041f\u1445\3\2\2\2\u0421")
buf.write("\u144b\3\2\2\2\u0423\u144e\3\2\2\2\u0425\u1455\3\2\2\2")
buf.write("\u0427\u1460\3\2\2\2\u0429\u146f\3\2\2\2\u042b\u147d\3")
buf.write("\2\2\2\u042d\u1490\3\2\2\2\u042f\u1494\3\2\2\2\u0431\u1496")
buf.write("\3\2\2\2\u0433\u149e\3\2\2\2\u0435\u14a3\3\2\2\2\u0437")
buf.write("\u14a5\3\2\2\2\u0439\u14a7\3\2\2\2\u043b\u14a9\3\2\2\2")
buf.write("\u043d\u14ab\3\2\2\2\u043f\u14ad\3\2\2\2\u0441\u14af\3")
buf.write("\2\2\2\u0443\u14b1\3\2\2\2\u0445\u14b3\3\2\2\2\u0447\u14b5")
buf.write("\3\2\2\2\u0449\u14b7\3\2\2\2\u044b\u14b9\3\2\2\2\u044d")
buf.write("\u14bb\3\2\2\2\u044f\u14bd\3\2\2\2\u0451\u14bf\3\2\2\2")
buf.write("\u0453\u14c1\3\2\2\2\u0455\u14c3\3\2\2\2\u0457\u14c5\3")
buf.write("\2\2\2\u0459\u14c7\3\2\2\2\u045b\u14c9\3\2\2\2\u045d\u14cb")
buf.write("\3\2\2\2\u045f\u14cd\3\2\2\2\u0461\u14cf\3\2\2\2\u0463")
buf.write("\u14d1\3\2\2\2\u0465\u14d3\3\2\2\2\u0467\u14d5\3\2\2\2")
buf.write("\u0469\u046a\7\60\2\2\u046a\u046b\7\60\2\2\u046b\4\3\2")
buf.write("\2\2\u046c\u046d\5\u0435\u021b\2\u046d\6\3\2\2\2\u046e")
buf.write("\u046f\5\u0435\u021b\2\u046f\u0470\5\u043b\u021e\2\u0470")
buf.write("\u0471\5\u043b\u021e\2\u0471\b\3\2\2\2\u0472\u0473\5\u0435")
buf.write("\u021b\2\u0473\u0474\5\u043f\u0220\2\u0474\u0475\5\u045b")
buf.write("\u022e\2\u0475\u0476\5\u043d\u021f\2\u0476\u0477\5\u0457")
buf.write("\u022c\2\u0477\n\3\2\2\2\u0478\u0479\5\u0435\u021b\2\u0479")
buf.write("\u047a\5\u0441\u0221\2\u047a\u047b\5\u043d\u021f\2\u047b")
buf.write("\u047c\5\u044f\u0228\2\u047c\u047d\5\u045b\u022e\2\u047d")
buf.write("\f\3\2\2\2\u047e\u047f\5\u0435\u021b\2\u047f\u0480\5\u0441")
buf.write("\u0221\2\u0480\u0481\5\u0441\u0221\2\u0481\u0482\5\u0457")
buf.write("\u022c\2\u0482\u0483\5\u043d\u021f\2\u0483\u0484\5\u0441")
buf.write("\u0221\2\u0484\u0485\5\u0435\u021b\2\u0485\u0486\5\u045b")
buf.write("\u022e\2\u0486\u0487\5\u043d\u021f\2\u0487\16\3\2\2\2")
buf.write("\u0488\u0489\5\u0435\u021b\2\u0489\u048a\5\u044b\u0226")
buf.write("\2\u048a\u048b\5\u044b\u0226\2\u048b\20\3\2\2\2\u048c")
buf.write("\u048d\5\u0435\u021b\2\u048d\u048e\5\u044b\u0226\2\u048e")
buf.write("\u048f\5\u045b\u022e\2\u048f\u0490\5\u043d\u021f\2\u0490")
buf.write("\u0491\5\u0457\u022c\2\u0491\22\3\2\2\2\u0492\u0493\5")
buf.write("\u0435\u021b\2\u0493\u0494\5\u044f\u0228\2\u0494\u0495")
buf.write("\5\u0435\u021b\2\u0495\u0496\5\u044b\u0226\2\u0496\u0497")
buf.write("\5\u0465\u0233\2\u0497\u0498\5\u0467\u0234\2\u0498\u0499")
buf.write("\5\u043d\u021f\2\u0499\24\3\2\2\2\u049a\u049b\5\u0435")
buf.write("\u021b\2\u049b\u049c\5\u044f\u0228\2\u049c\u049d\5\u043b")
buf.write("\u021e\2\u049d\26\3\2\2\2\u049e\u049f\5\u0435\u021b\2")
buf.write("\u049f\u04a0\5\u044f\u0228\2\u04a0\u04a1\5\u0465\u0233")
buf.write("\2\u04a1\30\3\2\2\2\u04a2\u04a3\5\u0435\u021b\2\u04a3")
buf.write("\u04a4\5\u0457\u022c\2\u04a4\u04a5\5\u0457\u022c\2\u04a5")
buf.write("\u04a6\5\u0435\u021b\2\u04a6\u04a7\5\u0465\u0233\2\u04a7")
buf.write("\32\3\2\2\2\u04a8\u04a9\5\u0435\u021b\2\u04a9\u04aa\5")
buf.write("\u0459\u022d\2\u04aa\34\3\2\2\2\u04ab\u04ac\5\u0435\u021b")
buf.write("\2\u04ac\u04ad\5\u0459\u022d\2\u04ad\u04ae\5\u0459\u022d")
buf.write("\2\u04ae\u04af\5\u045d\u022f\2\u04af\u04b0\5\u044d\u0227")
buf.write("\2\u04b0\u04b1\5\u043d\u021f\2\u04b1\36\3\2\2\2\u04b2")
buf.write("\u04b3\5\u0435\u021b\2\u04b3\u04b4\5\u0459\u022d\2\u04b4")
buf.write("\u04b5\5\u0459\u022d\2\u04b5\u04b6\5\u043d\u021f\2\u04b6")
buf.write("\u04b7\5\u0457\u022c\2\u04b7\u04b8\5\u045b\u022e\2\u04b8")
buf.write(" \3\2\2\2\u04b9\u04ba\5\u0435\u021b\2\u04ba\u04bb\5\u0459")
buf.write("\u022d\2\u04bb\u04bc\5\u0439\u021d\2\u04bc\"\3\2\2\2\u04bd")
buf.write("\u04be\5\u0435\u021b\2\u04be\u04bf\5\u0459\u022d\2\u04bf")
buf.write("\u04c0\5\u0459\u022d\2\u04c0\u04c1\5\u0451\u0229\2\u04c1")
buf.write("\u04c2\5\u0439\u021d\2\u04c2\u04c3\5\u0445\u0223\2\u04c3")
buf.write("\u04c4\5\u0435\u021b\2\u04c4\u04c5\5\u045b\u022e\2\u04c5")
buf.write("\u04c6\5\u043d\u021f\2\u04c6$\3\2\2\2\u04c7\u04c8\5\u0435")
buf.write("\u021b\2\u04c8\u04c9\5\u045b\u022e\2\u04c9&\3\2\2\2\u04ca")
buf.write("\u04cb\5\u0435\u021b\2\u04cb\u04cc\5\u045b\u022e\2\u04cc")
buf.write("\u04cd\5\u045b\u022e\2\u04cd\u04ce\5\u0457\u022c\2\u04ce")
buf.write("\u04cf\5\u0445\u0223\2\u04cf\u04d0\5\u0437\u021c\2\u04d0")
buf.write("\u04d1\5\u045d\u022f\2\u04d1\u04d2\5\u045b\u022e\2\u04d2")
buf.write("\u04d3\5\u043d\u021f\2\u04d3(\3\2\2\2\u04d4\u04d5\5\u0435")
buf.write("\u021b\2\u04d5\u04d6\5\u045d\u022f\2\u04d6\u04d7\5\u043b")
buf.write("\u021e\2\u04d7\u04d8\5\u0445\u0223\2\u04d8\u04d9\5\u045b")
buf.write("\u022e\2\u04d9*\3\2\2\2\u04da\u04db\5\u0435\u021b\2\u04db")
buf.write("\u04dc\5\u045d\u022f\2\u04dc\u04dd\5\u045b\u022e\2\u04dd")
buf.write("\u04de\5\u0443\u0222\2\u04de\u04df\5\u0445\u0223\2\u04df")
buf.write("\u04e0\5\u043b\u021e\2\u04e0,\3\2\2\2\u04e1\u04e2\5\u0435")
buf.write("\u021b\2\u04e2\u04e3\5\u045d\u022f\2\u04e3\u04e4\5\u045b")
buf.write("\u022e\2\u04e4\u04e5\5\u0451\u0229\2\u04e5.\3\2\2\2\u04e6")
buf.write("\u04e7\5\u0435\u021b\2\u04e7\u04e8\5\u045d\u022f\2\u04e8")
buf.write("\u04e9\5\u045b\u022e\2\u04e9\u04ea\5\u0451\u0229\2\u04ea")
buf.write("\u04eb\5\u044d\u0227\2\u04eb\u04ec\5\u0435\u021b\2\u04ec")
buf.write("\u04ed\5\u045b\u022e\2\u04ed\u04ee\5\u0445\u0223\2\u04ee")
buf.write("\u04ef\5\u0439\u021d\2\u04ef\60\3\2\2\2\u04f0\u04f1\5")
buf.write("\u0435\u021b\2\u04f1\u04f2\5\u045d\u022f\2\u04f2\u04f3")
buf.write("\5\u045b\u022e\2\u04f3\u04f4\5\u0451\u0229\2\u04f4\u04f5")
buf.write("\5\u044f\u0228\2\u04f5\u04f6\5\u0451\u0229\2\u04f6\u04f7")
buf.write("\5\u044d\u0227\2\u04f7\u04f8\5\u0451\u0229\2\u04f8\u04f9")
buf.write("\5\u045d\u022f\2\u04f9\u04fa\5\u0459\u022d\2\u04fa\u04fb")
buf.write("\7a\2\2\u04fb\u04fc\5\u045b\u022e\2\u04fc\u04fd\5\u0457")
buf.write("\u022c\2\u04fd\u04fe\5\u0435\u021b\2\u04fe\u04ff\5\u044f")
buf.write("\u0228\2\u04ff\u0500\5\u0459\u022d\2\u0500\u0501\5\u0435")
buf.write("\u021b\2\u0501\u0502\5\u0439\u021d\2\u0502\u0503\5\u045b")
buf.write("\u022e\2\u0503\u0504\5\u0445\u0223\2\u0504\u0505\5\u0451")
buf.write("\u0229\2\u0505\u0506\5\u044f\u0228\2\u0506\62\3\2\2\2")
buf.write("\u0507\u0508\5\u0437\u021c\2\u0508\u0509\5\u0435\u021b")
buf.write("\2\u0509\u050a\5\u045b\u022e\2\u050a\u050b\5\u0439\u021d")
buf.write("\2\u050b\u050c\5\u0443\u0222\2\u050c\64\3\2\2\2\u050d")
buf.write("\u050e\5\u0437\u021c\2\u050e\u050f\5\u043d\u021f\2\u050f")
buf.write("\u0510\5\u043f\u0220\2\u0510\u0511\5\u0451\u0229\2\u0511")
buf.write("\u0512\5\u0457\u022c\2\u0512\u0513\5\u043d\u021f\2\u0513")
buf.write("\66\3\2\2\2\u0514\u0515\5\u0437\u021c\2\u0515\u0516\5")
buf.write("\u043d\u021f\2\u0516\u0517\5\u0441\u0221\2\u0517\u0518")
buf.write("\5\u0445\u0223\2\u0518\u0519\5\u044f\u0228\2\u05198\3")
buf.write("\2\2\2\u051a\u051b\5\u0437\u021c\2\u051b\u051c\5\u043d")
buf.write("\u021f\2\u051c\u051d\5\u045b\u022e\2\u051d\u051e\5\u0461")
buf.write("\u0231\2\u051e\u051f\5\u043d\u021f\2\u051f\u0520\5\u043d")
buf.write("\u021f\2\u0520\u0521\5\u044f\u0228\2\u0521:\3\2\2\2\u0522")
buf.write("\u0523\5\u0437\u021c\2\u0523\u0524\5\u043f\u0220\2\u0524")
buf.write("\u0525\5\u0445\u0223\2\u0525\u0526\5\u044b\u0226\2\u0526")
buf.write("\u0527\5\u043d\u021f\2\u0527<\3\2\2\2\u0528\u0529\5\u0437")
buf.write("\u021c\2\u0529\u052a\5\u0445\u0223\2\u052a\u052b\5\u044f")
buf.write("\u0228\2\u052b\u052c\5\u0435\u021b\2\u052c\u052d\5\u0457")
buf.write("\u022c\2\u052d\u052e\5\u0465\u0233\2\u052e\u052f\7a\2")
buf.write("\2\u052f\u0530\5\u043b\u021e\2\u0530\u0531\5\u0451\u0229")
buf.write("\2\u0531\u0532\5\u045d\u022f\2\u0532\u0533\5\u0437\u021c")
buf.write("\2\u0533\u0534\5\u044b\u0226\2\u0534\u0535\5\u043d\u021f")
buf.write("\2\u0535>\3\2\2\2\u0536\u0537\5\u0437\u021c\2\u0537\u0538")
buf.write("\5\u0445\u0223\2\u0538\u0539\5\u044f\u0228\2\u0539\u053a")
buf.write("\5\u0435\u021b\2\u053a\u053b\5\u0457\u022c\2\u053b\u053c")
buf.write("\5\u0465\u0233\2\u053c\u053d\7a\2\2\u053d\u053e\5\u043f")
buf.write("\u0220\2\u053e\u053f\5\u044b\u0226\2\u053f\u0540\5\u0451")
buf.write("\u0229\2\u0540\u0541\5\u0435\u021b\2\u0541\u0542\5\u045b")
buf.write("\u022e\2\u0542@\3\2\2\2\u0543\u0544\5\u0437\u021c\2\u0544")
buf.write("\u0545\5\u0445\u0223\2\u0545\u0546\5\u044f\u0228\2\u0546")
buf.write("\u0547\5\u0435\u021b\2\u0547\u0548\5\u0457\u022c\2\u0548")
buf.write("\u0549\5\u0465\u0233\2\u0549\u054a\7a\2\2\u054a\u054b")
buf.write("\5\u0445\u0223\2\u054b\u054c\5\u044f\u0228\2\u054c\u054d")
buf.write("\5\u045b\u022e\2\u054d\u054e\5\u043d\u021f\2\u054e\u054f")
buf.write("\5\u0441\u0221\2\u054f\u0550\5\u043d\u021f\2\u0550\u0551")
buf.write("\5\u0457\u022c\2\u0551B\3\2\2\2\u0552\u0553\5\u0437\u021c")
buf.write("\2\u0553\u0554\5\u044b\u0226\2\u0554\u0555\5\u0451\u0229")
buf.write("\2\u0555\u0556\5\u0437\u021c\2\u0556D\3\2\2\2\u0557\u0558")
buf.write("\5\u0437\u021c\2\u0558\u0559\5\u044b\u0226\2\u0559\u055a")
buf.write("\5\u0451\u0229\2\u055a\u055b\5\u0439\u021d\2\u055b\u055c")
buf.write("\5\u0449\u0225\2\u055cF\3\2\2\2\u055d\u055e\5\u0437\u021c")
buf.write("\2\u055e\u055f\5\u0451\u0229\2\u055f\u0560\5\u043b\u021e")
buf.write("\2\u0560\u0561\5\u0465\u0233\2\u0561H\3\2\2\2\u0562\u0563")
buf.write("\5\u0437\u021c\2\u0563\u0564\5\u0451\u0229\2\u0564\u0565")
buf.write("\5\u0451\u0229\2\u0565\u0566\5\u044b\u0226\2\u0566\u0567")
buf.write("\5\u043d\u021f\2\u0567\u0568\5\u0435\u021b\2\u0568\u0569")
buf.write("\5\u044f\u0228\2\u0569J\3\2\2\2\u056a\u056b\5\u0437\u021c")
buf.write("\2\u056b\u056c\5\u0451\u0229\2\u056c\u056d\5\u045b\u022e")
buf.write("\2\u056d\u056e\5\u0443\u0222\2\u056eL\3\2\2\2\u056f\u0570")
buf.write("\5\u0437\u021c\2\u0570\u0571\5\u0457\u022c\2\u0571\u0572")
buf.write("\5\u043d\u021f\2\u0572\u0573\5\u0435\u021b\2\u0573\u0574")
buf.write("\5\u043b\u021e\2\u0574\u0575\5\u045b\u022e\2\u0575\u0576")
buf.write("\5\u0443\u0222\2\u0576N\3\2\2\2\u0577\u0578\5\u0437\u021c")
buf.write("\2\u0578\u0579\5\u045d\u022f\2\u0579\u057a\5\u044b\u0226")
buf.write("\2\u057a\u057b\5\u0449\u0225\2\u057bP\3\2\2\2\u057c\u057d")
buf.write("\5\u0437\u021c\2\u057d\u057e\5\u0465\u0233\2\u057eR\3")
buf.write("\2\2\2\u057f\u0580\5\u0437\u021c\2\u0580\u0581\5\u0465")
buf.write("\u0233\2\u0581\u0582\5\u045b\u022e\2\u0582\u0583\5\u043d")
buf.write("\u021f\2\u0583T\3\2\2\2\u0584\u0585\5\u0439\u021d\2\u0585")
buf.write("V\3\2\2\2\u0586\u0587\5\u0439\u021d\2\u0587\u0588\5\u0435")
buf.write("\u021b\2\u0588\u0589\5\u0439\u021d\2\u0589\u058a\5\u0443")
buf.write("\u0222\2\u058a\u058b\5\u043d\u021f\2\u058bX\3\2\2\2\u058c")
buf.write("\u058d\5\u0439\u021d\2\u058d\u058e\5\u0435\u021b\2\u058e")
buf.write("\u058f\5\u044b\u0226\2\u058f\u0590\5\u044b\u0226\2\u0590")
buf.write("Z\3\2\2\2\u0591\u0592\5\u0439\u021d\2\u0592\u0593\5\u0435")
buf.write("\u021b\2\u0593\u0594\5\u044f\u0228\2\u0594\u0595\5\u0451")
buf.write("\u0229\2\u0595\u0596\5\u044f\u0228\2\u0596\u0597\5\u0445")
buf.write("\u0223\2\u0597\u0598\5\u0439\u021d\2\u0598\u0599\5\u0435")
buf.write("\u021b\2\u0599\u059a\5\u044b\u0226\2\u059a\\\3\2\2\2\u059b")
buf.write("\u059c\5\u0439\u021d\2\u059c\u059d\5\u0435\u021b\2\u059d")
buf.write("\u059e\5\u0459\u022d\2\u059e\u059f\5\u0439\u021d\2\u059f")
buf.write("\u05a0\5\u0435\u021b\2\u05a0\u05a1\5\u043b\u021e\2\u05a1")
buf.write("\u05a2\5\u043d\u021f\2\u05a2^\3\2\2\2\u05a3\u05a4\5\u0439")
buf.write("\u021d\2\u05a4\u05a5\5\u0435\u021b\2\u05a5\u05a6\5\u0459")
buf.write("\u022d\2\u05a6\u05a7\5\u043d\u021f\2\u05a7`\3\2\2\2\u05a8")
buf.write("\u05a9\5\u0439\u021d\2\u05a9\u05aa\5\u0435\u021b\2\u05aa")
buf.write("\u05ab\5\u0459\u022d\2\u05ab\u05ac\5\u045b\u022e\2\u05ac")
buf.write("b\3\2\2\2\u05ad\u05ae\5\u0439\u021d\2\u05ae\u05af\5\u0443")
buf.write("\u0222\2\u05af\u05b0\5\u0435\u021b\2\u05b0\u05b1\5\u0457")
buf.write("\u022c\2\u05b1d\3\2\2\2\u05b2\u05b3\5\u0439\u021d\2\u05b3")
buf.write("\u05b4\5\u0443\u0222\2\u05b4\u05b5\5\u0435\u021b\2\u05b5")
buf.write("\u05b6\5\u0457\u022c\2\u05b6\u05b7\7a\2\2\u05b7\u05b8")
buf.write("\5\u0439\u021d\2\u05b8\u05b9\5\u0459\u022d\2\u05b9f\3")
buf.write("\2\2\2\u05ba\u05bb\5\u0439\u021d\2\u05bb\u05bc\5\u0443")
buf.write("\u0222\2\u05bc\u05bd\5\u0435\u021b\2\u05bd\u05be\5\u0457")
buf.write("\u022c\2\u05be\u05bf\5\u0435\u021b\2\u05bf\u05c0\5\u0439")
buf.write("\u021d\2\u05c0\u05c1\5\u045b\u022e\2\u05c1\u05c2\5\u043d")
buf.write("\u021f\2\u05c2\u05c3\5\u0457\u022c\2\u05c3h\3\2\2\2\u05c4")
buf.write("\u05c5\5\u0439\u021d\2\u05c5\u05c6\5\u0443\u0222\2\u05c6")
buf.write("\u05c7\5\u043d\u021f\2\u05c7\u05c8\5\u0439\u021d\2\u05c8")
buf.write("\u05c9\5\u0449\u0225\2\u05c9j\3\2\2\2\u05ca\u05cb\5\u0439")
buf.write("\u021d\2\u05cb\u05cc\5\u0443\u0222\2\u05cc\u05cd\5\u0457")
buf.write("\u022c\2\u05cdl\3\2\2\2\u05ce\u05cf\5\u0439\u021d\2\u05cf")
buf.write("\u05d0\5\u044b\u0226\2\u05d0\u05d1\5\u0451\u0229\2\u05d1")
buf.write("\u05d2\5\u0437\u021c\2\u05d2n\3\2\2\2\u05d3\u05d4\5\u0439")
buf.write("\u021d\2\u05d4\u05d5\5\u044b\u0226\2\u05d5\u05d6\5\u0451")
buf.write("\u0229\2\u05d6\u05d7\5\u0459\u022d\2\u05d7\u05d8\5\u043d")
buf.write("\u021f\2\u05d8p\3\2\2\2\u05d9\u05da\5\u0439\u021d\2\u05da")
buf.write("\u05db\5\u044b\u0226\2\u05db\u05dc\5\u045d\u022f\2\u05dc")
buf.write("\u05dd\5\u0459\u022d\2\u05dd\u05de\5\u045b\u022e\2\u05de")
buf.write("\u05df\5\u043d\u021f\2\u05df\u05e0\5\u0457\u022c\2\u05e0")
buf.write("r\3\2\2\2\u05e1\u05e2\5\u0439\u021d\2\u05e2\u05e3\5\u0451")
buf.write("\u0229\2\u05e3\u05e4\5\u044b\u0226\2\u05e4\u05e5\5\u044b")
buf.write("\u0226\2\u05e5\u05e6\5\u043d\u021f\2\u05e6\u05e7\5\u0439")
buf.write("\u021d\2\u05e7\u05e8\5\u045b\u022e\2\u05e8t\3\2\2\2\u05e9")
buf.write("\u05ea\5\u0439\u021d\2\u05ea\u05eb\5\u0451\u0229\2\u05eb")
buf.write("\u05ec\5\u044b\u0226\2\u05ec\u05ed\5\u045d\u022f\2\u05ed")
buf.write("\u05ee\5\u044d\u0227\2\u05ee\u05ef\5\u044f\u0228\2\u05ef")
buf.write("\u05f0\5\u0459\u022d\2\u05f0v\3\2\2\2\u05f1\u05f2\5\u0439")
buf.write("\u021d\2\u05f2\u05f3\5\u0451\u0229\2\u05f3\u05f4\5\u044d")
buf.write("\u0227\2\u05f4\u05f5\5\u044d\u0227\2\u05f5\u05f6\5\u043d")
buf.write("\u021f\2\u05f6\u05f7\5\u044f\u0228\2\u05f7\u05f8\5\u045b")
buf.write("\u022e\2\u05f8x\3\2\2\2\u05f9\u05fa\5\u0439\u021d\2\u05fa")
buf.write("\u05fb\5\u0451\u0229\2\u05fb\u05fc\5\u044d\u0227\2\u05fc")
buf.write("\u05fd\5\u044d\u0227\2\u05fd\u05fe\5\u0445\u0223\2\u05fe")
buf.write("\u05ff\5\u045b\u022e\2\u05ffz\3\2\2\2\u0600\u0601\5\u0439")
buf.write("\u021d\2\u0601\u0602\5\u0451\u0229\2\u0602\u0603\5\u044d")
buf.write("\u0227\2\u0603\u0604\5\u044d\u0227\2\u0604\u0605\5\u0445")
buf.write("\u0223\2\u0605\u0606\5\u045b\u022e\2\u0606\u0607\5\u045b")
buf.write("\u022e\2\u0607\u0608\5\u043d\u021f\2\u0608\u0609\5\u043b")
buf.write("\u021e\2\u0609|\3\2\2\2\u060a\u060b\5\u0439\u021d\2\u060b")
buf.write("\u060c\5\u0451\u0229\2\u060c\u060d\5\u044d\u0227\2\u060d")
buf.write("\u060e\5\u0453\u022a\2\u060e\u060f\5\u0435\u021b\2\u060f")
buf.write("\u0610\5\u045b\u022e\2\u0610\u0611\5\u0445\u0223\2\u0611")
buf.write("\u0612\5\u0437\u021c\2\u0612\u0613\5\u0445\u0223\2\u0613")
buf.write("\u0614\5\u044b\u0226\2\u0614\u0615\5\u0445\u0223\2\u0615")
buf.write("\u0616\5\u045b\u022e\2\u0616\u0617\5\u0465\u0233\2\u0617")
buf.write("~\3\2\2\2\u0618\u0619\5\u0439\u021d\2\u0619\u061a\5\u0451")
buf.write("\u0229\2\u061a\u061b\5\u044d\u0227\2\u061b\u061c\5\u0453")
buf.write("\u022a\2\u061c\u061d\5\u0445\u0223\2\u061d\u061e\5\u044b")
buf.write("\u0226\2\u061e\u061f\5\u043d\u021f\2\u061f\u0080\3\2\2")
buf.write("\2\u0620\u0621\5\u0439\u021d\2\u0621\u0622\5\u0451\u0229")
buf.write("\2\u0622\u0623\5\u044d\u0227\2\u0623\u0624\5\u0453\u022a")
buf.write("\2\u0624\u0625\5\u0451\u0229\2\u0625\u0626\5\u045d\u022f")
buf.write("\2\u0626\u0627\5\u044f\u0228\2\u0627\u0628\5\u043b\u021e")
buf.write("\2\u0628\u0082\3\2\2\2\u0629\u062a\5\u0439\u021d\2\u062a")
buf.write("\u062b\5\u0451\u0229\2\u062b\u062c\5\u044f\u0228\2\u062c")
buf.write("\u062d\5\u044f\u0228\2\u062d\u062e\5\u043d\u021f\2\u062e")
buf.write("\u062f\5\u0439\u021d\2\u062f\u0630\5\u045b\u022e\2\u0630")
buf.write("\u0084\3\2\2\2\u0631\u0632\5\u0439\u021d\2\u0632\u0633")
buf.write("\5\u0451\u0229\2\u0633\u0634\5\u044f\u0228\2\u0634\u0635")
buf.write("\5\u044f\u0228\2\u0635\u0636\5\u043d\u021f\2\u0636\u0637")
buf.write("\5\u0439\u021d\2\u0637\u0638\5\u045b\u022e\2\u0638\u0639")
buf.write("\7a\2\2\u0639\u063a\5\u0437\u021c\2\u063a\u063b\5\u0465")
buf.write("\u0233\2\u063b\u063c\7a\2\2\u063c\u063d\5\u0457\u022c")
buf.write("\2\u063d\u063e\5\u0451\u0229\2\u063e\u063f\5\u0451\u0229")
buf.write("\2\u063f\u0640\5\u045b\u022e\2\u0640\u0086\3\2\2\2\u0641")
buf.write("\u0642\5\u0439\u021d\2\u0642\u0643\5\u0451\u0229\2\u0643")
buf.write("\u0644\5\u044f\u0228\2\u0644\u0645\5\u0459\u022d\2\u0645")
buf.write("\u0646\5\u045b\u022e\2\u0646\u0647\5\u0435\u021b\2\u0647")
buf.write("\u0648\5\u044f\u0228\2\u0648\u0649\5\u045b\u022e\2\u0649")
buf.write("\u0088\3\2\2\2\u064a\u064b\5\u0439\u021d\2\u064b\u064c")
buf.write("\5\u0451\u0229\2\u064c\u064d\5\u044f\u0228\2\u064d\u064e")
buf.write("\5\u0459\u022d\2\u064e\u064f\5\u045b\u022e\2\u064f\u0650")
buf.write("\5\u0457\u022c\2\u0650\u0651\5\u0435\u021b\2\u0651\u0652")
buf.write("\5\u0445\u0223\2\u0652\u0653\5\u044f\u0228\2\u0653\u0654")
buf.write("\5\u045b\u022e\2\u0654\u008a\3\2\2\2\u0655\u0656\5\u0439")
buf.write("\u021d\2\u0656\u0657\5\u0451\u0229\2\u0657\u0658\5\u044f")
buf.write("\u0228\2\u0658\u0659\5\u0459\u022d\2\u0659\u065a\5\u045b")
buf.write("\u022e\2\u065a\u065b\5\u0457\u022c\2\u065b\u065c\5\u0435")
buf.write("\u021b\2\u065c\u065d\5\u0445\u0223\2\u065d\u065e\5\u044f")
buf.write("\u0228\2\u065e\u065f\5\u045b\u022e\2\u065f\u0660\5\u0459")
buf.write("\u022d\2\u0660\u008c\3\2\2\2\u0661\u0662\5\u0439\u021d")
buf.write("\2\u0662\u0663\5\u0451\u0229\2\u0663\u0664\5\u044f\u0228")
buf.write("\2\u0664\u0665\5\u0459\u022d\2\u0665\u0666\5\u045b\u022e")
buf.write("\2\u0666\u0667\5\u0457\u022c\2\u0667\u0668\5\u045d\u022f")
buf.write("\2\u0668\u0669\5\u0439\u021d\2\u0669\u066a\5\u045b\u022e")
buf.write("\2\u066a\u066b\5\u0451\u0229\2\u066b\u066c\5\u0457\u022c")
buf.write("\2\u066c\u008e\3\2\2\2\u066d\u066e\5\u0439\u021d\2\u066e")
buf.write("\u066f\5\u0451\u0229\2\u066f\u0670\5\u044f\u0228\2\u0670")
buf.write("\u0671\5\u045b\u022e\2\u0671\u0672\5\u043d\u021f\2\u0672")
buf.write("\u0673\5\u044f\u0228\2\u0673\u0674\5\u045b\u022e\2\u0674")
buf.write("\u0090\3\2\2\2\u0675\u0676\5\u0439\u021d\2\u0676\u0677")
buf.write("\5\u0451\u0229\2\u0677\u0678\5\u044f\u0228\2\u0678\u0679")
buf.write("\5\u045b\u022e\2\u0679\u067a\5\u043d\u021f\2\u067a\u067b")
buf.write("\5\u0463\u0232\2\u067b\u067c\5\u045b\u022e\2\u067c\u0092")
buf.write("\3\2\2\2\u067d\u067e\5\u0439\u021d\2\u067e\u067f\5\u0451")
buf.write("\u0229\2\u067f\u0680\5\u044f\u0228\2\u0680\u0681\5\u045b")
buf.write("\u022e\2\u0681\u0682\5\u0445\u0223\2\u0682\u0683\5\u044f")
buf.write("\u0228\2\u0683\u0684\5\u045d\u022f\2\u0684\u0685\5\u043d")
buf.write("\u021f\2\u0685\u0094\3\2\2\2\u0686\u0687\5\u0439\u021d")
buf.write("\2\u0687\u0688\5\u0451\u0229\2\u0688\u0689\5\u044f\u0228")
buf.write("\2\u0689\u068a\5\u045f\u0230\2\u068a\u068b\5\u043d\u021f")
buf.write("\2\u068b\u068c\5\u0457\u022c\2\u068c\u068d\5\u045b\u022e")
buf.write("\2\u068d\u0096\3\2\2\2\u068e\u068f\5\u0439\u021d\2\u068f")
buf.write("\u0690\5\u0451\u0229\2\u0690\u0691\5\u0457\u022c\2\u0691")
buf.write("\u0692\5\u0457\u022c\2\u0692\u0693\5\u045d\u022f\2\u0693")
buf.write("\u0694\5\u0453\u022a\2\u0694\u0695\5\u045b\u022e\2\u0695")
buf.write("\u0696\7a\2\2\u0696\u0697\5\u0463\u0232\2\u0697\u0698")
buf.write("\5\u0445\u0223\2\u0698\u0699\5\u043b\u021e\2\u0699\u0098")
buf.write("\3\2\2\2\u069a\u069b\5\u0439\u021d\2\u069b\u069c\5\u0451")
buf.write("\u0229\2\u069c\u069d\5\u0457\u022c\2\u069d\u069e\5\u0457")
buf.write("\u022c\2\u069e\u069f\5\u045d\u022f\2\u069f\u06a0\5\u0453")
buf.write("\u022a\2\u06a0\u06a1\5\u045b\u022e\2\u06a1\u06a2\7a\2")
buf.write("\2\u06a2\u06a3\5\u0463\u0232\2\u06a3\u06a4\5\u0445\u0223")
buf.write("\2\u06a4\u06a5\5\u043b\u021e\2\u06a5\u06a6\7a\2\2\u06a6")
buf.write("\u06a7\5\u0435\u021b\2\u06a7\u06a8\5\u044b\u0226\2\u06a8")
buf.write("\u06a9\5\u044b\u0226\2\u06a9\u009a\3\2\2\2\u06aa\u06ab")
buf.write("\5\u0439\u021d\2\u06ab\u06ac\5\u0451\u0229\2\u06ac\u06ad")
buf.write("\5\u0459\u022d\2\u06ad\u06ae\5\u045b\u022e\2\u06ae\u009c")
buf.write("\3\2\2\2\u06af\u06b0\5\u0439\u021d\2\u06b0\u06b1\5\u0451")
buf.write("\u0229\2\u06b1\u06b2\5\u045d\u022f\2\u06b2\u06b3\5\u044f")
buf.write("\u0228\2\u06b3\u06b4\5\u045b\u022e\2\u06b4\u009e\3\2\2")
buf.write("\2\u06b5\u06b6\5\u0439\u021d\2\u06b6\u06b7\5\u0457\u022c")
buf.write("\2\u06b7\u06b8\5\u043d\u021f\2\u06b8\u06b9\5\u0435\u021b")
buf.write("\2\u06b9\u06ba\5\u045b\u022e\2\u06ba\u06bb\5\u043d\u021f")
buf.write("\2\u06bb\u00a0\3\2\2\2\u06bc\u06bd\5\u0439\u021d\2\u06bd")
buf.write("\u06be\5\u0457\u022c\2\u06be\u06bf\5\u0451\u0229\2\u06bf")
buf.write("\u06c0\5\u0459\u022d\2\u06c0\u06c1\5\u0459\u022d\2\u06c1")
buf.write("\u00a2\3\2\2\2\u06c2\u06c3\5\u0439\u021d\2\u06c3\u06c4")
buf.write("\5\u045d\u022f\2\u06c4\u06c5\5\u0437\u021c\2\u06c5\u06c6")
buf.write("\5\u043d\u021f\2\u06c6\u00a4\3\2\2\2\u06c7\u06c8\5\u0439")
buf.write("\u021d\2\u06c8\u06c9\5\u045d\u022f\2\u06c9\u06ca\5\u0457")
buf.write("\u022c\2\u06ca\u06cb\5\u0457\u022c\2\u06cb\u06cc\5\u043d")
buf.write("\u021f\2\u06cc\u06cd\5\u044f\u0228\2\u06cd\u06ce\5\u045b")
buf.write("\u022e\2\u06ce\u00a6\3\2\2\2\u06cf\u06d0\5\u0439\u021d")
buf.write("\2\u06d0\u06d1\5\u045d\u022f\2\u06d1\u06d2\5\u0457\u022c")
buf.write("\2\u06d2\u06d3\5\u0457\u022c\2\u06d3\u06d4\5\u043d\u021f")
buf.write("\2\u06d4\u06d5\5\u044f\u0228\2\u06d5\u06d6\5\u045b\u022e")
buf.write("\2\u06d6\u06d7\7a\2\2\u06d7\u06d8\5\u045d\u022f\2\u06d8")
buf.write("\u06d9\5\u0459\u022d\2\u06d9\u06da\5\u043d\u021f\2\u06da")
buf.write("\u06db\5\u0457\u022c\2\u06db\u00a8\3\2\2\2\u06dc\u06dd")
buf.write("\5\u0439\u021d\2\u06dd\u06de\5\u045d\u022f\2\u06de\u06df")
buf.write("\5\u0457\u022c\2\u06df\u06e0\5\u0459\u022d\2\u06e0\u06e1")
buf.write("\5\u0451\u0229\2\u06e1\u06e2\5\u0457\u022c\2\u06e2\u00aa")
buf.write("\3\2\2\2\u06e3\u06e4\5\u0439\u021d\2\u06e4\u06e5\5\u045d")
buf.write("\u022f\2\u06e5\u06e6\5\u0459\u022d\2\u06e6\u06e7\5\u045b")
buf.write("\u022e\2\u06e7\u06e8\5\u0451\u0229\2\u06e8\u06e9\5\u044d")
buf.write("\u0227\2\u06e9\u06ea\5\u043b\u021e\2\u06ea\u06eb\5\u0435")
buf.write("\u021b\2\u06eb\u06ec\5\u045b\u022e\2\u06ec\u06ed\5\u045d")
buf.write("\u022f\2\u06ed\u06ee\5\u044d\u0227\2\u06ee\u00ac\3\2\2")
buf.write("\2\u06ef\u06f0\5\u0439\u021d\2\u06f0\u06f1\5\u0465\u0233")
buf.write("\2\u06f1\u06f2\5\u0439\u021d\2\u06f2\u06f3\5\u044b\u0226")
buf.write("\2\u06f3\u06f4\5\u043d\u021f\2\u06f4\u00ae\3\2\2\2\u06f5")
buf.write("\u06f6\5\u043b\u021e\2\u06f6\u06f7\5\u0435\u021b\2\u06f7")
buf.write("\u06f8\5\u045b\u022e\2\u06f8\u06f9\5\u0435\u021b\2\u06f9")
buf.write("\u00b0\3\2\2\2\u06fa\u06fb\5\u043b\u021e\2\u06fb\u06fc")
buf.write("\5\u0435\u021b\2\u06fc\u06fd\5\u045b\u022e\2\u06fd\u06fe")
buf.write("\5\u0435\u021b\2\u06fe\u06ff\5\u0437\u021c\2\u06ff\u0700")
buf.write("\5\u0435\u021b\2\u0700\u0701\5\u0459\u022d\2\u0701\u0702")
buf.write("\5\u043d\u021f\2\u0702\u00b2\3\2\2\2\u0703\u0704\5\u043b")
buf.write("\u021e\2\u0704\u0705\5\u0435\u021b\2\u0705\u0706\5\u045b")
buf.write("\u022e\2\u0706\u0707\5\u043d\u021f\2\u0707\u00b4\3\2\2")
buf.write("\2\u0708\u0709\5\u043b\u021e\2\u0709\u070a\5\u0435\u021b")
buf.write("\2\u070a\u070b\5\u0465\u0233\2\u070b\u00b6\3\2\2\2\u070c")
buf.write("\u070d\5\u043b\u021e\2\u070d\u070e\5\u0437\u021c\2\u070e")
buf.write("\u070f\7a\2\2\u070f\u0710\5\u0457\u022c\2\u0710\u0711")
buf.write("\5\u0451\u0229\2\u0711\u0712\5\u044b\u0226\2\u0712\u0713")
buf.write("\5\u043d\u021f\2\u0713\u0714\7a\2\2\u0714\u0715\5\u0439")
buf.write("\u021d\2\u0715\u0716\5\u0443\u0222\2\u0716\u0717\5\u0435")
buf.write("\u021b\2\u0717\u0718\5\u044f\u0228\2\u0718\u0719\5\u0441")
buf.write("\u0221\2\u0719\u071a\5\u043d\u021f\2\u071a\u00b8\3\2\2")
buf.write("\2\u071b\u071c\5\u043b\u021e\2\u071c\u071d\5\u0437\u021c")
buf.write("\2\u071d\u071e\5\u045b\u022e\2\u071e\u071f\5\u0445\u0223")
buf.write("\2\u071f\u0720\5\u044d\u0227\2\u0720\u0721\5\u043d\u021f")
buf.write("\2\u0721\u0722\5\u0467\u0234\2\u0722\u0723\5\u0451\u0229")
buf.write("\2\u0723\u0724\5\u044f\u0228\2\u0724\u0725\5\u043d\u021f")
buf.write("\2\u0725\u00ba\3\2\2\2\u0726\u0727\5\u043b\u021e\2\u0727")
buf.write("\u0728\5\u043b\u021e\2\u0728\u0729\5\u044b\u0226\2\u0729")
buf.write("\u00bc\3\2\2\2\u072a\u072b\5\u043b\u021e\2\u072b\u072c")
buf.write("\5\u043d\u021f\2\u072c\u072d\5\u0437\u021c\2\u072d\u072e")
buf.write("\5\u045d\u022f\2\u072e\u072f\5\u0441\u0221\2\u072f\u00be")
buf.write("\3\2\2\2\u0730\u0731\5\u043b\u021e\2\u0731\u0732\5\u043d")
buf.write("\u021f\2\u0732\u0733\5\u0439\u021d\2\u0733\u00c0\3\2\2")
buf.write("\2\u0734\u0735\5\u043b\u021e\2\u0735\u0736\5\u043d\u021f")
buf.write("\2\u0736\u0737\5\u0439\u021d\2\u0737\u0738\5\u0445\u0223")
buf.write("\2\u0738\u0739\5\u044d\u0227\2\u0739\u073a\5\u0435\u021b")
buf.write("\2\u073a\u073b\5\u044b\u0226\2\u073b\u00c2\3\2\2\2\u073c")
buf.write("\u073d\5\u043b\u021e\2\u073d\u073e\5\u043d\u021f\2\u073e")
buf.write("\u073f\5\u0439\u021d\2\u073f\u0740\5\u044b\u0226\2\u0740")
buf.write("\u0741\5\u0435\u021b\2\u0741\u0742\5\u0457\u022c\2\u0742")
buf.write("\u0743\5\u043d\u021f\2\u0743\u00c4\3\2\2\2\u0744\u0745")
buf.write("\5\u043b\u021e\2\u0745\u0746\5\u043d\u021f\2\u0746\u0747")
buf.write("\5\u0439\u021d\2\u0747\u0748\5\u0451\u0229\2\u0748\u0749")
buf.write("\5\u044d\u0227\2\u0749\u074a\5\u0453\u022a\2\u074a\u074b")
buf.write("\5\u0451\u0229\2\u074b\u074c\5\u0459\u022d\2\u074c\u074d")
buf.write("\5\u043d\u021f\2\u074d\u00c6\3\2\2\2\u074e\u074f\5\u043b")
buf.write("\u021e\2\u074f\u0750\5\u043d\u021f\2\u0750\u0751\5\u0439")
buf.write("\u021d\2\u0751\u0752\5\u0457\u022c\2\u0752\u0753\5\u043d")
buf.write("\u021f\2\u0753\u0754\5\u044d\u0227\2\u0754\u0755\5\u043d")
buf.write("\u021f\2\u0755\u0756\5\u044f\u0228\2\u0756\u0757\5\u045b")
buf.write("\u022e\2\u0757\u00c8\3\2\2\2\u0758\u0759\5\u043b\u021e")
buf.write("\2\u0759\u075a\5\u043d\u021f\2\u075a\u075b\5\u043f\u0220")
buf.write("\2\u075b\u075c\5\u0435\u021b\2\u075c\u075d\5\u045d\u022f")
buf.write("\2\u075d\u075e\5\u044b\u0226\2\u075e\u075f\5\u045b\u022e")
buf.write("\2\u075f\u00ca\3\2\2\2\u0760\u0761\5\u043b\u021e\2\u0761")
buf.write("\u0762\5\u043d\u021f\2\u0762\u0763\5\u043f\u0220\2\u0763")
buf.write("\u0764\5\u0435\u021b\2\u0764\u0765\5\u045d\u022f\2\u0765")
buf.write("\u0766\5\u044b\u0226\2\u0766\u0767\5\u045b\u022e\2\u0767")
buf.write("\u0768\5\u0459\u022d\2\u0768\u00cc\3\2\2\2\u0769\u076a")
buf.write("\5\u043b\u021e\2\u076a\u076b\5\u043d\u021f\2\u076b\u076c")
buf.write("\5\u043f\u0220\2\u076c\u076d\5\u043d\u021f\2\u076d\u076e")
buf.write("\5\u0457\u022c\2\u076e\u076f\5\u0457\u022c\2\u076f\u0770")
buf.write("\5\u043d\u021f\2\u0770\u0771\5\u043b\u021e\2\u0771\u00ce")
buf.write("\3\2\2\2\u0772\u0773\5\u043b\u021e\2\u0773\u0774\5\u043d")
buf.write("\u021f\2\u0774\u0775\5\u043f\u0220\2\u0775\u0776\5\u0445")
buf.write("\u0223\2\u0776\u0777\5\u044f\u0228\2\u0777\u0778\5\u043d")
buf.write("\u021f\2\u0778\u0779\5\u0457\u022c\2\u0779\u00d0\3\2\2")
buf.write("\2\u077a\u077b\5\u043b\u021e\2\u077b\u077c\5\u043d\u021f")
buf.write("\2\u077c\u077d\5\u044b\u0226\2\u077d\u077e\5\u043d\u021f")
buf.write("\2\u077e\u077f\5\u045b\u022e\2\u077f\u0780\5\u043d\u021f")
buf.write("\2\u0780\u00d2\3\2\2\2\u0781\u0782\5\u043b\u021e\2\u0782")
buf.write("\u0783\5\u043d\u021f\2\u0783\u0784\5\u0453\u022a\2\u0784")
buf.write("\u0785\5\u045b\u022e\2\u0785\u0786\5\u0443\u0222\2\u0786")
buf.write("\u00d4\3\2\2\2\u0787\u0788\5\u043b\u021e\2\u0788\u0789")
buf.write("\5\u043d\u021f\2\u0789\u078a\5\u0459\u022d\2\u078a\u078b")
buf.write("\5\u0439\u021d\2\u078b\u00d6\3\2\2\2\u078c\u078d\5\u043b")
buf.write("\u021e\2\u078d\u078e\5\u043d\u021f\2\u078e\u078f\5\u045b")
buf.write("\u022e\2\u078f\u0790\5\u043d\u021f\2\u0790\u0791\5\u0457")
buf.write("\u022c\2\u0791\u0792\5\u044d\u0227\2\u0792\u0793\5\u0445")
buf.write("\u0223\2\u0793\u0794\5\u044f\u0228\2\u0794\u0795\5\u0445")
buf.write("\u0223\2\u0795\u0796\5\u0459\u022d\2\u0796\u0797\5\u045b")
buf.write("\u022e\2\u0797\u0798\5\u0445\u0223\2\u0798\u0799\5\u0439")
buf.write("\u021d\2\u0799\u00d8\3\2\2\2\u079a\u079b\5\u043b\u021e")
buf.write("\2\u079b\u079c\5\u0445\u0223\2\u079c\u079d\5\u044d\u0227")
buf.write("\2\u079d\u079e\5\u043d\u021f\2\u079e\u079f\5\u044f\u0228")
buf.write("\2\u079f\u07a0\5\u0459\u022d\2\u07a0\u07a1\5\u0445\u0223")
buf.write("\2\u07a1\u07a2\5\u0451\u0229\2\u07a2\u07a3\5\u044f\u0228")
buf.write("\2\u07a3\u00da\3\2\2\2\u07a4\u07a5\5\u043b\u021e\2\u07a5")
buf.write("\u07a6\5\u0445\u0223\2\u07a6\u07a7\5\u0459\u022d\2\u07a7")
buf.write("\u07a8\5\u0435\u021b\2\u07a8\u07a9\5\u0437\u021c\2\u07a9")
buf.write("\u07aa\5\u044b\u0226\2\u07aa\u07ab\5\u043d\u021f\2\u07ab")
buf.write("\u00dc\3\2\2\2\u07ac\u07ad\5\u043b\u021e\2\u07ad\u07ae")
buf.write("\5\u0445\u0223\2\u07ae\u07af\5\u0459\u022d\2\u07af\u07b0")
buf.write("\5\u0435\u021b\2\u07b0\u07b1\5\u0459\u022d\2\u07b1\u07b2")
buf.write("\5\u0459\u022d\2\u07b2\u07b3\5\u0451\u0229\2\u07b3\u07b4")
buf.write("\5\u0439\u021d\2\u07b4\u07b5\5\u0445\u0223\2\u07b5\u07b6")
buf.write("\5\u0435\u021b\2\u07b6\u07b7\5\u045b\u022e\2\u07b7\u07b8")
buf.write("\5\u043d\u021f\2\u07b8\u00de\3\2\2\2\u07b9\u07ba\5\u043b")
buf.write("\u021e\2\u07ba\u07bb\5\u0445\u0223\2\u07bb\u07bc\5\u0459")
buf.write("\u022d\2\u07bc\u07bd\5\u045b\u022e\2\u07bd\u07be\5\u0445")
buf.write("\u0223\2\u07be\u07bf\5\u044f\u0228\2\u07bf\u07c0\5\u0439")
buf.write("\u021d\2\u07c0\u07c1\5\u045b\u022e\2\u07c1\u00e0\3\2\2")
buf.write("\2\u07c2\u07c3\5\u043b\u021e\2\u07c3\u07c4\5\u0451\u0229")
buf.write("\2\u07c4\u07c5\5\u0439\u021d\2\u07c5\u07c6\5\u045d\u022f")
buf.write("\2\u07c6\u07c7\5\u044d\u0227\2\u07c7\u07c8\5\u043d\u021f")
buf.write("\2\u07c8\u07c9\5\u044f\u0228\2\u07c9\u07ca\5\u045b\u022e")
buf.write("\2\u07ca\u00e2\3\2\2\2\u07cb\u07cc\5\u043b\u021e\2\u07cc")
buf.write("\u07cd\5\u0451\u0229\2\u07cd\u07ce\5\u045d\u022f\2\u07ce")
buf.write("\u07cf\5\u0437\u021c\2\u07cf\u07d0\5\u044b\u0226\2\u07d0")
buf.write("\u07d1\5\u043d\u021f\2\u07d1\u00e4\3\2\2\2\u07d2\u07d3")
buf.write("\5\u043b\u021e\2\u07d3\u07d4\5\u0457\u022c\2\u07d4\u07d5")
buf.write("\5\u0451\u0229\2\u07d5\u07d6\5\u0453\u022a\2\u07d6\u00e6")
buf.write("\3\2\2\2\u07d7\u07d8\5\u043b\u021e\2\u07d8\u07d9\5\u0459")
buf.write("\u022d\2\u07d9\u07da\5\u0445\u0223\2\u07da\u07db\5\u044f")
buf.write("\u0228\2\u07db\u07dc\5\u045b\u022e\2\u07dc\u07dd\5\u043d")
buf.write("\u021f\2\u07dd\u07de\5\u0457\u022c\2\u07de\u07df\5\u045f")
buf.write("\u0230\2\u07df\u07e0\5\u0435\u021b\2\u07e0\u07e1\5\u044b")
buf.write("\u0226\2\u07e1\u07e2\7a\2\2\u07e2\u07e3\5\u045d\u022f")
buf.write("\2\u07e3\u07e4\5\u044f\u0228\2\u07e4\u07e5\5\u0439\u021d")
buf.write("\2\u07e5\u07e6\5\u0451\u0229\2\u07e6\u07e7\5\u044f\u0228")
buf.write("\2\u07e7\u07e8\5\u0459\u022d\2\u07e8\u07e9\5\u045b\u022e")
buf.write("\2\u07e9\u07ea\5\u0457\u022c\2\u07ea\u07eb\5\u0435\u021b")
buf.write("\2\u07eb\u07ec\5\u0445\u0223\2\u07ec\u07ed\5\u044f\u0228")
buf.write("\2\u07ed\u07ee\5\u043d\u021f\2\u07ee\u07ef\5\u043b\u021e")
buf.write("\2\u07ef\u00e8\3\2\2\2\u07f0\u07f1\5\u043d\u021f\2\u07f1")
buf.write("\u07f2\5\u0435\u021b\2\u07f2\u07f3\5\u0439\u021d\2\u07f3")
buf.write("\u07f4\5\u0443\u0222\2\u07f4\u00ea\3\2\2\2\u07f5\u07f6")
buf.write("\5\u043d\u021f\2\u07f6\u07f7\5\u044b\u0226\2\u07f7\u07f8")
buf.write("\5\u043d\u021f\2\u07f8\u07f9\5\u044d\u0227\2\u07f9\u07fa")
buf.write("\5\u043d\u021f\2\u07fa\u07fb\5\u044f\u0228\2\u07fb\u07fc")
buf.write("\5\u045b\u022e\2\u07fc\u00ec\3\2\2\2\u07fd\u07fe\5\u043d")
buf.write("\u021f\2\u07fe\u07ff\5\u044b\u0226\2\u07ff\u0800\5\u0459")
buf.write("\u022d\2\u0800\u0801\5\u043d\u021f\2\u0801\u00ee\3\2\2")
buf.write("\2\u0802\u0803\5\u043d\u021f\2\u0803\u0804\5\u044b\u0226")
buf.write("\2\u0804\u0805\5\u0459\u022d\2\u0805\u0806\5\u0445\u0223")
buf.write("\2\u0806\u0807\5\u043f\u0220\2\u0807\u00f0\3\2\2\2\u0808")
buf.write("\u0809\5\u043d\u021f\2\u0809\u080a\5\u044d\u0227\2\u080a")
buf.write("\u080b\5\u0453\u022a\2\u080b\u080c\5\u045b\u022e\2\u080c")
buf.write("\u080d\5\u0465\u0233\2\u080d\u00f2\3\2\2\2\u080e\u080f")
buf.write("\5\u043d\u021f\2\u080f\u0810\5\u044f\u0228\2\u0810\u0811")
buf.write("\5\u0435\u021b\2\u0811\u0812\5\u0437\u021c\2\u0812\u0813")
buf.write("\5\u044b\u0226\2\u0813\u0814\5\u043d\u021f\2\u0814\u00f4")
buf.write("\3\2\2\2\u0815\u0816\5\u043d\u021f\2\u0816\u0817\5\u044f")
buf.write("\u0228\2\u0817\u0818\5\u0439\u021d\2\u0818\u0819\5\u0451")
buf.write("\u0229\2\u0819\u081a\5\u043b\u021e\2\u081a\u081b\5\u0445")
buf.write("\u0223\2\u081b\u081c\5\u044f\u0228\2\u081c\u081d\5\u0441")
buf.write("\u0221\2\u081d\u00f6\3\2\2\2\u081e\u081f\5\u043d\u021f")
buf.write("\2\u081f\u0820\5\u044f\u0228\2\u0820\u0821\5\u043b\u021e")
buf.write("\2\u0821\u00f8\3\2\2\2\u0822\u0823\5\u043d\u021f\2\u0823")
buf.write("\u0824\5\u044f\u0228\2\u0824\u0825\5\u045b\u022e\2\u0825")
buf.write("\u0826\5\u0445\u0223\2\u0826\u0827\5\u045b\u022e\2\u0827")
buf.write("\u0828\5\u0465\u0233\2\u0828\u0829\5\u043d\u021f\2\u0829")
buf.write("\u082a\5\u0459\u022d\2\u082a\u082b\5\u0439\u021d\2\u082b")
buf.write("\u082c\5\u0435\u021b\2\u082c\u082d\5\u0453\u022a\2\u082d")
buf.write("\u082e\5\u0445\u0223\2\u082e\u082f\5\u044f\u0228\2\u082f")
buf.write("\u0830\5\u0441\u0221\2\u0830\u00fa\3\2\2\2\u0831\u0832")
buf.write("\5\u043d\u021f\2\u0832\u0833\5\u0457\u022c\2\u0833\u0834")
buf.write("\5\u0457\u022c\2\u0834\u00fc\3\2\2\2\u0835\u0836\5\u043d")
buf.write("\u021f\2\u0836\u0837\5\u0457\u022c\2\u0837\u0838\5\u0457")
buf.write("\u022c\2\u0838\u0839\5\u0451\u0229\2\u0839\u083a\5\u0457")
buf.write("\u022c\2\u083a\u083b\5\u0459\u022d\2\u083b\u00fe\3\2\2")
buf.write("\2\u083c\u083d\5\u043d\u021f\2\u083d\u083e\5\u0459\u022d")
buf.write("\2\u083e\u083f\5\u0439\u021d\2\u083f\u0840\5\u0435\u021b")
buf.write("\2\u0840\u0841\5\u0453\u022a\2\u0841\u0842\5\u043d\u021f")
buf.write("\2\u0842\u0100\3\2\2\2\u0843\u0844\5\u043d\u021f\2\u0844")
buf.write("\u0845\5\u045f\u0230\2\u0845\u0846\5\u0435\u021b\2\u0846")
buf.write("\u0847\5\u044b\u0226\2\u0847\u0848\5\u044f\u0228\2\u0848")
buf.write("\u0849\5\u0435\u021b\2\u0849\u084a\5\u044d\u0227\2\u084a")
buf.write("\u084b\5\u043d\u021f\2\u084b\u0102\3\2\2\2\u084c\u084d")
buf.write("\5\u043d\u021f\2\u084d\u084e\5\u0463\u0232\2\u084e\u084f")
buf.write("\5\u0439\u021d\2\u084f\u0850\5\u043d\u021f\2\u0850\u0851")
buf.write("\5\u0453\u022a\2\u0851\u0852\5\u045b\u022e\2\u0852\u0104")
buf.write("\3\2\2\2\u0853\u0854\5\u043d\u021f\2\u0854\u0855\5\u0463")
buf.write("\u0232\2\u0855\u0856\5\u0439\u021d\2\u0856\u0857\5\u043d")
buf.write("\u021f\2\u0857\u0858\5\u0453\u022a\2\u0858\u0859\5\u045b")
buf.write("\u022e\2\u0859\u085a\5\u0445\u0223\2\u085a\u085b\5\u0451")
buf.write("\u0229\2\u085b\u085c\5\u044f\u0228\2\u085c\u0106\3\2\2")
buf.write("\2\u085d\u085e\5\u043d\u021f\2\u085e\u085f\5\u0463\u0232")
buf.write("\2\u085f\u0860\5\u0439\u021d\2\u0860\u0861\5\u043d\u021f")
buf.write("\2\u0861\u0862\5\u0453\u022a\2\u0862\u0863\5\u045b\u022e")
buf.write("\2\u0863\u0864\5\u0445\u0223\2\u0864\u0865\5\u0451\u0229")
buf.write("\2\u0865\u0866\5\u044f\u0228\2\u0866\u0867\7a\2\2\u0867")
buf.write("\u0868\5\u0445\u0223\2\u0868\u0869\5\u044f\u0228\2\u0869")
buf.write("\u086a\5\u0445\u0223\2\u086a\u086b\5\u045b\u022e\2\u086b")
buf.write("\u0108\3\2\2\2\u086c\u086d\5\u043d\u021f\2\u086d\u086e")
buf.write("\5\u0463\u0232\2\u086e\u086f\5\u0439\u021d\2\u086f\u0870")
buf.write("\5\u043d\u021f\2\u0870\u0871\5\u0453\u022a\2\u0871\u0872")
buf.write("\5\u045b\u022e\2\u0872\u0873\5\u0445\u0223\2\u0873\u0874")
buf.write("\5\u0451\u0229\2\u0874\u0875\5\u044f\u0228\2\u0875\u0876")
buf.write("\5\u0459\u022d\2\u0876\u010a\3\2\2\2\u0877\u0878\5\u043d")
buf.write("\u021f\2\u0878\u0879\5\u0463\u0232\2\u0879\u087a\5\u0439")
buf.write("\u021d\2\u087a\u087b\5\u044b\u0226\2\u087b\u087c\5\u045d")
buf.write("\u022f\2\u087c\u087d\5\u043b\u021e\2\u087d\u087e\5\u043d")
buf.write("\u021f\2\u087e\u010c\3\2\2\2\u087f\u0880\5\u043d\u021f")
buf.write("\2\u0880\u0881\5\u0463\u0232\2\u0881\u0882\5\u0439\u021d")
buf.write("\2\u0882\u0883\5\u044b\u0226\2\u0883\u0884\5\u045d\u022f")
buf.write("\2\u0884\u0885\5\u0459\u022d\2\u0885\u0886\5\u0445\u0223")
buf.write("\2\u0886\u0887\5\u045f\u0230\2\u0887\u0888\5\u043d\u021f")
buf.write("\2\u0888\u010e\3\2\2\2\u0889\u088a\5\u043d\u021f\2\u088a")
buf.write("\u088b\5\u0463\u0232\2\u088b\u088c\5\u043d\u021f\2\u088c")
buf.write("\u088d\5\u0439\u021d\2\u088d\u088e\5\u045d\u022f\2\u088e")
buf.write("\u088f\5\u045b\u022e\2\u088f\u0890\5\u043d\u021f\2\u0890")
buf.write("\u0110\3\2\2\2\u0891\u0892\5\u043d\u021f\2\u0892\u0893")
buf.write("\5\u0463\u0232\2\u0893\u0894\5\u0445\u0223\2\u0894\u0895")
buf.write("\5\u0459\u022d\2\u0895\u0896\5\u045b\u022e\2\u0896\u0897")
buf.write("\5\u0459\u022d\2\u0897\u0112\3\2\2\2\u0898\u0899\5\u043d")
buf.write("\u021f\2\u0899\u089a\5\u0463\u0232\2\u089a\u089b\5\u0445")
buf.write("\u0223\2\u089b\u089c\5\u045b\u022e\2\u089c\u0114\3\2\2")
buf.write("\2\u089d\u089e\5\u043d\u021f\2\u089e\u089f\5\u0463\u0232")
buf.write("\2\u089f\u08a0\5\u0453\u022a\2\u08a0\u08a1\5\u044b\u0226")
buf.write("\2\u08a1\u08a2\5\u0435\u021b\2\u08a2\u08a3\5\u0445\u0223")
buf.write("\2\u08a3\u08a4\5\u044f\u0228\2\u08a4\u0116\3\2\2\2\u08a5")
buf.write("\u08a6\5\u043d\u021f\2\u08a6\u08a7\5\u0463\u0232\2\u08a7")
buf.write("\u08a8\5\u045b\u022e\2\u08a8\u08a9\5\u043d\u021f\2\u08a9")
buf.write("\u08aa\5\u0457\u022c\2\u08aa\u08ab\5\u044f\u0228\2\u08ab")
buf.write("\u08ac\5\u0435\u021b\2\u08ac\u08ad\5\u044b\u0226\2\u08ad")
buf.write("\u0118\3\2\2\2\u08ae\u08af\5\u043d\u021f\2\u08af\u08b0")
buf.write("\5\u0463\u0232\2\u08b0\u08b1\5\u045b\u022e\2\u08b1\u08b2")
buf.write("\5\u0457\u022c\2\u08b2\u08b3\5\u0435\u021b\2\u08b3\u08b4")
buf.write("\5\u0439\u021d\2\u08b4\u08b5\5\u045b\u022e\2\u08b5\u011a")
buf.write("\3\2\2\2\u08b6\u08b7\5\u043f\u0220\2\u08b7\u08b8\5\u0435")
buf.write("\u021b\2\u08b8\u08b9\5\u0445\u0223\2\u08b9\u08ba\5\u044b")
buf.write("\u0226\2\u08ba\u08bb\5\u045d\u022f\2\u08bb\u08bc\5\u0457")
buf.write("\u022c\2\u08bc\u08bd\5\u043d\u021f\2\u08bd\u011c\3\2\2")
buf.write("\2\u08be\u08bf\5\u043f\u0220\2\u08bf\u08c0\5\u0435\u021b")
buf.write("\2\u08c0\u08c1\5\u044b\u0226\2\u08c1\u08c2\5\u0459\u022d")
buf.write("\2\u08c2\u08c3\5\u043d\u021f\2\u08c3\u011e\3\2\2\2\u08c4")
buf.write("\u08c5\5\u043f\u0220\2\u08c5\u08c6\5\u043d\u021f\2\u08c6")
buf.write("\u08c7\5\u045b\u022e\2\u08c7\u08c8\5\u0439\u021d\2\u08c8")
buf.write("\u08c9\5\u0443\u0222\2\u08c9\u0120\3\2\2\2\u08ca\u08cb")
buf.write("\5\u043f\u0220\2\u08cb\u08cc\5\u0445\u0223\2\u08cc\u08cd")
buf.write("\5\u044f\u0228\2\u08cd\u08ce\5\u0435\u021b\2\u08ce\u08cf")
buf.write("\5\u044b\u0226\2\u08cf\u0122\3\2\2\2\u08d0\u08d1\5\u043f")
buf.write("\u0220\2\u08d1\u08d2\5\u0445\u0223\2\u08d2\u08d3\5\u0457")
buf.write("\u022c\2\u08d3\u08d4\5\u0459\u022d\2\u08d4\u08d5\5\u045b")
buf.write("\u022e\2\u08d5\u0124\3\2\2\2\u08d6\u08d7\5\u043f\u0220")
buf.write("\2\u08d7\u08d8\5\u0445\u0223\2\u08d8\u08d9\5\u0457\u022c")
buf.write("\2\u08d9\u08da\5\u0459\u022d\2\u08da\u08db\5\u045b\u022e")
buf.write("\2\u08db\u08dc\7a\2\2\u08dc\u08dd\5\u045f\u0230\2\u08dd")
buf.write("\u08de\5\u0435\u021b\2\u08de\u08df\5\u044b\u0226\2\u08df")
buf.write("\u08e0\5\u045d\u022f\2\u08e0\u08e1\5\u043d\u021f\2\u08e1")
buf.write("\u0126\3\2\2\2\u08e2\u08e3\5\u043f\u0220\2\u08e3\u08e4")
buf.write("\5\u044b\u0226\2\u08e4\u08e5\5\u0451\u0229\2\u08e5\u08e6")
buf.write("\5\u0435\u021b\2\u08e6\u08e7\5\u045b\u022e\2\u08e7\u0128")
buf.write("\3\2\2\2\u08e8\u08e9\5\u043f\u0220\2\u08e9\u08ea\5\u0451")
buf.write("\u0229\2\u08ea\u08eb\5\u044b\u0226\2\u08eb\u08ec\5\u044b")
buf.write("\u0226\2\u08ec\u08ed\5\u0451\u0229\2\u08ed\u08ee\5\u0461")
buf.write("\u0231\2\u08ee\u08ef\5\u0445\u0223\2\u08ef\u08f0\5\u044f")
buf.write("\u0228\2\u08f0\u08f1\5\u0441\u0221\2\u08f1\u012a\3\2\2")
buf.write("\2\u08f2\u08f3\5\u043f\u0220\2\u08f3\u08f4\5\u0451\u0229")
buf.write("\2\u08f4\u08f5\5\u044b\u0226\2\u08f5\u08f6\5\u044b\u0226")
buf.write("\2\u08f6\u08f7\5\u0451\u0229\2\u08f7\u08f8\5\u0461\u0231")
buf.write("\2\u08f8\u08f9\5\u0459\u022d\2\u08f9\u012c\3\2\2\2\u08fa")
buf.write("\u08fb\5\u043f\u0220\2\u08fb\u08fc\5\u0451\u0229\2\u08fc")
buf.write("\u08fd\5\u0457\u022c\2\u08fd\u012e\3\2\2\2\u08fe\u08ff")
buf.write("\5\u043f\u0220\2\u08ff\u0900\5\u0451\u0229\2\u0900\u0901")
buf.write("\5\u0457\u022c\2\u0901\u0902\5\u0435\u021b\2\u0902\u0903")
buf.write("\5\u044b\u0226\2\u0903\u0904\5\u044b\u0226\2\u0904\u0130")
buf.write("\3\2\2\2\u0905\u0906\5\u043f\u0220\2\u0906\u0907\5\u0451")
buf.write("\u0229\2\u0907\u0908\5\u0457\u022c\2\u0908\u0909\5\u0439")
buf.write("\u021d\2\u0909\u090a\5\u043d\u021f\2\u090a\u0132\3\2\2")
buf.write("\2\u090b\u090c\5\u043f\u0220\2\u090c\u090d\5\u0457\u022c")
buf.write("\2\u090d\u090e\5\u0451\u0229\2\u090e\u090f\5\u044d\u0227")
buf.write("\2\u090f\u0134\3\2\2\2\u0910\u0911\5\u043f\u0220\2\u0911")
buf.write("\u0912\5\u045d\u022f\2\u0912\u0913\5\u044b\u0226\2\u0913")
buf.write("\u0914\5\u044b\u0226\2\u0914\u0136\3\2\2\2\u0915\u0916")
buf.write("\5\u043f\u0220\2\u0916\u0917\5\u045d\u022f\2\u0917\u0918")
buf.write("\5\u044f\u0228\2\u0918\u0919\5\u0439\u021d\2\u0919\u091a")
buf.write("\5\u045b\u022e\2\u091a\u091b\5\u0445\u0223\2\u091b\u091c")
buf.write("\5\u0451\u0229\2\u091c\u091d\5\u044f\u0228\2\u091d\u0138")
buf.write("\3\2\2\2\u091e\u091f\5\u0441\u0221\2\u091f\u0920\5\u0451")
buf.write("\u0229\2\u0920\u0921\5\u045b\u022e\2\u0921\u0922\5\u0451")
buf.write("\u0229\2\u0922\u013a\3\2\2\2\u0923\u0924\5\u0441\u0221")
buf.write("\2\u0924\u0925\5\u0457\u022c\2\u0925\u0926\5\u0435\u021b")
buf.write("\2\u0926\u0927\5\u044f\u0228\2\u0927\u0928\5\u045b\u022e")
buf.write("\2\u0928\u013c\3\2\2\2\u0929\u092a\5\u0441\u0221\2\u092a")
buf.write("\u092b\5\u0457\u022c\2\u092b\u092c\5\u0451\u0229\2\u092c")
buf.write("\u092d\5\u045d\u022f\2\u092d\u092e\5\u0453\u022a\2\u092e")
buf.write("\u013e\3\2\2\2\u092f\u0930\5\u0441\u0221\2\u0930\u0931")
buf.write("\5\u0457\u022c\2\u0931\u0932\5\u0451\u0229\2\u0932\u0933")
buf.write("\5\u045d\u022f\2\u0933\u0934\5\u0453\u022a\2\u0934\u0935")
buf.write("\5\u0445\u0223\2\u0935\u0936\5\u044f\u0228\2\u0936\u0937")
buf.write("\5\u0441\u0221\2\u0937\u0140\3\2\2\2\u0938\u0939\5\u0443")
buf.write("\u0222\2\u0939\u093a\5\u0435\u021b\2\u093a\u093b\5\u0459")
buf.write("\u022d\2\u093b\u093c\5\u0443\u0222\2\u093c\u0142\3\2\2")
buf.write("\2\u093d\u093e\5\u0443\u0222\2\u093e\u093f\5\u0435\u021b")
buf.write("\2\u093f\u0940\5\u045f\u0230\2\u0940\u0941\5\u0445\u0223")
buf.write("\2\u0941\u0942\5\u044f\u0228\2\u0942\u0943\5\u0441\u0221")
buf.write("\2\u0943\u0144\3\2\2\2\u0944\u0945\5\u0443\u0222\2\u0945")
buf.write("\u0946\5\u0445\u0223\2\u0946\u0947\5\u043b\u021e\2\u0947")
buf.write("\u0948\5\u043d\u021f\2\u0948\u0146\3\2\2\2\u0949\u094a")
buf.write("\5\u0443\u0222\2\u094a\u094b\5\u0451\u0229\2\u094b\u094c")
buf.write("\5\u045d\u022f\2\u094c\u094d\5\u0457\u022c\2\u094d\u0148")
buf.write("\3\2\2\2\u094e\u094f\5\u0445\u0223\2\u094f\u0950\5\u043f")
buf.write("\u0220\2\u0950\u014a\3\2\2\2\u0951\u0952\5\u0445\u0223")
buf.write("\2\u0952\u0953\5\u0441\u0221\2\u0953\u0954\5\u044f\u0228")
buf.write("\2\u0954\u0955\5\u0451\u0229\2\u0955\u0956\5\u0457\u022c")
buf.write("\2\u0956\u0957\5\u043d\u021f\2\u0957\u014c\3\2\2\2\u0958")
buf.write("\u0959\5\u0445\u0223\2\u0959\u095a\5\u044d\u0227\2\u095a")
buf.write("\u095b\5\u044d\u0227\2\u095b\u095c\5\u043d\u021f\2\u095c")
buf.write("\u095d\5\u043b\u021e\2\u095d\u095e\5\u0445\u0223\2\u095e")
buf.write("\u095f\5\u0435\u021b\2\u095f\u0960\5\u045b\u022e\2\u0960")
buf.write("\u0961\5\u043d\u021f\2\u0961\u014e\3\2\2\2\u0962\u0963")
buf.write("\5\u0445\u0223\2\u0963\u0964\5\u044f\u0228\2\u0964\u0150")
buf.write("\3\2\2\2\u0965\u0966\5\u0445\u0223\2\u0966\u0967\5\u044f")
buf.write("\u0228\2\u0967\u0968\5\u0439\u021d\2\u0968\u0969\5\u044b")
buf.write("\u0226\2\u0969\u096a\5\u045d\u022f\2\u096a\u096b\5\u043b")
buf.write("\u021e\2\u096b\u096c\5\u043d\u021f\2\u096c\u0152\3\2\2")
buf.write("\2\u096d\u096e\5\u0445\u0223\2\u096e\u096f\5\u044f\u0228")
buf.write("\2\u096f\u0970\5\u0439\u021d\2\u0970\u0971\5\u044b\u0226")
buf.write("\2\u0971\u0972\5\u045d\u022f\2\u0972\u0973\5\u043b\u021e")
buf.write("\2\u0973\u0974\5\u0445\u0223\2\u0974\u0975\5\u044f\u0228")
buf.write("\2\u0975\u0976\5\u0441\u0221\2\u0976\u0154\3\2\2\2\u0977")
buf.write("\u0978\5\u0445\u0223\2\u0978\u0979\5\u044f\u0228\2\u0979")
buf.write("\u097a\5\u0439\u021d\2\u097a\u097b\5\u0457\u022c\2\u097b")
buf.write("\u097c\5\u043d\u021f\2\u097c\u097d\5\u044d\u0227\2\u097d")
buf.write("\u097e\5\u043d\u021f\2\u097e\u097f\5\u044f\u0228\2\u097f")
buf.write("\u0980\5\u045b\u022e\2\u0980\u0156\3\2\2\2\u0981\u0982")
buf.write("\5\u0445\u0223\2\u0982\u0983\5\u044f\u0228\2\u0983\u0984")
buf.write("\5\u043b\u021e\2\u0984\u0985\5\u043d\u021f\2\u0985\u0986")
buf.write("\5\u044f\u0228\2\u0986\u0987\5\u045b\u022e\2\u0987\u0158")
buf.write("\3\2\2\2\u0988\u0989\5\u0445\u0223\2\u0989\u098a\5\u044f")
buf.write("\u0228\2\u098a\u098b\5\u043b\u021e\2\u098b\u098c\5\u043d")
buf.write("\u021f\2\u098c\u098d\5\u0463\u0232\2\u098d\u015a\3\2\2")
buf.write("\2\u098e\u098f\5\u0445\u0223\2\u098f\u0990\5\u044f\u0228")
buf.write("\2\u0990\u0991\5\u043b\u021e\2\u0991\u0992\5\u043d\u021f")
buf.write("\2\u0992\u0993\5\u0463\u0232\2\u0993\u0994\5\u043d\u021f")
buf.write("\2\u0994\u0995\5\u043b\u021e\2\u0995\u015c\3\2\2\2\u0996")
buf.write("\u0997\5\u0445\u0223\2\u0997\u0998\5\u044f\u0228\2\u0998")
buf.write("\u0999\5\u043b\u021e\2\u0999\u099a\5\u0445\u0223\2\u099a")
buf.write("\u099b\5\u0439\u021d\2\u099b\u099c\5\u0435\u021b\2\u099c")
buf.write("\u099d\5\u045b\u022e\2\u099d\u099e\5\u0451\u0229\2\u099e")
buf.write("\u099f\5\u0457\u022c\2\u099f\u015e\3\2\2\2\u09a0\u09a1")
buf.write("\5\u0445\u0223\2\u09a1\u09a2\5\u044f\u0228\2\u09a2\u09a3")
buf.write("\5\u043b\u021e\2\u09a3\u09a4\5\u0445\u0223\2\u09a4\u09a5")
buf.write("\5\u0439\u021d\2\u09a5\u09a6\5\u043d\u021f\2\u09a6\u09a7")
buf.write("\5\u0459\u022d\2\u09a7\u0160\3\2\2\2\u09a8\u09a9\5\u0445")
buf.write("\u0223\2\u09a9\u09aa\5\u044f\u0228\2\u09aa\u09ab\5\u043f")
buf.write("\u0220\2\u09ab\u09ac\5\u0445\u0223\2\u09ac\u09ad\5\u044f")
buf.write("\u0228\2\u09ad\u09ae\5\u0445\u0223\2\u09ae\u09af\5\u045b")
buf.write("\u022e\2\u09af\u09b0\5\u043d\u021f\2\u09b0\u0162\3\2\2")
buf.write("\2\u09b1\u09b2\5\u0445\u0223\2\u09b2\u09b3\5\u044f\u0228")
buf.write("\2\u09b3\u09b4\5\u044b\u0226\2\u09b4\u09b5\5\u0445\u0223")
buf.write("\2\u09b5\u09b6\5\u044f\u0228\2\u09b6\u09b7\5\u043d\u021f")
buf.write("\2\u09b7\u0164\3\2\2\2\u09b8\u09b9\5\u0445\u0223\2\u09b9")
buf.write("\u09ba\5\u044f\u0228\2\u09ba\u09bb\5\u044f\u0228\2\u09bb")
buf.write("\u09bc\5\u043d\u021f\2\u09bc\u09bd\5\u0457\u022c\2\u09bd")
buf.write("\u0166\3\2\2\2\u09be\u09bf\5\u0445\u0223\2\u09bf\u09c0")
buf.write("\5\u044f\u0228\2\u09c0\u09c1\5\u0451\u0229\2\u09c1\u09c2")
buf.write("\5\u045d\u022f\2\u09c2\u09c3\5\u045b\u022e\2\u09c3\u0168")
buf.write("\3\2\2\2\u09c4\u09c5\5\u0445\u0223\2\u09c5\u09c6\5\u044f")
buf.write("\u0228\2\u09c6\u09c7\5\u0459\u022d\2\u09c7\u09c8\5\u043d")
buf.write("\u021f\2\u09c8\u09c9\5\u0457\u022c\2\u09c9\u09ca\5\u045b")
buf.write("\u022e\2\u09ca\u016a\3\2\2\2\u09cb\u09cc\5\u0445\u0223")
buf.write("\2\u09cc\u09cd\5\u044f\u0228\2\u09cd\u09ce\5\u0459\u022d")
buf.write("\2\u09ce\u09cf\5\u045b\u022e\2\u09cf\u09d0\5\u0435\u021b")
buf.write("\2\u09d0\u09d1\5\u044f\u0228\2\u09d1\u09d2\5\u045b\u022e")
buf.write("\2\u09d2\u09d3\5\u0445\u0223\2\u09d3\u09d4\5\u0435\u021b")
buf.write("\2\u09d4\u09d5\5\u0437\u021c\2\u09d5\u09d6\5\u044b\u0226")
buf.write("\2\u09d6\u09d7\5\u043d\u021f\2\u09d7\u016c\3\2\2\2\u09d8")
buf.write("\u09d9\5\u0445\u0223\2\u09d9\u09da\5\u044f\u0228\2\u09da")
buf.write("\u09db\5\u0459\u022d\2\u09db\u09dc\5\u045b\u022e\2\u09dc")
buf.write("\u09dd\5\u043d\u021f\2\u09dd\u09de\5\u0435\u021b\2\u09de")
buf.write("\u09df\5\u043b\u021e\2\u09df\u016e\3\2\2\2\u09e0\u09e1")
buf.write("\5\u0445\u0223\2\u09e1\u09e2\5\u044f\u0228\2\u09e2\u09e3")
buf.write("\5\u045b\u022e\2\u09e3\u0170\3\2\2\2\u09e4\u09e5\5\u0445")
buf.write("\u0223\2\u09e5\u09e6\5\u044f\u0228\2\u09e6\u09e7\5\u045b")
buf.write("\u022e\2\u09e7\u09e8\5\u043d\u021f\2\u09e8\u09e9\5\u0441")
buf.write("\u0221\2\u09e9\u09ea\5\u043d\u021f\2\u09ea\u09eb\5\u0457")
buf.write("\u022c\2\u09eb\u0172\3\2\2\2\u09ec\u09ed\5\u0445\u0223")
buf.write("\2\u09ed\u09ee\5\u044f\u0228\2\u09ee\u09ef\5\u045b\u022e")
buf.write("\2\u09ef\u09f0\5\u043d\u021f\2\u09f0\u09f1\5\u0457\u022c")
buf.write("\2\u09f1\u09f2\5\u0459\u022d\2\u09f2\u09f3\5\u043d\u021f")
buf.write("\2\u09f3\u09f4\5\u0439\u021d\2\u09f4\u09f5\5\u045b\u022e")
buf.write("\2\u09f5\u0174\3\2\2\2\u09f6\u09f7\5\u0445\u0223\2\u09f7")
buf.write("\u09f8\5\u044f\u0228\2\u09f8\u09f9\5\u045b\u022e\2\u09f9")
buf.write("\u09fa\5\u043d\u021f\2\u09fa\u09fb\5\u0457\u022c\2\u09fb")
buf.write("\u09fc\5\u045f\u0230\2\u09fc\u09fd\5\u0435\u021b\2\u09fd")
buf.write("\u09fe\5\u044b\u0226\2\u09fe\u0176\3\2\2\2\u09ff\u0a00")
buf.write("\5\u0445\u0223\2\u0a00\u0a01\5\u044f\u0228\2\u0a01\u0a02")
buf.write("\5\u045b\u022e\2\u0a02\u0a03\5\u0451\u0229\2\u0a03\u0178")
buf.write("\3\2\2\2\u0a04\u0a05\5\u0445\u0223\2\u0a05\u0a06\5\u044f")
buf.write("\u0228\2\u0a06\u0a07\5\u045f\u0230\2\u0a07\u0a08\5\u0435")
buf.write("\u021b\2\u0a08\u0a09\5\u044b\u0226\2\u0a09\u0a0a\5\u0445")
buf.write("\u0223\2\u0a0a\u0a0b\5\u043b\u021e\2\u0a0b\u0a0c\5\u0435")
buf.write("\u021b\2\u0a0c\u0a0d\5\u045b\u022e\2\u0a0d\u0a0e\5\u043d")
buf.write("\u021f\2\u0a0e\u017a\3\2\2\2\u0a0f\u0a10\5\u0445\u0223")
buf.write("\2\u0a10\u0a11\5\u0459\u022d\2\u0a11\u017c\3\2\2\2\u0a12")
buf.write("\u0a13\5\u0445\u0223\2\u0a13\u0a14\5\u0459\u022d\2\u0a14")
buf.write("\u0a15\5\u0451\u0229\2\u0a15\u0a16\5\u044b\u0226\2\u0a16")
buf.write("\u0a17\5\u0435\u021b\2\u0a17\u0a18\5\u045b\u022e\2\u0a18")
buf.write("\u0a19\5\u0445\u0223\2\u0a19\u0a1a\5\u0451\u0229\2\u0a1a")
buf.write("\u0a1b\5\u044f\u0228\2\u0a1b\u017e\3\2\2\2\u0a1c\u0a1d")
buf.write("\5\u0445\u0223\2\u0a1d\u0a1e\5\u045b\u022e\2\u0a1e\u0a1f")
buf.write("\5\u043d\u021f\2\u0a1f\u0a20\5\u0457\u022c\2\u0a20\u0a21")
buf.write("\5\u0435\u021b\2\u0a21\u0a22\5\u045b\u022e\2\u0a22\u0a23")
buf.write("\5\u043d\u021f\2\u0a23\u0180\3\2\2\2\u0a24\u0a25\5\u0447")
buf.write("\u0224\2\u0a25\u0a26\5\u0435\u021b\2\u0a26\u0a27\5\u045f")
buf.write("\u0230\2\u0a27\u0a28\5\u0435\u021b\2\u0a28\u0182\3\2\2")
buf.write("\2\u0a29\u0a2a\5\u0447\u0224\2\u0a2a\u0a2b\5\u0451\u0229")
buf.write("\2\u0a2b\u0a2c\5\u0445\u0223\2\u0a2c\u0a2d\5\u044f\u0228")
buf.write("\2\u0a2d\u0184\3\2\2\2\u0a2e\u0a2f\5\u0449\u0225\2\u0a2f")
buf.write("\u0a30\5\u043d\u021f\2\u0a30\u0a31\5\u043d\u021f\2\u0a31")
buf.write("\u0a32\5\u0453\u022a\2\u0a32\u0186\3\2\2\2\u0a33\u0a34")
buf.write("\5\u044b\u0226\2\u0a34\u0a35\5\u0435\u021b\2\u0a35\u0a36")
buf.write("\5\u044f\u0228\2\u0a36\u0a37\5\u0441\u0221\2\u0a37\u0a38")
buf.write("\5\u045d\u022f\2\u0a38\u0a39\5\u0435\u021b\2\u0a39\u0a3a")
buf.write("\5\u0441\u0221\2\u0a3a\u0a3b\5\u043d\u021f\2\u0a3b\u0188")
buf.write("\3\2\2\2\u0a3c\u0a3d\5\u044b\u0226\2\u0a3d\u0a3e\5\u0435")
buf.write("\u021b\2\u0a3e\u0a3f\5\u0459\u022d\2\u0a3f\u0a40\5\u045b")
buf.write("\u022e\2\u0a40\u018a\3\2\2\2\u0a41\u0a42\5\u044b\u0226")
buf.write("\2\u0a42\u0a43\5\u0435\u021b\2\u0a43\u0a44\5\u0459\u022d")
buf.write("\2\u0a44\u0a45\5\u045b\u022e\2\u0a45\u0a46\7a\2\2\u0a46")
buf.write("\u0a47\5\u045f\u0230\2\u0a47\u0a48\5\u0435\u021b\2\u0a48")
buf.write("\u0a49\5\u044b\u0226\2\u0a49\u0a4a\5\u045d\u022f\2\u0a4a")
buf.write("\u0a4b\5\u043d\u021f\2\u0a4b\u018c\3\2\2\2\u0a4c\u0a4d")
buf.write("\5\u044b\u0226\2\u0a4d\u0a4e\5\u043d\u021f\2\u0a4e\u0a4f")
buf.write("\5\u0435\u021b\2\u0a4f\u0a50\5\u043b\u021e\2\u0a50\u0a51")
buf.write("\5\u0445\u0223\2\u0a51\u0a52\5\u044f\u0228\2\u0a52\u0a53")
buf.write("\5\u0441\u0221\2\u0a53\u018e\3\2\2\2\u0a54\u0a55\5\u044b")
buf.write("\u0226\2\u0a55\u0a56\5\u043d\u021f\2\u0a56\u0a57\5\u043f")
buf.write("\u0220\2\u0a57\u0a58\5\u045b\u022e\2\u0a58\u0190\3\2\2")
buf.write("\2\u0a59\u0a5a\5\u044b\u0226\2\u0a5a\u0a5b\5\u043d\u021f")
buf.write("\2\u0a5b\u0a5c\5\u045f\u0230\2\u0a5c\u0a5d\5\u043d\u021f")
buf.write("\2\u0a5d\u0a5e\5\u044b\u0226\2\u0a5e\u0192\3\2\2\2\u0a5f")
buf.write("\u0a60\5\u044b\u0226\2\u0a60\u0a61\5\u0445\u0223\2\u0a61")
buf.write("\u0a62\5\u0437\u021c\2\u0a62\u0a63\5\u0457\u022c\2\u0a63")
buf.write("\u0a64\5\u0435\u021b\2\u0a64\u0a65\5\u0457\u022c\2\u0a65")
buf.write("\u0a66\5\u0465\u0233\2\u0a66\u0194\3\2\2\2\u0a67\u0a68")
buf.write("\5\u044b\u0226\2\u0a68\u0a69\5\u0445\u0223\2\u0a69\u0a6a")
buf.write("\5\u0449\u0225\2\u0a6a\u0a6b\5\u043d\u021f\2\u0a6b\u0196")
buf.write("\3\2\2\2\u0a6c\u0a6d\5\u044b\u0226\2\u0a6d\u0a6e\5\u0445")
buf.write("\u0223\2\u0a6e\u0a6f\5\u0449\u0225\2\u0a6f\u0a70\5\u043d")
buf.write("\u021f\2\u0a70\u0a71\7\64\2\2\u0a71\u0198\3\2\2\2\u0a72")
buf.write("\u0a73\5\u044b\u0226\2\u0a73\u0a74\5\u0445\u0223\2\u0a74")
buf.write("\u0a75\5\u0449\u0225\2\u0a75\u0a76\5\u043d\u021f\2\u0a76")
buf.write("\u0a77\7\66\2\2\u0a77\u019a\3\2\2\2\u0a78\u0a79\5\u044b")
buf.write("\u0226\2\u0a79\u0a7a\5\u0445\u0223\2\u0a7a\u0a7b\5\u0449")
buf.write("\u0225\2\u0a7b\u0a7c\5\u043d\u021f\2\u0a7c\u0a7d\5\u0439")
buf.write("\u021d\2\u0a7d\u019c\3\2\2\2\u0a7e\u0a7f\5\u044b\u0226")
buf.write("\2\u0a7f\u0a80\5\u0445\u0223\2\u0a80\u0a81\5\u044d\u0227")
buf.write("\2\u0a81\u0a82\5\u0445\u0223\2\u0a82\u0a83\5\u045b\u022e")
buf.write("\2\u0a83\u019e\3\2\2\2\u0a84\u0a85\5\u044b\u0226\2\u0a85")
buf.write("\u0a86\5\u0451\u0229\2\u0a86\u0a87\5\u0439\u021d\2\u0a87")
buf.write("\u0a88\5\u0435\u021b\2\u0a88\u0a89\5\u044b\u0226\2\u0a89")
buf.write("\u01a0\3\2\2\2\u0a8a\u0a8b\5\u044b\u0226\2\u0a8b\u0a8c")
buf.write("\5\u0451\u0229\2\u0a8c\u0a8d\5\u0439\u021d\2\u0a8d\u0a8e")
buf.write("\5\u0449\u0225\2\u0a8e\u01a2\3\2\2\2\u0a8f\u0a90\5\u044b")
buf.write("\u0226\2\u0a90\u0a91\5\u0451\u0229\2\u0a91\u0a92\5\u0439")
buf.write("\u021d\2\u0a92\u0a93\5\u0449\u0225\2\u0a93\u0a94\5\u043d")
buf.write("\u021f\2\u0a94\u0a95\5\u043b\u021e\2\u0a95\u01a4\3\2\2")
buf.write("\2\u0a96\u0a97\5\u044b\u0226\2\u0a97\u0a98\5\u0451\u0229")
buf.write("\2\u0a98\u0a99\5\u0441\u0221\2\u0a99\u01a6\3\2\2\2\u0a9a")
buf.write("\u0a9b\5\u044b\u0226\2\u0a9b\u0a9c\5\u0451\u0229\2\u0a9c")
buf.write("\u0a9d\5\u0441\u0221\2\u0a9d\u0a9e\5\u0451\u0229\2\u0a9e")
buf.write("\u0a9f\5\u043f\u0220\2\u0a9f\u0aa0\5\u043f\u0220\2\u0aa0")
buf.write("\u01a8\3\2\2\2\u0aa1\u0aa2\5\u044b\u0226\2\u0aa2\u0aa3")
buf.write("\5\u0451\u0229\2\u0aa3\u0aa4\5\u0441\u0221\2\u0aa4\u0aa5")
buf.write("\5\u0451\u0229\2\u0aa5\u0aa6\5\u044f\u0228\2\u0aa6\u01aa")
buf.write("\3\2\2\2\u0aa7\u0aa8\5\u044b\u0226\2\u0aa8\u0aa9\5\u0451")
buf.write("\u0229\2\u0aa9\u0aaa\5\u044f\u0228\2\u0aaa\u0aab\5\u0441")
buf.write("\u0221\2\u0aab\u01ac\3\2\2\2\u0aac\u0aad\5\u044b\u0226")
buf.write("\2\u0aad\u0aae\5\u0451\u0229\2\u0aae\u0aaf\5\u0451\u0229")
buf.write("\2\u0aaf\u0ab0\5\u0453\u022a\2\u0ab0\u01ae\3\2\2\2\u0ab1")
buf.write("\u0ab2\5\u044d\u0227\2\u0ab2\u0ab3\5\u0435\u021b\2\u0ab3")
buf.write("\u0ab4\5\u0445\u0223\2\u0ab4\u0ab5\5\u044f\u0228\2\u0ab5")
buf.write("\u01b0\3\2\2\2\u0ab6\u0ab7\5\u044d\u0227\2\u0ab7\u0ab8")
buf.write("\5\u0435\u021b\2\u0ab8\u0ab9\5\u0453\u022a\2\u0ab9\u01b2")
buf.write("\3\2\2\2\u0aba\u0abb\5\u044d\u0227\2\u0abb\u0abc\5\u0435")
buf.write("\u021b\2\u0abc\u0abd\5\u045b\u022e\2\u0abd\u0abe\5\u0439")
buf.write("\u021d\2\u0abe\u0abf\5\u0443\u0222\2\u0abf\u0ac0\5\u043d")
buf.write("\u021f\2\u0ac0\u0ac1\5\u043b\u021e\2\u0ac1\u01b4\3\2\2")
buf.write("\2\u0ac2\u0ac3\5\u044d\u0227\2\u0ac3\u0ac4\5\u0435\u021b")
buf.write("\2\u0ac4\u0ac5\5\u0463\u0232\2\u0ac5\u0ac6\5\u045f\u0230")
buf.write("\2\u0ac6\u0ac7\5\u0435\u021b\2\u0ac7\u0ac8\5\u044b\u0226")
buf.write("\2\u0ac8\u0ac9\5\u045d\u022f\2\u0ac9\u0aca\5\u043d\u021f")
buf.write("\2\u0aca\u01b6\3\2\2\2\u0acb\u0acc\5\u044d\u0227\2\u0acc")
buf.write("\u0acd\5\u043d\u021f\2\u0acd\u0ace\5\u0435\u021b\2\u0ace")
buf.write("\u0acf\5\u0459\u022d\2\u0acf\u0ad0\5\u045d\u022f\2\u0ad0")
buf.write("\u0ad1\5\u0457\u022c\2\u0ad1\u0ad2\5\u043d\u021f\2\u0ad2")
buf.write("\u0ad3\5\u0459\u022d\2\u0ad3\u01b8\3\2\2\2\u0ad4\u0ad5")
buf.write("\5\u044d\u0227\2\u0ad5\u0ad6\5\u043d\u021f\2\u0ad6\u0ad7")
buf.write("\5\u044d\u0227\2\u0ad7\u0ad8\5\u0437\u021c\2\u0ad8\u0ad9")
buf.write("\5\u043d\u021f\2\u0ad9\u0ada\5\u0457\u022c\2\u0ada\u01ba")
buf.write("\3\2\2\2\u0adb\u0adc\5\u044d\u0227\2\u0adc\u0add\5\u043d")
buf.write("\u021f\2\u0add\u0ade\5\u0457\u022c\2\u0ade\u0adf\5\u0441")
buf.write("\u0221\2\u0adf\u0ae0\5\u043d\u021f\2\u0ae0\u01bc\3\2\2")
buf.write("\2\u0ae1\u0ae2\5\u044d\u0227\2\u0ae2\u0ae3\5\u0445\u0223")
buf.write("\2\u0ae3\u0ae4\5\u044f\u0228\2\u0ae4\u0ae5\5\u045d\u022f")
buf.write("\2\u0ae5\u0ae6\5\u0459\u022d\2\u0ae6\u01be\3\2\2\2\u0ae7")
buf.write("\u0ae8\5\u044d\u0227\2\u0ae8\u0ae9\5\u0445\u0223\2\u0ae9")
buf.write("\u0aea\5\u044f\u0228\2\u0aea\u0aeb\5\u045d\u022f\2\u0aeb")
buf.write("\u0aec\5\u045b\u022e\2\u0aec\u0aed\5\u043d\u021f\2\u0aed")
buf.write("\u01c0\3\2\2\2\u0aee\u0aef\5\u044d\u0227\2\u0aef\u0af0")
buf.write("\5\u0445\u0223\2\u0af0\u0af1\5\u044f\u0228\2\u0af1\u0af2")
buf.write("\5\u045f\u0230\2\u0af2\u0af3\5\u0435\u021b\2\u0af3\u0af4")
buf.write("\5\u044b\u0226\2\u0af4\u0af5\5\u045d\u022f\2\u0af5\u0af6")
buf.write("\5\u043d\u021f\2\u0af6\u01c2\3\2\2\2\u0af7\u0af8\5\u044d")
buf.write("\u0227\2\u0af8\u0af9\5\u044b\u0226\2\u0af9\u0afa\5\u0459")
buf.write("\u022d\2\u0afa\u0afb\5\u044b\u0226\2\u0afb\u0afc\5\u0435")
buf.write("\u021b\2\u0afc\u0afd\5\u0437\u021c\2\u0afd\u0afe\5\u043d")
buf.write("\u021f\2\u0afe\u0aff\5\u044b\u0226\2\u0aff\u01c4\3\2\2")
buf.write("\2\u0b00\u0b01\5\u044d\u0227\2\u0b01\u0b02\5\u0451\u0229")
buf.write("\2\u0b02\u0b03\5\u043b\u021e\2\u0b03\u0b04\5\u043d\u021f")
buf.write("\2\u0b04\u01c6\3\2\2\2\u0b05\u0b06\5\u044d\u0227\2\u0b06")
buf.write("\u0b07\5\u0451\u0229\2\u0b07\u0b08\5\u043b\u021e\2\u0b08")
buf.write("\u0b09\5\u043d\u021f\2\u0b09\u0b0a\5\u044b\u0226\2\u0b0a")
buf.write("\u01c8\3\2\2\2\u0b0b\u0b0c\5\u044d\u0227\2\u0b0c\u0b0d")
buf.write("\5\u0451\u0229\2\u0b0d\u0b0e\5\u043b\u021e\2\u0b0e\u0b0f")
buf.write("\5\u0445\u0223\2\u0b0f\u0b10\5\u043f\u0220\2\u0b10\u0b11")
buf.write("\5\u0465\u0233\2\u0b11\u01ca\3\2\2\2\u0b12\u0b13\5\u044d")
buf.write("\u0227\2\u0b13\u0b14\5\u0451\u0229\2\u0b14\u0b15\5\u044f")
buf.write("\u0228\2\u0b15\u0b16\5\u045b\u022e\2\u0b16\u0b17\5\u0443")
buf.write("\u0222\2\u0b17\u01cc\3\2\2\2\u0b18\u0b19\5\u044d\u0227")
buf.write("\2\u0b19\u0b1a\5\u045d\u022f\2\u0b1a\u0b1b\5\u044b\u0226")
buf.write("\2\u0b1b\u0b1c\5\u045b\u022e\2\u0b1c\u0b1d\5\u0445\u0223")
buf.write("\2\u0b1d\u0b1e\5\u0459\u022d\2\u0b1e\u0b1f\5\u043d\u021f")
buf.write("\2\u0b1f\u0b20\5\u045b\u022e\2\u0b20\u01ce\3\2\2\2\u0b21")
buf.write("\u0b22\5\u044f\u0228\2\u0b22\u0b23\5\u0435\u021b\2\u0b23")
buf.write("\u0b24\5\u044d\u0227\2\u0b24\u0b25\5\u043d\u021f\2\u0b25")
buf.write("\u01d0\3\2\2\2\u0b26\u0b27\5\u044f\u0228\2\u0b27\u0b28")
buf.write("\5\u0435\u021b\2\u0b28\u0b29\5\u044f\u0228\2\u0b29\u01d2")
buf.write("\3\2\2\2\u0b2a\u0b2b\5\u044f\u0228\2\u0b2b\u0b2c\5\u0435")
buf.write("\u021b\2\u0b2c\u0b2d\5\u045b\u022e\2\u0b2d\u0b2e\5\u045d")
buf.write("\u022f\2\u0b2e\u0b2f\5\u0457\u022c\2\u0b2f\u0b30\5\u0435")
buf.write("\u021b\2\u0b30\u0b31\5\u044b\u0226\2\u0b31\u01d4\3\2\2")
buf.write("\2\u0b32\u0b33\5\u044f\u0228\2\u0b33\u0b34\5\u0435\u021b")
buf.write("\2\u0b34\u0b35\5\u045b\u022e\2\u0b35\u0b36\5\u045d\u022f")
buf.write("\2\u0b36\u0b37\5\u0457\u022c\2\u0b37\u0b38\5\u0435\u021b")
buf.write("\2\u0b38\u0b39\5\u044b\u0226\2\u0b39\u0b3a\5\u044f\u0228")
buf.write("\2\u0b3a\u01d6\3\2\2\2\u0b3b\u0b3c\5\u044f\u0228\2\u0b3c")
buf.write("\u0b3d\5\u0435\u021b\2\u0b3d\u0b3e\5\u045f\u0230\2\u0b3e")
buf.write("\u01d8\3\2\2\2\u0b3f\u0b40\5\u044f\u0228\2\u0b40\u0b41")
buf.write("\5\u0439\u021d\2\u0b41\u0b42\5\u0443\u0222\2\u0b42\u0b43")
buf.write("\5\u0435\u021b\2\u0b43\u0b44\5\u0457\u022c\2\u0b44\u01da")
buf.write("\3\2\2\2\u0b45\u0b46\5\u044f\u0228\2\u0b46\u0b47\5\u0439")
buf.write("\u021d\2\u0b47\u0b48\5\u0443\u0222\2\u0b48\u0b49\5\u0435")
buf.write("\u021b\2\u0b49\u0b4a\5\u0457\u022c\2\u0b4a\u0b4b\7a\2")
buf.write("\2\u0b4b\u0b4c\5\u0439\u021d\2\u0b4c\u0b4d\5\u0459\u022d")
buf.write("\2\u0b4d\u01dc\3\2\2\2\u0b4e\u0b4f\5\u044f\u0228\2\u0b4f")
buf.write("\u0b50\5\u0439\u021d\2\u0b50\u0b51\5\u044b\u0226\2\u0b51")
buf.write("\u0b52\5\u0451\u0229\2\u0b52\u0b53\5\u0437\u021c\2\u0b53")
buf.write("\u01de\3\2\2\2\u0b54\u0b55\5\u044f\u0228\2\u0b55\u0b56")
buf.write("\5\u043d\u021f\2\u0b56\u0b57\5\u0459\u022d\2\u0b57\u0b58")
buf.write("\5\u045b\u022e\2\u0b58\u0b59\5\u043d\u021f\2\u0b59\u0b5a")
buf.write("\5\u043b\u021e\2\u0b5a\u01e0\3\2\2\2\u0b5b\u0b5c\5\u044f")
buf.write("\u0228\2\u0b5c\u0b5d\5\u043d\u021f\2\u0b5d\u0b5e\5\u0461")
buf.write("\u0231\2\u0b5e\u01e2\3\2\2\2\u0b5f\u0b60\5\u044f\u0228")
buf.write("\2\u0b60\u0b61\5\u0451\u0229\2\u0b61\u01e4\3\2\2\2\u0b62")
buf.write("\u0b63\5\u044f\u0228\2\u0b63\u0b64\5\u0451\u0229\2\u0b64")
buf.write("\u0b65\5\u0435\u021b\2\u0b65\u0b66\5\u045d\u022f\2\u0b66")
buf.write("\u0b67\5\u043b\u021e\2\u0b67\u0b68\5\u0445\u0223\2\u0b68")
buf.write("\u0b69\5\u045b\u022e\2\u0b69\u01e6\3\2\2\2\u0b6a\u0b6b")
buf.write("\5\u044f\u0228\2\u0b6b\u0b6c\5\u0451\u0229\2\u0b6c\u0b6d")
buf.write("\5\u0439\u021d\2\u0b6d\u0b6e\5\u0435\u021b\2\u0b6e\u0b6f")
buf.write("\5\u0439\u021d\2\u0b6f\u0b70\5\u0443\u0222\2\u0b70\u0b71")
buf.write("\5\u043d\u021f\2\u0b71\u01e8\3\2\2\2\u0b72\u0b73\5\u044f")
buf.write("\u0228\2\u0b73\u0b74\5\u0451\u0229\2\u0b74\u0b75\5\u0439")
buf.write("\u021d\2\u0b75\u0b76\5\u0451\u0229\2\u0b76\u0b77\5\u0453")
buf.write("\u022a\2\u0b77\u0b78\5\u0465\u0233\2\u0b78\u01ea\3\2\2")
buf.write("\2\u0b79\u0b7a\5\u044f\u0228\2\u0b7a\u0b7b\5\u0451\u0229")
buf.write("\2\u0b7b\u0b7c\5\u0439\u021d\2\u0b7c\u0b7d\5\u0465\u0233")
buf.write("\2\u0b7d\u0b7e\5\u0439\u021d\2\u0b7e\u0b7f\5\u044b\u0226")
buf.write("\2\u0b7f\u0b80\5\u043d\u021f\2\u0b80\u01ec\3\2\2\2\u0b81")
buf.write("\u0b82\5\u044f\u0228\2\u0b82\u0b83\5\u0451\u0229\2\u0b83")
buf.write("\u0b84\5\u043d\u021f\2\u0b84\u0b85\5\u044f\u0228\2\u0b85")
buf.write("\u0b86\5\u045b\u022e\2\u0b86\u0b87\5\u0445\u0223\2\u0b87")
buf.write("\u0b88\5\u045b\u022e\2\u0b88\u0b89\5\u0465\u0233\2\u0b89")
buf.write("\u0b8a\5\u043d\u021f\2\u0b8a\u0b8b\5\u0459\u022d\2\u0b8b")
buf.write("\u0b8c\5\u0439\u021d\2\u0b8c\u0b8d\5\u0435\u021b\2\u0b8d")
buf.write("\u0b8e\5\u0453\u022a\2\u0b8e\u0b8f\5\u0445\u0223\2\u0b8f")
buf.write("\u0b90\5\u044f\u0228\2\u0b90\u0b91\5\u0441\u0221\2\u0b91")
buf.write("\u01ee\3\2\2\2\u0b92\u0b93\5\u044f\u0228\2\u0b93\u0b94")
buf.write("\5\u0451\u0229\2\u0b94\u0b95\5\u044d\u0227\2\u0b95\u0b96")
buf.write("\5\u0435\u021b\2\u0b96\u0b97\5\u0463\u0232\2\u0b97\u0b98")
buf.write("\5\u045f\u0230\2\u0b98\u0b99\5\u0435\u021b\2\u0b99\u0b9a")
buf.write("\5\u044b\u0226\2\u0b9a\u0b9b\5\u045d\u022f\2\u0b9b\u0b9c")
buf.write("\5\u043d\u021f\2\u0b9c\u01f0\3\2\2\2\u0b9d\u0b9e\5\u044f")
buf.write("\u0228\2\u0b9e\u0b9f\5\u0451\u0229\2\u0b9f\u0ba0\5\u044d")
buf.write("\u0227\2\u0ba0\u0ba1\5\u0445\u0223\2\u0ba1\u0ba2\5\u044f")
buf.write("\u0228\2\u0ba2\u0ba3\5\u045f\u0230\2\u0ba3\u0ba4\5\u0435")
buf.write("\u021b\2\u0ba4\u0ba5\5\u044b\u0226\2\u0ba5\u0ba6\5\u045d")
buf.write("\u022f\2\u0ba6\u0ba7\5\u043d\u021f\2\u0ba7\u01f2\3\2\2")
buf.write("\2\u0ba8\u0ba9\5\u044f\u0228\2\u0ba9\u0baa\5\u0451\u0229")
buf.write("\2\u0baa\u0bab\5\u044f\u0228\2\u0bab\u0bac\5\u043d\u021f")
buf.write("\2\u0bac\u01f4\3\2\2\2\u0bad\u0bae\5\u044f\u0228\2\u0bae")
buf.write("\u0baf\5\u0451\u0229\2\u0baf\u0bb0\5\u0451\u0229\2\u0bb0")
buf.write("\u0bb1\5\u0457\u022c\2\u0bb1\u0bb2\5\u043b\u021e\2\u0bb2")
buf.write("\u0bb3\5\u043d\u021f\2\u0bb3\u0bb4\5\u0457\u022c\2\u0bb4")
buf.write("\u01f6\3\2\2\2\u0bb5\u0bb6\5\u044f\u0228\2\u0bb6\u0bb7")
buf.write("\5\u0451\u0229\2\u0bb7\u0bb8\5\u0459\u022d\2\u0bb8\u0bb9")
buf.write("\5\u0439\u021d\2\u0bb9\u0bba\5\u0443\u0222\2\u0bba\u0bbb")
buf.write("\5\u043d\u021f\2\u0bbb\u0bbc\5\u044d\u0227\2\u0bbc\u0bbd")
buf.write("\5\u0435\u021b\2\u0bbd\u0bbe\5\u0439\u021d\2\u0bbe\u0bbf")
buf.write("\5\u0443\u0222\2\u0bbf\u0bc0\5\u043d\u021f\2\u0bc0\u0bc1")
buf.write("\5\u0439\u021d\2\u0bc1\u0bc2\5\u0449\u0225\2\u0bc2\u01f8")
buf.write("\3\2\2\2\u0bc3\u0bc4\5\u044f\u0228\2\u0bc4\u0bc5\5\u0451")
buf.write("\u0229\2\u0bc5\u0bc6\5\u045b\u022e\2\u0bc6\u01fa\3\2\2")
buf.write("\2\u0bc7\u0bc8\5\u044f\u0228\2\u0bc8\u0bc9\5\u0451\u0229")
buf.write("\2\u0bc9\u0bca\5\u0461\u0231\2\u0bca\u0bcb\5\u0435\u021b")
buf.write("\2\u0bcb\u0bcc\5\u0445\u0223\2\u0bcc\u0bcd\5\u045b\u022e")
buf.write("\2\u0bcd\u01fc\3\2\2\2\u0bce\u0bcf\5\u044f\u0228\2\u0bcf")
buf.write("\u0bd0\5\u045d\u022f\2\u0bd0\u0bd1\5\u044b\u0226\2\u0bd1")
buf.write("\u0bd2\5\u044b\u0226\2\u0bd2\u01fe\3\2\2\2\u0bd3\u0bd4")
buf.write("\5\u044f\u0228\2\u0bd4\u0bd5\5\u045d\u022f\2\u0bd5\u0bd6")
buf.write("\5\u044b\u0226\2\u0bd6\u0bd7\5\u044b\u0226\2\u0bd7\u0bd8")
buf.write("\5\u0459\u022d\2\u0bd8\u0200\3\2\2\2\u0bd9\u0bda\5\u044f")
buf.write("\u0228\2\u0bda\u0bdb\5\u045d\u022f\2\u0bdb\u0bdc\5\u044d")
buf.write("\u0227\2\u0bdc\u0bdd\5\u0437\u021c\2\u0bdd\u0bde\5\u043d")
buf.write("\u021f\2\u0bde\u0bdf\5\u0457\u022c\2\u0bdf\u0202\3\2\2")
buf.write("\2\u0be0\u0be1\5\u044f\u0228\2\u0be1\u0be2\5\u045d\u022f")
buf.write("\2\u0be2\u0be3\5\u044d\u0227\2\u0be3\u0be4\5\u043d\u021f")
buf.write("\2\u0be4\u0be5\5\u0457\u022c\2\u0be5\u0be6\5\u0445\u0223")
buf.write("\2\u0be6\u0be7\5\u0439\u021d\2\u0be7\u0204\3\2\2\2\u0be8")
buf.write("\u0be9\5\u044f\u0228\2\u0be9\u0bea\5\u045f\u0230\2\u0bea")
buf.write("\u0beb\5\u0435\u021b\2\u0beb\u0bec\5\u0457\u022c\2\u0bec")
buf.write("\u0bed\5\u0439\u021d\2\u0bed\u0bee\5\u0443\u0222\2\u0bee")
buf.write("\u0bef\5\u0435\u021b\2\u0bef\u0bf0\5\u0457\u022c\2\u0bf0")
buf.write("\u0bf1\7\64\2\2\u0bf1\u0206\3\2\2\2\u0bf2\u0bf3\5\u0451")
buf.write("\u0229\2\u0bf3\u0bf4\5\u0437\u021c\2\u0bf4\u0bf5\5\u0447")
buf.write("\u0224\2\u0bf5\u0bf6\5\u043d\u021f\2\u0bf6\u0bf7\5\u0439")
buf.write("\u021d\2\u0bf7\u0bf8\5\u045b\u022e\2\u0bf8\u0208\3\2\2")
buf.write("\2\u0bf9\u0bfa\5\u0451\u0229\2\u0bfa\u0bfb\5\u043f\u0220")
buf.write("\2\u0bfb\u020a\3\2\2\2\u0bfc\u0bfd\5\u0451\u0229\2\u0bfd")
buf.write("\u0bfe\5\u043f\u0220\2\u0bfe\u0bff\5\u043f\u0220\2\u0bff")
buf.write("\u020c\3\2\2\2\u0c00\u0c01\5\u0451\u0229\2\u0c01\u0c02")
buf.write("\5\u0445\u0223\2\u0c02\u0c03\5\u043b\u021e\2\u0c03\u020e")
buf.write("\3\2\2\2\u0c04\u0c05\5\u0451\u0229\2\u0c05\u0c06\5\u044b")
buf.write("\u0226\2\u0c06\u0c07\5\u043b\u021e\2\u0c07\u0210\3\2\2")
buf.write("\2\u0c08\u0c09\5\u0451\u0229\2\u0c09\u0c0a\5\u044f\u0228")
buf.write("\2\u0c0a\u0212\3\2\2\2\u0c0b\u0c0c\5\u0451\u0229\2\u0c0c")
buf.write("\u0c0d\5\u044f\u0228\2\u0c0d\u0c0e\5\u044b\u0226\2\u0c0e")
buf.write("\u0c0f\5\u0465\u0233\2\u0c0f\u0214\3\2\2\2\u0c10\u0c11")
buf.write("\5\u0451\u0229\2\u0c11\u0c12\5\u0453\u022a\2\u0c12\u0c13")
buf.write("\5\u043d\u021f\2\u0c13\u0c14\5\u044f\u0228\2\u0c14\u0216")
buf.write("\3\2\2\2\u0c15\u0c16\5\u0451\u0229\2\u0c16\u0c17\5\u0453")
buf.write("\u022a\2\u0c17\u0c18\5\u045b\u022e\2\u0c18\u0c19\5\u0445")
buf.write("\u0223\2\u0c19\u0c1a\5\u0451\u0229\2\u0c1a\u0c1b\5\u044f")
buf.write("\u0228\2\u0c1b\u0218\3\2\2\2\u0c1c\u0c1d\5\u0451\u0229")
buf.write("\2\u0c1d\u0c1e\5\u0457\u022c\2\u0c1e\u021a\3\2\2\2\u0c1f")
buf.write("\u0c20\5\u0451\u0229\2\u0c20\u0c21\5\u0457\u022c\2\u0c21")
buf.write("\u0c22\5\u0435\u021b\2\u0c22\u0c23\5\u043b\u021e\2\u0c23")
buf.write("\u0c24\5\u0435\u021b\2\u0c24\u0c25\5\u045b\u022e\2\u0c25")
buf.write("\u0c26\5\u0435\u021b\2\u0c26\u021c\3\2\2\2\u0c27\u0c28")
buf.write("\5\u0451\u0229\2\u0c28\u0c29\5\u0457\u022c\2\u0c29\u0c2a")
buf.write("\5\u043b\u021e\2\u0c2a\u0c2b\5\u043d\u021f\2\u0c2b\u0c2c")
buf.write("\5\u0457\u022c\2\u0c2c\u021e\3\2\2\2\u0c2d\u0c2e\5\u0451")
buf.write("\u0229\2\u0c2e\u0c2f\5\u0457\u022c\2\u0c2f\u0c30\5\u043b")
buf.write("\u021e\2\u0c30\u0c31\5\u0445\u0223\2\u0c31\u0c32\5\u044f")
buf.write("\u0228\2\u0c32\u0c33\5\u0435\u021b\2\u0c33\u0c34\5\u044b")
buf.write("\u0226\2\u0c34\u0c35\5\u0445\u0223\2\u0c35\u0c36\5\u045b")
buf.write("\u022e\2\u0c36\u0c37\5\u0465\u0233\2\u0c37\u0220\3\2\2")
buf.write("\2\u0c38\u0c39\5\u0451\u0229\2\u0c39\u0c3a\5\u0459\u022d")
buf.write("\2\u0c3a\u0c3b\5\u043d\u021f\2\u0c3b\u0c3c\5\u0457\u022c")
buf.write("\2\u0c3c\u0c3d\5\u0457\u022c\2\u0c3d\u0c3e\5\u0451\u0229")
buf.write("\2\u0c3e\u0c3f\5\u0457\u022c\2\u0c3f\u0222\3\2\2\2\u0c40")
buf.write("\u0c41\5\u0451\u0229\2\u0c41\u0c42\5\u045d\u022f\2\u0c42")
buf.write("\u0c43\5\u045b\u022e\2\u0c43\u0224\3\2\2\2\u0c44\u0c45")
buf.write("\5\u0451\u0229\2\u0c45\u0c46\5\u045d\u022f\2\u0c46\u0c47")
buf.write("\5\u045b\u022e\2\u0c47\u0c48\5\u043d\u021f\2\u0c48\u0c49")
buf.write("\5\u0457\u022c\2\u0c49\u0226\3\2\2\2\u0c4a\u0c4b\5\u0451")
buf.write("\u0229\2\u0c4b\u0c4c\5\u045f\u0230\2\u0c4c\u0c4d\5\u043d")
buf.write("\u021f\2\u0c4d\u0c4e\5\u0457\u022c\2\u0c4e\u0228\3\2\2")
buf.write("\2\u0c4f\u0c50\5\u0451\u0229\2\u0c50\u0c51\5\u045f\u0230")
buf.write("\2\u0c51\u0c52\5\u043d\u021f\2\u0c52\u0c53\5\u0457\u022c")
buf.write("\2\u0c53\u0c54\5\u0457\u022c\2\u0c54\u0c55\5\u0445\u0223")
buf.write("\2\u0c55\u0c56\5\u043b\u021e\2\u0c56\u0c57\5\u0445\u0223")
buf.write("\2\u0c57\u0c58\5\u044f\u0228\2\u0c58\u0c59\5\u0441\u0221")
buf.write("\2\u0c59\u022a\3\2\2\2\u0c5a\u0c5b\5\u0453\u022a\2\u0c5b")
buf.write("\u0c5c\5\u0435\u021b\2\u0c5c\u0c5d\5\u0439\u021d\2\u0c5d")
buf.write("\u0c5e\5\u0449\u0225\2\u0c5e\u0c5f\5\u0435\u021b\2\u0c5f")
buf.write("\u0c60\5\u0441\u0221\2\u0c60\u0c61\5\u043d\u021f\2\u0c61")
buf.write("\u022c\3\2\2\2\u0c62\u0c63\5\u0453\u022a\2\u0c63\u0c64")
buf.write("\5\u0435\u021b\2\u0c64\u0c65\5\u0457\u022c\2\u0c65\u0c66")
buf.write("\5\u0435\u021b\2\u0c66\u0c67\5\u044b\u0226\2\u0c67\u0c68")
buf.write("\5\u044b\u0226\2\u0c68\u0c69\5\u043d\u021f\2\u0c69\u0c6a")
buf.write("\5\u044b\u0226\2\u0c6a\u0c6b\7a\2\2\u0c6b\u0c6c\5\u043d")
buf.write("\u021f\2\u0c6c\u0c6d\5\u044f\u0228\2\u0c6d\u0c6e\5\u0435")
buf.write("\u021b\2\u0c6e\u0c6f\5\u0437\u021c\2\u0c6f\u0c70\5\u044b")
buf.write("\u0226\2\u0c70\u0c71\5\u043d\u021f\2\u0c71\u022e\3\2\2")
buf.write("\2\u0c72\u0c73\5\u0453\u022a\2\u0c73\u0c74\5\u0435\u021b")
buf.write("\2\u0c74\u0c75\5\u0457\u022c\2\u0c75\u0c76\5\u0435\u021b")
buf.write("\2\u0c76\u0c77\5\u044d\u0227\2\u0c77\u0c78\5\u043d\u021f")
buf.write("\2\u0c78\u0c79\5\u045b\u022e\2\u0c79\u0c7a\5\u043d\u021f")
buf.write("\2\u0c7a\u0c7b\5\u0457\u022c\2\u0c7b\u0c7c\5\u0459\u022d")
buf.write("\2\u0c7c\u0230\3\2\2\2\u0c7d\u0c7e\5\u0453\u022a\2\u0c7e")
buf.write("\u0c7f\5\u0435\u021b\2\u0c7f\u0c80\5\u0457\u022c\2\u0c80")
buf.write("\u0c81\5\u043d\u021f\2\u0c81\u0c82\5\u044f\u0228\2\u0c82")
buf.write("\u0c83\5\u045b\u022e\2\u0c83\u0232\3\2\2\2\u0c84\u0c85")
buf.write("\5\u0453\u022a\2\u0c85\u0c86\5\u0435\u021b\2\u0c86\u0c87")
buf.write("\5\u0457\u022c\2\u0c87\u0c88\5\u045b\u022e\2\u0c88\u0c89")
buf.write("\5\u0445\u0223\2\u0c89\u0c8a\5\u045b\u022e\2\u0c8a\u0c8b")
buf.write("\5\u0445\u0223\2\u0c8b\u0c8c\5\u0451\u0229\2\u0c8c\u0c8d")
buf.write("\5\u044f\u0228\2\u0c8d\u0234\3\2\2\2\u0c8e\u0c8f\5\u0453")
buf.write("\u022a\2\u0c8f\u0c90\5\u0435\u021b\2\u0c90\u0c91\5\u0459")
buf.write("\u022d\2\u0c91\u0c92\5\u0459\u022d\2\u0c92\u0c93\5\u0445")
buf.write("\u0223\2\u0c93\u0c94\5\u044f\u0228\2\u0c94\u0c95\5\u0441")
buf.write("\u0221\2\u0c95\u0236\3\2\2\2\u0c96\u0c97\5\u0453\u022a")
buf.write("\2\u0c97\u0c98\5\u0435\u021b\2\u0c98\u0c99\5\u045b\u022e")
buf.write("\2\u0c99\u0c9a\5\u0443\u0222\2\u0c9a\u0238\3\2\2\2\u0c9b")
buf.write("\u0c9c\7\'\2\2\u0c9c\u0c9d\5\u0457\u022c\2\u0c9d\u0c9e")
buf.write("\5\u0451\u0229\2\u0c9e\u0c9f\5\u0461\u0231\2\u0c9f\u0ca0")
buf.write("\5\u045b\u022e\2\u0ca0\u0ca1\5\u0465\u0233\2\u0ca1\u0ca2")
buf.write("\5\u0453\u022a\2\u0ca2\u0ca3\5\u043d\u021f\2\u0ca3\u023a")
buf.write("\3\2\2\2\u0ca4\u0ca5\7\'\2\2\u0ca5\u0ca6\5\u045b\u022e")
buf.write("\2\u0ca6\u0ca7\5\u0465\u0233\2\u0ca7\u0ca8\5\u0453\u022a")
buf.write("\2\u0ca8\u0ca9\5\u043d\u021f\2\u0ca9\u023c\3\2\2\2\u0caa")
buf.write("\u0cab\5\u0453\u022a\2\u0cab\u0cac\5\u0445\u0223\2\u0cac")
buf.write("\u0cad\5\u0453\u022a\2\u0cad\u0cae\5\u043d\u021f\2\u0cae")
buf.write("\u0caf\5\u044b\u0226\2\u0caf\u0cb0\5\u0445\u0223\2\u0cb0")
buf.write("\u0cb1\5\u044f\u0228\2\u0cb1\u0cb2\5\u043d\u021f\2\u0cb2")
buf.write("\u0cb3\5\u043b\u021e\2\u0cb3\u023e\3\2\2\2\u0cb4\u0cb5")
buf.write("\5\u0453\u022a\2\u0cb5\u0cb6\5\u0445\u0223\2\u0cb6\u0cb7")
buf.write("\5\u045f\u0230\2\u0cb7\u0cb8\5\u0451\u0229\2\u0cb8\u0cb9")
buf.write("\5\u045b\u022e\2\u0cb9\u0240\3\2\2\2\u0cba\u0cbb\5\u0453")
buf.write("\u022a\2\u0cbb\u0cbc\5\u044b\u0226\2\u0cbc\u0cbd\5\u0435")
buf.write("\u021b\2\u0cbd\u0cbe\5\u044f\u0228\2\u0cbe\u0242\3\2\2")
buf.write("\2\u0cbf\u0cc0\5\u0453\u022a\2\u0cc0\u0cc1\5\u044b\u0226")
buf.write("\2\u0cc1\u0cc2\5\u0459\u022d\2\u0cc2\u0cc3\7a\2\2\u0cc3")
buf.write("\u0cc4\5\u0445\u0223\2\u0cc4\u0cc5\5\u044f\u0228\2\u0cc5")
buf.write("\u0cc6\5\u045b\u022e\2\u0cc6\u0cc7\5\u043d\u021f\2\u0cc7")
buf.write("\u0cc8\5\u0441\u0221\2\u0cc8\u0cc9\5\u043d\u021f\2\u0cc9")
buf.write("\u0cca\5\u0457\u022c\2\u0cca\u0244\3\2\2\2\u0ccb\u0ccc")
buf.write("\5\u0453\u022a\2\u0ccc\u0ccd\5\u0451\u0229\2\u0ccd\u0cce")
buf.write("\5\u0459\u022d\2\u0cce\u0ccf\5\u0445\u0223\2\u0ccf\u0cd0")
buf.write("\5\u045b\u022e\2\u0cd0\u0cd1\5\u0445\u0223\2\u0cd1\u0cd2")
buf.write("\5\u045f\u0230\2\u0cd2\u0cd3\5\u043d\u021f\2\u0cd3\u0246")
buf.write("\3\2\2\2\u0cd4\u0cd5\5\u0453\u022a\2\u0cd5\u0cd6\5\u0451")
buf.write("\u0229\2\u0cd6\u0cd7\5\u0459\u022d\2\u0cd7\u0cd8\5\u0445")
buf.write("\u0223\2\u0cd8\u0cd9\5\u045b\u022e\2\u0cd9\u0cda\5\u0445")
buf.write("\u0223\2\u0cda\u0cdb\5\u045f\u0230\2\u0cdb\u0cdc\5\u043d")
buf.write("\u021f\2\u0cdc\u0cdd\5\u044f\u0228\2\u0cdd\u0248\3\2\2")
buf.write("\2\u0cde\u0cdf\5\u0453\u022a\2\u0cdf\u0ce0\5\u0457\u022c")
buf.write("\2\u0ce0\u0ce1\5\u0435\u021b\2\u0ce1\u0ce2\5\u0441\u0221")
buf.write("\2\u0ce2\u0ce3\5\u044d\u0227\2\u0ce3\u0ce4\5\u0435\u021b")
buf.write("\2\u0ce4\u024a\3\2\2\2\u0ce5\u0ce6\5\u0453\u022a\2\u0ce6")
buf.write("\u0ce7\5\u0457\u022c\2\u0ce7\u0ce8\5\u043d\u021f\2\u0ce8")
buf.write("\u0ce9\5\u0439\u021d\2\u0ce9\u0cea\5\u043d\u021f\2\u0cea")
buf.write("\u0ceb\5\u043b\u021e\2\u0ceb\u0cec\5\u0445\u0223\2\u0cec")
buf.write("\u0ced\5\u044f\u0228\2\u0ced\u0cee\5\u0441\u0221\2\u0cee")
buf.write("\u024c\3\2\2\2\u0cef\u0cf0\5\u0453\u022a\2\u0cf0\u0cf1")
buf.write("\5\u0457\u022c\2\u0cf1\u0cf2\5\u043d\u021f\2\u0cf2\u0cf3")
buf.write("\5\u0439\u021d\2\u0cf3\u0cf4\5\u0445\u0223\2\u0cf4\u0cf5")
buf.write("\5\u0459\u022d\2\u0cf5\u0cf6\5\u0445\u0223\2\u0cf6\u0cf7")
buf.write("\5\u0451\u0229\2\u0cf7\u0cf8\5\u044f\u0228\2\u0cf8\u024e")
buf.write("\3\2\2\2\u0cf9\u0cfa\5\u0453\u022a\2\u0cfa\u0cfb\5\u0457")
buf.write("\u022c\2\u0cfb\u0cfc\5\u043d\u021f\2\u0cfc\u0cfd\5\u0459")
buf.write("\u022d\2\u0cfd\u0cfe\5\u043d\u021f\2\u0cfe\u0cff\5\u044f")
buf.write("\u0228\2\u0cff\u0d00\5\u045b\u022e\2\u0d00\u0250\3\2\2")
buf.write("\2\u0d01\u0d02\5\u0453\u022a\2\u0d02\u0d03\5\u0457\u022c")
buf.write("\2\u0d03\u0d04\5\u0445\u0223\2\u0d04\u0d05\5\u0451\u0229")
buf.write("\2\u0d05\u0d06\5\u0457\u022c\2\u0d06\u0252\3\2\2\2\u0d07")
buf.write("\u0d08\5\u0453\u022a\2\u0d08\u0d09\5\u0457\u022c\2\u0d09")
buf.write("\u0d0a\5\u0451\u0229\2\u0d0a\u0d0b\5\u0439\u021d\2\u0d0b")
buf.write("\u0d0c\5\u043d\u021f\2\u0d0c\u0d0d\5\u043b\u021e\2\u0d0d")
buf.write("\u0d0e\5\u045d\u022f\2\u0d0e\u0d0f\5\u0457\u022c\2\u0d0f")
buf.write("\u0d10\5\u043d\u021f\2\u0d10\u0254\3\2\2\2\u0d11\u0d12")
buf.write("\5\u0457\u022c\2\u0d12\u0d13\5\u0435\u021b\2\u0d13\u0d14")
buf.write("\5\u0445\u0223\2\u0d14\u0d15\5\u0459\u022d\2\u0d15\u0d16")
buf.write("\5\u043d\u021f\2\u0d16\u0256\3\2\2\2\u0d17\u0d18\5\u0457")
buf.write("\u022c\2\u0d18\u0d19\5\u0435\u021b\2\u0d19\u0d1a\5\u044f")
buf.write("\u0228\2\u0d1a\u0d1b\5\u0441\u0221\2\u0d1b\u0d1c\5\u043d")
buf.write("\u021f\2\u0d1c\u0258\3\2\2\2\u0d1d\u0d1e\5\u0457\u022c")
buf.write("\2\u0d1e\u0d1f\5\u0435\u021b\2\u0d1f\u0d20\5\u0461\u0231")
buf.write("\2\u0d20\u025a\3\2\2\2\u0d21\u0d22\5\u0457\u022c\2\u0d22")
buf.write("\u0d23\5\u043d\u021f\2\u0d23\u0d24\5\u0435\u021b\2\u0d24")
buf.write("\u0d25\5\u043b\u021e\2\u0d25\u025c\3\2\2\2\u0d26\u0d27")
buf.write("\5\u0457\u022c\2\u0d27\u0d28\5\u043d\u021f\2\u0d28\u0d29")
buf.write("\5\u0435\u021b\2\u0d29\u0d2a\5\u044b\u0226\2\u0d2a\u025e")
buf.write("\3\2\2\2\u0d2b\u0d2c\5\u0457\u022c\2\u0d2c\u0d2d\5\u043d")
buf.write("\u021f\2\u0d2d\u0d2e\5\u0439\u021d\2\u0d2e\u0d2f\5\u0451")
buf.write("\u0229\2\u0d2f\u0d30\5\u0457\u022c\2\u0d30\u0d31\5\u043b")
buf.write("\u021e\2\u0d31\u0260\3\2\2\2\u0d32\u0d33\5\u0457\u022c")
buf.write("\2\u0d33\u0d34\5\u043d\u021f\2\u0d34\u0d35\5\u043f\u0220")
buf.write("\2\u0d35\u0262\3\2\2\2\u0d36\u0d37\5\u0457\u022c\2\u0d37")
buf.write("\u0d38\5\u043d\u021f\2\u0d38\u0d39\5\u043f\u0220\2\u0d39")
buf.write("\u0d3a\5\u043d\u021f\2\u0d3a\u0d3b\5\u0457\u022c\2\u0d3b")
buf.write("\u0d3c\5\u043d\u021f\2\u0d3c\u0d3d\5\u044f\u0228\2\u0d3d")
buf.write("\u0d3e\5\u0439\u021d\2\u0d3e\u0d3f\5\u043d\u021f\2\u0d3f")
buf.write("\u0264\3\2\2\2\u0d40\u0d41\5\u0457\u022c\2\u0d41\u0d42")
buf.write("\5\u043d\u021f\2\u0d42\u0d43\5\u043f\u0220\2\u0d43\u0d44")
buf.write("\5\u043d\u021f\2\u0d44\u0d45\5\u0457\u022c\2\u0d45\u0d46")
buf.write("\5\u043d\u021f\2\u0d46\u0d47\5\u044f\u0228\2\u0d47\u0d48")
buf.write("\5\u0439\u021d\2\u0d48\u0d49\5\u0445\u0223\2\u0d49\u0d4a")
buf.write("\5\u044f\u0228\2\u0d4a\u0d4b\5\u0441\u0221\2\u0d4b\u0266")
buf.write("\3\2\2\2\u0d4c\u0d4d\5\u0457\u022c\2\u0d4d\u0d4e\5\u043d")
buf.write("\u021f\2\u0d4e\u0d4f\5\u0447\u0224\2\u0d4f\u0d50\5\u043d")
buf.write("\u021f\2\u0d50\u0d51\5\u0439\u021d\2\u0d51\u0d52\5\u045b")
buf.write("\u022e\2\u0d52\u0268\3\2\2\2\u0d53\u0d54\5\u0457\u022c")
buf.write("\2\u0d54\u0d55\5\u043d\u021f\2\u0d55\u0d56\5\u044b\u0226")
buf.write("\2\u0d56\u0d57\5\u0445\u0223\2\u0d57\u0d58\5\u043d\u021f")
buf.write("\2\u0d58\u0d59\5\u0459\u022d\2\u0d59\u0d5a\7a\2\2\u0d5a")
buf.write("\u0d5b\5\u0451\u0229\2\u0d5b\u0d5c\5\u044f\u0228\2\u0d5c")
buf.write("\u026a\3\2\2\2\u0d5d\u0d5e\5\u0457\u022c\2\u0d5e\u0d5f")
buf.write("\5\u043d\u021f\2\u0d5f\u0d60\5\u044f\u0228\2\u0d60\u0d61")
buf.write("\5\u0435\u021b\2\u0d61\u0d62\5\u044d\u0227\2\u0d62\u0d63")
buf.write("\5\u043d\u021f\2\u0d63\u026c\3\2\2\2\u0d64\u0d65\5\u0457")
buf.write("\u022c\2\u0d65\u0d66\5\u043d\u021f\2\u0d66\u0d67\5\u0453")
buf.write("\u022a\2\u0d67\u0d68\5\u044b\u0226\2\u0d68\u0d69\5\u0435")
buf.write("\u021b\2\u0d69\u0d6a\5\u0439\u021d\2\u0d6a\u0d6b\5\u043d")
buf.write("\u021f\2\u0d6b\u026e\3\2\2\2\u0d6c\u0d6d\5\u0457\u022c")
buf.write("\2\u0d6d\u0d6e\5\u043d\u021f\2\u0d6e\u0d6f\5\u0459\u022d")
buf.write("\2\u0d6f\u0d70\5\u0453\u022a\2\u0d70\u0d71\5\u043d\u021f")
buf.write("\2\u0d71\u0d72\5\u0439\u021d\2\u0d72\u0d73\5\u045b\u022e")
buf.write("\2\u0d73\u0270\3\2\2\2\u0d74\u0d75\5\u0457\u022c\2\u0d75")
buf.write("\u0d76\5\u043d\u021f\2\u0d76\u0d77\5\u0459\u022d\2\u0d77")
buf.write("\u0d78\5\u045b\u022e\2\u0d78\u0d79\5\u0457\u022c\2\u0d79")
buf.write("\u0d7a\5\u0445\u0223\2\u0d7a\u0d7b\5\u0439\u021d\2\u0d7b")
buf.write("\u0d7c\5\u045b\u022e\2\u0d7c\u0d7d\7a\2\2\u0d7d\u0d7e")
buf.write("\5\u0457\u022c\2\u0d7e\u0d7f\5\u043d\u021f\2\u0d7f\u0d80")
buf.write("\5\u043f\u0220\2\u0d80\u0d81\5\u043d\u021f\2\u0d81\u0d82")
buf.write("\5\u0457\u022c\2\u0d82\u0d83\5\u043d\u021f\2\u0d83\u0d84")
buf.write("\5\u044f\u0228\2\u0d84\u0d85\5\u0439\u021d\2\u0d85\u0d86")
buf.write("\5\u043d\u021f\2\u0d86\u0d87\5\u0459\u022d\2\u0d87\u0272")
buf.write("\3\2\2\2\u0d88\u0d89\5\u0457\u022c\2\u0d89\u0d8a\5\u043d")
buf.write("\u021f\2\u0d8a\u0d8b\5\u0459\u022d\2\u0d8b\u0d8c\5\u045d")
buf.write("\u022f\2\u0d8c\u0d8d\5\u044b\u0226\2\u0d8d\u0d8e\5\u045b")
buf.write("\u022e\2\u0d8e\u0274\3\2\2\2\u0d8f\u0d90\5\u0457\u022c")
buf.write("\2\u0d90\u0d91\5\u043d\u021f\2\u0d91\u0d92\5\u0459\u022d")
buf.write("\2\u0d92\u0d93\5\u045d\u022f\2\u0d93\u0d94\5\u044b\u0226")
buf.write("\2\u0d94\u0d95\5\u045b\u022e\2\u0d95\u0d96\7a\2\2\u0d96")
buf.write("\u0d97\5\u0439\u021d\2\u0d97\u0d98\5\u0435\u021b\2\u0d98")
buf.write("\u0d99\5\u0439\u021d\2\u0d99\u0d9a\5\u0443\u0222\2\u0d9a")
buf.write("\u0d9b\5\u043d\u021f\2\u0d9b\u0276\3\2\2\2\u0d9c\u0d9d")
buf.write("\5\u0457\u022c\2\u0d9d\u0d9e\5\u043d\u021f\2\u0d9e\u0d9f")
buf.write("\5\u045b\u022e\2\u0d9f\u0da0\5\u045d\u022f\2\u0da0\u0da1")
buf.write("\5\u0457\u022c\2\u0da1\u0da2\5\u044f\u0228\2\u0da2\u0278")
buf.write("\3\2\2\2\u0da3\u0da4\5\u0457\u022c\2\u0da4\u0da5\5\u043d")
buf.write("\u021f\2\u0da5\u0da6\5\u045b\u022e\2\u0da6\u0da7\5\u045d")
buf.write("\u022f\2\u0da7\u0da8\5\u0457\u022c\2\u0da8\u0da9\5\u044f")
buf.write("\u0228\2\u0da9\u0daa\5\u0445\u0223\2\u0daa\u0dab\5\u044f")
buf.write("\u0228\2\u0dab\u0dac\5\u0441\u0221\2\u0dac\u027a\3\2\2")
buf.write("\2\u0dad\u0dae\5\u0457\u022c\2\u0dae\u0daf\5\u043d\u021f")
buf.write("\2\u0daf\u0db0\5\u045d\u022f\2\u0db0\u0db1\5\u0459\u022d")
buf.write("\2\u0db1\u0db2\5\u043d\u021f\2\u0db2\u027c\3\2\2\2\u0db3")
buf.write("\u0db4\5\u0457\u022c\2\u0db4\u0db5\5\u043d\u021f\2\u0db5")
buf.write("\u0db6\5\u045f\u0230\2\u0db6\u0db7\5\u043d\u021f\2\u0db7")
buf.write("\u0db8\5\u0457\u022c\2\u0db8\u0db9\5\u0459\u022d\2\u0db9")
buf.write("\u0dba\5\u043d\u021f\2\u0dba\u027e\3\2\2\2\u0dbb\u0dbc")
buf.write("\5\u0457\u022c\2\u0dbc\u0dbd\5\u043d\u021f\2\u0dbd\u0dbe")
buf.write("\5\u045f\u0230\2\u0dbe\u0dbf\5\u0451\u0229\2\u0dbf\u0dc0")
buf.write("\5\u0449\u0225\2\u0dc0\u0dc1\5\u043d\u021f\2\u0dc1\u0280")
buf.write("\3\2\2\2\u0dc2\u0dc3\5\u0457\u022c\2\u0dc3\u0dc4\5\u0445")
buf.write("\u0223\2\u0dc4\u0dc5\5\u0441\u0221\2\u0dc5\u0dc6\5\u0443")
buf.write("\u0222\2\u0dc6\u0dc7\5\u045b\u022e\2\u0dc7\u0282\3\2\2")
buf.write("\2\u0dc8\u0dc9\5\u0457\u022c\2\u0dc9\u0dca\5\u0451\u0229")
buf.write("\2\u0dca\u0dcb\5\u044b\u0226\2\u0dcb\u0dcc\5\u044b\u0226")
buf.write("\2\u0dcc\u0dcd\5\u0437\u021c\2\u0dcd\u0dce\5\u0435\u021b")
buf.write("\2\u0dce\u0dcf\5\u0439\u021d\2\u0dcf\u0dd0\5\u0449\u0225")
buf.write("\2\u0dd0\u0284\3\2\2\2\u0dd1\u0dd2\5\u0457\u022c\2\u0dd2")
buf.write("\u0dd3\5\u0451\u0229\2\u0dd3\u0dd4\5\u044b\u0226\2\u0dd4")
buf.write("\u0dd5\5\u044b\u0226\2\u0dd5\u0dd6\5\u045d\u022f\2\u0dd6")
buf.write("\u0dd7\5\u0453\u022a\2\u0dd7\u0286\3\2\2\2\u0dd8\u0dd9")
buf.write("\5\u0457\u022c\2\u0dd9\u0dda\5\u0451\u0229\2\u0dda\u0ddb")
buf.write("\5\u0461\u0231\2\u0ddb\u0288\3\2\2\2\u0ddc\u0ddd\5\u0457")
buf.write("\u022c\2\u0ddd\u0dde\5\u0451\u0229\2\u0dde\u0ddf\5\u0461")
buf.write("\u0231\2\u0ddf\u0de0\5\u0445\u0223\2\u0de0\u0de1\5\u043b")
buf.write("\u021e\2\u0de1\u028a\3\2\2\2\u0de2\u0de3\5\u0457\u022c")
buf.write("\2\u0de3\u0de4\5\u0451\u0229\2\u0de4\u0de5\5\u0461\u0231")
buf.write("\2\u0de5\u0de6\5\u0459\u022d\2\u0de6\u028c\3\2\2\2\u0de7")
buf.write("\u0de8\5\u0457\u022c\2\u0de8\u0de9\5\u045d\u022f\2\u0de9")
buf.write("\u0dea\5\u044b\u0226\2\u0dea\u0deb\5\u043d\u021f\2\u0deb")
buf.write("\u0dec\5\u0459\u022d\2\u0dec\u028e\3\2\2\2\u0ded\u0dee")
buf.write("\5\u0459\u022d\2\u0dee\u0def\5\u0435\u021b\2\u0def\u0df0")
buf.write("\5\u044d\u0227\2\u0df0\u0df1\5\u0453\u022a\2\u0df1\u0df2")
buf.write("\5\u044b\u0226\2\u0df2\u0df3\5\u043d\u021f\2\u0df3\u0290")
buf.write("\3\2\2\2\u0df4\u0df5\5\u0459\u022d\2\u0df5\u0df6\5\u0435")
buf.write("\u021b\2\u0df6\u0df7\5\u045f\u0230\2\u0df7\u0df8\5\u043d")
buf.write("\u021f\2\u0df8\u0292\3\2\2\2\u0df9\u0dfa\5\u0459\u022d")
buf.write("\2\u0dfa\u0dfb\5\u0435\u021b\2\u0dfb\u0dfc\5\u045f\u0230")
buf.write("\2\u0dfc\u0dfd\5\u043d\u021f\2\u0dfd\u0dfe\5\u0453\u022a")
buf.write("\2\u0dfe\u0dff\5\u0451\u0229\2\u0dff\u0e00\5\u0445\u0223")
buf.write("\2\u0e00\u0e01\5\u044f\u0228\2\u0e01\u0e02\5\u045b\u022e")
buf.write("\2\u0e02\u0294\3\2\2\2\u0e03\u0e04\5\u0459\u022d\2\u0e04")
buf.write("\u0e05\5\u0439\u021d\2\u0e05\u0e06\5\u0443\u0222\2\u0e06")
buf.write("\u0e07\5\u043d\u021f\2\u0e07\u0e08\5\u044d\u0227\2\u0e08")
buf.write("\u0e09\5\u0435\u021b\2\u0e09\u0296\3\2\2\2\u0e0a\u0e0b")
buf.write("\5\u0459\u022d\2\u0e0b\u0e0c\5\u0439\u021d\2\u0e0c\u0e0d")
buf.write("\5\u0443\u0222\2\u0e0d\u0e0e\5\u043d\u021f\2\u0e0e\u0e0f")
buf.write("\5\u044d\u0227\2\u0e0f\u0e10\5\u0435\u021b\2\u0e10\u0e11")
buf.write("\5\u0439\u021d\2\u0e11\u0e12\5\u0443\u0222\2\u0e12\u0e13")
buf.write("\5\u043d\u021f\2\u0e13\u0e14\5\u0439\u021d\2\u0e14\u0e15")
buf.write("\5\u0449\u0225\2\u0e15\u0298\3\2\2\2\u0e16\u0e17\5\u0459")
buf.write("\u022d\2\u0e17\u0e18\5\u0439\u021d\2\u0e18\u0e19\5\u044f")
buf.write("\u0228\2\u0e19\u029a\3\2\2\2\u0e1a\u0e1b\5\u0459\u022d")
buf.write("\2\u0e1b\u0e1c\5\u043d\u021f\2\u0e1c\u0e1d\5\u0435\u021b")
buf.write("\2\u0e1d\u0e1e\5\u0457\u022c\2\u0e1e\u0e1f\5\u0439\u021d")
buf.write("\2\u0e1f\u0e20\5\u0443\u0222\2\u0e20\u029c\3\2\2\2\u0e21")
buf.write("\u0e22\5\u0459\u022d\2\u0e22\u0e23\5\u043d\u021f\2\u0e23")
buf.write("\u0e24\5\u0439\u021d\2\u0e24\u0e25\5\u0451\u0229\2\u0e25")
buf.write("\u0e26\5\u044f\u0228\2\u0e26\u0e27\5\u043b\u021e\2\u0e27")
buf.write("\u029e\3\2\2\2\u0e28\u0e29\5\u0459\u022d\2\u0e29\u0e2a")
buf.write("\5\u043d\u021f\2\u0e2a\u0e2b\5\u043d\u021f\2\u0e2b\u0e2c")
buf.write("\5\u043b\u021e\2\u0e2c\u02a0\3\2\2\2\u0e2d\u0e2e\5\u0459")
buf.write("\u022d\2\u0e2e\u0e2f\5\u043d\u021f\2\u0e2f\u0e30\5\u0441")
buf.write("\u0221\2\u0e30\u0e31\5\u044d\u0227\2\u0e31\u0e32\5\u043d")
buf.write("\u021f\2\u0e32\u0e33\5\u044f\u0228\2\u0e33\u0e34\5\u045b")
buf.write("\u022e\2\u0e34\u02a2\3\2\2\2\u0e35\u0e36\5\u0459\u022d")
buf.write("\2\u0e36\u0e37\5\u043d\u021f\2\u0e37\u0e38\5\u044b\u0226")
buf.write("\2\u0e38\u0e39\5\u043d\u021f\2\u0e39\u0e3a\5\u0439\u021d")
buf.write("\2\u0e3a\u0e3b\5\u045b\u022e\2\u0e3b\u02a4\3\2\2\2\u0e3c")
buf.write("\u0e3d\5\u0459\u022d\2\u0e3d\u0e3e\5\u043d\u021f\2\u0e3e")
buf.write("\u0e3f\5\u044b\u0226\2\u0e3f\u0e40\5\u043f\u0220\2\u0e40")
buf.write("\u02a6\3\2\2\2\u0e41\u0e42\5\u0459\u022d\2\u0e42\u0e43")
buf.write("\5\u043d\u021f\2\u0e43\u0e44\5\u0455\u022b\2\u0e44\u0e45")
buf.write("\5\u045d\u022f\2\u0e45\u0e46\5\u043d\u021f\2\u0e46\u0e47")
buf.write("\5\u044f\u0228\2\u0e47\u0e48\5\u0439\u021d\2\u0e48\u0e49")
buf.write("\5\u043d\u021f\2\u0e49\u02a8\3\2\2\2\u0e4a\u0e4b\5\u0459")
buf.write("\u022d\2\u0e4b\u0e4c\5\u043d\u021f\2\u0e4c\u0e4d\5\u0455")
buf.write("\u022b\2\u0e4d\u0e4e\5\u045d\u022f\2\u0e4e\u0e4f\5\u043d")
buf.write("\u021f\2\u0e4f\u0e50\5\u044f\u0228\2\u0e50\u0e51\5\u045b")
buf.write("\u022e\2\u0e51\u0e52\5\u0445\u0223\2\u0e52\u0e53\5\u0435")
buf.write("\u021b\2\u0e53\u0e54\5\u044b\u0226\2\u0e54\u02aa\3\2\2")
buf.write("\2\u0e55\u0e56\5\u0459\u022d\2\u0e56\u0e57\5\u043d\u021f")
buf.write("\2\u0e57\u0e58\5\u0457\u022c\2\u0e58\u0e59\5\u0445\u0223")
buf.write("\2\u0e59\u0e5a\5\u0435\u021b\2\u0e5a\u0e5b\5\u044b\u0226")
buf.write("\2\u0e5b\u0e5c\5\u0445\u0223\2\u0e5c\u0e5d\5\u0467\u0234")
buf.write("\2\u0e5d\u0e5e\5\u0435\u021b\2\u0e5e\u0e5f\5\u0437\u021c")
buf.write("\2\u0e5f\u0e60\5\u044b\u0226\2\u0e60\u0e61\5\u043d\u021f")
buf.write("\2\u0e61\u02ac\3\2\2\2\u0e62\u0e63\5\u0459\u022d\2\u0e63")
buf.write("\u0e64\5\u043d\u021f\2\u0e64\u0e65\5\u0457\u022c\2\u0e65")
buf.write("\u0e66\5\u0445\u0223\2\u0e66\u0e67\5\u0435\u021b\2\u0e67")
buf.write("\u0e68\5\u044b\u0226\2\u0e68\u0e69\5\u044b\u0226\2\u0e69")
buf.write("\u0e6a\5\u0465\u0233\2\u0e6a\u0e6b\7a\2\2\u0e6b\u0e6c")
buf.write("\5\u0457\u022c\2\u0e6c\u0e6d\5\u043d\u021f\2\u0e6d\u0e6e")
buf.write("\5\u045d\u022f\2\u0e6e\u0e6f\5\u0459\u022d\2\u0e6f\u0e70")
buf.write("\5\u0435\u021b\2\u0e70\u0e71\5\u0437\u021c\2\u0e71\u0e72")
buf.write("\5\u044b\u0226\2\u0e72\u0e73\5\u043d\u021f\2\u0e73\u02ae")
buf.write("\3\2\2\2\u0e74\u0e75\5\u0459\u022d\2\u0e75\u0e76\5\u043d")
buf.write("\u021f\2\u0e76\u0e77\5\u0457\u022c\2\u0e77\u0e78\5\u045f")
buf.write("\u0230\2\u0e78\u0e79\5\u043d\u021f\2\u0e79\u0e7a\5\u0457")
buf.write("\u022c\2\u0e7a\u0e7b\5\u043d\u021f\2\u0e7b\u0e7c\5\u0457")
buf.write("\u022c\2\u0e7c\u0e7d\5\u0457\u022c\2\u0e7d\u0e7e\5\u0451")
buf.write("\u0229\2\u0e7e\u0e7f\5\u0457\u022c\2\u0e7f\u02b0\3\2\2")
buf.write("\2\u0e80\u0e81\5\u0459\u022d\2\u0e81\u0e82\5\u043d\u021f")
buf.write("\2\u0e82\u0e83\5\u0459\u022d\2\u0e83\u0e84\5\u0459\u022d")
buf.write("\2\u0e84\u0e85\5\u0445\u0223\2\u0e85\u0e86\5\u0451\u0229")
buf.write("\2\u0e86\u0e87\5\u044f\u0228\2\u0e87\u0e88\5\u045b\u022e")
buf.write("\2\u0e88\u0e89\5\u0445\u0223\2\u0e89\u0e8a\5\u044d\u0227")
buf.write("\2\u0e8a\u0e8b\5\u043d\u021f\2\u0e8b\u0e8c\5\u0467\u0234")
buf.write("\2\u0e8c\u0e8d\5\u0451\u0229\2\u0e8d\u0e8e\5\u044f\u0228")
buf.write("\2\u0e8e\u0e8f\5\u043d\u021f\2\u0e8f\u02b2\3\2\2\2\u0e90")
buf.write("\u0e91\5\u0459\u022d\2\u0e91\u0e92\5\u043d\u021f\2\u0e92")
buf.write("\u0e93\5\u045b\u022e\2\u0e93\u02b4\3\2\2\2\u0e94\u0e95")
buf.write("\5\u0459\u022d\2\u0e95\u0e96\5\u043d\u021f\2\u0e96\u0e97")
buf.write("\5\u045b\u022e\2\u0e97\u0e98\5\u0459\u022d\2\u0e98\u02b6")
buf.write("\3\2\2\2\u0e99\u0e9a\5\u0459\u022d\2\u0e9a\u0e9b\5\u043d")
buf.write("\u021f\2\u0e9b\u0e9c\5\u045b\u022e\2\u0e9c\u0e9d\5\u045b")
buf.write("\u022e\2\u0e9d\u0e9e\5\u0445\u0223\2\u0e9e\u0e9f\5\u044f")
buf.write("\u0228\2\u0e9f\u0ea0\5\u0441\u0221\2\u0ea0\u0ea1\5\u0459")
buf.write("\u022d\2\u0ea1\u02b8\3\2\2\2\u0ea2\u0ea3\5\u0459\u022d")
buf.write("\2\u0ea3\u0ea4\5\u0443\u0222\2\u0ea4\u0ea5\5\u0435\u021b")
buf.write("\2\u0ea5\u0ea6\5\u0457\u022c\2\u0ea6\u0ea7\5\u043d\u021f")
buf.write("\2\u0ea7\u02ba\3\2\2\2\u0ea8\u0ea9\5\u0459\u022d\2\u0ea9")
buf.write("\u0eaa\5\u0443\u0222\2\u0eaa\u0eab\5\u0451\u0229\2\u0eab")
buf.write("\u0eac\5\u0461\u0231\2\u0eac\u02bc\3\2\2\2\u0ead\u0eae")
buf.write("\5\u0459\u022d\2\u0eae\u0eaf\5\u0443\u0222\2\u0eaf\u0eb0")
buf.write("\5\u045d\u022f\2\u0eb0\u0eb1\5\u045b\u022e\2\u0eb1\u0eb2")
buf.write("\5\u043b\u021e\2\u0eb2\u0eb3\5\u0451\u0229\2\u0eb3\u0eb4")
buf.write("\5\u0461\u0231\2\u0eb4\u0eb5\5\u044f\u0228\2\u0eb5\u02be")
buf.write("\3\2\2\2\u0eb6\u0eb7\5\u0459\u022d\2\u0eb7\u0eb8\5\u0445")
buf.write("\u0223\2\u0eb8\u0eb9\5\u0437\u021c\2\u0eb9\u0eba\5\u044b")
buf.write("\u0226\2\u0eba\u0ebb\5\u0445\u0223\2\u0ebb\u0ebc\5\u044f")
buf.write("\u0228\2\u0ebc\u0ebd\5\u0441\u0221\2\u0ebd\u0ebe\5\u0459")
buf.write("\u022d\2\u0ebe\u02c0\3\2\2\2\u0ebf\u0ec0\5\u0459\u022d")
buf.write("\2\u0ec0\u0ec1\5\u0445\u0223\2\u0ec1\u0ec2\5\u0441\u0221")
buf.write("\2\u0ec2\u0ec3\5\u044f\u0228\2\u0ec3\u0ec4\5\u045b\u022e")
buf.write("\2\u0ec4\u0ec5\5\u0465\u0233\2\u0ec5\u0ec6\5\u0453\u022a")
buf.write("\2\u0ec6\u0ec7\5\u043d\u021f\2\u0ec7\u02c2\3\2\2\2\u0ec8")
buf.write("\u0ec9\5\u0459\u022d\2\u0ec9\u0eca\5\u0445\u0223\2\u0eca")
buf.write("\u0ecb\5\u044d\u0227\2\u0ecb\u0ecc\5\u0453\u022a\2\u0ecc")
buf.write("\u0ecd\5\u044b\u0226\2\u0ecd\u0ece\5\u043d\u021f\2\u0ece")
buf.write("\u0ecf\7a\2\2\u0ecf\u0ed0\5\u0445\u0223\2\u0ed0\u0ed1")
buf.write("\5\u044f\u0228\2\u0ed1\u0ed2\5\u045b\u022e\2\u0ed2\u0ed3")
buf.write("\5\u043d\u021f\2\u0ed3\u0ed4\5\u0441\u0221\2\u0ed4\u0ed5")
buf.write("\5\u043d\u021f\2\u0ed5\u0ed6\5\u0457\u022c\2\u0ed6\u02c4")
buf.write("\3\2\2\2\u0ed7\u0ed8\5\u0459\u022d\2\u0ed8\u0ed9\5\u0445")
buf.write("\u0223\2\u0ed9\u0eda\5\u044f\u0228\2\u0eda\u0edb\5\u0441")
buf.write("\u0221\2\u0edb\u0edc\5\u044b\u0226\2\u0edc\u0edd\5\u043d")
buf.write("\u021f\2\u0edd\u02c6\3\2\2\2\u0ede\u0edf\5\u0459\u022d")
buf.write("\2\u0edf\u0ee0\5\u0445\u0223\2\u0ee0\u0ee1\5\u0467\u0234")
buf.write("\2\u0ee1\u0ee2\5\u043d\u021f\2\u0ee2\u02c8\3\2\2\2\u0ee3")
buf.write("\u0ee4\5\u0459\u022d\2\u0ee4\u0ee5\5\u0449\u0225\2\u0ee5")
buf.write("\u0ee6\5\u0445\u0223\2\u0ee6\u0ee7\5\u0453\u022a\2\u0ee7")
buf.write("\u02ca\3\2\2\2\u0ee8\u0ee9\5\u0459\u022d\2\u0ee9\u0eea")
buf.write("\5\u044d\u0227\2\u0eea\u0eeb\5\u0435\u021b\2\u0eeb\u0eec")
buf.write("\5\u044b\u0226\2\u0eec\u0eed\5\u044b\u0226\2\u0eed\u0eee")
buf.write("\5\u0445\u0223\2\u0eee\u0eef\5\u044f\u0228\2\u0eef\u0ef0")
buf.write("\5\u045b\u022e\2\u0ef0\u02cc\3\2\2\2\u0ef1\u0ef2\5\u0459")
buf.write("\u022d\2\u0ef2\u0ef3\5\u044f\u0228\2\u0ef3\u0ef4\5\u0435")
buf.write("\u021b\2\u0ef4\u0ef5\5\u0453\u022a\2\u0ef5\u0ef6\5\u0459")
buf.write("\u022d\2\u0ef6\u0ef7\5\u0443\u0222\2\u0ef7\u0ef8\5\u0451")
buf.write("\u0229\2\u0ef8\u0ef9\5\u045b\u022e\2\u0ef9\u02ce\3\2\2")
buf.write("\2\u0efa\u0efb\5\u0459\u022d\2\u0efb\u0efc\5\u0451\u0229")
buf.write("\2\u0efc\u0efd\5\u044d\u0227\2\u0efd\u0efe\5\u043d\u021f")
buf.write("\2\u0efe\u02d0\3\2\2\2\u0eff\u0f00\5\u0459\u022d\2\u0f00")
buf.write("\u0f01\5\u0453\u022a\2\u0f01\u0f02\5\u043d\u021f\2\u0f02")
buf.write("\u0f03\5\u0439\u021d\2\u0f03\u0f04\5\u0445\u0223\2\u0f04")
buf.write("\u0f05\5\u043f\u0220\2\u0f05\u0f06\5\u0445\u0223\2\u0f06")
buf.write("\u0f07\5\u0439\u021d\2\u0f07\u0f08\5\u0435\u021b\2\u0f08")
buf.write("\u0f09\5\u045b\u022e\2\u0f09\u0f0a\5\u0445\u0223\2\u0f0a")
buf.write("\u0f0b\5\u0451\u0229\2\u0f0b\u0f0c\5\u044f\u0228\2\u0f0c")
buf.write("\u02d2\3\2\2\2\u0f0d\u0f0e\5\u0459\u022d\2\u0f0e\u0f0f")
buf.write("\5\u0455\u022b\2\u0f0f\u0f10\5\u044b\u0226\2\u0f10\u0f11")
buf.write("\5\u043b\u021e\2\u0f11\u0f12\5\u0435\u021b\2\u0f12\u0f13")
buf.write("\5\u045b\u022e\2\u0f13\u0f14\5\u0435\u021b\2\u0f14\u02d4")
buf.write("\3\2\2\2\u0f15\u0f16\5\u0459\u022d\2\u0f16\u0f17\5\u0455")
buf.write("\u022b\2\u0f17\u0f18\5\u044b\u0226\2\u0f18\u0f19\5\u043d")
buf.write("\u021f\2\u0f19\u0f1a\5\u0457\u022c\2\u0f1a\u0f1b\5\u0457")
buf.write("\u022c\2\u0f1b\u0f1c\5\u0451\u0229\2\u0f1c\u0f1d\5\u0457")
buf.write("\u022c\2\u0f1d\u02d6\3\2\2\2\u0f1e\u0f1f\5\u0459\u022d")
buf.write("\2\u0f1f\u0f20\5\u045b\u022e\2\u0f20\u0f21\5\u0435\u021b")
buf.write("\2\u0f21\u0f22\5\u044f\u0228\2\u0f22\u0f23\5\u043b\u021e")
buf.write("\2\u0f23\u0f24\5\u0435\u021b\2\u0f24\u0f25\5\u044b\u0226")
buf.write("\2\u0f25\u0f26\5\u0451\u0229\2\u0f26\u0f27\5\u044f\u0228")
buf.write("\2\u0f27\u0f28\5\u043d\u021f\2\u0f28\u02d8\3\2\2\2\u0f29")
buf.write("\u0f2a\5\u0459\u022d\2\u0f2a\u0f2b\5\u045b\u022e\2\u0f2b")
buf.write("\u0f2c\5\u0435\u021b\2\u0f2c\u0f2d\5\u0457\u022c\2\u0f2d")
buf.write("\u0f2e\5\u045b\u022e\2\u0f2e\u02da\3\2\2\2\u0f2f\u0f30")
buf.write("\5\u0459\u022d\2\u0f30\u0f31\5\u045b\u022e\2\u0f31\u0f32")
buf.write("\5\u0435\u021b\2\u0f32\u0f33\5\u0457\u022c\2\u0f33\u0f34")
buf.write("\5\u045b\u022e\2\u0f34\u0f35\5\u045d\u022f\2\u0f35\u0f36")
buf.write("\5\u0453\u022a\2\u0f36\u02dc\3\2\2\2\u0f37\u0f38\5\u0459")
buf.write("\u022d\2\u0f38\u0f39\5\u045b\u022e\2\u0f39\u0f3a\5\u0435")
buf.write("\u021b\2\u0f3a\u0f3b\5\u045b\u022e\2\u0f3b\u0f3c\5\u043d")
buf.write("\u021f\2\u0f3c\u0f3d\5\u044d\u0227\2\u0f3d\u0f3e\5\u043d")
buf.write("\u021f\2\u0f3e\u0f3f\5\u044f\u0228\2\u0f3f\u0f40\5\u045b")
buf.write("\u022e\2\u0f40\u02de\3\2\2\2\u0f41\u0f42\5\u0459\u022d")
buf.write("\2\u0f42\u0f43\5\u045b\u022e\2\u0f43\u0f44\5\u0435\u021b")
buf.write("\2\u0f44\u0f45\5\u045b\u022e\2\u0f45\u0f46\5\u043d\u021f")
buf.write("\2\u0f46\u0f47\5\u044d\u0227\2\u0f47\u0f48\5\u043d\u021f")
buf.write("\2\u0f48\u0f49\5\u044f\u0228\2\u0f49\u0f4a\5\u045b\u022e")
buf.write("\2\u0f4a\u0f4b\7a\2\2\u0f4b\u0f4c\5\u0445\u0223\2\u0f4c")
buf.write("\u0f4d\5\u043b\u021e\2\u0f4d\u02e0\3\2\2\2\u0f4e\u0f4f")
buf.write("\5\u0459\u022d\2\u0f4f\u0f50\5\u045b\u022e\2\u0f50\u0f51")
buf.write("\5\u0435\u021b\2\u0f51\u0f52\5\u045b\u022e\2\u0f52\u0f53")
buf.write("\5\u0445\u0223\2\u0f53\u0f54\5\u0439\u021d\2\u0f54\u02e2")
buf.write("\3\2\2\2\u0f55\u0f56\5\u0459\u022d\2\u0f56\u0f57\5\u045b")
buf.write("\u022e\2\u0f57\u0f58\5\u0435\u021b\2\u0f58\u0f59\5\u045b")
buf.write("\u022e\2\u0f59\u0f5a\5\u0445\u0223\2\u0f5a\u0f5b\5\u0459")
buf.write("\u022d\2\u0f5b\u0f5c\5\u045b\u022e\2\u0f5c\u0f5d\5\u0445")
buf.write("\u0223\2\u0f5d\u0f5e\5\u0439\u021d\2\u0f5e\u0f5f\5\u0459")
buf.write("\u022d\2\u0f5f\u02e4\3\2\2\2\u0f60\u0f61\5\u0459\u022d")
buf.write("\2\u0f61\u0f62\5\u045b\u022e\2\u0f62\u0f63\5\u0457\u022c")
buf.write("\2\u0f63\u0f64\5\u0445\u0223\2\u0f64\u0f65\5\u044f\u0228")
buf.write("\2\u0f65\u0f66\5\u0441\u0221\2\u0f66\u02e6\3\2\2\2\u0f67")
buf.write("\u0f68\5\u0459\u022d\2\u0f68\u0f69\5\u045d\u022f\2\u0f69")
buf.write("\u0f6a\5\u0437\u021c\2\u0f6a\u0f6b\5\u044d\u0227\2\u0f6b")
buf.write("\u0f6c\5\u045d\u022f\2\u0f6c\u0f6d\5\u044b\u0226\2\u0f6d")
buf.write("\u0f6e\5\u045b\u022e\2\u0f6e\u0f6f\5\u0445\u0223\2\u0f6f")
buf.write("\u0f70\5\u0459\u022d\2\u0f70\u0f71\5\u043d\u021f\2\u0f71")
buf.write("\u0f72\5\u045b\u022e\2\u0f72\u02e8\3\2\2\2\u0f73\u0f74")
buf.write("\5\u0459\u022d\2\u0f74\u0f75\5\u045d\u022f\2\u0f75\u0f76")
buf.write("\5\u0437\u021c\2\u0f76\u0f77\5\u0453\u022a\2\u0f77\u0f78")
buf.write("\5\u0435\u021b\2\u0f78\u0f79\5\u0457\u022c\2\u0f79\u0f7a")
buf.write("\5\u045b\u022e\2\u0f7a\u0f7b\5\u0445\u0223\2\u0f7b\u0f7c")
buf.write("\5\u045b\u022e\2\u0f7c\u0f7d\5\u0445\u0223\2\u0f7d\u0f7e")
buf.write("\5\u0451\u0229\2\u0f7e\u0f7f\5\u044f\u0228\2\u0f7f\u02ea")
buf.write("\3\2\2\2\u0f80\u0f81\5\u0459\u022d\2\u0f81\u0f82\5\u045d")
buf.write("\u022f\2\u0f82\u0f83\5\u0437\u021c\2\u0f83\u0f84\5\u0459")
buf.write("\u022d\2\u0f84\u0f85\5\u045b\u022e\2\u0f85\u0f86\5\u0445")
buf.write("\u0223\2\u0f86\u0f87\5\u045b\u022e\2\u0f87\u0f88\5\u045d")
buf.write("\u022f\2\u0f88\u0f89\5\u045b\u022e\2\u0f89\u0f8a\5\u0435")
buf.write("\u021b\2\u0f8a\u0f8b\5\u0437\u021c\2\u0f8b\u0f8c\5\u044b")
buf.write("\u0226\2\u0f8c\u0f8d\5\u043d\u021f\2\u0f8d\u02ec\3\2\2")
buf.write("\2\u0f8e\u0f8f\5\u0459\u022d\2\u0f8f\u0f90\5\u045d\u022f")
buf.write("\2\u0f90\u0f91\5\u0437\u021c\2\u0f91\u0f92\5\u045b\u022e")
buf.write("\2\u0f92\u0f93\5\u0465\u0233\2\u0f93\u0f94\5\u0453\u022a")
buf.write("\2\u0f94\u0f95\5\u043d\u021f\2\u0f95\u02ee\3\2\2\2\u0f96")
buf.write("\u0f97\5\u0459\u022d\2\u0f97\u0f98\5\u045d\u022f\2\u0f98")
buf.write("\u0f99\5\u0439\u021d\2\u0f99\u0f9a\5\u0439\u021d\2\u0f9a")
buf.write("\u0f9b\5\u043d\u021f\2\u0f9b\u0f9c\5\u0459\u022d\2\u0f9c")
buf.write("\u0f9d\5\u0459\u022d\2\u0f9d\u02f0\3\2\2\2\u0f9e\u0f9f")
buf.write("\5\u0459\u022d\2\u0f9f\u0fa0\5\u045d\u022f\2\u0fa0\u0fa1")
buf.write("\5\u0459\u022d\2\u0fa1\u0fa2\5\u0453\u022a\2\u0fa2\u0fa3")
buf.write("\5\u043d\u021f\2\u0fa3\u0fa4\5\u044f\u0228\2\u0fa4\u0fa5")
buf.write("\5\u043b\u021e\2\u0fa5\u02f2\3\2\2\2\u0fa6\u0fa7\5\u045b")
buf.write("\u022e\2\u0fa7\u0fa8\5\u0435\u021b\2\u0fa8\u0fa9\5\u0437")
buf.write("\u021c\2\u0fa9\u0faa\5\u044b\u0226\2\u0faa\u0fab\5\u043d")
buf.write("\u021f\2\u0fab\u02f4\3\2\2\2\u0fac\u0fad\5\u045b\u022e")
buf.write("\2\u0fad\u0fae\5\u0443\u0222\2\u0fae\u0faf\5\u043d\u021f")
buf.write("\2\u0faf\u02f6\3\2\2\2\u0fb0\u0fb1\5\u045b\u022e\2\u0fb1")
buf.write("\u0fb2\5\u0443\u0222\2\u0fb2\u0fb3\5\u043d\u021f\2\u0fb3")
buf.write("\u0fb4\5\u044f\u0228\2\u0fb4\u02f8\3\2\2\2\u0fb5\u0fb6")
buf.write("\5\u045b\u022e\2\u0fb6\u0fb7\5\u0445\u0223\2\u0fb7\u0fb8")
buf.write("\5\u044d\u0227\2\u0fb8\u0fb9\5\u043d\u021f\2\u0fb9\u02fa")
buf.write("\3\2\2\2\u0fba\u0fbb\5\u045b\u022e\2\u0fbb\u0fbc\5\u0445")
buf.write("\u0223\2\u0fbc\u0fbd\5\u044d\u0227\2\u0fbd\u0fbe\5\u043d")
buf.write("\u021f\2\u0fbe\u0fbf\5\u0459\u022d\2\u0fbf\u0fc0\5\u045b")
buf.write("\u022e\2\u0fc0\u0fc1\5\u0435\u021b\2\u0fc1\u0fc2\5\u044d")
buf.write("\u0227\2\u0fc2\u0fc3\5\u0453\u022a\2\u0fc3\u02fc\3\2\2")
buf.write("\2\u0fc4\u0fc5\5\u045b\u022e\2\u0fc5\u0fc6\5\u0445\u0223")
buf.write("\2\u0fc6\u0fc7\5\u044d\u0227\2\u0fc7\u0fc8\5\u043d\u021f")
buf.write("\2\u0fc8\u0fc9\5\u0459\u022d\2\u0fc9\u0fca\5\u045b\u022e")
buf.write("\2\u0fca\u0fcb\5\u0435\u021b\2\u0fcb\u0fcc\5\u044d\u0227")
buf.write("\2\u0fcc\u0fcd\5\u0453\u022a\2\u0fcd\u0fce\7a\2\2\u0fce")
buf.write("\u0fcf\5\u044b\u0226\2\u0fcf\u0fd0\5\u045b\u022e\2\u0fd0")
buf.write("\u0fd1\5\u0467\u0234\2\u0fd1\u0fd2\7a\2\2\u0fd2\u0fd3")
buf.write("\5\u045d\u022f\2\u0fd3\u0fd4\5\u044f\u0228\2\u0fd4\u0fd5")
buf.write("\5\u0439\u021d\2\u0fd5\u0fd6\5\u0451\u0229\2\u0fd6\u0fd7")
buf.write("\5\u044f\u0228\2\u0fd7\u0fd8\5\u0459\u022d\2\u0fd8\u0fd9")
buf.write("\5\u045b\u022e\2\u0fd9\u0fda\5\u0457\u022c\2\u0fda\u0fdb")
buf.write("\5\u0435\u021b\2\u0fdb\u0fdc\5\u0445\u0223\2\u0fdc\u0fdd")
buf.write("\5\u044f\u0228\2\u0fdd\u0fde\5\u043d\u021f\2\u0fde\u0fdf")
buf.write("\5\u043b\u021e\2\u0fdf\u02fe\3\2\2\2\u0fe0\u0fe1\5\u045b")
buf.write("\u022e\2\u0fe1\u0fe2\5\u0445\u0223\2\u0fe2\u0fe3\5\u044d")
buf.write("\u0227\2\u0fe3\u0fe4\5\u043d\u021f\2\u0fe4\u0fe5\5\u0459")
buf.write("\u022d\2\u0fe5\u0fe6\5\u045b\u022e\2\u0fe6\u0fe7\5\u0435")
buf.write("\u021b\2\u0fe7\u0fe8\5\u044d\u0227\2\u0fe8\u0fe9\5\u0453")
buf.write("\u022a\2\u0fe9\u0fea\7a\2\2\u0fea\u0feb\5\u045b\u022e")
buf.write("\2\u0feb\u0fec\5\u0467\u0234\2\u0fec\u0fed\7a\2\2\u0fed")
buf.write("\u0fee\5\u045d\u022f\2\u0fee\u0fef\5\u044f\u0228\2\u0fef")
buf.write("\u0ff0\5\u0439\u021d\2\u0ff0\u0ff1\5\u0451\u0229\2\u0ff1")
buf.write("\u0ff2\5\u044f\u0228\2\u0ff2\u0ff3\5\u0459\u022d\2\u0ff3")
buf.write("\u0ff4\5\u045b\u022e\2\u0ff4\u0ff5\5\u0457\u022c\2\u0ff5")
buf.write("\u0ff6\5\u0435\u021b\2\u0ff6\u0ff7\5\u0445\u0223\2\u0ff7")
buf.write("\u0ff8\5\u044f\u0228\2\u0ff8\u0ff9\5\u043d\u021f\2\u0ff9")
buf.write("\u0ffa\5\u043b\u021e\2\u0ffa\u0300\3\2\2\2\u0ffb\u0ffc")
buf.write("\5\u045b\u022e\2\u0ffc\u0ffd\5\u0445\u0223\2\u0ffd\u0ffe")
buf.write("\5\u044d\u0227\2\u0ffe\u0fff\5\u043d\u021f\2\u0fff\u1000")
buf.write("\5\u0459\u022d\2\u1000\u1001\5\u045b\u022e\2\u1001\u1002")
buf.write("\5\u0435\u021b\2\u1002\u1003\5\u044d\u0227\2\u1003\u1004")
buf.write("\5\u0453\u022a\2\u1004\u1005\7a\2\2\u1005\u1006\5\u045d")
buf.write("\u022f\2\u1006\u1007\5\u044f\u0228\2\u1007\u1008\5\u0439")
buf.write("\u021d\2\u1008\u1009\5\u0451\u0229\2\u1009\u100a\5\u044f")
buf.write("\u0228\2\u100a\u100b\5\u0459\u022d\2\u100b\u100c\5\u045b")
buf.write("\u022e\2\u100c\u100d\5\u0457\u022c\2\u100d\u100e\5\u0435")
buf.write("\u021b\2\u100e\u100f\5\u0445\u0223\2\u100f\u1010\5\u044f")
buf.write("\u0228\2\u1010\u1011\5\u043d\u021f\2\u1011\u1012\5\u043b")
buf.write("\u021e\2\u1012\u0302\3\2\2\2\u1013\u1014\5\u045b\u022e")
buf.write("\2\u1014\u1015\5\u0445\u0223\2\u1015\u1016\5\u044d\u0227")
buf.write("\2\u1016\u1017\5\u043d\u021f\2\u1017\u1018\5\u0467\u0234")
buf.write("\2\u1018\u1019\5\u0451\u0229\2\u1019\u101a\5\u044f\u0228")
buf.write("\2\u101a\u101b\5\u043d\u021f\2\u101b\u101c\7a\2\2\u101c")
buf.write("\u101d\5\u0435\u021b\2\u101d\u101e\5\u0437\u021c\2\u101e")
buf.write("\u101f\5\u0437\u021c\2\u101f\u1020\5\u0457\u022c\2\u1020")
buf.write("\u0304\3\2\2\2\u1021\u1022\5\u045b\u022e\2\u1022\u1023")
buf.write("\5\u0445\u0223\2\u1023\u1024\5\u044d\u0227\2\u1024\u1025")
buf.write("\5\u043d\u021f\2\u1025\u1026\5\u0467\u0234\2\u1026\u1027")
buf.write("\5\u0451\u0229\2\u1027\u1028\5\u044f\u0228\2\u1028\u1029")
buf.write("\5\u043d\u021f\2\u1029\u102a\7a\2\2\u102a\u102b\5\u0443")
buf.write("\u0222\2\u102b\u102c\5\u0451\u0229\2\u102c\u102d\5\u045d")
buf.write("\u022f\2\u102d\u102e\5\u0457\u022c\2\u102e\u0306\3\2\2")
buf.write("\2\u102f\u1030\5\u045b\u022e\2\u1030\u1031\5\u0445\u0223")
buf.write("\2\u1031\u1032\5\u044d\u0227\2\u1032\u1033\5\u043d\u021f")
buf.write("\2\u1033\u1034\5\u0467\u0234\2\u1034\u1035\5\u0451\u0229")
buf.write("\2\u1035\u1036\5\u044f\u0228\2\u1036\u1037\5\u043d\u021f")
buf.write("\2\u1037\u1038\7a\2\2\u1038\u1039\5\u044d\u0227\2\u1039")
buf.write("\u103a\5\u0445\u0223\2\u103a\u103b\5\u044f\u0228\2\u103b")
buf.write("\u103c\5\u045d\u022f\2\u103c\u103d\5\u045b\u022e\2\u103d")
buf.write("\u103e\5\u043d\u021f\2\u103e\u0308\3\2\2\2\u103f\u1040")
buf.write("\5\u045b\u022e\2\u1040\u1041\5\u0445\u0223\2\u1041\u1042")
buf.write("\5\u044d\u0227\2\u1042\u1043\5\u043d\u021f\2\u1043\u1044")
buf.write("\5\u0467\u0234\2\u1044\u1045\5\u0451\u0229\2\u1045\u1046")
buf.write("\5\u044f\u0228\2\u1046\u1047\5\u043d\u021f\2\u1047\u1048")
buf.write("\7a\2\2\u1048\u1049\5\u0457\u022c\2\u1049\u104a\5\u043d")
buf.write("\u021f\2\u104a\u104b\5\u0441\u0221\2\u104b\u104c\5\u0445")
buf.write("\u0223\2\u104c\u104d\5\u0451\u0229\2\u104d\u104e\5\u044f")
buf.write("\u0228\2\u104e\u030a\3\2\2\2\u104f\u1050\5\u045b\u022e")
buf.write("\2\u1050\u1051\5\u0451\u0229\2\u1051\u030c\3\2\2\2\u1052")
buf.write("\u1053\5\u045b\u022e\2\u1053\u1054\5\u0457\u022c\2\u1054")
buf.write("\u1055\5\u0435\u021b\2\u1055\u1056\5\u0445\u0223\2\u1056")
buf.write("\u1057\5\u044b\u0226\2\u1057\u1058\5\u0445\u0223\2\u1058")
buf.write("\u1059\5\u044f\u0228\2\u1059\u105a\5\u0441\u0221\2\u105a")
buf.write("\u030e\3\2\2\2\u105b\u105c\5\u045b\u022e\2\u105c\u105d")
buf.write("\5\u0457\u022c\2\u105d\u105e\5\u0435\u021b\2\u105e\u105f")
buf.write("\5\u044f\u0228\2\u105f\u1060\5\u0459\u022d\2\u1060\u1061")
buf.write("\5\u0435\u021b\2\u1061\u1062\5\u0439\u021d\2\u1062\u1063")
buf.write("\5\u045b\u022e\2\u1063\u1064\5\u0445\u0223\2\u1064\u1065")
buf.write("\5\u0451\u0229\2\u1065\u1066\5\u044f\u0228\2\u1066\u0310")
buf.write("\3\2\2\2\u1067\u1068\5\u045b\u022e\2\u1068\u1069\5\u0457")
buf.write("\u022c\2\u1069\u106a\5\u0435\u021b\2\u106a\u106b\5\u044f")
buf.write("\u0228\2\u106b\u106c\5\u0459\u022d\2\u106c\u106d\5\u044b")
buf.write("\u0226\2\u106d\u106e\5\u0435\u021b\2\u106e\u106f\5\u045b")
buf.write("\u022e\2\u106f\u1070\5\u043d\u021f\2\u1070\u0312\3\2\2")
buf.write("\2\u1071\u1072\5\u045b\u022e\2\u1072\u1073\5\u0457\u022c")
buf.write("\2\u1073\u1074\5\u043d\u021f\2\u1074\u1075\5\u0435\u021b")
buf.write("\2\u1075\u1076\5\u045b\u022e\2\u1076\u0314\3\2\2\2\u1077")
buf.write("\u1078\5\u045b\u022e\2\u1078\u1079\5\u0457\u022c\2\u1079")
buf.write("\u107a\5\u0445\u0223\2\u107a\u107b\5\u0441\u0221\2\u107b")
buf.write("\u107c\5\u0441\u0221\2\u107c\u107d\5\u043d\u021f\2\u107d")
buf.write("\u107e\5\u0457\u022c\2\u107e\u0316\3\2\2\2\u107f\u1080")
buf.write("\5\u045b\u022e\2\u1080\u1081\5\u0457\u022c\2\u1081\u1082")
buf.write("\5\u0445\u0223\2\u1082\u1083\5\u044d\u0227\2\u1083\u0318")
buf.write("\3\2\2\2\u1084\u1085\5\u045b\u022e\2\u1085\u1086\5\u0457")
buf.write("\u022c\2\u1086\u1087\5\u045d\u022f\2\u1087\u1088\5\u043d")
buf.write("\u021f\2\u1088\u031a\3\2\2\2\u1089\u108a\5\u045b\u022e")
buf.write("\2\u108a\u108b\5\u0457\u022c\2\u108b\u108c\5\u045d\u022f")
buf.write("\2\u108c\u108d\5\u044f\u0228\2\u108d\u108e\5\u0439\u021d")
buf.write("\2\u108e\u108f\5\u0435\u021b\2\u108f\u1090\5\u045b\u022e")
buf.write("\2\u1090\u1091\5\u043d\u021f\2\u1091\u031c\3\2\2\2\u1092")
buf.write("\u1093\5\u045b\u022e\2\u1093\u1094\5\u0465\u0233\2\u1094")
buf.write("\u1095\5\u0453\u022a\2\u1095\u1096\5\u043d\u021f\2\u1096")
buf.write("\u031e\3\2\2\2\u1097\u1098\5\u045d\u022f\2\u1098\u1099")
buf.write("\5\u044f\u0228\2\u1099\u109a\5\u0437\u021c\2\u109a\u109b")
buf.write("\5\u0451\u0229\2\u109b\u109c\5\u045d\u022f\2\u109c\u109d")
buf.write("\5\u044f\u0228\2\u109d\u109e\5\u043b\u021e\2\u109e\u109f")
buf.write("\5\u043d\u021f\2\u109f\u10a0\5\u043b\u021e\2\u10a0\u0320")
buf.write("\3\2\2\2\u10a1\u10a2\5\u045d\u022f\2\u10a2\u10a3\5\u044f")
buf.write("\u0228\2\u10a3\u10a4\5\u043b\u021e\2\u10a4\u10a5\5\u043d")
buf.write("\u021f\2\u10a5\u10a6\5\u0457\u022c\2\u10a6\u0322\3\2\2")
buf.write("\2\u10a7\u10a8\5\u045d\u022f\2\u10a8\u10a9\5\u044f\u0228")
buf.write("\2\u10a9\u10aa\5\u0445\u0223\2\u10aa\u10ab\5\u0451\u0229")
buf.write("\2\u10ab\u10ac\5\u044f\u0228\2\u10ac\u0324\3\2\2\2\u10ad")
buf.write("\u10ae\5\u045d\u022f\2\u10ae\u10af\5\u044f\u0228\2\u10af")
buf.write("\u10b0\5\u0445\u0223\2\u10b0\u10b1\5\u0455\u022b\2\u10b1")
buf.write("\u10b2\5\u045d\u022f\2\u10b2\u10b3\5\u043d\u021f\2\u10b3")
buf.write("\u0326\3\2\2\2\u10b4\u10b5\5\u045d\u022f\2\u10b5\u10b6")
buf.write("\5\u044f\u0228\2\u10b6\u10b7\5\u044b\u0226\2\u10b7\u10b8")
buf.write("\5\u0445\u0223\2\u10b8\u10b9\5\u044d\u0227\2\u10b9\u10ba")
buf.write("\5\u0445\u0223\2\u10ba\u10bb\5\u045b\u022e\2\u10bb\u10bc")
buf.write("\5\u043d\u021f\2\u10bc\u10bd\5\u043b\u021e\2\u10bd\u0328")
buf.write("\3\2\2\2\u10be\u10bf\5\u045d\u022f\2\u10bf\u10c0\5\u044f")
buf.write("\u0228\2\u10c0\u10c1\5\u0453\u022a\2\u10c1\u10c2\5\u0445")
buf.write("\u0223\2\u10c2\u10c3\5\u045f\u0230\2\u10c3\u10c4\5\u0451")
buf.write("\u0229\2\u10c4\u10c5\5\u045b\u022e\2\u10c5\u032a\3\2\2")
buf.write("\2\u10c6\u10c7\5\u045d\u022f\2\u10c7\u10c8\5\u044f\u0228")
buf.write("\2\u10c8\u10c9\5\u045b\u022e\2\u10c9\u10ca\5\u0445\u0223")
buf.write("\2\u10ca\u10cb\5\u044b\u0226\2\u10cb\u032c\3\2\2\2\u10cc")
buf.write("\u10cd\5\u045d\u022f\2\u10cd\u10ce\5\u0453\u022a\2\u10ce")
buf.write("\u10cf\5\u043b\u021e\2\u10cf\u10d0\5\u0435\u021b\2\u10d0")
buf.write("\u10d1\5\u045b\u022e\2\u10d1\u10d2\5\u043d\u021f\2\u10d2")
buf.write("\u032e\3\2\2\2\u10d3\u10d4\5\u045d\u022f\2\u10d4\u10d5")
buf.write("\5\u0453\u022a\2\u10d5\u10d6\5\u043b\u021e\2\u10d6\u10d7")
buf.write("\5\u0435\u021b\2\u10d7\u10d8\5\u045b\u022e\2\u10d8\u10d9")
buf.write("\5\u043d\u021f\2\u10d9\u10da\5\u043b\u021e\2\u10da\u0330")
buf.write("\3\2\2\2\u10db\u10dc\5\u045d\u022f\2\u10dc\u10dd\5\u0453")
buf.write("\u022a\2\u10dd\u10de\5\u0459\u022d\2\u10de\u10df\5\u043d")
buf.write("\u021f\2\u10df\u10e0\5\u0457\u022c\2\u10e0\u10e1\5\u045b")
buf.write("\u022e\2\u10e1\u0332\3\2\2\2\u10e2\u10e3\5\u045d\u022f")
buf.write("\2\u10e3\u10e4\5\u0457\u022c\2\u10e4\u10e5\5\u0451\u0229")
buf.write("\2\u10e5\u10e6\5\u0461\u0231\2\u10e6\u10e7\5\u0445\u0223")
buf.write("\2\u10e7\u10e8\5\u043b\u021e\2\u10e8\u0334\3\2\2\2\u10e9")
buf.write("\u10ea\5\u045d\u022f\2\u10ea\u10eb\5\u0459\u022d\2\u10eb")
buf.write("\u10ec\5\u043d\u021f\2\u10ec\u0336\3\2\2\2\u10ed\u10ee")
buf.write("\5\u045d\u022f\2\u10ee\u10ef\5\u0459\u022d\2\u10ef\u10f0")
buf.write("\5\u0445\u0223\2\u10f0\u10f1\5\u044f\u0228\2\u10f1\u10f2")
buf.write("\5\u0441\u0221\2\u10f2\u0338\3\2\2\2\u10f3\u10f4\5\u045f")
buf.write("\u0230\2\u10f4\u10f5\5\u0435\u021b\2\u10f5\u10f6\5\u044b")
buf.write("\u0226\2\u10f6\u10f7\5\u0445\u0223\2\u10f7\u10f8\5\u043b")
buf.write("\u021e\2\u10f8\u10f9\5\u0435\u021b\2\u10f9\u10fa\5\u045b")
buf.write("\u022e\2\u10fa\u10fb\5\u043d\u021f\2\u10fb\u033a\3\2\2")
buf.write("\2\u10fc\u10fd\5\u045f\u0230\2\u10fd\u10fe\5\u0435\u021b")
buf.write("\2\u10fe\u10ff\5\u044b\u0226\2\u10ff\u1100\5\u045d\u022f")
buf.write("\2\u1100\u1101\5\u043d\u021f\2\u1101\u033c\3\2\2\2\u1102")
buf.write("\u1103\5\u045f\u0230\2\u1103\u1104\5\u0435\u021b\2\u1104")
buf.write("\u1105\5\u044b\u0226\2\u1105\u1106\5\u045d\u022f\2\u1106")
buf.write("\u1107\5\u043d\u021f\2\u1107\u1108\5\u0459\u022d\2\u1108")
buf.write("\u033e\3\2\2\2\u1109\u110a\5\u045f\u0230\2\u110a\u110b")
buf.write("\5\u0435\u021b\2\u110b\u110c\5\u0457\u022c\2\u110c\u110d")
buf.write("\5\u0439\u021d\2\u110d\u110e\5\u0443\u0222\2\u110e\u110f")
buf.write("\5\u0435\u021b\2\u110f\u1110\5\u0457\u022c\2\u1110\u0340")
buf.write("\3\2\2\2\u1111\u1112\5\u045f\u0230\2\u1112\u1113\5\u0435")
buf.write("\u021b\2\u1113\u1114\5\u0457\u022c\2\u1114\u1115\5\u0439")
buf.write("\u021d\2\u1115\u1116\5\u0443\u0222\2\u1116\u1117\5\u0435")
buf.write("\u021b\2\u1117\u1118\5\u0457\u022c\2\u1118\u1119\7\64")
buf.write("\2\2\u1119\u0342\3\2\2\2\u111a\u111b\5\u045f\u0230\2\u111b")
buf.write("\u111c\5\u0435\u021b\2\u111c\u111d\5\u0457\u022c\2\u111d")
buf.write("\u111e\5\u0445\u0223\2\u111e\u111f\5\u0435\u021b\2\u111f")
buf.write("\u1120\5\u0437\u021c\2\u1120\u1121\5\u044b\u0226\2\u1121")
buf.write("\u1122\5\u043d\u021f\2\u1122\u0344\3\2\2\2\u1123\u1124")
buf.write("\5\u045f\u0230\2\u1124\u1125\5\u0435\u021b\2\u1125\u1126")
buf.write("\5\u0457\u022c\2\u1126\u1127\5\u0457\u022c\2\u1127\u1128")
buf.write("\5\u0435\u021b\2\u1128\u1129\5\u0465\u0233\2\u1129\u0346")
buf.write("\3\2\2\2\u112a\u112b\5\u045f\u0230\2\u112b\u112c\5\u0435")
buf.write("\u021b\2\u112c\u112d\5\u0457\u022c\2\u112d\u112e\5\u0465")
buf.write("\u0233\2\u112e\u112f\5\u0445\u0223\2\u112f\u1130\5\u044f")
buf.write("\u0228\2\u1130\u1131\5\u0441\u0221\2\u1131\u0348\3\2\2")
buf.write("\2\u1132\u1133\5\u045f\u0230\2\u1133\u1134\5\u043d\u021f")
buf.write("\2\u1134\u1135\5\u0457\u022c\2\u1135\u1136\5\u0459\u022d")
buf.write("\2\u1136\u1137\5\u0445\u0223\2\u1137\u1138\5\u0451\u0229")
buf.write("\2\u1138\u1139\5\u044f\u0228\2\u1139\u034a\3\2\2\2\u113a")
buf.write("\u113b\5\u045f\u0230\2\u113b\u113c\5\u043d\u021f\2\u113c")
buf.write("\u113d\5\u0457\u022c\2\u113d\u113e\5\u0459\u022d\2\u113e")
buf.write("\u113f\5\u0445\u0223\2\u113f\u1140\5\u0451\u0229\2\u1140")
buf.write("\u1141\5\u044f\u0228\2\u1141\u1142\5\u0459\u022d\2\u1142")
buf.write("\u034c\3\2\2\2\u1143\u1144\5\u0461\u0231\2\u1144\u1145")
buf.write("\5\u0435\u021b\2\u1145\u1146\5\u0445\u0223\2\u1146\u1147")
buf.write("\5\u045b\u022e\2\u1147\u034e\3\2\2\2\u1148\u1149\5\u0461")
buf.write("\u0231\2\u1149\u114a\5\u0435\u021b\2\u114a\u114b\5\u0457")
buf.write("\u022c\2\u114b\u114c\5\u044f\u0228\2\u114c\u114d\5\u0445")
buf.write("\u0223\2\u114d\u114e\5\u044f\u0228\2\u114e\u114f\5\u0441")
buf.write("\u0221\2\u114f\u0350\3\2\2\2\u1150\u1151\5\u0461\u0231")
buf.write("\2\u1151\u1152\5\u043d\u021f\2\u1152\u1153\5\u044b\u0226")
buf.write("\2\u1153\u1154\5\u044b\u0226\2\u1154\u1155\5\u043f\u0220")
buf.write("\2\u1155\u1156\5\u0451\u0229\2\u1156\u1157\5\u0457\u022c")
buf.write("\2\u1157\u1158\5\u044d\u0227\2\u1158\u1159\5\u043d\u021f")
buf.write("\2\u1159\u115a\5\u043b\u021e\2\u115a\u0352\3\2\2\2\u115b")
buf.write("\u115c\5\u0461\u0231\2\u115c\u115d\5\u0443\u0222\2\u115d")
buf.write("\u115e\5\u043d\u021f\2\u115e\u115f\5\u044f\u0228\2\u115f")
buf.write("\u0354\3\2\2\2\u1160\u1161\5\u0461\u0231\2\u1161\u1162")
buf.write("\5\u0443\u0222\2\u1162\u1163\5\u043d\u021f\2\u1163\u1164")
buf.write("\5\u044f\u0228\2\u1164\u1165\5\u043d\u021f\2\u1165\u1166")
buf.write("\5\u045f\u0230\2\u1166\u1167\5\u043d\u021f\2\u1167\u1168")
buf.write("\5\u0457\u022c\2\u1168\u0356\3\2\2\2\u1169\u116a\5\u0461")
buf.write("\u0231\2\u116a\u116b\5\u0443\u0222\2\u116b\u116c\5\u043d")
buf.write("\u021f\2\u116c\u116d\5\u0457\u022c\2\u116d\u116e\5\u043d")
buf.write("\u021f\2\u116e\u0358\3\2\2\2\u116f\u1170\5\u0461\u0231")
buf.write("\2\u1170\u1171\5\u0443\u0222\2\u1171\u1172\5\u0445\u0223")
buf.write("\2\u1172\u1173\5\u044b\u0226\2\u1173\u1174\5\u043d\u021f")
buf.write("\2\u1174\u035a\3\2\2\2\u1175\u1176\5\u0461\u0231\2\u1176")
buf.write("\u1177\5\u0445\u0223\2\u1177\u1178\5\u045b\u022e\2\u1178")
buf.write("\u1179\5\u0443\u0222\2\u1179\u035c\3\2\2\2\u117a\u117b")
buf.write("\5\u0461\u0231\2\u117b\u117c\5\u0445\u0223\2\u117c\u117d")
buf.write("\5\u045b\u022e\2\u117d\u117e\5\u0443\u0222\2\u117e\u117f")
buf.write("\5\u0445\u0223\2\u117f\u1180\5\u044f\u0228\2\u1180\u035e")
buf.write("\3\2\2\2\u1181\u1182\5\u0461\u0231\2\u1182\u1183\5\u0451")
buf.write("\u0229\2\u1183\u1184\5\u0457\u022c\2\u1184\u1185\5\u0449")
buf.write("\u0225\2\u1185\u0360\3\2\2\2\u1186\u1187\5\u0461\u0231")
buf.write("\2\u1187\u1188\5\u0457\u022c\2\u1188\u1189\5\u0445\u0223")
buf.write("\2\u1189\u118a\5\u045b\u022e\2\u118a\u118b\5\u043d\u021f")
buf.write("\2\u118b\u0362\3\2\2\2\u118c\u118d\5\u0463\u0232\2\u118d")
buf.write("\u118e\5\u044d\u0227\2\u118e\u118f\5\u044b\u0226\2\u118f")
buf.write("\u0364\3\2\2\2\u1190\u1191\5\u0463\u0232\2\u1191\u1192")
buf.write("\5\u044d\u0227\2\u1192\u1193\5\u044b\u0226\2\u1193\u1194")
buf.write("\5\u0435\u021b\2\u1194\u1195\5\u0441\u0221\2\u1195\u1196")
buf.write("\5\u0441\u0221\2\u1196\u0366\3\2\2\2\u1197\u1198\5\u0463")
buf.write("\u0232\2\u1198\u1199\5\u044d\u0227\2\u1199\u119a\5\u044b")
buf.write("\u0226\2\u119a\u119b\5\u0435\u021b\2\u119b\u119c\5\u045b")
buf.write("\u022e\2\u119c\u119d\5\u045b\u022e\2\u119d\u119e\5\u0457")
buf.write("\u022c\2\u119e\u119f\5\u0445\u0223\2\u119f\u11a0\5\u0437")
buf.write("\u021c\2\u11a0\u11a1\5\u045d\u022f\2\u11a1\u11a2\5\u045b")
buf.write("\u022e\2\u11a2\u11a3\5\u043d\u021f\2\u11a3\u11a4\5\u0459")
buf.write("\u022d\2\u11a4\u0368\3\2\2\2\u11a5\u11a6\5\u0463\u0232")
buf.write("\2\u11a6\u11a7\5\u044d\u0227\2\u11a7\u11a8\5\u044b\u0226")
buf.write("\2\u11a8\u11a9\5\u0439\u021d\2\u11a9\u11aa\5\u0435\u021b")
buf.write("\2\u11aa\u11ab\5\u0459\u022d\2\u11ab\u11ac\5\u045b\u022e")
buf.write("\2\u11ac\u036a\3\2\2\2\u11ad\u11ae\5\u0463\u0232\2\u11ae")
buf.write("\u11af\5\u044d\u0227\2\u11af\u11b0\5\u044b\u0226\2\u11b0")
buf.write("\u11b1\5\u0439\u021d\2\u11b1\u11b2\5\u0451\u0229\2\u11b2")
buf.write("\u11b3\5\u044b\u0226\2\u11b3\u11b4\5\u0435\u021b\2\u11b4")
buf.write("\u11b5\5\u045b\u022e\2\u11b5\u11b6\5\u045b\u022e\2\u11b6")
buf.write("\u11b7\5\u045f\u0230\2\u11b7\u11b8\5\u0435\u021b\2\u11b8")
buf.write("\u11b9\5\u044b\u0226\2\u11b9\u036c\3\2\2\2\u11ba\u11bb")
buf.write("\5\u0463\u0232\2\u11bb\u11bc\5\u044d\u0227\2\u11bc\u11bd")
buf.write("\5\u044b\u0226\2\u11bd\u11be\5\u043d\u021f\2\u11be\u11bf")
buf.write("\5\u044b\u0226\2\u11bf\u11c0\5\u043d\u021f\2\u11c0\u11c1")
buf.write("\5\u044d\u0227\2\u11c1\u11c2\5\u043d\u021f\2\u11c2\u11c3")
buf.write("\5\u044f\u0228\2\u11c3\u11c4\5\u045b\u022e\2\u11c4\u036e")
buf.write("\3\2\2\2\u11c5\u11c6\5\u0463\u0232\2\u11c6\u11c7\5\u044d")
buf.write("\u0227\2\u11c7\u11c8\5\u044b\u0226\2\u11c8\u11c9\5\u043d")
buf.write("\u021f\2\u11c9\u11ca\5\u0463\u0232\2\u11ca\u11cb\5\u0445")
buf.write("\u0223\2\u11cb\u11cc\5\u0459\u022d\2\u11cc\u11cd\5\u045b")
buf.write("\u022e\2\u11cd\u11ce\5\u0459\u022d\2\u11ce\u0370\3\2\2")
buf.write("\2\u11cf\u11d0\5\u0463\u0232\2\u11d0\u11d1\5\u044d\u0227")
buf.write("\2\u11d1\u11d2\5\u044b\u0226\2\u11d2\u11d3\5\u043f\u0220")
buf.write("\2\u11d3\u11d4\5\u0451\u0229\2\u11d4\u11d5\5\u0457\u022c")
buf.write("\2\u11d5\u11d6\5\u043d\u021f\2\u11d6\u11d7\5\u0459\u022d")
buf.write("\2\u11d7\u11d8\5\u045b\u022e\2\u11d8\u0372\3\2\2\2\u11d9")
buf.write("\u11da\5\u0463\u0232\2\u11da\u11db\5\u044d\u0227\2\u11db")
buf.write("\u11dc\5\u044b\u0226\2\u11dc\u11dd\5\u044f\u0228\2\u11dd")
buf.write("\u11de\5\u0435\u021b\2\u11de\u11df\5\u044d\u0227\2\u11df")
buf.write("\u11e0\5\u043d\u021f\2\u11e0\u11e1\5\u0459\u022d\2\u11e1")
buf.write("\u11e2\5\u0453\u022a\2\u11e2\u11e3\5\u0435\u021b\2\u11e3")
buf.write("\u11e4\5\u0439\u021d\2\u11e4\u11e5\5\u043d\u021f\2\u11e5")
buf.write("\u11e6\5\u0459\u022d\2\u11e6\u0374\3\2\2\2\u11e7\u11e8")
buf.write("\5\u0463\u0232\2\u11e8\u11e9\5\u044d\u0227\2\u11e9\u11ea")
buf.write("\5\u044b\u0226\2\u11ea\u11eb\5\u0453\u022a\2\u11eb\u11ec")
buf.write("\5\u0435\u021b\2\u11ec\u11ed\5\u0457\u022c\2\u11ed\u11ee")
buf.write("\5\u0459\u022d\2\u11ee\u11ef\5\u043d\u021f\2\u11ef\u0376")
buf.write("\3\2\2\2\u11f0\u11f1\5\u0463\u0232\2\u11f1\u11f2\5\u044d")
buf.write("\u0227\2\u11f2\u11f3\5\u044b\u0226\2\u11f3\u11f4\5\u0453")
buf.write("\u022a\2\u11f4\u11f5\5\u0445\u0223\2\u11f5\u0378\3\2\2")
buf.write("\2\u11f6\u11f7\5\u0463\u0232\2\u11f7\u11f8\5\u044d\u0227")
buf.write("\2\u11f8\u11f9\5\u044b\u0226\2\u11f9\u11fa\5\u0455\u022b")
buf.write("\2\u11fa\u11fb\5\u045d\u022f\2\u11fb\u11fc\5\u043d\u021f")
buf.write("\2\u11fc\u11fd\5\u0457\u022c\2\u11fd\u11fe\5\u0465\u0233")
buf.write("\2\u11fe\u037a\3\2\2\2\u11ff\u1200\5\u0463\u0232\2\u1200")
buf.write("\u1201\5\u044d\u0227\2\u1201\u1202\5\u044b\u0226\2\u1202")
buf.write("\u1203\5\u0457\u022c\2\u1203\u1204\5\u0451\u0229\2\u1204")
buf.write("\u1205\5\u0451\u0229\2\u1205\u1206\5\u045b\u022e\2\u1206")
buf.write("\u037c\3\2\2\2\u1207\u1208\5\u0463\u0232\2\u1208\u1209")
buf.write("\5\u044d\u0227\2\u1209\u120a\5\u044b\u0226\2\u120a\u120b")
buf.write("\5\u0459\u022d\2\u120b\u120c\5\u043d\u021f\2\u120c\u120d")
buf.write("\5\u0457\u022c\2\u120d\u120e\5\u0445\u0223\2\u120e\u120f")
buf.write("\5\u0435\u021b\2\u120f\u1210\5\u044b\u0226\2\u1210\u1211")
buf.write("\5\u0445\u0223\2\u1211\u1212\5\u0467\u0234\2\u1212\u1213")
buf.write("\5\u043d\u021f\2\u1213\u037e\3\2\2\2\u1214\u1215\5\u0463")
buf.write("\u0232\2\u1215\u1216\5\u044d\u0227\2\u1216\u1217\5\u044b")
buf.write("\u0226\2\u1217\u1218\5\u045b\u022e\2\u1218\u1219\5\u0435")
buf.write("\u021b\2\u1219\u121a\5\u0437\u021c\2\u121a\u121b\5\u044b")
buf.write("\u0226\2\u121b\u121c\5\u043d\u021f\2\u121c\u0380\3\2\2")
buf.write("\2\u121d\u121e\5\u0465\u0233\2\u121e\u121f\5\u043d\u021f")
buf.write("\2\u121f\u1220\5\u0435\u021b\2\u1220\u1221\5\u0457\u022c")
buf.write("\2\u1221\u0382\3\2\2\2\u1222\u1223\5\u0465\u0233\2\u1223")
buf.write("\u1224\5\u043d\u021f\2\u1224\u1225\5\u0459\u022d\2\u1225")
buf.write("\u0384\3\2\2\2\u1226\u1227\5\u0465\u0233\2\u1227\u1228")
buf.write("\5\u044d\u0227\2\u1228\u1229\5\u0445\u0223\2\u1229\u122a")
buf.write("\5\u044f\u0228\2\u122a\u122b\5\u045b\u022e\2\u122b\u122c")
buf.write("\5\u043d\u021f\2\u122c\u122d\5\u0457\u022c\2\u122d\u122e")
buf.write("\5\u045f\u0230\2\u122e\u122f\5\u0435\u021b\2\u122f\u1230")
buf.write("\5\u044b\u0226\2\u1230\u1231\7a\2\2\u1231\u1232\5\u045d")
buf.write("\u022f\2\u1232\u1233\5\u044f\u0228\2\u1233\u1234\5\u0439")
buf.write("\u021d\2\u1234\u1235\5\u0451\u0229\2\u1235\u1236\5\u044f")
buf.write("\u0228\2\u1236\u1237\5\u0459\u022d\2\u1237\u1238\5\u045b")
buf.write("\u022e\2\u1238\u1239\5\u0457\u022c\2\u1239\u123a\5\u0435")
buf.write("\u021b\2\u123a\u123b\5\u0445\u0223\2\u123b\u123c\5\u044f")
buf.write("\u0228\2\u123c\u123d\5\u043d\u021f\2\u123d\u123e\5\u043b")
buf.write("\u021e\2\u123e\u0386\3\2\2\2\u123f\u1240\5\u0467\u0234")
buf.write("\2\u1240\u1241\5\u0451\u0229\2\u1241\u1242\5\u044f\u0228")
buf.write("\2\u1242\u1243\5\u043d\u021f\2\u1243\u0388\3\2\2\2\u1244")
buf.write("\u1245\5\u0453\u022a\2\u1245\u1246\5\u0457\u022c\2\u1246")
buf.write("\u1247\5\u043d\u021f\2\u1247\u1248\5\u043b\u021e\2\u1248")
buf.write("\u1249\5\u0445\u0223\2\u1249\u124a\5\u0439\u021d\2\u124a")
buf.write("\u124b\5\u045b\u022e\2\u124b\u124c\5\u0445\u0223\2\u124c")
buf.write("\u124d\5\u0451\u0229\2\u124d\u124e\5\u044f\u0228\2\u124e")
buf.write("\u038a\3\2\2\2\u124f\u1250\5\u0453\u022a\2\u1250\u1251")
buf.write("\5\u0457\u022c\2\u1251\u1252\5\u043d\u021f\2\u1252\u1253")
buf.write("\5\u043b\u021e\2\u1253\u1254\5\u0445\u0223\2\u1254\u1255")
buf.write("\5\u0439\u021d\2\u1255\u1256\5\u045b\u022e\2\u1256\u1257")
buf.write("\5\u0445\u0223\2\u1257\u1258\5\u0451\u0229\2\u1258\u1259")
buf.write("\5\u044f\u0228\2\u1259\u125a\7a\2\2\u125a\u125b\5\u0437")
buf.write("\u021c\2\u125b\u125c\5\u0451\u0229\2\u125c\u125d\5\u045d")
buf.write("\u022f\2\u125d\u125e\5\u044f\u0228\2\u125e\u125f\5\u043b")
buf.write("\u021e\2\u125f\u1260\5\u0459\u022d\2\u1260\u038c\3\2\2")
buf.write("\2\u1261\u1262\5\u0453\u022a\2\u1262\u1263\5\u0457\u022c")
buf.write("\2\u1263\u1264\5\u043d\u021f\2\u1264\u1265\5\u043b\u021e")
buf.write("\2\u1265\u1266\5\u0445\u0223\2\u1266\u1267\5\u0439\u021d")
buf.write("\2\u1267\u1268\5\u045b\u022e\2\u1268\u1269\5\u0445\u0223")
buf.write("\2\u1269\u126a\5\u0451\u0229\2\u126a\u126b\5\u044f\u0228")
buf.write("\2\u126b\u126c\7a\2\2\u126c\u126d\5\u0439\u021d\2\u126d")
buf.write("\u126e\5\u0451\u0229\2\u126e\u126f\5\u0459\u022d\2\u126f")
buf.write("\u1270\5\u045b\u022e\2\u1270\u038e\3\2\2\2\u1271\u1272")
buf.write("\5\u0453\u022a\2\u1272\u1273\5\u0457\u022c\2\u1273\u1274")
buf.write("\5\u043d\u021f\2\u1274\u1275\5\u043b\u021e\2\u1275\u1276")
buf.write("\5\u0445\u0223\2\u1276\u1277\5\u0439\u021d\2\u1277\u1278")
buf.write("\5\u045b\u022e\2\u1278\u1279\5\u0445\u0223\2\u1279\u127a")
buf.write("\5\u0451\u0229\2\u127a\u127b\5\u044f\u0228\2\u127b\u127c")
buf.write("\7a\2\2\u127c\u127d\5\u043b\u021e\2\u127d\u127e\5\u043d")
buf.write("\u021f\2\u127e\u127f\5\u045b\u022e\2\u127f\u1280\5\u0435")
buf.write("\u021b\2\u1280\u1281\5\u0445\u0223\2\u1281\u1282\5\u044b")
buf.write("\u0226\2\u1282\u1283\5\u0459\u022d\2\u1283\u0390\3\2\2")
buf.write("\2\u1284\u1285\5\u0453\u022a\2\u1285\u1286\5\u0457\u022c")
buf.write("\2\u1286\u1287\5\u043d\u021f\2\u1287\u1288\5\u043b\u021e")
buf.write("\2\u1288\u1289\5\u0445\u0223\2\u1289\u128a\5\u0439\u021d")
buf.write("\2\u128a\u128b\5\u045b\u022e\2\u128b\u128c\5\u0445\u0223")
buf.write("\2\u128c\u128d\5\u0451\u0229\2\u128d\u128e\5\u044f\u0228")
buf.write("\2\u128e\u128f\7a\2\2\u128f\u1290\5\u0453\u022a\2\u1290")
buf.write("\u1291\5\u0457\u022c\2\u1291\u1292\5\u0451\u0229\2\u1292")
buf.write("\u1293\5\u0437\u021c\2\u1293\u1294\5\u0435\u021b\2\u1294")
buf.write("\u1295\5\u0437\u021c\2\u1295\u1296\5\u0445\u0223\2\u1296")
buf.write("\u1297\5\u044b\u0226\2\u1297\u1298\5\u0445\u0223\2\u1298")
buf.write("\u1299\5\u045b\u022e\2\u1299\u129a\5\u0465\u0233\2\u129a")
buf.write("\u0392\3\2\2\2\u129b\u129c\5\u0453\u022a\2\u129c\u129d")
buf.write("\5\u0457\u022c\2\u129d\u129e\5\u043d\u021f\2\u129e\u129f")
buf.write("\5\u043b\u021e\2\u129f\u12a0\5\u0445\u0223\2\u12a0\u12a1")
buf.write("\5\u0439\u021d\2\u12a1\u12a2\5\u045b\u022e\2\u12a2\u12a3")
buf.write("\5\u0445\u0223\2\u12a3\u12a4\5\u0451\u0229\2\u12a4\u12a5")
buf.write("\5\u044f\u0228\2\u12a5\u12a6\7a\2\2\u12a6\u12a7\5\u0459")
buf.write("\u022d\2\u12a7\u12a8\5\u043d\u021f\2\u12a8\u12a9\5\u045b")
buf.write("\u022e\2\u12a9\u0394\3\2\2\2\u12aa\u12ab\5\u0439\u021d")
buf.write("\2\u12ab\u12ac\5\u045d\u022f\2\u12ac\u12ad\5\u044d\u0227")
buf.write("\2\u12ad\u12ae\5\u043d\u021f\2\u12ae\u12af\7a\2\2\u12af")
buf.write("\u12b0\5\u043b\u021e\2\u12b0\u12b1\5\u0445\u0223\2\u12b1")
buf.write("\u12b2\5\u0459\u022d\2\u12b2\u12b3\5\u045b\u022e\2\u12b3")
buf.write("\u0396\3\2\2\2\u12b4\u12b5\5\u043b\u021e\2\u12b5\u12b6")
buf.write("\5\u043d\u021f\2\u12b6\u12b7\5\u044f\u0228\2\u12b7\u12b8")
buf.write("\5\u0459\u022d\2\u12b8\u12b9\5\u043d\u021f\2\u12b9\u12ba")
buf.write("\7a\2\2\u12ba\u12bb\5\u0457\u022c\2\u12bb\u12bc\5\u0435")
buf.write("\u021b\2\u12bc\u12bd\5\u044f\u0228\2\u12bd\u12be\5\u0449")
buf.write("\u0225\2\u12be\u0398\3\2\2\2\u12bf\u12c0\5\u044b\u0226")
buf.write("\2\u12c0\u12c1\5\u0445\u0223\2\u12c1\u12c2\5\u0459\u022d")
buf.write("\2\u12c2\u12c3\5\u045b\u022e\2\u12c3\u12c4\5\u0435\u021b")
buf.write("\2\u12c4\u12c5\5\u0441\u0221\2\u12c5\u12c6\5\u0441\u0221")
buf.write("\2\u12c6\u039a\3\2\2\2\u12c7\u12c8\5\u0453\u022a\2\u12c8")
buf.write("\u12c9\5\u043d\u021f\2\u12c9\u12ca\5\u0457\u022c\2\u12ca")
buf.write("\u12cb\5\u0439\u021d\2\u12cb\u12cc\5\u043d\u021f\2\u12cc")
buf.write("\u12cd\5\u044f\u0228\2\u12cd\u12ce\5\u045b\u022e\2\u12ce")
buf.write("\u12cf\7a\2\2\u12cf\u12d0\5\u0457\u022c\2\u12d0\u12d1")
buf.write("\5\u0435\u021b\2\u12d1\u12d2\5\u044f\u0228\2\u12d2\u12d3")
buf.write("\5\u0449\u0225\2\u12d3\u039c\3\2\2\2\u12d4\u12d5\5\u0453")
buf.write("\u022a\2\u12d5\u12d6\5\u043d\u021f\2\u12d6\u12d7\5\u0457")
buf.write("\u022c\2\u12d7\u12d8\5\u0439\u021d\2\u12d8\u12d9\5\u043d")
buf.write("\u021f\2\u12d9\u12da\5\u044f\u0228\2\u12da\u12db\5\u045b")
buf.write("\u022e\2\u12db\u12dc\5\u0445\u0223\2\u12dc\u12dd\5\u044b")
buf.write("\u0226\2\u12dd\u12de\5\u043d\u021f\2\u12de\u12df\7a\2")
buf.write("\2\u12df\u12e0\5\u0439\u021d\2\u12e0\u12e1\5\u0451\u0229")
buf.write("\2\u12e1\u12e2\5\u044f\u0228\2\u12e2\u12e3\5\u045b\u022e")
buf.write("\2\u12e3\u039e\3\2\2\2\u12e4\u12e5\5\u0453\u022a\2\u12e5")
buf.write("\u12e6\5\u043d\u021f\2\u12e6\u12e7\5\u0457\u022c\2\u12e7")
buf.write("\u12e8\5\u0439\u021d\2\u12e8\u12e9\5\u043d\u021f\2\u12e9")
buf.write("\u12ea\5\u044f\u0228\2\u12ea\u12eb\5\u045b\u022e\2\u12eb")
buf.write("\u12ec\5\u0445\u0223\2\u12ec\u12ed\5\u044b\u0226\2\u12ed")
buf.write("\u12ee\5\u043d\u021f\2\u12ee\u12ef\7a\2\2\u12ef\u12f0")
buf.write("\5\u043b\u021e\2\u12f0\u12f1\5\u0445\u0223\2\u12f1\u12f2")
buf.write("\5\u0459\u022d\2\u12f2\u12f3\5\u0439\u021d\2\u12f3\u03a0")
buf.write("\3\2\2\2\u12f4\u12f5\5\u0457\u022c\2\u12f5\u12f6\5\u0435")
buf.write("\u021b\2\u12f6\u12f7\5\u044f\u0228\2\u12f7\u12f8\5\u0449")
buf.write("\u0225\2\u12f8\u03a2\3\2\2\2\u12f9\u12fa\5\u0435\u021b")
buf.write("\2\u12fa\u12fb\5\u045f\u0230\2\u12fb\u12fc\5\u0441\u0221")
buf.write("\2\u12fc\u03a4\3\2\2\2\u12fd\u12fe\5\u0439\u021d\2\u12fe")
buf.write("\u12ff\5\u0451\u0229\2\u12ff\u1300\5\u0457\u022c\2\u1300")
buf.write("\u1301\5\u0457\u022c\2\u1301\u03a6\3\2\2\2\u1302\u1303")
buf.write("\5\u044b\u0226\2\u1303\u1304\5\u0435\u021b\2\u1304\u1305")
buf.write("\5\u0441\u0221\2\u1305\u03a8\3\2\2\2\u1306\u1307\5\u044b")
buf.write("\u0226\2\u1307\u1308\5\u043d\u021f\2\u1308\u1309\5\u0435")
buf.write("\u021b\2\u1309\u130a\5\u043b\u021e\2\u130a\u03aa\3\2\2")
buf.write("\2\u130b\u130c\5\u044d\u0227\2\u130c\u130d\5\u0435\u021b")
buf.write("\2\u130d\u130e\5\u0463\u0232\2\u130e\u03ac\3\2\2\2\u130f")
buf.write("\u1310\5\u044d\u0227\2\u1310\u1311\5\u043d\u021f\2\u1311")
buf.write("\u1312\5\u043b\u021e\2\u1312\u1313\5\u0445\u0223\2\u1313")
buf.write("\u1314\5\u0435\u021b\2\u1314\u1315\5\u044f\u0228\2\u1315")
buf.write("\u03ae\3\2\2\2\u1316\u1317\5\u044d\u0227\2\u1317\u1318")
buf.write("\5\u0445\u0223\2\u1318\u1319\5\u044f\u0228\2\u1319\u03b0")
buf.write("\3\2\2\2\u131a\u131b\5\u044f\u0228\2\u131b\u131c\5\u045b")
buf.write("\u022e\2\u131c\u131d\5\u0445\u0223\2\u131d\u131e\5\u044b")
buf.write("\u0226\2\u131e\u131f\5\u043d\u021f\2\u131f\u03b2\3\2\2")
buf.write("\2\u1320\u1321\5\u0457\u022c\2\u1321\u1322\5\u0435\u021b")
buf.write("\2\u1322\u1323\5\u045b\u022e\2\u1323\u1324\5\u0445\u0223")
buf.write("\2\u1324\u1325\5\u0451\u0229\2\u1325\u1326\7a\2\2\u1326")
buf.write("\u1327\5\u045b\u022e\2\u1327\u1328\5\u0451\u0229\2\u1328")
buf.write("\u1329\7a\2\2\u1329\u132a\5\u0457\u022c\2\u132a\u132b")
buf.write("\5\u043d\u021f\2\u132b\u132c\5\u0453\u022a\2\u132c\u132d")
buf.write("\5\u0451\u0229\2\u132d\u132e\5\u0457\u022c\2\u132e\u132f")
buf.write("\5\u045b\u022e\2\u132f\u03b4\3\2\2\2\u1330\u1331\5\u0457")
buf.write("\u022c\2\u1331\u1332\5\u0451\u0229\2\u1332\u1333\5\u0461")
buf.write("\u0231\2\u1333\u1334\7a\2\2\u1334\u1335\5\u044f\u0228")
buf.write("\2\u1335\u1336\5\u045d\u022f\2\u1336\u1337\5\u044d\u0227")
buf.write("\2\u1337\u1338\5\u0437\u021c\2\u1338\u1339\5\u043d\u021f")
buf.write("\2\u1339\u133a\5\u0457\u022c\2\u133a\u03b6\3\2\2\2\u133b")
buf.write("\u133c\5\u0459\u022d\2\u133c\u133d\5\u045d\u022f\2\u133d")
buf.write("\u133e\5\u044d\u0227\2\u133e\u03b8\3\2\2\2\u133f\u1340")
buf.write("\5\u045f\u0230\2\u1340\u1341\5\u0435\u021b\2\u1341\u1342")
buf.write("\5\u0457\u022c\2\u1342\u1343\5\u0445\u0223\2\u1343\u1344")
buf.write("\5\u0435\u021b\2\u1344\u1345\5\u044f\u0228\2\u1345\u1346")
buf.write("\5\u0439\u021d\2\u1346\u1347\5\u043d\u021f\2\u1347\u03ba")
buf.write("\3\2\2\2\u1348\u1349\5\u0457\u022c\2\u1349\u134a\5\u043d")
buf.write("\u021f\2\u134a\u134b\5\u0441\u0221\2\u134b\u134c\5\u0457")
buf.write("\u022c\2\u134c\u134d\7a\2\2\u134d\u03bc\3\2\2\2\u134e")
buf.write("\u134f\5\u0459\u022d\2\u134f\u1350\5\u045b\u022e\2\u1350")
buf.write("\u1351\5\u043b\u021e\2\u1351\u1352\5\u043b\u021e\2\u1352")
buf.write("\u1353\5\u043d\u021f\2\u1353\u1354\5\u045f\u0230\2\u1354")
buf.write("\u03be\3\2\2\2\u1355\u1356\5\u045f\u0230\2\u1356\u1357")
buf.write("\5\u0435\u021b\2\u1357\u1358\5\u0457\u022c\2\u1358\u1359")
buf.write("\7a\2\2\u1359\u03c0\3\2\2\2\u135a\u135b\5\u0439\u021d")
buf.write("\2\u135b\u135c\5\u0451\u0229\2\u135c\u135d\5\u045f\u0230")
buf.write("\2\u135d\u135e\5\u0435\u021b\2\u135e\u135f\5\u0457\u022c")
buf.write("\2\u135f\u1360\7a\2\2\u1360\u03c2\3\2\2\2\u1361\u1362")
buf.write("\5\u044f\u0228\2\u1362\u1369\7)\2\2\u1363\u1368\n\2\2")
buf.write("\2\u1364\u1365\7)\2\2\u1365\u1368\7)\2\2\u1366\u1368\5")
buf.write("\u042d\u0217\2\u1367\u1363\3\2\2\2\u1367\u1364\3\2\2\2")
buf.write("\u1367\u1366\3\2\2\2\u1368\u136b\3\2\2\2\u1369\u1367\3")
buf.write("\2\2\2\u1369\u136a\3\2\2\2\u136a\u136c\3\2\2\2\u136b\u1369")
buf.write("\3\2\2\2\u136c\u136d\7)\2\2\u136d\u03c4\3\2\2\2\u136e")
buf.write("\u1377\5\u0437\u021c\2\u136f\u1373\7)\2\2\u1370\u1372")
buf.write("\4\62\63\2\u1371\u1370\3\2\2\2\u1372\u1375\3\2\2\2\u1373")
buf.write("\u1371\3\2\2\2\u1373\u1374\3\2\2\2\u1374\u1376\3\2\2\2")
buf.write("\u1375\u1373\3\2\2\2\u1376\u1378\7)\2\2\u1377\u136f\3")
buf.write("\2\2\2\u1378\u1379\3\2\2\2\u1379\u1377\3\2\2\2\u1379\u137a")
buf.write("\3\2\2\2\u137a\u03c6\3\2\2\2\u137b\u1384\5\u0463\u0232")
buf.write("\2\u137c\u1380\7)\2\2\u137d\u137f\t\3\2\2\u137e\u137d")
buf.write("\3\2\2\2\u137f\u1382\3\2\2\2\u1380\u137e\3\2\2\2\u1380")
buf.write("\u1381\3\2\2\2\u1381\u1383\3\2\2\2\u1382\u1380\3\2\2\2")
buf.write("\u1383\u1385\7)\2\2\u1384\u137c\3\2\2\2\u1385\u1386\3")
buf.write("\2\2\2\u1386\u1384\3\2\2\2\u1386\u1387\3\2\2\2\u1387\u03c8")
buf.write("\3\2\2\2\u1388\u1389\7\60\2\2\u1389\u138a\7\60\2\2\u138a")
buf.write("\u03ca\3\2\2\2\u138b\u138c\7\60\2\2\u138c\u03cc\3\2\2")
buf.write("\2\u138d\u138e\5\u0423\u0212\2\u138e\u03ce\3\2\2\2\u138f")
buf.write("\u1398\5\u0425\u0213\2\u1390\u1392\t\4\2\2\u1391\u1393")
buf.write("\t\5\2\2\u1392\u1391\3\2\2\2\u1392\u1393\3\2\2\2\u1393")
buf.write("\u1396\3\2\2\2\u1394\u1397\5\u0425\u0213\2\u1395\u1397")
buf.write("\5\u0423\u0212\2\u1396\u1394\3\2\2\2\u1396\u1395\3\2\2")
buf.write("\2\u1397\u1399\3\2\2\2\u1398\u1390\3\2\2\2\u1398\u1399")
buf.write("\3\2\2\2\u1399\u139c\3\2\2\2\u139a\u139d\5\u043b\u021e")
buf.write("\2\u139b\u139d\5\u043f\u0220\2\u139c\u139a\3\2\2\2\u139c")
buf.write("\u139b\3\2\2\2\u139c\u139d\3\2\2\2\u139d\u03d0\3\2\2\2")
buf.write("\u139e\u13a5\7)\2\2\u139f\u13a4\n\2\2\2\u13a0\u13a1\7")
buf.write(")\2\2\u13a1\u13a4\7)\2\2\u13a2\u13a4\5\u042d\u0217\2\u13a3")
buf.write("\u139f\3\2\2\2\u13a3\u13a0\3\2\2\2\u13a3\u13a2\3\2\2\2")
buf.write("\u13a4\u13a7\3\2\2\2\u13a5\u13a3\3\2\2\2\u13a5\u13a6\3")
buf.write("\2\2\2\u13a6\u13a8\3\2\2\2\u13a7\u13a5\3\2\2\2\u13a8\u13a9")
buf.write("\7)\2\2\u13a9\u03d2\3\2\2\2\u13aa\u13af\5\u0455\u022b")
buf.write("\2\u13ab\u13b0\5\u03d7\u01ec\2\u13ac\u13b0\5\u03d9\u01ed")
buf.write("\2\u13ad\u13b0\5\u03db\u01ee\2\u13ae\u13b0\5\u03dd\u01ef")
buf.write("\2\u13af\u13ab\3\2\2\2\u13af\u13ac\3\2\2\2\u13af\u13ad")
buf.write("\3\2\2\2\u13af\u13ae\3\2\2\2\u13b0\u13b1\3\2\2\2\u13b1")
buf.write("\u13b2\b\u01ea\2\2\u13b2\u03d4\3\2\2\2\u13b3\u13b4\7)")
buf.write("\2\2\u13b4\u03d6\3\2\2\2\u13b5\u13b6\5\u03d5\u01eb\2\u13b6")
buf.write("\u13ba\7>\2\2\u13b7\u13b9\13\2\2\2\u13b8\u13b7\3\2\2\2")
buf.write("\u13b9\u13bc\3\2\2\2\u13ba\u13bb\3\2\2\2\u13ba\u13b8\3")
buf.write("\2\2\2\u13bb\u13bd\3\2\2\2\u13bc\u13ba\3\2\2\2\u13bd\u13be")
buf.write("\7@\2\2\u13be\u13bf\5\u03d5\u01eb\2\u13bf\u03d8\3\2\2")
buf.write("\2\u13c0\u13c1\5\u03d5\u01eb\2\u13c1\u13c5\7}\2\2\u13c2")
buf.write("\u13c4\13\2\2\2\u13c3\u13c2\3\2\2\2\u13c4\u13c7\3\2\2")
buf.write("\2\u13c5\u13c6\3\2\2\2\u13c5\u13c3\3\2\2\2\u13c6\u13c8")
buf.write("\3\2\2\2\u13c7\u13c5\3\2\2\2\u13c8\u13c9\7\177\2\2\u13c9")
buf.write("\u13ca\5\u03d5\u01eb\2\u13ca\u03da\3\2\2\2\u13cb\u13cc")
buf.write("\5\u03d5\u01eb\2\u13cc\u13d0\7]\2\2\u13cd\u13cf\13\2\2")
buf.write("\2\u13ce\u13cd\3\2\2\2\u13cf\u13d2\3\2\2\2\u13d0\u13d1")
buf.write("\3\2\2\2\u13d0\u13ce\3\2\2\2\u13d1\u13d3\3\2\2\2\u13d2")
buf.write("\u13d0\3\2\2\2\u13d3\u13d4\7_\2\2\u13d4\u13d5\5\u03d5")
buf.write("\u01eb\2\u13d5\u03dc\3\2\2\2\u13d6\u13d7\5\u03d5\u01eb")
buf.write("\2\u13d7\u13db\7*\2\2\u13d8\u13da\13\2\2\2\u13d9\u13d8")
buf.write("\3\2\2\2\u13da\u13dd\3\2\2\2\u13db\u13dc\3\2\2\2\u13db")
buf.write("\u13d9\3\2\2\2\u13dc\u13de\3\2\2\2\u13dd\u13db\3\2\2\2")
buf.write("\u13de\u13df\7+\2\2\u13df\u13e0\5\u03d5\u01eb\2\u13e0")
buf.write("\u03de\3\2\2\2\u13e1\u13e2\n\6\2\2\u13e2\u03e0\3\2\2\2")
buf.write("\u13e3\u13e7\7$\2\2\u13e4\u13e8\n\7\2\2\u13e5\u13e6\7")
buf.write("$\2\2\u13e6\u13e8\7$\2\2\u13e7\u13e4\3\2\2\2\u13e7\u13e5")
buf.write("\3\2\2\2\u13e8\u13e9\3\2\2\2\u13e9\u13e7\3\2\2\2\u13e9")
buf.write("\u13ea\3\2\2\2\u13ea\u13eb\3\2\2\2\u13eb\u13ec\7$\2\2")
buf.write("\u13ec\u03e2\3\2\2\2\u13ed\u13ee\7\'\2\2\u13ee\u03e4\3")
buf.write("\2\2\2\u13ef\u13f0\7(\2\2\u13f0\u03e6\3\2\2\2\u13f1\u13f2")
buf.write("\7*\2\2\u13f2\u03e8\3\2\2\2\u13f3\u13f4\7+\2\2\u13f4\u03ea")
buf.write("\3\2\2\2\u13f5\u13f6\7,\2\2\u13f6\u13f7\7,\2\2\u13f7\u03ec")
buf.write("\3\2\2\2\u13f8\u13f9\7,\2\2\u13f9\u03ee\3\2\2\2\u13fa")
buf.write("\u13fb\7-\2\2\u13fb\u03f0\3\2\2\2\u13fc\u13fd\7/\2\2\u13fd")
buf.write("\u03f2\3\2\2\2\u13fe\u13ff\7.\2\2\u13ff\u03f4\3\2\2\2")
buf.write("\u1400\u1401\7\61\2\2\u1401\u03f6\3\2\2\2\u1402\u1403")
buf.write("\7B\2\2\u1403\u03f8\3\2\2\2\u1404\u1405\7<\2\2\u1405\u1406")
buf.write("\7?\2\2\u1406\u03fa\3\2\2\2\u1407\u1408\7<\2\2\u1408\u140d")
buf.write("\5\u0421\u0211\2\u1409\u140c\5\u0421\u0211\2\u140a\u140c")
buf.write("\t\b\2\2\u140b\u1409\3\2\2\2\u140b\u140a\3\2\2\2\u140c")
buf.write("\u140f\3\2\2\2\u140d\u140b\3\2\2\2\u140d\u140e\3\2\2\2")
buf.write("\u140e\u1416\3\2\2\2\u140f\u140d\3\2\2\2\u1410\u1411\7")
buf.write("<\2\2\u1411\u1416\5\u03e1\u01f1\2\u1412\u1413\7<\2\2\u1413")
buf.write("\u1416\5\u03cd\u01e7\2\u1414\u1416\5\u0411\u0209\2\u1415")
buf.write("\u1407\3\2\2\2\u1415\u1410\3\2\2\2\u1415\u1412\3\2\2\2")
buf.write("\u1415\u1414\3\2\2\2\u1416\u03fc\3\2\2\2\u1417\u1418\7")
buf.write("<\2\2\u1418\u03fe\3\2\2\2\u1419\u141a\7=\2\2\u141a\u0400")
buf.write("\3\2\2\2\u141b\u141c\7>\2\2\u141c\u141d\7?\2\2\u141d\u0402")
buf.write("\3\2\2\2\u141e\u141f\7>\2\2\u141f\u0404\3\2\2\2\u1420")
buf.write("\u1421\7@\2\2\u1421\u1422\7?\2\2\u1422\u0406\3\2\2\2\u1423")
buf.write("\u1424\7#\2\2\u1424\u142c\7?\2\2\u1425\u1426\7>\2\2\u1426")
buf.write("\u142c\7@\2\2\u1427\u1428\7`\2\2\u1428\u142c\7?\2\2\u1429")
buf.write("\u142a\7\u0080\2\2\u142a\u142c\7?\2\2\u142b\u1423\3\2")
buf.write("\2\2\u142b\u1425\3\2\2\2\u142b\u1427\3\2\2\2\u142b\u1429")
buf.write("\3\2\2\2\u142c\u0408\3\2\2\2\u142d\u142e\7`\2\2\u142e")
buf.write("\u040a\3\2\2\2\u142f\u1430\7\u0080\2\2\u1430\u040c\3\2")
buf.write("\2\2\u1431\u1432\7#\2\2\u1432\u040e\3\2\2\2\u1433\u1434")
buf.write("\7@\2\2\u1434\u0410\3\2\2\2\u1435\u1436\7A\2\2\u1436\u0412")
buf.write("\3\2\2\2\u1437\u1438\7~\2\2\u1438\u1439\7~\2\2\u1439\u0414")
buf.write("\3\2\2\2\u143a\u143b\7~\2\2\u143b\u0416\3\2\2\2\u143c")
buf.write("\u143d\7?\2\2\u143d\u0418\3\2\2\2\u143e\u143f\7]\2\2\u143f")
buf.write("\u041a\3\2\2\2\u1440\u1441\7_\2\2\u1441\u041c\3\2\2\2")
buf.write("\u1442\u1443\7a\2\2\u1443\u041e\3\2\2\2\u1444\u1446\t")
buf.write("\t\2\2\u1445\u1444\3\2\2\2\u1446\u1447\3\2\2\2\u1447\u1445")
buf.write("\3\2\2\2\u1447\u1448\3\2\2\2\u1448\u1449\3\2\2\2\u1449")
buf.write("\u144a\b\u0210\3\2\u144a\u0420\3\2\2\2\u144b\u144c\t\n")
buf.write("\2\2\u144c\u0422\3\2\2\2\u144d\u144f\4\62;\2\u144e\u144d")
buf.write("\3\2\2\2\u144f\u1450\3\2\2\2\u1450\u144e\3\2\2\2\u1450")
buf.write("\u1451\3\2\2\2\u1451\u0424\3\2\2\2\u1452\u1454\5\u03cd")
buf.write("\u01e7\2\u1453\u1452\3\2\2\2\u1454\u1457\3\2\2\2\u1455")
buf.write("\u1453\3\2\2\2\u1455\u1456\3\2\2\2\u1456\u1459\3\2\2\2")
buf.write("\u1457\u1455\3\2\2\2\u1458\u145a\7\60\2\2\u1459\u1458")
buf.write("\3\2\2\2\u1459\u145a\3\2\2\2\u145a\u145c\3\2\2\2\u145b")
buf.write("\u145d\5\u03cd\u01e7\2\u145c\u145b\3\2\2\2\u145d\u145e")
buf.write("\3\2\2\2\u145e\u145c\3\2\2\2\u145e\u145f\3\2\2\2\u145f")
buf.write("\u0426\3\2\2\2\u1460\u1461\7/\2\2\u1461\u1462\7/\2\2\u1462")
buf.write("\u1466\3\2\2\2\u1463\u1465\n\13\2\2\u1464\u1463\3\2\2")
buf.write("\2\u1465\u1468\3\2\2\2\u1466\u1464\3\2\2\2\u1466\u1467")
buf.write("\3\2\2\2\u1467\u146b\3\2\2\2\u1468\u1466\3\2\2\2\u1469")
buf.write("\u146c\5\u042d\u0217\2\u146a\u146c\7\2\2\3\u146b\u1469")
buf.write("\3\2\2\2\u146b\u146a\3\2\2\2\u146c\u146d\3\2\2\2\u146d")
buf.write("\u146e\b\u0214\4\2\u146e\u0428\3\2\2\2\u146f\u1470\7\61")
buf.write("\2\2\u1470\u1471\7,\2\2\u1471\u1475\3\2\2\2\u1472\u1474")
buf.write("\13\2\2\2\u1473\u1472\3\2\2\2\u1474\u1477\3\2\2\2\u1475")
buf.write("\u1476\3\2\2\2\u1475\u1473\3\2\2\2\u1476\u1478\3\2\2\2")
buf.write("\u1477\u1475\3\2\2\2\u1478\u1479\7,\2\2\u1479\u147a\7")
buf.write("\61\2\2\u147a\u147b\3\2\2\2\u147b\u147c\b\u0215\4\2\u147c")
buf.write("\u042a\3\2\2\2\u147d\u147e\7r\2\2\u147e\u147f\7t\2\2\u147f")
buf.write("\u1480\7q\2\2\u1480\u1481\7o\2\2\u1481\u1482\7r\2\2\u1482")
buf.write("\u1483\7v\2\2\u1483\u1484\3\2\2\2\u1484\u1488\5\u042f")
buf.write("\u0218\2\u1485\u1487\n\13\2\2\u1486\u1485\3\2\2\2\u1487")
buf.write("\u148a\3\2\2\2\u1488\u1486\3\2\2\2\u1488\u1489\3\2\2\2")
buf.write("\u1489\u148d\3\2\2\2\u148a\u1488\3\2\2\2\u148b\u148e\5")
buf.write("\u042d\u0217\2\u148c\u148e\7\2\2\3\u148d\u148b\3\2\2\2")
buf.write("\u148d\u148c\3\2\2\2\u148e\u042c\3\2\2\2\u148f\u1491\7")
buf.write("\17\2\2\u1490\u148f\3\2\2\2\u1490\u1491\3\2\2\2\u1491")
buf.write("\u1492\3\2\2\2\u1492\u1493\7\f\2\2\u1493\u042e\3\2\2\2")
buf.write("\u1494\u1495\t\f\2\2\u1495\u0430\3\2\2\2\u1496\u149b\5")
buf.write("\u0421\u0211\2\u1497\u149a\5\u0421\u0211\2\u1498\u149a")
buf.write("\t\r\2\2\u1499\u1497\3\2\2\2\u1499\u1498\3\2\2\2\u149a")
buf.write("\u149d\3\2\2\2\u149b\u1499\3\2\2\2\u149b\u149c\3\2\2\2")
buf.write("\u149c\u0432\3\2\2\2\u149d\u149b\3\2\2\2\u149e\u149f\7")
buf.write("B\2\2\u149f\u14a0\7#\2\2\u14a0\u14a1\3\2\2\2\u14a1\u14a2")
buf.write("\b\u021a\4\2\u14a2\u0434\3\2\2\2\u14a3\u14a4\t\16\2\2")
buf.write("\u14a4\u0436\3\2\2\2\u14a5\u14a6\t\17\2\2\u14a6\u0438")
buf.write("\3\2\2\2\u14a7\u14a8\t\20\2\2\u14a8\u043a\3\2\2\2\u14a9")
buf.write("\u14aa\t\21\2\2\u14aa\u043c\3\2\2\2\u14ab\u14ac\t\4\2")
buf.write("\2\u14ac\u043e\3\2\2\2\u14ad\u14ae\t\22\2\2\u14ae\u0440")
buf.write("\3\2\2\2\u14af\u14b0\t\23\2\2\u14b0\u0442\3\2\2\2\u14b1")
buf.write("\u14b2\t\24\2\2\u14b2\u0444\3\2\2\2\u14b3\u14b4\t\25\2")
buf.write("\2\u14b4\u0446\3\2\2\2\u14b5\u14b6\t\26\2\2\u14b6\u0448")
buf.write("\3\2\2\2\u14b7\u14b8\t\27\2\2\u14b8\u044a\3\2\2\2\u14b9")
buf.write("\u14ba\t\30\2\2\u14ba\u044c\3\2\2\2\u14bb\u14bc\t\31\2")
buf.write("\2\u14bc\u044e\3\2\2\2\u14bd\u14be\t\32\2\2\u14be\u0450")
buf.write("\3\2\2\2\u14bf\u14c0\t\33\2\2\u14c0\u0452\3\2\2\2\u14c1")
buf.write("\u14c2\t\34\2\2\u14c2\u0454\3\2\2\2\u14c3\u14c4\t\35\2")
buf.write("\2\u14c4\u0456\3\2\2\2\u14c5\u14c6\t\36\2\2\u14c6\u0458")
buf.write("\3\2\2\2\u14c7\u14c8\t\37\2\2\u14c8\u045a\3\2\2\2\u14c9")
buf.write("\u14ca\t \2\2\u14ca\u045c\3\2\2\2\u14cb\u14cc\t!\2\2\u14cc")
buf.write("\u045e\3\2\2\2\u14cd\u14ce\t\"\2\2\u14ce\u0460\3\2\2\2")
buf.write("\u14cf\u14d0\t#\2\2\u14d0\u0462\3\2\2\2\u14d1\u14d2\t")
buf.write("$\2\2\u14d2\u0464\3\2\2\2\u14d3\u14d4\t%\2\2\u14d4\u0466")
buf.write("\3\2\2\2\u14d5\u14d6\t&\2\2\u14d6\u0468\3\2\2\2\'\2\u1367")
buf.write("\u1369\u1373\u1379\u1380\u1386\u1392\u1396\u1398\u139c")
buf.write("\u13a3\u13a5\u13af\u13ba\u13c5\u13d0\u13db\u13e7\u13e9")
buf.write("\u140b\u140d\u1415\u142b\u1447\u1450\u1455\u1459\u145e")
buf.write("\u1466\u146b\u1475\u1488\u148d\u1490\u1499\u149b\5\t\u01ea")
buf.write("\2\b\2\2\2\3\2")
return buf.getvalue()
class PlSqlLexer(Lexer):
    """ANTLR-generated lexer for Oracle PL/SQL (grammar file ``PlSql.g4``).

    NOTE(review): this class is machine-generated by the ANTLR tool
    (targeting runtime 4.7.2).  Do not edit the token constants, the
    name tables, or the serialized ATN by hand -- regenerate them from
    the grammar instead, or the tables will drift out of sync with the
    ATN transitions.
    """

    # Lexer ATN rebuilt from the serialized form produced by the ANTLR tool
    # (see serializedATN() above); shared by every lexer instance.
    atn = ATNDeserializer().deserialize(serializedATN())

    # One DFA cache per ATN decision point, shared across instances.
    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    # --- Token type constants -------------------------------------------
    # Values must match the token numbering emitted for PlSql.g4; they are
    # indices into literalNames/symbolicNames below.
    T__0 = 1
    A_LETTER = 2
    ADD = 3
    AFTER = 4
    AGENT = 5
    AGGREGATE = 6
    ALL = 7
    ALTER = 8
    ANALYZE = 9
    AND = 10
    ANY = 11
    ARRAY = 12
    AS = 13
    ASSUME = 14
    ASSERT = 15
    ASC = 16
    ASSOCIATE = 17
    AT = 18
    ATTRIBUTE = 19
    AUDIT = 20
    AUTHID = 21
    AUTO = 22
    AUTOMATIC = 23
    AUTONOMOUS_TRANSACTION = 24
    BATCH = 25
    BEFORE = 26
    BEGIN = 27
    BETWEEN = 28
    BFILE = 29
    BINARY_DOUBLE = 30
    BINARY_FLOAT = 31
    BINARY_INTEGER = 32
    BLOB = 33
    BLOCK = 34
    BODY = 35
    BOOLEAN = 36
    BOTH = 37
    BREADTH = 38
    BULK = 39
    BY = 40
    BYTE = 41
    C_LETTER = 42
    CACHE = 43
    CALL = 44
    CANONICAL = 45
    CASCADE = 46
    CASE = 47
    CAST = 48
    CHAR = 49
    CHAR_CS = 50
    CHARACTER = 51
    CHECK = 52
    CHR = 53
    CLOB = 54
    CLOSE = 55
    CLUSTER = 56
    COLLECT = 57
    COLUMNS = 58
    COMMENT = 59
    COMMIT = 60
    COMMITTED = 61
    COMPATIBILITY = 62
    COMPILE = 63
    COMPOUND = 64
    CONNECT = 65
    CONNECT_BY_ROOT = 66
    CONSTANT = 67
    CONSTRAINT = 68
    CONSTRAINTS = 69
    CONSTRUCTOR = 70
    CONTENT = 71
    CONTEXT = 72
    CONTINUE = 73
    CONVERT = 74
    CORRUPT_XID = 75
    CORRUPT_XID_ALL = 76
    COST = 77
    COUNT = 78
    CREATE = 79
    CROSS = 80
    CUBE = 81
    CURRENT = 82
    CURRENT_USER = 83
    CURSOR = 84
    CUSTOMDATUM = 85
    CYCLE = 86
    DATA = 87
    DATABASE = 88
    DATE = 89
    DAY = 90
    DB_ROLE_CHANGE = 91
    DBTIMEZONE = 92
    DDL = 93
    DEBUG = 94
    DEC = 95
    DECIMAL = 96
    DECLARE = 97
    DECOMPOSE = 98
    DECREMENT = 99
    DEFAULT = 100
    DEFAULTS = 101
    DEFERRED = 102
    DEFINER = 103
    DELETE = 104
    DEPTH = 105
    DESC = 106
    DETERMINISTIC = 107
    DIMENSION = 108
    DISABLE = 109
    DISASSOCIATE = 110
    DISTINCT = 111
    DOCUMENT = 112
    DOUBLE = 113
    DROP = 114
    DSINTERVAL_UNCONSTRAINED = 115
    EACH = 116
    ELEMENT = 117
    ELSE = 118
    ELSIF = 119
    EMPTY = 120
    ENABLE = 121
    ENCODING = 122
    END = 123
    ENTITYESCAPING = 124
    ERR = 125
    ERRORS = 126
    ESCAPE = 127
    EVALNAME = 128
    EXCEPT = 129
    EXCEPTION = 130
    EXCEPTION_INIT = 131
    EXCEPTIONS = 132
    EXCLUDE = 133
    EXCLUSIVE = 134
    EXECUTE = 135
    EXISTS = 136
    EXIT = 137
    EXPLAIN = 138
    EXTERNAL = 139
    EXTRACT = 140
    FAILURE = 141
    FALSE = 142
    FETCH = 143
    FINAL = 144
    FIRST = 145
    FIRST_VALUE = 146
    FLOAT = 147
    FOLLOWING = 148
    FOLLOWS = 149
    FOR = 150
    FORALL = 151
    FORCE = 152
    FROM = 153
    FULL = 154
    FUNCTION = 155
    GOTO = 156
    GRANT = 157
    GROUP = 158
    GROUPING = 159
    HASH = 160
    HAVING = 161
    HIDE = 162
    HOUR = 163
    IF = 164
    IGNORE = 165
    IMMEDIATE = 166
    IN = 167
    INCLUDE = 168
    INCLUDING = 169
    INCREMENT = 170
    INDENT = 171
    INDEX = 172
    INDEXED = 173
    INDICATOR = 174
    INDICES = 175
    INFINITE = 176
    INLINE = 177
    INNER = 178
    INOUT = 179
    INSERT = 180
    INSTANTIABLE = 181
    INSTEAD = 182
    INT = 183
    INTEGER = 184
    INTERSECT = 185
    INTERVAL = 186
    INTO = 187
    INVALIDATE = 188
    IS = 189
    ISOLATION = 190
    ITERATE = 191
    JAVA = 192
    JOIN = 193
    KEEP = 194
    LANGUAGE = 195
    LAST = 196
    LAST_VALUE = 197
    LEADING = 198
    LEFT = 199
    LEVEL = 200
    LIBRARY = 201
    LIKE = 202
    LIKE2 = 203
    LIKE4 = 204
    LIKEC = 205
    LIMIT = 206
    LOCAL = 207
    LOCK = 208
    LOCKED = 209
    LOG = 210
    LOGOFF = 211
    LOGON = 212
    LONG = 213
    LOOP = 214
    MAIN = 215
    MAP = 216
    MATCHED = 217
    MAXVALUE = 218
    MEASURES = 219
    MEMBER = 220
    MERGE = 221
    MINUS = 222
    MINUTE = 223
    MINVALUE = 224
    MLSLABEL = 225
    MODE = 226
    MODEL = 227
    MODIFY = 228
    MONTH = 229
    MULTISET = 230
    NAME = 231
    NAN = 232
    NATURAL = 233
    NATURALN = 234
    NAV = 235
    NCHAR = 236
    NCHAR_CS = 237
    NCLOB = 238
    NESTED = 239
    NEW = 240
    NO = 241
    NOAUDIT = 242
    NOCACHE = 243
    NOCOPY = 244
    NOCYCLE = 245
    NOENTITYESCAPING = 246
    NOMAXVALUE = 247
    NOMINVALUE = 248
    NONE = 249
    NOORDER = 250
    NOSCHEMACHECK = 251
    NOT = 252
    NOWAIT = 253
    NULL = 254
    NULLS = 255
    NUMBER = 256
    NUMERIC = 257
    NVARCHAR2 = 258
    OBJECT = 259
    OF = 260
    OFF = 261
    OID = 262
    OLD = 263
    ON = 264
    ONLY = 265
    OPEN = 266
    OPTION = 267
    OR = 268
    ORADATA = 269
    ORDER = 270
    ORDINALITY = 271
    OSERROR = 272
    OUT = 273
    OUTER = 274
    OVER = 275
    OVERRIDING = 276
    PACKAGE = 277
    PARALLEL_ENABLE = 278
    PARAMETERS = 279
    PARENT = 280
    PARTITION = 281
    PASSING = 282
    PATH = 283
    PERCENT_ROWTYPE = 284
    PERCENT_TYPE = 285
    PIPELINED = 286
    PIVOT = 287
    PLAN = 288
    PLS_INTEGER = 289
    POSITIVE = 290
    POSITIVEN = 291
    PRAGMA = 292
    PRECEDING = 293
    PRECISION = 294
    PRESENT = 295
    PRIOR = 296
    PROCEDURE = 297
    RAISE = 298
    RANGE = 299
    RAW = 300
    READ = 301
    REAL = 302
    RECORD = 303
    REF = 304
    REFERENCE = 305
    REFERENCING = 306
    REJECT = 307
    RELIES_ON = 308
    RENAME = 309
    REPLACE = 310
    RESPECT = 311
    RESTRICT_REFERENCES = 312
    RESULT = 313
    RESULT_CACHE = 314
    RETURN = 315
    RETURNING = 316
    REUSE = 317
    REVERSE = 318
    REVOKE = 319
    RIGHT = 320
    ROLLBACK = 321
    ROLLUP = 322
    ROW = 323
    ROWID = 324
    ROWS = 325
    RULES = 326
    SAMPLE = 327
    SAVE = 328
    SAVEPOINT = 329
    SCHEMA = 330
    SCHEMACHECK = 331
    SCN = 332
    SEARCH = 333
    SECOND = 334
    SEED = 335
    SEGMENT = 336
    SELECT = 337
    SELF = 338
    SEQUENCE = 339
    SEQUENTIAL = 340
    SERIALIZABLE = 341
    SERIALLY_REUSABLE = 342
    SERVERERROR = 343
    SESSIONTIMEZONE = 344
    SET = 345
    SETS = 346
    SETTINGS = 347
    SHARE = 348
    SHOW = 349
    SHUTDOWN = 350
    SIBLINGS = 351
    SIGNTYPE = 352
    SIMPLE_INTEGER = 353
    SINGLE = 354
    SIZE = 355
    SKIP_ = 356
    SMALLINT = 357
    SNAPSHOT = 358
    SOME = 359
    SPECIFICATION = 360
    SQLDATA = 361
    SQLERROR = 362
    STANDALONE = 363
    START = 364
    STARTUP = 365
    STATEMENT = 366
    STATEMENT_ID = 367
    STATIC = 368
    STATISTICS = 369
    STRING = 370
    SUBMULTISET = 371
    SUBPARTITION = 372
    SUBSTITUTABLE = 373
    SUBTYPE = 374
    SUCCESS = 375
    SUSPEND = 376
    TABLE = 377
    THE = 378
    THEN = 379
    TIME = 380
    TIMESTAMP = 381
    TIMESTAMP_LTZ_UNCONSTRAINED = 382
    TIMESTAMP_TZ_UNCONSTRAINED = 383
    TIMESTAMP_UNCONSTRAINED = 384
    TIMEZONE_ABBR = 385
    TIMEZONE_HOUR = 386
    TIMEZONE_MINUTE = 387
    TIMEZONE_REGION = 388
    TO = 389
    TRAILING = 390
    TRANSACTION = 391
    TRANSLATE = 392
    TREAT = 393
    TRIGGER = 394
    TRIM = 395
    TRUE = 396
    TRUNCATE = 397
    TYPE = 398
    UNBOUNDED = 399
    UNDER = 400
    UNION = 401
    UNIQUE = 402
    UNLIMITED = 403
    UNPIVOT = 404
    UNTIL = 405
    UPDATE = 406
    UPDATED = 407
    UPSERT = 408
    UROWID = 409
    USE = 410
    USING = 411
    VALIDATE = 412
    VALUE = 413
    VALUES = 414
    VARCHAR = 415
    VARCHAR2 = 416
    VARIABLE = 417
    VARRAY = 418
    VARYING = 419
    VERSION = 420
    VERSIONS = 421
    WAIT = 422
    WARNING = 423
    WELLFORMED = 424
    WHEN = 425
    WHENEVER = 426
    WHERE = 427
    WHILE = 428
    WITH = 429
    WITHIN = 430
    WORK = 431
    WRITE = 432
    XML = 433
    XMLAGG = 434
    XMLATTRIBUTES = 435
    XMLCAST = 436
    XMLCOLATTVAL = 437
    XMLELEMENT = 438
    XMLEXISTS = 439
    XMLFOREST = 440
    XMLNAMESPACES = 441
    XMLPARSE = 442
    XMLPI = 443
    XMLQUERY = 444
    XMLROOT = 445
    XMLSERIALIZE = 446
    XMLTABLE = 447
    YEAR = 448
    YES = 449
    YMINTERVAL_UNCONSTRAINED = 450
    ZONE = 451
    PREDICTION = 452
    PREDICTION_BOUNDS = 453
    PREDICTION_COST = 454
    PREDICTION_DETAILS = 455
    PREDICTION_PROBABILITY = 456
    PREDICTION_SET = 457
    CUME_DIST = 458
    DENSE_RANK = 459
    LISTAGG = 460
    PERCENT_RANK = 461
    PERCENTILE_CONT = 462
    PERCENTILE_DISC = 463
    RANK = 464
    AVG = 465
    CORR = 466
    LAG = 467
    LEAD = 468
    MAX = 469
    MEDIAN = 470
    MIN = 471
    NTILE = 472
    RATIO_TO_REPORT = 473
    ROW_NUMBER = 474
    SUM = 475
    VARIANCE = 476
    REGR_ = 477
    STDDEV = 478
    VAR_ = 479
    COVAR_ = 480
    NATIONAL_CHAR_STRING_LIT = 481
    BIT_STRING_LIT = 482
    HEX_STRING_LIT = 483
    DOUBLE_PERIOD = 484
    PERIOD = 485
    UNSIGNED_INTEGER = 486
    APPROXIMATE_NUM_LIT = 487
    CHAR_STRING = 488
    DELIMITED_ID = 489
    PERCENT = 490
    AMPERSAND = 491
    LEFT_PAREN = 492
    RIGHT_PAREN = 493
    DOUBLE_ASTERISK = 494
    ASTERISK = 495
    PLUS_SIGN = 496
    MINUS_SIGN = 497
    COMMA = 498
    SOLIDUS = 499
    AT_SIGN = 500
    ASSIGN_OP = 501
    BINDVAR = 502
    COLON = 503
    SEMICOLON = 504
    LESS_THAN_OR_EQUALS_OP = 505
    LESS_THAN_OP = 506
    GREATER_THAN_OR_EQUALS_OP = 507
    NOT_EQUAL_OP = 508
    CARRET_OPERATOR_PART = 509
    TILDE_OPERATOR_PART = 510
    EXCLAMATION_OPERATOR_PART = 511
    GREATER_THAN_OP = 512
    CONCATENATION_OP = 513
    VERTICAL_BAR = 514
    EQUALS_OP = 515
    LEFT_BRACKET = 516
    RIGHT_BRACKET = 517
    INTRODUCER = 518
    SPACES = 519
    SINGLE_LINE_COMMENT = 520
    MULTI_LINE_COMMENT = 521
    PROMPT = 522
    REGULAR_ID = 523
    ZV = 524

    # Token channels; SPACES/comments are routed to HIDDEN by lexer actions.
    channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]

    # This grammar defines no extra lexical modes.
    modeNames = [ "DEFAULT_MODE" ]

    # Literal spellings indexed by token type ("<INVALID>" placeholder at 0).
    literalNames = [ "<INVALID>",
            "'..'", "'.'", "'%'", "'&'", "'('", "')'", "'**'", "'*'", "'+'",
            "'-'", "','", "'/'", "'@'", "':='", "':'", "';'", "'<='", "'<'",
            "'>='", "'^'", "'~'", "'!'", "'>'", "'||'", "'|'", "'='", "'['",
            "']'", "'_'", "'@!'" ]

    # Symbolic token names indexed by token type; parallels the constants above.
    symbolicNames = [ "<INVALID>",
            "A_LETTER", "ADD", "AFTER", "AGENT", "AGGREGATE", "ALL", "ALTER",
            "ANALYZE", "AND", "ANY", "ARRAY", "AS", "ASSUME", "ASSERT",
            "ASC", "ASSOCIATE", "AT", "ATTRIBUTE", "AUDIT", "AUTHID", "AUTO",
            "AUTOMATIC", "AUTONOMOUS_TRANSACTION", "BATCH", "BEFORE", "BEGIN",
            "BETWEEN", "BFILE", "BINARY_DOUBLE", "BINARY_FLOAT", "BINARY_INTEGER",
            "BLOB", "BLOCK", "BODY", "BOOLEAN", "BOTH", "BREADTH", "BULK",
            "BY", "BYTE", "C_LETTER", "CACHE", "CALL", "CANONICAL", "CASCADE",
            "CASE", "CAST", "CHAR", "CHAR_CS", "CHARACTER", "CHECK", "CHR",
            "CLOB", "CLOSE", "CLUSTER", "COLLECT", "COLUMNS", "COMMENT",
            "COMMIT", "COMMITTED", "COMPATIBILITY", "COMPILE", "COMPOUND",
            "CONNECT", "CONNECT_BY_ROOT", "CONSTANT", "CONSTRAINT", "CONSTRAINTS",
            "CONSTRUCTOR", "CONTENT", "CONTEXT", "CONTINUE", "CONVERT",
            "CORRUPT_XID", "CORRUPT_XID_ALL", "COST", "COUNT", "CREATE",
            "CROSS", "CUBE", "CURRENT", "CURRENT_USER", "CURSOR", "CUSTOMDATUM",
            "CYCLE", "DATA", "DATABASE", "DATE", "DAY", "DB_ROLE_CHANGE",
            "DBTIMEZONE", "DDL", "DEBUG", "DEC", "DECIMAL", "DECLARE", "DECOMPOSE",
            "DECREMENT", "DEFAULT", "DEFAULTS", "DEFERRED", "DEFINER", "DELETE",
            "DEPTH", "DESC", "DETERMINISTIC", "DIMENSION", "DISABLE", "DISASSOCIATE",
            "DISTINCT", "DOCUMENT", "DOUBLE", "DROP", "DSINTERVAL_UNCONSTRAINED",
            "EACH", "ELEMENT", "ELSE", "ELSIF", "EMPTY", "ENABLE", "ENCODING",
            "END", "ENTITYESCAPING", "ERR", "ERRORS", "ESCAPE", "EVALNAME",
            "EXCEPT", "EXCEPTION", "EXCEPTION_INIT", "EXCEPTIONS", "EXCLUDE",
            "EXCLUSIVE", "EXECUTE", "EXISTS", "EXIT", "EXPLAIN", "EXTERNAL",
            "EXTRACT", "FAILURE", "FALSE", "FETCH", "FINAL", "FIRST", "FIRST_VALUE",
            "FLOAT", "FOLLOWING", "FOLLOWS", "FOR", "FORALL", "FORCE", "FROM",
            "FULL", "FUNCTION", "GOTO", "GRANT", "GROUP", "GROUPING", "HASH",
            "HAVING", "HIDE", "HOUR", "IF", "IGNORE", "IMMEDIATE", "IN",
            "INCLUDE", "INCLUDING", "INCREMENT", "INDENT", "INDEX", "INDEXED",
            "INDICATOR", "INDICES", "INFINITE", "INLINE", "INNER", "INOUT",
            "INSERT", "INSTANTIABLE", "INSTEAD", "INT", "INTEGER", "INTERSECT",
            "INTERVAL", "INTO", "INVALIDATE", "IS", "ISOLATION", "ITERATE",
            "JAVA", "JOIN", "KEEP", "LANGUAGE", "LAST", "LAST_VALUE", "LEADING",
            "LEFT", "LEVEL", "LIBRARY", "LIKE", "LIKE2", "LIKE4", "LIKEC",
            "LIMIT", "LOCAL", "LOCK", "LOCKED", "LOG", "LOGOFF", "LOGON",
            "LONG", "LOOP", "MAIN", "MAP", "MATCHED", "MAXVALUE", "MEASURES",
            "MEMBER", "MERGE", "MINUS", "MINUTE", "MINVALUE", "MLSLABEL",
            "MODE", "MODEL", "MODIFY", "MONTH", "MULTISET", "NAME", "NAN",
            "NATURAL", "NATURALN", "NAV", "NCHAR", "NCHAR_CS", "NCLOB",
            "NESTED", "NEW", "NO", "NOAUDIT", "NOCACHE", "NOCOPY", "NOCYCLE",
            "NOENTITYESCAPING", "NOMAXVALUE", "NOMINVALUE", "NONE", "NOORDER",
            "NOSCHEMACHECK", "NOT", "NOWAIT", "NULL", "NULLS", "NUMBER",
            "NUMERIC", "NVARCHAR2", "OBJECT", "OF", "OFF", "OID", "OLD",
            "ON", "ONLY", "OPEN", "OPTION", "OR", "ORADATA", "ORDER", "ORDINALITY",
            "OSERROR", "OUT", "OUTER", "OVER", "OVERRIDING", "PACKAGE",
            "PARALLEL_ENABLE", "PARAMETERS", "PARENT", "PARTITION", "PASSING",
            "PATH", "PERCENT_ROWTYPE", "PERCENT_TYPE", "PIPELINED", "PIVOT",
            "PLAN", "PLS_INTEGER", "POSITIVE", "POSITIVEN", "PRAGMA", "PRECEDING",
            "PRECISION", "PRESENT", "PRIOR", "PROCEDURE", "RAISE", "RANGE",
            "RAW", "READ", "REAL", "RECORD", "REF", "REFERENCE", "REFERENCING",
            "REJECT", "RELIES_ON", "RENAME", "REPLACE", "RESPECT", "RESTRICT_REFERENCES",
            "RESULT", "RESULT_CACHE", "RETURN", "RETURNING", "REUSE", "REVERSE",
            "REVOKE", "RIGHT", "ROLLBACK", "ROLLUP", "ROW", "ROWID", "ROWS",
            "RULES", "SAMPLE", "SAVE", "SAVEPOINT", "SCHEMA", "SCHEMACHECK",
            "SCN", "SEARCH", "SECOND", "SEED", "SEGMENT", "SELECT", "SELF",
            "SEQUENCE", "SEQUENTIAL", "SERIALIZABLE", "SERIALLY_REUSABLE",
            "SERVERERROR", "SESSIONTIMEZONE", "SET", "SETS", "SETTINGS",
            "SHARE", "SHOW", "SHUTDOWN", "SIBLINGS", "SIGNTYPE", "SIMPLE_INTEGER",
            "SINGLE", "SIZE", "SKIP_", "SMALLINT", "SNAPSHOT", "SOME", "SPECIFICATION",
            "SQLDATA", "SQLERROR", "STANDALONE", "START", "STARTUP", "STATEMENT",
            "STATEMENT_ID", "STATIC", "STATISTICS", "STRING", "SUBMULTISET",
            "SUBPARTITION", "SUBSTITUTABLE", "SUBTYPE", "SUCCESS", "SUSPEND",
            "TABLE", "THE", "THEN", "TIME", "TIMESTAMP", "TIMESTAMP_LTZ_UNCONSTRAINED",
            "TIMESTAMP_TZ_UNCONSTRAINED", "TIMESTAMP_UNCONSTRAINED", "TIMEZONE_ABBR",
            "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TIMEZONE_REGION", "TO",
            "TRAILING", "TRANSACTION", "TRANSLATE", "TREAT", "TRIGGER",
            "TRIM", "TRUE", "TRUNCATE", "TYPE", "UNBOUNDED", "UNDER", "UNION",
            "UNIQUE", "UNLIMITED", "UNPIVOT", "UNTIL", "UPDATE", "UPDATED",
            "UPSERT", "UROWID", "USE", "USING", "VALIDATE", "VALUE", "VALUES",
            "VARCHAR", "VARCHAR2", "VARIABLE", "VARRAY", "VARYING", "VERSION",
            "VERSIONS", "WAIT", "WARNING", "WELLFORMED", "WHEN", "WHENEVER",
            "WHERE", "WHILE", "WITH", "WITHIN", "WORK", "WRITE", "XML",
            "XMLAGG", "XMLATTRIBUTES", "XMLCAST", "XMLCOLATTVAL", "XMLELEMENT",
            "XMLEXISTS", "XMLFOREST", "XMLNAMESPACES", "XMLPARSE", "XMLPI",
            "XMLQUERY", "XMLROOT", "XMLSERIALIZE", "XMLTABLE", "YEAR", "YES",
            "YMINTERVAL_UNCONSTRAINED", "ZONE", "PREDICTION", "PREDICTION_BOUNDS",
            "PREDICTION_COST", "PREDICTION_DETAILS", "PREDICTION_PROBABILITY",
            "PREDICTION_SET", "CUME_DIST", "DENSE_RANK", "LISTAGG", "PERCENT_RANK",
            "PERCENTILE_CONT", "PERCENTILE_DISC", "RANK", "AVG", "CORR",
            "LAG", "LEAD", "MAX", "MEDIAN", "MIN", "NTILE", "RATIO_TO_REPORT",
            "ROW_NUMBER", "SUM", "VARIANCE", "REGR_", "STDDEV", "VAR_",
            "COVAR_", "NATIONAL_CHAR_STRING_LIT", "BIT_STRING_LIT", "HEX_STRING_LIT",
            "DOUBLE_PERIOD", "PERIOD", "UNSIGNED_INTEGER", "APPROXIMATE_NUM_LIT",
            "CHAR_STRING", "DELIMITED_ID", "PERCENT", "AMPERSAND", "LEFT_PAREN",
            "RIGHT_PAREN", "DOUBLE_ASTERISK", "ASTERISK", "PLUS_SIGN", "MINUS_SIGN",
            "COMMA", "SOLIDUS", "AT_SIGN", "ASSIGN_OP", "BINDVAR", "COLON",
            "SEMICOLON", "LESS_THAN_OR_EQUALS_OP", "LESS_THAN_OP", "GREATER_THAN_OR_EQUALS_OP",
            "NOT_EQUAL_OP", "CARRET_OPERATOR_PART", "TILDE_OPERATOR_PART",
            "EXCLAMATION_OPERATOR_PART", "GREATER_THAN_OP", "CONCATENATION_OP",
            "VERTICAL_BAR", "EQUALS_OP", "LEFT_BRACKET", "RIGHT_BRACKET",
            "INTRODUCER", "SPACES", "SINGLE_LINE_COMMENT", "MULTI_LINE_COMMENT",
            "PROMPT", "REGULAR_ID", "ZV" ]

    # Lexer rule names in ATN rule order; includes fragment rules (e.g. the
    # trailing single-letter A..Z fragments), so this list is longer than
    # symbolicNames.
    ruleNames = [ "T__0", "A_LETTER", "ADD", "AFTER", "AGENT", "AGGREGATE",
                  "ALL", "ALTER", "ANALYZE", "AND", "ANY", "ARRAY", "AS",
                  "ASSUME", "ASSERT", "ASC", "ASSOCIATE", "AT", "ATTRIBUTE",
                  "AUDIT", "AUTHID", "AUTO", "AUTOMATIC", "AUTONOMOUS_TRANSACTION",
                  "BATCH", "BEFORE", "BEGIN", "BETWEEN", "BFILE", "BINARY_DOUBLE",
                  "BINARY_FLOAT", "BINARY_INTEGER", "BLOB", "BLOCK", "BODY",
                  "BOOLEAN", "BOTH", "BREADTH", "BULK", "BY", "BYTE", "C_LETTER",
                  "CACHE", "CALL", "CANONICAL", "CASCADE", "CASE", "CAST",
                  "CHAR", "CHAR_CS", "CHARACTER", "CHECK", "CHR", "CLOB",
                  "CLOSE", "CLUSTER", "COLLECT", "COLUMNS", "COMMENT", "COMMIT",
                  "COMMITTED", "COMPATIBILITY", "COMPILE", "COMPOUND", "CONNECT",
                  "CONNECT_BY_ROOT", "CONSTANT", "CONSTRAINT", "CONSTRAINTS",
                  "CONSTRUCTOR", "CONTENT", "CONTEXT", "CONTINUE", "CONVERT",
                  "CORRUPT_XID", "CORRUPT_XID_ALL", "COST", "COUNT", "CREATE",
                  "CROSS", "CUBE", "CURRENT", "CURRENT_USER", "CURSOR",
                  "CUSTOMDATUM", "CYCLE", "DATA", "DATABASE", "DATE", "DAY",
                  "DB_ROLE_CHANGE", "DBTIMEZONE", "DDL", "DEBUG", "DEC",
                  "DECIMAL", "DECLARE", "DECOMPOSE", "DECREMENT", "DEFAULT",
                  "DEFAULTS", "DEFERRED", "DEFINER", "DELETE", "DEPTH",
                  "DESC", "DETERMINISTIC", "DIMENSION", "DISABLE", "DISASSOCIATE",
                  "DISTINCT", "DOCUMENT", "DOUBLE", "DROP", "DSINTERVAL_UNCONSTRAINED",
                  "EACH", "ELEMENT", "ELSE", "ELSIF", "EMPTY", "ENABLE",
                  "ENCODING", "END", "ENTITYESCAPING", "ERR", "ERRORS",
                  "ESCAPE", "EVALNAME", "EXCEPT", "EXCEPTION", "EXCEPTION_INIT",
                  "EXCEPTIONS", "EXCLUDE", "EXCLUSIVE", "EXECUTE", "EXISTS",
                  "EXIT", "EXPLAIN", "EXTERNAL", "EXTRACT", "FAILURE", "FALSE",
                  "FETCH", "FINAL", "FIRST", "FIRST_VALUE", "FLOAT", "FOLLOWING",
                  "FOLLOWS", "FOR", "FORALL", "FORCE", "FROM", "FULL", "FUNCTION",
                  "GOTO", "GRANT", "GROUP", "GROUPING", "HASH", "HAVING",
                  "HIDE", "HOUR", "IF", "IGNORE", "IMMEDIATE", "IN", "INCLUDE",
                  "INCLUDING", "INCREMENT", "INDENT", "INDEX", "INDEXED",
                  "INDICATOR", "INDICES", "INFINITE", "INLINE", "INNER",
                  "INOUT", "INSERT", "INSTANTIABLE", "INSTEAD", "INT", "INTEGER",
                  "INTERSECT", "INTERVAL", "INTO", "INVALIDATE", "IS", "ISOLATION",
                  "ITERATE", "JAVA", "JOIN", "KEEP", "LANGUAGE", "LAST",
                  "LAST_VALUE", "LEADING", "LEFT", "LEVEL", "LIBRARY", "LIKE",
                  "LIKE2", "LIKE4", "LIKEC", "LIMIT", "LOCAL", "LOCK", "LOCKED",
                  "LOG", "LOGOFF", "LOGON", "LONG", "LOOP", "MAIN", "MAP",
                  "MATCHED", "MAXVALUE", "MEASURES", "MEMBER", "MERGE",
                  "MINUS", "MINUTE", "MINVALUE", "MLSLABEL", "MODE", "MODEL",
                  "MODIFY", "MONTH", "MULTISET", "NAME", "NAN", "NATURAL",
                  "NATURALN", "NAV", "NCHAR", "NCHAR_CS", "NCLOB", "NESTED",
                  "NEW", "NO", "NOAUDIT", "NOCACHE", "NOCOPY", "NOCYCLE",
                  "NOENTITYESCAPING", "NOMAXVALUE", "NOMINVALUE", "NONE",
                  "NOORDER", "NOSCHEMACHECK", "NOT", "NOWAIT", "NULL", "NULLS",
                  "NUMBER", "NUMERIC", "NVARCHAR2", "OBJECT", "OF", "OFF",
                  "OID", "OLD", "ON", "ONLY", "OPEN", "OPTION", "OR", "ORADATA",
                  "ORDER", "ORDINALITY", "OSERROR", "OUT", "OUTER", "OVER",
                  "OVERRIDING", "PACKAGE", "PARALLEL_ENABLE", "PARAMETERS",
                  "PARENT", "PARTITION", "PASSING", "PATH", "PERCENT_ROWTYPE",
                  "PERCENT_TYPE", "PIPELINED", "PIVOT", "PLAN", "PLS_INTEGER",
                  "POSITIVE", "POSITIVEN", "PRAGMA", "PRECEDING", "PRECISION",
                  "PRESENT", "PRIOR", "PROCEDURE", "RAISE", "RANGE", "RAW",
                  "READ", "REAL", "RECORD", "REF", "REFERENCE", "REFERENCING",
                  "REJECT", "RELIES_ON", "RENAME", "REPLACE", "RESPECT",
                  "RESTRICT_REFERENCES", "RESULT", "RESULT_CACHE", "RETURN",
                  "RETURNING", "REUSE", "REVERSE", "REVOKE", "RIGHT", "ROLLBACK",
                  "ROLLUP", "ROW", "ROWID", "ROWS", "RULES", "SAMPLE", "SAVE",
                  "SAVEPOINT", "SCHEMA", "SCHEMACHECK", "SCN", "SEARCH",
                  "SECOND", "SEED", "SEGMENT", "SELECT", "SELF", "SEQUENCE",
                  "SEQUENTIAL", "SERIALIZABLE", "SERIALLY_REUSABLE", "SERVERERROR",
                  "SESSIONTIMEZONE", "SET", "SETS", "SETTINGS", "SHARE",
                  "SHOW", "SHUTDOWN", "SIBLINGS", "SIGNTYPE", "SIMPLE_INTEGER",
                  "SINGLE", "SIZE", "SKIP_", "SMALLINT", "SNAPSHOT", "SOME",
                  "SPECIFICATION", "SQLDATA", "SQLERROR", "STANDALONE",
                  "START", "STARTUP", "STATEMENT", "STATEMENT_ID", "STATIC",
                  "STATISTICS", "STRING", "SUBMULTISET", "SUBPARTITION",
                  "SUBSTITUTABLE", "SUBTYPE", "SUCCESS", "SUSPEND", "TABLE",
                  "THE", "THEN", "TIME", "TIMESTAMP", "TIMESTAMP_LTZ_UNCONSTRAINED",
                  "TIMESTAMP_TZ_UNCONSTRAINED", "TIMESTAMP_UNCONSTRAINED",
                  "TIMEZONE_ABBR", "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TIMEZONE_REGION",
                  "TO", "TRAILING", "TRANSACTION", "TRANSLATE", "TREAT",
                  "TRIGGER", "TRIM", "TRUE", "TRUNCATE", "TYPE", "UNBOUNDED",
                  "UNDER", "UNION", "UNIQUE", "UNLIMITED", "UNPIVOT", "UNTIL",
                  "UPDATE", "UPDATED", "UPSERT", "UROWID", "USE", "USING",
                  "VALIDATE", "VALUE", "VALUES", "VARCHAR", "VARCHAR2",
                  "VARIABLE", "VARRAY", "VARYING", "VERSION", "VERSIONS",
                  "WAIT", "WARNING", "WELLFORMED", "WHEN", "WHENEVER", "WHERE",
                  "WHILE", "WITH", "WITHIN", "WORK", "WRITE", "XML", "XMLAGG",
                  "XMLATTRIBUTES", "XMLCAST", "XMLCOLATTVAL", "XMLELEMENT",
                  "XMLEXISTS", "XMLFOREST", "XMLNAMESPACES", "XMLPARSE",
                  "XMLPI", "XMLQUERY", "XMLROOT", "XMLSERIALIZE", "XMLTABLE",
                  "YEAR", "YES", "YMINTERVAL_UNCONSTRAINED", "ZONE", "PREDICTION",
                  "PREDICTION_BOUNDS", "PREDICTION_COST", "PREDICTION_DETAILS",
                  "PREDICTION_PROBABILITY", "PREDICTION_SET", "CUME_DIST",
                  "DENSE_RANK", "LISTAGG", "PERCENT_RANK", "PERCENTILE_CONT",
                  "PERCENTILE_DISC", "RANK", "AVG", "CORR", "LAG", "LEAD",
                  "MAX", "MEDIAN", "MIN", "NTILE", "RATIO_TO_REPORT", "ROW_NUMBER",
                  "SUM", "VARIANCE", "REGR_", "STDDEV", "VAR_", "COVAR_",
                  "NATIONAL_CHAR_STRING_LIT", "BIT_STRING_LIT", "HEX_STRING_LIT",
                  "DOUBLE_PERIOD", "PERIOD", "UNSIGNED_INTEGER", "APPROXIMATE_NUM_LIT",
                  "CHAR_STRING", "CHAR_STRING_PERL", "QUOTE", "QS_ANGLE",
                  "QS_BRACE", "QS_BRACK", "QS_PAREN", "QS_OTHER_CH", "DELIMITED_ID",
                  "PERCENT", "AMPERSAND", "LEFT_PAREN", "RIGHT_PAREN", "DOUBLE_ASTERISK",
                  "ASTERISK", "PLUS_SIGN", "MINUS_SIGN", "COMMA", "SOLIDUS",
                  "AT_SIGN", "ASSIGN_OP", "BINDVAR", "COLON", "SEMICOLON",
                  "LESS_THAN_OR_EQUALS_OP", "LESS_THAN_OP", "GREATER_THAN_OR_EQUALS_OP",
                  "NOT_EQUAL_OP", "CARRET_OPERATOR_PART", "TILDE_OPERATOR_PART",
                  "EXCLAMATION_OPERATOR_PART", "GREATER_THAN_OP", "QUESTION_MARK",
                  "CONCATENATION_OP", "VERTICAL_BAR", "EQUALS_OP", "LEFT_BRACKET",
                  "RIGHT_BRACKET", "INTRODUCER", "SPACES", "SIMPLE_LETTER",
                  "UNSIGNED_INTEGER_FRAGMENT", "FLOAT_FRAGMENT", "SINGLE_LINE_COMMENT",
                  "MULTI_LINE_COMMENT", "PROMPT", "NEWLINE", "SPACE", "REGULAR_ID",
                  "ZV", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J",
                  "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U",
                  "V", "W", "X", "Y", "Z" ]

    # Source grammar this lexer was generated from.
    grammarFileName = "PlSql.g4"

    def __init__(self, input=None, output:TextIO = sys.stdout):
        """Create a lexer over *input* (an ANTLR InputStream or None)."""
        super().__init__(input, output)
        # Fails fast if the installed antlr4 runtime is incompatible with
        # the tool version (4.7.2) that generated this file.
        self.checkVersion("4.7.2")
        self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
        # Populated lazily by the runtime when rule actions/predicates fire.
        self._actions = None
        self._predicates = None
|
flexible
|
{
"blob_id": "b6dbed95b321ac93c712c4735d601a00650b8dc4",
"index": 1552,
"step-1": "<mask token>\n\n\nclass PlSqlLexer(Lexer):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask 
token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask 
token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask 
token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask 
token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass PlSqlLexer(Lexer):\n atn = ATNDeserializer().deserialize(serializedATN())\n decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)]\n T__0 = 1\n A_LETTER = 2\n ADD = 3\n AFTER = 4\n AGENT = 5\n AGGREGATE = 6\n ALL = 7\n ALTER = 8\n ANALYZE = 9\n AND = 10\n ANY = 11\n ARRAY = 12\n AS = 13\n ASSUME = 14\n ASSERT = 15\n ASC = 16\n ASSOCIATE = 17\n AT = 18\n ATTRIBUTE = 19\n AUDIT = 20\n AUTHID = 21\n AUTO = 22\n AUTOMATIC = 23\n AUTONOMOUS_TRANSACTION = 24\n BATCH = 25\n BEFORE = 26\n BEGIN = 27\n BETWEEN = 28\n BFILE = 29\n BINARY_DOUBLE = 30\n BINARY_FLOAT = 31\n BINARY_INTEGER = 32\n BLOB = 33\n BLOCK = 34\n BODY = 35\n BOOLEAN = 36\n BOTH = 37\n BREADTH = 38\n BULK = 39\n BY = 40\n BYTE = 41\n C_LETTER = 42\n CACHE = 43\n CALL = 44\n CANONICAL = 45\n CASCADE = 46\n CASE = 47\n CAST = 48\n CHAR = 49\n CHAR_CS = 50\n CHARACTER = 51\n CHECK = 52\n CHR = 53\n CLOB = 54\n CLOSE = 55\n CLUSTER = 56\n COLLECT = 57\n COLUMNS = 58\n COMMENT = 59\n COMMIT = 60\n COMMITTED = 61\n COMPATIBILITY = 62\n COMPILE = 63\n COMPOUND = 64\n CONNECT = 65\n CONNECT_BY_ROOT = 66\n CONSTANT = 67\n CONSTRAINT = 68\n CONSTRAINTS = 69\n CONSTRUCTOR = 70\n CONTENT = 71\n CONTEXT = 72\n CONTINUE = 73\n CONVERT = 74\n CORRUPT_XID = 75\n CORRUPT_XID_ALL = 76\n COST = 77\n COUNT = 78\n CREATE = 79\n CROSS = 80\n CUBE = 81\n CURRENT = 82\n CURRENT_USER = 83\n CURSOR = 84\n CUSTOMDATUM = 85\n CYCLE = 86\n DATA = 87\n DATABASE = 88\n DATE = 89\n DAY = 90\n DB_ROLE_CHANGE = 91\n DBTIMEZONE = 92\n DDL = 93\n DEBUG = 94\n DEC = 95\n DECIMAL = 96\n DECLARE = 97\n DECOMPOSE = 98\n DECREMENT = 99\n DEFAULT = 100\n DEFAULTS = 101\n DEFERRED = 102\n DEFINER = 103\n DELETE = 104\n DEPTH = 105\n DESC = 106\n DETERMINISTIC = 107\n DIMENSION = 108\n DISABLE = 109\n DISASSOCIATE = 110\n DISTINCT = 111\n DOCUMENT = 112\n DOUBLE = 113\n DROP = 114\n DSINTERVAL_UNCONSTRAINED = 115\n EACH = 116\n ELEMENT = 117\n ELSE = 118\n ELSIF = 119\n EMPTY = 120\n ENABLE = 
121\n ENCODING = 122\n END = 123\n ENTITYESCAPING = 124\n ERR = 125\n ERRORS = 126\n ESCAPE = 127\n EVALNAME = 128\n EXCEPT = 129\n EXCEPTION = 130\n EXCEPTION_INIT = 131\n EXCEPTIONS = 132\n EXCLUDE = 133\n EXCLUSIVE = 134\n EXECUTE = 135\n EXISTS = 136\n EXIT = 137\n EXPLAIN = 138\n EXTERNAL = 139\n EXTRACT = 140\n FAILURE = 141\n FALSE = 142\n FETCH = 143\n FINAL = 144\n FIRST = 145\n FIRST_VALUE = 146\n FLOAT = 147\n FOLLOWING = 148\n FOLLOWS = 149\n FOR = 150\n FORALL = 151\n FORCE = 152\n FROM = 153\n FULL = 154\n FUNCTION = 155\n GOTO = 156\n GRANT = 157\n GROUP = 158\n GROUPING = 159\n HASH = 160\n HAVING = 161\n HIDE = 162\n HOUR = 163\n IF = 164\n IGNORE = 165\n IMMEDIATE = 166\n IN = 167\n INCLUDE = 168\n INCLUDING = 169\n INCREMENT = 170\n INDENT = 171\n INDEX = 172\n INDEXED = 173\n INDICATOR = 174\n INDICES = 175\n INFINITE = 176\n INLINE = 177\n INNER = 178\n INOUT = 179\n INSERT = 180\n INSTANTIABLE = 181\n INSTEAD = 182\n INT = 183\n INTEGER = 184\n INTERSECT = 185\n INTERVAL = 186\n INTO = 187\n INVALIDATE = 188\n IS = 189\n ISOLATION = 190\n ITERATE = 191\n JAVA = 192\n JOIN = 193\n KEEP = 194\n LANGUAGE = 195\n LAST = 196\n LAST_VALUE = 197\n LEADING = 198\n LEFT = 199\n LEVEL = 200\n LIBRARY = 201\n LIKE = 202\n LIKE2 = 203\n LIKE4 = 204\n LIKEC = 205\n LIMIT = 206\n LOCAL = 207\n LOCK = 208\n LOCKED = 209\n LOG = 210\n LOGOFF = 211\n LOGON = 212\n LONG = 213\n LOOP = 214\n MAIN = 215\n MAP = 216\n MATCHED = 217\n MAXVALUE = 218\n MEASURES = 219\n MEMBER = 220\n MERGE = 221\n MINUS = 222\n MINUTE = 223\n MINVALUE = 224\n MLSLABEL = 225\n MODE = 226\n MODEL = 227\n MODIFY = 228\n MONTH = 229\n MULTISET = 230\n NAME = 231\n NAN = 232\n NATURAL = 233\n NATURALN = 234\n NAV = 235\n NCHAR = 236\n NCHAR_CS = 237\n NCLOB = 238\n NESTED = 239\n NEW = 240\n NO = 241\n NOAUDIT = 242\n NOCACHE = 243\n NOCOPY = 244\n NOCYCLE = 245\n NOENTITYESCAPING = 246\n NOMAXVALUE = 247\n NOMINVALUE = 248\n NONE = 249\n NOORDER = 250\n NOSCHEMACHECK = 251\n NOT = 252\n 
NOWAIT = 253\n NULL = 254\n NULLS = 255\n NUMBER = 256\n NUMERIC = 257\n NVARCHAR2 = 258\n OBJECT = 259\n OF = 260\n OFF = 261\n OID = 262\n OLD = 263\n ON = 264\n ONLY = 265\n OPEN = 266\n OPTION = 267\n OR = 268\n ORADATA = 269\n ORDER = 270\n ORDINALITY = 271\n OSERROR = 272\n OUT = 273\n OUTER = 274\n OVER = 275\n OVERRIDING = 276\n PACKAGE = 277\n PARALLEL_ENABLE = 278\n PARAMETERS = 279\n PARENT = 280\n PARTITION = 281\n PASSING = 282\n PATH = 283\n PERCENT_ROWTYPE = 284\n PERCENT_TYPE = 285\n PIPELINED = 286\n PIVOT = 287\n PLAN = 288\n PLS_INTEGER = 289\n POSITIVE = 290\n POSITIVEN = 291\n PRAGMA = 292\n PRECEDING = 293\n PRECISION = 294\n PRESENT = 295\n PRIOR = 296\n PROCEDURE = 297\n RAISE = 298\n RANGE = 299\n RAW = 300\n READ = 301\n REAL = 302\n RECORD = 303\n REF = 304\n REFERENCE = 305\n REFERENCING = 306\n REJECT = 307\n RELIES_ON = 308\n RENAME = 309\n REPLACE = 310\n RESPECT = 311\n RESTRICT_REFERENCES = 312\n RESULT = 313\n RESULT_CACHE = 314\n RETURN = 315\n RETURNING = 316\n REUSE = 317\n REVERSE = 318\n REVOKE = 319\n RIGHT = 320\n ROLLBACK = 321\n ROLLUP = 322\n ROW = 323\n ROWID = 324\n ROWS = 325\n RULES = 326\n SAMPLE = 327\n SAVE = 328\n SAVEPOINT = 329\n SCHEMA = 330\n SCHEMACHECK = 331\n SCN = 332\n SEARCH = 333\n SECOND = 334\n SEED = 335\n SEGMENT = 336\n SELECT = 337\n SELF = 338\n SEQUENCE = 339\n SEQUENTIAL = 340\n SERIALIZABLE = 341\n SERIALLY_REUSABLE = 342\n SERVERERROR = 343\n SESSIONTIMEZONE = 344\n SET = 345\n SETS = 346\n SETTINGS = 347\n SHARE = 348\n SHOW = 349\n SHUTDOWN = 350\n SIBLINGS = 351\n SIGNTYPE = 352\n SIMPLE_INTEGER = 353\n SINGLE = 354\n SIZE = 355\n SKIP_ = 356\n SMALLINT = 357\n SNAPSHOT = 358\n SOME = 359\n SPECIFICATION = 360\n SQLDATA = 361\n SQLERROR = 362\n STANDALONE = 363\n START = 364\n STARTUP = 365\n STATEMENT = 366\n STATEMENT_ID = 367\n STATIC = 368\n STATISTICS = 369\n STRING = 370\n SUBMULTISET = 371\n SUBPARTITION = 372\n SUBSTITUTABLE = 373\n SUBTYPE = 374\n SUCCESS = 375\n SUSPEND = 376\n 
TABLE = 377\n THE = 378\n THEN = 379\n TIME = 380\n TIMESTAMP = 381\n TIMESTAMP_LTZ_UNCONSTRAINED = 382\n TIMESTAMP_TZ_UNCONSTRAINED = 383\n TIMESTAMP_UNCONSTRAINED = 384\n TIMEZONE_ABBR = 385\n TIMEZONE_HOUR = 386\n TIMEZONE_MINUTE = 387\n TIMEZONE_REGION = 388\n TO = 389\n TRAILING = 390\n TRANSACTION = 391\n TRANSLATE = 392\n TREAT = 393\n TRIGGER = 394\n TRIM = 395\n TRUE = 396\n TRUNCATE = 397\n TYPE = 398\n UNBOUNDED = 399\n UNDER = 400\n UNION = 401\n UNIQUE = 402\n UNLIMITED = 403\n UNPIVOT = 404\n UNTIL = 405\n UPDATE = 406\n UPDATED = 407\n UPSERT = 408\n UROWID = 409\n USE = 410\n USING = 411\n VALIDATE = 412\n VALUE = 413\n VALUES = 414\n VARCHAR = 415\n VARCHAR2 = 416\n VARIABLE = 417\n VARRAY = 418\n VARYING = 419\n VERSION = 420\n VERSIONS = 421\n WAIT = 422\n WARNING = 423\n WELLFORMED = 424\n WHEN = 425\n WHENEVER = 426\n WHERE = 427\n WHILE = 428\n WITH = 429\n WITHIN = 430\n WORK = 431\n WRITE = 432\n XML = 433\n XMLAGG = 434\n XMLATTRIBUTES = 435\n XMLCAST = 436\n XMLCOLATTVAL = 437\n XMLELEMENT = 438\n XMLEXISTS = 439\n XMLFOREST = 440\n XMLNAMESPACES = 441\n XMLPARSE = 442\n XMLPI = 443\n XMLQUERY = 444\n XMLROOT = 445\n XMLSERIALIZE = 446\n XMLTABLE = 447\n YEAR = 448\n YES = 449\n YMINTERVAL_UNCONSTRAINED = 450\n ZONE = 451\n PREDICTION = 452\n PREDICTION_BOUNDS = 453\n PREDICTION_COST = 454\n PREDICTION_DETAILS = 455\n PREDICTION_PROBABILITY = 456\n PREDICTION_SET = 457\n CUME_DIST = 458\n DENSE_RANK = 459\n LISTAGG = 460\n PERCENT_RANK = 461\n PERCENTILE_CONT = 462\n PERCENTILE_DISC = 463\n RANK = 464\n AVG = 465\n CORR = 466\n LAG = 467\n LEAD = 468\n MAX = 469\n MEDIAN = 470\n MIN = 471\n NTILE = 472\n RATIO_TO_REPORT = 473\n ROW_NUMBER = 474\n SUM = 475\n VARIANCE = 476\n REGR_ = 477\n STDDEV = 478\n VAR_ = 479\n COVAR_ = 480\n NATIONAL_CHAR_STRING_LIT = 481\n BIT_STRING_LIT = 482\n HEX_STRING_LIT = 483\n DOUBLE_PERIOD = 484\n PERIOD = 485\n UNSIGNED_INTEGER = 486\n APPROXIMATE_NUM_LIT = 487\n CHAR_STRING = 488\n DELIMITED_ID = 489\n 
PERCENT = 490\n AMPERSAND = 491\n LEFT_PAREN = 492\n RIGHT_PAREN = 493\n DOUBLE_ASTERISK = 494\n ASTERISK = 495\n PLUS_SIGN = 496\n MINUS_SIGN = 497\n COMMA = 498\n SOLIDUS = 499\n AT_SIGN = 500\n ASSIGN_OP = 501\n BINDVAR = 502\n COLON = 503\n SEMICOLON = 504\n LESS_THAN_OR_EQUALS_OP = 505\n LESS_THAN_OP = 506\n GREATER_THAN_OR_EQUALS_OP = 507\n NOT_EQUAL_OP = 508\n CARRET_OPERATOR_PART = 509\n TILDE_OPERATOR_PART = 510\n EXCLAMATION_OPERATOR_PART = 511\n GREATER_THAN_OP = 512\n CONCATENATION_OP = 513\n VERTICAL_BAR = 514\n EQUALS_OP = 515\n LEFT_BRACKET = 516\n RIGHT_BRACKET = 517\n INTRODUCER = 518\n SPACES = 519\n SINGLE_LINE_COMMENT = 520\n MULTI_LINE_COMMENT = 521\n PROMPT = 522\n REGULAR_ID = 523\n ZV = 524\n channelNames = [u'DEFAULT_TOKEN_CHANNEL', u'HIDDEN']\n modeNames = ['DEFAULT_MODE']\n literalNames = ['<INVALID>', \"'..'\", \"'.'\", \"'%'\", \"'&'\", \"'('\", \"')'\",\n \"'**'\", \"'*'\", \"'+'\", \"'-'\", \"','\", \"'/'\", \"'@'\", \"':='\", \"':'\",\n \"';'\", \"'<='\", \"'<'\", \"'>='\", \"'^'\", \"'~'\", \"'!'\", \"'>'\", \"'||'\",\n \"'|'\", \"'='\", \"'['\", \"']'\", \"'_'\", \"'@!'\"]\n symbolicNames = ['<INVALID>', 'A_LETTER', 'ADD', 'AFTER', 'AGENT',\n 'AGGREGATE', 'ALL', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS',\n 'ASSUME', 'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT',\n 'AUTHID', 'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH',\n 'BEFORE', 'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE',\n 'BINARY_FLOAT', 'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY',\n 'BOOLEAN', 'BOTH', 'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER',\n 'CACHE', 'CALL', 'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR',\n 'CHAR_CS', 'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER',\n 'COLLECT', 'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED',\n 'COMPATIBILITY', 'COMPILE', 'COMPOUND', 'CONNECT',\n 'CONNECT_BY_ROOT', 'CONSTANT', 'CONSTRAINT', 'CONSTRAINTS',\n 'CONSTRUCTOR', 'CONTENT', 'CONTEXT', 'CONTINUE', 'CONVERT',\n 'CORRUPT_XID', 'CORRUPT_XID_ALL', 
'COST', 'COUNT', 'CREATE',\n 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER', 'CURSOR', 'CUSTOMDATUM',\n 'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY', 'DB_ROLE_CHANGE',\n 'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL', 'DECLARE',\n 'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS', 'DEFERRED',\n 'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC', 'DIMENSION',\n 'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT', 'DOUBLE', 'DROP',\n 'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT', 'ELSE', 'ELSIF',\n 'EMPTY', 'ENABLE', 'ENCODING', 'END', 'ENTITYESCAPING', 'ERR',\n 'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT', 'EXCEPTION',\n 'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE', 'EXECUTE',\n 'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FAILURE',\n 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE', 'FLOAT',\n 'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM', 'FULL',\n 'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH', 'HAVING',\n 'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN', 'INCLUDE',\n 'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED', 'INDICATOR',\n 'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT', 'INSERT',\n 'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',\n 'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',\n 'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',\n 'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',\n 'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',\n 'LONG', 'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',\n 'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',\n 'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',\n 'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',\n 'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',\n 'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',\n 'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',\n 'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 
'ON',\n 'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',\n 'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',\n 'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',\n 'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',\n 'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',\n 'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',\n 'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',\n 'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',\n 'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',\n 'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',\n 'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',\n 'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',\n 'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',\n 'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',\n 'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',\n 'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',\n 'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',\n 'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',\n 'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 'STRING',\n 'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',\n 'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',\n 'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',\n 'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 'TIMEZONE_HOUR',\n 'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',\n 'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',\n 'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',\n 'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',\n 'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',\n 'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',\n 'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',\n 'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 
'XML', 'XMLAGG',\n 'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',\n 'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',\n 'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',\n 'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',\n 'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',\n 'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',\n 'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',\n 'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',\n 'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',\n 'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',\n 'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',\n 'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',\n 'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'DELIMITED_ID', 'PERCENT',\n 'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN', 'DOUBLE_ASTERISK',\n 'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA', 'SOLIDUS',\n 'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',\n 'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',\n 'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',\n 'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',\n 'GREATER_THAN_OP', 'CONCATENATION_OP', 'VERTICAL_BAR', 'EQUALS_OP',\n 'LEFT_BRACKET', 'RIGHT_BRACKET', 'INTRODUCER', 'SPACES',\n 'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'REGULAR_ID',\n 'ZV']\n ruleNames = ['T__0', 'A_LETTER', 'ADD', 'AFTER', 'AGENT', 'AGGREGATE',\n 'ALL', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS', 'ASSUME',\n 'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT', 'AUTHID',\n 'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH', 'BEFORE',\n 'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE', 'BINARY_FLOAT',\n 'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY', 'BOOLEAN', 'BOTH',\n 'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER', 'CACHE', 'CALL',\n 'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR', 'CHAR_CS',\n 'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER', 'COLLECT',\n 'COLUMNS', 
'COMMENT', 'COMMIT', 'COMMITTED', 'COMPATIBILITY',\n 'COMPILE', 'COMPOUND', 'CONNECT', 'CONNECT_BY_ROOT', 'CONSTANT',\n 'CONSTRAINT', 'CONSTRAINTS', 'CONSTRUCTOR', 'CONTENT', 'CONTEXT',\n 'CONTINUE', 'CONVERT', 'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST',\n 'COUNT', 'CREATE', 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER',\n 'CURSOR', 'CUSTOMDATUM', 'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY',\n 'DB_ROLE_CHANGE', 'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL',\n 'DECLARE', 'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS',\n 'DEFERRED', 'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC',\n 'DIMENSION', 'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT',\n 'DOUBLE', 'DROP', 'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT',\n 'ELSE', 'ELSIF', 'EMPTY', 'ENABLE', 'ENCODING', 'END',\n 'ENTITYESCAPING', 'ERR', 'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT',\n 'EXCEPTION', 'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE',\n 'EXECUTE', 'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT',\n 'FAILURE', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE',\n 'FLOAT', 'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM',\n 'FULL', 'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH',\n 'HAVING', 'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN',\n 'INCLUDE', 'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED',\n 'INDICATOR', 'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT',\n 'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',\n 'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',\n 'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',\n 'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',\n 'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',\n 'LONG', 'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',\n 'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',\n 'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',\n 'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',\n 'NESTED', 'NEW', 
'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',\n 'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',\n 'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',\n 'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',\n 'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',\n 'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',\n 'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',\n 'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',\n 'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',\n 'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',\n 'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',\n 'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',\n 'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',\n 'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',\n 'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',\n 'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',\n 'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',\n 'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',\n 'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',\n 'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',\n 'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',\n 'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',\n 'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 'STRING',\n 'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',\n 'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',\n 'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',\n 'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 'TIMEZONE_HOUR',\n 'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',\n 'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',\n 'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',\n 'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',\n 
'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',\n 'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',\n 'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',\n 'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',\n 'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',\n 'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',\n 'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',\n 'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',\n 'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',\n 'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',\n 'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',\n 'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',\n 'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',\n 'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',\n 'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',\n 'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',\n 'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'CHAR_STRING_PERL', 'QUOTE',\n 'QS_ANGLE', 'QS_BRACE', 'QS_BRACK', 'QS_PAREN', 'QS_OTHER_CH',\n 'DELIMITED_ID', 'PERCENT', 'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN',\n 'DOUBLE_ASTERISK', 'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA',\n 'SOLIDUS', 'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',\n 'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',\n 'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',\n 'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',\n 'GREATER_THAN_OP', 'QUESTION_MARK', 'CONCATENATION_OP',\n 'VERTICAL_BAR', 'EQUALS_OP', 'LEFT_BRACKET', 'RIGHT_BRACKET',\n 'INTRODUCER', 'SPACES', 'SIMPLE_LETTER',\n 'UNSIGNED_INTEGER_FRAGMENT', 'FLOAT_FRAGMENT',\n 'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'NEWLINE',\n 'SPACE', 'REGULAR_ID', 'ZV', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',\n 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U',\n 'V', 'W', 'X', 'Y', 'Z']\n grammarFileName = 'PlSql.g4'\n\n def 
__init__(self, input=None, output: TextIO=sys.stdout):\n super().__init__(input, output)\n self.checkVersion('4.7.2')\n self._interp = LexerATNSimulator(self, self.atn, self.\n decisionsToDFA, PredictionContextCache())\n self._actions = None\n self._predicates = None\n",
"step-3": "<mask token>\n\n\ndef serializedATN():\n with StringIO() as buf:\n buf.write('\\x03悋Ꜫ脳맭䅼㯧瞆奤\\x02Ȏ')\n buf.write(\n 'ᓗ\\x08\\x01\\x04\\x02\\t\\x02\\x04\\x03\\t\\x03\\x04\\x04\\t\\x04\\x04\\x05\\t\\x05\\x04\\x06\\t\\x06\\x04\\x07'\n )\n buf.write(\n '\\t\\x07\\x04\\x08\\t\\x08\\x04\\t\\t\\t\\x04\\n\\t\\n\\x04\\x0b\\t\\x0b\\x04\\x0c\\t\\x0c\\x04\\r\\t\\r'\n )\n buf.write(\n '\\x04\\x0e\\t\\x0e\\x04\\x0f\\t\\x0f\\x04\\x10\\t\\x10\\x04\\x11\\t\\x11\\x04\\x12\\t\\x12\\x04\\x13'\n )\n buf.write(\n '\\t\\x13\\x04\\x14\\t\\x14\\x04\\x15\\t\\x15\\x04\\x16\\t\\x16\\x04\\x17\\t\\x17\\x04\\x18\\t\\x18'\n )\n buf.write(\n '\\x04\\x19\\t\\x19\\x04\\x1a\\t\\x1a\\x04\\x1b\\t\\x1b\\x04\\x1c\\t\\x1c\\x04\\x1d\\t\\x1d\\x04\\x1e'\n )\n buf.write(\n '\\t\\x1e\\x04\\x1f\\t\\x1f\\x04 \\t \\x04!\\t!\\x04\"\\t\"\\x04#\\t#\\x04$\\t$\\x04%\\t%'\n )\n buf.write(\n \"\\x04&\\t&\\x04'\\t'\\x04(\\t(\\x04)\\t)\\x04*\\t*\\x04+\\t+\\x04,\\t,\\x04-\\t-\\x04.\"\n )\n buf.write('\\t.\\x04/\\t/\\x040\\t0\\x041\\t1\\x042\\t2\\x043\\t3\\x044')\n buf.write('\\t4\\x045\\t5\\x046\\t6\\x047\\t7\\x048\\t8\\x049\\t9\\x04:\\t:')\n buf.write(\n '\\x04;\\t;\\x04<\\t<\\x04=\\t=\\x04>\\t>\\x04?\\t?\\x04@\\t@\\x04A\\tA\\x04B\\tB\\x04C\\t'\n )\n buf.write(\n 'C\\x04D\\tD\\x04E\\tE\\x04F\\tF\\x04G\\tG\\x04H\\tH\\x04I\\tI\\x04J\\tJ\\x04K\\tK\\x04L\\t'\n )\n buf.write(\n 'L\\x04M\\tM\\x04N\\tN\\x04O\\tO\\x04P\\tP\\x04Q\\tQ\\x04R\\tR\\x04S\\tS\\x04T\\tT\\x04U\\t'\n )\n buf.write(\n 'U\\x04V\\tV\\x04W\\tW\\x04X\\tX\\x04Y\\tY\\x04Z\\tZ\\x04[\\t[\\x04\\\\\\t\\\\\\x04]\\t]\\x04'\n )\n buf.write(\n '^\\t^\\x04_\\t_\\x04`\\t`\\x04a\\ta\\x04b\\tb\\x04c\\tc\\x04d\\td\\x04e\\te\\x04f\\tf\\x04'\n )\n buf.write(\n 'g\\tg\\x04h\\th\\x04i\\ti\\x04j\\tj\\x04k\\tk\\x04l\\tl\\x04m\\tm\\x04n\\tn\\x04o\\to\\x04'\n )\n buf.write(\n 'p\\tp\\x04q\\tq\\x04r\\tr\\x04s\\ts\\x04t\\tt\\x04u\\tu\\x04v\\tv\\x04w\\tw\\x04x\\tx\\x04'\n )\n buf.write(\n 'y\\ty\\x04z\\tz\\x04{\\t{\\x04|\\t|\\x04}\\t}\\x04~\\t~\\x04\\x7f\\t\\x7f\\x04\\x80'\n 
)\n buf.write('\\t\\x80\\x04\\x81\\t\\x81\\x04\\x82\\t\\x82\\x04\\x83\\t\\x83')\n buf.write('\\x04\\x84\\t\\x84\\x04\\x85\\t\\x85\\x04\\x86\\t\\x86\\x04\\x87')\n buf.write('\\t\\x87\\x04\\x88\\t\\x88\\x04\\x89\\t\\x89\\x04\\x8a\\t\\x8a')\n buf.write('\\x04\\x8b\\t\\x8b\\x04\\x8c\\t\\x8c\\x04\\x8d\\t\\x8d\\x04\\x8e')\n buf.write('\\t\\x8e\\x04\\x8f\\t\\x8f\\x04\\x90\\t\\x90\\x04\\x91\\t\\x91')\n buf.write('\\x04\\x92\\t\\x92\\x04\\x93\\t\\x93\\x04\\x94\\t\\x94\\x04\\x95')\n buf.write('\\t\\x95\\x04\\x96\\t\\x96\\x04\\x97\\t\\x97\\x04\\x98\\t\\x98')\n buf.write('\\x04\\x99\\t\\x99\\x04\\x9a\\t\\x9a\\x04\\x9b\\t\\x9b\\x04\\x9c')\n buf.write('\\t\\x9c\\x04\\x9d\\t\\x9d\\x04\\x9e\\t\\x9e\\x04\\x9f\\t\\x9f')\n buf.write('\\x04\\xa0\\t\\xa0\\x04¡\\t¡\\x04¢\\t¢\\x04£')\n buf.write('\\t£\\x04¤\\t¤\\x04¥\\t¥\\x04¦\\t¦')\n buf.write('\\x04§\\t§\\x04¨\\t¨\\x04©\\t©\\x04ª')\n buf.write('\\tª\\x04«\\t«\\x04¬\\t¬\\x04\\xad\\t\\xad')\n buf.write('\\x04®\\t®\\x04¯\\t¯\\x04°\\t°\\x04±')\n buf.write('\\t±\\x04²\\t²\\x04³\\t³\\x04´\\t´')\n buf.write('\\x04µ\\tµ\\x04¶\\t¶\\x04·\\t·\\x04¸')\n buf.write('\\t¸\\x04¹\\t¹\\x04º\\tº\\x04»\\t»')\n buf.write('\\x04¼\\t¼\\x04½\\t½\\x04¾\\t¾\\x04¿')\n buf.write('\\t¿\\x04À\\tÀ\\x04Á\\tÁ\\x04Â\\tÂ')\n buf.write('\\x04Ã\\tÃ\\x04Ä\\tÄ\\x04Å\\tÅ\\x04Æ')\n buf.write('\\tÆ\\x04Ç\\tÇ\\x04È\\tÈ\\x04É\\tÉ')\n buf.write('\\x04Ê\\tÊ\\x04Ë\\tË\\x04Ì\\tÌ\\x04Í')\n buf.write('\\tÍ\\x04Î\\tÎ\\x04Ï\\tÏ\\x04Ð\\tÐ')\n buf.write('\\x04Ñ\\tÑ\\x04Ò\\tÒ\\x04Ó\\tÓ\\x04Ô')\n buf.write('\\tÔ\\x04Õ\\tÕ\\x04Ö\\tÖ\\x04×\\t×')\n buf.write('\\x04Ø\\tØ\\x04Ù\\tÙ\\x04Ú\\tÚ\\x04Û')\n buf.write('\\tÛ\\x04Ü\\tÜ\\x04Ý\\tÝ\\x04Þ\\tÞ')\n buf.write('\\x04ß\\tß\\x04à\\tà\\x04á\\tá\\x04â')\n buf.write('\\tâ\\x04ã\\tã\\x04ä\\tä\\x04å\\tå')\n buf.write('\\x04æ\\tæ\\x04ç\\tç\\x04è\\tè\\x04é')\n buf.write('\\té\\x04ê\\tê\\x04ë\\të\\x04ì\\tì')\n buf.write('\\x04í\\tí\\x04î\\tî\\x04ï\\tï\\x04ð')\n buf.write('\\tð\\x04ñ\\tñ\\x04ò\\tò\\x04ó\\tó')\n 
buf.write('\\x04ô\\tô\\x04õ\\tõ\\x04ö\\tö\\x04÷')\n buf.write('\\t÷\\x04ø\\tø\\x04ù\\tù\\x04ú\\tú')\n buf.write('\\x04û\\tû\\x04ü\\tü\\x04ý\\tý\\x04þ')\n buf.write('\\tþ\\x04ÿ\\tÿ\\x04Ā\\tĀ\\x04ā\\tā')\n buf.write('\\x04Ă\\tĂ\\x04ă\\tă\\x04Ą\\tĄ\\x04ą')\n buf.write('\\tą\\x04Ć\\tĆ\\x04ć\\tć\\x04Ĉ\\tĈ')\n buf.write('\\x04ĉ\\tĉ\\x04Ċ\\tĊ\\x04ċ\\tċ\\x04Č')\n buf.write('\\tČ\\x04č\\tč\\x04Ď\\tĎ\\x04ď\\tď')\n buf.write('\\x04Đ\\tĐ\\x04đ\\tđ\\x04Ē\\tĒ\\x04ē')\n buf.write('\\tē\\x04Ĕ\\tĔ\\x04ĕ\\tĕ\\x04Ė\\tĖ')\n buf.write('\\x04ė\\tė\\x04Ę\\tĘ\\x04ę\\tę\\x04Ě')\n buf.write('\\tĚ\\x04ě\\tě\\x04Ĝ\\tĜ\\x04ĝ\\tĝ')\n buf.write('\\x04Ğ\\tĞ\\x04ğ\\tğ\\x04Ġ\\tĠ\\x04ġ')\n buf.write('\\tġ\\x04Ģ\\tĢ\\x04ģ\\tģ\\x04Ĥ\\tĤ')\n buf.write('\\x04ĥ\\tĥ\\x04Ħ\\tĦ\\x04ħ\\tħ\\x04Ĩ')\n buf.write('\\tĨ\\x04ĩ\\tĩ\\x04Ī\\tĪ\\x04ī\\tī')\n buf.write('\\x04Ĭ\\tĬ\\x04ĭ\\tĭ\\x04Į\\tĮ\\x04į')\n buf.write('\\tį\\x04İ\\tİ\\x04ı\\tı\\x04IJ\\tIJ')\n buf.write('\\x04ij\\tij\\x04Ĵ\\tĴ\\x04ĵ\\tĵ\\x04Ķ')\n buf.write('\\tĶ\\x04ķ\\tķ\\x04ĸ\\tĸ\\x04Ĺ\\tĹ')\n buf.write('\\x04ĺ\\tĺ\\x04Ļ\\tĻ\\x04ļ\\tļ\\x04Ľ')\n buf.write('\\tĽ\\x04ľ\\tľ\\x04Ŀ\\tĿ\\x04ŀ\\tŀ')\n buf.write('\\x04Ł\\tŁ\\x04ł\\tł\\x04Ń\\tŃ\\x04ń')\n buf.write('\\tń\\x04Ņ\\tŅ\\x04ņ\\tņ\\x04Ň\\tŇ')\n buf.write('\\x04ň\\tň\\x04ʼn\\tʼn\\x04Ŋ\\tŊ\\x04ŋ')\n buf.write('\\tŋ\\x04Ō\\tŌ\\x04ō\\tō\\x04Ŏ\\tŎ')\n buf.write('\\x04ŏ\\tŏ\\x04Ő\\tŐ\\x04ő\\tő\\x04Œ')\n buf.write('\\tŒ\\x04œ\\tœ\\x04Ŕ\\tŔ\\x04ŕ\\tŕ')\n buf.write('\\x04Ŗ\\tŖ\\x04ŗ\\tŗ\\x04Ř\\tŘ\\x04ř')\n buf.write('\\tř\\x04Ś\\tŚ\\x04ś\\tś\\x04Ŝ\\tŜ')\n buf.write('\\x04ŝ\\tŝ\\x04Ş\\tŞ\\x04ş\\tş\\x04Š')\n buf.write('\\tŠ\\x04š\\tš\\x04Ţ\\tŢ\\x04ţ\\tţ')\n buf.write('\\x04Ť\\tŤ\\x04ť\\tť\\x04Ŧ\\tŦ\\x04ŧ')\n buf.write('\\tŧ\\x04Ũ\\tŨ\\x04ũ\\tũ\\x04Ū\\tŪ')\n buf.write('\\x04ū\\tū\\x04Ŭ\\tŬ\\x04ŭ\\tŭ\\x04Ů')\n buf.write('\\tŮ\\x04ů\\tů\\x04Ű\\tŰ\\x04ű\\tű')\n buf.write('\\x04Ų\\tŲ\\x04ų\\tų\\x04Ŵ\\tŴ\\x04ŵ')\n buf.write('\\tŵ\\x04Ŷ\\tŶ\\x04ŷ\\tŷ\\x04Ÿ\\tŸ')\n buf.write('\\x04Ź\\tŹ\\x04ź\\tź\\x04Ż\\tŻ\\x04ż')\n 
buf.write('\\tż\\x04Ž\\tŽ\\x04ž\\tž\\x04ſ\\tſ')\n buf.write('\\x04ƀ\\tƀ\\x04Ɓ\\tƁ\\x04Ƃ\\tƂ\\x04ƃ')\n buf.write('\\tƃ\\x04Ƅ\\tƄ\\x04ƅ\\tƅ\\x04Ɔ\\tƆ')\n buf.write('\\x04Ƈ\\tƇ\\x04ƈ\\tƈ\\x04Ɖ\\tƉ\\x04Ɗ')\n buf.write('\\tƊ\\x04Ƌ\\tƋ\\x04ƌ\\tƌ\\x04ƍ\\tƍ')\n buf.write('\\x04Ǝ\\tƎ\\x04Ə\\tƏ\\x04Ɛ\\tƐ\\x04Ƒ')\n buf.write('\\tƑ\\x04ƒ\\tƒ\\x04Ɠ\\tƓ\\x04Ɣ\\tƔ')\n buf.write('\\x04ƕ\\tƕ\\x04Ɩ\\tƖ\\x04Ɨ\\tƗ\\x04Ƙ')\n buf.write('\\tƘ\\x04ƙ\\tƙ\\x04ƚ\\tƚ\\x04ƛ\\tƛ')\n buf.write('\\x04Ɯ\\tƜ\\x04Ɲ\\tƝ\\x04ƞ\\tƞ\\x04Ɵ')\n buf.write('\\tƟ\\x04Ơ\\tƠ\\x04ơ\\tơ\\x04Ƣ\\tƢ')\n buf.write('\\x04ƣ\\tƣ\\x04Ƥ\\tƤ\\x04ƥ\\tƥ\\x04Ʀ')\n buf.write('\\tƦ\\x04Ƨ\\tƧ\\x04ƨ\\tƨ\\x04Ʃ\\tƩ')\n buf.write('\\x04ƪ\\tƪ\\x04ƫ\\tƫ\\x04Ƭ\\tƬ\\x04ƭ')\n buf.write('\\tƭ\\x04Ʈ\\tƮ\\x04Ư\\tƯ\\x04ư\\tư')\n buf.write('\\x04Ʊ\\tƱ\\x04Ʋ\\tƲ\\x04Ƴ\\tƳ\\x04ƴ')\n buf.write('\\tƴ\\x04Ƶ\\tƵ\\x04ƶ\\tƶ\\x04Ʒ\\tƷ')\n buf.write('\\x04Ƹ\\tƸ\\x04ƹ\\tƹ\\x04ƺ\\tƺ\\x04ƻ')\n buf.write('\\tƻ\\x04Ƽ\\tƼ\\x04ƽ\\tƽ\\x04ƾ\\tƾ')\n buf.write('\\x04ƿ\\tƿ\\x04ǀ\\tǀ\\x04ǁ\\tǁ\\x04ǂ')\n buf.write('\\tǂ\\x04ǃ\\tǃ\\x04DŽ\\tDŽ\\x04Dž\\tDž')\n buf.write('\\x04dž\\tdž\\x04LJ\\tLJ\\x04Lj\\tLj\\x04lj')\n buf.write('\\tlj\\x04NJ\\tNJ\\x04Nj\\tNj\\x04nj\\tnj')\n buf.write('\\x04Ǎ\\tǍ\\x04ǎ\\tǎ\\x04Ǐ\\tǏ\\x04ǐ')\n buf.write('\\tǐ\\x04Ǒ\\tǑ\\x04ǒ\\tǒ\\x04Ǔ\\tǓ')\n buf.write('\\x04ǔ\\tǔ\\x04Ǖ\\tǕ\\x04ǖ\\tǖ\\x04Ǘ')\n buf.write('\\tǗ\\x04ǘ\\tǘ\\x04Ǚ\\tǙ\\x04ǚ\\tǚ')\n buf.write('\\x04Ǜ\\tǛ\\x04ǜ\\tǜ\\x04ǝ\\tǝ\\x04Ǟ')\n buf.write('\\tǞ\\x04ǟ\\tǟ\\x04Ǡ\\tǠ\\x04ǡ\\tǡ')\n buf.write('\\x04Ǣ\\tǢ\\x04ǣ\\tǣ\\x04Ǥ\\tǤ\\x04ǥ')\n buf.write('\\tǥ\\x04Ǧ\\tǦ\\x04ǧ\\tǧ\\x04Ǩ\\tǨ')\n buf.write('\\x04ǩ\\tǩ\\x04Ǫ\\tǪ\\x04ǫ\\tǫ\\x04Ǭ')\n buf.write('\\tǬ\\x04ǭ\\tǭ\\x04Ǯ\\tǮ\\x04ǯ\\tǯ')\n buf.write('\\x04ǰ\\tǰ\\x04DZ\\tDZ\\x04Dz\\tDz\\x04dz')\n buf.write('\\tdz\\x04Ǵ\\tǴ\\x04ǵ\\tǵ\\x04Ƕ\\tǶ')\n buf.write('\\x04Ƿ\\tǷ\\x04Ǹ\\tǸ\\x04ǹ\\tǹ\\x04Ǻ')\n buf.write('\\tǺ\\x04ǻ\\tǻ\\x04Ǽ\\tǼ\\x04ǽ\\tǽ')\n buf.write('\\x04Ǿ\\tǾ\\x04ǿ\\tǿ\\x04Ȁ\\tȀ\\x04ȁ')\n 
buf.write('\\tȁ\\x04Ȃ\\tȂ\\x04ȃ\\tȃ\\x04Ȅ\\tȄ')\n buf.write('\\x04ȅ\\tȅ\\x04Ȇ\\tȆ\\x04ȇ\\tȇ\\x04Ȉ')\n buf.write('\\tȈ\\x04ȉ\\tȉ\\x04Ȋ\\tȊ\\x04ȋ\\tȋ')\n buf.write('\\x04Ȍ\\tȌ\\x04ȍ\\tȍ\\x04Ȏ\\tȎ\\x04ȏ')\n buf.write('\\tȏ\\x04Ȑ\\tȐ\\x04ȑ\\tȑ\\x04Ȓ\\tȒ')\n buf.write('\\x04ȓ\\tȓ\\x04Ȕ\\tȔ\\x04ȕ\\tȕ\\x04Ȗ')\n buf.write('\\tȖ\\x04ȗ\\tȗ\\x04Ș\\tȘ\\x04ș\\tș')\n buf.write('\\x04Ț\\tȚ\\x04ț\\tț\\x04Ȝ\\tȜ\\x04ȝ')\n buf.write('\\tȝ\\x04Ȟ\\tȞ\\x04ȟ\\tȟ\\x04Ƞ\\tȠ')\n buf.write('\\x04ȡ\\tȡ\\x04Ȣ\\tȢ\\x04ȣ\\tȣ\\x04Ȥ')\n buf.write('\\tȤ\\x04ȥ\\tȥ\\x04Ȧ\\tȦ\\x04ȧ\\tȧ')\n buf.write('\\x04Ȩ\\tȨ\\x04ȩ\\tȩ\\x04Ȫ\\tȪ\\x04ȫ')\n buf.write('\\tȫ\\x04Ȭ\\tȬ\\x04ȭ\\tȭ\\x04Ȯ\\tȮ')\n buf.write('\\x04ȯ\\tȯ\\x04Ȱ\\tȰ\\x04ȱ\\tȱ\\x04Ȳ')\n buf.write('\\tȲ\\x04ȳ\\tȳ\\x04ȴ\\tȴ\\x03\\x02\\x03\\x02\\x03\\x02\\x03')\n buf.write(\n '\\x03\\x03\\x03\\x03\\x04\\x03\\x04\\x03\\x04\\x03\\x04\\x03\\x05\\x03\\x05\\x03\\x05\\x03\\x05\\x03\\x05\\x03\\x05\\x03\\x06\\x03\\x06'\n )\n buf.write(\n '\\x03\\x06\\x03\\x06\\x03\\x06\\x03\\x06\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03'\n )\n buf.write(\"\"\"\u0007\u0003\b\u0003\b\u0003\b\u0003\b\u0003\t\u0003\t\u0003\t\u0003\t\u0003\t\u0003\t\u0003\n\u0003\n\u0003\n\"\"\")\n buf.write(\"\"\"\u0003\n\u0003\n\u0003\n\u0003\n\u0003\n\u0003\u000b\u0003\u000b\u0003\u000b\u0003\u000b\u0003\f\u0003\f\u0003\f\u0003\"\"\")\n buf.write(\n '\\x0c\\x03\\r\\x03\\r\\x03\\r\\x03\\r\\x03\\r\\x03\\r\\x03\\x0e\\x03\\x0e\\x03\\x0e\\x03\\x0f\\x03\\x0f\\x03'\n )\n buf.write(\n '\\x0f\\x03\\x0f\\x03\\x0f\\x03\\x0f\\x03\\x0f\\x03\\x10\\x03\\x10\\x03\\x10\\x03\\x10\\x03\\x10\\x03\\x10'\n )\n buf.write(\n '\\x03\\x10\\x03\\x11\\x03\\x11\\x03\\x11\\x03\\x11\\x03\\x12\\x03\\x12\\x03\\x12\\x03\\x12\\x03\\x12\\x03\\x12'\n )\n buf.write(\n '\\x03\\x12\\x03\\x12\\x03\\x12\\x03\\x12\\x03\\x13\\x03\\x13\\x03\\x13\\x03\\x14\\x03\\x14\\x03\\x14\\x03\\x14'\n )\n buf.write(\n 
'\\x03\\x14\\x03\\x14\\x03\\x14\\x03\\x14\\x03\\x14\\x03\\x14\\x03\\x15\\x03\\x15\\x03\\x15\\x03\\x15\\x03\\x15'\n )\n buf.write(\n '\\x03\\x15\\x03\\x16\\x03\\x16\\x03\\x16\\x03\\x16\\x03\\x16\\x03\\x16\\x03\\x16\\x03\\x17\\x03\\x17\\x03\\x17'\n )\n buf.write(\n '\\x03\\x17\\x03\\x17\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18'\n )\n buf.write(\n '\\x03\\x18\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19'\n )\n buf.write(\n '\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19'\n )\n buf.write(\n '\\x03\\x19\\x03\\x19\\x03\\x1a\\x03\\x1a\\x03\\x1a\\x03\\x1a\\x03\\x1a\\x03\\x1a\\x03\\x1b\\x03\\x1b\\x03\\x1b'\n )\n buf.write(\n '\\x03\\x1b\\x03\\x1b\\x03\\x1b\\x03\\x1b\\x03\\x1c\\x03\\x1c\\x03\\x1c\\x03\\x1c\\x03\\x1c\\x03\\x1c\\x03\\x1d'\n )\n buf.write(\n '\\x03\\x1d\\x03\\x1d\\x03\\x1d\\x03\\x1d\\x03\\x1d\\x03\\x1d\\x03\\x1d\\x03\\x1e\\x03\\x1e\\x03\\x1e\\x03\\x1e'\n )\n buf.write(\n '\\x03\\x1e\\x03\\x1e\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f'\n )\n buf.write(\n '\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03 \\x03 \\x03 \\x03 \\x03 \\x03 \\x03 \\x03 \\x03 \\x03'\n )\n buf.write(\n ' \\x03 \\x03 \\x03 \\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03'\n )\n buf.write(\n '!\\x03\"\\x03\"\\x03\"\\x03\"\\x03\"\\x03#\\x03#\\x03#\\x03#\\x03#\\x03#\\x03$\\x03$\\x03$\\x03$\\x03'\n )\n buf.write(\n \"$\\x03%\\x03%\\x03%\\x03%\\x03%\\x03%\\x03%\\x03%\\x03&\\x03&\\x03&\\x03&\\x03&\\x03'\\x03'\\x03'\\x03\"\n )\n buf.write(\n \"'\\x03'\\x03'\\x03'\\x03'\\x03(\\x03(\\x03(\\x03(\\x03(\\x03)\\x03)\\x03)\\x03*\\x03*\\x03*\\x03\"\n )\n buf.write(\n '*\\x03*\\x03+\\x03+\\x03,\\x03,\\x03,\\x03,\\x03,\\x03,\\x03-\\x03-\\x03-\\x03-\\x03-\\x03.\\x03.\\x03.\\x03'\n )\n buf.write(\n 
'.\\x03.\\x03.\\x03.\\x03.\\x03.\\x03.\\x03/\\x03/\\x03/\\x03/\\x03/\\x03/\\x03/\\x03/\\x030\\x030'\n )\n buf.write('\\x030\\x030\\x030\\x031\\x031\\x031\\x031\\x031\\x032\\x032\\x032')\n buf.write('\\x032\\x032\\x033\\x033\\x033\\x033\\x033\\x033\\x033\\x033\\x034')\n buf.write('\\x034\\x034\\x034\\x034\\x034\\x034\\x034\\x034\\x034\\x035\\x035')\n buf.write('\\x035\\x035\\x035\\x035\\x036\\x036\\x036\\x036\\x037\\x037\\x037')\n buf.write(\n '\\x037\\x037\\x038\\x038\\x038\\x038\\x038\\x038\\x039\\x039\\x039\\x039\\x039\\x039\\x039\\x039\\x03'\n )\n buf.write(\n ':\\x03:\\x03:\\x03:\\x03:\\x03:\\x03:\\x03:\\x03;\\x03;\\x03;\\x03;\\x03;\\x03;\\x03;\\x03;\\x03<\\x03<\\x03'\n )\n buf.write(\n '<\\x03<\\x03<\\x03<\\x03<\\x03<\\x03=\\x03=\\x03=\\x03=\\x03=\\x03=\\x03=\\x03>\\x03>\\x03>\\x03>\\x03>\\x03'\n )\n buf.write(\n '>\\x03>\\x03>\\x03>\\x03>\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03'\n )\n buf.write(\n '?\\x03@\\x03@\\x03@\\x03@\\x03@\\x03@\\x03@\\x03@\\x03A\\x03A\\x03A\\x03A\\x03A\\x03A\\x03A\\x03A\\x03A\\x03'\n )\n buf.write(\n 'B\\x03B\\x03B\\x03B\\x03B\\x03B\\x03B\\x03B\\x03C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03'\n )\n buf.write(\n 'C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03D\\x03D\\x03D\\x03D\\x03D\\x03D\\x03D\\x03D\\x03D\\x03E\\x03E\\x03E\\x03'\n )\n buf.write(\n 'E\\x03E\\x03E\\x03E\\x03E\\x03E\\x03E\\x03E\\x03F\\x03F\\x03F\\x03F\\x03F\\x03F\\x03F\\x03F\\x03F\\x03F\\x03'\n )\n buf.write(\n 'F\\x03F\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03H\\x03H\\x03H\\x03H\\x03'\n )\n buf.write(\n 'H\\x03H\\x03H\\x03H\\x03I\\x03I\\x03I\\x03I\\x03I\\x03I\\x03I\\x03I\\x03J\\x03J\\x03J\\x03J\\x03J\\x03J\\x03'\n )\n buf.write(\n 'J\\x03J\\x03J\\x03K\\x03K\\x03K\\x03K\\x03K\\x03K\\x03K\\x03K\\x03L\\x03L\\x03L\\x03L\\x03L\\x03L\\x03L\\x03'\n )\n buf.write(\n 'L\\x03L\\x03L\\x03L\\x03L\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03'\n )\n buf.write(\n 
'M\\x03M\\x03M\\x03N\\x03N\\x03N\\x03N\\x03N\\x03O\\x03O\\x03O\\x03O\\x03O\\x03O\\x03P\\x03P\\x03P\\x03P\\x03'\n )\n buf.write(\n 'P\\x03P\\x03P\\x03Q\\x03Q\\x03Q\\x03Q\\x03Q\\x03Q\\x03R\\x03R\\x03R\\x03R\\x03R\\x03S\\x03S\\x03S\\x03S\\x03'\n )\n buf.write(\n 'S\\x03S\\x03S\\x03S\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03U\\x03'\n )\n buf.write(\n 'U\\x03U\\x03U\\x03U\\x03U\\x03U\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03'\n )\n buf.write(\n 'W\\x03W\\x03W\\x03W\\x03W\\x03W\\x03X\\x03X\\x03X\\x03X\\x03X\\x03Y\\x03Y\\x03Y\\x03Y\\x03Y\\x03Y\\x03Y\\x03'\n )\n buf.write(\n 'Y\\x03Y\\x03Z\\x03Z\\x03Z\\x03Z\\x03Z\\x03[\\x03[\\x03[\\x03[\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03'\n )\n buf.write(\n '\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03]\\x03]\\x03]\\x03]\\x03]'\n )\n buf.write(\n '\\x03]\\x03]\\x03]\\x03]\\x03]\\x03]\\x03^\\x03^\\x03^\\x03^\\x03_\\x03_\\x03_\\x03_\\x03_\\x03_\\x03`\\x03'\n )\n buf.write(\n '`\\x03`\\x03`\\x03a\\x03a\\x03a\\x03a\\x03a\\x03a\\x03a\\x03a\\x03b\\x03b\\x03b\\x03b\\x03b\\x03b\\x03b\\x03'\n )\n buf.write(\n 'b\\x03c\\x03c\\x03c\\x03c\\x03c\\x03c\\x03c\\x03c\\x03c\\x03c\\x03d\\x03d\\x03d\\x03d\\x03d\\x03d\\x03d\\x03'\n )\n buf.write(\n 'd\\x03d\\x03d\\x03e\\x03e\\x03e\\x03e\\x03e\\x03e\\x03e\\x03e\\x03f\\x03f\\x03f\\x03f\\x03f\\x03f\\x03f\\x03'\n )\n buf.write(\n 'f\\x03f\\x03g\\x03g\\x03g\\x03g\\x03g\\x03g\\x03g\\x03g\\x03g\\x03h\\x03h\\x03h\\x03h\\x03h\\x03h\\x03h\\x03'\n )\n buf.write(\n 'h\\x03i\\x03i\\x03i\\x03i\\x03i\\x03i\\x03i\\x03j\\x03j\\x03j\\x03j\\x03j\\x03j\\x03k\\x03k\\x03k\\x03k\\x03'\n )\n buf.write(\n 'k\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03m\\x03m\\x03m\\x03'\n )\n buf.write(\n 'm\\x03m\\x03m\\x03m\\x03m\\x03m\\x03m\\x03n\\x03n\\x03n\\x03n\\x03n\\x03n\\x03n\\x03n\\x03o\\x03o\\x03o\\x03'\n )\n buf.write(\n 
'o\\x03o\\x03o\\x03o\\x03o\\x03o\\x03o\\x03o\\x03o\\x03o\\x03p\\x03p\\x03p\\x03p\\x03p\\x03p\\x03p\\x03p\\x03'\n )\n buf.write(\n 'p\\x03q\\x03q\\x03q\\x03q\\x03q\\x03q\\x03q\\x03q\\x03q\\x03r\\x03r\\x03r\\x03r\\x03r\\x03r\\x03r\\x03s\\x03'\n )\n buf.write(\n 's\\x03s\\x03s\\x03s\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03'\n )\n buf.write(\n 't\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03u\\x03u\\x03u\\x03u\\x03u\\x03v\\x03v\\x03'\n )\n buf.write(\n 'v\\x03v\\x03v\\x03v\\x03v\\x03v\\x03w\\x03w\\x03w\\x03w\\x03w\\x03x\\x03x\\x03x\\x03x\\x03x\\x03x\\x03y\\x03'\n )\n buf.write(\n 'y\\x03y\\x03y\\x03y\\x03y\\x03z\\x03z\\x03z\\x03z\\x03z\\x03z\\x03z\\x03{\\x03{\\x03{\\x03{\\x03{\\x03{\\x03'\n )\n buf.write(\n '{\\x03{\\x03{\\x03|\\x03|\\x03|\\x03|\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03'\n )\n buf.write(\n '}\\x03}\\x03}\\x03}\\x03~\\x03~\\x03~\\x03~\\x03\\x7f\\x03\\x7f\\x03\\x7f\\x03\\x7f\\x03\\x7f\\x03'\n )\n buf.write(\n '\\x7f\\x03\\x7f\\x03\\x80\\x03\\x80\\x03\\x80\\x03\\x80\\x03\\x80\\x03\\x80')\n buf.write('\\x03\\x80\\x03\\x81\\x03\\x81\\x03\\x81\\x03\\x81\\x03\\x81\\x03\\x81')\n buf.write('\\x03\\x81\\x03\\x81\\x03\\x81\\x03\\x82\\x03\\x82\\x03\\x82\\x03\\x82')\n buf.write('\\x03\\x82\\x03\\x82\\x03\\x82\\x03\\x83\\x03\\x83\\x03\\x83\\x03\\x83')\n buf.write('\\x03\\x83\\x03\\x83\\x03\\x83\\x03\\x83\\x03\\x83\\x03\\x83\\x03\\x84')\n buf.write('\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84')\n buf.write('\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84')\n buf.write('\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x85')\n buf.write('\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x86\\x03\\x86\\x03\\x86')\n buf.write('\\x03\\x86\\x03\\x86\\x03\\x86\\x03\\x86\\x03\\x86\\x03\\x87\\x03\\x87')\n buf.write('\\x03\\x87\\x03\\x87\\x03\\x87\\x03\\x87\\x03\\x87\\x03\\x87\\x03\\x87')\n 
buf.write('\\x03\\x87\\x03\\x88\\x03\\x88\\x03\\x88\\x03\\x88\\x03\\x88\\x03\\x88')\n buf.write('\\x03\\x88\\x03\\x88\\x03\\x89\\x03\\x89\\x03\\x89\\x03\\x89\\x03\\x89')\n buf.write('\\x03\\x89\\x03\\x89\\x03\\x8a\\x03\\x8a\\x03\\x8a\\x03\\x8a\\x03\\x8a')\n buf.write('\\x03\\x8b\\x03\\x8b\\x03\\x8b\\x03\\x8b\\x03\\x8b\\x03\\x8b\\x03\\x8b')\n buf.write('\\x03\\x8b\\x03\\x8c\\x03\\x8c\\x03\\x8c\\x03\\x8c\\x03\\x8c\\x03\\x8c')\n buf.write('\\x03\\x8c\\x03\\x8c\\x03\\x8c\\x03\\x8d\\x03\\x8d\\x03\\x8d\\x03\\x8d')\n buf.write('\\x03\\x8d\\x03\\x8d\\x03\\x8d\\x03\\x8d\\x03\\x8e\\x03\\x8e\\x03\\x8e')\n buf.write('\\x03\\x8e\\x03\\x8e\\x03\\x8e\\x03\\x8e\\x03\\x8e\\x03\\x8f\\x03\\x8f')\n buf.write('\\x03\\x8f\\x03\\x8f\\x03\\x8f\\x03\\x8f\\x03\\x90\\x03\\x90\\x03\\x90')\n buf.write('\\x03\\x90\\x03\\x90\\x03\\x90\\x03\\x91\\x03\\x91\\x03\\x91\\x03\\x91')\n buf.write('\\x03\\x91\\x03\\x91\\x03\\x92\\x03\\x92\\x03\\x92\\x03\\x92\\x03\\x92')\n buf.write('\\x03\\x92\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93')\n buf.write('\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x94')\n buf.write('\\x03\\x94\\x03\\x94\\x03\\x94\\x03\\x94\\x03\\x94\\x03\\x95\\x03\\x95')\n buf.write('\\x03\\x95\\x03\\x95\\x03\\x95\\x03\\x95\\x03\\x95\\x03\\x95\\x03\\x95')\n buf.write('\\x03\\x95\\x03\\x96\\x03\\x96\\x03\\x96\\x03\\x96\\x03\\x96\\x03\\x96')\n buf.write('\\x03\\x96\\x03\\x96\\x03\\x97\\x03\\x97\\x03\\x97\\x03\\x97\\x03\\x98')\n buf.write('\\x03\\x98\\x03\\x98\\x03\\x98\\x03\\x98\\x03\\x98\\x03\\x98\\x03\\x99')\n buf.write('\\x03\\x99\\x03\\x99\\x03\\x99\\x03\\x99\\x03\\x99\\x03\\x9a\\x03\\x9a')\n buf.write('\\x03\\x9a\\x03\\x9a\\x03\\x9a\\x03\\x9b\\x03\\x9b\\x03\\x9b\\x03\\x9b')\n buf.write('\\x03\\x9b\\x03\\x9c\\x03\\x9c\\x03\\x9c\\x03\\x9c\\x03\\x9c\\x03\\x9c')\n buf.write('\\x03\\x9c\\x03\\x9c\\x03\\x9c\\x03\\x9d\\x03\\x9d\\x03\\x9d\\x03\\x9d')\n buf.write('\\x03\\x9d\\x03\\x9e\\x03\\x9e\\x03\\x9e\\x03\\x9e\\x03\\x9e\\x03\\x9e')\n 
buf.write('\\x03\\x9f\\x03\\x9f\\x03\\x9f\\x03\\x9f\\x03\\x9f\\x03\\x9f\\x03\\xa0')\n buf.write('\\x03\\xa0\\x03\\xa0\\x03\\xa0\\x03\\xa0\\x03\\xa0\\x03\\xa0\\x03\\xa0')\n buf.write('\\x03\\xa0\\x03¡\\x03¡\\x03¡\\x03¡\\x03¡\\x03¢')\n buf.write('\\x03¢\\x03¢\\x03¢\\x03¢\\x03¢\\x03¢\\x03£')\n buf.write('\\x03£\\x03£\\x03£\\x03£\\x03¤\\x03¤\\x03¤')\n buf.write('\\x03¤\\x03¤\\x03¥\\x03¥\\x03¥\\x03¦\\x03¦')\n buf.write('\\x03¦\\x03¦\\x03¦\\x03¦\\x03¦\\x03§\\x03§')\n buf.write('\\x03§\\x03§\\x03§\\x03§\\x03§\\x03§\\x03§')\n buf.write('\\x03§\\x03¨\\x03¨\\x03¨\\x03©\\x03©\\x03©')\n buf.write('\\x03©\\x03©\\x03©\\x03©\\x03©\\x03ª\\x03ª')\n buf.write('\\x03ª\\x03ª\\x03ª\\x03ª\\x03ª\\x03ª\\x03ª')\n buf.write('\\x03ª\\x03«\\x03«\\x03«\\x03«\\x03«\\x03«')\n buf.write('\\x03«\\x03«\\x03«\\x03«\\x03¬\\x03¬\\x03¬')\n buf.write('\\x03¬\\x03¬\\x03¬\\x03¬\\x03\\xad\\x03\\xad\\x03\\xad')\n buf.write('\\x03\\xad\\x03\\xad\\x03\\xad\\x03®\\x03®\\x03®\\x03®')\n buf.write('\\x03®\\x03®\\x03®\\x03®\\x03¯\\x03¯\\x03¯')\n buf.write('\\x03¯\\x03¯\\x03¯\\x03¯\\x03¯\\x03¯\\x03¯')\n buf.write('\\x03°\\x03°\\x03°\\x03°\\x03°\\x03°\\x03°')\n buf.write('\\x03°\\x03±\\x03±\\x03±\\x03±\\x03±\\x03±')\n buf.write('\\x03±\\x03±\\x03±\\x03²\\x03²\\x03²\\x03²')\n buf.write('\\x03²\\x03²\\x03²\\x03³\\x03³\\x03³\\x03³')\n buf.write('\\x03³\\x03³\\x03´\\x03´\\x03´\\x03´\\x03´')\n buf.write('\\x03´\\x03µ\\x03µ\\x03µ\\x03µ\\x03µ\\x03µ')\n buf.write('\\x03µ\\x03¶\\x03¶\\x03¶\\x03¶\\x03¶\\x03¶')\n buf.write('\\x03¶\\x03¶\\x03¶\\x03¶\\x03¶\\x03¶\\x03¶')\n buf.write('\\x03·\\x03·\\x03·\\x03·\\x03·\\x03·\\x03·')\n buf.write('\\x03·\\x03¸\\x03¸\\x03¸\\x03¸\\x03¹\\x03¹')\n buf.write('\\x03¹\\x03¹\\x03¹\\x03¹\\x03¹\\x03¹\\x03º')\n buf.write('\\x03º\\x03º\\x03º\\x03º\\x03º\\x03º\\x03º')\n buf.write('\\x03º\\x03º\\x03»\\x03»\\x03»\\x03»\\x03»')\n buf.write('\\x03»\\x03»\\x03»\\x03»\\x03¼\\x03¼\\x03¼')\n buf.write('\\x03¼\\x03¼\\x03½\\x03½\\x03½\\x03½\\x03½')\n buf.write('\\x03½\\x03½\\x03½\\x03½\\x03½\\x03½\\x03¾')\n 
buf.write('\\x03¾\\x03¾\\x03¿\\x03¿\\x03¿\\x03¿\\x03¿')\n buf.write('\\x03¿\\x03¿\\x03¿\\x03¿\\x03¿\\x03À\\x03À')\n buf.write('\\x03À\\x03À\\x03À\\x03À\\x03À\\x03À\\x03Á')\n buf.write('\\x03Á\\x03Á\\x03Á\\x03Á\\x03Â\\x03Â\\x03Â')\n buf.write('\\x03Â\\x03Â\\x03Ã\\x03Ã\\x03Ã\\x03Ã\\x03Ã')\n buf.write('\\x03Ä\\x03Ä\\x03Ä\\x03Ä\\x03Ä\\x03Ä\\x03Ä')\n buf.write('\\x03Ä\\x03Ä\\x03Å\\x03Å\\x03Å\\x03Å\\x03Å')\n buf.write('\\x03Æ\\x03Æ\\x03Æ\\x03Æ\\x03Æ\\x03Æ\\x03Æ')\n buf.write('\\x03Æ\\x03Æ\\x03Æ\\x03Æ\\x03Ç\\x03Ç\\x03Ç')\n buf.write('\\x03Ç\\x03Ç\\x03Ç\\x03Ç\\x03Ç\\x03È\\x03È')\n buf.write('\\x03È\\x03È\\x03È\\x03É\\x03É\\x03É\\x03É')\n buf.write('\\x03É\\x03É\\x03Ê\\x03Ê\\x03Ê\\x03Ê\\x03Ê')\n buf.write('\\x03Ê\\x03Ê\\x03Ê\\x03Ë\\x03Ë\\x03Ë\\x03Ë')\n buf.write('\\x03Ë\\x03Ì\\x03Ì\\x03Ì\\x03Ì\\x03Ì\\x03Ì')\n buf.write('\\x03Í\\x03Í\\x03Í\\x03Í\\x03Í\\x03Í\\x03Î')\n buf.write('\\x03Î\\x03Î\\x03Î\\x03Î\\x03Î\\x03Ï\\x03Ï')\n buf.write('\\x03Ï\\x03Ï\\x03Ï\\x03Ï\\x03Ð\\x03Ð\\x03Ð')\n buf.write('\\x03Ð\\x03Ð\\x03Ð\\x03Ñ\\x03Ñ\\x03Ñ\\x03Ñ')\n buf.write('\\x03Ñ\\x03Ò\\x03Ò\\x03Ò\\x03Ò\\x03Ò\\x03Ò')\n buf.write('\\x03Ò\\x03Ó\\x03Ó\\x03Ó\\x03Ó\\x03Ô\\x03Ô')\n buf.write('\\x03Ô\\x03Ô\\x03Ô\\x03Ô\\x03Ô\\x03Õ\\x03Õ')\n buf.write('\\x03Õ\\x03Õ\\x03Õ\\x03Õ\\x03Ö\\x03Ö\\x03Ö')\n buf.write('\\x03Ö\\x03Ö\\x03×\\x03×\\x03×\\x03×\\x03×')\n buf.write('\\x03Ø\\x03Ø\\x03Ø\\x03Ø\\x03Ø\\x03Ù\\x03Ù')\n buf.write('\\x03Ù\\x03Ù\\x03Ú\\x03Ú\\x03Ú\\x03Ú\\x03Ú')\n buf.write('\\x03Ú\\x03Ú\\x03Ú\\x03Û\\x03Û\\x03Û\\x03Û')\n buf.write('\\x03Û\\x03Û\\x03Û\\x03Û\\x03Û\\x03Ü\\x03Ü')\n buf.write('\\x03Ü\\x03Ü\\x03Ü\\x03Ü\\x03Ü\\x03Ü\\x03Ü')\n buf.write('\\x03Ý\\x03Ý\\x03Ý\\x03Ý\\x03Ý\\x03Ý\\x03Ý')\n buf.write('\\x03Þ\\x03Þ\\x03Þ\\x03Þ\\x03Þ\\x03Þ\\x03ß')\n buf.write('\\x03ß\\x03ß\\x03ß\\x03ß\\x03ß\\x03à\\x03à')\n buf.write('\\x03à\\x03à\\x03à\\x03à\\x03à\\x03á\\x03á')\n buf.write('\\x03á\\x03á\\x03á\\x03á\\x03á\\x03á\\x03á')\n buf.write('\\x03â\\x03â\\x03â\\x03â\\x03â\\x03â\\x03â')\n 
buf.write('\\x03â\\x03â\\x03ã\\x03ã\\x03ã\\x03ã\\x03ã')\n buf.write('\\x03ä\\x03ä\\x03ä\\x03ä\\x03ä\\x03ä\\x03å')\n buf.write('\\x03å\\x03å\\x03å\\x03å\\x03å\\x03å\\x03æ')\n buf.write('\\x03æ\\x03æ\\x03æ\\x03æ\\x03æ\\x03ç\\x03ç')\n buf.write('\\x03ç\\x03ç\\x03ç\\x03ç\\x03ç\\x03ç\\x03ç')\n buf.write('\\x03è\\x03è\\x03è\\x03è\\x03è\\x03é\\x03é')\n buf.write('\\x03é\\x03é\\x03ê\\x03ê\\x03ê\\x03ê\\x03ê')\n buf.write('\\x03ê\\x03ê\\x03ê\\x03ë\\x03ë\\x03ë\\x03ë')\n buf.write('\\x03ë\\x03ë\\x03ë\\x03ë\\x03ë\\x03ì\\x03ì')\n buf.write('\\x03ì\\x03ì\\x03í\\x03í\\x03í\\x03í\\x03í')\n buf.write('\\x03í\\x03î\\x03î\\x03î\\x03î\\x03î\\x03î')\n buf.write('\\x03î\\x03î\\x03î\\x03ï\\x03ï\\x03ï\\x03ï')\n buf.write('\\x03ï\\x03ï\\x03ð\\x03ð\\x03ð\\x03ð\\x03ð')\n buf.write('\\x03ð\\x03ð\\x03ñ\\x03ñ\\x03ñ\\x03ñ\\x03ò')\n buf.write('\\x03ò\\x03ò\\x03ó\\x03ó\\x03ó\\x03ó\\x03ó')\n buf.write('\\x03ó\\x03ó\\x03ó\\x03ô\\x03ô\\x03ô\\x03ô')\n buf.write('\\x03ô\\x03ô\\x03ô\\x03ô\\x03õ\\x03õ\\x03õ')\n buf.write('\\x03õ\\x03õ\\x03õ\\x03õ\\x03ö\\x03ö\\x03ö')\n buf.write('\\x03ö\\x03ö\\x03ö\\x03ö\\x03ö\\x03÷\\x03÷')\n buf.write('\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷')\n buf.write('\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷')\n buf.write('\\x03÷\\x03ø\\x03ø\\x03ø\\x03ø\\x03ø\\x03ø')\n buf.write('\\x03ø\\x03ø\\x03ø\\x03ø\\x03ø\\x03ù\\x03ù')\n buf.write('\\x03ù\\x03ù\\x03ù\\x03ù\\x03ù\\x03ù\\x03ù')\n buf.write('\\x03ù\\x03ù\\x03ú\\x03ú\\x03ú\\x03ú\\x03ú')\n buf.write('\\x03û\\x03û\\x03û\\x03û\\x03û\\x03û\\x03û')\n buf.write('\\x03û\\x03ü\\x03ü\\x03ü\\x03ü\\x03ü\\x03ü')\n buf.write('\\x03ü\\x03ü\\x03ü\\x03ü\\x03ü\\x03ü\\x03ü')\n buf.write('\\x03ü\\x03ý\\x03ý\\x03ý\\x03ý\\x03þ\\x03þ')\n buf.write('\\x03þ\\x03þ\\x03þ\\x03þ\\x03þ\\x03ÿ\\x03ÿ')\n buf.write('\\x03ÿ\\x03ÿ\\x03ÿ\\x03Ā\\x03Ā\\x03Ā\\x03Ā')\n buf.write('\\x03Ā\\x03Ā\\x03ā\\x03ā\\x03ā\\x03ā\\x03ā')\n buf.write('\\x03ā\\x03ā\\x03Ă\\x03Ă\\x03Ă\\x03Ă\\x03Ă')\n buf.write('\\x03Ă\\x03Ă\\x03Ă\\x03ă\\x03ă\\x03ă\\x03ă')\n 
buf.write('\\x03ă\\x03ă\\x03ă\\x03ă\\x03ă\\x03ă\\x03Ą')\n buf.write('\\x03Ą\\x03Ą\\x03Ą\\x03Ą\\x03Ą\\x03Ą\\x03ą')\n buf.write('\\x03ą\\x03ą\\x03Ć\\x03Ć\\x03Ć\\x03Ć\\x03ć')\n buf.write('\\x03ć\\x03ć\\x03ć\\x03Ĉ\\x03Ĉ\\x03Ĉ\\x03Ĉ')\n buf.write('\\x03ĉ\\x03ĉ\\x03ĉ\\x03Ċ\\x03Ċ\\x03Ċ\\x03Ċ')\n buf.write('\\x03Ċ\\x03ċ\\x03ċ\\x03ċ\\x03ċ\\x03ċ\\x03Č')\n buf.write('\\x03Č\\x03Č\\x03Č\\x03Č\\x03Č\\x03Č\\x03č')\n buf.write('\\x03č\\x03č\\x03Ď\\x03Ď\\x03Ď\\x03Ď\\x03Ď')\n buf.write('\\x03Ď\\x03Ď\\x03Ď\\x03ď\\x03ď\\x03ď\\x03ď')\n buf.write('\\x03ď\\x03ď\\x03Đ\\x03Đ\\x03Đ\\x03Đ\\x03Đ')\n buf.write('\\x03Đ\\x03Đ\\x03Đ\\x03Đ\\x03Đ\\x03Đ\\x03đ')\n buf.write('\\x03đ\\x03đ\\x03đ\\x03đ\\x03đ\\x03đ\\x03đ')\n buf.write('\\x03Ē\\x03Ē\\x03Ē\\x03Ē\\x03ē\\x03ē\\x03ē')\n buf.write('\\x03ē\\x03ē\\x03ē\\x03Ĕ\\x03Ĕ\\x03Ĕ\\x03Ĕ')\n buf.write('\\x03Ĕ\\x03ĕ\\x03ĕ\\x03ĕ\\x03ĕ\\x03ĕ\\x03ĕ')\n buf.write('\\x03ĕ\\x03ĕ\\x03ĕ\\x03ĕ\\x03ĕ\\x03Ė\\x03Ė')\n buf.write('\\x03Ė\\x03Ė\\x03Ė\\x03Ė\\x03Ė\\x03Ė\\x03ė')\n buf.write('\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė')\n buf.write('\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė')\n buf.write('\\x03ė\\x03Ę\\x03Ę\\x03Ę\\x03Ę\\x03Ę\\x03Ę')\n buf.write('\\x03Ę\\x03Ę\\x03Ę\\x03Ę\\x03Ę\\x03ę\\x03ę')\n buf.write('\\x03ę\\x03ę\\x03ę\\x03ę\\x03ę\\x03Ě\\x03Ě')\n buf.write('\\x03Ě\\x03Ě\\x03Ě\\x03Ě\\x03Ě\\x03Ě\\x03Ě')\n buf.write('\\x03Ě\\x03ě\\x03ě\\x03ě\\x03ě\\x03ě\\x03ě')\n buf.write('\\x03ě\\x03ě\\x03Ĝ\\x03Ĝ\\x03Ĝ\\x03Ĝ\\x03Ĝ')\n buf.write('\\x03ĝ\\x03ĝ\\x03ĝ\\x03ĝ\\x03ĝ\\x03ĝ\\x03ĝ')\n buf.write('\\x03ĝ\\x03ĝ\\x03Ğ\\x03Ğ\\x03Ğ\\x03Ğ\\x03Ğ')\n buf.write('\\x03Ğ\\x03ğ\\x03ğ\\x03ğ\\x03ğ\\x03ğ\\x03ğ')\n buf.write('\\x03ğ\\x03ğ\\x03ğ\\x03ğ\\x03Ġ\\x03Ġ\\x03Ġ')\n buf.write('\\x03Ġ\\x03Ġ\\x03Ġ\\x03ġ\\x03ġ\\x03ġ\\x03ġ')\n buf.write('\\x03ġ\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ')\n buf.write('\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ\\x03ģ')\n buf.write('\\x03ģ\\x03ģ\\x03ģ\\x03ģ\\x03ģ\\x03ģ\\x03ģ')\n buf.write('\\x03ģ\\x03Ĥ\\x03Ĥ\\x03Ĥ\\x03Ĥ\\x03Ĥ\\x03Ĥ')\n 
buf.write('\\x03Ĥ\\x03Ĥ\\x03Ĥ\\x03Ĥ\\x03ĥ\\x03ĥ\\x03ĥ')\n buf.write('\\x03ĥ\\x03ĥ\\x03ĥ\\x03ĥ\\x03Ħ\\x03Ħ\\x03Ħ')\n buf.write('\\x03Ħ\\x03Ħ\\x03Ħ\\x03Ħ\\x03Ħ\\x03Ħ\\x03Ħ')\n buf.write('\\x03ħ\\x03ħ\\x03ħ\\x03ħ\\x03ħ\\x03ħ\\x03ħ')\n buf.write('\\x03ħ\\x03ħ\\x03ħ\\x03Ĩ\\x03Ĩ\\x03Ĩ\\x03Ĩ')\n buf.write('\\x03Ĩ\\x03Ĩ\\x03Ĩ\\x03Ĩ\\x03ĩ\\x03ĩ\\x03ĩ')\n buf.write('\\x03ĩ\\x03ĩ\\x03ĩ\\x03Ī\\x03Ī\\x03Ī\\x03Ī')\n buf.write('\\x03Ī\\x03Ī\\x03Ī\\x03Ī\\x03Ī\\x03Ī\\x03ī')\n buf.write('\\x03ī\\x03ī\\x03ī\\x03ī\\x03ī\\x03Ĭ\\x03Ĭ')\n buf.write('\\x03Ĭ\\x03Ĭ\\x03Ĭ\\x03Ĭ\\x03ĭ\\x03ĭ\\x03ĭ')\n buf.write('\\x03ĭ\\x03Į\\x03Į\\x03Į\\x03Į\\x03Į\\x03į')\n buf.write('\\x03į\\x03į\\x03į\\x03į\\x03İ\\x03İ\\x03İ')\n buf.write('\\x03İ\\x03İ\\x03İ\\x03İ\\x03ı\\x03ı\\x03ı')\n buf.write('\\x03ı\\x03IJ\\x03IJ\\x03IJ\\x03IJ\\x03IJ\\x03IJ')\n buf.write('\\x03IJ\\x03IJ\\x03IJ\\x03IJ\\x03ij\\x03ij\\x03ij')\n buf.write('\\x03ij\\x03ij\\x03ij\\x03ij\\x03ij\\x03ij\\x03ij')\n buf.write('\\x03ij\\x03ij\\x03Ĵ\\x03Ĵ\\x03Ĵ\\x03Ĵ\\x03Ĵ')\n buf.write('\\x03Ĵ\\x03Ĵ\\x03ĵ\\x03ĵ\\x03ĵ\\x03ĵ\\x03ĵ')\n buf.write('\\x03ĵ\\x03ĵ\\x03ĵ\\x03ĵ\\x03ĵ\\x03Ķ\\x03Ķ')\n buf.write('\\x03Ķ\\x03Ķ\\x03Ķ\\x03Ķ\\x03Ķ\\x03ķ\\x03ķ')\n buf.write('\\x03ķ\\x03ķ\\x03ķ\\x03ķ\\x03ķ\\x03ķ\\x03ĸ')\n buf.write('\\x03ĸ\\x03ĸ\\x03ĸ\\x03ĸ\\x03ĸ\\x03ĸ\\x03ĸ')\n buf.write('\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ')\n buf.write('\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ')\n buf.write('\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03ĺ')\n buf.write('\\x03ĺ\\x03ĺ\\x03ĺ\\x03ĺ\\x03ĺ\\x03ĺ\\x03Ļ')\n buf.write('\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ')\n buf.write('\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ\\x03ļ\\x03ļ')\n buf.write('\\x03ļ\\x03ļ\\x03ļ\\x03ļ\\x03ļ\\x03Ľ\\x03Ľ')\n buf.write('\\x03Ľ\\x03Ľ\\x03Ľ\\x03Ľ\\x03Ľ\\x03Ľ\\x03Ľ')\n buf.write('\\x03Ľ\\x03ľ\\x03ľ\\x03ľ\\x03ľ\\x03ľ\\x03ľ')\n buf.write('\\x03Ŀ\\x03Ŀ\\x03Ŀ\\x03Ŀ\\x03Ŀ\\x03Ŀ\\x03Ŀ')\n buf.write('\\x03Ŀ\\x03ŀ\\x03ŀ\\x03ŀ\\x03ŀ\\x03ŀ\\x03ŀ')\n buf.write('\\x03ŀ\\x03Ł\\x03Ł\\x03Ł\\x03Ł\\x03Ł\\x03Ł')\n 
buf.write('\\x03ł\\x03ł\\x03ł\\x03ł\\x03ł\\x03ł\\x03ł')\n buf.write('\\x03ł\\x03ł\\x03Ń\\x03Ń\\x03Ń\\x03Ń\\x03Ń')\n buf.write('\\x03Ń\\x03Ń\\x03ń\\x03ń\\x03ń\\x03ń\\x03Ņ')\n buf.write('\\x03Ņ\\x03Ņ\\x03Ņ\\x03Ņ\\x03Ņ\\x03ņ\\x03ņ')\n buf.write('\\x03ņ\\x03ņ\\x03ņ\\x03Ň\\x03Ň\\x03Ň\\x03Ň')\n buf.write('\\x03Ň\\x03Ň\\x03ň\\x03ň\\x03ň\\x03ň\\x03ň')\n buf.write('\\x03ň\\x03ň\\x03ʼn\\x03ʼn\\x03ʼn\\x03ʼn\\x03ʼn')\n buf.write('\\x03Ŋ\\x03Ŋ\\x03Ŋ\\x03Ŋ\\x03Ŋ\\x03Ŋ\\x03Ŋ')\n buf.write('\\x03Ŋ\\x03Ŋ\\x03Ŋ\\x03ŋ\\x03ŋ\\x03ŋ\\x03ŋ')\n buf.write('\\x03ŋ\\x03ŋ\\x03ŋ\\x03Ō\\x03Ō\\x03Ō\\x03Ō')\n buf.write('\\x03Ō\\x03Ō\\x03Ō\\x03Ō\\x03Ō\\x03Ō\\x03Ō')\n buf.write('\\x03Ō\\x03ō\\x03ō\\x03ō\\x03ō\\x03Ŏ\\x03Ŏ')\n buf.write('\\x03Ŏ\\x03Ŏ\\x03Ŏ\\x03Ŏ\\x03Ŏ\\x03ŏ\\x03ŏ')\n buf.write('\\x03ŏ\\x03ŏ\\x03ŏ\\x03ŏ\\x03ŏ\\x03Ő\\x03Ő')\n buf.write('\\x03Ő\\x03Ő\\x03Ő\\x03ő\\x03ő\\x03ő\\x03ő')\n buf.write('\\x03ő\\x03ő\\x03ő\\x03ő\\x03Œ\\x03Œ\\x03Œ')\n buf.write('\\x03Œ\\x03Œ\\x03Œ\\x03Œ\\x03œ\\x03œ\\x03œ')\n buf.write('\\x03œ\\x03œ\\x03Ŕ\\x03Ŕ\\x03Ŕ\\x03Ŕ\\x03Ŕ')\n buf.write('\\x03Ŕ\\x03Ŕ\\x03Ŕ\\x03Ŕ\\x03ŕ\\x03ŕ\\x03ŕ')\n buf.write('\\x03ŕ\\x03ŕ\\x03ŕ\\x03ŕ\\x03ŕ\\x03ŕ\\x03ŕ')\n buf.write('\\x03ŕ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ')\n buf.write('\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ')\n buf.write('\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ')\n buf.write('\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ')\n buf.write('\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03Ř\\x03Ř\\x03Ř')\n buf.write('\\x03Ř\\x03Ř\\x03Ř\\x03Ř\\x03Ř\\x03Ř\\x03Ř')\n buf.write('\\x03Ř\\x03Ř\\x03ř\\x03ř\\x03ř\\x03ř\\x03ř')\n buf.write('\\x03ř\\x03ř\\x03ř\\x03ř\\x03ř\\x03ř\\x03ř')\n buf.write('\\x03ř\\x03ř\\x03ř\\x03ř\\x03Ś\\x03Ś\\x03Ś')\n buf.write('\\x03Ś\\x03ś\\x03ś\\x03ś\\x03ś\\x03ś\\x03Ŝ')\n buf.write('\\x03Ŝ\\x03Ŝ\\x03Ŝ\\x03Ŝ\\x03Ŝ\\x03Ŝ\\x03Ŝ')\n buf.write('\\x03Ŝ\\x03ŝ\\x03ŝ\\x03ŝ\\x03ŝ\\x03ŝ\\x03ŝ')\n buf.write('\\x03Ş\\x03Ş\\x03Ş\\x03Ş\\x03Ş\\x03ş\\x03ş')\n buf.write('\\x03ş\\x03ş\\x03ş\\x03ş\\x03ş\\x03ş\\x03ş')\n 
buf.write('\\x03Š\\x03Š\\x03Š\\x03Š\\x03Š\\x03Š\\x03Š')\n buf.write('\\x03Š\\x03Š\\x03š\\x03š\\x03š\\x03š\\x03š')\n buf.write('\\x03š\\x03š\\x03š\\x03š\\x03Ţ\\x03Ţ\\x03Ţ')\n buf.write('\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ')\n buf.write('\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ\\x03ţ\\x03ţ')\n buf.write('\\x03ţ\\x03ţ\\x03ţ\\x03ţ\\x03ţ\\x03Ť\\x03Ť')\n buf.write('\\x03Ť\\x03Ť\\x03Ť\\x03ť\\x03ť\\x03ť\\x03ť')\n buf.write('\\x03ť\\x03Ŧ\\x03Ŧ\\x03Ŧ\\x03Ŧ\\x03Ŧ\\x03Ŧ')\n buf.write('\\x03Ŧ\\x03Ŧ\\x03Ŧ\\x03ŧ\\x03ŧ\\x03ŧ\\x03ŧ')\n buf.write('\\x03ŧ\\x03ŧ\\x03ŧ\\x03ŧ\\x03ŧ\\x03Ũ\\x03Ũ')\n buf.write('\\x03Ũ\\x03Ũ\\x03Ũ\\x03ũ\\x03ũ\\x03ũ\\x03ũ')\n buf.write('\\x03ũ\\x03ũ\\x03ũ\\x03ũ\\x03ũ\\x03ũ\\x03ũ')\n buf.write('\\x03ũ\\x03ũ\\x03ũ\\x03Ū\\x03Ū\\x03Ū\\x03Ū')\n buf.write('\\x03Ū\\x03Ū\\x03Ū\\x03Ū\\x03ū\\x03ū\\x03ū')\n buf.write('\\x03ū\\x03ū\\x03ū\\x03ū\\x03ū\\x03ū\\x03Ŭ')\n buf.write('\\x03Ŭ\\x03Ŭ\\x03Ŭ\\x03Ŭ\\x03Ŭ\\x03Ŭ\\x03Ŭ')\n buf.write('\\x03Ŭ\\x03Ŭ\\x03Ŭ\\x03ŭ\\x03ŭ\\x03ŭ\\x03ŭ')\n buf.write('\\x03ŭ\\x03ŭ\\x03Ů\\x03Ů\\x03Ů\\x03Ů\\x03Ů')\n buf.write('\\x03Ů\\x03Ů\\x03Ů\\x03ů\\x03ů\\x03ů\\x03ů')\n buf.write('\\x03ů\\x03ů\\x03ů\\x03ů\\x03ů\\x03ů\\x03Ű')\n buf.write('\\x03Ű\\x03Ű\\x03Ű\\x03Ű\\x03Ű\\x03Ű\\x03Ű')\n buf.write('\\x03Ű\\x03Ű\\x03Ű\\x03Ű\\x03Ű\\x03ű\\x03ű')\n buf.write('\\x03ű\\x03ű\\x03ű\\x03ű\\x03ű\\x03Ų\\x03Ų')\n buf.write('\\x03Ų\\x03Ų\\x03Ų\\x03Ų\\x03Ų\\x03Ų\\x03Ų')\n buf.write('\\x03Ų\\x03Ų\\x03ų\\x03ų\\x03ų\\x03ų\\x03ų')\n buf.write('\\x03ų\\x03ų\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ')\n buf.write('\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ')\n buf.write('\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ')\n buf.write('\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03Ŷ')\n buf.write('\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ')\n buf.write('\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03ŷ')\n buf.write('\\x03ŷ\\x03ŷ\\x03ŷ\\x03ŷ\\x03ŷ\\x03ŷ\\x03ŷ')\n buf.write('\\x03Ÿ\\x03Ÿ\\x03Ÿ\\x03Ÿ\\x03Ÿ\\x03Ÿ\\x03Ÿ')\n buf.write('\\x03Ÿ\\x03Ź\\x03Ź\\x03Ź\\x03Ź\\x03Ź\\x03Ź')\n 
buf.write('\\x03Ź\\x03Ź\\x03ź\\x03ź\\x03ź\\x03ź\\x03ź')\n buf.write('\\x03ź\\x03Ż\\x03Ż\\x03Ż\\x03Ż\\x03ż\\x03ż')\n buf.write('\\x03ż\\x03ż\\x03ż\\x03Ž\\x03Ž\\x03Ž\\x03Ž')\n buf.write('\\x03Ž\\x03ž\\x03ž\\x03ž\\x03ž\\x03ž\\x03ž')\n buf.write('\\x03ž\\x03ž\\x03ž\\x03ž\\x03ſ\\x03ſ\\x03ſ')\n buf.write('\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ')\n buf.write('\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ')\n buf.write('\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ')\n buf.write('\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ƀ\\x03ƀ\\x03ƀ')\n buf.write('\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ')\n buf.write('\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ')\n buf.write('\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ')\n buf.write('\\x03ƀ\\x03ƀ\\x03ƀ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ')\n buf.write('\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ')\n buf.write('\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ')\n buf.write('\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ƃ')\n buf.write('\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ')\n buf.write('\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03ƃ')\n buf.write('\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ')\n buf.write('\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03Ƅ')\n buf.write('\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ')\n buf.write('\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ')\n buf.write('\\x03Ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ')\n buf.write('\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ')\n buf.write('\\x03ƅ\\x03ƅ\\x03ƅ\\x03Ɔ\\x03Ɔ\\x03Ɔ\\x03Ƈ')\n buf.write('\\x03Ƈ\\x03Ƈ\\x03Ƈ\\x03Ƈ\\x03Ƈ\\x03Ƈ\\x03Ƈ')\n buf.write('\\x03Ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ')\n buf.write('\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03Ɖ')\n buf.write('\\x03Ɖ\\x03Ɖ\\x03Ɖ\\x03Ɖ\\x03Ɖ\\x03Ɖ\\x03Ɖ')\n buf.write('\\x03Ɖ\\x03Ɖ\\x03Ɗ\\x03Ɗ\\x03Ɗ\\x03Ɗ\\x03Ɗ')\n buf.write('\\x03Ɗ\\x03Ƌ\\x03Ƌ\\x03Ƌ\\x03Ƌ\\x03Ƌ\\x03Ƌ')\n buf.write('\\x03Ƌ\\x03Ƌ\\x03ƌ\\x03ƌ\\x03ƌ\\x03ƌ\\x03ƌ')\n buf.write('\\x03ƍ\\x03ƍ\\x03ƍ\\x03ƍ\\x03ƍ\\x03Ǝ\\x03Ǝ')\n buf.write('\\x03Ǝ\\x03Ǝ\\x03Ǝ\\x03Ǝ\\x03Ǝ\\x03Ǝ\\x03Ǝ')\n 
buf.write('\\x03Ə\\x03Ə\\x03Ə\\x03Ə\\x03Ə\\x03Ɛ\\x03Ɛ')\n buf.write('\\x03Ɛ\\x03Ɛ\\x03Ɛ\\x03Ɛ\\x03Ɛ\\x03Ɛ\\x03Ɛ')\n buf.write('\\x03Ɛ\\x03Ƒ\\x03Ƒ\\x03Ƒ\\x03Ƒ\\x03Ƒ\\x03Ƒ')\n buf.write('\\x03ƒ\\x03ƒ\\x03ƒ\\x03ƒ\\x03ƒ\\x03ƒ\\x03Ɠ')\n buf.write('\\x03Ɠ\\x03Ɠ\\x03Ɠ\\x03Ɠ\\x03Ɠ\\x03Ɠ\\x03Ɣ')\n buf.write('\\x03Ɣ\\x03Ɣ\\x03Ɣ\\x03Ɣ\\x03Ɣ\\x03Ɣ\\x03Ɣ')\n buf.write('\\x03Ɣ\\x03Ɣ\\x03ƕ\\x03ƕ\\x03ƕ\\x03ƕ\\x03ƕ')\n buf.write('\\x03ƕ\\x03ƕ\\x03ƕ\\x03Ɩ\\x03Ɩ\\x03Ɩ\\x03Ɩ')\n buf.write('\\x03Ɩ\\x03Ɩ\\x03Ɨ\\x03Ɨ\\x03Ɨ\\x03Ɨ\\x03Ɨ')\n buf.write('\\x03Ɨ\\x03Ɨ\\x03Ƙ\\x03Ƙ\\x03Ƙ\\x03Ƙ\\x03Ƙ')\n buf.write('\\x03Ƙ\\x03Ƙ\\x03Ƙ\\x03ƙ\\x03ƙ\\x03ƙ\\x03ƙ')\n buf.write('\\x03ƙ\\x03ƙ\\x03ƙ\\x03ƚ\\x03ƚ\\x03ƚ\\x03ƚ')\n buf.write('\\x03ƚ\\x03ƚ\\x03ƚ\\x03ƛ\\x03ƛ\\x03ƛ\\x03ƛ')\n buf.write('\\x03Ɯ\\x03Ɯ\\x03Ɯ\\x03Ɯ\\x03Ɯ\\x03Ɯ\\x03Ɲ')\n buf.write('\\x03Ɲ\\x03Ɲ\\x03Ɲ\\x03Ɲ\\x03Ɲ\\x03Ɲ\\x03Ɲ')\n buf.write('\\x03Ɲ\\x03ƞ\\x03ƞ\\x03ƞ\\x03ƞ\\x03ƞ\\x03ƞ')\n buf.write('\\x03Ɵ\\x03Ɵ\\x03Ɵ\\x03Ɵ\\x03Ɵ\\x03Ɵ\\x03Ɵ')\n buf.write('\\x03Ơ\\x03Ơ\\x03Ơ\\x03Ơ\\x03Ơ\\x03Ơ\\x03Ơ')\n buf.write('\\x03Ơ\\x03ơ\\x03ơ\\x03ơ\\x03ơ\\x03ơ\\x03ơ')\n buf.write('\\x03ơ\\x03ơ\\x03ơ\\x03Ƣ\\x03Ƣ\\x03Ƣ\\x03Ƣ')\n buf.write('\\x03Ƣ\\x03Ƣ\\x03Ƣ\\x03Ƣ\\x03Ƣ\\x03ƣ\\x03ƣ')\n buf.write('\\x03ƣ\\x03ƣ\\x03ƣ\\x03ƣ\\x03ƣ\\x03Ƥ\\x03Ƥ')\n buf.write('\\x03Ƥ\\x03Ƥ\\x03Ƥ\\x03Ƥ\\x03Ƥ\\x03Ƥ\\x03ƥ')\n buf.write('\\x03ƥ\\x03ƥ\\x03ƥ\\x03ƥ\\x03ƥ\\x03ƥ\\x03ƥ')\n buf.write('\\x03Ʀ\\x03Ʀ\\x03Ʀ\\x03Ʀ\\x03Ʀ\\x03Ʀ\\x03Ʀ')\n buf.write('\\x03Ʀ\\x03Ʀ\\x03Ƨ\\x03Ƨ\\x03Ƨ\\x03Ƨ\\x03Ƨ')\n buf.write('\\x03ƨ\\x03ƨ\\x03ƨ\\x03ƨ\\x03ƨ\\x03ƨ\\x03ƨ')\n buf.write('\\x03ƨ\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03Ʃ')\n buf.write('\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03ƪ\\x03ƪ')\n buf.write('\\x03ƪ\\x03ƪ\\x03ƪ\\x03ƫ\\x03ƫ\\x03ƫ\\x03ƫ')\n buf.write('\\x03ƫ\\x03ƫ\\x03ƫ\\x03ƫ\\x03ƫ\\x03Ƭ\\x03Ƭ')\n buf.write('\\x03Ƭ\\x03Ƭ\\x03Ƭ\\x03Ƭ\\x03ƭ\\x03ƭ\\x03ƭ')\n buf.write('\\x03ƭ\\x03ƭ\\x03ƭ\\x03Ʈ\\x03Ʈ\\x03Ʈ\\x03Ʈ')\n buf.write('\\x03Ʈ\\x03Ư\\x03Ư\\x03Ư\\x03Ư\\x03Ư\\x03Ư')\n 
buf.write('\\x03Ư\\x03ư\\x03ư\\x03ư\\x03ư\\x03ư\\x03Ʊ')\n buf.write('\\x03Ʊ\\x03Ʊ\\x03Ʊ\\x03Ʊ\\x03Ʊ\\x03Ʋ\\x03Ʋ')\n buf.write('\\x03Ʋ\\x03Ʋ\\x03Ƴ\\x03Ƴ\\x03Ƴ\\x03Ƴ\\x03Ƴ')\n buf.write('\\x03Ƴ\\x03Ƴ\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ')\n buf.write('\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ')\n buf.write('\\x03ƴ\\x03ƴ\\x03Ƶ\\x03Ƶ\\x03Ƶ\\x03Ƶ\\x03Ƶ')\n buf.write('\\x03Ƶ\\x03Ƶ\\x03Ƶ\\x03ƶ\\x03ƶ\\x03ƶ\\x03ƶ')\n buf.write('\\x03ƶ\\x03ƶ\\x03ƶ\\x03ƶ\\x03ƶ\\x03ƶ\\x03ƶ')\n buf.write('\\x03ƶ\\x03ƶ\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ʒ')\n buf.write('\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ƹ')\n buf.write('\\x03Ƹ\\x03Ƹ\\x03Ƹ\\x03Ƹ\\x03Ƹ\\x03Ƹ\\x03Ƹ')\n buf.write('\\x03Ƹ\\x03Ƹ\\x03ƹ\\x03ƹ\\x03ƹ\\x03ƹ\\x03ƹ')\n buf.write('\\x03ƹ\\x03ƹ\\x03ƹ\\x03ƹ\\x03ƹ\\x03ƺ\\x03ƺ')\n buf.write('\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ')\n buf.write('\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƻ\\x03ƻ')\n buf.write('\\x03ƻ\\x03ƻ\\x03ƻ\\x03ƻ\\x03ƻ\\x03ƻ\\x03ƻ')\n buf.write('\\x03Ƽ\\x03Ƽ\\x03Ƽ\\x03Ƽ\\x03Ƽ\\x03Ƽ\\x03ƽ')\n buf.write('\\x03ƽ\\x03ƽ\\x03ƽ\\x03ƽ\\x03ƽ\\x03ƽ\\x03ƽ')\n buf.write('\\x03ƽ\\x03ƾ\\x03ƾ\\x03ƾ\\x03ƾ\\x03ƾ\\x03ƾ')\n buf.write('\\x03ƾ\\x03ƾ\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ')\n buf.write('\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ')\n buf.write('\\x03ƿ\\x03ǀ\\x03ǀ\\x03ǀ\\x03ǀ\\x03ǀ\\x03ǀ')\n buf.write('\\x03ǀ\\x03ǀ\\x03ǀ\\x03ǁ\\x03ǁ\\x03ǁ\\x03ǁ')\n buf.write('\\x03ǁ\\x03ǂ\\x03ǂ\\x03ǂ\\x03ǂ\\x03ǃ\\x03ǃ')\n buf.write('\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ')\n buf.write('\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ')\n buf.write('\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ')\n buf.write('\\x03ǃ\\x03ǃ\\x03DŽ\\x03DŽ\\x03DŽ\\x03DŽ\\x03DŽ')\n buf.write('\\x03Dž\\x03Dž\\x03Dž\\x03Dž\\x03Dž\\x03Dž\\x03Dž')\n buf.write('\\x03Dž\\x03Dž\\x03Dž\\x03Dž\\x03dž\\x03dž\\x03dž')\n buf.write('\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž')\n buf.write('\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž')\n buf.write('\\x03dž\\x03LJ\\x03LJ\\x03LJ\\x03LJ\\x03LJ\\x03LJ')\n 
buf.write('\\x03LJ\\x03LJ\\x03LJ\\x03LJ\\x03LJ\\x03LJ\\x03LJ')\n buf.write('\\x03LJ\\x03LJ\\x03LJ\\x03Lj\\x03Lj\\x03Lj\\x03Lj')\n buf.write('\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj')\n buf.write('\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj')\n buf.write('\\x03Lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj')\n buf.write('\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj')\n buf.write('\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj')\n buf.write('\\x03lj\\x03lj\\x03lj\\x03NJ\\x03NJ\\x03NJ\\x03NJ')\n buf.write('\\x03NJ\\x03NJ\\x03NJ\\x03NJ\\x03NJ\\x03NJ\\x03NJ')\n buf.write('\\x03NJ\\x03NJ\\x03NJ\\x03NJ\\x03Nj\\x03Nj\\x03Nj')\n buf.write('\\x03Nj\\x03Nj\\x03Nj\\x03Nj\\x03Nj\\x03Nj\\x03Nj')\n buf.write('\\x03nj\\x03nj\\x03nj\\x03nj\\x03nj\\x03nj\\x03nj')\n buf.write('\\x03nj\\x03nj\\x03nj\\x03nj\\x03Ǎ\\x03Ǎ\\x03Ǎ')\n buf.write('\\x03Ǎ\\x03Ǎ\\x03Ǎ\\x03Ǎ\\x03Ǎ\\x03ǎ\\x03ǎ')\n buf.write('\\x03ǎ\\x03ǎ\\x03ǎ\\x03ǎ\\x03ǎ\\x03ǎ\\x03ǎ')\n buf.write('\\x03ǎ\\x03ǎ\\x03ǎ\\x03ǎ\\x03Ǐ\\x03Ǐ\\x03Ǐ')\n buf.write('\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ')\n buf.write('\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03ǐ')\n buf.write('\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ')\n buf.write('\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ')\n buf.write('\\x03ǐ\\x03Ǒ\\x03Ǒ\\x03Ǒ\\x03Ǒ\\x03Ǒ\\x03ǒ')\n buf.write('\\x03ǒ\\x03ǒ\\x03ǒ\\x03Ǔ\\x03Ǔ\\x03Ǔ\\x03Ǔ')\n buf.write('\\x03Ǔ\\x03ǔ\\x03ǔ\\x03ǔ\\x03ǔ\\x03Ǖ\\x03Ǖ')\n buf.write('\\x03Ǖ\\x03Ǖ\\x03Ǖ\\x03ǖ\\x03ǖ\\x03ǖ\\x03ǖ')\n buf.write('\\x03Ǘ\\x03Ǘ\\x03Ǘ\\x03Ǘ\\x03Ǘ\\x03Ǘ\\x03Ǘ')\n buf.write('\\x03ǘ\\x03ǘ\\x03ǘ\\x03ǘ\\x03Ǚ\\x03Ǚ\\x03Ǚ')\n buf.write('\\x03Ǚ\\x03Ǚ\\x03Ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ')\n buf.write('\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ')\n buf.write('\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03Ǜ\\x03Ǜ')\n buf.write('\\x03Ǜ\\x03Ǜ\\x03Ǜ\\x03Ǜ\\x03Ǜ\\x03Ǜ\\x03Ǜ')\n buf.write('\\x03Ǜ\\x03Ǜ\\x03ǜ\\x03ǜ\\x03ǜ\\x03ǜ\\x03ǝ')\n buf.write('\\x03ǝ\\x03ǝ\\x03ǝ\\x03ǝ\\x03ǝ\\x03ǝ\\x03ǝ')\n 
buf.write('\\x03ǝ\\x03Ǟ\\x03Ǟ\\x03Ǟ\\x03Ǟ\\x03Ǟ\\x03Ǟ')\n buf.write('\\x03ǟ\\x03ǟ\\x03ǟ\\x03ǟ\\x03ǟ\\x03ǟ\\x03ǟ')\n buf.write('\\x03Ǡ\\x03Ǡ\\x03Ǡ\\x03Ǡ\\x03Ǡ\\x03ǡ\\x03ǡ')\n buf.write('\\x03ǡ\\x03ǡ\\x03ǡ\\x03ǡ\\x03ǡ\\x03Ǣ\\x03Ǣ')\n buf.write('\\x03Ǣ\\x03Ǣ\\x03Ǣ\\x03Ǣ\\x07Ǣ፨\\nǢ')\n buf.write('\\x0cǢ\\x0eǢ፫\\x0bǢ\\x03Ǣ\\x03Ǣ\\x03ǣ')\n buf.write('\\x03ǣ\\x03ǣ\\x07ǣ፲\\nǣ\\x0cǣ\\x0eǣ')\n buf.write('፵\\x0bǣ\\x03ǣ\\x06ǣ፸\\nǣ\\rǣ')\n buf.write('\\x0eǣ፹\\x03Ǥ\\x03Ǥ\\x03Ǥ\\x07Ǥ\\u137f')\n buf.write('\\nǤ\\x0cǤ\\x0eǤᎂ\\x0bǤ\\x03Ǥ\\x06Ǥ')\n buf.write('ᎅ\\nǤ\\rǤ\\x0eǤᎆ\\x03ǥ\\x03ǥ')\n buf.write('\\x03ǥ\\x03Ǧ\\x03Ǧ\\x03ǧ\\x03ǧ\\x03Ǩ\\x03Ǩ')\n buf.write('\\x03Ǩ\\x05Ǩ᎓\\nǨ\\x03Ǩ\\x03Ǩ\\x05Ǩ')\n buf.write('᎗\\nǨ\\x05Ǩ᎙\\nǨ\\x03Ǩ\\x03Ǩ\\x05')\n buf.write('Ǩ\\u139d\\nǨ\\x03ǩ\\x03ǩ\\x03ǩ\\x03ǩ\\x03')\n buf.write('ǩ\\x07ǩᎤ\\nǩ\\x0cǩ\\x0eǩᎧ\\x0b')\n buf.write('ǩ\\x03ǩ\\x03ǩ\\x03Ǫ\\x03Ǫ\\x03Ǫ\\x03Ǫ')\n buf.write('\\x03Ǫ\\x05ǪᎰ\\nǪ\\x03Ǫ\\x03Ǫ\\x03ǫ')\n buf.write('\\x03ǫ\\x03Ǭ\\x03Ǭ\\x03Ǭ\\x07ǬᎹ\\nǬ')\n buf.write('\\x0cǬ\\x0eǬᎼ\\x0bǬ\\x03Ǭ\\x03Ǭ\\x03Ǭ')\n buf.write('\\x03ǭ\\x03ǭ\\x03ǭ\\x07ǭᏄ\\nǭ\\x0cǭ')\n buf.write('\\x0eǭᏇ\\x0bǭ\\x03ǭ\\x03ǭ\\x03ǭ\\x03Ǯ')\n buf.write('\\x03Ǯ\\x03Ǯ\\x07ǮᏏ\\nǮ\\x0cǮ\\x0eǮ')\n buf.write('Ꮢ\\x0bǮ\\x03Ǯ\\x03Ǯ\\x03Ǯ\\x03ǯ\\x03ǯ')\n buf.write('\\x03ǯ\\x07ǯᏚ\\nǯ\\x0cǯ\\x0eǯᏝ')\n buf.write('\\x0bǯ\\x03ǯ\\x03ǯ\\x03ǯ\\x03ǰ\\x03ǰ\\x03DZ')\n buf.write('\\x03DZ\\x03DZ\\x03DZ\\x06DZᏨ\\nDZ\\rDZ')\n buf.write('\\x0eDZᏩ\\x03DZ\\x03DZ\\x03Dz\\x03Dz\\x03dz')\n buf.write('\\x03dz\\x03Ǵ\\x03Ǵ\\x03ǵ\\x03ǵ\\x03Ƕ\\x03Ƕ')\n buf.write('\\x03Ƕ\\x03Ƿ\\x03Ƿ\\x03Ǹ\\x03Ǹ\\x03ǹ\\x03ǹ')\n buf.write('\\x03Ǻ\\x03Ǻ\\x03ǻ\\x03ǻ\\x03Ǽ\\x03Ǽ\\x03ǽ')\n buf.write('\\x03ǽ\\x03ǽ\\x03Ǿ\\x03Ǿ\\x03Ǿ\\x03Ǿ\\x07Ǿ')\n buf.write('ᐌ\\nǾ\\x0cǾ\\x0eǾᐏ\\x0bǾ\\x03Ǿ')\n buf.write('\\x03Ǿ\\x03Ǿ\\x03Ǿ\\x03Ǿ\\x05Ǿᐖ\\nǾ')\n buf.write('\\x03ǿ\\x03ǿ\\x03Ȁ\\x03Ȁ\\x03ȁ\\x03ȁ\\x03ȁ')\n buf.write('\\x03Ȃ\\x03Ȃ\\x03ȃ\\x03ȃ\\x03ȃ\\x03Ȅ\\x03Ȅ')\n buf.write('\\x03Ȅ\\x03Ȅ\\x03Ȅ\\x03Ȅ\\x03Ȅ\\x03Ȅ\\x05Ȅ')\n 
buf.write('ᐬ\\nȄ\\x03ȅ\\x03ȅ\\x03Ȇ\\x03Ȇ\\x03ȇ')\n buf.write('\\x03ȇ\\x03Ȉ\\x03Ȉ\\x03ȉ\\x03ȉ\\x03Ȋ\\x03Ȋ')\n buf.write('\\x03Ȋ\\x03ȋ\\x03ȋ\\x03Ȍ\\x03Ȍ\\x03ȍ\\x03ȍ')\n buf.write('\\x03Ȏ\\x03Ȏ\\x03ȏ\\x03ȏ\\x03Ȑ\\x06Ȑᑆ')\n buf.write('\\nȐ\\rȐ\\x0eȐᑇ\\x03Ȑ\\x03Ȑ\\x03ȑ')\n buf.write('\\x03ȑ\\x03Ȓ\\x06Ȓᑏ\\nȒ\\rȒ\\x0eȒ')\n buf.write('ᑐ\\x03ȓ\\x07ȓᑔ\\nȓ\\x0cȓ\\x0eȓ')\n buf.write('ᑗ\\x0bȓ\\x03ȓ\\x05ȓᑚ\\nȓ\\x03ȓ')\n buf.write('\\x06ȓᑝ\\nȓ\\rȓ\\x0eȓᑞ\\x03Ȕ')\n buf.write('\\x03Ȕ\\x03Ȕ\\x03Ȕ\\x07Ȕᑥ\\nȔ\\x0cȔ')\n buf.write('\\x0eȔᑨ\\x0bȔ\\x03Ȕ\\x03Ȕ\\x05Ȕᑬ')\n buf.write('\\nȔ\\x03Ȕ\\x03Ȕ\\x03ȕ\\x03ȕ\\x03ȕ\\x03ȕ')\n buf.write('\\x07ȕᑴ\\nȕ\\x0cȕ\\x0eȕᑷ\\x0bȕ')\n buf.write('\\x03ȕ\\x03ȕ\\x03ȕ\\x03ȕ\\x03ȕ\\x03Ȗ\\x03Ȗ')\n buf.write('\\x03Ȗ\\x03Ȗ\\x03Ȗ\\x03Ȗ\\x03Ȗ\\x03Ȗ\\x03Ȗ')\n buf.write('\\x07Ȗᒇ\\nȖ\\x0cȖ\\x0eȖᒊ\\x0bȖ')\n buf.write('\\x03Ȗ\\x03Ȗ\\x05Ȗᒎ\\nȖ\\x03ȗ\\x05ȗ')\n buf.write('ᒑ\\nȗ\\x03ȗ\\x03ȗ\\x03Ș\\x03Ș\\x03ș')\n buf.write('\\x03ș\\x03ș\\x07șᒚ\\nș\\x0cș\\x0eș')\n buf.write('ᒝ\\x0bș\\x03Ț\\x03Ț\\x03Ț\\x03Ț\\x03Ț')\n buf.write('\\x03ț\\x03ț\\x03Ȝ\\x03Ȝ\\x03ȝ\\x03ȝ\\x03Ȟ')\n buf.write('\\x03Ȟ\\x03ȟ\\x03ȟ\\x03Ƞ\\x03Ƞ\\x03ȡ\\x03ȡ')\n buf.write('\\x03Ȣ\\x03Ȣ\\x03ȣ\\x03ȣ\\x03Ȥ\\x03Ȥ\\x03ȥ')\n buf.write('\\x03ȥ\\x03Ȧ\\x03Ȧ\\x03ȧ\\x03ȧ\\x03Ȩ\\x03Ȩ')\n buf.write('\\x03ȩ\\x03ȩ\\x03Ȫ\\x03Ȫ\\x03ȫ\\x03ȫ\\x03Ȭ')\n buf.write('\\x03Ȭ\\x03ȭ\\x03ȭ\\x03Ȯ\\x03Ȯ\\x03ȯ\\x03ȯ')\n buf.write('\\x03Ȱ\\x03Ȱ\\x03ȱ\\x03ȱ\\x03Ȳ\\x03Ȳ\\x03ȳ')\n buf.write('\\x03ȳ\\x03ȴ\\x03ȴ\\x07ᎺᏅᏐᏛᑵ')\n buf.write(\n '\\x02ȵ\\x03\\x03\\x05\\x04\\x07\\x05\\t\\x06\\x0b\\x07\\r\\x08\\x0f\\t\\x11\\n\\x13\\x0b\\x15\\x0c'\n )\n buf.write(\n \"\\x17\\r\\x19\\x0e\\x1b\\x0f\\x1d\\x10\\x1f\\x11!\\x12#\\x13%\\x14'\\x15)\\x16+\\x17\"\n )\n buf.write('-\\x18/\\x191\\x1a3\\x1b5\\x1c7\\x1d9\\x1e;\\x1f= ?!A\"C#E$G%')\n buf.write(\"I&K'M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i6k7\")\n buf.write('m8o9q:s;u<w=y>{?}@\\x7fA\\x81B\\x83C\\x85D\\x87E\\x89')\n buf.write('F\\x8bG\\x8dH\\x8fI\\x91J\\x93K\\x95L\\x97M\\x99')\n 
buf.write('N\\x9bO\\x9dP\\x9fQ¡R£S¥T§U©')\n buf.write('V«W\\xadX¯Y±Z³[µ\\\\·]¹')\n buf.write('^»_½`¿aÁbÃcÅdÇeÉ')\n buf.write('fËgÍhÏiÑjÓkÕl×mÙ')\n buf.write('nÛoÝpßqárãsåtçué')\n buf.write('vëwíxïyñzó{õ|÷}ù')\n buf.write('~û\\x7fý\\x80ÿ\\x81ā\\x82ă')\n buf.write('\\x83ą\\x84ć\\x85ĉ\\x86ċ\\x87')\n buf.write('č\\x88ď\\x89đ\\x8aē\\x8bĕ')\n buf.write('\\x8cė\\x8dę\\x8eě\\x8fĝ\\x90')\n buf.write('ğ\\x91ġ\\x92ģ\\x93ĥ\\x94ħ')\n buf.write('\\x95ĩ\\x96ī\\x97ĭ\\x98į\\x99')\n buf.write('ı\\x9aij\\x9bĵ\\x9cķ\\x9dĹ')\n buf.write('\\x9eĻ\\x9fĽ\\xa0Ŀ¡Ł¢')\n buf.write('Ń£Ņ¤Ň¥ʼn¦ŋ')\n buf.write('§ō¨ŏ©őªœ«')\n buf.write('ŕ¬ŗ\\xadř®ś¯ŝ')\n buf.write('°ş±š²ţ³ť´')\n buf.write('ŧµũ¶ū·ŭ¸ů')\n buf.write('¹űºų»ŵ¼ŷ½')\n buf.write('Ź¾Ż¿ŽÀſÁƁ')\n buf.write('ÂƃÃƅÄƇÅƉÆ')\n buf.write('ƋÇƍÈƏÉƑÊƓ')\n buf.write('ËƕÌƗÍƙÎƛÏ')\n buf.write('ƝÐƟÑơÒƣÓƥ')\n buf.write('ÔƧÕƩÖƫ×ƭØ')\n buf.write('ƯÙƱÚƳÛƵÜƷ')\n buf.write('ÝƹÞƻßƽàƿá')\n buf.write('ǁâǃãDžäLJålj')\n buf.write('æNjçǍèǏéǑê')\n buf.write('ǓëǕìǗíǙîǛ')\n buf.write('ïǝðǟñǡòǣó')\n buf.write('ǥôǧõǩöǫ÷ǭ')\n buf.write('øǯùDZúdzûǵü')\n buf.write('ǷýǹþǻÿǽĀǿ')\n buf.write('āȁĂȃăȅĄȇą')\n buf.write('ȉĆȋćȍĈȏĉȑ')\n buf.write('ĊȓċȕČȗčșĎ')\n buf.write('țďȝĐȟđȡĒȣ')\n buf.write('ēȥĔȧĕȩĖȫė')\n buf.write('ȭĘȯęȱĚȳěȵ')\n buf.write('ĜȷĝȹĞȻğȽĠ')\n buf.write('ȿġɁĢɃģɅĤɇ')\n buf.write('ĥɉĦɋħɍĨɏĩ')\n buf.write('ɑĪɓīɕĬɗĭə')\n buf.write('ĮɛįɝİɟıɡIJ')\n buf.write('ɣijɥĴɧĵɩĶɫ')\n buf.write('ķɭĸɯĹɱĺɳĻ')\n buf.write('ɵļɷĽɹľɻĿɽ')\n buf.write('ŀɿŁʁłʃŃʅń')\n buf.write('ʇŅʉņʋŇʍňʏ')\n buf.write('ʼnʑŊʓŋʕŌʗō')\n buf.write('ʙŎʛŏʝŐʟőʡ')\n buf.write('ŒʣœʥŔʧŕʩŖ')\n buf.write('ʫŗʭŘʯřʱŚʳ')\n buf.write('śʵŜʷŝʹŞʻş')\n buf.write('ʽŠʿšˁŢ˃ţ˅')\n buf.write('ŤˇťˉŦˋŧˍŨ')\n buf.write('ˏũˑŪ˓ū˕Ŭ˗')\n buf.write('ŭ˙ٲů˝Ű˟ű')\n buf.write('ˡŲˣų˥Ŵ˧ŵ˩')\n buf.write('Ŷ˫ŷ˭Ÿ˯Ź˱ź')\n buf.write('˳Ż˵ż˷Ž˹ž˻')\n buf.write('ſ˽ƀ˿Ɓ́Ƃ̃ƃ')\n buf.write('̅Ƅ̇ƅ̉Ɔ̋Ƈ̍')\n buf.write('ƈ̏Ɖ̑Ɗ̓Ƌ̕ƌ')\n buf.write('̗ƍ̙Ǝ̛Ə̝Ɛ̟')\n buf.write('Ƒ̡ƒ̣Ɠ̥Ɣ̧ƕ')\n buf.write('̩Ɩ̫Ɨ̭Ƙ̯ƙ̱')\n buf.write('ƚ̳ƛ̵Ɯ̷Ɲ̹ƞ')\n buf.write('̻Ɵ̽Ơ̿ớƢ̓')\n 
buf.write('ƣͅƤ͇ƥ͉Ʀ͋Ƨ')\n buf.write('͍ƨ͏Ʃ͑ƪ͓ƫ͕')\n buf.write('Ƭ͗ƭ͙Ʈ͛Ư͝ư')\n buf.write('͟Ʊ͡ƲͣƳͥƴͧ')\n buf.write('ƵͩƶͫƷͭƸͯƹ')\n buf.write('ͱƺͳƻ͵Ƽͷƽ\\u0379')\n buf.write('ƾͻƿͽǀͿǁ\\u0381ǂ')\n buf.write('\\u0383ǃ΅DŽ·DžΉdž\\u038b')\n buf.write('LJ\\u038dLjΏljΑNJΓNj')\n buf.write('ΕnjΗǍΙǎΛǏΝ')\n buf.write('ǐΟǑΡǒΣǓΥǔ')\n buf.write('ΧǕΩǖΫǗέǘί')\n buf.write('ǙαǚγǛεǜηǝ')\n buf.write('ιǞλǟνǠοǡρ')\n buf.write('ǢσǣυǤχǥωǦ')\n buf.write('ϋǧύǨϏǩϑǪϓ')\n buf.write('\\x02ϕ\\x02ϗ\\x02ϙ\\x02ϛ\\x02ϝ\\x02ϟ\\x02ϡ')\n buf.write('ǫϣǬϥǭϧǮϩǯ')\n buf.write('ϫǰϭDZϯDzϱdzϳ')\n buf.write('ǴϵǵϷǶϹǷϻǸ')\n buf.write('ϽǹϿǺЁǻЃǼЅ')\n buf.write('ǽЇǾЉǿЋȀЍȁ')\n buf.write('ЏȂБ\\x02ГȃЕȄЗȅ')\n buf.write('ЙȆЛȇНȈПȉС')\n buf.write('\\x02У\\x02Х\\x02ЧȊЩȋЫȌ')\n buf.write('Э\\x02Я\\x02бȍгȎе\\x02з')\n buf.write('\\x02й\\x02л\\x02н\\x02п\\x02с\\x02у\\x02х')\n buf.write('\\x02ч\\x02щ\\x02ы\\x02э\\x02я\\x02ё\\x02ѓ')\n buf.write('\\x02ѕ\\x02ї\\x02љ\\x02ћ\\x02ѝ\\x02џ\\x02ѡ')\n buf.write(\n \"\\x02ѣ\\x02ѥ\\x02ѧ\\x02\\x03\\x02'\\x05\\x02\\x0c\\x0c\\x0f\\x0f))\\x05\\x022\")\n buf.write(\n ';CHch\\x04\\x02GGgg\\x04\\x02--//\\t\\x02\\x0b\\x0c\\x0f\\x0f\"\"**>>]]}}\\x05\\x02\\x0c'\n )\n buf.write(\n '\\x0c\\x0f\\x0f$$\\x04\\x022;aa\\x05\\x02\\x0b\\x0c\\x0f\\x0f\"\"\\x04\\x02C\\\\c|\\x04\\x02\\x0c'\n )\n buf.write(\n '\\x0c\\x0f\\x0f\\x04\\x02\\x0b\\x0b\"\"\\x05\\x02%&2;aa\\x04\\x02CCcc\\x04\\x02DDdd\\x04\\x02'\n )\n buf.write(\n 'EEee\\x04\\x02FFff\\x04\\x02HHhh\\x04\\x02IIii\\x04\\x02JJjj\\x04\\x02KKkk\\x04\\x02LLll\\x04'\n )\n buf.write(\n '\\x02MMmm\\x04\\x02NNnn\\x04\\x02OOoo\\x04\\x02PPpp\\x04\\x02QQqq\\x04\\x02RRrr\\x04\\x02SSs'\n )\n buf.write(\n 's\\x04\\x02TTtt\\x04\\x02UUuu\\x04\\x02VVvv\\x04\\x02WWww\\x04\\x02XXxx\\x04\\x02YYyy\\x04\\x02'\n )\n buf.write(\n 'ZZzz\\x04\\x02[[{{\\x04\\x02\\\\\\\\||\\x02ᓝ\\x02\\x03\\x03\\x02\\x02\\x02\\x02\\x05\\x03\\x02\\x02\\x02'\n )\n buf.write(\n '\\x02\\x07\\x03\\x02\\x02\\x02\\x02\\t\\x03\\x02\\x02\\x02\\x02\\x0b\\x03\\x02\\x02\\x02\\x02\\r\\x03\\x02\\x02\\x02\\x02\\x0f'\n )\n 
buf.write(\n '\\x03\\x02\\x02\\x02\\x02\\x11\\x03\\x02\\x02\\x02\\x02\\x13\\x03\\x02\\x02\\x02\\x02\\x15\\x03\\x02\\x02\\x02\\x02\\x17\\x03'\n )\n buf.write(\n '\\x02\\x02\\x02\\x02\\x19\\x03\\x02\\x02\\x02\\x02\\x1b\\x03\\x02\\x02\\x02\\x02\\x1d\\x03\\x02\\x02\\x02\\x02\\x1f\\x03\\x02'\n )\n buf.write(\n \"\\x02\\x02\\x02!\\x03\\x02\\x02\\x02\\x02#\\x03\\x02\\x02\\x02\\x02%\\x03\\x02\\x02\\x02\\x02'\\x03\\x02\\x02\\x02\\x02)\\x03\"\n )\n buf.write(\n '\\x02\\x02\\x02\\x02+\\x03\\x02\\x02\\x02\\x02-\\x03\\x02\\x02\\x02\\x02/\\x03\\x02\\x02\\x02\\x021\\x03\\x02\\x02\\x02\\x02'\n )\n buf.write(\n '3\\x03\\x02\\x02\\x02\\x025\\x03\\x02\\x02\\x02\\x027\\x03\\x02\\x02\\x02\\x029\\x03\\x02\\x02\\x02\\x02;\\x03'\n )\n buf.write(\n '\\x02\\x02\\x02\\x02=\\x03\\x02\\x02\\x02\\x02?\\x03\\x02\\x02\\x02\\x02A\\x03\\x02\\x02\\x02\\x02C\\x03\\x02\\x02\\x02\\x02E'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02G\\x03\\x02\\x02\\x02\\x02I\\x03\\x02\\x02\\x02\\x02K\\x03\\x02\\x02\\x02\\x02M\\x03\\x02\\x02\\x02\\x02'\n )\n buf.write(\n 'O\\x03\\x02\\x02\\x02\\x02Q\\x03\\x02\\x02\\x02\\x02S\\x03\\x02\\x02\\x02\\x02U\\x03\\x02\\x02\\x02\\x02W\\x03\\x02\\x02\\x02'\n )\n buf.write(\n '\\x02Y\\x03\\x02\\x02\\x02\\x02[\\x03\\x02\\x02\\x02\\x02]\\x03\\x02\\x02\\x02\\x02_\\x03\\x02\\x02\\x02\\x02a\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02c\\x03\\x02\\x02\\x02\\x02e\\x03\\x02\\x02\\x02\\x02g\\x03\\x02\\x02\\x02\\x02i\\x03\\x02\\x02\\x02\\x02k\\x03\\x02'\n )\n buf.write(\n '\\x02\\x02\\x02m\\x03\\x02\\x02\\x02\\x02o\\x03\\x02\\x02\\x02\\x02q\\x03\\x02\\x02\\x02\\x02s\\x03\\x02\\x02\\x02\\x02u\\x03'\n )\n buf.write(\n '\\x02\\x02\\x02\\x02w\\x03\\x02\\x02\\x02\\x02y\\x03\\x02\\x02\\x02\\x02{\\x03\\x02\\x02\\x02\\x02}\\x03\\x02\\x02\\x02\\x02\\x7f'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02\\x81\\x03\\x02\\x02\\x02\\x02\\x83\\x03\\x02\\x02\\x02\\x02\\x85\\x03\\x02\\x02'\n )\n buf.write(\n 
'\\x02\\x02\\x87\\x03\\x02\\x02\\x02\\x02\\x89\\x03\\x02\\x02\\x02\\x02\\x8b\\x03\\x02\\x02\\x02\\x02\\x8d'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02\\x8f\\x03\\x02\\x02\\x02\\x02\\x91\\x03\\x02\\x02\\x02\\x02\\x93\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02\\x95\\x03\\x02\\x02\\x02\\x02\\x97\\x03\\x02\\x02\\x02\\x02\\x99\\x03\\x02\\x02\\x02\\x02\\x9b'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02\\x9d\\x03\\x02\\x02\\x02\\x02\\x9f\\x03\\x02\\x02\\x02\\x02¡\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02£\\x03\\x02\\x02\\x02\\x02¥\\x03\\x02\\x02\\x02\\x02§\\x03\\x02\\x02\\x02\\x02©'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02«\\x03\\x02\\x02\\x02\\x02\\xad\\x03\\x02\\x02\\x02\\x02¯\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02±\\x03\\x02\\x02\\x02\\x02³\\x03\\x02\\x02\\x02\\x02µ\\x03\\x02\\x02\\x02\\x02·'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02¹\\x03\\x02\\x02\\x02\\x02»\\x03\\x02\\x02\\x02\\x02½\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02¿\\x03\\x02\\x02\\x02\\x02Á\\x03\\x02\\x02\\x02\\x02Ã\\x03\\x02\\x02\\x02\\x02Å'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ç\\x03\\x02\\x02\\x02\\x02É\\x03\\x02\\x02\\x02\\x02Ë\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Í\\x03\\x02\\x02\\x02\\x02Ï\\x03\\x02\\x02\\x02\\x02Ñ\\x03\\x02\\x02\\x02\\x02Ó'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Õ\\x03\\x02\\x02\\x02\\x02×\\x03\\x02\\x02\\x02\\x02Ù\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Û\\x03\\x02\\x02\\x02\\x02Ý\\x03\\x02\\x02\\x02\\x02ß\\x03\\x02\\x02\\x02\\x02á'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ã\\x03\\x02\\x02\\x02\\x02å\\x03\\x02\\x02\\x02\\x02ç\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02é\\x03\\x02\\x02\\x02\\x02ë\\x03\\x02\\x02\\x02\\x02í\\x03\\x02\\x02\\x02\\x02ï'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ñ\\x03\\x02\\x02\\x02\\x02ó\\x03\\x02\\x02\\x02\\x02õ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02÷\\x03\\x02\\x02\\x02\\x02ù\\x03\\x02\\x02\\x02\\x02û\\x03\\x02\\x02\\x02\\x02ý'\n )\n buf.write(\n 
'\\x03\\x02\\x02\\x02\\x02ÿ\\x03\\x02\\x02\\x02\\x02ā\\x03\\x02\\x02\\x02\\x02ă\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ą\\x03\\x02\\x02\\x02\\x02ć\\x03\\x02\\x02\\x02\\x02ĉ\\x03\\x02\\x02\\x02\\x02ċ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02č\\x03\\x02\\x02\\x02\\x02ď\\x03\\x02\\x02\\x02\\x02đ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ē\\x03\\x02\\x02\\x02\\x02ĕ\\x03\\x02\\x02\\x02\\x02ė\\x03\\x02\\x02\\x02\\x02ę'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ě\\x03\\x02\\x02\\x02\\x02ĝ\\x03\\x02\\x02\\x02\\x02ğ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ġ\\x03\\x02\\x02\\x02\\x02ģ\\x03\\x02\\x02\\x02\\x02ĥ\\x03\\x02\\x02\\x02\\x02ħ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ĩ\\x03\\x02\\x02\\x02\\x02ī\\x03\\x02\\x02\\x02\\x02ĭ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02į\\x03\\x02\\x02\\x02\\x02ı\\x03\\x02\\x02\\x02\\x02ij\\x03\\x02\\x02\\x02\\x02ĵ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ķ\\x03\\x02\\x02\\x02\\x02Ĺ\\x03\\x02\\x02\\x02\\x02Ļ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ľ\\x03\\x02\\x02\\x02\\x02Ŀ\\x03\\x02\\x02\\x02\\x02Ł\\x03\\x02\\x02\\x02\\x02Ń'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ņ\\x03\\x02\\x02\\x02\\x02Ň\\x03\\x02\\x02\\x02\\x02ʼn\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ŋ\\x03\\x02\\x02\\x02\\x02ō\\x03\\x02\\x02\\x02\\x02ŏ\\x03\\x02\\x02\\x02\\x02ő'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02œ\\x03\\x02\\x02\\x02\\x02ŕ\\x03\\x02\\x02\\x02\\x02ŗ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ř\\x03\\x02\\x02\\x02\\x02ś\\x03\\x02\\x02\\x02\\x02ŝ\\x03\\x02\\x02\\x02\\x02ş'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02š\\x03\\x02\\x02\\x02\\x02ţ\\x03\\x02\\x02\\x02\\x02ť\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ŧ\\x03\\x02\\x02\\x02\\x02ũ\\x03\\x02\\x02\\x02\\x02ū\\x03\\x02\\x02\\x02\\x02ŭ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ů\\x03\\x02\\x02\\x02\\x02ű\\x03\\x02\\x02\\x02\\x02ų\\x03\\x02\\x02'\n )\n buf.write(\n 
'\\x02\\x02ŵ\\x03\\x02\\x02\\x02\\x02ŷ\\x03\\x02\\x02\\x02\\x02Ź\\x03\\x02\\x02\\x02\\x02Ż'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ž\\x03\\x02\\x02\\x02\\x02ſ\\x03\\x02\\x02\\x02\\x02Ɓ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ƃ\\x03\\x02\\x02\\x02\\x02ƅ\\x03\\x02\\x02\\x02\\x02Ƈ\\x03\\x02\\x02\\x02\\x02Ɖ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ƌ\\x03\\x02\\x02\\x02\\x02ƍ\\x03\\x02\\x02\\x02\\x02Ə\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ƒ\\x03\\x02\\x02\\x02\\x02Ɠ\\x03\\x02\\x02\\x02\\x02ƕ\\x03\\x02\\x02\\x02\\x02Ɨ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ƙ\\x03\\x02\\x02\\x02\\x02ƛ\\x03\\x02\\x02\\x02\\x02Ɲ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ɵ\\x03\\x02\\x02\\x02\\x02ơ\\x03\\x02\\x02\\x02\\x02ƣ\\x03\\x02\\x02\\x02\\x02ƥ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ƨ\\x03\\x02\\x02\\x02\\x02Ʃ\\x03\\x02\\x02\\x02\\x02ƫ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ƭ\\x03\\x02\\x02\\x02\\x02Ư\\x03\\x02\\x02\\x02\\x02Ʊ\\x03\\x02\\x02\\x02\\x02Ƴ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ƶ\\x03\\x02\\x02\\x02\\x02Ʒ\\x03\\x02\\x02\\x02\\x02ƹ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ƻ\\x03\\x02\\x02\\x02\\x02ƽ\\x03\\x02\\x02\\x02\\x02ƿ\\x03\\x02\\x02\\x02\\x02ǁ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ǃ\\x03\\x02\\x02\\x02\\x02Dž\\x03\\x02\\x02\\x02\\x02LJ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02lj\\x03\\x02\\x02\\x02\\x02Nj\\x03\\x02\\x02\\x02\\x02Ǎ\\x03\\x02\\x02\\x02\\x02Ǐ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ǒ\\x03\\x02\\x02\\x02\\x02Ǔ\\x03\\x02\\x02\\x02\\x02Ǖ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ǘ\\x03\\x02\\x02\\x02\\x02Ǚ\\x03\\x02\\x02\\x02\\x02Ǜ\\x03\\x02\\x02\\x02\\x02ǝ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ǟ\\x03\\x02\\x02\\x02\\x02ǡ\\x03\\x02\\x02\\x02\\x02ǣ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ǥ\\x03\\x02\\x02\\x02\\x02ǧ\\x03\\x02\\x02\\x02\\x02ǩ\\x03\\x02\\x02\\x02\\x02ǫ'\n )\n buf.write(\n 
'\\x03\\x02\\x02\\x02\\x02ǭ\\x03\\x02\\x02\\x02\\x02ǯ\\x03\\x02\\x02\\x02\\x02DZ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02dz\\x03\\x02\\x02\\x02\\x02ǵ\\x03\\x02\\x02\\x02\\x02Ƿ\\x03\\x02\\x02\\x02\\x02ǹ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ǻ\\x03\\x02\\x02\\x02\\x02ǽ\\x03\\x02\\x02\\x02\\x02ǿ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ȁ\\x03\\x02\\x02\\x02\\x02ȃ\\x03\\x02\\x02\\x02\\x02ȅ\\x03\\x02\\x02\\x02\\x02ȇ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ȉ\\x03\\x02\\x02\\x02\\x02ȋ\\x03\\x02\\x02\\x02\\x02ȍ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ȏ\\x03\\x02\\x02\\x02\\x02ȑ\\x03\\x02\\x02\\x02\\x02ȓ\\x03\\x02\\x02\\x02\\x02ȕ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ȗ\\x03\\x02\\x02\\x02\\x02ș\\x03\\x02\\x02\\x02\\x02ț\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ȝ\\x03\\x02\\x02\\x02\\x02ȟ\\x03\\x02\\x02\\x02\\x02ȡ\\x03\\x02\\x02\\x02\\x02ȣ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ȥ\\x03\\x02\\x02\\x02\\x02ȧ\\x03\\x02\\x02\\x02\\x02ȩ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ȫ\\x03\\x02\\x02\\x02\\x02ȭ\\x03\\x02\\x02\\x02\\x02ȯ\\x03\\x02\\x02\\x02\\x02ȱ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ȳ\\x03\\x02\\x02\\x02\\x02ȵ\\x03\\x02\\x02\\x02\\x02ȷ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ȹ\\x03\\x02\\x02\\x02\\x02Ȼ\\x03\\x02\\x02\\x02\\x02Ƚ\\x03\\x02\\x02\\x02\\x02ȿ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ɂ\\x03\\x02\\x02\\x02\\x02Ƀ\\x03\\x02\\x02\\x02\\x02Ʌ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ɇ\\x03\\x02\\x02\\x02\\x02ɉ\\x03\\x02\\x02\\x02\\x02ɋ\\x03\\x02\\x02\\x02\\x02ɍ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ɏ\\x03\\x02\\x02\\x02\\x02ɑ\\x03\\x02\\x02\\x02\\x02ɓ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ɕ\\x03\\x02\\x02\\x02\\x02ɗ\\x03\\x02\\x02\\x02\\x02ə\\x03\\x02\\x02\\x02\\x02ɛ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ɝ\\x03\\x02\\x02\\x02\\x02ɟ\\x03\\x02\\x02\\x02\\x02ɡ\\x03\\x02\\x02'\n )\n buf.write(\n 
'\\x02\\x02ɣ\\x03\\x02\\x02\\x02\\x02ɥ\\x03\\x02\\x02\\x02\\x02ɧ\\x03\\x02\\x02\\x02\\x02ɩ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ɫ\\x03\\x02\\x02\\x02\\x02ɭ\\x03\\x02\\x02\\x02\\x02ɯ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ɱ\\x03\\x02\\x02\\x02\\x02ɳ\\x03\\x02\\x02\\x02\\x02ɵ\\x03\\x02\\x02\\x02\\x02ɷ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ɹ\\x03\\x02\\x02\\x02\\x02ɻ\\x03\\x02\\x02\\x02\\x02ɽ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ɿ\\x03\\x02\\x02\\x02\\x02ʁ\\x03\\x02\\x02\\x02\\x02ʃ\\x03\\x02\\x02\\x02\\x02ʅ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ʇ\\x03\\x02\\x02\\x02\\x02ʉ\\x03\\x02\\x02\\x02\\x02ʋ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ʍ\\x03\\x02\\x02\\x02\\x02ʏ\\x03\\x02\\x02\\x02\\x02ʑ\\x03\\x02\\x02\\x02\\x02ʓ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ʕ\\x03\\x02\\x02\\x02\\x02ʗ\\x03\\x02\\x02\\x02\\x02ʙ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ʛ\\x03\\x02\\x02\\x02\\x02ʝ\\x03\\x02\\x02\\x02\\x02ʟ\\x03\\x02\\x02\\x02\\x02ʡ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ʣ\\x03\\x02\\x02\\x02\\x02ʥ\\x03\\x02\\x02\\x02\\x02ʧ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ʩ\\x03\\x02\\x02\\x02\\x02ʫ\\x03\\x02\\x02\\x02\\x02ʭ\\x03\\x02\\x02\\x02\\x02ʯ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ʱ\\x03\\x02\\x02\\x02\\x02ʳ\\x03\\x02\\x02\\x02\\x02ʵ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ʷ\\x03\\x02\\x02\\x02\\x02ʹ\\x03\\x02\\x02\\x02\\x02ʻ\\x03\\x02\\x02\\x02\\x02ʽ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ʿ\\x03\\x02\\x02\\x02\\x02ˁ\\x03\\x02\\x02\\x02\\x02˃\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02˅\\x03\\x02\\x02\\x02\\x02ˇ\\x03\\x02\\x02\\x02\\x02ˉ\\x03\\x02\\x02\\x02\\x02ˋ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ˍ\\x03\\x02\\x02\\x02\\x02ˏ\\x03\\x02\\x02\\x02\\x02ˑ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02˓\\x03\\x02\\x02\\x02\\x02˕\\x03\\x02\\x02\\x02\\x02˗\\x03\\x02\\x02\\x02\\x02˙'\n )\n buf.write(\n 
'\\x03\\x02\\x02\\x02\\x02˛\\x03\\x02\\x02\\x02\\x02˝\\x03\\x02\\x02\\x02\\x02˟\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ˡ\\x03\\x02\\x02\\x02\\x02ˣ\\x03\\x02\\x02\\x02\\x02˥\\x03\\x02\\x02\\x02\\x02˧'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02˩\\x03\\x02\\x02\\x02\\x02˫\\x03\\x02\\x02\\x02\\x02˭\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02˯\\x03\\x02\\x02\\x02\\x02˱\\x03\\x02\\x02\\x02\\x02˳\\x03\\x02\\x02\\x02\\x02˵'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02˷\\x03\\x02\\x02\\x02\\x02˹\\x03\\x02\\x02\\x02\\x02˻\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02˽\\x03\\x02\\x02\\x02\\x02˿\\x03\\x02\\x02\\x02\\x02́\\x03\\x02\\x02\\x02\\x02̃'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02̅\\x03\\x02\\x02\\x02\\x02̇\\x03\\x02\\x02\\x02\\x02̉\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02̋\\x03\\x02\\x02\\x02\\x02̍\\x03\\x02\\x02\\x02\\x02̏\\x03\\x02\\x02\\x02\\x02̑'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02̓\\x03\\x02\\x02\\x02\\x02̕\\x03\\x02\\x02\\x02\\x02̗\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02̙\\x03\\x02\\x02\\x02\\x02̛\\x03\\x02\\x02\\x02\\x02̝\\x03\\x02\\x02\\x02\\x02̟'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02̡\\x03\\x02\\x02\\x02\\x02̣\\x03\\x02\\x02\\x02\\x02̥\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02̧\\x03\\x02\\x02\\x02\\x02̩\\x03\\x02\\x02\\x02\\x02̫\\x03\\x02\\x02\\x02\\x02̭'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02̯\\x03\\x02\\x02\\x02\\x02̱\\x03\\x02\\x02\\x02\\x02̳\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02̵\\x03\\x02\\x02\\x02\\x02̷\\x03\\x02\\x02\\x02\\x02̹\\x03\\x02\\x02\\x02\\x02̻'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02̽\\x03\\x02\\x02\\x02\\x02̿\\x03\\x02\\x02\\x02\\x02́\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02̓\\x03\\x02\\x02\\x02\\x02ͅ\\x03\\x02\\x02\\x02\\x02͇\\x03\\x02\\x02\\x02\\x02͉'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02͋\\x03\\x02\\x02\\x02\\x02͍\\x03\\x02\\x02\\x02\\x02͏\\x03\\x02\\x02'\n )\n buf.write(\n 
'\\x02\\x02͑\\x03\\x02\\x02\\x02\\x02͓\\x03\\x02\\x02\\x02\\x02͕\\x03\\x02\\x02\\x02\\x02͗'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02͙\\x03\\x02\\x02\\x02\\x02͛\\x03\\x02\\x02\\x02\\x02͝\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02͟\\x03\\x02\\x02\\x02\\x02͡\\x03\\x02\\x02\\x02\\x02ͣ\\x03\\x02\\x02\\x02\\x02ͥ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ͧ\\x03\\x02\\x02\\x02\\x02ͩ\\x03\\x02\\x02\\x02\\x02ͫ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ͭ\\x03\\x02\\x02\\x02\\x02ͯ\\x03\\x02\\x02\\x02\\x02ͱ\\x03\\x02\\x02\\x02\\x02ͳ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02͵\\x03\\x02\\x02\\x02\\x02ͷ\\x03\\x02\\x02\\x02\\x02\\u0379\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ͻ\\x03\\x02\\x02\\x02\\x02ͽ\\x03\\x02\\x02\\x02\\x02Ϳ\\x03\\x02\\x02\\x02\\x02\\u0381'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02\\u0383\\x03\\x02\\x02\\x02\\x02΅\\x03\\x02\\x02\\x02\\x02·\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ή\\x03\\x02\\x02\\x02\\x02\\u038b\\x03\\x02\\x02\\x02\\x02\\u038d\\x03\\x02\\x02\\x02\\x02Ώ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Α\\x03\\x02\\x02\\x02\\x02Γ\\x03\\x02\\x02\\x02\\x02Ε\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Η\\x03\\x02\\x02\\x02\\x02Ι\\x03\\x02\\x02\\x02\\x02Λ\\x03\\x02\\x02\\x02\\x02Ν'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ο\\x03\\x02\\x02\\x02\\x02Ρ\\x03\\x02\\x02\\x02\\x02Σ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Υ\\x03\\x02\\x02\\x02\\x02Χ\\x03\\x02\\x02\\x02\\x02Ω\\x03\\x02\\x02\\x02\\x02Ϋ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02έ\\x03\\x02\\x02\\x02\\x02ί\\x03\\x02\\x02\\x02\\x02α\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02γ\\x03\\x02\\x02\\x02\\x02ε\\x03\\x02\\x02\\x02\\x02η\\x03\\x02\\x02\\x02\\x02ι'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02λ\\x03\\x02\\x02\\x02\\x02ν\\x03\\x02\\x02\\x02\\x02ο\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ρ\\x03\\x02\\x02\\x02\\x02σ\\x03\\x02\\x02\\x02\\x02υ\\x03\\x02\\x02\\x02\\x02χ'\n )\n buf.write(\n 
'\\x03\\x02\\x02\\x02\\x02ω\\x03\\x02\\x02\\x02\\x02ϋ\\x03\\x02\\x02\\x02\\x02ύ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ϗ\\x03\\x02\\x02\\x02\\x02ϑ\\x03\\x02\\x02\\x02\\x02ϓ\\x03\\x02\\x02\\x02\\x02ϡ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ϣ\\x03\\x02\\x02\\x02\\x02ϥ\\x03\\x02\\x02\\x02\\x02ϧ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ϩ\\x03\\x02\\x02\\x02\\x02ϫ\\x03\\x02\\x02\\x02\\x02ϭ\\x03\\x02\\x02\\x02\\x02ϯ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ϱ\\x03\\x02\\x02\\x02\\x02ϳ\\x03\\x02\\x02\\x02\\x02ϵ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ϸ\\x03\\x02\\x02\\x02\\x02Ϲ\\x03\\x02\\x02\\x02\\x02ϻ\\x03\\x02\\x02\\x02\\x02Ͻ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ͽ\\x03\\x02\\x02\\x02\\x02Ё\\x03\\x02\\x02\\x02\\x02Ѓ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ѕ\\x03\\x02\\x02\\x02\\x02Ї\\x03\\x02\\x02\\x02\\x02Љ\\x03\\x02\\x02\\x02\\x02Ћ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ѝ\\x03\\x02\\x02\\x02\\x02Џ\\x03\\x02\\x02\\x02\\x02Г\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Е\\x03\\x02\\x02\\x02\\x02З\\x03\\x02\\x02\\x02\\x02Й\\x03\\x02\\x02\\x02\\x02Л'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Н\\x03\\x02\\x02\\x02\\x02П\\x03\\x02\\x02\\x02\\x02Ч\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Щ\\x03\\x02\\x02\\x02\\x02Ы\\x03\\x02\\x02\\x02\\x02б\\x03\\x02\\x02\\x02\\x02г'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x03ѩ\\x03\\x02\\x02\\x02\\x05Ѭ\\x03\\x02\\x02\\x02\\x07Ѯ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\tѲ\\x03\\x02\\x02\\x02\\x0bѸ\\x03\\x02\\x02\\x02\\rѾ\\x03\\x02\\x02\\x02\\x0f'\n )\n buf.write(\n '҈\\x03\\x02\\x02\\x02\\x11Ҍ\\x03\\x02\\x02\\x02\\x13Ғ\\x03\\x02\\x02\\x02\\x15Қ')\n buf.write(\n '\\x03\\x02\\x02\\x02\\x17Ҟ\\x03\\x02\\x02\\x02\\x19Ң\\x03\\x02\\x02\\x02\\x1bҨ\\x03'\n )\n buf.write(\n '\\x02\\x02\\x02\\x1dҫ\\x03\\x02\\x02\\x02\\x1fҲ\\x03\\x02\\x02\\x02!ҹ\\x03\\x02\\x02'\n )\n buf.write(\n \"\\x02#ҽ\\x03\\x02\\x02\\x02%Ӈ\\x03\\x02\\x02\\x02'ӊ\\x03\\x02\\x02\\x02)Ӕ\")\n buf.write(\n 
'\\x03\\x02\\x02\\x02+Ӛ\\x03\\x02\\x02\\x02-ӡ\\x03\\x02\\x02\\x02/Ӧ\\x03\\x02\\x02\\x02'\n )\n buf.write('1Ӱ\\x03\\x02\\x02\\x023ԇ\\x03\\x02\\x02\\x025ԍ\\x03\\x02\\x02\\x027')\n buf.write('Ԕ\\x03\\x02\\x02\\x029Ԛ\\x03\\x02\\x02\\x02;Ԣ\\x03\\x02\\x02\\x02=Ԩ\\x03'\n )\n buf.write(\n '\\x02\\x02\\x02?Զ\\x03\\x02\\x02\\x02AՃ\\x03\\x02\\x02\\x02CՒ\\x03\\x02\\x02\\x02E\\u0557'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02G՝\\x03\\x02\\x02\\x02Iբ\\x03\\x02\\x02\\x02Kժ\\x03\\x02\\x02\\x02'\n )\n buf.write(\n 'Mկ\\x03\\x02\\x02\\x02Oշ\\x03\\x02\\x02\\x02Qռ\\x03\\x02\\x02\\x02Sտ\\x03')\n buf.write(\n '\\x02\\x02\\x02Uք\\x03\\x02\\x02\\x02Wֆ\\x03\\x02\\x02\\x02Y\\u058c\\x03\\x02\\x02\\x02[֑'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02]֛\\x03\\x02\\x02\\x02_֣\\x03\\x02\\x02\\x02a֨\\x03\\x02\\x02\\x02'\n )\n buf.write(\n 'c֭\\x03\\x02\\x02\\x02eֲ\\x03\\x02\\x02\\x02gֺ\\x03\\x02\\x02\\x02iׄ\\x03')\n buf.write(\n '\\x02\\x02\\x02k\\u05ca\\x03\\x02\\x02\\x02m\\u05ce\\x03\\x02\\x02\\x02oד\\x03\\x02\\x02\\x02qי'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02sס\\x03\\x02\\x02\\x02uש\\x03\\x02\\x02\\x02wױ\\x03\\x02\\x02\\x02'\n )\n buf.write(\n 'y\\u05f9\\x03\\x02\\x02\\x02{\\u0600\\x03\\x02\\x02\\x02}؊\\x03\\x02\\x02\\x02\\x7fؘ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x81ؠ\\x03\\x02\\x02\\x02\\x83ة\\x03\\x02\\x02\\x02\\x85')\n buf.write('ر\\x03\\x02\\x02\\x02\\x87ف\\x03\\x02\\x02\\x02\\x89ي\\x03\\x02\\x02\\x02'\n )\n buf.write('\\x8bٕ\\x03\\x02\\x02\\x02\\x8d١\\x03\\x02\\x02\\x02\\x8f٭\\x03')\n buf.write('\\x02\\x02\\x02\\x91ٵ\\x03\\x02\\x02\\x02\\x93ٽ\\x03\\x02\\x02\\x02\\x95چ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x97ڎ\\x03\\x02\\x02\\x02\\x99ښ\\x03\\x02\\x02\\x02\\x9b')\n buf.write('ڪ\\x03\\x02\\x02\\x02\\x9dگ\\x03\\x02\\x02\\x02\\x9fڵ\\x03\\x02\\x02\\x02'\n )\n buf.write('¡ڼ\\x03\\x02\\x02\\x02£ۂ\\x03\\x02\\x02\\x02¥ۇ\\x03')\n buf.write('\\x02\\x02\\x02§ۏ\\x03\\x02\\x02\\x02©ۜ\\x03\\x02\\x02\\x02«ۣ')\n buf.write('\\x03\\x02\\x02\\x02\\xadۯ\\x03\\x02\\x02\\x02¯۵\\x03\\x02\\x02\\x02±')\n 
buf.write('ۺ\\x03\\x02\\x02\\x02³܃\\x03\\x02\\x02\\x02µ܈\\x03\\x02\\x02\\x02')\n buf.write('·܌\\x03\\x02\\x02\\x02¹ܛ\\x03\\x02\\x02\\x02»ܦ\\x03')\n buf.write('\\x02\\x02\\x02½ܪ\\x03\\x02\\x02\\x02¿ܰ\\x03\\x02\\x02\\x02Áܴ')\n buf.write('\\x03\\x02\\x02\\x02Ãܼ\\x03\\x02\\x02\\x02Å݄\\x03\\x02\\x02\\x02Ç')\n buf.write('ݎ\\x03\\x02\\x02\\x02Éݘ\\x03\\x02\\x02\\x02Ëݠ\\x03\\x02\\x02\\x02')\n buf.write('Íݩ\\x03\\x02\\x02\\x02Ïݲ\\x03\\x02\\x02\\x02Ñݺ\\x03')\n buf.write('\\x02\\x02\\x02Óށ\\x03\\x02\\x02\\x02Õއ\\x03\\x02\\x02\\x02×ތ')\n buf.write('\\x03\\x02\\x02\\x02Ùޚ\\x03\\x02\\x02\\x02Ûޤ\\x03\\x02\\x02\\x02Ý')\n buf.write('ެ\\x03\\x02\\x02\\x02ß\\u07b9\\x03\\x02\\x02\\x02á߂\\x03\\x02\\x02\\x02')\n buf.write('ãߋ\\x03\\x02\\x02\\x02åߒ\\x03\\x02\\x02\\x02çߗ\\x03')\n buf.write('\\x02\\x02\\x02é߰\\x03\\x02\\x02\\x02ëߵ\\x03\\x02\\x02\\x02í߽')\n buf.write('\\x03\\x02\\x02\\x02ïࠂ\\x03\\x02\\x02\\x02ñࠈ\\x03\\x02\\x02\\x02ó')\n buf.write('ࠎ\\x03\\x02\\x02\\x02õࠕ\\x03\\x02\\x02\\x02÷ࠞ\\x03\\x02\\x02\\x02')\n buf.write('ùࠢ\\x03\\x02\\x02\\x02û࠱\\x03\\x02\\x02\\x02ý࠵\\x03')\n buf.write('\\x02\\x02\\x02ÿ࠼\\x03\\x02\\x02\\x02āࡃ\\x03\\x02\\x02\\x02ăࡌ')\n buf.write('\\x03\\x02\\x02\\x02ąࡓ\\x03\\x02\\x02\\x02ć\\u085d\\x03\\x02\\x02\\x02ĉ')\n buf.write('\\u086c\\x03\\x02\\x02\\x02ċࡷ\\x03\\x02\\x02\\x02čࡿ\\x03\\x02\\x02\\x02')\n buf.write('ďࢉ\\x03\\x02\\x02\\x02đ\\u0891\\x03\\x02\\x02\\x02ē࢘\\x03')\n buf.write('\\x02\\x02\\x02ĕ࢝\\x03\\x02\\x02\\x02ėࢥ\\x03\\x02\\x02\\x02ęࢮ')\n buf.write('\\x03\\x02\\x02\\x02ěࢶ\\x03\\x02\\x02\\x02ĝࢾ\\x03\\x02\\x02\\x02ğ')\n buf.write('ࣄ\\x03\\x02\\x02\\x02ġ࣊\\x03\\x02\\x02\\x02ģ࣐\\x03\\x02\\x02\\x02')\n buf.write('ĥࣖ\\x03\\x02\\x02\\x02ħ\\u08e2\\x03\\x02\\x02\\x02ĩࣨ\\x03')\n buf.write('\\x02\\x02\\x02īࣲ\\x03\\x02\\x02\\x02ĭࣺ\\x03\\x02\\x02\\x02įࣾ')\n buf.write('\\x03\\x02\\x02\\x02ıअ\\x03\\x02\\x02\\x02ijऋ\\x03\\x02\\x02\\x02ĵ')\n buf.write('ऐ\\x03\\x02\\x02\\x02ķक\\x03\\x02\\x02\\x02Ĺञ\\x03\\x02\\x02\\x02')\n 
buf.write('Ļण\\x03\\x02\\x02\\x02Ľऩ\\x03\\x02\\x02\\x02Ŀय\\x03')\n buf.write('\\x02\\x02\\x02Łस\\x03\\x02\\x02\\x02Ńऽ\\x03\\x02\\x02\\x02Ņॄ')\n buf.write('\\x03\\x02\\x02\\x02Ňॉ\\x03\\x02\\x02\\x02ʼnॎ\\x03\\x02\\x02\\x02ŋ')\n buf.write('॑\\x03\\x02\\x02\\x02ōक़\\x03\\x02\\x02\\x02ŏॢ\\x03\\x02\\x02\\x02')\n buf.write('ő॥\\x03\\x02\\x02\\x02œ७\\x03\\x02\\x02\\x02ŕॷ\\x03')\n buf.write('\\x02\\x02\\x02ŗঁ\\x03\\x02\\x02\\x02řঈ\\x03\\x02\\x02\\x02ś\\u098e')\n buf.write('\\x03\\x02\\x02\\x02ŝখ\\x03\\x02\\x02\\x02şঠ\\x03\\x02\\x02\\x02š')\n buf.write('ন\\x03\\x02\\x02\\x02ţ\\u09b1\\x03\\x02\\x02\\x02ťস\\x03\\x02\\x02\\x02')\n buf.write('ŧা\\x03\\x02\\x02\\x02ũৄ\\x03\\x02\\x02\\x02ūো\\x03')\n buf.write(\n '\\x02\\x02\\x02ŭ\\u09d8\\x03\\x02\\x02\\x02ůৠ\\x03\\x02\\x02\\x02ű\\u09e4')\n buf.write('\\x03\\x02\\x02\\x02ų৬\\x03\\x02\\x02\\x02ŵ৶\\x03\\x02\\x02\\x02ŷ')\n buf.write(\n '\\u09ff\\x03\\x02\\x02\\x02Ź\\u0a04\\x03\\x02\\x02\\x02Żਏ\\x03\\x02\\x02\\x02')\n buf.write('Ž\\u0a12\\x03\\x02\\x02\\x02ſਜ\\x03\\x02\\x02\\x02Ɓਤ\\x03')\n buf.write('\\x02\\x02\\x02ƃ\\u0a29\\x03\\x02\\x02\\x02ƅਮ\\x03\\x02\\x02\\x02Ƈਲ਼')\n buf.write('\\x03\\x02\\x02\\x02Ɖ਼\\x03\\x02\\x02\\x02Ƌੁ\\x03\\x02\\x02\\x02ƍ')\n buf.write('ੌ\\x03\\x02\\x02\\x02Ə\\u0a54\\x03\\x02\\x02\\x02Ƒਖ਼\\x03\\x02\\x02\\x02')\n buf.write('Ɠ\\u0a5f\\x03\\x02\\x02\\x02ƕ੧\\x03\\x02\\x02\\x02Ɨ੬\\x03')\n buf.write(\n '\\x02\\x02\\x02ƙੲ\\x03\\x02\\x02\\x02ƛ\\u0a78\\x03\\x02\\x02\\x02Ɲ\\u0a7e')\n buf.write('\\x03\\x02\\x02\\x02Ɵ\\u0a84\\x03\\x02\\x02\\x02ơઊ\\x03\\x02\\x02\\x02ƣ')\n buf.write('એ\\x03\\x02\\x02\\x02ƥખ\\x03\\x02\\x02\\x02Ƨચ\\x03\\x02\\x02\\x02')\n buf.write('Ʃડ\\x03\\x02\\x02\\x02ƫધ\\x03\\x02\\x02\\x02ƭબ\\x03')\n buf.write(\n '\\x02\\x02\\x02Ư\\u0ab1\\x03\\x02\\x02\\x02Ʊશ\\x03\\x02\\x02\\x02Ƴ\\u0aba')\n buf.write('\\x03\\x02\\x02\\x02Ƶૂ\\x03\\x02\\x02\\x02Ʒો\\x03\\x02\\x02\\x02ƹ')\n buf.write(\n '\\u0ad4\\x03\\x02\\x02\\x02ƻ\\u0adb\\x03\\x02\\x02\\x02ƽૡ\\x03\\x02\\x02\\x02')\n 
buf.write('ƿ૧\\x03\\x02\\x02\\x02ǁ૮\\x03\\x02\\x02\\x02ǃ\\u0af7\\x03')\n buf.write('\\x02\\x02\\x02Dž\\u0b00\\x03\\x02\\x02\\x02LJଅ\\x03\\x02\\x02\\x02ljଋ')\n buf.write('\\x03\\x02\\x02\\x02Nj\\u0b12\\x03\\x02\\x02\\x02Ǎଘ\\x03\\x02\\x02\\x02Ǐ')\n buf.write('ଡ\\x03\\x02\\x02\\x02Ǒଦ\\x03\\x02\\x02\\x02Ǔପ\\x03\\x02\\x02\\x02')\n buf.write('Ǖଲ\\x03\\x02\\x02\\x02Ǘ\\u0b3b\\x03\\x02\\x02\\x02Ǚି\\x03')\n buf.write(\n '\\x02\\x02\\x02Ǜ\\u0b45\\x03\\x02\\x02\\x02ǝ\\u0b4e\\x03\\x02\\x02\\x02ǟ\\u0b54'\n )\n buf.write('\\x03\\x02\\x02\\x02ǡ\\u0b5b\\x03\\x02\\x02\\x02ǣୟ\\x03\\x02\\x02\\x02ǥ')\n buf.write('ୢ\\x03\\x02\\x02\\x02ǧ୪\\x03\\x02\\x02\\x02ǩ୲\\x03\\x02\\x02\\x02')\n buf.write('ǫ\\u0b79\\x03\\x02\\x02\\x02ǭ\\u0b81\\x03\\x02\\x02\\x02ǯஒ\\x03')\n buf.write(\n '\\x02\\x02\\x02DZ\\u0b9d\\x03\\x02\\x02\\x02dzந\\x03\\x02\\x02\\x02ǵ\\u0bad')\n buf.write('\\x03\\x02\\x02\\x02Ƿவ\\x03\\x02\\x02\\x02ǹ\\u0bc3\\x03\\x02\\x02\\x02ǻ')\n buf.write(\n 'ே\\x03\\x02\\x02\\x02ǽ\\u0bce\\x03\\x02\\x02\\x02ǿ\\u0bd3\\x03\\x02\\x02\\x02')\n buf.write('ȁ\\u0bd9\\x03\\x02\\x02\\x02ȃ\\u0be0\\x03\\x02\\x02\\x02ȅ௨\\x03')\n buf.write('\\x02\\x02\\x02ȇ௲\\x03\\x02\\x02\\x02ȉ௹\\x03\\x02\\x02\\x02ȋ\\u0bfc')\n buf.write('\\x03\\x02\\x02\\x02ȍఀ\\x03\\x02\\x02\\x02ȏఄ\\x03\\x02\\x02\\x02ȑ')\n buf.write('ఈ\\x03\\x02\\x02\\x02ȓఋ\\x03\\x02\\x02\\x02ȕఐ\\x03\\x02\\x02\\x02')\n buf.write('ȗక\\x03\\x02\\x02\\x02șజ\\x03\\x02\\x02\\x02țట\\x03')\n buf.write('\\x02\\x02\\x02ȝధ\\x03\\x02\\x02\\x02ȟభ\\x03\\x02\\x02\\x02ȡస')\n buf.write('\\x03\\x02\\x02\\x02ȣీ\\x03\\x02\\x02\\x02ȥౄ\\x03\\x02\\x02\\x02ȧ')\n buf.write('ొ\\x03\\x02\\x02\\x02ȩ\\u0c4f\\x03\\x02\\x02\\x02ȫౚ\\x03\\x02\\x02\\x02')\n buf.write('ȭౢ\\x03\\x02\\x02\\x02ȯ\\u0c72\\x03\\x02\\x02\\x02ȱ౽\\x03')\n buf.write('\\x02\\x02\\x02ȳ಄\\x03\\x02\\x02\\x02ȵಎ\\x03\\x02\\x02\\x02ȷಖ')\n buf.write('\\x03\\x02\\x02\\x02ȹಛ\\x03\\x02\\x02\\x02Ȼತ\\x03\\x02\\x02\\x02Ƚ')\n buf.write(\n 'ಪ\\x03\\x02\\x02\\x02ȿ\\u0cb4\\x03\\x02\\x02\\x02Ɂ\\u0cba\\x03\\x02\\x02\\x02')\n 
buf.write('Ƀಿ\\x03\\x02\\x02\\x02Ʌೋ\\x03\\x02\\x02\\x02ɇ\\u0cd4\\x03')\n buf.write('\\x02\\x02\\x02ɉೞ\\x03\\x02\\x02\\x02ɋ\\u0ce5\\x03\\x02\\x02\\x02ɍ೯')\n buf.write('\\x03\\x02\\x02\\x02ɏ\\u0cf9\\x03\\x02\\x02\\x02ɑഁ\\x03\\x02\\x02\\x02ɓ')\n buf.write('ഇ\\x03\\x02\\x02\\x02ɕ\\u0d11\\x03\\x02\\x02\\x02ɗഗ\\x03\\x02\\x02\\x02')\n buf.write('əഝ\\x03\\x02\\x02\\x02ɛഡ\\x03\\x02\\x02\\x02ɝദ\\x03')\n buf.write('\\x02\\x02\\x02ɟഫ\\x03\\x02\\x02\\x02ɡല\\x03\\x02\\x02\\x02ɣശ')\n buf.write('\\x03\\x02\\x02\\x02ɥീ\\x03\\x02\\x02\\x02ɧൌ\\x03\\x02\\x02\\x02ɩ')\n buf.write(\n '\\u0d53\\x03\\x02\\x02\\x02ɫ൝\\x03\\x02\\x02\\x02ɭ\\u0d64\\x03\\x02\\x02\\x02')\n buf.write('ɯ൬\\x03\\x02\\x02\\x02ɱ൴\\x03\\x02\\x02\\x02ɳඈ\\x03')\n buf.write('\\x02\\x02\\x02ɵඏ\\x03\\x02\\x02\\x02ɷග\\x03\\x02\\x02\\x02ɹඣ')\n buf.write('\\x03\\x02\\x02\\x02ɻත\\x03\\x02\\x02\\x02ɽඳ\\x03\\x02\\x02\\x02ɿ')\n buf.write('ර\\x03\\x02\\x02\\x02ʁෂ\\x03\\x02\\x02\\x02ʃ\\u0dc8\\x03\\x02\\x02\\x02')\n buf.write('ʅෑ\\x03\\x02\\x02\\x02ʇෘ\\x03\\x02\\x02\\x02ʉො\\x03')\n buf.write('\\x02\\x02\\x02ʋ\\u0de2\\x03\\x02\\x02\\x02ʍ෧\\x03\\x02\\x02\\x02ʏ෭')\n buf.write('\\x03\\x02\\x02\\x02ʑ෴\\x03\\x02\\x02\\x02ʓ\\u0df9\\x03\\x02\\x02\\x02ʕ')\n buf.write('ฃ\\x03\\x02\\x02\\x02ʗช\\x03\\x02\\x02\\x02ʙถ\\x03\\x02\\x02\\x02')\n buf.write('ʛบ\\x03\\x02\\x02\\x02ʝม\\x03\\x02\\x02\\x02ʟศ\\x03')\n buf.write('\\x02\\x02\\x02ʡอ\\x03\\x02\\x02\\x02ʣี\\x03\\x02\\x02\\x02ʥ\\u0e3c')\n buf.write('\\x03\\x02\\x02\\x02ʧแ\\x03\\x02\\x02\\x02ʩ๊\\x03\\x02\\x02\\x02ʫ')\n buf.write(\n '๕\\x03\\x02\\x02\\x02ʭ\\u0e62\\x03\\x02\\x02\\x02ʯ\\u0e74\\x03\\x02\\x02\\x02')\n buf.write('ʱ\\u0e80\\x03\\x02\\x02\\x02ʳຐ\\x03\\x02\\x02\\x02ʵດ\\x03')\n buf.write('\\x02\\x02\\x02ʷນ\\x03\\x02\\x02\\x02ʹຢ\\x03\\x02\\x02\\x02ʻຨ')\n buf.write('\\x03\\x02\\x02\\x02ʽອ\\x03\\x02\\x02\\x02ʿຶ\\x03\\x02\\x02\\x02ˁ')\n buf.write('\\u0ebf\\x03\\x02\\x02\\x02˃່\\x03\\x02\\x02\\x02˅໗\\x03\\x02\\x02\\x02')\n 
buf.write('ˇໞ\\x03\\x02\\x02\\x02ˉ\\u0ee3\\x03\\x02\\x02\\x02ˋ\\u0ee8\\x03')\n buf.write(\n '\\x02\\x02\\x02ˍ\\u0ef1\\x03\\x02\\x02\\x02ˏ\\u0efa\\x03\\x02\\x02\\x02ˑ\\u0eff'\n )\n buf.write('\\x03\\x02\\x02\\x02˓།\\x03\\x02\\x02\\x02˕༕\\x03\\x02\\x02\\x02˗')\n buf.write('༞\\x03\\x02\\x02\\x02˙༩\\x03\\x02\\x02\\x02˛༯\\x03\\x02\\x02\\x02')\n buf.write('˝༷\\x03\\x02\\x02\\x02˟ཁ\\x03\\x02\\x02\\x02ˡཎ\\x03')\n buf.write('\\x02\\x02\\x02ˣཕ\\x03\\x02\\x02\\x02˥འ\\x03\\x02\\x02\\x02˧ཧ')\n buf.write('\\x03\\x02\\x02\\x02˩ཱི\\x03\\x02\\x02\\x02˫ྀ\\x03\\x02\\x02\\x02˭')\n buf.write('ྎ\\x03\\x02\\x02\\x02˯ྖ\\x03\\x02\\x02\\x02˱ྞ\\x03\\x02\\x02\\x02')\n buf.write('˳ྦ\\x03\\x02\\x02\\x02˵ྫྷ\\x03\\x02\\x02\\x02˷ྰ\\x03')\n buf.write('\\x02\\x02\\x02˹ྵ\\x03\\x02\\x02\\x02˻ྺ\\x03\\x02\\x02\\x02˽࿄')\n buf.write(\n '\\x03\\x02\\x02\\x02˿\\u0fe0\\x03\\x02\\x02\\x02́\\u0ffb\\x03\\x02\\x02\\x02̃')\n buf.write('ဓ\\x03\\x02\\x02\\x02̅အ\\x03\\x02\\x02\\x02̇ု\\x03\\x02\\x02\\x02')\n buf.write('̉ဿ\\x03\\x02\\x02\\x02̋၏\\x03\\x02\\x02\\x02̍ၒ\\x03')\n buf.write('\\x02\\x02\\x02̏ၛ\\x03\\x02\\x02\\x02̑ၧ\\x03\\x02\\x02\\x02̓ၱ')\n buf.write('\\x03\\x02\\x02\\x02̕ၷ\\x03\\x02\\x02\\x02̗ၿ\\x03\\x02\\x02\\x02̙')\n buf.write('ႄ\\x03\\x02\\x02\\x02̛ႉ\\x03\\x02\\x02\\x02̝႒\\x03\\x02\\x02\\x02')\n buf.write('̟႗\\x03\\x02\\x02\\x02̡Ⴁ\\x03\\x02\\x02\\x02̣Ⴇ\\x03')\n buf.write('\\x02\\x02\\x02̥Ⴍ\\x03\\x02\\x02\\x02̧Ⴔ\\x03\\x02\\x02\\x02̩Ⴞ')\n buf.write(\n '\\x03\\x02\\x02\\x02̫\\u10c6\\x03\\x02\\x02\\x02̭\\u10cc\\x03\\x02\\x02\\x02̯')\n buf.write('დ\\x03\\x02\\x02\\x02̱მ\\x03\\x02\\x02\\x02̳ტ\\x03\\x02\\x02\\x02')\n buf.write('̵ჩ\\x03\\x02\\x02\\x02̷ჭ\\x03\\x02\\x02\\x02̹ჳ\\x03')\n buf.write('\\x02\\x02\\x02̻ჼ\\x03\\x02\\x02\\x02̽ᄂ\\x03\\x02\\x02\\x02̿ᄉ')\n buf.write('\\x03\\x02\\x02\\x02́ᄑ\\x03\\x02\\x02\\x02̓ᄚ\\x03\\x02\\x02\\x02ͅ')\n buf.write('ᄣ\\x03\\x02\\x02\\x02͇ᄪ\\x03\\x02\\x02\\x02͉ᄲ\\x03\\x02\\x02\\x02')\n buf.write('͋ᄺ\\x03\\x02\\x02\\x02͍ᅃ\\x03\\x02\\x02\\x02͏ᅈ\\x03')\n 
buf.write('\\x02\\x02\\x02͑ᅐ\\x03\\x02\\x02\\x02͓ᅛ\\x03\\x02\\x02\\x02͕ᅠ')\n buf.write('\\x03\\x02\\x02\\x02͗ᅩ\\x03\\x02\\x02\\x02͙ᅯ\\x03\\x02\\x02\\x02͛')\n buf.write('ᅵ\\x03\\x02\\x02\\x02͝ᅺ\\x03\\x02\\x02\\x02͟ᆁ\\x03\\x02\\x02\\x02')\n buf.write('͡ᆆ\\x03\\x02\\x02\\x02ͣᆌ\\x03\\x02\\x02\\x02ͥᆐ\\x03')\n buf.write('\\x02\\x02\\x02ͧᆗ\\x03\\x02\\x02\\x02ͩᆥ\\x03\\x02\\x02\\x02ͫᆭ')\n buf.write('\\x03\\x02\\x02\\x02ͭᆺ\\x03\\x02\\x02\\x02ͯᇅ\\x03\\x02\\x02\\x02ͱ')\n buf.write('ᇏ\\x03\\x02\\x02\\x02ͳᇙ\\x03\\x02\\x02\\x02͵ᇧ\\x03\\x02\\x02\\x02')\n buf.write('ͷᇰ\\x03\\x02\\x02\\x02\\u0379ᇶ\\x03\\x02\\x02\\x02ͻᇿ\\x03')\n buf.write('\\x02\\x02\\x02ͽሇ\\x03\\x02\\x02\\x02Ϳሔ\\x03\\x02\\x02\\x02\\u0381ም')\n buf.write('\\x03\\x02\\x02\\x02\\u0383ሢ\\x03\\x02\\x02\\x02΅ሦ\\x03\\x02\\x02\\x02·')\n buf.write(\n 'ሿ\\x03\\x02\\x02\\x02Ήቄ\\x03\\x02\\x02\\x02\\u038b\\u124f\\x03\\x02\\x02\\x02')\n buf.write('\\u038dቡ\\x03\\x02\\x02\\x02Ώቱ\\x03\\x02\\x02\\x02Αኄ\\x03')\n buf.write('\\x02\\x02\\x02Γኛ\\x03\\x02\\x02\\x02Εኪ\\x03\\x02\\x02\\x02Ηኴ')\n buf.write(\n '\\x03\\x02\\x02\\x02Ι\\u12bf\\x03\\x02\\x02\\x02Λ\\u12c7\\x03\\x02\\x02\\x02Ν')\n buf.write('ዔ\\x03\\x02\\x02\\x02Οዤ\\x03\\x02\\x02\\x02Ρዴ\\x03\\x02\\x02\\x02')\n buf.write('Σዹ\\x03\\x02\\x02\\x02Υዽ\\x03\\x02\\x02\\x02Χጂ\\x03')\n buf.write('\\x02\\x02\\x02Ωጆ\\x03\\x02\\x02\\x02Ϋጋ\\x03\\x02\\x02\\x02έጏ')\n buf.write('\\x03\\x02\\x02\\x02ί\\u1316\\x03\\x02\\x02\\x02αጚ\\x03\\x02\\x02\\x02γ')\n buf.write('ጠ\\x03\\x02\\x02\\x02εጰ\\x03\\x02\\x02\\x02ηጻ\\x03\\x02\\x02\\x02')\n buf.write('ιጿ\\x03\\x02\\x02\\x02λፈ\\x03\\x02\\x02\\x02νፎ\\x03')\n buf.write('\\x02\\x02\\x02οፕ\\x03\\x02\\x02\\x02ρፚ\\x03\\x02\\x02\\x02σ፡')\n buf.write('\\x03\\x02\\x02\\x02υ፮\\x03\\x02\\x02\\x02χ፻\\x03\\x02\\x02\\x02ω')\n buf.write('ᎈ\\x03\\x02\\x02\\x02ϋᎋ\\x03\\x02\\x02\\x02ύᎍ\\x03\\x02\\x02\\x02')\n buf.write('Ϗᎏ\\x03\\x02\\x02\\x02ϑ\\u139e\\x03\\x02\\x02\\x02ϓᎪ\\x03')\n buf.write('\\x02\\x02\\x02ϕᎳ\\x03\\x02\\x02\\x02ϗᎵ\\x03\\x02\\x02\\x02ϙᏀ')\n 
buf.write('\\x03\\x02\\x02\\x02ϛᏋ\\x03\\x02\\x02\\x02ϝᏖ\\x03\\x02\\x02\\x02ϟ')\n buf.write('Ꮱ\\x03\\x02\\x02\\x02ϡᏣ\\x03\\x02\\x02\\x02ϣᏭ\\x03\\x02\\x02\\x02')\n buf.write('ϥᏯ\\x03\\x02\\x02\\x02ϧᏱ\\x03\\x02\\x02\\x02ϩᏳ\\x03')\n buf.write('\\x02\\x02\\x02ϫᏵ\\x03\\x02\\x02\\x02ϭᏸ\\x03\\x02\\x02\\x02ϯᏺ')\n buf.write('\\x03\\x02\\x02\\x02ϱᏼ\\x03\\x02\\x02\\x02ϳ\\u13fe\\x03\\x02\\x02\\x02ϵ')\n buf.write('᐀\\x03\\x02\\x02\\x02Ϸᐂ\\x03\\x02\\x02\\x02Ϲᐄ\\x03\\x02\\x02\\x02')\n buf.write('ϻᐕ\\x03\\x02\\x02\\x02Ͻᐗ\\x03\\x02\\x02\\x02Ͽᐙ\\x03')\n buf.write('\\x02\\x02\\x02Ёᐛ\\x03\\x02\\x02\\x02Ѓᐞ\\x03\\x02\\x02\\x02Ѕᐠ')\n buf.write('\\x03\\x02\\x02\\x02Їᐫ\\x03\\x02\\x02\\x02Љᐭ\\x03\\x02\\x02\\x02Ћ')\n buf.write('ᐯ\\x03\\x02\\x02\\x02Ѝᐱ\\x03\\x02\\x02\\x02Џᐳ\\x03\\x02\\x02\\x02')\n buf.write('Бᐵ\\x03\\x02\\x02\\x02Гᐷ\\x03\\x02\\x02\\x02Еᐺ\\x03')\n buf.write('\\x02\\x02\\x02Зᐼ\\x03\\x02\\x02\\x02Йᐾ\\x03\\x02\\x02\\x02Лᑀ')\n buf.write('\\x03\\x02\\x02\\x02Нᑂ\\x03\\x02\\x02\\x02Пᑅ\\x03\\x02\\x02\\x02С')\n buf.write('ᑋ\\x03\\x02\\x02\\x02Уᑎ\\x03\\x02\\x02\\x02Хᑕ\\x03\\x02\\x02\\x02')\n buf.write('Чᑠ\\x03\\x02\\x02\\x02Щᑯ\\x03\\x02\\x02\\x02Ыᑽ\\x03')\n buf.write('\\x02\\x02\\x02Эᒐ\\x03\\x02\\x02\\x02Яᒔ\\x03\\x02\\x02\\x02бᒖ')\n buf.write('\\x03\\x02\\x02\\x02гᒞ\\x03\\x02\\x02\\x02еᒣ\\x03\\x02\\x02\\x02з')\n buf.write('ᒥ\\x03\\x02\\x02\\x02йᒧ\\x03\\x02\\x02\\x02лᒩ\\x03\\x02\\x02\\x02')\n buf.write('нᒫ\\x03\\x02\\x02\\x02пᒭ\\x03\\x02\\x02\\x02сᒯ\\x03')\n buf.write('\\x02\\x02\\x02уᒱ\\x03\\x02\\x02\\x02хᒳ\\x03\\x02\\x02\\x02чᒵ')\n buf.write('\\x03\\x02\\x02\\x02щᒷ\\x03\\x02\\x02\\x02ыᒹ\\x03\\x02\\x02\\x02э')\n buf.write('ᒻ\\x03\\x02\\x02\\x02яᒽ\\x03\\x02\\x02\\x02ёᒿ\\x03\\x02\\x02\\x02')\n buf.write('ѓᓁ\\x03\\x02\\x02\\x02ѕᓃ\\x03\\x02\\x02\\x02їᓅ\\x03')\n buf.write('\\x02\\x02\\x02љᓇ\\x03\\x02\\x02\\x02ћᓉ\\x03\\x02\\x02\\x02ѝᓋ')\n buf.write('\\x03\\x02\\x02\\x02џᓍ\\x03\\x02\\x02\\x02ѡᓏ\\x03\\x02\\x02\\x02ѣ')\n 
buf.write('ᓑ\\x03\\x02\\x02\\x02ѥᓓ\\x03\\x02\\x02\\x02ѧᓕ\\x03\\x02\\x02\\x02')\n buf.write('ѩѪ\\x070\\x02\\x02Ѫѫ\\x070\\x02\\x02ѫ\\x04\\x03\\x02')\n buf.write('\\x02\\x02Ѭѭ\\x05еț\\x02ѭ\\x06\\x03\\x02\\x02\\x02Ѯ')\n buf.write('ѯ\\x05еț\\x02ѯѰ\\x05лȞ\\x02Ѱ')\n buf.write('ѱ\\x05лȞ\\x02ѱ\\x08\\x03\\x02\\x02\\x02Ѳѳ\\x05е')\n buf.write('ț\\x02ѳѴ\\x05пȠ\\x02Ѵѵ\\x05ћ')\n buf.write('Ȯ\\x02ѵѶ\\x05нȟ\\x02Ѷѷ\\x05ї')\n buf.write('Ȭ\\x02ѷ\\n\\x03\\x02\\x02\\x02Ѹѹ\\x05еț\\x02ѹ')\n buf.write('Ѻ\\x05сȡ\\x02Ѻѻ\\x05нȟ\\x02ѻ')\n buf.write('Ѽ\\x05яȨ\\x02Ѽѽ\\x05ћȮ\\x02ѽ')\n buf.write('\\x0c\\x03\\x02\\x02\\x02Ѿѿ\\x05еț\\x02ѿҀ\\x05с')\n buf.write('ȡ\\x02Ҁҁ\\x05сȡ\\x02ҁ҂\\x05ї')\n buf.write('Ȭ\\x02҂҃\\x05нȟ\\x02҃҄\\x05с')\n buf.write('ȡ\\x02҄҅\\x05еț\\x02҅҆\\x05ћ')\n buf.write('Ȯ\\x02҆҇\\x05нȟ\\x02҇\\x0e\\x03\\x02\\x02\\x02')\n buf.write('҈҉\\x05еț\\x02҉Ҋ\\x05ыȦ')\n buf.write('\\x02Ҋҋ\\x05ыȦ\\x02ҋ\\x10\\x03\\x02\\x02\\x02Ҍ')\n buf.write('ҍ\\x05еț\\x02ҍҎ\\x05ыȦ\\x02Ҏ')\n buf.write('ҏ\\x05ћȮ\\x02ҏҐ\\x05нȟ\\x02Ґ')\n buf.write('ґ\\x05їȬ\\x02ґ\\x12\\x03\\x02\\x02\\x02Ғғ\\x05')\n buf.write('еț\\x02ғҔ\\x05яȨ\\x02Ҕҕ')\n buf.write('\\x05еț\\x02ҕҖ\\x05ыȦ\\x02Җҗ')\n buf.write('\\x05ѥȳ\\x02җҘ\\x05ѧȴ\\x02Ҙҙ')\n buf.write('\\x05нȟ\\x02ҙ\\x14\\x03\\x02\\x02\\x02Ққ\\x05е')\n buf.write('ț\\x02қҜ\\x05яȨ\\x02Ҝҝ\\x05л')\n buf.write('Ȟ\\x02ҝ\\x16\\x03\\x02\\x02\\x02Ҟҟ\\x05еț\\x02')\n buf.write('ҟҠ\\x05яȨ\\x02Ҡҡ\\x05ѥȳ')\n buf.write('\\x02ҡ\\x18\\x03\\x02\\x02\\x02Ңң\\x05еț\\x02ң')\n buf.write('Ҥ\\x05їȬ\\x02Ҥҥ\\x05їȬ\\x02ҥ')\n buf.write('Ҧ\\x05еț\\x02Ҧҧ\\x05ѥȳ\\x02ҧ')\n buf.write('\\x1a\\x03\\x02\\x02\\x02Ҩҩ\\x05еț\\x02ҩҪ\\x05')\n buf.write('љȭ\\x02Ҫ\\x1c\\x03\\x02\\x02\\x02ҫҬ\\x05еț')\n buf.write('\\x02Ҭҭ\\x05љȭ\\x02ҭҮ\\x05љȭ')\n buf.write('\\x02Үү\\x05ѝȯ\\x02үҰ\\x05эȧ')\n buf.write('\\x02Ұұ\\x05нȟ\\x02ұ\\x1e\\x03\\x02\\x02\\x02Ҳ')\n buf.write('ҳ\\x05еț\\x02ҳҴ\\x05љȭ\\x02Ҵ')\n buf.write('ҵ\\x05љȭ\\x02ҵҶ\\x05нȟ\\x02Ҷ')\n buf.write('ҷ\\x05їȬ\\x02ҷҸ\\x05ћȮ\\x02Ҹ')\n buf.write(' 
\\x03\\x02\\x02\\x02ҹҺ\\x05еț\\x02Һһ\\x05љ')\n buf.write('ȭ\\x02һҼ\\x05йȝ\\x02Ҽ\"\\x03\\x02\\x02\\x02ҽ')\n buf.write('Ҿ\\x05еț\\x02Ҿҿ\\x05љȭ\\x02ҿ')\n buf.write('Ӏ\\x05љȭ\\x02ӀӁ\\x05ёȩ\\x02Ӂ')\n buf.write('ӂ\\x05йȝ\\x02ӂӃ\\x05хȣ\\x02Ӄ')\n buf.write('ӄ\\x05еț\\x02ӄӅ\\x05ћȮ\\x02Ӆ')\n buf.write('ӆ\\x05нȟ\\x02ӆ$\\x03\\x02\\x02\\x02Ӈӈ\\x05е')\n buf.write('ț\\x02ӈӉ\\x05ћȮ\\x02Ӊ&\\x03\\x02\\x02\\x02ӊ')\n buf.write('Ӌ\\x05еț\\x02Ӌӌ\\x05ћȮ\\x02ӌ')\n buf.write('Ӎ\\x05ћȮ\\x02Ӎӎ\\x05їȬ\\x02ӎ')\n buf.write('ӏ\\x05хȣ\\x02ӏӐ\\x05зȜ\\x02Ӑ')\n buf.write('ӑ\\x05ѝȯ\\x02ӑӒ\\x05ћȮ\\x02Ӓ')\n buf.write('ӓ\\x05нȟ\\x02ӓ(\\x03\\x02\\x02\\x02Ӕӕ\\x05е')\n buf.write('ț\\x02ӕӖ\\x05ѝȯ\\x02Ӗӗ\\x05л')\n buf.write('Ȟ\\x02ӗӘ\\x05хȣ\\x02Әә\\x05ћ')\n buf.write('Ȯ\\x02ә*\\x03\\x02\\x02\\x02Ӛӛ\\x05еț\\x02ӛ')\n buf.write('Ӝ\\x05ѝȯ\\x02Ӝӝ\\x05ћȮ\\x02ӝ')\n buf.write('Ӟ\\x05уȢ\\x02Ӟӟ\\x05хȣ\\x02ӟ')\n buf.write('Ӡ\\x05лȞ\\x02Ӡ,\\x03\\x02\\x02\\x02ӡӢ\\x05е')\n buf.write('ț\\x02Ӣӣ\\x05ѝȯ\\x02ӣӤ\\x05ћ')\n buf.write('Ȯ\\x02Ӥӥ\\x05ёȩ\\x02ӥ.\\x03\\x02\\x02\\x02Ӧ')\n buf.write('ӧ\\x05еț\\x02ӧӨ\\x05ѝȯ\\x02Ө')\n buf.write('ө\\x05ћȮ\\x02өӪ\\x05ёȩ\\x02Ӫ')\n buf.write('ӫ\\x05эȧ\\x02ӫӬ\\x05еț\\x02Ӭ')\n buf.write('ӭ\\x05ћȮ\\x02ӭӮ\\x05хȣ\\x02Ӯ')\n buf.write('ӯ\\x05йȝ\\x02ӯ0\\x03\\x02\\x02\\x02Ӱӱ\\x05')\n buf.write('еț\\x02ӱӲ\\x05ѝȯ\\x02Ӳӳ')\n buf.write('\\x05ћȮ\\x02ӳӴ\\x05ёȩ\\x02Ӵӵ')\n buf.write('\\x05яȨ\\x02ӵӶ\\x05ёȩ\\x02Ӷӷ')\n buf.write('\\x05эȧ\\x02ӷӸ\\x05ёȩ\\x02Ӹӹ')\n buf.write('\\x05ѝȯ\\x02ӹӺ\\x05љȭ\\x02Ӻӻ')\n buf.write('\\x07a\\x02\\x02ӻӼ\\x05ћȮ\\x02Ӽӽ\\x05ї')\n buf.write('Ȭ\\x02ӽӾ\\x05еț\\x02Ӿӿ\\x05я')\n buf.write('Ȩ\\x02ӿԀ\\x05љȭ\\x02Ԁԁ\\x05е')\n buf.write('ț\\x02ԁԂ\\x05йȝ\\x02Ԃԃ\\x05ћ')\n buf.write('Ȯ\\x02ԃԄ\\x05хȣ\\x02Ԅԅ\\x05ё')\n buf.write('ȩ\\x02ԅԆ\\x05яȨ\\x02Ԇ2\\x03\\x02\\x02\\x02')\n buf.write('ԇԈ\\x05зȜ\\x02Ԉԉ\\x05еț')\n buf.write('\\x02ԉԊ\\x05ћȮ\\x02Ԋԋ\\x05йȝ')\n buf.write('\\x02ԋԌ\\x05уȢ\\x02Ԍ4\\x03\\x02\\x02\\x02ԍ')\n buf.write('Ԏ\\x05зȜ\\x02Ԏԏ\\x05нȟ\\x02ԏ')\n buf.write('Ԑ\\x05пȠ\\x02Ԑԑ\\x05ёȩ\\x02ԑ')\n 
buf.write('Ԓ\\x05їȬ\\x02Ԓԓ\\x05нȟ\\x02ԓ')\n buf.write('6\\x03\\x02\\x02\\x02Ԕԕ\\x05зȜ\\x02ԕԖ\\x05')\n buf.write('нȟ\\x02Ԗԗ\\x05сȡ\\x02ԗԘ')\n buf.write('\\x05хȣ\\x02Ԙԙ\\x05яȨ\\x02ԙ8\\x03')\n buf.write('\\x02\\x02\\x02Ԛԛ\\x05зȜ\\x02ԛԜ\\x05н')\n buf.write('ȟ\\x02Ԝԝ\\x05ћȮ\\x02ԝԞ\\x05ѡ')\n buf.write('ȱ\\x02Ԟԟ\\x05нȟ\\x02ԟԠ\\x05н')\n buf.write('ȟ\\x02Ԡԡ\\x05яȨ\\x02ԡ:\\x03\\x02\\x02\\x02Ԣ')\n buf.write('ԣ\\x05зȜ\\x02ԣԤ\\x05пȠ\\x02Ԥ')\n buf.write('ԥ\\x05хȣ\\x02ԥԦ\\x05ыȦ\\x02Ԧ')\n buf.write('ԧ\\x05нȟ\\x02ԧ<\\x03\\x02\\x02\\x02Ԩԩ\\x05з')\n buf.write('Ȝ\\x02ԩԪ\\x05хȣ\\x02Ԫԫ\\x05я')\n buf.write('Ȩ\\x02ԫԬ\\x05еț\\x02Ԭԭ\\x05ї')\n buf.write('Ȭ\\x02ԭԮ\\x05ѥȳ\\x02Ԯԯ\\x07a\\x02')\n buf.write('\\x02ԯ\\u0530\\x05лȞ\\x02\\u0530Ա\\x05ёȩ')\n buf.write('\\x02ԱԲ\\x05ѝȯ\\x02ԲԳ\\x05зȜ')\n buf.write('\\x02ԳԴ\\x05ыȦ\\x02ԴԵ\\x05нȟ')\n buf.write('\\x02Ե>\\x03\\x02\\x02\\x02ԶԷ\\x05зȜ\\x02ԷԸ')\n buf.write('\\x05хȣ\\x02ԸԹ\\x05яȨ\\x02ԹԺ')\n buf.write('\\x05еț\\x02ԺԻ\\x05їȬ\\x02ԻԼ')\n buf.write('\\x05ѥȳ\\x02ԼԽ\\x07a\\x02\\x02ԽԾ\\x05п')\n buf.write('Ƞ\\x02ԾԿ\\x05ыȦ\\x02ԿՀ\\x05ё')\n buf.write('ȩ\\x02ՀՁ\\x05еț\\x02ՁՂ\\x05ћ')\n buf.write('Ȯ\\x02Ղ@\\x03\\x02\\x02\\x02ՃՄ\\x05зȜ\\x02Մ')\n buf.write('Յ\\x05хȣ\\x02ՅՆ\\x05яȨ\\x02Ն')\n buf.write('Շ\\x05еț\\x02ՇՈ\\x05їȬ\\x02Ո')\n buf.write('Չ\\x05ѥȳ\\x02ՉՊ\\x07a\\x02\\x02ՊՋ')\n buf.write('\\x05хȣ\\x02ՋՌ\\x05яȨ\\x02ՌՍ')\n buf.write('\\x05ћȮ\\x02ՍՎ\\x05нȟ\\x02ՎՏ')\n buf.write('\\x05сȡ\\x02ՏՐ\\x05нȟ\\x02ՐՑ')\n buf.write('\\x05їȬ\\x02ՑB\\x03\\x02\\x02\\x02ՒՓ\\x05зȜ')\n buf.write('\\x02ՓՔ\\x05ыȦ\\x02ՔՕ\\x05ёȩ')\n buf.write('\\x02ՕՖ\\x05зȜ\\x02ՖD\\x03\\x02\\x02\\x02\\u0557\\u0558')\n buf.write('\\x05зȜ\\x02\\u0558ՙ\\x05ыȦ\\x02ՙ՚')\n buf.write('\\x05ёȩ\\x02՚՛\\x05йȝ\\x02՛՜')\n buf.write('\\x05щȥ\\x02՜F\\x03\\x02\\x02\\x02՝՞\\x05зȜ')\n buf.write('\\x02՞՟\\x05ёȩ\\x02՟ՠ\\x05лȞ')\n buf.write('\\x02ՠա\\x05ѥȳ\\x02աH\\x03\\x02\\x02\\x02բգ')\n buf.write('\\x05зȜ\\x02գդ\\x05ёȩ\\x02դե')\n buf.write('\\x05ёȩ\\x02եզ\\x05ыȦ\\x02զէ')\n buf.write('\\x05нȟ\\x02էը\\x05еț\\x02ըթ')\n 
buf.write('\\x05яȨ\\x02թJ\\x03\\x02\\x02\\x02ժի\\x05зȜ')\n buf.write('\\x02իլ\\x05ёȩ\\x02լխ\\x05ћȮ')\n buf.write('\\x02խծ\\x05уȢ\\x02ծL\\x03\\x02\\x02\\x02կհ')\n buf.write('\\x05зȜ\\x02հձ\\x05їȬ\\x02ձղ')\n buf.write('\\x05нȟ\\x02ղճ\\x05еț\\x02ճմ')\n buf.write('\\x05лȞ\\x02մյ\\x05ћȮ\\x02յն')\n buf.write('\\x05уȢ\\x02նN\\x03\\x02\\x02\\x02շո\\x05зȜ')\n buf.write('\\x02ոչ\\x05ѝȯ\\x02չպ\\x05ыȦ')\n buf.write('\\x02պջ\\x05щȥ\\x02ջP\\x03\\x02\\x02\\x02ռս')\n buf.write('\\x05зȜ\\x02սվ\\x05ѥȳ\\x02վR\\x03')\n buf.write('\\x02\\x02\\x02տր\\x05зȜ\\x02րց\\x05ѥ')\n buf.write('ȳ\\x02ցւ\\x05ћȮ\\x02ւփ\\x05н')\n buf.write('ȟ\\x02փT\\x03\\x02\\x02\\x02քօ\\x05йȝ\\x02օ')\n buf.write('V\\x03\\x02\\x02\\x02ֆև\\x05йȝ\\x02ևֈ\\x05е')\n buf.write('ț\\x02ֈ։\\x05йȝ\\x02։֊\\x05у')\n buf.write('Ȣ\\x02֊\\u058b\\x05нȟ\\x02\\u058bX\\x03\\x02\\x02\\x02\\u058c')\n buf.write('֍\\x05йȝ\\x02֍֎\\x05еț\\x02֎')\n buf.write('֏\\x05ыȦ\\x02֏\\u0590\\x05ыȦ\\x02\\u0590')\n buf.write('Z\\x03\\x02\\x02\\x02֑֒\\x05йȝ\\x02֒֓\\x05е')\n buf.write('ț\\x02֓֔\\x05яȨ\\x02֔֕\\x05ё')\n buf.write('ȩ\\x02֖֕\\x05яȨ\\x02֖֗\\x05х')\n buf.write('ȣ\\x02֗֘\\x05йȝ\\x02֘֙\\x05е')\n buf.write('ț\\x02֚֙\\x05ыȦ\\x02֚\\\\\\x03\\x02\\x02\\x02֛')\n buf.write('֜\\x05йȝ\\x02֜֝\\x05еț\\x02֝')\n buf.write('֞\\x05љȭ\\x02֞֟\\x05йȝ\\x02֟')\n buf.write('֠\\x05еț\\x02֠֡\\x05лȞ\\x02֡')\n buf.write('֢\\x05нȟ\\x02֢^\\x03\\x02\\x02\\x02֣֤\\x05й')\n buf.write('ȝ\\x02֤֥\\x05еț\\x02֥֦\\x05љ')\n buf.write('ȭ\\x02֦֧\\x05нȟ\\x02֧`\\x03\\x02\\x02\\x02֨')\n buf.write('֩\\x05йȝ\\x02֪֩\\x05еț\\x02֪')\n buf.write('֫\\x05љȭ\\x02֫֬\\x05ћȮ\\x02֬')\n buf.write('b\\x03\\x02\\x02\\x02֭֮\\x05йȝ\\x02֮֯\\x05у')\n buf.write('Ȣ\\x02ְ֯\\x05еț\\x02ְֱ\\x05ї')\n buf.write('Ȭ\\x02ֱd\\x03\\x02\\x02\\x02ֲֳ\\x05йȝ\\x02ֳ')\n buf.write('ִ\\x05уȢ\\x02ִֵ\\x05еț\\x02ֵ')\n buf.write('ֶ\\x05їȬ\\x02ֶַ\\x07a\\x02\\x02ַָ')\n buf.write('\\x05йȝ\\x02ָֹ\\x05љȭ\\x02ֹf\\x03')\n buf.write('\\x02\\x02\\x02ֺֻ\\x05йȝ\\x02ֻּ\\x05у')\n buf.write('Ȣ\\x02ּֽ\\x05еț\\x02ֽ־\\x05ї')\n 
buf.write('Ȭ\\x02־ֿ\\x05еț\\x02ֿ׀\\x05й')\n buf.write('ȝ\\x02׀ׁ\\x05ћȮ\\x02ׁׂ\\x05н')\n buf.write('ȟ\\x02ׂ׃\\x05їȬ\\x02׃h\\x03\\x02\\x02\\x02ׄ')\n buf.write('ׅ\\x05йȝ\\x02ׅ׆\\x05уȢ\\x02׆')\n buf.write('ׇ\\x05нȟ\\x02ׇ\\u05c8\\x05йȝ\\x02\\u05c8')\n buf.write('\\u05c9\\x05щȥ\\x02\\u05c9j\\x03\\x02\\x02\\x02\\u05ca\\u05cb\\x05й')\n buf.write('ȝ\\x02\\u05cb\\u05cc\\x05уȢ\\x02\\u05cc\\u05cd\\x05ї')\n buf.write('Ȭ\\x02\\u05cdl\\x03\\x02\\x02\\x02\\u05ce\\u05cf\\x05йȝ\\x02\\u05cf')\n buf.write('א\\x05ыȦ\\x02אב\\x05ёȩ\\x02ב')\n buf.write('ג\\x05зȜ\\x02גn\\x03\\x02\\x02\\x02דה\\x05й')\n buf.write('ȝ\\x02הו\\x05ыȦ\\x02וז\\x05ё')\n buf.write('ȩ\\x02זח\\x05љȭ\\x02חט\\x05н')\n buf.write('ȟ\\x02טp\\x03\\x02\\x02\\x02יך\\x05йȝ\\x02ך')\n buf.write('כ\\x05ыȦ\\x02כל\\x05ѝȯ\\x02ל')\n buf.write('ם\\x05љȭ\\x02םמ\\x05ћȮ\\x02מ')\n buf.write('ן\\x05нȟ\\x02ןנ\\x05їȬ\\x02נ')\n buf.write('r\\x03\\x02\\x02\\x02סע\\x05йȝ\\x02עף\\x05ё')\n buf.write('ȩ\\x02ףפ\\x05ыȦ\\x02פץ\\x05ы')\n buf.write('Ȧ\\x02ץצ\\x05нȟ\\x02צק\\x05й')\n buf.write('ȝ\\x02קר\\x05ћȮ\\x02רt\\x03\\x02\\x02\\x02ש')\n buf.write('ת\\x05йȝ\\x02ת\\u05eb\\x05ёȩ\\x02\\u05eb')\n buf.write('\\u05ec\\x05ыȦ\\x02\\u05ec\\u05ed\\x05ѝȯ\\x02\\u05ed')\n buf.write('\\u05ee\\x05эȧ\\x02\\u05eeׯ\\x05яȨ\\x02ׯ')\n buf.write('װ\\x05љȭ\\x02װv\\x03\\x02\\x02\\x02ױײ\\x05й')\n buf.write('ȝ\\x02ײ׳\\x05ёȩ\\x02׳״\\x05э')\n buf.write('ȧ\\x02״\\u05f5\\x05эȧ\\x02\\u05f5\\u05f6\\x05н')\n buf.write('ȟ\\x02\\u05f6\\u05f7\\x05яȨ\\x02\\u05f7\\u05f8\\x05ћ')\n buf.write('Ȯ\\x02\\u05f8x\\x03\\x02\\x02\\x02\\u05f9\\u05fa\\x05йȝ\\x02\\u05fa')\n buf.write('\\u05fb\\x05ёȩ\\x02\\u05fb\\u05fc\\x05эȧ\\x02\\u05fc')\n buf.write('\\u05fd\\x05эȧ\\x02\\u05fd\\u05fe\\x05хȣ\\x02\\u05fe')\n buf.write('\\u05ff\\x05ћȮ\\x02\\u05ffz\\x03\\x02\\x02\\x02\\u0600\\u0601\\x05й')\n buf.write('ȝ\\x02\\u0601\\u0602\\x05ёȩ\\x02\\u0602\\u0603\\x05э')\n buf.write('ȧ\\x02\\u0603\\u0604\\x05эȧ\\x02\\u0604\\u0605\\x05х')\n buf.write('ȣ\\x02\\u0605؆\\x05ћȮ\\x02؆؇\\x05ћ')\n 
buf.write('Ȯ\\x02؇؈\\x05нȟ\\x02؈؉\\x05л')\n buf.write('Ȟ\\x02؉|\\x03\\x02\\x02\\x02؊؋\\x05йȝ\\x02؋')\n buf.write('،\\x05ёȩ\\x02،؍\\x05эȧ\\x02؍')\n buf.write('؎\\x05ѓȪ\\x02؎؏\\x05еț\\x02؏')\n buf.write('ؐ\\x05ћȮ\\x02ؐؑ\\x05хȣ\\x02ؑ')\n buf.write('ؒ\\x05зȜ\\x02ؒؓ\\x05хȣ\\x02ؓ')\n buf.write('ؔ\\x05ыȦ\\x02ؔؕ\\x05хȣ\\x02ؕ')\n buf.write('ؖ\\x05ћȮ\\x02ؖؗ\\x05ѥȳ\\x02ؗ')\n buf.write('~\\x03\\x02\\x02\\x02ؘؙ\\x05йȝ\\x02ؙؚ\\x05ё')\n buf.write('ȩ\\x02ؚ؛\\x05эȧ\\x02؛\\u061c\\x05ѓ')\n buf.write('Ȫ\\x02\\u061c؝\\x05хȣ\\x02؝؞\\x05ы')\n buf.write('Ȧ\\x02؞؟\\x05нȟ\\x02؟\\x80\\x03\\x02\\x02')\n buf.write('\\x02ؠء\\x05йȝ\\x02ءآ\\x05ёȩ')\n buf.write('\\x02آأ\\x05эȧ\\x02أؤ\\x05ѓȪ')\n buf.write('\\x02ؤإ\\x05ёȩ\\x02إئ\\x05ѝȯ')\n buf.write('\\x02ئا\\x05яȨ\\x02اب\\x05лȞ')\n buf.write('\\x02ب\\x82\\x03\\x02\\x02\\x02ةت\\x05йȝ\\x02ت')\n buf.write('ث\\x05ёȩ\\x02ثج\\x05яȨ\\x02ج')\n buf.write('ح\\x05яȨ\\x02حخ\\x05нȟ\\x02خ')\n buf.write('د\\x05йȝ\\x02دذ\\x05ћȮ\\x02ذ')\n buf.write('\\x84\\x03\\x02\\x02\\x02رز\\x05йȝ\\x02زس')\n buf.write('\\x05ёȩ\\x02سش\\x05яȨ\\x02شص')\n buf.write('\\x05яȨ\\x02صض\\x05нȟ\\x02ضط')\n buf.write('\\x05йȝ\\x02طظ\\x05ћȮ\\x02ظع')\n buf.write('\\x07a\\x02\\x02عغ\\x05зȜ\\x02غػ\\x05ѥ')\n buf.write('ȳ\\x02ػؼ\\x07a\\x02\\x02ؼؽ\\x05їȬ')\n buf.write('\\x02ؽؾ\\x05ёȩ\\x02ؾؿ\\x05ёȩ')\n buf.write('\\x02ؿـ\\x05ћȮ\\x02ـ\\x86\\x03\\x02\\x02\\x02ف')\n buf.write('ق\\x05йȝ\\x02قك\\x05ёȩ\\x02ك')\n buf.write('ل\\x05яȨ\\x02لم\\x05љȭ\\x02م')\n buf.write('ن\\x05ћȮ\\x02نه\\x05еț\\x02ه')\n buf.write('و\\x05яȨ\\x02وى\\x05ћȮ\\x02ى')\n buf.write('\\x88\\x03\\x02\\x02\\x02يً\\x05йȝ\\x02ًٌ')\n buf.write('\\x05ёȩ\\x02ٌٍ\\x05яȨ\\x02ٍَ')\n buf.write('\\x05љȭ\\x02َُ\\x05ћȮ\\x02ُِ')\n buf.write('\\x05їȬ\\x02ِّ\\x05еț\\x02ّْ')\n buf.write('\\x05хȣ\\x02ْٓ\\x05яȨ\\x02ٓٔ')\n buf.write('\\x05ћȮ\\x02ٔ\\x8a\\x03\\x02\\x02\\x02ٕٖ\\x05й')\n buf.write('ȝ\\x02ٖٗ\\x05ёȩ\\x02ٗ٘\\x05я')\n buf.write('Ȩ\\x02٘ٙ\\x05љȭ\\x02ٙٚ\\x05ћ')\n buf.write('Ȯ\\x02ٚٛ\\x05їȬ\\x02ٜٛ\\x05е')\n buf.write('ț\\x02ٜٝ\\x05хȣ\\x02ٝٞ\\x05я')\n 
buf.write('Ȩ\\x02ٟٞ\\x05ћȮ\\x02ٟ٠\\x05љ')\n buf.write('ȭ\\x02٠\\x8c\\x03\\x02\\x02\\x02١٢\\x05йȝ')\n buf.write('\\x02٢٣\\x05ёȩ\\x02٣٤\\x05яȨ')\n buf.write('\\x02٤٥\\x05љȭ\\x02٥٦\\x05ћȮ')\n buf.write('\\x02٦٧\\x05їȬ\\x02٧٨\\x05ѝȯ')\n buf.write('\\x02٨٩\\x05йȝ\\x02٩٪\\x05ћȮ')\n buf.write('\\x02٪٫\\x05ёȩ\\x02٫٬\\x05їȬ')\n buf.write('\\x02٬\\x8e\\x03\\x02\\x02\\x02٭ٮ\\x05йȝ\\x02ٮ')\n buf.write('ٯ\\x05ёȩ\\x02ٯٰ\\x05яȨ\\x02ٰ')\n buf.write('ٱ\\x05ћȮ\\x02ٱٲ\\x05нȟ\\x02ٲ')\n buf.write('ٳ\\x05яȨ\\x02ٳٴ\\x05ћȮ\\x02ٴ')\n buf.write('\\x90\\x03\\x02\\x02\\x02ٵٶ\\x05йȝ\\x02ٶٷ')\n buf.write('\\x05ёȩ\\x02ٷٸ\\x05яȨ\\x02ٸٹ')\n buf.write('\\x05ћȮ\\x02ٹٺ\\x05нȟ\\x02ٺٻ')\n buf.write('\\x05ѣȲ\\x02ٻټ\\x05ћȮ\\x02ټ\\x92')\n buf.write('\\x03\\x02\\x02\\x02ٽپ\\x05йȝ\\x02پٿ\\x05ё')\n buf.write('ȩ\\x02ٿڀ\\x05яȨ\\x02ڀځ\\x05ћ')\n buf.write('Ȯ\\x02ځڂ\\x05хȣ\\x02ڂڃ\\x05я')\n buf.write('Ȩ\\x02ڃڄ\\x05ѝȯ\\x02ڄڅ\\x05н')\n buf.write('ȟ\\x02څ\\x94\\x03\\x02\\x02\\x02چڇ\\x05йȝ')\n buf.write('\\x02ڇڈ\\x05ёȩ\\x02ڈډ\\x05яȨ')\n buf.write('\\x02ډڊ\\x05џȰ\\x02ڊڋ\\x05нȟ')\n buf.write('\\x02ڋڌ\\x05їȬ\\x02ڌڍ\\x05ћȮ')\n buf.write('\\x02ڍ\\x96\\x03\\x02\\x02\\x02ڎڏ\\x05йȝ\\x02ڏ')\n buf.write('ڐ\\x05ёȩ\\x02ڐڑ\\x05їȬ\\x02ڑ')\n buf.write('ڒ\\x05їȬ\\x02ڒړ\\x05ѝȯ\\x02ړ')\n buf.write('ڔ\\x05ѓȪ\\x02ڔڕ\\x05ћȮ\\x02ڕ')\n buf.write('ږ\\x07a\\x02\\x02ږڗ\\x05ѣȲ\\x02ڗژ')\n buf.write('\\x05хȣ\\x02ژڙ\\x05лȞ\\x02ڙ\\x98')\n buf.write('\\x03\\x02\\x02\\x02ښڛ\\x05йȝ\\x02ڛڜ\\x05ё')\n buf.write('ȩ\\x02ڜڝ\\x05їȬ\\x02ڝڞ\\x05ї')\n buf.write('Ȭ\\x02ڞڟ\\x05ѝȯ\\x02ڟڠ\\x05ѓ')\n buf.write('Ȫ\\x02ڠڡ\\x05ћȮ\\x02ڡڢ\\x07a\\x02')\n buf.write('\\x02ڢڣ\\x05ѣȲ\\x02ڣڤ\\x05хȣ')\n buf.write('\\x02ڤڥ\\x05лȞ\\x02ڥڦ\\x07a\\x02\\x02ڦ')\n buf.write('ڧ\\x05еț\\x02ڧڨ\\x05ыȦ\\x02ڨ')\n buf.write('ک\\x05ыȦ\\x02ک\\x9a\\x03\\x02\\x02\\x02ڪګ')\n buf.write('\\x05йȝ\\x02ګڬ\\x05ёȩ\\x02ڬڭ')\n buf.write('\\x05љȭ\\x02ڭڮ\\x05ћȮ\\x02ڮ\\x9c')\n buf.write('\\x03\\x02\\x02\\x02گڰ\\x05йȝ\\x02ڰڱ\\x05ё')\n buf.write('ȩ\\x02ڱڲ\\x05ѝȯ\\x02ڲڳ\\x05я')\n 
buf.write('Ȩ\\x02ڳڴ\\x05ћȮ\\x02ڴ\\x9e\\x03\\x02\\x02')\n buf.write('\\x02ڵڶ\\x05йȝ\\x02ڶڷ\\x05їȬ')\n buf.write('\\x02ڷڸ\\x05нȟ\\x02ڸڹ\\x05еț')\n buf.write('\\x02ڹں\\x05ћȮ\\x02ںڻ\\x05нȟ')\n buf.write('\\x02ڻ\\xa0\\x03\\x02\\x02\\x02ڼڽ\\x05йȝ\\x02ڽ')\n buf.write('ھ\\x05їȬ\\x02ھڿ\\x05ёȩ\\x02ڿ')\n buf.write('ۀ\\x05љȭ\\x02ۀہ\\x05љȭ\\x02ہ')\n buf.write('¢\\x03\\x02\\x02\\x02ۂۃ\\x05йȝ\\x02ۃۄ')\n buf.write('\\x05ѝȯ\\x02ۄۅ\\x05зȜ\\x02ۅۆ')\n buf.write('\\x05нȟ\\x02ۆ¤\\x03\\x02\\x02\\x02ۇۈ\\x05й')\n buf.write('ȝ\\x02ۈۉ\\x05ѝȯ\\x02ۉۊ\\x05ї')\n buf.write('Ȭ\\x02ۊۋ\\x05їȬ\\x02ۋی\\x05н')\n buf.write('ȟ\\x02یۍ\\x05яȨ\\x02ۍێ\\x05ћ')\n buf.write('Ȯ\\x02ێ¦\\x03\\x02\\x02\\x02ۏې\\x05йȝ')\n buf.write('\\x02ېۑ\\x05ѝȯ\\x02ۑے\\x05їȬ')\n buf.write('\\x02ےۓ\\x05їȬ\\x02ۓ۔\\x05нȟ')\n buf.write('\\x02۔ە\\x05яȨ\\x02ەۖ\\x05ћȮ')\n buf.write('\\x02ۖۗ\\x07a\\x02\\x02ۗۘ\\x05ѝȯ\\x02ۘ')\n buf.write('ۙ\\x05љȭ\\x02ۙۚ\\x05нȟ\\x02ۚ')\n buf.write('ۛ\\x05їȬ\\x02ۛ¨\\x03\\x02\\x02\\x02ۜ\\u06dd')\n buf.write('\\x05йȝ\\x02\\u06dd۞\\x05ѝȯ\\x02۞۟')\n buf.write('\\x05їȬ\\x02۟۠\\x05љȭ\\x02۠ۡ')\n buf.write('\\x05ёȩ\\x02ۡۢ\\x05їȬ\\x02ۢª')\n buf.write('\\x03\\x02\\x02\\x02ۣۤ\\x05йȝ\\x02ۤۥ\\x05ѝ')\n buf.write('ȯ\\x02ۥۦ\\x05љȭ\\x02ۦۧ\\x05ћ')\n buf.write('Ȯ\\x02ۧۨ\\x05ёȩ\\x02ۨ۩\\x05э')\n buf.write('ȧ\\x02۩۪\\x05лȞ\\x02۪۫\\x05е')\n buf.write('ț\\x02۫۬\\x05ћȮ\\x02ۭ۬\\x05ѝ')\n buf.write('ȯ\\x02ۭۮ\\x05эȧ\\x02ۮ¬\\x03\\x02\\x02')\n buf.write('\\x02ۯ۰\\x05йȝ\\x02۰۱\\x05ѥȳ')\n buf.write('\\x02۱۲\\x05йȝ\\x02۲۳\\x05ыȦ')\n buf.write('\\x02۳۴\\x05нȟ\\x02۴®\\x03\\x02\\x02\\x02۵')\n buf.write('۶\\x05лȞ\\x02۶۷\\x05еț\\x02۷')\n buf.write('۸\\x05ћȮ\\x02۸۹\\x05еț\\x02۹')\n buf.write('°\\x03\\x02\\x02\\x02ۺۻ\\x05лȞ\\x02ۻۼ')\n buf.write('\\x05еț\\x02ۼ۽\\x05ћȮ\\x02۽۾')\n buf.write('\\x05еț\\x02۾ۿ\\x05зȜ\\x02ۿ܀')\n buf.write('\\x05еț\\x02܀܁\\x05љȭ\\x02܁܂')\n buf.write('\\x05нȟ\\x02܂²\\x03\\x02\\x02\\x02܃܄\\x05л')\n buf.write('Ȟ\\x02܄܅\\x05еț\\x02܅܆\\x05ћ')\n buf.write('Ȯ\\x02܆܇\\x05нȟ\\x02܇´\\x03\\x02\\x02')\n 
buf.write('\\x02܈܉\\x05лȞ\\x02܉܊\\x05еț')\n buf.write('\\x02܊܋\\x05ѥȳ\\x02܋¶\\x03\\x02\\x02\\x02܌')\n buf.write('܍\\x05лȞ\\x02܍\\u070e\\x05зȜ\\x02\\u070e')\n buf.write('\\u070f\\x07a\\x02\\x02\\u070fܐ\\x05їȬ\\x02ܐܑ')\n buf.write('\\x05ёȩ\\x02ܑܒ\\x05ыȦ\\x02ܒܓ')\n buf.write('\\x05нȟ\\x02ܓܔ\\x07a\\x02\\x02ܔܕ\\x05й')\n buf.write('ȝ\\x02ܕܖ\\x05уȢ\\x02ܖܗ\\x05е')\n buf.write('ț\\x02ܗܘ\\x05яȨ\\x02ܘܙ\\x05с')\n buf.write('ȡ\\x02ܙܚ\\x05нȟ\\x02ܚ¸\\x03\\x02\\x02')\n buf.write('\\x02ܛܜ\\x05лȞ\\x02ܜܝ\\x05зȜ')\n buf.write('\\x02ܝܞ\\x05ћȮ\\x02ܞܟ\\x05хȣ')\n buf.write('\\x02ܟܠ\\x05эȧ\\x02ܠܡ\\x05нȟ')\n buf.write('\\x02ܡܢ\\x05ѧȴ\\x02ܢܣ\\x05ёȩ')\n buf.write('\\x02ܣܤ\\x05яȨ\\x02ܤܥ\\x05нȟ')\n buf.write('\\x02ܥº\\x03\\x02\\x02\\x02ܦܧ\\x05лȞ\\x02ܧ')\n buf.write('ܨ\\x05лȞ\\x02ܨܩ\\x05ыȦ\\x02ܩ')\n buf.write('¼\\x03\\x02\\x02\\x02ܪܫ\\x05лȞ\\x02ܫܬ')\n buf.write('\\x05нȟ\\x02ܬܭ\\x05зȜ\\x02ܭܮ')\n buf.write('\\x05ѝȯ\\x02ܮܯ\\x05сȡ\\x02ܯ¾')\n buf.write('\\x03\\x02\\x02\\x02ܱܰ\\x05лȞ\\x02ܱܲ\\x05н')\n buf.write('ȟ\\x02ܲܳ\\x05йȝ\\x02ܳÀ\\x03\\x02\\x02')\n buf.write('\\x02ܴܵ\\x05лȞ\\x02ܵܶ\\x05нȟ')\n buf.write('\\x02ܷܶ\\x05йȝ\\x02ܷܸ\\x05хȣ')\n buf.write('\\x02ܸܹ\\x05эȧ\\x02ܹܺ\\x05еț')\n buf.write('\\x02ܻܺ\\x05ыȦ\\x02ܻÂ\\x03\\x02\\x02\\x02ܼ')\n buf.write('ܽ\\x05лȞ\\x02ܾܽ\\x05нȟ\\x02ܾ')\n buf.write('ܿ\\x05йȝ\\x02ܿ݀\\x05ыȦ\\x02݀')\n buf.write('݁\\x05еț\\x02݂݁\\x05їȬ\\x02݂')\n buf.write('݃\\x05нȟ\\x02݃Ä\\x03\\x02\\x02\\x02݄݅')\n buf.write('\\x05лȞ\\x02݆݅\\x05нȟ\\x02݆݇')\n buf.write('\\x05йȝ\\x02݈݇\\x05ёȩ\\x02݈݉')\n buf.write('\\x05эȧ\\x02݉݊\\x05ѓȪ\\x02݊\\u074b')\n buf.write('\\x05ёȩ\\x02\\u074b\\u074c\\x05љȭ\\x02\\u074cݍ')\n buf.write('\\x05нȟ\\x02ݍÆ\\x03\\x02\\x02\\x02ݎݏ\\x05л')\n buf.write('Ȟ\\x02ݏݐ\\x05нȟ\\x02ݐݑ\\x05й')\n buf.write('ȝ\\x02ݑݒ\\x05їȬ\\x02ݒݓ\\x05н')\n buf.write('ȟ\\x02ݓݔ\\x05эȧ\\x02ݔݕ\\x05н')\n buf.write('ȟ\\x02ݕݖ\\x05яȨ\\x02ݖݗ\\x05ћ')\n buf.write('Ȯ\\x02ݗÈ\\x03\\x02\\x02\\x02ݘݙ\\x05лȞ')\n buf.write('\\x02ݙݚ\\x05нȟ\\x02ݚݛ\\x05пȠ')\n buf.write('\\x02ݛݜ\\x05еț\\x02ݜݝ\\x05ѝȯ')\n 
buf.write('\\x02ݝݞ\\x05ыȦ\\x02ݞݟ\\x05ћȮ')\n buf.write('\\x02ݟÊ\\x03\\x02\\x02\\x02ݠݡ\\x05лȞ\\x02ݡ')\n buf.write('ݢ\\x05нȟ\\x02ݢݣ\\x05пȠ\\x02ݣ')\n buf.write('ݤ\\x05еț\\x02ݤݥ\\x05ѝȯ\\x02ݥ')\n buf.write('ݦ\\x05ыȦ\\x02ݦݧ\\x05ћȮ\\x02ݧ')\n buf.write('ݨ\\x05љȭ\\x02ݨÌ\\x03\\x02\\x02\\x02ݩݪ')\n buf.write('\\x05лȞ\\x02ݪݫ\\x05нȟ\\x02ݫݬ')\n buf.write('\\x05пȠ\\x02ݬݭ\\x05нȟ\\x02ݭݮ')\n buf.write('\\x05їȬ\\x02ݮݯ\\x05їȬ\\x02ݯݰ')\n buf.write('\\x05нȟ\\x02ݰݱ\\x05лȞ\\x02ݱÎ')\n buf.write('\\x03\\x02\\x02\\x02ݲݳ\\x05лȞ\\x02ݳݴ\\x05н')\n buf.write('ȟ\\x02ݴݵ\\x05пȠ\\x02ݵݶ\\x05х')\n buf.write('ȣ\\x02ݶݷ\\x05яȨ\\x02ݷݸ\\x05н')\n buf.write('ȟ\\x02ݸݹ\\x05їȬ\\x02ݹÐ\\x03\\x02\\x02')\n buf.write('\\x02ݺݻ\\x05лȞ\\x02ݻݼ\\x05нȟ')\n buf.write('\\x02ݼݽ\\x05ыȦ\\x02ݽݾ\\x05нȟ')\n buf.write('\\x02ݾݿ\\x05ћȮ\\x02ݿހ\\x05нȟ')\n buf.write('\\x02ހÒ\\x03\\x02\\x02\\x02ށނ\\x05лȞ\\x02ނ')\n buf.write('ރ\\x05нȟ\\x02ރބ\\x05ѓȪ\\x02ބ')\n buf.write('ޅ\\x05ћȮ\\x02ޅކ\\x05уȢ\\x02ކ')\n buf.write('Ô\\x03\\x02\\x02\\x02އވ\\x05лȞ\\x02ވމ')\n buf.write('\\x05нȟ\\x02މފ\\x05љȭ\\x02ފދ')\n buf.write('\\x05йȝ\\x02ދÖ\\x03\\x02\\x02\\x02ތލ\\x05л')\n buf.write('Ȟ\\x02ލގ\\x05нȟ\\x02ގޏ\\x05ћ')\n buf.write('Ȯ\\x02ޏސ\\x05нȟ\\x02ސޑ\\x05ї')\n buf.write('Ȭ\\x02ޑޒ\\x05эȧ\\x02ޒޓ\\x05х')\n buf.write('ȣ\\x02ޓޔ\\x05яȨ\\x02ޔޕ\\x05х')\n buf.write('ȣ\\x02ޕޖ\\x05љȭ\\x02ޖޗ\\x05ћ')\n buf.write('Ȯ\\x02ޗޘ\\x05хȣ\\x02ޘޙ\\x05й')\n buf.write('ȝ\\x02ޙØ\\x03\\x02\\x02\\x02ޚޛ\\x05лȞ')\n buf.write('\\x02ޛޜ\\x05хȣ\\x02ޜޝ\\x05эȧ')\n buf.write('\\x02ޝޞ\\x05нȟ\\x02ޞޟ\\x05яȨ')\n buf.write('\\x02ޟޠ\\x05љȭ\\x02ޠޡ\\x05хȣ')\n buf.write('\\x02ޡޢ\\x05ёȩ\\x02ޢޣ\\x05яȨ')\n buf.write('\\x02ޣÚ\\x03\\x02\\x02\\x02ޤޥ\\x05лȞ\\x02ޥ')\n buf.write('ަ\\x05хȣ\\x02ަާ\\x05љȭ\\x02ާ')\n buf.write('ި\\x05еț\\x02ިީ\\x05зȜ\\x02ީ')\n buf.write('ު\\x05ыȦ\\x02ުޫ\\x05нȟ\\x02ޫ')\n buf.write('Ü\\x03\\x02\\x02\\x02ެޭ\\x05лȞ\\x02ޭޮ')\n buf.write('\\x05хȣ\\x02ޮޯ\\x05љȭ\\x02ޯް')\n buf.write('\\x05еț\\x02ްޱ\\x05љȭ\\x02ޱ\\u07b2')\n buf.write('\\x05љȭ\\x02\\u07b2\\u07b3\\x05ёȩ\\x02\\u07b3\\u07b4')\n 
buf.write('\\x05йȝ\\x02\\u07b4\\u07b5\\x05хȣ\\x02\\u07b5\\u07b6')\n buf.write('\\x05еț\\x02\\u07b6\\u07b7\\x05ћȮ\\x02\\u07b7\\u07b8')\n buf.write('\\x05нȟ\\x02\\u07b8Þ\\x03\\x02\\x02\\x02\\u07b9\\u07ba\\x05л')\n buf.write('Ȟ\\x02\\u07ba\\u07bb\\x05хȣ\\x02\\u07bb\\u07bc\\x05љ')\n buf.write('ȭ\\x02\\u07bc\\u07bd\\x05ћȮ\\x02\\u07bd\\u07be\\x05х')\n buf.write('ȣ\\x02\\u07be\\u07bf\\x05яȨ\\x02\\u07bf߀\\x05й')\n buf.write('ȝ\\x02߀߁\\x05ћȮ\\x02߁à\\x03\\x02\\x02')\n buf.write('\\x02߂߃\\x05лȞ\\x02߃߄\\x05ёȩ')\n buf.write('\\x02߄߅\\x05йȝ\\x02߅߆\\x05ѝȯ')\n buf.write('\\x02߆߇\\x05эȧ\\x02߇߈\\x05нȟ')\n buf.write('\\x02߈߉\\x05яȨ\\x02߉ߊ\\x05ћȮ')\n buf.write('\\x02ߊâ\\x03\\x02\\x02\\x02ߋߌ\\x05лȞ\\x02ߌ')\n buf.write('ߍ\\x05ёȩ\\x02ߍߎ\\x05ѝȯ\\x02ߎ')\n buf.write('ߏ\\x05зȜ\\x02ߏߐ\\x05ыȦ\\x02ߐ')\n buf.write('ߑ\\x05нȟ\\x02ߑä\\x03\\x02\\x02\\x02ߒߓ')\n buf.write('\\x05лȞ\\x02ߓߔ\\x05їȬ\\x02ߔߕ')\n buf.write('\\x05ёȩ\\x02ߕߖ\\x05ѓȪ\\x02ߖæ')\n buf.write('\\x03\\x02\\x02\\x02ߗߘ\\x05лȞ\\x02ߘߙ\\x05љ')\n buf.write('ȭ\\x02ߙߚ\\x05хȣ\\x02ߚߛ\\x05я')\n buf.write('Ȩ\\x02ߛߜ\\x05ћȮ\\x02ߜߝ\\x05н')\n buf.write('ȟ\\x02ߝߞ\\x05їȬ\\x02ߞߟ\\x05џ')\n buf.write('Ȱ\\x02ߟߠ\\x05еț\\x02ߠߡ\\x05ы')\n buf.write('Ȧ\\x02ߡߢ\\x07a\\x02\\x02ߢߣ\\x05ѝȯ')\n buf.write('\\x02ߣߤ\\x05яȨ\\x02ߤߥ\\x05йȝ')\n buf.write('\\x02ߥߦ\\x05ёȩ\\x02ߦߧ\\x05яȨ')\n buf.write('\\x02ߧߨ\\x05љȭ\\x02ߨߩ\\x05ћȮ')\n buf.write('\\x02ߩߪ\\x05їȬ\\x02ߪ߫\\x05еț')\n buf.write('\\x02߫߬\\x05хȣ\\x02߬߭\\x05яȨ')\n buf.write('\\x02߭߮\\x05нȟ\\x02߮߯\\x05лȞ')\n buf.write('\\x02߯è\\x03\\x02\\x02\\x02߰߱\\x05нȟ\\x02߱')\n buf.write('߲\\x05еț\\x02߲߳\\x05йȝ\\x02߳')\n buf.write('ߴ\\x05уȢ\\x02ߴê\\x03\\x02\\x02\\x02ߵ߶')\n buf.write('\\x05нȟ\\x02߶߷\\x05ыȦ\\x02߷߸')\n buf.write('\\x05нȟ\\x02߸߹\\x05эȧ\\x02߹ߺ')\n buf.write('\\x05нȟ\\x02ߺ\\u07fb\\x05яȨ\\x02\\u07fb\\u07fc')\n buf.write('\\x05ћȮ\\x02\\u07fcì\\x03\\x02\\x02\\x02߽߾\\x05н')\n buf.write('ȟ\\x02߾߿\\x05ыȦ\\x02߿ࠀ\\x05љ')\n buf.write('ȭ\\x02ࠀࠁ\\x05нȟ\\x02ࠁî\\x03\\x02\\x02')\n buf.write('\\x02ࠂࠃ\\x05нȟ\\x02ࠃࠄ\\x05ыȦ')\n 
buf.write('\\x02ࠄࠅ\\x05љȭ\\x02ࠅࠆ\\x05хȣ')\n buf.write('\\x02ࠆࠇ\\x05пȠ\\x02ࠇð\\x03\\x02\\x02\\x02ࠈ')\n buf.write('ࠉ\\x05нȟ\\x02ࠉࠊ\\x05эȧ\\x02ࠊ')\n buf.write('ࠋ\\x05ѓȪ\\x02ࠋࠌ\\x05ћȮ\\x02ࠌ')\n buf.write('ࠍ\\x05ѥȳ\\x02ࠍò\\x03\\x02\\x02\\x02ࠎࠏ')\n buf.write('\\x05нȟ\\x02ࠏࠐ\\x05яȨ\\x02ࠐࠑ')\n buf.write('\\x05еț\\x02ࠑࠒ\\x05зȜ\\x02ࠒࠓ')\n buf.write('\\x05ыȦ\\x02ࠓࠔ\\x05нȟ\\x02ࠔô')\n buf.write('\\x03\\x02\\x02\\x02ࠕࠖ\\x05нȟ\\x02ࠖࠗ\\x05я')\n buf.write('Ȩ\\x02ࠗ࠘\\x05йȝ\\x02࠘࠙\\x05ё')\n buf.write('ȩ\\x02࠙ࠚ\\x05лȞ\\x02ࠚࠛ\\x05х')\n buf.write('ȣ\\x02ࠛࠜ\\x05яȨ\\x02ࠜࠝ\\x05с')\n buf.write('ȡ\\x02ࠝö\\x03\\x02\\x02\\x02ࠞࠟ\\x05нȟ')\n buf.write('\\x02ࠟࠠ\\x05яȨ\\x02ࠠࠡ\\x05лȞ')\n buf.write('\\x02ࠡø\\x03\\x02\\x02\\x02ࠢࠣ\\x05нȟ\\x02ࠣ')\n buf.write('ࠤ\\x05яȨ\\x02ࠤࠥ\\x05ћȮ\\x02ࠥ')\n buf.write('ࠦ\\x05хȣ\\x02ࠦࠧ\\x05ћȮ\\x02ࠧ')\n buf.write('ࠨ\\x05ѥȳ\\x02ࠨࠩ\\x05нȟ\\x02ࠩ')\n buf.write('ࠪ\\x05љȭ\\x02ࠪࠫ\\x05йȝ\\x02ࠫ')\n buf.write('ࠬ\\x05еț\\x02ࠬ࠭\\x05ѓȪ\\x02࠭')\n buf.write('\\u082e\\x05хȣ\\x02\\u082e\\u082f\\x05яȨ\\x02\\u082f')\n buf.write('࠰\\x05сȡ\\x02࠰ú\\x03\\x02\\x02\\x02࠱࠲')\n buf.write('\\x05нȟ\\x02࠲࠳\\x05їȬ\\x02࠳࠴')\n buf.write('\\x05їȬ\\x02࠴ü\\x03\\x02\\x02\\x02࠵࠶\\x05н')\n buf.write('ȟ\\x02࠶࠷\\x05їȬ\\x02࠷࠸\\x05ї')\n buf.write('Ȭ\\x02࠸࠹\\x05ёȩ\\x02࠹࠺\\x05ї')\n buf.write('Ȭ\\x02࠺࠻\\x05љȭ\\x02࠻þ\\x03\\x02\\x02')\n buf.write('\\x02࠼࠽\\x05нȟ\\x02࠽࠾\\x05љȭ')\n buf.write('\\x02࠾\\u083f\\x05йȝ\\x02\\u083fࡀ\\x05еț')\n buf.write('\\x02ࡀࡁ\\x05ѓȪ\\x02ࡁࡂ\\x05нȟ')\n buf.write('\\x02ࡂĀ\\x03\\x02\\x02\\x02ࡃࡄ\\x05нȟ\\x02ࡄ')\n buf.write('ࡅ\\x05џȰ\\x02ࡅࡆ\\x05еț\\x02ࡆ')\n buf.write('ࡇ\\x05ыȦ\\x02ࡇࡈ\\x05яȨ\\x02ࡈ')\n buf.write('ࡉ\\x05еț\\x02ࡉࡊ\\x05эȧ\\x02ࡊ')\n buf.write('ࡋ\\x05нȟ\\x02ࡋĂ\\x03\\x02\\x02\\x02ࡌࡍ')\n buf.write('\\x05нȟ\\x02ࡍࡎ\\x05ѣȲ\\x02ࡎࡏ')\n buf.write('\\x05йȝ\\x02ࡏࡐ\\x05нȟ\\x02ࡐࡑ')\n buf.write('\\x05ѓȪ\\x02ࡑࡒ\\x05ћȮ\\x02ࡒĄ')\n buf.write('\\x03\\x02\\x02\\x02ࡓࡔ\\x05нȟ\\x02ࡔࡕ\\x05ѣ')\n buf.write('Ȳ\\x02ࡕࡖ\\x05йȝ\\x02ࡖࡗ\\x05н')\n buf.write('ȟ\\x02ࡗࡘ\\x05ѓȪ\\x02ࡘ࡙\\x05ћ')\n 
buf.write('Ȯ\\x02࡙࡚\\x05хȣ\\x02࡚࡛\\x05ё')\n buf.write('ȩ\\x02࡛\\u085c\\x05яȨ\\x02\\u085cĆ\\x03\\x02\\x02')\n buf.write('\\x02\\u085d࡞\\x05нȟ\\x02࡞\\u085f\\x05ѣȲ')\n buf.write('\\x02\\u085fࡠ\\x05йȝ\\x02ࡠࡡ\\x05нȟ')\n buf.write('\\x02ࡡࡢ\\x05ѓȪ\\x02ࡢࡣ\\x05ћȮ')\n buf.write('\\x02ࡣࡤ\\x05хȣ\\x02ࡤࡥ\\x05ёȩ')\n buf.write('\\x02ࡥࡦ\\x05яȨ\\x02ࡦࡧ\\x07a\\x02\\x02ࡧ')\n buf.write('ࡨ\\x05хȣ\\x02ࡨࡩ\\x05яȨ\\x02ࡩ')\n buf.write('ࡪ\\x05хȣ\\x02ࡪ\\u086b\\x05ћȮ\\x02\\u086b')\n buf.write('Ĉ\\x03\\x02\\x02\\x02\\u086c\\u086d\\x05нȟ\\x02\\u086d\\u086e')\n buf.write('\\x05ѣȲ\\x02\\u086e\\u086f\\x05йȝ\\x02\\u086fࡰ')\n buf.write('\\x05нȟ\\x02ࡰࡱ\\x05ѓȪ\\x02ࡱࡲ')\n buf.write('\\x05ћȮ\\x02ࡲࡳ\\x05хȣ\\x02ࡳࡴ')\n buf.write('\\x05ёȩ\\x02ࡴࡵ\\x05яȨ\\x02ࡵࡶ')\n buf.write('\\x05љȭ\\x02ࡶĊ\\x03\\x02\\x02\\x02ࡷࡸ\\x05н')\n buf.write('ȟ\\x02ࡸࡹ\\x05ѣȲ\\x02ࡹࡺ\\x05й')\n buf.write('ȝ\\x02ࡺࡻ\\x05ыȦ\\x02ࡻࡼ\\x05ѝ')\n buf.write('ȯ\\x02ࡼࡽ\\x05лȞ\\x02ࡽࡾ\\x05н')\n buf.write('ȟ\\x02ࡾČ\\x03\\x02\\x02\\x02ࡿࢀ\\x05нȟ')\n buf.write('\\x02ࢀࢁ\\x05ѣȲ\\x02ࢁࢂ\\x05йȝ')\n buf.write('\\x02ࢂࢃ\\x05ыȦ\\x02ࢃࢄ\\x05ѝȯ')\n buf.write('\\x02ࢄࢅ\\x05љȭ\\x02ࢅࢆ\\x05хȣ')\n buf.write('\\x02ࢆࢇ\\x05џȰ\\x02ࢇ࢈\\x05нȟ')\n buf.write('\\x02࢈Ď\\x03\\x02\\x02\\x02ࢉࢊ\\x05нȟ\\x02ࢊ')\n buf.write('ࢋ\\x05ѣȲ\\x02ࢋࢌ\\x05нȟ\\x02ࢌ')\n buf.write('ࢍ\\x05йȝ\\x02ࢍࢎ\\x05ѝȯ\\x02ࢎ')\n buf.write('\\u088f\\x05ћȮ\\x02\\u088f\\u0890\\x05нȟ\\x02\\u0890')\n buf.write('Đ\\x03\\x02\\x02\\x02\\u0891\\u0892\\x05нȟ\\x02\\u0892\\u0893')\n buf.write('\\x05ѣȲ\\x02\\u0893\\u0894\\x05хȣ\\x02\\u0894\\u0895')\n buf.write('\\x05љȭ\\x02\\u0895\\u0896\\x05ћȮ\\x02\\u0896\\u0897')\n buf.write('\\x05љȭ\\x02\\u0897Ē\\x03\\x02\\x02\\x02࢙࢘\\x05н')\n buf.write('ȟ\\x02࢙࢚\\x05ѣȲ\\x02࢚࢛\\x05х')\n buf.write('ȣ\\x02࢛࢜\\x05ћȮ\\x02࢜Ĕ\\x03\\x02\\x02')\n buf.write('\\x02࢝࢞\\x05нȟ\\x02࢞࢟\\x05ѣȲ')\n buf.write('\\x02࢟ࢠ\\x05ѓȪ\\x02ࢠࢡ\\x05ыȦ')\n buf.write('\\x02ࢡࢢ\\x05еț\\x02ࢢࢣ\\x05хȣ')\n buf.write('\\x02ࢣࢤ\\x05яȨ\\x02ࢤĖ\\x03\\x02\\x02\\x02ࢥ')\n buf.write('ࢦ\\x05нȟ\\x02ࢦࢧ\\x05ѣȲ\\x02ࢧ')\n 
buf.write('ࢨ\\x05ћȮ\\x02ࢨࢩ\\x05нȟ\\x02ࢩ')\n buf.write('ࢪ\\x05їȬ\\x02ࢪࢫ\\x05яȨ\\x02ࢫ')\n buf.write('ࢬ\\x05еț\\x02ࢬࢭ\\x05ыȦ\\x02ࢭ')\n buf.write('Ę\\x03\\x02\\x02\\x02ࢮࢯ\\x05нȟ\\x02ࢯࢰ')\n buf.write('\\x05ѣȲ\\x02ࢰࢱ\\x05ћȮ\\x02ࢱࢲ')\n buf.write('\\x05їȬ\\x02ࢲࢳ\\x05еț\\x02ࢳࢴ')\n buf.write('\\x05йȝ\\x02ࢴࢵ\\x05ћȮ\\x02ࢵĚ')\n buf.write('\\x03\\x02\\x02\\x02ࢶࢷ\\x05пȠ\\x02ࢷࢸ\\x05е')\n buf.write('ț\\x02ࢸࢹ\\x05хȣ\\x02ࢹࢺ\\x05ы')\n buf.write('Ȧ\\x02ࢺࢻ\\x05ѝȯ\\x02ࢻࢼ\\x05ї')\n buf.write('Ȭ\\x02ࢼࢽ\\x05нȟ\\x02ࢽĜ\\x03\\x02\\x02')\n buf.write('\\x02ࢾࢿ\\x05пȠ\\x02ࢿࣀ\\x05еț')\n buf.write('\\x02ࣀࣁ\\x05ыȦ\\x02ࣁࣂ\\x05љȭ')\n buf.write('\\x02ࣂࣃ\\x05нȟ\\x02ࣃĞ\\x03\\x02\\x02\\x02ࣄ')\n buf.write('ࣅ\\x05пȠ\\x02ࣅࣆ\\x05нȟ\\x02ࣆ')\n buf.write('ࣇ\\x05ћȮ\\x02ࣇࣈ\\x05йȝ\\x02ࣈ')\n buf.write('ࣉ\\x05уȢ\\x02ࣉĠ\\x03\\x02\\x02\\x02࣊࣋')\n buf.write('\\x05пȠ\\x02࣋࣌\\x05хȣ\\x02࣌࣍')\n buf.write('\\x05яȨ\\x02࣍࣎\\x05еț\\x02࣏࣎')\n buf.write('\\x05ыȦ\\x02࣏Ģ\\x03\\x02\\x02\\x02࣐࣑\\x05п')\n buf.write('Ƞ\\x02࣑࣒\\x05хȣ\\x02࣒࣓\\x05ї')\n buf.write('Ȭ\\x02࣓ࣔ\\x05љȭ\\x02ࣔࣕ\\x05ћ')\n buf.write('Ȯ\\x02ࣕĤ\\x03\\x02\\x02\\x02ࣖࣗ\\x05пȠ')\n buf.write('\\x02ࣗࣘ\\x05хȣ\\x02ࣘࣙ\\x05їȬ')\n buf.write('\\x02ࣙࣚ\\x05љȭ\\x02ࣚࣛ\\x05ћȮ')\n buf.write('\\x02ࣛࣜ\\x07a\\x02\\x02ࣜࣝ\\x05џȰ\\x02ࣝ')\n buf.write('ࣞ\\x05еț\\x02ࣞࣟ\\x05ыȦ\\x02ࣟ')\n buf.write('࣠\\x05ѝȯ\\x02࣠࣡\\x05нȟ\\x02࣡')\n buf.write('Ħ\\x03\\x02\\x02\\x02\\u08e2ࣣ\\x05пȠ\\x02ࣣࣤ')\n buf.write('\\x05ыȦ\\x02ࣤࣥ\\x05ёȩ\\x02ࣦࣥ')\n buf.write('\\x05еț\\x02ࣦࣧ\\x05ћȮ\\x02ࣧĨ')\n buf.write('\\x03\\x02\\x02\\x02ࣩࣨ\\x05пȠ\\x02ࣩ࣪\\x05ё')\n buf.write('ȩ\\x02࣪࣫\\x05ыȦ\\x02࣫࣬\\x05ы')\n buf.write('Ȧ\\x02࣭࣬\\x05ёȩ\\x02࣭࣮\\x05ѡ')\n buf.write('ȱ\\x02࣮࣯\\x05хȣ\\x02ࣰ࣯\\x05я')\n buf.write('Ȩ\\x02ࣰࣱ\\x05сȡ\\x02ࣱĪ\\x03\\x02\\x02')\n buf.write('\\x02ࣲࣳ\\x05пȠ\\x02ࣳࣴ\\x05ёȩ')\n buf.write('\\x02ࣴࣵ\\x05ыȦ\\x02ࣶࣵ\\x05ыȦ')\n buf.write('\\x02ࣶࣷ\\x05ёȩ\\x02ࣷࣸ\\x05ѡȱ')\n buf.write('\\x02ࣹࣸ\\x05љȭ\\x02ࣹĬ\\x03\\x02\\x02\\x02ࣺ')\n buf.write('ࣻ\\x05пȠ\\x02ࣻࣼ\\x05ёȩ\\x02ࣼ')\n buf.write('ࣽ\\x05їȬ\\x02ࣽĮ\\x03\\x02\\x02\\x02ࣾࣿ')\n 
buf.write('\\x05пȠ\\x02ࣿऀ\\x05ёȩ\\x02ऀँ')\n buf.write('\\x05їȬ\\x02ँं\\x05еț\\x02ंः')\n buf.write('\\x05ыȦ\\x02ःऄ\\x05ыȦ\\x02ऄİ')\n buf.write('\\x03\\x02\\x02\\x02अआ\\x05пȠ\\x02आइ\\x05ё')\n buf.write('ȩ\\x02इई\\x05їȬ\\x02ईउ\\x05й')\n buf.write('ȝ\\x02उऊ\\x05нȟ\\x02ऊIJ\\x03\\x02\\x02')\n buf.write('\\x02ऋऌ\\x05пȠ\\x02ऌऍ\\x05їȬ')\n buf.write('\\x02ऍऎ\\x05ёȩ\\x02ऎए\\x05эȧ')\n buf.write('\\x02एĴ\\x03\\x02\\x02\\x02ऐऑ\\x05пȠ\\x02ऑ')\n buf.write('ऒ\\x05ѝȯ\\x02ऒओ\\x05ыȦ\\x02ओ')\n buf.write('औ\\x05ыȦ\\x02औĶ\\x03\\x02\\x02\\x02कख')\n buf.write('\\x05пȠ\\x02खग\\x05ѝȯ\\x02गघ')\n buf.write('\\x05яȨ\\x02घङ\\x05йȝ\\x02ङच')\n buf.write('\\x05ћȮ\\x02चछ\\x05хȣ\\x02छज')\n buf.write('\\x05ёȩ\\x02जझ\\x05яȨ\\x02झĸ')\n buf.write('\\x03\\x02\\x02\\x02ञट\\x05сȡ\\x02टठ\\x05ё')\n buf.write('ȩ\\x02ठड\\x05ћȮ\\x02डढ\\x05ё')\n buf.write('ȩ\\x02ढĺ\\x03\\x02\\x02\\x02णत\\x05сȡ')\n buf.write('\\x02तथ\\x05їȬ\\x02थद\\x05еț')\n buf.write('\\x02दध\\x05яȨ\\x02धन\\x05ћȮ')\n buf.write('\\x02नļ\\x03\\x02\\x02\\x02ऩप\\x05сȡ\\x02प')\n buf.write('फ\\x05їȬ\\x02फब\\x05ёȩ\\x02ब')\n buf.write('भ\\x05ѝȯ\\x02भम\\x05ѓȪ\\x02म')\n buf.write('ľ\\x03\\x02\\x02\\x02यर\\x05сȡ\\x02रऱ')\n buf.write('\\x05їȬ\\x02ऱल\\x05ёȩ\\x02लळ')\n buf.write('\\x05ѝȯ\\x02ळऴ\\x05ѓȪ\\x02ऴव')\n buf.write('\\x05хȣ\\x02वश\\x05яȨ\\x02शष')\n buf.write('\\x05сȡ\\x02षŀ\\x03\\x02\\x02\\x02सह\\x05у')\n buf.write('Ȣ\\x02हऺ\\x05еț\\x02ऺऻ\\x05љ')\n buf.write('ȭ\\x02ऻ़\\x05уȢ\\x02़ł\\x03\\x02\\x02')\n buf.write('\\x02ऽा\\x05уȢ\\x02ाि\\x05еț')\n buf.write('\\x02िी\\x05џȰ\\x02ीु\\x05хȣ')\n buf.write('\\x02ुू\\x05яȨ\\x02ूृ\\x05сȡ')\n buf.write('\\x02ृń\\x03\\x02\\x02\\x02ॄॅ\\x05уȢ\\x02ॅ')\n buf.write('ॆ\\x05хȣ\\x02ॆे\\x05лȞ\\x02े')\n buf.write('ै\\x05нȟ\\x02ैņ\\x03\\x02\\x02\\x02ॉॊ')\n buf.write('\\x05уȢ\\x02ॊो\\x05ёȩ\\x02ोौ')\n buf.write('\\x05ѝȯ\\x02ौ्\\x05їȬ\\x02्ň')\n buf.write('\\x03\\x02\\x02\\x02ॎॏ\\x05хȣ\\x02ॏॐ\\x05п')\n buf.write('Ƞ\\x02ॐŊ\\x03\\x02\\x02\\x02॒॑\\x05хȣ')\n buf.write('\\x02॒॓\\x05сȡ\\x02॓॔\\x05яȨ')\n 
buf.write('\\x02॔ॕ\\x05ёȩ\\x02ॕॖ\\x05їȬ')\n buf.write('\\x02ॖॗ\\x05нȟ\\x02ॗŌ\\x03\\x02\\x02\\x02क़')\n buf.write('ख़\\x05хȣ\\x02ख़ग़\\x05эȧ\\x02ग़')\n buf.write('ज़\\x05эȧ\\x02ज़ड़\\x05нȟ\\x02ड़')\n buf.write('ढ़\\x05лȞ\\x02ढ़फ़\\x05хȣ\\x02फ़')\n buf.write('य़\\x05еț\\x02य़ॠ\\x05ћȮ\\x02ॠ')\n buf.write('ॡ\\x05нȟ\\x02ॡŎ\\x03\\x02\\x02\\x02ॢॣ')\n buf.write('\\x05хȣ\\x02ॣ।\\x05яȨ\\x02।Ő')\n buf.write('\\x03\\x02\\x02\\x02॥०\\x05хȣ\\x02०१\\x05я')\n buf.write('Ȩ\\x02१२\\x05йȝ\\x02२३\\x05ы')\n buf.write('Ȧ\\x02३४\\x05ѝȯ\\x02४५\\x05л')\n buf.write('Ȟ\\x02५६\\x05нȟ\\x02६Œ\\x03\\x02\\x02')\n buf.write('\\x02७८\\x05хȣ\\x02८९\\x05яȨ')\n buf.write('\\x02९॰\\x05йȝ\\x02॰ॱ\\x05ыȦ')\n buf.write('\\x02ॱॲ\\x05ѝȯ\\x02ॲॳ\\x05лȞ')\n buf.write('\\x02ॳॴ\\x05хȣ\\x02ॴॵ\\x05яȨ')\n buf.write('\\x02ॵॶ\\x05сȡ\\x02ॶŔ\\x03\\x02\\x02\\x02ॷ')\n buf.write('ॸ\\x05хȣ\\x02ॸॹ\\x05яȨ\\x02ॹ')\n buf.write('ॺ\\x05йȝ\\x02ॺॻ\\x05їȬ\\x02ॻ')\n buf.write('ॼ\\x05нȟ\\x02ॼॽ\\x05эȧ\\x02ॽ')\n buf.write('ॾ\\x05нȟ\\x02ॾॿ\\x05яȨ\\x02ॿ')\n buf.write('ঀ\\x05ћȮ\\x02ঀŖ\\x03\\x02\\x02\\x02ঁং')\n buf.write('\\x05хȣ\\x02ংঃ\\x05яȨ\\x02ঃ\\u0984')\n buf.write('\\x05лȞ\\x02\\u0984অ\\x05нȟ\\x02অআ')\n buf.write('\\x05яȨ\\x02আই\\x05ћȮ\\x02ইŘ')\n buf.write('\\x03\\x02\\x02\\x02ঈউ\\x05хȣ\\x02উঊ\\x05я')\n buf.write('Ȩ\\x02ঊঋ\\x05лȞ\\x02ঋঌ\\x05н')\n buf.write('ȟ\\x02ঌ\\u098d\\x05ѣȲ\\x02\\u098dŚ\\x03\\x02\\x02')\n buf.write('\\x02\\u098eএ\\x05хȣ\\x02এঐ\\x05яȨ')\n buf.write('\\x02ঐ\\u0991\\x05лȞ\\x02\\u0991\\u0992\\x05нȟ')\n buf.write('\\x02\\u0992ও\\x05ѣȲ\\x02ওঔ\\x05нȟ')\n buf.write('\\x02ঔক\\x05лȞ\\x02কŜ\\x03\\x02\\x02\\x02খ')\n buf.write('গ\\x05хȣ\\x02গঘ\\x05яȨ\\x02ঘ')\n buf.write('ঙ\\x05лȞ\\x02ঙচ\\x05хȣ\\x02চ')\n buf.write('ছ\\x05йȝ\\x02ছজ\\x05еț\\x02জ')\n buf.write('ঝ\\x05ћȮ\\x02ঝঞ\\x05ёȩ\\x02ঞ')\n buf.write('ট\\x05їȬ\\x02টŞ\\x03\\x02\\x02\\x02ঠড')\n buf.write('\\x05хȣ\\x02ডঢ\\x05яȨ\\x02ঢণ')\n buf.write('\\x05лȞ\\x02ণত\\x05хȣ\\x02তথ')\n buf.write('\\x05йȝ\\x02থদ\\x05нȟ\\x02দধ')\n 
buf.write('\\x05љȭ\\x02ধŠ\\x03\\x02\\x02\\x02ন\\u09a9\\x05х')\n buf.write('ȣ\\x02\\u09a9প\\x05яȨ\\x02পফ\\x05п')\n buf.write('Ƞ\\x02ফব\\x05хȣ\\x02বভ\\x05я')\n buf.write('Ȩ\\x02ভম\\x05хȣ\\x02ময\\x05ћ')\n buf.write('Ȯ\\x02যর\\x05нȟ\\x02রŢ\\x03\\x02\\x02')\n buf.write('\\x02\\u09b1ল\\x05хȣ\\x02ল\\u09b3\\x05яȨ')\n buf.write('\\x02\\u09b3\\u09b4\\x05ыȦ\\x02\\u09b4\\u09b5\\x05хȣ')\n buf.write('\\x02\\u09b5শ\\x05яȨ\\x02শষ\\x05нȟ')\n buf.write('\\x02ষŤ\\x03\\x02\\x02\\x02সহ\\x05хȣ\\x02হ')\n buf.write('\\u09ba\\x05яȨ\\x02\\u09ba\\u09bb\\x05яȨ\\x02\\u09bb')\n buf.write('়\\x05нȟ\\x02়ঽ\\x05їȬ\\x02ঽ')\n buf.write('Ŧ\\x03\\x02\\x02\\x02াি\\x05хȣ\\x02িী')\n buf.write('\\x05яȨ\\x02ীু\\x05ёȩ\\x02ুূ')\n buf.write('\\x05ѝȯ\\x02ূৃ\\x05ћȮ\\x02ৃŨ')\n buf.write('\\x03\\x02\\x02\\x02ৄ\\u09c5\\x05хȣ\\x02\\u09c5\\u09c6\\x05я')\n buf.write('Ȩ\\x02\\u09c6ে\\x05љȭ\\x02েৈ\\x05н')\n buf.write('ȟ\\x02ৈ\\u09c9\\x05їȬ\\x02\\u09c9\\u09ca\\x05ћ')\n buf.write('Ȯ\\x02\\u09caŪ\\x03\\x02\\x02\\x02োৌ\\x05хȣ')\n buf.write('\\x02ৌ্\\x05яȨ\\x02্ৎ\\x05љȭ')\n buf.write('\\x02ৎ\\u09cf\\x05ћȮ\\x02\\u09cf\\u09d0\\x05еț')\n buf.write('\\x02\\u09d0\\u09d1\\x05яȨ\\x02\\u09d1\\u09d2\\x05ћȮ')\n buf.write('\\x02\\u09d2\\u09d3\\x05хȣ\\x02\\u09d3\\u09d4\\x05еț')\n buf.write('\\x02\\u09d4\\u09d5\\x05зȜ\\x02\\u09d5\\u09d6\\x05ыȦ')\n buf.write('\\x02\\u09d6ৗ\\x05нȟ\\x02ৗŬ\\x03\\x02\\x02\\x02\\u09d8')\n buf.write('\\u09d9\\x05хȣ\\x02\\u09d9\\u09da\\x05яȨ\\x02\\u09da')\n buf.write('\\u09db\\x05љȭ\\x02\\u09dbড়\\x05ћȮ\\x02ড়')\n buf.write('ঢ়\\x05нȟ\\x02ঢ়\\u09de\\x05еț\\x02\\u09de')\n buf.write('য়\\x05лȞ\\x02য়Ů\\x03\\x02\\x02\\x02ৠৡ')\n buf.write('\\x05хȣ\\x02ৡৢ\\x05яȨ\\x02ৢৣ')\n buf.write('\\x05ћȮ\\x02ৣŰ\\x03\\x02\\x02\\x02\\u09e4\\u09e5\\x05х')\n buf.write('ȣ\\x02\\u09e5০\\x05яȨ\\x02০১\\x05ћ')\n buf.write('Ȯ\\x02১২\\x05нȟ\\x02২৩\\x05с')\n buf.write('ȡ\\x02৩৪\\x05нȟ\\x02৪৫\\x05ї')\n buf.write('Ȭ\\x02৫Ų\\x03\\x02\\x02\\x02৬৭\\x05хȣ')\n buf.write('\\x02৭৮\\x05яȨ\\x02৮৯\\x05ћȮ')\n buf.write('\\x02৯ৰ\\x05нȟ\\x02ৰৱ\\x05їȬ')\n 
buf.write('\\x02ৱ৲\\x05љȭ\\x02৲৳\\x05нȟ')\n buf.write('\\x02৳৴\\x05йȝ\\x02৴৵\\x05ћȮ')\n buf.write('\\x02৵Ŵ\\x03\\x02\\x02\\x02৶৷\\x05хȣ\\x02৷')\n buf.write('৸\\x05яȨ\\x02৸৹\\x05ћȮ\\x02৹')\n buf.write('৺\\x05нȟ\\x02৺৻\\x05їȬ\\x02৻')\n buf.write('ৼ\\x05џȰ\\x02ৼ৽\\x05еț\\x02৽')\n buf.write('৾\\x05ыȦ\\x02৾Ŷ\\x03\\x02\\x02\\x02\\u09ff\\u0a00')\n buf.write('\\x05хȣ\\x02\\u0a00ਁ\\x05яȨ\\x02ਁਂ')\n buf.write('\\x05ћȮ\\x02ਂਃ\\x05ёȩ\\x02ਃŸ')\n buf.write('\\x03\\x02\\x02\\x02\\u0a04ਅ\\x05хȣ\\x02ਅਆ\\x05я')\n buf.write('Ȩ\\x02ਆਇ\\x05џȰ\\x02ਇਈ\\x05е')\n buf.write('ț\\x02ਈਉ\\x05ыȦ\\x02ਉਊ\\x05х')\n buf.write('ȣ\\x02ਊ\\u0a0b\\x05лȞ\\x02\\u0a0b\\u0a0c\\x05е')\n buf.write('ț\\x02\\u0a0c\\u0a0d\\x05ћȮ\\x02\\u0a0d\\u0a0e\\x05н')\n buf.write('ȟ\\x02\\u0a0eź\\x03\\x02\\x02\\x02ਏਐ\\x05хȣ')\n buf.write('\\x02ਐ\\u0a11\\x05љȭ\\x02\\u0a11ż\\x03\\x02\\x02\\x02\\u0a12')\n buf.write('ਓ\\x05хȣ\\x02ਓਔ\\x05љȭ\\x02ਔ')\n buf.write('ਕ\\x05ёȩ\\x02ਕਖ\\x05ыȦ\\x02ਖ')\n buf.write('ਗ\\x05еț\\x02ਗਘ\\x05ћȮ\\x02ਘ')\n buf.write('ਙ\\x05хȣ\\x02ਙਚ\\x05ёȩ\\x02ਚ')\n buf.write('ਛ\\x05яȨ\\x02ਛž\\x03\\x02\\x02\\x02ਜਝ')\n buf.write('\\x05хȣ\\x02ਝਞ\\x05ћȮ\\x02ਞਟ')\n buf.write('\\x05нȟ\\x02ਟਠ\\x05їȬ\\x02ਠਡ')\n buf.write('\\x05еț\\x02ਡਢ\\x05ћȮ\\x02ਢਣ')\n buf.write('\\x05нȟ\\x02ਣƀ\\x03\\x02\\x02\\x02ਤਥ\\x05ч')\n buf.write('Ȥ\\x02ਥਦ\\x05еț\\x02ਦਧ\\x05џ')\n buf.write('Ȱ\\x02ਧਨ\\x05еț\\x02ਨƂ\\x03\\x02\\x02')\n buf.write('\\x02\\u0a29ਪ\\x05чȤ\\x02ਪਫ\\x05ёȩ')\n buf.write('\\x02ਫਬ\\x05хȣ\\x02ਬਭ\\x05яȨ')\n buf.write('\\x02ਭƄ\\x03\\x02\\x02\\x02ਮਯ\\x05щȥ\\x02ਯ')\n buf.write('ਰ\\x05нȟ\\x02ਰ\\u0a31\\x05нȟ\\x02\\u0a31')\n buf.write('ਲ\\x05ѓȪ\\x02ਲƆ\\x03\\x02\\x02\\x02ਲ਼\\u0a34')\n buf.write('\\x05ыȦ\\x02\\u0a34ਵ\\x05еț\\x02ਵਸ਼')\n buf.write('\\x05яȨ\\x02ਸ਼\\u0a37\\x05сȡ\\x02\\u0a37ਸ')\n buf.write('\\x05ѝȯ\\x02ਸਹ\\x05еț\\x02ਹ\\u0a3a')\n buf.write('\\x05сȡ\\x02\\u0a3a\\u0a3b\\x05нȟ\\x02\\u0a3bƈ')\n buf.write('\\x03\\x02\\x02\\x02਼\\u0a3d\\x05ыȦ\\x02\\u0a3dਾ\\x05е')\n buf.write('ț\\x02ਾਿ\\x05љȭ\\x02ਿੀ\\x05ћ')\n 
buf.write('Ȯ\\x02ੀƊ\\x03\\x02\\x02\\x02ੁੂ\\x05ыȦ')\n buf.write('\\x02ੂ\\u0a43\\x05еț\\x02\\u0a43\\u0a44\\x05љȭ')\n buf.write('\\x02\\u0a44\\u0a45\\x05ћȮ\\x02\\u0a45\\u0a46\\x07a\\x02\\x02\\u0a46')\n buf.write('ੇ\\x05џȰ\\x02ੇੈ\\x05еț\\x02ੈ')\n buf.write('\\u0a49\\x05ыȦ\\x02\\u0a49\\u0a4a\\x05ѝȯ\\x02\\u0a4a')\n buf.write('ੋ\\x05нȟ\\x02ੋƌ\\x03\\x02\\x02\\x02ੌ੍')\n buf.write('\\x05ыȦ\\x02੍\\u0a4e\\x05нȟ\\x02\\u0a4e\\u0a4f')\n buf.write('\\x05еț\\x02\\u0a4f\\u0a50\\x05лȞ\\x02\\u0a50ੑ')\n buf.write('\\x05хȣ\\x02ੑ\\u0a52\\x05яȨ\\x02\\u0a52\\u0a53')\n buf.write('\\x05сȡ\\x02\\u0a53Ǝ\\x03\\x02\\x02\\x02\\u0a54\\u0a55\\x05ы')\n buf.write('Ȧ\\x02\\u0a55\\u0a56\\x05нȟ\\x02\\u0a56\\u0a57\\x05п')\n buf.write('Ƞ\\x02\\u0a57\\u0a58\\x05ћȮ\\x02\\u0a58Ɛ\\x03\\x02\\x02')\n buf.write('\\x02ਖ਼ਗ਼\\x05ыȦ\\x02ਗ਼ਜ਼\\x05нȟ')\n buf.write('\\x02ਜ਼ੜ\\x05џȰ\\x02ੜ\\u0a5d\\x05нȟ')\n buf.write('\\x02\\u0a5dਫ਼\\x05ыȦ\\x02ਫ਼ƒ\\x03\\x02\\x02\\x02\\u0a5f')\n buf.write('\\u0a60\\x05ыȦ\\x02\\u0a60\\u0a61\\x05хȣ\\x02\\u0a61')\n buf.write('\\u0a62\\x05зȜ\\x02\\u0a62\\u0a63\\x05їȬ\\x02\\u0a63')\n buf.write('\\u0a64\\x05еț\\x02\\u0a64\\u0a65\\x05їȬ\\x02\\u0a65')\n buf.write('੦\\x05ѥȳ\\x02੦Ɣ\\x03\\x02\\x02\\x02੧੨')\n buf.write('\\x05ыȦ\\x02੨੩\\x05хȣ\\x02੩੪')\n buf.write('\\x05щȥ\\x02੪੫\\x05нȟ\\x02੫Ɩ')\n buf.write('\\x03\\x02\\x02\\x02੬੭\\x05ыȦ\\x02੭੮\\x05х')\n buf.write('ȣ\\x02੮੯\\x05щȥ\\x02੯ੰ\\x05н')\n buf.write('ȟ\\x02ੰੱ\\x074\\x02\\x02ੱƘ\\x03\\x02\\x02\\x02ੲ')\n buf.write('ੳ\\x05ыȦ\\x02ੳੴ\\x05хȣ\\x02ੴ')\n buf.write('ੵ\\x05щȥ\\x02ੵ੶\\x05нȟ\\x02੶')\n buf.write('\\u0a77\\x076\\x02\\x02\\u0a77ƚ\\x03\\x02\\x02\\x02\\u0a78\\u0a79\\x05ы'\n )\n buf.write('Ȧ\\x02\\u0a79\\u0a7a\\x05хȣ\\x02\\u0a7a\\u0a7b\\x05щ')\n buf.write('ȥ\\x02\\u0a7b\\u0a7c\\x05нȟ\\x02\\u0a7c\\u0a7d\\x05й')\n buf.write('ȝ\\x02\\u0a7dƜ\\x03\\x02\\x02\\x02\\u0a7e\\u0a7f\\x05ыȦ')\n buf.write('\\x02\\u0a7f\\u0a80\\x05хȣ\\x02\\u0a80ઁ\\x05эȧ')\n buf.write('\\x02ઁં\\x05хȣ\\x02ંઃ\\x05ћȮ')\n 
buf.write('\\x02ઃƞ\\x03\\x02\\x02\\x02\\u0a84અ\\x05ыȦ\\x02અ')\n buf.write('આ\\x05ёȩ\\x02આઇ\\x05йȝ\\x02ઇ')\n buf.write('ઈ\\x05еț\\x02ઈઉ\\x05ыȦ\\x02ઉ')\n buf.write('Ơ\\x03\\x02\\x02\\x02ઊઋ\\x05ыȦ\\x02ઋઌ')\n buf.write('\\x05ёȩ\\x02ઌઍ\\x05йȝ\\x02ઍ\\u0a8e')\n buf.write('\\x05щȥ\\x02\\u0a8eƢ\\x03\\x02\\x02\\x02એઐ\\x05ы')\n buf.write('Ȧ\\x02ઐઑ\\x05ёȩ\\x02ઑ\\u0a92\\x05й')\n buf.write('ȝ\\x02\\u0a92ઓ\\x05щȥ\\x02ઓઔ\\x05н')\n buf.write('ȟ\\x02ઔક\\x05лȞ\\x02કƤ\\x03\\x02\\x02')\n buf.write('\\x02ખગ\\x05ыȦ\\x02ગઘ\\x05ёȩ')\n buf.write('\\x02ઘઙ\\x05сȡ\\x02ઙƦ\\x03\\x02\\x02\\x02ચ')\n buf.write('છ\\x05ыȦ\\x02છજ\\x05ёȩ\\x02જ')\n buf.write('ઝ\\x05сȡ\\x02ઝઞ\\x05ёȩ\\x02ઞ')\n buf.write('ટ\\x05пȠ\\x02ટઠ\\x05пȠ\\x02ઠ')\n buf.write('ƨ\\x03\\x02\\x02\\x02ડઢ\\x05ыȦ\\x02ઢણ')\n buf.write('\\x05ёȩ\\x02ણત\\x05сȡ\\x02તથ')\n buf.write('\\x05ёȩ\\x02થદ\\x05яȨ\\x02દƪ')\n buf.write('\\x03\\x02\\x02\\x02ધન\\x05ыȦ\\x02ન\\u0aa9\\x05ё')\n buf.write('ȩ\\x02\\u0aa9પ\\x05яȨ\\x02પફ\\x05с')\n buf.write('ȡ\\x02ફƬ\\x03\\x02\\x02\\x02બભ\\x05ыȦ')\n buf.write('\\x02ભમ\\x05ёȩ\\x02મય\\x05ёȩ')\n buf.write('\\x02યર\\x05ѓȪ\\x02રƮ\\x03\\x02\\x02\\x02\\u0ab1')\n buf.write('લ\\x05эȧ\\x02લળ\\x05еț\\x02ળ')\n buf.write('\\u0ab4\\x05хȣ\\x02\\u0ab4વ\\x05яȨ\\x02વ')\n buf.write('ư\\x03\\x02\\x02\\x02શષ\\x05эȧ\\x02ષસ')\n buf.write('\\x05еț\\x02સહ\\x05ѓȪ\\x02હƲ')\n buf.write('\\x03\\x02\\x02\\x02\\u0aba\\u0abb\\x05эȧ\\x02\\u0abb઼\\x05е')\n buf.write('ț\\x02઼ઽ\\x05ћȮ\\x02ઽા\\x05й')\n buf.write('ȝ\\x02ાિ\\x05уȢ\\x02િી\\x05н')\n buf.write('ȟ\\x02ીુ\\x05лȞ\\x02ુƴ\\x03\\x02\\x02')\n buf.write('\\x02ૂૃ\\x05эȧ\\x02ૃૄ\\x05еț')\n buf.write('\\x02ૄૅ\\x05ѣȲ\\x02ૅ\\u0ac6\\x05џȰ')\n buf.write('\\x02\\u0ac6ે\\x05еț\\x02ેૈ\\x05ыȦ')\n buf.write('\\x02ૈૉ\\x05ѝȯ\\x02ૉ\\u0aca\\x05нȟ')\n buf.write('\\x02\\u0acaƶ\\x03\\x02\\x02\\x02ોૌ\\x05эȧ\\x02ૌ')\n buf.write('્\\x05нȟ\\x02્\\u0ace\\x05еț\\x02\\u0ace')\n buf.write('\\u0acf\\x05љȭ\\x02\\u0acfૐ\\x05ѝȯ\\x02ૐ')\n buf.write('\\u0ad1\\x05їȬ\\x02\\u0ad1\\u0ad2\\x05нȟ\\x02\\u0ad2')\n 
buf.write('\\u0ad3\\x05љȭ\\x02\\u0ad3Ƹ\\x03\\x02\\x02\\x02\\u0ad4\\u0ad5')\n buf.write('\\x05эȧ\\x02\\u0ad5\\u0ad6\\x05нȟ\\x02\\u0ad6\\u0ad7')\n buf.write('\\x05эȧ\\x02\\u0ad7\\u0ad8\\x05зȜ\\x02\\u0ad8\\u0ad9')\n buf.write('\\x05нȟ\\x02\\u0ad9\\u0ada\\x05їȬ\\x02\\u0adaƺ')\n buf.write('\\x03\\x02\\x02\\x02\\u0adb\\u0adc\\x05эȧ\\x02\\u0adc\\u0add\\x05н')\n buf.write('ȟ\\x02\\u0add\\u0ade\\x05їȬ\\x02\\u0ade\\u0adf\\x05с')\n buf.write('ȡ\\x02\\u0adfૠ\\x05нȟ\\x02ૠƼ\\x03\\x02\\x02')\n buf.write('\\x02ૡૢ\\x05эȧ\\x02ૢૣ\\x05хȣ')\n buf.write('\\x02ૣ\\u0ae4\\x05яȨ\\x02\\u0ae4\\u0ae5\\x05ѝȯ')\n buf.write('\\x02\\u0ae5૦\\x05љȭ\\x02૦ƾ\\x03\\x02\\x02\\x02૧')\n buf.write('૨\\x05эȧ\\x02૨૩\\x05хȣ\\x02૩')\n buf.write('૪\\x05яȨ\\x02૪૫\\x05ѝȯ\\x02૫')\n buf.write('૬\\x05ћȮ\\x02૬૭\\x05нȟ\\x02૭')\n buf.write('ǀ\\x03\\x02\\x02\\x02૮૯\\x05эȧ\\x02૯૰')\n buf.write('\\x05хȣ\\x02૰૱\\x05яȨ\\x02૱\\u0af2')\n buf.write('\\x05џȰ\\x02\\u0af2\\u0af3\\x05еț\\x02\\u0af3\\u0af4')\n buf.write('\\x05ыȦ\\x02\\u0af4\\u0af5\\x05ѝȯ\\x02\\u0af5\\u0af6')\n buf.write('\\x05нȟ\\x02\\u0af6ǂ\\x03\\x02\\x02\\x02\\u0af7\\u0af8\\x05э')\n buf.write('ȧ\\x02\\u0af8ૹ\\x05ыȦ\\x02ૹૺ\\x05љ')\n buf.write('ȭ\\x02ૺૻ\\x05ыȦ\\x02ૻૼ\\x05е')\n buf.write('ț\\x02ૼ૽\\x05зȜ\\x02૽૾\\x05н')\n buf.write('ȟ\\x02૾૿\\x05ыȦ\\x02૿DŽ\\x03\\x02\\x02')\n buf.write('\\x02\\u0b00ଁ\\x05эȧ\\x02ଁଂ\\x05ёȩ')\n buf.write('\\x02ଂଃ\\x05лȞ\\x02ଃ\\u0b04\\x05нȟ')\n buf.write('\\x02\\u0b04dž\\x03\\x02\\x02\\x02ଅଆ\\x05эȧ\\x02ଆ')\n buf.write('ଇ\\x05ёȩ\\x02ଇଈ\\x05лȞ\\x02ଈ')\n buf.write('ଉ\\x05нȟ\\x02ଉଊ\\x05ыȦ\\x02ଊ')\n buf.write('Lj\\x03\\x02\\x02\\x02ଋଌ\\x05эȧ\\x02ଌ\\u0b0d')\n buf.write('\\x05ёȩ\\x02\\u0b0d\\u0b0e\\x05лȞ\\x02\\u0b0eଏ')\n buf.write('\\x05хȣ\\x02ଏଐ\\x05пȠ\\x02ଐ\\u0b11')\n buf.write('\\x05ѥȳ\\x02\\u0b11NJ\\x03\\x02\\x02\\x02\\u0b12ଓ\\x05э')\n buf.write('ȧ\\x02ଓଔ\\x05ёȩ\\x02ଔକ\\x05я')\n buf.write('Ȩ\\x02କଖ\\x05ћȮ\\x02ଖଗ\\x05у')\n buf.write('Ȣ\\x02ଗnj\\x03\\x02\\x02\\x02ଘଙ\\x05эȧ')\n buf.write('\\x02ଙଚ\\x05ѝȯ\\x02ଚଛ\\x05ыȦ')\n 
buf.write('\\x02ଛଜ\\x05ћȮ\\x02ଜଝ\\x05хȣ')\n buf.write('\\x02ଝଞ\\x05љȭ\\x02ଞଟ\\x05нȟ')\n buf.write('\\x02ଟଠ\\x05ћȮ\\x02ଠǎ\\x03\\x02\\x02\\x02ଡ')\n buf.write('ଢ\\x05яȨ\\x02ଢଣ\\x05еț\\x02ଣ')\n buf.write('ତ\\x05эȧ\\x02ତଥ\\x05нȟ\\x02ଥ')\n buf.write('ǐ\\x03\\x02\\x02\\x02ଦଧ\\x05яȨ\\x02ଧନ')\n buf.write('\\x05еț\\x02ନ\\u0b29\\x05яȨ\\x02\\u0b29ǒ')\n buf.write('\\x03\\x02\\x02\\x02ପଫ\\x05яȨ\\x02ଫବ\\x05е')\n buf.write('ț\\x02ବଭ\\x05ћȮ\\x02ଭମ\\x05ѝ')\n buf.write('ȯ\\x02ମଯ\\x05їȬ\\x02ଯର\\x05е')\n buf.write('ț\\x02ର\\u0b31\\x05ыȦ\\x02\\u0b31ǔ\\x03\\x02\\x02')\n buf.write('\\x02ଲଳ\\x05яȨ\\x02ଳ\\u0b34\\x05еț')\n buf.write('\\x02\\u0b34ଵ\\x05ћȮ\\x02ଵଶ\\x05ѝȯ')\n buf.write('\\x02ଶଷ\\x05їȬ\\x02ଷସ\\x05еț')\n buf.write('\\x02ସହ\\x05ыȦ\\x02ହ\\u0b3a\\x05яȨ')\n buf.write('\\x02\\u0b3aǖ\\x03\\x02\\x02\\x02\\u0b3b଼\\x05яȨ\\x02଼')\n buf.write('ଽ\\x05еț\\x02ଽା\\x05џȰ\\x02ା')\n buf.write('ǘ\\x03\\x02\\x02\\x02ିୀ\\x05яȨ\\x02ୀୁ')\n buf.write('\\x05йȝ\\x02ୁୂ\\x05уȢ\\x02ୂୃ')\n buf.write('\\x05еț\\x02ୃୄ\\x05їȬ\\x02ୄǚ')\n buf.write('\\x03\\x02\\x02\\x02\\u0b45\\u0b46\\x05яȨ\\x02\\u0b46େ\\x05й')\n buf.write('ȝ\\x02େୈ\\x05уȢ\\x02ୈ\\u0b49\\x05е')\n buf.write('ț\\x02\\u0b49\\u0b4a\\x05їȬ\\x02\\u0b4aୋ\\x07a\\x02')\n buf.write('\\x02ୋୌ\\x05йȝ\\x02ୌ୍\\x05љȭ')\n buf.write('\\x02୍ǜ\\x03\\x02\\x02\\x02\\u0b4e\\u0b4f\\x05яȨ\\x02\\u0b4f')\n buf.write('\\u0b50\\x05йȝ\\x02\\u0b50\\u0b51\\x05ыȦ\\x02\\u0b51')\n buf.write('\\u0b52\\x05ёȩ\\x02\\u0b52\\u0b53\\x05зȜ\\x02\\u0b53')\n buf.write('Ǟ\\x03\\x02\\x02\\x02\\u0b54୕\\x05яȨ\\x02୕ୖ')\n buf.write('\\x05нȟ\\x02ୖୗ\\x05љȭ\\x02ୗ\\u0b58')\n buf.write('\\x05ћȮ\\x02\\u0b58\\u0b59\\x05нȟ\\x02\\u0b59\\u0b5a')\n buf.write('\\x05лȞ\\x02\\u0b5aǠ\\x03\\x02\\x02\\x02\\u0b5bଡ଼\\x05я')\n buf.write('Ȩ\\x02ଡ଼ଢ଼\\x05нȟ\\x02ଢ଼\\u0b5e\\x05ѡ')\n buf.write('ȱ\\x02\\u0b5eǢ\\x03\\x02\\x02\\x02ୟୠ\\x05яȨ')\n buf.write('\\x02ୠୡ\\x05ёȩ\\x02ୡǤ\\x03\\x02\\x02\\x02ୢ')\n buf.write('ୣ\\x05яȨ\\x02ୣ\\u0b64\\x05ёȩ\\x02\\u0b64')\n buf.write('\\u0b65\\x05еț\\x02\\u0b65୦\\x05ѝȯ\\x02୦')\n 
buf.write('୧\\x05лȞ\\x02୧୨\\x05хȣ\\x02୨')\n buf.write('୩\\x05ћȮ\\x02୩Ǧ\\x03\\x02\\x02\\x02୪୫')\n buf.write('\\x05яȨ\\x02୫୬\\x05ёȩ\\x02୬୭')\n buf.write('\\x05йȝ\\x02୭୮\\x05еț\\x02୮୯')\n buf.write('\\x05йȝ\\x02୯୰\\x05уȢ\\x02୰ୱ')\n buf.write('\\x05нȟ\\x02ୱǨ\\x03\\x02\\x02\\x02୲୳\\x05я')\n buf.write('Ȩ\\x02୳୴\\x05ёȩ\\x02୴୵\\x05й')\n buf.write('ȝ\\x02୵୶\\x05ёȩ\\x02୶୷\\x05ѓ')\n buf.write('Ȫ\\x02୷\\u0b78\\x05ѥȳ\\x02\\u0b78Ǫ\\x03\\x02\\x02')\n buf.write('\\x02\\u0b79\\u0b7a\\x05яȨ\\x02\\u0b7a\\u0b7b\\x05ёȩ')\n buf.write('\\x02\\u0b7b\\u0b7c\\x05йȝ\\x02\\u0b7c\\u0b7d\\x05ѥȳ')\n buf.write('\\x02\\u0b7d\\u0b7e\\x05йȝ\\x02\\u0b7e\\u0b7f\\x05ыȦ')\n buf.write('\\x02\\u0b7f\\u0b80\\x05нȟ\\x02\\u0b80Ǭ\\x03\\x02\\x02\\x02\\u0b81')\n buf.write('ஂ\\x05яȨ\\x02ஂஃ\\x05ёȩ\\x02ஃ')\n buf.write('\\u0b84\\x05нȟ\\x02\\u0b84அ\\x05яȨ\\x02அ')\n buf.write('ஆ\\x05ћȮ\\x02ஆஇ\\x05хȣ\\x02இ')\n buf.write('ஈ\\x05ћȮ\\x02ஈஉ\\x05ѥȳ\\x02உ')\n buf.write('ஊ\\x05нȟ\\x02ஊ\\u0b8b\\x05љȭ\\x02\\u0b8b')\n buf.write('\\u0b8c\\x05йȝ\\x02\\u0b8c\\u0b8d\\x05еț\\x02\\u0b8d')\n buf.write('எ\\x05ѓȪ\\x02எஏ\\x05хȣ\\x02ஏ')\n buf.write('ஐ\\x05яȨ\\x02ஐ\\u0b91\\x05сȡ\\x02\\u0b91')\n buf.write('Ǯ\\x03\\x02\\x02\\x02ஒஓ\\x05яȨ\\x02ஓஔ')\n buf.write('\\x05ёȩ\\x02ஔக\\x05эȧ\\x02க\\u0b96')\n buf.write('\\x05еț\\x02\\u0b96\\u0b97\\x05ѣȲ\\x02\\u0b97\\u0b98')\n buf.write('\\x05џȰ\\x02\\u0b98ங\\x05еț\\x02ஙச')\n buf.write('\\x05ыȦ\\x02ச\\u0b9b\\x05ѝȯ\\x02\\u0b9bஜ')\n buf.write('\\x05нȟ\\x02ஜǰ\\x03\\x02\\x02\\x02\\u0b9dஞ\\x05я')\n buf.write('Ȩ\\x02ஞட\\x05ёȩ\\x02ட\\u0ba0\\x05э')\n buf.write('ȧ\\x02\\u0ba0\\u0ba1\\x05хȣ\\x02\\u0ba1\\u0ba2\\x05я')\n buf.write('Ȩ\\x02\\u0ba2ண\\x05џȰ\\x02ணத\\x05е')\n buf.write('ț\\x02த\\u0ba5\\x05ыȦ\\x02\\u0ba5\\u0ba6\\x05ѝ')\n buf.write('ȯ\\x02\\u0ba6\\u0ba7\\x05нȟ\\x02\\u0ba7Dz\\x03\\x02\\x02')\n buf.write('\\x02நன\\x05яȨ\\x02னப\\x05ёȩ')\n buf.write('\\x02ப\\u0bab\\x05яȨ\\x02\\u0bab\\u0bac\\x05нȟ')\n buf.write('\\x02\\u0bacǴ\\x03\\x02\\x02\\x02\\u0badம\\x05яȨ\\x02ம')\n 
buf.write('ய\\x05ёȩ\\x02யர\\x05ёȩ\\x02ர')\n buf.write('ற\\x05їȬ\\x02றல\\x05лȞ\\x02ல')\n buf.write('ள\\x05нȟ\\x02ளழ\\x05їȬ\\x02ழ')\n buf.write('Ƕ\\x03\\x02\\x02\\x02வஶ\\x05яȨ\\x02ஶஷ')\n buf.write('\\x05ёȩ\\x02ஷஸ\\x05љȭ\\x02ஸஹ')\n buf.write('\\x05йȝ\\x02ஹ\\u0bba\\x05уȢ\\x02\\u0bba\\u0bbb')\n buf.write('\\x05нȟ\\x02\\u0bbb\\u0bbc\\x05эȧ\\x02\\u0bbc\\u0bbd')\n buf.write('\\x05еț\\x02\\u0bbdா\\x05йȝ\\x02ாி')\n buf.write('\\x05уȢ\\x02ிீ\\x05нȟ\\x02ீு')\n buf.write('\\x05йȝ\\x02ுூ\\x05щȥ\\x02ூǸ')\n buf.write('\\x03\\x02\\x02\\x02\\u0bc3\\u0bc4\\x05яȨ\\x02\\u0bc4\\u0bc5\\x05ё')\n buf.write('ȩ\\x02\\u0bc5ெ\\x05ћȮ\\x02ெǺ\\x03\\x02\\x02')\n buf.write('\\x02ேை\\x05яȨ\\x02ை\\u0bc9\\x05ёȩ')\n buf.write('\\x02\\u0bc9ொ\\x05ѡȱ\\x02ொோ\\x05еț')\n buf.write('\\x02ோௌ\\x05хȣ\\x02ௌ்\\x05ћȮ')\n buf.write('\\x02்Ǽ\\x03\\x02\\x02\\x02\\u0bce\\u0bcf\\x05яȨ\\x02\\u0bcf')\n buf.write('ௐ\\x05ѝȯ\\x02ௐ\\u0bd1\\x05ыȦ\\x02\\u0bd1')\n buf.write('\\u0bd2\\x05ыȦ\\x02\\u0bd2Ǿ\\x03\\x02\\x02\\x02\\u0bd3\\u0bd4')\n buf.write('\\x05яȨ\\x02\\u0bd4\\u0bd5\\x05ѝȯ\\x02\\u0bd5\\u0bd6')\n buf.write('\\x05ыȦ\\x02\\u0bd6ௗ\\x05ыȦ\\x02ௗ\\u0bd8')\n buf.write('\\x05љȭ\\x02\\u0bd8Ȁ\\x03\\x02\\x02\\x02\\u0bd9\\u0bda\\x05я')\n buf.write('Ȩ\\x02\\u0bda\\u0bdb\\x05ѝȯ\\x02\\u0bdb\\u0bdc\\x05э')\n buf.write('ȧ\\x02\\u0bdc\\u0bdd\\x05зȜ\\x02\\u0bdd\\u0bde\\x05н')\n buf.write('ȟ\\x02\\u0bde\\u0bdf\\x05їȬ\\x02\\u0bdfȂ\\x03\\x02\\x02')\n buf.write('\\x02\\u0be0\\u0be1\\x05яȨ\\x02\\u0be1\\u0be2\\x05ѝȯ')\n buf.write('\\x02\\u0be2\\u0be3\\x05эȧ\\x02\\u0be3\\u0be4\\x05нȟ')\n buf.write('\\x02\\u0be4\\u0be5\\x05їȬ\\x02\\u0be5௦\\x05хȣ')\n buf.write('\\x02௦௧\\x05йȝ\\x02௧Ȅ\\x03\\x02\\x02\\x02௨')\n buf.write('௩\\x05яȨ\\x02௩௪\\x05џȰ\\x02௪')\n buf.write('௫\\x05еț\\x02௫௬\\x05їȬ\\x02௬')\n buf.write('௭\\x05йȝ\\x02௭௮\\x05уȢ\\x02௮')\n buf.write('௯\\x05еț\\x02௯௰\\x05їȬ\\x02௰')\n buf.write('௱\\x074\\x02\\x02௱Ȇ\\x03\\x02\\x02\\x02௲௳\\x05ё')\n buf.write('ȩ\\x02௳௴\\x05зȜ\\x02௴௵\\x05ч')\n buf.write('Ȥ\\x02௵௶\\x05нȟ\\x02௶௷\\x05й')\n 
buf.write('ȝ\\x02௷௸\\x05ћȮ\\x02௸Ȉ\\x03\\x02\\x02')\n buf.write('\\x02௹௺\\x05ёȩ\\x02௺\\u0bfb\\x05пȠ')\n buf.write('\\x02\\u0bfbȊ\\x03\\x02\\x02\\x02\\u0bfc\\u0bfd\\x05ёȩ\\x02\\u0bfd')\n buf.write('\\u0bfe\\x05пȠ\\x02\\u0bfe\\u0bff\\x05пȠ\\x02\\u0bff')\n buf.write('Ȍ\\x03\\x02\\x02\\x02ఀఁ\\x05ёȩ\\x02ఁం')\n buf.write('\\x05хȣ\\x02ంః\\x05лȞ\\x02ఃȎ')\n buf.write('\\x03\\x02\\x02\\x02ఄఅ\\x05ёȩ\\x02అఆ\\x05ы')\n buf.write('Ȧ\\x02ఆఇ\\x05лȞ\\x02ఇȐ\\x03\\x02\\x02')\n buf.write('\\x02ఈఉ\\x05ёȩ\\x02ఉఊ\\x05яȨ')\n buf.write('\\x02ఊȒ\\x03\\x02\\x02\\x02ఋఌ\\x05ёȩ\\x02ఌ')\n buf.write('\\u0c0d\\x05яȨ\\x02\\u0c0dఎ\\x05ыȦ\\x02ఎ')\n buf.write('ఏ\\x05ѥȳ\\x02ఏȔ\\x03\\x02\\x02\\x02ఐ\\u0c11')\n buf.write('\\x05ёȩ\\x02\\u0c11ఒ\\x05ѓȪ\\x02ఒఓ')\n buf.write('\\x05нȟ\\x02ఓఔ\\x05яȨ\\x02ఔȖ')\n buf.write('\\x03\\x02\\x02\\x02కఖ\\x05ёȩ\\x02ఖగ\\x05ѓ')\n buf.write('Ȫ\\x02గఘ\\x05ћȮ\\x02ఘఙ\\x05х')\n buf.write('ȣ\\x02ఙచ\\x05ёȩ\\x02చఛ\\x05я')\n buf.write('Ȩ\\x02ఛȘ\\x03\\x02\\x02\\x02జఝ\\x05ёȩ')\n buf.write('\\x02ఝఞ\\x05їȬ\\x02ఞȚ\\x03\\x02\\x02\\x02ట')\n buf.write('ఠ\\x05ёȩ\\x02ఠడ\\x05їȬ\\x02డ')\n buf.write('ఢ\\x05еț\\x02ఢణ\\x05лȞ\\x02ణ')\n buf.write('త\\x05еț\\x02తథ\\x05ћȮ\\x02థ')\n buf.write('ద\\x05еț\\x02దȜ\\x03\\x02\\x02\\x02ధన')\n buf.write('\\x05ёȩ\\x02న\\u0c29\\x05їȬ\\x02\\u0c29ప')\n buf.write('\\x05лȞ\\x02పఫ\\x05нȟ\\x02ఫబ')\n buf.write('\\x05їȬ\\x02బȞ\\x03\\x02\\x02\\x02భమ\\x05ё')\n buf.write('ȩ\\x02మయ\\x05їȬ\\x02యర\\x05л')\n buf.write('Ȟ\\x02రఱ\\x05хȣ\\x02ఱల\\x05я')\n buf.write('Ȩ\\x02లళ\\x05еț\\x02ళఴ\\x05ы')\n buf.write('Ȧ\\x02ఴవ\\x05хȣ\\x02వశ\\x05ћ')\n buf.write('Ȯ\\x02శష\\x05ѥȳ\\x02షȠ\\x03\\x02\\x02')\n buf.write('\\x02సహ\\x05ёȩ\\x02హ\\u0c3a\\x05љȭ')\n buf.write('\\x02\\u0c3a\\u0c3b\\x05нȟ\\x02\\u0c3b఼\\x05їȬ')\n buf.write('\\x02఼ఽ\\x05їȬ\\x02ఽా\\x05ёȩ')\n buf.write('\\x02ాి\\x05їȬ\\x02ిȢ\\x03\\x02\\x02\\x02ీ')\n buf.write('ు\\x05ёȩ\\x02ుూ\\x05ѝȯ\\x02ూ')\n buf.write('ృ\\x05ћȮ\\x02ృȤ\\x03\\x02\\x02\\x02ౄ\\u0c45')\n buf.write('\\x05ёȩ\\x02\\u0c45ె\\x05ѝȯ\\x02ెే')\n 
buf.write('\\x05ћȮ\\x02ేై\\x05нȟ\\x02ై\\u0c49')\n buf.write('\\x05їȬ\\x02\\u0c49Ȧ\\x03\\x02\\x02\\x02ొో\\x05ё')\n buf.write('ȩ\\x02ోౌ\\x05џȰ\\x02ౌ్\\x05н')\n buf.write('ȟ\\x02్\\u0c4e\\x05їȬ\\x02\\u0c4eȨ\\x03\\x02\\x02')\n buf.write('\\x02\\u0c4f\\u0c50\\x05ёȩ\\x02\\u0c50\\u0c51\\x05џȰ')\n buf.write('\\x02\\u0c51\\u0c52\\x05нȟ\\x02\\u0c52\\u0c53\\x05їȬ')\n buf.write('\\x02\\u0c53\\u0c54\\x05їȬ\\x02\\u0c54ౕ\\x05хȣ')\n buf.write('\\x02ౕౖ\\x05лȞ\\x02ౖ\\u0c57\\x05хȣ')\n buf.write('\\x02\\u0c57ౘ\\x05яȨ\\x02ౘౙ\\x05сȡ')\n buf.write('\\x02ౙȪ\\x03\\x02\\x02\\x02ౚ\\u0c5b\\x05ѓȪ\\x02\\u0c5b')\n buf.write('\\u0c5c\\x05еț\\x02\\u0c5cౝ\\x05йȝ\\x02ౝ')\n buf.write('\\u0c5e\\x05щȥ\\x02\\u0c5e\\u0c5f\\x05еț\\x02\\u0c5f')\n buf.write('ౠ\\x05сȡ\\x02ౠౡ\\x05нȟ\\x02ౡ')\n buf.write('Ȭ\\x03\\x02\\x02\\x02ౢౣ\\x05ѓȪ\\x02ౣ\\u0c64')\n buf.write('\\x05еț\\x02\\u0c64\\u0c65\\x05їȬ\\x02\\u0c65౦')\n buf.write('\\x05еț\\x02౦౧\\x05ыȦ\\x02౧౨')\n buf.write('\\x05ыȦ\\x02౨౩\\x05нȟ\\x02౩౪')\n buf.write('\\x05ыȦ\\x02౪౫\\x07a\\x02\\x02౫౬\\x05н')\n buf.write('ȟ\\x02౬౭\\x05яȨ\\x02౭౮\\x05е')\n buf.write('ț\\x02౮౯\\x05зȜ\\x02౯\\u0c70\\x05ы')\n buf.write('Ȧ\\x02\\u0c70\\u0c71\\x05нȟ\\x02\\u0c71Ȯ\\x03\\x02\\x02')\n buf.write('\\x02\\u0c72\\u0c73\\x05ѓȪ\\x02\\u0c73\\u0c74\\x05еț')\n buf.write('\\x02\\u0c74\\u0c75\\x05їȬ\\x02\\u0c75\\u0c76\\x05еț')\n buf.write('\\x02\\u0c76౷\\x05эȧ\\x02౷౸\\x05нȟ')\n buf.write('\\x02౸౹\\x05ћȮ\\x02౹౺\\x05нȟ')\n buf.write('\\x02౺౻\\x05їȬ\\x02౻౼\\x05љȭ')\n buf.write('\\x02౼Ȱ\\x03\\x02\\x02\\x02౽౾\\x05ѓȪ\\x02౾')\n buf.write('౿\\x05еț\\x02౿ಀ\\x05їȬ\\x02ಀ')\n buf.write('ಁ\\x05нȟ\\x02ಁಂ\\x05яȨ\\x02ಂ')\n buf.write('ಃ\\x05ћȮ\\x02ಃȲ\\x03\\x02\\x02\\x02಄ಅ')\n buf.write('\\x05ѓȪ\\x02ಅಆ\\x05еț\\x02ಆಇ')\n buf.write('\\x05їȬ\\x02ಇಈ\\x05ћȮ\\x02ಈಉ')\n buf.write('\\x05хȣ\\x02ಉಊ\\x05ћȮ\\x02ಊಋ')\n buf.write('\\x05хȣ\\x02ಋಌ\\x05ёȩ\\x02ಌ\\u0c8d')\n buf.write('\\x05яȨ\\x02\\u0c8dȴ\\x03\\x02\\x02\\x02ಎಏ\\x05ѓ')\n buf.write('Ȫ\\x02ಏಐ\\x05еț\\x02ಐ\\u0c91\\x05љ')\n 
buf.write('ȭ\\x02\\u0c91ಒ\\x05љȭ\\x02ಒಓ\\x05х')\n buf.write('ȣ\\x02ಓಔ\\x05яȨ\\x02ಔಕ\\x05с')\n buf.write('ȡ\\x02ಕȶ\\x03\\x02\\x02\\x02ಖಗ\\x05ѓȪ')\n buf.write('\\x02ಗಘ\\x05еț\\x02ಘಙ\\x05ћȮ')\n buf.write('\\x02ಙಚ\\x05уȢ\\x02ಚȸ\\x03\\x02\\x02\\x02ಛ')\n buf.write(\"ಜ\\x07'\\x02\\x02ಜಝ\\x05їȬ\\x02ಝಞ\")\n buf.write('\\x05ёȩ\\x02ಞಟ\\x05ѡȱ\\x02ಟಠ')\n buf.write('\\x05ћȮ\\x02ಠಡ\\x05ѥȳ\\x02ಡಢ')\n buf.write('\\x05ѓȪ\\x02ಢಣ\\x05нȟ\\x02ಣȺ')\n buf.write(\"\\x03\\x02\\x02\\x02ತಥ\\x07'\\x02\\x02ಥದ\\x05ћȮ\")\n buf.write('\\x02ದಧ\\x05ѥȳ\\x02ಧನ\\x05ѓȪ')\n buf.write('\\x02ನ\\u0ca9\\x05нȟ\\x02\\u0ca9ȼ\\x03\\x02\\x02\\x02ಪ')\n buf.write('ಫ\\x05ѓȪ\\x02ಫಬ\\x05хȣ\\x02ಬ')\n buf.write('ಭ\\x05ѓȪ\\x02ಭಮ\\x05нȟ\\x02ಮ')\n buf.write('ಯ\\x05ыȦ\\x02ಯರ\\x05хȣ\\x02ರ')\n buf.write('ಱ\\x05яȨ\\x02ಱಲ\\x05нȟ\\x02ಲ')\n buf.write('ಳ\\x05лȞ\\x02ಳȾ\\x03\\x02\\x02\\x02\\u0cb4ವ')\n buf.write('\\x05ѓȪ\\x02ವಶ\\x05хȣ\\x02ಶಷ')\n buf.write('\\x05џȰ\\x02ಷಸ\\x05ёȩ\\x02ಸಹ')\n buf.write('\\x05ћȮ\\x02ಹɀ\\x03\\x02\\x02\\x02\\u0cba\\u0cbb\\x05ѓ')\n buf.write('Ȫ\\x02\\u0cbb಼\\x05ыȦ\\x02಼ಽ\\x05е')\n buf.write('ț\\x02ಽಾ\\x05яȨ\\x02ಾɂ\\x03\\x02\\x02')\n buf.write('\\x02ಿೀ\\x05ѓȪ\\x02ೀು\\x05ыȦ')\n buf.write('\\x02ುೂ\\x05љȭ\\x02ೂೃ\\x07a\\x02\\x02ೃ')\n buf.write('ೄ\\x05хȣ\\x02ೄ\\u0cc5\\x05яȨ\\x02\\u0cc5')\n buf.write('ೆ\\x05ћȮ\\x02ೆೇ\\x05нȟ\\x02ೇ')\n buf.write('ೈ\\x05сȡ\\x02ೈ\\u0cc9\\x05нȟ\\x02\\u0cc9')\n buf.write('ೊ\\x05їȬ\\x02ೊɄ\\x03\\x02\\x02\\x02ೋೌ')\n buf.write('\\x05ѓȪ\\x02ೌ್\\x05ёȩ\\x02್\\u0cce')\n buf.write('\\x05љȭ\\x02\\u0cce\\u0ccf\\x05хȣ\\x02\\u0ccf\\u0cd0')\n buf.write('\\x05ћȮ\\x02\\u0cd0\\u0cd1\\x05хȣ\\x02\\u0cd1\\u0cd2')\n buf.write('\\x05џȰ\\x02\\u0cd2\\u0cd3\\x05нȟ\\x02\\u0cd3Ɇ')\n buf.write('\\x03\\x02\\x02\\x02\\u0cd4ೕ\\x05ѓȪ\\x02ೕೖ\\x05ё')\n buf.write('ȩ\\x02ೖ\\u0cd7\\x05љȭ\\x02\\u0cd7\\u0cd8\\x05х')\n buf.write('ȣ\\x02\\u0cd8\\u0cd9\\x05ћȮ\\x02\\u0cd9\\u0cda\\x05х')\n buf.write('ȣ\\x02\\u0cda\\u0cdb\\x05џȰ\\x02\\u0cdb\\u0cdc\\x05н')\n buf.write('ȟ\\x02\\u0cdcೝ\\x05яȨ\\x02ೝɈ\\x03\\x02\\x02')\n 
buf.write('\\x02ೞ\\u0cdf\\x05ѓȪ\\x02\\u0cdfೠ\\x05їȬ')\n buf.write('\\x02ೠೡ\\x05еț\\x02ೡೢ\\x05сȡ')\n buf.write('\\x02ೢೣ\\x05эȧ\\x02ೣ\\u0ce4\\x05еț')\n buf.write('\\x02\\u0ce4Ɋ\\x03\\x02\\x02\\x02\\u0ce5೦\\x05ѓȪ\\x02೦')\n buf.write('೧\\x05їȬ\\x02೧೨\\x05нȟ\\x02೨')\n buf.write('೩\\x05йȝ\\x02೩೪\\x05нȟ\\x02೪')\n buf.write('೫\\x05лȞ\\x02೫೬\\x05хȣ\\x02೬')\n buf.write('೭\\x05яȨ\\x02೭೮\\x05сȡ\\x02೮')\n buf.write('Ɍ\\x03\\x02\\x02\\x02೯\\u0cf0\\x05ѓȪ\\x02\\u0cf0ೱ')\n buf.write('\\x05їȬ\\x02ೱೲ\\x05нȟ\\x02ೲ\\u0cf3')\n buf.write('\\x05йȝ\\x02\\u0cf3\\u0cf4\\x05хȣ\\x02\\u0cf4\\u0cf5')\n buf.write('\\x05љȭ\\x02\\u0cf5\\u0cf6\\x05хȣ\\x02\\u0cf6\\u0cf7')\n buf.write('\\x05ёȩ\\x02\\u0cf7\\u0cf8\\x05яȨ\\x02\\u0cf8Ɏ')\n buf.write('\\x03\\x02\\x02\\x02\\u0cf9\\u0cfa\\x05ѓȪ\\x02\\u0cfa\\u0cfb\\x05ї')\n buf.write('Ȭ\\x02\\u0cfb\\u0cfc\\x05нȟ\\x02\\u0cfc\\u0cfd\\x05љ')\n buf.write('ȭ\\x02\\u0cfd\\u0cfe\\x05нȟ\\x02\\u0cfe\\u0cff\\x05я')\n buf.write('Ȩ\\x02\\u0cffഀ\\x05ћȮ\\x02ഀɐ\\x03\\x02\\x02')\n buf.write('\\x02ഁം\\x05ѓȪ\\x02ംഃ\\x05їȬ')\n buf.write('\\x02ഃഄ\\x05хȣ\\x02ഄഅ\\x05ёȩ')\n buf.write('\\x02അആ\\x05їȬ\\x02ആɒ\\x03\\x02\\x02\\x02ഇ')\n buf.write('ഈ\\x05ѓȪ\\x02ഈഉ\\x05їȬ\\x02ഉ')\n buf.write('ഊ\\x05ёȩ\\x02ഊഋ\\x05йȝ\\x02ഋ')\n buf.write('ഌ\\x05нȟ\\x02ഌ\\u0d0d\\x05лȞ\\x02\\u0d0d')\n buf.write('എ\\x05ѝȯ\\x02എഏ\\x05їȬ\\x02ഏ')\n buf.write('ഐ\\x05нȟ\\x02ഐɔ\\x03\\x02\\x02\\x02\\u0d11ഒ')\n buf.write('\\x05їȬ\\x02ഒഓ\\x05еț\\x02ഓഔ')\n buf.write('\\x05хȣ\\x02ഔക\\x05љȭ\\x02കഖ')\n buf.write('\\x05нȟ\\x02ഖɖ\\x03\\x02\\x02\\x02ഗഘ\\x05ї')\n buf.write('Ȭ\\x02ഘങ\\x05еț\\x02ങച\\x05я')\n buf.write('Ȩ\\x02ചഛ\\x05сȡ\\x02ഛജ\\x05н')\n buf.write('ȟ\\x02ജɘ\\x03\\x02\\x02\\x02ഝഞ\\x05їȬ')\n buf.write('\\x02ഞട\\x05еț\\x02ടഠ\\x05ѡȱ')\n buf.write('\\x02ഠɚ\\x03\\x02\\x02\\x02ഡഢ\\x05їȬ\\x02ഢ')\n buf.write('ണ\\x05нȟ\\x02ണത\\x05еț\\x02ത')\n buf.write('ഥ\\x05лȞ\\x02ഥɜ\\x03\\x02\\x02\\x02ദധ')\n buf.write('\\x05їȬ\\x02ധന\\x05нȟ\\x02നഩ')\n buf.write('\\x05еț\\x02ഩപ\\x05ыȦ\\x02പɞ')\n 
buf.write('\\x03\\x02\\x02\\x02ഫബ\\x05їȬ\\x02ബഭ\\x05н')\n buf.write('ȟ\\x02ഭമ\\x05йȝ\\x02മയ\\x05ё')\n buf.write('ȩ\\x02യര\\x05їȬ\\x02രറ\\x05л')\n buf.write('Ȟ\\x02റɠ\\x03\\x02\\x02\\x02ലള\\x05їȬ')\n buf.write('\\x02ളഴ\\x05нȟ\\x02ഴവ\\x05пȠ')\n buf.write('\\x02വɢ\\x03\\x02\\x02\\x02ശഷ\\x05їȬ\\x02ഷ')\n buf.write('സ\\x05нȟ\\x02സഹ\\x05пȠ\\x02ഹ')\n buf.write('ഺ\\x05нȟ\\x02ഺ഻\\x05їȬ\\x02഻')\n buf.write('഼\\x05нȟ\\x02഼ഽ\\x05яȨ\\x02ഽ')\n buf.write('ാ\\x05йȝ\\x02ാി\\x05нȟ\\x02ി')\n buf.write('ɤ\\x03\\x02\\x02\\x02ീു\\x05їȬ\\x02ുൂ')\n buf.write('\\x05нȟ\\x02ൂൃ\\x05пȠ\\x02ൃൄ')\n buf.write('\\x05нȟ\\x02ൄ\\u0d45\\x05їȬ\\x02\\u0d45െ')\n buf.write('\\x05нȟ\\x02െേ\\x05яȨ\\x02േൈ')\n buf.write('\\x05йȝ\\x02ൈ\\u0d49\\x05хȣ\\x02\\u0d49ൊ')\n buf.write('\\x05яȨ\\x02ൊോ\\x05сȡ\\x02ോɦ')\n buf.write('\\x03\\x02\\x02\\x02ൌ്\\x05їȬ\\x02്ൎ\\x05н')\n buf.write('ȟ\\x02ൎ൏\\x05чȤ\\x02൏\\u0d50\\x05н')\n buf.write('ȟ\\x02\\u0d50\\u0d51\\x05йȝ\\x02\\u0d51\\u0d52\\x05ћ')\n buf.write('Ȯ\\x02\\u0d52ɨ\\x03\\x02\\x02\\x02\\u0d53ൔ\\x05їȬ')\n buf.write('\\x02ൔൕ\\x05нȟ\\x02ൕൖ\\x05ыȦ')\n buf.write('\\x02ൖൗ\\x05хȣ\\x02ൗ൘\\x05нȟ')\n buf.write('\\x02൘൙\\x05љȭ\\x02൙൚\\x07a\\x02\\x02൚')\n buf.write('൛\\x05ёȩ\\x02൛൜\\x05яȨ\\x02൜')\n buf.write('ɪ\\x03\\x02\\x02\\x02൝൞\\x05їȬ\\x02൞ൟ')\n buf.write('\\x05нȟ\\x02ൟൠ\\x05яȨ\\x02ൠൡ')\n buf.write('\\x05еț\\x02ൡൢ\\x05эȧ\\x02ൢൣ')\n buf.write('\\x05нȟ\\x02ൣɬ\\x03\\x02\\x02\\x02\\u0d64\\u0d65\\x05ї')\n buf.write('Ȭ\\x02\\u0d65൦\\x05нȟ\\x02൦൧\\x05ѓ')\n buf.write('Ȫ\\x02൧൨\\x05ыȦ\\x02൨൩\\x05е')\n buf.write('ț\\x02൩൪\\x05йȝ\\x02൪൫\\x05н')\n buf.write('ȟ\\x02൫ɮ\\x03\\x02\\x02\\x02൬൭\\x05їȬ')\n buf.write('\\x02൭൮\\x05нȟ\\x02൮൯\\x05љȭ')\n buf.write('\\x02൯൰\\x05ѓȪ\\x02൰൱\\x05нȟ')\n buf.write('\\x02൱൲\\x05йȝ\\x02൲൳\\x05ћȮ')\n buf.write('\\x02൳ɰ\\x03\\x02\\x02\\x02൴൵\\x05їȬ\\x02൵')\n buf.write('൶\\x05нȟ\\x02൶൷\\x05љȭ\\x02൷')\n buf.write('൸\\x05ћȮ\\x02൸൹\\x05їȬ\\x02൹')\n buf.write('ൺ\\x05хȣ\\x02ൺൻ\\x05йȝ\\x02ൻ')\n buf.write('ർ\\x05ћȮ\\x02ർൽ\\x07a\\x02\\x02ൽൾ')\n 
buf.write('\\x05їȬ\\x02ൾൿ\\x05нȟ\\x02ൿ\\u0d80')\n buf.write('\\x05пȠ\\x02\\u0d80ඁ\\x05нȟ\\x02ඁං')\n buf.write('\\x05їȬ\\x02ංඃ\\x05нȟ\\x02ඃ\\u0d84')\n buf.write('\\x05яȨ\\x02\\u0d84අ\\x05йȝ\\x02අආ')\n buf.write('\\x05нȟ\\x02ආඇ\\x05љȭ\\x02ඇɲ')\n buf.write('\\x03\\x02\\x02\\x02ඈඉ\\x05їȬ\\x02ඉඊ\\x05н')\n buf.write('ȟ\\x02ඊඋ\\x05љȭ\\x02උඌ\\x05ѝ')\n buf.write('ȯ\\x02ඌඍ\\x05ыȦ\\x02ඍඎ\\x05ћ')\n buf.write('Ȯ\\x02ඎɴ\\x03\\x02\\x02\\x02ඏඐ\\x05їȬ')\n buf.write('\\x02ඐඑ\\x05нȟ\\x02එඒ\\x05љȭ')\n buf.write('\\x02ඒඓ\\x05ѝȯ\\x02ඓඔ\\x05ыȦ')\n buf.write('\\x02ඔඕ\\x05ћȮ\\x02ඕඖ\\x07a\\x02\\x02ඖ')\n buf.write('\\u0d97\\x05йȝ\\x02\\u0d97\\u0d98\\x05еț\\x02\\u0d98')\n buf.write('\\u0d99\\x05йȝ\\x02\\u0d99ක\\x05уȢ\\x02ක')\n buf.write('ඛ\\x05нȟ\\x02ඛɶ\\x03\\x02\\x02\\x02ගඝ')\n buf.write('\\x05їȬ\\x02ඝඞ\\x05нȟ\\x02ඞඟ')\n buf.write('\\x05ћȮ\\x02ඟච\\x05ѝȯ\\x02චඡ')\n buf.write('\\x05їȬ\\x02ඡජ\\x05яȨ\\x02ජɸ')\n buf.write('\\x03\\x02\\x02\\x02ඣඤ\\x05їȬ\\x02ඤඥ\\x05н')\n buf.write('ȟ\\x02ඥඦ\\x05ћȮ\\x02ඦට\\x05ѝ')\n buf.write('ȯ\\x02ටඨ\\x05їȬ\\x02ඨඩ\\x05я')\n buf.write('Ȩ\\x02ඩඪ\\x05хȣ\\x02ඪණ\\x05я')\n buf.write('Ȩ\\x02ණඬ\\x05сȡ\\x02ඬɺ\\x03\\x02\\x02')\n buf.write('\\x02තථ\\x05їȬ\\x02ථද\\x05нȟ')\n buf.write('\\x02දධ\\x05ѝȯ\\x02ධන\\x05љȭ')\n buf.write('\\x02න\\u0db2\\x05нȟ\\x02\\u0db2ɼ\\x03\\x02\\x02\\x02ඳ')\n buf.write('ප\\x05їȬ\\x02පඵ\\x05нȟ\\x02ඵ')\n buf.write('බ\\x05џȰ\\x02බභ\\x05нȟ\\x02භ')\n buf.write('ම\\x05їȬ\\x02මඹ\\x05љȭ\\x02ඹ')\n buf.write('ය\\x05нȟ\\x02යɾ\\x03\\x02\\x02\\x02ර\\u0dbc')\n buf.write('\\x05їȬ\\x02\\u0dbcල\\x05нȟ\\x02ල\\u0dbe')\n buf.write('\\x05џȰ\\x02\\u0dbe\\u0dbf\\x05ёȩ\\x02\\u0dbfව')\n buf.write('\\x05щȥ\\x02වශ\\x05нȟ\\x02ශʀ')\n buf.write('\\x03\\x02\\x02\\x02ෂස\\x05їȬ\\x02සහ\\x05х')\n buf.write('ȣ\\x02හළ\\x05сȡ\\x02ළෆ\\x05у')\n buf.write('Ȣ\\x02ෆ\\u0dc7\\x05ћȮ\\x02\\u0dc7ʂ\\x03\\x02\\x02')\n buf.write('\\x02\\u0dc8\\u0dc9\\x05їȬ\\x02\\u0dc9්\\x05ёȩ')\n buf.write('\\x02්\\u0dcb\\x05ыȦ\\x02\\u0dcb\\u0dcc\\x05ыȦ')\n 
buf.write('\\x02\\u0dcc\\u0dcd\\x05зȜ\\x02\\u0dcd\\u0dce\\x05еț')\n buf.write('\\x02\\u0dceා\\x05йȝ\\x02ාැ\\x05щȥ')\n buf.write('\\x02ැʄ\\x03\\x02\\x02\\x02ෑි\\x05їȬ\\x02ි')\n buf.write('ී\\x05ёȩ\\x02ීු\\x05ыȦ\\x02ු')\n buf.write('\\u0dd5\\x05ыȦ\\x02\\u0dd5ූ\\x05ѝȯ\\x02ූ')\n buf.write('\\u0dd7\\x05ѓȪ\\x02\\u0dd7ʆ\\x03\\x02\\x02\\x02ෘෙ')\n buf.write('\\x05їȬ\\x02ෙේ\\x05ёȩ\\x02ේෛ')\n buf.write('\\x05ѡȱ\\x02ෛʈ\\x03\\x02\\x02\\x02ොෝ\\x05ї')\n buf.write('Ȭ\\x02ෝෞ\\x05ёȩ\\x02ෞෟ\\x05ѡ')\n buf.write('ȱ\\x02ෟ\\u0de0\\x05хȣ\\x02\\u0de0\\u0de1\\x05л')\n buf.write('Ȟ\\x02\\u0de1ʊ\\x03\\x02\\x02\\x02\\u0de2\\u0de3\\x05їȬ')\n buf.write('\\x02\\u0de3\\u0de4\\x05ёȩ\\x02\\u0de4\\u0de5\\x05ѡȱ')\n buf.write('\\x02\\u0de5෦\\x05љȭ\\x02෦ʌ\\x03\\x02\\x02\\x02෧')\n buf.write('෨\\x05їȬ\\x02෨෩\\x05ѝȯ\\x02෩')\n buf.write('෪\\x05ыȦ\\x02෪෫\\x05нȟ\\x02෫')\n buf.write('෬\\x05љȭ\\x02෬ʎ\\x03\\x02\\x02\\x02෭෮')\n buf.write('\\x05љȭ\\x02෮෯\\x05еț\\x02෯\\u0df0')\n buf.write('\\x05эȧ\\x02\\u0df0\\u0df1\\x05ѓȪ\\x02\\u0df1ෲ')\n buf.write('\\x05ыȦ\\x02ෲෳ\\x05нȟ\\x02ෳʐ')\n buf.write('\\x03\\x02\\x02\\x02෴\\u0df5\\x05љȭ\\x02\\u0df5\\u0df6\\x05е')\n buf.write('ț\\x02\\u0df6\\u0df7\\x05џȰ\\x02\\u0df7\\u0df8\\x05н')\n buf.write('ȟ\\x02\\u0df8ʒ\\x03\\x02\\x02\\x02\\u0df9\\u0dfa\\x05љȭ')\n buf.write('\\x02\\u0dfa\\u0dfb\\x05еț\\x02\\u0dfb\\u0dfc\\x05џȰ')\n buf.write('\\x02\\u0dfc\\u0dfd\\x05нȟ\\x02\\u0dfd\\u0dfe\\x05ѓȪ')\n buf.write('\\x02\\u0dfe\\u0dff\\x05ёȩ\\x02\\u0dff\\u0e00\\x05хȣ')\n buf.write('\\x02\\u0e00ก\\x05яȨ\\x02กข\\x05ћȮ')\n buf.write('\\x02ขʔ\\x03\\x02\\x02\\x02ฃค\\x05љȭ\\x02ค')\n buf.write('ฅ\\x05йȝ\\x02ฅฆ\\x05уȢ\\x02ฆ')\n buf.write('ง\\x05нȟ\\x02งจ\\x05эȧ\\x02จ')\n buf.write('ฉ\\x05еț\\x02ฉʖ\\x03\\x02\\x02\\x02ชซ')\n buf.write('\\x05љȭ\\x02ซฌ\\x05йȝ\\x02ฌญ')\n buf.write('\\x05уȢ\\x02ญฎ\\x05нȟ\\x02ฎฏ')\n buf.write('\\x05эȧ\\x02ฏฐ\\x05еț\\x02ฐฑ')\n buf.write('\\x05йȝ\\x02ฑฒ\\x05уȢ\\x02ฒณ')\n buf.write('\\x05нȟ\\x02ณด\\x05йȝ\\x02ดต')\n buf.write('\\x05щȥ\\x02ตʘ\\x03\\x02\\x02\\x02ถท\\x05љ')\n 
buf.write('ȭ\\x02ทธ\\x05йȝ\\x02ธน\\x05я')\n buf.write('Ȩ\\x02นʚ\\x03\\x02\\x02\\x02บป\\x05љȭ')\n buf.write('\\x02ปผ\\x05нȟ\\x02ผฝ\\x05еț')\n buf.write('\\x02ฝพ\\x05їȬ\\x02พฟ\\x05йȝ')\n buf.write('\\x02ฟภ\\x05уȢ\\x02ภʜ\\x03\\x02\\x02\\x02ม')\n buf.write('ย\\x05љȭ\\x02ยร\\x05нȟ\\x02ร')\n buf.write('ฤ\\x05йȝ\\x02ฤล\\x05ёȩ\\x02ล')\n buf.write('ฦ\\x05яȨ\\x02ฦว\\x05лȞ\\x02ว')\n buf.write('ʞ\\x03\\x02\\x02\\x02ศษ\\x05љȭ\\x02ษส')\n buf.write('\\x05нȟ\\x02สห\\x05нȟ\\x02หฬ')\n buf.write('\\x05лȞ\\x02ฬʠ\\x03\\x02\\x02\\x02อฮ\\x05љ')\n buf.write('ȭ\\x02ฮฯ\\x05нȟ\\x02ฯะ\\x05с')\n buf.write('ȡ\\x02ะั\\x05эȧ\\x02ัา\\x05н')\n buf.write('ȟ\\x02าำ\\x05яȨ\\x02ำิ\\x05ћ')\n buf.write('Ȯ\\x02ิʢ\\x03\\x02\\x02\\x02ีึ\\x05љȭ')\n buf.write('\\x02ึื\\x05нȟ\\x02ืุ\\x05ыȦ')\n buf.write('\\x02ุู\\x05нȟ\\x02ฺู\\x05йȝ')\n buf.write('\\x02ฺ\\u0e3b\\x05ћȮ\\x02\\u0e3bʤ\\x03\\x02\\x02\\x02\\u0e3c')\n buf.write('\\u0e3d\\x05љȭ\\x02\\u0e3d\\u0e3e\\x05нȟ\\x02\\u0e3e')\n buf.write('฿\\x05ыȦ\\x02฿เ\\x05пȠ\\x02เ')\n buf.write('ʦ\\x03\\x02\\x02\\x02แโ\\x05љȭ\\x02โใ')\n buf.write('\\x05нȟ\\x02ใไ\\x05ѕȫ\\x02ไๅ')\n buf.write('\\x05ѝȯ\\x02ๅๆ\\x05нȟ\\x02ๆ็')\n buf.write('\\x05яȨ\\x02็่\\x05йȝ\\x02่้')\n buf.write('\\x05нȟ\\x02้ʨ\\x03\\x02\\x02\\x02๊๋\\x05љ')\n buf.write('ȭ\\x02๋์\\x05нȟ\\x02์ํ\\x05ѕ')\n buf.write('ȫ\\x02ํ๎\\x05ѝȯ\\x02๎๏\\x05н')\n buf.write('ȟ\\x02๏๐\\x05яȨ\\x02๐๑\\x05ћ')\n buf.write('Ȯ\\x02๑๒\\x05хȣ\\x02๒๓\\x05е')\n buf.write('ț\\x02๓๔\\x05ыȦ\\x02๔ʪ\\x03\\x02\\x02')\n buf.write('\\x02๕๖\\x05љȭ\\x02๖๗\\x05нȟ')\n buf.write('\\x02๗๘\\x05їȬ\\x02๘๙\\x05хȣ')\n buf.write('\\x02๙๚\\x05еț\\x02๚๛\\x05ыȦ')\n buf.write('\\x02๛\\u0e5c\\x05хȣ\\x02\\u0e5c\\u0e5d\\x05ѧȴ')\n buf.write('\\x02\\u0e5d\\u0e5e\\x05еț\\x02\\u0e5e\\u0e5f\\x05зȜ')\n buf.write('\\x02\\u0e5f\\u0e60\\x05ыȦ\\x02\\u0e60\\u0e61\\x05нȟ')\n buf.write('\\x02\\u0e61ʬ\\x03\\x02\\x02\\x02\\u0e62\\u0e63\\x05љȭ\\x02\\u0e63')\n buf.write('\\u0e64\\x05нȟ\\x02\\u0e64\\u0e65\\x05їȬ\\x02\\u0e65')\n 
buf.write('\\u0e66\\x05хȣ\\x02\\u0e66\\u0e67\\x05еț\\x02\\u0e67')\n buf.write('\\u0e68\\x05ыȦ\\x02\\u0e68\\u0e69\\x05ыȦ\\x02\\u0e69')\n buf.write('\\u0e6a\\x05ѥȳ\\x02\\u0e6a\\u0e6b\\x07a\\x02\\x02\\u0e6b\\u0e6c')\n buf.write('\\x05їȬ\\x02\\u0e6c\\u0e6d\\x05нȟ\\x02\\u0e6d\\u0e6e')\n buf.write('\\x05ѝȯ\\x02\\u0e6e\\u0e6f\\x05љȭ\\x02\\u0e6f\\u0e70')\n buf.write('\\x05еț\\x02\\u0e70\\u0e71\\x05зȜ\\x02\\u0e71\\u0e72')\n buf.write('\\x05ыȦ\\x02\\u0e72\\u0e73\\x05нȟ\\x02\\u0e73ʮ')\n buf.write('\\x03\\x02\\x02\\x02\\u0e74\\u0e75\\x05љȭ\\x02\\u0e75\\u0e76\\x05н')\n buf.write('ȟ\\x02\\u0e76\\u0e77\\x05їȬ\\x02\\u0e77\\u0e78\\x05џ')\n buf.write('Ȱ\\x02\\u0e78\\u0e79\\x05нȟ\\x02\\u0e79\\u0e7a\\x05ї')\n buf.write('Ȭ\\x02\\u0e7a\\u0e7b\\x05нȟ\\x02\\u0e7b\\u0e7c\\x05ї')\n buf.write('Ȭ\\x02\\u0e7c\\u0e7d\\x05їȬ\\x02\\u0e7d\\u0e7e\\x05ё')\n buf.write('ȩ\\x02\\u0e7e\\u0e7f\\x05їȬ\\x02\\u0e7fʰ\\x03\\x02\\x02')\n buf.write('\\x02\\u0e80ກ\\x05љȭ\\x02ກຂ\\x05нȟ')\n buf.write('\\x02ຂ\\u0e83\\x05љȭ\\x02\\u0e83ຄ\\x05љȭ')\n buf.write('\\x02ຄ\\u0e85\\x05хȣ\\x02\\u0e85ຆ\\x05ёȩ')\n buf.write('\\x02ຆງ\\x05яȨ\\x02ງຈ\\x05ћȮ')\n buf.write('\\x02ຈຉ\\x05хȣ\\x02ຉຊ\\x05эȧ')\n buf.write('\\x02ຊ\\u0e8b\\x05нȟ\\x02\\u0e8bຌ\\x05ѧȴ')\n buf.write('\\x02ຌຍ\\x05ёȩ\\x02ຍຎ\\x05яȨ')\n buf.write('\\x02ຎຏ\\x05нȟ\\x02ຏʲ\\x03\\x02\\x02\\x02ຐ')\n buf.write('ຑ\\x05љȭ\\x02ຑຒ\\x05нȟ\\x02ຒ')\n buf.write('ຓ\\x05ћȮ\\x02ຓʴ\\x03\\x02\\x02\\x02ດຕ')\n buf.write('\\x05љȭ\\x02ຕຖ\\x05нȟ\\x02ຖທ')\n buf.write('\\x05ћȮ\\x02ທຘ\\x05љȭ\\x02ຘʶ')\n buf.write('\\x03\\x02\\x02\\x02ນບ\\x05љȭ\\x02ບປ\\x05н')\n buf.write('ȟ\\x02ປຜ\\x05ћȮ\\x02ຜຝ\\x05ћ')\n buf.write('Ȯ\\x02ຝພ\\x05хȣ\\x02ພຟ\\x05я')\n buf.write('Ȩ\\x02ຟຠ\\x05сȡ\\x02ຠມ\\x05љ')\n buf.write('ȭ\\x02ມʸ\\x03\\x02\\x02\\x02ຢຣ\\x05љȭ')\n buf.write('\\x02ຣ\\u0ea4\\x05уȢ\\x02\\u0ea4ລ\\x05еț')\n buf.write('\\x02ລ\\u0ea6\\x05їȬ\\x02\\u0ea6ວ\\x05нȟ')\n buf.write('\\x02ວʺ\\x03\\x02\\x02\\x02ຨຩ\\x05љȭ\\x02ຩ')\n buf.write('ສ\\x05уȢ\\x02ສຫ\\x05ёȩ\\x02ຫ')\n 
buf.write('ຬ\\x05ѡȱ\\x02ຬʼ\\x03\\x02\\x02\\x02ອຮ')\n buf.write('\\x05љȭ\\x02ຮຯ\\x05уȢ\\x02ຯະ')\n buf.write('\\x05ѝȯ\\x02ະັ\\x05ћȮ\\x02ັາ')\n buf.write('\\x05лȞ\\x02າຳ\\x05ёȩ\\x02ຳິ')\n buf.write('\\x05ѡȱ\\x02ິີ\\x05яȨ\\x02ີʾ')\n buf.write('\\x03\\x02\\x02\\x02ຶື\\x05љȭ\\x02ືຸ\\x05х')\n buf.write('ȣ\\x02ຸູ\\x05зȜ\\x02຺ູ\\x05ы')\n buf.write('Ȧ\\x02຺ົ\\x05хȣ\\x02ົຼ\\x05я')\n buf.write('Ȩ\\x02ຼຽ\\x05сȡ\\x02ຽ\\u0ebe\\x05љ')\n buf.write('ȭ\\x02\\u0ebeˀ\\x03\\x02\\x02\\x02\\u0ebfເ\\x05љȭ')\n buf.write('\\x02ເແ\\x05хȣ\\x02ແໂ\\x05сȡ')\n buf.write('\\x02ໂໃ\\x05яȨ\\x02ໃໄ\\x05ћȮ')\n buf.write('\\x02ໄ\\u0ec5\\x05ѥȳ\\x02\\u0ec5ໆ\\x05ѓȪ')\n buf.write('\\x02ໆ\\u0ec7\\x05нȟ\\x02\\u0ec7˂\\x03\\x02\\x02\\x02່')\n buf.write('້\\x05љȭ\\x02້໊\\x05хȣ\\x02໊')\n buf.write('໋\\x05эȧ\\x02໋໌\\x05ѓȪ\\x02໌')\n buf.write('ໍ\\x05ыȦ\\x02ໍ\\u0ece\\x05нȟ\\x02\\u0ece')\n buf.write('\\u0ecf\\x07a\\x02\\x02\\u0ecf໐\\x05хȣ\\x02໐໑')\n buf.write('\\x05яȨ\\x02໑໒\\x05ћȮ\\x02໒໓')\n buf.write('\\x05нȟ\\x02໓໔\\x05сȡ\\x02໔໕')\n buf.write('\\x05нȟ\\x02໕໖\\x05їȬ\\x02໖˄')\n buf.write('\\x03\\x02\\x02\\x02໗໘\\x05љȭ\\x02໘໙\\x05х')\n buf.write('ȣ\\x02໙\\u0eda\\x05яȨ\\x02\\u0eda\\u0edb\\x05с')\n buf.write('ȡ\\x02\\u0edbໜ\\x05ыȦ\\x02ໜໝ\\x05н')\n buf.write('ȟ\\x02ໝˆ\\x03\\x02\\x02\\x02ໞໟ\\x05љȭ')\n buf.write('\\x02ໟ\\u0ee0\\x05хȣ\\x02\\u0ee0\\u0ee1\\x05ѧȴ')\n buf.write('\\x02\\u0ee1\\u0ee2\\x05нȟ\\x02\\u0ee2ˈ\\x03\\x02\\x02\\x02\\u0ee3')\n buf.write('\\u0ee4\\x05љȭ\\x02\\u0ee4\\u0ee5\\x05щȥ\\x02\\u0ee5')\n buf.write('\\u0ee6\\x05хȣ\\x02\\u0ee6\\u0ee7\\x05ѓȪ\\x02\\u0ee7')\n buf.write('ˊ\\x03\\x02\\x02\\x02\\u0ee8\\u0ee9\\x05љȭ\\x02\\u0ee9\\u0eea')\n buf.write('\\x05эȧ\\x02\\u0eea\\u0eeb\\x05еț\\x02\\u0eeb\\u0eec')\n buf.write('\\x05ыȦ\\x02\\u0eec\\u0eed\\x05ыȦ\\x02\\u0eed\\u0eee')\n buf.write('\\x05хȣ\\x02\\u0eee\\u0eef\\x05яȨ\\x02\\u0eef\\u0ef0')\n buf.write('\\x05ћȮ\\x02\\u0ef0ˌ\\x03\\x02\\x02\\x02\\u0ef1\\u0ef2\\x05љ')\n buf.write('ȭ\\x02\\u0ef2\\u0ef3\\x05яȨ\\x02\\u0ef3\\u0ef4\\x05е')\n 
buf.write('ț\\x02\\u0ef4\\u0ef5\\x05ѓȪ\\x02\\u0ef5\\u0ef6\\x05љ')\n buf.write('ȭ\\x02\\u0ef6\\u0ef7\\x05уȢ\\x02\\u0ef7\\u0ef8\\x05ё')\n buf.write('ȩ\\x02\\u0ef8\\u0ef9\\x05ћȮ\\x02\\u0ef9ˎ\\x03\\x02\\x02')\n buf.write('\\x02\\u0efa\\u0efb\\x05љȭ\\x02\\u0efb\\u0efc\\x05ёȩ')\n buf.write('\\x02\\u0efc\\u0efd\\x05эȧ\\x02\\u0efd\\u0efe\\x05нȟ')\n buf.write('\\x02\\u0efeː\\x03\\x02\\x02\\x02\\u0effༀ\\x05љȭ\\x02ༀ')\n buf.write('༁\\x05ѓȪ\\x02༁༂\\x05нȟ\\x02༂')\n buf.write('༃\\x05йȝ\\x02༃༄\\x05хȣ\\x02༄')\n buf.write('༅\\x05пȠ\\x02༅༆\\x05хȣ\\x02༆')\n buf.write('༇\\x05йȝ\\x02༇༈\\x05еț\\x02༈')\n buf.write('༉\\x05ћȮ\\x02༉༊\\x05хȣ\\x02༊')\n buf.write('་\\x05ёȩ\\x02་༌\\x05яȨ\\x02༌')\n buf.write('˒\\x03\\x02\\x02\\x02།༎\\x05љȭ\\x02༎༏')\n buf.write('\\x05ѕȫ\\x02༏༐\\x05ыȦ\\x02༐༑')\n buf.write('\\x05лȞ\\x02༑༒\\x05еț\\x02༒༓')\n buf.write('\\x05ћȮ\\x02༓༔\\x05еț\\x02༔˔')\n buf.write('\\x03\\x02\\x02\\x02༕༖\\x05љȭ\\x02༖༗\\x05ѕ')\n buf.write('ȫ\\x02༗༘\\x05ыȦ\\x02༘༙\\x05н')\n buf.write('ȟ\\x02༙༚\\x05їȬ\\x02༚༛\\x05ї')\n buf.write('Ȭ\\x02༛༜\\x05ёȩ\\x02༜༝\\x05ї')\n buf.write('Ȭ\\x02༝˖\\x03\\x02\\x02\\x02༞༟\\x05љȭ')\n buf.write('\\x02༟༠\\x05ћȮ\\x02༠༡\\x05еț')\n buf.write('\\x02༡༢\\x05яȨ\\x02༢༣\\x05лȞ')\n buf.write('\\x02༣༤\\x05еț\\x02༤༥\\x05ыȦ')\n buf.write('\\x02༥༦\\x05ёȩ\\x02༦༧\\x05яȨ')\n buf.write('\\x02༧༨\\x05нȟ\\x02༨˘\\x03\\x02\\x02\\x02༩')\n buf.write('༪\\x05љȭ\\x02༪༫\\x05ћȮ\\x02༫')\n buf.write('༬\\x05еț\\x02༬༭\\x05їȬ\\x02༭')\n buf.write('༮\\x05ћȮ\\x02༮˚\\x03\\x02\\x02\\x02༯༰')\n buf.write('\\x05љȭ\\x02༰༱\\x05ћȮ\\x02༱༲')\n buf.write('\\x05еț\\x02༲༳\\x05їȬ\\x02༳༴')\n buf.write('\\x05ћȮ\\x02༴༵\\x05ѝȯ\\x02༵༶')\n buf.write('\\x05ѓȪ\\x02༶˜\\x03\\x02\\x02\\x02༷༸\\x05љ')\n buf.write('ȭ\\x02༸༹\\x05ћȮ\\x02༹༺\\x05е')\n buf.write('ț\\x02༺༻\\x05ћȮ\\x02༻༼\\x05н')\n buf.write('ȟ\\x02༼༽\\x05эȧ\\x02༽༾\\x05н')\n buf.write('ȟ\\x02༾༿\\x05яȨ\\x02༿ཀ\\x05ћ')\n buf.write('Ȯ\\x02ཀ˞\\x03\\x02\\x02\\x02ཁག\\x05љȭ')\n buf.write('\\x02གགྷ\\x05ћȮ\\x02གྷང\\x05еț')\n buf.write('\\x02ངཅ\\x05ћȮ\\x02ཅཆ\\x05нȟ')\n 
buf.write('\\x02ཆཇ\\x05эȧ\\x02ཇ\\u0f48\\x05нȟ')\n buf.write('\\x02\\u0f48ཉ\\x05яȨ\\x02ཉཊ\\x05ћȮ')\n buf.write('\\x02ཊཋ\\x07a\\x02\\x02ཋཌ\\x05хȣ\\x02ཌ')\n buf.write('ཌྷ\\x05лȞ\\x02ཌྷˠ\\x03\\x02\\x02\\x02ཎཏ')\n buf.write('\\x05љȭ\\x02ཏཐ\\x05ћȮ\\x02ཐད')\n buf.write('\\x05еț\\x02དདྷ\\x05ћȮ\\x02དྷན')\n buf.write('\\x05хȣ\\x02ནཔ\\x05йȝ\\x02པˢ')\n buf.write('\\x03\\x02\\x02\\x02ཕབ\\x05љȭ\\x02བབྷ\\x05ћ')\n buf.write('Ȯ\\x02བྷམ\\x05еț\\x02མཙ\\x05ћ')\n buf.write('Ȯ\\x02ཙཚ\\x05хȣ\\x02ཚཛ\\x05љ')\n buf.write('ȭ\\x02ཛཛྷ\\x05ћȮ\\x02ཛྷཝ\\x05х')\n buf.write('ȣ\\x02ཝཞ\\x05йȝ\\x02ཞཟ\\x05љ')\n buf.write('ȭ\\x02ཟˤ\\x03\\x02\\x02\\x02འཡ\\x05љȭ')\n buf.write('\\x02ཡར\\x05ћȮ\\x02རལ\\x05їȬ')\n buf.write('\\x02ལཤ\\x05хȣ\\x02ཤཥ\\x05яȨ')\n buf.write('\\x02ཥས\\x05сȡ\\x02ས˦\\x03\\x02\\x02\\x02ཧ')\n buf.write('ཨ\\x05љȭ\\x02ཨཀྵ\\x05ѝȯ\\x02ཀྵ')\n buf.write('ཪ\\x05зȜ\\x02ཪཫ\\x05эȧ\\x02ཫ')\n buf.write('ཬ\\x05ѝȯ\\x02ཬ\\u0f6d\\x05ыȦ\\x02\\u0f6d')\n buf.write('\\u0f6e\\x05ћȮ\\x02\\u0f6e\\u0f6f\\x05хȣ\\x02\\u0f6f')\n buf.write('\\u0f70\\x05љȭ\\x02\\u0f70ཱ\\x05нȟ\\x02ཱ')\n buf.write('ི\\x05ћȮ\\x02ི˨\\x03\\x02\\x02\\x02ཱིུ')\n buf.write('\\x05љȭ\\x02ཱུུ\\x05ѝȯ\\x02ཱུྲྀ')\n buf.write('\\x05зȜ\\x02ྲྀཷ\\x05ѓȪ\\x02ཷླྀ')\n buf.write('\\x05еț\\x02ླྀཹ\\x05їȬ\\x02ཹེ')\n buf.write('\\x05ћȮ\\x02ེཻ\\x05хȣ\\x02ཻོ')\n buf.write('\\x05ћȮ\\x02ོཽ\\x05хȣ\\x02ཽཾ')\n buf.write('\\x05ёȩ\\x02ཾཿ\\x05яȨ\\x02ཿ˪')\n buf.write('\\x03\\x02\\x02\\x02ཱྀྀ\\x05љȭ\\x02ཱྀྂ\\x05ѝ')\n buf.write('ȯ\\x02ྂྃ\\x05зȜ\\x02྄ྃ\\x05љ')\n buf.write('ȭ\\x02྄྅\\x05ћȮ\\x02྅྆\\x05х')\n buf.write('ȣ\\x02྆྇\\x05ћȮ\\x02྇ྈ\\x05ѝ')\n buf.write('ȯ\\x02ྈྉ\\x05ћȮ\\x02ྉྊ\\x05е')\n buf.write('ț\\x02ྊྋ\\x05зȜ\\x02ྋྌ\\x05ы')\n buf.write('Ȧ\\x02ྌྍ\\x05нȟ\\x02ྍˬ\\x03\\x02\\x02')\n buf.write('\\x02ྎྏ\\x05љȭ\\x02ྏྐ\\x05ѝȯ')\n buf.write('\\x02ྐྑ\\x05зȜ\\x02ྑྒ\\x05ћȮ')\n buf.write('\\x02ྒྒྷ\\x05ѥȳ\\x02ྒྷྔ\\x05ѓȪ')\n buf.write('\\x02ྔྕ\\x05нȟ\\x02ྕˮ\\x03\\x02\\x02\\x02ྖ')\n buf.write('ྗ\\x05љȭ\\x02ྗ\\u0f98\\x05ѝȯ\\x02\\u0f98')\n buf.write('ྙ\\x05йȝ\\x02ྙྚ\\x05йȝ\\x02ྚ')\n 
buf.write('ྛ\\x05нȟ\\x02ྛྜ\\x05љȭ\\x02ྜ')\n buf.write('ྜྷ\\x05љȭ\\x02ྜྷ˰\\x03\\x02\\x02\\x02ྞྟ')\n buf.write('\\x05љȭ\\x02ྟྠ\\x05ѝȯ\\x02ྠྡ')\n buf.write('\\x05љȭ\\x02ྡྡྷ\\x05ѓȪ\\x02ྡྷྣ')\n buf.write('\\x05нȟ\\x02ྣྤ\\x05яȨ\\x02ྤྥ')\n buf.write('\\x05лȞ\\x02ྥ˲\\x03\\x02\\x02\\x02ྦྦྷ\\x05ћ')\n buf.write('Ȯ\\x02ྦྷྨ\\x05еț\\x02ྨྩ\\x05з')\n buf.write('Ȝ\\x02ྩྪ\\x05ыȦ\\x02ྪྫ\\x05н')\n buf.write('ȟ\\x02ྫ˴\\x03\\x02\\x02\\x02ྫྷྭ\\x05ћȮ')\n buf.write('\\x02ྭྮ\\x05уȢ\\x02ྮྯ\\x05нȟ')\n buf.write('\\x02ྯ˶\\x03\\x02\\x02\\x02ྰྱ\\x05ћȮ\\x02ྱ')\n buf.write('ྲ\\x05уȢ\\x02ྲླ\\x05нȟ\\x02ླ')\n buf.write('ྴ\\x05яȨ\\x02ྴ˸\\x03\\x02\\x02\\x02ྵྶ')\n buf.write('\\x05ћȮ\\x02ྶྷ\\x05хȣ\\x02ྷྸ')\n buf.write('\\x05эȧ\\x02ྸྐྵ\\x05нȟ\\x02ྐྵ˺')\n buf.write('\\x03\\x02\\x02\\x02ྺྻ\\x05ћȮ\\x02ྻྼ\\x05х')\n buf.write('ȣ\\x02ྼ\\u0fbd\\x05эȧ\\x02\\u0fbd྾\\x05н')\n buf.write('ȟ\\x02྾྿\\x05љȭ\\x02྿࿀\\x05ћ')\n buf.write('Ȯ\\x02࿀࿁\\x05еț\\x02࿁࿂\\x05э')\n buf.write('ȧ\\x02࿂࿃\\x05ѓȪ\\x02࿃˼\\x03\\x02\\x02')\n buf.write('\\x02࿄࿅\\x05ћȮ\\x02࿅࿆\\x05хȣ')\n buf.write('\\x02࿆࿇\\x05эȧ\\x02࿇࿈\\x05нȟ')\n buf.write('\\x02࿈࿉\\x05љȭ\\x02࿉࿊\\x05ћȮ')\n buf.write('\\x02࿊࿋\\x05еț\\x02࿋࿌\\x05эȧ')\n buf.write('\\x02࿌\\u0fcd\\x05ѓȪ\\x02\\u0fcd࿎\\x07a\\x02\\x02࿎')\n buf.write('࿏\\x05ыȦ\\x02࿏࿐\\x05ћȮ\\x02࿐')\n buf.write('࿑\\x05ѧȴ\\x02࿑࿒\\x07a\\x02\\x02࿒࿓')\n buf.write('\\x05ѝȯ\\x02࿓࿔\\x05яȨ\\x02࿔࿕')\n buf.write('\\x05йȝ\\x02࿕࿖\\x05ёȩ\\x02࿖࿗')\n buf.write('\\x05яȨ\\x02࿗࿘\\x05љȭ\\x02࿘࿙')\n buf.write('\\x05ћȮ\\x02࿙࿚\\x05їȬ\\x02࿚\\u0fdb')\n buf.write('\\x05еț\\x02\\u0fdb\\u0fdc\\x05хȣ\\x02\\u0fdc\\u0fdd')\n buf.write('\\x05яȨ\\x02\\u0fdd\\u0fde\\x05нȟ\\x02\\u0fde\\u0fdf')\n buf.write('\\x05лȞ\\x02\\u0fdf˾\\x03\\x02\\x02\\x02\\u0fe0\\u0fe1\\x05ћ')\n buf.write('Ȯ\\x02\\u0fe1\\u0fe2\\x05хȣ\\x02\\u0fe2\\u0fe3\\x05э')\n buf.write('ȧ\\x02\\u0fe3\\u0fe4\\x05нȟ\\x02\\u0fe4\\u0fe5\\x05љ')\n buf.write('ȭ\\x02\\u0fe5\\u0fe6\\x05ћȮ\\x02\\u0fe6\\u0fe7\\x05е')\n buf.write('ț\\x02\\u0fe7\\u0fe8\\x05эȧ\\x02\\u0fe8\\u0fe9\\x05ѓ')\n 
buf.write('Ȫ\\x02\\u0fe9\\u0fea\\x07a\\x02\\x02\\u0fea\\u0feb\\x05ћȮ')\n buf.write('\\x02\\u0feb\\u0fec\\x05ѧȴ\\x02\\u0fec\\u0fed\\x07a\\x02\\x02\\u0fed')\n buf.write('\\u0fee\\x05ѝȯ\\x02\\u0fee\\u0fef\\x05яȨ\\x02\\u0fef')\n buf.write('\\u0ff0\\x05йȝ\\x02\\u0ff0\\u0ff1\\x05ёȩ\\x02\\u0ff1')\n buf.write('\\u0ff2\\x05яȨ\\x02\\u0ff2\\u0ff3\\x05љȭ\\x02\\u0ff3')\n buf.write('\\u0ff4\\x05ћȮ\\x02\\u0ff4\\u0ff5\\x05їȬ\\x02\\u0ff5')\n buf.write('\\u0ff6\\x05еț\\x02\\u0ff6\\u0ff7\\x05хȣ\\x02\\u0ff7')\n buf.write('\\u0ff8\\x05яȨ\\x02\\u0ff8\\u0ff9\\x05нȟ\\x02\\u0ff9')\n buf.write('\\u0ffa\\x05лȞ\\x02\\u0ffà\\x03\\x02\\x02\\x02\\u0ffb\\u0ffc')\n buf.write('\\x05ћȮ\\x02\\u0ffc\\u0ffd\\x05хȣ\\x02\\u0ffd\\u0ffe')\n buf.write('\\x05эȧ\\x02\\u0ffe\\u0fff\\x05нȟ\\x02\\u0fffက')\n buf.write('\\x05љȭ\\x02ကခ\\x05ћȮ\\x02ခဂ')\n buf.write('\\x05еț\\x02ဂဃ\\x05эȧ\\x02ဃင')\n buf.write('\\x05ѓȪ\\x02ငစ\\x07a\\x02\\x02စဆ\\x05ѝ')\n buf.write('ȯ\\x02ဆဇ\\x05яȨ\\x02ဇဈ\\x05й')\n buf.write('ȝ\\x02ဈဉ\\x05ёȩ\\x02ဉည\\x05я')\n buf.write('Ȩ\\x02ညဋ\\x05љȭ\\x02ဋဌ\\x05ћ')\n buf.write('Ȯ\\x02ဌဍ\\x05їȬ\\x02ဍဎ\\x05е')\n buf.write('ț\\x02ဎဏ\\x05хȣ\\x02ဏတ\\x05я')\n buf.write('Ȩ\\x02တထ\\x05нȟ\\x02ထဒ\\x05л')\n buf.write('Ȟ\\x02ဒ̂\\x03\\x02\\x02\\x02ဓန\\x05ћȮ')\n buf.write('\\x02နပ\\x05хȣ\\x02ပဖ\\x05эȧ')\n buf.write('\\x02ဖဗ\\x05нȟ\\x02ဗဘ\\x05ѧȴ')\n buf.write('\\x02ဘမ\\x05ёȩ\\x02မယ\\x05яȨ')\n buf.write('\\x02ယရ\\x05нȟ\\x02ရလ\\x07a\\x02\\x02လ')\n buf.write('ဝ\\x05еț\\x02ဝသ\\x05зȜ\\x02သ')\n buf.write('ဟ\\x05зȜ\\x02ဟဠ\\x05їȬ\\x02ဠ')\n buf.write('̄\\x03\\x02\\x02\\x02အဢ\\x05ћȮ\\x02ဢဣ')\n buf.write('\\x05хȣ\\x02ဣဤ\\x05эȧ\\x02ဤဥ')\n buf.write('\\x05нȟ\\x02ဥဦ\\x05ѧȴ\\x02ဦဧ')\n buf.write('\\x05ёȩ\\x02ဧဨ\\x05яȨ\\x02ဨဩ')\n buf.write('\\x05нȟ\\x02ဩဪ\\x07a\\x02\\x02ဪါ\\x05у')\n buf.write('Ȣ\\x02ါာ\\x05ёȩ\\x02ာိ\\x05ѝ')\n buf.write('ȯ\\x02ိီ\\x05їȬ\\x02ီ̆\\x03\\x02\\x02')\n buf.write('\\x02ုူ\\x05ћȮ\\x02ူေ\\x05хȣ')\n buf.write('\\x02ေဲ\\x05эȧ\\x02ဲဳ\\x05нȟ')\n buf.write('\\x02ဳဴ\\x05ѧȴ\\x02ဴဵ\\x05ёȩ')\n 
buf.write('\\x02ဵံ\\x05яȨ\\x02ံ့\\x05нȟ')\n buf.write('\\x02့း\\x07a\\x02\\x02း္\\x05эȧ\\x02္')\n buf.write('်\\x05хȣ\\x02်ျ\\x05яȨ\\x02ျ')\n buf.write('ြ\\x05ѝȯ\\x02ြွ\\x05ћȮ\\x02ွ')\n buf.write('ှ\\x05нȟ\\x02ှ̈\\x03\\x02\\x02\\x02ဿ၀')\n buf.write('\\x05ћȮ\\x02၀၁\\x05хȣ\\x02၁၂')\n buf.write('\\x05эȧ\\x02၂၃\\x05нȟ\\x02၃၄')\n buf.write('\\x05ѧȴ\\x02၄၅\\x05ёȩ\\x02၅၆')\n buf.write('\\x05яȨ\\x02၆၇\\x05нȟ\\x02၇၈')\n buf.write('\\x07a\\x02\\x02၈၉\\x05їȬ\\x02၉၊\\x05н')\n buf.write('ȟ\\x02၊။\\x05сȡ\\x02။၌\\x05х')\n buf.write('ȣ\\x02၌၍\\x05ёȩ\\x02၍၎\\x05я')\n buf.write('Ȩ\\x02၎̊\\x03\\x02\\x02\\x02၏ၐ\\x05ћȮ')\n buf.write('\\x02ၐၑ\\x05ёȩ\\x02ၑ̌\\x03\\x02\\x02\\x02ၒ')\n buf.write('ၓ\\x05ћȮ\\x02ၓၔ\\x05їȬ\\x02ၔ')\n buf.write('ၕ\\x05еț\\x02ၕၖ\\x05хȣ\\x02ၖ')\n buf.write('ၗ\\x05ыȦ\\x02ၗၘ\\x05хȣ\\x02ၘ')\n buf.write('ၙ\\x05яȨ\\x02ၙၚ\\x05сȡ\\x02ၚ')\n buf.write('̎\\x03\\x02\\x02\\x02ၛၜ\\x05ћȮ\\x02ၜၝ')\n buf.write('\\x05їȬ\\x02ၝၞ\\x05еț\\x02ၞၟ')\n buf.write('\\x05яȨ\\x02ၟၠ\\x05љȭ\\x02ၠၡ')\n buf.write('\\x05еț\\x02ၡၢ\\x05йȝ\\x02ၢၣ')\n buf.write('\\x05ћȮ\\x02ၣၤ\\x05хȣ\\x02ၤၥ')\n buf.write('\\x05ёȩ\\x02ၥၦ\\x05яȨ\\x02ၦ̐')\n buf.write('\\x03\\x02\\x02\\x02ၧၨ\\x05ћȮ\\x02ၨၩ\\x05ї')\n buf.write('Ȭ\\x02ၩၪ\\x05еț\\x02ၪၫ\\x05я')\n buf.write('Ȩ\\x02ၫၬ\\x05љȭ\\x02ၬၭ\\x05ы')\n buf.write('Ȧ\\x02ၭၮ\\x05еț\\x02ၮၯ\\x05ћ')\n buf.write('Ȯ\\x02ၯၰ\\x05нȟ\\x02ၰ̒\\x03\\x02\\x02')\n buf.write('\\x02ၱၲ\\x05ћȮ\\x02ၲၳ\\x05їȬ')\n buf.write('\\x02ၳၴ\\x05нȟ\\x02ၴၵ\\x05еț')\n buf.write('\\x02ၵၶ\\x05ћȮ\\x02ၶ̔\\x03\\x02\\x02\\x02ၷ')\n buf.write('ၸ\\x05ћȮ\\x02ၸၹ\\x05їȬ\\x02ၹ')\n buf.write('ၺ\\x05хȣ\\x02ၺၻ\\x05сȡ\\x02ၻ')\n buf.write('ၼ\\x05сȡ\\x02ၼၽ\\x05нȟ\\x02ၽ')\n buf.write('ၾ\\x05їȬ\\x02ၾ̖\\x03\\x02\\x02\\x02ၿႀ')\n buf.write('\\x05ћȮ\\x02ႀႁ\\x05їȬ\\x02ႁႂ')\n buf.write('\\x05хȣ\\x02ႂႃ\\x05эȧ\\x02ႃ̘')\n buf.write('\\x03\\x02\\x02\\x02ႄႅ\\x05ћȮ\\x02ႅႆ\\x05ї')\n buf.write('Ȭ\\x02ႆႇ\\x05ѝȯ\\x02ႇႈ\\x05н')\n buf.write('ȟ\\x02ႈ̚\\x03\\x02\\x02\\x02ႉႊ\\x05ћȮ')\n buf.write('\\x02ႊႋ\\x05їȬ\\x02ႋႌ\\x05ѝȯ')\n 
buf.write('\\x02ႌႍ\\x05яȨ\\x02ႍႎ\\x05йȝ')\n buf.write('\\x02ႎႏ\\x05еț\\x02ႏ႐\\x05ћȮ')\n buf.write('\\x02႐႑\\x05нȟ\\x02႑̜\\x03\\x02\\x02\\x02႒')\n buf.write('႓\\x05ћȮ\\x02႓႔\\x05ѥȳ\\x02႔')\n buf.write('႕\\x05ѓȪ\\x02႕႖\\x05нȟ\\x02႖')\n buf.write('̞\\x03\\x02\\x02\\x02႗႘\\x05ѝȯ\\x02႘႙')\n buf.write('\\x05яȨ\\x02႙ႚ\\x05зȜ\\x02ႚႛ')\n buf.write('\\x05ёȩ\\x02ႛႜ\\x05ѝȯ\\x02ႜႝ')\n buf.write('\\x05яȨ\\x02ႝ႞\\x05лȞ\\x02႞႟')\n buf.write('\\x05нȟ\\x02႟Ⴀ\\x05лȞ\\x02Ⴀ̠')\n buf.write('\\x03\\x02\\x02\\x02ႡႢ\\x05ѝȯ\\x02ႢႣ\\x05я')\n buf.write('Ȩ\\x02ႣႤ\\x05лȞ\\x02ႤႥ\\x05н')\n buf.write('ȟ\\x02ႥႦ\\x05їȬ\\x02Ⴆ̢\\x03\\x02\\x02')\n buf.write('\\x02ႧႨ\\x05ѝȯ\\x02ႨႩ\\x05яȨ')\n buf.write('\\x02ႩႪ\\x05хȣ\\x02ႪႫ\\x05ёȩ')\n buf.write('\\x02ႫႬ\\x05яȨ\\x02Ⴌ̤\\x03\\x02\\x02\\x02Ⴍ')\n buf.write('Ⴎ\\x05ѝȯ\\x02ႮႯ\\x05яȨ\\x02Ⴏ')\n buf.write('Ⴐ\\x05хȣ\\x02ႰႱ\\x05ѕȫ\\x02Ⴑ')\n buf.write('Ⴒ\\x05ѝȯ\\x02ႲႳ\\x05нȟ\\x02Ⴓ')\n buf.write('̦\\x03\\x02\\x02\\x02ႴႵ\\x05ѝȯ\\x02ႵႶ')\n buf.write('\\x05яȨ\\x02ႶႷ\\x05ыȦ\\x02ႷႸ')\n buf.write('\\x05хȣ\\x02ႸႹ\\x05эȧ\\x02ႹႺ')\n buf.write('\\x05хȣ\\x02ႺႻ\\x05ћȮ\\x02ႻႼ')\n buf.write('\\x05нȟ\\x02ႼႽ\\x05лȞ\\x02Ⴝ̨')\n buf.write('\\x03\\x02\\x02\\x02ႾႿ\\x05ѝȯ\\x02ႿჀ\\x05я')\n buf.write('Ȩ\\x02ჀჁ\\x05ѓȪ\\x02ჁჂ\\x05х')\n buf.write('ȣ\\x02ჂჃ\\x05џȰ\\x02ჃჄ\\x05ё')\n buf.write('ȩ\\x02ჄჅ\\x05ћȮ\\x02Ⴥ̪\\x03\\x02\\x02')\n buf.write('\\x02\\u10c6Ⴧ\\x05ѝȯ\\x02Ⴧ\\u10c8\\x05яȨ')\n buf.write('\\x02\\u10c8\\u10c9\\x05ћȮ\\x02\\u10c9\\u10ca\\x05хȣ')\n buf.write('\\x02\\u10ca\\u10cb\\x05ыȦ\\x02\\u10cb̬\\x03\\x02\\x02\\x02\\u10cc')\n buf.write('Ⴭ\\x05ѝȯ\\x02Ⴭ\\u10ce\\x05ѓȪ\\x02\\u10ce')\n buf.write('\\u10cf\\x05лȞ\\x02\\u10cfა\\x05еț\\x02ა')\n buf.write('ბ\\x05ћȮ\\x02ბგ\\x05нȟ\\x02გ')\n buf.write('̮\\x03\\x02\\x02\\x02დე\\x05ѝȯ\\x02ევ')\n buf.write('\\x05ѓȪ\\x02ვზ\\x05лȞ\\x02ზთ')\n buf.write('\\x05еț\\x02თი\\x05ћȮ\\x02იკ')\n buf.write('\\x05нȟ\\x02კლ\\x05лȞ\\x02ლ̰')\n buf.write('\\x03\\x02\\x02\\x02მნ\\x05ѝȯ\\x02ნო\\x05ѓ')\n buf.write('Ȫ\\x02ოპ\\x05љȭ\\x02პჟ\\x05н')\n 
buf.write('ȟ\\x02ჟრ\\x05їȬ\\x02რს\\x05ћ')\n buf.write('Ȯ\\x02ს̲\\x03\\x02\\x02\\x02ტუ\\x05ѝȯ')\n buf.write('\\x02უფ\\x05їȬ\\x02ფქ\\x05ёȩ')\n buf.write('\\x02ქღ\\x05ѡȱ\\x02ღყ\\x05хȣ')\n buf.write('\\x02ყშ\\x05лȞ\\x02შ̴\\x03\\x02\\x02\\x02ჩ')\n buf.write('ც\\x05ѝȯ\\x02ცძ\\x05љȭ\\x02ძ')\n buf.write('წ\\x05нȟ\\x02წ̶\\x03\\x02\\x02\\x02ჭხ')\n buf.write('\\x05ѝȯ\\x02ხჯ\\x05љȭ\\x02ჯჰ')\n buf.write('\\x05хȣ\\x02ჰჱ\\x05яȨ\\x02ჱჲ')\n buf.write('\\x05сȡ\\x02ჲ̸\\x03\\x02\\x02\\x02ჳჴ\\x05џ')\n buf.write('Ȱ\\x02ჴჵ\\x05еț\\x02ჵჶ\\x05ы')\n buf.write('Ȧ\\x02ჶჷ\\x05хȣ\\x02ჷჸ\\x05л')\n buf.write('Ȟ\\x02ჸჹ\\x05еț\\x02ჹჺ\\x05ћ')\n buf.write('Ȯ\\x02ჺ჻\\x05нȟ\\x02჻̺\\x03\\x02\\x02')\n buf.write('\\x02ჼჽ\\x05џȰ\\x02ჽჾ\\x05еț')\n buf.write('\\x02ჾჿ\\x05ыȦ\\x02ჿᄀ\\x05ѝȯ')\n buf.write('\\x02ᄀᄁ\\x05нȟ\\x02ᄁ̼\\x03\\x02\\x02\\x02ᄂ')\n buf.write('ᄃ\\x05џȰ\\x02ᄃᄄ\\x05еț\\x02ᄄ')\n buf.write('ᄅ\\x05ыȦ\\x02ᄅᄆ\\x05ѝȯ\\x02ᄆ')\n buf.write('ᄇ\\x05нȟ\\x02ᄇᄈ\\x05љȭ\\x02ᄈ')\n buf.write('̾\\x03\\x02\\x02\\x02ᄉᄊ\\x05џȰ\\x02ᄊᄋ')\n buf.write('\\x05еț\\x02ᄋᄌ\\x05їȬ\\x02ᄌᄍ')\n buf.write('\\x05йȝ\\x02ᄍᄎ\\x05уȢ\\x02ᄎᄏ')\n buf.write('\\x05еț\\x02ᄏᄐ\\x05їȬ\\x02ᄐ̀')\n buf.write('\\x03\\x02\\x02\\x02ᄑᄒ\\x05џȰ\\x02ᄒᄓ\\x05е')\n buf.write('ț\\x02ᄓᄔ\\x05їȬ\\x02ᄔᄕ\\x05й')\n buf.write('ȝ\\x02ᄕᄖ\\x05уȢ\\x02ᄖᄗ\\x05е')\n buf.write('ț\\x02ᄗᄘ\\x05їȬ\\x02ᄘᄙ\\x074')\n buf.write('\\x02\\x02ᄙ͂\\x03\\x02\\x02\\x02ᄚᄛ\\x05џȰ\\x02ᄛ')\n buf.write('ᄜ\\x05еț\\x02ᄜᄝ\\x05їȬ\\x02ᄝ')\n buf.write('ᄞ\\x05хȣ\\x02ᄞᄟ\\x05еț\\x02ᄟ')\n buf.write('ᄠ\\x05зȜ\\x02ᄠᄡ\\x05ыȦ\\x02ᄡ')\n buf.write('ᄢ\\x05нȟ\\x02ᄢ̈́\\x03\\x02\\x02\\x02ᄣᄤ')\n buf.write('\\x05џȰ\\x02ᄤᄥ\\x05еț\\x02ᄥᄦ')\n buf.write('\\x05їȬ\\x02ᄦᄧ\\x05їȬ\\x02ᄧᄨ')\n buf.write('\\x05еț\\x02ᄨᄩ\\x05ѥȳ\\x02ᄩ͆')\n buf.write('\\x03\\x02\\x02\\x02ᄪᄫ\\x05џȰ\\x02ᄫᄬ\\x05е')\n buf.write('ț\\x02ᄬᄭ\\x05їȬ\\x02ᄭᄮ\\x05ѥ')\n buf.write('ȳ\\x02ᄮᄯ\\x05хȣ\\x02ᄯᄰ\\x05я')\n buf.write('Ȩ\\x02ᄰᄱ\\x05сȡ\\x02ᄱ͈\\x03\\x02\\x02')\n buf.write('\\x02ᄲᄳ\\x05џȰ\\x02ᄳᄴ\\x05нȟ')\n buf.write('\\x02ᄴᄵ\\x05їȬ\\x02ᄵᄶ\\x05љȭ')\n 
buf.write('\\x02ᄶᄷ\\x05хȣ\\x02ᄷᄸ\\x05ёȩ')\n buf.write('\\x02ᄸᄹ\\x05яȨ\\x02ᄹ͊\\x03\\x02\\x02\\x02ᄺ')\n buf.write('ᄻ\\x05џȰ\\x02ᄻᄼ\\x05нȟ\\x02ᄼ')\n buf.write('ᄽ\\x05їȬ\\x02ᄽᄾ\\x05љȭ\\x02ᄾ')\n buf.write('ᄿ\\x05хȣ\\x02ᄿᅀ\\x05ёȩ\\x02ᅀ')\n buf.write('ᅁ\\x05яȨ\\x02ᅁᅂ\\x05љȭ\\x02ᅂ')\n buf.write('͌\\x03\\x02\\x02\\x02ᅃᅄ\\x05ѡȱ\\x02ᅄᅅ')\n buf.write('\\x05еț\\x02ᅅᅆ\\x05хȣ\\x02ᅆᅇ')\n buf.write('\\x05ћȮ\\x02ᅇ͎\\x03\\x02\\x02\\x02ᅈᅉ\\x05ѡ')\n buf.write('ȱ\\x02ᅉᅊ\\x05еț\\x02ᅊᅋ\\x05ї')\n buf.write('Ȭ\\x02ᅋᅌ\\x05яȨ\\x02ᅌᅍ\\x05х')\n buf.write('ȣ\\x02ᅍᅎ\\x05яȨ\\x02ᅎᅏ\\x05с')\n buf.write('ȡ\\x02ᅏ͐\\x03\\x02\\x02\\x02ᅐᅑ\\x05ѡȱ')\n buf.write('\\x02ᅑᅒ\\x05нȟ\\x02ᅒᅓ\\x05ыȦ')\n buf.write('\\x02ᅓᅔ\\x05ыȦ\\x02ᅔᅕ\\x05пȠ')\n buf.write('\\x02ᅕᅖ\\x05ёȩ\\x02ᅖᅗ\\x05їȬ')\n buf.write('\\x02ᅗᅘ\\x05эȧ\\x02ᅘᅙ\\x05нȟ')\n buf.write('\\x02ᅙᅚ\\x05лȞ\\x02ᅚ͒\\x03\\x02\\x02\\x02ᅛ')\n buf.write('ᅜ\\x05ѡȱ\\x02ᅜᅝ\\x05уȢ\\x02ᅝ')\n buf.write('ᅞ\\x05нȟ\\x02ᅞᅟ\\x05яȨ\\x02ᅟ')\n buf.write('͔\\x03\\x02\\x02\\x02ᅠᅡ\\x05ѡȱ\\x02ᅡᅢ')\n buf.write('\\x05уȢ\\x02ᅢᅣ\\x05нȟ\\x02ᅣᅤ')\n buf.write('\\x05яȨ\\x02ᅤᅥ\\x05нȟ\\x02ᅥᅦ')\n buf.write('\\x05џȰ\\x02ᅦᅧ\\x05нȟ\\x02ᅧᅨ')\n buf.write('\\x05їȬ\\x02ᅨ͖\\x03\\x02\\x02\\x02ᅩᅪ\\x05ѡ')\n buf.write('ȱ\\x02ᅪᅫ\\x05уȢ\\x02ᅫᅬ\\x05н')\n buf.write('ȟ\\x02ᅬᅭ\\x05їȬ\\x02ᅭᅮ\\x05н')\n buf.write('ȟ\\x02ᅮ͘\\x03\\x02\\x02\\x02ᅯᅰ\\x05ѡȱ')\n buf.write('\\x02ᅰᅱ\\x05уȢ\\x02ᅱᅲ\\x05хȣ')\n buf.write('\\x02ᅲᅳ\\x05ыȦ\\x02ᅳᅴ\\x05нȟ')\n buf.write('\\x02ᅴ͚\\x03\\x02\\x02\\x02ᅵᅶ\\x05ѡȱ\\x02ᅶ')\n buf.write('ᅷ\\x05хȣ\\x02ᅷᅸ\\x05ћȮ\\x02ᅸ')\n buf.write('ᅹ\\x05уȢ\\x02ᅹ͜\\x03\\x02\\x02\\x02ᅺᅻ')\n buf.write('\\x05ѡȱ\\x02ᅻᅼ\\x05хȣ\\x02ᅼᅽ')\n buf.write('\\x05ћȮ\\x02ᅽᅾ\\x05уȢ\\x02ᅾᅿ')\n buf.write('\\x05хȣ\\x02ᅿᆀ\\x05яȨ\\x02ᆀ͞')\n buf.write('\\x03\\x02\\x02\\x02ᆁᆂ\\x05ѡȱ\\x02ᆂᆃ\\x05ё')\n buf.write('ȩ\\x02ᆃᆄ\\x05їȬ\\x02ᆄᆅ\\x05щ')\n buf.write('ȥ\\x02ᆅ͠\\x03\\x02\\x02\\x02ᆆᆇ\\x05ѡȱ')\n buf.write('\\x02ᆇᆈ\\x05їȬ\\x02ᆈᆉ\\x05хȣ')\n buf.write('\\x02ᆉᆊ\\x05ћȮ\\x02ᆊᆋ\\x05нȟ')\n buf.write('\\x02ᆋ͢\\x03\\x02\\x02\\x02ᆌᆍ\\x05ѣȲ\\x02ᆍ')\n 
buf.write('ᆎ\\x05эȧ\\x02ᆎᆏ\\x05ыȦ\\x02ᆏ')\n buf.write('ͤ\\x03\\x02\\x02\\x02ᆐᆑ\\x05ѣȲ\\x02ᆑᆒ')\n buf.write('\\x05эȧ\\x02ᆒᆓ\\x05ыȦ\\x02ᆓᆔ')\n buf.write('\\x05еț\\x02ᆔᆕ\\x05сȡ\\x02ᆕᆖ')\n buf.write('\\x05сȡ\\x02ᆖͦ\\x03\\x02\\x02\\x02ᆗᆘ\\x05ѣ')\n buf.write('Ȳ\\x02ᆘᆙ\\x05эȧ\\x02ᆙᆚ\\x05ы')\n buf.write('Ȧ\\x02ᆚᆛ\\x05еț\\x02ᆛᆜ\\x05ћ')\n buf.write('Ȯ\\x02ᆜᆝ\\x05ћȮ\\x02ᆝᆞ\\x05ї')\n buf.write('Ȭ\\x02ᆞᆟ\\x05хȣ\\x02ᆟᆠ\\x05з')\n buf.write('Ȝ\\x02ᆠᆡ\\x05ѝȯ\\x02ᆡᆢ\\x05ћ')\n buf.write('Ȯ\\x02ᆢᆣ\\x05нȟ\\x02ᆣᆤ\\x05љ')\n buf.write('ȭ\\x02ᆤͨ\\x03\\x02\\x02\\x02ᆥᆦ\\x05ѣȲ')\n buf.write('\\x02ᆦᆧ\\x05эȧ\\x02ᆧᆨ\\x05ыȦ')\n buf.write('\\x02ᆨᆩ\\x05йȝ\\x02ᆩᆪ\\x05еț')\n buf.write('\\x02ᆪᆫ\\x05љȭ\\x02ᆫᆬ\\x05ћȮ')\n buf.write('\\x02ᆬͪ\\x03\\x02\\x02\\x02ᆭᆮ\\x05ѣȲ\\x02ᆮ')\n buf.write('ᆯ\\x05эȧ\\x02ᆯᆰ\\x05ыȦ\\x02ᆰ')\n buf.write('ᆱ\\x05йȝ\\x02ᆱᆲ\\x05ёȩ\\x02ᆲ')\n buf.write('ᆳ\\x05ыȦ\\x02ᆳᆴ\\x05еț\\x02ᆴ')\n buf.write('ᆵ\\x05ћȮ\\x02ᆵᆶ\\x05ћȮ\\x02ᆶ')\n buf.write('ᆷ\\x05џȰ\\x02ᆷᆸ\\x05еț\\x02ᆸ')\n buf.write('ᆹ\\x05ыȦ\\x02ᆹͬ\\x03\\x02\\x02\\x02ᆺᆻ')\n buf.write('\\x05ѣȲ\\x02ᆻᆼ\\x05эȧ\\x02ᆼᆽ')\n buf.write('\\x05ыȦ\\x02ᆽᆾ\\x05нȟ\\x02ᆾᆿ')\n buf.write('\\x05ыȦ\\x02ᆿᇀ\\x05нȟ\\x02ᇀᇁ')\n buf.write('\\x05эȧ\\x02ᇁᇂ\\x05нȟ\\x02ᇂᇃ')\n buf.write('\\x05яȨ\\x02ᇃᇄ\\x05ћȮ\\x02ᇄͮ')\n buf.write('\\x03\\x02\\x02\\x02ᇅᇆ\\x05ѣȲ\\x02ᇆᇇ\\x05э')\n buf.write('ȧ\\x02ᇇᇈ\\x05ыȦ\\x02ᇈᇉ\\x05н')\n buf.write('ȟ\\x02ᇉᇊ\\x05ѣȲ\\x02ᇊᇋ\\x05х')\n buf.write('ȣ\\x02ᇋᇌ\\x05љȭ\\x02ᇌᇍ\\x05ћ')\n buf.write('Ȯ\\x02ᇍᇎ\\x05љȭ\\x02ᇎͰ\\x03\\x02\\x02')\n buf.write('\\x02ᇏᇐ\\x05ѣȲ\\x02ᇐᇑ\\x05эȧ')\n buf.write('\\x02ᇑᇒ\\x05ыȦ\\x02ᇒᇓ\\x05пȠ')\n buf.write('\\x02ᇓᇔ\\x05ёȩ\\x02ᇔᇕ\\x05їȬ')\n buf.write('\\x02ᇕᇖ\\x05нȟ\\x02ᇖᇗ\\x05љȭ')\n buf.write('\\x02ᇗᇘ\\x05ћȮ\\x02ᇘͲ\\x03\\x02\\x02\\x02ᇙ')\n buf.write('ᇚ\\x05ѣȲ\\x02ᇚᇛ\\x05эȧ\\x02ᇛ')\n buf.write('ᇜ\\x05ыȦ\\x02ᇜᇝ\\x05яȨ\\x02ᇝ')\n buf.write('ᇞ\\x05еț\\x02ᇞᇟ\\x05эȧ\\x02ᇟ')\n buf.write('ᇠ\\x05нȟ\\x02ᇠᇡ\\x05љȭ\\x02ᇡ')\n buf.write('ᇢ\\x05ѓȪ\\x02ᇢᇣ\\x05еț\\x02ᇣ')\n buf.write('ᇤ\\x05йȝ\\x02ᇤᇥ\\x05нȟ\\x02ᇥ')\n 
buf.write('ᇦ\\x05љȭ\\x02ᇦʹ\\x03\\x02\\x02\\x02ᇧᇨ')\n buf.write('\\x05ѣȲ\\x02ᇨᇩ\\x05эȧ\\x02ᇩᇪ')\n buf.write('\\x05ыȦ\\x02ᇪᇫ\\x05ѓȪ\\x02ᇫᇬ')\n buf.write('\\x05еț\\x02ᇬᇭ\\x05їȬ\\x02ᇭᇮ')\n buf.write('\\x05љȭ\\x02ᇮᇯ\\x05нȟ\\x02ᇯͶ')\n buf.write('\\x03\\x02\\x02\\x02ᇰᇱ\\x05ѣȲ\\x02ᇱᇲ\\x05э')\n buf.write('ȧ\\x02ᇲᇳ\\x05ыȦ\\x02ᇳᇴ\\x05ѓ')\n buf.write('Ȫ\\x02ᇴᇵ\\x05хȣ\\x02ᇵ\\u0378\\x03\\x02\\x02')\n buf.write('\\x02ᇶᇷ\\x05ѣȲ\\x02ᇷᇸ\\x05эȧ')\n buf.write('\\x02ᇸᇹ\\x05ыȦ\\x02ᇹᇺ\\x05ѕȫ')\n buf.write('\\x02ᇺᇻ\\x05ѝȯ\\x02ᇻᇼ\\x05нȟ')\n buf.write('\\x02ᇼᇽ\\x05їȬ\\x02ᇽᇾ\\x05ѥȳ')\n buf.write('\\x02ᇾͺ\\x03\\x02\\x02\\x02ᇿሀ\\x05ѣȲ\\x02ሀ')\n buf.write('ሁ\\x05эȧ\\x02ሁሂ\\x05ыȦ\\x02ሂ')\n buf.write('ሃ\\x05їȬ\\x02ሃሄ\\x05ёȩ\\x02ሄ')\n buf.write('ህ\\x05ёȩ\\x02ህሆ\\x05ћȮ\\x02ሆ')\n buf.write('ͼ\\x03\\x02\\x02\\x02ሇለ\\x05ѣȲ\\x02ለሉ')\n buf.write('\\x05эȧ\\x02ሉሊ\\x05ыȦ\\x02ሊላ')\n buf.write('\\x05љȭ\\x02ላሌ\\x05нȟ\\x02ሌል')\n buf.write('\\x05їȬ\\x02ልሎ\\x05хȣ\\x02ሎሏ')\n buf.write('\\x05еț\\x02ሏሐ\\x05ыȦ\\x02ሐሑ')\n buf.write('\\x05хȣ\\x02ሑሒ\\x05ѧȴ\\x02ሒሓ')\n buf.write('\\x05нȟ\\x02ሓ;\\x03\\x02\\x02\\x02ሔሕ\\x05ѣ')\n buf.write('Ȳ\\x02ሕሖ\\x05эȧ\\x02ሖሗ\\x05ы')\n buf.write('Ȧ\\x02ሗመ\\x05ћȮ\\x02መሙ\\x05е')\n buf.write('ț\\x02ሙሚ\\x05зȜ\\x02ሚማ\\x05ы')\n buf.write('Ȧ\\x02ማሜ\\x05нȟ\\x02ሜ\\u0380\\x03\\x02\\x02')\n buf.write('\\x02ምሞ\\x05ѥȳ\\x02ሞሟ\\x05нȟ')\n buf.write('\\x02ሟሠ\\x05еț\\x02ሠሡ\\x05їȬ')\n buf.write('\\x02ሡ\\u0382\\x03\\x02\\x02\\x02ሢሣ\\x05ѥȳ\\x02ሣ')\n buf.write('ሤ\\x05нȟ\\x02ሤሥ\\x05љȭ\\x02ሥ')\n buf.write('΄\\x03\\x02\\x02\\x02ሦሧ\\x05ѥȳ\\x02ሧረ')\n buf.write('\\x05эȧ\\x02ረሩ\\x05хȣ\\x02ሩሪ')\n buf.write('\\x05яȨ\\x02ሪራ\\x05ћȮ\\x02ራሬ')\n buf.write('\\x05нȟ\\x02ሬር\\x05їȬ\\x02ርሮ')\n buf.write('\\x05џȰ\\x02ሮሯ\\x05еț\\x02ሯሰ')\n buf.write('\\x05ыȦ\\x02ሰሱ\\x07a\\x02\\x02ሱሲ\\x05ѝ')\n buf.write('ȯ\\x02ሲሳ\\x05яȨ\\x02ሳሴ\\x05й')\n buf.write('ȝ\\x02ሴስ\\x05ёȩ\\x02ስሶ\\x05я')\n buf.write('Ȩ\\x02ሶሷ\\x05љȭ\\x02ሷሸ\\x05ћ')\n buf.write('Ȯ\\x02ሸሹ\\x05їȬ\\x02ሹሺ\\x05е')\n buf.write('ț\\x02ሺሻ\\x05хȣ\\x02ሻሼ\\x05я')\n 
buf.write('Ȩ\\x02ሼሽ\\x05нȟ\\x02ሽሾ\\x05л')\n buf.write('Ȟ\\x02ሾΆ\\x03\\x02\\x02\\x02ሿቀ\\x05ѧȴ')\n buf.write('\\x02ቀቁ\\x05ёȩ\\x02ቁቂ\\x05яȨ')\n buf.write('\\x02ቂቃ\\x05нȟ\\x02ቃΈ\\x03\\x02\\x02\\x02ቄ')\n buf.write('ቅ\\x05ѓȪ\\x02ቅቆ\\x05їȬ\\x02ቆ')\n buf.write('ቇ\\x05нȟ\\x02ቇቈ\\x05лȞ\\x02ቈ')\n buf.write('\\u1249\\x05хȣ\\x02\\u1249ቊ\\x05йȝ\\x02ቊ')\n buf.write('ቋ\\x05ћȮ\\x02ቋቌ\\x05хȣ\\x02ቌ')\n buf.write('ቍ\\x05ёȩ\\x02ቍ\\u124e\\x05яȨ\\x02\\u124e')\n buf.write('Ί\\x03\\x02\\x02\\x02\\u124fቐ\\x05ѓȪ\\x02ቐቑ')\n buf.write('\\x05їȬ\\x02ቑቒ\\x05нȟ\\x02ቒቓ')\n buf.write('\\x05лȞ\\x02ቓቔ\\x05хȣ\\x02ቔቕ')\n buf.write('\\x05йȝ\\x02ቕቖ\\x05ћȮ\\x02ቖ\\u1257')\n buf.write('\\x05хȣ\\x02\\u1257ቘ\\x05ёȩ\\x02ቘ\\u1259')\n buf.write('\\x05яȨ\\x02\\u1259ቚ\\x07a\\x02\\x02ቚቛ\\x05з')\n buf.write('Ȝ\\x02ቛቜ\\x05ёȩ\\x02ቜቝ\\x05ѝ')\n buf.write('ȯ\\x02ቝ\\u125e\\x05яȨ\\x02\\u125e\\u125f\\x05л')\n buf.write('Ȟ\\x02\\u125fበ\\x05љȭ\\x02በΌ\\x03\\x02\\x02')\n buf.write('\\x02ቡቢ\\x05ѓȪ\\x02ቢባ\\x05їȬ')\n buf.write('\\x02ባቤ\\x05нȟ\\x02ቤብ\\x05лȞ')\n buf.write('\\x02ብቦ\\x05хȣ\\x02ቦቧ\\x05йȝ')\n buf.write('\\x02ቧቨ\\x05ћȮ\\x02ቨቩ\\x05хȣ')\n buf.write('\\x02ቩቪ\\x05ёȩ\\x02ቪቫ\\x05яȨ')\n buf.write('\\x02ቫቬ\\x07a\\x02\\x02ቬቭ\\x05йȝ\\x02ቭ')\n buf.write('ቮ\\x05ёȩ\\x02ቮቯ\\x05љȭ\\x02ቯ')\n buf.write('ተ\\x05ћȮ\\x02ተΎ\\x03\\x02\\x02\\x02ቱቲ')\n buf.write('\\x05ѓȪ\\x02ቲታ\\x05їȬ\\x02ታቴ')\n buf.write('\\x05нȟ\\x02ቴት\\x05лȞ\\x02ትቶ')\n buf.write('\\x05хȣ\\x02ቶቷ\\x05йȝ\\x02ቷቸ')\n buf.write('\\x05ћȮ\\x02ቸቹ\\x05хȣ\\x02ቹቺ')\n buf.write('\\x05ёȩ\\x02ቺቻ\\x05яȨ\\x02ቻቼ')\n buf.write('\\x07a\\x02\\x02ቼች\\x05лȞ\\x02ችቾ\\x05н')\n buf.write('ȟ\\x02ቾቿ\\x05ћȮ\\x02ቿኀ\\x05е')\n buf.write('ț\\x02ኀኁ\\x05хȣ\\x02ኁኂ\\x05ы')\n buf.write('Ȧ\\x02ኂኃ\\x05љȭ\\x02ኃΐ\\x03\\x02\\x02')\n buf.write('\\x02ኄኅ\\x05ѓȪ\\x02ኅኆ\\x05їȬ')\n buf.write('\\x02ኆኇ\\x05нȟ\\x02ኇኈ\\x05лȞ')\n buf.write('\\x02ኈ\\u1289\\x05хȣ\\x02\\u1289ኊ\\x05йȝ')\n buf.write('\\x02ኊኋ\\x05ћȮ\\x02ኋኌ\\x05хȣ')\n buf.write('\\x02ኌኍ\\x05ёȩ\\x02ኍ\\u128e\\x05яȨ')\n 
buf.write('\\x02\\u128e\\u128f\\x07a\\x02\\x02\\u128fነ\\x05ѓȪ\\x02ነ')\n buf.write('ኑ\\x05їȬ\\x02ኑኒ\\x05ёȩ\\x02ኒ')\n buf.write('ና\\x05зȜ\\x02ናኔ\\x05еț\\x02ኔ')\n buf.write('ን\\x05зȜ\\x02ንኖ\\x05хȣ\\x02ኖ')\n buf.write('ኗ\\x05ыȦ\\x02ኗኘ\\x05хȣ\\x02ኘ')\n buf.write('ኙ\\x05ћȮ\\x02ኙኚ\\x05ѥȳ\\x02ኚ')\n buf.write('Β\\x03\\x02\\x02\\x02ኛኜ\\x05ѓȪ\\x02ኜኝ')\n buf.write('\\x05їȬ\\x02ኝኞ\\x05нȟ\\x02ኞኟ')\n buf.write('\\x05лȞ\\x02ኟአ\\x05хȣ\\x02አኡ')\n buf.write('\\x05йȝ\\x02ኡኢ\\x05ћȮ\\x02ኢኣ')\n buf.write('\\x05хȣ\\x02ኣኤ\\x05ёȩ\\x02ኤእ')\n buf.write('\\x05яȨ\\x02እኦ\\x07a\\x02\\x02ኦኧ\\x05љ')\n buf.write('ȭ\\x02ኧከ\\x05нȟ\\x02ከኩ\\x05ћ')\n buf.write('Ȯ\\x02ኩΔ\\x03\\x02\\x02\\x02ኪካ\\x05йȝ')\n buf.write('\\x02ካኬ\\x05ѝȯ\\x02ኬክ\\x05эȧ')\n buf.write('\\x02ክኮ\\x05нȟ\\x02ኮኯ\\x07a\\x02\\x02ኯ')\n buf.write('ኰ\\x05лȞ\\x02ኰ\\u12b1\\x05хȣ\\x02\\u12b1')\n buf.write('ኲ\\x05љȭ\\x02ኲኳ\\x05ћȮ\\x02ኳ')\n buf.write('Ζ\\x03\\x02\\x02\\x02ኴኵ\\x05лȞ\\x02ኵ\\u12b6')\n buf.write('\\x05нȟ\\x02\\u12b6\\u12b7\\x05яȨ\\x02\\u12b7ኸ')\n buf.write('\\x05љȭ\\x02ኸኹ\\x05нȟ\\x02ኹኺ')\n buf.write('\\x07a\\x02\\x02ኺኻ\\x05їȬ\\x02ኻኼ\\x05е')\n buf.write('ț\\x02ኼኽ\\x05яȨ\\x02ኽኾ\\x05щ')\n buf.write('ȥ\\x02ኾΘ\\x03\\x02\\x02\\x02\\u12bfዀ\\x05ыȦ')\n buf.write('\\x02ዀ\\u12c1\\x05хȣ\\x02\\u12c1ዂ\\x05љȭ')\n buf.write('\\x02ዂዃ\\x05ћȮ\\x02ዃዄ\\x05еț')\n buf.write('\\x02ዄዅ\\x05сȡ\\x02ዅ\\u12c6\\x05сȡ')\n buf.write('\\x02\\u12c6Κ\\x03\\x02\\x02\\x02\\u12c7ወ\\x05ѓȪ\\x02ወ')\n buf.write('ዉ\\x05нȟ\\x02ዉዊ\\x05їȬ\\x02ዊ')\n buf.write('ዋ\\x05йȝ\\x02ዋዌ\\x05нȟ\\x02ዌ')\n buf.write('ው\\x05яȨ\\x02ውዎ\\x05ћȮ\\x02ዎ')\n buf.write('ዏ\\x07a\\x02\\x02ዏዐ\\x05їȬ\\x02ዐዑ')\n buf.write('\\x05еț\\x02ዑዒ\\x05яȨ\\x02ዒዓ')\n buf.write('\\x05щȥ\\x02ዓΜ\\x03\\x02\\x02\\x02ዔዕ\\x05ѓ')\n buf.write('Ȫ\\x02ዕዖ\\x05нȟ\\x02ዖ\\u12d7\\x05ї')\n buf.write('Ȭ\\x02\\u12d7ዘ\\x05йȝ\\x02ዘዙ\\x05н')\n buf.write('ȟ\\x02ዙዚ\\x05яȨ\\x02ዚዛ\\x05ћ')\n buf.write('Ȯ\\x02ዛዜ\\x05хȣ\\x02ዜዝ\\x05ы')\n buf.write('Ȧ\\x02ዝዞ\\x05нȟ\\x02ዞዟ\\x07a\\x02')\n buf.write('\\x02ዟዠ\\x05йȝ\\x02ዠዡ\\x05ёȩ')\n 
buf.write('\\x02ዡዢ\\x05яȨ\\x02ዢዣ\\x05ћȮ')\n buf.write('\\x02ዣΞ\\x03\\x02\\x02\\x02ዤዥ\\x05ѓȪ\\x02ዥ')\n buf.write('ዦ\\x05нȟ\\x02ዦዧ\\x05їȬ\\x02ዧ')\n buf.write('የ\\x05йȝ\\x02የዩ\\x05нȟ\\x02ዩ')\n buf.write('ዪ\\x05яȨ\\x02ዪያ\\x05ћȮ\\x02ያ')\n buf.write('ዬ\\x05хȣ\\x02ዬይ\\x05ыȦ\\x02ይ')\n buf.write('ዮ\\x05нȟ\\x02ዮዯ\\x07a\\x02\\x02ዯደ')\n buf.write('\\x05лȞ\\x02ደዱ\\x05хȣ\\x02ዱዲ')\n buf.write('\\x05љȭ\\x02ዲዳ\\x05йȝ\\x02ዳΠ')\n buf.write('\\x03\\x02\\x02\\x02ዴድ\\x05їȬ\\x02ድዶ\\x05е')\n buf.write('ț\\x02ዶዷ\\x05яȨ\\x02ዷዸ\\x05щ')\n buf.write('ȥ\\x02ዸ\\u03a2\\x03\\x02\\x02\\x02ዹዺ\\x05еț')\n buf.write('\\x02ዺዻ\\x05џȰ\\x02ዻዼ\\x05сȡ')\n buf.write('\\x02ዼΤ\\x03\\x02\\x02\\x02ዽዾ\\x05йȝ\\x02ዾ')\n buf.write('ዿ\\x05ёȩ\\x02ዿጀ\\x05їȬ\\x02ጀ')\n buf.write('ጁ\\x05їȬ\\x02ጁΦ\\x03\\x02\\x02\\x02ጂጃ')\n buf.write('\\x05ыȦ\\x02ጃጄ\\x05еț\\x02ጄጅ')\n buf.write('\\x05сȡ\\x02ጅΨ\\x03\\x02\\x02\\x02ጆጇ\\x05ы')\n buf.write('Ȧ\\x02ጇገ\\x05нȟ\\x02ገጉ\\x05е')\n buf.write('ț\\x02ጉጊ\\x05лȞ\\x02ጊΪ\\x03\\x02\\x02')\n buf.write('\\x02ጋጌ\\x05эȧ\\x02ጌግ\\x05еț')\n buf.write('\\x02ግጎ\\x05ѣȲ\\x02ጎά\\x03\\x02\\x02\\x02ጏ')\n buf.write('ጐ\\x05эȧ\\x02ጐ\\u1311\\x05нȟ\\x02\\u1311')\n buf.write('ጒ\\x05лȞ\\x02ጒጓ\\x05хȣ\\x02ጓ')\n buf.write('ጔ\\x05еț\\x02ጔጕ\\x05яȨ\\x02ጕ')\n buf.write('ή\\x03\\x02\\x02\\x02\\u1316\\u1317\\x05эȧ\\x02\\u1317ጘ')\n buf.write('\\x05хȣ\\x02ጘጙ\\x05яȨ\\x02ጙΰ')\n buf.write('\\x03\\x02\\x02\\x02ጚጛ\\x05яȨ\\x02ጛጜ\\x05ћ')\n buf.write('Ȯ\\x02ጜጝ\\x05хȣ\\x02ጝጞ\\x05ы')\n buf.write('Ȧ\\x02ጞጟ\\x05нȟ\\x02ጟβ\\x03\\x02\\x02')\n buf.write('\\x02ጠጡ\\x05їȬ\\x02ጡጢ\\x05еț')\n buf.write('\\x02ጢጣ\\x05ћȮ\\x02ጣጤ\\x05хȣ')\n buf.write('\\x02ጤጥ\\x05ёȩ\\x02ጥጦ\\x07a\\x02\\x02ጦ')\n buf.write('ጧ\\x05ћȮ\\x02ጧጨ\\x05ёȩ\\x02ጨ')\n buf.write('ጩ\\x07a\\x02\\x02ጩጪ\\x05їȬ\\x02ጪጫ')\n buf.write('\\x05нȟ\\x02ጫጬ\\x05ѓȪ\\x02ጬጭ')\n buf.write('\\x05ёȩ\\x02ጭጮ\\x05їȬ\\x02ጮጯ')\n buf.write('\\x05ћȮ\\x02ጯδ\\x03\\x02\\x02\\x02ጰጱ\\x05ї')\n buf.write('Ȭ\\x02ጱጲ\\x05ёȩ\\x02ጲጳ\\x05ѡ')\n buf.write('ȱ\\x02ጳጴ\\x07a\\x02\\x02ጴጵ\\x05яȨ')\n 
buf.write('\\x02ጵጶ\\x05ѝȯ\\x02ጶጷ\\x05эȧ')\n buf.write('\\x02ጷጸ\\x05зȜ\\x02ጸጹ\\x05нȟ')\n buf.write('\\x02ጹጺ\\x05їȬ\\x02ጺζ\\x03\\x02\\x02\\x02ጻ')\n buf.write('ጼ\\x05љȭ\\x02ጼጽ\\x05ѝȯ\\x02ጽ')\n buf.write('ጾ\\x05эȧ\\x02ጾθ\\x03\\x02\\x02\\x02ጿፀ')\n buf.write('\\x05џȰ\\x02ፀፁ\\x05еț\\x02ፁፂ')\n buf.write('\\x05їȬ\\x02ፂፃ\\x05хȣ\\x02ፃፄ')\n buf.write('\\x05еț\\x02ፄፅ\\x05яȨ\\x02ፅፆ')\n buf.write('\\x05йȝ\\x02ፆፇ\\x05нȟ\\x02ፇκ')\n buf.write('\\x03\\x02\\x02\\x02ፈፉ\\x05їȬ\\x02ፉፊ\\x05н')\n buf.write('ȟ\\x02ፊፋ\\x05сȡ\\x02ፋፌ\\x05ї')\n buf.write('Ȭ\\x02ፌፍ\\x07a\\x02\\x02ፍμ\\x03\\x02\\x02\\x02ፎ')\n buf.write('ፏ\\x05љȭ\\x02ፏፐ\\x05ћȮ\\x02ፐ')\n buf.write('ፑ\\x05лȞ\\x02ፑፒ\\x05лȞ\\x02ፒ')\n buf.write('ፓ\\x05нȟ\\x02ፓፔ\\x05џȰ\\x02ፔ')\n buf.write('ξ\\x03\\x02\\x02\\x02ፕፖ\\x05џȰ\\x02ፖፗ')\n buf.write('\\x05еț\\x02ፗፘ\\x05їȬ\\x02ፘፙ')\n buf.write('\\x07a\\x02\\x02ፙπ\\x03\\x02\\x02\\x02ፚ\\u135b\\x05йȝ')\n buf.write('\\x02\\u135b\\u135c\\x05ёȩ\\x02\\u135c፝\\x05џȰ')\n buf.write('\\x02፝፞\\x05еț\\x02፞፟\\x05їȬ')\n buf.write('\\x02፟፠\\x07a\\x02\\x02፠ς\\x03\\x02\\x02\\x02፡።')\n buf.write('\\x05яȨ\\x02።፩\\x07)\\x02\\x02፣፨\\n\\x02\\x02')\n buf.write('\\x02፤፥\\x07)\\x02\\x02፥፨\\x07)\\x02\\x02፦፨\\x05')\n buf.write('Эȗ\\x02፧፣\\x03\\x02\\x02\\x02፧፤\\x03\\x02\\x02\\x02')\n buf.write('፧፦\\x03\\x02\\x02\\x02፨፫\\x03\\x02\\x02\\x02፩፧\\x03')\n buf.write('\\x02\\x02\\x02፩፪\\x03\\x02\\x02\\x02፪፬\\x03\\x02\\x02\\x02፫፩')\n buf.write('\\x03\\x02\\x02\\x02፬፭\\x07)\\x02\\x02፭τ\\x03\\x02\\x02\\x02፮')\n buf.write('፷\\x05зȜ\\x02፯፳\\x07)\\x02\\x02፰፲')\n buf.write('\\x0423\\x02፱፰\\x03\\x02\\x02\\x02፲፵\\x03\\x02\\x02\\x02፳')\n buf.write('፱\\x03\\x02\\x02\\x02፳፴\\x03\\x02\\x02\\x02፴፶\\x03\\x02\\x02\\x02')\n buf.write('፵፳\\x03\\x02\\x02\\x02፶፸\\x07)\\x02\\x02፷፯\\x03')\n buf.write('\\x02\\x02\\x02፸፹\\x03\\x02\\x02\\x02፹፷\\x03\\x02\\x02\\x02፹፺')\n buf.write('\\x03\\x02\\x02\\x02፺φ\\x03\\x02\\x02\\x02፻ᎄ\\x05ѣȲ')\n buf.write('\\x02፼ᎀ\\x07)\\x02\\x02\\u137d\\u137f\\t\\x03\\x02\\x02\\u137e\\u137d')\n buf.write(\n 
'\\x03\\x02\\x02\\x02\\u137fᎂ\\x03\\x02\\x02\\x02ᎀ\\u137e\\x03\\x02\\x02\\x02ᎀ')\n buf.write('ᎁ\\x03\\x02\\x02\\x02ᎁᎃ\\x03\\x02\\x02\\x02ᎂᎀ\\x03\\x02\\x02\\x02')\n buf.write('ᎃᎅ\\x07)\\x02\\x02ᎄ፼\\x03\\x02\\x02\\x02ᎅᎆ\\x03')\n buf.write('\\x02\\x02\\x02ᎆᎄ\\x03\\x02\\x02\\x02ᎆᎇ\\x03\\x02\\x02\\x02ᎇψ')\n buf.write('\\x03\\x02\\x02\\x02ᎈᎉ\\x070\\x02\\x02ᎉᎊ\\x070\\x02\\x02ᎊ')\n buf.write('ϊ\\x03\\x02\\x02\\x02ᎋᎌ\\x070\\x02\\x02ᎌό\\x03\\x02\\x02')\n buf.write('\\x02ᎍᎎ\\x05УȒ\\x02ᎎώ\\x03\\x02\\x02\\x02ᎏ')\n buf.write('᎘\\x05Хȓ\\x02᎐᎒\\t\\x04\\x02\\x02᎑᎓')\n buf.write('\\t\\x05\\x02\\x02᎒᎑\\x03\\x02\\x02\\x02᎒᎓\\x03\\x02\\x02\\x02᎓')\n buf.write('᎖\\x03\\x02\\x02\\x02᎔᎗\\x05Хȓ\\x02᎕᎗')\n buf.write('\\x05УȒ\\x02᎖᎔\\x03\\x02\\x02\\x02᎖᎕\\x03\\x02\\x02')\n buf.write('\\x02᎗᎙\\x03\\x02\\x02\\x02᎘᎐\\x03\\x02\\x02\\x02᎘᎙')\n buf.write('\\x03\\x02\\x02\\x02᎙\\u139c\\x03\\x02\\x02\\x02\\u139a\\u139d\\x05лȞ')\n buf.write(\n '\\x02\\u139b\\u139d\\x05пȠ\\x02\\u139c\\u139a\\x03\\x02\\x02\\x02\\u139c')\n buf.write(\n '\\u139b\\x03\\x02\\x02\\x02\\u139c\\u139d\\x03\\x02\\x02\\x02\\u139dϐ\\x03\\x02\\x02\\x02'\n )\n buf.write('\\u139eᎥ\\x07)\\x02\\x02\\u139fᎤ\\n\\x02\\x02\\x02ᎠᎡ\\x07')\n buf.write(')\\x02\\x02ᎡᎤ\\x07)\\x02\\x02ᎢᎤ\\x05Эȗ\\x02Ꭳ')\n buf.write('\\u139f\\x03\\x02\\x02\\x02ᎣᎠ\\x03\\x02\\x02\\x02ᎣᎢ\\x03\\x02\\x02\\x02')\n buf.write('ᎤᎧ\\x03\\x02\\x02\\x02ᎥᎣ\\x03\\x02\\x02\\x02ᎥᎦ\\x03')\n buf.write('\\x02\\x02\\x02ᎦᎨ\\x03\\x02\\x02\\x02ᎧᎥ\\x03\\x02\\x02\\x02ᎨᎩ')\n buf.write('\\x07)\\x02\\x02Ꭹϒ\\x03\\x02\\x02\\x02ᎪᎯ\\x05ѕȫ')\n buf.write('\\x02ᎫᎰ\\x05ϗǬ\\x02ᎬᎰ\\x05ϙǭ')\n buf.write('\\x02ᎭᎰ\\x05ϛǮ\\x02ᎮᎰ\\x05ϝǯ')\n buf.write('\\x02ᎯᎫ\\x03\\x02\\x02\\x02ᎯᎬ\\x03\\x02\\x02\\x02ᎯᎭ')\n buf.write('\\x03\\x02\\x02\\x02ᎯᎮ\\x03\\x02\\x02\\x02ᎰᎱ\\x03\\x02\\x02\\x02Ꮁ')\n buf.write('Ꮂ\\x08Ǫ\\x02\\x02Ꮂϔ\\x03\\x02\\x02\\x02ᎳᎴ\\x07)')\n buf.write('\\x02\\x02Ꮄϖ\\x03\\x02\\x02\\x02ᎵᎶ\\x05ϕǫ\\x02Ꮆ')\n buf.write('Ꮊ\\x07>\\x02\\x02ᎷᎹ\\x0b\\x02\\x02\\x02ᎸᎷ\\x03\\x02\\x02\\x02')\n 
buf.write('ᎹᎼ\\x03\\x02\\x02\\x02ᎺᎻ\\x03\\x02\\x02\\x02ᎺᎸ\\x03')\n buf.write('\\x02\\x02\\x02ᎻᎽ\\x03\\x02\\x02\\x02ᎼᎺ\\x03\\x02\\x02\\x02ᎽᎾ')\n buf.write('\\x07@\\x02\\x02ᎾᎿ\\x05ϕǫ\\x02ᎿϘ\\x03\\x02\\x02')\n buf.write('\\x02ᏀᏁ\\x05ϕǫ\\x02ᏁᏅ\\x07}\\x02\\x02Ꮒ')\n buf.write('Ꮔ\\x0b\\x02\\x02\\x02ᏃᏂ\\x03\\x02\\x02\\x02ᏄᏇ\\x03\\x02\\x02')\n buf.write('\\x02ᏅᏆ\\x03\\x02\\x02\\x02ᏅᏃ\\x03\\x02\\x02\\x02ᏆᏈ')\n buf.write('\\x03\\x02\\x02\\x02ᏇᏅ\\x03\\x02\\x02\\x02ᏈᏉ\\x07\\x7f\\x02\\x02Ꮙ')\n buf.write('Ꮚ\\x05ϕǫ\\x02ᏊϚ\\x03\\x02\\x02\\x02ᏋᏌ')\n buf.write('\\x05ϕǫ\\x02ᏌᏐ\\x07]\\x02\\x02ᏍᏏ\\x0b\\x02\\x02')\n buf.write('\\x02ᏎᏍ\\x03\\x02\\x02\\x02ᏏᏒ\\x03\\x02\\x02\\x02ᏐᏑ')\n buf.write('\\x03\\x02\\x02\\x02ᏐᏎ\\x03\\x02\\x02\\x02ᏑᏓ\\x03\\x02\\x02\\x02Ꮢ')\n buf.write('Ꮠ\\x03\\x02\\x02\\x02ᏓᏔ\\x07_\\x02\\x02ᏔᏕ\\x05ϕ')\n buf.write('ǫ\\x02ᏕϜ\\x03\\x02\\x02\\x02ᏖᏗ\\x05ϕǫ')\n buf.write('\\x02ᏗᏛ\\x07*\\x02\\x02ᏘᏚ\\x0b\\x02\\x02\\x02ᏙᏘ')\n buf.write('\\x03\\x02\\x02\\x02ᏚᏝ\\x03\\x02\\x02\\x02ᏛᏜ\\x03\\x02\\x02\\x02Ꮫ')\n buf.write('Ꮩ\\x03\\x02\\x02\\x02ᏜᏞ\\x03\\x02\\x02\\x02ᏝᏛ\\x03\\x02\\x02\\x02')\n buf.write('ᏞᏟ\\x07+\\x02\\x02ᏟᏠ\\x05ϕǫ\\x02Ꮰ')\n buf.write('Ϟ\\x03\\x02\\x02\\x02ᏡᏢ\\n\\x06\\x02\\x02ᏢϠ\\x03\\x02\\x02\\x02')\n buf.write('ᏣᏧ\\x07$\\x02\\x02ᏤᏨ\\n\\x07\\x02\\x02ᏥᏦ\\x07')\n buf.write('$\\x02\\x02ᏦᏨ\\x07$\\x02\\x02ᏧᏤ\\x03\\x02\\x02\\x02ᏧᏥ')\n buf.write('\\x03\\x02\\x02\\x02ᏨᏩ\\x03\\x02\\x02\\x02ᏩᏧ\\x03\\x02\\x02\\x02Ꮹ')\n buf.write('Ꮺ\\x03\\x02\\x02\\x02ᏪᏫ\\x03\\x02\\x02\\x02ᏫᏬ\\x07$\\x02\\x02')\n buf.write(\"ᏬϢ\\x03\\x02\\x02\\x02ᏭᏮ\\x07'\\x02\\x02ᏮϤ\\x03\")\n buf.write('\\x02\\x02\\x02ᏯᏰ\\x07(\\x02\\x02ᏰϦ\\x03\\x02\\x02\\x02ᏱᏲ')\n buf.write('\\x07*\\x02\\x02ᏲϨ\\x03\\x02\\x02\\x02ᏳᏴ\\x07+\\x02\\x02ᏴϪ')\n buf.write(\n '\\x03\\x02\\x02\\x02Ᏽ\\u13f6\\x07,\\x02\\x02\\u13f6\\u13f7\\x07,\\x02\\x02\\u13f7Ϭ'\n )\n buf.write('\\x03\\x02\\x02\\x02ᏸᏹ\\x07,\\x02\\x02ᏹϮ\\x03\\x02\\x02\\x02ᏺ')\n buf.write('ᏻ\\x07-\\x02\\x02ᏻϰ\\x03\\x02\\x02\\x02ᏼᏽ\\x07/\\x02\\x02ᏽ')\n buf.write(\n 
'ϲ\\x03\\x02\\x02\\x02\\u13fe\\u13ff\\x07.\\x02\\x02\\u13ffϴ\\x03\\x02\\x02\\x02'\n )\n buf.write('᐀ᐁ\\x071\\x02\\x02ᐁ϶\\x03\\x02\\x02\\x02ᐂᐃ')\n buf.write('\\x07B\\x02\\x02ᐃϸ\\x03\\x02\\x02\\x02ᐄᐅ\\x07<\\x02\\x02ᐅᐆ')\n buf.write('\\x07?\\x02\\x02ᐆϺ\\x03\\x02\\x02\\x02ᐇᐈ\\x07<\\x02\\x02ᐈᐍ')\n buf.write('\\x05Сȑ\\x02ᐉᐌ\\x05Сȑ\\x02ᐊᐌ')\n buf.write('\\t\\x08\\x02\\x02ᐋᐉ\\x03\\x02\\x02\\x02ᐋᐊ\\x03\\x02\\x02\\x02ᐌ')\n buf.write('ᐏ\\x03\\x02\\x02\\x02ᐍᐋ\\x03\\x02\\x02\\x02ᐍᐎ\\x03\\x02\\x02\\x02')\n buf.write('ᐎᐖ\\x03\\x02\\x02\\x02ᐏᐍ\\x03\\x02\\x02\\x02ᐐᐑ\\x07')\n buf.write('<\\x02\\x02ᐑᐖ\\x05ϡDZ\\x02ᐒᐓ\\x07<\\x02\\x02ᐓ')\n buf.write('ᐖ\\x05ύǧ\\x02ᐔᐖ\\x05Бȉ\\x02ᐕ')\n buf.write('ᐇ\\x03\\x02\\x02\\x02ᐕᐐ\\x03\\x02\\x02\\x02ᐕᐒ\\x03\\x02\\x02\\x02')\n buf.write('ᐕᐔ\\x03\\x02\\x02\\x02ᐖϼ\\x03\\x02\\x02\\x02ᐗᐘ\\x07')\n buf.write('<\\x02\\x02ᐘϾ\\x03\\x02\\x02\\x02ᐙᐚ\\x07=\\x02\\x02ᐚЀ')\n buf.write('\\x03\\x02\\x02\\x02ᐛᐜ\\x07>\\x02\\x02ᐜᐝ\\x07?\\x02\\x02ᐝЂ')\n buf.write('\\x03\\x02\\x02\\x02ᐞᐟ\\x07>\\x02\\x02ᐟЄ\\x03\\x02\\x02\\x02ᐠ')\n buf.write('ᐡ\\x07@\\x02\\x02ᐡᐢ\\x07?\\x02\\x02ᐢІ\\x03\\x02\\x02\\x02ᐣ')\n buf.write('ᐤ\\x07#\\x02\\x02ᐤᐬ\\x07?\\x02\\x02ᐥᐦ\\x07>\\x02\\x02ᐦ')\n buf.write('ᐬ\\x07@\\x02\\x02ᐧᐨ\\x07`\\x02\\x02ᐨᐬ\\x07?\\x02\\x02ᐩ')\n buf.write('ᐪ\\x07\\x80\\x02\\x02ᐪᐬ\\x07?\\x02\\x02ᐫᐣ\\x03\\x02')\n buf.write('\\x02\\x02ᐫᐥ\\x03\\x02\\x02\\x02ᐫᐧ\\x03\\x02\\x02\\x02ᐫᐩ')\n buf.write('\\x03\\x02\\x02\\x02ᐬЈ\\x03\\x02\\x02\\x02ᐭᐮ\\x07`\\x02\\x02ᐮ')\n buf.write('Њ\\x03\\x02\\x02\\x02ᐯᐰ\\x07\\x80\\x02\\x02ᐰЌ\\x03\\x02')\n buf.write('\\x02\\x02ᐱᐲ\\x07#\\x02\\x02ᐲЎ\\x03\\x02\\x02\\x02ᐳᐴ')\n buf.write('\\x07@\\x02\\x02ᐴА\\x03\\x02\\x02\\x02ᐵᐶ\\x07A\\x02\\x02ᐶВ')\n buf.write('\\x03\\x02\\x02\\x02ᐷᐸ\\x07~\\x02\\x02ᐸᐹ\\x07~\\x02\\x02ᐹД')\n buf.write('\\x03\\x02\\x02\\x02ᐺᐻ\\x07~\\x02\\x02ᐻЖ\\x03\\x02\\x02\\x02ᐼ')\n buf.write('ᐽ\\x07?\\x02\\x02ᐽИ\\x03\\x02\\x02\\x02ᐾᐿ\\x07]\\x02\\x02ᐿ')\n buf.write('К\\x03\\x02\\x02\\x02ᑀᑁ\\x07_\\x02\\x02ᑁМ\\x03\\x02\\x02\\x02')\n 
buf.write('ᑂᑃ\\x07a\\x02\\x02ᑃО\\x03\\x02\\x02\\x02ᑄᑆ\\t')\n buf.write('\\t\\x02\\x02ᑅᑄ\\x03\\x02\\x02\\x02ᑆᑇ\\x03\\x02\\x02\\x02ᑇᑅ')\n buf.write('\\x03\\x02\\x02\\x02ᑇᑈ\\x03\\x02\\x02\\x02ᑈᑉ\\x03\\x02\\x02\\x02ᑉ')\n buf.write('ᑊ\\x08Ȑ\\x03\\x02ᑊР\\x03\\x02\\x02\\x02ᑋᑌ\\t\\n')\n buf.write('\\x02\\x02ᑌТ\\x03\\x02\\x02\\x02ᑍᑏ\\x042;\\x02ᑎᑍ')\n buf.write('\\x03\\x02\\x02\\x02ᑏᑐ\\x03\\x02\\x02\\x02ᑐᑎ\\x03\\x02\\x02\\x02ᑐ')\n buf.write('ᑑ\\x03\\x02\\x02\\x02ᑑФ\\x03\\x02\\x02\\x02ᑒᑔ\\x05ύ')\n buf.write('ǧ\\x02ᑓᑒ\\x03\\x02\\x02\\x02ᑔᑗ\\x03\\x02\\x02\\x02ᑕ')\n buf.write('ᑓ\\x03\\x02\\x02\\x02ᑕᑖ\\x03\\x02\\x02\\x02ᑖᑙ\\x03\\x02\\x02\\x02')\n buf.write('ᑗᑕ\\x03\\x02\\x02\\x02ᑘᑚ\\x070\\x02\\x02ᑙᑘ')\n buf.write('\\x03\\x02\\x02\\x02ᑙᑚ\\x03\\x02\\x02\\x02ᑚᑜ\\x03\\x02\\x02\\x02ᑛ')\n buf.write('ᑝ\\x05ύǧ\\x02ᑜᑛ\\x03\\x02\\x02\\x02ᑝᑞ')\n buf.write('\\x03\\x02\\x02\\x02ᑞᑜ\\x03\\x02\\x02\\x02ᑞᑟ\\x03\\x02\\x02\\x02ᑟ')\n buf.write('Ц\\x03\\x02\\x02\\x02ᑠᑡ\\x07/\\x02\\x02ᑡᑢ\\x07/\\x02\\x02ᑢ')\n buf.write('ᑦ\\x03\\x02\\x02\\x02ᑣᑥ\\n\\x0b\\x02\\x02ᑤᑣ\\x03\\x02\\x02')\n buf.write('\\x02ᑥᑨ\\x03\\x02\\x02\\x02ᑦᑤ\\x03\\x02\\x02\\x02ᑦᑧ')\n buf.write('\\x03\\x02\\x02\\x02ᑧᑫ\\x03\\x02\\x02\\x02ᑨᑦ\\x03\\x02\\x02\\x02ᑩ')\n buf.write('ᑬ\\x05Эȗ\\x02ᑪᑬ\\x07\\x02\\x02\\x03ᑫᑩ')\n buf.write('\\x03\\x02\\x02\\x02ᑫᑪ\\x03\\x02\\x02\\x02ᑬᑭ\\x03\\x02\\x02\\x02ᑭ')\n buf.write('ᑮ\\x08Ȕ\\x04\\x02ᑮШ\\x03\\x02\\x02\\x02ᑯᑰ\\x071')\n buf.write('\\x02\\x02ᑰᑱ\\x07,\\x02\\x02ᑱᑵ\\x03\\x02\\x02\\x02ᑲᑴ')\n buf.write('\\x0b\\x02\\x02\\x02ᑳᑲ\\x03\\x02\\x02\\x02ᑴᑷ\\x03\\x02\\x02\\x02ᑵ')\n buf.write('ᑶ\\x03\\x02\\x02\\x02ᑵᑳ\\x03\\x02\\x02\\x02ᑶᑸ\\x03\\x02\\x02\\x02')\n buf.write('ᑷᑵ\\x03\\x02\\x02\\x02ᑸᑹ\\x07,\\x02\\x02ᑹᑺ\\x07')\n buf.write('1\\x02\\x02ᑺᑻ\\x03\\x02\\x02\\x02ᑻᑼ\\x08ȕ\\x04\\x02ᑼ')\n buf.write('Ъ\\x03\\x02\\x02\\x02ᑽᑾ\\x07r\\x02\\x02ᑾᑿ\\x07t\\x02\\x02ᑿ')\n buf.write('ᒀ\\x07q\\x02\\x02ᒀᒁ\\x07o\\x02\\x02ᒁᒂ\\x07r\\x02\\x02ᒂ')\n buf.write('ᒃ\\x07v\\x02\\x02ᒃᒄ\\x03\\x02\\x02\\x02ᒄᒈ\\x05Я')\n 
buf.write('Ș\\x02ᒅᒇ\\n\\x0b\\x02\\x02ᒆᒅ\\x03\\x02\\x02\\x02ᒇ')\n buf.write('ᒊ\\x03\\x02\\x02\\x02ᒈᒆ\\x03\\x02\\x02\\x02ᒈᒉ\\x03\\x02\\x02\\x02')\n buf.write('ᒉᒍ\\x03\\x02\\x02\\x02ᒊᒈ\\x03\\x02\\x02\\x02ᒋᒎ\\x05')\n buf.write('Эȗ\\x02ᒌᒎ\\x07\\x02\\x02\\x03ᒍᒋ\\x03\\x02\\x02\\x02')\n buf.write('ᒍᒌ\\x03\\x02\\x02\\x02ᒎЬ\\x03\\x02\\x02\\x02ᒏᒑ\\x07')\n buf.write('\\x0f\\x02\\x02ᒐᒏ\\x03\\x02\\x02\\x02ᒐᒑ\\x03\\x02\\x02\\x02ᒑ')\n buf.write('ᒒ\\x03\\x02\\x02\\x02ᒒᒓ\\x07\\x0c\\x02\\x02ᒓЮ\\x03\\x02\\x02\\x02')\n buf.write('ᒔᒕ\\t\\x0c\\x02\\x02ᒕа\\x03\\x02\\x02\\x02ᒖᒛ\\x05')\n buf.write('Сȑ\\x02ᒗᒚ\\x05Сȑ\\x02ᒘᒚ')\n buf.write('\\t\\r\\x02\\x02ᒙᒗ\\x03\\x02\\x02\\x02ᒙᒘ\\x03\\x02\\x02\\x02ᒚ')\n buf.write('ᒝ\\x03\\x02\\x02\\x02ᒛᒙ\\x03\\x02\\x02\\x02ᒛᒜ\\x03\\x02\\x02\\x02')\n buf.write('ᒜв\\x03\\x02\\x02\\x02ᒝᒛ\\x03\\x02\\x02\\x02ᒞᒟ\\x07')\n buf.write('B\\x02\\x02ᒟᒠ\\x07#\\x02\\x02ᒠᒡ\\x03\\x02\\x02\\x02ᒡᒢ')\n buf.write('\\x08Ț\\x04\\x02ᒢд\\x03\\x02\\x02\\x02ᒣᒤ\\t\\x0e\\x02\\x02')\n buf.write('ᒤж\\x03\\x02\\x02\\x02ᒥᒦ\\t\\x0f\\x02\\x02ᒦи')\n buf.write('\\x03\\x02\\x02\\x02ᒧᒨ\\t\\x10\\x02\\x02ᒨк\\x03\\x02\\x02\\x02ᒩ')\n buf.write('ᒪ\\t\\x11\\x02\\x02ᒪм\\x03\\x02\\x02\\x02ᒫᒬ\\t\\x04\\x02')\n buf.write('\\x02ᒬо\\x03\\x02\\x02\\x02ᒭᒮ\\t\\x12\\x02\\x02ᒮр')\n buf.write('\\x03\\x02\\x02\\x02ᒯᒰ\\t\\x13\\x02\\x02ᒰт\\x03\\x02\\x02\\x02ᒱ')\n buf.write('ᒲ\\t\\x14\\x02\\x02ᒲф\\x03\\x02\\x02\\x02ᒳᒴ\\t\\x15\\x02')\n buf.write('\\x02ᒴц\\x03\\x02\\x02\\x02ᒵᒶ\\t\\x16\\x02\\x02ᒶш')\n buf.write('\\x03\\x02\\x02\\x02ᒷᒸ\\t\\x17\\x02\\x02ᒸъ\\x03\\x02\\x02\\x02ᒹ')\n buf.write('ᒺ\\t\\x18\\x02\\x02ᒺь\\x03\\x02\\x02\\x02ᒻᒼ\\t\\x19\\x02')\n buf.write('\\x02ᒼю\\x03\\x02\\x02\\x02ᒽᒾ\\t\\x1a\\x02\\x02ᒾѐ')\n buf.write('\\x03\\x02\\x02\\x02ᒿᓀ\\t\\x1b\\x02\\x02ᓀђ\\x03\\x02\\x02\\x02ᓁ')\n buf.write('ᓂ\\t\\x1c\\x02\\x02ᓂє\\x03\\x02\\x02\\x02ᓃᓄ\\t\\x1d\\x02')\n buf.write('\\x02ᓄі\\x03\\x02\\x02\\x02ᓅᓆ\\t\\x1e\\x02\\x02ᓆј')\n buf.write('\\x03\\x02\\x02\\x02ᓇᓈ\\t\\x1f\\x02\\x02ᓈњ\\x03\\x02\\x02\\x02ᓉ')\n buf.write('ᓊ\\t 
\\x02\\x02ᓊќ\\x03\\x02\\x02\\x02ᓋᓌ\\t!\\x02\\x02ᓌ')\n buf.write('ў\\x03\\x02\\x02\\x02ᓍᓎ\\t\"\\x02\\x02ᓎѠ\\x03\\x02\\x02\\x02')\n buf.write('ᓏᓐ\\t#\\x02\\x02ᓐѢ\\x03\\x02\\x02\\x02ᓑᓒ\\t')\n buf.write('$\\x02\\x02ᓒѤ\\x03\\x02\\x02\\x02ᓓᓔ\\t%\\x02\\x02ᓔѦ')\n buf.write(\"\\x03\\x02\\x02\\x02ᓕᓖ\\t&\\x02\\x02ᓖѨ\\x03\\x02\\x02\\x02'\\x02፧\")\n buf.write('፩፳፹ᎀᎆ᎒᎖᎘\\u139c')\n buf.write('ᎣᎥᎯᎺᏅᏐᏛᏧᏩ')\n buf.write('ᐋᐍᐕᐫᑇᑐᑕᑙᑞ')\n buf.write('ᑦᑫᑵᒈᒍᒐᒙᒛ\\x05\\tǪ')\n buf.write('\\x02\\x08\\x02\\x02\\x02\\x03\\x02')\n return buf.getvalue()\n\n\nclass PlSqlLexer(Lexer):\n atn = ATNDeserializer().deserialize(serializedATN())\n decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)]\n T__0 = 1\n A_LETTER = 2\n ADD = 3\n AFTER = 4\n AGENT = 5\n AGGREGATE = 6\n ALL = 7\n ALTER = 8\n ANALYZE = 9\n AND = 10\n ANY = 11\n ARRAY = 12\n AS = 13\n ASSUME = 14\n ASSERT = 15\n ASC = 16\n ASSOCIATE = 17\n AT = 18\n ATTRIBUTE = 19\n AUDIT = 20\n AUTHID = 21\n AUTO = 22\n AUTOMATIC = 23\n AUTONOMOUS_TRANSACTION = 24\n BATCH = 25\n BEFORE = 26\n BEGIN = 27\n BETWEEN = 28\n BFILE = 29\n BINARY_DOUBLE = 30\n BINARY_FLOAT = 31\n BINARY_INTEGER = 32\n BLOB = 33\n BLOCK = 34\n BODY = 35\n BOOLEAN = 36\n BOTH = 37\n BREADTH = 38\n BULK = 39\n BY = 40\n BYTE = 41\n C_LETTER = 42\n CACHE = 43\n CALL = 44\n CANONICAL = 45\n CASCADE = 46\n CASE = 47\n CAST = 48\n CHAR = 49\n CHAR_CS = 50\n CHARACTER = 51\n CHECK = 52\n CHR = 53\n CLOB = 54\n CLOSE = 55\n CLUSTER = 56\n COLLECT = 57\n COLUMNS = 58\n COMMENT = 59\n COMMIT = 60\n COMMITTED = 61\n COMPATIBILITY = 62\n COMPILE = 63\n COMPOUND = 64\n CONNECT = 65\n CONNECT_BY_ROOT = 66\n CONSTANT = 67\n CONSTRAINT = 68\n CONSTRAINTS = 69\n CONSTRUCTOR = 70\n CONTENT = 71\n CONTEXT = 72\n CONTINUE = 73\n CONVERT = 74\n CORRUPT_XID = 75\n CORRUPT_XID_ALL = 76\n COST = 77\n COUNT = 78\n CREATE = 79\n CROSS = 80\n CUBE = 81\n CURRENT = 82\n CURRENT_USER = 83\n CURSOR = 84\n CUSTOMDATUM = 85\n CYCLE = 86\n DATA = 87\n DATABASE = 88\n DATE = 89\n DAY = 
90\n DB_ROLE_CHANGE = 91\n DBTIMEZONE = 92\n DDL = 93\n DEBUG = 94\n DEC = 95\n DECIMAL = 96\n DECLARE = 97\n DECOMPOSE = 98\n DECREMENT = 99\n DEFAULT = 100\n DEFAULTS = 101\n DEFERRED = 102\n DEFINER = 103\n DELETE = 104\n DEPTH = 105\n DESC = 106\n DETERMINISTIC = 107\n DIMENSION = 108\n DISABLE = 109\n DISASSOCIATE = 110\n DISTINCT = 111\n DOCUMENT = 112\n DOUBLE = 113\n DROP = 114\n DSINTERVAL_UNCONSTRAINED = 115\n EACH = 116\n ELEMENT = 117\n ELSE = 118\n ELSIF = 119\n EMPTY = 120\n ENABLE = 121\n ENCODING = 122\n END = 123\n ENTITYESCAPING = 124\n ERR = 125\n ERRORS = 126\n ESCAPE = 127\n EVALNAME = 128\n EXCEPT = 129\n EXCEPTION = 130\n EXCEPTION_INIT = 131\n EXCEPTIONS = 132\n EXCLUDE = 133\n EXCLUSIVE = 134\n EXECUTE = 135\n EXISTS = 136\n EXIT = 137\n EXPLAIN = 138\n EXTERNAL = 139\n EXTRACT = 140\n FAILURE = 141\n FALSE = 142\n FETCH = 143\n FINAL = 144\n FIRST = 145\n FIRST_VALUE = 146\n FLOAT = 147\n FOLLOWING = 148\n FOLLOWS = 149\n FOR = 150\n FORALL = 151\n FORCE = 152\n FROM = 153\n FULL = 154\n FUNCTION = 155\n GOTO = 156\n GRANT = 157\n GROUP = 158\n GROUPING = 159\n HASH = 160\n HAVING = 161\n HIDE = 162\n HOUR = 163\n IF = 164\n IGNORE = 165\n IMMEDIATE = 166\n IN = 167\n INCLUDE = 168\n INCLUDING = 169\n INCREMENT = 170\n INDENT = 171\n INDEX = 172\n INDEXED = 173\n INDICATOR = 174\n INDICES = 175\n INFINITE = 176\n INLINE = 177\n INNER = 178\n INOUT = 179\n INSERT = 180\n INSTANTIABLE = 181\n INSTEAD = 182\n INT = 183\n INTEGER = 184\n INTERSECT = 185\n INTERVAL = 186\n INTO = 187\n INVALIDATE = 188\n IS = 189\n ISOLATION = 190\n ITERATE = 191\n JAVA = 192\n JOIN = 193\n KEEP = 194\n LANGUAGE = 195\n LAST = 196\n LAST_VALUE = 197\n LEADING = 198\n LEFT = 199\n LEVEL = 200\n LIBRARY = 201\n LIKE = 202\n LIKE2 = 203\n LIKE4 = 204\n LIKEC = 205\n LIMIT = 206\n LOCAL = 207\n LOCK = 208\n LOCKED = 209\n LOG = 210\n LOGOFF = 211\n LOGON = 212\n LONG = 213\n LOOP = 214\n MAIN = 215\n MAP = 216\n MATCHED = 217\n MAXVALUE = 218\n MEASURES = 219\n 
MEMBER = 220\n MERGE = 221\n MINUS = 222\n MINUTE = 223\n MINVALUE = 224\n MLSLABEL = 225\n MODE = 226\n MODEL = 227\n MODIFY = 228\n MONTH = 229\n MULTISET = 230\n NAME = 231\n NAN = 232\n NATURAL = 233\n NATURALN = 234\n NAV = 235\n NCHAR = 236\n NCHAR_CS = 237\n NCLOB = 238\n NESTED = 239\n NEW = 240\n NO = 241\n NOAUDIT = 242\n NOCACHE = 243\n NOCOPY = 244\n NOCYCLE = 245\n NOENTITYESCAPING = 246\n NOMAXVALUE = 247\n NOMINVALUE = 248\n NONE = 249\n NOORDER = 250\n NOSCHEMACHECK = 251\n NOT = 252\n NOWAIT = 253\n NULL = 254\n NULLS = 255\n NUMBER = 256\n NUMERIC = 257\n NVARCHAR2 = 258\n OBJECT = 259\n OF = 260\n OFF = 261\n OID = 262\n OLD = 263\n ON = 264\n ONLY = 265\n OPEN = 266\n OPTION = 267\n OR = 268\n ORADATA = 269\n ORDER = 270\n ORDINALITY = 271\n OSERROR = 272\n OUT = 273\n OUTER = 274\n OVER = 275\n OVERRIDING = 276\n PACKAGE = 277\n PARALLEL_ENABLE = 278\n PARAMETERS = 279\n PARENT = 280\n PARTITION = 281\n PASSING = 282\n PATH = 283\n PERCENT_ROWTYPE = 284\n PERCENT_TYPE = 285\n PIPELINED = 286\n PIVOT = 287\n PLAN = 288\n PLS_INTEGER = 289\n POSITIVE = 290\n POSITIVEN = 291\n PRAGMA = 292\n PRECEDING = 293\n PRECISION = 294\n PRESENT = 295\n PRIOR = 296\n PROCEDURE = 297\n RAISE = 298\n RANGE = 299\n RAW = 300\n READ = 301\n REAL = 302\n RECORD = 303\n REF = 304\n REFERENCE = 305\n REFERENCING = 306\n REJECT = 307\n RELIES_ON = 308\n RENAME = 309\n REPLACE = 310\n RESPECT = 311\n RESTRICT_REFERENCES = 312\n RESULT = 313\n RESULT_CACHE = 314\n RETURN = 315\n RETURNING = 316\n REUSE = 317\n REVERSE = 318\n REVOKE = 319\n RIGHT = 320\n ROLLBACK = 321\n ROLLUP = 322\n ROW = 323\n ROWID = 324\n ROWS = 325\n RULES = 326\n SAMPLE = 327\n SAVE = 328\n SAVEPOINT = 329\n SCHEMA = 330\n SCHEMACHECK = 331\n SCN = 332\n SEARCH = 333\n SECOND = 334\n SEED = 335\n SEGMENT = 336\n SELECT = 337\n SELF = 338\n SEQUENCE = 339\n SEQUENTIAL = 340\n SERIALIZABLE = 341\n SERIALLY_REUSABLE = 342\n SERVERERROR = 343\n SESSIONTIMEZONE = 344\n SET = 345\n SETS = 346\n 
SETTINGS = 347\n SHARE = 348\n SHOW = 349\n SHUTDOWN = 350\n SIBLINGS = 351\n SIGNTYPE = 352\n SIMPLE_INTEGER = 353\n SINGLE = 354\n SIZE = 355\n SKIP_ = 356\n SMALLINT = 357\n SNAPSHOT = 358\n SOME = 359\n SPECIFICATION = 360\n SQLDATA = 361\n SQLERROR = 362\n STANDALONE = 363\n START = 364\n STARTUP = 365\n STATEMENT = 366\n STATEMENT_ID = 367\n STATIC = 368\n STATISTICS = 369\n STRING = 370\n SUBMULTISET = 371\n SUBPARTITION = 372\n SUBSTITUTABLE = 373\n SUBTYPE = 374\n SUCCESS = 375\n SUSPEND = 376\n TABLE = 377\n THE = 378\n THEN = 379\n TIME = 380\n TIMESTAMP = 381\n TIMESTAMP_LTZ_UNCONSTRAINED = 382\n TIMESTAMP_TZ_UNCONSTRAINED = 383\n TIMESTAMP_UNCONSTRAINED = 384\n TIMEZONE_ABBR = 385\n TIMEZONE_HOUR = 386\n TIMEZONE_MINUTE = 387\n TIMEZONE_REGION = 388\n TO = 389\n TRAILING = 390\n TRANSACTION = 391\n TRANSLATE = 392\n TREAT = 393\n TRIGGER = 394\n TRIM = 395\n TRUE = 396\n TRUNCATE = 397\n TYPE = 398\n UNBOUNDED = 399\n UNDER = 400\n UNION = 401\n UNIQUE = 402\n UNLIMITED = 403\n UNPIVOT = 404\n UNTIL = 405\n UPDATE = 406\n UPDATED = 407\n UPSERT = 408\n UROWID = 409\n USE = 410\n USING = 411\n VALIDATE = 412\n VALUE = 413\n VALUES = 414\n VARCHAR = 415\n VARCHAR2 = 416\n VARIABLE = 417\n VARRAY = 418\n VARYING = 419\n VERSION = 420\n VERSIONS = 421\n WAIT = 422\n WARNING = 423\n WELLFORMED = 424\n WHEN = 425\n WHENEVER = 426\n WHERE = 427\n WHILE = 428\n WITH = 429\n WITHIN = 430\n WORK = 431\n WRITE = 432\n XML = 433\n XMLAGG = 434\n XMLATTRIBUTES = 435\n XMLCAST = 436\n XMLCOLATTVAL = 437\n XMLELEMENT = 438\n XMLEXISTS = 439\n XMLFOREST = 440\n XMLNAMESPACES = 441\n XMLPARSE = 442\n XMLPI = 443\n XMLQUERY = 444\n XMLROOT = 445\n XMLSERIALIZE = 446\n XMLTABLE = 447\n YEAR = 448\n YES = 449\n YMINTERVAL_UNCONSTRAINED = 450\n ZONE = 451\n PREDICTION = 452\n PREDICTION_BOUNDS = 453\n PREDICTION_COST = 454\n PREDICTION_DETAILS = 455\n PREDICTION_PROBABILITY = 456\n PREDICTION_SET = 457\n CUME_DIST = 458\n DENSE_RANK = 459\n LISTAGG = 460\n PERCENT_RANK = 
461\n PERCENTILE_CONT = 462\n PERCENTILE_DISC = 463\n RANK = 464\n AVG = 465\n CORR = 466\n LAG = 467\n LEAD = 468\n MAX = 469\n MEDIAN = 470\n MIN = 471\n NTILE = 472\n RATIO_TO_REPORT = 473\n ROW_NUMBER = 474\n SUM = 475\n VARIANCE = 476\n REGR_ = 477\n STDDEV = 478\n VAR_ = 479\n COVAR_ = 480\n NATIONAL_CHAR_STRING_LIT = 481\n BIT_STRING_LIT = 482\n HEX_STRING_LIT = 483\n DOUBLE_PERIOD = 484\n PERIOD = 485\n UNSIGNED_INTEGER = 486\n APPROXIMATE_NUM_LIT = 487\n CHAR_STRING = 488\n DELIMITED_ID = 489\n PERCENT = 490\n AMPERSAND = 491\n LEFT_PAREN = 492\n RIGHT_PAREN = 493\n DOUBLE_ASTERISK = 494\n ASTERISK = 495\n PLUS_SIGN = 496\n MINUS_SIGN = 497\n COMMA = 498\n SOLIDUS = 499\n AT_SIGN = 500\n ASSIGN_OP = 501\n BINDVAR = 502\n COLON = 503\n SEMICOLON = 504\n LESS_THAN_OR_EQUALS_OP = 505\n LESS_THAN_OP = 506\n GREATER_THAN_OR_EQUALS_OP = 507\n NOT_EQUAL_OP = 508\n CARRET_OPERATOR_PART = 509\n TILDE_OPERATOR_PART = 510\n EXCLAMATION_OPERATOR_PART = 511\n GREATER_THAN_OP = 512\n CONCATENATION_OP = 513\n VERTICAL_BAR = 514\n EQUALS_OP = 515\n LEFT_BRACKET = 516\n RIGHT_BRACKET = 517\n INTRODUCER = 518\n SPACES = 519\n SINGLE_LINE_COMMENT = 520\n MULTI_LINE_COMMENT = 521\n PROMPT = 522\n REGULAR_ID = 523\n ZV = 524\n channelNames = [u'DEFAULT_TOKEN_CHANNEL', u'HIDDEN']\n modeNames = ['DEFAULT_MODE']\n literalNames = ['<INVALID>', \"'..'\", \"'.'\", \"'%'\", \"'&'\", \"'('\", \"')'\",\n \"'**'\", \"'*'\", \"'+'\", \"'-'\", \"','\", \"'/'\", \"'@'\", \"':='\", \"':'\",\n \"';'\", \"'<='\", \"'<'\", \"'>='\", \"'^'\", \"'~'\", \"'!'\", \"'>'\", \"'||'\",\n \"'|'\", \"'='\", \"'['\", \"']'\", \"'_'\", \"'@!'\"]\n symbolicNames = ['<INVALID>', 'A_LETTER', 'ADD', 'AFTER', 'AGENT',\n 'AGGREGATE', 'ALL', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS',\n 'ASSUME', 'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT',\n 'AUTHID', 'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH',\n 'BEFORE', 'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE',\n 'BINARY_FLOAT', 
'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY',\n 'BOOLEAN', 'BOTH', 'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER',\n 'CACHE', 'CALL', 'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR',\n 'CHAR_CS', 'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER',\n 'COLLECT', 'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED',\n 'COMPATIBILITY', 'COMPILE', 'COMPOUND', 'CONNECT',\n 'CONNECT_BY_ROOT', 'CONSTANT', 'CONSTRAINT', 'CONSTRAINTS',\n 'CONSTRUCTOR', 'CONTENT', 'CONTEXT', 'CONTINUE', 'CONVERT',\n 'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST', 'COUNT', 'CREATE',\n 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER', 'CURSOR', 'CUSTOMDATUM',\n 'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY', 'DB_ROLE_CHANGE',\n 'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL', 'DECLARE',\n 'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS', 'DEFERRED',\n 'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC', 'DIMENSION',\n 'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT', 'DOUBLE', 'DROP',\n 'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT', 'ELSE', 'ELSIF',\n 'EMPTY', 'ENABLE', 'ENCODING', 'END', 'ENTITYESCAPING', 'ERR',\n 'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT', 'EXCEPTION',\n 'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE', 'EXECUTE',\n 'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FAILURE',\n 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE', 'FLOAT',\n 'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM', 'FULL',\n 'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH', 'HAVING',\n 'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN', 'INCLUDE',\n 'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED', 'INDICATOR',\n 'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT', 'INSERT',\n 'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',\n 'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',\n 'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',\n 'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',\n 'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',\n 'LONG', 
'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',\n 'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',\n 'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',\n 'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',\n 'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',\n 'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',\n 'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',\n 'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',\n 'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',\n 'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',\n 'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',\n 'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',\n 'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',\n 'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',\n 'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',\n 'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',\n 'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',\n 'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',\n 'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',\n 'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',\n 'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',\n 'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',\n 'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',\n 'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',\n 'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',\n 'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',\n 'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 'STRING',\n 'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',\n 'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',\n 'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',\n 'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 
'TIMEZONE_HOUR',\n 'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',\n 'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',\n 'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',\n 'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',\n 'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',\n 'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',\n 'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',\n 'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',\n 'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',\n 'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',\n 'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',\n 'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',\n 'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',\n 'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',\n 'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',\n 'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',\n 'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',\n 'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',\n 'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',\n 'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',\n 'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'DELIMITED_ID', 'PERCENT',\n 'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN', 'DOUBLE_ASTERISK',\n 'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA', 'SOLIDUS',\n 'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',\n 'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',\n 'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',\n 'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',\n 'GREATER_THAN_OP', 'CONCATENATION_OP', 'VERTICAL_BAR', 'EQUALS_OP',\n 'LEFT_BRACKET', 'RIGHT_BRACKET', 'INTRODUCER', 'SPACES',\n 'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'REGULAR_ID',\n 'ZV']\n ruleNames = ['T__0', 'A_LETTER', 'ADD', 'AFTER', 'AGENT', 'AGGREGATE',\n 'ALL', 'ALTER', 'ANALYZE', 
'AND', 'ANY', 'ARRAY', 'AS', 'ASSUME',\n 'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT', 'AUTHID',\n 'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH', 'BEFORE',\n 'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE', 'BINARY_FLOAT',\n 'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY', 'BOOLEAN', 'BOTH',\n 'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER', 'CACHE', 'CALL',\n 'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR', 'CHAR_CS',\n 'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER', 'COLLECT',\n 'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED', 'COMPATIBILITY',\n 'COMPILE', 'COMPOUND', 'CONNECT', 'CONNECT_BY_ROOT', 'CONSTANT',\n 'CONSTRAINT', 'CONSTRAINTS', 'CONSTRUCTOR', 'CONTENT', 'CONTEXT',\n 'CONTINUE', 'CONVERT', 'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST',\n 'COUNT', 'CREATE', 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER',\n 'CURSOR', 'CUSTOMDATUM', 'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY',\n 'DB_ROLE_CHANGE', 'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL',\n 'DECLARE', 'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS',\n 'DEFERRED', 'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC',\n 'DIMENSION', 'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT',\n 'DOUBLE', 'DROP', 'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT',\n 'ELSE', 'ELSIF', 'EMPTY', 'ENABLE', 'ENCODING', 'END',\n 'ENTITYESCAPING', 'ERR', 'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT',\n 'EXCEPTION', 'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE',\n 'EXECUTE', 'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT',\n 'FAILURE', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE',\n 'FLOAT', 'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM',\n 'FULL', 'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH',\n 'HAVING', 'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN',\n 'INCLUDE', 'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED',\n 'INDICATOR', 'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT',\n 'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',\n 'INTERVAL', 'INTO', 
'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',\n 'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',\n 'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',\n 'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',\n 'LONG', 'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',\n 'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',\n 'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',\n 'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',\n 'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',\n 'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',\n 'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',\n 'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',\n 'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',\n 'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',\n 'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',\n 'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',\n 'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',\n 'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',\n 'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',\n 'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',\n 'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',\n 'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',\n 'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',\n 'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',\n 'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',\n 'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',\n 'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',\n 'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',\n 'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',\n 'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',\n 'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 
'STRING',\n 'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',\n 'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',\n 'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',\n 'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 'TIMEZONE_HOUR',\n 'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',\n 'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',\n 'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',\n 'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',\n 'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',\n 'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',\n 'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',\n 'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',\n 'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',\n 'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',\n 'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',\n 'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',\n 'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',\n 'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',\n 'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',\n 'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',\n 'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',\n 'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',\n 'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',\n 'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',\n 'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'CHAR_STRING_PERL', 'QUOTE',\n 'QS_ANGLE', 'QS_BRACE', 'QS_BRACK', 'QS_PAREN', 'QS_OTHER_CH',\n 'DELIMITED_ID', 'PERCENT', 'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN',\n 'DOUBLE_ASTERISK', 'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA',\n 'SOLIDUS', 'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',\n 'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',\n 'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',\n 
'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',\n 'GREATER_THAN_OP', 'QUESTION_MARK', 'CONCATENATION_OP',\n 'VERTICAL_BAR', 'EQUALS_OP', 'LEFT_BRACKET', 'RIGHT_BRACKET',\n 'INTRODUCER', 'SPACES', 'SIMPLE_LETTER',\n 'UNSIGNED_INTEGER_FRAGMENT', 'FLOAT_FRAGMENT',\n 'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'NEWLINE',\n 'SPACE', 'REGULAR_ID', 'ZV', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',\n 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U',\n 'V', 'W', 'X', 'Y', 'Z']\n grammarFileName = 'PlSql.g4'\n\n def __init__(self, input=None, output: TextIO=sys.stdout):\n super().__init__(input, output)\n self.checkVersion('4.7.2')\n self._interp = LexerATNSimulator(self, self.atn, self.\n decisionsToDFA, PredictionContextCache())\n self._actions = None\n self._predicates = None\n",
"step-4": "from antlr4 import *\nfrom io import StringIO\nfrom typing.io import TextIO\nimport sys\n\n\ndef serializedATN():\n with StringIO() as buf:\n buf.write('\\x03悋Ꜫ脳맭䅼㯧瞆奤\\x02Ȏ')\n buf.write(\n 'ᓗ\\x08\\x01\\x04\\x02\\t\\x02\\x04\\x03\\t\\x03\\x04\\x04\\t\\x04\\x04\\x05\\t\\x05\\x04\\x06\\t\\x06\\x04\\x07'\n )\n buf.write(\n '\\t\\x07\\x04\\x08\\t\\x08\\x04\\t\\t\\t\\x04\\n\\t\\n\\x04\\x0b\\t\\x0b\\x04\\x0c\\t\\x0c\\x04\\r\\t\\r'\n )\n buf.write(\n '\\x04\\x0e\\t\\x0e\\x04\\x0f\\t\\x0f\\x04\\x10\\t\\x10\\x04\\x11\\t\\x11\\x04\\x12\\t\\x12\\x04\\x13'\n )\n buf.write(\n '\\t\\x13\\x04\\x14\\t\\x14\\x04\\x15\\t\\x15\\x04\\x16\\t\\x16\\x04\\x17\\t\\x17\\x04\\x18\\t\\x18'\n )\n buf.write(\n '\\x04\\x19\\t\\x19\\x04\\x1a\\t\\x1a\\x04\\x1b\\t\\x1b\\x04\\x1c\\t\\x1c\\x04\\x1d\\t\\x1d\\x04\\x1e'\n )\n buf.write(\n '\\t\\x1e\\x04\\x1f\\t\\x1f\\x04 \\t \\x04!\\t!\\x04\"\\t\"\\x04#\\t#\\x04$\\t$\\x04%\\t%'\n )\n buf.write(\n \"\\x04&\\t&\\x04'\\t'\\x04(\\t(\\x04)\\t)\\x04*\\t*\\x04+\\t+\\x04,\\t,\\x04-\\t-\\x04.\"\n )\n buf.write('\\t.\\x04/\\t/\\x040\\t0\\x041\\t1\\x042\\t2\\x043\\t3\\x044')\n buf.write('\\t4\\x045\\t5\\x046\\t6\\x047\\t7\\x048\\t8\\x049\\t9\\x04:\\t:')\n buf.write(\n '\\x04;\\t;\\x04<\\t<\\x04=\\t=\\x04>\\t>\\x04?\\t?\\x04@\\t@\\x04A\\tA\\x04B\\tB\\x04C\\t'\n )\n buf.write(\n 'C\\x04D\\tD\\x04E\\tE\\x04F\\tF\\x04G\\tG\\x04H\\tH\\x04I\\tI\\x04J\\tJ\\x04K\\tK\\x04L\\t'\n )\n buf.write(\n 'L\\x04M\\tM\\x04N\\tN\\x04O\\tO\\x04P\\tP\\x04Q\\tQ\\x04R\\tR\\x04S\\tS\\x04T\\tT\\x04U\\t'\n )\n buf.write(\n 'U\\x04V\\tV\\x04W\\tW\\x04X\\tX\\x04Y\\tY\\x04Z\\tZ\\x04[\\t[\\x04\\\\\\t\\\\\\x04]\\t]\\x04'\n )\n buf.write(\n '^\\t^\\x04_\\t_\\x04`\\t`\\x04a\\ta\\x04b\\tb\\x04c\\tc\\x04d\\td\\x04e\\te\\x04f\\tf\\x04'\n )\n buf.write(\n 'g\\tg\\x04h\\th\\x04i\\ti\\x04j\\tj\\x04k\\tk\\x04l\\tl\\x04m\\tm\\x04n\\tn\\x04o\\to\\x04'\n )\n buf.write(\n 'p\\tp\\x04q\\tq\\x04r\\tr\\x04s\\ts\\x04t\\tt\\x04u\\tu\\x04v\\tv\\x04w\\tw\\x04x\\tx\\x04'\n )\n buf.write(\n 
'y\\ty\\x04z\\tz\\x04{\\t{\\x04|\\t|\\x04}\\t}\\x04~\\t~\\x04\\x7f\\t\\x7f\\x04\\x80'\n )\n buf.write('\\t\\x80\\x04\\x81\\t\\x81\\x04\\x82\\t\\x82\\x04\\x83\\t\\x83')\n buf.write('\\x04\\x84\\t\\x84\\x04\\x85\\t\\x85\\x04\\x86\\t\\x86\\x04\\x87')\n buf.write('\\t\\x87\\x04\\x88\\t\\x88\\x04\\x89\\t\\x89\\x04\\x8a\\t\\x8a')\n buf.write('\\x04\\x8b\\t\\x8b\\x04\\x8c\\t\\x8c\\x04\\x8d\\t\\x8d\\x04\\x8e')\n buf.write('\\t\\x8e\\x04\\x8f\\t\\x8f\\x04\\x90\\t\\x90\\x04\\x91\\t\\x91')\n buf.write('\\x04\\x92\\t\\x92\\x04\\x93\\t\\x93\\x04\\x94\\t\\x94\\x04\\x95')\n buf.write('\\t\\x95\\x04\\x96\\t\\x96\\x04\\x97\\t\\x97\\x04\\x98\\t\\x98')\n buf.write('\\x04\\x99\\t\\x99\\x04\\x9a\\t\\x9a\\x04\\x9b\\t\\x9b\\x04\\x9c')\n buf.write('\\t\\x9c\\x04\\x9d\\t\\x9d\\x04\\x9e\\t\\x9e\\x04\\x9f\\t\\x9f')\n buf.write('\\x04\\xa0\\t\\xa0\\x04¡\\t¡\\x04¢\\t¢\\x04£')\n buf.write('\\t£\\x04¤\\t¤\\x04¥\\t¥\\x04¦\\t¦')\n buf.write('\\x04§\\t§\\x04¨\\t¨\\x04©\\t©\\x04ª')\n buf.write('\\tª\\x04«\\t«\\x04¬\\t¬\\x04\\xad\\t\\xad')\n buf.write('\\x04®\\t®\\x04¯\\t¯\\x04°\\t°\\x04±')\n buf.write('\\t±\\x04²\\t²\\x04³\\t³\\x04´\\t´')\n buf.write('\\x04µ\\tµ\\x04¶\\t¶\\x04·\\t·\\x04¸')\n buf.write('\\t¸\\x04¹\\t¹\\x04º\\tº\\x04»\\t»')\n buf.write('\\x04¼\\t¼\\x04½\\t½\\x04¾\\t¾\\x04¿')\n buf.write('\\t¿\\x04À\\tÀ\\x04Á\\tÁ\\x04Â\\tÂ')\n buf.write('\\x04Ã\\tÃ\\x04Ä\\tÄ\\x04Å\\tÅ\\x04Æ')\n buf.write('\\tÆ\\x04Ç\\tÇ\\x04È\\tÈ\\x04É\\tÉ')\n buf.write('\\x04Ê\\tÊ\\x04Ë\\tË\\x04Ì\\tÌ\\x04Í')\n buf.write('\\tÍ\\x04Î\\tÎ\\x04Ï\\tÏ\\x04Ð\\tÐ')\n buf.write('\\x04Ñ\\tÑ\\x04Ò\\tÒ\\x04Ó\\tÓ\\x04Ô')\n buf.write('\\tÔ\\x04Õ\\tÕ\\x04Ö\\tÖ\\x04×\\t×')\n buf.write('\\x04Ø\\tØ\\x04Ù\\tÙ\\x04Ú\\tÚ\\x04Û')\n buf.write('\\tÛ\\x04Ü\\tÜ\\x04Ý\\tÝ\\x04Þ\\tÞ')\n buf.write('\\x04ß\\tß\\x04à\\tà\\x04á\\tá\\x04â')\n buf.write('\\tâ\\x04ã\\tã\\x04ä\\tä\\x04å\\tå')\n buf.write('\\x04æ\\tæ\\x04ç\\tç\\x04è\\tè\\x04é')\n buf.write('\\té\\x04ê\\tê\\x04ë\\të\\x04ì\\tì')\n buf.write('\\x04í\\tí\\x04î\\tî\\x04ï\\tï\\x04ð')\n 
buf.write('\\tð\\x04ñ\\tñ\\x04ò\\tò\\x04ó\\tó')\n buf.write('\\x04ô\\tô\\x04õ\\tõ\\x04ö\\tö\\x04÷')\n buf.write('\\t÷\\x04ø\\tø\\x04ù\\tù\\x04ú\\tú')\n buf.write('\\x04û\\tû\\x04ü\\tü\\x04ý\\tý\\x04þ')\n buf.write('\\tþ\\x04ÿ\\tÿ\\x04Ā\\tĀ\\x04ā\\tā')\n buf.write('\\x04Ă\\tĂ\\x04ă\\tă\\x04Ą\\tĄ\\x04ą')\n buf.write('\\tą\\x04Ć\\tĆ\\x04ć\\tć\\x04Ĉ\\tĈ')\n buf.write('\\x04ĉ\\tĉ\\x04Ċ\\tĊ\\x04ċ\\tċ\\x04Č')\n buf.write('\\tČ\\x04č\\tč\\x04Ď\\tĎ\\x04ď\\tď')\n buf.write('\\x04Đ\\tĐ\\x04đ\\tđ\\x04Ē\\tĒ\\x04ē')\n buf.write('\\tē\\x04Ĕ\\tĔ\\x04ĕ\\tĕ\\x04Ė\\tĖ')\n buf.write('\\x04ė\\tė\\x04Ę\\tĘ\\x04ę\\tę\\x04Ě')\n buf.write('\\tĚ\\x04ě\\tě\\x04Ĝ\\tĜ\\x04ĝ\\tĝ')\n buf.write('\\x04Ğ\\tĞ\\x04ğ\\tğ\\x04Ġ\\tĠ\\x04ġ')\n buf.write('\\tġ\\x04Ģ\\tĢ\\x04ģ\\tģ\\x04Ĥ\\tĤ')\n buf.write('\\x04ĥ\\tĥ\\x04Ħ\\tĦ\\x04ħ\\tħ\\x04Ĩ')\n buf.write('\\tĨ\\x04ĩ\\tĩ\\x04Ī\\tĪ\\x04ī\\tī')\n buf.write('\\x04Ĭ\\tĬ\\x04ĭ\\tĭ\\x04Į\\tĮ\\x04į')\n buf.write('\\tį\\x04İ\\tİ\\x04ı\\tı\\x04IJ\\tIJ')\n buf.write('\\x04ij\\tij\\x04Ĵ\\tĴ\\x04ĵ\\tĵ\\x04Ķ')\n buf.write('\\tĶ\\x04ķ\\tķ\\x04ĸ\\tĸ\\x04Ĺ\\tĹ')\n buf.write('\\x04ĺ\\tĺ\\x04Ļ\\tĻ\\x04ļ\\tļ\\x04Ľ')\n buf.write('\\tĽ\\x04ľ\\tľ\\x04Ŀ\\tĿ\\x04ŀ\\tŀ')\n buf.write('\\x04Ł\\tŁ\\x04ł\\tł\\x04Ń\\tŃ\\x04ń')\n buf.write('\\tń\\x04Ņ\\tŅ\\x04ņ\\tņ\\x04Ň\\tŇ')\n buf.write('\\x04ň\\tň\\x04ʼn\\tʼn\\x04Ŋ\\tŊ\\x04ŋ')\n buf.write('\\tŋ\\x04Ō\\tŌ\\x04ō\\tō\\x04Ŏ\\tŎ')\n buf.write('\\x04ŏ\\tŏ\\x04Ő\\tŐ\\x04ő\\tő\\x04Œ')\n buf.write('\\tŒ\\x04œ\\tœ\\x04Ŕ\\tŔ\\x04ŕ\\tŕ')\n buf.write('\\x04Ŗ\\tŖ\\x04ŗ\\tŗ\\x04Ř\\tŘ\\x04ř')\n buf.write('\\tř\\x04Ś\\tŚ\\x04ś\\tś\\x04Ŝ\\tŜ')\n buf.write('\\x04ŝ\\tŝ\\x04Ş\\tŞ\\x04ş\\tş\\x04Š')\n buf.write('\\tŠ\\x04š\\tš\\x04Ţ\\tŢ\\x04ţ\\tţ')\n buf.write('\\x04Ť\\tŤ\\x04ť\\tť\\x04Ŧ\\tŦ\\x04ŧ')\n buf.write('\\tŧ\\x04Ũ\\tŨ\\x04ũ\\tũ\\x04Ū\\tŪ')\n buf.write('\\x04ū\\tū\\x04Ŭ\\tŬ\\x04ŭ\\tŭ\\x04Ů')\n buf.write('\\tŮ\\x04ů\\tů\\x04Ű\\tŰ\\x04ű\\tű')\n buf.write('\\x04Ų\\tŲ\\x04ų\\tų\\x04Ŵ\\tŴ\\x04ŵ')\n buf.write('\\tŵ\\x04Ŷ\\tŶ\\x04ŷ\\tŷ\\x04Ÿ\\tŸ')\n 
buf.write('\\x04Ź\\tŹ\\x04ź\\tź\\x04Ż\\tŻ\\x04ż')\n buf.write('\\tż\\x04Ž\\tŽ\\x04ž\\tž\\x04ſ\\tſ')\n buf.write('\\x04ƀ\\tƀ\\x04Ɓ\\tƁ\\x04Ƃ\\tƂ\\x04ƃ')\n buf.write('\\tƃ\\x04Ƅ\\tƄ\\x04ƅ\\tƅ\\x04Ɔ\\tƆ')\n buf.write('\\x04Ƈ\\tƇ\\x04ƈ\\tƈ\\x04Ɖ\\tƉ\\x04Ɗ')\n buf.write('\\tƊ\\x04Ƌ\\tƋ\\x04ƌ\\tƌ\\x04ƍ\\tƍ')\n buf.write('\\x04Ǝ\\tƎ\\x04Ə\\tƏ\\x04Ɛ\\tƐ\\x04Ƒ')\n buf.write('\\tƑ\\x04ƒ\\tƒ\\x04Ɠ\\tƓ\\x04Ɣ\\tƔ')\n buf.write('\\x04ƕ\\tƕ\\x04Ɩ\\tƖ\\x04Ɨ\\tƗ\\x04Ƙ')\n buf.write('\\tƘ\\x04ƙ\\tƙ\\x04ƚ\\tƚ\\x04ƛ\\tƛ')\n buf.write('\\x04Ɯ\\tƜ\\x04Ɲ\\tƝ\\x04ƞ\\tƞ\\x04Ɵ')\n buf.write('\\tƟ\\x04Ơ\\tƠ\\x04ơ\\tơ\\x04Ƣ\\tƢ')\n buf.write('\\x04ƣ\\tƣ\\x04Ƥ\\tƤ\\x04ƥ\\tƥ\\x04Ʀ')\n buf.write('\\tƦ\\x04Ƨ\\tƧ\\x04ƨ\\tƨ\\x04Ʃ\\tƩ')\n buf.write('\\x04ƪ\\tƪ\\x04ƫ\\tƫ\\x04Ƭ\\tƬ\\x04ƭ')\n buf.write('\\tƭ\\x04Ʈ\\tƮ\\x04Ư\\tƯ\\x04ư\\tư')\n buf.write('\\x04Ʊ\\tƱ\\x04Ʋ\\tƲ\\x04Ƴ\\tƳ\\x04ƴ')\n buf.write('\\tƴ\\x04Ƶ\\tƵ\\x04ƶ\\tƶ\\x04Ʒ\\tƷ')\n buf.write('\\x04Ƹ\\tƸ\\x04ƹ\\tƹ\\x04ƺ\\tƺ\\x04ƻ')\n buf.write('\\tƻ\\x04Ƽ\\tƼ\\x04ƽ\\tƽ\\x04ƾ\\tƾ')\n buf.write('\\x04ƿ\\tƿ\\x04ǀ\\tǀ\\x04ǁ\\tǁ\\x04ǂ')\n buf.write('\\tǂ\\x04ǃ\\tǃ\\x04DŽ\\tDŽ\\x04Dž\\tDž')\n buf.write('\\x04dž\\tdž\\x04LJ\\tLJ\\x04Lj\\tLj\\x04lj')\n buf.write('\\tlj\\x04NJ\\tNJ\\x04Nj\\tNj\\x04nj\\tnj')\n buf.write('\\x04Ǎ\\tǍ\\x04ǎ\\tǎ\\x04Ǐ\\tǏ\\x04ǐ')\n buf.write('\\tǐ\\x04Ǒ\\tǑ\\x04ǒ\\tǒ\\x04Ǔ\\tǓ')\n buf.write('\\x04ǔ\\tǔ\\x04Ǖ\\tǕ\\x04ǖ\\tǖ\\x04Ǘ')\n buf.write('\\tǗ\\x04ǘ\\tǘ\\x04Ǚ\\tǙ\\x04ǚ\\tǚ')\n buf.write('\\x04Ǜ\\tǛ\\x04ǜ\\tǜ\\x04ǝ\\tǝ\\x04Ǟ')\n buf.write('\\tǞ\\x04ǟ\\tǟ\\x04Ǡ\\tǠ\\x04ǡ\\tǡ')\n buf.write('\\x04Ǣ\\tǢ\\x04ǣ\\tǣ\\x04Ǥ\\tǤ\\x04ǥ')\n buf.write('\\tǥ\\x04Ǧ\\tǦ\\x04ǧ\\tǧ\\x04Ǩ\\tǨ')\n buf.write('\\x04ǩ\\tǩ\\x04Ǫ\\tǪ\\x04ǫ\\tǫ\\x04Ǭ')\n buf.write('\\tǬ\\x04ǭ\\tǭ\\x04Ǯ\\tǮ\\x04ǯ\\tǯ')\n buf.write('\\x04ǰ\\tǰ\\x04DZ\\tDZ\\x04Dz\\tDz\\x04dz')\n buf.write('\\tdz\\x04Ǵ\\tǴ\\x04ǵ\\tǵ\\x04Ƕ\\tǶ')\n buf.write('\\x04Ƿ\\tǷ\\x04Ǹ\\tǸ\\x04ǹ\\tǹ\\x04Ǻ')\n buf.write('\\tǺ\\x04ǻ\\tǻ\\x04Ǽ\\tǼ\\x04ǽ\\tǽ')\n 
buf.write('\\x04Ǿ\\tǾ\\x04ǿ\\tǿ\\x04Ȁ\\tȀ\\x04ȁ')\n buf.write('\\tȁ\\x04Ȃ\\tȂ\\x04ȃ\\tȃ\\x04Ȅ\\tȄ')\n buf.write('\\x04ȅ\\tȅ\\x04Ȇ\\tȆ\\x04ȇ\\tȇ\\x04Ȉ')\n buf.write('\\tȈ\\x04ȉ\\tȉ\\x04Ȋ\\tȊ\\x04ȋ\\tȋ')\n buf.write('\\x04Ȍ\\tȌ\\x04ȍ\\tȍ\\x04Ȏ\\tȎ\\x04ȏ')\n buf.write('\\tȏ\\x04Ȑ\\tȐ\\x04ȑ\\tȑ\\x04Ȓ\\tȒ')\n buf.write('\\x04ȓ\\tȓ\\x04Ȕ\\tȔ\\x04ȕ\\tȕ\\x04Ȗ')\n buf.write('\\tȖ\\x04ȗ\\tȗ\\x04Ș\\tȘ\\x04ș\\tș')\n buf.write('\\x04Ț\\tȚ\\x04ț\\tț\\x04Ȝ\\tȜ\\x04ȝ')\n buf.write('\\tȝ\\x04Ȟ\\tȞ\\x04ȟ\\tȟ\\x04Ƞ\\tȠ')\n buf.write('\\x04ȡ\\tȡ\\x04Ȣ\\tȢ\\x04ȣ\\tȣ\\x04Ȥ')\n buf.write('\\tȤ\\x04ȥ\\tȥ\\x04Ȧ\\tȦ\\x04ȧ\\tȧ')\n buf.write('\\x04Ȩ\\tȨ\\x04ȩ\\tȩ\\x04Ȫ\\tȪ\\x04ȫ')\n buf.write('\\tȫ\\x04Ȭ\\tȬ\\x04ȭ\\tȭ\\x04Ȯ\\tȮ')\n buf.write('\\x04ȯ\\tȯ\\x04Ȱ\\tȰ\\x04ȱ\\tȱ\\x04Ȳ')\n buf.write('\\tȲ\\x04ȳ\\tȳ\\x04ȴ\\tȴ\\x03\\x02\\x03\\x02\\x03\\x02\\x03')\n buf.write(\n '\\x03\\x03\\x03\\x03\\x04\\x03\\x04\\x03\\x04\\x03\\x04\\x03\\x05\\x03\\x05\\x03\\x05\\x03\\x05\\x03\\x05\\x03\\x05\\x03\\x06\\x03\\x06'\n )\n buf.write(\n '\\x03\\x06\\x03\\x06\\x03\\x06\\x03\\x06\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03\\x07\\x03'\n )\n buf.write(\"\"\"\u0007\u0003\b\u0003\b\u0003\b\u0003\b\u0003\t\u0003\t\u0003\t\u0003\t\u0003\t\u0003\t\u0003\n\u0003\n\u0003\n\"\"\")\n buf.write(\"\"\"\u0003\n\u0003\n\u0003\n\u0003\n\u0003\n\u0003\u000b\u0003\u000b\u0003\u000b\u0003\u000b\u0003\f\u0003\f\u0003\f\u0003\"\"\")\n buf.write(\n '\\x0c\\x03\\r\\x03\\r\\x03\\r\\x03\\r\\x03\\r\\x03\\r\\x03\\x0e\\x03\\x0e\\x03\\x0e\\x03\\x0f\\x03\\x0f\\x03'\n )\n buf.write(\n '\\x0f\\x03\\x0f\\x03\\x0f\\x03\\x0f\\x03\\x0f\\x03\\x10\\x03\\x10\\x03\\x10\\x03\\x10\\x03\\x10\\x03\\x10'\n )\n buf.write(\n '\\x03\\x10\\x03\\x11\\x03\\x11\\x03\\x11\\x03\\x11\\x03\\x12\\x03\\x12\\x03\\x12\\x03\\x12\\x03\\x12\\x03\\x12'\n )\n buf.write(\n '\\x03\\x12\\x03\\x12\\x03\\x12\\x03\\x12\\x03\\x13\\x03\\x13\\x03\\x13\\x03\\x14\\x03\\x14\\x03\\x14\\x03\\x14'\n )\n buf.write(\n 
'\\x03\\x14\\x03\\x14\\x03\\x14\\x03\\x14\\x03\\x14\\x03\\x14\\x03\\x15\\x03\\x15\\x03\\x15\\x03\\x15\\x03\\x15'\n )\n buf.write(\n '\\x03\\x15\\x03\\x16\\x03\\x16\\x03\\x16\\x03\\x16\\x03\\x16\\x03\\x16\\x03\\x16\\x03\\x17\\x03\\x17\\x03\\x17'\n )\n buf.write(\n '\\x03\\x17\\x03\\x17\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18\\x03\\x18'\n )\n buf.write(\n '\\x03\\x18\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19'\n )\n buf.write(\n '\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19\\x03\\x19'\n )\n buf.write(\n '\\x03\\x19\\x03\\x19\\x03\\x1a\\x03\\x1a\\x03\\x1a\\x03\\x1a\\x03\\x1a\\x03\\x1a\\x03\\x1b\\x03\\x1b\\x03\\x1b'\n )\n buf.write(\n '\\x03\\x1b\\x03\\x1b\\x03\\x1b\\x03\\x1b\\x03\\x1c\\x03\\x1c\\x03\\x1c\\x03\\x1c\\x03\\x1c\\x03\\x1c\\x03\\x1d'\n )\n buf.write(\n '\\x03\\x1d\\x03\\x1d\\x03\\x1d\\x03\\x1d\\x03\\x1d\\x03\\x1d\\x03\\x1d\\x03\\x1e\\x03\\x1e\\x03\\x1e\\x03\\x1e'\n )\n buf.write(\n '\\x03\\x1e\\x03\\x1e\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f'\n )\n buf.write(\n '\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03\\x1f\\x03 \\x03 \\x03 \\x03 \\x03 \\x03 \\x03 \\x03 \\x03 \\x03'\n )\n buf.write(\n ' \\x03 \\x03 \\x03 \\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03!\\x03'\n )\n buf.write(\n '!\\x03\"\\x03\"\\x03\"\\x03\"\\x03\"\\x03#\\x03#\\x03#\\x03#\\x03#\\x03#\\x03$\\x03$\\x03$\\x03$\\x03'\n )\n buf.write(\n \"$\\x03%\\x03%\\x03%\\x03%\\x03%\\x03%\\x03%\\x03%\\x03&\\x03&\\x03&\\x03&\\x03&\\x03'\\x03'\\x03'\\x03\"\n )\n buf.write(\n \"'\\x03'\\x03'\\x03'\\x03'\\x03(\\x03(\\x03(\\x03(\\x03(\\x03)\\x03)\\x03)\\x03*\\x03*\\x03*\\x03\"\n )\n buf.write(\n '*\\x03*\\x03+\\x03+\\x03,\\x03,\\x03,\\x03,\\x03,\\x03,\\x03-\\x03-\\x03-\\x03-\\x03-\\x03.\\x03.\\x03.\\x03'\n )\n buf.write(\n 
'.\\x03.\\x03.\\x03.\\x03.\\x03.\\x03.\\x03/\\x03/\\x03/\\x03/\\x03/\\x03/\\x03/\\x03/\\x030\\x030'\n )\n buf.write('\\x030\\x030\\x030\\x031\\x031\\x031\\x031\\x031\\x032\\x032\\x032')\n buf.write('\\x032\\x032\\x033\\x033\\x033\\x033\\x033\\x033\\x033\\x033\\x034')\n buf.write('\\x034\\x034\\x034\\x034\\x034\\x034\\x034\\x034\\x034\\x035\\x035')\n buf.write('\\x035\\x035\\x035\\x035\\x036\\x036\\x036\\x036\\x037\\x037\\x037')\n buf.write(\n '\\x037\\x037\\x038\\x038\\x038\\x038\\x038\\x038\\x039\\x039\\x039\\x039\\x039\\x039\\x039\\x039\\x03'\n )\n buf.write(\n ':\\x03:\\x03:\\x03:\\x03:\\x03:\\x03:\\x03:\\x03;\\x03;\\x03;\\x03;\\x03;\\x03;\\x03;\\x03;\\x03<\\x03<\\x03'\n )\n buf.write(\n '<\\x03<\\x03<\\x03<\\x03<\\x03<\\x03=\\x03=\\x03=\\x03=\\x03=\\x03=\\x03=\\x03>\\x03>\\x03>\\x03>\\x03>\\x03'\n )\n buf.write(\n '>\\x03>\\x03>\\x03>\\x03>\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03?\\x03'\n )\n buf.write(\n '?\\x03@\\x03@\\x03@\\x03@\\x03@\\x03@\\x03@\\x03@\\x03A\\x03A\\x03A\\x03A\\x03A\\x03A\\x03A\\x03A\\x03A\\x03'\n )\n buf.write(\n 'B\\x03B\\x03B\\x03B\\x03B\\x03B\\x03B\\x03B\\x03C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03'\n )\n buf.write(\n 'C\\x03C\\x03C\\x03C\\x03C\\x03C\\x03D\\x03D\\x03D\\x03D\\x03D\\x03D\\x03D\\x03D\\x03D\\x03E\\x03E\\x03E\\x03'\n )\n buf.write(\n 'E\\x03E\\x03E\\x03E\\x03E\\x03E\\x03E\\x03E\\x03F\\x03F\\x03F\\x03F\\x03F\\x03F\\x03F\\x03F\\x03F\\x03F\\x03'\n )\n buf.write(\n 'F\\x03F\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03G\\x03H\\x03H\\x03H\\x03H\\x03'\n )\n buf.write(\n 'H\\x03H\\x03H\\x03H\\x03I\\x03I\\x03I\\x03I\\x03I\\x03I\\x03I\\x03I\\x03J\\x03J\\x03J\\x03J\\x03J\\x03J\\x03'\n )\n buf.write(\n 'J\\x03J\\x03J\\x03K\\x03K\\x03K\\x03K\\x03K\\x03K\\x03K\\x03K\\x03L\\x03L\\x03L\\x03L\\x03L\\x03L\\x03L\\x03'\n )\n buf.write(\n 'L\\x03L\\x03L\\x03L\\x03L\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03M\\x03'\n )\n buf.write(\n 
'M\\x03M\\x03M\\x03N\\x03N\\x03N\\x03N\\x03N\\x03O\\x03O\\x03O\\x03O\\x03O\\x03O\\x03P\\x03P\\x03P\\x03P\\x03'\n )\n buf.write(\n 'P\\x03P\\x03P\\x03Q\\x03Q\\x03Q\\x03Q\\x03Q\\x03Q\\x03R\\x03R\\x03R\\x03R\\x03R\\x03S\\x03S\\x03S\\x03S\\x03'\n )\n buf.write(\n 'S\\x03S\\x03S\\x03S\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03T\\x03U\\x03'\n )\n buf.write(\n 'U\\x03U\\x03U\\x03U\\x03U\\x03U\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03V\\x03'\n )\n buf.write(\n 'W\\x03W\\x03W\\x03W\\x03W\\x03W\\x03X\\x03X\\x03X\\x03X\\x03X\\x03Y\\x03Y\\x03Y\\x03Y\\x03Y\\x03Y\\x03Y\\x03'\n )\n buf.write(\n 'Y\\x03Y\\x03Z\\x03Z\\x03Z\\x03Z\\x03Z\\x03[\\x03[\\x03[\\x03[\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03'\n )\n buf.write(\n '\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03\\\\\\x03]\\x03]\\x03]\\x03]\\x03]'\n )\n buf.write(\n '\\x03]\\x03]\\x03]\\x03]\\x03]\\x03]\\x03^\\x03^\\x03^\\x03^\\x03_\\x03_\\x03_\\x03_\\x03_\\x03_\\x03`\\x03'\n )\n buf.write(\n '`\\x03`\\x03`\\x03a\\x03a\\x03a\\x03a\\x03a\\x03a\\x03a\\x03a\\x03b\\x03b\\x03b\\x03b\\x03b\\x03b\\x03b\\x03'\n )\n buf.write(\n 'b\\x03c\\x03c\\x03c\\x03c\\x03c\\x03c\\x03c\\x03c\\x03c\\x03c\\x03d\\x03d\\x03d\\x03d\\x03d\\x03d\\x03d\\x03'\n )\n buf.write(\n 'd\\x03d\\x03d\\x03e\\x03e\\x03e\\x03e\\x03e\\x03e\\x03e\\x03e\\x03f\\x03f\\x03f\\x03f\\x03f\\x03f\\x03f\\x03'\n )\n buf.write(\n 'f\\x03f\\x03g\\x03g\\x03g\\x03g\\x03g\\x03g\\x03g\\x03g\\x03g\\x03h\\x03h\\x03h\\x03h\\x03h\\x03h\\x03h\\x03'\n )\n buf.write(\n 'h\\x03i\\x03i\\x03i\\x03i\\x03i\\x03i\\x03i\\x03j\\x03j\\x03j\\x03j\\x03j\\x03j\\x03k\\x03k\\x03k\\x03k\\x03'\n )\n buf.write(\n 'k\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03l\\x03m\\x03m\\x03m\\x03'\n )\n buf.write(\n 'm\\x03m\\x03m\\x03m\\x03m\\x03m\\x03m\\x03n\\x03n\\x03n\\x03n\\x03n\\x03n\\x03n\\x03n\\x03o\\x03o\\x03o\\x03'\n )\n buf.write(\n 
'o\\x03o\\x03o\\x03o\\x03o\\x03o\\x03o\\x03o\\x03o\\x03o\\x03p\\x03p\\x03p\\x03p\\x03p\\x03p\\x03p\\x03p\\x03'\n )\n buf.write(\n 'p\\x03q\\x03q\\x03q\\x03q\\x03q\\x03q\\x03q\\x03q\\x03q\\x03r\\x03r\\x03r\\x03r\\x03r\\x03r\\x03r\\x03s\\x03'\n )\n buf.write(\n 's\\x03s\\x03s\\x03s\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03'\n )\n buf.write(\n 't\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03t\\x03u\\x03u\\x03u\\x03u\\x03u\\x03v\\x03v\\x03'\n )\n buf.write(\n 'v\\x03v\\x03v\\x03v\\x03v\\x03v\\x03w\\x03w\\x03w\\x03w\\x03w\\x03x\\x03x\\x03x\\x03x\\x03x\\x03x\\x03y\\x03'\n )\n buf.write(\n 'y\\x03y\\x03y\\x03y\\x03y\\x03z\\x03z\\x03z\\x03z\\x03z\\x03z\\x03z\\x03{\\x03{\\x03{\\x03{\\x03{\\x03{\\x03'\n )\n buf.write(\n '{\\x03{\\x03{\\x03|\\x03|\\x03|\\x03|\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03}\\x03'\n )\n buf.write(\n '}\\x03}\\x03}\\x03}\\x03~\\x03~\\x03~\\x03~\\x03\\x7f\\x03\\x7f\\x03\\x7f\\x03\\x7f\\x03\\x7f\\x03'\n )\n buf.write(\n '\\x7f\\x03\\x7f\\x03\\x80\\x03\\x80\\x03\\x80\\x03\\x80\\x03\\x80\\x03\\x80')\n buf.write('\\x03\\x80\\x03\\x81\\x03\\x81\\x03\\x81\\x03\\x81\\x03\\x81\\x03\\x81')\n buf.write('\\x03\\x81\\x03\\x81\\x03\\x81\\x03\\x82\\x03\\x82\\x03\\x82\\x03\\x82')\n buf.write('\\x03\\x82\\x03\\x82\\x03\\x82\\x03\\x83\\x03\\x83\\x03\\x83\\x03\\x83')\n buf.write('\\x03\\x83\\x03\\x83\\x03\\x83\\x03\\x83\\x03\\x83\\x03\\x83\\x03\\x84')\n buf.write('\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84')\n buf.write('\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84\\x03\\x84')\n buf.write('\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x85')\n buf.write('\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x85\\x03\\x86\\x03\\x86\\x03\\x86')\n buf.write('\\x03\\x86\\x03\\x86\\x03\\x86\\x03\\x86\\x03\\x86\\x03\\x87\\x03\\x87')\n buf.write('\\x03\\x87\\x03\\x87\\x03\\x87\\x03\\x87\\x03\\x87\\x03\\x87\\x03\\x87')\n 
buf.write('\\x03\\x87\\x03\\x88\\x03\\x88\\x03\\x88\\x03\\x88\\x03\\x88\\x03\\x88')\n buf.write('\\x03\\x88\\x03\\x88\\x03\\x89\\x03\\x89\\x03\\x89\\x03\\x89\\x03\\x89')\n buf.write('\\x03\\x89\\x03\\x89\\x03\\x8a\\x03\\x8a\\x03\\x8a\\x03\\x8a\\x03\\x8a')\n buf.write('\\x03\\x8b\\x03\\x8b\\x03\\x8b\\x03\\x8b\\x03\\x8b\\x03\\x8b\\x03\\x8b')\n buf.write('\\x03\\x8b\\x03\\x8c\\x03\\x8c\\x03\\x8c\\x03\\x8c\\x03\\x8c\\x03\\x8c')\n buf.write('\\x03\\x8c\\x03\\x8c\\x03\\x8c\\x03\\x8d\\x03\\x8d\\x03\\x8d\\x03\\x8d')\n buf.write('\\x03\\x8d\\x03\\x8d\\x03\\x8d\\x03\\x8d\\x03\\x8e\\x03\\x8e\\x03\\x8e')\n buf.write('\\x03\\x8e\\x03\\x8e\\x03\\x8e\\x03\\x8e\\x03\\x8e\\x03\\x8f\\x03\\x8f')\n buf.write('\\x03\\x8f\\x03\\x8f\\x03\\x8f\\x03\\x8f\\x03\\x90\\x03\\x90\\x03\\x90')\n buf.write('\\x03\\x90\\x03\\x90\\x03\\x90\\x03\\x91\\x03\\x91\\x03\\x91\\x03\\x91')\n buf.write('\\x03\\x91\\x03\\x91\\x03\\x92\\x03\\x92\\x03\\x92\\x03\\x92\\x03\\x92')\n buf.write('\\x03\\x92\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93')\n buf.write('\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x93\\x03\\x94')\n buf.write('\\x03\\x94\\x03\\x94\\x03\\x94\\x03\\x94\\x03\\x94\\x03\\x95\\x03\\x95')\n buf.write('\\x03\\x95\\x03\\x95\\x03\\x95\\x03\\x95\\x03\\x95\\x03\\x95\\x03\\x95')\n buf.write('\\x03\\x95\\x03\\x96\\x03\\x96\\x03\\x96\\x03\\x96\\x03\\x96\\x03\\x96')\n buf.write('\\x03\\x96\\x03\\x96\\x03\\x97\\x03\\x97\\x03\\x97\\x03\\x97\\x03\\x98')\n buf.write('\\x03\\x98\\x03\\x98\\x03\\x98\\x03\\x98\\x03\\x98\\x03\\x98\\x03\\x99')\n buf.write('\\x03\\x99\\x03\\x99\\x03\\x99\\x03\\x99\\x03\\x99\\x03\\x9a\\x03\\x9a')\n buf.write('\\x03\\x9a\\x03\\x9a\\x03\\x9a\\x03\\x9b\\x03\\x9b\\x03\\x9b\\x03\\x9b')\n buf.write('\\x03\\x9b\\x03\\x9c\\x03\\x9c\\x03\\x9c\\x03\\x9c\\x03\\x9c\\x03\\x9c')\n buf.write('\\x03\\x9c\\x03\\x9c\\x03\\x9c\\x03\\x9d\\x03\\x9d\\x03\\x9d\\x03\\x9d')\n buf.write('\\x03\\x9d\\x03\\x9e\\x03\\x9e\\x03\\x9e\\x03\\x9e\\x03\\x9e\\x03\\x9e')\n 
buf.write('\\x03\\x9f\\x03\\x9f\\x03\\x9f\\x03\\x9f\\x03\\x9f\\x03\\x9f\\x03\\xa0')\n buf.write('\\x03\\xa0\\x03\\xa0\\x03\\xa0\\x03\\xa0\\x03\\xa0\\x03\\xa0\\x03\\xa0')\n buf.write('\\x03\\xa0\\x03¡\\x03¡\\x03¡\\x03¡\\x03¡\\x03¢')\n buf.write('\\x03¢\\x03¢\\x03¢\\x03¢\\x03¢\\x03¢\\x03£')\n buf.write('\\x03£\\x03£\\x03£\\x03£\\x03¤\\x03¤\\x03¤')\n buf.write('\\x03¤\\x03¤\\x03¥\\x03¥\\x03¥\\x03¦\\x03¦')\n buf.write('\\x03¦\\x03¦\\x03¦\\x03¦\\x03¦\\x03§\\x03§')\n buf.write('\\x03§\\x03§\\x03§\\x03§\\x03§\\x03§\\x03§')\n buf.write('\\x03§\\x03¨\\x03¨\\x03¨\\x03©\\x03©\\x03©')\n buf.write('\\x03©\\x03©\\x03©\\x03©\\x03©\\x03ª\\x03ª')\n buf.write('\\x03ª\\x03ª\\x03ª\\x03ª\\x03ª\\x03ª\\x03ª')\n buf.write('\\x03ª\\x03«\\x03«\\x03«\\x03«\\x03«\\x03«')\n buf.write('\\x03«\\x03«\\x03«\\x03«\\x03¬\\x03¬\\x03¬')\n buf.write('\\x03¬\\x03¬\\x03¬\\x03¬\\x03\\xad\\x03\\xad\\x03\\xad')\n buf.write('\\x03\\xad\\x03\\xad\\x03\\xad\\x03®\\x03®\\x03®\\x03®')\n buf.write('\\x03®\\x03®\\x03®\\x03®\\x03¯\\x03¯\\x03¯')\n buf.write('\\x03¯\\x03¯\\x03¯\\x03¯\\x03¯\\x03¯\\x03¯')\n buf.write('\\x03°\\x03°\\x03°\\x03°\\x03°\\x03°\\x03°')\n buf.write('\\x03°\\x03±\\x03±\\x03±\\x03±\\x03±\\x03±')\n buf.write('\\x03±\\x03±\\x03±\\x03²\\x03²\\x03²\\x03²')\n buf.write('\\x03²\\x03²\\x03²\\x03³\\x03³\\x03³\\x03³')\n buf.write('\\x03³\\x03³\\x03´\\x03´\\x03´\\x03´\\x03´')\n buf.write('\\x03´\\x03µ\\x03µ\\x03µ\\x03µ\\x03µ\\x03µ')\n buf.write('\\x03µ\\x03¶\\x03¶\\x03¶\\x03¶\\x03¶\\x03¶')\n buf.write('\\x03¶\\x03¶\\x03¶\\x03¶\\x03¶\\x03¶\\x03¶')\n buf.write('\\x03·\\x03·\\x03·\\x03·\\x03·\\x03·\\x03·')\n buf.write('\\x03·\\x03¸\\x03¸\\x03¸\\x03¸\\x03¹\\x03¹')\n buf.write('\\x03¹\\x03¹\\x03¹\\x03¹\\x03¹\\x03¹\\x03º')\n buf.write('\\x03º\\x03º\\x03º\\x03º\\x03º\\x03º\\x03º')\n buf.write('\\x03º\\x03º\\x03»\\x03»\\x03»\\x03»\\x03»')\n buf.write('\\x03»\\x03»\\x03»\\x03»\\x03¼\\x03¼\\x03¼')\n buf.write('\\x03¼\\x03¼\\x03½\\x03½\\x03½\\x03½\\x03½')\n buf.write('\\x03½\\x03½\\x03½\\x03½\\x03½\\x03½\\x03¾')\n 
buf.write('\\x03¾\\x03¾\\x03¿\\x03¿\\x03¿\\x03¿\\x03¿')\n buf.write('\\x03¿\\x03¿\\x03¿\\x03¿\\x03¿\\x03À\\x03À')\n buf.write('\\x03À\\x03À\\x03À\\x03À\\x03À\\x03À\\x03Á')\n buf.write('\\x03Á\\x03Á\\x03Á\\x03Á\\x03Â\\x03Â\\x03Â')\n buf.write('\\x03Â\\x03Â\\x03Ã\\x03Ã\\x03Ã\\x03Ã\\x03Ã')\n buf.write('\\x03Ä\\x03Ä\\x03Ä\\x03Ä\\x03Ä\\x03Ä\\x03Ä')\n buf.write('\\x03Ä\\x03Ä\\x03Å\\x03Å\\x03Å\\x03Å\\x03Å')\n buf.write('\\x03Æ\\x03Æ\\x03Æ\\x03Æ\\x03Æ\\x03Æ\\x03Æ')\n buf.write('\\x03Æ\\x03Æ\\x03Æ\\x03Æ\\x03Ç\\x03Ç\\x03Ç')\n buf.write('\\x03Ç\\x03Ç\\x03Ç\\x03Ç\\x03Ç\\x03È\\x03È')\n buf.write('\\x03È\\x03È\\x03È\\x03É\\x03É\\x03É\\x03É')\n buf.write('\\x03É\\x03É\\x03Ê\\x03Ê\\x03Ê\\x03Ê\\x03Ê')\n buf.write('\\x03Ê\\x03Ê\\x03Ê\\x03Ë\\x03Ë\\x03Ë\\x03Ë')\n buf.write('\\x03Ë\\x03Ì\\x03Ì\\x03Ì\\x03Ì\\x03Ì\\x03Ì')\n buf.write('\\x03Í\\x03Í\\x03Í\\x03Í\\x03Í\\x03Í\\x03Î')\n buf.write('\\x03Î\\x03Î\\x03Î\\x03Î\\x03Î\\x03Ï\\x03Ï')\n buf.write('\\x03Ï\\x03Ï\\x03Ï\\x03Ï\\x03Ð\\x03Ð\\x03Ð')\n buf.write('\\x03Ð\\x03Ð\\x03Ð\\x03Ñ\\x03Ñ\\x03Ñ\\x03Ñ')\n buf.write('\\x03Ñ\\x03Ò\\x03Ò\\x03Ò\\x03Ò\\x03Ò\\x03Ò')\n buf.write('\\x03Ò\\x03Ó\\x03Ó\\x03Ó\\x03Ó\\x03Ô\\x03Ô')\n buf.write('\\x03Ô\\x03Ô\\x03Ô\\x03Ô\\x03Ô\\x03Õ\\x03Õ')\n buf.write('\\x03Õ\\x03Õ\\x03Õ\\x03Õ\\x03Ö\\x03Ö\\x03Ö')\n buf.write('\\x03Ö\\x03Ö\\x03×\\x03×\\x03×\\x03×\\x03×')\n buf.write('\\x03Ø\\x03Ø\\x03Ø\\x03Ø\\x03Ø\\x03Ù\\x03Ù')\n buf.write('\\x03Ù\\x03Ù\\x03Ú\\x03Ú\\x03Ú\\x03Ú\\x03Ú')\n buf.write('\\x03Ú\\x03Ú\\x03Ú\\x03Û\\x03Û\\x03Û\\x03Û')\n buf.write('\\x03Û\\x03Û\\x03Û\\x03Û\\x03Û\\x03Ü\\x03Ü')\n buf.write('\\x03Ü\\x03Ü\\x03Ü\\x03Ü\\x03Ü\\x03Ü\\x03Ü')\n buf.write('\\x03Ý\\x03Ý\\x03Ý\\x03Ý\\x03Ý\\x03Ý\\x03Ý')\n buf.write('\\x03Þ\\x03Þ\\x03Þ\\x03Þ\\x03Þ\\x03Þ\\x03ß')\n buf.write('\\x03ß\\x03ß\\x03ß\\x03ß\\x03ß\\x03à\\x03à')\n buf.write('\\x03à\\x03à\\x03à\\x03à\\x03à\\x03á\\x03á')\n buf.write('\\x03á\\x03á\\x03á\\x03á\\x03á\\x03á\\x03á')\n buf.write('\\x03â\\x03â\\x03â\\x03â\\x03â\\x03â\\x03â')\n 
buf.write('\\x03â\\x03â\\x03ã\\x03ã\\x03ã\\x03ã\\x03ã')\n buf.write('\\x03ä\\x03ä\\x03ä\\x03ä\\x03ä\\x03ä\\x03å')\n buf.write('\\x03å\\x03å\\x03å\\x03å\\x03å\\x03å\\x03æ')\n buf.write('\\x03æ\\x03æ\\x03æ\\x03æ\\x03æ\\x03ç\\x03ç')\n buf.write('\\x03ç\\x03ç\\x03ç\\x03ç\\x03ç\\x03ç\\x03ç')\n buf.write('\\x03è\\x03è\\x03è\\x03è\\x03è\\x03é\\x03é')\n buf.write('\\x03é\\x03é\\x03ê\\x03ê\\x03ê\\x03ê\\x03ê')\n buf.write('\\x03ê\\x03ê\\x03ê\\x03ë\\x03ë\\x03ë\\x03ë')\n buf.write('\\x03ë\\x03ë\\x03ë\\x03ë\\x03ë\\x03ì\\x03ì')\n buf.write('\\x03ì\\x03ì\\x03í\\x03í\\x03í\\x03í\\x03í')\n buf.write('\\x03í\\x03î\\x03î\\x03î\\x03î\\x03î\\x03î')\n buf.write('\\x03î\\x03î\\x03î\\x03ï\\x03ï\\x03ï\\x03ï')\n buf.write('\\x03ï\\x03ï\\x03ð\\x03ð\\x03ð\\x03ð\\x03ð')\n buf.write('\\x03ð\\x03ð\\x03ñ\\x03ñ\\x03ñ\\x03ñ\\x03ò')\n buf.write('\\x03ò\\x03ò\\x03ó\\x03ó\\x03ó\\x03ó\\x03ó')\n buf.write('\\x03ó\\x03ó\\x03ó\\x03ô\\x03ô\\x03ô\\x03ô')\n buf.write('\\x03ô\\x03ô\\x03ô\\x03ô\\x03õ\\x03õ\\x03õ')\n buf.write('\\x03õ\\x03õ\\x03õ\\x03õ\\x03ö\\x03ö\\x03ö')\n buf.write('\\x03ö\\x03ö\\x03ö\\x03ö\\x03ö\\x03÷\\x03÷')\n buf.write('\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷')\n buf.write('\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷\\x03÷')\n buf.write('\\x03÷\\x03ø\\x03ø\\x03ø\\x03ø\\x03ø\\x03ø')\n buf.write('\\x03ø\\x03ø\\x03ø\\x03ø\\x03ø\\x03ù\\x03ù')\n buf.write('\\x03ù\\x03ù\\x03ù\\x03ù\\x03ù\\x03ù\\x03ù')\n buf.write('\\x03ù\\x03ù\\x03ú\\x03ú\\x03ú\\x03ú\\x03ú')\n buf.write('\\x03û\\x03û\\x03û\\x03û\\x03û\\x03û\\x03û')\n buf.write('\\x03û\\x03ü\\x03ü\\x03ü\\x03ü\\x03ü\\x03ü')\n buf.write('\\x03ü\\x03ü\\x03ü\\x03ü\\x03ü\\x03ü\\x03ü')\n buf.write('\\x03ü\\x03ý\\x03ý\\x03ý\\x03ý\\x03þ\\x03þ')\n buf.write('\\x03þ\\x03þ\\x03þ\\x03þ\\x03þ\\x03ÿ\\x03ÿ')\n buf.write('\\x03ÿ\\x03ÿ\\x03ÿ\\x03Ā\\x03Ā\\x03Ā\\x03Ā')\n buf.write('\\x03Ā\\x03Ā\\x03ā\\x03ā\\x03ā\\x03ā\\x03ā')\n buf.write('\\x03ā\\x03ā\\x03Ă\\x03Ă\\x03Ă\\x03Ă\\x03Ă')\n buf.write('\\x03Ă\\x03Ă\\x03Ă\\x03ă\\x03ă\\x03ă\\x03ă')\n 
buf.write('\\x03ă\\x03ă\\x03ă\\x03ă\\x03ă\\x03ă\\x03Ą')\n buf.write('\\x03Ą\\x03Ą\\x03Ą\\x03Ą\\x03Ą\\x03Ą\\x03ą')\n buf.write('\\x03ą\\x03ą\\x03Ć\\x03Ć\\x03Ć\\x03Ć\\x03ć')\n buf.write('\\x03ć\\x03ć\\x03ć\\x03Ĉ\\x03Ĉ\\x03Ĉ\\x03Ĉ')\n buf.write('\\x03ĉ\\x03ĉ\\x03ĉ\\x03Ċ\\x03Ċ\\x03Ċ\\x03Ċ')\n buf.write('\\x03Ċ\\x03ċ\\x03ċ\\x03ċ\\x03ċ\\x03ċ\\x03Č')\n buf.write('\\x03Č\\x03Č\\x03Č\\x03Č\\x03Č\\x03Č\\x03č')\n buf.write('\\x03č\\x03č\\x03Ď\\x03Ď\\x03Ď\\x03Ď\\x03Ď')\n buf.write('\\x03Ď\\x03Ď\\x03Ď\\x03ď\\x03ď\\x03ď\\x03ď')\n buf.write('\\x03ď\\x03ď\\x03Đ\\x03Đ\\x03Đ\\x03Đ\\x03Đ')\n buf.write('\\x03Đ\\x03Đ\\x03Đ\\x03Đ\\x03Đ\\x03Đ\\x03đ')\n buf.write('\\x03đ\\x03đ\\x03đ\\x03đ\\x03đ\\x03đ\\x03đ')\n buf.write('\\x03Ē\\x03Ē\\x03Ē\\x03Ē\\x03ē\\x03ē\\x03ē')\n buf.write('\\x03ē\\x03ē\\x03ē\\x03Ĕ\\x03Ĕ\\x03Ĕ\\x03Ĕ')\n buf.write('\\x03Ĕ\\x03ĕ\\x03ĕ\\x03ĕ\\x03ĕ\\x03ĕ\\x03ĕ')\n buf.write('\\x03ĕ\\x03ĕ\\x03ĕ\\x03ĕ\\x03ĕ\\x03Ė\\x03Ė')\n buf.write('\\x03Ė\\x03Ė\\x03Ė\\x03Ė\\x03Ė\\x03Ė\\x03ė')\n buf.write('\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė')\n buf.write('\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė\\x03ė')\n buf.write('\\x03ė\\x03Ę\\x03Ę\\x03Ę\\x03Ę\\x03Ę\\x03Ę')\n buf.write('\\x03Ę\\x03Ę\\x03Ę\\x03Ę\\x03Ę\\x03ę\\x03ę')\n buf.write('\\x03ę\\x03ę\\x03ę\\x03ę\\x03ę\\x03Ě\\x03Ě')\n buf.write('\\x03Ě\\x03Ě\\x03Ě\\x03Ě\\x03Ě\\x03Ě\\x03Ě')\n buf.write('\\x03Ě\\x03ě\\x03ě\\x03ě\\x03ě\\x03ě\\x03ě')\n buf.write('\\x03ě\\x03ě\\x03Ĝ\\x03Ĝ\\x03Ĝ\\x03Ĝ\\x03Ĝ')\n buf.write('\\x03ĝ\\x03ĝ\\x03ĝ\\x03ĝ\\x03ĝ\\x03ĝ\\x03ĝ')\n buf.write('\\x03ĝ\\x03ĝ\\x03Ğ\\x03Ğ\\x03Ğ\\x03Ğ\\x03Ğ')\n buf.write('\\x03Ğ\\x03ğ\\x03ğ\\x03ğ\\x03ğ\\x03ğ\\x03ğ')\n buf.write('\\x03ğ\\x03ğ\\x03ğ\\x03ğ\\x03Ġ\\x03Ġ\\x03Ġ')\n buf.write('\\x03Ġ\\x03Ġ\\x03Ġ\\x03ġ\\x03ġ\\x03ġ\\x03ġ')\n buf.write('\\x03ġ\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ')\n buf.write('\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ\\x03Ģ\\x03ģ')\n buf.write('\\x03ģ\\x03ģ\\x03ģ\\x03ģ\\x03ģ\\x03ģ\\x03ģ')\n buf.write('\\x03ģ\\x03Ĥ\\x03Ĥ\\x03Ĥ\\x03Ĥ\\x03Ĥ\\x03Ĥ')\n 
buf.write('\\x03Ĥ\\x03Ĥ\\x03Ĥ\\x03Ĥ\\x03ĥ\\x03ĥ\\x03ĥ')\n buf.write('\\x03ĥ\\x03ĥ\\x03ĥ\\x03ĥ\\x03Ħ\\x03Ħ\\x03Ħ')\n buf.write('\\x03Ħ\\x03Ħ\\x03Ħ\\x03Ħ\\x03Ħ\\x03Ħ\\x03Ħ')\n buf.write('\\x03ħ\\x03ħ\\x03ħ\\x03ħ\\x03ħ\\x03ħ\\x03ħ')\n buf.write('\\x03ħ\\x03ħ\\x03ħ\\x03Ĩ\\x03Ĩ\\x03Ĩ\\x03Ĩ')\n buf.write('\\x03Ĩ\\x03Ĩ\\x03Ĩ\\x03Ĩ\\x03ĩ\\x03ĩ\\x03ĩ')\n buf.write('\\x03ĩ\\x03ĩ\\x03ĩ\\x03Ī\\x03Ī\\x03Ī\\x03Ī')\n buf.write('\\x03Ī\\x03Ī\\x03Ī\\x03Ī\\x03Ī\\x03Ī\\x03ī')\n buf.write('\\x03ī\\x03ī\\x03ī\\x03ī\\x03ī\\x03Ĭ\\x03Ĭ')\n buf.write('\\x03Ĭ\\x03Ĭ\\x03Ĭ\\x03Ĭ\\x03ĭ\\x03ĭ\\x03ĭ')\n buf.write('\\x03ĭ\\x03Į\\x03Į\\x03Į\\x03Į\\x03Į\\x03į')\n buf.write('\\x03į\\x03į\\x03į\\x03į\\x03İ\\x03İ\\x03İ')\n buf.write('\\x03İ\\x03İ\\x03İ\\x03İ\\x03ı\\x03ı\\x03ı')\n buf.write('\\x03ı\\x03IJ\\x03IJ\\x03IJ\\x03IJ\\x03IJ\\x03IJ')\n buf.write('\\x03IJ\\x03IJ\\x03IJ\\x03IJ\\x03ij\\x03ij\\x03ij')\n buf.write('\\x03ij\\x03ij\\x03ij\\x03ij\\x03ij\\x03ij\\x03ij')\n buf.write('\\x03ij\\x03ij\\x03Ĵ\\x03Ĵ\\x03Ĵ\\x03Ĵ\\x03Ĵ')\n buf.write('\\x03Ĵ\\x03Ĵ\\x03ĵ\\x03ĵ\\x03ĵ\\x03ĵ\\x03ĵ')\n buf.write('\\x03ĵ\\x03ĵ\\x03ĵ\\x03ĵ\\x03ĵ\\x03Ķ\\x03Ķ')\n buf.write('\\x03Ķ\\x03Ķ\\x03Ķ\\x03Ķ\\x03Ķ\\x03ķ\\x03ķ')\n buf.write('\\x03ķ\\x03ķ\\x03ķ\\x03ķ\\x03ķ\\x03ķ\\x03ĸ')\n buf.write('\\x03ĸ\\x03ĸ\\x03ĸ\\x03ĸ\\x03ĸ\\x03ĸ\\x03ĸ')\n buf.write('\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ')\n buf.write('\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ')\n buf.write('\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03Ĺ\\x03ĺ')\n buf.write('\\x03ĺ\\x03ĺ\\x03ĺ\\x03ĺ\\x03ĺ\\x03ĺ\\x03Ļ')\n buf.write('\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ')\n buf.write('\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ\\x03Ļ\\x03ļ\\x03ļ')\n buf.write('\\x03ļ\\x03ļ\\x03ļ\\x03ļ\\x03ļ\\x03Ľ\\x03Ľ')\n buf.write('\\x03Ľ\\x03Ľ\\x03Ľ\\x03Ľ\\x03Ľ\\x03Ľ\\x03Ľ')\n buf.write('\\x03Ľ\\x03ľ\\x03ľ\\x03ľ\\x03ľ\\x03ľ\\x03ľ')\n buf.write('\\x03Ŀ\\x03Ŀ\\x03Ŀ\\x03Ŀ\\x03Ŀ\\x03Ŀ\\x03Ŀ')\n buf.write('\\x03Ŀ\\x03ŀ\\x03ŀ\\x03ŀ\\x03ŀ\\x03ŀ\\x03ŀ')\n buf.write('\\x03ŀ\\x03Ł\\x03Ł\\x03Ł\\x03Ł\\x03Ł\\x03Ł')\n 
buf.write('\\x03ł\\x03ł\\x03ł\\x03ł\\x03ł\\x03ł\\x03ł')\n buf.write('\\x03ł\\x03ł\\x03Ń\\x03Ń\\x03Ń\\x03Ń\\x03Ń')\n buf.write('\\x03Ń\\x03Ń\\x03ń\\x03ń\\x03ń\\x03ń\\x03Ņ')\n buf.write('\\x03Ņ\\x03Ņ\\x03Ņ\\x03Ņ\\x03Ņ\\x03ņ\\x03ņ')\n buf.write('\\x03ņ\\x03ņ\\x03ņ\\x03Ň\\x03Ň\\x03Ň\\x03Ň')\n buf.write('\\x03Ň\\x03Ň\\x03ň\\x03ň\\x03ň\\x03ň\\x03ň')\n buf.write('\\x03ň\\x03ň\\x03ʼn\\x03ʼn\\x03ʼn\\x03ʼn\\x03ʼn')\n buf.write('\\x03Ŋ\\x03Ŋ\\x03Ŋ\\x03Ŋ\\x03Ŋ\\x03Ŋ\\x03Ŋ')\n buf.write('\\x03Ŋ\\x03Ŋ\\x03Ŋ\\x03ŋ\\x03ŋ\\x03ŋ\\x03ŋ')\n buf.write('\\x03ŋ\\x03ŋ\\x03ŋ\\x03Ō\\x03Ō\\x03Ō\\x03Ō')\n buf.write('\\x03Ō\\x03Ō\\x03Ō\\x03Ō\\x03Ō\\x03Ō\\x03Ō')\n buf.write('\\x03Ō\\x03ō\\x03ō\\x03ō\\x03ō\\x03Ŏ\\x03Ŏ')\n buf.write('\\x03Ŏ\\x03Ŏ\\x03Ŏ\\x03Ŏ\\x03Ŏ\\x03ŏ\\x03ŏ')\n buf.write('\\x03ŏ\\x03ŏ\\x03ŏ\\x03ŏ\\x03ŏ\\x03Ő\\x03Ő')\n buf.write('\\x03Ő\\x03Ő\\x03Ő\\x03ő\\x03ő\\x03ő\\x03ő')\n buf.write('\\x03ő\\x03ő\\x03ő\\x03ő\\x03Œ\\x03Œ\\x03Œ')\n buf.write('\\x03Œ\\x03Œ\\x03Œ\\x03Œ\\x03œ\\x03œ\\x03œ')\n buf.write('\\x03œ\\x03œ\\x03Ŕ\\x03Ŕ\\x03Ŕ\\x03Ŕ\\x03Ŕ')\n buf.write('\\x03Ŕ\\x03Ŕ\\x03Ŕ\\x03Ŕ\\x03ŕ\\x03ŕ\\x03ŕ')\n buf.write('\\x03ŕ\\x03ŕ\\x03ŕ\\x03ŕ\\x03ŕ\\x03ŕ\\x03ŕ')\n buf.write('\\x03ŕ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ')\n buf.write('\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ\\x03Ŗ')\n buf.write('\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ')\n buf.write('\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ')\n buf.write('\\x03ŗ\\x03ŗ\\x03ŗ\\x03ŗ\\x03Ř\\x03Ř\\x03Ř')\n buf.write('\\x03Ř\\x03Ř\\x03Ř\\x03Ř\\x03Ř\\x03Ř\\x03Ř')\n buf.write('\\x03Ř\\x03Ř\\x03ř\\x03ř\\x03ř\\x03ř\\x03ř')\n buf.write('\\x03ř\\x03ř\\x03ř\\x03ř\\x03ř\\x03ř\\x03ř')\n buf.write('\\x03ř\\x03ř\\x03ř\\x03ř\\x03Ś\\x03Ś\\x03Ś')\n buf.write('\\x03Ś\\x03ś\\x03ś\\x03ś\\x03ś\\x03ś\\x03Ŝ')\n buf.write('\\x03Ŝ\\x03Ŝ\\x03Ŝ\\x03Ŝ\\x03Ŝ\\x03Ŝ\\x03Ŝ')\n buf.write('\\x03Ŝ\\x03ŝ\\x03ŝ\\x03ŝ\\x03ŝ\\x03ŝ\\x03ŝ')\n buf.write('\\x03Ş\\x03Ş\\x03Ş\\x03Ş\\x03Ş\\x03ş\\x03ş')\n buf.write('\\x03ş\\x03ş\\x03ş\\x03ş\\x03ş\\x03ş\\x03ş')\n 
buf.write('\\x03Š\\x03Š\\x03Š\\x03Š\\x03Š\\x03Š\\x03Š')\n buf.write('\\x03Š\\x03Š\\x03š\\x03š\\x03š\\x03š\\x03š')\n buf.write('\\x03š\\x03š\\x03š\\x03š\\x03Ţ\\x03Ţ\\x03Ţ')\n buf.write('\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ')\n buf.write('\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ\\x03Ţ\\x03ţ\\x03ţ')\n buf.write('\\x03ţ\\x03ţ\\x03ţ\\x03ţ\\x03ţ\\x03Ť\\x03Ť')\n buf.write('\\x03Ť\\x03Ť\\x03Ť\\x03ť\\x03ť\\x03ť\\x03ť')\n buf.write('\\x03ť\\x03Ŧ\\x03Ŧ\\x03Ŧ\\x03Ŧ\\x03Ŧ\\x03Ŧ')\n buf.write('\\x03Ŧ\\x03Ŧ\\x03Ŧ\\x03ŧ\\x03ŧ\\x03ŧ\\x03ŧ')\n buf.write('\\x03ŧ\\x03ŧ\\x03ŧ\\x03ŧ\\x03ŧ\\x03Ũ\\x03Ũ')\n buf.write('\\x03Ũ\\x03Ũ\\x03Ũ\\x03ũ\\x03ũ\\x03ũ\\x03ũ')\n buf.write('\\x03ũ\\x03ũ\\x03ũ\\x03ũ\\x03ũ\\x03ũ\\x03ũ')\n buf.write('\\x03ũ\\x03ũ\\x03ũ\\x03Ū\\x03Ū\\x03Ū\\x03Ū')\n buf.write('\\x03Ū\\x03Ū\\x03Ū\\x03Ū\\x03ū\\x03ū\\x03ū')\n buf.write('\\x03ū\\x03ū\\x03ū\\x03ū\\x03ū\\x03ū\\x03Ŭ')\n buf.write('\\x03Ŭ\\x03Ŭ\\x03Ŭ\\x03Ŭ\\x03Ŭ\\x03Ŭ\\x03Ŭ')\n buf.write('\\x03Ŭ\\x03Ŭ\\x03Ŭ\\x03ŭ\\x03ŭ\\x03ŭ\\x03ŭ')\n buf.write('\\x03ŭ\\x03ŭ\\x03Ů\\x03Ů\\x03Ů\\x03Ů\\x03Ů')\n buf.write('\\x03Ů\\x03Ů\\x03Ů\\x03ů\\x03ů\\x03ů\\x03ů')\n buf.write('\\x03ů\\x03ů\\x03ů\\x03ů\\x03ů\\x03ů\\x03Ű')\n buf.write('\\x03Ű\\x03Ű\\x03Ű\\x03Ű\\x03Ű\\x03Ű\\x03Ű')\n buf.write('\\x03Ű\\x03Ű\\x03Ű\\x03Ű\\x03Ű\\x03ű\\x03ű')\n buf.write('\\x03ű\\x03ű\\x03ű\\x03ű\\x03ű\\x03Ų\\x03Ų')\n buf.write('\\x03Ų\\x03Ų\\x03Ų\\x03Ų\\x03Ų\\x03Ų\\x03Ų')\n buf.write('\\x03Ų\\x03Ų\\x03ų\\x03ų\\x03ų\\x03ų\\x03ų')\n buf.write('\\x03ų\\x03ų\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ')\n buf.write('\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ\\x03Ŵ')\n buf.write('\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ')\n buf.write('\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03ŵ\\x03Ŷ')\n buf.write('\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ')\n buf.write('\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03Ŷ\\x03ŷ')\n buf.write('\\x03ŷ\\x03ŷ\\x03ŷ\\x03ŷ\\x03ŷ\\x03ŷ\\x03ŷ')\n buf.write('\\x03Ÿ\\x03Ÿ\\x03Ÿ\\x03Ÿ\\x03Ÿ\\x03Ÿ\\x03Ÿ')\n buf.write('\\x03Ÿ\\x03Ź\\x03Ź\\x03Ź\\x03Ź\\x03Ź\\x03Ź')\n 
buf.write('\\x03Ź\\x03Ź\\x03ź\\x03ź\\x03ź\\x03ź\\x03ź')\n buf.write('\\x03ź\\x03Ż\\x03Ż\\x03Ż\\x03Ż\\x03ż\\x03ż')\n buf.write('\\x03ż\\x03ż\\x03ż\\x03Ž\\x03Ž\\x03Ž\\x03Ž')\n buf.write('\\x03Ž\\x03ž\\x03ž\\x03ž\\x03ž\\x03ž\\x03ž')\n buf.write('\\x03ž\\x03ž\\x03ž\\x03ž\\x03ſ\\x03ſ\\x03ſ')\n buf.write('\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ')\n buf.write('\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ')\n buf.write('\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ſ')\n buf.write('\\x03ſ\\x03ſ\\x03ſ\\x03ſ\\x03ƀ\\x03ƀ\\x03ƀ')\n buf.write('\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ')\n buf.write('\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ')\n buf.write('\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ\\x03ƀ')\n buf.write('\\x03ƀ\\x03ƀ\\x03ƀ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ')\n buf.write('\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ')\n buf.write('\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ')\n buf.write('\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ɓ\\x03Ƃ')\n buf.write('\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ')\n buf.write('\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03Ƃ\\x03ƃ')\n buf.write('\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ')\n buf.write('\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03ƃ\\x03Ƅ')\n buf.write('\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ')\n buf.write('\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ\\x03Ƅ')\n buf.write('\\x03Ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ')\n buf.write('\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ\\x03ƅ')\n buf.write('\\x03ƅ\\x03ƅ\\x03ƅ\\x03Ɔ\\x03Ɔ\\x03Ɔ\\x03Ƈ')\n buf.write('\\x03Ƈ\\x03Ƈ\\x03Ƈ\\x03Ƈ\\x03Ƈ\\x03Ƈ\\x03Ƈ')\n buf.write('\\x03Ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ')\n buf.write('\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03ƈ\\x03Ɖ')\n buf.write('\\x03Ɖ\\x03Ɖ\\x03Ɖ\\x03Ɖ\\x03Ɖ\\x03Ɖ\\x03Ɖ')\n buf.write('\\x03Ɖ\\x03Ɖ\\x03Ɗ\\x03Ɗ\\x03Ɗ\\x03Ɗ\\x03Ɗ')\n buf.write('\\x03Ɗ\\x03Ƌ\\x03Ƌ\\x03Ƌ\\x03Ƌ\\x03Ƌ\\x03Ƌ')\n buf.write('\\x03Ƌ\\x03Ƌ\\x03ƌ\\x03ƌ\\x03ƌ\\x03ƌ\\x03ƌ')\n buf.write('\\x03ƍ\\x03ƍ\\x03ƍ\\x03ƍ\\x03ƍ\\x03Ǝ\\x03Ǝ')\n buf.write('\\x03Ǝ\\x03Ǝ\\x03Ǝ\\x03Ǝ\\x03Ǝ\\x03Ǝ\\x03Ǝ')\n 
buf.write('\\x03Ə\\x03Ə\\x03Ə\\x03Ə\\x03Ə\\x03Ɛ\\x03Ɛ')\n buf.write('\\x03Ɛ\\x03Ɛ\\x03Ɛ\\x03Ɛ\\x03Ɛ\\x03Ɛ\\x03Ɛ')\n buf.write('\\x03Ɛ\\x03Ƒ\\x03Ƒ\\x03Ƒ\\x03Ƒ\\x03Ƒ\\x03Ƒ')\n buf.write('\\x03ƒ\\x03ƒ\\x03ƒ\\x03ƒ\\x03ƒ\\x03ƒ\\x03Ɠ')\n buf.write('\\x03Ɠ\\x03Ɠ\\x03Ɠ\\x03Ɠ\\x03Ɠ\\x03Ɠ\\x03Ɣ')\n buf.write('\\x03Ɣ\\x03Ɣ\\x03Ɣ\\x03Ɣ\\x03Ɣ\\x03Ɣ\\x03Ɣ')\n buf.write('\\x03Ɣ\\x03Ɣ\\x03ƕ\\x03ƕ\\x03ƕ\\x03ƕ\\x03ƕ')\n buf.write('\\x03ƕ\\x03ƕ\\x03ƕ\\x03Ɩ\\x03Ɩ\\x03Ɩ\\x03Ɩ')\n buf.write('\\x03Ɩ\\x03Ɩ\\x03Ɨ\\x03Ɨ\\x03Ɨ\\x03Ɨ\\x03Ɨ')\n buf.write('\\x03Ɨ\\x03Ɨ\\x03Ƙ\\x03Ƙ\\x03Ƙ\\x03Ƙ\\x03Ƙ')\n buf.write('\\x03Ƙ\\x03Ƙ\\x03Ƙ\\x03ƙ\\x03ƙ\\x03ƙ\\x03ƙ')\n buf.write('\\x03ƙ\\x03ƙ\\x03ƙ\\x03ƚ\\x03ƚ\\x03ƚ\\x03ƚ')\n buf.write('\\x03ƚ\\x03ƚ\\x03ƚ\\x03ƛ\\x03ƛ\\x03ƛ\\x03ƛ')\n buf.write('\\x03Ɯ\\x03Ɯ\\x03Ɯ\\x03Ɯ\\x03Ɯ\\x03Ɯ\\x03Ɲ')\n buf.write('\\x03Ɲ\\x03Ɲ\\x03Ɲ\\x03Ɲ\\x03Ɲ\\x03Ɲ\\x03Ɲ')\n buf.write('\\x03Ɲ\\x03ƞ\\x03ƞ\\x03ƞ\\x03ƞ\\x03ƞ\\x03ƞ')\n buf.write('\\x03Ɵ\\x03Ɵ\\x03Ɵ\\x03Ɵ\\x03Ɵ\\x03Ɵ\\x03Ɵ')\n buf.write('\\x03Ơ\\x03Ơ\\x03Ơ\\x03Ơ\\x03Ơ\\x03Ơ\\x03Ơ')\n buf.write('\\x03Ơ\\x03ơ\\x03ơ\\x03ơ\\x03ơ\\x03ơ\\x03ơ')\n buf.write('\\x03ơ\\x03ơ\\x03ơ\\x03Ƣ\\x03Ƣ\\x03Ƣ\\x03Ƣ')\n buf.write('\\x03Ƣ\\x03Ƣ\\x03Ƣ\\x03Ƣ\\x03Ƣ\\x03ƣ\\x03ƣ')\n buf.write('\\x03ƣ\\x03ƣ\\x03ƣ\\x03ƣ\\x03ƣ\\x03Ƥ\\x03Ƥ')\n buf.write('\\x03Ƥ\\x03Ƥ\\x03Ƥ\\x03Ƥ\\x03Ƥ\\x03Ƥ\\x03ƥ')\n buf.write('\\x03ƥ\\x03ƥ\\x03ƥ\\x03ƥ\\x03ƥ\\x03ƥ\\x03ƥ')\n buf.write('\\x03Ʀ\\x03Ʀ\\x03Ʀ\\x03Ʀ\\x03Ʀ\\x03Ʀ\\x03Ʀ')\n buf.write('\\x03Ʀ\\x03Ʀ\\x03Ƨ\\x03Ƨ\\x03Ƨ\\x03Ƨ\\x03Ƨ')\n buf.write('\\x03ƨ\\x03ƨ\\x03ƨ\\x03ƨ\\x03ƨ\\x03ƨ\\x03ƨ')\n buf.write('\\x03ƨ\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03Ʃ')\n buf.write('\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03Ʃ\\x03ƪ\\x03ƪ')\n buf.write('\\x03ƪ\\x03ƪ\\x03ƪ\\x03ƫ\\x03ƫ\\x03ƫ\\x03ƫ')\n buf.write('\\x03ƫ\\x03ƫ\\x03ƫ\\x03ƫ\\x03ƫ\\x03Ƭ\\x03Ƭ')\n buf.write('\\x03Ƭ\\x03Ƭ\\x03Ƭ\\x03Ƭ\\x03ƭ\\x03ƭ\\x03ƭ')\n buf.write('\\x03ƭ\\x03ƭ\\x03ƭ\\x03Ʈ\\x03Ʈ\\x03Ʈ\\x03Ʈ')\n buf.write('\\x03Ʈ\\x03Ư\\x03Ư\\x03Ư\\x03Ư\\x03Ư\\x03Ư')\n 
buf.write('\\x03Ư\\x03ư\\x03ư\\x03ư\\x03ư\\x03ư\\x03Ʊ')\n buf.write('\\x03Ʊ\\x03Ʊ\\x03Ʊ\\x03Ʊ\\x03Ʊ\\x03Ʋ\\x03Ʋ')\n buf.write('\\x03Ʋ\\x03Ʋ\\x03Ƴ\\x03Ƴ\\x03Ƴ\\x03Ƴ\\x03Ƴ')\n buf.write('\\x03Ƴ\\x03Ƴ\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ')\n buf.write('\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ\\x03ƴ')\n buf.write('\\x03ƴ\\x03ƴ\\x03Ƶ\\x03Ƶ\\x03Ƶ\\x03Ƶ\\x03Ƶ')\n buf.write('\\x03Ƶ\\x03Ƶ\\x03Ƶ\\x03ƶ\\x03ƶ\\x03ƶ\\x03ƶ')\n buf.write('\\x03ƶ\\x03ƶ\\x03ƶ\\x03ƶ\\x03ƶ\\x03ƶ\\x03ƶ')\n buf.write('\\x03ƶ\\x03ƶ\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ʒ')\n buf.write('\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ʒ\\x03Ƹ')\n buf.write('\\x03Ƹ\\x03Ƹ\\x03Ƹ\\x03Ƹ\\x03Ƹ\\x03Ƹ\\x03Ƹ')\n buf.write('\\x03Ƹ\\x03Ƹ\\x03ƹ\\x03ƹ\\x03ƹ\\x03ƹ\\x03ƹ')\n buf.write('\\x03ƹ\\x03ƹ\\x03ƹ\\x03ƹ\\x03ƹ\\x03ƺ\\x03ƺ')\n buf.write('\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ')\n buf.write('\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƺ\\x03ƻ\\x03ƻ')\n buf.write('\\x03ƻ\\x03ƻ\\x03ƻ\\x03ƻ\\x03ƻ\\x03ƻ\\x03ƻ')\n buf.write('\\x03Ƽ\\x03Ƽ\\x03Ƽ\\x03Ƽ\\x03Ƽ\\x03Ƽ\\x03ƽ')\n buf.write('\\x03ƽ\\x03ƽ\\x03ƽ\\x03ƽ\\x03ƽ\\x03ƽ\\x03ƽ')\n buf.write('\\x03ƽ\\x03ƾ\\x03ƾ\\x03ƾ\\x03ƾ\\x03ƾ\\x03ƾ')\n buf.write('\\x03ƾ\\x03ƾ\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ')\n buf.write('\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ\\x03ƿ')\n buf.write('\\x03ƿ\\x03ǀ\\x03ǀ\\x03ǀ\\x03ǀ\\x03ǀ\\x03ǀ')\n buf.write('\\x03ǀ\\x03ǀ\\x03ǀ\\x03ǁ\\x03ǁ\\x03ǁ\\x03ǁ')\n buf.write('\\x03ǁ\\x03ǂ\\x03ǂ\\x03ǂ\\x03ǂ\\x03ǃ\\x03ǃ')\n buf.write('\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ')\n buf.write('\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ')\n buf.write('\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ\\x03ǃ')\n buf.write('\\x03ǃ\\x03ǃ\\x03DŽ\\x03DŽ\\x03DŽ\\x03DŽ\\x03DŽ')\n buf.write('\\x03Dž\\x03Dž\\x03Dž\\x03Dž\\x03Dž\\x03Dž\\x03Dž')\n buf.write('\\x03Dž\\x03Dž\\x03Dž\\x03Dž\\x03dž\\x03dž\\x03dž')\n buf.write('\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž')\n buf.write('\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž\\x03dž')\n buf.write('\\x03dž\\x03LJ\\x03LJ\\x03LJ\\x03LJ\\x03LJ\\x03LJ')\n 
buf.write('\\x03LJ\\x03LJ\\x03LJ\\x03LJ\\x03LJ\\x03LJ\\x03LJ')\n buf.write('\\x03LJ\\x03LJ\\x03LJ\\x03Lj\\x03Lj\\x03Lj\\x03Lj')\n buf.write('\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj')\n buf.write('\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj\\x03Lj')\n buf.write('\\x03Lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj')\n buf.write('\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj')\n buf.write('\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj\\x03lj')\n buf.write('\\x03lj\\x03lj\\x03lj\\x03NJ\\x03NJ\\x03NJ\\x03NJ')\n buf.write('\\x03NJ\\x03NJ\\x03NJ\\x03NJ\\x03NJ\\x03NJ\\x03NJ')\n buf.write('\\x03NJ\\x03NJ\\x03NJ\\x03NJ\\x03Nj\\x03Nj\\x03Nj')\n buf.write('\\x03Nj\\x03Nj\\x03Nj\\x03Nj\\x03Nj\\x03Nj\\x03Nj')\n buf.write('\\x03nj\\x03nj\\x03nj\\x03nj\\x03nj\\x03nj\\x03nj')\n buf.write('\\x03nj\\x03nj\\x03nj\\x03nj\\x03Ǎ\\x03Ǎ\\x03Ǎ')\n buf.write('\\x03Ǎ\\x03Ǎ\\x03Ǎ\\x03Ǎ\\x03Ǎ\\x03ǎ\\x03ǎ')\n buf.write('\\x03ǎ\\x03ǎ\\x03ǎ\\x03ǎ\\x03ǎ\\x03ǎ\\x03ǎ')\n buf.write('\\x03ǎ\\x03ǎ\\x03ǎ\\x03ǎ\\x03Ǐ\\x03Ǐ\\x03Ǐ')\n buf.write('\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ')\n buf.write('\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03Ǐ\\x03ǐ')\n buf.write('\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ')\n buf.write('\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ\\x03ǐ')\n buf.write('\\x03ǐ\\x03Ǒ\\x03Ǒ\\x03Ǒ\\x03Ǒ\\x03Ǒ\\x03ǒ')\n buf.write('\\x03ǒ\\x03ǒ\\x03ǒ\\x03Ǔ\\x03Ǔ\\x03Ǔ\\x03Ǔ')\n buf.write('\\x03Ǔ\\x03ǔ\\x03ǔ\\x03ǔ\\x03ǔ\\x03Ǖ\\x03Ǖ')\n buf.write('\\x03Ǖ\\x03Ǖ\\x03Ǖ\\x03ǖ\\x03ǖ\\x03ǖ\\x03ǖ')\n buf.write('\\x03Ǘ\\x03Ǘ\\x03Ǘ\\x03Ǘ\\x03Ǘ\\x03Ǘ\\x03Ǘ')\n buf.write('\\x03ǘ\\x03ǘ\\x03ǘ\\x03ǘ\\x03Ǚ\\x03Ǚ\\x03Ǚ')\n buf.write('\\x03Ǚ\\x03Ǚ\\x03Ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ')\n buf.write('\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ')\n buf.write('\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03ǚ\\x03Ǜ\\x03Ǜ')\n buf.write('\\x03Ǜ\\x03Ǜ\\x03Ǜ\\x03Ǜ\\x03Ǜ\\x03Ǜ\\x03Ǜ')\n buf.write('\\x03Ǜ\\x03Ǜ\\x03ǜ\\x03ǜ\\x03ǜ\\x03ǜ\\x03ǝ')\n buf.write('\\x03ǝ\\x03ǝ\\x03ǝ\\x03ǝ\\x03ǝ\\x03ǝ\\x03ǝ')\n 
buf.write('\\x03ǝ\\x03Ǟ\\x03Ǟ\\x03Ǟ\\x03Ǟ\\x03Ǟ\\x03Ǟ')\n buf.write('\\x03ǟ\\x03ǟ\\x03ǟ\\x03ǟ\\x03ǟ\\x03ǟ\\x03ǟ')\n buf.write('\\x03Ǡ\\x03Ǡ\\x03Ǡ\\x03Ǡ\\x03Ǡ\\x03ǡ\\x03ǡ')\n buf.write('\\x03ǡ\\x03ǡ\\x03ǡ\\x03ǡ\\x03ǡ\\x03Ǣ\\x03Ǣ')\n buf.write('\\x03Ǣ\\x03Ǣ\\x03Ǣ\\x03Ǣ\\x07Ǣ፨\\nǢ')\n buf.write('\\x0cǢ\\x0eǢ፫\\x0bǢ\\x03Ǣ\\x03Ǣ\\x03ǣ')\n buf.write('\\x03ǣ\\x03ǣ\\x07ǣ፲\\nǣ\\x0cǣ\\x0eǣ')\n buf.write('፵\\x0bǣ\\x03ǣ\\x06ǣ፸\\nǣ\\rǣ')\n buf.write('\\x0eǣ፹\\x03Ǥ\\x03Ǥ\\x03Ǥ\\x07Ǥ\\u137f')\n buf.write('\\nǤ\\x0cǤ\\x0eǤᎂ\\x0bǤ\\x03Ǥ\\x06Ǥ')\n buf.write('ᎅ\\nǤ\\rǤ\\x0eǤᎆ\\x03ǥ\\x03ǥ')\n buf.write('\\x03ǥ\\x03Ǧ\\x03Ǧ\\x03ǧ\\x03ǧ\\x03Ǩ\\x03Ǩ')\n buf.write('\\x03Ǩ\\x05Ǩ᎓\\nǨ\\x03Ǩ\\x03Ǩ\\x05Ǩ')\n buf.write('᎗\\nǨ\\x05Ǩ᎙\\nǨ\\x03Ǩ\\x03Ǩ\\x05')\n buf.write('Ǩ\\u139d\\nǨ\\x03ǩ\\x03ǩ\\x03ǩ\\x03ǩ\\x03')\n buf.write('ǩ\\x07ǩᎤ\\nǩ\\x0cǩ\\x0eǩᎧ\\x0b')\n buf.write('ǩ\\x03ǩ\\x03ǩ\\x03Ǫ\\x03Ǫ\\x03Ǫ\\x03Ǫ')\n buf.write('\\x03Ǫ\\x05ǪᎰ\\nǪ\\x03Ǫ\\x03Ǫ\\x03ǫ')\n buf.write('\\x03ǫ\\x03Ǭ\\x03Ǭ\\x03Ǭ\\x07ǬᎹ\\nǬ')\n buf.write('\\x0cǬ\\x0eǬᎼ\\x0bǬ\\x03Ǭ\\x03Ǭ\\x03Ǭ')\n buf.write('\\x03ǭ\\x03ǭ\\x03ǭ\\x07ǭᏄ\\nǭ\\x0cǭ')\n buf.write('\\x0eǭᏇ\\x0bǭ\\x03ǭ\\x03ǭ\\x03ǭ\\x03Ǯ')\n buf.write('\\x03Ǯ\\x03Ǯ\\x07ǮᏏ\\nǮ\\x0cǮ\\x0eǮ')\n buf.write('Ꮢ\\x0bǮ\\x03Ǯ\\x03Ǯ\\x03Ǯ\\x03ǯ\\x03ǯ')\n buf.write('\\x03ǯ\\x07ǯᏚ\\nǯ\\x0cǯ\\x0eǯᏝ')\n buf.write('\\x0bǯ\\x03ǯ\\x03ǯ\\x03ǯ\\x03ǰ\\x03ǰ\\x03DZ')\n buf.write('\\x03DZ\\x03DZ\\x03DZ\\x06DZᏨ\\nDZ\\rDZ')\n buf.write('\\x0eDZᏩ\\x03DZ\\x03DZ\\x03Dz\\x03Dz\\x03dz')\n buf.write('\\x03dz\\x03Ǵ\\x03Ǵ\\x03ǵ\\x03ǵ\\x03Ƕ\\x03Ƕ')\n buf.write('\\x03Ƕ\\x03Ƿ\\x03Ƿ\\x03Ǹ\\x03Ǹ\\x03ǹ\\x03ǹ')\n buf.write('\\x03Ǻ\\x03Ǻ\\x03ǻ\\x03ǻ\\x03Ǽ\\x03Ǽ\\x03ǽ')\n buf.write('\\x03ǽ\\x03ǽ\\x03Ǿ\\x03Ǿ\\x03Ǿ\\x03Ǿ\\x07Ǿ')\n buf.write('ᐌ\\nǾ\\x0cǾ\\x0eǾᐏ\\x0bǾ\\x03Ǿ')\n buf.write('\\x03Ǿ\\x03Ǿ\\x03Ǿ\\x03Ǿ\\x05Ǿᐖ\\nǾ')\n buf.write('\\x03ǿ\\x03ǿ\\x03Ȁ\\x03Ȁ\\x03ȁ\\x03ȁ\\x03ȁ')\n buf.write('\\x03Ȃ\\x03Ȃ\\x03ȃ\\x03ȃ\\x03ȃ\\x03Ȅ\\x03Ȅ')\n buf.write('\\x03Ȅ\\x03Ȅ\\x03Ȅ\\x03Ȅ\\x03Ȅ\\x03Ȅ\\x05Ȅ')\n 
buf.write('ᐬ\\nȄ\\x03ȅ\\x03ȅ\\x03Ȇ\\x03Ȇ\\x03ȇ')\n buf.write('\\x03ȇ\\x03Ȉ\\x03Ȉ\\x03ȉ\\x03ȉ\\x03Ȋ\\x03Ȋ')\n buf.write('\\x03Ȋ\\x03ȋ\\x03ȋ\\x03Ȍ\\x03Ȍ\\x03ȍ\\x03ȍ')\n buf.write('\\x03Ȏ\\x03Ȏ\\x03ȏ\\x03ȏ\\x03Ȑ\\x06Ȑᑆ')\n buf.write('\\nȐ\\rȐ\\x0eȐᑇ\\x03Ȑ\\x03Ȑ\\x03ȑ')\n buf.write('\\x03ȑ\\x03Ȓ\\x06Ȓᑏ\\nȒ\\rȒ\\x0eȒ')\n buf.write('ᑐ\\x03ȓ\\x07ȓᑔ\\nȓ\\x0cȓ\\x0eȓ')\n buf.write('ᑗ\\x0bȓ\\x03ȓ\\x05ȓᑚ\\nȓ\\x03ȓ')\n buf.write('\\x06ȓᑝ\\nȓ\\rȓ\\x0eȓᑞ\\x03Ȕ')\n buf.write('\\x03Ȕ\\x03Ȕ\\x03Ȕ\\x07Ȕᑥ\\nȔ\\x0cȔ')\n buf.write('\\x0eȔᑨ\\x0bȔ\\x03Ȕ\\x03Ȕ\\x05Ȕᑬ')\n buf.write('\\nȔ\\x03Ȕ\\x03Ȕ\\x03ȕ\\x03ȕ\\x03ȕ\\x03ȕ')\n buf.write('\\x07ȕᑴ\\nȕ\\x0cȕ\\x0eȕᑷ\\x0bȕ')\n buf.write('\\x03ȕ\\x03ȕ\\x03ȕ\\x03ȕ\\x03ȕ\\x03Ȗ\\x03Ȗ')\n buf.write('\\x03Ȗ\\x03Ȗ\\x03Ȗ\\x03Ȗ\\x03Ȗ\\x03Ȗ\\x03Ȗ')\n buf.write('\\x07Ȗᒇ\\nȖ\\x0cȖ\\x0eȖᒊ\\x0bȖ')\n buf.write('\\x03Ȗ\\x03Ȗ\\x05Ȗᒎ\\nȖ\\x03ȗ\\x05ȗ')\n buf.write('ᒑ\\nȗ\\x03ȗ\\x03ȗ\\x03Ș\\x03Ș\\x03ș')\n buf.write('\\x03ș\\x03ș\\x07șᒚ\\nș\\x0cș\\x0eș')\n buf.write('ᒝ\\x0bș\\x03Ț\\x03Ț\\x03Ț\\x03Ț\\x03Ț')\n buf.write('\\x03ț\\x03ț\\x03Ȝ\\x03Ȝ\\x03ȝ\\x03ȝ\\x03Ȟ')\n buf.write('\\x03Ȟ\\x03ȟ\\x03ȟ\\x03Ƞ\\x03Ƞ\\x03ȡ\\x03ȡ')\n buf.write('\\x03Ȣ\\x03Ȣ\\x03ȣ\\x03ȣ\\x03Ȥ\\x03Ȥ\\x03ȥ')\n buf.write('\\x03ȥ\\x03Ȧ\\x03Ȧ\\x03ȧ\\x03ȧ\\x03Ȩ\\x03Ȩ')\n buf.write('\\x03ȩ\\x03ȩ\\x03Ȫ\\x03Ȫ\\x03ȫ\\x03ȫ\\x03Ȭ')\n buf.write('\\x03Ȭ\\x03ȭ\\x03ȭ\\x03Ȯ\\x03Ȯ\\x03ȯ\\x03ȯ')\n buf.write('\\x03Ȱ\\x03Ȱ\\x03ȱ\\x03ȱ\\x03Ȳ\\x03Ȳ\\x03ȳ')\n buf.write('\\x03ȳ\\x03ȴ\\x03ȴ\\x07ᎺᏅᏐᏛᑵ')\n buf.write(\n '\\x02ȵ\\x03\\x03\\x05\\x04\\x07\\x05\\t\\x06\\x0b\\x07\\r\\x08\\x0f\\t\\x11\\n\\x13\\x0b\\x15\\x0c'\n )\n buf.write(\n \"\\x17\\r\\x19\\x0e\\x1b\\x0f\\x1d\\x10\\x1f\\x11!\\x12#\\x13%\\x14'\\x15)\\x16+\\x17\"\n )\n buf.write('-\\x18/\\x191\\x1a3\\x1b5\\x1c7\\x1d9\\x1e;\\x1f= ?!A\"C#E$G%')\n buf.write(\"I&K'M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i6k7\")\n buf.write('m8o9q:s;u<w=y>{?}@\\x7fA\\x81B\\x83C\\x85D\\x87E\\x89')\n buf.write('F\\x8bG\\x8dH\\x8fI\\x91J\\x93K\\x95L\\x97M\\x99')\n 
buf.write('N\\x9bO\\x9dP\\x9fQ¡R£S¥T§U©')\n buf.write('V«W\\xadX¯Y±Z³[µ\\\\·]¹')\n buf.write('^»_½`¿aÁbÃcÅdÇeÉ')\n buf.write('fËgÍhÏiÑjÓkÕl×mÙ')\n buf.write('nÛoÝpßqárãsåtçué')\n buf.write('vëwíxïyñzó{õ|÷}ù')\n buf.write('~û\\x7fý\\x80ÿ\\x81ā\\x82ă')\n buf.write('\\x83ą\\x84ć\\x85ĉ\\x86ċ\\x87')\n buf.write('č\\x88ď\\x89đ\\x8aē\\x8bĕ')\n buf.write('\\x8cė\\x8dę\\x8eě\\x8fĝ\\x90')\n buf.write('ğ\\x91ġ\\x92ģ\\x93ĥ\\x94ħ')\n buf.write('\\x95ĩ\\x96ī\\x97ĭ\\x98į\\x99')\n buf.write('ı\\x9aij\\x9bĵ\\x9cķ\\x9dĹ')\n buf.write('\\x9eĻ\\x9fĽ\\xa0Ŀ¡Ł¢')\n buf.write('Ń£Ņ¤Ň¥ʼn¦ŋ')\n buf.write('§ō¨ŏ©őªœ«')\n buf.write('ŕ¬ŗ\\xadř®ś¯ŝ')\n buf.write('°ş±š²ţ³ť´')\n buf.write('ŧµũ¶ū·ŭ¸ů')\n buf.write('¹űºų»ŵ¼ŷ½')\n buf.write('Ź¾Ż¿ŽÀſÁƁ')\n buf.write('ÂƃÃƅÄƇÅƉÆ')\n buf.write('ƋÇƍÈƏÉƑÊƓ')\n buf.write('ËƕÌƗÍƙÎƛÏ')\n buf.write('ƝÐƟÑơÒƣÓƥ')\n buf.write('ÔƧÕƩÖƫ×ƭØ')\n buf.write('ƯÙƱÚƳÛƵÜƷ')\n buf.write('ÝƹÞƻßƽàƿá')\n buf.write('ǁâǃãDžäLJålj')\n buf.write('æNjçǍèǏéǑê')\n buf.write('ǓëǕìǗíǙîǛ')\n buf.write('ïǝðǟñǡòǣó')\n buf.write('ǥôǧõǩöǫ÷ǭ')\n buf.write('øǯùDZúdzûǵü')\n buf.write('ǷýǹþǻÿǽĀǿ')\n buf.write('āȁĂȃăȅĄȇą')\n buf.write('ȉĆȋćȍĈȏĉȑ')\n buf.write('ĊȓċȕČȗčșĎ')\n buf.write('țďȝĐȟđȡĒȣ')\n buf.write('ēȥĔȧĕȩĖȫė')\n buf.write('ȭĘȯęȱĚȳěȵ')\n buf.write('ĜȷĝȹĞȻğȽĠ')\n buf.write('ȿġɁĢɃģɅĤɇ')\n buf.write('ĥɉĦɋħɍĨɏĩ')\n buf.write('ɑĪɓīɕĬɗĭə')\n buf.write('ĮɛįɝİɟıɡIJ')\n buf.write('ɣijɥĴɧĵɩĶɫ')\n buf.write('ķɭĸɯĹɱĺɳĻ')\n buf.write('ɵļɷĽɹľɻĿɽ')\n buf.write('ŀɿŁʁłʃŃʅń')\n buf.write('ʇŅʉņʋŇʍňʏ')\n buf.write('ʼnʑŊʓŋʕŌʗō')\n buf.write('ʙŎʛŏʝŐʟőʡ')\n buf.write('ŒʣœʥŔʧŕʩŖ')\n buf.write('ʫŗʭŘʯřʱŚʳ')\n buf.write('śʵŜʷŝʹŞʻş')\n buf.write('ʽŠʿšˁŢ˃ţ˅')\n buf.write('ŤˇťˉŦˋŧˍŨ')\n buf.write('ˏũˑŪ˓ū˕Ŭ˗')\n buf.write('ŭ˙ٲů˝Ű˟ű')\n buf.write('ˡŲˣų˥Ŵ˧ŵ˩')\n buf.write('Ŷ˫ŷ˭Ÿ˯Ź˱ź')\n buf.write('˳Ż˵ż˷Ž˹ž˻')\n buf.write('ſ˽ƀ˿Ɓ́Ƃ̃ƃ')\n buf.write('̅Ƅ̇ƅ̉Ɔ̋Ƈ̍')\n buf.write('ƈ̏Ɖ̑Ɗ̓Ƌ̕ƌ')\n buf.write('̗ƍ̙Ǝ̛Ə̝Ɛ̟')\n buf.write('Ƒ̡ƒ̣Ɠ̥Ɣ̧ƕ')\n buf.write('̩Ɩ̫Ɨ̭Ƙ̯ƙ̱')\n buf.write('ƚ̳ƛ̵Ɯ̷Ɲ̹ƞ')\n buf.write('̻Ɵ̽Ơ̿ớƢ̓')\n 
buf.write('ƣͅƤ͇ƥ͉Ʀ͋Ƨ')\n buf.write('͍ƨ͏Ʃ͑ƪ͓ƫ͕')\n buf.write('Ƭ͗ƭ͙Ʈ͛Ư͝ư')\n buf.write('͟Ʊ͡ƲͣƳͥƴͧ')\n buf.write('ƵͩƶͫƷͭƸͯƹ')\n buf.write('ͱƺͳƻ͵Ƽͷƽ\\u0379')\n buf.write('ƾͻƿͽǀͿǁ\\u0381ǂ')\n buf.write('\\u0383ǃ΅DŽ·DžΉdž\\u038b')\n buf.write('LJ\\u038dLjΏljΑNJΓNj')\n buf.write('ΕnjΗǍΙǎΛǏΝ')\n buf.write('ǐΟǑΡǒΣǓΥǔ')\n buf.write('ΧǕΩǖΫǗέǘί')\n buf.write('ǙαǚγǛεǜηǝ')\n buf.write('ιǞλǟνǠοǡρ')\n buf.write('ǢσǣυǤχǥωǦ')\n buf.write('ϋǧύǨϏǩϑǪϓ')\n buf.write('\\x02ϕ\\x02ϗ\\x02ϙ\\x02ϛ\\x02ϝ\\x02ϟ\\x02ϡ')\n buf.write('ǫϣǬϥǭϧǮϩǯ')\n buf.write('ϫǰϭDZϯDzϱdzϳ')\n buf.write('ǴϵǵϷǶϹǷϻǸ')\n buf.write('ϽǹϿǺЁǻЃǼЅ')\n buf.write('ǽЇǾЉǿЋȀЍȁ')\n buf.write('ЏȂБ\\x02ГȃЕȄЗȅ')\n buf.write('ЙȆЛȇНȈПȉС')\n buf.write('\\x02У\\x02Х\\x02ЧȊЩȋЫȌ')\n buf.write('Э\\x02Я\\x02бȍгȎе\\x02з')\n buf.write('\\x02й\\x02л\\x02н\\x02п\\x02с\\x02у\\x02х')\n buf.write('\\x02ч\\x02щ\\x02ы\\x02э\\x02я\\x02ё\\x02ѓ')\n buf.write('\\x02ѕ\\x02ї\\x02љ\\x02ћ\\x02ѝ\\x02џ\\x02ѡ')\n buf.write(\n \"\\x02ѣ\\x02ѥ\\x02ѧ\\x02\\x03\\x02'\\x05\\x02\\x0c\\x0c\\x0f\\x0f))\\x05\\x022\")\n buf.write(\n ';CHch\\x04\\x02GGgg\\x04\\x02--//\\t\\x02\\x0b\\x0c\\x0f\\x0f\"\"**>>]]}}\\x05\\x02\\x0c'\n )\n buf.write(\n '\\x0c\\x0f\\x0f$$\\x04\\x022;aa\\x05\\x02\\x0b\\x0c\\x0f\\x0f\"\"\\x04\\x02C\\\\c|\\x04\\x02\\x0c'\n )\n buf.write(\n '\\x0c\\x0f\\x0f\\x04\\x02\\x0b\\x0b\"\"\\x05\\x02%&2;aa\\x04\\x02CCcc\\x04\\x02DDdd\\x04\\x02'\n )\n buf.write(\n 'EEee\\x04\\x02FFff\\x04\\x02HHhh\\x04\\x02IIii\\x04\\x02JJjj\\x04\\x02KKkk\\x04\\x02LLll\\x04'\n )\n buf.write(\n '\\x02MMmm\\x04\\x02NNnn\\x04\\x02OOoo\\x04\\x02PPpp\\x04\\x02QQqq\\x04\\x02RRrr\\x04\\x02SSs'\n )\n buf.write(\n 's\\x04\\x02TTtt\\x04\\x02UUuu\\x04\\x02VVvv\\x04\\x02WWww\\x04\\x02XXxx\\x04\\x02YYyy\\x04\\x02'\n )\n buf.write(\n 'ZZzz\\x04\\x02[[{{\\x04\\x02\\\\\\\\||\\x02ᓝ\\x02\\x03\\x03\\x02\\x02\\x02\\x02\\x05\\x03\\x02\\x02\\x02'\n )\n buf.write(\n '\\x02\\x07\\x03\\x02\\x02\\x02\\x02\\t\\x03\\x02\\x02\\x02\\x02\\x0b\\x03\\x02\\x02\\x02\\x02\\r\\x03\\x02\\x02\\x02\\x02\\x0f'\n )\n 
buf.write(\n '\\x03\\x02\\x02\\x02\\x02\\x11\\x03\\x02\\x02\\x02\\x02\\x13\\x03\\x02\\x02\\x02\\x02\\x15\\x03\\x02\\x02\\x02\\x02\\x17\\x03'\n )\n buf.write(\n '\\x02\\x02\\x02\\x02\\x19\\x03\\x02\\x02\\x02\\x02\\x1b\\x03\\x02\\x02\\x02\\x02\\x1d\\x03\\x02\\x02\\x02\\x02\\x1f\\x03\\x02'\n )\n buf.write(\n \"\\x02\\x02\\x02!\\x03\\x02\\x02\\x02\\x02#\\x03\\x02\\x02\\x02\\x02%\\x03\\x02\\x02\\x02\\x02'\\x03\\x02\\x02\\x02\\x02)\\x03\"\n )\n buf.write(\n '\\x02\\x02\\x02\\x02+\\x03\\x02\\x02\\x02\\x02-\\x03\\x02\\x02\\x02\\x02/\\x03\\x02\\x02\\x02\\x021\\x03\\x02\\x02\\x02\\x02'\n )\n buf.write(\n '3\\x03\\x02\\x02\\x02\\x025\\x03\\x02\\x02\\x02\\x027\\x03\\x02\\x02\\x02\\x029\\x03\\x02\\x02\\x02\\x02;\\x03'\n )\n buf.write(\n '\\x02\\x02\\x02\\x02=\\x03\\x02\\x02\\x02\\x02?\\x03\\x02\\x02\\x02\\x02A\\x03\\x02\\x02\\x02\\x02C\\x03\\x02\\x02\\x02\\x02E'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02G\\x03\\x02\\x02\\x02\\x02I\\x03\\x02\\x02\\x02\\x02K\\x03\\x02\\x02\\x02\\x02M\\x03\\x02\\x02\\x02\\x02'\n )\n buf.write(\n 'O\\x03\\x02\\x02\\x02\\x02Q\\x03\\x02\\x02\\x02\\x02S\\x03\\x02\\x02\\x02\\x02U\\x03\\x02\\x02\\x02\\x02W\\x03\\x02\\x02\\x02'\n )\n buf.write(\n '\\x02Y\\x03\\x02\\x02\\x02\\x02[\\x03\\x02\\x02\\x02\\x02]\\x03\\x02\\x02\\x02\\x02_\\x03\\x02\\x02\\x02\\x02a\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02c\\x03\\x02\\x02\\x02\\x02e\\x03\\x02\\x02\\x02\\x02g\\x03\\x02\\x02\\x02\\x02i\\x03\\x02\\x02\\x02\\x02k\\x03\\x02'\n )\n buf.write(\n '\\x02\\x02\\x02m\\x03\\x02\\x02\\x02\\x02o\\x03\\x02\\x02\\x02\\x02q\\x03\\x02\\x02\\x02\\x02s\\x03\\x02\\x02\\x02\\x02u\\x03'\n )\n buf.write(\n '\\x02\\x02\\x02\\x02w\\x03\\x02\\x02\\x02\\x02y\\x03\\x02\\x02\\x02\\x02{\\x03\\x02\\x02\\x02\\x02}\\x03\\x02\\x02\\x02\\x02\\x7f'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02\\x81\\x03\\x02\\x02\\x02\\x02\\x83\\x03\\x02\\x02\\x02\\x02\\x85\\x03\\x02\\x02'\n )\n buf.write(\n 
'\\x02\\x02\\x87\\x03\\x02\\x02\\x02\\x02\\x89\\x03\\x02\\x02\\x02\\x02\\x8b\\x03\\x02\\x02\\x02\\x02\\x8d'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02\\x8f\\x03\\x02\\x02\\x02\\x02\\x91\\x03\\x02\\x02\\x02\\x02\\x93\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02\\x95\\x03\\x02\\x02\\x02\\x02\\x97\\x03\\x02\\x02\\x02\\x02\\x99\\x03\\x02\\x02\\x02\\x02\\x9b'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02\\x9d\\x03\\x02\\x02\\x02\\x02\\x9f\\x03\\x02\\x02\\x02\\x02¡\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02£\\x03\\x02\\x02\\x02\\x02¥\\x03\\x02\\x02\\x02\\x02§\\x03\\x02\\x02\\x02\\x02©'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02«\\x03\\x02\\x02\\x02\\x02\\xad\\x03\\x02\\x02\\x02\\x02¯\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02±\\x03\\x02\\x02\\x02\\x02³\\x03\\x02\\x02\\x02\\x02µ\\x03\\x02\\x02\\x02\\x02·'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02¹\\x03\\x02\\x02\\x02\\x02»\\x03\\x02\\x02\\x02\\x02½\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02¿\\x03\\x02\\x02\\x02\\x02Á\\x03\\x02\\x02\\x02\\x02Ã\\x03\\x02\\x02\\x02\\x02Å'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ç\\x03\\x02\\x02\\x02\\x02É\\x03\\x02\\x02\\x02\\x02Ë\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Í\\x03\\x02\\x02\\x02\\x02Ï\\x03\\x02\\x02\\x02\\x02Ñ\\x03\\x02\\x02\\x02\\x02Ó'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Õ\\x03\\x02\\x02\\x02\\x02×\\x03\\x02\\x02\\x02\\x02Ù\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Û\\x03\\x02\\x02\\x02\\x02Ý\\x03\\x02\\x02\\x02\\x02ß\\x03\\x02\\x02\\x02\\x02á'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ã\\x03\\x02\\x02\\x02\\x02å\\x03\\x02\\x02\\x02\\x02ç\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02é\\x03\\x02\\x02\\x02\\x02ë\\x03\\x02\\x02\\x02\\x02í\\x03\\x02\\x02\\x02\\x02ï'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ñ\\x03\\x02\\x02\\x02\\x02ó\\x03\\x02\\x02\\x02\\x02õ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02÷\\x03\\x02\\x02\\x02\\x02ù\\x03\\x02\\x02\\x02\\x02û\\x03\\x02\\x02\\x02\\x02ý'\n )\n buf.write(\n 
'\\x03\\x02\\x02\\x02\\x02ÿ\\x03\\x02\\x02\\x02\\x02ā\\x03\\x02\\x02\\x02\\x02ă\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ą\\x03\\x02\\x02\\x02\\x02ć\\x03\\x02\\x02\\x02\\x02ĉ\\x03\\x02\\x02\\x02\\x02ċ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02č\\x03\\x02\\x02\\x02\\x02ď\\x03\\x02\\x02\\x02\\x02đ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ē\\x03\\x02\\x02\\x02\\x02ĕ\\x03\\x02\\x02\\x02\\x02ė\\x03\\x02\\x02\\x02\\x02ę'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ě\\x03\\x02\\x02\\x02\\x02ĝ\\x03\\x02\\x02\\x02\\x02ğ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ġ\\x03\\x02\\x02\\x02\\x02ģ\\x03\\x02\\x02\\x02\\x02ĥ\\x03\\x02\\x02\\x02\\x02ħ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ĩ\\x03\\x02\\x02\\x02\\x02ī\\x03\\x02\\x02\\x02\\x02ĭ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02į\\x03\\x02\\x02\\x02\\x02ı\\x03\\x02\\x02\\x02\\x02ij\\x03\\x02\\x02\\x02\\x02ĵ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ķ\\x03\\x02\\x02\\x02\\x02Ĺ\\x03\\x02\\x02\\x02\\x02Ļ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ľ\\x03\\x02\\x02\\x02\\x02Ŀ\\x03\\x02\\x02\\x02\\x02Ł\\x03\\x02\\x02\\x02\\x02Ń'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ņ\\x03\\x02\\x02\\x02\\x02Ň\\x03\\x02\\x02\\x02\\x02ʼn\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ŋ\\x03\\x02\\x02\\x02\\x02ō\\x03\\x02\\x02\\x02\\x02ŏ\\x03\\x02\\x02\\x02\\x02ő'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02œ\\x03\\x02\\x02\\x02\\x02ŕ\\x03\\x02\\x02\\x02\\x02ŗ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ř\\x03\\x02\\x02\\x02\\x02ś\\x03\\x02\\x02\\x02\\x02ŝ\\x03\\x02\\x02\\x02\\x02ş'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02š\\x03\\x02\\x02\\x02\\x02ţ\\x03\\x02\\x02\\x02\\x02ť\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ŧ\\x03\\x02\\x02\\x02\\x02ũ\\x03\\x02\\x02\\x02\\x02ū\\x03\\x02\\x02\\x02\\x02ŭ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ů\\x03\\x02\\x02\\x02\\x02ű\\x03\\x02\\x02\\x02\\x02ų\\x03\\x02\\x02'\n )\n buf.write(\n 
'\\x02\\x02ŵ\\x03\\x02\\x02\\x02\\x02ŷ\\x03\\x02\\x02\\x02\\x02Ź\\x03\\x02\\x02\\x02\\x02Ż'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ž\\x03\\x02\\x02\\x02\\x02ſ\\x03\\x02\\x02\\x02\\x02Ɓ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ƃ\\x03\\x02\\x02\\x02\\x02ƅ\\x03\\x02\\x02\\x02\\x02Ƈ\\x03\\x02\\x02\\x02\\x02Ɖ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ƌ\\x03\\x02\\x02\\x02\\x02ƍ\\x03\\x02\\x02\\x02\\x02Ə\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ƒ\\x03\\x02\\x02\\x02\\x02Ɠ\\x03\\x02\\x02\\x02\\x02ƕ\\x03\\x02\\x02\\x02\\x02Ɨ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ƙ\\x03\\x02\\x02\\x02\\x02ƛ\\x03\\x02\\x02\\x02\\x02Ɲ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ɵ\\x03\\x02\\x02\\x02\\x02ơ\\x03\\x02\\x02\\x02\\x02ƣ\\x03\\x02\\x02\\x02\\x02ƥ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ƨ\\x03\\x02\\x02\\x02\\x02Ʃ\\x03\\x02\\x02\\x02\\x02ƫ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ƭ\\x03\\x02\\x02\\x02\\x02Ư\\x03\\x02\\x02\\x02\\x02Ʊ\\x03\\x02\\x02\\x02\\x02Ƴ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ƶ\\x03\\x02\\x02\\x02\\x02Ʒ\\x03\\x02\\x02\\x02\\x02ƹ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ƻ\\x03\\x02\\x02\\x02\\x02ƽ\\x03\\x02\\x02\\x02\\x02ƿ\\x03\\x02\\x02\\x02\\x02ǁ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ǃ\\x03\\x02\\x02\\x02\\x02Dž\\x03\\x02\\x02\\x02\\x02LJ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02lj\\x03\\x02\\x02\\x02\\x02Nj\\x03\\x02\\x02\\x02\\x02Ǎ\\x03\\x02\\x02\\x02\\x02Ǐ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ǒ\\x03\\x02\\x02\\x02\\x02Ǔ\\x03\\x02\\x02\\x02\\x02Ǖ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ǘ\\x03\\x02\\x02\\x02\\x02Ǚ\\x03\\x02\\x02\\x02\\x02Ǜ\\x03\\x02\\x02\\x02\\x02ǝ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ǟ\\x03\\x02\\x02\\x02\\x02ǡ\\x03\\x02\\x02\\x02\\x02ǣ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ǥ\\x03\\x02\\x02\\x02\\x02ǧ\\x03\\x02\\x02\\x02\\x02ǩ\\x03\\x02\\x02\\x02\\x02ǫ'\n )\n buf.write(\n 
'\\x03\\x02\\x02\\x02\\x02ǭ\\x03\\x02\\x02\\x02\\x02ǯ\\x03\\x02\\x02\\x02\\x02DZ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02dz\\x03\\x02\\x02\\x02\\x02ǵ\\x03\\x02\\x02\\x02\\x02Ƿ\\x03\\x02\\x02\\x02\\x02ǹ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ǻ\\x03\\x02\\x02\\x02\\x02ǽ\\x03\\x02\\x02\\x02\\x02ǿ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ȁ\\x03\\x02\\x02\\x02\\x02ȃ\\x03\\x02\\x02\\x02\\x02ȅ\\x03\\x02\\x02\\x02\\x02ȇ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ȉ\\x03\\x02\\x02\\x02\\x02ȋ\\x03\\x02\\x02\\x02\\x02ȍ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ȏ\\x03\\x02\\x02\\x02\\x02ȑ\\x03\\x02\\x02\\x02\\x02ȓ\\x03\\x02\\x02\\x02\\x02ȕ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ȗ\\x03\\x02\\x02\\x02\\x02ș\\x03\\x02\\x02\\x02\\x02ț\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ȝ\\x03\\x02\\x02\\x02\\x02ȟ\\x03\\x02\\x02\\x02\\x02ȡ\\x03\\x02\\x02\\x02\\x02ȣ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ȥ\\x03\\x02\\x02\\x02\\x02ȧ\\x03\\x02\\x02\\x02\\x02ȩ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ȫ\\x03\\x02\\x02\\x02\\x02ȭ\\x03\\x02\\x02\\x02\\x02ȯ\\x03\\x02\\x02\\x02\\x02ȱ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ȳ\\x03\\x02\\x02\\x02\\x02ȵ\\x03\\x02\\x02\\x02\\x02ȷ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ȹ\\x03\\x02\\x02\\x02\\x02Ȼ\\x03\\x02\\x02\\x02\\x02Ƚ\\x03\\x02\\x02\\x02\\x02ȿ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ɂ\\x03\\x02\\x02\\x02\\x02Ƀ\\x03\\x02\\x02\\x02\\x02Ʌ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ɇ\\x03\\x02\\x02\\x02\\x02ɉ\\x03\\x02\\x02\\x02\\x02ɋ\\x03\\x02\\x02\\x02\\x02ɍ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ɏ\\x03\\x02\\x02\\x02\\x02ɑ\\x03\\x02\\x02\\x02\\x02ɓ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ɕ\\x03\\x02\\x02\\x02\\x02ɗ\\x03\\x02\\x02\\x02\\x02ə\\x03\\x02\\x02\\x02\\x02ɛ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ɝ\\x03\\x02\\x02\\x02\\x02ɟ\\x03\\x02\\x02\\x02\\x02ɡ\\x03\\x02\\x02'\n )\n buf.write(\n 
'\\x02\\x02ɣ\\x03\\x02\\x02\\x02\\x02ɥ\\x03\\x02\\x02\\x02\\x02ɧ\\x03\\x02\\x02\\x02\\x02ɩ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ɫ\\x03\\x02\\x02\\x02\\x02ɭ\\x03\\x02\\x02\\x02\\x02ɯ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ɱ\\x03\\x02\\x02\\x02\\x02ɳ\\x03\\x02\\x02\\x02\\x02ɵ\\x03\\x02\\x02\\x02\\x02ɷ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ɹ\\x03\\x02\\x02\\x02\\x02ɻ\\x03\\x02\\x02\\x02\\x02ɽ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ɿ\\x03\\x02\\x02\\x02\\x02ʁ\\x03\\x02\\x02\\x02\\x02ʃ\\x03\\x02\\x02\\x02\\x02ʅ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ʇ\\x03\\x02\\x02\\x02\\x02ʉ\\x03\\x02\\x02\\x02\\x02ʋ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ʍ\\x03\\x02\\x02\\x02\\x02ʏ\\x03\\x02\\x02\\x02\\x02ʑ\\x03\\x02\\x02\\x02\\x02ʓ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ʕ\\x03\\x02\\x02\\x02\\x02ʗ\\x03\\x02\\x02\\x02\\x02ʙ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ʛ\\x03\\x02\\x02\\x02\\x02ʝ\\x03\\x02\\x02\\x02\\x02ʟ\\x03\\x02\\x02\\x02\\x02ʡ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ʣ\\x03\\x02\\x02\\x02\\x02ʥ\\x03\\x02\\x02\\x02\\x02ʧ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ʩ\\x03\\x02\\x02\\x02\\x02ʫ\\x03\\x02\\x02\\x02\\x02ʭ\\x03\\x02\\x02\\x02\\x02ʯ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ʱ\\x03\\x02\\x02\\x02\\x02ʳ\\x03\\x02\\x02\\x02\\x02ʵ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ʷ\\x03\\x02\\x02\\x02\\x02ʹ\\x03\\x02\\x02\\x02\\x02ʻ\\x03\\x02\\x02\\x02\\x02ʽ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ʿ\\x03\\x02\\x02\\x02\\x02ˁ\\x03\\x02\\x02\\x02\\x02˃\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02˅\\x03\\x02\\x02\\x02\\x02ˇ\\x03\\x02\\x02\\x02\\x02ˉ\\x03\\x02\\x02\\x02\\x02ˋ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ˍ\\x03\\x02\\x02\\x02\\x02ˏ\\x03\\x02\\x02\\x02\\x02ˑ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02˓\\x03\\x02\\x02\\x02\\x02˕\\x03\\x02\\x02\\x02\\x02˗\\x03\\x02\\x02\\x02\\x02˙'\n )\n buf.write(\n 
'\\x03\\x02\\x02\\x02\\x02˛\\x03\\x02\\x02\\x02\\x02˝\\x03\\x02\\x02\\x02\\x02˟\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ˡ\\x03\\x02\\x02\\x02\\x02ˣ\\x03\\x02\\x02\\x02\\x02˥\\x03\\x02\\x02\\x02\\x02˧'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02˩\\x03\\x02\\x02\\x02\\x02˫\\x03\\x02\\x02\\x02\\x02˭\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02˯\\x03\\x02\\x02\\x02\\x02˱\\x03\\x02\\x02\\x02\\x02˳\\x03\\x02\\x02\\x02\\x02˵'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02˷\\x03\\x02\\x02\\x02\\x02˹\\x03\\x02\\x02\\x02\\x02˻\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02˽\\x03\\x02\\x02\\x02\\x02˿\\x03\\x02\\x02\\x02\\x02́\\x03\\x02\\x02\\x02\\x02̃'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02̅\\x03\\x02\\x02\\x02\\x02̇\\x03\\x02\\x02\\x02\\x02̉\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02̋\\x03\\x02\\x02\\x02\\x02̍\\x03\\x02\\x02\\x02\\x02̏\\x03\\x02\\x02\\x02\\x02̑'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02̓\\x03\\x02\\x02\\x02\\x02̕\\x03\\x02\\x02\\x02\\x02̗\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02̙\\x03\\x02\\x02\\x02\\x02̛\\x03\\x02\\x02\\x02\\x02̝\\x03\\x02\\x02\\x02\\x02̟'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02̡\\x03\\x02\\x02\\x02\\x02̣\\x03\\x02\\x02\\x02\\x02̥\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02̧\\x03\\x02\\x02\\x02\\x02̩\\x03\\x02\\x02\\x02\\x02̫\\x03\\x02\\x02\\x02\\x02̭'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02̯\\x03\\x02\\x02\\x02\\x02̱\\x03\\x02\\x02\\x02\\x02̳\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02̵\\x03\\x02\\x02\\x02\\x02̷\\x03\\x02\\x02\\x02\\x02̹\\x03\\x02\\x02\\x02\\x02̻'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02̽\\x03\\x02\\x02\\x02\\x02̿\\x03\\x02\\x02\\x02\\x02́\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02̓\\x03\\x02\\x02\\x02\\x02ͅ\\x03\\x02\\x02\\x02\\x02͇\\x03\\x02\\x02\\x02\\x02͉'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02͋\\x03\\x02\\x02\\x02\\x02͍\\x03\\x02\\x02\\x02\\x02͏\\x03\\x02\\x02'\n )\n buf.write(\n 
'\\x02\\x02͑\\x03\\x02\\x02\\x02\\x02͓\\x03\\x02\\x02\\x02\\x02͕\\x03\\x02\\x02\\x02\\x02͗'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02͙\\x03\\x02\\x02\\x02\\x02͛\\x03\\x02\\x02\\x02\\x02͝\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02͟\\x03\\x02\\x02\\x02\\x02͡\\x03\\x02\\x02\\x02\\x02ͣ\\x03\\x02\\x02\\x02\\x02ͥ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ͧ\\x03\\x02\\x02\\x02\\x02ͩ\\x03\\x02\\x02\\x02\\x02ͫ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ͭ\\x03\\x02\\x02\\x02\\x02ͯ\\x03\\x02\\x02\\x02\\x02ͱ\\x03\\x02\\x02\\x02\\x02ͳ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02͵\\x03\\x02\\x02\\x02\\x02ͷ\\x03\\x02\\x02\\x02\\x02\\u0379\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ͻ\\x03\\x02\\x02\\x02\\x02ͽ\\x03\\x02\\x02\\x02\\x02Ϳ\\x03\\x02\\x02\\x02\\x02\\u0381'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02\\u0383\\x03\\x02\\x02\\x02\\x02΅\\x03\\x02\\x02\\x02\\x02·\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ή\\x03\\x02\\x02\\x02\\x02\\u038b\\x03\\x02\\x02\\x02\\x02\\u038d\\x03\\x02\\x02\\x02\\x02Ώ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Α\\x03\\x02\\x02\\x02\\x02Γ\\x03\\x02\\x02\\x02\\x02Ε\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Η\\x03\\x02\\x02\\x02\\x02Ι\\x03\\x02\\x02\\x02\\x02Λ\\x03\\x02\\x02\\x02\\x02Ν'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ο\\x03\\x02\\x02\\x02\\x02Ρ\\x03\\x02\\x02\\x02\\x02Σ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Υ\\x03\\x02\\x02\\x02\\x02Χ\\x03\\x02\\x02\\x02\\x02Ω\\x03\\x02\\x02\\x02\\x02Ϋ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02έ\\x03\\x02\\x02\\x02\\x02ί\\x03\\x02\\x02\\x02\\x02α\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02γ\\x03\\x02\\x02\\x02\\x02ε\\x03\\x02\\x02\\x02\\x02η\\x03\\x02\\x02\\x02\\x02ι'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02λ\\x03\\x02\\x02\\x02\\x02ν\\x03\\x02\\x02\\x02\\x02ο\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ρ\\x03\\x02\\x02\\x02\\x02σ\\x03\\x02\\x02\\x02\\x02υ\\x03\\x02\\x02\\x02\\x02χ'\n )\n buf.write(\n 
'\\x03\\x02\\x02\\x02\\x02ω\\x03\\x02\\x02\\x02\\x02ϋ\\x03\\x02\\x02\\x02\\x02ύ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ϗ\\x03\\x02\\x02\\x02\\x02ϑ\\x03\\x02\\x02\\x02\\x02ϓ\\x03\\x02\\x02\\x02\\x02ϡ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ϣ\\x03\\x02\\x02\\x02\\x02ϥ\\x03\\x02\\x02\\x02\\x02ϧ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02ϩ\\x03\\x02\\x02\\x02\\x02ϫ\\x03\\x02\\x02\\x02\\x02ϭ\\x03\\x02\\x02\\x02\\x02ϯ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02ϱ\\x03\\x02\\x02\\x02\\x02ϳ\\x03\\x02\\x02\\x02\\x02ϵ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ϸ\\x03\\x02\\x02\\x02\\x02Ϲ\\x03\\x02\\x02\\x02\\x02ϻ\\x03\\x02\\x02\\x02\\x02Ͻ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ͽ\\x03\\x02\\x02\\x02\\x02Ё\\x03\\x02\\x02\\x02\\x02Ѓ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Ѕ\\x03\\x02\\x02\\x02\\x02Ї\\x03\\x02\\x02\\x02\\x02Љ\\x03\\x02\\x02\\x02\\x02Ћ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Ѝ\\x03\\x02\\x02\\x02\\x02Џ\\x03\\x02\\x02\\x02\\x02Г\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Е\\x03\\x02\\x02\\x02\\x02З\\x03\\x02\\x02\\x02\\x02Й\\x03\\x02\\x02\\x02\\x02Л'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x02Н\\x03\\x02\\x02\\x02\\x02П\\x03\\x02\\x02\\x02\\x02Ч\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\x02Щ\\x03\\x02\\x02\\x02\\x02Ы\\x03\\x02\\x02\\x02\\x02б\\x03\\x02\\x02\\x02\\x02г'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x03ѩ\\x03\\x02\\x02\\x02\\x05Ѭ\\x03\\x02\\x02\\x02\\x07Ѯ\\x03\\x02\\x02'\n )\n buf.write(\n '\\x02\\tѲ\\x03\\x02\\x02\\x02\\x0bѸ\\x03\\x02\\x02\\x02\\rѾ\\x03\\x02\\x02\\x02\\x0f'\n )\n buf.write(\n '҈\\x03\\x02\\x02\\x02\\x11Ҍ\\x03\\x02\\x02\\x02\\x13Ғ\\x03\\x02\\x02\\x02\\x15Қ')\n buf.write(\n '\\x03\\x02\\x02\\x02\\x17Ҟ\\x03\\x02\\x02\\x02\\x19Ң\\x03\\x02\\x02\\x02\\x1bҨ\\x03'\n )\n buf.write(\n '\\x02\\x02\\x02\\x1dҫ\\x03\\x02\\x02\\x02\\x1fҲ\\x03\\x02\\x02\\x02!ҹ\\x03\\x02\\x02'\n )\n buf.write(\n \"\\x02#ҽ\\x03\\x02\\x02\\x02%Ӈ\\x03\\x02\\x02\\x02'ӊ\\x03\\x02\\x02\\x02)Ӕ\")\n buf.write(\n 
'\\x03\\x02\\x02\\x02+Ӛ\\x03\\x02\\x02\\x02-ӡ\\x03\\x02\\x02\\x02/Ӧ\\x03\\x02\\x02\\x02'\n )\n buf.write('1Ӱ\\x03\\x02\\x02\\x023ԇ\\x03\\x02\\x02\\x025ԍ\\x03\\x02\\x02\\x027')\n buf.write('Ԕ\\x03\\x02\\x02\\x029Ԛ\\x03\\x02\\x02\\x02;Ԣ\\x03\\x02\\x02\\x02=Ԩ\\x03'\n )\n buf.write(\n '\\x02\\x02\\x02?Զ\\x03\\x02\\x02\\x02AՃ\\x03\\x02\\x02\\x02CՒ\\x03\\x02\\x02\\x02E\\u0557'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02G՝\\x03\\x02\\x02\\x02Iբ\\x03\\x02\\x02\\x02Kժ\\x03\\x02\\x02\\x02'\n )\n buf.write(\n 'Mկ\\x03\\x02\\x02\\x02Oշ\\x03\\x02\\x02\\x02Qռ\\x03\\x02\\x02\\x02Sտ\\x03')\n buf.write(\n '\\x02\\x02\\x02Uք\\x03\\x02\\x02\\x02Wֆ\\x03\\x02\\x02\\x02Y\\u058c\\x03\\x02\\x02\\x02[֑'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02]֛\\x03\\x02\\x02\\x02_֣\\x03\\x02\\x02\\x02a֨\\x03\\x02\\x02\\x02'\n )\n buf.write(\n 'c֭\\x03\\x02\\x02\\x02eֲ\\x03\\x02\\x02\\x02gֺ\\x03\\x02\\x02\\x02iׄ\\x03')\n buf.write(\n '\\x02\\x02\\x02k\\u05ca\\x03\\x02\\x02\\x02m\\u05ce\\x03\\x02\\x02\\x02oד\\x03\\x02\\x02\\x02qי'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02sס\\x03\\x02\\x02\\x02uש\\x03\\x02\\x02\\x02wױ\\x03\\x02\\x02\\x02'\n )\n buf.write(\n 'y\\u05f9\\x03\\x02\\x02\\x02{\\u0600\\x03\\x02\\x02\\x02}؊\\x03\\x02\\x02\\x02\\x7fؘ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x81ؠ\\x03\\x02\\x02\\x02\\x83ة\\x03\\x02\\x02\\x02\\x85')\n buf.write('ر\\x03\\x02\\x02\\x02\\x87ف\\x03\\x02\\x02\\x02\\x89ي\\x03\\x02\\x02\\x02'\n )\n buf.write('\\x8bٕ\\x03\\x02\\x02\\x02\\x8d١\\x03\\x02\\x02\\x02\\x8f٭\\x03')\n buf.write('\\x02\\x02\\x02\\x91ٵ\\x03\\x02\\x02\\x02\\x93ٽ\\x03\\x02\\x02\\x02\\x95چ'\n )\n buf.write(\n '\\x03\\x02\\x02\\x02\\x97ڎ\\x03\\x02\\x02\\x02\\x99ښ\\x03\\x02\\x02\\x02\\x9b')\n buf.write('ڪ\\x03\\x02\\x02\\x02\\x9dگ\\x03\\x02\\x02\\x02\\x9fڵ\\x03\\x02\\x02\\x02'\n )\n buf.write('¡ڼ\\x03\\x02\\x02\\x02£ۂ\\x03\\x02\\x02\\x02¥ۇ\\x03')\n buf.write('\\x02\\x02\\x02§ۏ\\x03\\x02\\x02\\x02©ۜ\\x03\\x02\\x02\\x02«ۣ')\n buf.write('\\x03\\x02\\x02\\x02\\xadۯ\\x03\\x02\\x02\\x02¯۵\\x03\\x02\\x02\\x02±')\n 
buf.write('ۺ\\x03\\x02\\x02\\x02³܃\\x03\\x02\\x02\\x02µ܈\\x03\\x02\\x02\\x02')\n buf.write('·܌\\x03\\x02\\x02\\x02¹ܛ\\x03\\x02\\x02\\x02»ܦ\\x03')\n buf.write('\\x02\\x02\\x02½ܪ\\x03\\x02\\x02\\x02¿ܰ\\x03\\x02\\x02\\x02Áܴ')\n buf.write('\\x03\\x02\\x02\\x02Ãܼ\\x03\\x02\\x02\\x02Å݄\\x03\\x02\\x02\\x02Ç')\n buf.write('ݎ\\x03\\x02\\x02\\x02Éݘ\\x03\\x02\\x02\\x02Ëݠ\\x03\\x02\\x02\\x02')\n buf.write('Íݩ\\x03\\x02\\x02\\x02Ïݲ\\x03\\x02\\x02\\x02Ñݺ\\x03')\n buf.write('\\x02\\x02\\x02Óށ\\x03\\x02\\x02\\x02Õއ\\x03\\x02\\x02\\x02×ތ')\n buf.write('\\x03\\x02\\x02\\x02Ùޚ\\x03\\x02\\x02\\x02Ûޤ\\x03\\x02\\x02\\x02Ý')\n buf.write('ެ\\x03\\x02\\x02\\x02ß\\u07b9\\x03\\x02\\x02\\x02á߂\\x03\\x02\\x02\\x02')\n buf.write('ãߋ\\x03\\x02\\x02\\x02åߒ\\x03\\x02\\x02\\x02çߗ\\x03')\n buf.write('\\x02\\x02\\x02é߰\\x03\\x02\\x02\\x02ëߵ\\x03\\x02\\x02\\x02í߽')\n buf.write('\\x03\\x02\\x02\\x02ïࠂ\\x03\\x02\\x02\\x02ñࠈ\\x03\\x02\\x02\\x02ó')\n buf.write('ࠎ\\x03\\x02\\x02\\x02õࠕ\\x03\\x02\\x02\\x02÷ࠞ\\x03\\x02\\x02\\x02')\n buf.write('ùࠢ\\x03\\x02\\x02\\x02û࠱\\x03\\x02\\x02\\x02ý࠵\\x03')\n buf.write('\\x02\\x02\\x02ÿ࠼\\x03\\x02\\x02\\x02āࡃ\\x03\\x02\\x02\\x02ăࡌ')\n buf.write('\\x03\\x02\\x02\\x02ąࡓ\\x03\\x02\\x02\\x02ć\\u085d\\x03\\x02\\x02\\x02ĉ')\n buf.write('\\u086c\\x03\\x02\\x02\\x02ċࡷ\\x03\\x02\\x02\\x02čࡿ\\x03\\x02\\x02\\x02')\n buf.write('ďࢉ\\x03\\x02\\x02\\x02đ\\u0891\\x03\\x02\\x02\\x02ē࢘\\x03')\n buf.write('\\x02\\x02\\x02ĕ࢝\\x03\\x02\\x02\\x02ėࢥ\\x03\\x02\\x02\\x02ęࢮ')\n buf.write('\\x03\\x02\\x02\\x02ěࢶ\\x03\\x02\\x02\\x02ĝࢾ\\x03\\x02\\x02\\x02ğ')\n buf.write('ࣄ\\x03\\x02\\x02\\x02ġ࣊\\x03\\x02\\x02\\x02ģ࣐\\x03\\x02\\x02\\x02')\n buf.write('ĥࣖ\\x03\\x02\\x02\\x02ħ\\u08e2\\x03\\x02\\x02\\x02ĩࣨ\\x03')\n buf.write('\\x02\\x02\\x02īࣲ\\x03\\x02\\x02\\x02ĭࣺ\\x03\\x02\\x02\\x02įࣾ')\n buf.write('\\x03\\x02\\x02\\x02ıअ\\x03\\x02\\x02\\x02ijऋ\\x03\\x02\\x02\\x02ĵ')\n buf.write('ऐ\\x03\\x02\\x02\\x02ķक\\x03\\x02\\x02\\x02Ĺञ\\x03\\x02\\x02\\x02')\n 
buf.write('Ļण\\x03\\x02\\x02\\x02Ľऩ\\x03\\x02\\x02\\x02Ŀय\\x03')\n buf.write('\\x02\\x02\\x02Łस\\x03\\x02\\x02\\x02Ńऽ\\x03\\x02\\x02\\x02Ņॄ')\n buf.write('\\x03\\x02\\x02\\x02Ňॉ\\x03\\x02\\x02\\x02ʼnॎ\\x03\\x02\\x02\\x02ŋ')\n buf.write('॑\\x03\\x02\\x02\\x02ōक़\\x03\\x02\\x02\\x02ŏॢ\\x03\\x02\\x02\\x02')\n buf.write('ő॥\\x03\\x02\\x02\\x02œ७\\x03\\x02\\x02\\x02ŕॷ\\x03')\n buf.write('\\x02\\x02\\x02ŗঁ\\x03\\x02\\x02\\x02řঈ\\x03\\x02\\x02\\x02ś\\u098e')\n buf.write('\\x03\\x02\\x02\\x02ŝখ\\x03\\x02\\x02\\x02şঠ\\x03\\x02\\x02\\x02š')\n buf.write('ন\\x03\\x02\\x02\\x02ţ\\u09b1\\x03\\x02\\x02\\x02ťস\\x03\\x02\\x02\\x02')\n buf.write('ŧা\\x03\\x02\\x02\\x02ũৄ\\x03\\x02\\x02\\x02ūো\\x03')\n buf.write(\n '\\x02\\x02\\x02ŭ\\u09d8\\x03\\x02\\x02\\x02ůৠ\\x03\\x02\\x02\\x02ű\\u09e4')\n buf.write('\\x03\\x02\\x02\\x02ų৬\\x03\\x02\\x02\\x02ŵ৶\\x03\\x02\\x02\\x02ŷ')\n buf.write(\n '\\u09ff\\x03\\x02\\x02\\x02Ź\\u0a04\\x03\\x02\\x02\\x02Żਏ\\x03\\x02\\x02\\x02')\n buf.write('Ž\\u0a12\\x03\\x02\\x02\\x02ſਜ\\x03\\x02\\x02\\x02Ɓਤ\\x03')\n buf.write('\\x02\\x02\\x02ƃ\\u0a29\\x03\\x02\\x02\\x02ƅਮ\\x03\\x02\\x02\\x02Ƈਲ਼')\n buf.write('\\x03\\x02\\x02\\x02Ɖ਼\\x03\\x02\\x02\\x02Ƌੁ\\x03\\x02\\x02\\x02ƍ')\n buf.write('ੌ\\x03\\x02\\x02\\x02Ə\\u0a54\\x03\\x02\\x02\\x02Ƒਖ਼\\x03\\x02\\x02\\x02')\n buf.write('Ɠ\\u0a5f\\x03\\x02\\x02\\x02ƕ੧\\x03\\x02\\x02\\x02Ɨ੬\\x03')\n buf.write(\n '\\x02\\x02\\x02ƙੲ\\x03\\x02\\x02\\x02ƛ\\u0a78\\x03\\x02\\x02\\x02Ɲ\\u0a7e')\n buf.write('\\x03\\x02\\x02\\x02Ɵ\\u0a84\\x03\\x02\\x02\\x02ơઊ\\x03\\x02\\x02\\x02ƣ')\n buf.write('એ\\x03\\x02\\x02\\x02ƥખ\\x03\\x02\\x02\\x02Ƨચ\\x03\\x02\\x02\\x02')\n buf.write('Ʃડ\\x03\\x02\\x02\\x02ƫધ\\x03\\x02\\x02\\x02ƭબ\\x03')\n buf.write(\n '\\x02\\x02\\x02Ư\\u0ab1\\x03\\x02\\x02\\x02Ʊશ\\x03\\x02\\x02\\x02Ƴ\\u0aba')\n buf.write('\\x03\\x02\\x02\\x02Ƶૂ\\x03\\x02\\x02\\x02Ʒો\\x03\\x02\\x02\\x02ƹ')\n buf.write(\n '\\u0ad4\\x03\\x02\\x02\\x02ƻ\\u0adb\\x03\\x02\\x02\\x02ƽૡ\\x03\\x02\\x02\\x02')\n 
buf.write('ƿ૧\\x03\\x02\\x02\\x02ǁ૮\\x03\\x02\\x02\\x02ǃ\\u0af7\\x03')\n buf.write('\\x02\\x02\\x02Dž\\u0b00\\x03\\x02\\x02\\x02LJଅ\\x03\\x02\\x02\\x02ljଋ')\n buf.write('\\x03\\x02\\x02\\x02Nj\\u0b12\\x03\\x02\\x02\\x02Ǎଘ\\x03\\x02\\x02\\x02Ǐ')\n buf.write('ଡ\\x03\\x02\\x02\\x02Ǒଦ\\x03\\x02\\x02\\x02Ǔପ\\x03\\x02\\x02\\x02')\n buf.write('Ǖଲ\\x03\\x02\\x02\\x02Ǘ\\u0b3b\\x03\\x02\\x02\\x02Ǚି\\x03')\n buf.write(\n '\\x02\\x02\\x02Ǜ\\u0b45\\x03\\x02\\x02\\x02ǝ\\u0b4e\\x03\\x02\\x02\\x02ǟ\\u0b54'\n )\n buf.write('\\x03\\x02\\x02\\x02ǡ\\u0b5b\\x03\\x02\\x02\\x02ǣୟ\\x03\\x02\\x02\\x02ǥ')\n buf.write('ୢ\\x03\\x02\\x02\\x02ǧ୪\\x03\\x02\\x02\\x02ǩ୲\\x03\\x02\\x02\\x02')\n buf.write('ǫ\\u0b79\\x03\\x02\\x02\\x02ǭ\\u0b81\\x03\\x02\\x02\\x02ǯஒ\\x03')\n buf.write(\n '\\x02\\x02\\x02DZ\\u0b9d\\x03\\x02\\x02\\x02dzந\\x03\\x02\\x02\\x02ǵ\\u0bad')\n buf.write('\\x03\\x02\\x02\\x02Ƿவ\\x03\\x02\\x02\\x02ǹ\\u0bc3\\x03\\x02\\x02\\x02ǻ')\n buf.write(\n 'ே\\x03\\x02\\x02\\x02ǽ\\u0bce\\x03\\x02\\x02\\x02ǿ\\u0bd3\\x03\\x02\\x02\\x02')\n buf.write('ȁ\\u0bd9\\x03\\x02\\x02\\x02ȃ\\u0be0\\x03\\x02\\x02\\x02ȅ௨\\x03')\n buf.write('\\x02\\x02\\x02ȇ௲\\x03\\x02\\x02\\x02ȉ௹\\x03\\x02\\x02\\x02ȋ\\u0bfc')\n buf.write('\\x03\\x02\\x02\\x02ȍఀ\\x03\\x02\\x02\\x02ȏఄ\\x03\\x02\\x02\\x02ȑ')\n buf.write('ఈ\\x03\\x02\\x02\\x02ȓఋ\\x03\\x02\\x02\\x02ȕఐ\\x03\\x02\\x02\\x02')\n buf.write('ȗక\\x03\\x02\\x02\\x02șజ\\x03\\x02\\x02\\x02țట\\x03')\n buf.write('\\x02\\x02\\x02ȝధ\\x03\\x02\\x02\\x02ȟభ\\x03\\x02\\x02\\x02ȡస')\n buf.write('\\x03\\x02\\x02\\x02ȣీ\\x03\\x02\\x02\\x02ȥౄ\\x03\\x02\\x02\\x02ȧ')\n buf.write('ొ\\x03\\x02\\x02\\x02ȩ\\u0c4f\\x03\\x02\\x02\\x02ȫౚ\\x03\\x02\\x02\\x02')\n buf.write('ȭౢ\\x03\\x02\\x02\\x02ȯ\\u0c72\\x03\\x02\\x02\\x02ȱ౽\\x03')\n buf.write('\\x02\\x02\\x02ȳ಄\\x03\\x02\\x02\\x02ȵಎ\\x03\\x02\\x02\\x02ȷಖ')\n buf.write('\\x03\\x02\\x02\\x02ȹಛ\\x03\\x02\\x02\\x02Ȼತ\\x03\\x02\\x02\\x02Ƚ')\n buf.write(\n 'ಪ\\x03\\x02\\x02\\x02ȿ\\u0cb4\\x03\\x02\\x02\\x02Ɂ\\u0cba\\x03\\x02\\x02\\x02')\n 
buf.write('Ƀಿ\\x03\\x02\\x02\\x02Ʌೋ\\x03\\x02\\x02\\x02ɇ\\u0cd4\\x03')\n buf.write('\\x02\\x02\\x02ɉೞ\\x03\\x02\\x02\\x02ɋ\\u0ce5\\x03\\x02\\x02\\x02ɍ೯')\n buf.write('\\x03\\x02\\x02\\x02ɏ\\u0cf9\\x03\\x02\\x02\\x02ɑഁ\\x03\\x02\\x02\\x02ɓ')\n buf.write('ഇ\\x03\\x02\\x02\\x02ɕ\\u0d11\\x03\\x02\\x02\\x02ɗഗ\\x03\\x02\\x02\\x02')\n buf.write('əഝ\\x03\\x02\\x02\\x02ɛഡ\\x03\\x02\\x02\\x02ɝദ\\x03')\n buf.write('\\x02\\x02\\x02ɟഫ\\x03\\x02\\x02\\x02ɡല\\x03\\x02\\x02\\x02ɣശ')\n buf.write('\\x03\\x02\\x02\\x02ɥീ\\x03\\x02\\x02\\x02ɧൌ\\x03\\x02\\x02\\x02ɩ')\n buf.write(\n '\\u0d53\\x03\\x02\\x02\\x02ɫ൝\\x03\\x02\\x02\\x02ɭ\\u0d64\\x03\\x02\\x02\\x02')\n buf.write('ɯ൬\\x03\\x02\\x02\\x02ɱ൴\\x03\\x02\\x02\\x02ɳඈ\\x03')\n buf.write('\\x02\\x02\\x02ɵඏ\\x03\\x02\\x02\\x02ɷග\\x03\\x02\\x02\\x02ɹඣ')\n buf.write('\\x03\\x02\\x02\\x02ɻත\\x03\\x02\\x02\\x02ɽඳ\\x03\\x02\\x02\\x02ɿ')\n buf.write('ර\\x03\\x02\\x02\\x02ʁෂ\\x03\\x02\\x02\\x02ʃ\\u0dc8\\x03\\x02\\x02\\x02')\n buf.write('ʅෑ\\x03\\x02\\x02\\x02ʇෘ\\x03\\x02\\x02\\x02ʉො\\x03')\n buf.write('\\x02\\x02\\x02ʋ\\u0de2\\x03\\x02\\x02\\x02ʍ෧\\x03\\x02\\x02\\x02ʏ෭')\n buf.write('\\x03\\x02\\x02\\x02ʑ෴\\x03\\x02\\x02\\x02ʓ\\u0df9\\x03\\x02\\x02\\x02ʕ')\n buf.write('ฃ\\x03\\x02\\x02\\x02ʗช\\x03\\x02\\x02\\x02ʙถ\\x03\\x02\\x02\\x02')\n buf.write('ʛบ\\x03\\x02\\x02\\x02ʝม\\x03\\x02\\x02\\x02ʟศ\\x03')\n buf.write('\\x02\\x02\\x02ʡอ\\x03\\x02\\x02\\x02ʣี\\x03\\x02\\x02\\x02ʥ\\u0e3c')\n buf.write('\\x03\\x02\\x02\\x02ʧแ\\x03\\x02\\x02\\x02ʩ๊\\x03\\x02\\x02\\x02ʫ')\n buf.write(\n '๕\\x03\\x02\\x02\\x02ʭ\\u0e62\\x03\\x02\\x02\\x02ʯ\\u0e74\\x03\\x02\\x02\\x02')\n buf.write('ʱ\\u0e80\\x03\\x02\\x02\\x02ʳຐ\\x03\\x02\\x02\\x02ʵດ\\x03')\n buf.write('\\x02\\x02\\x02ʷນ\\x03\\x02\\x02\\x02ʹຢ\\x03\\x02\\x02\\x02ʻຨ')\n buf.write('\\x03\\x02\\x02\\x02ʽອ\\x03\\x02\\x02\\x02ʿຶ\\x03\\x02\\x02\\x02ˁ')\n buf.write('\\u0ebf\\x03\\x02\\x02\\x02˃່\\x03\\x02\\x02\\x02˅໗\\x03\\x02\\x02\\x02')\n 
buf.write('ˇໞ\\x03\\x02\\x02\\x02ˉ\\u0ee3\\x03\\x02\\x02\\x02ˋ\\u0ee8\\x03')\n buf.write(\n '\\x02\\x02\\x02ˍ\\u0ef1\\x03\\x02\\x02\\x02ˏ\\u0efa\\x03\\x02\\x02\\x02ˑ\\u0eff'\n )\n buf.write('\\x03\\x02\\x02\\x02˓།\\x03\\x02\\x02\\x02˕༕\\x03\\x02\\x02\\x02˗')\n buf.write('༞\\x03\\x02\\x02\\x02˙༩\\x03\\x02\\x02\\x02˛༯\\x03\\x02\\x02\\x02')\n buf.write('˝༷\\x03\\x02\\x02\\x02˟ཁ\\x03\\x02\\x02\\x02ˡཎ\\x03')\n buf.write('\\x02\\x02\\x02ˣཕ\\x03\\x02\\x02\\x02˥འ\\x03\\x02\\x02\\x02˧ཧ')\n buf.write('\\x03\\x02\\x02\\x02˩ཱི\\x03\\x02\\x02\\x02˫ྀ\\x03\\x02\\x02\\x02˭')\n buf.write('ྎ\\x03\\x02\\x02\\x02˯ྖ\\x03\\x02\\x02\\x02˱ྞ\\x03\\x02\\x02\\x02')\n buf.write('˳ྦ\\x03\\x02\\x02\\x02˵ྫྷ\\x03\\x02\\x02\\x02˷ྰ\\x03')\n buf.write('\\x02\\x02\\x02˹ྵ\\x03\\x02\\x02\\x02˻ྺ\\x03\\x02\\x02\\x02˽࿄')\n buf.write(\n '\\x03\\x02\\x02\\x02˿\\u0fe0\\x03\\x02\\x02\\x02́\\u0ffb\\x03\\x02\\x02\\x02̃')\n buf.write('ဓ\\x03\\x02\\x02\\x02̅အ\\x03\\x02\\x02\\x02̇ု\\x03\\x02\\x02\\x02')\n buf.write('̉ဿ\\x03\\x02\\x02\\x02̋၏\\x03\\x02\\x02\\x02̍ၒ\\x03')\n buf.write('\\x02\\x02\\x02̏ၛ\\x03\\x02\\x02\\x02̑ၧ\\x03\\x02\\x02\\x02̓ၱ')\n buf.write('\\x03\\x02\\x02\\x02̕ၷ\\x03\\x02\\x02\\x02̗ၿ\\x03\\x02\\x02\\x02̙')\n buf.write('ႄ\\x03\\x02\\x02\\x02̛ႉ\\x03\\x02\\x02\\x02̝႒\\x03\\x02\\x02\\x02')\n buf.write('̟႗\\x03\\x02\\x02\\x02̡Ⴁ\\x03\\x02\\x02\\x02̣Ⴇ\\x03')\n buf.write('\\x02\\x02\\x02̥Ⴍ\\x03\\x02\\x02\\x02̧Ⴔ\\x03\\x02\\x02\\x02̩Ⴞ')\n buf.write(\n '\\x03\\x02\\x02\\x02̫\\u10c6\\x03\\x02\\x02\\x02̭\\u10cc\\x03\\x02\\x02\\x02̯')\n buf.write('დ\\x03\\x02\\x02\\x02̱მ\\x03\\x02\\x02\\x02̳ტ\\x03\\x02\\x02\\x02')\n buf.write('̵ჩ\\x03\\x02\\x02\\x02̷ჭ\\x03\\x02\\x02\\x02̹ჳ\\x03')\n buf.write('\\x02\\x02\\x02̻ჼ\\x03\\x02\\x02\\x02̽ᄂ\\x03\\x02\\x02\\x02̿ᄉ')\n buf.write('\\x03\\x02\\x02\\x02́ᄑ\\x03\\x02\\x02\\x02̓ᄚ\\x03\\x02\\x02\\x02ͅ')\n buf.write('ᄣ\\x03\\x02\\x02\\x02͇ᄪ\\x03\\x02\\x02\\x02͉ᄲ\\x03\\x02\\x02\\x02')\n buf.write('͋ᄺ\\x03\\x02\\x02\\x02͍ᅃ\\x03\\x02\\x02\\x02͏ᅈ\\x03')\n 
buf.write('\\x02\\x02\\x02͑ᅐ\\x03\\x02\\x02\\x02͓ᅛ\\x03\\x02\\x02\\x02͕ᅠ')\n buf.write('\\x03\\x02\\x02\\x02͗ᅩ\\x03\\x02\\x02\\x02͙ᅯ\\x03\\x02\\x02\\x02͛')\n buf.write('ᅵ\\x03\\x02\\x02\\x02͝ᅺ\\x03\\x02\\x02\\x02͟ᆁ\\x03\\x02\\x02\\x02')\n buf.write('͡ᆆ\\x03\\x02\\x02\\x02ͣᆌ\\x03\\x02\\x02\\x02ͥᆐ\\x03')\n buf.write('\\x02\\x02\\x02ͧᆗ\\x03\\x02\\x02\\x02ͩᆥ\\x03\\x02\\x02\\x02ͫᆭ')\n buf.write('\\x03\\x02\\x02\\x02ͭᆺ\\x03\\x02\\x02\\x02ͯᇅ\\x03\\x02\\x02\\x02ͱ')\n buf.write('ᇏ\\x03\\x02\\x02\\x02ͳᇙ\\x03\\x02\\x02\\x02͵ᇧ\\x03\\x02\\x02\\x02')\n buf.write('ͷᇰ\\x03\\x02\\x02\\x02\\u0379ᇶ\\x03\\x02\\x02\\x02ͻᇿ\\x03')\n buf.write('\\x02\\x02\\x02ͽሇ\\x03\\x02\\x02\\x02Ϳሔ\\x03\\x02\\x02\\x02\\u0381ም')\n buf.write('\\x03\\x02\\x02\\x02\\u0383ሢ\\x03\\x02\\x02\\x02΅ሦ\\x03\\x02\\x02\\x02·')\n buf.write(\n 'ሿ\\x03\\x02\\x02\\x02Ήቄ\\x03\\x02\\x02\\x02\\u038b\\u124f\\x03\\x02\\x02\\x02')\n buf.write('\\u038dቡ\\x03\\x02\\x02\\x02Ώቱ\\x03\\x02\\x02\\x02Αኄ\\x03')\n buf.write('\\x02\\x02\\x02Γኛ\\x03\\x02\\x02\\x02Εኪ\\x03\\x02\\x02\\x02Ηኴ')\n buf.write(\n '\\x03\\x02\\x02\\x02Ι\\u12bf\\x03\\x02\\x02\\x02Λ\\u12c7\\x03\\x02\\x02\\x02Ν')\n buf.write('ዔ\\x03\\x02\\x02\\x02Οዤ\\x03\\x02\\x02\\x02Ρዴ\\x03\\x02\\x02\\x02')\n buf.write('Σዹ\\x03\\x02\\x02\\x02Υዽ\\x03\\x02\\x02\\x02Χጂ\\x03')\n buf.write('\\x02\\x02\\x02Ωጆ\\x03\\x02\\x02\\x02Ϋጋ\\x03\\x02\\x02\\x02έጏ')\n buf.write('\\x03\\x02\\x02\\x02ί\\u1316\\x03\\x02\\x02\\x02αጚ\\x03\\x02\\x02\\x02γ')\n buf.write('ጠ\\x03\\x02\\x02\\x02εጰ\\x03\\x02\\x02\\x02ηጻ\\x03\\x02\\x02\\x02')\n buf.write('ιጿ\\x03\\x02\\x02\\x02λፈ\\x03\\x02\\x02\\x02νፎ\\x03')\n buf.write('\\x02\\x02\\x02οፕ\\x03\\x02\\x02\\x02ρፚ\\x03\\x02\\x02\\x02σ፡')\n buf.write('\\x03\\x02\\x02\\x02υ፮\\x03\\x02\\x02\\x02χ፻\\x03\\x02\\x02\\x02ω')\n buf.write('ᎈ\\x03\\x02\\x02\\x02ϋᎋ\\x03\\x02\\x02\\x02ύᎍ\\x03\\x02\\x02\\x02')\n buf.write('Ϗᎏ\\x03\\x02\\x02\\x02ϑ\\u139e\\x03\\x02\\x02\\x02ϓᎪ\\x03')\n buf.write('\\x02\\x02\\x02ϕᎳ\\x03\\x02\\x02\\x02ϗᎵ\\x03\\x02\\x02\\x02ϙᏀ')\n 
buf.write('\\x03\\x02\\x02\\x02ϛᏋ\\x03\\x02\\x02\\x02ϝᏖ\\x03\\x02\\x02\\x02ϟ')\n buf.write('Ꮱ\\x03\\x02\\x02\\x02ϡᏣ\\x03\\x02\\x02\\x02ϣᏭ\\x03\\x02\\x02\\x02')\n buf.write('ϥᏯ\\x03\\x02\\x02\\x02ϧᏱ\\x03\\x02\\x02\\x02ϩᏳ\\x03')\n buf.write('\\x02\\x02\\x02ϫᏵ\\x03\\x02\\x02\\x02ϭᏸ\\x03\\x02\\x02\\x02ϯᏺ')\n buf.write('\\x03\\x02\\x02\\x02ϱᏼ\\x03\\x02\\x02\\x02ϳ\\u13fe\\x03\\x02\\x02\\x02ϵ')\n buf.write('᐀\\x03\\x02\\x02\\x02Ϸᐂ\\x03\\x02\\x02\\x02Ϲᐄ\\x03\\x02\\x02\\x02')\n buf.write('ϻᐕ\\x03\\x02\\x02\\x02Ͻᐗ\\x03\\x02\\x02\\x02Ͽᐙ\\x03')\n buf.write('\\x02\\x02\\x02Ёᐛ\\x03\\x02\\x02\\x02Ѓᐞ\\x03\\x02\\x02\\x02Ѕᐠ')\n buf.write('\\x03\\x02\\x02\\x02Їᐫ\\x03\\x02\\x02\\x02Љᐭ\\x03\\x02\\x02\\x02Ћ')\n buf.write('ᐯ\\x03\\x02\\x02\\x02Ѝᐱ\\x03\\x02\\x02\\x02Џᐳ\\x03\\x02\\x02\\x02')\n buf.write('Бᐵ\\x03\\x02\\x02\\x02Гᐷ\\x03\\x02\\x02\\x02Еᐺ\\x03')\n buf.write('\\x02\\x02\\x02Зᐼ\\x03\\x02\\x02\\x02Йᐾ\\x03\\x02\\x02\\x02Лᑀ')\n buf.write('\\x03\\x02\\x02\\x02Нᑂ\\x03\\x02\\x02\\x02Пᑅ\\x03\\x02\\x02\\x02С')\n buf.write('ᑋ\\x03\\x02\\x02\\x02Уᑎ\\x03\\x02\\x02\\x02Хᑕ\\x03\\x02\\x02\\x02')\n buf.write('Чᑠ\\x03\\x02\\x02\\x02Щᑯ\\x03\\x02\\x02\\x02Ыᑽ\\x03')\n buf.write('\\x02\\x02\\x02Эᒐ\\x03\\x02\\x02\\x02Яᒔ\\x03\\x02\\x02\\x02бᒖ')\n buf.write('\\x03\\x02\\x02\\x02гᒞ\\x03\\x02\\x02\\x02еᒣ\\x03\\x02\\x02\\x02з')\n buf.write('ᒥ\\x03\\x02\\x02\\x02йᒧ\\x03\\x02\\x02\\x02лᒩ\\x03\\x02\\x02\\x02')\n buf.write('нᒫ\\x03\\x02\\x02\\x02пᒭ\\x03\\x02\\x02\\x02сᒯ\\x03')\n buf.write('\\x02\\x02\\x02уᒱ\\x03\\x02\\x02\\x02хᒳ\\x03\\x02\\x02\\x02чᒵ')\n buf.write('\\x03\\x02\\x02\\x02щᒷ\\x03\\x02\\x02\\x02ыᒹ\\x03\\x02\\x02\\x02э')\n buf.write('ᒻ\\x03\\x02\\x02\\x02яᒽ\\x03\\x02\\x02\\x02ёᒿ\\x03\\x02\\x02\\x02')\n buf.write('ѓᓁ\\x03\\x02\\x02\\x02ѕᓃ\\x03\\x02\\x02\\x02їᓅ\\x03')\n buf.write('\\x02\\x02\\x02љᓇ\\x03\\x02\\x02\\x02ћᓉ\\x03\\x02\\x02\\x02ѝᓋ')\n buf.write('\\x03\\x02\\x02\\x02џᓍ\\x03\\x02\\x02\\x02ѡᓏ\\x03\\x02\\x02\\x02ѣ')\n 
buf.write('ᓑ\\x03\\x02\\x02\\x02ѥᓓ\\x03\\x02\\x02\\x02ѧᓕ\\x03\\x02\\x02\\x02')\n buf.write('ѩѪ\\x070\\x02\\x02Ѫѫ\\x070\\x02\\x02ѫ\\x04\\x03\\x02')\n buf.write('\\x02\\x02Ѭѭ\\x05еț\\x02ѭ\\x06\\x03\\x02\\x02\\x02Ѯ')\n buf.write('ѯ\\x05еț\\x02ѯѰ\\x05лȞ\\x02Ѱ')\n buf.write('ѱ\\x05лȞ\\x02ѱ\\x08\\x03\\x02\\x02\\x02Ѳѳ\\x05е')\n buf.write('ț\\x02ѳѴ\\x05пȠ\\x02Ѵѵ\\x05ћ')\n buf.write('Ȯ\\x02ѵѶ\\x05нȟ\\x02Ѷѷ\\x05ї')\n buf.write('Ȭ\\x02ѷ\\n\\x03\\x02\\x02\\x02Ѹѹ\\x05еț\\x02ѹ')\n buf.write('Ѻ\\x05сȡ\\x02Ѻѻ\\x05нȟ\\x02ѻ')\n buf.write('Ѽ\\x05яȨ\\x02Ѽѽ\\x05ћȮ\\x02ѽ')\n buf.write('\\x0c\\x03\\x02\\x02\\x02Ѿѿ\\x05еț\\x02ѿҀ\\x05с')\n buf.write('ȡ\\x02Ҁҁ\\x05сȡ\\x02ҁ҂\\x05ї')\n buf.write('Ȭ\\x02҂҃\\x05нȟ\\x02҃҄\\x05с')\n buf.write('ȡ\\x02҄҅\\x05еț\\x02҅҆\\x05ћ')\n buf.write('Ȯ\\x02҆҇\\x05нȟ\\x02҇\\x0e\\x03\\x02\\x02\\x02')\n buf.write('҈҉\\x05еț\\x02҉Ҋ\\x05ыȦ')\n buf.write('\\x02Ҋҋ\\x05ыȦ\\x02ҋ\\x10\\x03\\x02\\x02\\x02Ҍ')\n buf.write('ҍ\\x05еț\\x02ҍҎ\\x05ыȦ\\x02Ҏ')\n buf.write('ҏ\\x05ћȮ\\x02ҏҐ\\x05нȟ\\x02Ґ')\n buf.write('ґ\\x05їȬ\\x02ґ\\x12\\x03\\x02\\x02\\x02Ғғ\\x05')\n buf.write('еț\\x02ғҔ\\x05яȨ\\x02Ҕҕ')\n buf.write('\\x05еț\\x02ҕҖ\\x05ыȦ\\x02Җҗ')\n buf.write('\\x05ѥȳ\\x02җҘ\\x05ѧȴ\\x02Ҙҙ')\n buf.write('\\x05нȟ\\x02ҙ\\x14\\x03\\x02\\x02\\x02Ққ\\x05е')\n buf.write('ț\\x02қҜ\\x05яȨ\\x02Ҝҝ\\x05л')\n buf.write('Ȟ\\x02ҝ\\x16\\x03\\x02\\x02\\x02Ҟҟ\\x05еț\\x02')\n buf.write('ҟҠ\\x05яȨ\\x02Ҡҡ\\x05ѥȳ')\n buf.write('\\x02ҡ\\x18\\x03\\x02\\x02\\x02Ңң\\x05еț\\x02ң')\n buf.write('Ҥ\\x05їȬ\\x02Ҥҥ\\x05їȬ\\x02ҥ')\n buf.write('Ҧ\\x05еț\\x02Ҧҧ\\x05ѥȳ\\x02ҧ')\n buf.write('\\x1a\\x03\\x02\\x02\\x02Ҩҩ\\x05еț\\x02ҩҪ\\x05')\n buf.write('љȭ\\x02Ҫ\\x1c\\x03\\x02\\x02\\x02ҫҬ\\x05еț')\n buf.write('\\x02Ҭҭ\\x05љȭ\\x02ҭҮ\\x05љȭ')\n buf.write('\\x02Үү\\x05ѝȯ\\x02үҰ\\x05эȧ')\n buf.write('\\x02Ұұ\\x05нȟ\\x02ұ\\x1e\\x03\\x02\\x02\\x02Ҳ')\n buf.write('ҳ\\x05еț\\x02ҳҴ\\x05љȭ\\x02Ҵ')\n buf.write('ҵ\\x05љȭ\\x02ҵҶ\\x05нȟ\\x02Ҷ')\n buf.write('ҷ\\x05їȬ\\x02ҷҸ\\x05ћȮ\\x02Ҹ')\n buf.write(' 
\\x03\\x02\\x02\\x02ҹҺ\\x05еț\\x02Һһ\\x05љ')\n buf.write('ȭ\\x02һҼ\\x05йȝ\\x02Ҽ\"\\x03\\x02\\x02\\x02ҽ')\n buf.write('Ҿ\\x05еț\\x02Ҿҿ\\x05љȭ\\x02ҿ')\n buf.write('Ӏ\\x05љȭ\\x02ӀӁ\\x05ёȩ\\x02Ӂ')\n buf.write('ӂ\\x05йȝ\\x02ӂӃ\\x05хȣ\\x02Ӄ')\n buf.write('ӄ\\x05еț\\x02ӄӅ\\x05ћȮ\\x02Ӆ')\n buf.write('ӆ\\x05нȟ\\x02ӆ$\\x03\\x02\\x02\\x02Ӈӈ\\x05е')\n buf.write('ț\\x02ӈӉ\\x05ћȮ\\x02Ӊ&\\x03\\x02\\x02\\x02ӊ')\n buf.write('Ӌ\\x05еț\\x02Ӌӌ\\x05ћȮ\\x02ӌ')\n buf.write('Ӎ\\x05ћȮ\\x02Ӎӎ\\x05їȬ\\x02ӎ')\n buf.write('ӏ\\x05хȣ\\x02ӏӐ\\x05зȜ\\x02Ӑ')\n buf.write('ӑ\\x05ѝȯ\\x02ӑӒ\\x05ћȮ\\x02Ӓ')\n buf.write('ӓ\\x05нȟ\\x02ӓ(\\x03\\x02\\x02\\x02Ӕӕ\\x05е')\n buf.write('ț\\x02ӕӖ\\x05ѝȯ\\x02Ӗӗ\\x05л')\n buf.write('Ȟ\\x02ӗӘ\\x05хȣ\\x02Әә\\x05ћ')\n buf.write('Ȯ\\x02ә*\\x03\\x02\\x02\\x02Ӛӛ\\x05еț\\x02ӛ')\n buf.write('Ӝ\\x05ѝȯ\\x02Ӝӝ\\x05ћȮ\\x02ӝ')\n buf.write('Ӟ\\x05уȢ\\x02Ӟӟ\\x05хȣ\\x02ӟ')\n buf.write('Ӡ\\x05лȞ\\x02Ӡ,\\x03\\x02\\x02\\x02ӡӢ\\x05е')\n buf.write('ț\\x02Ӣӣ\\x05ѝȯ\\x02ӣӤ\\x05ћ')\n buf.write('Ȯ\\x02Ӥӥ\\x05ёȩ\\x02ӥ.\\x03\\x02\\x02\\x02Ӧ')\n buf.write('ӧ\\x05еț\\x02ӧӨ\\x05ѝȯ\\x02Ө')\n buf.write('ө\\x05ћȮ\\x02өӪ\\x05ёȩ\\x02Ӫ')\n buf.write('ӫ\\x05эȧ\\x02ӫӬ\\x05еț\\x02Ӭ')\n buf.write('ӭ\\x05ћȮ\\x02ӭӮ\\x05хȣ\\x02Ӯ')\n buf.write('ӯ\\x05йȝ\\x02ӯ0\\x03\\x02\\x02\\x02Ӱӱ\\x05')\n buf.write('еț\\x02ӱӲ\\x05ѝȯ\\x02Ӳӳ')\n buf.write('\\x05ћȮ\\x02ӳӴ\\x05ёȩ\\x02Ӵӵ')\n buf.write('\\x05яȨ\\x02ӵӶ\\x05ёȩ\\x02Ӷӷ')\n buf.write('\\x05эȧ\\x02ӷӸ\\x05ёȩ\\x02Ӹӹ')\n buf.write('\\x05ѝȯ\\x02ӹӺ\\x05љȭ\\x02Ӻӻ')\n buf.write('\\x07a\\x02\\x02ӻӼ\\x05ћȮ\\x02Ӽӽ\\x05ї')\n buf.write('Ȭ\\x02ӽӾ\\x05еț\\x02Ӿӿ\\x05я')\n buf.write('Ȩ\\x02ӿԀ\\x05љȭ\\x02Ԁԁ\\x05е')\n buf.write('ț\\x02ԁԂ\\x05йȝ\\x02Ԃԃ\\x05ћ')\n buf.write('Ȯ\\x02ԃԄ\\x05хȣ\\x02Ԅԅ\\x05ё')\n buf.write('ȩ\\x02ԅԆ\\x05яȨ\\x02Ԇ2\\x03\\x02\\x02\\x02')\n buf.write('ԇԈ\\x05зȜ\\x02Ԉԉ\\x05еț')\n buf.write('\\x02ԉԊ\\x05ћȮ\\x02Ԋԋ\\x05йȝ')\n buf.write('\\x02ԋԌ\\x05уȢ\\x02Ԍ4\\x03\\x02\\x02\\x02ԍ')\n buf.write('Ԏ\\x05зȜ\\x02Ԏԏ\\x05нȟ\\x02ԏ')\n buf.write('Ԑ\\x05пȠ\\x02Ԑԑ\\x05ёȩ\\x02ԑ')\n 
buf.write('Ԓ\\x05їȬ\\x02Ԓԓ\\x05нȟ\\x02ԓ')\n buf.write('6\\x03\\x02\\x02\\x02Ԕԕ\\x05зȜ\\x02ԕԖ\\x05')\n buf.write('нȟ\\x02Ԗԗ\\x05сȡ\\x02ԗԘ')\n buf.write('\\x05хȣ\\x02Ԙԙ\\x05яȨ\\x02ԙ8\\x03')\n buf.write('\\x02\\x02\\x02Ԛԛ\\x05зȜ\\x02ԛԜ\\x05н')\n buf.write('ȟ\\x02Ԝԝ\\x05ћȮ\\x02ԝԞ\\x05ѡ')\n buf.write('ȱ\\x02Ԟԟ\\x05нȟ\\x02ԟԠ\\x05н')\n buf.write('ȟ\\x02Ԡԡ\\x05яȨ\\x02ԡ:\\x03\\x02\\x02\\x02Ԣ')\n buf.write('ԣ\\x05зȜ\\x02ԣԤ\\x05пȠ\\x02Ԥ')\n buf.write('ԥ\\x05хȣ\\x02ԥԦ\\x05ыȦ\\x02Ԧ')\n buf.write('ԧ\\x05нȟ\\x02ԧ<\\x03\\x02\\x02\\x02Ԩԩ\\x05з')\n buf.write('Ȝ\\x02ԩԪ\\x05хȣ\\x02Ԫԫ\\x05я')\n buf.write('Ȩ\\x02ԫԬ\\x05еț\\x02Ԭԭ\\x05ї')\n buf.write('Ȭ\\x02ԭԮ\\x05ѥȳ\\x02Ԯԯ\\x07a\\x02')\n buf.write('\\x02ԯ\\u0530\\x05лȞ\\x02\\u0530Ա\\x05ёȩ')\n buf.write('\\x02ԱԲ\\x05ѝȯ\\x02ԲԳ\\x05зȜ')\n buf.write('\\x02ԳԴ\\x05ыȦ\\x02ԴԵ\\x05нȟ')\n buf.write('\\x02Ե>\\x03\\x02\\x02\\x02ԶԷ\\x05зȜ\\x02ԷԸ')\n buf.write('\\x05хȣ\\x02ԸԹ\\x05яȨ\\x02ԹԺ')\n buf.write('\\x05еț\\x02ԺԻ\\x05їȬ\\x02ԻԼ')\n buf.write('\\x05ѥȳ\\x02ԼԽ\\x07a\\x02\\x02ԽԾ\\x05п')\n buf.write('Ƞ\\x02ԾԿ\\x05ыȦ\\x02ԿՀ\\x05ё')\n buf.write('ȩ\\x02ՀՁ\\x05еț\\x02ՁՂ\\x05ћ')\n buf.write('Ȯ\\x02Ղ@\\x03\\x02\\x02\\x02ՃՄ\\x05зȜ\\x02Մ')\n buf.write('Յ\\x05хȣ\\x02ՅՆ\\x05яȨ\\x02Ն')\n buf.write('Շ\\x05еț\\x02ՇՈ\\x05їȬ\\x02Ո')\n buf.write('Չ\\x05ѥȳ\\x02ՉՊ\\x07a\\x02\\x02ՊՋ')\n buf.write('\\x05хȣ\\x02ՋՌ\\x05яȨ\\x02ՌՍ')\n buf.write('\\x05ћȮ\\x02ՍՎ\\x05нȟ\\x02ՎՏ')\n buf.write('\\x05сȡ\\x02ՏՐ\\x05нȟ\\x02ՐՑ')\n buf.write('\\x05їȬ\\x02ՑB\\x03\\x02\\x02\\x02ՒՓ\\x05зȜ')\n buf.write('\\x02ՓՔ\\x05ыȦ\\x02ՔՕ\\x05ёȩ')\n buf.write('\\x02ՕՖ\\x05зȜ\\x02ՖD\\x03\\x02\\x02\\x02\\u0557\\u0558')\n buf.write('\\x05зȜ\\x02\\u0558ՙ\\x05ыȦ\\x02ՙ՚')\n buf.write('\\x05ёȩ\\x02՚՛\\x05йȝ\\x02՛՜')\n buf.write('\\x05щȥ\\x02՜F\\x03\\x02\\x02\\x02՝՞\\x05зȜ')\n buf.write('\\x02՞՟\\x05ёȩ\\x02՟ՠ\\x05лȞ')\n buf.write('\\x02ՠա\\x05ѥȳ\\x02աH\\x03\\x02\\x02\\x02բգ')\n buf.write('\\x05зȜ\\x02գդ\\x05ёȩ\\x02դե')\n buf.write('\\x05ёȩ\\x02եզ\\x05ыȦ\\x02զէ')\n buf.write('\\x05нȟ\\x02էը\\x05еț\\x02ըթ')\n 
buf.write('\\x05яȨ\\x02թJ\\x03\\x02\\x02\\x02ժի\\x05зȜ')\n buf.write('\\x02իլ\\x05ёȩ\\x02լխ\\x05ћȮ')\n buf.write('\\x02խծ\\x05уȢ\\x02ծL\\x03\\x02\\x02\\x02կհ')\n buf.write('\\x05зȜ\\x02հձ\\x05їȬ\\x02ձղ')\n buf.write('\\x05нȟ\\x02ղճ\\x05еț\\x02ճմ')\n buf.write('\\x05лȞ\\x02մյ\\x05ћȮ\\x02յն')\n buf.write('\\x05уȢ\\x02նN\\x03\\x02\\x02\\x02շո\\x05зȜ')\n buf.write('\\x02ոչ\\x05ѝȯ\\x02չպ\\x05ыȦ')\n buf.write('\\x02պջ\\x05щȥ\\x02ջP\\x03\\x02\\x02\\x02ռս')\n buf.write('\\x05зȜ\\x02սվ\\x05ѥȳ\\x02վR\\x03')\n buf.write('\\x02\\x02\\x02տր\\x05зȜ\\x02րց\\x05ѥ')\n buf.write('ȳ\\x02ցւ\\x05ћȮ\\x02ւփ\\x05н')\n buf.write('ȟ\\x02փT\\x03\\x02\\x02\\x02քօ\\x05йȝ\\x02օ')\n buf.write('V\\x03\\x02\\x02\\x02ֆև\\x05йȝ\\x02ևֈ\\x05е')\n buf.write('ț\\x02ֈ։\\x05йȝ\\x02։֊\\x05у')\n buf.write('Ȣ\\x02֊\\u058b\\x05нȟ\\x02\\u058bX\\x03\\x02\\x02\\x02\\u058c')\n buf.write('֍\\x05йȝ\\x02֍֎\\x05еț\\x02֎')\n buf.write('֏\\x05ыȦ\\x02֏\\u0590\\x05ыȦ\\x02\\u0590')\n buf.write('Z\\x03\\x02\\x02\\x02֑֒\\x05йȝ\\x02֒֓\\x05е')\n buf.write('ț\\x02֓֔\\x05яȨ\\x02֔֕\\x05ё')\n buf.write('ȩ\\x02֖֕\\x05яȨ\\x02֖֗\\x05х')\n buf.write('ȣ\\x02֗֘\\x05йȝ\\x02֘֙\\x05е')\n buf.write('ț\\x02֚֙\\x05ыȦ\\x02֚\\\\\\x03\\x02\\x02\\x02֛')\n buf.write('֜\\x05йȝ\\x02֜֝\\x05еț\\x02֝')\n buf.write('֞\\x05љȭ\\x02֞֟\\x05йȝ\\x02֟')\n buf.write('֠\\x05еț\\x02֠֡\\x05лȞ\\x02֡')\n buf.write('֢\\x05нȟ\\x02֢^\\x03\\x02\\x02\\x02֣֤\\x05й')\n buf.write('ȝ\\x02֤֥\\x05еț\\x02֥֦\\x05љ')\n buf.write('ȭ\\x02֦֧\\x05нȟ\\x02֧`\\x03\\x02\\x02\\x02֨')\n buf.write('֩\\x05йȝ\\x02֪֩\\x05еț\\x02֪')\n buf.write('֫\\x05љȭ\\x02֫֬\\x05ћȮ\\x02֬')\n buf.write('b\\x03\\x02\\x02\\x02֭֮\\x05йȝ\\x02֮֯\\x05у')\n buf.write('Ȣ\\x02ְ֯\\x05еț\\x02ְֱ\\x05ї')\n buf.write('Ȭ\\x02ֱd\\x03\\x02\\x02\\x02ֲֳ\\x05йȝ\\x02ֳ')\n buf.write('ִ\\x05уȢ\\x02ִֵ\\x05еț\\x02ֵ')\n buf.write('ֶ\\x05їȬ\\x02ֶַ\\x07a\\x02\\x02ַָ')\n buf.write('\\x05йȝ\\x02ָֹ\\x05љȭ\\x02ֹf\\x03')\n buf.write('\\x02\\x02\\x02ֺֻ\\x05йȝ\\x02ֻּ\\x05у')\n buf.write('Ȣ\\x02ּֽ\\x05еț\\x02ֽ־\\x05ї')\n 
buf.write('Ȭ\\x02־ֿ\\x05еț\\x02ֿ׀\\x05й')\n buf.write('ȝ\\x02׀ׁ\\x05ћȮ\\x02ׁׂ\\x05н')\n buf.write('ȟ\\x02ׂ׃\\x05їȬ\\x02׃h\\x03\\x02\\x02\\x02ׄ')\n buf.write('ׅ\\x05йȝ\\x02ׅ׆\\x05уȢ\\x02׆')\n buf.write('ׇ\\x05нȟ\\x02ׇ\\u05c8\\x05йȝ\\x02\\u05c8')\n buf.write('\\u05c9\\x05щȥ\\x02\\u05c9j\\x03\\x02\\x02\\x02\\u05ca\\u05cb\\x05й')\n buf.write('ȝ\\x02\\u05cb\\u05cc\\x05уȢ\\x02\\u05cc\\u05cd\\x05ї')\n buf.write('Ȭ\\x02\\u05cdl\\x03\\x02\\x02\\x02\\u05ce\\u05cf\\x05йȝ\\x02\\u05cf')\n buf.write('א\\x05ыȦ\\x02אב\\x05ёȩ\\x02ב')\n buf.write('ג\\x05зȜ\\x02גn\\x03\\x02\\x02\\x02דה\\x05й')\n buf.write('ȝ\\x02הו\\x05ыȦ\\x02וז\\x05ё')\n buf.write('ȩ\\x02זח\\x05љȭ\\x02חט\\x05н')\n buf.write('ȟ\\x02טp\\x03\\x02\\x02\\x02יך\\x05йȝ\\x02ך')\n buf.write('כ\\x05ыȦ\\x02כל\\x05ѝȯ\\x02ל')\n buf.write('ם\\x05љȭ\\x02םמ\\x05ћȮ\\x02מ')\n buf.write('ן\\x05нȟ\\x02ןנ\\x05їȬ\\x02נ')\n buf.write('r\\x03\\x02\\x02\\x02סע\\x05йȝ\\x02עף\\x05ё')\n buf.write('ȩ\\x02ףפ\\x05ыȦ\\x02פץ\\x05ы')\n buf.write('Ȧ\\x02ץצ\\x05нȟ\\x02צק\\x05й')\n buf.write('ȝ\\x02קר\\x05ћȮ\\x02רt\\x03\\x02\\x02\\x02ש')\n buf.write('ת\\x05йȝ\\x02ת\\u05eb\\x05ёȩ\\x02\\u05eb')\n buf.write('\\u05ec\\x05ыȦ\\x02\\u05ec\\u05ed\\x05ѝȯ\\x02\\u05ed')\n buf.write('\\u05ee\\x05эȧ\\x02\\u05eeׯ\\x05яȨ\\x02ׯ')\n buf.write('װ\\x05љȭ\\x02װv\\x03\\x02\\x02\\x02ױײ\\x05й')\n buf.write('ȝ\\x02ײ׳\\x05ёȩ\\x02׳״\\x05э')\n buf.write('ȧ\\x02״\\u05f5\\x05эȧ\\x02\\u05f5\\u05f6\\x05н')\n buf.write('ȟ\\x02\\u05f6\\u05f7\\x05яȨ\\x02\\u05f7\\u05f8\\x05ћ')\n buf.write('Ȯ\\x02\\u05f8x\\x03\\x02\\x02\\x02\\u05f9\\u05fa\\x05йȝ\\x02\\u05fa')\n buf.write('\\u05fb\\x05ёȩ\\x02\\u05fb\\u05fc\\x05эȧ\\x02\\u05fc')\n buf.write('\\u05fd\\x05эȧ\\x02\\u05fd\\u05fe\\x05хȣ\\x02\\u05fe')\n buf.write('\\u05ff\\x05ћȮ\\x02\\u05ffz\\x03\\x02\\x02\\x02\\u0600\\u0601\\x05й')\n buf.write('ȝ\\x02\\u0601\\u0602\\x05ёȩ\\x02\\u0602\\u0603\\x05э')\n buf.write('ȧ\\x02\\u0603\\u0604\\x05эȧ\\x02\\u0604\\u0605\\x05х')\n buf.write('ȣ\\x02\\u0605؆\\x05ћȮ\\x02؆؇\\x05ћ')\n 
buf.write('Ȯ\\x02؇؈\\x05нȟ\\x02؈؉\\x05л')\n buf.write('Ȟ\\x02؉|\\x03\\x02\\x02\\x02؊؋\\x05йȝ\\x02؋')\n buf.write('،\\x05ёȩ\\x02،؍\\x05эȧ\\x02؍')\n buf.write('؎\\x05ѓȪ\\x02؎؏\\x05еț\\x02؏')\n buf.write('ؐ\\x05ћȮ\\x02ؐؑ\\x05хȣ\\x02ؑ')\n buf.write('ؒ\\x05зȜ\\x02ؒؓ\\x05хȣ\\x02ؓ')\n buf.write('ؔ\\x05ыȦ\\x02ؔؕ\\x05хȣ\\x02ؕ')\n buf.write('ؖ\\x05ћȮ\\x02ؖؗ\\x05ѥȳ\\x02ؗ')\n buf.write('~\\x03\\x02\\x02\\x02ؘؙ\\x05йȝ\\x02ؙؚ\\x05ё')\n buf.write('ȩ\\x02ؚ؛\\x05эȧ\\x02؛\\u061c\\x05ѓ')\n buf.write('Ȫ\\x02\\u061c؝\\x05хȣ\\x02؝؞\\x05ы')\n buf.write('Ȧ\\x02؞؟\\x05нȟ\\x02؟\\x80\\x03\\x02\\x02')\n buf.write('\\x02ؠء\\x05йȝ\\x02ءآ\\x05ёȩ')\n buf.write('\\x02آأ\\x05эȧ\\x02أؤ\\x05ѓȪ')\n buf.write('\\x02ؤإ\\x05ёȩ\\x02إئ\\x05ѝȯ')\n buf.write('\\x02ئا\\x05яȨ\\x02اب\\x05лȞ')\n buf.write('\\x02ب\\x82\\x03\\x02\\x02\\x02ةت\\x05йȝ\\x02ت')\n buf.write('ث\\x05ёȩ\\x02ثج\\x05яȨ\\x02ج')\n buf.write('ح\\x05яȨ\\x02حخ\\x05нȟ\\x02خ')\n buf.write('د\\x05йȝ\\x02دذ\\x05ћȮ\\x02ذ')\n buf.write('\\x84\\x03\\x02\\x02\\x02رز\\x05йȝ\\x02زس')\n buf.write('\\x05ёȩ\\x02سش\\x05яȨ\\x02شص')\n buf.write('\\x05яȨ\\x02صض\\x05нȟ\\x02ضط')\n buf.write('\\x05йȝ\\x02طظ\\x05ћȮ\\x02ظع')\n buf.write('\\x07a\\x02\\x02عغ\\x05зȜ\\x02غػ\\x05ѥ')\n buf.write('ȳ\\x02ػؼ\\x07a\\x02\\x02ؼؽ\\x05їȬ')\n buf.write('\\x02ؽؾ\\x05ёȩ\\x02ؾؿ\\x05ёȩ')\n buf.write('\\x02ؿـ\\x05ћȮ\\x02ـ\\x86\\x03\\x02\\x02\\x02ف')\n buf.write('ق\\x05йȝ\\x02قك\\x05ёȩ\\x02ك')\n buf.write('ل\\x05яȨ\\x02لم\\x05љȭ\\x02م')\n buf.write('ن\\x05ћȮ\\x02نه\\x05еț\\x02ه')\n buf.write('و\\x05яȨ\\x02وى\\x05ћȮ\\x02ى')\n buf.write('\\x88\\x03\\x02\\x02\\x02يً\\x05йȝ\\x02ًٌ')\n buf.write('\\x05ёȩ\\x02ٌٍ\\x05яȨ\\x02ٍَ')\n buf.write('\\x05љȭ\\x02َُ\\x05ћȮ\\x02ُِ')\n buf.write('\\x05їȬ\\x02ِّ\\x05еț\\x02ّْ')\n buf.write('\\x05хȣ\\x02ْٓ\\x05яȨ\\x02ٓٔ')\n buf.write('\\x05ћȮ\\x02ٔ\\x8a\\x03\\x02\\x02\\x02ٕٖ\\x05й')\n buf.write('ȝ\\x02ٖٗ\\x05ёȩ\\x02ٗ٘\\x05я')\n buf.write('Ȩ\\x02٘ٙ\\x05љȭ\\x02ٙٚ\\x05ћ')\n buf.write('Ȯ\\x02ٚٛ\\x05їȬ\\x02ٜٛ\\x05е')\n buf.write('ț\\x02ٜٝ\\x05хȣ\\x02ٝٞ\\x05я')\n 
buf.write('Ȩ\\x02ٟٞ\\x05ћȮ\\x02ٟ٠\\x05љ')\n buf.write('ȭ\\x02٠\\x8c\\x03\\x02\\x02\\x02١٢\\x05йȝ')\n buf.write('\\x02٢٣\\x05ёȩ\\x02٣٤\\x05яȨ')\n buf.write('\\x02٤٥\\x05љȭ\\x02٥٦\\x05ћȮ')\n buf.write('\\x02٦٧\\x05їȬ\\x02٧٨\\x05ѝȯ')\n buf.write('\\x02٨٩\\x05йȝ\\x02٩٪\\x05ћȮ')\n buf.write('\\x02٪٫\\x05ёȩ\\x02٫٬\\x05їȬ')\n buf.write('\\x02٬\\x8e\\x03\\x02\\x02\\x02٭ٮ\\x05йȝ\\x02ٮ')\n buf.write('ٯ\\x05ёȩ\\x02ٯٰ\\x05яȨ\\x02ٰ')\n buf.write('ٱ\\x05ћȮ\\x02ٱٲ\\x05нȟ\\x02ٲ')\n buf.write('ٳ\\x05яȨ\\x02ٳٴ\\x05ћȮ\\x02ٴ')\n buf.write('\\x90\\x03\\x02\\x02\\x02ٵٶ\\x05йȝ\\x02ٶٷ')\n buf.write('\\x05ёȩ\\x02ٷٸ\\x05яȨ\\x02ٸٹ')\n buf.write('\\x05ћȮ\\x02ٹٺ\\x05нȟ\\x02ٺٻ')\n buf.write('\\x05ѣȲ\\x02ٻټ\\x05ћȮ\\x02ټ\\x92')\n buf.write('\\x03\\x02\\x02\\x02ٽپ\\x05йȝ\\x02پٿ\\x05ё')\n buf.write('ȩ\\x02ٿڀ\\x05яȨ\\x02ڀځ\\x05ћ')\n buf.write('Ȯ\\x02ځڂ\\x05хȣ\\x02ڂڃ\\x05я')\n buf.write('Ȩ\\x02ڃڄ\\x05ѝȯ\\x02ڄڅ\\x05н')\n buf.write('ȟ\\x02څ\\x94\\x03\\x02\\x02\\x02چڇ\\x05йȝ')\n buf.write('\\x02ڇڈ\\x05ёȩ\\x02ڈډ\\x05яȨ')\n buf.write('\\x02ډڊ\\x05џȰ\\x02ڊڋ\\x05нȟ')\n buf.write('\\x02ڋڌ\\x05їȬ\\x02ڌڍ\\x05ћȮ')\n buf.write('\\x02ڍ\\x96\\x03\\x02\\x02\\x02ڎڏ\\x05йȝ\\x02ڏ')\n buf.write('ڐ\\x05ёȩ\\x02ڐڑ\\x05їȬ\\x02ڑ')\n buf.write('ڒ\\x05їȬ\\x02ڒړ\\x05ѝȯ\\x02ړ')\n buf.write('ڔ\\x05ѓȪ\\x02ڔڕ\\x05ћȮ\\x02ڕ')\n buf.write('ږ\\x07a\\x02\\x02ږڗ\\x05ѣȲ\\x02ڗژ')\n buf.write('\\x05хȣ\\x02ژڙ\\x05лȞ\\x02ڙ\\x98')\n buf.write('\\x03\\x02\\x02\\x02ښڛ\\x05йȝ\\x02ڛڜ\\x05ё')\n buf.write('ȩ\\x02ڜڝ\\x05їȬ\\x02ڝڞ\\x05ї')\n buf.write('Ȭ\\x02ڞڟ\\x05ѝȯ\\x02ڟڠ\\x05ѓ')\n buf.write('Ȫ\\x02ڠڡ\\x05ћȮ\\x02ڡڢ\\x07a\\x02')\n buf.write('\\x02ڢڣ\\x05ѣȲ\\x02ڣڤ\\x05хȣ')\n buf.write('\\x02ڤڥ\\x05лȞ\\x02ڥڦ\\x07a\\x02\\x02ڦ')\n buf.write('ڧ\\x05еț\\x02ڧڨ\\x05ыȦ\\x02ڨ')\n buf.write('ک\\x05ыȦ\\x02ک\\x9a\\x03\\x02\\x02\\x02ڪګ')\n buf.write('\\x05йȝ\\x02ګڬ\\x05ёȩ\\x02ڬڭ')\n buf.write('\\x05љȭ\\x02ڭڮ\\x05ћȮ\\x02ڮ\\x9c')\n buf.write('\\x03\\x02\\x02\\x02گڰ\\x05йȝ\\x02ڰڱ\\x05ё')\n buf.write('ȩ\\x02ڱڲ\\x05ѝȯ\\x02ڲڳ\\x05я')\n 
buf.write('Ȩ\\x02ڳڴ\\x05ћȮ\\x02ڴ\\x9e\\x03\\x02\\x02')\n buf.write('\\x02ڵڶ\\x05йȝ\\x02ڶڷ\\x05їȬ')\n buf.write('\\x02ڷڸ\\x05нȟ\\x02ڸڹ\\x05еț')\n buf.write('\\x02ڹں\\x05ћȮ\\x02ںڻ\\x05нȟ')\n buf.write('\\x02ڻ\\xa0\\x03\\x02\\x02\\x02ڼڽ\\x05йȝ\\x02ڽ')\n buf.write('ھ\\x05їȬ\\x02ھڿ\\x05ёȩ\\x02ڿ')\n buf.write('ۀ\\x05љȭ\\x02ۀہ\\x05љȭ\\x02ہ')\n buf.write('¢\\x03\\x02\\x02\\x02ۂۃ\\x05йȝ\\x02ۃۄ')\n buf.write('\\x05ѝȯ\\x02ۄۅ\\x05зȜ\\x02ۅۆ')\n buf.write('\\x05нȟ\\x02ۆ¤\\x03\\x02\\x02\\x02ۇۈ\\x05й')\n buf.write('ȝ\\x02ۈۉ\\x05ѝȯ\\x02ۉۊ\\x05ї')\n buf.write('Ȭ\\x02ۊۋ\\x05їȬ\\x02ۋی\\x05н')\n buf.write('ȟ\\x02یۍ\\x05яȨ\\x02ۍێ\\x05ћ')\n buf.write('Ȯ\\x02ێ¦\\x03\\x02\\x02\\x02ۏې\\x05йȝ')\n buf.write('\\x02ېۑ\\x05ѝȯ\\x02ۑے\\x05їȬ')\n buf.write('\\x02ےۓ\\x05їȬ\\x02ۓ۔\\x05нȟ')\n buf.write('\\x02۔ە\\x05яȨ\\x02ەۖ\\x05ћȮ')\n buf.write('\\x02ۖۗ\\x07a\\x02\\x02ۗۘ\\x05ѝȯ\\x02ۘ')\n buf.write('ۙ\\x05љȭ\\x02ۙۚ\\x05нȟ\\x02ۚ')\n buf.write('ۛ\\x05їȬ\\x02ۛ¨\\x03\\x02\\x02\\x02ۜ\\u06dd')\n buf.write('\\x05йȝ\\x02\\u06dd۞\\x05ѝȯ\\x02۞۟')\n buf.write('\\x05їȬ\\x02۟۠\\x05љȭ\\x02۠ۡ')\n buf.write('\\x05ёȩ\\x02ۡۢ\\x05їȬ\\x02ۢª')\n buf.write('\\x03\\x02\\x02\\x02ۣۤ\\x05йȝ\\x02ۤۥ\\x05ѝ')\n buf.write('ȯ\\x02ۥۦ\\x05љȭ\\x02ۦۧ\\x05ћ')\n buf.write('Ȯ\\x02ۧۨ\\x05ёȩ\\x02ۨ۩\\x05э')\n buf.write('ȧ\\x02۩۪\\x05лȞ\\x02۪۫\\x05е')\n buf.write('ț\\x02۫۬\\x05ћȮ\\x02ۭ۬\\x05ѝ')\n buf.write('ȯ\\x02ۭۮ\\x05эȧ\\x02ۮ¬\\x03\\x02\\x02')\n buf.write('\\x02ۯ۰\\x05йȝ\\x02۰۱\\x05ѥȳ')\n buf.write('\\x02۱۲\\x05йȝ\\x02۲۳\\x05ыȦ')\n buf.write('\\x02۳۴\\x05нȟ\\x02۴®\\x03\\x02\\x02\\x02۵')\n buf.write('۶\\x05лȞ\\x02۶۷\\x05еț\\x02۷')\n buf.write('۸\\x05ћȮ\\x02۸۹\\x05еț\\x02۹')\n buf.write('°\\x03\\x02\\x02\\x02ۺۻ\\x05лȞ\\x02ۻۼ')\n buf.write('\\x05еț\\x02ۼ۽\\x05ћȮ\\x02۽۾')\n buf.write('\\x05еț\\x02۾ۿ\\x05зȜ\\x02ۿ܀')\n buf.write('\\x05еț\\x02܀܁\\x05љȭ\\x02܁܂')\n buf.write('\\x05нȟ\\x02܂²\\x03\\x02\\x02\\x02܃܄\\x05л')\n buf.write('Ȟ\\x02܄܅\\x05еț\\x02܅܆\\x05ћ')\n buf.write('Ȯ\\x02܆܇\\x05нȟ\\x02܇´\\x03\\x02\\x02')\n 
buf.write('\\x02܈܉\\x05лȞ\\x02܉܊\\x05еț')\n buf.write('\\x02܊܋\\x05ѥȳ\\x02܋¶\\x03\\x02\\x02\\x02܌')\n buf.write('܍\\x05лȞ\\x02܍\\u070e\\x05зȜ\\x02\\u070e')\n buf.write('\\u070f\\x07a\\x02\\x02\\u070fܐ\\x05їȬ\\x02ܐܑ')\n buf.write('\\x05ёȩ\\x02ܑܒ\\x05ыȦ\\x02ܒܓ')\n buf.write('\\x05нȟ\\x02ܓܔ\\x07a\\x02\\x02ܔܕ\\x05й')\n buf.write('ȝ\\x02ܕܖ\\x05уȢ\\x02ܖܗ\\x05е')\n buf.write('ț\\x02ܗܘ\\x05яȨ\\x02ܘܙ\\x05с')\n buf.write('ȡ\\x02ܙܚ\\x05нȟ\\x02ܚ¸\\x03\\x02\\x02')\n buf.write('\\x02ܛܜ\\x05лȞ\\x02ܜܝ\\x05зȜ')\n buf.write('\\x02ܝܞ\\x05ћȮ\\x02ܞܟ\\x05хȣ')\n buf.write('\\x02ܟܠ\\x05эȧ\\x02ܠܡ\\x05нȟ')\n buf.write('\\x02ܡܢ\\x05ѧȴ\\x02ܢܣ\\x05ёȩ')\n buf.write('\\x02ܣܤ\\x05яȨ\\x02ܤܥ\\x05нȟ')\n buf.write('\\x02ܥº\\x03\\x02\\x02\\x02ܦܧ\\x05лȞ\\x02ܧ')\n buf.write('ܨ\\x05лȞ\\x02ܨܩ\\x05ыȦ\\x02ܩ')\n buf.write('¼\\x03\\x02\\x02\\x02ܪܫ\\x05лȞ\\x02ܫܬ')\n buf.write('\\x05нȟ\\x02ܬܭ\\x05зȜ\\x02ܭܮ')\n buf.write('\\x05ѝȯ\\x02ܮܯ\\x05сȡ\\x02ܯ¾')\n buf.write('\\x03\\x02\\x02\\x02ܱܰ\\x05лȞ\\x02ܱܲ\\x05н')\n buf.write('ȟ\\x02ܲܳ\\x05йȝ\\x02ܳÀ\\x03\\x02\\x02')\n buf.write('\\x02ܴܵ\\x05лȞ\\x02ܵܶ\\x05нȟ')\n buf.write('\\x02ܷܶ\\x05йȝ\\x02ܷܸ\\x05хȣ')\n buf.write('\\x02ܸܹ\\x05эȧ\\x02ܹܺ\\x05еț')\n buf.write('\\x02ܻܺ\\x05ыȦ\\x02ܻÂ\\x03\\x02\\x02\\x02ܼ')\n buf.write('ܽ\\x05лȞ\\x02ܾܽ\\x05нȟ\\x02ܾ')\n buf.write('ܿ\\x05йȝ\\x02ܿ݀\\x05ыȦ\\x02݀')\n buf.write('݁\\x05еț\\x02݂݁\\x05їȬ\\x02݂')\n buf.write('݃\\x05нȟ\\x02݃Ä\\x03\\x02\\x02\\x02݄݅')\n buf.write('\\x05лȞ\\x02݆݅\\x05нȟ\\x02݆݇')\n buf.write('\\x05йȝ\\x02݈݇\\x05ёȩ\\x02݈݉')\n buf.write('\\x05эȧ\\x02݉݊\\x05ѓȪ\\x02݊\\u074b')\n buf.write('\\x05ёȩ\\x02\\u074b\\u074c\\x05љȭ\\x02\\u074cݍ')\n buf.write('\\x05нȟ\\x02ݍÆ\\x03\\x02\\x02\\x02ݎݏ\\x05л')\n buf.write('Ȟ\\x02ݏݐ\\x05нȟ\\x02ݐݑ\\x05й')\n buf.write('ȝ\\x02ݑݒ\\x05їȬ\\x02ݒݓ\\x05н')\n buf.write('ȟ\\x02ݓݔ\\x05эȧ\\x02ݔݕ\\x05н')\n buf.write('ȟ\\x02ݕݖ\\x05яȨ\\x02ݖݗ\\x05ћ')\n buf.write('Ȯ\\x02ݗÈ\\x03\\x02\\x02\\x02ݘݙ\\x05лȞ')\n buf.write('\\x02ݙݚ\\x05нȟ\\x02ݚݛ\\x05пȠ')\n buf.write('\\x02ݛݜ\\x05еț\\x02ݜݝ\\x05ѝȯ')\n 
buf.write('\\x02ݝݞ\\x05ыȦ\\x02ݞݟ\\x05ћȮ')\n buf.write('\\x02ݟÊ\\x03\\x02\\x02\\x02ݠݡ\\x05лȞ\\x02ݡ')\n buf.write('ݢ\\x05нȟ\\x02ݢݣ\\x05пȠ\\x02ݣ')\n buf.write('ݤ\\x05еț\\x02ݤݥ\\x05ѝȯ\\x02ݥ')\n buf.write('ݦ\\x05ыȦ\\x02ݦݧ\\x05ћȮ\\x02ݧ')\n buf.write('ݨ\\x05љȭ\\x02ݨÌ\\x03\\x02\\x02\\x02ݩݪ')\n buf.write('\\x05лȞ\\x02ݪݫ\\x05нȟ\\x02ݫݬ')\n buf.write('\\x05пȠ\\x02ݬݭ\\x05нȟ\\x02ݭݮ')\n buf.write('\\x05їȬ\\x02ݮݯ\\x05їȬ\\x02ݯݰ')\n buf.write('\\x05нȟ\\x02ݰݱ\\x05лȞ\\x02ݱÎ')\n buf.write('\\x03\\x02\\x02\\x02ݲݳ\\x05лȞ\\x02ݳݴ\\x05н')\n buf.write('ȟ\\x02ݴݵ\\x05пȠ\\x02ݵݶ\\x05х')\n buf.write('ȣ\\x02ݶݷ\\x05яȨ\\x02ݷݸ\\x05н')\n buf.write('ȟ\\x02ݸݹ\\x05їȬ\\x02ݹÐ\\x03\\x02\\x02')\n buf.write('\\x02ݺݻ\\x05лȞ\\x02ݻݼ\\x05нȟ')\n buf.write('\\x02ݼݽ\\x05ыȦ\\x02ݽݾ\\x05нȟ')\n buf.write('\\x02ݾݿ\\x05ћȮ\\x02ݿހ\\x05нȟ')\n buf.write('\\x02ހÒ\\x03\\x02\\x02\\x02ށނ\\x05лȞ\\x02ނ')\n buf.write('ރ\\x05нȟ\\x02ރބ\\x05ѓȪ\\x02ބ')\n buf.write('ޅ\\x05ћȮ\\x02ޅކ\\x05уȢ\\x02ކ')\n buf.write('Ô\\x03\\x02\\x02\\x02އވ\\x05лȞ\\x02ވމ')\n buf.write('\\x05нȟ\\x02މފ\\x05љȭ\\x02ފދ')\n buf.write('\\x05йȝ\\x02ދÖ\\x03\\x02\\x02\\x02ތލ\\x05л')\n buf.write('Ȟ\\x02ލގ\\x05нȟ\\x02ގޏ\\x05ћ')\n buf.write('Ȯ\\x02ޏސ\\x05нȟ\\x02ސޑ\\x05ї')\n buf.write('Ȭ\\x02ޑޒ\\x05эȧ\\x02ޒޓ\\x05х')\n buf.write('ȣ\\x02ޓޔ\\x05яȨ\\x02ޔޕ\\x05х')\n buf.write('ȣ\\x02ޕޖ\\x05љȭ\\x02ޖޗ\\x05ћ')\n buf.write('Ȯ\\x02ޗޘ\\x05хȣ\\x02ޘޙ\\x05й')\n buf.write('ȝ\\x02ޙØ\\x03\\x02\\x02\\x02ޚޛ\\x05лȞ')\n buf.write('\\x02ޛޜ\\x05хȣ\\x02ޜޝ\\x05эȧ')\n buf.write('\\x02ޝޞ\\x05нȟ\\x02ޞޟ\\x05яȨ')\n buf.write('\\x02ޟޠ\\x05љȭ\\x02ޠޡ\\x05хȣ')\n buf.write('\\x02ޡޢ\\x05ёȩ\\x02ޢޣ\\x05яȨ')\n buf.write('\\x02ޣÚ\\x03\\x02\\x02\\x02ޤޥ\\x05лȞ\\x02ޥ')\n buf.write('ަ\\x05хȣ\\x02ަާ\\x05љȭ\\x02ާ')\n buf.write('ި\\x05еț\\x02ިީ\\x05зȜ\\x02ީ')\n buf.write('ު\\x05ыȦ\\x02ުޫ\\x05нȟ\\x02ޫ')\n buf.write('Ü\\x03\\x02\\x02\\x02ެޭ\\x05лȞ\\x02ޭޮ')\n buf.write('\\x05хȣ\\x02ޮޯ\\x05љȭ\\x02ޯް')\n buf.write('\\x05еț\\x02ްޱ\\x05љȭ\\x02ޱ\\u07b2')\n buf.write('\\x05љȭ\\x02\\u07b2\\u07b3\\x05ёȩ\\x02\\u07b3\\u07b4')\n 
buf.write('\\x05йȝ\\x02\\u07b4\\u07b5\\x05хȣ\\x02\\u07b5\\u07b6')\n buf.write('\\x05еț\\x02\\u07b6\\u07b7\\x05ћȮ\\x02\\u07b7\\u07b8')\n buf.write('\\x05нȟ\\x02\\u07b8Þ\\x03\\x02\\x02\\x02\\u07b9\\u07ba\\x05л')\n buf.write('Ȟ\\x02\\u07ba\\u07bb\\x05хȣ\\x02\\u07bb\\u07bc\\x05љ')\n buf.write('ȭ\\x02\\u07bc\\u07bd\\x05ћȮ\\x02\\u07bd\\u07be\\x05х')\n buf.write('ȣ\\x02\\u07be\\u07bf\\x05яȨ\\x02\\u07bf߀\\x05й')\n buf.write('ȝ\\x02߀߁\\x05ћȮ\\x02߁à\\x03\\x02\\x02')\n buf.write('\\x02߂߃\\x05лȞ\\x02߃߄\\x05ёȩ')\n buf.write('\\x02߄߅\\x05йȝ\\x02߅߆\\x05ѝȯ')\n buf.write('\\x02߆߇\\x05эȧ\\x02߇߈\\x05нȟ')\n buf.write('\\x02߈߉\\x05яȨ\\x02߉ߊ\\x05ћȮ')\n buf.write('\\x02ߊâ\\x03\\x02\\x02\\x02ߋߌ\\x05лȞ\\x02ߌ')\n buf.write('ߍ\\x05ёȩ\\x02ߍߎ\\x05ѝȯ\\x02ߎ')\n buf.write('ߏ\\x05зȜ\\x02ߏߐ\\x05ыȦ\\x02ߐ')\n buf.write('ߑ\\x05нȟ\\x02ߑä\\x03\\x02\\x02\\x02ߒߓ')\n buf.write('\\x05лȞ\\x02ߓߔ\\x05їȬ\\x02ߔߕ')\n buf.write('\\x05ёȩ\\x02ߕߖ\\x05ѓȪ\\x02ߖæ')\n buf.write('\\x03\\x02\\x02\\x02ߗߘ\\x05лȞ\\x02ߘߙ\\x05љ')\n buf.write('ȭ\\x02ߙߚ\\x05хȣ\\x02ߚߛ\\x05я')\n buf.write('Ȩ\\x02ߛߜ\\x05ћȮ\\x02ߜߝ\\x05н')\n buf.write('ȟ\\x02ߝߞ\\x05їȬ\\x02ߞߟ\\x05џ')\n buf.write('Ȱ\\x02ߟߠ\\x05еț\\x02ߠߡ\\x05ы')\n buf.write('Ȧ\\x02ߡߢ\\x07a\\x02\\x02ߢߣ\\x05ѝȯ')\n buf.write('\\x02ߣߤ\\x05яȨ\\x02ߤߥ\\x05йȝ')\n buf.write('\\x02ߥߦ\\x05ёȩ\\x02ߦߧ\\x05яȨ')\n buf.write('\\x02ߧߨ\\x05љȭ\\x02ߨߩ\\x05ћȮ')\n buf.write('\\x02ߩߪ\\x05їȬ\\x02ߪ߫\\x05еț')\n buf.write('\\x02߫߬\\x05хȣ\\x02߬߭\\x05яȨ')\n buf.write('\\x02߭߮\\x05нȟ\\x02߮߯\\x05лȞ')\n buf.write('\\x02߯è\\x03\\x02\\x02\\x02߰߱\\x05нȟ\\x02߱')\n buf.write('߲\\x05еț\\x02߲߳\\x05йȝ\\x02߳')\n buf.write('ߴ\\x05уȢ\\x02ߴê\\x03\\x02\\x02\\x02ߵ߶')\n buf.write('\\x05нȟ\\x02߶߷\\x05ыȦ\\x02߷߸')\n buf.write('\\x05нȟ\\x02߸߹\\x05эȧ\\x02߹ߺ')\n buf.write('\\x05нȟ\\x02ߺ\\u07fb\\x05яȨ\\x02\\u07fb\\u07fc')\n buf.write('\\x05ћȮ\\x02\\u07fcì\\x03\\x02\\x02\\x02߽߾\\x05н')\n buf.write('ȟ\\x02߾߿\\x05ыȦ\\x02߿ࠀ\\x05љ')\n buf.write('ȭ\\x02ࠀࠁ\\x05нȟ\\x02ࠁî\\x03\\x02\\x02')\n buf.write('\\x02ࠂࠃ\\x05нȟ\\x02ࠃࠄ\\x05ыȦ')\n 
buf.write('\\x02ࠄࠅ\\x05љȭ\\x02ࠅࠆ\\x05хȣ')\n buf.write('\\x02ࠆࠇ\\x05пȠ\\x02ࠇð\\x03\\x02\\x02\\x02ࠈ')\n buf.write('ࠉ\\x05нȟ\\x02ࠉࠊ\\x05эȧ\\x02ࠊ')\n buf.write('ࠋ\\x05ѓȪ\\x02ࠋࠌ\\x05ћȮ\\x02ࠌ')\n buf.write('ࠍ\\x05ѥȳ\\x02ࠍò\\x03\\x02\\x02\\x02ࠎࠏ')\n buf.write('\\x05нȟ\\x02ࠏࠐ\\x05яȨ\\x02ࠐࠑ')\n buf.write('\\x05еț\\x02ࠑࠒ\\x05зȜ\\x02ࠒࠓ')\n buf.write('\\x05ыȦ\\x02ࠓࠔ\\x05нȟ\\x02ࠔô')\n buf.write('\\x03\\x02\\x02\\x02ࠕࠖ\\x05нȟ\\x02ࠖࠗ\\x05я')\n buf.write('Ȩ\\x02ࠗ࠘\\x05йȝ\\x02࠘࠙\\x05ё')\n buf.write('ȩ\\x02࠙ࠚ\\x05лȞ\\x02ࠚࠛ\\x05х')\n buf.write('ȣ\\x02ࠛࠜ\\x05яȨ\\x02ࠜࠝ\\x05с')\n buf.write('ȡ\\x02ࠝö\\x03\\x02\\x02\\x02ࠞࠟ\\x05нȟ')\n buf.write('\\x02ࠟࠠ\\x05яȨ\\x02ࠠࠡ\\x05лȞ')\n buf.write('\\x02ࠡø\\x03\\x02\\x02\\x02ࠢࠣ\\x05нȟ\\x02ࠣ')\n buf.write('ࠤ\\x05яȨ\\x02ࠤࠥ\\x05ћȮ\\x02ࠥ')\n buf.write('ࠦ\\x05хȣ\\x02ࠦࠧ\\x05ћȮ\\x02ࠧ')\n buf.write('ࠨ\\x05ѥȳ\\x02ࠨࠩ\\x05нȟ\\x02ࠩ')\n buf.write('ࠪ\\x05љȭ\\x02ࠪࠫ\\x05йȝ\\x02ࠫ')\n buf.write('ࠬ\\x05еț\\x02ࠬ࠭\\x05ѓȪ\\x02࠭')\n buf.write('\\u082e\\x05хȣ\\x02\\u082e\\u082f\\x05яȨ\\x02\\u082f')\n buf.write('࠰\\x05сȡ\\x02࠰ú\\x03\\x02\\x02\\x02࠱࠲')\n buf.write('\\x05нȟ\\x02࠲࠳\\x05їȬ\\x02࠳࠴')\n buf.write('\\x05їȬ\\x02࠴ü\\x03\\x02\\x02\\x02࠵࠶\\x05н')\n buf.write('ȟ\\x02࠶࠷\\x05їȬ\\x02࠷࠸\\x05ї')\n buf.write('Ȭ\\x02࠸࠹\\x05ёȩ\\x02࠹࠺\\x05ї')\n buf.write('Ȭ\\x02࠺࠻\\x05љȭ\\x02࠻þ\\x03\\x02\\x02')\n buf.write('\\x02࠼࠽\\x05нȟ\\x02࠽࠾\\x05љȭ')\n buf.write('\\x02࠾\\u083f\\x05йȝ\\x02\\u083fࡀ\\x05еț')\n buf.write('\\x02ࡀࡁ\\x05ѓȪ\\x02ࡁࡂ\\x05нȟ')\n buf.write('\\x02ࡂĀ\\x03\\x02\\x02\\x02ࡃࡄ\\x05нȟ\\x02ࡄ')\n buf.write('ࡅ\\x05џȰ\\x02ࡅࡆ\\x05еț\\x02ࡆ')\n buf.write('ࡇ\\x05ыȦ\\x02ࡇࡈ\\x05яȨ\\x02ࡈ')\n buf.write('ࡉ\\x05еț\\x02ࡉࡊ\\x05эȧ\\x02ࡊ')\n buf.write('ࡋ\\x05нȟ\\x02ࡋĂ\\x03\\x02\\x02\\x02ࡌࡍ')\n buf.write('\\x05нȟ\\x02ࡍࡎ\\x05ѣȲ\\x02ࡎࡏ')\n buf.write('\\x05йȝ\\x02ࡏࡐ\\x05нȟ\\x02ࡐࡑ')\n buf.write('\\x05ѓȪ\\x02ࡑࡒ\\x05ћȮ\\x02ࡒĄ')\n buf.write('\\x03\\x02\\x02\\x02ࡓࡔ\\x05нȟ\\x02ࡔࡕ\\x05ѣ')\n buf.write('Ȳ\\x02ࡕࡖ\\x05йȝ\\x02ࡖࡗ\\x05н')\n buf.write('ȟ\\x02ࡗࡘ\\x05ѓȪ\\x02ࡘ࡙\\x05ћ')\n 
buf.write('Ȯ\\x02࡙࡚\\x05хȣ\\x02࡚࡛\\x05ё')\n buf.write('ȩ\\x02࡛\\u085c\\x05яȨ\\x02\\u085cĆ\\x03\\x02\\x02')\n buf.write('\\x02\\u085d࡞\\x05нȟ\\x02࡞\\u085f\\x05ѣȲ')\n buf.write('\\x02\\u085fࡠ\\x05йȝ\\x02ࡠࡡ\\x05нȟ')\n buf.write('\\x02ࡡࡢ\\x05ѓȪ\\x02ࡢࡣ\\x05ћȮ')\n buf.write('\\x02ࡣࡤ\\x05хȣ\\x02ࡤࡥ\\x05ёȩ')\n buf.write('\\x02ࡥࡦ\\x05яȨ\\x02ࡦࡧ\\x07a\\x02\\x02ࡧ')\n buf.write('ࡨ\\x05хȣ\\x02ࡨࡩ\\x05яȨ\\x02ࡩ')\n buf.write('ࡪ\\x05хȣ\\x02ࡪ\\u086b\\x05ћȮ\\x02\\u086b')\n buf.write('Ĉ\\x03\\x02\\x02\\x02\\u086c\\u086d\\x05нȟ\\x02\\u086d\\u086e')\n buf.write('\\x05ѣȲ\\x02\\u086e\\u086f\\x05йȝ\\x02\\u086fࡰ')\n buf.write('\\x05нȟ\\x02ࡰࡱ\\x05ѓȪ\\x02ࡱࡲ')\n buf.write('\\x05ћȮ\\x02ࡲࡳ\\x05хȣ\\x02ࡳࡴ')\n buf.write('\\x05ёȩ\\x02ࡴࡵ\\x05яȨ\\x02ࡵࡶ')\n buf.write('\\x05љȭ\\x02ࡶĊ\\x03\\x02\\x02\\x02ࡷࡸ\\x05н')\n buf.write('ȟ\\x02ࡸࡹ\\x05ѣȲ\\x02ࡹࡺ\\x05й')\n buf.write('ȝ\\x02ࡺࡻ\\x05ыȦ\\x02ࡻࡼ\\x05ѝ')\n buf.write('ȯ\\x02ࡼࡽ\\x05лȞ\\x02ࡽࡾ\\x05н')\n buf.write('ȟ\\x02ࡾČ\\x03\\x02\\x02\\x02ࡿࢀ\\x05нȟ')\n buf.write('\\x02ࢀࢁ\\x05ѣȲ\\x02ࢁࢂ\\x05йȝ')\n buf.write('\\x02ࢂࢃ\\x05ыȦ\\x02ࢃࢄ\\x05ѝȯ')\n buf.write('\\x02ࢄࢅ\\x05љȭ\\x02ࢅࢆ\\x05хȣ')\n buf.write('\\x02ࢆࢇ\\x05џȰ\\x02ࢇ࢈\\x05нȟ')\n buf.write('\\x02࢈Ď\\x03\\x02\\x02\\x02ࢉࢊ\\x05нȟ\\x02ࢊ')\n buf.write('ࢋ\\x05ѣȲ\\x02ࢋࢌ\\x05нȟ\\x02ࢌ')\n buf.write('ࢍ\\x05йȝ\\x02ࢍࢎ\\x05ѝȯ\\x02ࢎ')\n buf.write('\\u088f\\x05ћȮ\\x02\\u088f\\u0890\\x05нȟ\\x02\\u0890')\n buf.write('Đ\\x03\\x02\\x02\\x02\\u0891\\u0892\\x05нȟ\\x02\\u0892\\u0893')\n buf.write('\\x05ѣȲ\\x02\\u0893\\u0894\\x05хȣ\\x02\\u0894\\u0895')\n buf.write('\\x05љȭ\\x02\\u0895\\u0896\\x05ћȮ\\x02\\u0896\\u0897')\n buf.write('\\x05љȭ\\x02\\u0897Ē\\x03\\x02\\x02\\x02࢙࢘\\x05н')\n buf.write('ȟ\\x02࢙࢚\\x05ѣȲ\\x02࢚࢛\\x05х')\n buf.write('ȣ\\x02࢛࢜\\x05ћȮ\\x02࢜Ĕ\\x03\\x02\\x02')\n buf.write('\\x02࢝࢞\\x05нȟ\\x02࢞࢟\\x05ѣȲ')\n buf.write('\\x02࢟ࢠ\\x05ѓȪ\\x02ࢠࢡ\\x05ыȦ')\n buf.write('\\x02ࢡࢢ\\x05еț\\x02ࢢࢣ\\x05хȣ')\n buf.write('\\x02ࢣࢤ\\x05яȨ\\x02ࢤĖ\\x03\\x02\\x02\\x02ࢥ')\n buf.write('ࢦ\\x05нȟ\\x02ࢦࢧ\\x05ѣȲ\\x02ࢧ')\n 
buf.write('ࢨ\\x05ћȮ\\x02ࢨࢩ\\x05нȟ\\x02ࢩ')\n buf.write('ࢪ\\x05їȬ\\x02ࢪࢫ\\x05яȨ\\x02ࢫ')\n buf.write('ࢬ\\x05еț\\x02ࢬࢭ\\x05ыȦ\\x02ࢭ')\n buf.write('Ę\\x03\\x02\\x02\\x02ࢮࢯ\\x05нȟ\\x02ࢯࢰ')\n buf.write('\\x05ѣȲ\\x02ࢰࢱ\\x05ћȮ\\x02ࢱࢲ')\n buf.write('\\x05їȬ\\x02ࢲࢳ\\x05еț\\x02ࢳࢴ')\n buf.write('\\x05йȝ\\x02ࢴࢵ\\x05ћȮ\\x02ࢵĚ')\n buf.write('\\x03\\x02\\x02\\x02ࢶࢷ\\x05пȠ\\x02ࢷࢸ\\x05е')\n buf.write('ț\\x02ࢸࢹ\\x05хȣ\\x02ࢹࢺ\\x05ы')\n buf.write('Ȧ\\x02ࢺࢻ\\x05ѝȯ\\x02ࢻࢼ\\x05ї')\n buf.write('Ȭ\\x02ࢼࢽ\\x05нȟ\\x02ࢽĜ\\x03\\x02\\x02')\n buf.write('\\x02ࢾࢿ\\x05пȠ\\x02ࢿࣀ\\x05еț')\n buf.write('\\x02ࣀࣁ\\x05ыȦ\\x02ࣁࣂ\\x05љȭ')\n buf.write('\\x02ࣂࣃ\\x05нȟ\\x02ࣃĞ\\x03\\x02\\x02\\x02ࣄ')\n buf.write('ࣅ\\x05пȠ\\x02ࣅࣆ\\x05нȟ\\x02ࣆ')\n buf.write('ࣇ\\x05ћȮ\\x02ࣇࣈ\\x05йȝ\\x02ࣈ')\n buf.write('ࣉ\\x05уȢ\\x02ࣉĠ\\x03\\x02\\x02\\x02࣊࣋')\n buf.write('\\x05пȠ\\x02࣋࣌\\x05хȣ\\x02࣌࣍')\n buf.write('\\x05яȨ\\x02࣍࣎\\x05еț\\x02࣏࣎')\n buf.write('\\x05ыȦ\\x02࣏Ģ\\x03\\x02\\x02\\x02࣐࣑\\x05п')\n buf.write('Ƞ\\x02࣑࣒\\x05хȣ\\x02࣒࣓\\x05ї')\n buf.write('Ȭ\\x02࣓ࣔ\\x05љȭ\\x02ࣔࣕ\\x05ћ')\n buf.write('Ȯ\\x02ࣕĤ\\x03\\x02\\x02\\x02ࣖࣗ\\x05пȠ')\n buf.write('\\x02ࣗࣘ\\x05хȣ\\x02ࣘࣙ\\x05їȬ')\n buf.write('\\x02ࣙࣚ\\x05љȭ\\x02ࣚࣛ\\x05ћȮ')\n buf.write('\\x02ࣛࣜ\\x07a\\x02\\x02ࣜࣝ\\x05џȰ\\x02ࣝ')\n buf.write('ࣞ\\x05еț\\x02ࣞࣟ\\x05ыȦ\\x02ࣟ')\n buf.write('࣠\\x05ѝȯ\\x02࣠࣡\\x05нȟ\\x02࣡')\n buf.write('Ħ\\x03\\x02\\x02\\x02\\u08e2ࣣ\\x05пȠ\\x02ࣣࣤ')\n buf.write('\\x05ыȦ\\x02ࣤࣥ\\x05ёȩ\\x02ࣦࣥ')\n buf.write('\\x05еț\\x02ࣦࣧ\\x05ћȮ\\x02ࣧĨ')\n buf.write('\\x03\\x02\\x02\\x02ࣩࣨ\\x05пȠ\\x02ࣩ࣪\\x05ё')\n buf.write('ȩ\\x02࣪࣫\\x05ыȦ\\x02࣫࣬\\x05ы')\n buf.write('Ȧ\\x02࣭࣬\\x05ёȩ\\x02࣭࣮\\x05ѡ')\n buf.write('ȱ\\x02࣮࣯\\x05хȣ\\x02ࣰ࣯\\x05я')\n buf.write('Ȩ\\x02ࣰࣱ\\x05сȡ\\x02ࣱĪ\\x03\\x02\\x02')\n buf.write('\\x02ࣲࣳ\\x05пȠ\\x02ࣳࣴ\\x05ёȩ')\n buf.write('\\x02ࣴࣵ\\x05ыȦ\\x02ࣶࣵ\\x05ыȦ')\n buf.write('\\x02ࣶࣷ\\x05ёȩ\\x02ࣷࣸ\\x05ѡȱ')\n buf.write('\\x02ࣹࣸ\\x05љȭ\\x02ࣹĬ\\x03\\x02\\x02\\x02ࣺ')\n buf.write('ࣻ\\x05пȠ\\x02ࣻࣼ\\x05ёȩ\\x02ࣼ')\n buf.write('ࣽ\\x05їȬ\\x02ࣽĮ\\x03\\x02\\x02\\x02ࣾࣿ')\n 
buf.write('\\x05пȠ\\x02ࣿऀ\\x05ёȩ\\x02ऀँ')\n buf.write('\\x05їȬ\\x02ँं\\x05еț\\x02ंः')\n buf.write('\\x05ыȦ\\x02ःऄ\\x05ыȦ\\x02ऄİ')\n buf.write('\\x03\\x02\\x02\\x02अआ\\x05пȠ\\x02आइ\\x05ё')\n buf.write('ȩ\\x02इई\\x05їȬ\\x02ईउ\\x05й')\n buf.write('ȝ\\x02उऊ\\x05нȟ\\x02ऊIJ\\x03\\x02\\x02')\n buf.write('\\x02ऋऌ\\x05пȠ\\x02ऌऍ\\x05їȬ')\n buf.write('\\x02ऍऎ\\x05ёȩ\\x02ऎए\\x05эȧ')\n buf.write('\\x02एĴ\\x03\\x02\\x02\\x02ऐऑ\\x05пȠ\\x02ऑ')\n buf.write('ऒ\\x05ѝȯ\\x02ऒओ\\x05ыȦ\\x02ओ')\n buf.write('औ\\x05ыȦ\\x02औĶ\\x03\\x02\\x02\\x02कख')\n buf.write('\\x05пȠ\\x02खग\\x05ѝȯ\\x02गघ')\n buf.write('\\x05яȨ\\x02घङ\\x05йȝ\\x02ङच')\n buf.write('\\x05ћȮ\\x02चछ\\x05хȣ\\x02छज')\n buf.write('\\x05ёȩ\\x02जझ\\x05яȨ\\x02झĸ')\n buf.write('\\x03\\x02\\x02\\x02ञट\\x05сȡ\\x02टठ\\x05ё')\n buf.write('ȩ\\x02ठड\\x05ћȮ\\x02डढ\\x05ё')\n buf.write('ȩ\\x02ढĺ\\x03\\x02\\x02\\x02णत\\x05сȡ')\n buf.write('\\x02तथ\\x05їȬ\\x02थद\\x05еț')\n buf.write('\\x02दध\\x05яȨ\\x02धन\\x05ћȮ')\n buf.write('\\x02नļ\\x03\\x02\\x02\\x02ऩप\\x05сȡ\\x02प')\n buf.write('फ\\x05їȬ\\x02फब\\x05ёȩ\\x02ब')\n buf.write('भ\\x05ѝȯ\\x02भम\\x05ѓȪ\\x02म')\n buf.write('ľ\\x03\\x02\\x02\\x02यर\\x05сȡ\\x02रऱ')\n buf.write('\\x05їȬ\\x02ऱल\\x05ёȩ\\x02लळ')\n buf.write('\\x05ѝȯ\\x02ळऴ\\x05ѓȪ\\x02ऴव')\n buf.write('\\x05хȣ\\x02वश\\x05яȨ\\x02शष')\n buf.write('\\x05сȡ\\x02षŀ\\x03\\x02\\x02\\x02सह\\x05у')\n buf.write('Ȣ\\x02हऺ\\x05еț\\x02ऺऻ\\x05љ')\n buf.write('ȭ\\x02ऻ़\\x05уȢ\\x02़ł\\x03\\x02\\x02')\n buf.write('\\x02ऽा\\x05уȢ\\x02ाि\\x05еț')\n buf.write('\\x02िी\\x05џȰ\\x02ीु\\x05хȣ')\n buf.write('\\x02ुू\\x05яȨ\\x02ूृ\\x05сȡ')\n buf.write('\\x02ृń\\x03\\x02\\x02\\x02ॄॅ\\x05уȢ\\x02ॅ')\n buf.write('ॆ\\x05хȣ\\x02ॆे\\x05лȞ\\x02े')\n buf.write('ै\\x05нȟ\\x02ैņ\\x03\\x02\\x02\\x02ॉॊ')\n buf.write('\\x05уȢ\\x02ॊो\\x05ёȩ\\x02ोौ')\n buf.write('\\x05ѝȯ\\x02ौ्\\x05їȬ\\x02्ň')\n buf.write('\\x03\\x02\\x02\\x02ॎॏ\\x05хȣ\\x02ॏॐ\\x05п')\n buf.write('Ƞ\\x02ॐŊ\\x03\\x02\\x02\\x02॒॑\\x05хȣ')\n buf.write('\\x02॒॓\\x05сȡ\\x02॓॔\\x05яȨ')\n 
buf.write('\\x02॔ॕ\\x05ёȩ\\x02ॕॖ\\x05їȬ')\n buf.write('\\x02ॖॗ\\x05нȟ\\x02ॗŌ\\x03\\x02\\x02\\x02क़')\n buf.write('ख़\\x05хȣ\\x02ख़ग़\\x05эȧ\\x02ग़')\n buf.write('ज़\\x05эȧ\\x02ज़ड़\\x05нȟ\\x02ड़')\n buf.write('ढ़\\x05лȞ\\x02ढ़फ़\\x05хȣ\\x02फ़')\n buf.write('य़\\x05еț\\x02य़ॠ\\x05ћȮ\\x02ॠ')\n buf.write('ॡ\\x05нȟ\\x02ॡŎ\\x03\\x02\\x02\\x02ॢॣ')\n buf.write('\\x05хȣ\\x02ॣ।\\x05яȨ\\x02।Ő')\n buf.write('\\x03\\x02\\x02\\x02॥०\\x05хȣ\\x02०१\\x05я')\n buf.write('Ȩ\\x02१२\\x05йȝ\\x02२३\\x05ы')\n buf.write('Ȧ\\x02३४\\x05ѝȯ\\x02४५\\x05л')\n buf.write('Ȟ\\x02५६\\x05нȟ\\x02६Œ\\x03\\x02\\x02')\n buf.write('\\x02७८\\x05хȣ\\x02८९\\x05яȨ')\n buf.write('\\x02९॰\\x05йȝ\\x02॰ॱ\\x05ыȦ')\n buf.write('\\x02ॱॲ\\x05ѝȯ\\x02ॲॳ\\x05лȞ')\n buf.write('\\x02ॳॴ\\x05хȣ\\x02ॴॵ\\x05яȨ')\n buf.write('\\x02ॵॶ\\x05сȡ\\x02ॶŔ\\x03\\x02\\x02\\x02ॷ')\n buf.write('ॸ\\x05хȣ\\x02ॸॹ\\x05яȨ\\x02ॹ')\n buf.write('ॺ\\x05йȝ\\x02ॺॻ\\x05їȬ\\x02ॻ')\n buf.write('ॼ\\x05нȟ\\x02ॼॽ\\x05эȧ\\x02ॽ')\n buf.write('ॾ\\x05нȟ\\x02ॾॿ\\x05яȨ\\x02ॿ')\n buf.write('ঀ\\x05ћȮ\\x02ঀŖ\\x03\\x02\\x02\\x02ঁং')\n buf.write('\\x05хȣ\\x02ংঃ\\x05яȨ\\x02ঃ\\u0984')\n buf.write('\\x05лȞ\\x02\\u0984অ\\x05нȟ\\x02অআ')\n buf.write('\\x05яȨ\\x02আই\\x05ћȮ\\x02ইŘ')\n buf.write('\\x03\\x02\\x02\\x02ঈউ\\x05хȣ\\x02উঊ\\x05я')\n buf.write('Ȩ\\x02ঊঋ\\x05лȞ\\x02ঋঌ\\x05н')\n buf.write('ȟ\\x02ঌ\\u098d\\x05ѣȲ\\x02\\u098dŚ\\x03\\x02\\x02')\n buf.write('\\x02\\u098eএ\\x05хȣ\\x02এঐ\\x05яȨ')\n buf.write('\\x02ঐ\\u0991\\x05лȞ\\x02\\u0991\\u0992\\x05нȟ')\n buf.write('\\x02\\u0992ও\\x05ѣȲ\\x02ওঔ\\x05нȟ')\n buf.write('\\x02ঔক\\x05лȞ\\x02কŜ\\x03\\x02\\x02\\x02খ')\n buf.write('গ\\x05хȣ\\x02গঘ\\x05яȨ\\x02ঘ')\n buf.write('ঙ\\x05лȞ\\x02ঙচ\\x05хȣ\\x02চ')\n buf.write('ছ\\x05йȝ\\x02ছজ\\x05еț\\x02জ')\n buf.write('ঝ\\x05ћȮ\\x02ঝঞ\\x05ёȩ\\x02ঞ')\n buf.write('ট\\x05їȬ\\x02টŞ\\x03\\x02\\x02\\x02ঠড')\n buf.write('\\x05хȣ\\x02ডঢ\\x05яȨ\\x02ঢণ')\n buf.write('\\x05лȞ\\x02ণত\\x05хȣ\\x02তথ')\n buf.write('\\x05йȝ\\x02থদ\\x05нȟ\\x02দধ')\n 
buf.write('\\x05љȭ\\x02ধŠ\\x03\\x02\\x02\\x02ন\\u09a9\\x05х')\n buf.write('ȣ\\x02\\u09a9প\\x05яȨ\\x02পফ\\x05п')\n buf.write('Ƞ\\x02ফব\\x05хȣ\\x02বভ\\x05я')\n buf.write('Ȩ\\x02ভম\\x05хȣ\\x02ময\\x05ћ')\n buf.write('Ȯ\\x02যর\\x05нȟ\\x02রŢ\\x03\\x02\\x02')\n buf.write('\\x02\\u09b1ল\\x05хȣ\\x02ল\\u09b3\\x05яȨ')\n buf.write('\\x02\\u09b3\\u09b4\\x05ыȦ\\x02\\u09b4\\u09b5\\x05хȣ')\n buf.write('\\x02\\u09b5শ\\x05яȨ\\x02শষ\\x05нȟ')\n buf.write('\\x02ষŤ\\x03\\x02\\x02\\x02সহ\\x05хȣ\\x02হ')\n buf.write('\\u09ba\\x05яȨ\\x02\\u09ba\\u09bb\\x05яȨ\\x02\\u09bb')\n buf.write('়\\x05нȟ\\x02়ঽ\\x05їȬ\\x02ঽ')\n buf.write('Ŧ\\x03\\x02\\x02\\x02াি\\x05хȣ\\x02িী')\n buf.write('\\x05яȨ\\x02ীু\\x05ёȩ\\x02ুূ')\n buf.write('\\x05ѝȯ\\x02ূৃ\\x05ћȮ\\x02ৃŨ')\n buf.write('\\x03\\x02\\x02\\x02ৄ\\u09c5\\x05хȣ\\x02\\u09c5\\u09c6\\x05я')\n buf.write('Ȩ\\x02\\u09c6ে\\x05љȭ\\x02েৈ\\x05н')\n buf.write('ȟ\\x02ৈ\\u09c9\\x05їȬ\\x02\\u09c9\\u09ca\\x05ћ')\n buf.write('Ȯ\\x02\\u09caŪ\\x03\\x02\\x02\\x02োৌ\\x05хȣ')\n buf.write('\\x02ৌ্\\x05яȨ\\x02্ৎ\\x05љȭ')\n buf.write('\\x02ৎ\\u09cf\\x05ћȮ\\x02\\u09cf\\u09d0\\x05еț')\n buf.write('\\x02\\u09d0\\u09d1\\x05яȨ\\x02\\u09d1\\u09d2\\x05ћȮ')\n buf.write('\\x02\\u09d2\\u09d3\\x05хȣ\\x02\\u09d3\\u09d4\\x05еț')\n buf.write('\\x02\\u09d4\\u09d5\\x05зȜ\\x02\\u09d5\\u09d6\\x05ыȦ')\n buf.write('\\x02\\u09d6ৗ\\x05нȟ\\x02ৗŬ\\x03\\x02\\x02\\x02\\u09d8')\n buf.write('\\u09d9\\x05хȣ\\x02\\u09d9\\u09da\\x05яȨ\\x02\\u09da')\n buf.write('\\u09db\\x05љȭ\\x02\\u09dbড়\\x05ћȮ\\x02ড়')\n buf.write('ঢ়\\x05нȟ\\x02ঢ়\\u09de\\x05еț\\x02\\u09de')\n buf.write('য়\\x05лȞ\\x02য়Ů\\x03\\x02\\x02\\x02ৠৡ')\n buf.write('\\x05хȣ\\x02ৡৢ\\x05яȨ\\x02ৢৣ')\n buf.write('\\x05ћȮ\\x02ৣŰ\\x03\\x02\\x02\\x02\\u09e4\\u09e5\\x05х')\n buf.write('ȣ\\x02\\u09e5০\\x05яȨ\\x02০১\\x05ћ')\n buf.write('Ȯ\\x02১২\\x05нȟ\\x02২৩\\x05с')\n buf.write('ȡ\\x02৩৪\\x05нȟ\\x02৪৫\\x05ї')\n buf.write('Ȭ\\x02৫Ų\\x03\\x02\\x02\\x02৬৭\\x05хȣ')\n buf.write('\\x02৭৮\\x05яȨ\\x02৮৯\\x05ћȮ')\n buf.write('\\x02৯ৰ\\x05нȟ\\x02ৰৱ\\x05їȬ')\n 
buf.write('\\x02ৱ৲\\x05љȭ\\x02৲৳\\x05нȟ')\n buf.write('\\x02৳৴\\x05йȝ\\x02৴৵\\x05ћȮ')\n buf.write('\\x02৵Ŵ\\x03\\x02\\x02\\x02৶৷\\x05хȣ\\x02৷')\n buf.write('৸\\x05яȨ\\x02৸৹\\x05ћȮ\\x02৹')\n buf.write('৺\\x05нȟ\\x02৺৻\\x05їȬ\\x02৻')\n buf.write('ৼ\\x05џȰ\\x02ৼ৽\\x05еț\\x02৽')\n buf.write('৾\\x05ыȦ\\x02৾Ŷ\\x03\\x02\\x02\\x02\\u09ff\\u0a00')\n buf.write('\\x05хȣ\\x02\\u0a00ਁ\\x05яȨ\\x02ਁਂ')\n buf.write('\\x05ћȮ\\x02ਂਃ\\x05ёȩ\\x02ਃŸ')\n buf.write('\\x03\\x02\\x02\\x02\\u0a04ਅ\\x05хȣ\\x02ਅਆ\\x05я')\n buf.write('Ȩ\\x02ਆਇ\\x05џȰ\\x02ਇਈ\\x05е')\n buf.write('ț\\x02ਈਉ\\x05ыȦ\\x02ਉਊ\\x05х')\n buf.write('ȣ\\x02ਊ\\u0a0b\\x05лȞ\\x02\\u0a0b\\u0a0c\\x05е')\n buf.write('ț\\x02\\u0a0c\\u0a0d\\x05ћȮ\\x02\\u0a0d\\u0a0e\\x05н')\n buf.write('ȟ\\x02\\u0a0eź\\x03\\x02\\x02\\x02ਏਐ\\x05хȣ')\n buf.write('\\x02ਐ\\u0a11\\x05љȭ\\x02\\u0a11ż\\x03\\x02\\x02\\x02\\u0a12')\n buf.write('ਓ\\x05хȣ\\x02ਓਔ\\x05љȭ\\x02ਔ')\n buf.write('ਕ\\x05ёȩ\\x02ਕਖ\\x05ыȦ\\x02ਖ')\n buf.write('ਗ\\x05еț\\x02ਗਘ\\x05ћȮ\\x02ਘ')\n buf.write('ਙ\\x05хȣ\\x02ਙਚ\\x05ёȩ\\x02ਚ')\n buf.write('ਛ\\x05яȨ\\x02ਛž\\x03\\x02\\x02\\x02ਜਝ')\n buf.write('\\x05хȣ\\x02ਝਞ\\x05ћȮ\\x02ਞਟ')\n buf.write('\\x05нȟ\\x02ਟਠ\\x05їȬ\\x02ਠਡ')\n buf.write('\\x05еț\\x02ਡਢ\\x05ћȮ\\x02ਢਣ')\n buf.write('\\x05нȟ\\x02ਣƀ\\x03\\x02\\x02\\x02ਤਥ\\x05ч')\n buf.write('Ȥ\\x02ਥਦ\\x05еț\\x02ਦਧ\\x05џ')\n buf.write('Ȱ\\x02ਧਨ\\x05еț\\x02ਨƂ\\x03\\x02\\x02')\n buf.write('\\x02\\u0a29ਪ\\x05чȤ\\x02ਪਫ\\x05ёȩ')\n buf.write('\\x02ਫਬ\\x05хȣ\\x02ਬਭ\\x05яȨ')\n buf.write('\\x02ਭƄ\\x03\\x02\\x02\\x02ਮਯ\\x05щȥ\\x02ਯ')\n buf.write('ਰ\\x05нȟ\\x02ਰ\\u0a31\\x05нȟ\\x02\\u0a31')\n buf.write('ਲ\\x05ѓȪ\\x02ਲƆ\\x03\\x02\\x02\\x02ਲ਼\\u0a34')\n buf.write('\\x05ыȦ\\x02\\u0a34ਵ\\x05еț\\x02ਵਸ਼')\n buf.write('\\x05яȨ\\x02ਸ਼\\u0a37\\x05сȡ\\x02\\u0a37ਸ')\n buf.write('\\x05ѝȯ\\x02ਸਹ\\x05еț\\x02ਹ\\u0a3a')\n buf.write('\\x05сȡ\\x02\\u0a3a\\u0a3b\\x05нȟ\\x02\\u0a3bƈ')\n buf.write('\\x03\\x02\\x02\\x02਼\\u0a3d\\x05ыȦ\\x02\\u0a3dਾ\\x05е')\n buf.write('ț\\x02ਾਿ\\x05љȭ\\x02ਿੀ\\x05ћ')\n 
buf.write('Ȯ\\x02ੀƊ\\x03\\x02\\x02\\x02ੁੂ\\x05ыȦ')\n buf.write('\\x02ੂ\\u0a43\\x05еț\\x02\\u0a43\\u0a44\\x05љȭ')\n buf.write('\\x02\\u0a44\\u0a45\\x05ћȮ\\x02\\u0a45\\u0a46\\x07a\\x02\\x02\\u0a46')\n buf.write('ੇ\\x05џȰ\\x02ੇੈ\\x05еț\\x02ੈ')\n buf.write('\\u0a49\\x05ыȦ\\x02\\u0a49\\u0a4a\\x05ѝȯ\\x02\\u0a4a')\n buf.write('ੋ\\x05нȟ\\x02ੋƌ\\x03\\x02\\x02\\x02ੌ੍')\n buf.write('\\x05ыȦ\\x02੍\\u0a4e\\x05нȟ\\x02\\u0a4e\\u0a4f')\n buf.write('\\x05еț\\x02\\u0a4f\\u0a50\\x05лȞ\\x02\\u0a50ੑ')\n buf.write('\\x05хȣ\\x02ੑ\\u0a52\\x05яȨ\\x02\\u0a52\\u0a53')\n buf.write('\\x05сȡ\\x02\\u0a53Ǝ\\x03\\x02\\x02\\x02\\u0a54\\u0a55\\x05ы')\n buf.write('Ȧ\\x02\\u0a55\\u0a56\\x05нȟ\\x02\\u0a56\\u0a57\\x05п')\n buf.write('Ƞ\\x02\\u0a57\\u0a58\\x05ћȮ\\x02\\u0a58Ɛ\\x03\\x02\\x02')\n buf.write('\\x02ਖ਼ਗ਼\\x05ыȦ\\x02ਗ਼ਜ਼\\x05нȟ')\n buf.write('\\x02ਜ਼ੜ\\x05џȰ\\x02ੜ\\u0a5d\\x05нȟ')\n buf.write('\\x02\\u0a5dਫ਼\\x05ыȦ\\x02ਫ਼ƒ\\x03\\x02\\x02\\x02\\u0a5f')\n buf.write('\\u0a60\\x05ыȦ\\x02\\u0a60\\u0a61\\x05хȣ\\x02\\u0a61')\n buf.write('\\u0a62\\x05зȜ\\x02\\u0a62\\u0a63\\x05їȬ\\x02\\u0a63')\n buf.write('\\u0a64\\x05еț\\x02\\u0a64\\u0a65\\x05їȬ\\x02\\u0a65')\n buf.write('੦\\x05ѥȳ\\x02੦Ɣ\\x03\\x02\\x02\\x02੧੨')\n buf.write('\\x05ыȦ\\x02੨੩\\x05хȣ\\x02੩੪')\n buf.write('\\x05щȥ\\x02੪੫\\x05нȟ\\x02੫Ɩ')\n buf.write('\\x03\\x02\\x02\\x02੬੭\\x05ыȦ\\x02੭੮\\x05х')\n buf.write('ȣ\\x02੮੯\\x05щȥ\\x02੯ੰ\\x05н')\n buf.write('ȟ\\x02ੰੱ\\x074\\x02\\x02ੱƘ\\x03\\x02\\x02\\x02ੲ')\n buf.write('ੳ\\x05ыȦ\\x02ੳੴ\\x05хȣ\\x02ੴ')\n buf.write('ੵ\\x05щȥ\\x02ੵ੶\\x05нȟ\\x02੶')\n buf.write('\\u0a77\\x076\\x02\\x02\\u0a77ƚ\\x03\\x02\\x02\\x02\\u0a78\\u0a79\\x05ы'\n )\n buf.write('Ȧ\\x02\\u0a79\\u0a7a\\x05хȣ\\x02\\u0a7a\\u0a7b\\x05щ')\n buf.write('ȥ\\x02\\u0a7b\\u0a7c\\x05нȟ\\x02\\u0a7c\\u0a7d\\x05й')\n buf.write('ȝ\\x02\\u0a7dƜ\\x03\\x02\\x02\\x02\\u0a7e\\u0a7f\\x05ыȦ')\n buf.write('\\x02\\u0a7f\\u0a80\\x05хȣ\\x02\\u0a80ઁ\\x05эȧ')\n buf.write('\\x02ઁં\\x05хȣ\\x02ંઃ\\x05ћȮ')\n 
buf.write('\\x02ઃƞ\\x03\\x02\\x02\\x02\\u0a84અ\\x05ыȦ\\x02અ')\n buf.write('આ\\x05ёȩ\\x02આઇ\\x05йȝ\\x02ઇ')\n buf.write('ઈ\\x05еț\\x02ઈઉ\\x05ыȦ\\x02ઉ')\n buf.write('Ơ\\x03\\x02\\x02\\x02ઊઋ\\x05ыȦ\\x02ઋઌ')\n buf.write('\\x05ёȩ\\x02ઌઍ\\x05йȝ\\x02ઍ\\u0a8e')\n buf.write('\\x05щȥ\\x02\\u0a8eƢ\\x03\\x02\\x02\\x02એઐ\\x05ы')\n buf.write('Ȧ\\x02ઐઑ\\x05ёȩ\\x02ઑ\\u0a92\\x05й')\n buf.write('ȝ\\x02\\u0a92ઓ\\x05щȥ\\x02ઓઔ\\x05н')\n buf.write('ȟ\\x02ઔક\\x05лȞ\\x02કƤ\\x03\\x02\\x02')\n buf.write('\\x02ખગ\\x05ыȦ\\x02ગઘ\\x05ёȩ')\n buf.write('\\x02ઘઙ\\x05сȡ\\x02ઙƦ\\x03\\x02\\x02\\x02ચ')\n buf.write('છ\\x05ыȦ\\x02છજ\\x05ёȩ\\x02જ')\n buf.write('ઝ\\x05сȡ\\x02ઝઞ\\x05ёȩ\\x02ઞ')\n buf.write('ટ\\x05пȠ\\x02ટઠ\\x05пȠ\\x02ઠ')\n buf.write('ƨ\\x03\\x02\\x02\\x02ડઢ\\x05ыȦ\\x02ઢણ')\n buf.write('\\x05ёȩ\\x02ણત\\x05сȡ\\x02તથ')\n buf.write('\\x05ёȩ\\x02થદ\\x05яȨ\\x02દƪ')\n buf.write('\\x03\\x02\\x02\\x02ધન\\x05ыȦ\\x02ન\\u0aa9\\x05ё')\n buf.write('ȩ\\x02\\u0aa9પ\\x05яȨ\\x02પફ\\x05с')\n buf.write('ȡ\\x02ફƬ\\x03\\x02\\x02\\x02બભ\\x05ыȦ')\n buf.write('\\x02ભમ\\x05ёȩ\\x02મય\\x05ёȩ')\n buf.write('\\x02યર\\x05ѓȪ\\x02રƮ\\x03\\x02\\x02\\x02\\u0ab1')\n buf.write('લ\\x05эȧ\\x02લળ\\x05еț\\x02ળ')\n buf.write('\\u0ab4\\x05хȣ\\x02\\u0ab4વ\\x05яȨ\\x02વ')\n buf.write('ư\\x03\\x02\\x02\\x02શષ\\x05эȧ\\x02ષસ')\n buf.write('\\x05еț\\x02સહ\\x05ѓȪ\\x02હƲ')\n buf.write('\\x03\\x02\\x02\\x02\\u0aba\\u0abb\\x05эȧ\\x02\\u0abb઼\\x05е')\n buf.write('ț\\x02઼ઽ\\x05ћȮ\\x02ઽા\\x05й')\n buf.write('ȝ\\x02ાિ\\x05уȢ\\x02િી\\x05н')\n buf.write('ȟ\\x02ીુ\\x05лȞ\\x02ુƴ\\x03\\x02\\x02')\n buf.write('\\x02ૂૃ\\x05эȧ\\x02ૃૄ\\x05еț')\n buf.write('\\x02ૄૅ\\x05ѣȲ\\x02ૅ\\u0ac6\\x05џȰ')\n buf.write('\\x02\\u0ac6ે\\x05еț\\x02ેૈ\\x05ыȦ')\n buf.write('\\x02ૈૉ\\x05ѝȯ\\x02ૉ\\u0aca\\x05нȟ')\n buf.write('\\x02\\u0acaƶ\\x03\\x02\\x02\\x02ોૌ\\x05эȧ\\x02ૌ')\n buf.write('્\\x05нȟ\\x02્\\u0ace\\x05еț\\x02\\u0ace')\n buf.write('\\u0acf\\x05љȭ\\x02\\u0acfૐ\\x05ѝȯ\\x02ૐ')\n buf.write('\\u0ad1\\x05їȬ\\x02\\u0ad1\\u0ad2\\x05нȟ\\x02\\u0ad2')\n 
buf.write('\\u0ad3\\x05љȭ\\x02\\u0ad3Ƹ\\x03\\x02\\x02\\x02\\u0ad4\\u0ad5')\n buf.write('\\x05эȧ\\x02\\u0ad5\\u0ad6\\x05нȟ\\x02\\u0ad6\\u0ad7')\n buf.write('\\x05эȧ\\x02\\u0ad7\\u0ad8\\x05зȜ\\x02\\u0ad8\\u0ad9')\n buf.write('\\x05нȟ\\x02\\u0ad9\\u0ada\\x05їȬ\\x02\\u0adaƺ')\n buf.write('\\x03\\x02\\x02\\x02\\u0adb\\u0adc\\x05эȧ\\x02\\u0adc\\u0add\\x05н')\n buf.write('ȟ\\x02\\u0add\\u0ade\\x05їȬ\\x02\\u0ade\\u0adf\\x05с')\n buf.write('ȡ\\x02\\u0adfૠ\\x05нȟ\\x02ૠƼ\\x03\\x02\\x02')\n buf.write('\\x02ૡૢ\\x05эȧ\\x02ૢૣ\\x05хȣ')\n buf.write('\\x02ૣ\\u0ae4\\x05яȨ\\x02\\u0ae4\\u0ae5\\x05ѝȯ')\n buf.write('\\x02\\u0ae5૦\\x05љȭ\\x02૦ƾ\\x03\\x02\\x02\\x02૧')\n buf.write('૨\\x05эȧ\\x02૨૩\\x05хȣ\\x02૩')\n buf.write('૪\\x05яȨ\\x02૪૫\\x05ѝȯ\\x02૫')\n buf.write('૬\\x05ћȮ\\x02૬૭\\x05нȟ\\x02૭')\n buf.write('ǀ\\x03\\x02\\x02\\x02૮૯\\x05эȧ\\x02૯૰')\n buf.write('\\x05хȣ\\x02૰૱\\x05яȨ\\x02૱\\u0af2')\n buf.write('\\x05џȰ\\x02\\u0af2\\u0af3\\x05еț\\x02\\u0af3\\u0af4')\n buf.write('\\x05ыȦ\\x02\\u0af4\\u0af5\\x05ѝȯ\\x02\\u0af5\\u0af6')\n buf.write('\\x05нȟ\\x02\\u0af6ǂ\\x03\\x02\\x02\\x02\\u0af7\\u0af8\\x05э')\n buf.write('ȧ\\x02\\u0af8ૹ\\x05ыȦ\\x02ૹૺ\\x05љ')\n buf.write('ȭ\\x02ૺૻ\\x05ыȦ\\x02ૻૼ\\x05е')\n buf.write('ț\\x02ૼ૽\\x05зȜ\\x02૽૾\\x05н')\n buf.write('ȟ\\x02૾૿\\x05ыȦ\\x02૿DŽ\\x03\\x02\\x02')\n buf.write('\\x02\\u0b00ଁ\\x05эȧ\\x02ଁଂ\\x05ёȩ')\n buf.write('\\x02ଂଃ\\x05лȞ\\x02ଃ\\u0b04\\x05нȟ')\n buf.write('\\x02\\u0b04dž\\x03\\x02\\x02\\x02ଅଆ\\x05эȧ\\x02ଆ')\n buf.write('ଇ\\x05ёȩ\\x02ଇଈ\\x05лȞ\\x02ଈ')\n buf.write('ଉ\\x05нȟ\\x02ଉଊ\\x05ыȦ\\x02ଊ')\n buf.write('Lj\\x03\\x02\\x02\\x02ଋଌ\\x05эȧ\\x02ଌ\\u0b0d')\n buf.write('\\x05ёȩ\\x02\\u0b0d\\u0b0e\\x05лȞ\\x02\\u0b0eଏ')\n buf.write('\\x05хȣ\\x02ଏଐ\\x05пȠ\\x02ଐ\\u0b11')\n buf.write('\\x05ѥȳ\\x02\\u0b11NJ\\x03\\x02\\x02\\x02\\u0b12ଓ\\x05э')\n buf.write('ȧ\\x02ଓଔ\\x05ёȩ\\x02ଔକ\\x05я')\n buf.write('Ȩ\\x02କଖ\\x05ћȮ\\x02ଖଗ\\x05у')\n buf.write('Ȣ\\x02ଗnj\\x03\\x02\\x02\\x02ଘଙ\\x05эȧ')\n buf.write('\\x02ଙଚ\\x05ѝȯ\\x02ଚଛ\\x05ыȦ')\n 
buf.write('\\x02ଛଜ\\x05ћȮ\\x02ଜଝ\\x05хȣ')\n buf.write('\\x02ଝଞ\\x05љȭ\\x02ଞଟ\\x05нȟ')\n buf.write('\\x02ଟଠ\\x05ћȮ\\x02ଠǎ\\x03\\x02\\x02\\x02ଡ')\n buf.write('ଢ\\x05яȨ\\x02ଢଣ\\x05еț\\x02ଣ')\n buf.write('ତ\\x05эȧ\\x02ତଥ\\x05нȟ\\x02ଥ')\n buf.write('ǐ\\x03\\x02\\x02\\x02ଦଧ\\x05яȨ\\x02ଧନ')\n buf.write('\\x05еț\\x02ନ\\u0b29\\x05яȨ\\x02\\u0b29ǒ')\n buf.write('\\x03\\x02\\x02\\x02ପଫ\\x05яȨ\\x02ଫବ\\x05е')\n buf.write('ț\\x02ବଭ\\x05ћȮ\\x02ଭମ\\x05ѝ')\n buf.write('ȯ\\x02ମଯ\\x05їȬ\\x02ଯର\\x05е')\n buf.write('ț\\x02ର\\u0b31\\x05ыȦ\\x02\\u0b31ǔ\\x03\\x02\\x02')\n buf.write('\\x02ଲଳ\\x05яȨ\\x02ଳ\\u0b34\\x05еț')\n buf.write('\\x02\\u0b34ଵ\\x05ћȮ\\x02ଵଶ\\x05ѝȯ')\n buf.write('\\x02ଶଷ\\x05їȬ\\x02ଷସ\\x05еț')\n buf.write('\\x02ସହ\\x05ыȦ\\x02ହ\\u0b3a\\x05яȨ')\n buf.write('\\x02\\u0b3aǖ\\x03\\x02\\x02\\x02\\u0b3b଼\\x05яȨ\\x02଼')\n buf.write('ଽ\\x05еț\\x02ଽା\\x05џȰ\\x02ା')\n buf.write('ǘ\\x03\\x02\\x02\\x02ିୀ\\x05яȨ\\x02ୀୁ')\n buf.write('\\x05йȝ\\x02ୁୂ\\x05уȢ\\x02ୂୃ')\n buf.write('\\x05еț\\x02ୃୄ\\x05їȬ\\x02ୄǚ')\n buf.write('\\x03\\x02\\x02\\x02\\u0b45\\u0b46\\x05яȨ\\x02\\u0b46େ\\x05й')\n buf.write('ȝ\\x02େୈ\\x05уȢ\\x02ୈ\\u0b49\\x05е')\n buf.write('ț\\x02\\u0b49\\u0b4a\\x05їȬ\\x02\\u0b4aୋ\\x07a\\x02')\n buf.write('\\x02ୋୌ\\x05йȝ\\x02ୌ୍\\x05љȭ')\n buf.write('\\x02୍ǜ\\x03\\x02\\x02\\x02\\u0b4e\\u0b4f\\x05яȨ\\x02\\u0b4f')\n buf.write('\\u0b50\\x05йȝ\\x02\\u0b50\\u0b51\\x05ыȦ\\x02\\u0b51')\n buf.write('\\u0b52\\x05ёȩ\\x02\\u0b52\\u0b53\\x05зȜ\\x02\\u0b53')\n buf.write('Ǟ\\x03\\x02\\x02\\x02\\u0b54୕\\x05яȨ\\x02୕ୖ')\n buf.write('\\x05нȟ\\x02ୖୗ\\x05љȭ\\x02ୗ\\u0b58')\n buf.write('\\x05ћȮ\\x02\\u0b58\\u0b59\\x05нȟ\\x02\\u0b59\\u0b5a')\n buf.write('\\x05лȞ\\x02\\u0b5aǠ\\x03\\x02\\x02\\x02\\u0b5bଡ଼\\x05я')\n buf.write('Ȩ\\x02ଡ଼ଢ଼\\x05нȟ\\x02ଢ଼\\u0b5e\\x05ѡ')\n buf.write('ȱ\\x02\\u0b5eǢ\\x03\\x02\\x02\\x02ୟୠ\\x05яȨ')\n buf.write('\\x02ୠୡ\\x05ёȩ\\x02ୡǤ\\x03\\x02\\x02\\x02ୢ')\n buf.write('ୣ\\x05яȨ\\x02ୣ\\u0b64\\x05ёȩ\\x02\\u0b64')\n buf.write('\\u0b65\\x05еț\\x02\\u0b65୦\\x05ѝȯ\\x02୦')\n 
buf.write('୧\\x05лȞ\\x02୧୨\\x05хȣ\\x02୨')\n buf.write('୩\\x05ћȮ\\x02୩Ǧ\\x03\\x02\\x02\\x02୪୫')\n buf.write('\\x05яȨ\\x02୫୬\\x05ёȩ\\x02୬୭')\n buf.write('\\x05йȝ\\x02୭୮\\x05еț\\x02୮୯')\n buf.write('\\x05йȝ\\x02୯୰\\x05уȢ\\x02୰ୱ')\n buf.write('\\x05нȟ\\x02ୱǨ\\x03\\x02\\x02\\x02୲୳\\x05я')\n buf.write('Ȩ\\x02୳୴\\x05ёȩ\\x02୴୵\\x05й')\n buf.write('ȝ\\x02୵୶\\x05ёȩ\\x02୶୷\\x05ѓ')\n buf.write('Ȫ\\x02୷\\u0b78\\x05ѥȳ\\x02\\u0b78Ǫ\\x03\\x02\\x02')\n buf.write('\\x02\\u0b79\\u0b7a\\x05яȨ\\x02\\u0b7a\\u0b7b\\x05ёȩ')\n buf.write('\\x02\\u0b7b\\u0b7c\\x05йȝ\\x02\\u0b7c\\u0b7d\\x05ѥȳ')\n buf.write('\\x02\\u0b7d\\u0b7e\\x05йȝ\\x02\\u0b7e\\u0b7f\\x05ыȦ')\n buf.write('\\x02\\u0b7f\\u0b80\\x05нȟ\\x02\\u0b80Ǭ\\x03\\x02\\x02\\x02\\u0b81')\n buf.write('ஂ\\x05яȨ\\x02ஂஃ\\x05ёȩ\\x02ஃ')\n buf.write('\\u0b84\\x05нȟ\\x02\\u0b84அ\\x05яȨ\\x02அ')\n buf.write('ஆ\\x05ћȮ\\x02ஆஇ\\x05хȣ\\x02இ')\n buf.write('ஈ\\x05ћȮ\\x02ஈஉ\\x05ѥȳ\\x02உ')\n buf.write('ஊ\\x05нȟ\\x02ஊ\\u0b8b\\x05љȭ\\x02\\u0b8b')\n buf.write('\\u0b8c\\x05йȝ\\x02\\u0b8c\\u0b8d\\x05еț\\x02\\u0b8d')\n buf.write('எ\\x05ѓȪ\\x02எஏ\\x05хȣ\\x02ஏ')\n buf.write('ஐ\\x05яȨ\\x02ஐ\\u0b91\\x05сȡ\\x02\\u0b91')\n buf.write('Ǯ\\x03\\x02\\x02\\x02ஒஓ\\x05яȨ\\x02ஓஔ')\n buf.write('\\x05ёȩ\\x02ஔக\\x05эȧ\\x02க\\u0b96')\n buf.write('\\x05еț\\x02\\u0b96\\u0b97\\x05ѣȲ\\x02\\u0b97\\u0b98')\n buf.write('\\x05џȰ\\x02\\u0b98ங\\x05еț\\x02ஙச')\n buf.write('\\x05ыȦ\\x02ச\\u0b9b\\x05ѝȯ\\x02\\u0b9bஜ')\n buf.write('\\x05нȟ\\x02ஜǰ\\x03\\x02\\x02\\x02\\u0b9dஞ\\x05я')\n buf.write('Ȩ\\x02ஞட\\x05ёȩ\\x02ட\\u0ba0\\x05э')\n buf.write('ȧ\\x02\\u0ba0\\u0ba1\\x05хȣ\\x02\\u0ba1\\u0ba2\\x05я')\n buf.write('Ȩ\\x02\\u0ba2ண\\x05џȰ\\x02ணத\\x05е')\n buf.write('ț\\x02த\\u0ba5\\x05ыȦ\\x02\\u0ba5\\u0ba6\\x05ѝ')\n buf.write('ȯ\\x02\\u0ba6\\u0ba7\\x05нȟ\\x02\\u0ba7Dz\\x03\\x02\\x02')\n buf.write('\\x02நன\\x05яȨ\\x02னப\\x05ёȩ')\n buf.write('\\x02ப\\u0bab\\x05яȨ\\x02\\u0bab\\u0bac\\x05нȟ')\n buf.write('\\x02\\u0bacǴ\\x03\\x02\\x02\\x02\\u0badம\\x05яȨ\\x02ம')\n 
buf.write('ய\\x05ёȩ\\x02யர\\x05ёȩ\\x02ர')\n buf.write('ற\\x05їȬ\\x02றல\\x05лȞ\\x02ல')\n buf.write('ள\\x05нȟ\\x02ளழ\\x05їȬ\\x02ழ')\n buf.write('Ƕ\\x03\\x02\\x02\\x02வஶ\\x05яȨ\\x02ஶஷ')\n buf.write('\\x05ёȩ\\x02ஷஸ\\x05љȭ\\x02ஸஹ')\n buf.write('\\x05йȝ\\x02ஹ\\u0bba\\x05уȢ\\x02\\u0bba\\u0bbb')\n buf.write('\\x05нȟ\\x02\\u0bbb\\u0bbc\\x05эȧ\\x02\\u0bbc\\u0bbd')\n buf.write('\\x05еț\\x02\\u0bbdா\\x05йȝ\\x02ாி')\n buf.write('\\x05уȢ\\x02ிீ\\x05нȟ\\x02ீு')\n buf.write('\\x05йȝ\\x02ுூ\\x05щȥ\\x02ூǸ')\n buf.write('\\x03\\x02\\x02\\x02\\u0bc3\\u0bc4\\x05яȨ\\x02\\u0bc4\\u0bc5\\x05ё')\n buf.write('ȩ\\x02\\u0bc5ெ\\x05ћȮ\\x02ெǺ\\x03\\x02\\x02')\n buf.write('\\x02ேை\\x05яȨ\\x02ை\\u0bc9\\x05ёȩ')\n buf.write('\\x02\\u0bc9ொ\\x05ѡȱ\\x02ொோ\\x05еț')\n buf.write('\\x02ோௌ\\x05хȣ\\x02ௌ்\\x05ћȮ')\n buf.write('\\x02்Ǽ\\x03\\x02\\x02\\x02\\u0bce\\u0bcf\\x05яȨ\\x02\\u0bcf')\n buf.write('ௐ\\x05ѝȯ\\x02ௐ\\u0bd1\\x05ыȦ\\x02\\u0bd1')\n buf.write('\\u0bd2\\x05ыȦ\\x02\\u0bd2Ǿ\\x03\\x02\\x02\\x02\\u0bd3\\u0bd4')\n buf.write('\\x05яȨ\\x02\\u0bd4\\u0bd5\\x05ѝȯ\\x02\\u0bd5\\u0bd6')\n buf.write('\\x05ыȦ\\x02\\u0bd6ௗ\\x05ыȦ\\x02ௗ\\u0bd8')\n buf.write('\\x05љȭ\\x02\\u0bd8Ȁ\\x03\\x02\\x02\\x02\\u0bd9\\u0bda\\x05я')\n buf.write('Ȩ\\x02\\u0bda\\u0bdb\\x05ѝȯ\\x02\\u0bdb\\u0bdc\\x05э')\n buf.write('ȧ\\x02\\u0bdc\\u0bdd\\x05зȜ\\x02\\u0bdd\\u0bde\\x05н')\n buf.write('ȟ\\x02\\u0bde\\u0bdf\\x05їȬ\\x02\\u0bdfȂ\\x03\\x02\\x02')\n buf.write('\\x02\\u0be0\\u0be1\\x05яȨ\\x02\\u0be1\\u0be2\\x05ѝȯ')\n buf.write('\\x02\\u0be2\\u0be3\\x05эȧ\\x02\\u0be3\\u0be4\\x05нȟ')\n buf.write('\\x02\\u0be4\\u0be5\\x05їȬ\\x02\\u0be5௦\\x05хȣ')\n buf.write('\\x02௦௧\\x05йȝ\\x02௧Ȅ\\x03\\x02\\x02\\x02௨')\n buf.write('௩\\x05яȨ\\x02௩௪\\x05џȰ\\x02௪')\n buf.write('௫\\x05еț\\x02௫௬\\x05їȬ\\x02௬')\n buf.write('௭\\x05йȝ\\x02௭௮\\x05уȢ\\x02௮')\n buf.write('௯\\x05еț\\x02௯௰\\x05їȬ\\x02௰')\n buf.write('௱\\x074\\x02\\x02௱Ȇ\\x03\\x02\\x02\\x02௲௳\\x05ё')\n buf.write('ȩ\\x02௳௴\\x05зȜ\\x02௴௵\\x05ч')\n buf.write('Ȥ\\x02௵௶\\x05нȟ\\x02௶௷\\x05й')\n 
buf.write('ȝ\\x02௷௸\\x05ћȮ\\x02௸Ȉ\\x03\\x02\\x02')\n buf.write('\\x02௹௺\\x05ёȩ\\x02௺\\u0bfb\\x05пȠ')\n buf.write('\\x02\\u0bfbȊ\\x03\\x02\\x02\\x02\\u0bfc\\u0bfd\\x05ёȩ\\x02\\u0bfd')\n buf.write('\\u0bfe\\x05пȠ\\x02\\u0bfe\\u0bff\\x05пȠ\\x02\\u0bff')\n buf.write('Ȍ\\x03\\x02\\x02\\x02ఀఁ\\x05ёȩ\\x02ఁం')\n buf.write('\\x05хȣ\\x02ంః\\x05лȞ\\x02ఃȎ')\n buf.write('\\x03\\x02\\x02\\x02ఄఅ\\x05ёȩ\\x02అఆ\\x05ы')\n buf.write('Ȧ\\x02ఆఇ\\x05лȞ\\x02ఇȐ\\x03\\x02\\x02')\n buf.write('\\x02ఈఉ\\x05ёȩ\\x02ఉఊ\\x05яȨ')\n buf.write('\\x02ఊȒ\\x03\\x02\\x02\\x02ఋఌ\\x05ёȩ\\x02ఌ')\n buf.write('\\u0c0d\\x05яȨ\\x02\\u0c0dఎ\\x05ыȦ\\x02ఎ')\n buf.write('ఏ\\x05ѥȳ\\x02ఏȔ\\x03\\x02\\x02\\x02ఐ\\u0c11')\n buf.write('\\x05ёȩ\\x02\\u0c11ఒ\\x05ѓȪ\\x02ఒఓ')\n buf.write('\\x05нȟ\\x02ఓఔ\\x05яȨ\\x02ఔȖ')\n buf.write('\\x03\\x02\\x02\\x02కఖ\\x05ёȩ\\x02ఖగ\\x05ѓ')\n buf.write('Ȫ\\x02గఘ\\x05ћȮ\\x02ఘఙ\\x05х')\n buf.write('ȣ\\x02ఙచ\\x05ёȩ\\x02చఛ\\x05я')\n buf.write('Ȩ\\x02ఛȘ\\x03\\x02\\x02\\x02జఝ\\x05ёȩ')\n buf.write('\\x02ఝఞ\\x05їȬ\\x02ఞȚ\\x03\\x02\\x02\\x02ట')\n buf.write('ఠ\\x05ёȩ\\x02ఠడ\\x05їȬ\\x02డ')\n buf.write('ఢ\\x05еț\\x02ఢణ\\x05лȞ\\x02ణ')\n buf.write('త\\x05еț\\x02తథ\\x05ћȮ\\x02థ')\n buf.write('ద\\x05еț\\x02దȜ\\x03\\x02\\x02\\x02ధన')\n buf.write('\\x05ёȩ\\x02న\\u0c29\\x05їȬ\\x02\\u0c29ప')\n buf.write('\\x05лȞ\\x02పఫ\\x05нȟ\\x02ఫబ')\n buf.write('\\x05їȬ\\x02బȞ\\x03\\x02\\x02\\x02భమ\\x05ё')\n buf.write('ȩ\\x02మయ\\x05їȬ\\x02యర\\x05л')\n buf.write('Ȟ\\x02రఱ\\x05хȣ\\x02ఱల\\x05я')\n buf.write('Ȩ\\x02లళ\\x05еț\\x02ళఴ\\x05ы')\n buf.write('Ȧ\\x02ఴవ\\x05хȣ\\x02వశ\\x05ћ')\n buf.write('Ȯ\\x02శష\\x05ѥȳ\\x02షȠ\\x03\\x02\\x02')\n buf.write('\\x02సహ\\x05ёȩ\\x02హ\\u0c3a\\x05љȭ')\n buf.write('\\x02\\u0c3a\\u0c3b\\x05нȟ\\x02\\u0c3b఼\\x05їȬ')\n buf.write('\\x02఼ఽ\\x05їȬ\\x02ఽా\\x05ёȩ')\n buf.write('\\x02ాి\\x05їȬ\\x02ిȢ\\x03\\x02\\x02\\x02ీ')\n buf.write('ు\\x05ёȩ\\x02ుూ\\x05ѝȯ\\x02ూ')\n buf.write('ృ\\x05ћȮ\\x02ృȤ\\x03\\x02\\x02\\x02ౄ\\u0c45')\n buf.write('\\x05ёȩ\\x02\\u0c45ె\\x05ѝȯ\\x02ెే')\n 
buf.write('\\x05ћȮ\\x02ేై\\x05нȟ\\x02ై\\u0c49')\n buf.write('\\x05їȬ\\x02\\u0c49Ȧ\\x03\\x02\\x02\\x02ొో\\x05ё')\n buf.write('ȩ\\x02ోౌ\\x05џȰ\\x02ౌ్\\x05н')\n buf.write('ȟ\\x02్\\u0c4e\\x05їȬ\\x02\\u0c4eȨ\\x03\\x02\\x02')\n buf.write('\\x02\\u0c4f\\u0c50\\x05ёȩ\\x02\\u0c50\\u0c51\\x05џȰ')\n buf.write('\\x02\\u0c51\\u0c52\\x05нȟ\\x02\\u0c52\\u0c53\\x05їȬ')\n buf.write('\\x02\\u0c53\\u0c54\\x05їȬ\\x02\\u0c54ౕ\\x05хȣ')\n buf.write('\\x02ౕౖ\\x05лȞ\\x02ౖ\\u0c57\\x05хȣ')\n buf.write('\\x02\\u0c57ౘ\\x05яȨ\\x02ౘౙ\\x05сȡ')\n buf.write('\\x02ౙȪ\\x03\\x02\\x02\\x02ౚ\\u0c5b\\x05ѓȪ\\x02\\u0c5b')\n buf.write('\\u0c5c\\x05еț\\x02\\u0c5cౝ\\x05йȝ\\x02ౝ')\n buf.write('\\u0c5e\\x05щȥ\\x02\\u0c5e\\u0c5f\\x05еț\\x02\\u0c5f')\n buf.write('ౠ\\x05сȡ\\x02ౠౡ\\x05нȟ\\x02ౡ')\n buf.write('Ȭ\\x03\\x02\\x02\\x02ౢౣ\\x05ѓȪ\\x02ౣ\\u0c64')\n buf.write('\\x05еț\\x02\\u0c64\\u0c65\\x05їȬ\\x02\\u0c65౦')\n buf.write('\\x05еț\\x02౦౧\\x05ыȦ\\x02౧౨')\n buf.write('\\x05ыȦ\\x02౨౩\\x05нȟ\\x02౩౪')\n buf.write('\\x05ыȦ\\x02౪౫\\x07a\\x02\\x02౫౬\\x05н')\n buf.write('ȟ\\x02౬౭\\x05яȨ\\x02౭౮\\x05е')\n buf.write('ț\\x02౮౯\\x05зȜ\\x02౯\\u0c70\\x05ы')\n buf.write('Ȧ\\x02\\u0c70\\u0c71\\x05нȟ\\x02\\u0c71Ȯ\\x03\\x02\\x02')\n buf.write('\\x02\\u0c72\\u0c73\\x05ѓȪ\\x02\\u0c73\\u0c74\\x05еț')\n buf.write('\\x02\\u0c74\\u0c75\\x05їȬ\\x02\\u0c75\\u0c76\\x05еț')\n buf.write('\\x02\\u0c76౷\\x05эȧ\\x02౷౸\\x05нȟ')\n buf.write('\\x02౸౹\\x05ћȮ\\x02౹౺\\x05нȟ')\n buf.write('\\x02౺౻\\x05їȬ\\x02౻౼\\x05љȭ')\n buf.write('\\x02౼Ȱ\\x03\\x02\\x02\\x02౽౾\\x05ѓȪ\\x02౾')\n buf.write('౿\\x05еț\\x02౿ಀ\\x05їȬ\\x02ಀ')\n buf.write('ಁ\\x05нȟ\\x02ಁಂ\\x05яȨ\\x02ಂ')\n buf.write('ಃ\\x05ћȮ\\x02ಃȲ\\x03\\x02\\x02\\x02಄ಅ')\n buf.write('\\x05ѓȪ\\x02ಅಆ\\x05еț\\x02ಆಇ')\n buf.write('\\x05їȬ\\x02ಇಈ\\x05ћȮ\\x02ಈಉ')\n buf.write('\\x05хȣ\\x02ಉಊ\\x05ћȮ\\x02ಊಋ')\n buf.write('\\x05хȣ\\x02ಋಌ\\x05ёȩ\\x02ಌ\\u0c8d')\n buf.write('\\x05яȨ\\x02\\u0c8dȴ\\x03\\x02\\x02\\x02ಎಏ\\x05ѓ')\n buf.write('Ȫ\\x02ಏಐ\\x05еț\\x02ಐ\\u0c91\\x05љ')\n 
buf.write('ȭ\\x02\\u0c91ಒ\\x05љȭ\\x02ಒಓ\\x05х')\n buf.write('ȣ\\x02ಓಔ\\x05яȨ\\x02ಔಕ\\x05с')\n buf.write('ȡ\\x02ಕȶ\\x03\\x02\\x02\\x02ಖಗ\\x05ѓȪ')\n buf.write('\\x02ಗಘ\\x05еț\\x02ಘಙ\\x05ћȮ')\n buf.write('\\x02ಙಚ\\x05уȢ\\x02ಚȸ\\x03\\x02\\x02\\x02ಛ')\n buf.write(\"ಜ\\x07'\\x02\\x02ಜಝ\\x05їȬ\\x02ಝಞ\")\n buf.write('\\x05ёȩ\\x02ಞಟ\\x05ѡȱ\\x02ಟಠ')\n buf.write('\\x05ћȮ\\x02ಠಡ\\x05ѥȳ\\x02ಡಢ')\n buf.write('\\x05ѓȪ\\x02ಢಣ\\x05нȟ\\x02ಣȺ')\n buf.write(\"\\x03\\x02\\x02\\x02ತಥ\\x07'\\x02\\x02ಥದ\\x05ћȮ\")\n buf.write('\\x02ದಧ\\x05ѥȳ\\x02ಧನ\\x05ѓȪ')\n buf.write('\\x02ನ\\u0ca9\\x05нȟ\\x02\\u0ca9ȼ\\x03\\x02\\x02\\x02ಪ')\n buf.write('ಫ\\x05ѓȪ\\x02ಫಬ\\x05хȣ\\x02ಬ')\n buf.write('ಭ\\x05ѓȪ\\x02ಭಮ\\x05нȟ\\x02ಮ')\n buf.write('ಯ\\x05ыȦ\\x02ಯರ\\x05хȣ\\x02ರ')\n buf.write('ಱ\\x05яȨ\\x02ಱಲ\\x05нȟ\\x02ಲ')\n buf.write('ಳ\\x05лȞ\\x02ಳȾ\\x03\\x02\\x02\\x02\\u0cb4ವ')\n buf.write('\\x05ѓȪ\\x02ವಶ\\x05хȣ\\x02ಶಷ')\n buf.write('\\x05џȰ\\x02ಷಸ\\x05ёȩ\\x02ಸಹ')\n buf.write('\\x05ћȮ\\x02ಹɀ\\x03\\x02\\x02\\x02\\u0cba\\u0cbb\\x05ѓ')\n buf.write('Ȫ\\x02\\u0cbb಼\\x05ыȦ\\x02಼ಽ\\x05е')\n buf.write('ț\\x02ಽಾ\\x05яȨ\\x02ಾɂ\\x03\\x02\\x02')\n buf.write('\\x02ಿೀ\\x05ѓȪ\\x02ೀು\\x05ыȦ')\n buf.write('\\x02ುೂ\\x05љȭ\\x02ೂೃ\\x07a\\x02\\x02ೃ')\n buf.write('ೄ\\x05хȣ\\x02ೄ\\u0cc5\\x05яȨ\\x02\\u0cc5')\n buf.write('ೆ\\x05ћȮ\\x02ೆೇ\\x05нȟ\\x02ೇ')\n buf.write('ೈ\\x05сȡ\\x02ೈ\\u0cc9\\x05нȟ\\x02\\u0cc9')\n buf.write('ೊ\\x05їȬ\\x02ೊɄ\\x03\\x02\\x02\\x02ೋೌ')\n buf.write('\\x05ѓȪ\\x02ೌ್\\x05ёȩ\\x02್\\u0cce')\n buf.write('\\x05љȭ\\x02\\u0cce\\u0ccf\\x05хȣ\\x02\\u0ccf\\u0cd0')\n buf.write('\\x05ћȮ\\x02\\u0cd0\\u0cd1\\x05хȣ\\x02\\u0cd1\\u0cd2')\n buf.write('\\x05џȰ\\x02\\u0cd2\\u0cd3\\x05нȟ\\x02\\u0cd3Ɇ')\n buf.write('\\x03\\x02\\x02\\x02\\u0cd4ೕ\\x05ѓȪ\\x02ೕೖ\\x05ё')\n buf.write('ȩ\\x02ೖ\\u0cd7\\x05љȭ\\x02\\u0cd7\\u0cd8\\x05х')\n buf.write('ȣ\\x02\\u0cd8\\u0cd9\\x05ћȮ\\x02\\u0cd9\\u0cda\\x05х')\n buf.write('ȣ\\x02\\u0cda\\u0cdb\\x05џȰ\\x02\\u0cdb\\u0cdc\\x05н')\n buf.write('ȟ\\x02\\u0cdcೝ\\x05яȨ\\x02ೝɈ\\x03\\x02\\x02')\n 
buf.write('\\x02ೞ\\u0cdf\\x05ѓȪ\\x02\\u0cdfೠ\\x05їȬ')\n buf.write('\\x02ೠೡ\\x05еț\\x02ೡೢ\\x05сȡ')\n buf.write('\\x02ೢೣ\\x05эȧ\\x02ೣ\\u0ce4\\x05еț')\n buf.write('\\x02\\u0ce4Ɋ\\x03\\x02\\x02\\x02\\u0ce5೦\\x05ѓȪ\\x02೦')\n buf.write('೧\\x05їȬ\\x02೧೨\\x05нȟ\\x02೨')\n buf.write('೩\\x05йȝ\\x02೩೪\\x05нȟ\\x02೪')\n buf.write('೫\\x05лȞ\\x02೫೬\\x05хȣ\\x02೬')\n buf.write('೭\\x05яȨ\\x02೭೮\\x05сȡ\\x02೮')\n buf.write('Ɍ\\x03\\x02\\x02\\x02೯\\u0cf0\\x05ѓȪ\\x02\\u0cf0ೱ')\n buf.write('\\x05їȬ\\x02ೱೲ\\x05нȟ\\x02ೲ\\u0cf3')\n buf.write('\\x05йȝ\\x02\\u0cf3\\u0cf4\\x05хȣ\\x02\\u0cf4\\u0cf5')\n buf.write('\\x05љȭ\\x02\\u0cf5\\u0cf6\\x05хȣ\\x02\\u0cf6\\u0cf7')\n buf.write('\\x05ёȩ\\x02\\u0cf7\\u0cf8\\x05яȨ\\x02\\u0cf8Ɏ')\n buf.write('\\x03\\x02\\x02\\x02\\u0cf9\\u0cfa\\x05ѓȪ\\x02\\u0cfa\\u0cfb\\x05ї')\n buf.write('Ȭ\\x02\\u0cfb\\u0cfc\\x05нȟ\\x02\\u0cfc\\u0cfd\\x05љ')\n buf.write('ȭ\\x02\\u0cfd\\u0cfe\\x05нȟ\\x02\\u0cfe\\u0cff\\x05я')\n buf.write('Ȩ\\x02\\u0cffഀ\\x05ћȮ\\x02ഀɐ\\x03\\x02\\x02')\n buf.write('\\x02ഁം\\x05ѓȪ\\x02ംഃ\\x05їȬ')\n buf.write('\\x02ഃഄ\\x05хȣ\\x02ഄഅ\\x05ёȩ')\n buf.write('\\x02അആ\\x05їȬ\\x02ആɒ\\x03\\x02\\x02\\x02ഇ')\n buf.write('ഈ\\x05ѓȪ\\x02ഈഉ\\x05їȬ\\x02ഉ')\n buf.write('ഊ\\x05ёȩ\\x02ഊഋ\\x05йȝ\\x02ഋ')\n buf.write('ഌ\\x05нȟ\\x02ഌ\\u0d0d\\x05лȞ\\x02\\u0d0d')\n buf.write('എ\\x05ѝȯ\\x02എഏ\\x05їȬ\\x02ഏ')\n buf.write('ഐ\\x05нȟ\\x02ഐɔ\\x03\\x02\\x02\\x02\\u0d11ഒ')\n buf.write('\\x05їȬ\\x02ഒഓ\\x05еț\\x02ഓഔ')\n buf.write('\\x05хȣ\\x02ഔക\\x05љȭ\\x02കഖ')\n buf.write('\\x05нȟ\\x02ഖɖ\\x03\\x02\\x02\\x02ഗഘ\\x05ї')\n buf.write('Ȭ\\x02ഘങ\\x05еț\\x02ങച\\x05я')\n buf.write('Ȩ\\x02ചഛ\\x05сȡ\\x02ഛജ\\x05н')\n buf.write('ȟ\\x02ജɘ\\x03\\x02\\x02\\x02ഝഞ\\x05їȬ')\n buf.write('\\x02ഞട\\x05еț\\x02ടഠ\\x05ѡȱ')\n buf.write('\\x02ഠɚ\\x03\\x02\\x02\\x02ഡഢ\\x05їȬ\\x02ഢ')\n buf.write('ണ\\x05нȟ\\x02ണത\\x05еț\\x02ത')\n buf.write('ഥ\\x05лȞ\\x02ഥɜ\\x03\\x02\\x02\\x02ദധ')\n buf.write('\\x05їȬ\\x02ധന\\x05нȟ\\x02നഩ')\n buf.write('\\x05еț\\x02ഩപ\\x05ыȦ\\x02പɞ')\n 
buf.write('\\x03\\x02\\x02\\x02ഫബ\\x05їȬ\\x02ബഭ\\x05н')\n buf.write('ȟ\\x02ഭമ\\x05йȝ\\x02മയ\\x05ё')\n buf.write('ȩ\\x02യര\\x05їȬ\\x02രറ\\x05л')\n buf.write('Ȟ\\x02റɠ\\x03\\x02\\x02\\x02ലള\\x05їȬ')\n buf.write('\\x02ളഴ\\x05нȟ\\x02ഴവ\\x05пȠ')\n buf.write('\\x02വɢ\\x03\\x02\\x02\\x02ശഷ\\x05їȬ\\x02ഷ')\n buf.write('സ\\x05нȟ\\x02സഹ\\x05пȠ\\x02ഹ')\n buf.write('ഺ\\x05нȟ\\x02ഺ഻\\x05їȬ\\x02഻')\n buf.write('഼\\x05нȟ\\x02഼ഽ\\x05яȨ\\x02ഽ')\n buf.write('ാ\\x05йȝ\\x02ാി\\x05нȟ\\x02ി')\n buf.write('ɤ\\x03\\x02\\x02\\x02ീു\\x05їȬ\\x02ുൂ')\n buf.write('\\x05нȟ\\x02ൂൃ\\x05пȠ\\x02ൃൄ')\n buf.write('\\x05нȟ\\x02ൄ\\u0d45\\x05їȬ\\x02\\u0d45െ')\n buf.write('\\x05нȟ\\x02െേ\\x05яȨ\\x02േൈ')\n buf.write('\\x05йȝ\\x02ൈ\\u0d49\\x05хȣ\\x02\\u0d49ൊ')\n buf.write('\\x05яȨ\\x02ൊോ\\x05сȡ\\x02ോɦ')\n buf.write('\\x03\\x02\\x02\\x02ൌ്\\x05їȬ\\x02്ൎ\\x05н')\n buf.write('ȟ\\x02ൎ൏\\x05чȤ\\x02൏\\u0d50\\x05н')\n buf.write('ȟ\\x02\\u0d50\\u0d51\\x05йȝ\\x02\\u0d51\\u0d52\\x05ћ')\n buf.write('Ȯ\\x02\\u0d52ɨ\\x03\\x02\\x02\\x02\\u0d53ൔ\\x05їȬ')\n buf.write('\\x02ൔൕ\\x05нȟ\\x02ൕൖ\\x05ыȦ')\n buf.write('\\x02ൖൗ\\x05хȣ\\x02ൗ൘\\x05нȟ')\n buf.write('\\x02൘൙\\x05љȭ\\x02൙൚\\x07a\\x02\\x02൚')\n buf.write('൛\\x05ёȩ\\x02൛൜\\x05яȨ\\x02൜')\n buf.write('ɪ\\x03\\x02\\x02\\x02൝൞\\x05їȬ\\x02൞ൟ')\n buf.write('\\x05нȟ\\x02ൟൠ\\x05яȨ\\x02ൠൡ')\n buf.write('\\x05еț\\x02ൡൢ\\x05эȧ\\x02ൢൣ')\n buf.write('\\x05нȟ\\x02ൣɬ\\x03\\x02\\x02\\x02\\u0d64\\u0d65\\x05ї')\n buf.write('Ȭ\\x02\\u0d65൦\\x05нȟ\\x02൦൧\\x05ѓ')\n buf.write('Ȫ\\x02൧൨\\x05ыȦ\\x02൨൩\\x05е')\n buf.write('ț\\x02൩൪\\x05йȝ\\x02൪൫\\x05н')\n buf.write('ȟ\\x02൫ɮ\\x03\\x02\\x02\\x02൬൭\\x05їȬ')\n buf.write('\\x02൭൮\\x05нȟ\\x02൮൯\\x05љȭ')\n buf.write('\\x02൯൰\\x05ѓȪ\\x02൰൱\\x05нȟ')\n buf.write('\\x02൱൲\\x05йȝ\\x02൲൳\\x05ћȮ')\n buf.write('\\x02൳ɰ\\x03\\x02\\x02\\x02൴൵\\x05їȬ\\x02൵')\n buf.write('൶\\x05нȟ\\x02൶൷\\x05љȭ\\x02൷')\n buf.write('൸\\x05ћȮ\\x02൸൹\\x05їȬ\\x02൹')\n buf.write('ൺ\\x05хȣ\\x02ൺൻ\\x05йȝ\\x02ൻ')\n buf.write('ർ\\x05ћȮ\\x02ർൽ\\x07a\\x02\\x02ൽൾ')\n 
buf.write('\\x05їȬ\\x02ൾൿ\\x05нȟ\\x02ൿ\\u0d80')\n buf.write('\\x05пȠ\\x02\\u0d80ඁ\\x05нȟ\\x02ඁං')\n buf.write('\\x05їȬ\\x02ංඃ\\x05нȟ\\x02ඃ\\u0d84')\n buf.write('\\x05яȨ\\x02\\u0d84අ\\x05йȝ\\x02අආ')\n buf.write('\\x05нȟ\\x02ආඇ\\x05љȭ\\x02ඇɲ')\n buf.write('\\x03\\x02\\x02\\x02ඈඉ\\x05їȬ\\x02ඉඊ\\x05н')\n buf.write('ȟ\\x02ඊඋ\\x05љȭ\\x02උඌ\\x05ѝ')\n buf.write('ȯ\\x02ඌඍ\\x05ыȦ\\x02ඍඎ\\x05ћ')\n buf.write('Ȯ\\x02ඎɴ\\x03\\x02\\x02\\x02ඏඐ\\x05їȬ')\n buf.write('\\x02ඐඑ\\x05нȟ\\x02එඒ\\x05љȭ')\n buf.write('\\x02ඒඓ\\x05ѝȯ\\x02ඓඔ\\x05ыȦ')\n buf.write('\\x02ඔඕ\\x05ћȮ\\x02ඕඖ\\x07a\\x02\\x02ඖ')\n buf.write('\\u0d97\\x05йȝ\\x02\\u0d97\\u0d98\\x05еț\\x02\\u0d98')\n buf.write('\\u0d99\\x05йȝ\\x02\\u0d99ක\\x05уȢ\\x02ක')\n buf.write('ඛ\\x05нȟ\\x02ඛɶ\\x03\\x02\\x02\\x02ගඝ')\n buf.write('\\x05їȬ\\x02ඝඞ\\x05нȟ\\x02ඞඟ')\n buf.write('\\x05ћȮ\\x02ඟච\\x05ѝȯ\\x02චඡ')\n buf.write('\\x05їȬ\\x02ඡජ\\x05яȨ\\x02ජɸ')\n buf.write('\\x03\\x02\\x02\\x02ඣඤ\\x05їȬ\\x02ඤඥ\\x05н')\n buf.write('ȟ\\x02ඥඦ\\x05ћȮ\\x02ඦට\\x05ѝ')\n buf.write('ȯ\\x02ටඨ\\x05їȬ\\x02ඨඩ\\x05я')\n buf.write('Ȩ\\x02ඩඪ\\x05хȣ\\x02ඪණ\\x05я')\n buf.write('Ȩ\\x02ණඬ\\x05сȡ\\x02ඬɺ\\x03\\x02\\x02')\n buf.write('\\x02තථ\\x05їȬ\\x02ථද\\x05нȟ')\n buf.write('\\x02දධ\\x05ѝȯ\\x02ධන\\x05љȭ')\n buf.write('\\x02න\\u0db2\\x05нȟ\\x02\\u0db2ɼ\\x03\\x02\\x02\\x02ඳ')\n buf.write('ප\\x05їȬ\\x02පඵ\\x05нȟ\\x02ඵ')\n buf.write('බ\\x05џȰ\\x02බභ\\x05нȟ\\x02භ')\n buf.write('ම\\x05їȬ\\x02මඹ\\x05љȭ\\x02ඹ')\n buf.write('ය\\x05нȟ\\x02යɾ\\x03\\x02\\x02\\x02ර\\u0dbc')\n buf.write('\\x05їȬ\\x02\\u0dbcල\\x05нȟ\\x02ල\\u0dbe')\n buf.write('\\x05џȰ\\x02\\u0dbe\\u0dbf\\x05ёȩ\\x02\\u0dbfව')\n buf.write('\\x05щȥ\\x02වශ\\x05нȟ\\x02ශʀ')\n buf.write('\\x03\\x02\\x02\\x02ෂස\\x05їȬ\\x02සහ\\x05х')\n buf.write('ȣ\\x02හළ\\x05сȡ\\x02ළෆ\\x05у')\n buf.write('Ȣ\\x02ෆ\\u0dc7\\x05ћȮ\\x02\\u0dc7ʂ\\x03\\x02\\x02')\n buf.write('\\x02\\u0dc8\\u0dc9\\x05їȬ\\x02\\u0dc9්\\x05ёȩ')\n buf.write('\\x02්\\u0dcb\\x05ыȦ\\x02\\u0dcb\\u0dcc\\x05ыȦ')\n 
buf.write('\\x02\\u0dcc\\u0dcd\\x05зȜ\\x02\\u0dcd\\u0dce\\x05еț')\n buf.write('\\x02\\u0dceා\\x05йȝ\\x02ාැ\\x05щȥ')\n buf.write('\\x02ැʄ\\x03\\x02\\x02\\x02ෑි\\x05їȬ\\x02ි')\n buf.write('ී\\x05ёȩ\\x02ීු\\x05ыȦ\\x02ු')\n buf.write('\\u0dd5\\x05ыȦ\\x02\\u0dd5ූ\\x05ѝȯ\\x02ූ')\n buf.write('\\u0dd7\\x05ѓȪ\\x02\\u0dd7ʆ\\x03\\x02\\x02\\x02ෘෙ')\n buf.write('\\x05їȬ\\x02ෙේ\\x05ёȩ\\x02ේෛ')\n buf.write('\\x05ѡȱ\\x02ෛʈ\\x03\\x02\\x02\\x02ොෝ\\x05ї')\n buf.write('Ȭ\\x02ෝෞ\\x05ёȩ\\x02ෞෟ\\x05ѡ')\n buf.write('ȱ\\x02ෟ\\u0de0\\x05хȣ\\x02\\u0de0\\u0de1\\x05л')\n buf.write('Ȟ\\x02\\u0de1ʊ\\x03\\x02\\x02\\x02\\u0de2\\u0de3\\x05їȬ')\n buf.write('\\x02\\u0de3\\u0de4\\x05ёȩ\\x02\\u0de4\\u0de5\\x05ѡȱ')\n buf.write('\\x02\\u0de5෦\\x05љȭ\\x02෦ʌ\\x03\\x02\\x02\\x02෧')\n buf.write('෨\\x05їȬ\\x02෨෩\\x05ѝȯ\\x02෩')\n buf.write('෪\\x05ыȦ\\x02෪෫\\x05нȟ\\x02෫')\n buf.write('෬\\x05љȭ\\x02෬ʎ\\x03\\x02\\x02\\x02෭෮')\n buf.write('\\x05љȭ\\x02෮෯\\x05еț\\x02෯\\u0df0')\n buf.write('\\x05эȧ\\x02\\u0df0\\u0df1\\x05ѓȪ\\x02\\u0df1ෲ')\n buf.write('\\x05ыȦ\\x02ෲෳ\\x05нȟ\\x02ෳʐ')\n buf.write('\\x03\\x02\\x02\\x02෴\\u0df5\\x05љȭ\\x02\\u0df5\\u0df6\\x05е')\n buf.write('ț\\x02\\u0df6\\u0df7\\x05џȰ\\x02\\u0df7\\u0df8\\x05н')\n buf.write('ȟ\\x02\\u0df8ʒ\\x03\\x02\\x02\\x02\\u0df9\\u0dfa\\x05љȭ')\n buf.write('\\x02\\u0dfa\\u0dfb\\x05еț\\x02\\u0dfb\\u0dfc\\x05џȰ')\n buf.write('\\x02\\u0dfc\\u0dfd\\x05нȟ\\x02\\u0dfd\\u0dfe\\x05ѓȪ')\n buf.write('\\x02\\u0dfe\\u0dff\\x05ёȩ\\x02\\u0dff\\u0e00\\x05хȣ')\n buf.write('\\x02\\u0e00ก\\x05яȨ\\x02กข\\x05ћȮ')\n buf.write('\\x02ขʔ\\x03\\x02\\x02\\x02ฃค\\x05љȭ\\x02ค')\n buf.write('ฅ\\x05йȝ\\x02ฅฆ\\x05уȢ\\x02ฆ')\n buf.write('ง\\x05нȟ\\x02งจ\\x05эȧ\\x02จ')\n buf.write('ฉ\\x05еț\\x02ฉʖ\\x03\\x02\\x02\\x02ชซ')\n buf.write('\\x05љȭ\\x02ซฌ\\x05йȝ\\x02ฌญ')\n buf.write('\\x05уȢ\\x02ญฎ\\x05нȟ\\x02ฎฏ')\n buf.write('\\x05эȧ\\x02ฏฐ\\x05еț\\x02ฐฑ')\n buf.write('\\x05йȝ\\x02ฑฒ\\x05уȢ\\x02ฒณ')\n buf.write('\\x05нȟ\\x02ณด\\x05йȝ\\x02ดต')\n buf.write('\\x05щȥ\\x02ตʘ\\x03\\x02\\x02\\x02ถท\\x05љ')\n 
buf.write('ȭ\\x02ทธ\\x05йȝ\\x02ธน\\x05я')\n buf.write('Ȩ\\x02นʚ\\x03\\x02\\x02\\x02บป\\x05љȭ')\n buf.write('\\x02ปผ\\x05нȟ\\x02ผฝ\\x05еț')\n buf.write('\\x02ฝพ\\x05їȬ\\x02พฟ\\x05йȝ')\n buf.write('\\x02ฟภ\\x05уȢ\\x02ภʜ\\x03\\x02\\x02\\x02ม')\n buf.write('ย\\x05љȭ\\x02ยร\\x05нȟ\\x02ร')\n buf.write('ฤ\\x05йȝ\\x02ฤล\\x05ёȩ\\x02ล')\n buf.write('ฦ\\x05яȨ\\x02ฦว\\x05лȞ\\x02ว')\n buf.write('ʞ\\x03\\x02\\x02\\x02ศษ\\x05љȭ\\x02ษส')\n buf.write('\\x05нȟ\\x02สห\\x05нȟ\\x02หฬ')\n buf.write('\\x05лȞ\\x02ฬʠ\\x03\\x02\\x02\\x02อฮ\\x05љ')\n buf.write('ȭ\\x02ฮฯ\\x05нȟ\\x02ฯะ\\x05с')\n buf.write('ȡ\\x02ะั\\x05эȧ\\x02ัา\\x05н')\n buf.write('ȟ\\x02าำ\\x05яȨ\\x02ำิ\\x05ћ')\n buf.write('Ȯ\\x02ิʢ\\x03\\x02\\x02\\x02ีึ\\x05љȭ')\n buf.write('\\x02ึื\\x05нȟ\\x02ืุ\\x05ыȦ')\n buf.write('\\x02ุู\\x05нȟ\\x02ฺู\\x05йȝ')\n buf.write('\\x02ฺ\\u0e3b\\x05ћȮ\\x02\\u0e3bʤ\\x03\\x02\\x02\\x02\\u0e3c')\n buf.write('\\u0e3d\\x05љȭ\\x02\\u0e3d\\u0e3e\\x05нȟ\\x02\\u0e3e')\n buf.write('฿\\x05ыȦ\\x02฿เ\\x05пȠ\\x02เ')\n buf.write('ʦ\\x03\\x02\\x02\\x02แโ\\x05љȭ\\x02โใ')\n buf.write('\\x05нȟ\\x02ใไ\\x05ѕȫ\\x02ไๅ')\n buf.write('\\x05ѝȯ\\x02ๅๆ\\x05нȟ\\x02ๆ็')\n buf.write('\\x05яȨ\\x02็่\\x05йȝ\\x02่้')\n buf.write('\\x05нȟ\\x02้ʨ\\x03\\x02\\x02\\x02๊๋\\x05љ')\n buf.write('ȭ\\x02๋์\\x05нȟ\\x02์ํ\\x05ѕ')\n buf.write('ȫ\\x02ํ๎\\x05ѝȯ\\x02๎๏\\x05н')\n buf.write('ȟ\\x02๏๐\\x05яȨ\\x02๐๑\\x05ћ')\n buf.write('Ȯ\\x02๑๒\\x05хȣ\\x02๒๓\\x05е')\n buf.write('ț\\x02๓๔\\x05ыȦ\\x02๔ʪ\\x03\\x02\\x02')\n buf.write('\\x02๕๖\\x05љȭ\\x02๖๗\\x05нȟ')\n buf.write('\\x02๗๘\\x05їȬ\\x02๘๙\\x05хȣ')\n buf.write('\\x02๙๚\\x05еț\\x02๚๛\\x05ыȦ')\n buf.write('\\x02๛\\u0e5c\\x05хȣ\\x02\\u0e5c\\u0e5d\\x05ѧȴ')\n buf.write('\\x02\\u0e5d\\u0e5e\\x05еț\\x02\\u0e5e\\u0e5f\\x05зȜ')\n buf.write('\\x02\\u0e5f\\u0e60\\x05ыȦ\\x02\\u0e60\\u0e61\\x05нȟ')\n buf.write('\\x02\\u0e61ʬ\\x03\\x02\\x02\\x02\\u0e62\\u0e63\\x05љȭ\\x02\\u0e63')\n buf.write('\\u0e64\\x05нȟ\\x02\\u0e64\\u0e65\\x05їȬ\\x02\\u0e65')\n 
buf.write('\\u0e66\\x05хȣ\\x02\\u0e66\\u0e67\\x05еț\\x02\\u0e67')\n buf.write('\\u0e68\\x05ыȦ\\x02\\u0e68\\u0e69\\x05ыȦ\\x02\\u0e69')\n buf.write('\\u0e6a\\x05ѥȳ\\x02\\u0e6a\\u0e6b\\x07a\\x02\\x02\\u0e6b\\u0e6c')\n buf.write('\\x05їȬ\\x02\\u0e6c\\u0e6d\\x05нȟ\\x02\\u0e6d\\u0e6e')\n buf.write('\\x05ѝȯ\\x02\\u0e6e\\u0e6f\\x05љȭ\\x02\\u0e6f\\u0e70')\n buf.write('\\x05еț\\x02\\u0e70\\u0e71\\x05зȜ\\x02\\u0e71\\u0e72')\n buf.write('\\x05ыȦ\\x02\\u0e72\\u0e73\\x05нȟ\\x02\\u0e73ʮ')\n buf.write('\\x03\\x02\\x02\\x02\\u0e74\\u0e75\\x05љȭ\\x02\\u0e75\\u0e76\\x05н')\n buf.write('ȟ\\x02\\u0e76\\u0e77\\x05їȬ\\x02\\u0e77\\u0e78\\x05џ')\n buf.write('Ȱ\\x02\\u0e78\\u0e79\\x05нȟ\\x02\\u0e79\\u0e7a\\x05ї')\n buf.write('Ȭ\\x02\\u0e7a\\u0e7b\\x05нȟ\\x02\\u0e7b\\u0e7c\\x05ї')\n buf.write('Ȭ\\x02\\u0e7c\\u0e7d\\x05їȬ\\x02\\u0e7d\\u0e7e\\x05ё')\n buf.write('ȩ\\x02\\u0e7e\\u0e7f\\x05їȬ\\x02\\u0e7fʰ\\x03\\x02\\x02')\n buf.write('\\x02\\u0e80ກ\\x05љȭ\\x02ກຂ\\x05нȟ')\n buf.write('\\x02ຂ\\u0e83\\x05љȭ\\x02\\u0e83ຄ\\x05љȭ')\n buf.write('\\x02ຄ\\u0e85\\x05хȣ\\x02\\u0e85ຆ\\x05ёȩ')\n buf.write('\\x02ຆງ\\x05яȨ\\x02ງຈ\\x05ћȮ')\n buf.write('\\x02ຈຉ\\x05хȣ\\x02ຉຊ\\x05эȧ')\n buf.write('\\x02ຊ\\u0e8b\\x05нȟ\\x02\\u0e8bຌ\\x05ѧȴ')\n buf.write('\\x02ຌຍ\\x05ёȩ\\x02ຍຎ\\x05яȨ')\n buf.write('\\x02ຎຏ\\x05нȟ\\x02ຏʲ\\x03\\x02\\x02\\x02ຐ')\n buf.write('ຑ\\x05љȭ\\x02ຑຒ\\x05нȟ\\x02ຒ')\n buf.write('ຓ\\x05ћȮ\\x02ຓʴ\\x03\\x02\\x02\\x02ດຕ')\n buf.write('\\x05љȭ\\x02ຕຖ\\x05нȟ\\x02ຖທ')\n buf.write('\\x05ћȮ\\x02ທຘ\\x05љȭ\\x02ຘʶ')\n buf.write('\\x03\\x02\\x02\\x02ນບ\\x05љȭ\\x02ບປ\\x05н')\n buf.write('ȟ\\x02ປຜ\\x05ћȮ\\x02ຜຝ\\x05ћ')\n buf.write('Ȯ\\x02ຝພ\\x05хȣ\\x02ພຟ\\x05я')\n buf.write('Ȩ\\x02ຟຠ\\x05сȡ\\x02ຠມ\\x05љ')\n buf.write('ȭ\\x02ມʸ\\x03\\x02\\x02\\x02ຢຣ\\x05љȭ')\n buf.write('\\x02ຣ\\u0ea4\\x05уȢ\\x02\\u0ea4ລ\\x05еț')\n buf.write('\\x02ລ\\u0ea6\\x05їȬ\\x02\\u0ea6ວ\\x05нȟ')\n buf.write('\\x02ວʺ\\x03\\x02\\x02\\x02ຨຩ\\x05љȭ\\x02ຩ')\n buf.write('ສ\\x05уȢ\\x02ສຫ\\x05ёȩ\\x02ຫ')\n 
buf.write('ຬ\\x05ѡȱ\\x02ຬʼ\\x03\\x02\\x02\\x02ອຮ')\n buf.write('\\x05љȭ\\x02ຮຯ\\x05уȢ\\x02ຯະ')\n buf.write('\\x05ѝȯ\\x02ະັ\\x05ћȮ\\x02ັາ')\n buf.write('\\x05лȞ\\x02າຳ\\x05ёȩ\\x02ຳິ')\n buf.write('\\x05ѡȱ\\x02ິີ\\x05яȨ\\x02ີʾ')\n buf.write('\\x03\\x02\\x02\\x02ຶື\\x05љȭ\\x02ືຸ\\x05х')\n buf.write('ȣ\\x02ຸູ\\x05зȜ\\x02຺ູ\\x05ы')\n buf.write('Ȧ\\x02຺ົ\\x05хȣ\\x02ົຼ\\x05я')\n buf.write('Ȩ\\x02ຼຽ\\x05сȡ\\x02ຽ\\u0ebe\\x05љ')\n buf.write('ȭ\\x02\\u0ebeˀ\\x03\\x02\\x02\\x02\\u0ebfເ\\x05љȭ')\n buf.write('\\x02ເແ\\x05хȣ\\x02ແໂ\\x05сȡ')\n buf.write('\\x02ໂໃ\\x05яȨ\\x02ໃໄ\\x05ћȮ')\n buf.write('\\x02ໄ\\u0ec5\\x05ѥȳ\\x02\\u0ec5ໆ\\x05ѓȪ')\n buf.write('\\x02ໆ\\u0ec7\\x05нȟ\\x02\\u0ec7˂\\x03\\x02\\x02\\x02່')\n buf.write('້\\x05љȭ\\x02້໊\\x05хȣ\\x02໊')\n buf.write('໋\\x05эȧ\\x02໋໌\\x05ѓȪ\\x02໌')\n buf.write('ໍ\\x05ыȦ\\x02ໍ\\u0ece\\x05нȟ\\x02\\u0ece')\n buf.write('\\u0ecf\\x07a\\x02\\x02\\u0ecf໐\\x05хȣ\\x02໐໑')\n buf.write('\\x05яȨ\\x02໑໒\\x05ћȮ\\x02໒໓')\n buf.write('\\x05нȟ\\x02໓໔\\x05сȡ\\x02໔໕')\n buf.write('\\x05нȟ\\x02໕໖\\x05їȬ\\x02໖˄')\n buf.write('\\x03\\x02\\x02\\x02໗໘\\x05љȭ\\x02໘໙\\x05х')\n buf.write('ȣ\\x02໙\\u0eda\\x05яȨ\\x02\\u0eda\\u0edb\\x05с')\n buf.write('ȡ\\x02\\u0edbໜ\\x05ыȦ\\x02ໜໝ\\x05н')\n buf.write('ȟ\\x02ໝˆ\\x03\\x02\\x02\\x02ໞໟ\\x05љȭ')\n buf.write('\\x02ໟ\\u0ee0\\x05хȣ\\x02\\u0ee0\\u0ee1\\x05ѧȴ')\n buf.write('\\x02\\u0ee1\\u0ee2\\x05нȟ\\x02\\u0ee2ˈ\\x03\\x02\\x02\\x02\\u0ee3')\n buf.write('\\u0ee4\\x05љȭ\\x02\\u0ee4\\u0ee5\\x05щȥ\\x02\\u0ee5')\n buf.write('\\u0ee6\\x05хȣ\\x02\\u0ee6\\u0ee7\\x05ѓȪ\\x02\\u0ee7')\n buf.write('ˊ\\x03\\x02\\x02\\x02\\u0ee8\\u0ee9\\x05љȭ\\x02\\u0ee9\\u0eea')\n buf.write('\\x05эȧ\\x02\\u0eea\\u0eeb\\x05еț\\x02\\u0eeb\\u0eec')\n buf.write('\\x05ыȦ\\x02\\u0eec\\u0eed\\x05ыȦ\\x02\\u0eed\\u0eee')\n buf.write('\\x05хȣ\\x02\\u0eee\\u0eef\\x05яȨ\\x02\\u0eef\\u0ef0')\n buf.write('\\x05ћȮ\\x02\\u0ef0ˌ\\x03\\x02\\x02\\x02\\u0ef1\\u0ef2\\x05љ')\n buf.write('ȭ\\x02\\u0ef2\\u0ef3\\x05яȨ\\x02\\u0ef3\\u0ef4\\x05е')\n 
buf.write('ț\\x02\\u0ef4\\u0ef5\\x05ѓȪ\\x02\\u0ef5\\u0ef6\\x05љ')\n buf.write('ȭ\\x02\\u0ef6\\u0ef7\\x05уȢ\\x02\\u0ef7\\u0ef8\\x05ё')\n buf.write('ȩ\\x02\\u0ef8\\u0ef9\\x05ћȮ\\x02\\u0ef9ˎ\\x03\\x02\\x02')\n buf.write('\\x02\\u0efa\\u0efb\\x05љȭ\\x02\\u0efb\\u0efc\\x05ёȩ')\n buf.write('\\x02\\u0efc\\u0efd\\x05эȧ\\x02\\u0efd\\u0efe\\x05нȟ')\n buf.write('\\x02\\u0efeː\\x03\\x02\\x02\\x02\\u0effༀ\\x05љȭ\\x02ༀ')\n buf.write('༁\\x05ѓȪ\\x02༁༂\\x05нȟ\\x02༂')\n buf.write('༃\\x05йȝ\\x02༃༄\\x05хȣ\\x02༄')\n buf.write('༅\\x05пȠ\\x02༅༆\\x05хȣ\\x02༆')\n buf.write('༇\\x05йȝ\\x02༇༈\\x05еț\\x02༈')\n buf.write('༉\\x05ћȮ\\x02༉༊\\x05хȣ\\x02༊')\n buf.write('་\\x05ёȩ\\x02་༌\\x05яȨ\\x02༌')\n buf.write('˒\\x03\\x02\\x02\\x02།༎\\x05љȭ\\x02༎༏')\n buf.write('\\x05ѕȫ\\x02༏༐\\x05ыȦ\\x02༐༑')\n buf.write('\\x05лȞ\\x02༑༒\\x05еț\\x02༒༓')\n buf.write('\\x05ћȮ\\x02༓༔\\x05еț\\x02༔˔')\n buf.write('\\x03\\x02\\x02\\x02༕༖\\x05љȭ\\x02༖༗\\x05ѕ')\n buf.write('ȫ\\x02༗༘\\x05ыȦ\\x02༘༙\\x05н')\n buf.write('ȟ\\x02༙༚\\x05їȬ\\x02༚༛\\x05ї')\n buf.write('Ȭ\\x02༛༜\\x05ёȩ\\x02༜༝\\x05ї')\n buf.write('Ȭ\\x02༝˖\\x03\\x02\\x02\\x02༞༟\\x05љȭ')\n buf.write('\\x02༟༠\\x05ћȮ\\x02༠༡\\x05еț')\n buf.write('\\x02༡༢\\x05яȨ\\x02༢༣\\x05лȞ')\n buf.write('\\x02༣༤\\x05еț\\x02༤༥\\x05ыȦ')\n buf.write('\\x02༥༦\\x05ёȩ\\x02༦༧\\x05яȨ')\n buf.write('\\x02༧༨\\x05нȟ\\x02༨˘\\x03\\x02\\x02\\x02༩')\n buf.write('༪\\x05љȭ\\x02༪༫\\x05ћȮ\\x02༫')\n buf.write('༬\\x05еț\\x02༬༭\\x05їȬ\\x02༭')\n buf.write('༮\\x05ћȮ\\x02༮˚\\x03\\x02\\x02\\x02༯༰')\n buf.write('\\x05љȭ\\x02༰༱\\x05ћȮ\\x02༱༲')\n buf.write('\\x05еț\\x02༲༳\\x05їȬ\\x02༳༴')\n buf.write('\\x05ћȮ\\x02༴༵\\x05ѝȯ\\x02༵༶')\n buf.write('\\x05ѓȪ\\x02༶˜\\x03\\x02\\x02\\x02༷༸\\x05љ')\n buf.write('ȭ\\x02༸༹\\x05ћȮ\\x02༹༺\\x05е')\n buf.write('ț\\x02༺༻\\x05ћȮ\\x02༻༼\\x05н')\n buf.write('ȟ\\x02༼༽\\x05эȧ\\x02༽༾\\x05н')\n buf.write('ȟ\\x02༾༿\\x05яȨ\\x02༿ཀ\\x05ћ')\n buf.write('Ȯ\\x02ཀ˞\\x03\\x02\\x02\\x02ཁག\\x05љȭ')\n buf.write('\\x02གགྷ\\x05ћȮ\\x02གྷང\\x05еț')\n buf.write('\\x02ངཅ\\x05ћȮ\\x02ཅཆ\\x05нȟ')\n 
buf.write('\\x02ཆཇ\\x05эȧ\\x02ཇ\\u0f48\\x05нȟ')\n buf.write('\\x02\\u0f48ཉ\\x05яȨ\\x02ཉཊ\\x05ћȮ')\n buf.write('\\x02ཊཋ\\x07a\\x02\\x02ཋཌ\\x05хȣ\\x02ཌ')\n buf.write('ཌྷ\\x05лȞ\\x02ཌྷˠ\\x03\\x02\\x02\\x02ཎཏ')\n buf.write('\\x05љȭ\\x02ཏཐ\\x05ћȮ\\x02ཐད')\n buf.write('\\x05еț\\x02དདྷ\\x05ћȮ\\x02དྷན')\n buf.write('\\x05хȣ\\x02ནཔ\\x05йȝ\\x02པˢ')\n buf.write('\\x03\\x02\\x02\\x02ཕབ\\x05љȭ\\x02བབྷ\\x05ћ')\n buf.write('Ȯ\\x02བྷམ\\x05еț\\x02མཙ\\x05ћ')\n buf.write('Ȯ\\x02ཙཚ\\x05хȣ\\x02ཚཛ\\x05љ')\n buf.write('ȭ\\x02ཛཛྷ\\x05ћȮ\\x02ཛྷཝ\\x05х')\n buf.write('ȣ\\x02ཝཞ\\x05йȝ\\x02ཞཟ\\x05љ')\n buf.write('ȭ\\x02ཟˤ\\x03\\x02\\x02\\x02འཡ\\x05љȭ')\n buf.write('\\x02ཡར\\x05ћȮ\\x02རལ\\x05їȬ')\n buf.write('\\x02ལཤ\\x05хȣ\\x02ཤཥ\\x05яȨ')\n buf.write('\\x02ཥས\\x05сȡ\\x02ས˦\\x03\\x02\\x02\\x02ཧ')\n buf.write('ཨ\\x05љȭ\\x02ཨཀྵ\\x05ѝȯ\\x02ཀྵ')\n buf.write('ཪ\\x05зȜ\\x02ཪཫ\\x05эȧ\\x02ཫ')\n buf.write('ཬ\\x05ѝȯ\\x02ཬ\\u0f6d\\x05ыȦ\\x02\\u0f6d')\n buf.write('\\u0f6e\\x05ћȮ\\x02\\u0f6e\\u0f6f\\x05хȣ\\x02\\u0f6f')\n buf.write('\\u0f70\\x05љȭ\\x02\\u0f70ཱ\\x05нȟ\\x02ཱ')\n buf.write('ི\\x05ћȮ\\x02ི˨\\x03\\x02\\x02\\x02ཱིུ')\n buf.write('\\x05љȭ\\x02ཱུུ\\x05ѝȯ\\x02ཱུྲྀ')\n buf.write('\\x05зȜ\\x02ྲྀཷ\\x05ѓȪ\\x02ཷླྀ')\n buf.write('\\x05еț\\x02ླྀཹ\\x05їȬ\\x02ཹེ')\n buf.write('\\x05ћȮ\\x02ེཻ\\x05хȣ\\x02ཻོ')\n buf.write('\\x05ћȮ\\x02ོཽ\\x05хȣ\\x02ཽཾ')\n buf.write('\\x05ёȩ\\x02ཾཿ\\x05яȨ\\x02ཿ˪')\n buf.write('\\x03\\x02\\x02\\x02ཱྀྀ\\x05љȭ\\x02ཱྀྂ\\x05ѝ')\n buf.write('ȯ\\x02ྂྃ\\x05зȜ\\x02྄ྃ\\x05љ')\n buf.write('ȭ\\x02྄྅\\x05ћȮ\\x02྅྆\\x05х')\n buf.write('ȣ\\x02྆྇\\x05ћȮ\\x02྇ྈ\\x05ѝ')\n buf.write('ȯ\\x02ྈྉ\\x05ћȮ\\x02ྉྊ\\x05е')\n buf.write('ț\\x02ྊྋ\\x05зȜ\\x02ྋྌ\\x05ы')\n buf.write('Ȧ\\x02ྌྍ\\x05нȟ\\x02ྍˬ\\x03\\x02\\x02')\n buf.write('\\x02ྎྏ\\x05љȭ\\x02ྏྐ\\x05ѝȯ')\n buf.write('\\x02ྐྑ\\x05зȜ\\x02ྑྒ\\x05ћȮ')\n buf.write('\\x02ྒྒྷ\\x05ѥȳ\\x02ྒྷྔ\\x05ѓȪ')\n buf.write('\\x02ྔྕ\\x05нȟ\\x02ྕˮ\\x03\\x02\\x02\\x02ྖ')\n buf.write('ྗ\\x05љȭ\\x02ྗ\\u0f98\\x05ѝȯ\\x02\\u0f98')\n buf.write('ྙ\\x05йȝ\\x02ྙྚ\\x05йȝ\\x02ྚ')\n 
buf.write('ྛ\\x05нȟ\\x02ྛྜ\\x05љȭ\\x02ྜ')\n buf.write('ྜྷ\\x05љȭ\\x02ྜྷ˰\\x03\\x02\\x02\\x02ྞྟ')\n buf.write('\\x05љȭ\\x02ྟྠ\\x05ѝȯ\\x02ྠྡ')\n buf.write('\\x05љȭ\\x02ྡྡྷ\\x05ѓȪ\\x02ྡྷྣ')\n buf.write('\\x05нȟ\\x02ྣྤ\\x05яȨ\\x02ྤྥ')\n buf.write('\\x05лȞ\\x02ྥ˲\\x03\\x02\\x02\\x02ྦྦྷ\\x05ћ')\n buf.write('Ȯ\\x02ྦྷྨ\\x05еț\\x02ྨྩ\\x05з')\n buf.write('Ȝ\\x02ྩྪ\\x05ыȦ\\x02ྪྫ\\x05н')\n buf.write('ȟ\\x02ྫ˴\\x03\\x02\\x02\\x02ྫྷྭ\\x05ћȮ')\n buf.write('\\x02ྭྮ\\x05уȢ\\x02ྮྯ\\x05нȟ')\n buf.write('\\x02ྯ˶\\x03\\x02\\x02\\x02ྰྱ\\x05ћȮ\\x02ྱ')\n buf.write('ྲ\\x05уȢ\\x02ྲླ\\x05нȟ\\x02ླ')\n buf.write('ྴ\\x05яȨ\\x02ྴ˸\\x03\\x02\\x02\\x02ྵྶ')\n buf.write('\\x05ћȮ\\x02ྶྷ\\x05хȣ\\x02ྷྸ')\n buf.write('\\x05эȧ\\x02ྸྐྵ\\x05нȟ\\x02ྐྵ˺')\n buf.write('\\x03\\x02\\x02\\x02ྺྻ\\x05ћȮ\\x02ྻྼ\\x05х')\n buf.write('ȣ\\x02ྼ\\u0fbd\\x05эȧ\\x02\\u0fbd྾\\x05н')\n buf.write('ȟ\\x02྾྿\\x05љȭ\\x02྿࿀\\x05ћ')\n buf.write('Ȯ\\x02࿀࿁\\x05еț\\x02࿁࿂\\x05э')\n buf.write('ȧ\\x02࿂࿃\\x05ѓȪ\\x02࿃˼\\x03\\x02\\x02')\n buf.write('\\x02࿄࿅\\x05ћȮ\\x02࿅࿆\\x05хȣ')\n buf.write('\\x02࿆࿇\\x05эȧ\\x02࿇࿈\\x05нȟ')\n buf.write('\\x02࿈࿉\\x05љȭ\\x02࿉࿊\\x05ћȮ')\n buf.write('\\x02࿊࿋\\x05еț\\x02࿋࿌\\x05эȧ')\n buf.write('\\x02࿌\\u0fcd\\x05ѓȪ\\x02\\u0fcd࿎\\x07a\\x02\\x02࿎')\n buf.write('࿏\\x05ыȦ\\x02࿏࿐\\x05ћȮ\\x02࿐')\n buf.write('࿑\\x05ѧȴ\\x02࿑࿒\\x07a\\x02\\x02࿒࿓')\n buf.write('\\x05ѝȯ\\x02࿓࿔\\x05яȨ\\x02࿔࿕')\n buf.write('\\x05йȝ\\x02࿕࿖\\x05ёȩ\\x02࿖࿗')\n buf.write('\\x05яȨ\\x02࿗࿘\\x05љȭ\\x02࿘࿙')\n buf.write('\\x05ћȮ\\x02࿙࿚\\x05їȬ\\x02࿚\\u0fdb')\n buf.write('\\x05еț\\x02\\u0fdb\\u0fdc\\x05хȣ\\x02\\u0fdc\\u0fdd')\n buf.write('\\x05яȨ\\x02\\u0fdd\\u0fde\\x05нȟ\\x02\\u0fde\\u0fdf')\n buf.write('\\x05лȞ\\x02\\u0fdf˾\\x03\\x02\\x02\\x02\\u0fe0\\u0fe1\\x05ћ')\n buf.write('Ȯ\\x02\\u0fe1\\u0fe2\\x05хȣ\\x02\\u0fe2\\u0fe3\\x05э')\n buf.write('ȧ\\x02\\u0fe3\\u0fe4\\x05нȟ\\x02\\u0fe4\\u0fe5\\x05љ')\n buf.write('ȭ\\x02\\u0fe5\\u0fe6\\x05ћȮ\\x02\\u0fe6\\u0fe7\\x05е')\n buf.write('ț\\x02\\u0fe7\\u0fe8\\x05эȧ\\x02\\u0fe8\\u0fe9\\x05ѓ')\n 
buf.write('Ȫ\\x02\\u0fe9\\u0fea\\x07a\\x02\\x02\\u0fea\\u0feb\\x05ћȮ')\n buf.write('\\x02\\u0feb\\u0fec\\x05ѧȴ\\x02\\u0fec\\u0fed\\x07a\\x02\\x02\\u0fed')\n buf.write('\\u0fee\\x05ѝȯ\\x02\\u0fee\\u0fef\\x05яȨ\\x02\\u0fef')\n buf.write('\\u0ff0\\x05йȝ\\x02\\u0ff0\\u0ff1\\x05ёȩ\\x02\\u0ff1')\n buf.write('\\u0ff2\\x05яȨ\\x02\\u0ff2\\u0ff3\\x05љȭ\\x02\\u0ff3')\n buf.write('\\u0ff4\\x05ћȮ\\x02\\u0ff4\\u0ff5\\x05їȬ\\x02\\u0ff5')\n buf.write('\\u0ff6\\x05еț\\x02\\u0ff6\\u0ff7\\x05хȣ\\x02\\u0ff7')\n buf.write('\\u0ff8\\x05яȨ\\x02\\u0ff8\\u0ff9\\x05нȟ\\x02\\u0ff9')\n buf.write('\\u0ffa\\x05лȞ\\x02\\u0ffà\\x03\\x02\\x02\\x02\\u0ffb\\u0ffc')\n buf.write('\\x05ћȮ\\x02\\u0ffc\\u0ffd\\x05хȣ\\x02\\u0ffd\\u0ffe')\n buf.write('\\x05эȧ\\x02\\u0ffe\\u0fff\\x05нȟ\\x02\\u0fffက')\n buf.write('\\x05љȭ\\x02ကခ\\x05ћȮ\\x02ခဂ')\n buf.write('\\x05еț\\x02ဂဃ\\x05эȧ\\x02ဃင')\n buf.write('\\x05ѓȪ\\x02ငစ\\x07a\\x02\\x02စဆ\\x05ѝ')\n buf.write('ȯ\\x02ဆဇ\\x05яȨ\\x02ဇဈ\\x05й')\n buf.write('ȝ\\x02ဈဉ\\x05ёȩ\\x02ဉည\\x05я')\n buf.write('Ȩ\\x02ညဋ\\x05љȭ\\x02ဋဌ\\x05ћ')\n buf.write('Ȯ\\x02ဌဍ\\x05їȬ\\x02ဍဎ\\x05е')\n buf.write('ț\\x02ဎဏ\\x05хȣ\\x02ဏတ\\x05я')\n buf.write('Ȩ\\x02တထ\\x05нȟ\\x02ထဒ\\x05л')\n buf.write('Ȟ\\x02ဒ̂\\x03\\x02\\x02\\x02ဓန\\x05ћȮ')\n buf.write('\\x02နပ\\x05хȣ\\x02ပဖ\\x05эȧ')\n buf.write('\\x02ဖဗ\\x05нȟ\\x02ဗဘ\\x05ѧȴ')\n buf.write('\\x02ဘမ\\x05ёȩ\\x02မယ\\x05яȨ')\n buf.write('\\x02ယရ\\x05нȟ\\x02ရလ\\x07a\\x02\\x02လ')\n buf.write('ဝ\\x05еț\\x02ဝသ\\x05зȜ\\x02သ')\n buf.write('ဟ\\x05зȜ\\x02ဟဠ\\x05їȬ\\x02ဠ')\n buf.write('̄\\x03\\x02\\x02\\x02အဢ\\x05ћȮ\\x02ဢဣ')\n buf.write('\\x05хȣ\\x02ဣဤ\\x05эȧ\\x02ဤဥ')\n buf.write('\\x05нȟ\\x02ဥဦ\\x05ѧȴ\\x02ဦဧ')\n buf.write('\\x05ёȩ\\x02ဧဨ\\x05яȨ\\x02ဨဩ')\n buf.write('\\x05нȟ\\x02ဩဪ\\x07a\\x02\\x02ဪါ\\x05у')\n buf.write('Ȣ\\x02ါာ\\x05ёȩ\\x02ာိ\\x05ѝ')\n buf.write('ȯ\\x02ိီ\\x05їȬ\\x02ီ̆\\x03\\x02\\x02')\n buf.write('\\x02ုူ\\x05ћȮ\\x02ူေ\\x05хȣ')\n buf.write('\\x02ေဲ\\x05эȧ\\x02ဲဳ\\x05нȟ')\n buf.write('\\x02ဳဴ\\x05ѧȴ\\x02ဴဵ\\x05ёȩ')\n 
buf.write('\\x02ဵံ\\x05яȨ\\x02ံ့\\x05нȟ')\n buf.write('\\x02့း\\x07a\\x02\\x02း္\\x05эȧ\\x02္')\n buf.write('်\\x05хȣ\\x02်ျ\\x05яȨ\\x02ျ')\n buf.write('ြ\\x05ѝȯ\\x02ြွ\\x05ћȮ\\x02ွ')\n buf.write('ှ\\x05нȟ\\x02ှ̈\\x03\\x02\\x02\\x02ဿ၀')\n buf.write('\\x05ћȮ\\x02၀၁\\x05хȣ\\x02၁၂')\n buf.write('\\x05эȧ\\x02၂၃\\x05нȟ\\x02၃၄')\n buf.write('\\x05ѧȴ\\x02၄၅\\x05ёȩ\\x02၅၆')\n buf.write('\\x05яȨ\\x02၆၇\\x05нȟ\\x02၇၈')\n buf.write('\\x07a\\x02\\x02၈၉\\x05їȬ\\x02၉၊\\x05н')\n buf.write('ȟ\\x02၊။\\x05сȡ\\x02။၌\\x05х')\n buf.write('ȣ\\x02၌၍\\x05ёȩ\\x02၍၎\\x05я')\n buf.write('Ȩ\\x02၎̊\\x03\\x02\\x02\\x02၏ၐ\\x05ћȮ')\n buf.write('\\x02ၐၑ\\x05ёȩ\\x02ၑ̌\\x03\\x02\\x02\\x02ၒ')\n buf.write('ၓ\\x05ћȮ\\x02ၓၔ\\x05їȬ\\x02ၔ')\n buf.write('ၕ\\x05еț\\x02ၕၖ\\x05хȣ\\x02ၖ')\n buf.write('ၗ\\x05ыȦ\\x02ၗၘ\\x05хȣ\\x02ၘ')\n buf.write('ၙ\\x05яȨ\\x02ၙၚ\\x05сȡ\\x02ၚ')\n buf.write('̎\\x03\\x02\\x02\\x02ၛၜ\\x05ћȮ\\x02ၜၝ')\n buf.write('\\x05їȬ\\x02ၝၞ\\x05еț\\x02ၞၟ')\n buf.write('\\x05яȨ\\x02ၟၠ\\x05љȭ\\x02ၠၡ')\n buf.write('\\x05еț\\x02ၡၢ\\x05йȝ\\x02ၢၣ')\n buf.write('\\x05ћȮ\\x02ၣၤ\\x05хȣ\\x02ၤၥ')\n buf.write('\\x05ёȩ\\x02ၥၦ\\x05яȨ\\x02ၦ̐')\n buf.write('\\x03\\x02\\x02\\x02ၧၨ\\x05ћȮ\\x02ၨၩ\\x05ї')\n buf.write('Ȭ\\x02ၩၪ\\x05еț\\x02ၪၫ\\x05я')\n buf.write('Ȩ\\x02ၫၬ\\x05љȭ\\x02ၬၭ\\x05ы')\n buf.write('Ȧ\\x02ၭၮ\\x05еț\\x02ၮၯ\\x05ћ')\n buf.write('Ȯ\\x02ၯၰ\\x05нȟ\\x02ၰ̒\\x03\\x02\\x02')\n buf.write('\\x02ၱၲ\\x05ћȮ\\x02ၲၳ\\x05їȬ')\n buf.write('\\x02ၳၴ\\x05нȟ\\x02ၴၵ\\x05еț')\n buf.write('\\x02ၵၶ\\x05ћȮ\\x02ၶ̔\\x03\\x02\\x02\\x02ၷ')\n buf.write('ၸ\\x05ћȮ\\x02ၸၹ\\x05їȬ\\x02ၹ')\n buf.write('ၺ\\x05хȣ\\x02ၺၻ\\x05сȡ\\x02ၻ')\n buf.write('ၼ\\x05сȡ\\x02ၼၽ\\x05нȟ\\x02ၽ')\n buf.write('ၾ\\x05їȬ\\x02ၾ̖\\x03\\x02\\x02\\x02ၿႀ')\n buf.write('\\x05ћȮ\\x02ႀႁ\\x05їȬ\\x02ႁႂ')\n buf.write('\\x05хȣ\\x02ႂႃ\\x05эȧ\\x02ႃ̘')\n buf.write('\\x03\\x02\\x02\\x02ႄႅ\\x05ћȮ\\x02ႅႆ\\x05ї')\n buf.write('Ȭ\\x02ႆႇ\\x05ѝȯ\\x02ႇႈ\\x05н')\n buf.write('ȟ\\x02ႈ̚\\x03\\x02\\x02\\x02ႉႊ\\x05ћȮ')\n buf.write('\\x02ႊႋ\\x05їȬ\\x02ႋႌ\\x05ѝȯ')\n 
buf.write('\\x02ႌႍ\\x05яȨ\\x02ႍႎ\\x05йȝ')\n buf.write('\\x02ႎႏ\\x05еț\\x02ႏ႐\\x05ћȮ')\n buf.write('\\x02႐႑\\x05нȟ\\x02႑̜\\x03\\x02\\x02\\x02႒')\n buf.write('႓\\x05ћȮ\\x02႓႔\\x05ѥȳ\\x02႔')\n buf.write('႕\\x05ѓȪ\\x02႕႖\\x05нȟ\\x02႖')\n buf.write('̞\\x03\\x02\\x02\\x02႗႘\\x05ѝȯ\\x02႘႙')\n buf.write('\\x05яȨ\\x02႙ႚ\\x05зȜ\\x02ႚႛ')\n buf.write('\\x05ёȩ\\x02ႛႜ\\x05ѝȯ\\x02ႜႝ')\n buf.write('\\x05яȨ\\x02ႝ႞\\x05лȞ\\x02႞႟')\n buf.write('\\x05нȟ\\x02႟Ⴀ\\x05лȞ\\x02Ⴀ̠')\n buf.write('\\x03\\x02\\x02\\x02ႡႢ\\x05ѝȯ\\x02ႢႣ\\x05я')\n buf.write('Ȩ\\x02ႣႤ\\x05лȞ\\x02ႤႥ\\x05н')\n buf.write('ȟ\\x02ႥႦ\\x05їȬ\\x02Ⴆ̢\\x03\\x02\\x02')\n buf.write('\\x02ႧႨ\\x05ѝȯ\\x02ႨႩ\\x05яȨ')\n buf.write('\\x02ႩႪ\\x05хȣ\\x02ႪႫ\\x05ёȩ')\n buf.write('\\x02ႫႬ\\x05яȨ\\x02Ⴌ̤\\x03\\x02\\x02\\x02Ⴍ')\n buf.write('Ⴎ\\x05ѝȯ\\x02ႮႯ\\x05яȨ\\x02Ⴏ')\n buf.write('Ⴐ\\x05хȣ\\x02ႰႱ\\x05ѕȫ\\x02Ⴑ')\n buf.write('Ⴒ\\x05ѝȯ\\x02ႲႳ\\x05нȟ\\x02Ⴓ')\n buf.write('̦\\x03\\x02\\x02\\x02ႴႵ\\x05ѝȯ\\x02ႵႶ')\n buf.write('\\x05яȨ\\x02ႶႷ\\x05ыȦ\\x02ႷႸ')\n buf.write('\\x05хȣ\\x02ႸႹ\\x05эȧ\\x02ႹႺ')\n buf.write('\\x05хȣ\\x02ႺႻ\\x05ћȮ\\x02ႻႼ')\n buf.write('\\x05нȟ\\x02ႼႽ\\x05лȞ\\x02Ⴝ̨')\n buf.write('\\x03\\x02\\x02\\x02ႾႿ\\x05ѝȯ\\x02ႿჀ\\x05я')\n buf.write('Ȩ\\x02ჀჁ\\x05ѓȪ\\x02ჁჂ\\x05х')\n buf.write('ȣ\\x02ჂჃ\\x05џȰ\\x02ჃჄ\\x05ё')\n buf.write('ȩ\\x02ჄჅ\\x05ћȮ\\x02Ⴥ̪\\x03\\x02\\x02')\n buf.write('\\x02\\u10c6Ⴧ\\x05ѝȯ\\x02Ⴧ\\u10c8\\x05яȨ')\n buf.write('\\x02\\u10c8\\u10c9\\x05ћȮ\\x02\\u10c9\\u10ca\\x05хȣ')\n buf.write('\\x02\\u10ca\\u10cb\\x05ыȦ\\x02\\u10cb̬\\x03\\x02\\x02\\x02\\u10cc')\n buf.write('Ⴭ\\x05ѝȯ\\x02Ⴭ\\u10ce\\x05ѓȪ\\x02\\u10ce')\n buf.write('\\u10cf\\x05лȞ\\x02\\u10cfა\\x05еț\\x02ა')\n buf.write('ბ\\x05ћȮ\\x02ბგ\\x05нȟ\\x02გ')\n buf.write('̮\\x03\\x02\\x02\\x02დე\\x05ѝȯ\\x02ევ')\n buf.write('\\x05ѓȪ\\x02ვზ\\x05лȞ\\x02ზთ')\n buf.write('\\x05еț\\x02თი\\x05ћȮ\\x02იკ')\n buf.write('\\x05нȟ\\x02კლ\\x05лȞ\\x02ლ̰')\n buf.write('\\x03\\x02\\x02\\x02მნ\\x05ѝȯ\\x02ნო\\x05ѓ')\n buf.write('Ȫ\\x02ოპ\\x05љȭ\\x02პჟ\\x05н')\n 
buf.write('ȟ\\x02ჟრ\\x05їȬ\\x02რს\\x05ћ')\n buf.write('Ȯ\\x02ს̲\\x03\\x02\\x02\\x02ტუ\\x05ѝȯ')\n buf.write('\\x02უფ\\x05їȬ\\x02ფქ\\x05ёȩ')\n buf.write('\\x02ქღ\\x05ѡȱ\\x02ღყ\\x05хȣ')\n buf.write('\\x02ყშ\\x05лȞ\\x02შ̴\\x03\\x02\\x02\\x02ჩ')\n buf.write('ც\\x05ѝȯ\\x02ცძ\\x05љȭ\\x02ძ')\n buf.write('წ\\x05нȟ\\x02წ̶\\x03\\x02\\x02\\x02ჭხ')\n buf.write('\\x05ѝȯ\\x02ხჯ\\x05љȭ\\x02ჯჰ')\n buf.write('\\x05хȣ\\x02ჰჱ\\x05яȨ\\x02ჱჲ')\n buf.write('\\x05сȡ\\x02ჲ̸\\x03\\x02\\x02\\x02ჳჴ\\x05џ')\n buf.write('Ȱ\\x02ჴჵ\\x05еț\\x02ჵჶ\\x05ы')\n buf.write('Ȧ\\x02ჶჷ\\x05хȣ\\x02ჷჸ\\x05л')\n buf.write('Ȟ\\x02ჸჹ\\x05еț\\x02ჹჺ\\x05ћ')\n buf.write('Ȯ\\x02ჺ჻\\x05нȟ\\x02჻̺\\x03\\x02\\x02')\n buf.write('\\x02ჼჽ\\x05џȰ\\x02ჽჾ\\x05еț')\n buf.write('\\x02ჾჿ\\x05ыȦ\\x02ჿᄀ\\x05ѝȯ')\n buf.write('\\x02ᄀᄁ\\x05нȟ\\x02ᄁ̼\\x03\\x02\\x02\\x02ᄂ')\n buf.write('ᄃ\\x05џȰ\\x02ᄃᄄ\\x05еț\\x02ᄄ')\n buf.write('ᄅ\\x05ыȦ\\x02ᄅᄆ\\x05ѝȯ\\x02ᄆ')\n buf.write('ᄇ\\x05нȟ\\x02ᄇᄈ\\x05љȭ\\x02ᄈ')\n buf.write('̾\\x03\\x02\\x02\\x02ᄉᄊ\\x05џȰ\\x02ᄊᄋ')\n buf.write('\\x05еț\\x02ᄋᄌ\\x05їȬ\\x02ᄌᄍ')\n buf.write('\\x05йȝ\\x02ᄍᄎ\\x05уȢ\\x02ᄎᄏ')\n buf.write('\\x05еț\\x02ᄏᄐ\\x05їȬ\\x02ᄐ̀')\n buf.write('\\x03\\x02\\x02\\x02ᄑᄒ\\x05џȰ\\x02ᄒᄓ\\x05е')\n buf.write('ț\\x02ᄓᄔ\\x05їȬ\\x02ᄔᄕ\\x05й')\n buf.write('ȝ\\x02ᄕᄖ\\x05уȢ\\x02ᄖᄗ\\x05е')\n buf.write('ț\\x02ᄗᄘ\\x05їȬ\\x02ᄘᄙ\\x074')\n buf.write('\\x02\\x02ᄙ͂\\x03\\x02\\x02\\x02ᄚᄛ\\x05џȰ\\x02ᄛ')\n buf.write('ᄜ\\x05еț\\x02ᄜᄝ\\x05їȬ\\x02ᄝ')\n buf.write('ᄞ\\x05хȣ\\x02ᄞᄟ\\x05еț\\x02ᄟ')\n buf.write('ᄠ\\x05зȜ\\x02ᄠᄡ\\x05ыȦ\\x02ᄡ')\n buf.write('ᄢ\\x05нȟ\\x02ᄢ̈́\\x03\\x02\\x02\\x02ᄣᄤ')\n buf.write('\\x05џȰ\\x02ᄤᄥ\\x05еț\\x02ᄥᄦ')\n buf.write('\\x05їȬ\\x02ᄦᄧ\\x05їȬ\\x02ᄧᄨ')\n buf.write('\\x05еț\\x02ᄨᄩ\\x05ѥȳ\\x02ᄩ͆')\n buf.write('\\x03\\x02\\x02\\x02ᄪᄫ\\x05џȰ\\x02ᄫᄬ\\x05е')\n buf.write('ț\\x02ᄬᄭ\\x05їȬ\\x02ᄭᄮ\\x05ѥ')\n buf.write('ȳ\\x02ᄮᄯ\\x05хȣ\\x02ᄯᄰ\\x05я')\n buf.write('Ȩ\\x02ᄰᄱ\\x05сȡ\\x02ᄱ͈\\x03\\x02\\x02')\n buf.write('\\x02ᄲᄳ\\x05џȰ\\x02ᄳᄴ\\x05нȟ')\n buf.write('\\x02ᄴᄵ\\x05їȬ\\x02ᄵᄶ\\x05љȭ')\n 
buf.write('\\x02ᄶᄷ\\x05хȣ\\x02ᄷᄸ\\x05ёȩ')\n buf.write('\\x02ᄸᄹ\\x05яȨ\\x02ᄹ͊\\x03\\x02\\x02\\x02ᄺ')\n buf.write('ᄻ\\x05џȰ\\x02ᄻᄼ\\x05нȟ\\x02ᄼ')\n buf.write('ᄽ\\x05їȬ\\x02ᄽᄾ\\x05љȭ\\x02ᄾ')\n buf.write('ᄿ\\x05хȣ\\x02ᄿᅀ\\x05ёȩ\\x02ᅀ')\n buf.write('ᅁ\\x05яȨ\\x02ᅁᅂ\\x05љȭ\\x02ᅂ')\n buf.write('͌\\x03\\x02\\x02\\x02ᅃᅄ\\x05ѡȱ\\x02ᅄᅅ')\n buf.write('\\x05еț\\x02ᅅᅆ\\x05хȣ\\x02ᅆᅇ')\n buf.write('\\x05ћȮ\\x02ᅇ͎\\x03\\x02\\x02\\x02ᅈᅉ\\x05ѡ')\n buf.write('ȱ\\x02ᅉᅊ\\x05еț\\x02ᅊᅋ\\x05ї')\n buf.write('Ȭ\\x02ᅋᅌ\\x05яȨ\\x02ᅌᅍ\\x05х')\n buf.write('ȣ\\x02ᅍᅎ\\x05яȨ\\x02ᅎᅏ\\x05с')\n buf.write('ȡ\\x02ᅏ͐\\x03\\x02\\x02\\x02ᅐᅑ\\x05ѡȱ')\n buf.write('\\x02ᅑᅒ\\x05нȟ\\x02ᅒᅓ\\x05ыȦ')\n buf.write('\\x02ᅓᅔ\\x05ыȦ\\x02ᅔᅕ\\x05пȠ')\n buf.write('\\x02ᅕᅖ\\x05ёȩ\\x02ᅖᅗ\\x05їȬ')\n buf.write('\\x02ᅗᅘ\\x05эȧ\\x02ᅘᅙ\\x05нȟ')\n buf.write('\\x02ᅙᅚ\\x05лȞ\\x02ᅚ͒\\x03\\x02\\x02\\x02ᅛ')\n buf.write('ᅜ\\x05ѡȱ\\x02ᅜᅝ\\x05уȢ\\x02ᅝ')\n buf.write('ᅞ\\x05нȟ\\x02ᅞᅟ\\x05яȨ\\x02ᅟ')\n buf.write('͔\\x03\\x02\\x02\\x02ᅠᅡ\\x05ѡȱ\\x02ᅡᅢ')\n buf.write('\\x05уȢ\\x02ᅢᅣ\\x05нȟ\\x02ᅣᅤ')\n buf.write('\\x05яȨ\\x02ᅤᅥ\\x05нȟ\\x02ᅥᅦ')\n buf.write('\\x05џȰ\\x02ᅦᅧ\\x05нȟ\\x02ᅧᅨ')\n buf.write('\\x05їȬ\\x02ᅨ͖\\x03\\x02\\x02\\x02ᅩᅪ\\x05ѡ')\n buf.write('ȱ\\x02ᅪᅫ\\x05уȢ\\x02ᅫᅬ\\x05н')\n buf.write('ȟ\\x02ᅬᅭ\\x05їȬ\\x02ᅭᅮ\\x05н')\n buf.write('ȟ\\x02ᅮ͘\\x03\\x02\\x02\\x02ᅯᅰ\\x05ѡȱ')\n buf.write('\\x02ᅰᅱ\\x05уȢ\\x02ᅱᅲ\\x05хȣ')\n buf.write('\\x02ᅲᅳ\\x05ыȦ\\x02ᅳᅴ\\x05нȟ')\n buf.write('\\x02ᅴ͚\\x03\\x02\\x02\\x02ᅵᅶ\\x05ѡȱ\\x02ᅶ')\n buf.write('ᅷ\\x05хȣ\\x02ᅷᅸ\\x05ћȮ\\x02ᅸ')\n buf.write('ᅹ\\x05уȢ\\x02ᅹ͜\\x03\\x02\\x02\\x02ᅺᅻ')\n buf.write('\\x05ѡȱ\\x02ᅻᅼ\\x05хȣ\\x02ᅼᅽ')\n buf.write('\\x05ћȮ\\x02ᅽᅾ\\x05уȢ\\x02ᅾᅿ')\n buf.write('\\x05хȣ\\x02ᅿᆀ\\x05яȨ\\x02ᆀ͞')\n buf.write('\\x03\\x02\\x02\\x02ᆁᆂ\\x05ѡȱ\\x02ᆂᆃ\\x05ё')\n buf.write('ȩ\\x02ᆃᆄ\\x05їȬ\\x02ᆄᆅ\\x05щ')\n buf.write('ȥ\\x02ᆅ͠\\x03\\x02\\x02\\x02ᆆᆇ\\x05ѡȱ')\n buf.write('\\x02ᆇᆈ\\x05їȬ\\x02ᆈᆉ\\x05хȣ')\n buf.write('\\x02ᆉᆊ\\x05ћȮ\\x02ᆊᆋ\\x05нȟ')\n buf.write('\\x02ᆋ͢\\x03\\x02\\x02\\x02ᆌᆍ\\x05ѣȲ\\x02ᆍ')\n 
buf.write('ᆎ\\x05эȧ\\x02ᆎᆏ\\x05ыȦ\\x02ᆏ')\n buf.write('ͤ\\x03\\x02\\x02\\x02ᆐᆑ\\x05ѣȲ\\x02ᆑᆒ')\n buf.write('\\x05эȧ\\x02ᆒᆓ\\x05ыȦ\\x02ᆓᆔ')\n buf.write('\\x05еț\\x02ᆔᆕ\\x05сȡ\\x02ᆕᆖ')\n buf.write('\\x05сȡ\\x02ᆖͦ\\x03\\x02\\x02\\x02ᆗᆘ\\x05ѣ')\n buf.write('Ȳ\\x02ᆘᆙ\\x05эȧ\\x02ᆙᆚ\\x05ы')\n buf.write('Ȧ\\x02ᆚᆛ\\x05еț\\x02ᆛᆜ\\x05ћ')\n buf.write('Ȯ\\x02ᆜᆝ\\x05ћȮ\\x02ᆝᆞ\\x05ї')\n buf.write('Ȭ\\x02ᆞᆟ\\x05хȣ\\x02ᆟᆠ\\x05з')\n buf.write('Ȝ\\x02ᆠᆡ\\x05ѝȯ\\x02ᆡᆢ\\x05ћ')\n buf.write('Ȯ\\x02ᆢᆣ\\x05нȟ\\x02ᆣᆤ\\x05љ')\n buf.write('ȭ\\x02ᆤͨ\\x03\\x02\\x02\\x02ᆥᆦ\\x05ѣȲ')\n buf.write('\\x02ᆦᆧ\\x05эȧ\\x02ᆧᆨ\\x05ыȦ')\n buf.write('\\x02ᆨᆩ\\x05йȝ\\x02ᆩᆪ\\x05еț')\n buf.write('\\x02ᆪᆫ\\x05љȭ\\x02ᆫᆬ\\x05ћȮ')\n buf.write('\\x02ᆬͪ\\x03\\x02\\x02\\x02ᆭᆮ\\x05ѣȲ\\x02ᆮ')\n buf.write('ᆯ\\x05эȧ\\x02ᆯᆰ\\x05ыȦ\\x02ᆰ')\n buf.write('ᆱ\\x05йȝ\\x02ᆱᆲ\\x05ёȩ\\x02ᆲ')\n buf.write('ᆳ\\x05ыȦ\\x02ᆳᆴ\\x05еț\\x02ᆴ')\n buf.write('ᆵ\\x05ћȮ\\x02ᆵᆶ\\x05ћȮ\\x02ᆶ')\n buf.write('ᆷ\\x05џȰ\\x02ᆷᆸ\\x05еț\\x02ᆸ')\n buf.write('ᆹ\\x05ыȦ\\x02ᆹͬ\\x03\\x02\\x02\\x02ᆺᆻ')\n buf.write('\\x05ѣȲ\\x02ᆻᆼ\\x05эȧ\\x02ᆼᆽ')\n buf.write('\\x05ыȦ\\x02ᆽᆾ\\x05нȟ\\x02ᆾᆿ')\n buf.write('\\x05ыȦ\\x02ᆿᇀ\\x05нȟ\\x02ᇀᇁ')\n buf.write('\\x05эȧ\\x02ᇁᇂ\\x05нȟ\\x02ᇂᇃ')\n buf.write('\\x05яȨ\\x02ᇃᇄ\\x05ћȮ\\x02ᇄͮ')\n buf.write('\\x03\\x02\\x02\\x02ᇅᇆ\\x05ѣȲ\\x02ᇆᇇ\\x05э')\n buf.write('ȧ\\x02ᇇᇈ\\x05ыȦ\\x02ᇈᇉ\\x05н')\n buf.write('ȟ\\x02ᇉᇊ\\x05ѣȲ\\x02ᇊᇋ\\x05х')\n buf.write('ȣ\\x02ᇋᇌ\\x05љȭ\\x02ᇌᇍ\\x05ћ')\n buf.write('Ȯ\\x02ᇍᇎ\\x05љȭ\\x02ᇎͰ\\x03\\x02\\x02')\n buf.write('\\x02ᇏᇐ\\x05ѣȲ\\x02ᇐᇑ\\x05эȧ')\n buf.write('\\x02ᇑᇒ\\x05ыȦ\\x02ᇒᇓ\\x05пȠ')\n buf.write('\\x02ᇓᇔ\\x05ёȩ\\x02ᇔᇕ\\x05їȬ')\n buf.write('\\x02ᇕᇖ\\x05нȟ\\x02ᇖᇗ\\x05љȭ')\n buf.write('\\x02ᇗᇘ\\x05ћȮ\\x02ᇘͲ\\x03\\x02\\x02\\x02ᇙ')\n buf.write('ᇚ\\x05ѣȲ\\x02ᇚᇛ\\x05эȧ\\x02ᇛ')\n buf.write('ᇜ\\x05ыȦ\\x02ᇜᇝ\\x05яȨ\\x02ᇝ')\n buf.write('ᇞ\\x05еț\\x02ᇞᇟ\\x05эȧ\\x02ᇟ')\n buf.write('ᇠ\\x05нȟ\\x02ᇠᇡ\\x05љȭ\\x02ᇡ')\n buf.write('ᇢ\\x05ѓȪ\\x02ᇢᇣ\\x05еț\\x02ᇣ')\n buf.write('ᇤ\\x05йȝ\\x02ᇤᇥ\\x05нȟ\\x02ᇥ')\n 
buf.write('ᇦ\\x05љȭ\\x02ᇦʹ\\x03\\x02\\x02\\x02ᇧᇨ')\n buf.write('\\x05ѣȲ\\x02ᇨᇩ\\x05эȧ\\x02ᇩᇪ')\n buf.write('\\x05ыȦ\\x02ᇪᇫ\\x05ѓȪ\\x02ᇫᇬ')\n buf.write('\\x05еț\\x02ᇬᇭ\\x05їȬ\\x02ᇭᇮ')\n buf.write('\\x05љȭ\\x02ᇮᇯ\\x05нȟ\\x02ᇯͶ')\n buf.write('\\x03\\x02\\x02\\x02ᇰᇱ\\x05ѣȲ\\x02ᇱᇲ\\x05э')\n buf.write('ȧ\\x02ᇲᇳ\\x05ыȦ\\x02ᇳᇴ\\x05ѓ')\n buf.write('Ȫ\\x02ᇴᇵ\\x05хȣ\\x02ᇵ\\u0378\\x03\\x02\\x02')\n buf.write('\\x02ᇶᇷ\\x05ѣȲ\\x02ᇷᇸ\\x05эȧ')\n buf.write('\\x02ᇸᇹ\\x05ыȦ\\x02ᇹᇺ\\x05ѕȫ')\n buf.write('\\x02ᇺᇻ\\x05ѝȯ\\x02ᇻᇼ\\x05нȟ')\n buf.write('\\x02ᇼᇽ\\x05їȬ\\x02ᇽᇾ\\x05ѥȳ')\n buf.write('\\x02ᇾͺ\\x03\\x02\\x02\\x02ᇿሀ\\x05ѣȲ\\x02ሀ')\n buf.write('ሁ\\x05эȧ\\x02ሁሂ\\x05ыȦ\\x02ሂ')\n buf.write('ሃ\\x05їȬ\\x02ሃሄ\\x05ёȩ\\x02ሄ')\n buf.write('ህ\\x05ёȩ\\x02ህሆ\\x05ћȮ\\x02ሆ')\n buf.write('ͼ\\x03\\x02\\x02\\x02ሇለ\\x05ѣȲ\\x02ለሉ')\n buf.write('\\x05эȧ\\x02ሉሊ\\x05ыȦ\\x02ሊላ')\n buf.write('\\x05љȭ\\x02ላሌ\\x05нȟ\\x02ሌል')\n buf.write('\\x05їȬ\\x02ልሎ\\x05хȣ\\x02ሎሏ')\n buf.write('\\x05еț\\x02ሏሐ\\x05ыȦ\\x02ሐሑ')\n buf.write('\\x05хȣ\\x02ሑሒ\\x05ѧȴ\\x02ሒሓ')\n buf.write('\\x05нȟ\\x02ሓ;\\x03\\x02\\x02\\x02ሔሕ\\x05ѣ')\n buf.write('Ȳ\\x02ሕሖ\\x05эȧ\\x02ሖሗ\\x05ы')\n buf.write('Ȧ\\x02ሗመ\\x05ћȮ\\x02መሙ\\x05е')\n buf.write('ț\\x02ሙሚ\\x05зȜ\\x02ሚማ\\x05ы')\n buf.write('Ȧ\\x02ማሜ\\x05нȟ\\x02ሜ\\u0380\\x03\\x02\\x02')\n buf.write('\\x02ምሞ\\x05ѥȳ\\x02ሞሟ\\x05нȟ')\n buf.write('\\x02ሟሠ\\x05еț\\x02ሠሡ\\x05їȬ')\n buf.write('\\x02ሡ\\u0382\\x03\\x02\\x02\\x02ሢሣ\\x05ѥȳ\\x02ሣ')\n buf.write('ሤ\\x05нȟ\\x02ሤሥ\\x05љȭ\\x02ሥ')\n buf.write('΄\\x03\\x02\\x02\\x02ሦሧ\\x05ѥȳ\\x02ሧረ')\n buf.write('\\x05эȧ\\x02ረሩ\\x05хȣ\\x02ሩሪ')\n buf.write('\\x05яȨ\\x02ሪራ\\x05ћȮ\\x02ራሬ')\n buf.write('\\x05нȟ\\x02ሬር\\x05їȬ\\x02ርሮ')\n buf.write('\\x05џȰ\\x02ሮሯ\\x05еț\\x02ሯሰ')\n buf.write('\\x05ыȦ\\x02ሰሱ\\x07a\\x02\\x02ሱሲ\\x05ѝ')\n buf.write('ȯ\\x02ሲሳ\\x05яȨ\\x02ሳሴ\\x05й')\n buf.write('ȝ\\x02ሴስ\\x05ёȩ\\x02ስሶ\\x05я')\n buf.write('Ȩ\\x02ሶሷ\\x05љȭ\\x02ሷሸ\\x05ћ')\n buf.write('Ȯ\\x02ሸሹ\\x05їȬ\\x02ሹሺ\\x05е')\n buf.write('ț\\x02ሺሻ\\x05хȣ\\x02ሻሼ\\x05я')\n 
buf.write('Ȩ\\x02ሼሽ\\x05нȟ\\x02ሽሾ\\x05л')\n buf.write('Ȟ\\x02ሾΆ\\x03\\x02\\x02\\x02ሿቀ\\x05ѧȴ')\n buf.write('\\x02ቀቁ\\x05ёȩ\\x02ቁቂ\\x05яȨ')\n buf.write('\\x02ቂቃ\\x05нȟ\\x02ቃΈ\\x03\\x02\\x02\\x02ቄ')\n buf.write('ቅ\\x05ѓȪ\\x02ቅቆ\\x05їȬ\\x02ቆ')\n buf.write('ቇ\\x05нȟ\\x02ቇቈ\\x05лȞ\\x02ቈ')\n buf.write('\\u1249\\x05хȣ\\x02\\u1249ቊ\\x05йȝ\\x02ቊ')\n buf.write('ቋ\\x05ћȮ\\x02ቋቌ\\x05хȣ\\x02ቌ')\n buf.write('ቍ\\x05ёȩ\\x02ቍ\\u124e\\x05яȨ\\x02\\u124e')\n buf.write('Ί\\x03\\x02\\x02\\x02\\u124fቐ\\x05ѓȪ\\x02ቐቑ')\n buf.write('\\x05їȬ\\x02ቑቒ\\x05нȟ\\x02ቒቓ')\n buf.write('\\x05лȞ\\x02ቓቔ\\x05хȣ\\x02ቔቕ')\n buf.write('\\x05йȝ\\x02ቕቖ\\x05ћȮ\\x02ቖ\\u1257')\n buf.write('\\x05хȣ\\x02\\u1257ቘ\\x05ёȩ\\x02ቘ\\u1259')\n buf.write('\\x05яȨ\\x02\\u1259ቚ\\x07a\\x02\\x02ቚቛ\\x05з')\n buf.write('Ȝ\\x02ቛቜ\\x05ёȩ\\x02ቜቝ\\x05ѝ')\n buf.write('ȯ\\x02ቝ\\u125e\\x05яȨ\\x02\\u125e\\u125f\\x05л')\n buf.write('Ȟ\\x02\\u125fበ\\x05љȭ\\x02በΌ\\x03\\x02\\x02')\n buf.write('\\x02ቡቢ\\x05ѓȪ\\x02ቢባ\\x05їȬ')\n buf.write('\\x02ባቤ\\x05нȟ\\x02ቤብ\\x05лȞ')\n buf.write('\\x02ብቦ\\x05хȣ\\x02ቦቧ\\x05йȝ')\n buf.write('\\x02ቧቨ\\x05ћȮ\\x02ቨቩ\\x05хȣ')\n buf.write('\\x02ቩቪ\\x05ёȩ\\x02ቪቫ\\x05яȨ')\n buf.write('\\x02ቫቬ\\x07a\\x02\\x02ቬቭ\\x05йȝ\\x02ቭ')\n buf.write('ቮ\\x05ёȩ\\x02ቮቯ\\x05љȭ\\x02ቯ')\n buf.write('ተ\\x05ћȮ\\x02ተΎ\\x03\\x02\\x02\\x02ቱቲ')\n buf.write('\\x05ѓȪ\\x02ቲታ\\x05їȬ\\x02ታቴ')\n buf.write('\\x05нȟ\\x02ቴት\\x05лȞ\\x02ትቶ')\n buf.write('\\x05хȣ\\x02ቶቷ\\x05йȝ\\x02ቷቸ')\n buf.write('\\x05ћȮ\\x02ቸቹ\\x05хȣ\\x02ቹቺ')\n buf.write('\\x05ёȩ\\x02ቺቻ\\x05яȨ\\x02ቻቼ')\n buf.write('\\x07a\\x02\\x02ቼች\\x05лȞ\\x02ችቾ\\x05н')\n buf.write('ȟ\\x02ቾቿ\\x05ћȮ\\x02ቿኀ\\x05е')\n buf.write('ț\\x02ኀኁ\\x05хȣ\\x02ኁኂ\\x05ы')\n buf.write('Ȧ\\x02ኂኃ\\x05љȭ\\x02ኃΐ\\x03\\x02\\x02')\n buf.write('\\x02ኄኅ\\x05ѓȪ\\x02ኅኆ\\x05їȬ')\n buf.write('\\x02ኆኇ\\x05нȟ\\x02ኇኈ\\x05лȞ')\n buf.write('\\x02ኈ\\u1289\\x05хȣ\\x02\\u1289ኊ\\x05йȝ')\n buf.write('\\x02ኊኋ\\x05ћȮ\\x02ኋኌ\\x05хȣ')\n buf.write('\\x02ኌኍ\\x05ёȩ\\x02ኍ\\u128e\\x05яȨ')\n 
buf.write('\\x02\\u128e\\u128f\\x07a\\x02\\x02\\u128fነ\\x05ѓȪ\\x02ነ')\n buf.write('ኑ\\x05їȬ\\x02ኑኒ\\x05ёȩ\\x02ኒ')\n buf.write('ና\\x05зȜ\\x02ናኔ\\x05еț\\x02ኔ')\n buf.write('ን\\x05зȜ\\x02ንኖ\\x05хȣ\\x02ኖ')\n buf.write('ኗ\\x05ыȦ\\x02ኗኘ\\x05хȣ\\x02ኘ')\n buf.write('ኙ\\x05ћȮ\\x02ኙኚ\\x05ѥȳ\\x02ኚ')\n buf.write('Β\\x03\\x02\\x02\\x02ኛኜ\\x05ѓȪ\\x02ኜኝ')\n buf.write('\\x05їȬ\\x02ኝኞ\\x05нȟ\\x02ኞኟ')\n buf.write('\\x05лȞ\\x02ኟአ\\x05хȣ\\x02አኡ')\n buf.write('\\x05йȝ\\x02ኡኢ\\x05ћȮ\\x02ኢኣ')\n buf.write('\\x05хȣ\\x02ኣኤ\\x05ёȩ\\x02ኤእ')\n buf.write('\\x05яȨ\\x02እኦ\\x07a\\x02\\x02ኦኧ\\x05љ')\n buf.write('ȭ\\x02ኧከ\\x05нȟ\\x02ከኩ\\x05ћ')\n buf.write('Ȯ\\x02ኩΔ\\x03\\x02\\x02\\x02ኪካ\\x05йȝ')\n buf.write('\\x02ካኬ\\x05ѝȯ\\x02ኬክ\\x05эȧ')\n buf.write('\\x02ክኮ\\x05нȟ\\x02ኮኯ\\x07a\\x02\\x02ኯ')\n buf.write('ኰ\\x05лȞ\\x02ኰ\\u12b1\\x05хȣ\\x02\\u12b1')\n buf.write('ኲ\\x05љȭ\\x02ኲኳ\\x05ћȮ\\x02ኳ')\n buf.write('Ζ\\x03\\x02\\x02\\x02ኴኵ\\x05лȞ\\x02ኵ\\u12b6')\n buf.write('\\x05нȟ\\x02\\u12b6\\u12b7\\x05яȨ\\x02\\u12b7ኸ')\n buf.write('\\x05љȭ\\x02ኸኹ\\x05нȟ\\x02ኹኺ')\n buf.write('\\x07a\\x02\\x02ኺኻ\\x05їȬ\\x02ኻኼ\\x05е')\n buf.write('ț\\x02ኼኽ\\x05яȨ\\x02ኽኾ\\x05щ')\n buf.write('ȥ\\x02ኾΘ\\x03\\x02\\x02\\x02\\u12bfዀ\\x05ыȦ')\n buf.write('\\x02ዀ\\u12c1\\x05хȣ\\x02\\u12c1ዂ\\x05љȭ')\n buf.write('\\x02ዂዃ\\x05ћȮ\\x02ዃዄ\\x05еț')\n buf.write('\\x02ዄዅ\\x05сȡ\\x02ዅ\\u12c6\\x05сȡ')\n buf.write('\\x02\\u12c6Κ\\x03\\x02\\x02\\x02\\u12c7ወ\\x05ѓȪ\\x02ወ')\n buf.write('ዉ\\x05нȟ\\x02ዉዊ\\x05їȬ\\x02ዊ')\n buf.write('ዋ\\x05йȝ\\x02ዋዌ\\x05нȟ\\x02ዌ')\n buf.write('ው\\x05яȨ\\x02ውዎ\\x05ћȮ\\x02ዎ')\n buf.write('ዏ\\x07a\\x02\\x02ዏዐ\\x05їȬ\\x02ዐዑ')\n buf.write('\\x05еț\\x02ዑዒ\\x05яȨ\\x02ዒዓ')\n buf.write('\\x05щȥ\\x02ዓΜ\\x03\\x02\\x02\\x02ዔዕ\\x05ѓ')\n buf.write('Ȫ\\x02ዕዖ\\x05нȟ\\x02ዖ\\u12d7\\x05ї')\n buf.write('Ȭ\\x02\\u12d7ዘ\\x05йȝ\\x02ዘዙ\\x05н')\n buf.write('ȟ\\x02ዙዚ\\x05яȨ\\x02ዚዛ\\x05ћ')\n buf.write('Ȯ\\x02ዛዜ\\x05хȣ\\x02ዜዝ\\x05ы')\n buf.write('Ȧ\\x02ዝዞ\\x05нȟ\\x02ዞዟ\\x07a\\x02')\n buf.write('\\x02ዟዠ\\x05йȝ\\x02ዠዡ\\x05ёȩ')\n 
buf.write('\\x02ዡዢ\\x05яȨ\\x02ዢዣ\\x05ћȮ')\n buf.write('\\x02ዣΞ\\x03\\x02\\x02\\x02ዤዥ\\x05ѓȪ\\x02ዥ')\n buf.write('ዦ\\x05нȟ\\x02ዦዧ\\x05їȬ\\x02ዧ')\n buf.write('የ\\x05йȝ\\x02የዩ\\x05нȟ\\x02ዩ')\n buf.write('ዪ\\x05яȨ\\x02ዪያ\\x05ћȮ\\x02ያ')\n buf.write('ዬ\\x05хȣ\\x02ዬይ\\x05ыȦ\\x02ይ')\n buf.write('ዮ\\x05нȟ\\x02ዮዯ\\x07a\\x02\\x02ዯደ')\n buf.write('\\x05лȞ\\x02ደዱ\\x05хȣ\\x02ዱዲ')\n buf.write('\\x05љȭ\\x02ዲዳ\\x05йȝ\\x02ዳΠ')\n buf.write('\\x03\\x02\\x02\\x02ዴድ\\x05їȬ\\x02ድዶ\\x05е')\n buf.write('ț\\x02ዶዷ\\x05яȨ\\x02ዷዸ\\x05щ')\n buf.write('ȥ\\x02ዸ\\u03a2\\x03\\x02\\x02\\x02ዹዺ\\x05еț')\n buf.write('\\x02ዺዻ\\x05џȰ\\x02ዻዼ\\x05сȡ')\n buf.write('\\x02ዼΤ\\x03\\x02\\x02\\x02ዽዾ\\x05йȝ\\x02ዾ')\n buf.write('ዿ\\x05ёȩ\\x02ዿጀ\\x05їȬ\\x02ጀ')\n buf.write('ጁ\\x05їȬ\\x02ጁΦ\\x03\\x02\\x02\\x02ጂጃ')\n buf.write('\\x05ыȦ\\x02ጃጄ\\x05еț\\x02ጄጅ')\n buf.write('\\x05сȡ\\x02ጅΨ\\x03\\x02\\x02\\x02ጆጇ\\x05ы')\n buf.write('Ȧ\\x02ጇገ\\x05нȟ\\x02ገጉ\\x05е')\n buf.write('ț\\x02ጉጊ\\x05лȞ\\x02ጊΪ\\x03\\x02\\x02')\n buf.write('\\x02ጋጌ\\x05эȧ\\x02ጌግ\\x05еț')\n buf.write('\\x02ግጎ\\x05ѣȲ\\x02ጎά\\x03\\x02\\x02\\x02ጏ')\n buf.write('ጐ\\x05эȧ\\x02ጐ\\u1311\\x05нȟ\\x02\\u1311')\n buf.write('ጒ\\x05лȞ\\x02ጒጓ\\x05хȣ\\x02ጓ')\n buf.write('ጔ\\x05еț\\x02ጔጕ\\x05яȨ\\x02ጕ')\n buf.write('ή\\x03\\x02\\x02\\x02\\u1316\\u1317\\x05эȧ\\x02\\u1317ጘ')\n buf.write('\\x05хȣ\\x02ጘጙ\\x05яȨ\\x02ጙΰ')\n buf.write('\\x03\\x02\\x02\\x02ጚጛ\\x05яȨ\\x02ጛጜ\\x05ћ')\n buf.write('Ȯ\\x02ጜጝ\\x05хȣ\\x02ጝጞ\\x05ы')\n buf.write('Ȧ\\x02ጞጟ\\x05нȟ\\x02ጟβ\\x03\\x02\\x02')\n buf.write('\\x02ጠጡ\\x05їȬ\\x02ጡጢ\\x05еț')\n buf.write('\\x02ጢጣ\\x05ћȮ\\x02ጣጤ\\x05хȣ')\n buf.write('\\x02ጤጥ\\x05ёȩ\\x02ጥጦ\\x07a\\x02\\x02ጦ')\n buf.write('ጧ\\x05ћȮ\\x02ጧጨ\\x05ёȩ\\x02ጨ')\n buf.write('ጩ\\x07a\\x02\\x02ጩጪ\\x05їȬ\\x02ጪጫ')\n buf.write('\\x05нȟ\\x02ጫጬ\\x05ѓȪ\\x02ጬጭ')\n buf.write('\\x05ёȩ\\x02ጭጮ\\x05їȬ\\x02ጮጯ')\n buf.write('\\x05ћȮ\\x02ጯδ\\x03\\x02\\x02\\x02ጰጱ\\x05ї')\n buf.write('Ȭ\\x02ጱጲ\\x05ёȩ\\x02ጲጳ\\x05ѡ')\n buf.write('ȱ\\x02ጳጴ\\x07a\\x02\\x02ጴጵ\\x05яȨ')\n 
buf.write('\\x02ጵጶ\\x05ѝȯ\\x02ጶጷ\\x05эȧ')\n buf.write('\\x02ጷጸ\\x05зȜ\\x02ጸጹ\\x05нȟ')\n buf.write('\\x02ጹጺ\\x05їȬ\\x02ጺζ\\x03\\x02\\x02\\x02ጻ')\n buf.write('ጼ\\x05љȭ\\x02ጼጽ\\x05ѝȯ\\x02ጽ')\n buf.write('ጾ\\x05эȧ\\x02ጾθ\\x03\\x02\\x02\\x02ጿፀ')\n buf.write('\\x05џȰ\\x02ፀፁ\\x05еț\\x02ፁፂ')\n buf.write('\\x05їȬ\\x02ፂፃ\\x05хȣ\\x02ፃፄ')\n buf.write('\\x05еț\\x02ፄፅ\\x05яȨ\\x02ፅፆ')\n buf.write('\\x05йȝ\\x02ፆፇ\\x05нȟ\\x02ፇκ')\n buf.write('\\x03\\x02\\x02\\x02ፈፉ\\x05їȬ\\x02ፉፊ\\x05н')\n buf.write('ȟ\\x02ፊፋ\\x05сȡ\\x02ፋፌ\\x05ї')\n buf.write('Ȭ\\x02ፌፍ\\x07a\\x02\\x02ፍμ\\x03\\x02\\x02\\x02ፎ')\n buf.write('ፏ\\x05љȭ\\x02ፏፐ\\x05ћȮ\\x02ፐ')\n buf.write('ፑ\\x05лȞ\\x02ፑፒ\\x05лȞ\\x02ፒ')\n buf.write('ፓ\\x05нȟ\\x02ፓፔ\\x05џȰ\\x02ፔ')\n buf.write('ξ\\x03\\x02\\x02\\x02ፕፖ\\x05џȰ\\x02ፖፗ')\n buf.write('\\x05еț\\x02ፗፘ\\x05їȬ\\x02ፘፙ')\n buf.write('\\x07a\\x02\\x02ፙπ\\x03\\x02\\x02\\x02ፚ\\u135b\\x05йȝ')\n buf.write('\\x02\\u135b\\u135c\\x05ёȩ\\x02\\u135c፝\\x05џȰ')\n buf.write('\\x02፝፞\\x05еț\\x02፞፟\\x05їȬ')\n buf.write('\\x02፟፠\\x07a\\x02\\x02፠ς\\x03\\x02\\x02\\x02፡።')\n buf.write('\\x05яȨ\\x02።፩\\x07)\\x02\\x02፣፨\\n\\x02\\x02')\n buf.write('\\x02፤፥\\x07)\\x02\\x02፥፨\\x07)\\x02\\x02፦፨\\x05')\n buf.write('Эȗ\\x02፧፣\\x03\\x02\\x02\\x02፧፤\\x03\\x02\\x02\\x02')\n buf.write('፧፦\\x03\\x02\\x02\\x02፨፫\\x03\\x02\\x02\\x02፩፧\\x03')\n buf.write('\\x02\\x02\\x02፩፪\\x03\\x02\\x02\\x02፪፬\\x03\\x02\\x02\\x02፫፩')\n buf.write('\\x03\\x02\\x02\\x02፬፭\\x07)\\x02\\x02፭τ\\x03\\x02\\x02\\x02፮')\n buf.write('፷\\x05зȜ\\x02፯፳\\x07)\\x02\\x02፰፲')\n buf.write('\\x0423\\x02፱፰\\x03\\x02\\x02\\x02፲፵\\x03\\x02\\x02\\x02፳')\n buf.write('፱\\x03\\x02\\x02\\x02፳፴\\x03\\x02\\x02\\x02፴፶\\x03\\x02\\x02\\x02')\n buf.write('፵፳\\x03\\x02\\x02\\x02፶፸\\x07)\\x02\\x02፷፯\\x03')\n buf.write('\\x02\\x02\\x02፸፹\\x03\\x02\\x02\\x02፹፷\\x03\\x02\\x02\\x02፹፺')\n buf.write('\\x03\\x02\\x02\\x02፺φ\\x03\\x02\\x02\\x02፻ᎄ\\x05ѣȲ')\n buf.write('\\x02፼ᎀ\\x07)\\x02\\x02\\u137d\\u137f\\t\\x03\\x02\\x02\\u137e\\u137d')\n buf.write(\n 
'\\x03\\x02\\x02\\x02\\u137fᎂ\\x03\\x02\\x02\\x02ᎀ\\u137e\\x03\\x02\\x02\\x02ᎀ')\n buf.write('ᎁ\\x03\\x02\\x02\\x02ᎁᎃ\\x03\\x02\\x02\\x02ᎂᎀ\\x03\\x02\\x02\\x02')\n buf.write('ᎃᎅ\\x07)\\x02\\x02ᎄ፼\\x03\\x02\\x02\\x02ᎅᎆ\\x03')\n buf.write('\\x02\\x02\\x02ᎆᎄ\\x03\\x02\\x02\\x02ᎆᎇ\\x03\\x02\\x02\\x02ᎇψ')\n buf.write('\\x03\\x02\\x02\\x02ᎈᎉ\\x070\\x02\\x02ᎉᎊ\\x070\\x02\\x02ᎊ')\n buf.write('ϊ\\x03\\x02\\x02\\x02ᎋᎌ\\x070\\x02\\x02ᎌό\\x03\\x02\\x02')\n buf.write('\\x02ᎍᎎ\\x05УȒ\\x02ᎎώ\\x03\\x02\\x02\\x02ᎏ')\n buf.write('᎘\\x05Хȓ\\x02᎐᎒\\t\\x04\\x02\\x02᎑᎓')\n buf.write('\\t\\x05\\x02\\x02᎒᎑\\x03\\x02\\x02\\x02᎒᎓\\x03\\x02\\x02\\x02᎓')\n buf.write('᎖\\x03\\x02\\x02\\x02᎔᎗\\x05Хȓ\\x02᎕᎗')\n buf.write('\\x05УȒ\\x02᎖᎔\\x03\\x02\\x02\\x02᎖᎕\\x03\\x02\\x02')\n buf.write('\\x02᎗᎙\\x03\\x02\\x02\\x02᎘᎐\\x03\\x02\\x02\\x02᎘᎙')\n buf.write('\\x03\\x02\\x02\\x02᎙\\u139c\\x03\\x02\\x02\\x02\\u139a\\u139d\\x05лȞ')\n buf.write(\n '\\x02\\u139b\\u139d\\x05пȠ\\x02\\u139c\\u139a\\x03\\x02\\x02\\x02\\u139c')\n buf.write(\n '\\u139b\\x03\\x02\\x02\\x02\\u139c\\u139d\\x03\\x02\\x02\\x02\\u139dϐ\\x03\\x02\\x02\\x02'\n )\n buf.write('\\u139eᎥ\\x07)\\x02\\x02\\u139fᎤ\\n\\x02\\x02\\x02ᎠᎡ\\x07')\n buf.write(')\\x02\\x02ᎡᎤ\\x07)\\x02\\x02ᎢᎤ\\x05Эȗ\\x02Ꭳ')\n buf.write('\\u139f\\x03\\x02\\x02\\x02ᎣᎠ\\x03\\x02\\x02\\x02ᎣᎢ\\x03\\x02\\x02\\x02')\n buf.write('ᎤᎧ\\x03\\x02\\x02\\x02ᎥᎣ\\x03\\x02\\x02\\x02ᎥᎦ\\x03')\n buf.write('\\x02\\x02\\x02ᎦᎨ\\x03\\x02\\x02\\x02ᎧᎥ\\x03\\x02\\x02\\x02ᎨᎩ')\n buf.write('\\x07)\\x02\\x02Ꭹϒ\\x03\\x02\\x02\\x02ᎪᎯ\\x05ѕȫ')\n buf.write('\\x02ᎫᎰ\\x05ϗǬ\\x02ᎬᎰ\\x05ϙǭ')\n buf.write('\\x02ᎭᎰ\\x05ϛǮ\\x02ᎮᎰ\\x05ϝǯ')\n buf.write('\\x02ᎯᎫ\\x03\\x02\\x02\\x02ᎯᎬ\\x03\\x02\\x02\\x02ᎯᎭ')\n buf.write('\\x03\\x02\\x02\\x02ᎯᎮ\\x03\\x02\\x02\\x02ᎰᎱ\\x03\\x02\\x02\\x02Ꮁ')\n buf.write('Ꮂ\\x08Ǫ\\x02\\x02Ꮂϔ\\x03\\x02\\x02\\x02ᎳᎴ\\x07)')\n buf.write('\\x02\\x02Ꮄϖ\\x03\\x02\\x02\\x02ᎵᎶ\\x05ϕǫ\\x02Ꮆ')\n buf.write('Ꮊ\\x07>\\x02\\x02ᎷᎹ\\x0b\\x02\\x02\\x02ᎸᎷ\\x03\\x02\\x02\\x02')\n 
buf.write('ᎹᎼ\\x03\\x02\\x02\\x02ᎺᎻ\\x03\\x02\\x02\\x02ᎺᎸ\\x03')\n buf.write('\\x02\\x02\\x02ᎻᎽ\\x03\\x02\\x02\\x02ᎼᎺ\\x03\\x02\\x02\\x02ᎽᎾ')\n buf.write('\\x07@\\x02\\x02ᎾᎿ\\x05ϕǫ\\x02ᎿϘ\\x03\\x02\\x02')\n buf.write('\\x02ᏀᏁ\\x05ϕǫ\\x02ᏁᏅ\\x07}\\x02\\x02Ꮒ')\n buf.write('Ꮔ\\x0b\\x02\\x02\\x02ᏃᏂ\\x03\\x02\\x02\\x02ᏄᏇ\\x03\\x02\\x02')\n buf.write('\\x02ᏅᏆ\\x03\\x02\\x02\\x02ᏅᏃ\\x03\\x02\\x02\\x02ᏆᏈ')\n buf.write('\\x03\\x02\\x02\\x02ᏇᏅ\\x03\\x02\\x02\\x02ᏈᏉ\\x07\\x7f\\x02\\x02Ꮙ')\n buf.write('Ꮚ\\x05ϕǫ\\x02ᏊϚ\\x03\\x02\\x02\\x02ᏋᏌ')\n buf.write('\\x05ϕǫ\\x02ᏌᏐ\\x07]\\x02\\x02ᏍᏏ\\x0b\\x02\\x02')\n buf.write('\\x02ᏎᏍ\\x03\\x02\\x02\\x02ᏏᏒ\\x03\\x02\\x02\\x02ᏐᏑ')\n buf.write('\\x03\\x02\\x02\\x02ᏐᏎ\\x03\\x02\\x02\\x02ᏑᏓ\\x03\\x02\\x02\\x02Ꮢ')\n buf.write('Ꮠ\\x03\\x02\\x02\\x02ᏓᏔ\\x07_\\x02\\x02ᏔᏕ\\x05ϕ')\n buf.write('ǫ\\x02ᏕϜ\\x03\\x02\\x02\\x02ᏖᏗ\\x05ϕǫ')\n buf.write('\\x02ᏗᏛ\\x07*\\x02\\x02ᏘᏚ\\x0b\\x02\\x02\\x02ᏙᏘ')\n buf.write('\\x03\\x02\\x02\\x02ᏚᏝ\\x03\\x02\\x02\\x02ᏛᏜ\\x03\\x02\\x02\\x02Ꮫ')\n buf.write('Ꮩ\\x03\\x02\\x02\\x02ᏜᏞ\\x03\\x02\\x02\\x02ᏝᏛ\\x03\\x02\\x02\\x02')\n buf.write('ᏞᏟ\\x07+\\x02\\x02ᏟᏠ\\x05ϕǫ\\x02Ꮰ')\n buf.write('Ϟ\\x03\\x02\\x02\\x02ᏡᏢ\\n\\x06\\x02\\x02ᏢϠ\\x03\\x02\\x02\\x02')\n buf.write('ᏣᏧ\\x07$\\x02\\x02ᏤᏨ\\n\\x07\\x02\\x02ᏥᏦ\\x07')\n buf.write('$\\x02\\x02ᏦᏨ\\x07$\\x02\\x02ᏧᏤ\\x03\\x02\\x02\\x02ᏧᏥ')\n buf.write('\\x03\\x02\\x02\\x02ᏨᏩ\\x03\\x02\\x02\\x02ᏩᏧ\\x03\\x02\\x02\\x02Ꮹ')\n buf.write('Ꮺ\\x03\\x02\\x02\\x02ᏪᏫ\\x03\\x02\\x02\\x02ᏫᏬ\\x07$\\x02\\x02')\n buf.write(\"ᏬϢ\\x03\\x02\\x02\\x02ᏭᏮ\\x07'\\x02\\x02ᏮϤ\\x03\")\n buf.write('\\x02\\x02\\x02ᏯᏰ\\x07(\\x02\\x02ᏰϦ\\x03\\x02\\x02\\x02ᏱᏲ')\n buf.write('\\x07*\\x02\\x02ᏲϨ\\x03\\x02\\x02\\x02ᏳᏴ\\x07+\\x02\\x02ᏴϪ')\n buf.write(\n '\\x03\\x02\\x02\\x02Ᏽ\\u13f6\\x07,\\x02\\x02\\u13f6\\u13f7\\x07,\\x02\\x02\\u13f7Ϭ'\n )\n buf.write('\\x03\\x02\\x02\\x02ᏸᏹ\\x07,\\x02\\x02ᏹϮ\\x03\\x02\\x02\\x02ᏺ')\n buf.write('ᏻ\\x07-\\x02\\x02ᏻϰ\\x03\\x02\\x02\\x02ᏼᏽ\\x07/\\x02\\x02ᏽ')\n buf.write(\n 
'ϲ\\x03\\x02\\x02\\x02\\u13fe\\u13ff\\x07.\\x02\\x02\\u13ffϴ\\x03\\x02\\x02\\x02'\n )\n buf.write('᐀ᐁ\\x071\\x02\\x02ᐁ϶\\x03\\x02\\x02\\x02ᐂᐃ')\n buf.write('\\x07B\\x02\\x02ᐃϸ\\x03\\x02\\x02\\x02ᐄᐅ\\x07<\\x02\\x02ᐅᐆ')\n buf.write('\\x07?\\x02\\x02ᐆϺ\\x03\\x02\\x02\\x02ᐇᐈ\\x07<\\x02\\x02ᐈᐍ')\n buf.write('\\x05Сȑ\\x02ᐉᐌ\\x05Сȑ\\x02ᐊᐌ')\n buf.write('\\t\\x08\\x02\\x02ᐋᐉ\\x03\\x02\\x02\\x02ᐋᐊ\\x03\\x02\\x02\\x02ᐌ')\n buf.write('ᐏ\\x03\\x02\\x02\\x02ᐍᐋ\\x03\\x02\\x02\\x02ᐍᐎ\\x03\\x02\\x02\\x02')\n buf.write('ᐎᐖ\\x03\\x02\\x02\\x02ᐏᐍ\\x03\\x02\\x02\\x02ᐐᐑ\\x07')\n buf.write('<\\x02\\x02ᐑᐖ\\x05ϡDZ\\x02ᐒᐓ\\x07<\\x02\\x02ᐓ')\n buf.write('ᐖ\\x05ύǧ\\x02ᐔᐖ\\x05Бȉ\\x02ᐕ')\n buf.write('ᐇ\\x03\\x02\\x02\\x02ᐕᐐ\\x03\\x02\\x02\\x02ᐕᐒ\\x03\\x02\\x02\\x02')\n buf.write('ᐕᐔ\\x03\\x02\\x02\\x02ᐖϼ\\x03\\x02\\x02\\x02ᐗᐘ\\x07')\n buf.write('<\\x02\\x02ᐘϾ\\x03\\x02\\x02\\x02ᐙᐚ\\x07=\\x02\\x02ᐚЀ')\n buf.write('\\x03\\x02\\x02\\x02ᐛᐜ\\x07>\\x02\\x02ᐜᐝ\\x07?\\x02\\x02ᐝЂ')\n buf.write('\\x03\\x02\\x02\\x02ᐞᐟ\\x07>\\x02\\x02ᐟЄ\\x03\\x02\\x02\\x02ᐠ')\n buf.write('ᐡ\\x07@\\x02\\x02ᐡᐢ\\x07?\\x02\\x02ᐢІ\\x03\\x02\\x02\\x02ᐣ')\n buf.write('ᐤ\\x07#\\x02\\x02ᐤᐬ\\x07?\\x02\\x02ᐥᐦ\\x07>\\x02\\x02ᐦ')\n buf.write('ᐬ\\x07@\\x02\\x02ᐧᐨ\\x07`\\x02\\x02ᐨᐬ\\x07?\\x02\\x02ᐩ')\n buf.write('ᐪ\\x07\\x80\\x02\\x02ᐪᐬ\\x07?\\x02\\x02ᐫᐣ\\x03\\x02')\n buf.write('\\x02\\x02ᐫᐥ\\x03\\x02\\x02\\x02ᐫᐧ\\x03\\x02\\x02\\x02ᐫᐩ')\n buf.write('\\x03\\x02\\x02\\x02ᐬЈ\\x03\\x02\\x02\\x02ᐭᐮ\\x07`\\x02\\x02ᐮ')\n buf.write('Њ\\x03\\x02\\x02\\x02ᐯᐰ\\x07\\x80\\x02\\x02ᐰЌ\\x03\\x02')\n buf.write('\\x02\\x02ᐱᐲ\\x07#\\x02\\x02ᐲЎ\\x03\\x02\\x02\\x02ᐳᐴ')\n buf.write('\\x07@\\x02\\x02ᐴА\\x03\\x02\\x02\\x02ᐵᐶ\\x07A\\x02\\x02ᐶВ')\n buf.write('\\x03\\x02\\x02\\x02ᐷᐸ\\x07~\\x02\\x02ᐸᐹ\\x07~\\x02\\x02ᐹД')\n buf.write('\\x03\\x02\\x02\\x02ᐺᐻ\\x07~\\x02\\x02ᐻЖ\\x03\\x02\\x02\\x02ᐼ')\n buf.write('ᐽ\\x07?\\x02\\x02ᐽИ\\x03\\x02\\x02\\x02ᐾᐿ\\x07]\\x02\\x02ᐿ')\n buf.write('К\\x03\\x02\\x02\\x02ᑀᑁ\\x07_\\x02\\x02ᑁМ\\x03\\x02\\x02\\x02')\n 
buf.write('ᑂᑃ\\x07a\\x02\\x02ᑃО\\x03\\x02\\x02\\x02ᑄᑆ\\t')\n buf.write('\\t\\x02\\x02ᑅᑄ\\x03\\x02\\x02\\x02ᑆᑇ\\x03\\x02\\x02\\x02ᑇᑅ')\n buf.write('\\x03\\x02\\x02\\x02ᑇᑈ\\x03\\x02\\x02\\x02ᑈᑉ\\x03\\x02\\x02\\x02ᑉ')\n buf.write('ᑊ\\x08Ȑ\\x03\\x02ᑊР\\x03\\x02\\x02\\x02ᑋᑌ\\t\\n')\n buf.write('\\x02\\x02ᑌТ\\x03\\x02\\x02\\x02ᑍᑏ\\x042;\\x02ᑎᑍ')\n buf.write('\\x03\\x02\\x02\\x02ᑏᑐ\\x03\\x02\\x02\\x02ᑐᑎ\\x03\\x02\\x02\\x02ᑐ')\n buf.write('ᑑ\\x03\\x02\\x02\\x02ᑑФ\\x03\\x02\\x02\\x02ᑒᑔ\\x05ύ')\n buf.write('ǧ\\x02ᑓᑒ\\x03\\x02\\x02\\x02ᑔᑗ\\x03\\x02\\x02\\x02ᑕ')\n buf.write('ᑓ\\x03\\x02\\x02\\x02ᑕᑖ\\x03\\x02\\x02\\x02ᑖᑙ\\x03\\x02\\x02\\x02')\n buf.write('ᑗᑕ\\x03\\x02\\x02\\x02ᑘᑚ\\x070\\x02\\x02ᑙᑘ')\n buf.write('\\x03\\x02\\x02\\x02ᑙᑚ\\x03\\x02\\x02\\x02ᑚᑜ\\x03\\x02\\x02\\x02ᑛ')\n buf.write('ᑝ\\x05ύǧ\\x02ᑜᑛ\\x03\\x02\\x02\\x02ᑝᑞ')\n buf.write('\\x03\\x02\\x02\\x02ᑞᑜ\\x03\\x02\\x02\\x02ᑞᑟ\\x03\\x02\\x02\\x02ᑟ')\n buf.write('Ц\\x03\\x02\\x02\\x02ᑠᑡ\\x07/\\x02\\x02ᑡᑢ\\x07/\\x02\\x02ᑢ')\n buf.write('ᑦ\\x03\\x02\\x02\\x02ᑣᑥ\\n\\x0b\\x02\\x02ᑤᑣ\\x03\\x02\\x02')\n buf.write('\\x02ᑥᑨ\\x03\\x02\\x02\\x02ᑦᑤ\\x03\\x02\\x02\\x02ᑦᑧ')\n buf.write('\\x03\\x02\\x02\\x02ᑧᑫ\\x03\\x02\\x02\\x02ᑨᑦ\\x03\\x02\\x02\\x02ᑩ')\n buf.write('ᑬ\\x05Эȗ\\x02ᑪᑬ\\x07\\x02\\x02\\x03ᑫᑩ')\n buf.write('\\x03\\x02\\x02\\x02ᑫᑪ\\x03\\x02\\x02\\x02ᑬᑭ\\x03\\x02\\x02\\x02ᑭ')\n buf.write('ᑮ\\x08Ȕ\\x04\\x02ᑮШ\\x03\\x02\\x02\\x02ᑯᑰ\\x071')\n buf.write('\\x02\\x02ᑰᑱ\\x07,\\x02\\x02ᑱᑵ\\x03\\x02\\x02\\x02ᑲᑴ')\n buf.write('\\x0b\\x02\\x02\\x02ᑳᑲ\\x03\\x02\\x02\\x02ᑴᑷ\\x03\\x02\\x02\\x02ᑵ')\n buf.write('ᑶ\\x03\\x02\\x02\\x02ᑵᑳ\\x03\\x02\\x02\\x02ᑶᑸ\\x03\\x02\\x02\\x02')\n buf.write('ᑷᑵ\\x03\\x02\\x02\\x02ᑸᑹ\\x07,\\x02\\x02ᑹᑺ\\x07')\n buf.write('1\\x02\\x02ᑺᑻ\\x03\\x02\\x02\\x02ᑻᑼ\\x08ȕ\\x04\\x02ᑼ')\n buf.write('Ъ\\x03\\x02\\x02\\x02ᑽᑾ\\x07r\\x02\\x02ᑾᑿ\\x07t\\x02\\x02ᑿ')\n buf.write('ᒀ\\x07q\\x02\\x02ᒀᒁ\\x07o\\x02\\x02ᒁᒂ\\x07r\\x02\\x02ᒂ')\n buf.write('ᒃ\\x07v\\x02\\x02ᒃᒄ\\x03\\x02\\x02\\x02ᒄᒈ\\x05Я')\n 
buf.write('Ș\\x02ᒅᒇ\\n\\x0b\\x02\\x02ᒆᒅ\\x03\\x02\\x02\\x02ᒇ')\n buf.write('ᒊ\\x03\\x02\\x02\\x02ᒈᒆ\\x03\\x02\\x02\\x02ᒈᒉ\\x03\\x02\\x02\\x02')\n buf.write('ᒉᒍ\\x03\\x02\\x02\\x02ᒊᒈ\\x03\\x02\\x02\\x02ᒋᒎ\\x05')\n buf.write('Эȗ\\x02ᒌᒎ\\x07\\x02\\x02\\x03ᒍᒋ\\x03\\x02\\x02\\x02')\n buf.write('ᒍᒌ\\x03\\x02\\x02\\x02ᒎЬ\\x03\\x02\\x02\\x02ᒏᒑ\\x07')\n buf.write('\\x0f\\x02\\x02ᒐᒏ\\x03\\x02\\x02\\x02ᒐᒑ\\x03\\x02\\x02\\x02ᒑ')\n buf.write('ᒒ\\x03\\x02\\x02\\x02ᒒᒓ\\x07\\x0c\\x02\\x02ᒓЮ\\x03\\x02\\x02\\x02')\n buf.write('ᒔᒕ\\t\\x0c\\x02\\x02ᒕа\\x03\\x02\\x02\\x02ᒖᒛ\\x05')\n buf.write('Сȑ\\x02ᒗᒚ\\x05Сȑ\\x02ᒘᒚ')\n buf.write('\\t\\r\\x02\\x02ᒙᒗ\\x03\\x02\\x02\\x02ᒙᒘ\\x03\\x02\\x02\\x02ᒚ')\n buf.write('ᒝ\\x03\\x02\\x02\\x02ᒛᒙ\\x03\\x02\\x02\\x02ᒛᒜ\\x03\\x02\\x02\\x02')\n buf.write('ᒜв\\x03\\x02\\x02\\x02ᒝᒛ\\x03\\x02\\x02\\x02ᒞᒟ\\x07')\n buf.write('B\\x02\\x02ᒟᒠ\\x07#\\x02\\x02ᒠᒡ\\x03\\x02\\x02\\x02ᒡᒢ')\n buf.write('\\x08Ț\\x04\\x02ᒢд\\x03\\x02\\x02\\x02ᒣᒤ\\t\\x0e\\x02\\x02')\n buf.write('ᒤж\\x03\\x02\\x02\\x02ᒥᒦ\\t\\x0f\\x02\\x02ᒦи')\n buf.write('\\x03\\x02\\x02\\x02ᒧᒨ\\t\\x10\\x02\\x02ᒨк\\x03\\x02\\x02\\x02ᒩ')\n buf.write('ᒪ\\t\\x11\\x02\\x02ᒪм\\x03\\x02\\x02\\x02ᒫᒬ\\t\\x04\\x02')\n buf.write('\\x02ᒬо\\x03\\x02\\x02\\x02ᒭᒮ\\t\\x12\\x02\\x02ᒮр')\n buf.write('\\x03\\x02\\x02\\x02ᒯᒰ\\t\\x13\\x02\\x02ᒰт\\x03\\x02\\x02\\x02ᒱ')\n buf.write('ᒲ\\t\\x14\\x02\\x02ᒲф\\x03\\x02\\x02\\x02ᒳᒴ\\t\\x15\\x02')\n buf.write('\\x02ᒴц\\x03\\x02\\x02\\x02ᒵᒶ\\t\\x16\\x02\\x02ᒶш')\n buf.write('\\x03\\x02\\x02\\x02ᒷᒸ\\t\\x17\\x02\\x02ᒸъ\\x03\\x02\\x02\\x02ᒹ')\n buf.write('ᒺ\\t\\x18\\x02\\x02ᒺь\\x03\\x02\\x02\\x02ᒻᒼ\\t\\x19\\x02')\n buf.write('\\x02ᒼю\\x03\\x02\\x02\\x02ᒽᒾ\\t\\x1a\\x02\\x02ᒾѐ')\n buf.write('\\x03\\x02\\x02\\x02ᒿᓀ\\t\\x1b\\x02\\x02ᓀђ\\x03\\x02\\x02\\x02ᓁ')\n buf.write('ᓂ\\t\\x1c\\x02\\x02ᓂє\\x03\\x02\\x02\\x02ᓃᓄ\\t\\x1d\\x02')\n buf.write('\\x02ᓄі\\x03\\x02\\x02\\x02ᓅᓆ\\t\\x1e\\x02\\x02ᓆј')\n buf.write('\\x03\\x02\\x02\\x02ᓇᓈ\\t\\x1f\\x02\\x02ᓈњ\\x03\\x02\\x02\\x02ᓉ')\n buf.write('ᓊ\\t 
\\x02\\x02ᓊќ\\x03\\x02\\x02\\x02ᓋᓌ\\t!\\x02\\x02ᓌ')\n buf.write('ў\\x03\\x02\\x02\\x02ᓍᓎ\\t\"\\x02\\x02ᓎѠ\\x03\\x02\\x02\\x02')\n buf.write('ᓏᓐ\\t#\\x02\\x02ᓐѢ\\x03\\x02\\x02\\x02ᓑᓒ\\t')\n buf.write('$\\x02\\x02ᓒѤ\\x03\\x02\\x02\\x02ᓓᓔ\\t%\\x02\\x02ᓔѦ')\n buf.write(\"\\x03\\x02\\x02\\x02ᓕᓖ\\t&\\x02\\x02ᓖѨ\\x03\\x02\\x02\\x02'\\x02፧\")\n buf.write('፩፳፹ᎀᎆ᎒᎖᎘\\u139c')\n buf.write('ᎣᎥᎯᎺᏅᏐᏛᏧᏩ')\n buf.write('ᐋᐍᐕᐫᑇᑐᑕᑙᑞ')\n buf.write('ᑦᑫᑵᒈᒍᒐᒙᒛ\\x05\\tǪ')\n buf.write('\\x02\\x08\\x02\\x02\\x02\\x03\\x02')\n return buf.getvalue()\n\n\nclass PlSqlLexer(Lexer):\n atn = ATNDeserializer().deserialize(serializedATN())\n decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)]\n T__0 = 1\n A_LETTER = 2\n ADD = 3\n AFTER = 4\n AGENT = 5\n AGGREGATE = 6\n ALL = 7\n ALTER = 8\n ANALYZE = 9\n AND = 10\n ANY = 11\n ARRAY = 12\n AS = 13\n ASSUME = 14\n ASSERT = 15\n ASC = 16\n ASSOCIATE = 17\n AT = 18\n ATTRIBUTE = 19\n AUDIT = 20\n AUTHID = 21\n AUTO = 22\n AUTOMATIC = 23\n AUTONOMOUS_TRANSACTION = 24\n BATCH = 25\n BEFORE = 26\n BEGIN = 27\n BETWEEN = 28\n BFILE = 29\n BINARY_DOUBLE = 30\n BINARY_FLOAT = 31\n BINARY_INTEGER = 32\n BLOB = 33\n BLOCK = 34\n BODY = 35\n BOOLEAN = 36\n BOTH = 37\n BREADTH = 38\n BULK = 39\n BY = 40\n BYTE = 41\n C_LETTER = 42\n CACHE = 43\n CALL = 44\n CANONICAL = 45\n CASCADE = 46\n CASE = 47\n CAST = 48\n CHAR = 49\n CHAR_CS = 50\n CHARACTER = 51\n CHECK = 52\n CHR = 53\n CLOB = 54\n CLOSE = 55\n CLUSTER = 56\n COLLECT = 57\n COLUMNS = 58\n COMMENT = 59\n COMMIT = 60\n COMMITTED = 61\n COMPATIBILITY = 62\n COMPILE = 63\n COMPOUND = 64\n CONNECT = 65\n CONNECT_BY_ROOT = 66\n CONSTANT = 67\n CONSTRAINT = 68\n CONSTRAINTS = 69\n CONSTRUCTOR = 70\n CONTENT = 71\n CONTEXT = 72\n CONTINUE = 73\n CONVERT = 74\n CORRUPT_XID = 75\n CORRUPT_XID_ALL = 76\n COST = 77\n COUNT = 78\n CREATE = 79\n CROSS = 80\n CUBE = 81\n CURRENT = 82\n CURRENT_USER = 83\n CURSOR = 84\n CUSTOMDATUM = 85\n CYCLE = 86\n DATA = 87\n DATABASE = 88\n DATE = 89\n DAY = 
90\n DB_ROLE_CHANGE = 91\n DBTIMEZONE = 92\n DDL = 93\n DEBUG = 94\n DEC = 95\n DECIMAL = 96\n DECLARE = 97\n DECOMPOSE = 98\n DECREMENT = 99\n DEFAULT = 100\n DEFAULTS = 101\n DEFERRED = 102\n DEFINER = 103\n DELETE = 104\n DEPTH = 105\n DESC = 106\n DETERMINISTIC = 107\n DIMENSION = 108\n DISABLE = 109\n DISASSOCIATE = 110\n DISTINCT = 111\n DOCUMENT = 112\n DOUBLE = 113\n DROP = 114\n DSINTERVAL_UNCONSTRAINED = 115\n EACH = 116\n ELEMENT = 117\n ELSE = 118\n ELSIF = 119\n EMPTY = 120\n ENABLE = 121\n ENCODING = 122\n END = 123\n ENTITYESCAPING = 124\n ERR = 125\n ERRORS = 126\n ESCAPE = 127\n EVALNAME = 128\n EXCEPT = 129\n EXCEPTION = 130\n EXCEPTION_INIT = 131\n EXCEPTIONS = 132\n EXCLUDE = 133\n EXCLUSIVE = 134\n EXECUTE = 135\n EXISTS = 136\n EXIT = 137\n EXPLAIN = 138\n EXTERNAL = 139\n EXTRACT = 140\n FAILURE = 141\n FALSE = 142\n FETCH = 143\n FINAL = 144\n FIRST = 145\n FIRST_VALUE = 146\n FLOAT = 147\n FOLLOWING = 148\n FOLLOWS = 149\n FOR = 150\n FORALL = 151\n FORCE = 152\n FROM = 153\n FULL = 154\n FUNCTION = 155\n GOTO = 156\n GRANT = 157\n GROUP = 158\n GROUPING = 159\n HASH = 160\n HAVING = 161\n HIDE = 162\n HOUR = 163\n IF = 164\n IGNORE = 165\n IMMEDIATE = 166\n IN = 167\n INCLUDE = 168\n INCLUDING = 169\n INCREMENT = 170\n INDENT = 171\n INDEX = 172\n INDEXED = 173\n INDICATOR = 174\n INDICES = 175\n INFINITE = 176\n INLINE = 177\n INNER = 178\n INOUT = 179\n INSERT = 180\n INSTANTIABLE = 181\n INSTEAD = 182\n INT = 183\n INTEGER = 184\n INTERSECT = 185\n INTERVAL = 186\n INTO = 187\n INVALIDATE = 188\n IS = 189\n ISOLATION = 190\n ITERATE = 191\n JAVA = 192\n JOIN = 193\n KEEP = 194\n LANGUAGE = 195\n LAST = 196\n LAST_VALUE = 197\n LEADING = 198\n LEFT = 199\n LEVEL = 200\n LIBRARY = 201\n LIKE = 202\n LIKE2 = 203\n LIKE4 = 204\n LIKEC = 205\n LIMIT = 206\n LOCAL = 207\n LOCK = 208\n LOCKED = 209\n LOG = 210\n LOGOFF = 211\n LOGON = 212\n LONG = 213\n LOOP = 214\n MAIN = 215\n MAP = 216\n MATCHED = 217\n MAXVALUE = 218\n MEASURES = 219\n 
MEMBER = 220\n MERGE = 221\n MINUS = 222\n MINUTE = 223\n MINVALUE = 224\n MLSLABEL = 225\n MODE = 226\n MODEL = 227\n MODIFY = 228\n MONTH = 229\n MULTISET = 230\n NAME = 231\n NAN = 232\n NATURAL = 233\n NATURALN = 234\n NAV = 235\n NCHAR = 236\n NCHAR_CS = 237\n NCLOB = 238\n NESTED = 239\n NEW = 240\n NO = 241\n NOAUDIT = 242\n NOCACHE = 243\n NOCOPY = 244\n NOCYCLE = 245\n NOENTITYESCAPING = 246\n NOMAXVALUE = 247\n NOMINVALUE = 248\n NONE = 249\n NOORDER = 250\n NOSCHEMACHECK = 251\n NOT = 252\n NOWAIT = 253\n NULL = 254\n NULLS = 255\n NUMBER = 256\n NUMERIC = 257\n NVARCHAR2 = 258\n OBJECT = 259\n OF = 260\n OFF = 261\n OID = 262\n OLD = 263\n ON = 264\n ONLY = 265\n OPEN = 266\n OPTION = 267\n OR = 268\n ORADATA = 269\n ORDER = 270\n ORDINALITY = 271\n OSERROR = 272\n OUT = 273\n OUTER = 274\n OVER = 275\n OVERRIDING = 276\n PACKAGE = 277\n PARALLEL_ENABLE = 278\n PARAMETERS = 279\n PARENT = 280\n PARTITION = 281\n PASSING = 282\n PATH = 283\n PERCENT_ROWTYPE = 284\n PERCENT_TYPE = 285\n PIPELINED = 286\n PIVOT = 287\n PLAN = 288\n PLS_INTEGER = 289\n POSITIVE = 290\n POSITIVEN = 291\n PRAGMA = 292\n PRECEDING = 293\n PRECISION = 294\n PRESENT = 295\n PRIOR = 296\n PROCEDURE = 297\n RAISE = 298\n RANGE = 299\n RAW = 300\n READ = 301\n REAL = 302\n RECORD = 303\n REF = 304\n REFERENCE = 305\n REFERENCING = 306\n REJECT = 307\n RELIES_ON = 308\n RENAME = 309\n REPLACE = 310\n RESPECT = 311\n RESTRICT_REFERENCES = 312\n RESULT = 313\n RESULT_CACHE = 314\n RETURN = 315\n RETURNING = 316\n REUSE = 317\n REVERSE = 318\n REVOKE = 319\n RIGHT = 320\n ROLLBACK = 321\n ROLLUP = 322\n ROW = 323\n ROWID = 324\n ROWS = 325\n RULES = 326\n SAMPLE = 327\n SAVE = 328\n SAVEPOINT = 329\n SCHEMA = 330\n SCHEMACHECK = 331\n SCN = 332\n SEARCH = 333\n SECOND = 334\n SEED = 335\n SEGMENT = 336\n SELECT = 337\n SELF = 338\n SEQUENCE = 339\n SEQUENTIAL = 340\n SERIALIZABLE = 341\n SERIALLY_REUSABLE = 342\n SERVERERROR = 343\n SESSIONTIMEZONE = 344\n SET = 345\n SETS = 346\n 
SETTINGS = 347\n SHARE = 348\n SHOW = 349\n SHUTDOWN = 350\n SIBLINGS = 351\n SIGNTYPE = 352\n SIMPLE_INTEGER = 353\n SINGLE = 354\n SIZE = 355\n SKIP_ = 356\n SMALLINT = 357\n SNAPSHOT = 358\n SOME = 359\n SPECIFICATION = 360\n SQLDATA = 361\n SQLERROR = 362\n STANDALONE = 363\n START = 364\n STARTUP = 365\n STATEMENT = 366\n STATEMENT_ID = 367\n STATIC = 368\n STATISTICS = 369\n STRING = 370\n SUBMULTISET = 371\n SUBPARTITION = 372\n SUBSTITUTABLE = 373\n SUBTYPE = 374\n SUCCESS = 375\n SUSPEND = 376\n TABLE = 377\n THE = 378\n THEN = 379\n TIME = 380\n TIMESTAMP = 381\n TIMESTAMP_LTZ_UNCONSTRAINED = 382\n TIMESTAMP_TZ_UNCONSTRAINED = 383\n TIMESTAMP_UNCONSTRAINED = 384\n TIMEZONE_ABBR = 385\n TIMEZONE_HOUR = 386\n TIMEZONE_MINUTE = 387\n TIMEZONE_REGION = 388\n TO = 389\n TRAILING = 390\n TRANSACTION = 391\n TRANSLATE = 392\n TREAT = 393\n TRIGGER = 394\n TRIM = 395\n TRUE = 396\n TRUNCATE = 397\n TYPE = 398\n UNBOUNDED = 399\n UNDER = 400\n UNION = 401\n UNIQUE = 402\n UNLIMITED = 403\n UNPIVOT = 404\n UNTIL = 405\n UPDATE = 406\n UPDATED = 407\n UPSERT = 408\n UROWID = 409\n USE = 410\n USING = 411\n VALIDATE = 412\n VALUE = 413\n VALUES = 414\n VARCHAR = 415\n VARCHAR2 = 416\n VARIABLE = 417\n VARRAY = 418\n VARYING = 419\n VERSION = 420\n VERSIONS = 421\n WAIT = 422\n WARNING = 423\n WELLFORMED = 424\n WHEN = 425\n WHENEVER = 426\n WHERE = 427\n WHILE = 428\n WITH = 429\n WITHIN = 430\n WORK = 431\n WRITE = 432\n XML = 433\n XMLAGG = 434\n XMLATTRIBUTES = 435\n XMLCAST = 436\n XMLCOLATTVAL = 437\n XMLELEMENT = 438\n XMLEXISTS = 439\n XMLFOREST = 440\n XMLNAMESPACES = 441\n XMLPARSE = 442\n XMLPI = 443\n XMLQUERY = 444\n XMLROOT = 445\n XMLSERIALIZE = 446\n XMLTABLE = 447\n YEAR = 448\n YES = 449\n YMINTERVAL_UNCONSTRAINED = 450\n ZONE = 451\n PREDICTION = 452\n PREDICTION_BOUNDS = 453\n PREDICTION_COST = 454\n PREDICTION_DETAILS = 455\n PREDICTION_PROBABILITY = 456\n PREDICTION_SET = 457\n CUME_DIST = 458\n DENSE_RANK = 459\n LISTAGG = 460\n PERCENT_RANK = 
461\n PERCENTILE_CONT = 462\n PERCENTILE_DISC = 463\n RANK = 464\n AVG = 465\n CORR = 466\n LAG = 467\n LEAD = 468\n MAX = 469\n MEDIAN = 470\n MIN = 471\n NTILE = 472\n RATIO_TO_REPORT = 473\n ROW_NUMBER = 474\n SUM = 475\n VARIANCE = 476\n REGR_ = 477\n STDDEV = 478\n VAR_ = 479\n COVAR_ = 480\n NATIONAL_CHAR_STRING_LIT = 481\n BIT_STRING_LIT = 482\n HEX_STRING_LIT = 483\n DOUBLE_PERIOD = 484\n PERIOD = 485\n UNSIGNED_INTEGER = 486\n APPROXIMATE_NUM_LIT = 487\n CHAR_STRING = 488\n DELIMITED_ID = 489\n PERCENT = 490\n AMPERSAND = 491\n LEFT_PAREN = 492\n RIGHT_PAREN = 493\n DOUBLE_ASTERISK = 494\n ASTERISK = 495\n PLUS_SIGN = 496\n MINUS_SIGN = 497\n COMMA = 498\n SOLIDUS = 499\n AT_SIGN = 500\n ASSIGN_OP = 501\n BINDVAR = 502\n COLON = 503\n SEMICOLON = 504\n LESS_THAN_OR_EQUALS_OP = 505\n LESS_THAN_OP = 506\n GREATER_THAN_OR_EQUALS_OP = 507\n NOT_EQUAL_OP = 508\n CARRET_OPERATOR_PART = 509\n TILDE_OPERATOR_PART = 510\n EXCLAMATION_OPERATOR_PART = 511\n GREATER_THAN_OP = 512\n CONCATENATION_OP = 513\n VERTICAL_BAR = 514\n EQUALS_OP = 515\n LEFT_BRACKET = 516\n RIGHT_BRACKET = 517\n INTRODUCER = 518\n SPACES = 519\n SINGLE_LINE_COMMENT = 520\n MULTI_LINE_COMMENT = 521\n PROMPT = 522\n REGULAR_ID = 523\n ZV = 524\n channelNames = [u'DEFAULT_TOKEN_CHANNEL', u'HIDDEN']\n modeNames = ['DEFAULT_MODE']\n literalNames = ['<INVALID>', \"'..'\", \"'.'\", \"'%'\", \"'&'\", \"'('\", \"')'\",\n \"'**'\", \"'*'\", \"'+'\", \"'-'\", \"','\", \"'/'\", \"'@'\", \"':='\", \"':'\",\n \"';'\", \"'<='\", \"'<'\", \"'>='\", \"'^'\", \"'~'\", \"'!'\", \"'>'\", \"'||'\",\n \"'|'\", \"'='\", \"'['\", \"']'\", \"'_'\", \"'@!'\"]\n symbolicNames = ['<INVALID>', 'A_LETTER', 'ADD', 'AFTER', 'AGENT',\n 'AGGREGATE', 'ALL', 'ALTER', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS',\n 'ASSUME', 'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT',\n 'AUTHID', 'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH',\n 'BEFORE', 'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE',\n 'BINARY_FLOAT', 
'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY',\n 'BOOLEAN', 'BOTH', 'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER',\n 'CACHE', 'CALL', 'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR',\n 'CHAR_CS', 'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER',\n 'COLLECT', 'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED',\n 'COMPATIBILITY', 'COMPILE', 'COMPOUND', 'CONNECT',\n 'CONNECT_BY_ROOT', 'CONSTANT', 'CONSTRAINT', 'CONSTRAINTS',\n 'CONSTRUCTOR', 'CONTENT', 'CONTEXT', 'CONTINUE', 'CONVERT',\n 'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST', 'COUNT', 'CREATE',\n 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER', 'CURSOR', 'CUSTOMDATUM',\n 'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY', 'DB_ROLE_CHANGE',\n 'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL', 'DECLARE',\n 'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS', 'DEFERRED',\n 'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC', 'DIMENSION',\n 'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT', 'DOUBLE', 'DROP',\n 'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT', 'ELSE', 'ELSIF',\n 'EMPTY', 'ENABLE', 'ENCODING', 'END', 'ENTITYESCAPING', 'ERR',\n 'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT', 'EXCEPTION',\n 'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE', 'EXECUTE',\n 'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FAILURE',\n 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE', 'FLOAT',\n 'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM', 'FULL',\n 'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH', 'HAVING',\n 'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN', 'INCLUDE',\n 'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED', 'INDICATOR',\n 'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT', 'INSERT',\n 'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',\n 'INTERVAL', 'INTO', 'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',\n 'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',\n 'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',\n 'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',\n 'LONG', 
'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',\n 'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',\n 'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',\n 'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',\n 'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',\n 'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',\n 'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',\n 'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',\n 'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',\n 'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',\n 'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',\n 'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',\n 'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',\n 'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',\n 'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',\n 'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',\n 'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',\n 'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',\n 'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',\n 'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',\n 'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',\n 'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',\n 'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',\n 'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',\n 'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',\n 'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',\n 'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 'STRING',\n 'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',\n 'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',\n 'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',\n 'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 
'TIMEZONE_HOUR',\n 'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',\n 'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',\n 'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',\n 'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',\n 'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',\n 'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',\n 'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',\n 'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',\n 'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',\n 'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',\n 'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',\n 'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',\n 'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',\n 'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',\n 'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',\n 'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',\n 'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',\n 'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',\n 'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',\n 'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',\n 'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'DELIMITED_ID', 'PERCENT',\n 'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN', 'DOUBLE_ASTERISK',\n 'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA', 'SOLIDUS',\n 'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',\n 'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',\n 'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',\n 'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',\n 'GREATER_THAN_OP', 'CONCATENATION_OP', 'VERTICAL_BAR', 'EQUALS_OP',\n 'LEFT_BRACKET', 'RIGHT_BRACKET', 'INTRODUCER', 'SPACES',\n 'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'REGULAR_ID',\n 'ZV']\n ruleNames = ['T__0', 'A_LETTER', 'ADD', 'AFTER', 'AGENT', 'AGGREGATE',\n 'ALL', 'ALTER', 'ANALYZE', 
'AND', 'ANY', 'ARRAY', 'AS', 'ASSUME',\n 'ASSERT', 'ASC', 'ASSOCIATE', 'AT', 'ATTRIBUTE', 'AUDIT', 'AUTHID',\n 'AUTO', 'AUTOMATIC', 'AUTONOMOUS_TRANSACTION', 'BATCH', 'BEFORE',\n 'BEGIN', 'BETWEEN', 'BFILE', 'BINARY_DOUBLE', 'BINARY_FLOAT',\n 'BINARY_INTEGER', 'BLOB', 'BLOCK', 'BODY', 'BOOLEAN', 'BOTH',\n 'BREADTH', 'BULK', 'BY', 'BYTE', 'C_LETTER', 'CACHE', 'CALL',\n 'CANONICAL', 'CASCADE', 'CASE', 'CAST', 'CHAR', 'CHAR_CS',\n 'CHARACTER', 'CHECK', 'CHR', 'CLOB', 'CLOSE', 'CLUSTER', 'COLLECT',\n 'COLUMNS', 'COMMENT', 'COMMIT', 'COMMITTED', 'COMPATIBILITY',\n 'COMPILE', 'COMPOUND', 'CONNECT', 'CONNECT_BY_ROOT', 'CONSTANT',\n 'CONSTRAINT', 'CONSTRAINTS', 'CONSTRUCTOR', 'CONTENT', 'CONTEXT',\n 'CONTINUE', 'CONVERT', 'CORRUPT_XID', 'CORRUPT_XID_ALL', 'COST',\n 'COUNT', 'CREATE', 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_USER',\n 'CURSOR', 'CUSTOMDATUM', 'CYCLE', 'DATA', 'DATABASE', 'DATE', 'DAY',\n 'DB_ROLE_CHANGE', 'DBTIMEZONE', 'DDL', 'DEBUG', 'DEC', 'DECIMAL',\n 'DECLARE', 'DECOMPOSE', 'DECREMENT', 'DEFAULT', 'DEFAULTS',\n 'DEFERRED', 'DEFINER', 'DELETE', 'DEPTH', 'DESC', 'DETERMINISTIC',\n 'DIMENSION', 'DISABLE', 'DISASSOCIATE', 'DISTINCT', 'DOCUMENT',\n 'DOUBLE', 'DROP', 'DSINTERVAL_UNCONSTRAINED', 'EACH', 'ELEMENT',\n 'ELSE', 'ELSIF', 'EMPTY', 'ENABLE', 'ENCODING', 'END',\n 'ENTITYESCAPING', 'ERR', 'ERRORS', 'ESCAPE', 'EVALNAME', 'EXCEPT',\n 'EXCEPTION', 'EXCEPTION_INIT', 'EXCEPTIONS', 'EXCLUDE', 'EXCLUSIVE',\n 'EXECUTE', 'EXISTS', 'EXIT', 'EXPLAIN', 'EXTERNAL', 'EXTRACT',\n 'FAILURE', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FIRST_VALUE',\n 'FLOAT', 'FOLLOWING', 'FOLLOWS', 'FOR', 'FORALL', 'FORCE', 'FROM',\n 'FULL', 'FUNCTION', 'GOTO', 'GRANT', 'GROUP', 'GROUPING', 'HASH',\n 'HAVING', 'HIDE', 'HOUR', 'IF', 'IGNORE', 'IMMEDIATE', 'IN',\n 'INCLUDE', 'INCLUDING', 'INCREMENT', 'INDENT', 'INDEX', 'INDEXED',\n 'INDICATOR', 'INDICES', 'INFINITE', 'INLINE', 'INNER', 'INOUT',\n 'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INT', 'INTEGER', 'INTERSECT',\n 'INTERVAL', 'INTO', 
'INVALIDATE', 'IS', 'ISOLATION', 'ITERATE',\n 'JAVA', 'JOIN', 'KEEP', 'LANGUAGE', 'LAST', 'LAST_VALUE', 'LEADING',\n 'LEFT', 'LEVEL', 'LIBRARY', 'LIKE', 'LIKE2', 'LIKE4', 'LIKEC',\n 'LIMIT', 'LOCAL', 'LOCK', 'LOCKED', 'LOG', 'LOGOFF', 'LOGON',\n 'LONG', 'LOOP', 'MAIN', 'MAP', 'MATCHED', 'MAXVALUE', 'MEASURES',\n 'MEMBER', 'MERGE', 'MINUS', 'MINUTE', 'MINVALUE', 'MLSLABEL',\n 'MODE', 'MODEL', 'MODIFY', 'MONTH', 'MULTISET', 'NAME', 'NAN',\n 'NATURAL', 'NATURALN', 'NAV', 'NCHAR', 'NCHAR_CS', 'NCLOB',\n 'NESTED', 'NEW', 'NO', 'NOAUDIT', 'NOCACHE', 'NOCOPY', 'NOCYCLE',\n 'NOENTITYESCAPING', 'NOMAXVALUE', 'NOMINVALUE', 'NONE', 'NOORDER',\n 'NOSCHEMACHECK', 'NOT', 'NOWAIT', 'NULL', 'NULLS', 'NUMBER',\n 'NUMERIC', 'NVARCHAR2', 'OBJECT', 'OF', 'OFF', 'OID', 'OLD', 'ON',\n 'ONLY', 'OPEN', 'OPTION', 'OR', 'ORADATA', 'ORDER', 'ORDINALITY',\n 'OSERROR', 'OUT', 'OUTER', 'OVER', 'OVERRIDING', 'PACKAGE',\n 'PARALLEL_ENABLE', 'PARAMETERS', 'PARENT', 'PARTITION', 'PASSING',\n 'PATH', 'PERCENT_ROWTYPE', 'PERCENT_TYPE', 'PIPELINED', 'PIVOT',\n 'PLAN', 'PLS_INTEGER', 'POSITIVE', 'POSITIVEN', 'PRAGMA',\n 'PRECEDING', 'PRECISION', 'PRESENT', 'PRIOR', 'PROCEDURE', 'RAISE',\n 'RANGE', 'RAW', 'READ', 'REAL', 'RECORD', 'REF', 'REFERENCE',\n 'REFERENCING', 'REJECT', 'RELIES_ON', 'RENAME', 'REPLACE',\n 'RESPECT', 'RESTRICT_REFERENCES', 'RESULT', 'RESULT_CACHE',\n 'RETURN', 'RETURNING', 'REUSE', 'REVERSE', 'REVOKE', 'RIGHT',\n 'ROLLBACK', 'ROLLUP', 'ROW', 'ROWID', 'ROWS', 'RULES', 'SAMPLE',\n 'SAVE', 'SAVEPOINT', 'SCHEMA', 'SCHEMACHECK', 'SCN', 'SEARCH',\n 'SECOND', 'SEED', 'SEGMENT', 'SELECT', 'SELF', 'SEQUENCE',\n 'SEQUENTIAL', 'SERIALIZABLE', 'SERIALLY_REUSABLE', 'SERVERERROR',\n 'SESSIONTIMEZONE', 'SET', 'SETS', 'SETTINGS', 'SHARE', 'SHOW',\n 'SHUTDOWN', 'SIBLINGS', 'SIGNTYPE', 'SIMPLE_INTEGER', 'SINGLE',\n 'SIZE', 'SKIP_', 'SMALLINT', 'SNAPSHOT', 'SOME', 'SPECIFICATION',\n 'SQLDATA', 'SQLERROR', 'STANDALONE', 'START', 'STARTUP',\n 'STATEMENT', 'STATEMENT_ID', 'STATIC', 'STATISTICS', 
'STRING',\n 'SUBMULTISET', 'SUBPARTITION', 'SUBSTITUTABLE', 'SUBTYPE',\n 'SUCCESS', 'SUSPEND', 'TABLE', 'THE', 'THEN', 'TIME', 'TIMESTAMP',\n 'TIMESTAMP_LTZ_UNCONSTRAINED', 'TIMESTAMP_TZ_UNCONSTRAINED',\n 'TIMESTAMP_UNCONSTRAINED', 'TIMEZONE_ABBR', 'TIMEZONE_HOUR',\n 'TIMEZONE_MINUTE', 'TIMEZONE_REGION', 'TO', 'TRAILING',\n 'TRANSACTION', 'TRANSLATE', 'TREAT', 'TRIGGER', 'TRIM', 'TRUE',\n 'TRUNCATE', 'TYPE', 'UNBOUNDED', 'UNDER', 'UNION', 'UNIQUE',\n 'UNLIMITED', 'UNPIVOT', 'UNTIL', 'UPDATE', 'UPDATED', 'UPSERT',\n 'UROWID', 'USE', 'USING', 'VALIDATE', 'VALUE', 'VALUES', 'VARCHAR',\n 'VARCHAR2', 'VARIABLE', 'VARRAY', 'VARYING', 'VERSION', 'VERSIONS',\n 'WAIT', 'WARNING', 'WELLFORMED', 'WHEN', 'WHENEVER', 'WHERE',\n 'WHILE', 'WITH', 'WITHIN', 'WORK', 'WRITE', 'XML', 'XMLAGG',\n 'XMLATTRIBUTES', 'XMLCAST', 'XMLCOLATTVAL', 'XMLELEMENT',\n 'XMLEXISTS', 'XMLFOREST', 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI',\n 'XMLQUERY', 'XMLROOT', 'XMLSERIALIZE', 'XMLTABLE', 'YEAR', 'YES',\n 'YMINTERVAL_UNCONSTRAINED', 'ZONE', 'PREDICTION',\n 'PREDICTION_BOUNDS', 'PREDICTION_COST', 'PREDICTION_DETAILS',\n 'PREDICTION_PROBABILITY', 'PREDICTION_SET', 'CUME_DIST',\n 'DENSE_RANK', 'LISTAGG', 'PERCENT_RANK', 'PERCENTILE_CONT',\n 'PERCENTILE_DISC', 'RANK', 'AVG', 'CORR', 'LAG', 'LEAD', 'MAX',\n 'MEDIAN', 'MIN', 'NTILE', 'RATIO_TO_REPORT', 'ROW_NUMBER', 'SUM',\n 'VARIANCE', 'REGR_', 'STDDEV', 'VAR_', 'COVAR_',\n 'NATIONAL_CHAR_STRING_LIT', 'BIT_STRING_LIT', 'HEX_STRING_LIT',\n 'DOUBLE_PERIOD', 'PERIOD', 'UNSIGNED_INTEGER',\n 'APPROXIMATE_NUM_LIT', 'CHAR_STRING', 'CHAR_STRING_PERL', 'QUOTE',\n 'QS_ANGLE', 'QS_BRACE', 'QS_BRACK', 'QS_PAREN', 'QS_OTHER_CH',\n 'DELIMITED_ID', 'PERCENT', 'AMPERSAND', 'LEFT_PAREN', 'RIGHT_PAREN',\n 'DOUBLE_ASTERISK', 'ASTERISK', 'PLUS_SIGN', 'MINUS_SIGN', 'COMMA',\n 'SOLIDUS', 'AT_SIGN', 'ASSIGN_OP', 'BINDVAR', 'COLON', 'SEMICOLON',\n 'LESS_THAN_OR_EQUALS_OP', 'LESS_THAN_OP',\n 'GREATER_THAN_OR_EQUALS_OP', 'NOT_EQUAL_OP', 'CARRET_OPERATOR_PART',\n 
'TILDE_OPERATOR_PART', 'EXCLAMATION_OPERATOR_PART',\n 'GREATER_THAN_OP', 'QUESTION_MARK', 'CONCATENATION_OP',\n 'VERTICAL_BAR', 'EQUALS_OP', 'LEFT_BRACKET', 'RIGHT_BRACKET',\n 'INTRODUCER', 'SPACES', 'SIMPLE_LETTER',\n 'UNSIGNED_INTEGER_FRAGMENT', 'FLOAT_FRAGMENT',\n 'SINGLE_LINE_COMMENT', 'MULTI_LINE_COMMENT', 'PROMPT', 'NEWLINE',\n 'SPACE', 'REGULAR_ID', 'ZV', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',\n 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U',\n 'V', 'W', 'X', 'Y', 'Z']\n grammarFileName = 'PlSql.g4'\n\n def __init__(self, input=None, output: TextIO=sys.stdout):\n super().__init__(input, output)\n self.checkVersion('4.7.2')\n self._interp = LexerATNSimulator(self, self.atn, self.\n decisionsToDFA, PredictionContextCache())\n self._actions = None\n self._predicates = None\n",
"step-5": "# Generated from /home/mridul/PycharmProjects/BTP_2k18-19/PlSql.g4 by ANTLR 4.7.2\nfrom antlr4 import *\nfrom io import StringIO\nfrom typing.io import TextIO\nimport sys\n\n\n\ndef serializedATN():\n with StringIO() as buf:\n buf.write(\"\\3\\u608b\\ua72a\\u8133\\ub9ed\\u417c\\u3be7\\u7786\\u5964\\2\\u020e\")\n buf.write(\"\\u14d7\\b\\1\\4\\2\\t\\2\\4\\3\\t\\3\\4\\4\\t\\4\\4\\5\\t\\5\\4\\6\\t\\6\\4\\7\")\n buf.write(\"\\t\\7\\4\\b\\t\\b\\4\\t\\t\\t\\4\\n\\t\\n\\4\\13\\t\\13\\4\\f\\t\\f\\4\\r\\t\\r\")\n buf.write(\"\\4\\16\\t\\16\\4\\17\\t\\17\\4\\20\\t\\20\\4\\21\\t\\21\\4\\22\\t\\22\\4\\23\")\n buf.write(\"\\t\\23\\4\\24\\t\\24\\4\\25\\t\\25\\4\\26\\t\\26\\4\\27\\t\\27\\4\\30\\t\\30\")\n buf.write(\"\\4\\31\\t\\31\\4\\32\\t\\32\\4\\33\\t\\33\\4\\34\\t\\34\\4\\35\\t\\35\\4\\36\")\n buf.write(\"\\t\\36\\4\\37\\t\\37\\4 \\t \\4!\\t!\\4\\\"\\t\\\"\\4#\\t#\\4$\\t$\\4%\\t%\")\n buf.write(\"\\4&\\t&\\4\\'\\t\\'\\4(\\t(\\4)\\t)\\4*\\t*\\4+\\t+\\4,\\t,\\4-\\t-\\4.\")\n buf.write(\"\\t.\\4/\\t/\\4\\60\\t\\60\\4\\61\\t\\61\\4\\62\\t\\62\\4\\63\\t\\63\\4\\64\")\n buf.write(\"\\t\\64\\4\\65\\t\\65\\4\\66\\t\\66\\4\\67\\t\\67\\48\\t8\\49\\t9\\4:\\t:\")\n buf.write(\"\\4;\\t;\\4<\\t<\\4=\\t=\\4>\\t>\\4?\\t?\\4@\\t@\\4A\\tA\\4B\\tB\\4C\\t\")\n buf.write(\"C\\4D\\tD\\4E\\tE\\4F\\tF\\4G\\tG\\4H\\tH\\4I\\tI\\4J\\tJ\\4K\\tK\\4L\\t\")\n buf.write(\"L\\4M\\tM\\4N\\tN\\4O\\tO\\4P\\tP\\4Q\\tQ\\4R\\tR\\4S\\tS\\4T\\tT\\4U\\t\")\n buf.write(\"U\\4V\\tV\\4W\\tW\\4X\\tX\\4Y\\tY\\4Z\\tZ\\4[\\t[\\4\\\\\\t\\\\\\4]\\t]\\4\")\n buf.write(\"^\\t^\\4_\\t_\\4`\\t`\\4a\\ta\\4b\\tb\\4c\\tc\\4d\\td\\4e\\te\\4f\\tf\\4\")\n buf.write(\"g\\tg\\4h\\th\\4i\\ti\\4j\\tj\\4k\\tk\\4l\\tl\\4m\\tm\\4n\\tn\\4o\\to\\4\")\n buf.write(\"p\\tp\\4q\\tq\\4r\\tr\\4s\\ts\\4t\\tt\\4u\\tu\\4v\\tv\\4w\\tw\\4x\\tx\\4\")\n buf.write(\"y\\ty\\4z\\tz\\4{\\t{\\4|\\t|\\4}\\t}\\4~\\t~\\4\\177\\t\\177\\4\\u0080\")\n buf.write(\"\\t\\u0080\\4\\u0081\\t\\u0081\\4\\u0082\\t\\u0082\\4\\u0083\\t\\u0083\")\n 
buf.write(\"\\4\\u0084\\t\\u0084\\4\\u0085\\t\\u0085\\4\\u0086\\t\\u0086\\4\\u0087\")\n buf.write(\"\\t\\u0087\\4\\u0088\\t\\u0088\\4\\u0089\\t\\u0089\\4\\u008a\\t\\u008a\")\n buf.write(\"\\4\\u008b\\t\\u008b\\4\\u008c\\t\\u008c\\4\\u008d\\t\\u008d\\4\\u008e\")\n buf.write(\"\\t\\u008e\\4\\u008f\\t\\u008f\\4\\u0090\\t\\u0090\\4\\u0091\\t\\u0091\")\n buf.write(\"\\4\\u0092\\t\\u0092\\4\\u0093\\t\\u0093\\4\\u0094\\t\\u0094\\4\\u0095\")\n buf.write(\"\\t\\u0095\\4\\u0096\\t\\u0096\\4\\u0097\\t\\u0097\\4\\u0098\\t\\u0098\")\n buf.write(\"\\4\\u0099\\t\\u0099\\4\\u009a\\t\\u009a\\4\\u009b\\t\\u009b\\4\\u009c\")\n buf.write(\"\\t\\u009c\\4\\u009d\\t\\u009d\\4\\u009e\\t\\u009e\\4\\u009f\\t\\u009f\")\n buf.write(\"\\4\\u00a0\\t\\u00a0\\4\\u00a1\\t\\u00a1\\4\\u00a2\\t\\u00a2\\4\\u00a3\")\n buf.write(\"\\t\\u00a3\\4\\u00a4\\t\\u00a4\\4\\u00a5\\t\\u00a5\\4\\u00a6\\t\\u00a6\")\n buf.write(\"\\4\\u00a7\\t\\u00a7\\4\\u00a8\\t\\u00a8\\4\\u00a9\\t\\u00a9\\4\\u00aa\")\n buf.write(\"\\t\\u00aa\\4\\u00ab\\t\\u00ab\\4\\u00ac\\t\\u00ac\\4\\u00ad\\t\\u00ad\")\n buf.write(\"\\4\\u00ae\\t\\u00ae\\4\\u00af\\t\\u00af\\4\\u00b0\\t\\u00b0\\4\\u00b1\")\n buf.write(\"\\t\\u00b1\\4\\u00b2\\t\\u00b2\\4\\u00b3\\t\\u00b3\\4\\u00b4\\t\\u00b4\")\n buf.write(\"\\4\\u00b5\\t\\u00b5\\4\\u00b6\\t\\u00b6\\4\\u00b7\\t\\u00b7\\4\\u00b8\")\n buf.write(\"\\t\\u00b8\\4\\u00b9\\t\\u00b9\\4\\u00ba\\t\\u00ba\\4\\u00bb\\t\\u00bb\")\n buf.write(\"\\4\\u00bc\\t\\u00bc\\4\\u00bd\\t\\u00bd\\4\\u00be\\t\\u00be\\4\\u00bf\")\n buf.write(\"\\t\\u00bf\\4\\u00c0\\t\\u00c0\\4\\u00c1\\t\\u00c1\\4\\u00c2\\t\\u00c2\")\n buf.write(\"\\4\\u00c3\\t\\u00c3\\4\\u00c4\\t\\u00c4\\4\\u00c5\\t\\u00c5\\4\\u00c6\")\n buf.write(\"\\t\\u00c6\\4\\u00c7\\t\\u00c7\\4\\u00c8\\t\\u00c8\\4\\u00c9\\t\\u00c9\")\n buf.write(\"\\4\\u00ca\\t\\u00ca\\4\\u00cb\\t\\u00cb\\4\\u00cc\\t\\u00cc\\4\\u00cd\")\n buf.write(\"\\t\\u00cd\\4\\u00ce\\t\\u00ce\\4\\u00cf\\t\\u00cf\\4\\u00d0\\t\\u00d0\")\n 
buf.write(\"\\4\\u00d1\\t\\u00d1\\4\\u00d2\\t\\u00d2\\4\\u00d3\\t\\u00d3\\4\\u00d4\")\n buf.write(\"\\t\\u00d4\\4\\u00d5\\t\\u00d5\\4\\u00d6\\t\\u00d6\\4\\u00d7\\t\\u00d7\")\n buf.write(\"\\4\\u00d8\\t\\u00d8\\4\\u00d9\\t\\u00d9\\4\\u00da\\t\\u00da\\4\\u00db\")\n buf.write(\"\\t\\u00db\\4\\u00dc\\t\\u00dc\\4\\u00dd\\t\\u00dd\\4\\u00de\\t\\u00de\")\n buf.write(\"\\4\\u00df\\t\\u00df\\4\\u00e0\\t\\u00e0\\4\\u00e1\\t\\u00e1\\4\\u00e2\")\n buf.write(\"\\t\\u00e2\\4\\u00e3\\t\\u00e3\\4\\u00e4\\t\\u00e4\\4\\u00e5\\t\\u00e5\")\n buf.write(\"\\4\\u00e6\\t\\u00e6\\4\\u00e7\\t\\u00e7\\4\\u00e8\\t\\u00e8\\4\\u00e9\")\n buf.write(\"\\t\\u00e9\\4\\u00ea\\t\\u00ea\\4\\u00eb\\t\\u00eb\\4\\u00ec\\t\\u00ec\")\n buf.write(\"\\4\\u00ed\\t\\u00ed\\4\\u00ee\\t\\u00ee\\4\\u00ef\\t\\u00ef\\4\\u00f0\")\n buf.write(\"\\t\\u00f0\\4\\u00f1\\t\\u00f1\\4\\u00f2\\t\\u00f2\\4\\u00f3\\t\\u00f3\")\n buf.write(\"\\4\\u00f4\\t\\u00f4\\4\\u00f5\\t\\u00f5\\4\\u00f6\\t\\u00f6\\4\\u00f7\")\n buf.write(\"\\t\\u00f7\\4\\u00f8\\t\\u00f8\\4\\u00f9\\t\\u00f9\\4\\u00fa\\t\\u00fa\")\n buf.write(\"\\4\\u00fb\\t\\u00fb\\4\\u00fc\\t\\u00fc\\4\\u00fd\\t\\u00fd\\4\\u00fe\")\n buf.write(\"\\t\\u00fe\\4\\u00ff\\t\\u00ff\\4\\u0100\\t\\u0100\\4\\u0101\\t\\u0101\")\n buf.write(\"\\4\\u0102\\t\\u0102\\4\\u0103\\t\\u0103\\4\\u0104\\t\\u0104\\4\\u0105\")\n buf.write(\"\\t\\u0105\\4\\u0106\\t\\u0106\\4\\u0107\\t\\u0107\\4\\u0108\\t\\u0108\")\n buf.write(\"\\4\\u0109\\t\\u0109\\4\\u010a\\t\\u010a\\4\\u010b\\t\\u010b\\4\\u010c\")\n buf.write(\"\\t\\u010c\\4\\u010d\\t\\u010d\\4\\u010e\\t\\u010e\\4\\u010f\\t\\u010f\")\n buf.write(\"\\4\\u0110\\t\\u0110\\4\\u0111\\t\\u0111\\4\\u0112\\t\\u0112\\4\\u0113\")\n buf.write(\"\\t\\u0113\\4\\u0114\\t\\u0114\\4\\u0115\\t\\u0115\\4\\u0116\\t\\u0116\")\n buf.write(\"\\4\\u0117\\t\\u0117\\4\\u0118\\t\\u0118\\4\\u0119\\t\\u0119\\4\\u011a\")\n buf.write(\"\\t\\u011a\\4\\u011b\\t\\u011b\\4\\u011c\\t\\u011c\\4\\u011d\\t\\u011d\")\n 
buf.write(\"\\4\\u011e\\t\\u011e\\4\\u011f\\t\\u011f\\4\\u0120\\t\\u0120\\4\\u0121\")\n buf.write(\"\\t\\u0121\\4\\u0122\\t\\u0122\\4\\u0123\\t\\u0123\\4\\u0124\\t\\u0124\")\n buf.write(\"\\4\\u0125\\t\\u0125\\4\\u0126\\t\\u0126\\4\\u0127\\t\\u0127\\4\\u0128\")\n buf.write(\"\\t\\u0128\\4\\u0129\\t\\u0129\\4\\u012a\\t\\u012a\\4\\u012b\\t\\u012b\")\n buf.write(\"\\4\\u012c\\t\\u012c\\4\\u012d\\t\\u012d\\4\\u012e\\t\\u012e\\4\\u012f\")\n buf.write(\"\\t\\u012f\\4\\u0130\\t\\u0130\\4\\u0131\\t\\u0131\\4\\u0132\\t\\u0132\")\n buf.write(\"\\4\\u0133\\t\\u0133\\4\\u0134\\t\\u0134\\4\\u0135\\t\\u0135\\4\\u0136\")\n buf.write(\"\\t\\u0136\\4\\u0137\\t\\u0137\\4\\u0138\\t\\u0138\\4\\u0139\\t\\u0139\")\n buf.write(\"\\4\\u013a\\t\\u013a\\4\\u013b\\t\\u013b\\4\\u013c\\t\\u013c\\4\\u013d\")\n buf.write(\"\\t\\u013d\\4\\u013e\\t\\u013e\\4\\u013f\\t\\u013f\\4\\u0140\\t\\u0140\")\n buf.write(\"\\4\\u0141\\t\\u0141\\4\\u0142\\t\\u0142\\4\\u0143\\t\\u0143\\4\\u0144\")\n buf.write(\"\\t\\u0144\\4\\u0145\\t\\u0145\\4\\u0146\\t\\u0146\\4\\u0147\\t\\u0147\")\n buf.write(\"\\4\\u0148\\t\\u0148\\4\\u0149\\t\\u0149\\4\\u014a\\t\\u014a\\4\\u014b\")\n buf.write(\"\\t\\u014b\\4\\u014c\\t\\u014c\\4\\u014d\\t\\u014d\\4\\u014e\\t\\u014e\")\n buf.write(\"\\4\\u014f\\t\\u014f\\4\\u0150\\t\\u0150\\4\\u0151\\t\\u0151\\4\\u0152\")\n buf.write(\"\\t\\u0152\\4\\u0153\\t\\u0153\\4\\u0154\\t\\u0154\\4\\u0155\\t\\u0155\")\n buf.write(\"\\4\\u0156\\t\\u0156\\4\\u0157\\t\\u0157\\4\\u0158\\t\\u0158\\4\\u0159\")\n buf.write(\"\\t\\u0159\\4\\u015a\\t\\u015a\\4\\u015b\\t\\u015b\\4\\u015c\\t\\u015c\")\n buf.write(\"\\4\\u015d\\t\\u015d\\4\\u015e\\t\\u015e\\4\\u015f\\t\\u015f\\4\\u0160\")\n buf.write(\"\\t\\u0160\\4\\u0161\\t\\u0161\\4\\u0162\\t\\u0162\\4\\u0163\\t\\u0163\")\n buf.write(\"\\4\\u0164\\t\\u0164\\4\\u0165\\t\\u0165\\4\\u0166\\t\\u0166\\4\\u0167\")\n buf.write(\"\\t\\u0167\\4\\u0168\\t\\u0168\\4\\u0169\\t\\u0169\\4\\u016a\\t\\u016a\")\n 
buf.write(\"\\4\\u016b\\t\\u016b\\4\\u016c\\t\\u016c\\4\\u016d\\t\\u016d\\4\\u016e\")\n buf.write(\"\\t\\u016e\\4\\u016f\\t\\u016f\\4\\u0170\\t\\u0170\\4\\u0171\\t\\u0171\")\n buf.write(\"\\4\\u0172\\t\\u0172\\4\\u0173\\t\\u0173\\4\\u0174\\t\\u0174\\4\\u0175\")\n buf.write(\"\\t\\u0175\\4\\u0176\\t\\u0176\\4\\u0177\\t\\u0177\\4\\u0178\\t\\u0178\")\n buf.write(\"\\4\\u0179\\t\\u0179\\4\\u017a\\t\\u017a\\4\\u017b\\t\\u017b\\4\\u017c\")\n buf.write(\"\\t\\u017c\\4\\u017d\\t\\u017d\\4\\u017e\\t\\u017e\\4\\u017f\\t\\u017f\")\n buf.write(\"\\4\\u0180\\t\\u0180\\4\\u0181\\t\\u0181\\4\\u0182\\t\\u0182\\4\\u0183\")\n buf.write(\"\\t\\u0183\\4\\u0184\\t\\u0184\\4\\u0185\\t\\u0185\\4\\u0186\\t\\u0186\")\n buf.write(\"\\4\\u0187\\t\\u0187\\4\\u0188\\t\\u0188\\4\\u0189\\t\\u0189\\4\\u018a\")\n buf.write(\"\\t\\u018a\\4\\u018b\\t\\u018b\\4\\u018c\\t\\u018c\\4\\u018d\\t\\u018d\")\n buf.write(\"\\4\\u018e\\t\\u018e\\4\\u018f\\t\\u018f\\4\\u0190\\t\\u0190\\4\\u0191\")\n buf.write(\"\\t\\u0191\\4\\u0192\\t\\u0192\\4\\u0193\\t\\u0193\\4\\u0194\\t\\u0194\")\n buf.write(\"\\4\\u0195\\t\\u0195\\4\\u0196\\t\\u0196\\4\\u0197\\t\\u0197\\4\\u0198\")\n buf.write(\"\\t\\u0198\\4\\u0199\\t\\u0199\\4\\u019a\\t\\u019a\\4\\u019b\\t\\u019b\")\n buf.write(\"\\4\\u019c\\t\\u019c\\4\\u019d\\t\\u019d\\4\\u019e\\t\\u019e\\4\\u019f\")\n buf.write(\"\\t\\u019f\\4\\u01a0\\t\\u01a0\\4\\u01a1\\t\\u01a1\\4\\u01a2\\t\\u01a2\")\n buf.write(\"\\4\\u01a3\\t\\u01a3\\4\\u01a4\\t\\u01a4\\4\\u01a5\\t\\u01a5\\4\\u01a6\")\n buf.write(\"\\t\\u01a6\\4\\u01a7\\t\\u01a7\\4\\u01a8\\t\\u01a8\\4\\u01a9\\t\\u01a9\")\n buf.write(\"\\4\\u01aa\\t\\u01aa\\4\\u01ab\\t\\u01ab\\4\\u01ac\\t\\u01ac\\4\\u01ad\")\n buf.write(\"\\t\\u01ad\\4\\u01ae\\t\\u01ae\\4\\u01af\\t\\u01af\\4\\u01b0\\t\\u01b0\")\n buf.write(\"\\4\\u01b1\\t\\u01b1\\4\\u01b2\\t\\u01b2\\4\\u01b3\\t\\u01b3\\4\\u01b4\")\n buf.write(\"\\t\\u01b4\\4\\u01b5\\t\\u01b5\\4\\u01b6\\t\\u01b6\\4\\u01b7\\t\\u01b7\")\n 
buf.write(\"\\4\\u01b8\\t\\u01b8\\4\\u01b9\\t\\u01b9\\4\\u01ba\\t\\u01ba\\4\\u01bb\")\n buf.write(\"\\t\\u01bb\\4\\u01bc\\t\\u01bc\\4\\u01bd\\t\\u01bd\\4\\u01be\\t\\u01be\")\n buf.write(\"\\4\\u01bf\\t\\u01bf\\4\\u01c0\\t\\u01c0\\4\\u01c1\\t\\u01c1\\4\\u01c2\")\n buf.write(\"\\t\\u01c2\\4\\u01c3\\t\\u01c3\\4\\u01c4\\t\\u01c4\\4\\u01c5\\t\\u01c5\")\n buf.write(\"\\4\\u01c6\\t\\u01c6\\4\\u01c7\\t\\u01c7\\4\\u01c8\\t\\u01c8\\4\\u01c9\")\n buf.write(\"\\t\\u01c9\\4\\u01ca\\t\\u01ca\\4\\u01cb\\t\\u01cb\\4\\u01cc\\t\\u01cc\")\n buf.write(\"\\4\\u01cd\\t\\u01cd\\4\\u01ce\\t\\u01ce\\4\\u01cf\\t\\u01cf\\4\\u01d0\")\n buf.write(\"\\t\\u01d0\\4\\u01d1\\t\\u01d1\\4\\u01d2\\t\\u01d2\\4\\u01d3\\t\\u01d3\")\n buf.write(\"\\4\\u01d4\\t\\u01d4\\4\\u01d5\\t\\u01d5\\4\\u01d6\\t\\u01d6\\4\\u01d7\")\n buf.write(\"\\t\\u01d7\\4\\u01d8\\t\\u01d8\\4\\u01d9\\t\\u01d9\\4\\u01da\\t\\u01da\")\n buf.write(\"\\4\\u01db\\t\\u01db\\4\\u01dc\\t\\u01dc\\4\\u01dd\\t\\u01dd\\4\\u01de\")\n buf.write(\"\\t\\u01de\\4\\u01df\\t\\u01df\\4\\u01e0\\t\\u01e0\\4\\u01e1\\t\\u01e1\")\n buf.write(\"\\4\\u01e2\\t\\u01e2\\4\\u01e3\\t\\u01e3\\4\\u01e4\\t\\u01e4\\4\\u01e5\")\n buf.write(\"\\t\\u01e5\\4\\u01e6\\t\\u01e6\\4\\u01e7\\t\\u01e7\\4\\u01e8\\t\\u01e8\")\n buf.write(\"\\4\\u01e9\\t\\u01e9\\4\\u01ea\\t\\u01ea\\4\\u01eb\\t\\u01eb\\4\\u01ec\")\n buf.write(\"\\t\\u01ec\\4\\u01ed\\t\\u01ed\\4\\u01ee\\t\\u01ee\\4\\u01ef\\t\\u01ef\")\n buf.write(\"\\4\\u01f0\\t\\u01f0\\4\\u01f1\\t\\u01f1\\4\\u01f2\\t\\u01f2\\4\\u01f3\")\n buf.write(\"\\t\\u01f3\\4\\u01f4\\t\\u01f4\\4\\u01f5\\t\\u01f5\\4\\u01f6\\t\\u01f6\")\n buf.write(\"\\4\\u01f7\\t\\u01f7\\4\\u01f8\\t\\u01f8\\4\\u01f9\\t\\u01f9\\4\\u01fa\")\n buf.write(\"\\t\\u01fa\\4\\u01fb\\t\\u01fb\\4\\u01fc\\t\\u01fc\\4\\u01fd\\t\\u01fd\")\n buf.write(\"\\4\\u01fe\\t\\u01fe\\4\\u01ff\\t\\u01ff\\4\\u0200\\t\\u0200\\4\\u0201\")\n buf.write(\"\\t\\u0201\\4\\u0202\\t\\u0202\\4\\u0203\\t\\u0203\\4\\u0204\\t\\u0204\")\n 
buf.write(\"\\4\\u0205\\t\\u0205\\4\\u0206\\t\\u0206\\4\\u0207\\t\\u0207\\4\\u0208\")\n buf.write(\"\\t\\u0208\\4\\u0209\\t\\u0209\\4\\u020a\\t\\u020a\\4\\u020b\\t\\u020b\")\n buf.write(\"\\4\\u020c\\t\\u020c\\4\\u020d\\t\\u020d\\4\\u020e\\t\\u020e\\4\\u020f\")\n buf.write(\"\\t\\u020f\\4\\u0210\\t\\u0210\\4\\u0211\\t\\u0211\\4\\u0212\\t\\u0212\")\n buf.write(\"\\4\\u0213\\t\\u0213\\4\\u0214\\t\\u0214\\4\\u0215\\t\\u0215\\4\\u0216\")\n buf.write(\"\\t\\u0216\\4\\u0217\\t\\u0217\\4\\u0218\\t\\u0218\\4\\u0219\\t\\u0219\")\n buf.write(\"\\4\\u021a\\t\\u021a\\4\\u021b\\t\\u021b\\4\\u021c\\t\\u021c\\4\\u021d\")\n buf.write(\"\\t\\u021d\\4\\u021e\\t\\u021e\\4\\u021f\\t\\u021f\\4\\u0220\\t\\u0220\")\n buf.write(\"\\4\\u0221\\t\\u0221\\4\\u0222\\t\\u0222\\4\\u0223\\t\\u0223\\4\\u0224\")\n buf.write(\"\\t\\u0224\\4\\u0225\\t\\u0225\\4\\u0226\\t\\u0226\\4\\u0227\\t\\u0227\")\n buf.write(\"\\4\\u0228\\t\\u0228\\4\\u0229\\t\\u0229\\4\\u022a\\t\\u022a\\4\\u022b\")\n buf.write(\"\\t\\u022b\\4\\u022c\\t\\u022c\\4\\u022d\\t\\u022d\\4\\u022e\\t\\u022e\")\n buf.write(\"\\4\\u022f\\t\\u022f\\4\\u0230\\t\\u0230\\4\\u0231\\t\\u0231\\4\\u0232\")\n buf.write(\"\\t\\u0232\\4\\u0233\\t\\u0233\\4\\u0234\\t\\u0234\\3\\2\\3\\2\\3\\2\\3\")\n buf.write(\"\\3\\3\\3\\3\\4\\3\\4\\3\\4\\3\\4\\3\\5\\3\\5\\3\\5\\3\\5\\3\\5\\3\\5\\3\\6\\3\\6\")\n buf.write(\"\\3\\6\\3\\6\\3\\6\\3\\6\\3\\7\\3\\7\\3\\7\\3\\7\\3\\7\\3\\7\\3\\7\\3\\7\\3\\7\\3\")\n buf.write(\"\\7\\3\\b\\3\\b\\3\\b\\3\\b\\3\\t\\3\\t\\3\\t\\3\\t\\3\\t\\3\\t\\3\\n\\3\\n\\3\\n\")\n buf.write(\"\\3\\n\\3\\n\\3\\n\\3\\n\\3\\n\\3\\13\\3\\13\\3\\13\\3\\13\\3\\f\\3\\f\\3\\f\\3\")\n buf.write(\"\\f\\3\\r\\3\\r\\3\\r\\3\\r\\3\\r\\3\\r\\3\\16\\3\\16\\3\\16\\3\\17\\3\\17\\3\")\n buf.write(\"\\17\\3\\17\\3\\17\\3\\17\\3\\17\\3\\20\\3\\20\\3\\20\\3\\20\\3\\20\\3\\20\")\n buf.write(\"\\3\\20\\3\\21\\3\\21\\3\\21\\3\\21\\3\\22\\3\\22\\3\\22\\3\\22\\3\\22\\3\\22\")\n 
buf.write(\"\\3\\22\\3\\22\\3\\22\\3\\22\\3\\23\\3\\23\\3\\23\\3\\24\\3\\24\\3\\24\\3\\24\")\n buf.write(\"\\3\\24\\3\\24\\3\\24\\3\\24\\3\\24\\3\\24\\3\\25\\3\\25\\3\\25\\3\\25\\3\\25\")\n buf.write(\"\\3\\25\\3\\26\\3\\26\\3\\26\\3\\26\\3\\26\\3\\26\\3\\26\\3\\27\\3\\27\\3\\27\")\n buf.write(\"\\3\\27\\3\\27\\3\\30\\3\\30\\3\\30\\3\\30\\3\\30\\3\\30\\3\\30\\3\\30\\3\\30\")\n buf.write(\"\\3\\30\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\")\n buf.write(\"\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\\3\\31\")\n buf.write(\"\\3\\31\\3\\31\\3\\32\\3\\32\\3\\32\\3\\32\\3\\32\\3\\32\\3\\33\\3\\33\\3\\33\")\n buf.write(\"\\3\\33\\3\\33\\3\\33\\3\\33\\3\\34\\3\\34\\3\\34\\3\\34\\3\\34\\3\\34\\3\\35\")\n buf.write(\"\\3\\35\\3\\35\\3\\35\\3\\35\\3\\35\\3\\35\\3\\35\\3\\36\\3\\36\\3\\36\\3\\36\")\n buf.write(\"\\3\\36\\3\\36\\3\\37\\3\\37\\3\\37\\3\\37\\3\\37\\3\\37\\3\\37\\3\\37\\3\\37\")\n buf.write(\"\\3\\37\\3\\37\\3\\37\\3\\37\\3\\37\\3 \\3 \\3 \\3 \\3 \\3 \\3 \\3 \\3 \\3\")\n buf.write(\" \\3 \\3 \\3 \\3!\\3!\\3!\\3!\\3!\\3!\\3!\\3!\\3!\\3!\\3!\\3!\\3!\\3!\\3\")\n buf.write(\"!\\3\\\"\\3\\\"\\3\\\"\\3\\\"\\3\\\"\\3#\\3#\\3#\\3#\\3#\\3#\\3$\\3$\\3$\\3$\\3\")\n buf.write(\"$\\3%\\3%\\3%\\3%\\3%\\3%\\3%\\3%\\3&\\3&\\3&\\3&\\3&\\3\\'\\3\\'\\3\\'\\3\")\n buf.write(\"\\'\\3\\'\\3\\'\\3\\'\\3\\'\\3(\\3(\\3(\\3(\\3(\\3)\\3)\\3)\\3*\\3*\\3*\\3\")\n buf.write(\"*\\3*\\3+\\3+\\3,\\3,\\3,\\3,\\3,\\3,\\3-\\3-\\3-\\3-\\3-\\3.\\3.\\3.\\3\")\n buf.write(\".\\3.\\3.\\3.\\3.\\3.\\3.\\3/\\3/\\3/\\3/\\3/\\3/\\3/\\3/\\3\\60\\3\\60\")\n buf.write(\"\\3\\60\\3\\60\\3\\60\\3\\61\\3\\61\\3\\61\\3\\61\\3\\61\\3\\62\\3\\62\\3\\62\")\n buf.write(\"\\3\\62\\3\\62\\3\\63\\3\\63\\3\\63\\3\\63\\3\\63\\3\\63\\3\\63\\3\\63\\3\\64\")\n buf.write(\"\\3\\64\\3\\64\\3\\64\\3\\64\\3\\64\\3\\64\\3\\64\\3\\64\\3\\64\\3\\65\\3\\65\")\n buf.write(\"\\3\\65\\3\\65\\3\\65\\3\\65\\3\\66\\3\\66\\3\\66\\3\\66\\3\\67\\3\\67\\3\\67\")\n 
buf.write(\"\\3\\67\\3\\67\\38\\38\\38\\38\\38\\38\\39\\39\\39\\39\\39\\39\\39\\39\\3\")\n buf.write(\":\\3:\\3:\\3:\\3:\\3:\\3:\\3:\\3;\\3;\\3;\\3;\\3;\\3;\\3;\\3;\\3<\\3<\\3\")\n buf.write(\"<\\3<\\3<\\3<\\3<\\3<\\3=\\3=\\3=\\3=\\3=\\3=\\3=\\3>\\3>\\3>\\3>\\3>\\3\")\n buf.write(\">\\3>\\3>\\3>\\3>\\3?\\3?\\3?\\3?\\3?\\3?\\3?\\3?\\3?\\3?\\3?\\3?\\3?\\3\")\n buf.write(\"?\\3@\\3@\\3@\\3@\\3@\\3@\\3@\\3@\\3A\\3A\\3A\\3A\\3A\\3A\\3A\\3A\\3A\\3\")\n buf.write(\"B\\3B\\3B\\3B\\3B\\3B\\3B\\3B\\3C\\3C\\3C\\3C\\3C\\3C\\3C\\3C\\3C\\3C\\3\")\n buf.write(\"C\\3C\\3C\\3C\\3C\\3C\\3D\\3D\\3D\\3D\\3D\\3D\\3D\\3D\\3D\\3E\\3E\\3E\\3\")\n buf.write(\"E\\3E\\3E\\3E\\3E\\3E\\3E\\3E\\3F\\3F\\3F\\3F\\3F\\3F\\3F\\3F\\3F\\3F\\3\")\n buf.write(\"F\\3F\\3G\\3G\\3G\\3G\\3G\\3G\\3G\\3G\\3G\\3G\\3G\\3G\\3H\\3H\\3H\\3H\\3\")\n buf.write(\"H\\3H\\3H\\3H\\3I\\3I\\3I\\3I\\3I\\3I\\3I\\3I\\3J\\3J\\3J\\3J\\3J\\3J\\3\")\n buf.write(\"J\\3J\\3J\\3K\\3K\\3K\\3K\\3K\\3K\\3K\\3K\\3L\\3L\\3L\\3L\\3L\\3L\\3L\\3\")\n buf.write(\"L\\3L\\3L\\3L\\3L\\3M\\3M\\3M\\3M\\3M\\3M\\3M\\3M\\3M\\3M\\3M\\3M\\3M\\3\")\n buf.write(\"M\\3M\\3M\\3N\\3N\\3N\\3N\\3N\\3O\\3O\\3O\\3O\\3O\\3O\\3P\\3P\\3P\\3P\\3\")\n buf.write(\"P\\3P\\3P\\3Q\\3Q\\3Q\\3Q\\3Q\\3Q\\3R\\3R\\3R\\3R\\3R\\3S\\3S\\3S\\3S\\3\")\n buf.write(\"S\\3S\\3S\\3S\\3T\\3T\\3T\\3T\\3T\\3T\\3T\\3T\\3T\\3T\\3T\\3T\\3T\\3U\\3\")\n buf.write(\"U\\3U\\3U\\3U\\3U\\3U\\3V\\3V\\3V\\3V\\3V\\3V\\3V\\3V\\3V\\3V\\3V\\3V\\3\")\n buf.write(\"W\\3W\\3W\\3W\\3W\\3W\\3X\\3X\\3X\\3X\\3X\\3Y\\3Y\\3Y\\3Y\\3Y\\3Y\\3Y\\3\")\n buf.write(\"Y\\3Y\\3Z\\3Z\\3Z\\3Z\\3Z\\3[\\3[\\3[\\3[\\3\\\\\\3\\\\\\3\\\\\\3\\\\\\3\\\\\\3\")\n buf.write(\"\\\\\\3\\\\\\3\\\\\\3\\\\\\3\\\\\\3\\\\\\3\\\\\\3\\\\\\3\\\\\\3\\\\\\3]\\3]\\3]\\3]\\3]\")\n buf.write(\"\\3]\\3]\\3]\\3]\\3]\\3]\\3^\\3^\\3^\\3^\\3_\\3_\\3_\\3_\\3_\\3_\\3`\\3\")\n buf.write(\"`\\3`\\3`\\3a\\3a\\3a\\3a\\3a\\3a\\3a\\3a\\3b\\3b\\3b\\3b\\3b\\3b\\3b\\3\")\n 
buf.write(\"b\\3c\\3c\\3c\\3c\\3c\\3c\\3c\\3c\\3c\\3c\\3d\\3d\\3d\\3d\\3d\\3d\\3d\\3\")\n buf.write(\"d\\3d\\3d\\3e\\3e\\3e\\3e\\3e\\3e\\3e\\3e\\3f\\3f\\3f\\3f\\3f\\3f\\3f\\3\")\n buf.write(\"f\\3f\\3g\\3g\\3g\\3g\\3g\\3g\\3g\\3g\\3g\\3h\\3h\\3h\\3h\\3h\\3h\\3h\\3\")\n buf.write(\"h\\3i\\3i\\3i\\3i\\3i\\3i\\3i\\3j\\3j\\3j\\3j\\3j\\3j\\3k\\3k\\3k\\3k\\3\")\n buf.write(\"k\\3l\\3l\\3l\\3l\\3l\\3l\\3l\\3l\\3l\\3l\\3l\\3l\\3l\\3l\\3m\\3m\\3m\\3\")\n buf.write(\"m\\3m\\3m\\3m\\3m\\3m\\3m\\3n\\3n\\3n\\3n\\3n\\3n\\3n\\3n\\3o\\3o\\3o\\3\")\n buf.write(\"o\\3o\\3o\\3o\\3o\\3o\\3o\\3o\\3o\\3o\\3p\\3p\\3p\\3p\\3p\\3p\\3p\\3p\\3\")\n buf.write(\"p\\3q\\3q\\3q\\3q\\3q\\3q\\3q\\3q\\3q\\3r\\3r\\3r\\3r\\3r\\3r\\3r\\3s\\3\")\n buf.write(\"s\\3s\\3s\\3s\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3\")\n buf.write(\"t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3t\\3u\\3u\\3u\\3u\\3u\\3v\\3v\\3\")\n buf.write(\"v\\3v\\3v\\3v\\3v\\3v\\3w\\3w\\3w\\3w\\3w\\3x\\3x\\3x\\3x\\3x\\3x\\3y\\3\")\n buf.write(\"y\\3y\\3y\\3y\\3y\\3z\\3z\\3z\\3z\\3z\\3z\\3z\\3{\\3{\\3{\\3{\\3{\\3{\\3\")\n buf.write(\"{\\3{\\3{\\3|\\3|\\3|\\3|\\3}\\3}\\3}\\3}\\3}\\3}\\3}\\3}\\3}\\3}\\3}\\3\")\n buf.write(\"}\\3}\\3}\\3}\\3~\\3~\\3~\\3~\\3\\177\\3\\177\\3\\177\\3\\177\\3\\177\\3\")\n buf.write(\"\\177\\3\\177\\3\\u0080\\3\\u0080\\3\\u0080\\3\\u0080\\3\\u0080\\3\\u0080\")\n buf.write(\"\\3\\u0080\\3\\u0081\\3\\u0081\\3\\u0081\\3\\u0081\\3\\u0081\\3\\u0081\")\n buf.write(\"\\3\\u0081\\3\\u0081\\3\\u0081\\3\\u0082\\3\\u0082\\3\\u0082\\3\\u0082\")\n buf.write(\"\\3\\u0082\\3\\u0082\\3\\u0082\\3\\u0083\\3\\u0083\\3\\u0083\\3\\u0083\")\n buf.write(\"\\3\\u0083\\3\\u0083\\3\\u0083\\3\\u0083\\3\\u0083\\3\\u0083\\3\\u0084\")\n buf.write(\"\\3\\u0084\\3\\u0084\\3\\u0084\\3\\u0084\\3\\u0084\\3\\u0084\\3\\u0084\")\n buf.write(\"\\3\\u0084\\3\\u0084\\3\\u0084\\3\\u0084\\3\\u0084\\3\\u0084\\3\\u0084\")\n buf.write(\"\\3\\u0085\\3\\u0085\\3\\u0085\\3\\u0085\\3\\u0085\\3\\u0085\\3\\u0085\")\n 
buf.write(\"\\3\\u0085\\3\\u0085\\3\\u0085\\3\\u0085\\3\\u0086\\3\\u0086\\3\\u0086\")\n buf.write(\"\\3\\u0086\\3\\u0086\\3\\u0086\\3\\u0086\\3\\u0086\\3\\u0087\\3\\u0087\")\n buf.write(\"\\3\\u0087\\3\\u0087\\3\\u0087\\3\\u0087\\3\\u0087\\3\\u0087\\3\\u0087\")\n buf.write(\"\\3\\u0087\\3\\u0088\\3\\u0088\\3\\u0088\\3\\u0088\\3\\u0088\\3\\u0088\")\n buf.write(\"\\3\\u0088\\3\\u0088\\3\\u0089\\3\\u0089\\3\\u0089\\3\\u0089\\3\\u0089\")\n buf.write(\"\\3\\u0089\\3\\u0089\\3\\u008a\\3\\u008a\\3\\u008a\\3\\u008a\\3\\u008a\")\n buf.write(\"\\3\\u008b\\3\\u008b\\3\\u008b\\3\\u008b\\3\\u008b\\3\\u008b\\3\\u008b\")\n buf.write(\"\\3\\u008b\\3\\u008c\\3\\u008c\\3\\u008c\\3\\u008c\\3\\u008c\\3\\u008c\")\n buf.write(\"\\3\\u008c\\3\\u008c\\3\\u008c\\3\\u008d\\3\\u008d\\3\\u008d\\3\\u008d\")\n buf.write(\"\\3\\u008d\\3\\u008d\\3\\u008d\\3\\u008d\\3\\u008e\\3\\u008e\\3\\u008e\")\n buf.write(\"\\3\\u008e\\3\\u008e\\3\\u008e\\3\\u008e\\3\\u008e\\3\\u008f\\3\\u008f\")\n buf.write(\"\\3\\u008f\\3\\u008f\\3\\u008f\\3\\u008f\\3\\u0090\\3\\u0090\\3\\u0090\")\n buf.write(\"\\3\\u0090\\3\\u0090\\3\\u0090\\3\\u0091\\3\\u0091\\3\\u0091\\3\\u0091\")\n buf.write(\"\\3\\u0091\\3\\u0091\\3\\u0092\\3\\u0092\\3\\u0092\\3\\u0092\\3\\u0092\")\n buf.write(\"\\3\\u0092\\3\\u0093\\3\\u0093\\3\\u0093\\3\\u0093\\3\\u0093\\3\\u0093\")\n buf.write(\"\\3\\u0093\\3\\u0093\\3\\u0093\\3\\u0093\\3\\u0093\\3\\u0093\\3\\u0094\")\n buf.write(\"\\3\\u0094\\3\\u0094\\3\\u0094\\3\\u0094\\3\\u0094\\3\\u0095\\3\\u0095\")\n buf.write(\"\\3\\u0095\\3\\u0095\\3\\u0095\\3\\u0095\\3\\u0095\\3\\u0095\\3\\u0095\")\n buf.write(\"\\3\\u0095\\3\\u0096\\3\\u0096\\3\\u0096\\3\\u0096\\3\\u0096\\3\\u0096\")\n buf.write(\"\\3\\u0096\\3\\u0096\\3\\u0097\\3\\u0097\\3\\u0097\\3\\u0097\\3\\u0098\")\n buf.write(\"\\3\\u0098\\3\\u0098\\3\\u0098\\3\\u0098\\3\\u0098\\3\\u0098\\3\\u0099\")\n buf.write(\"\\3\\u0099\\3\\u0099\\3\\u0099\\3\\u0099\\3\\u0099\\3\\u009a\\3\\u009a\")\n 
buf.write(\"\\3\\u009a\\3\\u009a\\3\\u009a\\3\\u009b\\3\\u009b\\3\\u009b\\3\\u009b\")\n buf.write(\"\\3\\u009b\\3\\u009c\\3\\u009c\\3\\u009c\\3\\u009c\\3\\u009c\\3\\u009c\")\n buf.write(\"\\3\\u009c\\3\\u009c\\3\\u009c\\3\\u009d\\3\\u009d\\3\\u009d\\3\\u009d\")\n buf.write(\"\\3\\u009d\\3\\u009e\\3\\u009e\\3\\u009e\\3\\u009e\\3\\u009e\\3\\u009e\")\n buf.write(\"\\3\\u009f\\3\\u009f\\3\\u009f\\3\\u009f\\3\\u009f\\3\\u009f\\3\\u00a0\")\n buf.write(\"\\3\\u00a0\\3\\u00a0\\3\\u00a0\\3\\u00a0\\3\\u00a0\\3\\u00a0\\3\\u00a0\")\n buf.write(\"\\3\\u00a0\\3\\u00a1\\3\\u00a1\\3\\u00a1\\3\\u00a1\\3\\u00a1\\3\\u00a2\")\n buf.write(\"\\3\\u00a2\\3\\u00a2\\3\\u00a2\\3\\u00a2\\3\\u00a2\\3\\u00a2\\3\\u00a3\")\n buf.write(\"\\3\\u00a3\\3\\u00a3\\3\\u00a3\\3\\u00a3\\3\\u00a4\\3\\u00a4\\3\\u00a4\")\n buf.write(\"\\3\\u00a4\\3\\u00a4\\3\\u00a5\\3\\u00a5\\3\\u00a5\\3\\u00a6\\3\\u00a6\")\n buf.write(\"\\3\\u00a6\\3\\u00a6\\3\\u00a6\\3\\u00a6\\3\\u00a6\\3\\u00a7\\3\\u00a7\")\n buf.write(\"\\3\\u00a7\\3\\u00a7\\3\\u00a7\\3\\u00a7\\3\\u00a7\\3\\u00a7\\3\\u00a7\")\n buf.write(\"\\3\\u00a7\\3\\u00a8\\3\\u00a8\\3\\u00a8\\3\\u00a9\\3\\u00a9\\3\\u00a9\")\n buf.write(\"\\3\\u00a9\\3\\u00a9\\3\\u00a9\\3\\u00a9\\3\\u00a9\\3\\u00aa\\3\\u00aa\")\n buf.write(\"\\3\\u00aa\\3\\u00aa\\3\\u00aa\\3\\u00aa\\3\\u00aa\\3\\u00aa\\3\\u00aa\")\n buf.write(\"\\3\\u00aa\\3\\u00ab\\3\\u00ab\\3\\u00ab\\3\\u00ab\\3\\u00ab\\3\\u00ab\")\n buf.write(\"\\3\\u00ab\\3\\u00ab\\3\\u00ab\\3\\u00ab\\3\\u00ac\\3\\u00ac\\3\\u00ac\")\n buf.write(\"\\3\\u00ac\\3\\u00ac\\3\\u00ac\\3\\u00ac\\3\\u00ad\\3\\u00ad\\3\\u00ad\")\n buf.write(\"\\3\\u00ad\\3\\u00ad\\3\\u00ad\\3\\u00ae\\3\\u00ae\\3\\u00ae\\3\\u00ae\")\n buf.write(\"\\3\\u00ae\\3\\u00ae\\3\\u00ae\\3\\u00ae\\3\\u00af\\3\\u00af\\3\\u00af\")\n buf.write(\"\\3\\u00af\\3\\u00af\\3\\u00af\\3\\u00af\\3\\u00af\\3\\u00af\\3\\u00af\")\n buf.write(\"\\3\\u00b0\\3\\u00b0\\3\\u00b0\\3\\u00b0\\3\\u00b0\\3\\u00b0\\3\\u00b0\")\n 
buf.write(\"\\3\\u00b0\\3\\u00b1\\3\\u00b1\\3\\u00b1\\3\\u00b1\\3\\u00b1\\3\\u00b1\")\n buf.write(\"\\3\\u00b1\\3\\u00b1\\3\\u00b1\\3\\u00b2\\3\\u00b2\\3\\u00b2\\3\\u00b2\")\n buf.write(\"\\3\\u00b2\\3\\u00b2\\3\\u00b2\\3\\u00b3\\3\\u00b3\\3\\u00b3\\3\\u00b3\")\n buf.write(\"\\3\\u00b3\\3\\u00b3\\3\\u00b4\\3\\u00b4\\3\\u00b4\\3\\u00b4\\3\\u00b4\")\n buf.write(\"\\3\\u00b4\\3\\u00b5\\3\\u00b5\\3\\u00b5\\3\\u00b5\\3\\u00b5\\3\\u00b5\")\n buf.write(\"\\3\\u00b5\\3\\u00b6\\3\\u00b6\\3\\u00b6\\3\\u00b6\\3\\u00b6\\3\\u00b6\")\n buf.write(\"\\3\\u00b6\\3\\u00b6\\3\\u00b6\\3\\u00b6\\3\\u00b6\\3\\u00b6\\3\\u00b6\")\n buf.write(\"\\3\\u00b7\\3\\u00b7\\3\\u00b7\\3\\u00b7\\3\\u00b7\\3\\u00b7\\3\\u00b7\")\n buf.write(\"\\3\\u00b7\\3\\u00b8\\3\\u00b8\\3\\u00b8\\3\\u00b8\\3\\u00b9\\3\\u00b9\")\n buf.write(\"\\3\\u00b9\\3\\u00b9\\3\\u00b9\\3\\u00b9\\3\\u00b9\\3\\u00b9\\3\\u00ba\")\n buf.write(\"\\3\\u00ba\\3\\u00ba\\3\\u00ba\\3\\u00ba\\3\\u00ba\\3\\u00ba\\3\\u00ba\")\n buf.write(\"\\3\\u00ba\\3\\u00ba\\3\\u00bb\\3\\u00bb\\3\\u00bb\\3\\u00bb\\3\\u00bb\")\n buf.write(\"\\3\\u00bb\\3\\u00bb\\3\\u00bb\\3\\u00bb\\3\\u00bc\\3\\u00bc\\3\\u00bc\")\n buf.write(\"\\3\\u00bc\\3\\u00bc\\3\\u00bd\\3\\u00bd\\3\\u00bd\\3\\u00bd\\3\\u00bd\")\n buf.write(\"\\3\\u00bd\\3\\u00bd\\3\\u00bd\\3\\u00bd\\3\\u00bd\\3\\u00bd\\3\\u00be\")\n buf.write(\"\\3\\u00be\\3\\u00be\\3\\u00bf\\3\\u00bf\\3\\u00bf\\3\\u00bf\\3\\u00bf\")\n buf.write(\"\\3\\u00bf\\3\\u00bf\\3\\u00bf\\3\\u00bf\\3\\u00bf\\3\\u00c0\\3\\u00c0\")\n buf.write(\"\\3\\u00c0\\3\\u00c0\\3\\u00c0\\3\\u00c0\\3\\u00c0\\3\\u00c0\\3\\u00c1\")\n buf.write(\"\\3\\u00c1\\3\\u00c1\\3\\u00c1\\3\\u00c1\\3\\u00c2\\3\\u00c2\\3\\u00c2\")\n buf.write(\"\\3\\u00c2\\3\\u00c2\\3\\u00c3\\3\\u00c3\\3\\u00c3\\3\\u00c3\\3\\u00c3\")\n buf.write(\"\\3\\u00c4\\3\\u00c4\\3\\u00c4\\3\\u00c4\\3\\u00c4\\3\\u00c4\\3\\u00c4\")\n buf.write(\"\\3\\u00c4\\3\\u00c4\\3\\u00c5\\3\\u00c5\\3\\u00c5\\3\\u00c5\\3\\u00c5\")\n 
buf.write(\"\\3\\u00c6\\3\\u00c6\\3\\u00c6\\3\\u00c6\\3\\u00c6\\3\\u00c6\\3\\u00c6\")\n buf.write(\"\\3\\u00c6\\3\\u00c6\\3\\u00c6\\3\\u00c6\\3\\u00c7\\3\\u00c7\\3\\u00c7\")\n buf.write(\"\\3\\u00c7\\3\\u00c7\\3\\u00c7\\3\\u00c7\\3\\u00c7\\3\\u00c8\\3\\u00c8\")\n buf.write(\"\\3\\u00c8\\3\\u00c8\\3\\u00c8\\3\\u00c9\\3\\u00c9\\3\\u00c9\\3\\u00c9\")\n buf.write(\"\\3\\u00c9\\3\\u00c9\\3\\u00ca\\3\\u00ca\\3\\u00ca\\3\\u00ca\\3\\u00ca\")\n buf.write(\"\\3\\u00ca\\3\\u00ca\\3\\u00ca\\3\\u00cb\\3\\u00cb\\3\\u00cb\\3\\u00cb\")\n buf.write(\"\\3\\u00cb\\3\\u00cc\\3\\u00cc\\3\\u00cc\\3\\u00cc\\3\\u00cc\\3\\u00cc\")\n buf.write(\"\\3\\u00cd\\3\\u00cd\\3\\u00cd\\3\\u00cd\\3\\u00cd\\3\\u00cd\\3\\u00ce\")\n buf.write(\"\\3\\u00ce\\3\\u00ce\\3\\u00ce\\3\\u00ce\\3\\u00ce\\3\\u00cf\\3\\u00cf\")\n buf.write(\"\\3\\u00cf\\3\\u00cf\\3\\u00cf\\3\\u00cf\\3\\u00d0\\3\\u00d0\\3\\u00d0\")\n buf.write(\"\\3\\u00d0\\3\\u00d0\\3\\u00d0\\3\\u00d1\\3\\u00d1\\3\\u00d1\\3\\u00d1\")\n buf.write(\"\\3\\u00d1\\3\\u00d2\\3\\u00d2\\3\\u00d2\\3\\u00d2\\3\\u00d2\\3\\u00d2\")\n buf.write(\"\\3\\u00d2\\3\\u00d3\\3\\u00d3\\3\\u00d3\\3\\u00d3\\3\\u00d4\\3\\u00d4\")\n buf.write(\"\\3\\u00d4\\3\\u00d4\\3\\u00d4\\3\\u00d4\\3\\u00d4\\3\\u00d5\\3\\u00d5\")\n buf.write(\"\\3\\u00d5\\3\\u00d5\\3\\u00d5\\3\\u00d5\\3\\u00d6\\3\\u00d6\\3\\u00d6\")\n buf.write(\"\\3\\u00d6\\3\\u00d6\\3\\u00d7\\3\\u00d7\\3\\u00d7\\3\\u00d7\\3\\u00d7\")\n buf.write(\"\\3\\u00d8\\3\\u00d8\\3\\u00d8\\3\\u00d8\\3\\u00d8\\3\\u00d9\\3\\u00d9\")\n buf.write(\"\\3\\u00d9\\3\\u00d9\\3\\u00da\\3\\u00da\\3\\u00da\\3\\u00da\\3\\u00da\")\n buf.write(\"\\3\\u00da\\3\\u00da\\3\\u00da\\3\\u00db\\3\\u00db\\3\\u00db\\3\\u00db\")\n buf.write(\"\\3\\u00db\\3\\u00db\\3\\u00db\\3\\u00db\\3\\u00db\\3\\u00dc\\3\\u00dc\")\n buf.write(\"\\3\\u00dc\\3\\u00dc\\3\\u00dc\\3\\u00dc\\3\\u00dc\\3\\u00dc\\3\\u00dc\")\n buf.write(\"\\3\\u00dd\\3\\u00dd\\3\\u00dd\\3\\u00dd\\3\\u00dd\\3\\u00dd\\3\\u00dd\")\n 
buf.write(\"\\3\\u00de\\3\\u00de\\3\\u00de\\3\\u00de\\3\\u00de\\3\\u00de\\3\\u00df\")\n buf.write(\"\\3\\u00df\\3\\u00df\\3\\u00df\\3\\u00df\\3\\u00df\\3\\u00e0\\3\\u00e0\")\n buf.write(\"\\3\\u00e0\\3\\u00e0\\3\\u00e0\\3\\u00e0\\3\\u00e0\\3\\u00e1\\3\\u00e1\")\n buf.write(\"\\3\\u00e1\\3\\u00e1\\3\\u00e1\\3\\u00e1\\3\\u00e1\\3\\u00e1\\3\\u00e1\")\n buf.write(\"\\3\\u00e2\\3\\u00e2\\3\\u00e2\\3\\u00e2\\3\\u00e2\\3\\u00e2\\3\\u00e2\")\n buf.write(\"\\3\\u00e2\\3\\u00e2\\3\\u00e3\\3\\u00e3\\3\\u00e3\\3\\u00e3\\3\\u00e3\")\n buf.write(\"\\3\\u00e4\\3\\u00e4\\3\\u00e4\\3\\u00e4\\3\\u00e4\\3\\u00e4\\3\\u00e5\")\n buf.write(\"\\3\\u00e5\\3\\u00e5\\3\\u00e5\\3\\u00e5\\3\\u00e5\\3\\u00e5\\3\\u00e6\")\n buf.write(\"\\3\\u00e6\\3\\u00e6\\3\\u00e6\\3\\u00e6\\3\\u00e6\\3\\u00e7\\3\\u00e7\")\n buf.write(\"\\3\\u00e7\\3\\u00e7\\3\\u00e7\\3\\u00e7\\3\\u00e7\\3\\u00e7\\3\\u00e7\")\n buf.write(\"\\3\\u00e8\\3\\u00e8\\3\\u00e8\\3\\u00e8\\3\\u00e8\\3\\u00e9\\3\\u00e9\")\n buf.write(\"\\3\\u00e9\\3\\u00e9\\3\\u00ea\\3\\u00ea\\3\\u00ea\\3\\u00ea\\3\\u00ea\")\n buf.write(\"\\3\\u00ea\\3\\u00ea\\3\\u00ea\\3\\u00eb\\3\\u00eb\\3\\u00eb\\3\\u00eb\")\n buf.write(\"\\3\\u00eb\\3\\u00eb\\3\\u00eb\\3\\u00eb\\3\\u00eb\\3\\u00ec\\3\\u00ec\")\n buf.write(\"\\3\\u00ec\\3\\u00ec\\3\\u00ed\\3\\u00ed\\3\\u00ed\\3\\u00ed\\3\\u00ed\")\n buf.write(\"\\3\\u00ed\\3\\u00ee\\3\\u00ee\\3\\u00ee\\3\\u00ee\\3\\u00ee\\3\\u00ee\")\n buf.write(\"\\3\\u00ee\\3\\u00ee\\3\\u00ee\\3\\u00ef\\3\\u00ef\\3\\u00ef\\3\\u00ef\")\n buf.write(\"\\3\\u00ef\\3\\u00ef\\3\\u00f0\\3\\u00f0\\3\\u00f0\\3\\u00f0\\3\\u00f0\")\n buf.write(\"\\3\\u00f0\\3\\u00f0\\3\\u00f1\\3\\u00f1\\3\\u00f1\\3\\u00f1\\3\\u00f2\")\n buf.write(\"\\3\\u00f2\\3\\u00f2\\3\\u00f3\\3\\u00f3\\3\\u00f3\\3\\u00f3\\3\\u00f3\")\n buf.write(\"\\3\\u00f3\\3\\u00f3\\3\\u00f3\\3\\u00f4\\3\\u00f4\\3\\u00f4\\3\\u00f4\")\n buf.write(\"\\3\\u00f4\\3\\u00f4\\3\\u00f4\\3\\u00f4\\3\\u00f5\\3\\u00f5\\3\\u00f5\")\n 
buf.write(\"\\3\\u00f5\\3\\u00f5\\3\\u00f5\\3\\u00f5\\3\\u00f6\\3\\u00f6\\3\\u00f6\")\n buf.write(\"\\3\\u00f6\\3\\u00f6\\3\\u00f6\\3\\u00f6\\3\\u00f6\\3\\u00f7\\3\\u00f7\")\n buf.write(\"\\3\\u00f7\\3\\u00f7\\3\\u00f7\\3\\u00f7\\3\\u00f7\\3\\u00f7\\3\\u00f7\")\n buf.write(\"\\3\\u00f7\\3\\u00f7\\3\\u00f7\\3\\u00f7\\3\\u00f7\\3\\u00f7\\3\\u00f7\")\n buf.write(\"\\3\\u00f7\\3\\u00f8\\3\\u00f8\\3\\u00f8\\3\\u00f8\\3\\u00f8\\3\\u00f8\")\n buf.write(\"\\3\\u00f8\\3\\u00f8\\3\\u00f8\\3\\u00f8\\3\\u00f8\\3\\u00f9\\3\\u00f9\")\n buf.write(\"\\3\\u00f9\\3\\u00f9\\3\\u00f9\\3\\u00f9\\3\\u00f9\\3\\u00f9\\3\\u00f9\")\n buf.write(\"\\3\\u00f9\\3\\u00f9\\3\\u00fa\\3\\u00fa\\3\\u00fa\\3\\u00fa\\3\\u00fa\")\n buf.write(\"\\3\\u00fb\\3\\u00fb\\3\\u00fb\\3\\u00fb\\3\\u00fb\\3\\u00fb\\3\\u00fb\")\n buf.write(\"\\3\\u00fb\\3\\u00fc\\3\\u00fc\\3\\u00fc\\3\\u00fc\\3\\u00fc\\3\\u00fc\")\n buf.write(\"\\3\\u00fc\\3\\u00fc\\3\\u00fc\\3\\u00fc\\3\\u00fc\\3\\u00fc\\3\\u00fc\")\n buf.write(\"\\3\\u00fc\\3\\u00fd\\3\\u00fd\\3\\u00fd\\3\\u00fd\\3\\u00fe\\3\\u00fe\")\n buf.write(\"\\3\\u00fe\\3\\u00fe\\3\\u00fe\\3\\u00fe\\3\\u00fe\\3\\u00ff\\3\\u00ff\")\n buf.write(\"\\3\\u00ff\\3\\u00ff\\3\\u00ff\\3\\u0100\\3\\u0100\\3\\u0100\\3\\u0100\")\n buf.write(\"\\3\\u0100\\3\\u0100\\3\\u0101\\3\\u0101\\3\\u0101\\3\\u0101\\3\\u0101\")\n buf.write(\"\\3\\u0101\\3\\u0101\\3\\u0102\\3\\u0102\\3\\u0102\\3\\u0102\\3\\u0102\")\n buf.write(\"\\3\\u0102\\3\\u0102\\3\\u0102\\3\\u0103\\3\\u0103\\3\\u0103\\3\\u0103\")\n buf.write(\"\\3\\u0103\\3\\u0103\\3\\u0103\\3\\u0103\\3\\u0103\\3\\u0103\\3\\u0104\")\n buf.write(\"\\3\\u0104\\3\\u0104\\3\\u0104\\3\\u0104\\3\\u0104\\3\\u0104\\3\\u0105\")\n buf.write(\"\\3\\u0105\\3\\u0105\\3\\u0106\\3\\u0106\\3\\u0106\\3\\u0106\\3\\u0107\")\n buf.write(\"\\3\\u0107\\3\\u0107\\3\\u0107\\3\\u0108\\3\\u0108\\3\\u0108\\3\\u0108\")\n buf.write(\"\\3\\u0109\\3\\u0109\\3\\u0109\\3\\u010a\\3\\u010a\\3\\u010a\\3\\u010a\")\n 
buf.write(\"\\3\\u010a\\3\\u010b\\3\\u010b\\3\\u010b\\3\\u010b\\3\\u010b\\3\\u010c\")\n buf.write(\"\\3\\u010c\\3\\u010c\\3\\u010c\\3\\u010c\\3\\u010c\\3\\u010c\\3\\u010d\")\n buf.write(\"\\3\\u010d\\3\\u010d\\3\\u010e\\3\\u010e\\3\\u010e\\3\\u010e\\3\\u010e\")\n buf.write(\"\\3\\u010e\\3\\u010e\\3\\u010e\\3\\u010f\\3\\u010f\\3\\u010f\\3\\u010f\")\n buf.write(\"\\3\\u010f\\3\\u010f\\3\\u0110\\3\\u0110\\3\\u0110\\3\\u0110\\3\\u0110\")\n buf.write(\"\\3\\u0110\\3\\u0110\\3\\u0110\\3\\u0110\\3\\u0110\\3\\u0110\\3\\u0111\")\n buf.write(\"\\3\\u0111\\3\\u0111\\3\\u0111\\3\\u0111\\3\\u0111\\3\\u0111\\3\\u0111\")\n buf.write(\"\\3\\u0112\\3\\u0112\\3\\u0112\\3\\u0112\\3\\u0113\\3\\u0113\\3\\u0113\")\n buf.write(\"\\3\\u0113\\3\\u0113\\3\\u0113\\3\\u0114\\3\\u0114\\3\\u0114\\3\\u0114\")\n buf.write(\"\\3\\u0114\\3\\u0115\\3\\u0115\\3\\u0115\\3\\u0115\\3\\u0115\\3\\u0115\")\n buf.write(\"\\3\\u0115\\3\\u0115\\3\\u0115\\3\\u0115\\3\\u0115\\3\\u0116\\3\\u0116\")\n buf.write(\"\\3\\u0116\\3\\u0116\\3\\u0116\\3\\u0116\\3\\u0116\\3\\u0116\\3\\u0117\")\n buf.write(\"\\3\\u0117\\3\\u0117\\3\\u0117\\3\\u0117\\3\\u0117\\3\\u0117\\3\\u0117\")\n buf.write(\"\\3\\u0117\\3\\u0117\\3\\u0117\\3\\u0117\\3\\u0117\\3\\u0117\\3\\u0117\")\n buf.write(\"\\3\\u0117\\3\\u0118\\3\\u0118\\3\\u0118\\3\\u0118\\3\\u0118\\3\\u0118\")\n buf.write(\"\\3\\u0118\\3\\u0118\\3\\u0118\\3\\u0118\\3\\u0118\\3\\u0119\\3\\u0119\")\n buf.write(\"\\3\\u0119\\3\\u0119\\3\\u0119\\3\\u0119\\3\\u0119\\3\\u011a\\3\\u011a\")\n buf.write(\"\\3\\u011a\\3\\u011a\\3\\u011a\\3\\u011a\\3\\u011a\\3\\u011a\\3\\u011a\")\n buf.write(\"\\3\\u011a\\3\\u011b\\3\\u011b\\3\\u011b\\3\\u011b\\3\\u011b\\3\\u011b\")\n buf.write(\"\\3\\u011b\\3\\u011b\\3\\u011c\\3\\u011c\\3\\u011c\\3\\u011c\\3\\u011c\")\n buf.write(\"\\3\\u011d\\3\\u011d\\3\\u011d\\3\\u011d\\3\\u011d\\3\\u011d\\3\\u011d\")\n buf.write(\"\\3\\u011d\\3\\u011d\\3\\u011e\\3\\u011e\\3\\u011e\\3\\u011e\\3\\u011e\")\n 
buf.write(\"\\3\\u011e\\3\\u011f\\3\\u011f\\3\\u011f\\3\\u011f\\3\\u011f\\3\\u011f\")\n buf.write(\"\\3\\u011f\\3\\u011f\\3\\u011f\\3\\u011f\\3\\u0120\\3\\u0120\\3\\u0120\")\n buf.write(\"\\3\\u0120\\3\\u0120\\3\\u0120\\3\\u0121\\3\\u0121\\3\\u0121\\3\\u0121\")\n buf.write(\"\\3\\u0121\\3\\u0122\\3\\u0122\\3\\u0122\\3\\u0122\\3\\u0122\\3\\u0122\")\n buf.write(\"\\3\\u0122\\3\\u0122\\3\\u0122\\3\\u0122\\3\\u0122\\3\\u0122\\3\\u0123\")\n buf.write(\"\\3\\u0123\\3\\u0123\\3\\u0123\\3\\u0123\\3\\u0123\\3\\u0123\\3\\u0123\")\n buf.write(\"\\3\\u0123\\3\\u0124\\3\\u0124\\3\\u0124\\3\\u0124\\3\\u0124\\3\\u0124\")\n buf.write(\"\\3\\u0124\\3\\u0124\\3\\u0124\\3\\u0124\\3\\u0125\\3\\u0125\\3\\u0125\")\n buf.write(\"\\3\\u0125\\3\\u0125\\3\\u0125\\3\\u0125\\3\\u0126\\3\\u0126\\3\\u0126\")\n buf.write(\"\\3\\u0126\\3\\u0126\\3\\u0126\\3\\u0126\\3\\u0126\\3\\u0126\\3\\u0126\")\n buf.write(\"\\3\\u0127\\3\\u0127\\3\\u0127\\3\\u0127\\3\\u0127\\3\\u0127\\3\\u0127\")\n buf.write(\"\\3\\u0127\\3\\u0127\\3\\u0127\\3\\u0128\\3\\u0128\\3\\u0128\\3\\u0128\")\n buf.write(\"\\3\\u0128\\3\\u0128\\3\\u0128\\3\\u0128\\3\\u0129\\3\\u0129\\3\\u0129\")\n buf.write(\"\\3\\u0129\\3\\u0129\\3\\u0129\\3\\u012a\\3\\u012a\\3\\u012a\\3\\u012a\")\n buf.write(\"\\3\\u012a\\3\\u012a\\3\\u012a\\3\\u012a\\3\\u012a\\3\\u012a\\3\\u012b\")\n buf.write(\"\\3\\u012b\\3\\u012b\\3\\u012b\\3\\u012b\\3\\u012b\\3\\u012c\\3\\u012c\")\n buf.write(\"\\3\\u012c\\3\\u012c\\3\\u012c\\3\\u012c\\3\\u012d\\3\\u012d\\3\\u012d\")\n buf.write(\"\\3\\u012d\\3\\u012e\\3\\u012e\\3\\u012e\\3\\u012e\\3\\u012e\\3\\u012f\")\n buf.write(\"\\3\\u012f\\3\\u012f\\3\\u012f\\3\\u012f\\3\\u0130\\3\\u0130\\3\\u0130\")\n buf.write(\"\\3\\u0130\\3\\u0130\\3\\u0130\\3\\u0130\\3\\u0131\\3\\u0131\\3\\u0131\")\n buf.write(\"\\3\\u0131\\3\\u0132\\3\\u0132\\3\\u0132\\3\\u0132\\3\\u0132\\3\\u0132\")\n buf.write(\"\\3\\u0132\\3\\u0132\\3\\u0132\\3\\u0132\\3\\u0133\\3\\u0133\\3\\u0133\")\n 
buf.write(\"\\3\\u0133\\3\\u0133\\3\\u0133\\3\\u0133\\3\\u0133\\3\\u0133\\3\\u0133\")\n buf.write(\"\\3\\u0133\\3\\u0133\\3\\u0134\\3\\u0134\\3\\u0134\\3\\u0134\\3\\u0134\")\n buf.write(\"\\3\\u0134\\3\\u0134\\3\\u0135\\3\\u0135\\3\\u0135\\3\\u0135\\3\\u0135\")\n buf.write(\"\\3\\u0135\\3\\u0135\\3\\u0135\\3\\u0135\\3\\u0135\\3\\u0136\\3\\u0136\")\n buf.write(\"\\3\\u0136\\3\\u0136\\3\\u0136\\3\\u0136\\3\\u0136\\3\\u0137\\3\\u0137\")\n buf.write(\"\\3\\u0137\\3\\u0137\\3\\u0137\\3\\u0137\\3\\u0137\\3\\u0137\\3\\u0138\")\n buf.write(\"\\3\\u0138\\3\\u0138\\3\\u0138\\3\\u0138\\3\\u0138\\3\\u0138\\3\\u0138\")\n buf.write(\"\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\")\n buf.write(\"\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\")\n buf.write(\"\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u0139\\3\\u013a\")\n buf.write(\"\\3\\u013a\\3\\u013a\\3\\u013a\\3\\u013a\\3\\u013a\\3\\u013a\\3\\u013b\")\n buf.write(\"\\3\\u013b\\3\\u013b\\3\\u013b\\3\\u013b\\3\\u013b\\3\\u013b\\3\\u013b\")\n buf.write(\"\\3\\u013b\\3\\u013b\\3\\u013b\\3\\u013b\\3\\u013b\\3\\u013c\\3\\u013c\")\n buf.write(\"\\3\\u013c\\3\\u013c\\3\\u013c\\3\\u013c\\3\\u013c\\3\\u013d\\3\\u013d\")\n buf.write(\"\\3\\u013d\\3\\u013d\\3\\u013d\\3\\u013d\\3\\u013d\\3\\u013d\\3\\u013d\")\n buf.write(\"\\3\\u013d\\3\\u013e\\3\\u013e\\3\\u013e\\3\\u013e\\3\\u013e\\3\\u013e\")\n buf.write(\"\\3\\u013f\\3\\u013f\\3\\u013f\\3\\u013f\\3\\u013f\\3\\u013f\\3\\u013f\")\n buf.write(\"\\3\\u013f\\3\\u0140\\3\\u0140\\3\\u0140\\3\\u0140\\3\\u0140\\3\\u0140\")\n buf.write(\"\\3\\u0140\\3\\u0141\\3\\u0141\\3\\u0141\\3\\u0141\\3\\u0141\\3\\u0141\")\n buf.write(\"\\3\\u0142\\3\\u0142\\3\\u0142\\3\\u0142\\3\\u0142\\3\\u0142\\3\\u0142\")\n buf.write(\"\\3\\u0142\\3\\u0142\\3\\u0143\\3\\u0143\\3\\u0143\\3\\u0143\\3\\u0143\")\n buf.write(\"\\3\\u0143\\3\\u0143\\3\\u0144\\3\\u0144\\3\\u0144\\3\\u0144\\3\\u0145\")\n 
buf.write(\"\\3\\u0145\\3\\u0145\\3\\u0145\\3\\u0145\\3\\u0145\\3\\u0146\\3\\u0146\")\n buf.write(\"\\3\\u0146\\3\\u0146\\3\\u0146\\3\\u0147\\3\\u0147\\3\\u0147\\3\\u0147\")\n buf.write(\"\\3\\u0147\\3\\u0147\\3\\u0148\\3\\u0148\\3\\u0148\\3\\u0148\\3\\u0148\")\n buf.write(\"\\3\\u0148\\3\\u0148\\3\\u0149\\3\\u0149\\3\\u0149\\3\\u0149\\3\\u0149\")\n buf.write(\"\\3\\u014a\\3\\u014a\\3\\u014a\\3\\u014a\\3\\u014a\\3\\u014a\\3\\u014a\")\n buf.write(\"\\3\\u014a\\3\\u014a\\3\\u014a\\3\\u014b\\3\\u014b\\3\\u014b\\3\\u014b\")\n buf.write(\"\\3\\u014b\\3\\u014b\\3\\u014b\\3\\u014c\\3\\u014c\\3\\u014c\\3\\u014c\")\n buf.write(\"\\3\\u014c\\3\\u014c\\3\\u014c\\3\\u014c\\3\\u014c\\3\\u014c\\3\\u014c\")\n buf.write(\"\\3\\u014c\\3\\u014d\\3\\u014d\\3\\u014d\\3\\u014d\\3\\u014e\\3\\u014e\")\n buf.write(\"\\3\\u014e\\3\\u014e\\3\\u014e\\3\\u014e\\3\\u014e\\3\\u014f\\3\\u014f\")\n buf.write(\"\\3\\u014f\\3\\u014f\\3\\u014f\\3\\u014f\\3\\u014f\\3\\u0150\\3\\u0150\")\n buf.write(\"\\3\\u0150\\3\\u0150\\3\\u0150\\3\\u0151\\3\\u0151\\3\\u0151\\3\\u0151\")\n buf.write(\"\\3\\u0151\\3\\u0151\\3\\u0151\\3\\u0151\\3\\u0152\\3\\u0152\\3\\u0152\")\n buf.write(\"\\3\\u0152\\3\\u0152\\3\\u0152\\3\\u0152\\3\\u0153\\3\\u0153\\3\\u0153\")\n buf.write(\"\\3\\u0153\\3\\u0153\\3\\u0154\\3\\u0154\\3\\u0154\\3\\u0154\\3\\u0154\")\n buf.write(\"\\3\\u0154\\3\\u0154\\3\\u0154\\3\\u0154\\3\\u0155\\3\\u0155\\3\\u0155\")\n buf.write(\"\\3\\u0155\\3\\u0155\\3\\u0155\\3\\u0155\\3\\u0155\\3\\u0155\\3\\u0155\")\n buf.write(\"\\3\\u0155\\3\\u0156\\3\\u0156\\3\\u0156\\3\\u0156\\3\\u0156\\3\\u0156\")\n buf.write(\"\\3\\u0156\\3\\u0156\\3\\u0156\\3\\u0156\\3\\u0156\\3\\u0156\\3\\u0156\")\n buf.write(\"\\3\\u0157\\3\\u0157\\3\\u0157\\3\\u0157\\3\\u0157\\3\\u0157\\3\\u0157\")\n buf.write(\"\\3\\u0157\\3\\u0157\\3\\u0157\\3\\u0157\\3\\u0157\\3\\u0157\\3\\u0157\")\n buf.write(\"\\3\\u0157\\3\\u0157\\3\\u0157\\3\\u0157\\3\\u0158\\3\\u0158\\3\\u0158\")\n 
buf.write(\"\\3\\u0158\\3\\u0158\\3\\u0158\\3\\u0158\\3\\u0158\\3\\u0158\\3\\u0158\")\n buf.write(\"\\3\\u0158\\3\\u0158\\3\\u0159\\3\\u0159\\3\\u0159\\3\\u0159\\3\\u0159\")\n buf.write(\"\\3\\u0159\\3\\u0159\\3\\u0159\\3\\u0159\\3\\u0159\\3\\u0159\\3\\u0159\")\n buf.write(\"\\3\\u0159\\3\\u0159\\3\\u0159\\3\\u0159\\3\\u015a\\3\\u015a\\3\\u015a\")\n buf.write(\"\\3\\u015a\\3\\u015b\\3\\u015b\\3\\u015b\\3\\u015b\\3\\u015b\\3\\u015c\")\n buf.write(\"\\3\\u015c\\3\\u015c\\3\\u015c\\3\\u015c\\3\\u015c\\3\\u015c\\3\\u015c\")\n buf.write(\"\\3\\u015c\\3\\u015d\\3\\u015d\\3\\u015d\\3\\u015d\\3\\u015d\\3\\u015d\")\n buf.write(\"\\3\\u015e\\3\\u015e\\3\\u015e\\3\\u015e\\3\\u015e\\3\\u015f\\3\\u015f\")\n buf.write(\"\\3\\u015f\\3\\u015f\\3\\u015f\\3\\u015f\\3\\u015f\\3\\u015f\\3\\u015f\")\n buf.write(\"\\3\\u0160\\3\\u0160\\3\\u0160\\3\\u0160\\3\\u0160\\3\\u0160\\3\\u0160\")\n buf.write(\"\\3\\u0160\\3\\u0160\\3\\u0161\\3\\u0161\\3\\u0161\\3\\u0161\\3\\u0161\")\n buf.write(\"\\3\\u0161\\3\\u0161\\3\\u0161\\3\\u0161\\3\\u0162\\3\\u0162\\3\\u0162\")\n buf.write(\"\\3\\u0162\\3\\u0162\\3\\u0162\\3\\u0162\\3\\u0162\\3\\u0162\\3\\u0162\")\n buf.write(\"\\3\\u0162\\3\\u0162\\3\\u0162\\3\\u0162\\3\\u0162\\3\\u0163\\3\\u0163\")\n buf.write(\"\\3\\u0163\\3\\u0163\\3\\u0163\\3\\u0163\\3\\u0163\\3\\u0164\\3\\u0164\")\n buf.write(\"\\3\\u0164\\3\\u0164\\3\\u0164\\3\\u0165\\3\\u0165\\3\\u0165\\3\\u0165\")\n buf.write(\"\\3\\u0165\\3\\u0166\\3\\u0166\\3\\u0166\\3\\u0166\\3\\u0166\\3\\u0166\")\n buf.write(\"\\3\\u0166\\3\\u0166\\3\\u0166\\3\\u0167\\3\\u0167\\3\\u0167\\3\\u0167\")\n buf.write(\"\\3\\u0167\\3\\u0167\\3\\u0167\\3\\u0167\\3\\u0167\\3\\u0168\\3\\u0168\")\n buf.write(\"\\3\\u0168\\3\\u0168\\3\\u0168\\3\\u0169\\3\\u0169\\3\\u0169\\3\\u0169\")\n buf.write(\"\\3\\u0169\\3\\u0169\\3\\u0169\\3\\u0169\\3\\u0169\\3\\u0169\\3\\u0169\")\n buf.write(\"\\3\\u0169\\3\\u0169\\3\\u0169\\3\\u016a\\3\\u016a\\3\\u016a\\3\\u016a\")\n 
buf.write(\"\\3\\u016a\\3\\u016a\\3\\u016a\\3\\u016a\\3\\u016b\\3\\u016b\\3\\u016b\")\n buf.write(\"\\3\\u016b\\3\\u016b\\3\\u016b\\3\\u016b\\3\\u016b\\3\\u016b\\3\\u016c\")\n buf.write(\"\\3\\u016c\\3\\u016c\\3\\u016c\\3\\u016c\\3\\u016c\\3\\u016c\\3\\u016c\")\n buf.write(\"\\3\\u016c\\3\\u016c\\3\\u016c\\3\\u016d\\3\\u016d\\3\\u016d\\3\\u016d\")\n buf.write(\"\\3\\u016d\\3\\u016d\\3\\u016e\\3\\u016e\\3\\u016e\\3\\u016e\\3\\u016e\")\n buf.write(\"\\3\\u016e\\3\\u016e\\3\\u016e\\3\\u016f\\3\\u016f\\3\\u016f\\3\\u016f\")\n buf.write(\"\\3\\u016f\\3\\u016f\\3\\u016f\\3\\u016f\\3\\u016f\\3\\u016f\\3\\u0170\")\n buf.write(\"\\3\\u0170\\3\\u0170\\3\\u0170\\3\\u0170\\3\\u0170\\3\\u0170\\3\\u0170\")\n buf.write(\"\\3\\u0170\\3\\u0170\\3\\u0170\\3\\u0170\\3\\u0170\\3\\u0171\\3\\u0171\")\n buf.write(\"\\3\\u0171\\3\\u0171\\3\\u0171\\3\\u0171\\3\\u0171\\3\\u0172\\3\\u0172\")\n buf.write(\"\\3\\u0172\\3\\u0172\\3\\u0172\\3\\u0172\\3\\u0172\\3\\u0172\\3\\u0172\")\n buf.write(\"\\3\\u0172\\3\\u0172\\3\\u0173\\3\\u0173\\3\\u0173\\3\\u0173\\3\\u0173\")\n buf.write(\"\\3\\u0173\\3\\u0173\\3\\u0174\\3\\u0174\\3\\u0174\\3\\u0174\\3\\u0174\")\n buf.write(\"\\3\\u0174\\3\\u0174\\3\\u0174\\3\\u0174\\3\\u0174\\3\\u0174\\3\\u0174\")\n buf.write(\"\\3\\u0175\\3\\u0175\\3\\u0175\\3\\u0175\\3\\u0175\\3\\u0175\\3\\u0175\")\n buf.write(\"\\3\\u0175\\3\\u0175\\3\\u0175\\3\\u0175\\3\\u0175\\3\\u0175\\3\\u0176\")\n buf.write(\"\\3\\u0176\\3\\u0176\\3\\u0176\\3\\u0176\\3\\u0176\\3\\u0176\\3\\u0176\")\n buf.write(\"\\3\\u0176\\3\\u0176\\3\\u0176\\3\\u0176\\3\\u0176\\3\\u0176\\3\\u0177\")\n buf.write(\"\\3\\u0177\\3\\u0177\\3\\u0177\\3\\u0177\\3\\u0177\\3\\u0177\\3\\u0177\")\n buf.write(\"\\3\\u0178\\3\\u0178\\3\\u0178\\3\\u0178\\3\\u0178\\3\\u0178\\3\\u0178\")\n buf.write(\"\\3\\u0178\\3\\u0179\\3\\u0179\\3\\u0179\\3\\u0179\\3\\u0179\\3\\u0179\")\n buf.write(\"\\3\\u0179\\3\\u0179\\3\\u017a\\3\\u017a\\3\\u017a\\3\\u017a\\3\\u017a\")\n 
buf.write(\"\\3\\u017a\\3\\u017b\\3\\u017b\\3\\u017b\\3\\u017b\\3\\u017c\\3\\u017c\")\n buf.write(\"\\3\\u017c\\3\\u017c\\3\\u017c\\3\\u017d\\3\\u017d\\3\\u017d\\3\\u017d\")\n buf.write(\"\\3\\u017d\\3\\u017e\\3\\u017e\\3\\u017e\\3\\u017e\\3\\u017e\\3\\u017e\")\n buf.write(\"\\3\\u017e\\3\\u017e\\3\\u017e\\3\\u017e\\3\\u017f\\3\\u017f\\3\\u017f\")\n buf.write(\"\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\")\n buf.write(\"\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\")\n buf.write(\"\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\")\n buf.write(\"\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u017f\\3\\u0180\\3\\u0180\\3\\u0180\")\n buf.write(\"\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\")\n buf.write(\"\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\")\n buf.write(\"\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0180\")\n buf.write(\"\\3\\u0180\\3\\u0180\\3\\u0180\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\")\n buf.write(\"\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\")\n buf.write(\"\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\")\n buf.write(\"\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0181\\3\\u0182\")\n buf.write(\"\\3\\u0182\\3\\u0182\\3\\u0182\\3\\u0182\\3\\u0182\\3\\u0182\\3\\u0182\")\n buf.write(\"\\3\\u0182\\3\\u0182\\3\\u0182\\3\\u0182\\3\\u0182\\3\\u0182\\3\\u0183\")\n buf.write(\"\\3\\u0183\\3\\u0183\\3\\u0183\\3\\u0183\\3\\u0183\\3\\u0183\\3\\u0183\")\n buf.write(\"\\3\\u0183\\3\\u0183\\3\\u0183\\3\\u0183\\3\\u0183\\3\\u0183\\3\\u0184\")\n buf.write(\"\\3\\u0184\\3\\u0184\\3\\u0184\\3\\u0184\\3\\u0184\\3\\u0184\\3\\u0184\")\n buf.write(\"\\3\\u0184\\3\\u0184\\3\\u0184\\3\\u0184\\3\\u0184\\3\\u0184\\3\\u0184\")\n buf.write(\"\\3\\u0184\\3\\u0185\\3\\u0185\\3\\u0185\\3\\u0185\\3\\u0185\\3\\u0185\")\n 
buf.write(\"\\3\\u0185\\3\\u0185\\3\\u0185\\3\\u0185\\3\\u0185\\3\\u0185\\3\\u0185\")\n buf.write(\"\\3\\u0185\\3\\u0185\\3\\u0185\\3\\u0186\\3\\u0186\\3\\u0186\\3\\u0187\")\n buf.write(\"\\3\\u0187\\3\\u0187\\3\\u0187\\3\\u0187\\3\\u0187\\3\\u0187\\3\\u0187\")\n buf.write(\"\\3\\u0187\\3\\u0188\\3\\u0188\\3\\u0188\\3\\u0188\\3\\u0188\\3\\u0188\")\n buf.write(\"\\3\\u0188\\3\\u0188\\3\\u0188\\3\\u0188\\3\\u0188\\3\\u0188\\3\\u0189\")\n buf.write(\"\\3\\u0189\\3\\u0189\\3\\u0189\\3\\u0189\\3\\u0189\\3\\u0189\\3\\u0189\")\n buf.write(\"\\3\\u0189\\3\\u0189\\3\\u018a\\3\\u018a\\3\\u018a\\3\\u018a\\3\\u018a\")\n buf.write(\"\\3\\u018a\\3\\u018b\\3\\u018b\\3\\u018b\\3\\u018b\\3\\u018b\\3\\u018b\")\n buf.write(\"\\3\\u018b\\3\\u018b\\3\\u018c\\3\\u018c\\3\\u018c\\3\\u018c\\3\\u018c\")\n buf.write(\"\\3\\u018d\\3\\u018d\\3\\u018d\\3\\u018d\\3\\u018d\\3\\u018e\\3\\u018e\")\n buf.write(\"\\3\\u018e\\3\\u018e\\3\\u018e\\3\\u018e\\3\\u018e\\3\\u018e\\3\\u018e\")\n buf.write(\"\\3\\u018f\\3\\u018f\\3\\u018f\\3\\u018f\\3\\u018f\\3\\u0190\\3\\u0190\")\n buf.write(\"\\3\\u0190\\3\\u0190\\3\\u0190\\3\\u0190\\3\\u0190\\3\\u0190\\3\\u0190\")\n buf.write(\"\\3\\u0190\\3\\u0191\\3\\u0191\\3\\u0191\\3\\u0191\\3\\u0191\\3\\u0191\")\n buf.write(\"\\3\\u0192\\3\\u0192\\3\\u0192\\3\\u0192\\3\\u0192\\3\\u0192\\3\\u0193\")\n buf.write(\"\\3\\u0193\\3\\u0193\\3\\u0193\\3\\u0193\\3\\u0193\\3\\u0193\\3\\u0194\")\n buf.write(\"\\3\\u0194\\3\\u0194\\3\\u0194\\3\\u0194\\3\\u0194\\3\\u0194\\3\\u0194\")\n buf.write(\"\\3\\u0194\\3\\u0194\\3\\u0195\\3\\u0195\\3\\u0195\\3\\u0195\\3\\u0195\")\n buf.write(\"\\3\\u0195\\3\\u0195\\3\\u0195\\3\\u0196\\3\\u0196\\3\\u0196\\3\\u0196\")\n buf.write(\"\\3\\u0196\\3\\u0196\\3\\u0197\\3\\u0197\\3\\u0197\\3\\u0197\\3\\u0197\")\n buf.write(\"\\3\\u0197\\3\\u0197\\3\\u0198\\3\\u0198\\3\\u0198\\3\\u0198\\3\\u0198\")\n buf.write(\"\\3\\u0198\\3\\u0198\\3\\u0198\\3\\u0199\\3\\u0199\\3\\u0199\\3\\u0199\")\n 
buf.write(\"\\3\\u0199\\3\\u0199\\3\\u0199\\3\\u019a\\3\\u019a\\3\\u019a\\3\\u019a\")\n buf.write(\"\\3\\u019a\\3\\u019a\\3\\u019a\\3\\u019b\\3\\u019b\\3\\u019b\\3\\u019b\")\n buf.write(\"\\3\\u019c\\3\\u019c\\3\\u019c\\3\\u019c\\3\\u019c\\3\\u019c\\3\\u019d\")\n buf.write(\"\\3\\u019d\\3\\u019d\\3\\u019d\\3\\u019d\\3\\u019d\\3\\u019d\\3\\u019d\")\n buf.write(\"\\3\\u019d\\3\\u019e\\3\\u019e\\3\\u019e\\3\\u019e\\3\\u019e\\3\\u019e\")\n buf.write(\"\\3\\u019f\\3\\u019f\\3\\u019f\\3\\u019f\\3\\u019f\\3\\u019f\\3\\u019f\")\n buf.write(\"\\3\\u01a0\\3\\u01a0\\3\\u01a0\\3\\u01a0\\3\\u01a0\\3\\u01a0\\3\\u01a0\")\n buf.write(\"\\3\\u01a0\\3\\u01a1\\3\\u01a1\\3\\u01a1\\3\\u01a1\\3\\u01a1\\3\\u01a1\")\n buf.write(\"\\3\\u01a1\\3\\u01a1\\3\\u01a1\\3\\u01a2\\3\\u01a2\\3\\u01a2\\3\\u01a2\")\n buf.write(\"\\3\\u01a2\\3\\u01a2\\3\\u01a2\\3\\u01a2\\3\\u01a2\\3\\u01a3\\3\\u01a3\")\n buf.write(\"\\3\\u01a3\\3\\u01a3\\3\\u01a3\\3\\u01a3\\3\\u01a3\\3\\u01a4\\3\\u01a4\")\n buf.write(\"\\3\\u01a4\\3\\u01a4\\3\\u01a4\\3\\u01a4\\3\\u01a4\\3\\u01a4\\3\\u01a5\")\n buf.write(\"\\3\\u01a5\\3\\u01a5\\3\\u01a5\\3\\u01a5\\3\\u01a5\\3\\u01a5\\3\\u01a5\")\n buf.write(\"\\3\\u01a6\\3\\u01a6\\3\\u01a6\\3\\u01a6\\3\\u01a6\\3\\u01a6\\3\\u01a6\")\n buf.write(\"\\3\\u01a6\\3\\u01a6\\3\\u01a7\\3\\u01a7\\3\\u01a7\\3\\u01a7\\3\\u01a7\")\n buf.write(\"\\3\\u01a8\\3\\u01a8\\3\\u01a8\\3\\u01a8\\3\\u01a8\\3\\u01a8\\3\\u01a8\")\n buf.write(\"\\3\\u01a8\\3\\u01a9\\3\\u01a9\\3\\u01a9\\3\\u01a9\\3\\u01a9\\3\\u01a9\")\n buf.write(\"\\3\\u01a9\\3\\u01a9\\3\\u01a9\\3\\u01a9\\3\\u01a9\\3\\u01aa\\3\\u01aa\")\n buf.write(\"\\3\\u01aa\\3\\u01aa\\3\\u01aa\\3\\u01ab\\3\\u01ab\\3\\u01ab\\3\\u01ab\")\n buf.write(\"\\3\\u01ab\\3\\u01ab\\3\\u01ab\\3\\u01ab\\3\\u01ab\\3\\u01ac\\3\\u01ac\")\n buf.write(\"\\3\\u01ac\\3\\u01ac\\3\\u01ac\\3\\u01ac\\3\\u01ad\\3\\u01ad\\3\\u01ad\")\n buf.write(\"\\3\\u01ad\\3\\u01ad\\3\\u01ad\\3\\u01ae\\3\\u01ae\\3\\u01ae\\3\\u01ae\")\n 
buf.write(\"\\3\\u01ae\\3\\u01af\\3\\u01af\\3\\u01af\\3\\u01af\\3\\u01af\\3\\u01af\")\n buf.write(\"\\3\\u01af\\3\\u01b0\\3\\u01b0\\3\\u01b0\\3\\u01b0\\3\\u01b0\\3\\u01b1\")\n buf.write(\"\\3\\u01b1\\3\\u01b1\\3\\u01b1\\3\\u01b1\\3\\u01b1\\3\\u01b2\\3\\u01b2\")\n buf.write(\"\\3\\u01b2\\3\\u01b2\\3\\u01b3\\3\\u01b3\\3\\u01b3\\3\\u01b3\\3\\u01b3\")\n buf.write(\"\\3\\u01b3\\3\\u01b3\\3\\u01b4\\3\\u01b4\\3\\u01b4\\3\\u01b4\\3\\u01b4\")\n buf.write(\"\\3\\u01b4\\3\\u01b4\\3\\u01b4\\3\\u01b4\\3\\u01b4\\3\\u01b4\\3\\u01b4\")\n buf.write(\"\\3\\u01b4\\3\\u01b4\\3\\u01b5\\3\\u01b5\\3\\u01b5\\3\\u01b5\\3\\u01b5\")\n buf.write(\"\\3\\u01b5\\3\\u01b5\\3\\u01b5\\3\\u01b6\\3\\u01b6\\3\\u01b6\\3\\u01b6\")\n buf.write(\"\\3\\u01b6\\3\\u01b6\\3\\u01b6\\3\\u01b6\\3\\u01b6\\3\\u01b6\\3\\u01b6\")\n buf.write(\"\\3\\u01b6\\3\\u01b6\\3\\u01b7\\3\\u01b7\\3\\u01b7\\3\\u01b7\\3\\u01b7\")\n buf.write(\"\\3\\u01b7\\3\\u01b7\\3\\u01b7\\3\\u01b7\\3\\u01b7\\3\\u01b7\\3\\u01b8\")\n buf.write(\"\\3\\u01b8\\3\\u01b8\\3\\u01b8\\3\\u01b8\\3\\u01b8\\3\\u01b8\\3\\u01b8\")\n buf.write(\"\\3\\u01b8\\3\\u01b8\\3\\u01b9\\3\\u01b9\\3\\u01b9\\3\\u01b9\\3\\u01b9\")\n buf.write(\"\\3\\u01b9\\3\\u01b9\\3\\u01b9\\3\\u01b9\\3\\u01b9\\3\\u01ba\\3\\u01ba\")\n buf.write(\"\\3\\u01ba\\3\\u01ba\\3\\u01ba\\3\\u01ba\\3\\u01ba\\3\\u01ba\\3\\u01ba\")\n buf.write(\"\\3\\u01ba\\3\\u01ba\\3\\u01ba\\3\\u01ba\\3\\u01ba\\3\\u01bb\\3\\u01bb\")\n buf.write(\"\\3\\u01bb\\3\\u01bb\\3\\u01bb\\3\\u01bb\\3\\u01bb\\3\\u01bb\\3\\u01bb\")\n buf.write(\"\\3\\u01bc\\3\\u01bc\\3\\u01bc\\3\\u01bc\\3\\u01bc\\3\\u01bc\\3\\u01bd\")\n buf.write(\"\\3\\u01bd\\3\\u01bd\\3\\u01bd\\3\\u01bd\\3\\u01bd\\3\\u01bd\\3\\u01bd\")\n buf.write(\"\\3\\u01bd\\3\\u01be\\3\\u01be\\3\\u01be\\3\\u01be\\3\\u01be\\3\\u01be\")\n buf.write(\"\\3\\u01be\\3\\u01be\\3\\u01bf\\3\\u01bf\\3\\u01bf\\3\\u01bf\\3\\u01bf\")\n buf.write(\"\\3\\u01bf\\3\\u01bf\\3\\u01bf\\3\\u01bf\\3\\u01bf\\3\\u01bf\\3\\u01bf\")\n 
buf.write(\"\\3\\u01bf\\3\\u01c0\\3\\u01c0\\3\\u01c0\\3\\u01c0\\3\\u01c0\\3\\u01c0\")\n buf.write(\"\\3\\u01c0\\3\\u01c0\\3\\u01c0\\3\\u01c1\\3\\u01c1\\3\\u01c1\\3\\u01c1\")\n buf.write(\"\\3\\u01c1\\3\\u01c2\\3\\u01c2\\3\\u01c2\\3\\u01c2\\3\\u01c3\\3\\u01c3\")\n buf.write(\"\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\")\n buf.write(\"\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\")\n buf.write(\"\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\\3\\u01c3\")\n buf.write(\"\\3\\u01c3\\3\\u01c3\\3\\u01c4\\3\\u01c4\\3\\u01c4\\3\\u01c4\\3\\u01c4\")\n buf.write(\"\\3\\u01c5\\3\\u01c5\\3\\u01c5\\3\\u01c5\\3\\u01c5\\3\\u01c5\\3\\u01c5\")\n buf.write(\"\\3\\u01c5\\3\\u01c5\\3\\u01c5\\3\\u01c5\\3\\u01c6\\3\\u01c6\\3\\u01c6\")\n buf.write(\"\\3\\u01c6\\3\\u01c6\\3\\u01c6\\3\\u01c6\\3\\u01c6\\3\\u01c6\\3\\u01c6\")\n buf.write(\"\\3\\u01c6\\3\\u01c6\\3\\u01c6\\3\\u01c6\\3\\u01c6\\3\\u01c6\\3\\u01c6\")\n buf.write(\"\\3\\u01c6\\3\\u01c7\\3\\u01c7\\3\\u01c7\\3\\u01c7\\3\\u01c7\\3\\u01c7\")\n buf.write(\"\\3\\u01c7\\3\\u01c7\\3\\u01c7\\3\\u01c7\\3\\u01c7\\3\\u01c7\\3\\u01c7\")\n buf.write(\"\\3\\u01c7\\3\\u01c7\\3\\u01c7\\3\\u01c8\\3\\u01c8\\3\\u01c8\\3\\u01c8\")\n buf.write(\"\\3\\u01c8\\3\\u01c8\\3\\u01c8\\3\\u01c8\\3\\u01c8\\3\\u01c8\\3\\u01c8\")\n buf.write(\"\\3\\u01c8\\3\\u01c8\\3\\u01c8\\3\\u01c8\\3\\u01c8\\3\\u01c8\\3\\u01c8\")\n buf.write(\"\\3\\u01c8\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\")\n buf.write(\"\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\")\n buf.write(\"\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01c9\")\n buf.write(\"\\3\\u01c9\\3\\u01c9\\3\\u01c9\\3\\u01ca\\3\\u01ca\\3\\u01ca\\3\\u01ca\")\n buf.write(\"\\3\\u01ca\\3\\u01ca\\3\\u01ca\\3\\u01ca\\3\\u01ca\\3\\u01ca\\3\\u01ca\")\n buf.write(\"\\3\\u01ca\\3\\u01ca\\3\\u01ca\\3\\u01ca\\3\\u01cb\\3\\u01cb\\3\\u01cb\")\n 
buf.write(\"\\3\\u01cb\\3\\u01cb\\3\\u01cb\\3\\u01cb\\3\\u01cb\\3\\u01cb\\3\\u01cb\")\n buf.write(\"\\3\\u01cc\\3\\u01cc\\3\\u01cc\\3\\u01cc\\3\\u01cc\\3\\u01cc\\3\\u01cc\")\n buf.write(\"\\3\\u01cc\\3\\u01cc\\3\\u01cc\\3\\u01cc\\3\\u01cd\\3\\u01cd\\3\\u01cd\")\n buf.write(\"\\3\\u01cd\\3\\u01cd\\3\\u01cd\\3\\u01cd\\3\\u01cd\\3\\u01ce\\3\\u01ce\")\n buf.write(\"\\3\\u01ce\\3\\u01ce\\3\\u01ce\\3\\u01ce\\3\\u01ce\\3\\u01ce\\3\\u01ce\")\n buf.write(\"\\3\\u01ce\\3\\u01ce\\3\\u01ce\\3\\u01ce\\3\\u01cf\\3\\u01cf\\3\\u01cf\")\n buf.write(\"\\3\\u01cf\\3\\u01cf\\3\\u01cf\\3\\u01cf\\3\\u01cf\\3\\u01cf\\3\\u01cf\")\n buf.write(\"\\3\\u01cf\\3\\u01cf\\3\\u01cf\\3\\u01cf\\3\\u01cf\\3\\u01cf\\3\\u01d0\")\n buf.write(\"\\3\\u01d0\\3\\u01d0\\3\\u01d0\\3\\u01d0\\3\\u01d0\\3\\u01d0\\3\\u01d0\")\n buf.write(\"\\3\\u01d0\\3\\u01d0\\3\\u01d0\\3\\u01d0\\3\\u01d0\\3\\u01d0\\3\\u01d0\")\n buf.write(\"\\3\\u01d0\\3\\u01d1\\3\\u01d1\\3\\u01d1\\3\\u01d1\\3\\u01d1\\3\\u01d2\")\n buf.write(\"\\3\\u01d2\\3\\u01d2\\3\\u01d2\\3\\u01d3\\3\\u01d3\\3\\u01d3\\3\\u01d3\")\n buf.write(\"\\3\\u01d3\\3\\u01d4\\3\\u01d4\\3\\u01d4\\3\\u01d4\\3\\u01d5\\3\\u01d5\")\n buf.write(\"\\3\\u01d5\\3\\u01d5\\3\\u01d5\\3\\u01d6\\3\\u01d6\\3\\u01d6\\3\\u01d6\")\n buf.write(\"\\3\\u01d7\\3\\u01d7\\3\\u01d7\\3\\u01d7\\3\\u01d7\\3\\u01d7\\3\\u01d7\")\n buf.write(\"\\3\\u01d8\\3\\u01d8\\3\\u01d8\\3\\u01d8\\3\\u01d9\\3\\u01d9\\3\\u01d9\")\n buf.write(\"\\3\\u01d9\\3\\u01d9\\3\\u01d9\\3\\u01da\\3\\u01da\\3\\u01da\\3\\u01da\")\n buf.write(\"\\3\\u01da\\3\\u01da\\3\\u01da\\3\\u01da\\3\\u01da\\3\\u01da\\3\\u01da\")\n buf.write(\"\\3\\u01da\\3\\u01da\\3\\u01da\\3\\u01da\\3\\u01da\\3\\u01db\\3\\u01db\")\n buf.write(\"\\3\\u01db\\3\\u01db\\3\\u01db\\3\\u01db\\3\\u01db\\3\\u01db\\3\\u01db\")\n buf.write(\"\\3\\u01db\\3\\u01db\\3\\u01dc\\3\\u01dc\\3\\u01dc\\3\\u01dc\\3\\u01dd\")\n buf.write(\"\\3\\u01dd\\3\\u01dd\\3\\u01dd\\3\\u01dd\\3\\u01dd\\3\\u01dd\\3\\u01dd\")\n 
buf.write(\"\\3\\u01dd\\3\\u01de\\3\\u01de\\3\\u01de\\3\\u01de\\3\\u01de\\3\\u01de\")\n buf.write(\"\\3\\u01df\\3\\u01df\\3\\u01df\\3\\u01df\\3\\u01df\\3\\u01df\\3\\u01df\")\n buf.write(\"\\3\\u01e0\\3\\u01e0\\3\\u01e0\\3\\u01e0\\3\\u01e0\\3\\u01e1\\3\\u01e1\")\n buf.write(\"\\3\\u01e1\\3\\u01e1\\3\\u01e1\\3\\u01e1\\3\\u01e1\\3\\u01e2\\3\\u01e2\")\n buf.write(\"\\3\\u01e2\\3\\u01e2\\3\\u01e2\\3\\u01e2\\7\\u01e2\\u1368\\n\\u01e2\")\n buf.write(\"\\f\\u01e2\\16\\u01e2\\u136b\\13\\u01e2\\3\\u01e2\\3\\u01e2\\3\\u01e3\")\n buf.write(\"\\3\\u01e3\\3\\u01e3\\7\\u01e3\\u1372\\n\\u01e3\\f\\u01e3\\16\\u01e3\")\n buf.write(\"\\u1375\\13\\u01e3\\3\\u01e3\\6\\u01e3\\u1378\\n\\u01e3\\r\\u01e3\")\n buf.write(\"\\16\\u01e3\\u1379\\3\\u01e4\\3\\u01e4\\3\\u01e4\\7\\u01e4\\u137f\")\n buf.write(\"\\n\\u01e4\\f\\u01e4\\16\\u01e4\\u1382\\13\\u01e4\\3\\u01e4\\6\\u01e4\")\n buf.write(\"\\u1385\\n\\u01e4\\r\\u01e4\\16\\u01e4\\u1386\\3\\u01e5\\3\\u01e5\")\n buf.write(\"\\3\\u01e5\\3\\u01e6\\3\\u01e6\\3\\u01e7\\3\\u01e7\\3\\u01e8\\3\\u01e8\")\n buf.write(\"\\3\\u01e8\\5\\u01e8\\u1393\\n\\u01e8\\3\\u01e8\\3\\u01e8\\5\\u01e8\")\n buf.write(\"\\u1397\\n\\u01e8\\5\\u01e8\\u1399\\n\\u01e8\\3\\u01e8\\3\\u01e8\\5\")\n buf.write(\"\\u01e8\\u139d\\n\\u01e8\\3\\u01e9\\3\\u01e9\\3\\u01e9\\3\\u01e9\\3\")\n buf.write(\"\\u01e9\\7\\u01e9\\u13a4\\n\\u01e9\\f\\u01e9\\16\\u01e9\\u13a7\\13\")\n buf.write(\"\\u01e9\\3\\u01e9\\3\\u01e9\\3\\u01ea\\3\\u01ea\\3\\u01ea\\3\\u01ea\")\n buf.write(\"\\3\\u01ea\\5\\u01ea\\u13b0\\n\\u01ea\\3\\u01ea\\3\\u01ea\\3\\u01eb\")\n buf.write(\"\\3\\u01eb\\3\\u01ec\\3\\u01ec\\3\\u01ec\\7\\u01ec\\u13b9\\n\\u01ec\")\n buf.write(\"\\f\\u01ec\\16\\u01ec\\u13bc\\13\\u01ec\\3\\u01ec\\3\\u01ec\\3\\u01ec\")\n buf.write(\"\\3\\u01ed\\3\\u01ed\\3\\u01ed\\7\\u01ed\\u13c4\\n\\u01ed\\f\\u01ed\")\n buf.write(\"\\16\\u01ed\\u13c7\\13\\u01ed\\3\\u01ed\\3\\u01ed\\3\\u01ed\\3\\u01ee\")\n buf.write(\"\\3\\u01ee\\3\\u01ee\\7\\u01ee\\u13cf\\n\\u01ee\\f\\u01ee\\16\\u01ee\")\n 
buf.write(\"\\u13d2\\13\\u01ee\\3\\u01ee\\3\\u01ee\\3\\u01ee\\3\\u01ef\\3\\u01ef\")\n buf.write(\"\\3\\u01ef\\7\\u01ef\\u13da\\n\\u01ef\\f\\u01ef\\16\\u01ef\\u13dd\")\n buf.write(\"\\13\\u01ef\\3\\u01ef\\3\\u01ef\\3\\u01ef\\3\\u01f0\\3\\u01f0\\3\\u01f1\")\n buf.write(\"\\3\\u01f1\\3\\u01f1\\3\\u01f1\\6\\u01f1\\u13e8\\n\\u01f1\\r\\u01f1\")\n buf.write(\"\\16\\u01f1\\u13e9\\3\\u01f1\\3\\u01f1\\3\\u01f2\\3\\u01f2\\3\\u01f3\")\n buf.write(\"\\3\\u01f3\\3\\u01f4\\3\\u01f4\\3\\u01f5\\3\\u01f5\\3\\u01f6\\3\\u01f6\")\n buf.write(\"\\3\\u01f6\\3\\u01f7\\3\\u01f7\\3\\u01f8\\3\\u01f8\\3\\u01f9\\3\\u01f9\")\n buf.write(\"\\3\\u01fa\\3\\u01fa\\3\\u01fb\\3\\u01fb\\3\\u01fc\\3\\u01fc\\3\\u01fd\")\n buf.write(\"\\3\\u01fd\\3\\u01fd\\3\\u01fe\\3\\u01fe\\3\\u01fe\\3\\u01fe\\7\\u01fe\")\n buf.write(\"\\u140c\\n\\u01fe\\f\\u01fe\\16\\u01fe\\u140f\\13\\u01fe\\3\\u01fe\")\n buf.write(\"\\3\\u01fe\\3\\u01fe\\3\\u01fe\\3\\u01fe\\5\\u01fe\\u1416\\n\\u01fe\")\n buf.write(\"\\3\\u01ff\\3\\u01ff\\3\\u0200\\3\\u0200\\3\\u0201\\3\\u0201\\3\\u0201\")\n buf.write(\"\\3\\u0202\\3\\u0202\\3\\u0203\\3\\u0203\\3\\u0203\\3\\u0204\\3\\u0204\")\n buf.write(\"\\3\\u0204\\3\\u0204\\3\\u0204\\3\\u0204\\3\\u0204\\3\\u0204\\5\\u0204\")\n buf.write(\"\\u142c\\n\\u0204\\3\\u0205\\3\\u0205\\3\\u0206\\3\\u0206\\3\\u0207\")\n buf.write(\"\\3\\u0207\\3\\u0208\\3\\u0208\\3\\u0209\\3\\u0209\\3\\u020a\\3\\u020a\")\n buf.write(\"\\3\\u020a\\3\\u020b\\3\\u020b\\3\\u020c\\3\\u020c\\3\\u020d\\3\\u020d\")\n buf.write(\"\\3\\u020e\\3\\u020e\\3\\u020f\\3\\u020f\\3\\u0210\\6\\u0210\\u1446\")\n buf.write(\"\\n\\u0210\\r\\u0210\\16\\u0210\\u1447\\3\\u0210\\3\\u0210\\3\\u0211\")\n buf.write(\"\\3\\u0211\\3\\u0212\\6\\u0212\\u144f\\n\\u0212\\r\\u0212\\16\\u0212\")\n buf.write(\"\\u1450\\3\\u0213\\7\\u0213\\u1454\\n\\u0213\\f\\u0213\\16\\u0213\")\n buf.write(\"\\u1457\\13\\u0213\\3\\u0213\\5\\u0213\\u145a\\n\\u0213\\3\\u0213\")\n buf.write(\"\\6\\u0213\\u145d\\n\\u0213\\r\\u0213\\16\\u0213\\u145e\\3\\u0214\")\n 
buf.write(\"\\3\\u0214\\3\\u0214\\3\\u0214\\7\\u0214\\u1465\\n\\u0214\\f\\u0214\")\n buf.write(\"\\16\\u0214\\u1468\\13\\u0214\\3\\u0214\\3\\u0214\\5\\u0214\\u146c\")\n buf.write(\"\\n\\u0214\\3\\u0214\\3\\u0214\\3\\u0215\\3\\u0215\\3\\u0215\\3\\u0215\")\n buf.write(\"\\7\\u0215\\u1474\\n\\u0215\\f\\u0215\\16\\u0215\\u1477\\13\\u0215\")\n buf.write(\"\\3\\u0215\\3\\u0215\\3\\u0215\\3\\u0215\\3\\u0215\\3\\u0216\\3\\u0216\")\n buf.write(\"\\3\\u0216\\3\\u0216\\3\\u0216\\3\\u0216\\3\\u0216\\3\\u0216\\3\\u0216\")\n buf.write(\"\\7\\u0216\\u1487\\n\\u0216\\f\\u0216\\16\\u0216\\u148a\\13\\u0216\")\n buf.write(\"\\3\\u0216\\3\\u0216\\5\\u0216\\u148e\\n\\u0216\\3\\u0217\\5\\u0217\")\n buf.write(\"\\u1491\\n\\u0217\\3\\u0217\\3\\u0217\\3\\u0218\\3\\u0218\\3\\u0219\")\n buf.write(\"\\3\\u0219\\3\\u0219\\7\\u0219\\u149a\\n\\u0219\\f\\u0219\\16\\u0219\")\n buf.write(\"\\u149d\\13\\u0219\\3\\u021a\\3\\u021a\\3\\u021a\\3\\u021a\\3\\u021a\")\n buf.write(\"\\3\\u021b\\3\\u021b\\3\\u021c\\3\\u021c\\3\\u021d\\3\\u021d\\3\\u021e\")\n buf.write(\"\\3\\u021e\\3\\u021f\\3\\u021f\\3\\u0220\\3\\u0220\\3\\u0221\\3\\u0221\")\n buf.write(\"\\3\\u0222\\3\\u0222\\3\\u0223\\3\\u0223\\3\\u0224\\3\\u0224\\3\\u0225\")\n buf.write(\"\\3\\u0225\\3\\u0226\\3\\u0226\\3\\u0227\\3\\u0227\\3\\u0228\\3\\u0228\")\n buf.write(\"\\3\\u0229\\3\\u0229\\3\\u022a\\3\\u022a\\3\\u022b\\3\\u022b\\3\\u022c\")\n buf.write(\"\\3\\u022c\\3\\u022d\\3\\u022d\\3\\u022e\\3\\u022e\\3\\u022f\\3\\u022f\")\n buf.write(\"\\3\\u0230\\3\\u0230\\3\\u0231\\3\\u0231\\3\\u0232\\3\\u0232\\3\\u0233\")\n buf.write(\"\\3\\u0233\\3\\u0234\\3\\u0234\\7\\u13ba\\u13c5\\u13d0\\u13db\\u1475\")\n buf.write(\"\\2\\u0235\\3\\3\\5\\4\\7\\5\\t\\6\\13\\7\\r\\b\\17\\t\\21\\n\\23\\13\\25\\f\")\n buf.write(\"\\27\\r\\31\\16\\33\\17\\35\\20\\37\\21!\\22#\\23%\\24\\'\\25)\\26+\\27\")\n buf.write(\"-\\30/\\31\\61\\32\\63\\33\\65\\34\\67\\359\\36;\\37= ?!A\\\"C#E$G%\")\n buf.write(\"I&K\\'M(O)Q*S+U,W-Y.[/]\\60_\\61a\\62c\\63e\\64g\\65i\\66k\\67\")\n 
buf.write(\"m8o9q:s;u<w=y>{?}@\\177A\\u0081B\\u0083C\\u0085D\\u0087E\\u0089\")\n buf.write(\"F\\u008bG\\u008dH\\u008fI\\u0091J\\u0093K\\u0095L\\u0097M\\u0099\")\n buf.write(\"N\\u009bO\\u009dP\\u009fQ\\u00a1R\\u00a3S\\u00a5T\\u00a7U\\u00a9\")\n buf.write(\"V\\u00abW\\u00adX\\u00afY\\u00b1Z\\u00b3[\\u00b5\\\\\\u00b7]\\u00b9\")\n buf.write(\"^\\u00bb_\\u00bd`\\u00bfa\\u00c1b\\u00c3c\\u00c5d\\u00c7e\\u00c9\")\n buf.write(\"f\\u00cbg\\u00cdh\\u00cfi\\u00d1j\\u00d3k\\u00d5l\\u00d7m\\u00d9\")\n buf.write(\"n\\u00dbo\\u00ddp\\u00dfq\\u00e1r\\u00e3s\\u00e5t\\u00e7u\\u00e9\")\n buf.write(\"v\\u00ebw\\u00edx\\u00efy\\u00f1z\\u00f3{\\u00f5|\\u00f7}\\u00f9\")\n buf.write(\"~\\u00fb\\177\\u00fd\\u0080\\u00ff\\u0081\\u0101\\u0082\\u0103\")\n buf.write(\"\\u0083\\u0105\\u0084\\u0107\\u0085\\u0109\\u0086\\u010b\\u0087\")\n buf.write(\"\\u010d\\u0088\\u010f\\u0089\\u0111\\u008a\\u0113\\u008b\\u0115\")\n buf.write(\"\\u008c\\u0117\\u008d\\u0119\\u008e\\u011b\\u008f\\u011d\\u0090\")\n buf.write(\"\\u011f\\u0091\\u0121\\u0092\\u0123\\u0093\\u0125\\u0094\\u0127\")\n buf.write(\"\\u0095\\u0129\\u0096\\u012b\\u0097\\u012d\\u0098\\u012f\\u0099\")\n buf.write(\"\\u0131\\u009a\\u0133\\u009b\\u0135\\u009c\\u0137\\u009d\\u0139\")\n buf.write(\"\\u009e\\u013b\\u009f\\u013d\\u00a0\\u013f\\u00a1\\u0141\\u00a2\")\n buf.write(\"\\u0143\\u00a3\\u0145\\u00a4\\u0147\\u00a5\\u0149\\u00a6\\u014b\")\n buf.write(\"\\u00a7\\u014d\\u00a8\\u014f\\u00a9\\u0151\\u00aa\\u0153\\u00ab\")\n buf.write(\"\\u0155\\u00ac\\u0157\\u00ad\\u0159\\u00ae\\u015b\\u00af\\u015d\")\n buf.write(\"\\u00b0\\u015f\\u00b1\\u0161\\u00b2\\u0163\\u00b3\\u0165\\u00b4\")\n buf.write(\"\\u0167\\u00b5\\u0169\\u00b6\\u016b\\u00b7\\u016d\\u00b8\\u016f\")\n buf.write(\"\\u00b9\\u0171\\u00ba\\u0173\\u00bb\\u0175\\u00bc\\u0177\\u00bd\")\n buf.write(\"\\u0179\\u00be\\u017b\\u00bf\\u017d\\u00c0\\u017f\\u00c1\\u0181\")\n buf.write(\"\\u00c2\\u0183\\u00c3\\u0185\\u00c4\\u0187\\u00c5\\u0189\\u00c6\")\n 
buf.write(\"\\u018b\\u00c7\\u018d\\u00c8\\u018f\\u00c9\\u0191\\u00ca\\u0193\")\n buf.write(\"\\u00cb\\u0195\\u00cc\\u0197\\u00cd\\u0199\\u00ce\\u019b\\u00cf\")\n buf.write(\"\\u019d\\u00d0\\u019f\\u00d1\\u01a1\\u00d2\\u01a3\\u00d3\\u01a5\")\n buf.write(\"\\u00d4\\u01a7\\u00d5\\u01a9\\u00d6\\u01ab\\u00d7\\u01ad\\u00d8\")\n buf.write(\"\\u01af\\u00d9\\u01b1\\u00da\\u01b3\\u00db\\u01b5\\u00dc\\u01b7\")\n buf.write(\"\\u00dd\\u01b9\\u00de\\u01bb\\u00df\\u01bd\\u00e0\\u01bf\\u00e1\")\n buf.write(\"\\u01c1\\u00e2\\u01c3\\u00e3\\u01c5\\u00e4\\u01c7\\u00e5\\u01c9\")\n buf.write(\"\\u00e6\\u01cb\\u00e7\\u01cd\\u00e8\\u01cf\\u00e9\\u01d1\\u00ea\")\n buf.write(\"\\u01d3\\u00eb\\u01d5\\u00ec\\u01d7\\u00ed\\u01d9\\u00ee\\u01db\")\n buf.write(\"\\u00ef\\u01dd\\u00f0\\u01df\\u00f1\\u01e1\\u00f2\\u01e3\\u00f3\")\n buf.write(\"\\u01e5\\u00f4\\u01e7\\u00f5\\u01e9\\u00f6\\u01eb\\u00f7\\u01ed\")\n buf.write(\"\\u00f8\\u01ef\\u00f9\\u01f1\\u00fa\\u01f3\\u00fb\\u01f5\\u00fc\")\n buf.write(\"\\u01f7\\u00fd\\u01f9\\u00fe\\u01fb\\u00ff\\u01fd\\u0100\\u01ff\")\n buf.write(\"\\u0101\\u0201\\u0102\\u0203\\u0103\\u0205\\u0104\\u0207\\u0105\")\n buf.write(\"\\u0209\\u0106\\u020b\\u0107\\u020d\\u0108\\u020f\\u0109\\u0211\")\n buf.write(\"\\u010a\\u0213\\u010b\\u0215\\u010c\\u0217\\u010d\\u0219\\u010e\")\n buf.write(\"\\u021b\\u010f\\u021d\\u0110\\u021f\\u0111\\u0221\\u0112\\u0223\")\n buf.write(\"\\u0113\\u0225\\u0114\\u0227\\u0115\\u0229\\u0116\\u022b\\u0117\")\n buf.write(\"\\u022d\\u0118\\u022f\\u0119\\u0231\\u011a\\u0233\\u011b\\u0235\")\n buf.write(\"\\u011c\\u0237\\u011d\\u0239\\u011e\\u023b\\u011f\\u023d\\u0120\")\n buf.write(\"\\u023f\\u0121\\u0241\\u0122\\u0243\\u0123\\u0245\\u0124\\u0247\")\n buf.write(\"\\u0125\\u0249\\u0126\\u024b\\u0127\\u024d\\u0128\\u024f\\u0129\")\n buf.write(\"\\u0251\\u012a\\u0253\\u012b\\u0255\\u012c\\u0257\\u012d\\u0259\")\n buf.write(\"\\u012e\\u025b\\u012f\\u025d\\u0130\\u025f\\u0131\\u0261\\u0132\")\n 
buf.write(\"\\u0263\\u0133\\u0265\\u0134\\u0267\\u0135\\u0269\\u0136\\u026b\")\n buf.write(\"\\u0137\\u026d\\u0138\\u026f\\u0139\\u0271\\u013a\\u0273\\u013b\")\n buf.write(\"\\u0275\\u013c\\u0277\\u013d\\u0279\\u013e\\u027b\\u013f\\u027d\")\n buf.write(\"\\u0140\\u027f\\u0141\\u0281\\u0142\\u0283\\u0143\\u0285\\u0144\")\n buf.write(\"\\u0287\\u0145\\u0289\\u0146\\u028b\\u0147\\u028d\\u0148\\u028f\")\n buf.write(\"\\u0149\\u0291\\u014a\\u0293\\u014b\\u0295\\u014c\\u0297\\u014d\")\n buf.write(\"\\u0299\\u014e\\u029b\\u014f\\u029d\\u0150\\u029f\\u0151\\u02a1\")\n buf.write(\"\\u0152\\u02a3\\u0153\\u02a5\\u0154\\u02a7\\u0155\\u02a9\\u0156\")\n buf.write(\"\\u02ab\\u0157\\u02ad\\u0158\\u02af\\u0159\\u02b1\\u015a\\u02b3\")\n buf.write(\"\\u015b\\u02b5\\u015c\\u02b7\\u015d\\u02b9\\u015e\\u02bb\\u015f\")\n buf.write(\"\\u02bd\\u0160\\u02bf\\u0161\\u02c1\\u0162\\u02c3\\u0163\\u02c5\")\n buf.write(\"\\u0164\\u02c7\\u0165\\u02c9\\u0166\\u02cb\\u0167\\u02cd\\u0168\")\n buf.write(\"\\u02cf\\u0169\\u02d1\\u016a\\u02d3\\u016b\\u02d5\\u016c\\u02d7\")\n buf.write(\"\\u016d\\u02d9\\u016e\\u02db\\u016f\\u02dd\\u0170\\u02df\\u0171\")\n buf.write(\"\\u02e1\\u0172\\u02e3\\u0173\\u02e5\\u0174\\u02e7\\u0175\\u02e9\")\n buf.write(\"\\u0176\\u02eb\\u0177\\u02ed\\u0178\\u02ef\\u0179\\u02f1\\u017a\")\n buf.write(\"\\u02f3\\u017b\\u02f5\\u017c\\u02f7\\u017d\\u02f9\\u017e\\u02fb\")\n buf.write(\"\\u017f\\u02fd\\u0180\\u02ff\\u0181\\u0301\\u0182\\u0303\\u0183\")\n buf.write(\"\\u0305\\u0184\\u0307\\u0185\\u0309\\u0186\\u030b\\u0187\\u030d\")\n buf.write(\"\\u0188\\u030f\\u0189\\u0311\\u018a\\u0313\\u018b\\u0315\\u018c\")\n buf.write(\"\\u0317\\u018d\\u0319\\u018e\\u031b\\u018f\\u031d\\u0190\\u031f\")\n buf.write(\"\\u0191\\u0321\\u0192\\u0323\\u0193\\u0325\\u0194\\u0327\\u0195\")\n buf.write(\"\\u0329\\u0196\\u032b\\u0197\\u032d\\u0198\\u032f\\u0199\\u0331\")\n buf.write(\"\\u019a\\u0333\\u019b\\u0335\\u019c\\u0337\\u019d\\u0339\\u019e\")\n 
buf.write(\"\\u033b\\u019f\\u033d\\u01a0\\u033f\\u01a1\\u0341\\u01a2\\u0343\")\n buf.write(\"\\u01a3\\u0345\\u01a4\\u0347\\u01a5\\u0349\\u01a6\\u034b\\u01a7\")\n buf.write(\"\\u034d\\u01a8\\u034f\\u01a9\\u0351\\u01aa\\u0353\\u01ab\\u0355\")\n buf.write(\"\\u01ac\\u0357\\u01ad\\u0359\\u01ae\\u035b\\u01af\\u035d\\u01b0\")\n buf.write(\"\\u035f\\u01b1\\u0361\\u01b2\\u0363\\u01b3\\u0365\\u01b4\\u0367\")\n buf.write(\"\\u01b5\\u0369\\u01b6\\u036b\\u01b7\\u036d\\u01b8\\u036f\\u01b9\")\n buf.write(\"\\u0371\\u01ba\\u0373\\u01bb\\u0375\\u01bc\\u0377\\u01bd\\u0379\")\n buf.write(\"\\u01be\\u037b\\u01bf\\u037d\\u01c0\\u037f\\u01c1\\u0381\\u01c2\")\n buf.write(\"\\u0383\\u01c3\\u0385\\u01c4\\u0387\\u01c5\\u0389\\u01c6\\u038b\")\n buf.write(\"\\u01c7\\u038d\\u01c8\\u038f\\u01c9\\u0391\\u01ca\\u0393\\u01cb\")\n buf.write(\"\\u0395\\u01cc\\u0397\\u01cd\\u0399\\u01ce\\u039b\\u01cf\\u039d\")\n buf.write(\"\\u01d0\\u039f\\u01d1\\u03a1\\u01d2\\u03a3\\u01d3\\u03a5\\u01d4\")\n buf.write(\"\\u03a7\\u01d5\\u03a9\\u01d6\\u03ab\\u01d7\\u03ad\\u01d8\\u03af\")\n buf.write(\"\\u01d9\\u03b1\\u01da\\u03b3\\u01db\\u03b5\\u01dc\\u03b7\\u01dd\")\n buf.write(\"\\u03b9\\u01de\\u03bb\\u01df\\u03bd\\u01e0\\u03bf\\u01e1\\u03c1\")\n buf.write(\"\\u01e2\\u03c3\\u01e3\\u03c5\\u01e4\\u03c7\\u01e5\\u03c9\\u01e6\")\n buf.write(\"\\u03cb\\u01e7\\u03cd\\u01e8\\u03cf\\u01e9\\u03d1\\u01ea\\u03d3\")\n buf.write(\"\\2\\u03d5\\2\\u03d7\\2\\u03d9\\2\\u03db\\2\\u03dd\\2\\u03df\\2\\u03e1\")\n buf.write(\"\\u01eb\\u03e3\\u01ec\\u03e5\\u01ed\\u03e7\\u01ee\\u03e9\\u01ef\")\n buf.write(\"\\u03eb\\u01f0\\u03ed\\u01f1\\u03ef\\u01f2\\u03f1\\u01f3\\u03f3\")\n buf.write(\"\\u01f4\\u03f5\\u01f5\\u03f7\\u01f6\\u03f9\\u01f7\\u03fb\\u01f8\")\n buf.write(\"\\u03fd\\u01f9\\u03ff\\u01fa\\u0401\\u01fb\\u0403\\u01fc\\u0405\")\n buf.write(\"\\u01fd\\u0407\\u01fe\\u0409\\u01ff\\u040b\\u0200\\u040d\\u0201\")\n buf.write(\"\\u040f\\u0202\\u0411\\2\\u0413\\u0203\\u0415\\u0204\\u0417\\u0205\")\n 
buf.write(\"\\u0419\\u0206\\u041b\\u0207\\u041d\\u0208\\u041f\\u0209\\u0421\")\n buf.write(\"\\2\\u0423\\2\\u0425\\2\\u0427\\u020a\\u0429\\u020b\\u042b\\u020c\")\n buf.write(\"\\u042d\\2\\u042f\\2\\u0431\\u020d\\u0433\\u020e\\u0435\\2\\u0437\")\n buf.write(\"\\2\\u0439\\2\\u043b\\2\\u043d\\2\\u043f\\2\\u0441\\2\\u0443\\2\\u0445\")\n buf.write(\"\\2\\u0447\\2\\u0449\\2\\u044b\\2\\u044d\\2\\u044f\\2\\u0451\\2\\u0453\")\n buf.write(\"\\2\\u0455\\2\\u0457\\2\\u0459\\2\\u045b\\2\\u045d\\2\\u045f\\2\\u0461\")\n buf.write(\"\\2\\u0463\\2\\u0465\\2\\u0467\\2\\3\\2\\'\\5\\2\\f\\f\\17\\17))\\5\\2\\62\")\n buf.write(\";CHch\\4\\2GGgg\\4\\2--//\\t\\2\\13\\f\\17\\17\\\"\\\"**>>]]}}\\5\\2\\f\")\n buf.write(\"\\f\\17\\17$$\\4\\2\\62;aa\\5\\2\\13\\f\\17\\17\\\"\\\"\\4\\2C\\\\c|\\4\\2\\f\")\n buf.write(\"\\f\\17\\17\\4\\2\\13\\13\\\"\\\"\\5\\2%&\\62;aa\\4\\2CCcc\\4\\2DDdd\\4\\2\")\n buf.write(\"EEee\\4\\2FFff\\4\\2HHhh\\4\\2IIii\\4\\2JJjj\\4\\2KKkk\\4\\2LLll\\4\")\n buf.write(\"\\2MMmm\\4\\2NNnn\\4\\2OOoo\\4\\2PPpp\\4\\2QQqq\\4\\2RRrr\\4\\2SSs\")\n buf.write(\"s\\4\\2TTtt\\4\\2UUuu\\4\\2VVvv\\4\\2WWww\\4\\2XXxx\\4\\2YYyy\\4\\2\")\n buf.write(\"ZZzz\\4\\2[[{{\\4\\2\\\\\\\\||\\2\\u14dd\\2\\3\\3\\2\\2\\2\\2\\5\\3\\2\\2\\2\")\n buf.write(\"\\2\\7\\3\\2\\2\\2\\2\\t\\3\\2\\2\\2\\2\\13\\3\\2\\2\\2\\2\\r\\3\\2\\2\\2\\2\\17\")\n buf.write(\"\\3\\2\\2\\2\\2\\21\\3\\2\\2\\2\\2\\23\\3\\2\\2\\2\\2\\25\\3\\2\\2\\2\\2\\27\\3\")\n buf.write(\"\\2\\2\\2\\2\\31\\3\\2\\2\\2\\2\\33\\3\\2\\2\\2\\2\\35\\3\\2\\2\\2\\2\\37\\3\\2\")\n buf.write(\"\\2\\2\\2!\\3\\2\\2\\2\\2#\\3\\2\\2\\2\\2%\\3\\2\\2\\2\\2\\'\\3\\2\\2\\2\\2)\\3\")\n buf.write(\"\\2\\2\\2\\2+\\3\\2\\2\\2\\2-\\3\\2\\2\\2\\2/\\3\\2\\2\\2\\2\\61\\3\\2\\2\\2\\2\")\n buf.write(\"\\63\\3\\2\\2\\2\\2\\65\\3\\2\\2\\2\\2\\67\\3\\2\\2\\2\\29\\3\\2\\2\\2\\2;\\3\")\n buf.write(\"\\2\\2\\2\\2=\\3\\2\\2\\2\\2?\\3\\2\\2\\2\\2A\\3\\2\\2\\2\\2C\\3\\2\\2\\2\\2E\")\n buf.write(\"\\3\\2\\2\\2\\2G\\3\\2\\2\\2\\2I\\3\\2\\2\\2\\2K\\3\\2\\2\\2\\2M\\3\\2\\2\\2\\2\")\n 
buf.write(\"O\\3\\2\\2\\2\\2Q\\3\\2\\2\\2\\2S\\3\\2\\2\\2\\2U\\3\\2\\2\\2\\2W\\3\\2\\2\\2\")\n buf.write(\"\\2Y\\3\\2\\2\\2\\2[\\3\\2\\2\\2\\2]\\3\\2\\2\\2\\2_\\3\\2\\2\\2\\2a\\3\\2\\2\")\n buf.write(\"\\2\\2c\\3\\2\\2\\2\\2e\\3\\2\\2\\2\\2g\\3\\2\\2\\2\\2i\\3\\2\\2\\2\\2k\\3\\2\")\n buf.write(\"\\2\\2\\2m\\3\\2\\2\\2\\2o\\3\\2\\2\\2\\2q\\3\\2\\2\\2\\2s\\3\\2\\2\\2\\2u\\3\")\n buf.write(\"\\2\\2\\2\\2w\\3\\2\\2\\2\\2y\\3\\2\\2\\2\\2{\\3\\2\\2\\2\\2}\\3\\2\\2\\2\\2\\177\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0081\\3\\2\\2\\2\\2\\u0083\\3\\2\\2\\2\\2\\u0085\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0087\\3\\2\\2\\2\\2\\u0089\\3\\2\\2\\2\\2\\u008b\\3\\2\\2\\2\\2\\u008d\")\n buf.write(\"\\3\\2\\2\\2\\2\\u008f\\3\\2\\2\\2\\2\\u0091\\3\\2\\2\\2\\2\\u0093\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0095\\3\\2\\2\\2\\2\\u0097\\3\\2\\2\\2\\2\\u0099\\3\\2\\2\\2\\2\\u009b\")\n buf.write(\"\\3\\2\\2\\2\\2\\u009d\\3\\2\\2\\2\\2\\u009f\\3\\2\\2\\2\\2\\u00a1\\3\\2\\2\")\n buf.write(\"\\2\\2\\u00a3\\3\\2\\2\\2\\2\\u00a5\\3\\2\\2\\2\\2\\u00a7\\3\\2\\2\\2\\2\\u00a9\")\n buf.write(\"\\3\\2\\2\\2\\2\\u00ab\\3\\2\\2\\2\\2\\u00ad\\3\\2\\2\\2\\2\\u00af\\3\\2\\2\")\n buf.write(\"\\2\\2\\u00b1\\3\\2\\2\\2\\2\\u00b3\\3\\2\\2\\2\\2\\u00b5\\3\\2\\2\\2\\2\\u00b7\")\n buf.write(\"\\3\\2\\2\\2\\2\\u00b9\\3\\2\\2\\2\\2\\u00bb\\3\\2\\2\\2\\2\\u00bd\\3\\2\\2\")\n buf.write(\"\\2\\2\\u00bf\\3\\2\\2\\2\\2\\u00c1\\3\\2\\2\\2\\2\\u00c3\\3\\2\\2\\2\\2\\u00c5\")\n buf.write(\"\\3\\2\\2\\2\\2\\u00c7\\3\\2\\2\\2\\2\\u00c9\\3\\2\\2\\2\\2\\u00cb\\3\\2\\2\")\n buf.write(\"\\2\\2\\u00cd\\3\\2\\2\\2\\2\\u00cf\\3\\2\\2\\2\\2\\u00d1\\3\\2\\2\\2\\2\\u00d3\")\n buf.write(\"\\3\\2\\2\\2\\2\\u00d5\\3\\2\\2\\2\\2\\u00d7\\3\\2\\2\\2\\2\\u00d9\\3\\2\\2\")\n buf.write(\"\\2\\2\\u00db\\3\\2\\2\\2\\2\\u00dd\\3\\2\\2\\2\\2\\u00df\\3\\2\\2\\2\\2\\u00e1\")\n buf.write(\"\\3\\2\\2\\2\\2\\u00e3\\3\\2\\2\\2\\2\\u00e5\\3\\2\\2\\2\\2\\u00e7\\3\\2\\2\")\n buf.write(\"\\2\\2\\u00e9\\3\\2\\2\\2\\2\\u00eb\\3\\2\\2\\2\\2\\u00ed\\3\\2\\2\\2\\2\\u00ef\")\n 
buf.write(\"\\3\\2\\2\\2\\2\\u00f1\\3\\2\\2\\2\\2\\u00f3\\3\\2\\2\\2\\2\\u00f5\\3\\2\\2\")\n buf.write(\"\\2\\2\\u00f7\\3\\2\\2\\2\\2\\u00f9\\3\\2\\2\\2\\2\\u00fb\\3\\2\\2\\2\\2\\u00fd\")\n buf.write(\"\\3\\2\\2\\2\\2\\u00ff\\3\\2\\2\\2\\2\\u0101\\3\\2\\2\\2\\2\\u0103\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0105\\3\\2\\2\\2\\2\\u0107\\3\\2\\2\\2\\2\\u0109\\3\\2\\2\\2\\2\\u010b\")\n buf.write(\"\\3\\2\\2\\2\\2\\u010d\\3\\2\\2\\2\\2\\u010f\\3\\2\\2\\2\\2\\u0111\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0113\\3\\2\\2\\2\\2\\u0115\\3\\2\\2\\2\\2\\u0117\\3\\2\\2\\2\\2\\u0119\")\n buf.write(\"\\3\\2\\2\\2\\2\\u011b\\3\\2\\2\\2\\2\\u011d\\3\\2\\2\\2\\2\\u011f\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0121\\3\\2\\2\\2\\2\\u0123\\3\\2\\2\\2\\2\\u0125\\3\\2\\2\\2\\2\\u0127\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0129\\3\\2\\2\\2\\2\\u012b\\3\\2\\2\\2\\2\\u012d\\3\\2\\2\")\n buf.write(\"\\2\\2\\u012f\\3\\2\\2\\2\\2\\u0131\\3\\2\\2\\2\\2\\u0133\\3\\2\\2\\2\\2\\u0135\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0137\\3\\2\\2\\2\\2\\u0139\\3\\2\\2\\2\\2\\u013b\\3\\2\\2\")\n buf.write(\"\\2\\2\\u013d\\3\\2\\2\\2\\2\\u013f\\3\\2\\2\\2\\2\\u0141\\3\\2\\2\\2\\2\\u0143\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0145\\3\\2\\2\\2\\2\\u0147\\3\\2\\2\\2\\2\\u0149\\3\\2\\2\")\n buf.write(\"\\2\\2\\u014b\\3\\2\\2\\2\\2\\u014d\\3\\2\\2\\2\\2\\u014f\\3\\2\\2\\2\\2\\u0151\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0153\\3\\2\\2\\2\\2\\u0155\\3\\2\\2\\2\\2\\u0157\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0159\\3\\2\\2\\2\\2\\u015b\\3\\2\\2\\2\\2\\u015d\\3\\2\\2\\2\\2\\u015f\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0161\\3\\2\\2\\2\\2\\u0163\\3\\2\\2\\2\\2\\u0165\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0167\\3\\2\\2\\2\\2\\u0169\\3\\2\\2\\2\\2\\u016b\\3\\2\\2\\2\\2\\u016d\")\n buf.write(\"\\3\\2\\2\\2\\2\\u016f\\3\\2\\2\\2\\2\\u0171\\3\\2\\2\\2\\2\\u0173\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0175\\3\\2\\2\\2\\2\\u0177\\3\\2\\2\\2\\2\\u0179\\3\\2\\2\\2\\2\\u017b\")\n buf.write(\"\\3\\2\\2\\2\\2\\u017d\\3\\2\\2\\2\\2\\u017f\\3\\2\\2\\2\\2\\u0181\\3\\2\\2\")\n 
buf.write(\"\\2\\2\\u0183\\3\\2\\2\\2\\2\\u0185\\3\\2\\2\\2\\2\\u0187\\3\\2\\2\\2\\2\\u0189\")\n buf.write(\"\\3\\2\\2\\2\\2\\u018b\\3\\2\\2\\2\\2\\u018d\\3\\2\\2\\2\\2\\u018f\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0191\\3\\2\\2\\2\\2\\u0193\\3\\2\\2\\2\\2\\u0195\\3\\2\\2\\2\\2\\u0197\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0199\\3\\2\\2\\2\\2\\u019b\\3\\2\\2\\2\\2\\u019d\\3\\2\\2\")\n buf.write(\"\\2\\2\\u019f\\3\\2\\2\\2\\2\\u01a1\\3\\2\\2\\2\\2\\u01a3\\3\\2\\2\\2\\2\\u01a5\")\n buf.write(\"\\3\\2\\2\\2\\2\\u01a7\\3\\2\\2\\2\\2\\u01a9\\3\\2\\2\\2\\2\\u01ab\\3\\2\\2\")\n buf.write(\"\\2\\2\\u01ad\\3\\2\\2\\2\\2\\u01af\\3\\2\\2\\2\\2\\u01b1\\3\\2\\2\\2\\2\\u01b3\")\n buf.write(\"\\3\\2\\2\\2\\2\\u01b5\\3\\2\\2\\2\\2\\u01b7\\3\\2\\2\\2\\2\\u01b9\\3\\2\\2\")\n buf.write(\"\\2\\2\\u01bb\\3\\2\\2\\2\\2\\u01bd\\3\\2\\2\\2\\2\\u01bf\\3\\2\\2\\2\\2\\u01c1\")\n buf.write(\"\\3\\2\\2\\2\\2\\u01c3\\3\\2\\2\\2\\2\\u01c5\\3\\2\\2\\2\\2\\u01c7\\3\\2\\2\")\n buf.write(\"\\2\\2\\u01c9\\3\\2\\2\\2\\2\\u01cb\\3\\2\\2\\2\\2\\u01cd\\3\\2\\2\\2\\2\\u01cf\")\n buf.write(\"\\3\\2\\2\\2\\2\\u01d1\\3\\2\\2\\2\\2\\u01d3\\3\\2\\2\\2\\2\\u01d5\\3\\2\\2\")\n buf.write(\"\\2\\2\\u01d7\\3\\2\\2\\2\\2\\u01d9\\3\\2\\2\\2\\2\\u01db\\3\\2\\2\\2\\2\\u01dd\")\n buf.write(\"\\3\\2\\2\\2\\2\\u01df\\3\\2\\2\\2\\2\\u01e1\\3\\2\\2\\2\\2\\u01e3\\3\\2\\2\")\n buf.write(\"\\2\\2\\u01e5\\3\\2\\2\\2\\2\\u01e7\\3\\2\\2\\2\\2\\u01e9\\3\\2\\2\\2\\2\\u01eb\")\n buf.write(\"\\3\\2\\2\\2\\2\\u01ed\\3\\2\\2\\2\\2\\u01ef\\3\\2\\2\\2\\2\\u01f1\\3\\2\\2\")\n buf.write(\"\\2\\2\\u01f3\\3\\2\\2\\2\\2\\u01f5\\3\\2\\2\\2\\2\\u01f7\\3\\2\\2\\2\\2\\u01f9\")\n buf.write(\"\\3\\2\\2\\2\\2\\u01fb\\3\\2\\2\\2\\2\\u01fd\\3\\2\\2\\2\\2\\u01ff\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0201\\3\\2\\2\\2\\2\\u0203\\3\\2\\2\\2\\2\\u0205\\3\\2\\2\\2\\2\\u0207\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0209\\3\\2\\2\\2\\2\\u020b\\3\\2\\2\\2\\2\\u020d\\3\\2\\2\")\n buf.write(\"\\2\\2\\u020f\\3\\2\\2\\2\\2\\u0211\\3\\2\\2\\2\\2\\u0213\\3\\2\\2\\2\\2\\u0215\")\n 
buf.write(\"\\3\\2\\2\\2\\2\\u0217\\3\\2\\2\\2\\2\\u0219\\3\\2\\2\\2\\2\\u021b\\3\\2\\2\")\n buf.write(\"\\2\\2\\u021d\\3\\2\\2\\2\\2\\u021f\\3\\2\\2\\2\\2\\u0221\\3\\2\\2\\2\\2\\u0223\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0225\\3\\2\\2\\2\\2\\u0227\\3\\2\\2\\2\\2\\u0229\\3\\2\\2\")\n buf.write(\"\\2\\2\\u022b\\3\\2\\2\\2\\2\\u022d\\3\\2\\2\\2\\2\\u022f\\3\\2\\2\\2\\2\\u0231\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0233\\3\\2\\2\\2\\2\\u0235\\3\\2\\2\\2\\2\\u0237\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0239\\3\\2\\2\\2\\2\\u023b\\3\\2\\2\\2\\2\\u023d\\3\\2\\2\\2\\2\\u023f\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0241\\3\\2\\2\\2\\2\\u0243\\3\\2\\2\\2\\2\\u0245\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0247\\3\\2\\2\\2\\2\\u0249\\3\\2\\2\\2\\2\\u024b\\3\\2\\2\\2\\2\\u024d\")\n buf.write(\"\\3\\2\\2\\2\\2\\u024f\\3\\2\\2\\2\\2\\u0251\\3\\2\\2\\2\\2\\u0253\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0255\\3\\2\\2\\2\\2\\u0257\\3\\2\\2\\2\\2\\u0259\\3\\2\\2\\2\\2\\u025b\")\n buf.write(\"\\3\\2\\2\\2\\2\\u025d\\3\\2\\2\\2\\2\\u025f\\3\\2\\2\\2\\2\\u0261\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0263\\3\\2\\2\\2\\2\\u0265\\3\\2\\2\\2\\2\\u0267\\3\\2\\2\\2\\2\\u0269\")\n buf.write(\"\\3\\2\\2\\2\\2\\u026b\\3\\2\\2\\2\\2\\u026d\\3\\2\\2\\2\\2\\u026f\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0271\\3\\2\\2\\2\\2\\u0273\\3\\2\\2\\2\\2\\u0275\\3\\2\\2\\2\\2\\u0277\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0279\\3\\2\\2\\2\\2\\u027b\\3\\2\\2\\2\\2\\u027d\\3\\2\\2\")\n buf.write(\"\\2\\2\\u027f\\3\\2\\2\\2\\2\\u0281\\3\\2\\2\\2\\2\\u0283\\3\\2\\2\\2\\2\\u0285\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0287\\3\\2\\2\\2\\2\\u0289\\3\\2\\2\\2\\2\\u028b\\3\\2\\2\")\n buf.write(\"\\2\\2\\u028d\\3\\2\\2\\2\\2\\u028f\\3\\2\\2\\2\\2\\u0291\\3\\2\\2\\2\\2\\u0293\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0295\\3\\2\\2\\2\\2\\u0297\\3\\2\\2\\2\\2\\u0299\\3\\2\\2\")\n buf.write(\"\\2\\2\\u029b\\3\\2\\2\\2\\2\\u029d\\3\\2\\2\\2\\2\\u029f\\3\\2\\2\\2\\2\\u02a1\")\n buf.write(\"\\3\\2\\2\\2\\2\\u02a3\\3\\2\\2\\2\\2\\u02a5\\3\\2\\2\\2\\2\\u02a7\\3\\2\\2\")\n 
buf.write(\"\\2\\2\\u02a9\\3\\2\\2\\2\\2\\u02ab\\3\\2\\2\\2\\2\\u02ad\\3\\2\\2\\2\\2\\u02af\")\n buf.write(\"\\3\\2\\2\\2\\2\\u02b1\\3\\2\\2\\2\\2\\u02b3\\3\\2\\2\\2\\2\\u02b5\\3\\2\\2\")\n buf.write(\"\\2\\2\\u02b7\\3\\2\\2\\2\\2\\u02b9\\3\\2\\2\\2\\2\\u02bb\\3\\2\\2\\2\\2\\u02bd\")\n buf.write(\"\\3\\2\\2\\2\\2\\u02bf\\3\\2\\2\\2\\2\\u02c1\\3\\2\\2\\2\\2\\u02c3\\3\\2\\2\")\n buf.write(\"\\2\\2\\u02c5\\3\\2\\2\\2\\2\\u02c7\\3\\2\\2\\2\\2\\u02c9\\3\\2\\2\\2\\2\\u02cb\")\n buf.write(\"\\3\\2\\2\\2\\2\\u02cd\\3\\2\\2\\2\\2\\u02cf\\3\\2\\2\\2\\2\\u02d1\\3\\2\\2\")\n buf.write(\"\\2\\2\\u02d3\\3\\2\\2\\2\\2\\u02d5\\3\\2\\2\\2\\2\\u02d7\\3\\2\\2\\2\\2\\u02d9\")\n buf.write(\"\\3\\2\\2\\2\\2\\u02db\\3\\2\\2\\2\\2\\u02dd\\3\\2\\2\\2\\2\\u02df\\3\\2\\2\")\n buf.write(\"\\2\\2\\u02e1\\3\\2\\2\\2\\2\\u02e3\\3\\2\\2\\2\\2\\u02e5\\3\\2\\2\\2\\2\\u02e7\")\n buf.write(\"\\3\\2\\2\\2\\2\\u02e9\\3\\2\\2\\2\\2\\u02eb\\3\\2\\2\\2\\2\\u02ed\\3\\2\\2\")\n buf.write(\"\\2\\2\\u02ef\\3\\2\\2\\2\\2\\u02f1\\3\\2\\2\\2\\2\\u02f3\\3\\2\\2\\2\\2\\u02f5\")\n buf.write(\"\\3\\2\\2\\2\\2\\u02f7\\3\\2\\2\\2\\2\\u02f9\\3\\2\\2\\2\\2\\u02fb\\3\\2\\2\")\n buf.write(\"\\2\\2\\u02fd\\3\\2\\2\\2\\2\\u02ff\\3\\2\\2\\2\\2\\u0301\\3\\2\\2\\2\\2\\u0303\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0305\\3\\2\\2\\2\\2\\u0307\\3\\2\\2\\2\\2\\u0309\\3\\2\\2\")\n buf.write(\"\\2\\2\\u030b\\3\\2\\2\\2\\2\\u030d\\3\\2\\2\\2\\2\\u030f\\3\\2\\2\\2\\2\\u0311\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0313\\3\\2\\2\\2\\2\\u0315\\3\\2\\2\\2\\2\\u0317\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0319\\3\\2\\2\\2\\2\\u031b\\3\\2\\2\\2\\2\\u031d\\3\\2\\2\\2\\2\\u031f\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0321\\3\\2\\2\\2\\2\\u0323\\3\\2\\2\\2\\2\\u0325\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0327\\3\\2\\2\\2\\2\\u0329\\3\\2\\2\\2\\2\\u032b\\3\\2\\2\\2\\2\\u032d\")\n buf.write(\"\\3\\2\\2\\2\\2\\u032f\\3\\2\\2\\2\\2\\u0331\\3\\2\\2\\2\\2\\u0333\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0335\\3\\2\\2\\2\\2\\u0337\\3\\2\\2\\2\\2\\u0339\\3\\2\\2\\2\\2\\u033b\")\n 
buf.write(\"\\3\\2\\2\\2\\2\\u033d\\3\\2\\2\\2\\2\\u033f\\3\\2\\2\\2\\2\\u0341\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0343\\3\\2\\2\\2\\2\\u0345\\3\\2\\2\\2\\2\\u0347\\3\\2\\2\\2\\2\\u0349\")\n buf.write(\"\\3\\2\\2\\2\\2\\u034b\\3\\2\\2\\2\\2\\u034d\\3\\2\\2\\2\\2\\u034f\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0351\\3\\2\\2\\2\\2\\u0353\\3\\2\\2\\2\\2\\u0355\\3\\2\\2\\2\\2\\u0357\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0359\\3\\2\\2\\2\\2\\u035b\\3\\2\\2\\2\\2\\u035d\\3\\2\\2\")\n buf.write(\"\\2\\2\\u035f\\3\\2\\2\\2\\2\\u0361\\3\\2\\2\\2\\2\\u0363\\3\\2\\2\\2\\2\\u0365\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0367\\3\\2\\2\\2\\2\\u0369\\3\\2\\2\\2\\2\\u036b\\3\\2\\2\")\n buf.write(\"\\2\\2\\u036d\\3\\2\\2\\2\\2\\u036f\\3\\2\\2\\2\\2\\u0371\\3\\2\\2\\2\\2\\u0373\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0375\\3\\2\\2\\2\\2\\u0377\\3\\2\\2\\2\\2\\u0379\\3\\2\\2\")\n buf.write(\"\\2\\2\\u037b\\3\\2\\2\\2\\2\\u037d\\3\\2\\2\\2\\2\\u037f\\3\\2\\2\\2\\2\\u0381\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0383\\3\\2\\2\\2\\2\\u0385\\3\\2\\2\\2\\2\\u0387\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0389\\3\\2\\2\\2\\2\\u038b\\3\\2\\2\\2\\2\\u038d\\3\\2\\2\\2\\2\\u038f\")\n buf.write(\"\\3\\2\\2\\2\\2\\u0391\\3\\2\\2\\2\\2\\u0393\\3\\2\\2\\2\\2\\u0395\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0397\\3\\2\\2\\2\\2\\u0399\\3\\2\\2\\2\\2\\u039b\\3\\2\\2\\2\\2\\u039d\")\n buf.write(\"\\3\\2\\2\\2\\2\\u039f\\3\\2\\2\\2\\2\\u03a1\\3\\2\\2\\2\\2\\u03a3\\3\\2\\2\")\n buf.write(\"\\2\\2\\u03a5\\3\\2\\2\\2\\2\\u03a7\\3\\2\\2\\2\\2\\u03a9\\3\\2\\2\\2\\2\\u03ab\")\n buf.write(\"\\3\\2\\2\\2\\2\\u03ad\\3\\2\\2\\2\\2\\u03af\\3\\2\\2\\2\\2\\u03b1\\3\\2\\2\")\n buf.write(\"\\2\\2\\u03b3\\3\\2\\2\\2\\2\\u03b5\\3\\2\\2\\2\\2\\u03b7\\3\\2\\2\\2\\2\\u03b9\")\n buf.write(\"\\3\\2\\2\\2\\2\\u03bb\\3\\2\\2\\2\\2\\u03bd\\3\\2\\2\\2\\2\\u03bf\\3\\2\\2\")\n buf.write(\"\\2\\2\\u03c1\\3\\2\\2\\2\\2\\u03c3\\3\\2\\2\\2\\2\\u03c5\\3\\2\\2\\2\\2\\u03c7\")\n buf.write(\"\\3\\2\\2\\2\\2\\u03c9\\3\\2\\2\\2\\2\\u03cb\\3\\2\\2\\2\\2\\u03cd\\3\\2\\2\")\n 
buf.write(\"\\2\\2\\u03cf\\3\\2\\2\\2\\2\\u03d1\\3\\2\\2\\2\\2\\u03d3\\3\\2\\2\\2\\2\\u03e1\")\n buf.write(\"\\3\\2\\2\\2\\2\\u03e3\\3\\2\\2\\2\\2\\u03e5\\3\\2\\2\\2\\2\\u03e7\\3\\2\\2\")\n buf.write(\"\\2\\2\\u03e9\\3\\2\\2\\2\\2\\u03eb\\3\\2\\2\\2\\2\\u03ed\\3\\2\\2\\2\\2\\u03ef\")\n buf.write(\"\\3\\2\\2\\2\\2\\u03f1\\3\\2\\2\\2\\2\\u03f3\\3\\2\\2\\2\\2\\u03f5\\3\\2\\2\")\n buf.write(\"\\2\\2\\u03f7\\3\\2\\2\\2\\2\\u03f9\\3\\2\\2\\2\\2\\u03fb\\3\\2\\2\\2\\2\\u03fd\")\n buf.write(\"\\3\\2\\2\\2\\2\\u03ff\\3\\2\\2\\2\\2\\u0401\\3\\2\\2\\2\\2\\u0403\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0405\\3\\2\\2\\2\\2\\u0407\\3\\2\\2\\2\\2\\u0409\\3\\2\\2\\2\\2\\u040b\")\n buf.write(\"\\3\\2\\2\\2\\2\\u040d\\3\\2\\2\\2\\2\\u040f\\3\\2\\2\\2\\2\\u0413\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0415\\3\\2\\2\\2\\2\\u0417\\3\\2\\2\\2\\2\\u0419\\3\\2\\2\\2\\2\\u041b\")\n buf.write(\"\\3\\2\\2\\2\\2\\u041d\\3\\2\\2\\2\\2\\u041f\\3\\2\\2\\2\\2\\u0427\\3\\2\\2\")\n buf.write(\"\\2\\2\\u0429\\3\\2\\2\\2\\2\\u042b\\3\\2\\2\\2\\2\\u0431\\3\\2\\2\\2\\2\\u0433\")\n buf.write(\"\\3\\2\\2\\2\\3\\u0469\\3\\2\\2\\2\\5\\u046c\\3\\2\\2\\2\\7\\u046e\\3\\2\\2\")\n buf.write(\"\\2\\t\\u0472\\3\\2\\2\\2\\13\\u0478\\3\\2\\2\\2\\r\\u047e\\3\\2\\2\\2\\17\")\n buf.write(\"\\u0488\\3\\2\\2\\2\\21\\u048c\\3\\2\\2\\2\\23\\u0492\\3\\2\\2\\2\\25\\u049a\")\n buf.write(\"\\3\\2\\2\\2\\27\\u049e\\3\\2\\2\\2\\31\\u04a2\\3\\2\\2\\2\\33\\u04a8\\3\")\n buf.write(\"\\2\\2\\2\\35\\u04ab\\3\\2\\2\\2\\37\\u04b2\\3\\2\\2\\2!\\u04b9\\3\\2\\2\")\n buf.write(\"\\2#\\u04bd\\3\\2\\2\\2%\\u04c7\\3\\2\\2\\2\\'\\u04ca\\3\\2\\2\\2)\\u04d4\")\n buf.write(\"\\3\\2\\2\\2+\\u04da\\3\\2\\2\\2-\\u04e1\\3\\2\\2\\2/\\u04e6\\3\\2\\2\\2\")\n buf.write(\"\\61\\u04f0\\3\\2\\2\\2\\63\\u0507\\3\\2\\2\\2\\65\\u050d\\3\\2\\2\\2\\67\")\n buf.write(\"\\u0514\\3\\2\\2\\29\\u051a\\3\\2\\2\\2;\\u0522\\3\\2\\2\\2=\\u0528\\3\")\n buf.write(\"\\2\\2\\2?\\u0536\\3\\2\\2\\2A\\u0543\\3\\2\\2\\2C\\u0552\\3\\2\\2\\2E\\u0557\")\n 
buf.write(\"\\3\\2\\2\\2G\\u055d\\3\\2\\2\\2I\\u0562\\3\\2\\2\\2K\\u056a\\3\\2\\2\\2\")\n buf.write(\"M\\u056f\\3\\2\\2\\2O\\u0577\\3\\2\\2\\2Q\\u057c\\3\\2\\2\\2S\\u057f\\3\")\n buf.write(\"\\2\\2\\2U\\u0584\\3\\2\\2\\2W\\u0586\\3\\2\\2\\2Y\\u058c\\3\\2\\2\\2[\\u0591\")\n buf.write(\"\\3\\2\\2\\2]\\u059b\\3\\2\\2\\2_\\u05a3\\3\\2\\2\\2a\\u05a8\\3\\2\\2\\2\")\n buf.write(\"c\\u05ad\\3\\2\\2\\2e\\u05b2\\3\\2\\2\\2g\\u05ba\\3\\2\\2\\2i\\u05c4\\3\")\n buf.write(\"\\2\\2\\2k\\u05ca\\3\\2\\2\\2m\\u05ce\\3\\2\\2\\2o\\u05d3\\3\\2\\2\\2q\\u05d9\")\n buf.write(\"\\3\\2\\2\\2s\\u05e1\\3\\2\\2\\2u\\u05e9\\3\\2\\2\\2w\\u05f1\\3\\2\\2\\2\")\n buf.write(\"y\\u05f9\\3\\2\\2\\2{\\u0600\\3\\2\\2\\2}\\u060a\\3\\2\\2\\2\\177\\u0618\")\n buf.write(\"\\3\\2\\2\\2\\u0081\\u0620\\3\\2\\2\\2\\u0083\\u0629\\3\\2\\2\\2\\u0085\")\n buf.write(\"\\u0631\\3\\2\\2\\2\\u0087\\u0641\\3\\2\\2\\2\\u0089\\u064a\\3\\2\\2\\2\")\n buf.write(\"\\u008b\\u0655\\3\\2\\2\\2\\u008d\\u0661\\3\\2\\2\\2\\u008f\\u066d\\3\")\n buf.write(\"\\2\\2\\2\\u0091\\u0675\\3\\2\\2\\2\\u0093\\u067d\\3\\2\\2\\2\\u0095\\u0686\")\n buf.write(\"\\3\\2\\2\\2\\u0097\\u068e\\3\\2\\2\\2\\u0099\\u069a\\3\\2\\2\\2\\u009b\")\n buf.write(\"\\u06aa\\3\\2\\2\\2\\u009d\\u06af\\3\\2\\2\\2\\u009f\\u06b5\\3\\2\\2\\2\")\n buf.write(\"\\u00a1\\u06bc\\3\\2\\2\\2\\u00a3\\u06c2\\3\\2\\2\\2\\u00a5\\u06c7\\3\")\n buf.write(\"\\2\\2\\2\\u00a7\\u06cf\\3\\2\\2\\2\\u00a9\\u06dc\\3\\2\\2\\2\\u00ab\\u06e3\")\n buf.write(\"\\3\\2\\2\\2\\u00ad\\u06ef\\3\\2\\2\\2\\u00af\\u06f5\\3\\2\\2\\2\\u00b1\")\n buf.write(\"\\u06fa\\3\\2\\2\\2\\u00b3\\u0703\\3\\2\\2\\2\\u00b5\\u0708\\3\\2\\2\\2\")\n buf.write(\"\\u00b7\\u070c\\3\\2\\2\\2\\u00b9\\u071b\\3\\2\\2\\2\\u00bb\\u0726\\3\")\n buf.write(\"\\2\\2\\2\\u00bd\\u072a\\3\\2\\2\\2\\u00bf\\u0730\\3\\2\\2\\2\\u00c1\\u0734\")\n buf.write(\"\\3\\2\\2\\2\\u00c3\\u073c\\3\\2\\2\\2\\u00c5\\u0744\\3\\2\\2\\2\\u00c7\")\n buf.write(\"\\u074e\\3\\2\\2\\2\\u00c9\\u0758\\3\\2\\2\\2\\u00cb\\u0760\\3\\2\\2\\2\")\n 
buf.write(\"\\u00cd\\u0769\\3\\2\\2\\2\\u00cf\\u0772\\3\\2\\2\\2\\u00d1\\u077a\\3\")\n buf.write(\"\\2\\2\\2\\u00d3\\u0781\\3\\2\\2\\2\\u00d5\\u0787\\3\\2\\2\\2\\u00d7\\u078c\")\n buf.write(\"\\3\\2\\2\\2\\u00d9\\u079a\\3\\2\\2\\2\\u00db\\u07a4\\3\\2\\2\\2\\u00dd\")\n buf.write(\"\\u07ac\\3\\2\\2\\2\\u00df\\u07b9\\3\\2\\2\\2\\u00e1\\u07c2\\3\\2\\2\\2\")\n buf.write(\"\\u00e3\\u07cb\\3\\2\\2\\2\\u00e5\\u07d2\\3\\2\\2\\2\\u00e7\\u07d7\\3\")\n buf.write(\"\\2\\2\\2\\u00e9\\u07f0\\3\\2\\2\\2\\u00eb\\u07f5\\3\\2\\2\\2\\u00ed\\u07fd\")\n buf.write(\"\\3\\2\\2\\2\\u00ef\\u0802\\3\\2\\2\\2\\u00f1\\u0808\\3\\2\\2\\2\\u00f3\")\n buf.write(\"\\u080e\\3\\2\\2\\2\\u00f5\\u0815\\3\\2\\2\\2\\u00f7\\u081e\\3\\2\\2\\2\")\n buf.write(\"\\u00f9\\u0822\\3\\2\\2\\2\\u00fb\\u0831\\3\\2\\2\\2\\u00fd\\u0835\\3\")\n buf.write(\"\\2\\2\\2\\u00ff\\u083c\\3\\2\\2\\2\\u0101\\u0843\\3\\2\\2\\2\\u0103\\u084c\")\n buf.write(\"\\3\\2\\2\\2\\u0105\\u0853\\3\\2\\2\\2\\u0107\\u085d\\3\\2\\2\\2\\u0109\")\n buf.write(\"\\u086c\\3\\2\\2\\2\\u010b\\u0877\\3\\2\\2\\2\\u010d\\u087f\\3\\2\\2\\2\")\n buf.write(\"\\u010f\\u0889\\3\\2\\2\\2\\u0111\\u0891\\3\\2\\2\\2\\u0113\\u0898\\3\")\n buf.write(\"\\2\\2\\2\\u0115\\u089d\\3\\2\\2\\2\\u0117\\u08a5\\3\\2\\2\\2\\u0119\\u08ae\")\n buf.write(\"\\3\\2\\2\\2\\u011b\\u08b6\\3\\2\\2\\2\\u011d\\u08be\\3\\2\\2\\2\\u011f\")\n buf.write(\"\\u08c4\\3\\2\\2\\2\\u0121\\u08ca\\3\\2\\2\\2\\u0123\\u08d0\\3\\2\\2\\2\")\n buf.write(\"\\u0125\\u08d6\\3\\2\\2\\2\\u0127\\u08e2\\3\\2\\2\\2\\u0129\\u08e8\\3\")\n buf.write(\"\\2\\2\\2\\u012b\\u08f2\\3\\2\\2\\2\\u012d\\u08fa\\3\\2\\2\\2\\u012f\\u08fe\")\n buf.write(\"\\3\\2\\2\\2\\u0131\\u0905\\3\\2\\2\\2\\u0133\\u090b\\3\\2\\2\\2\\u0135\")\n buf.write(\"\\u0910\\3\\2\\2\\2\\u0137\\u0915\\3\\2\\2\\2\\u0139\\u091e\\3\\2\\2\\2\")\n buf.write(\"\\u013b\\u0923\\3\\2\\2\\2\\u013d\\u0929\\3\\2\\2\\2\\u013f\\u092f\\3\")\n buf.write(\"\\2\\2\\2\\u0141\\u0938\\3\\2\\2\\2\\u0143\\u093d\\3\\2\\2\\2\\u0145\\u0944\")\n 
buf.write(\"\\3\\2\\2\\2\\u0147\\u0949\\3\\2\\2\\2\\u0149\\u094e\\3\\2\\2\\2\\u014b\")\n buf.write(\"\\u0951\\3\\2\\2\\2\\u014d\\u0958\\3\\2\\2\\2\\u014f\\u0962\\3\\2\\2\\2\")\n buf.write(\"\\u0151\\u0965\\3\\2\\2\\2\\u0153\\u096d\\3\\2\\2\\2\\u0155\\u0977\\3\")\n buf.write(\"\\2\\2\\2\\u0157\\u0981\\3\\2\\2\\2\\u0159\\u0988\\3\\2\\2\\2\\u015b\\u098e\")\n buf.write(\"\\3\\2\\2\\2\\u015d\\u0996\\3\\2\\2\\2\\u015f\\u09a0\\3\\2\\2\\2\\u0161\")\n buf.write(\"\\u09a8\\3\\2\\2\\2\\u0163\\u09b1\\3\\2\\2\\2\\u0165\\u09b8\\3\\2\\2\\2\")\n buf.write(\"\\u0167\\u09be\\3\\2\\2\\2\\u0169\\u09c4\\3\\2\\2\\2\\u016b\\u09cb\\3\")\n buf.write(\"\\2\\2\\2\\u016d\\u09d8\\3\\2\\2\\2\\u016f\\u09e0\\3\\2\\2\\2\\u0171\\u09e4\")\n buf.write(\"\\3\\2\\2\\2\\u0173\\u09ec\\3\\2\\2\\2\\u0175\\u09f6\\3\\2\\2\\2\\u0177\")\n buf.write(\"\\u09ff\\3\\2\\2\\2\\u0179\\u0a04\\3\\2\\2\\2\\u017b\\u0a0f\\3\\2\\2\\2\")\n buf.write(\"\\u017d\\u0a12\\3\\2\\2\\2\\u017f\\u0a1c\\3\\2\\2\\2\\u0181\\u0a24\\3\")\n buf.write(\"\\2\\2\\2\\u0183\\u0a29\\3\\2\\2\\2\\u0185\\u0a2e\\3\\2\\2\\2\\u0187\\u0a33\")\n buf.write(\"\\3\\2\\2\\2\\u0189\\u0a3c\\3\\2\\2\\2\\u018b\\u0a41\\3\\2\\2\\2\\u018d\")\n buf.write(\"\\u0a4c\\3\\2\\2\\2\\u018f\\u0a54\\3\\2\\2\\2\\u0191\\u0a59\\3\\2\\2\\2\")\n buf.write(\"\\u0193\\u0a5f\\3\\2\\2\\2\\u0195\\u0a67\\3\\2\\2\\2\\u0197\\u0a6c\\3\")\n buf.write(\"\\2\\2\\2\\u0199\\u0a72\\3\\2\\2\\2\\u019b\\u0a78\\3\\2\\2\\2\\u019d\\u0a7e\")\n buf.write(\"\\3\\2\\2\\2\\u019f\\u0a84\\3\\2\\2\\2\\u01a1\\u0a8a\\3\\2\\2\\2\\u01a3\")\n buf.write(\"\\u0a8f\\3\\2\\2\\2\\u01a5\\u0a96\\3\\2\\2\\2\\u01a7\\u0a9a\\3\\2\\2\\2\")\n buf.write(\"\\u01a9\\u0aa1\\3\\2\\2\\2\\u01ab\\u0aa7\\3\\2\\2\\2\\u01ad\\u0aac\\3\")\n buf.write(\"\\2\\2\\2\\u01af\\u0ab1\\3\\2\\2\\2\\u01b1\\u0ab6\\3\\2\\2\\2\\u01b3\\u0aba\")\n buf.write(\"\\3\\2\\2\\2\\u01b5\\u0ac2\\3\\2\\2\\2\\u01b7\\u0acb\\3\\2\\2\\2\\u01b9\")\n buf.write(\"\\u0ad4\\3\\2\\2\\2\\u01bb\\u0adb\\3\\2\\2\\2\\u01bd\\u0ae1\\3\\2\\2\\2\")\n 
buf.write(\"\\u01bf\\u0ae7\\3\\2\\2\\2\\u01c1\\u0aee\\3\\2\\2\\2\\u01c3\\u0af7\\3\")\n buf.write(\"\\2\\2\\2\\u01c5\\u0b00\\3\\2\\2\\2\\u01c7\\u0b05\\3\\2\\2\\2\\u01c9\\u0b0b\")\n buf.write(\"\\3\\2\\2\\2\\u01cb\\u0b12\\3\\2\\2\\2\\u01cd\\u0b18\\3\\2\\2\\2\\u01cf\")\n buf.write(\"\\u0b21\\3\\2\\2\\2\\u01d1\\u0b26\\3\\2\\2\\2\\u01d3\\u0b2a\\3\\2\\2\\2\")\n buf.write(\"\\u01d5\\u0b32\\3\\2\\2\\2\\u01d7\\u0b3b\\3\\2\\2\\2\\u01d9\\u0b3f\\3\")\n buf.write(\"\\2\\2\\2\\u01db\\u0b45\\3\\2\\2\\2\\u01dd\\u0b4e\\3\\2\\2\\2\\u01df\\u0b54\")\n buf.write(\"\\3\\2\\2\\2\\u01e1\\u0b5b\\3\\2\\2\\2\\u01e3\\u0b5f\\3\\2\\2\\2\\u01e5\")\n buf.write(\"\\u0b62\\3\\2\\2\\2\\u01e7\\u0b6a\\3\\2\\2\\2\\u01e9\\u0b72\\3\\2\\2\\2\")\n buf.write(\"\\u01eb\\u0b79\\3\\2\\2\\2\\u01ed\\u0b81\\3\\2\\2\\2\\u01ef\\u0b92\\3\")\n buf.write(\"\\2\\2\\2\\u01f1\\u0b9d\\3\\2\\2\\2\\u01f3\\u0ba8\\3\\2\\2\\2\\u01f5\\u0bad\")\n buf.write(\"\\3\\2\\2\\2\\u01f7\\u0bb5\\3\\2\\2\\2\\u01f9\\u0bc3\\3\\2\\2\\2\\u01fb\")\n buf.write(\"\\u0bc7\\3\\2\\2\\2\\u01fd\\u0bce\\3\\2\\2\\2\\u01ff\\u0bd3\\3\\2\\2\\2\")\n buf.write(\"\\u0201\\u0bd9\\3\\2\\2\\2\\u0203\\u0be0\\3\\2\\2\\2\\u0205\\u0be8\\3\")\n buf.write(\"\\2\\2\\2\\u0207\\u0bf2\\3\\2\\2\\2\\u0209\\u0bf9\\3\\2\\2\\2\\u020b\\u0bfc\")\n buf.write(\"\\3\\2\\2\\2\\u020d\\u0c00\\3\\2\\2\\2\\u020f\\u0c04\\3\\2\\2\\2\\u0211\")\n buf.write(\"\\u0c08\\3\\2\\2\\2\\u0213\\u0c0b\\3\\2\\2\\2\\u0215\\u0c10\\3\\2\\2\\2\")\n buf.write(\"\\u0217\\u0c15\\3\\2\\2\\2\\u0219\\u0c1c\\3\\2\\2\\2\\u021b\\u0c1f\\3\")\n buf.write(\"\\2\\2\\2\\u021d\\u0c27\\3\\2\\2\\2\\u021f\\u0c2d\\3\\2\\2\\2\\u0221\\u0c38\")\n buf.write(\"\\3\\2\\2\\2\\u0223\\u0c40\\3\\2\\2\\2\\u0225\\u0c44\\3\\2\\2\\2\\u0227\")\n buf.write(\"\\u0c4a\\3\\2\\2\\2\\u0229\\u0c4f\\3\\2\\2\\2\\u022b\\u0c5a\\3\\2\\2\\2\")\n buf.write(\"\\u022d\\u0c62\\3\\2\\2\\2\\u022f\\u0c72\\3\\2\\2\\2\\u0231\\u0c7d\\3\")\n buf.write(\"\\2\\2\\2\\u0233\\u0c84\\3\\2\\2\\2\\u0235\\u0c8e\\3\\2\\2\\2\\u0237\\u0c96\")\n 
buf.write(\"\\3\\2\\2\\2\\u0239\\u0c9b\\3\\2\\2\\2\\u023b\\u0ca4\\3\\2\\2\\2\\u023d\")\n buf.write(\"\\u0caa\\3\\2\\2\\2\\u023f\\u0cb4\\3\\2\\2\\2\\u0241\\u0cba\\3\\2\\2\\2\")\n buf.write(\"\\u0243\\u0cbf\\3\\2\\2\\2\\u0245\\u0ccb\\3\\2\\2\\2\\u0247\\u0cd4\\3\")\n buf.write(\"\\2\\2\\2\\u0249\\u0cde\\3\\2\\2\\2\\u024b\\u0ce5\\3\\2\\2\\2\\u024d\\u0cef\")\n buf.write(\"\\3\\2\\2\\2\\u024f\\u0cf9\\3\\2\\2\\2\\u0251\\u0d01\\3\\2\\2\\2\\u0253\")\n buf.write(\"\\u0d07\\3\\2\\2\\2\\u0255\\u0d11\\3\\2\\2\\2\\u0257\\u0d17\\3\\2\\2\\2\")\n buf.write(\"\\u0259\\u0d1d\\3\\2\\2\\2\\u025b\\u0d21\\3\\2\\2\\2\\u025d\\u0d26\\3\")\n buf.write(\"\\2\\2\\2\\u025f\\u0d2b\\3\\2\\2\\2\\u0261\\u0d32\\3\\2\\2\\2\\u0263\\u0d36\")\n buf.write(\"\\3\\2\\2\\2\\u0265\\u0d40\\3\\2\\2\\2\\u0267\\u0d4c\\3\\2\\2\\2\\u0269\")\n buf.write(\"\\u0d53\\3\\2\\2\\2\\u026b\\u0d5d\\3\\2\\2\\2\\u026d\\u0d64\\3\\2\\2\\2\")\n buf.write(\"\\u026f\\u0d6c\\3\\2\\2\\2\\u0271\\u0d74\\3\\2\\2\\2\\u0273\\u0d88\\3\")\n buf.write(\"\\2\\2\\2\\u0275\\u0d8f\\3\\2\\2\\2\\u0277\\u0d9c\\3\\2\\2\\2\\u0279\\u0da3\")\n buf.write(\"\\3\\2\\2\\2\\u027b\\u0dad\\3\\2\\2\\2\\u027d\\u0db3\\3\\2\\2\\2\\u027f\")\n buf.write(\"\\u0dbb\\3\\2\\2\\2\\u0281\\u0dc2\\3\\2\\2\\2\\u0283\\u0dc8\\3\\2\\2\\2\")\n buf.write(\"\\u0285\\u0dd1\\3\\2\\2\\2\\u0287\\u0dd8\\3\\2\\2\\2\\u0289\\u0ddc\\3\")\n buf.write(\"\\2\\2\\2\\u028b\\u0de2\\3\\2\\2\\2\\u028d\\u0de7\\3\\2\\2\\2\\u028f\\u0ded\")\n buf.write(\"\\3\\2\\2\\2\\u0291\\u0df4\\3\\2\\2\\2\\u0293\\u0df9\\3\\2\\2\\2\\u0295\")\n buf.write(\"\\u0e03\\3\\2\\2\\2\\u0297\\u0e0a\\3\\2\\2\\2\\u0299\\u0e16\\3\\2\\2\\2\")\n buf.write(\"\\u029b\\u0e1a\\3\\2\\2\\2\\u029d\\u0e21\\3\\2\\2\\2\\u029f\\u0e28\\3\")\n buf.write(\"\\2\\2\\2\\u02a1\\u0e2d\\3\\2\\2\\2\\u02a3\\u0e35\\3\\2\\2\\2\\u02a5\\u0e3c\")\n buf.write(\"\\3\\2\\2\\2\\u02a7\\u0e41\\3\\2\\2\\2\\u02a9\\u0e4a\\3\\2\\2\\2\\u02ab\")\n buf.write(\"\\u0e55\\3\\2\\2\\2\\u02ad\\u0e62\\3\\2\\2\\2\\u02af\\u0e74\\3\\2\\2\\2\")\n 
buf.write(\"\\u02b1\\u0e80\\3\\2\\2\\2\\u02b3\\u0e90\\3\\2\\2\\2\\u02b5\\u0e94\\3\")\n buf.write(\"\\2\\2\\2\\u02b7\\u0e99\\3\\2\\2\\2\\u02b9\\u0ea2\\3\\2\\2\\2\\u02bb\\u0ea8\")\n buf.write(\"\\3\\2\\2\\2\\u02bd\\u0ead\\3\\2\\2\\2\\u02bf\\u0eb6\\3\\2\\2\\2\\u02c1\")\n buf.write(\"\\u0ebf\\3\\2\\2\\2\\u02c3\\u0ec8\\3\\2\\2\\2\\u02c5\\u0ed7\\3\\2\\2\\2\")\n buf.write(\"\\u02c7\\u0ede\\3\\2\\2\\2\\u02c9\\u0ee3\\3\\2\\2\\2\\u02cb\\u0ee8\\3\")\n buf.write(\"\\2\\2\\2\\u02cd\\u0ef1\\3\\2\\2\\2\\u02cf\\u0efa\\3\\2\\2\\2\\u02d1\\u0eff\")\n buf.write(\"\\3\\2\\2\\2\\u02d3\\u0f0d\\3\\2\\2\\2\\u02d5\\u0f15\\3\\2\\2\\2\\u02d7\")\n buf.write(\"\\u0f1e\\3\\2\\2\\2\\u02d9\\u0f29\\3\\2\\2\\2\\u02db\\u0f2f\\3\\2\\2\\2\")\n buf.write(\"\\u02dd\\u0f37\\3\\2\\2\\2\\u02df\\u0f41\\3\\2\\2\\2\\u02e1\\u0f4e\\3\")\n buf.write(\"\\2\\2\\2\\u02e3\\u0f55\\3\\2\\2\\2\\u02e5\\u0f60\\3\\2\\2\\2\\u02e7\\u0f67\")\n buf.write(\"\\3\\2\\2\\2\\u02e9\\u0f73\\3\\2\\2\\2\\u02eb\\u0f80\\3\\2\\2\\2\\u02ed\")\n buf.write(\"\\u0f8e\\3\\2\\2\\2\\u02ef\\u0f96\\3\\2\\2\\2\\u02f1\\u0f9e\\3\\2\\2\\2\")\n buf.write(\"\\u02f3\\u0fa6\\3\\2\\2\\2\\u02f5\\u0fac\\3\\2\\2\\2\\u02f7\\u0fb0\\3\")\n buf.write(\"\\2\\2\\2\\u02f9\\u0fb5\\3\\2\\2\\2\\u02fb\\u0fba\\3\\2\\2\\2\\u02fd\\u0fc4\")\n buf.write(\"\\3\\2\\2\\2\\u02ff\\u0fe0\\3\\2\\2\\2\\u0301\\u0ffb\\3\\2\\2\\2\\u0303\")\n buf.write(\"\\u1013\\3\\2\\2\\2\\u0305\\u1021\\3\\2\\2\\2\\u0307\\u102f\\3\\2\\2\\2\")\n buf.write(\"\\u0309\\u103f\\3\\2\\2\\2\\u030b\\u104f\\3\\2\\2\\2\\u030d\\u1052\\3\")\n buf.write(\"\\2\\2\\2\\u030f\\u105b\\3\\2\\2\\2\\u0311\\u1067\\3\\2\\2\\2\\u0313\\u1071\")\n buf.write(\"\\3\\2\\2\\2\\u0315\\u1077\\3\\2\\2\\2\\u0317\\u107f\\3\\2\\2\\2\\u0319\")\n buf.write(\"\\u1084\\3\\2\\2\\2\\u031b\\u1089\\3\\2\\2\\2\\u031d\\u1092\\3\\2\\2\\2\")\n buf.write(\"\\u031f\\u1097\\3\\2\\2\\2\\u0321\\u10a1\\3\\2\\2\\2\\u0323\\u10a7\\3\")\n buf.write(\"\\2\\2\\2\\u0325\\u10ad\\3\\2\\2\\2\\u0327\\u10b4\\3\\2\\2\\2\\u0329\\u10be\")\n 
buf.write(\"\\3\\2\\2\\2\\u032b\\u10c6\\3\\2\\2\\2\\u032d\\u10cc\\3\\2\\2\\2\\u032f\")\n buf.write(\"\\u10d3\\3\\2\\2\\2\\u0331\\u10db\\3\\2\\2\\2\\u0333\\u10e2\\3\\2\\2\\2\")\n buf.write(\"\\u0335\\u10e9\\3\\2\\2\\2\\u0337\\u10ed\\3\\2\\2\\2\\u0339\\u10f3\\3\")\n buf.write(\"\\2\\2\\2\\u033b\\u10fc\\3\\2\\2\\2\\u033d\\u1102\\3\\2\\2\\2\\u033f\\u1109\")\n buf.write(\"\\3\\2\\2\\2\\u0341\\u1111\\3\\2\\2\\2\\u0343\\u111a\\3\\2\\2\\2\\u0345\")\n buf.write(\"\\u1123\\3\\2\\2\\2\\u0347\\u112a\\3\\2\\2\\2\\u0349\\u1132\\3\\2\\2\\2\")\n buf.write(\"\\u034b\\u113a\\3\\2\\2\\2\\u034d\\u1143\\3\\2\\2\\2\\u034f\\u1148\\3\")\n buf.write(\"\\2\\2\\2\\u0351\\u1150\\3\\2\\2\\2\\u0353\\u115b\\3\\2\\2\\2\\u0355\\u1160\")\n buf.write(\"\\3\\2\\2\\2\\u0357\\u1169\\3\\2\\2\\2\\u0359\\u116f\\3\\2\\2\\2\\u035b\")\n buf.write(\"\\u1175\\3\\2\\2\\2\\u035d\\u117a\\3\\2\\2\\2\\u035f\\u1181\\3\\2\\2\\2\")\n buf.write(\"\\u0361\\u1186\\3\\2\\2\\2\\u0363\\u118c\\3\\2\\2\\2\\u0365\\u1190\\3\")\n buf.write(\"\\2\\2\\2\\u0367\\u1197\\3\\2\\2\\2\\u0369\\u11a5\\3\\2\\2\\2\\u036b\\u11ad\")\n buf.write(\"\\3\\2\\2\\2\\u036d\\u11ba\\3\\2\\2\\2\\u036f\\u11c5\\3\\2\\2\\2\\u0371\")\n buf.write(\"\\u11cf\\3\\2\\2\\2\\u0373\\u11d9\\3\\2\\2\\2\\u0375\\u11e7\\3\\2\\2\\2\")\n buf.write(\"\\u0377\\u11f0\\3\\2\\2\\2\\u0379\\u11f6\\3\\2\\2\\2\\u037b\\u11ff\\3\")\n buf.write(\"\\2\\2\\2\\u037d\\u1207\\3\\2\\2\\2\\u037f\\u1214\\3\\2\\2\\2\\u0381\\u121d\")\n buf.write(\"\\3\\2\\2\\2\\u0383\\u1222\\3\\2\\2\\2\\u0385\\u1226\\3\\2\\2\\2\\u0387\")\n buf.write(\"\\u123f\\3\\2\\2\\2\\u0389\\u1244\\3\\2\\2\\2\\u038b\\u124f\\3\\2\\2\\2\")\n buf.write(\"\\u038d\\u1261\\3\\2\\2\\2\\u038f\\u1271\\3\\2\\2\\2\\u0391\\u1284\\3\")\n buf.write(\"\\2\\2\\2\\u0393\\u129b\\3\\2\\2\\2\\u0395\\u12aa\\3\\2\\2\\2\\u0397\\u12b4\")\n buf.write(\"\\3\\2\\2\\2\\u0399\\u12bf\\3\\2\\2\\2\\u039b\\u12c7\\3\\2\\2\\2\\u039d\")\n buf.write(\"\\u12d4\\3\\2\\2\\2\\u039f\\u12e4\\3\\2\\2\\2\\u03a1\\u12f4\\3\\2\\2\\2\")\n 
buf.write(\"\\u03a3\\u12f9\\3\\2\\2\\2\\u03a5\\u12fd\\3\\2\\2\\2\\u03a7\\u1302\\3\")\n buf.write(\"\\2\\2\\2\\u03a9\\u1306\\3\\2\\2\\2\\u03ab\\u130b\\3\\2\\2\\2\\u03ad\\u130f\")\n buf.write(\"\\3\\2\\2\\2\\u03af\\u1316\\3\\2\\2\\2\\u03b1\\u131a\\3\\2\\2\\2\\u03b3\")\n buf.write(\"\\u1320\\3\\2\\2\\2\\u03b5\\u1330\\3\\2\\2\\2\\u03b7\\u133b\\3\\2\\2\\2\")\n buf.write(\"\\u03b9\\u133f\\3\\2\\2\\2\\u03bb\\u1348\\3\\2\\2\\2\\u03bd\\u134e\\3\")\n buf.write(\"\\2\\2\\2\\u03bf\\u1355\\3\\2\\2\\2\\u03c1\\u135a\\3\\2\\2\\2\\u03c3\\u1361\")\n buf.write(\"\\3\\2\\2\\2\\u03c5\\u136e\\3\\2\\2\\2\\u03c7\\u137b\\3\\2\\2\\2\\u03c9\")\n buf.write(\"\\u1388\\3\\2\\2\\2\\u03cb\\u138b\\3\\2\\2\\2\\u03cd\\u138d\\3\\2\\2\\2\")\n buf.write(\"\\u03cf\\u138f\\3\\2\\2\\2\\u03d1\\u139e\\3\\2\\2\\2\\u03d3\\u13aa\\3\")\n buf.write(\"\\2\\2\\2\\u03d5\\u13b3\\3\\2\\2\\2\\u03d7\\u13b5\\3\\2\\2\\2\\u03d9\\u13c0\")\n buf.write(\"\\3\\2\\2\\2\\u03db\\u13cb\\3\\2\\2\\2\\u03dd\\u13d6\\3\\2\\2\\2\\u03df\")\n buf.write(\"\\u13e1\\3\\2\\2\\2\\u03e1\\u13e3\\3\\2\\2\\2\\u03e3\\u13ed\\3\\2\\2\\2\")\n buf.write(\"\\u03e5\\u13ef\\3\\2\\2\\2\\u03e7\\u13f1\\3\\2\\2\\2\\u03e9\\u13f3\\3\")\n buf.write(\"\\2\\2\\2\\u03eb\\u13f5\\3\\2\\2\\2\\u03ed\\u13f8\\3\\2\\2\\2\\u03ef\\u13fa\")\n buf.write(\"\\3\\2\\2\\2\\u03f1\\u13fc\\3\\2\\2\\2\\u03f3\\u13fe\\3\\2\\2\\2\\u03f5\")\n buf.write(\"\\u1400\\3\\2\\2\\2\\u03f7\\u1402\\3\\2\\2\\2\\u03f9\\u1404\\3\\2\\2\\2\")\n buf.write(\"\\u03fb\\u1415\\3\\2\\2\\2\\u03fd\\u1417\\3\\2\\2\\2\\u03ff\\u1419\\3\")\n buf.write(\"\\2\\2\\2\\u0401\\u141b\\3\\2\\2\\2\\u0403\\u141e\\3\\2\\2\\2\\u0405\\u1420\")\n buf.write(\"\\3\\2\\2\\2\\u0407\\u142b\\3\\2\\2\\2\\u0409\\u142d\\3\\2\\2\\2\\u040b\")\n buf.write(\"\\u142f\\3\\2\\2\\2\\u040d\\u1431\\3\\2\\2\\2\\u040f\\u1433\\3\\2\\2\\2\")\n buf.write(\"\\u0411\\u1435\\3\\2\\2\\2\\u0413\\u1437\\3\\2\\2\\2\\u0415\\u143a\\3\")\n buf.write(\"\\2\\2\\2\\u0417\\u143c\\3\\2\\2\\2\\u0419\\u143e\\3\\2\\2\\2\\u041b\\u1440\")\n 
buf.write(\"\\3\\2\\2\\2\\u041d\\u1442\\3\\2\\2\\2\\u041f\\u1445\\3\\2\\2\\2\\u0421\")\n buf.write(\"\\u144b\\3\\2\\2\\2\\u0423\\u144e\\3\\2\\2\\2\\u0425\\u1455\\3\\2\\2\\2\")\n buf.write(\"\\u0427\\u1460\\3\\2\\2\\2\\u0429\\u146f\\3\\2\\2\\2\\u042b\\u147d\\3\")\n buf.write(\"\\2\\2\\2\\u042d\\u1490\\3\\2\\2\\2\\u042f\\u1494\\3\\2\\2\\2\\u0431\\u1496\")\n buf.write(\"\\3\\2\\2\\2\\u0433\\u149e\\3\\2\\2\\2\\u0435\\u14a3\\3\\2\\2\\2\\u0437\")\n buf.write(\"\\u14a5\\3\\2\\2\\2\\u0439\\u14a7\\3\\2\\2\\2\\u043b\\u14a9\\3\\2\\2\\2\")\n buf.write(\"\\u043d\\u14ab\\3\\2\\2\\2\\u043f\\u14ad\\3\\2\\2\\2\\u0441\\u14af\\3\")\n buf.write(\"\\2\\2\\2\\u0443\\u14b1\\3\\2\\2\\2\\u0445\\u14b3\\3\\2\\2\\2\\u0447\\u14b5\")\n buf.write(\"\\3\\2\\2\\2\\u0449\\u14b7\\3\\2\\2\\2\\u044b\\u14b9\\3\\2\\2\\2\\u044d\")\n buf.write(\"\\u14bb\\3\\2\\2\\2\\u044f\\u14bd\\3\\2\\2\\2\\u0451\\u14bf\\3\\2\\2\\2\")\n buf.write(\"\\u0453\\u14c1\\3\\2\\2\\2\\u0455\\u14c3\\3\\2\\2\\2\\u0457\\u14c5\\3\")\n buf.write(\"\\2\\2\\2\\u0459\\u14c7\\3\\2\\2\\2\\u045b\\u14c9\\3\\2\\2\\2\\u045d\\u14cb\")\n buf.write(\"\\3\\2\\2\\2\\u045f\\u14cd\\3\\2\\2\\2\\u0461\\u14cf\\3\\2\\2\\2\\u0463\")\n buf.write(\"\\u14d1\\3\\2\\2\\2\\u0465\\u14d3\\3\\2\\2\\2\\u0467\\u14d5\\3\\2\\2\\2\")\n buf.write(\"\\u0469\\u046a\\7\\60\\2\\2\\u046a\\u046b\\7\\60\\2\\2\\u046b\\4\\3\\2\")\n buf.write(\"\\2\\2\\u046c\\u046d\\5\\u0435\\u021b\\2\\u046d\\6\\3\\2\\2\\2\\u046e\")\n buf.write(\"\\u046f\\5\\u0435\\u021b\\2\\u046f\\u0470\\5\\u043b\\u021e\\2\\u0470\")\n buf.write(\"\\u0471\\5\\u043b\\u021e\\2\\u0471\\b\\3\\2\\2\\2\\u0472\\u0473\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0473\\u0474\\5\\u043f\\u0220\\2\\u0474\\u0475\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0475\\u0476\\5\\u043d\\u021f\\2\\u0476\\u0477\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0477\\n\\3\\2\\2\\2\\u0478\\u0479\\5\\u0435\\u021b\\2\\u0479\")\n buf.write(\"\\u047a\\5\\u0441\\u0221\\2\\u047a\\u047b\\5\\u043d\\u021f\\2\\u047b\")\n 
buf.write(\"\\u047c\\5\\u044f\\u0228\\2\\u047c\\u047d\\5\\u045b\\u022e\\2\\u047d\")\n buf.write(\"\\f\\3\\2\\2\\2\\u047e\\u047f\\5\\u0435\\u021b\\2\\u047f\\u0480\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u0480\\u0481\\5\\u0441\\u0221\\2\\u0481\\u0482\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0482\\u0483\\5\\u043d\\u021f\\2\\u0483\\u0484\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u0484\\u0485\\5\\u0435\\u021b\\2\\u0485\\u0486\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0486\\u0487\\5\\u043d\\u021f\\2\\u0487\\16\\3\\2\\2\\2\")\n buf.write(\"\\u0488\\u0489\\5\\u0435\\u021b\\2\\u0489\\u048a\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u048a\\u048b\\5\\u044b\\u0226\\2\\u048b\\20\\3\\2\\2\\2\\u048c\")\n buf.write(\"\\u048d\\5\\u0435\\u021b\\2\\u048d\\u048e\\5\\u044b\\u0226\\2\\u048e\")\n buf.write(\"\\u048f\\5\\u045b\\u022e\\2\\u048f\\u0490\\5\\u043d\\u021f\\2\\u0490\")\n buf.write(\"\\u0491\\5\\u0457\\u022c\\2\\u0491\\22\\3\\2\\2\\2\\u0492\\u0493\\5\")\n buf.write(\"\\u0435\\u021b\\2\\u0493\\u0494\\5\\u044f\\u0228\\2\\u0494\\u0495\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0495\\u0496\\5\\u044b\\u0226\\2\\u0496\\u0497\")\n buf.write(\"\\5\\u0465\\u0233\\2\\u0497\\u0498\\5\\u0467\\u0234\\2\\u0498\\u0499\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0499\\24\\3\\2\\2\\2\\u049a\\u049b\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u049b\\u049c\\5\\u044f\\u0228\\2\\u049c\\u049d\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u049d\\26\\3\\2\\2\\2\\u049e\\u049f\\5\\u0435\\u021b\\2\")\n buf.write(\"\\u049f\\u04a0\\5\\u044f\\u0228\\2\\u04a0\\u04a1\\5\\u0465\\u0233\")\n buf.write(\"\\2\\u04a1\\30\\3\\2\\2\\2\\u04a2\\u04a3\\5\\u0435\\u021b\\2\\u04a3\")\n buf.write(\"\\u04a4\\5\\u0457\\u022c\\2\\u04a4\\u04a5\\5\\u0457\\u022c\\2\\u04a5\")\n buf.write(\"\\u04a6\\5\\u0435\\u021b\\2\\u04a6\\u04a7\\5\\u0465\\u0233\\2\\u04a7\")\n buf.write(\"\\32\\3\\2\\2\\2\\u04a8\\u04a9\\5\\u0435\\u021b\\2\\u04a9\\u04aa\\5\")\n buf.write(\"\\u0459\\u022d\\2\\u04aa\\34\\3\\2\\2\\2\\u04ab\\u04ac\\5\\u0435\\u021b\")\n 
buf.write(\"\\2\\u04ac\\u04ad\\5\\u0459\\u022d\\2\\u04ad\\u04ae\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u04ae\\u04af\\5\\u045d\\u022f\\2\\u04af\\u04b0\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u04b0\\u04b1\\5\\u043d\\u021f\\2\\u04b1\\36\\3\\2\\2\\2\\u04b2\")\n buf.write(\"\\u04b3\\5\\u0435\\u021b\\2\\u04b3\\u04b4\\5\\u0459\\u022d\\2\\u04b4\")\n buf.write(\"\\u04b5\\5\\u0459\\u022d\\2\\u04b5\\u04b6\\5\\u043d\\u021f\\2\\u04b6\")\n buf.write(\"\\u04b7\\5\\u0457\\u022c\\2\\u04b7\\u04b8\\5\\u045b\\u022e\\2\\u04b8\")\n buf.write(\" \\3\\2\\2\\2\\u04b9\\u04ba\\5\\u0435\\u021b\\2\\u04ba\\u04bb\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u04bb\\u04bc\\5\\u0439\\u021d\\2\\u04bc\\\"\\3\\2\\2\\2\\u04bd\")\n buf.write(\"\\u04be\\5\\u0435\\u021b\\2\\u04be\\u04bf\\5\\u0459\\u022d\\2\\u04bf\")\n buf.write(\"\\u04c0\\5\\u0459\\u022d\\2\\u04c0\\u04c1\\5\\u0451\\u0229\\2\\u04c1\")\n buf.write(\"\\u04c2\\5\\u0439\\u021d\\2\\u04c2\\u04c3\\5\\u0445\\u0223\\2\\u04c3\")\n buf.write(\"\\u04c4\\5\\u0435\\u021b\\2\\u04c4\\u04c5\\5\\u045b\\u022e\\2\\u04c5\")\n buf.write(\"\\u04c6\\5\\u043d\\u021f\\2\\u04c6$\\3\\2\\2\\2\\u04c7\\u04c8\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u04c8\\u04c9\\5\\u045b\\u022e\\2\\u04c9&\\3\\2\\2\\2\\u04ca\")\n buf.write(\"\\u04cb\\5\\u0435\\u021b\\2\\u04cb\\u04cc\\5\\u045b\\u022e\\2\\u04cc\")\n buf.write(\"\\u04cd\\5\\u045b\\u022e\\2\\u04cd\\u04ce\\5\\u0457\\u022c\\2\\u04ce\")\n buf.write(\"\\u04cf\\5\\u0445\\u0223\\2\\u04cf\\u04d0\\5\\u0437\\u021c\\2\\u04d0\")\n buf.write(\"\\u04d1\\5\\u045d\\u022f\\2\\u04d1\\u04d2\\5\\u045b\\u022e\\2\\u04d2\")\n buf.write(\"\\u04d3\\5\\u043d\\u021f\\2\\u04d3(\\3\\2\\2\\2\\u04d4\\u04d5\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u04d5\\u04d6\\5\\u045d\\u022f\\2\\u04d6\\u04d7\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u04d7\\u04d8\\5\\u0445\\u0223\\2\\u04d8\\u04d9\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u04d9*\\3\\2\\2\\2\\u04da\\u04db\\5\\u0435\\u021b\\2\\u04db\")\n buf.write(\"\\u04dc\\5\\u045d\\u022f\\2\\u04dc\\u04dd\\5\\u045b\\u022e\\2\\u04dd\")\n 
buf.write(\"\\u04de\\5\\u0443\\u0222\\2\\u04de\\u04df\\5\\u0445\\u0223\\2\\u04df\")\n buf.write(\"\\u04e0\\5\\u043b\\u021e\\2\\u04e0,\\3\\2\\2\\2\\u04e1\\u04e2\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u04e2\\u04e3\\5\\u045d\\u022f\\2\\u04e3\\u04e4\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u04e4\\u04e5\\5\\u0451\\u0229\\2\\u04e5.\\3\\2\\2\\2\\u04e6\")\n buf.write(\"\\u04e7\\5\\u0435\\u021b\\2\\u04e7\\u04e8\\5\\u045d\\u022f\\2\\u04e8\")\n buf.write(\"\\u04e9\\5\\u045b\\u022e\\2\\u04e9\\u04ea\\5\\u0451\\u0229\\2\\u04ea\")\n buf.write(\"\\u04eb\\5\\u044d\\u0227\\2\\u04eb\\u04ec\\5\\u0435\\u021b\\2\\u04ec\")\n buf.write(\"\\u04ed\\5\\u045b\\u022e\\2\\u04ed\\u04ee\\5\\u0445\\u0223\\2\\u04ee\")\n buf.write(\"\\u04ef\\5\\u0439\\u021d\\2\\u04ef\\60\\3\\2\\2\\2\\u04f0\\u04f1\\5\")\n buf.write(\"\\u0435\\u021b\\2\\u04f1\\u04f2\\5\\u045d\\u022f\\2\\u04f2\\u04f3\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u04f3\\u04f4\\5\\u0451\\u0229\\2\\u04f4\\u04f5\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u04f5\\u04f6\\5\\u0451\\u0229\\2\\u04f6\\u04f7\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u04f7\\u04f8\\5\\u0451\\u0229\\2\\u04f8\\u04f9\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u04f9\\u04fa\\5\\u0459\\u022d\\2\\u04fa\\u04fb\")\n buf.write(\"\\7a\\2\\2\\u04fb\\u04fc\\5\\u045b\\u022e\\2\\u04fc\\u04fd\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u04fd\\u04fe\\5\\u0435\\u021b\\2\\u04fe\\u04ff\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u04ff\\u0500\\5\\u0459\\u022d\\2\\u0500\\u0501\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0501\\u0502\\5\\u0439\\u021d\\2\\u0502\\u0503\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0503\\u0504\\5\\u0445\\u0223\\2\\u0504\\u0505\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0505\\u0506\\5\\u044f\\u0228\\2\\u0506\\62\\3\\2\\2\\2\")\n buf.write(\"\\u0507\\u0508\\5\\u0437\\u021c\\2\\u0508\\u0509\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0509\\u050a\\5\\u045b\\u022e\\2\\u050a\\u050b\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u050b\\u050c\\5\\u0443\\u0222\\2\\u050c\\64\\3\\2\\2\\2\\u050d\")\n 
buf.write(\"\\u050e\\5\\u0437\\u021c\\2\\u050e\\u050f\\5\\u043d\\u021f\\2\\u050f\")\n buf.write(\"\\u0510\\5\\u043f\\u0220\\2\\u0510\\u0511\\5\\u0451\\u0229\\2\\u0511\")\n buf.write(\"\\u0512\\5\\u0457\\u022c\\2\\u0512\\u0513\\5\\u043d\\u021f\\2\\u0513\")\n buf.write(\"\\66\\3\\2\\2\\2\\u0514\\u0515\\5\\u0437\\u021c\\2\\u0515\\u0516\\5\")\n buf.write(\"\\u043d\\u021f\\2\\u0516\\u0517\\5\\u0441\\u0221\\2\\u0517\\u0518\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0518\\u0519\\5\\u044f\\u0228\\2\\u05198\\3\")\n buf.write(\"\\2\\2\\2\\u051a\\u051b\\5\\u0437\\u021c\\2\\u051b\\u051c\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u051c\\u051d\\5\\u045b\\u022e\\2\\u051d\\u051e\\5\\u0461\")\n buf.write(\"\\u0231\\2\\u051e\\u051f\\5\\u043d\\u021f\\2\\u051f\\u0520\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0520\\u0521\\5\\u044f\\u0228\\2\\u0521:\\3\\2\\2\\2\\u0522\")\n buf.write(\"\\u0523\\5\\u0437\\u021c\\2\\u0523\\u0524\\5\\u043f\\u0220\\2\\u0524\")\n buf.write(\"\\u0525\\5\\u0445\\u0223\\2\\u0525\\u0526\\5\\u044b\\u0226\\2\\u0526\")\n buf.write(\"\\u0527\\5\\u043d\\u021f\\2\\u0527<\\3\\2\\2\\2\\u0528\\u0529\\5\\u0437\")\n buf.write(\"\\u021c\\2\\u0529\\u052a\\5\\u0445\\u0223\\2\\u052a\\u052b\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u052b\\u052c\\5\\u0435\\u021b\\2\\u052c\\u052d\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u052d\\u052e\\5\\u0465\\u0233\\2\\u052e\\u052f\\7a\\2\")\n buf.write(\"\\2\\u052f\\u0530\\5\\u043b\\u021e\\2\\u0530\\u0531\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0531\\u0532\\5\\u045d\\u022f\\2\\u0532\\u0533\\5\\u0437\\u021c\")\n buf.write(\"\\2\\u0533\\u0534\\5\\u044b\\u0226\\2\\u0534\\u0535\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0535>\\3\\2\\2\\2\\u0536\\u0537\\5\\u0437\\u021c\\2\\u0537\\u0538\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0538\\u0539\\5\\u044f\\u0228\\2\\u0539\\u053a\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u053a\\u053b\\5\\u0457\\u022c\\2\\u053b\\u053c\")\n buf.write(\"\\5\\u0465\\u0233\\2\\u053c\\u053d\\7a\\2\\2\\u053d\\u053e\\5\\u043f\")\n 
buf.write(\"\\u0220\\2\\u053e\\u053f\\5\\u044b\\u0226\\2\\u053f\\u0540\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0540\\u0541\\5\\u0435\\u021b\\2\\u0541\\u0542\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0542@\\3\\2\\2\\2\\u0543\\u0544\\5\\u0437\\u021c\\2\\u0544\")\n buf.write(\"\\u0545\\5\\u0445\\u0223\\2\\u0545\\u0546\\5\\u044f\\u0228\\2\\u0546\")\n buf.write(\"\\u0547\\5\\u0435\\u021b\\2\\u0547\\u0548\\5\\u0457\\u022c\\2\\u0548\")\n buf.write(\"\\u0549\\5\\u0465\\u0233\\2\\u0549\\u054a\\7a\\2\\2\\u054a\\u054b\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u054b\\u054c\\5\\u044f\\u0228\\2\\u054c\\u054d\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u054d\\u054e\\5\\u043d\\u021f\\2\\u054e\\u054f\")\n buf.write(\"\\5\\u0441\\u0221\\2\\u054f\\u0550\\5\\u043d\\u021f\\2\\u0550\\u0551\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0551B\\3\\2\\2\\2\\u0552\\u0553\\5\\u0437\\u021c\")\n buf.write(\"\\2\\u0553\\u0554\\5\\u044b\\u0226\\2\\u0554\\u0555\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0555\\u0556\\5\\u0437\\u021c\\2\\u0556D\\3\\2\\2\\2\\u0557\\u0558\")\n buf.write(\"\\5\\u0437\\u021c\\2\\u0558\\u0559\\5\\u044b\\u0226\\2\\u0559\\u055a\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u055a\\u055b\\5\\u0439\\u021d\\2\\u055b\\u055c\")\n buf.write(\"\\5\\u0449\\u0225\\2\\u055cF\\3\\2\\2\\2\\u055d\\u055e\\5\\u0437\\u021c\")\n buf.write(\"\\2\\u055e\\u055f\\5\\u0451\\u0229\\2\\u055f\\u0560\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u0560\\u0561\\5\\u0465\\u0233\\2\\u0561H\\3\\2\\2\\2\\u0562\\u0563\")\n buf.write(\"\\5\\u0437\\u021c\\2\\u0563\\u0564\\5\\u0451\\u0229\\2\\u0564\\u0565\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0565\\u0566\\5\\u044b\\u0226\\2\\u0566\\u0567\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0567\\u0568\\5\\u0435\\u021b\\2\\u0568\\u0569\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0569J\\3\\2\\2\\2\\u056a\\u056b\\5\\u0437\\u021c\")\n buf.write(\"\\2\\u056b\\u056c\\5\\u0451\\u0229\\2\\u056c\\u056d\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u056d\\u056e\\5\\u0443\\u0222\\2\\u056eL\\3\\2\\2\\2\\u056f\\u0570\")\n 
buf.write(\"\\5\\u0437\\u021c\\2\\u0570\\u0571\\5\\u0457\\u022c\\2\\u0571\\u0572\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0572\\u0573\\5\\u0435\\u021b\\2\\u0573\\u0574\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u0574\\u0575\\5\\u045b\\u022e\\2\\u0575\\u0576\")\n buf.write(\"\\5\\u0443\\u0222\\2\\u0576N\\3\\2\\2\\2\\u0577\\u0578\\5\\u0437\\u021c\")\n buf.write(\"\\2\\u0578\\u0579\\5\\u045d\\u022f\\2\\u0579\\u057a\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u057a\\u057b\\5\\u0449\\u0225\\2\\u057bP\\3\\2\\2\\2\\u057c\\u057d\")\n buf.write(\"\\5\\u0437\\u021c\\2\\u057d\\u057e\\5\\u0465\\u0233\\2\\u057eR\\3\")\n buf.write(\"\\2\\2\\2\\u057f\\u0580\\5\\u0437\\u021c\\2\\u0580\\u0581\\5\\u0465\")\n buf.write(\"\\u0233\\2\\u0581\\u0582\\5\\u045b\\u022e\\2\\u0582\\u0583\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0583T\\3\\2\\2\\2\\u0584\\u0585\\5\\u0439\\u021d\\2\\u0585\")\n buf.write(\"V\\3\\2\\2\\2\\u0586\\u0587\\5\\u0439\\u021d\\2\\u0587\\u0588\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0588\\u0589\\5\\u0439\\u021d\\2\\u0589\\u058a\\5\\u0443\")\n buf.write(\"\\u0222\\2\\u058a\\u058b\\5\\u043d\\u021f\\2\\u058bX\\3\\2\\2\\2\\u058c\")\n buf.write(\"\\u058d\\5\\u0439\\u021d\\2\\u058d\\u058e\\5\\u0435\\u021b\\2\\u058e\")\n buf.write(\"\\u058f\\5\\u044b\\u0226\\2\\u058f\\u0590\\5\\u044b\\u0226\\2\\u0590\")\n buf.write(\"Z\\3\\2\\2\\2\\u0591\\u0592\\5\\u0439\\u021d\\2\\u0592\\u0593\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0593\\u0594\\5\\u044f\\u0228\\2\\u0594\\u0595\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0595\\u0596\\5\\u044f\\u0228\\2\\u0596\\u0597\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0597\\u0598\\5\\u0439\\u021d\\2\\u0598\\u0599\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0599\\u059a\\5\\u044b\\u0226\\2\\u059a\\\\\\3\\2\\2\\2\\u059b\")\n buf.write(\"\\u059c\\5\\u0439\\u021d\\2\\u059c\\u059d\\5\\u0435\\u021b\\2\\u059d\")\n buf.write(\"\\u059e\\5\\u0459\\u022d\\2\\u059e\\u059f\\5\\u0439\\u021d\\2\\u059f\")\n buf.write(\"\\u05a0\\5\\u0435\\u021b\\2\\u05a0\\u05a1\\5\\u043b\\u021e\\2\\u05a1\")\n 
buf.write(\"\\u05a2\\5\\u043d\\u021f\\2\\u05a2^\\3\\2\\2\\2\\u05a3\\u05a4\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u05a4\\u05a5\\5\\u0435\\u021b\\2\\u05a5\\u05a6\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u05a6\\u05a7\\5\\u043d\\u021f\\2\\u05a7`\\3\\2\\2\\2\\u05a8\")\n buf.write(\"\\u05a9\\5\\u0439\\u021d\\2\\u05a9\\u05aa\\5\\u0435\\u021b\\2\\u05aa\")\n buf.write(\"\\u05ab\\5\\u0459\\u022d\\2\\u05ab\\u05ac\\5\\u045b\\u022e\\2\\u05ac\")\n buf.write(\"b\\3\\2\\2\\2\\u05ad\\u05ae\\5\\u0439\\u021d\\2\\u05ae\\u05af\\5\\u0443\")\n buf.write(\"\\u0222\\2\\u05af\\u05b0\\5\\u0435\\u021b\\2\\u05b0\\u05b1\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u05b1d\\3\\2\\2\\2\\u05b2\\u05b3\\5\\u0439\\u021d\\2\\u05b3\")\n buf.write(\"\\u05b4\\5\\u0443\\u0222\\2\\u05b4\\u05b5\\5\\u0435\\u021b\\2\\u05b5\")\n buf.write(\"\\u05b6\\5\\u0457\\u022c\\2\\u05b6\\u05b7\\7a\\2\\2\\u05b7\\u05b8\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u05b8\\u05b9\\5\\u0459\\u022d\\2\\u05b9f\\3\")\n buf.write(\"\\2\\2\\2\\u05ba\\u05bb\\5\\u0439\\u021d\\2\\u05bb\\u05bc\\5\\u0443\")\n buf.write(\"\\u0222\\2\\u05bc\\u05bd\\5\\u0435\\u021b\\2\\u05bd\\u05be\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u05be\\u05bf\\5\\u0435\\u021b\\2\\u05bf\\u05c0\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u05c0\\u05c1\\5\\u045b\\u022e\\2\\u05c1\\u05c2\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u05c2\\u05c3\\5\\u0457\\u022c\\2\\u05c3h\\3\\2\\2\\2\\u05c4\")\n buf.write(\"\\u05c5\\5\\u0439\\u021d\\2\\u05c5\\u05c6\\5\\u0443\\u0222\\2\\u05c6\")\n buf.write(\"\\u05c7\\5\\u043d\\u021f\\2\\u05c7\\u05c8\\5\\u0439\\u021d\\2\\u05c8\")\n buf.write(\"\\u05c9\\5\\u0449\\u0225\\2\\u05c9j\\3\\2\\2\\2\\u05ca\\u05cb\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u05cb\\u05cc\\5\\u0443\\u0222\\2\\u05cc\\u05cd\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u05cdl\\3\\2\\2\\2\\u05ce\\u05cf\\5\\u0439\\u021d\\2\\u05cf\")\n buf.write(\"\\u05d0\\5\\u044b\\u0226\\2\\u05d0\\u05d1\\5\\u0451\\u0229\\2\\u05d1\")\n buf.write(\"\\u05d2\\5\\u0437\\u021c\\2\\u05d2n\\3\\2\\2\\2\\u05d3\\u05d4\\5\\u0439\")\n 
buf.write(\"\\u021d\\2\\u05d4\\u05d5\\5\\u044b\\u0226\\2\\u05d5\\u05d6\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u05d6\\u05d7\\5\\u0459\\u022d\\2\\u05d7\\u05d8\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u05d8p\\3\\2\\2\\2\\u05d9\\u05da\\5\\u0439\\u021d\\2\\u05da\")\n buf.write(\"\\u05db\\5\\u044b\\u0226\\2\\u05db\\u05dc\\5\\u045d\\u022f\\2\\u05dc\")\n buf.write(\"\\u05dd\\5\\u0459\\u022d\\2\\u05dd\\u05de\\5\\u045b\\u022e\\2\\u05de\")\n buf.write(\"\\u05df\\5\\u043d\\u021f\\2\\u05df\\u05e0\\5\\u0457\\u022c\\2\\u05e0\")\n buf.write(\"r\\3\\2\\2\\2\\u05e1\\u05e2\\5\\u0439\\u021d\\2\\u05e2\\u05e3\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u05e3\\u05e4\\5\\u044b\\u0226\\2\\u05e4\\u05e5\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u05e5\\u05e6\\5\\u043d\\u021f\\2\\u05e6\\u05e7\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u05e7\\u05e8\\5\\u045b\\u022e\\2\\u05e8t\\3\\2\\2\\2\\u05e9\")\n buf.write(\"\\u05ea\\5\\u0439\\u021d\\2\\u05ea\\u05eb\\5\\u0451\\u0229\\2\\u05eb\")\n buf.write(\"\\u05ec\\5\\u044b\\u0226\\2\\u05ec\\u05ed\\5\\u045d\\u022f\\2\\u05ed\")\n buf.write(\"\\u05ee\\5\\u044d\\u0227\\2\\u05ee\\u05ef\\5\\u044f\\u0228\\2\\u05ef\")\n buf.write(\"\\u05f0\\5\\u0459\\u022d\\2\\u05f0v\\3\\2\\2\\2\\u05f1\\u05f2\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u05f2\\u05f3\\5\\u0451\\u0229\\2\\u05f3\\u05f4\\5\\u044d\")\n buf.write(\"\\u0227\\2\\u05f4\\u05f5\\5\\u044d\\u0227\\2\\u05f5\\u05f6\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u05f6\\u05f7\\5\\u044f\\u0228\\2\\u05f7\\u05f8\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u05f8x\\3\\2\\2\\2\\u05f9\\u05fa\\5\\u0439\\u021d\\2\\u05fa\")\n buf.write(\"\\u05fb\\5\\u0451\\u0229\\2\\u05fb\\u05fc\\5\\u044d\\u0227\\2\\u05fc\")\n buf.write(\"\\u05fd\\5\\u044d\\u0227\\2\\u05fd\\u05fe\\5\\u0445\\u0223\\2\\u05fe\")\n buf.write(\"\\u05ff\\5\\u045b\\u022e\\2\\u05ffz\\3\\2\\2\\2\\u0600\\u0601\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0601\\u0602\\5\\u0451\\u0229\\2\\u0602\\u0603\\5\\u044d\")\n buf.write(\"\\u0227\\2\\u0603\\u0604\\5\\u044d\\u0227\\2\\u0604\\u0605\\5\\u0445\")\n 
buf.write(\"\\u0223\\2\\u0605\\u0606\\5\\u045b\\u022e\\2\\u0606\\u0607\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0607\\u0608\\5\\u043d\\u021f\\2\\u0608\\u0609\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u0609|\\3\\2\\2\\2\\u060a\\u060b\\5\\u0439\\u021d\\2\\u060b\")\n buf.write(\"\\u060c\\5\\u0451\\u0229\\2\\u060c\\u060d\\5\\u044d\\u0227\\2\\u060d\")\n buf.write(\"\\u060e\\5\\u0453\\u022a\\2\\u060e\\u060f\\5\\u0435\\u021b\\2\\u060f\")\n buf.write(\"\\u0610\\5\\u045b\\u022e\\2\\u0610\\u0611\\5\\u0445\\u0223\\2\\u0611\")\n buf.write(\"\\u0612\\5\\u0437\\u021c\\2\\u0612\\u0613\\5\\u0445\\u0223\\2\\u0613\")\n buf.write(\"\\u0614\\5\\u044b\\u0226\\2\\u0614\\u0615\\5\\u0445\\u0223\\2\\u0615\")\n buf.write(\"\\u0616\\5\\u045b\\u022e\\2\\u0616\\u0617\\5\\u0465\\u0233\\2\\u0617\")\n buf.write(\"~\\3\\2\\2\\2\\u0618\\u0619\\5\\u0439\\u021d\\2\\u0619\\u061a\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u061a\\u061b\\5\\u044d\\u0227\\2\\u061b\\u061c\\5\\u0453\")\n buf.write(\"\\u022a\\2\\u061c\\u061d\\5\\u0445\\u0223\\2\\u061d\\u061e\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u061e\\u061f\\5\\u043d\\u021f\\2\\u061f\\u0080\\3\\2\\2\")\n buf.write(\"\\2\\u0620\\u0621\\5\\u0439\\u021d\\2\\u0621\\u0622\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0622\\u0623\\5\\u044d\\u0227\\2\\u0623\\u0624\\5\\u0453\\u022a\")\n buf.write(\"\\2\\u0624\\u0625\\5\\u0451\\u0229\\2\\u0625\\u0626\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u0626\\u0627\\5\\u044f\\u0228\\2\\u0627\\u0628\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u0628\\u0082\\3\\2\\2\\2\\u0629\\u062a\\5\\u0439\\u021d\\2\\u062a\")\n buf.write(\"\\u062b\\5\\u0451\\u0229\\2\\u062b\\u062c\\5\\u044f\\u0228\\2\\u062c\")\n buf.write(\"\\u062d\\5\\u044f\\u0228\\2\\u062d\\u062e\\5\\u043d\\u021f\\2\\u062e\")\n buf.write(\"\\u062f\\5\\u0439\\u021d\\2\\u062f\\u0630\\5\\u045b\\u022e\\2\\u0630\")\n buf.write(\"\\u0084\\3\\2\\2\\2\\u0631\\u0632\\5\\u0439\\u021d\\2\\u0632\\u0633\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0633\\u0634\\5\\u044f\\u0228\\2\\u0634\\u0635\")\n 
buf.write(\"\\5\\u044f\\u0228\\2\\u0635\\u0636\\5\\u043d\\u021f\\2\\u0636\\u0637\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u0637\\u0638\\5\\u045b\\u022e\\2\\u0638\\u0639\")\n buf.write(\"\\7a\\2\\2\\u0639\\u063a\\5\\u0437\\u021c\\2\\u063a\\u063b\\5\\u0465\")\n buf.write(\"\\u0233\\2\\u063b\\u063c\\7a\\2\\2\\u063c\\u063d\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u063d\\u063e\\5\\u0451\\u0229\\2\\u063e\\u063f\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u063f\\u0640\\5\\u045b\\u022e\\2\\u0640\\u0086\\3\\2\\2\\2\\u0641\")\n buf.write(\"\\u0642\\5\\u0439\\u021d\\2\\u0642\\u0643\\5\\u0451\\u0229\\2\\u0643\")\n buf.write(\"\\u0644\\5\\u044f\\u0228\\2\\u0644\\u0645\\5\\u0459\\u022d\\2\\u0645\")\n buf.write(\"\\u0646\\5\\u045b\\u022e\\2\\u0646\\u0647\\5\\u0435\\u021b\\2\\u0647\")\n buf.write(\"\\u0648\\5\\u044f\\u0228\\2\\u0648\\u0649\\5\\u045b\\u022e\\2\\u0649\")\n buf.write(\"\\u0088\\3\\2\\2\\2\\u064a\\u064b\\5\\u0439\\u021d\\2\\u064b\\u064c\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u064c\\u064d\\5\\u044f\\u0228\\2\\u064d\\u064e\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u064e\\u064f\\5\\u045b\\u022e\\2\\u064f\\u0650\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0650\\u0651\\5\\u0435\\u021b\\2\\u0651\\u0652\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0652\\u0653\\5\\u044f\\u0228\\2\\u0653\\u0654\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0654\\u008a\\3\\2\\2\\2\\u0655\\u0656\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0656\\u0657\\5\\u0451\\u0229\\2\\u0657\\u0658\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0658\\u0659\\5\\u0459\\u022d\\2\\u0659\\u065a\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u065a\\u065b\\5\\u0457\\u022c\\2\\u065b\\u065c\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u065c\\u065d\\5\\u0445\\u0223\\2\\u065d\\u065e\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u065e\\u065f\\5\\u045b\\u022e\\2\\u065f\\u0660\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0660\\u008c\\3\\2\\2\\2\\u0661\\u0662\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u0662\\u0663\\5\\u0451\\u0229\\2\\u0663\\u0664\\5\\u044f\\u0228\")\n 
buf.write(\"\\2\\u0664\\u0665\\5\\u0459\\u022d\\2\\u0665\\u0666\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0666\\u0667\\5\\u0457\\u022c\\2\\u0667\\u0668\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u0668\\u0669\\5\\u0439\\u021d\\2\\u0669\\u066a\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u066a\\u066b\\5\\u0451\\u0229\\2\\u066b\\u066c\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u066c\\u008e\\3\\2\\2\\2\\u066d\\u066e\\5\\u0439\\u021d\\2\\u066e\")\n buf.write(\"\\u066f\\5\\u0451\\u0229\\2\\u066f\\u0670\\5\\u044f\\u0228\\2\\u0670\")\n buf.write(\"\\u0671\\5\\u045b\\u022e\\2\\u0671\\u0672\\5\\u043d\\u021f\\2\\u0672\")\n buf.write(\"\\u0673\\5\\u044f\\u0228\\2\\u0673\\u0674\\5\\u045b\\u022e\\2\\u0674\")\n buf.write(\"\\u0090\\3\\2\\2\\2\\u0675\\u0676\\5\\u0439\\u021d\\2\\u0676\\u0677\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0677\\u0678\\5\\u044f\\u0228\\2\\u0678\\u0679\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0679\\u067a\\5\\u043d\\u021f\\2\\u067a\\u067b\")\n buf.write(\"\\5\\u0463\\u0232\\2\\u067b\\u067c\\5\\u045b\\u022e\\2\\u067c\\u0092\")\n buf.write(\"\\3\\2\\2\\2\\u067d\\u067e\\5\\u0439\\u021d\\2\\u067e\\u067f\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u067f\\u0680\\5\\u044f\\u0228\\2\\u0680\\u0681\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0681\\u0682\\5\\u0445\\u0223\\2\\u0682\\u0683\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0683\\u0684\\5\\u045d\\u022f\\2\\u0684\\u0685\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0685\\u0094\\3\\2\\2\\2\\u0686\\u0687\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u0687\\u0688\\5\\u0451\\u0229\\2\\u0688\\u0689\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0689\\u068a\\5\\u045f\\u0230\\2\\u068a\\u068b\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u068b\\u068c\\5\\u0457\\u022c\\2\\u068c\\u068d\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u068d\\u0096\\3\\2\\2\\2\\u068e\\u068f\\5\\u0439\\u021d\\2\\u068f\")\n buf.write(\"\\u0690\\5\\u0451\\u0229\\2\\u0690\\u0691\\5\\u0457\\u022c\\2\\u0691\")\n buf.write(\"\\u0692\\5\\u0457\\u022c\\2\\u0692\\u0693\\5\\u045d\\u022f\\2\\u0693\")\n 
buf.write(\"\\u0694\\5\\u0453\\u022a\\2\\u0694\\u0695\\5\\u045b\\u022e\\2\\u0695\")\n buf.write(\"\\u0696\\7a\\2\\2\\u0696\\u0697\\5\\u0463\\u0232\\2\\u0697\\u0698\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0698\\u0699\\5\\u043b\\u021e\\2\\u0699\\u0098\")\n buf.write(\"\\3\\2\\2\\2\\u069a\\u069b\\5\\u0439\\u021d\\2\\u069b\\u069c\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u069c\\u069d\\5\\u0457\\u022c\\2\\u069d\\u069e\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u069e\\u069f\\5\\u045d\\u022f\\2\\u069f\\u06a0\\5\\u0453\")\n buf.write(\"\\u022a\\2\\u06a0\\u06a1\\5\\u045b\\u022e\\2\\u06a1\\u06a2\\7a\\2\")\n buf.write(\"\\2\\u06a2\\u06a3\\5\\u0463\\u0232\\2\\u06a3\\u06a4\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u06a4\\u06a5\\5\\u043b\\u021e\\2\\u06a5\\u06a6\\7a\\2\\2\\u06a6\")\n buf.write(\"\\u06a7\\5\\u0435\\u021b\\2\\u06a7\\u06a8\\5\\u044b\\u0226\\2\\u06a8\")\n buf.write(\"\\u06a9\\5\\u044b\\u0226\\2\\u06a9\\u009a\\3\\2\\2\\2\\u06aa\\u06ab\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u06ab\\u06ac\\5\\u0451\\u0229\\2\\u06ac\\u06ad\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u06ad\\u06ae\\5\\u045b\\u022e\\2\\u06ae\\u009c\")\n buf.write(\"\\3\\2\\2\\2\\u06af\\u06b0\\5\\u0439\\u021d\\2\\u06b0\\u06b1\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u06b1\\u06b2\\5\\u045d\\u022f\\2\\u06b2\\u06b3\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u06b3\\u06b4\\5\\u045b\\u022e\\2\\u06b4\\u009e\\3\\2\\2\")\n buf.write(\"\\2\\u06b5\\u06b6\\5\\u0439\\u021d\\2\\u06b6\\u06b7\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u06b7\\u06b8\\5\\u043d\\u021f\\2\\u06b8\\u06b9\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u06b9\\u06ba\\5\\u045b\\u022e\\2\\u06ba\\u06bb\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u06bb\\u00a0\\3\\2\\2\\2\\u06bc\\u06bd\\5\\u0439\\u021d\\2\\u06bd\")\n buf.write(\"\\u06be\\5\\u0457\\u022c\\2\\u06be\\u06bf\\5\\u0451\\u0229\\2\\u06bf\")\n buf.write(\"\\u06c0\\5\\u0459\\u022d\\2\\u06c0\\u06c1\\5\\u0459\\u022d\\2\\u06c1\")\n buf.write(\"\\u00a2\\3\\2\\2\\2\\u06c2\\u06c3\\5\\u0439\\u021d\\2\\u06c3\\u06c4\")\n 
buf.write(\"\\5\\u045d\\u022f\\2\\u06c4\\u06c5\\5\\u0437\\u021c\\2\\u06c5\\u06c6\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u06c6\\u00a4\\3\\2\\2\\2\\u06c7\\u06c8\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u06c8\\u06c9\\5\\u045d\\u022f\\2\\u06c9\\u06ca\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u06ca\\u06cb\\5\\u0457\\u022c\\2\\u06cb\\u06cc\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u06cc\\u06cd\\5\\u044f\\u0228\\2\\u06cd\\u06ce\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u06ce\\u00a6\\3\\2\\2\\2\\u06cf\\u06d0\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u06d0\\u06d1\\5\\u045d\\u022f\\2\\u06d1\\u06d2\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u06d2\\u06d3\\5\\u0457\\u022c\\2\\u06d3\\u06d4\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u06d4\\u06d5\\5\\u044f\\u0228\\2\\u06d5\\u06d6\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u06d6\\u06d7\\7a\\2\\2\\u06d7\\u06d8\\5\\u045d\\u022f\\2\\u06d8\")\n buf.write(\"\\u06d9\\5\\u0459\\u022d\\2\\u06d9\\u06da\\5\\u043d\\u021f\\2\\u06da\")\n buf.write(\"\\u06db\\5\\u0457\\u022c\\2\\u06db\\u00a8\\3\\2\\2\\2\\u06dc\\u06dd\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u06dd\\u06de\\5\\u045d\\u022f\\2\\u06de\\u06df\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u06df\\u06e0\\5\\u0459\\u022d\\2\\u06e0\\u06e1\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u06e1\\u06e2\\5\\u0457\\u022c\\2\\u06e2\\u00aa\")\n buf.write(\"\\3\\2\\2\\2\\u06e3\\u06e4\\5\\u0439\\u021d\\2\\u06e4\\u06e5\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u06e5\\u06e6\\5\\u0459\\u022d\\2\\u06e6\\u06e7\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u06e7\\u06e8\\5\\u0451\\u0229\\2\\u06e8\\u06e9\\5\\u044d\")\n buf.write(\"\\u0227\\2\\u06e9\\u06ea\\5\\u043b\\u021e\\2\\u06ea\\u06eb\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u06eb\\u06ec\\5\\u045b\\u022e\\2\\u06ec\\u06ed\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u06ed\\u06ee\\5\\u044d\\u0227\\2\\u06ee\\u00ac\\3\\2\\2\")\n buf.write(\"\\2\\u06ef\\u06f0\\5\\u0439\\u021d\\2\\u06f0\\u06f1\\5\\u0465\\u0233\")\n buf.write(\"\\2\\u06f1\\u06f2\\5\\u0439\\u021d\\2\\u06f2\\u06f3\\5\\u044b\\u0226\")\n 
buf.write(\"\\2\\u06f3\\u06f4\\5\\u043d\\u021f\\2\\u06f4\\u00ae\\3\\2\\2\\2\\u06f5\")\n buf.write(\"\\u06f6\\5\\u043b\\u021e\\2\\u06f6\\u06f7\\5\\u0435\\u021b\\2\\u06f7\")\n buf.write(\"\\u06f8\\5\\u045b\\u022e\\2\\u06f8\\u06f9\\5\\u0435\\u021b\\2\\u06f9\")\n buf.write(\"\\u00b0\\3\\2\\2\\2\\u06fa\\u06fb\\5\\u043b\\u021e\\2\\u06fb\\u06fc\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u06fc\\u06fd\\5\\u045b\\u022e\\2\\u06fd\\u06fe\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u06fe\\u06ff\\5\\u0437\\u021c\\2\\u06ff\\u0700\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0700\\u0701\\5\\u0459\\u022d\\2\\u0701\\u0702\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0702\\u00b2\\3\\2\\2\\2\\u0703\\u0704\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u0704\\u0705\\5\\u0435\\u021b\\2\\u0705\\u0706\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0706\\u0707\\5\\u043d\\u021f\\2\\u0707\\u00b4\\3\\2\\2\")\n buf.write(\"\\2\\u0708\\u0709\\5\\u043b\\u021e\\2\\u0709\\u070a\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u070a\\u070b\\5\\u0465\\u0233\\2\\u070b\\u00b6\\3\\2\\2\\2\\u070c\")\n buf.write(\"\\u070d\\5\\u043b\\u021e\\2\\u070d\\u070e\\5\\u0437\\u021c\\2\\u070e\")\n buf.write(\"\\u070f\\7a\\2\\2\\u070f\\u0710\\5\\u0457\\u022c\\2\\u0710\\u0711\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0711\\u0712\\5\\u044b\\u0226\\2\\u0712\\u0713\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0713\\u0714\\7a\\2\\2\\u0714\\u0715\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0715\\u0716\\5\\u0443\\u0222\\2\\u0716\\u0717\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0717\\u0718\\5\\u044f\\u0228\\2\\u0718\\u0719\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u0719\\u071a\\5\\u043d\\u021f\\2\\u071a\\u00b8\\3\\2\\2\")\n buf.write(\"\\2\\u071b\\u071c\\5\\u043b\\u021e\\2\\u071c\\u071d\\5\\u0437\\u021c\")\n buf.write(\"\\2\\u071d\\u071e\\5\\u045b\\u022e\\2\\u071e\\u071f\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u071f\\u0720\\5\\u044d\\u0227\\2\\u0720\\u0721\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0721\\u0722\\5\\u0467\\u0234\\2\\u0722\\u0723\\5\\u0451\\u0229\")\n 
buf.write(\"\\2\\u0723\\u0724\\5\\u044f\\u0228\\2\\u0724\\u0725\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0725\\u00ba\\3\\2\\2\\2\\u0726\\u0727\\5\\u043b\\u021e\\2\\u0727\")\n buf.write(\"\\u0728\\5\\u043b\\u021e\\2\\u0728\\u0729\\5\\u044b\\u0226\\2\\u0729\")\n buf.write(\"\\u00bc\\3\\2\\2\\2\\u072a\\u072b\\5\\u043b\\u021e\\2\\u072b\\u072c\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u072c\\u072d\\5\\u0437\\u021c\\2\\u072d\\u072e\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u072e\\u072f\\5\\u0441\\u0221\\2\\u072f\\u00be\")\n buf.write(\"\\3\\2\\2\\2\\u0730\\u0731\\5\\u043b\\u021e\\2\\u0731\\u0732\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0732\\u0733\\5\\u0439\\u021d\\2\\u0733\\u00c0\\3\\2\\2\")\n buf.write(\"\\2\\u0734\\u0735\\5\\u043b\\u021e\\2\\u0735\\u0736\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0736\\u0737\\5\\u0439\\u021d\\2\\u0737\\u0738\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0738\\u0739\\5\\u044d\\u0227\\2\\u0739\\u073a\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u073a\\u073b\\5\\u044b\\u0226\\2\\u073b\\u00c2\\3\\2\\2\\2\\u073c\")\n buf.write(\"\\u073d\\5\\u043b\\u021e\\2\\u073d\\u073e\\5\\u043d\\u021f\\2\\u073e\")\n buf.write(\"\\u073f\\5\\u0439\\u021d\\2\\u073f\\u0740\\5\\u044b\\u0226\\2\\u0740\")\n buf.write(\"\\u0741\\5\\u0435\\u021b\\2\\u0741\\u0742\\5\\u0457\\u022c\\2\\u0742\")\n buf.write(\"\\u0743\\5\\u043d\\u021f\\2\\u0743\\u00c4\\3\\2\\2\\2\\u0744\\u0745\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u0745\\u0746\\5\\u043d\\u021f\\2\\u0746\\u0747\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u0747\\u0748\\5\\u0451\\u0229\\2\\u0748\\u0749\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u0749\\u074a\\5\\u0453\\u022a\\2\\u074a\\u074b\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u074b\\u074c\\5\\u0459\\u022d\\2\\u074c\\u074d\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u074d\\u00c6\\3\\2\\2\\2\\u074e\\u074f\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u074f\\u0750\\5\\u043d\\u021f\\2\\u0750\\u0751\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0751\\u0752\\5\\u0457\\u022c\\2\\u0752\\u0753\\5\\u043d\")\n 
buf.write(\"\\u021f\\2\\u0753\\u0754\\5\\u044d\\u0227\\2\\u0754\\u0755\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0755\\u0756\\5\\u044f\\u0228\\2\\u0756\\u0757\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0757\\u00c8\\3\\2\\2\\2\\u0758\\u0759\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u0759\\u075a\\5\\u043d\\u021f\\2\\u075a\\u075b\\5\\u043f\\u0220\")\n buf.write(\"\\2\\u075b\\u075c\\5\\u0435\\u021b\\2\\u075c\\u075d\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u075d\\u075e\\5\\u044b\\u0226\\2\\u075e\\u075f\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u075f\\u00ca\\3\\2\\2\\2\\u0760\\u0761\\5\\u043b\\u021e\\2\\u0761\")\n buf.write(\"\\u0762\\5\\u043d\\u021f\\2\\u0762\\u0763\\5\\u043f\\u0220\\2\\u0763\")\n buf.write(\"\\u0764\\5\\u0435\\u021b\\2\\u0764\\u0765\\5\\u045d\\u022f\\2\\u0765\")\n buf.write(\"\\u0766\\5\\u044b\\u0226\\2\\u0766\\u0767\\5\\u045b\\u022e\\2\\u0767\")\n buf.write(\"\\u0768\\5\\u0459\\u022d\\2\\u0768\\u00cc\\3\\2\\2\\2\\u0769\\u076a\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u076a\\u076b\\5\\u043d\\u021f\\2\\u076b\\u076c\")\n buf.write(\"\\5\\u043f\\u0220\\2\\u076c\\u076d\\5\\u043d\\u021f\\2\\u076d\\u076e\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u076e\\u076f\\5\\u0457\\u022c\\2\\u076f\\u0770\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0770\\u0771\\5\\u043b\\u021e\\2\\u0771\\u00ce\")\n buf.write(\"\\3\\2\\2\\2\\u0772\\u0773\\5\\u043b\\u021e\\2\\u0773\\u0774\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0774\\u0775\\5\\u043f\\u0220\\2\\u0775\\u0776\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0776\\u0777\\5\\u044f\\u0228\\2\\u0777\\u0778\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0778\\u0779\\5\\u0457\\u022c\\2\\u0779\\u00d0\\3\\2\\2\")\n buf.write(\"\\2\\u077a\\u077b\\5\\u043b\\u021e\\2\\u077b\\u077c\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u077c\\u077d\\5\\u044b\\u0226\\2\\u077d\\u077e\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u077e\\u077f\\5\\u045b\\u022e\\2\\u077f\\u0780\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0780\\u00d2\\3\\2\\2\\2\\u0781\\u0782\\5\\u043b\\u021e\\2\\u0782\")\n 
buf.write(\"\\u0783\\5\\u043d\\u021f\\2\\u0783\\u0784\\5\\u0453\\u022a\\2\\u0784\")\n buf.write(\"\\u0785\\5\\u045b\\u022e\\2\\u0785\\u0786\\5\\u0443\\u0222\\2\\u0786\")\n buf.write(\"\\u00d4\\3\\2\\2\\2\\u0787\\u0788\\5\\u043b\\u021e\\2\\u0788\\u0789\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0789\\u078a\\5\\u0459\\u022d\\2\\u078a\\u078b\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u078b\\u00d6\\3\\2\\2\\2\\u078c\\u078d\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u078d\\u078e\\5\\u043d\\u021f\\2\\u078e\\u078f\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u078f\\u0790\\5\\u043d\\u021f\\2\\u0790\\u0791\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0791\\u0792\\5\\u044d\\u0227\\2\\u0792\\u0793\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0793\\u0794\\5\\u044f\\u0228\\2\\u0794\\u0795\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0795\\u0796\\5\\u0459\\u022d\\2\\u0796\\u0797\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0797\\u0798\\5\\u0445\\u0223\\2\\u0798\\u0799\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0799\\u00d8\\3\\2\\2\\2\\u079a\\u079b\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u079b\\u079c\\5\\u0445\\u0223\\2\\u079c\\u079d\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u079d\\u079e\\5\\u043d\\u021f\\2\\u079e\\u079f\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u079f\\u07a0\\5\\u0459\\u022d\\2\\u07a0\\u07a1\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u07a1\\u07a2\\5\\u0451\\u0229\\2\\u07a2\\u07a3\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u07a3\\u00da\\3\\2\\2\\2\\u07a4\\u07a5\\5\\u043b\\u021e\\2\\u07a5\")\n buf.write(\"\\u07a6\\5\\u0445\\u0223\\2\\u07a6\\u07a7\\5\\u0459\\u022d\\2\\u07a7\")\n buf.write(\"\\u07a8\\5\\u0435\\u021b\\2\\u07a8\\u07a9\\5\\u0437\\u021c\\2\\u07a9\")\n buf.write(\"\\u07aa\\5\\u044b\\u0226\\2\\u07aa\\u07ab\\5\\u043d\\u021f\\2\\u07ab\")\n buf.write(\"\\u00dc\\3\\2\\2\\2\\u07ac\\u07ad\\5\\u043b\\u021e\\2\\u07ad\\u07ae\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u07ae\\u07af\\5\\u0459\\u022d\\2\\u07af\\u07b0\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u07b0\\u07b1\\5\\u0459\\u022d\\2\\u07b1\\u07b2\")\n 
buf.write(\"\\5\\u0459\\u022d\\2\\u07b2\\u07b3\\5\\u0451\\u0229\\2\\u07b3\\u07b4\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u07b4\\u07b5\\5\\u0445\\u0223\\2\\u07b5\\u07b6\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u07b6\\u07b7\\5\\u045b\\u022e\\2\\u07b7\\u07b8\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u07b8\\u00de\\3\\2\\2\\2\\u07b9\\u07ba\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u07ba\\u07bb\\5\\u0445\\u0223\\2\\u07bb\\u07bc\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u07bc\\u07bd\\5\\u045b\\u022e\\2\\u07bd\\u07be\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u07be\\u07bf\\5\\u044f\\u0228\\2\\u07bf\\u07c0\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u07c0\\u07c1\\5\\u045b\\u022e\\2\\u07c1\\u00e0\\3\\2\\2\")\n buf.write(\"\\2\\u07c2\\u07c3\\5\\u043b\\u021e\\2\\u07c3\\u07c4\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u07c4\\u07c5\\5\\u0439\\u021d\\2\\u07c5\\u07c6\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u07c6\\u07c7\\5\\u044d\\u0227\\2\\u07c7\\u07c8\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u07c8\\u07c9\\5\\u044f\\u0228\\2\\u07c9\\u07ca\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u07ca\\u00e2\\3\\2\\2\\2\\u07cb\\u07cc\\5\\u043b\\u021e\\2\\u07cc\")\n buf.write(\"\\u07cd\\5\\u0451\\u0229\\2\\u07cd\\u07ce\\5\\u045d\\u022f\\2\\u07ce\")\n buf.write(\"\\u07cf\\5\\u0437\\u021c\\2\\u07cf\\u07d0\\5\\u044b\\u0226\\2\\u07d0\")\n buf.write(\"\\u07d1\\5\\u043d\\u021f\\2\\u07d1\\u00e4\\3\\2\\2\\2\\u07d2\\u07d3\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u07d3\\u07d4\\5\\u0457\\u022c\\2\\u07d4\\u07d5\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u07d5\\u07d6\\5\\u0453\\u022a\\2\\u07d6\\u00e6\")\n buf.write(\"\\3\\2\\2\\2\\u07d7\\u07d8\\5\\u043b\\u021e\\2\\u07d8\\u07d9\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u07d9\\u07da\\5\\u0445\\u0223\\2\\u07da\\u07db\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u07db\\u07dc\\5\\u045b\\u022e\\2\\u07dc\\u07dd\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u07dd\\u07de\\5\\u0457\\u022c\\2\\u07de\\u07df\\5\\u045f\")\n buf.write(\"\\u0230\\2\\u07df\\u07e0\\5\\u0435\\u021b\\2\\u07e0\\u07e1\\5\\u044b\")\n 
buf.write(\"\\u0226\\2\\u07e1\\u07e2\\7a\\2\\2\\u07e2\\u07e3\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u07e3\\u07e4\\5\\u044f\\u0228\\2\\u07e4\\u07e5\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u07e5\\u07e6\\5\\u0451\\u0229\\2\\u07e6\\u07e7\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u07e7\\u07e8\\5\\u0459\\u022d\\2\\u07e8\\u07e9\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u07e9\\u07ea\\5\\u0457\\u022c\\2\\u07ea\\u07eb\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u07eb\\u07ec\\5\\u0445\\u0223\\2\\u07ec\\u07ed\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u07ed\\u07ee\\5\\u043d\\u021f\\2\\u07ee\\u07ef\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u07ef\\u00e8\\3\\2\\2\\2\\u07f0\\u07f1\\5\\u043d\\u021f\\2\\u07f1\")\n buf.write(\"\\u07f2\\5\\u0435\\u021b\\2\\u07f2\\u07f3\\5\\u0439\\u021d\\2\\u07f3\")\n buf.write(\"\\u07f4\\5\\u0443\\u0222\\2\\u07f4\\u00ea\\3\\2\\2\\2\\u07f5\\u07f6\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u07f6\\u07f7\\5\\u044b\\u0226\\2\\u07f7\\u07f8\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u07f8\\u07f9\\5\\u044d\\u0227\\2\\u07f9\\u07fa\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u07fa\\u07fb\\5\\u044f\\u0228\\2\\u07fb\\u07fc\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u07fc\\u00ec\\3\\2\\2\\2\\u07fd\\u07fe\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u07fe\\u07ff\\5\\u044b\\u0226\\2\\u07ff\\u0800\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0800\\u0801\\5\\u043d\\u021f\\2\\u0801\\u00ee\\3\\2\\2\")\n buf.write(\"\\2\\u0802\\u0803\\5\\u043d\\u021f\\2\\u0803\\u0804\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0804\\u0805\\5\\u0459\\u022d\\2\\u0805\\u0806\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0806\\u0807\\5\\u043f\\u0220\\2\\u0807\\u00f0\\3\\2\\2\\2\\u0808\")\n buf.write(\"\\u0809\\5\\u043d\\u021f\\2\\u0809\\u080a\\5\\u044d\\u0227\\2\\u080a\")\n buf.write(\"\\u080b\\5\\u0453\\u022a\\2\\u080b\\u080c\\5\\u045b\\u022e\\2\\u080c\")\n buf.write(\"\\u080d\\5\\u0465\\u0233\\2\\u080d\\u00f2\\3\\2\\2\\2\\u080e\\u080f\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u080f\\u0810\\5\\u044f\\u0228\\2\\u0810\\u0811\")\n 
buf.write(\"\\5\\u0435\\u021b\\2\\u0811\\u0812\\5\\u0437\\u021c\\2\\u0812\\u0813\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0813\\u0814\\5\\u043d\\u021f\\2\\u0814\\u00f4\")\n buf.write(\"\\3\\2\\2\\2\\u0815\\u0816\\5\\u043d\\u021f\\2\\u0816\\u0817\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0817\\u0818\\5\\u0439\\u021d\\2\\u0818\\u0819\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0819\\u081a\\5\\u043b\\u021e\\2\\u081a\\u081b\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u081b\\u081c\\5\\u044f\\u0228\\2\\u081c\\u081d\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u081d\\u00f6\\3\\2\\2\\2\\u081e\\u081f\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u081f\\u0820\\5\\u044f\\u0228\\2\\u0820\\u0821\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u0821\\u00f8\\3\\2\\2\\2\\u0822\\u0823\\5\\u043d\\u021f\\2\\u0823\")\n buf.write(\"\\u0824\\5\\u044f\\u0228\\2\\u0824\\u0825\\5\\u045b\\u022e\\2\\u0825\")\n buf.write(\"\\u0826\\5\\u0445\\u0223\\2\\u0826\\u0827\\5\\u045b\\u022e\\2\\u0827\")\n buf.write(\"\\u0828\\5\\u0465\\u0233\\2\\u0828\\u0829\\5\\u043d\\u021f\\2\\u0829\")\n buf.write(\"\\u082a\\5\\u0459\\u022d\\2\\u082a\\u082b\\5\\u0439\\u021d\\2\\u082b\")\n buf.write(\"\\u082c\\5\\u0435\\u021b\\2\\u082c\\u082d\\5\\u0453\\u022a\\2\\u082d\")\n buf.write(\"\\u082e\\5\\u0445\\u0223\\2\\u082e\\u082f\\5\\u044f\\u0228\\2\\u082f\")\n buf.write(\"\\u0830\\5\\u0441\\u0221\\2\\u0830\\u00fa\\3\\2\\2\\2\\u0831\\u0832\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0832\\u0833\\5\\u0457\\u022c\\2\\u0833\\u0834\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0834\\u00fc\\3\\2\\2\\2\\u0835\\u0836\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0836\\u0837\\5\\u0457\\u022c\\2\\u0837\\u0838\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0838\\u0839\\5\\u0451\\u0229\\2\\u0839\\u083a\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u083a\\u083b\\5\\u0459\\u022d\\2\\u083b\\u00fe\\3\\2\\2\")\n buf.write(\"\\2\\u083c\\u083d\\5\\u043d\\u021f\\2\\u083d\\u083e\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u083e\\u083f\\5\\u0439\\u021d\\2\\u083f\\u0840\\5\\u0435\\u021b\")\n 
buf.write(\"\\2\\u0840\\u0841\\5\\u0453\\u022a\\2\\u0841\\u0842\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0842\\u0100\\3\\2\\2\\2\\u0843\\u0844\\5\\u043d\\u021f\\2\\u0844\")\n buf.write(\"\\u0845\\5\\u045f\\u0230\\2\\u0845\\u0846\\5\\u0435\\u021b\\2\\u0846\")\n buf.write(\"\\u0847\\5\\u044b\\u0226\\2\\u0847\\u0848\\5\\u044f\\u0228\\2\\u0848\")\n buf.write(\"\\u0849\\5\\u0435\\u021b\\2\\u0849\\u084a\\5\\u044d\\u0227\\2\\u084a\")\n buf.write(\"\\u084b\\5\\u043d\\u021f\\2\\u084b\\u0102\\3\\2\\2\\2\\u084c\\u084d\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u084d\\u084e\\5\\u0463\\u0232\\2\\u084e\\u084f\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u084f\\u0850\\5\\u043d\\u021f\\2\\u0850\\u0851\")\n buf.write(\"\\5\\u0453\\u022a\\2\\u0851\\u0852\\5\\u045b\\u022e\\2\\u0852\\u0104\")\n buf.write(\"\\3\\2\\2\\2\\u0853\\u0854\\5\\u043d\\u021f\\2\\u0854\\u0855\\5\\u0463\")\n buf.write(\"\\u0232\\2\\u0855\\u0856\\5\\u0439\\u021d\\2\\u0856\\u0857\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0857\\u0858\\5\\u0453\\u022a\\2\\u0858\\u0859\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0859\\u085a\\5\\u0445\\u0223\\2\\u085a\\u085b\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u085b\\u085c\\5\\u044f\\u0228\\2\\u085c\\u0106\\3\\2\\2\")\n buf.write(\"\\2\\u085d\\u085e\\5\\u043d\\u021f\\2\\u085e\\u085f\\5\\u0463\\u0232\")\n buf.write(\"\\2\\u085f\\u0860\\5\\u0439\\u021d\\2\\u0860\\u0861\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0861\\u0862\\5\\u0453\\u022a\\2\\u0862\\u0863\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0863\\u0864\\5\\u0445\\u0223\\2\\u0864\\u0865\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0865\\u0866\\5\\u044f\\u0228\\2\\u0866\\u0867\\7a\\2\\2\\u0867\")\n buf.write(\"\\u0868\\5\\u0445\\u0223\\2\\u0868\\u0869\\5\\u044f\\u0228\\2\\u0869\")\n buf.write(\"\\u086a\\5\\u0445\\u0223\\2\\u086a\\u086b\\5\\u045b\\u022e\\2\\u086b\")\n buf.write(\"\\u0108\\3\\2\\2\\2\\u086c\\u086d\\5\\u043d\\u021f\\2\\u086d\\u086e\")\n buf.write(\"\\5\\u0463\\u0232\\2\\u086e\\u086f\\5\\u0439\\u021d\\2\\u086f\\u0870\")\n 
buf.write(\"\\5\\u043d\\u021f\\2\\u0870\\u0871\\5\\u0453\\u022a\\2\\u0871\\u0872\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0872\\u0873\\5\\u0445\\u0223\\2\\u0873\\u0874\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0874\\u0875\\5\\u044f\\u0228\\2\\u0875\\u0876\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0876\\u010a\\3\\2\\2\\2\\u0877\\u0878\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0878\\u0879\\5\\u0463\\u0232\\2\\u0879\\u087a\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u087a\\u087b\\5\\u044b\\u0226\\2\\u087b\\u087c\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u087c\\u087d\\5\\u043b\\u021e\\2\\u087d\\u087e\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u087e\\u010c\\3\\2\\2\\2\\u087f\\u0880\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0880\\u0881\\5\\u0463\\u0232\\2\\u0881\\u0882\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u0882\\u0883\\5\\u044b\\u0226\\2\\u0883\\u0884\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u0884\\u0885\\5\\u0459\\u022d\\2\\u0885\\u0886\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0886\\u0887\\5\\u045f\\u0230\\2\\u0887\\u0888\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0888\\u010e\\3\\2\\2\\2\\u0889\\u088a\\5\\u043d\\u021f\\2\\u088a\")\n buf.write(\"\\u088b\\5\\u0463\\u0232\\2\\u088b\\u088c\\5\\u043d\\u021f\\2\\u088c\")\n buf.write(\"\\u088d\\5\\u0439\\u021d\\2\\u088d\\u088e\\5\\u045d\\u022f\\2\\u088e\")\n buf.write(\"\\u088f\\5\\u045b\\u022e\\2\\u088f\\u0890\\5\\u043d\\u021f\\2\\u0890\")\n buf.write(\"\\u0110\\3\\2\\2\\2\\u0891\\u0892\\5\\u043d\\u021f\\2\\u0892\\u0893\")\n buf.write(\"\\5\\u0463\\u0232\\2\\u0893\\u0894\\5\\u0445\\u0223\\2\\u0894\\u0895\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0895\\u0896\\5\\u045b\\u022e\\2\\u0896\\u0897\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0897\\u0112\\3\\2\\2\\2\\u0898\\u0899\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0899\\u089a\\5\\u0463\\u0232\\2\\u089a\\u089b\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u089b\\u089c\\5\\u045b\\u022e\\2\\u089c\\u0114\\3\\2\\2\")\n buf.write(\"\\2\\u089d\\u089e\\5\\u043d\\u021f\\2\\u089e\\u089f\\5\\u0463\\u0232\")\n 
buf.write(\"\\2\\u089f\\u08a0\\5\\u0453\\u022a\\2\\u08a0\\u08a1\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u08a1\\u08a2\\5\\u0435\\u021b\\2\\u08a2\\u08a3\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u08a3\\u08a4\\5\\u044f\\u0228\\2\\u08a4\\u0116\\3\\2\\2\\2\\u08a5\")\n buf.write(\"\\u08a6\\5\\u043d\\u021f\\2\\u08a6\\u08a7\\5\\u0463\\u0232\\2\\u08a7\")\n buf.write(\"\\u08a8\\5\\u045b\\u022e\\2\\u08a8\\u08a9\\5\\u043d\\u021f\\2\\u08a9\")\n buf.write(\"\\u08aa\\5\\u0457\\u022c\\2\\u08aa\\u08ab\\5\\u044f\\u0228\\2\\u08ab\")\n buf.write(\"\\u08ac\\5\\u0435\\u021b\\2\\u08ac\\u08ad\\5\\u044b\\u0226\\2\\u08ad\")\n buf.write(\"\\u0118\\3\\2\\2\\2\\u08ae\\u08af\\5\\u043d\\u021f\\2\\u08af\\u08b0\")\n buf.write(\"\\5\\u0463\\u0232\\2\\u08b0\\u08b1\\5\\u045b\\u022e\\2\\u08b1\\u08b2\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u08b2\\u08b3\\5\\u0435\\u021b\\2\\u08b3\\u08b4\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u08b4\\u08b5\\5\\u045b\\u022e\\2\\u08b5\\u011a\")\n buf.write(\"\\3\\2\\2\\2\\u08b6\\u08b7\\5\\u043f\\u0220\\2\\u08b7\\u08b8\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u08b8\\u08b9\\5\\u0445\\u0223\\2\\u08b9\\u08ba\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u08ba\\u08bb\\5\\u045d\\u022f\\2\\u08bb\\u08bc\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u08bc\\u08bd\\5\\u043d\\u021f\\2\\u08bd\\u011c\\3\\2\\2\")\n buf.write(\"\\2\\u08be\\u08bf\\5\\u043f\\u0220\\2\\u08bf\\u08c0\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u08c0\\u08c1\\5\\u044b\\u0226\\2\\u08c1\\u08c2\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u08c2\\u08c3\\5\\u043d\\u021f\\2\\u08c3\\u011e\\3\\2\\2\\2\\u08c4\")\n buf.write(\"\\u08c5\\5\\u043f\\u0220\\2\\u08c5\\u08c6\\5\\u043d\\u021f\\2\\u08c6\")\n buf.write(\"\\u08c7\\5\\u045b\\u022e\\2\\u08c7\\u08c8\\5\\u0439\\u021d\\2\\u08c8\")\n buf.write(\"\\u08c9\\5\\u0443\\u0222\\2\\u08c9\\u0120\\3\\2\\2\\2\\u08ca\\u08cb\")\n buf.write(\"\\5\\u043f\\u0220\\2\\u08cb\\u08cc\\5\\u0445\\u0223\\2\\u08cc\\u08cd\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u08cd\\u08ce\\5\\u0435\\u021b\\2\\u08ce\\u08cf\")\n 
buf.write(\"\\5\\u044b\\u0226\\2\\u08cf\\u0122\\3\\2\\2\\2\\u08d0\\u08d1\\5\\u043f\")\n buf.write(\"\\u0220\\2\\u08d1\\u08d2\\5\\u0445\\u0223\\2\\u08d2\\u08d3\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u08d3\\u08d4\\5\\u0459\\u022d\\2\\u08d4\\u08d5\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u08d5\\u0124\\3\\2\\2\\2\\u08d6\\u08d7\\5\\u043f\\u0220\")\n buf.write(\"\\2\\u08d7\\u08d8\\5\\u0445\\u0223\\2\\u08d8\\u08d9\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u08d9\\u08da\\5\\u0459\\u022d\\2\\u08da\\u08db\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u08db\\u08dc\\7a\\2\\2\\u08dc\\u08dd\\5\\u045f\\u0230\\2\\u08dd\")\n buf.write(\"\\u08de\\5\\u0435\\u021b\\2\\u08de\\u08df\\5\\u044b\\u0226\\2\\u08df\")\n buf.write(\"\\u08e0\\5\\u045d\\u022f\\2\\u08e0\\u08e1\\5\\u043d\\u021f\\2\\u08e1\")\n buf.write(\"\\u0126\\3\\2\\2\\2\\u08e2\\u08e3\\5\\u043f\\u0220\\2\\u08e3\\u08e4\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u08e4\\u08e5\\5\\u0451\\u0229\\2\\u08e5\\u08e6\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u08e6\\u08e7\\5\\u045b\\u022e\\2\\u08e7\\u0128\")\n buf.write(\"\\3\\2\\2\\2\\u08e8\\u08e9\\5\\u043f\\u0220\\2\\u08e9\\u08ea\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u08ea\\u08eb\\5\\u044b\\u0226\\2\\u08eb\\u08ec\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u08ec\\u08ed\\5\\u0451\\u0229\\2\\u08ed\\u08ee\\5\\u0461\")\n buf.write(\"\\u0231\\2\\u08ee\\u08ef\\5\\u0445\\u0223\\2\\u08ef\\u08f0\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u08f0\\u08f1\\5\\u0441\\u0221\\2\\u08f1\\u012a\\3\\2\\2\")\n buf.write(\"\\2\\u08f2\\u08f3\\5\\u043f\\u0220\\2\\u08f3\\u08f4\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u08f4\\u08f5\\5\\u044b\\u0226\\2\\u08f5\\u08f6\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u08f6\\u08f7\\5\\u0451\\u0229\\2\\u08f7\\u08f8\\5\\u0461\\u0231\")\n buf.write(\"\\2\\u08f8\\u08f9\\5\\u0459\\u022d\\2\\u08f9\\u012c\\3\\2\\2\\2\\u08fa\")\n buf.write(\"\\u08fb\\5\\u043f\\u0220\\2\\u08fb\\u08fc\\5\\u0451\\u0229\\2\\u08fc\")\n buf.write(\"\\u08fd\\5\\u0457\\u022c\\2\\u08fd\\u012e\\3\\2\\2\\2\\u08fe\\u08ff\")\n 
buf.write(\"\\5\\u043f\\u0220\\2\\u08ff\\u0900\\5\\u0451\\u0229\\2\\u0900\\u0901\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0901\\u0902\\5\\u0435\\u021b\\2\\u0902\\u0903\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0903\\u0904\\5\\u044b\\u0226\\2\\u0904\\u0130\")\n buf.write(\"\\3\\2\\2\\2\\u0905\\u0906\\5\\u043f\\u0220\\2\\u0906\\u0907\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0907\\u0908\\5\\u0457\\u022c\\2\\u0908\\u0909\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0909\\u090a\\5\\u043d\\u021f\\2\\u090a\\u0132\\3\\2\\2\")\n buf.write(\"\\2\\u090b\\u090c\\5\\u043f\\u0220\\2\\u090c\\u090d\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u090d\\u090e\\5\\u0451\\u0229\\2\\u090e\\u090f\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u090f\\u0134\\3\\2\\2\\2\\u0910\\u0911\\5\\u043f\\u0220\\2\\u0911\")\n buf.write(\"\\u0912\\5\\u045d\\u022f\\2\\u0912\\u0913\\5\\u044b\\u0226\\2\\u0913\")\n buf.write(\"\\u0914\\5\\u044b\\u0226\\2\\u0914\\u0136\\3\\2\\2\\2\\u0915\\u0916\")\n buf.write(\"\\5\\u043f\\u0220\\2\\u0916\\u0917\\5\\u045d\\u022f\\2\\u0917\\u0918\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0918\\u0919\\5\\u0439\\u021d\\2\\u0919\\u091a\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u091a\\u091b\\5\\u0445\\u0223\\2\\u091b\\u091c\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u091c\\u091d\\5\\u044f\\u0228\\2\\u091d\\u0138\")\n buf.write(\"\\3\\2\\2\\2\\u091e\\u091f\\5\\u0441\\u0221\\2\\u091f\\u0920\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0920\\u0921\\5\\u045b\\u022e\\2\\u0921\\u0922\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0922\\u013a\\3\\2\\2\\2\\u0923\\u0924\\5\\u0441\\u0221\")\n buf.write(\"\\2\\u0924\\u0925\\5\\u0457\\u022c\\2\\u0925\\u0926\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0926\\u0927\\5\\u044f\\u0228\\2\\u0927\\u0928\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0928\\u013c\\3\\2\\2\\2\\u0929\\u092a\\5\\u0441\\u0221\\2\\u092a\")\n buf.write(\"\\u092b\\5\\u0457\\u022c\\2\\u092b\\u092c\\5\\u0451\\u0229\\2\\u092c\")\n buf.write(\"\\u092d\\5\\u045d\\u022f\\2\\u092d\\u092e\\5\\u0453\\u022a\\2\\u092e\")\n 
buf.write(\"\\u013e\\3\\2\\2\\2\\u092f\\u0930\\5\\u0441\\u0221\\2\\u0930\\u0931\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0931\\u0932\\5\\u0451\\u0229\\2\\u0932\\u0933\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u0933\\u0934\\5\\u0453\\u022a\\2\\u0934\\u0935\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0935\\u0936\\5\\u044f\\u0228\\2\\u0936\\u0937\")\n buf.write(\"\\5\\u0441\\u0221\\2\\u0937\\u0140\\3\\2\\2\\2\\u0938\\u0939\\5\\u0443\")\n buf.write(\"\\u0222\\2\\u0939\\u093a\\5\\u0435\\u021b\\2\\u093a\\u093b\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u093b\\u093c\\5\\u0443\\u0222\\2\\u093c\\u0142\\3\\2\\2\")\n buf.write(\"\\2\\u093d\\u093e\\5\\u0443\\u0222\\2\\u093e\\u093f\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u093f\\u0940\\5\\u045f\\u0230\\2\\u0940\\u0941\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0941\\u0942\\5\\u044f\\u0228\\2\\u0942\\u0943\\5\\u0441\\u0221\")\n buf.write(\"\\2\\u0943\\u0144\\3\\2\\2\\2\\u0944\\u0945\\5\\u0443\\u0222\\2\\u0945\")\n buf.write(\"\\u0946\\5\\u0445\\u0223\\2\\u0946\\u0947\\5\\u043b\\u021e\\2\\u0947\")\n buf.write(\"\\u0948\\5\\u043d\\u021f\\2\\u0948\\u0146\\3\\2\\2\\2\\u0949\\u094a\")\n buf.write(\"\\5\\u0443\\u0222\\2\\u094a\\u094b\\5\\u0451\\u0229\\2\\u094b\\u094c\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u094c\\u094d\\5\\u0457\\u022c\\2\\u094d\\u0148\")\n buf.write(\"\\3\\2\\2\\2\\u094e\\u094f\\5\\u0445\\u0223\\2\\u094f\\u0950\\5\\u043f\")\n buf.write(\"\\u0220\\2\\u0950\\u014a\\3\\2\\2\\2\\u0951\\u0952\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0952\\u0953\\5\\u0441\\u0221\\2\\u0953\\u0954\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0954\\u0955\\5\\u0451\\u0229\\2\\u0955\\u0956\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0956\\u0957\\5\\u043d\\u021f\\2\\u0957\\u014c\\3\\2\\2\\2\\u0958\")\n buf.write(\"\\u0959\\5\\u0445\\u0223\\2\\u0959\\u095a\\5\\u044d\\u0227\\2\\u095a\")\n buf.write(\"\\u095b\\5\\u044d\\u0227\\2\\u095b\\u095c\\5\\u043d\\u021f\\2\\u095c\")\n buf.write(\"\\u095d\\5\\u043b\\u021e\\2\\u095d\\u095e\\5\\u0445\\u0223\\2\\u095e\")\n 
buf.write(\"\\u095f\\5\\u0435\\u021b\\2\\u095f\\u0960\\5\\u045b\\u022e\\2\\u0960\")\n buf.write(\"\\u0961\\5\\u043d\\u021f\\2\\u0961\\u014e\\3\\2\\2\\2\\u0962\\u0963\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0963\\u0964\\5\\u044f\\u0228\\2\\u0964\\u0150\")\n buf.write(\"\\3\\2\\2\\2\\u0965\\u0966\\5\\u0445\\u0223\\2\\u0966\\u0967\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0967\\u0968\\5\\u0439\\u021d\\2\\u0968\\u0969\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u0969\\u096a\\5\\u045d\\u022f\\2\\u096a\\u096b\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u096b\\u096c\\5\\u043d\\u021f\\2\\u096c\\u0152\\3\\2\\2\")\n buf.write(\"\\2\\u096d\\u096e\\5\\u0445\\u0223\\2\\u096e\\u096f\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u096f\\u0970\\5\\u0439\\u021d\\2\\u0970\\u0971\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0971\\u0972\\5\\u045d\\u022f\\2\\u0972\\u0973\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u0973\\u0974\\5\\u0445\\u0223\\2\\u0974\\u0975\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0975\\u0976\\5\\u0441\\u0221\\2\\u0976\\u0154\\3\\2\\2\\2\\u0977\")\n buf.write(\"\\u0978\\5\\u0445\\u0223\\2\\u0978\\u0979\\5\\u044f\\u0228\\2\\u0979\")\n buf.write(\"\\u097a\\5\\u0439\\u021d\\2\\u097a\\u097b\\5\\u0457\\u022c\\2\\u097b\")\n buf.write(\"\\u097c\\5\\u043d\\u021f\\2\\u097c\\u097d\\5\\u044d\\u0227\\2\\u097d\")\n buf.write(\"\\u097e\\5\\u043d\\u021f\\2\\u097e\\u097f\\5\\u044f\\u0228\\2\\u097f\")\n buf.write(\"\\u0980\\5\\u045b\\u022e\\2\\u0980\\u0156\\3\\2\\2\\2\\u0981\\u0982\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0982\\u0983\\5\\u044f\\u0228\\2\\u0983\\u0984\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u0984\\u0985\\5\\u043d\\u021f\\2\\u0985\\u0986\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0986\\u0987\\5\\u045b\\u022e\\2\\u0987\\u0158\")\n buf.write(\"\\3\\2\\2\\2\\u0988\\u0989\\5\\u0445\\u0223\\2\\u0989\\u098a\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u098a\\u098b\\5\\u043b\\u021e\\2\\u098b\\u098c\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u098c\\u098d\\5\\u0463\\u0232\\2\\u098d\\u015a\\3\\2\\2\")\n 
buf.write(\"\\2\\u098e\\u098f\\5\\u0445\\u0223\\2\\u098f\\u0990\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0990\\u0991\\5\\u043b\\u021e\\2\\u0991\\u0992\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0992\\u0993\\5\\u0463\\u0232\\2\\u0993\\u0994\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0994\\u0995\\5\\u043b\\u021e\\2\\u0995\\u015c\\3\\2\\2\\2\\u0996\")\n buf.write(\"\\u0997\\5\\u0445\\u0223\\2\\u0997\\u0998\\5\\u044f\\u0228\\2\\u0998\")\n buf.write(\"\\u0999\\5\\u043b\\u021e\\2\\u0999\\u099a\\5\\u0445\\u0223\\2\\u099a\")\n buf.write(\"\\u099b\\5\\u0439\\u021d\\2\\u099b\\u099c\\5\\u0435\\u021b\\2\\u099c\")\n buf.write(\"\\u099d\\5\\u045b\\u022e\\2\\u099d\\u099e\\5\\u0451\\u0229\\2\\u099e\")\n buf.write(\"\\u099f\\5\\u0457\\u022c\\2\\u099f\\u015e\\3\\2\\2\\2\\u09a0\\u09a1\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u09a1\\u09a2\\5\\u044f\\u0228\\2\\u09a2\\u09a3\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u09a3\\u09a4\\5\\u0445\\u0223\\2\\u09a4\\u09a5\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u09a5\\u09a6\\5\\u043d\\u021f\\2\\u09a6\\u09a7\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u09a7\\u0160\\3\\2\\2\\2\\u09a8\\u09a9\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u09a9\\u09aa\\5\\u044f\\u0228\\2\\u09aa\\u09ab\\5\\u043f\")\n buf.write(\"\\u0220\\2\\u09ab\\u09ac\\5\\u0445\\u0223\\2\\u09ac\\u09ad\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u09ad\\u09ae\\5\\u0445\\u0223\\2\\u09ae\\u09af\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u09af\\u09b0\\5\\u043d\\u021f\\2\\u09b0\\u0162\\3\\2\\2\")\n buf.write(\"\\2\\u09b1\\u09b2\\5\\u0445\\u0223\\2\\u09b2\\u09b3\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u09b3\\u09b4\\5\\u044b\\u0226\\2\\u09b4\\u09b5\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u09b5\\u09b6\\5\\u044f\\u0228\\2\\u09b6\\u09b7\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u09b7\\u0164\\3\\2\\2\\2\\u09b8\\u09b9\\5\\u0445\\u0223\\2\\u09b9\")\n buf.write(\"\\u09ba\\5\\u044f\\u0228\\2\\u09ba\\u09bb\\5\\u044f\\u0228\\2\\u09bb\")\n buf.write(\"\\u09bc\\5\\u043d\\u021f\\2\\u09bc\\u09bd\\5\\u0457\\u022c\\2\\u09bd\")\n 
buf.write(\"\\u0166\\3\\2\\2\\2\\u09be\\u09bf\\5\\u0445\\u0223\\2\\u09bf\\u09c0\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u09c0\\u09c1\\5\\u0451\\u0229\\2\\u09c1\\u09c2\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u09c2\\u09c3\\5\\u045b\\u022e\\2\\u09c3\\u0168\")\n buf.write(\"\\3\\2\\2\\2\\u09c4\\u09c5\\5\\u0445\\u0223\\2\\u09c5\\u09c6\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u09c6\\u09c7\\5\\u0459\\u022d\\2\\u09c7\\u09c8\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u09c8\\u09c9\\5\\u0457\\u022c\\2\\u09c9\\u09ca\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u09ca\\u016a\\3\\2\\2\\2\\u09cb\\u09cc\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u09cc\\u09cd\\5\\u044f\\u0228\\2\\u09cd\\u09ce\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u09ce\\u09cf\\5\\u045b\\u022e\\2\\u09cf\\u09d0\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u09d0\\u09d1\\5\\u044f\\u0228\\2\\u09d1\\u09d2\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u09d2\\u09d3\\5\\u0445\\u0223\\2\\u09d3\\u09d4\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u09d4\\u09d5\\5\\u0437\\u021c\\2\\u09d5\\u09d6\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u09d6\\u09d7\\5\\u043d\\u021f\\2\\u09d7\\u016c\\3\\2\\2\\2\\u09d8\")\n buf.write(\"\\u09d9\\5\\u0445\\u0223\\2\\u09d9\\u09da\\5\\u044f\\u0228\\2\\u09da\")\n buf.write(\"\\u09db\\5\\u0459\\u022d\\2\\u09db\\u09dc\\5\\u045b\\u022e\\2\\u09dc\")\n buf.write(\"\\u09dd\\5\\u043d\\u021f\\2\\u09dd\\u09de\\5\\u0435\\u021b\\2\\u09de\")\n buf.write(\"\\u09df\\5\\u043b\\u021e\\2\\u09df\\u016e\\3\\2\\2\\2\\u09e0\\u09e1\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u09e1\\u09e2\\5\\u044f\\u0228\\2\\u09e2\\u09e3\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u09e3\\u0170\\3\\2\\2\\2\\u09e4\\u09e5\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u09e5\\u09e6\\5\\u044f\\u0228\\2\\u09e6\\u09e7\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u09e7\\u09e8\\5\\u043d\\u021f\\2\\u09e8\\u09e9\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u09e9\\u09ea\\5\\u043d\\u021f\\2\\u09ea\\u09eb\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u09eb\\u0172\\3\\2\\2\\2\\u09ec\\u09ed\\5\\u0445\\u0223\")\n 
buf.write(\"\\2\\u09ed\\u09ee\\5\\u044f\\u0228\\2\\u09ee\\u09ef\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u09ef\\u09f0\\5\\u043d\\u021f\\2\\u09f0\\u09f1\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u09f1\\u09f2\\5\\u0459\\u022d\\2\\u09f2\\u09f3\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u09f3\\u09f4\\5\\u0439\\u021d\\2\\u09f4\\u09f5\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u09f5\\u0174\\3\\2\\2\\2\\u09f6\\u09f7\\5\\u0445\\u0223\\2\\u09f7\")\n buf.write(\"\\u09f8\\5\\u044f\\u0228\\2\\u09f8\\u09f9\\5\\u045b\\u022e\\2\\u09f9\")\n buf.write(\"\\u09fa\\5\\u043d\\u021f\\2\\u09fa\\u09fb\\5\\u0457\\u022c\\2\\u09fb\")\n buf.write(\"\\u09fc\\5\\u045f\\u0230\\2\\u09fc\\u09fd\\5\\u0435\\u021b\\2\\u09fd\")\n buf.write(\"\\u09fe\\5\\u044b\\u0226\\2\\u09fe\\u0176\\3\\2\\2\\2\\u09ff\\u0a00\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0a00\\u0a01\\5\\u044f\\u0228\\2\\u0a01\\u0a02\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0a02\\u0a03\\5\\u0451\\u0229\\2\\u0a03\\u0178\")\n buf.write(\"\\3\\2\\2\\2\\u0a04\\u0a05\\5\\u0445\\u0223\\2\\u0a05\\u0a06\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0a06\\u0a07\\5\\u045f\\u0230\\2\\u0a07\\u0a08\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0a08\\u0a09\\5\\u044b\\u0226\\2\\u0a09\\u0a0a\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0a0a\\u0a0b\\5\\u043b\\u021e\\2\\u0a0b\\u0a0c\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0a0c\\u0a0d\\5\\u045b\\u022e\\2\\u0a0d\\u0a0e\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0a0e\\u017a\\3\\2\\2\\2\\u0a0f\\u0a10\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0a10\\u0a11\\5\\u0459\\u022d\\2\\u0a11\\u017c\\3\\2\\2\\2\\u0a12\")\n buf.write(\"\\u0a13\\5\\u0445\\u0223\\2\\u0a13\\u0a14\\5\\u0459\\u022d\\2\\u0a14\")\n buf.write(\"\\u0a15\\5\\u0451\\u0229\\2\\u0a15\\u0a16\\5\\u044b\\u0226\\2\\u0a16\")\n buf.write(\"\\u0a17\\5\\u0435\\u021b\\2\\u0a17\\u0a18\\5\\u045b\\u022e\\2\\u0a18\")\n buf.write(\"\\u0a19\\5\\u0445\\u0223\\2\\u0a19\\u0a1a\\5\\u0451\\u0229\\2\\u0a1a\")\n buf.write(\"\\u0a1b\\5\\u044f\\u0228\\2\\u0a1b\\u017e\\3\\2\\2\\2\\u0a1c\\u0a1d\")\n 
buf.write(\"\\5\\u0445\\u0223\\2\\u0a1d\\u0a1e\\5\\u045b\\u022e\\2\\u0a1e\\u0a1f\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0a1f\\u0a20\\5\\u0457\\u022c\\2\\u0a20\\u0a21\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0a21\\u0a22\\5\\u045b\\u022e\\2\\u0a22\\u0a23\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0a23\\u0180\\3\\2\\2\\2\\u0a24\\u0a25\\5\\u0447\")\n buf.write(\"\\u0224\\2\\u0a25\\u0a26\\5\\u0435\\u021b\\2\\u0a26\\u0a27\\5\\u045f\")\n buf.write(\"\\u0230\\2\\u0a27\\u0a28\\5\\u0435\\u021b\\2\\u0a28\\u0182\\3\\2\\2\")\n buf.write(\"\\2\\u0a29\\u0a2a\\5\\u0447\\u0224\\2\\u0a2a\\u0a2b\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0a2b\\u0a2c\\5\\u0445\\u0223\\2\\u0a2c\\u0a2d\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0a2d\\u0184\\3\\2\\2\\2\\u0a2e\\u0a2f\\5\\u0449\\u0225\\2\\u0a2f\")\n buf.write(\"\\u0a30\\5\\u043d\\u021f\\2\\u0a30\\u0a31\\5\\u043d\\u021f\\2\\u0a31\")\n buf.write(\"\\u0a32\\5\\u0453\\u022a\\2\\u0a32\\u0186\\3\\2\\2\\2\\u0a33\\u0a34\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0a34\\u0a35\\5\\u0435\\u021b\\2\\u0a35\\u0a36\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0a36\\u0a37\\5\\u0441\\u0221\\2\\u0a37\\u0a38\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u0a38\\u0a39\\5\\u0435\\u021b\\2\\u0a39\\u0a3a\")\n buf.write(\"\\5\\u0441\\u0221\\2\\u0a3a\\u0a3b\\5\\u043d\\u021f\\2\\u0a3b\\u0188\")\n buf.write(\"\\3\\2\\2\\2\\u0a3c\\u0a3d\\5\\u044b\\u0226\\2\\u0a3d\\u0a3e\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0a3e\\u0a3f\\5\\u0459\\u022d\\2\\u0a3f\\u0a40\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0a40\\u018a\\3\\2\\2\\2\\u0a41\\u0a42\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0a42\\u0a43\\5\\u0435\\u021b\\2\\u0a43\\u0a44\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0a44\\u0a45\\5\\u045b\\u022e\\2\\u0a45\\u0a46\\7a\\2\\2\\u0a46\")\n buf.write(\"\\u0a47\\5\\u045f\\u0230\\2\\u0a47\\u0a48\\5\\u0435\\u021b\\2\\u0a48\")\n buf.write(\"\\u0a49\\5\\u044b\\u0226\\2\\u0a49\\u0a4a\\5\\u045d\\u022f\\2\\u0a4a\")\n buf.write(\"\\u0a4b\\5\\u043d\\u021f\\2\\u0a4b\\u018c\\3\\2\\2\\2\\u0a4c\\u0a4d\")\n 
buf.write(\"\\5\\u044b\\u0226\\2\\u0a4d\\u0a4e\\5\\u043d\\u021f\\2\\u0a4e\\u0a4f\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0a4f\\u0a50\\5\\u043b\\u021e\\2\\u0a50\\u0a51\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0a51\\u0a52\\5\\u044f\\u0228\\2\\u0a52\\u0a53\")\n buf.write(\"\\5\\u0441\\u0221\\2\\u0a53\\u018e\\3\\2\\2\\2\\u0a54\\u0a55\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u0a55\\u0a56\\5\\u043d\\u021f\\2\\u0a56\\u0a57\\5\\u043f\")\n buf.write(\"\\u0220\\2\\u0a57\\u0a58\\5\\u045b\\u022e\\2\\u0a58\\u0190\\3\\2\\2\")\n buf.write(\"\\2\\u0a59\\u0a5a\\5\\u044b\\u0226\\2\\u0a5a\\u0a5b\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0a5b\\u0a5c\\5\\u045f\\u0230\\2\\u0a5c\\u0a5d\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0a5d\\u0a5e\\5\\u044b\\u0226\\2\\u0a5e\\u0192\\3\\2\\2\\2\\u0a5f\")\n buf.write(\"\\u0a60\\5\\u044b\\u0226\\2\\u0a60\\u0a61\\5\\u0445\\u0223\\2\\u0a61\")\n buf.write(\"\\u0a62\\5\\u0437\\u021c\\2\\u0a62\\u0a63\\5\\u0457\\u022c\\2\\u0a63\")\n buf.write(\"\\u0a64\\5\\u0435\\u021b\\2\\u0a64\\u0a65\\5\\u0457\\u022c\\2\\u0a65\")\n buf.write(\"\\u0a66\\5\\u0465\\u0233\\2\\u0a66\\u0194\\3\\2\\2\\2\\u0a67\\u0a68\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0a68\\u0a69\\5\\u0445\\u0223\\2\\u0a69\\u0a6a\")\n buf.write(\"\\5\\u0449\\u0225\\2\\u0a6a\\u0a6b\\5\\u043d\\u021f\\2\\u0a6b\\u0196\")\n buf.write(\"\\3\\2\\2\\2\\u0a6c\\u0a6d\\5\\u044b\\u0226\\2\\u0a6d\\u0a6e\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0a6e\\u0a6f\\5\\u0449\\u0225\\2\\u0a6f\\u0a70\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0a70\\u0a71\\7\\64\\2\\2\\u0a71\\u0198\\3\\2\\2\\2\\u0a72\")\n buf.write(\"\\u0a73\\5\\u044b\\u0226\\2\\u0a73\\u0a74\\5\\u0445\\u0223\\2\\u0a74\")\n buf.write(\"\\u0a75\\5\\u0449\\u0225\\2\\u0a75\\u0a76\\5\\u043d\\u021f\\2\\u0a76\")\n buf.write(\"\\u0a77\\7\\66\\2\\2\\u0a77\\u019a\\3\\2\\2\\2\\u0a78\\u0a79\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u0a79\\u0a7a\\5\\u0445\\u0223\\2\\u0a7a\\u0a7b\\5\\u0449\")\n buf.write(\"\\u0225\\2\\u0a7b\\u0a7c\\5\\u043d\\u021f\\2\\u0a7c\\u0a7d\\5\\u0439\")\n 
buf.write(\"\\u021d\\2\\u0a7d\\u019c\\3\\2\\2\\2\\u0a7e\\u0a7f\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0a7f\\u0a80\\5\\u0445\\u0223\\2\\u0a80\\u0a81\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u0a81\\u0a82\\5\\u0445\\u0223\\2\\u0a82\\u0a83\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0a83\\u019e\\3\\2\\2\\2\\u0a84\\u0a85\\5\\u044b\\u0226\\2\\u0a85\")\n buf.write(\"\\u0a86\\5\\u0451\\u0229\\2\\u0a86\\u0a87\\5\\u0439\\u021d\\2\\u0a87\")\n buf.write(\"\\u0a88\\5\\u0435\\u021b\\2\\u0a88\\u0a89\\5\\u044b\\u0226\\2\\u0a89\")\n buf.write(\"\\u01a0\\3\\2\\2\\2\\u0a8a\\u0a8b\\5\\u044b\\u0226\\2\\u0a8b\\u0a8c\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0a8c\\u0a8d\\5\\u0439\\u021d\\2\\u0a8d\\u0a8e\")\n buf.write(\"\\5\\u0449\\u0225\\2\\u0a8e\\u01a2\\3\\2\\2\\2\\u0a8f\\u0a90\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u0a90\\u0a91\\5\\u0451\\u0229\\2\\u0a91\\u0a92\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0a92\\u0a93\\5\\u0449\\u0225\\2\\u0a93\\u0a94\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0a94\\u0a95\\5\\u043b\\u021e\\2\\u0a95\\u01a4\\3\\2\\2\")\n buf.write(\"\\2\\u0a96\\u0a97\\5\\u044b\\u0226\\2\\u0a97\\u0a98\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0a98\\u0a99\\5\\u0441\\u0221\\2\\u0a99\\u01a6\\3\\2\\2\\2\\u0a9a\")\n buf.write(\"\\u0a9b\\5\\u044b\\u0226\\2\\u0a9b\\u0a9c\\5\\u0451\\u0229\\2\\u0a9c\")\n buf.write(\"\\u0a9d\\5\\u0441\\u0221\\2\\u0a9d\\u0a9e\\5\\u0451\\u0229\\2\\u0a9e\")\n buf.write(\"\\u0a9f\\5\\u043f\\u0220\\2\\u0a9f\\u0aa0\\5\\u043f\\u0220\\2\\u0aa0\")\n buf.write(\"\\u01a8\\3\\2\\2\\2\\u0aa1\\u0aa2\\5\\u044b\\u0226\\2\\u0aa2\\u0aa3\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0aa3\\u0aa4\\5\\u0441\\u0221\\2\\u0aa4\\u0aa5\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0aa5\\u0aa6\\5\\u044f\\u0228\\2\\u0aa6\\u01aa\")\n buf.write(\"\\3\\2\\2\\2\\u0aa7\\u0aa8\\5\\u044b\\u0226\\2\\u0aa8\\u0aa9\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0aa9\\u0aaa\\5\\u044f\\u0228\\2\\u0aaa\\u0aab\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u0aab\\u01ac\\3\\2\\2\\2\\u0aac\\u0aad\\5\\u044b\\u0226\")\n 
buf.write(\"\\2\\u0aad\\u0aae\\5\\u0451\\u0229\\2\\u0aae\\u0aaf\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0aaf\\u0ab0\\5\\u0453\\u022a\\2\\u0ab0\\u01ae\\3\\2\\2\\2\\u0ab1\")\n buf.write(\"\\u0ab2\\5\\u044d\\u0227\\2\\u0ab2\\u0ab3\\5\\u0435\\u021b\\2\\u0ab3\")\n buf.write(\"\\u0ab4\\5\\u0445\\u0223\\2\\u0ab4\\u0ab5\\5\\u044f\\u0228\\2\\u0ab5\")\n buf.write(\"\\u01b0\\3\\2\\2\\2\\u0ab6\\u0ab7\\5\\u044d\\u0227\\2\\u0ab7\\u0ab8\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0ab8\\u0ab9\\5\\u0453\\u022a\\2\\u0ab9\\u01b2\")\n buf.write(\"\\3\\2\\2\\2\\u0aba\\u0abb\\5\\u044d\\u0227\\2\\u0abb\\u0abc\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0abc\\u0abd\\5\\u045b\\u022e\\2\\u0abd\\u0abe\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0abe\\u0abf\\5\\u0443\\u0222\\2\\u0abf\\u0ac0\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0ac0\\u0ac1\\5\\u043b\\u021e\\2\\u0ac1\\u01b4\\3\\2\\2\")\n buf.write(\"\\2\\u0ac2\\u0ac3\\5\\u044d\\u0227\\2\\u0ac3\\u0ac4\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0ac4\\u0ac5\\5\\u0463\\u0232\\2\\u0ac5\\u0ac6\\5\\u045f\\u0230\")\n buf.write(\"\\2\\u0ac6\\u0ac7\\5\\u0435\\u021b\\2\\u0ac7\\u0ac8\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0ac8\\u0ac9\\5\\u045d\\u022f\\2\\u0ac9\\u0aca\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0aca\\u01b6\\3\\2\\2\\2\\u0acb\\u0acc\\5\\u044d\\u0227\\2\\u0acc\")\n buf.write(\"\\u0acd\\5\\u043d\\u021f\\2\\u0acd\\u0ace\\5\\u0435\\u021b\\2\\u0ace\")\n buf.write(\"\\u0acf\\5\\u0459\\u022d\\2\\u0acf\\u0ad0\\5\\u045d\\u022f\\2\\u0ad0\")\n buf.write(\"\\u0ad1\\5\\u0457\\u022c\\2\\u0ad1\\u0ad2\\5\\u043d\\u021f\\2\\u0ad2\")\n buf.write(\"\\u0ad3\\5\\u0459\\u022d\\2\\u0ad3\\u01b8\\3\\2\\2\\2\\u0ad4\\u0ad5\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u0ad5\\u0ad6\\5\\u043d\\u021f\\2\\u0ad6\\u0ad7\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u0ad7\\u0ad8\\5\\u0437\\u021c\\2\\u0ad8\\u0ad9\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0ad9\\u0ada\\5\\u0457\\u022c\\2\\u0ada\\u01ba\")\n buf.write(\"\\3\\2\\2\\2\\u0adb\\u0adc\\5\\u044d\\u0227\\2\\u0adc\\u0add\\5\\u043d\")\n 
buf.write(\"\\u021f\\2\\u0add\\u0ade\\5\\u0457\\u022c\\2\\u0ade\\u0adf\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u0adf\\u0ae0\\5\\u043d\\u021f\\2\\u0ae0\\u01bc\\3\\2\\2\")\n buf.write(\"\\2\\u0ae1\\u0ae2\\5\\u044d\\u0227\\2\\u0ae2\\u0ae3\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0ae3\\u0ae4\\5\\u044f\\u0228\\2\\u0ae4\\u0ae5\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u0ae5\\u0ae6\\5\\u0459\\u022d\\2\\u0ae6\\u01be\\3\\2\\2\\2\\u0ae7\")\n buf.write(\"\\u0ae8\\5\\u044d\\u0227\\2\\u0ae8\\u0ae9\\5\\u0445\\u0223\\2\\u0ae9\")\n buf.write(\"\\u0aea\\5\\u044f\\u0228\\2\\u0aea\\u0aeb\\5\\u045d\\u022f\\2\\u0aeb\")\n buf.write(\"\\u0aec\\5\\u045b\\u022e\\2\\u0aec\\u0aed\\5\\u043d\\u021f\\2\\u0aed\")\n buf.write(\"\\u01c0\\3\\2\\2\\2\\u0aee\\u0aef\\5\\u044d\\u0227\\2\\u0aef\\u0af0\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0af0\\u0af1\\5\\u044f\\u0228\\2\\u0af1\\u0af2\")\n buf.write(\"\\5\\u045f\\u0230\\2\\u0af2\\u0af3\\5\\u0435\\u021b\\2\\u0af3\\u0af4\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0af4\\u0af5\\5\\u045d\\u022f\\2\\u0af5\\u0af6\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0af6\\u01c2\\3\\2\\2\\2\\u0af7\\u0af8\\5\\u044d\")\n buf.write(\"\\u0227\\2\\u0af8\\u0af9\\5\\u044b\\u0226\\2\\u0af9\\u0afa\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0afa\\u0afb\\5\\u044b\\u0226\\2\\u0afb\\u0afc\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0afc\\u0afd\\5\\u0437\\u021c\\2\\u0afd\\u0afe\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0afe\\u0aff\\5\\u044b\\u0226\\2\\u0aff\\u01c4\\3\\2\\2\")\n buf.write(\"\\2\\u0b00\\u0b01\\5\\u044d\\u0227\\2\\u0b01\\u0b02\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0b02\\u0b03\\5\\u043b\\u021e\\2\\u0b03\\u0b04\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0b04\\u01c6\\3\\2\\2\\2\\u0b05\\u0b06\\5\\u044d\\u0227\\2\\u0b06\")\n buf.write(\"\\u0b07\\5\\u0451\\u0229\\2\\u0b07\\u0b08\\5\\u043b\\u021e\\2\\u0b08\")\n buf.write(\"\\u0b09\\5\\u043d\\u021f\\2\\u0b09\\u0b0a\\5\\u044b\\u0226\\2\\u0b0a\")\n buf.write(\"\\u01c8\\3\\2\\2\\2\\u0b0b\\u0b0c\\5\\u044d\\u0227\\2\\u0b0c\\u0b0d\")\n 
buf.write(\"\\5\\u0451\\u0229\\2\\u0b0d\\u0b0e\\5\\u043b\\u021e\\2\\u0b0e\\u0b0f\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0b0f\\u0b10\\5\\u043f\\u0220\\2\\u0b10\\u0b11\")\n buf.write(\"\\5\\u0465\\u0233\\2\\u0b11\\u01ca\\3\\2\\2\\2\\u0b12\\u0b13\\5\\u044d\")\n buf.write(\"\\u0227\\2\\u0b13\\u0b14\\5\\u0451\\u0229\\2\\u0b14\\u0b15\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0b15\\u0b16\\5\\u045b\\u022e\\2\\u0b16\\u0b17\\5\\u0443\")\n buf.write(\"\\u0222\\2\\u0b17\\u01cc\\3\\2\\2\\2\\u0b18\\u0b19\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u0b19\\u0b1a\\5\\u045d\\u022f\\2\\u0b1a\\u0b1b\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0b1b\\u0b1c\\5\\u045b\\u022e\\2\\u0b1c\\u0b1d\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0b1d\\u0b1e\\5\\u0459\\u022d\\2\\u0b1e\\u0b1f\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0b1f\\u0b20\\5\\u045b\\u022e\\2\\u0b20\\u01ce\\3\\2\\2\\2\\u0b21\")\n buf.write(\"\\u0b22\\5\\u044f\\u0228\\2\\u0b22\\u0b23\\5\\u0435\\u021b\\2\\u0b23\")\n buf.write(\"\\u0b24\\5\\u044d\\u0227\\2\\u0b24\\u0b25\\5\\u043d\\u021f\\2\\u0b25\")\n buf.write(\"\\u01d0\\3\\2\\2\\2\\u0b26\\u0b27\\5\\u044f\\u0228\\2\\u0b27\\u0b28\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0b28\\u0b29\\5\\u044f\\u0228\\2\\u0b29\\u01d2\")\n buf.write(\"\\3\\2\\2\\2\\u0b2a\\u0b2b\\5\\u044f\\u0228\\2\\u0b2b\\u0b2c\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0b2c\\u0b2d\\5\\u045b\\u022e\\2\\u0b2d\\u0b2e\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u0b2e\\u0b2f\\5\\u0457\\u022c\\2\\u0b2f\\u0b30\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0b30\\u0b31\\5\\u044b\\u0226\\2\\u0b31\\u01d4\\3\\2\\2\")\n buf.write(\"\\2\\u0b32\\u0b33\\5\\u044f\\u0228\\2\\u0b33\\u0b34\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0b34\\u0b35\\5\\u045b\\u022e\\2\\u0b35\\u0b36\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u0b36\\u0b37\\5\\u0457\\u022c\\2\\u0b37\\u0b38\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0b38\\u0b39\\5\\u044b\\u0226\\2\\u0b39\\u0b3a\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0b3a\\u01d6\\3\\2\\2\\2\\u0b3b\\u0b3c\\5\\u044f\\u0228\\2\\u0b3c\")\n 
buf.write(\"\\u0b3d\\5\\u0435\\u021b\\2\\u0b3d\\u0b3e\\5\\u045f\\u0230\\2\\u0b3e\")\n buf.write(\"\\u01d8\\3\\2\\2\\2\\u0b3f\\u0b40\\5\\u044f\\u0228\\2\\u0b40\\u0b41\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u0b41\\u0b42\\5\\u0443\\u0222\\2\\u0b42\\u0b43\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0b43\\u0b44\\5\\u0457\\u022c\\2\\u0b44\\u01da\")\n buf.write(\"\\3\\2\\2\\2\\u0b45\\u0b46\\5\\u044f\\u0228\\2\\u0b46\\u0b47\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0b47\\u0b48\\5\\u0443\\u0222\\2\\u0b48\\u0b49\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0b49\\u0b4a\\5\\u0457\\u022c\\2\\u0b4a\\u0b4b\\7a\\2\")\n buf.write(\"\\2\\u0b4b\\u0b4c\\5\\u0439\\u021d\\2\\u0b4c\\u0b4d\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0b4d\\u01dc\\3\\2\\2\\2\\u0b4e\\u0b4f\\5\\u044f\\u0228\\2\\u0b4f\")\n buf.write(\"\\u0b50\\5\\u0439\\u021d\\2\\u0b50\\u0b51\\5\\u044b\\u0226\\2\\u0b51\")\n buf.write(\"\\u0b52\\5\\u0451\\u0229\\2\\u0b52\\u0b53\\5\\u0437\\u021c\\2\\u0b53\")\n buf.write(\"\\u01de\\3\\2\\2\\2\\u0b54\\u0b55\\5\\u044f\\u0228\\2\\u0b55\\u0b56\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0b56\\u0b57\\5\\u0459\\u022d\\2\\u0b57\\u0b58\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0b58\\u0b59\\5\\u043d\\u021f\\2\\u0b59\\u0b5a\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u0b5a\\u01e0\\3\\2\\2\\2\\u0b5b\\u0b5c\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0b5c\\u0b5d\\5\\u043d\\u021f\\2\\u0b5d\\u0b5e\\5\\u0461\")\n buf.write(\"\\u0231\\2\\u0b5e\\u01e2\\3\\2\\2\\2\\u0b5f\\u0b60\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0b60\\u0b61\\5\\u0451\\u0229\\2\\u0b61\\u01e4\\3\\2\\2\\2\\u0b62\")\n buf.write(\"\\u0b63\\5\\u044f\\u0228\\2\\u0b63\\u0b64\\5\\u0451\\u0229\\2\\u0b64\")\n buf.write(\"\\u0b65\\5\\u0435\\u021b\\2\\u0b65\\u0b66\\5\\u045d\\u022f\\2\\u0b66\")\n buf.write(\"\\u0b67\\5\\u043b\\u021e\\2\\u0b67\\u0b68\\5\\u0445\\u0223\\2\\u0b68\")\n buf.write(\"\\u0b69\\5\\u045b\\u022e\\2\\u0b69\\u01e6\\3\\2\\2\\2\\u0b6a\\u0b6b\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0b6b\\u0b6c\\5\\u0451\\u0229\\2\\u0b6c\\u0b6d\")\n 
buf.write(\"\\5\\u0439\\u021d\\2\\u0b6d\\u0b6e\\5\\u0435\\u021b\\2\\u0b6e\\u0b6f\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u0b6f\\u0b70\\5\\u0443\\u0222\\2\\u0b70\\u0b71\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0b71\\u01e8\\3\\2\\2\\2\\u0b72\\u0b73\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0b73\\u0b74\\5\\u0451\\u0229\\2\\u0b74\\u0b75\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0b75\\u0b76\\5\\u0451\\u0229\\2\\u0b76\\u0b77\\5\\u0453\")\n buf.write(\"\\u022a\\2\\u0b77\\u0b78\\5\\u0465\\u0233\\2\\u0b78\\u01ea\\3\\2\\2\")\n buf.write(\"\\2\\u0b79\\u0b7a\\5\\u044f\\u0228\\2\\u0b7a\\u0b7b\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0b7b\\u0b7c\\5\\u0439\\u021d\\2\\u0b7c\\u0b7d\\5\\u0465\\u0233\")\n buf.write(\"\\2\\u0b7d\\u0b7e\\5\\u0439\\u021d\\2\\u0b7e\\u0b7f\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0b7f\\u0b80\\5\\u043d\\u021f\\2\\u0b80\\u01ec\\3\\2\\2\\2\\u0b81\")\n buf.write(\"\\u0b82\\5\\u044f\\u0228\\2\\u0b82\\u0b83\\5\\u0451\\u0229\\2\\u0b83\")\n buf.write(\"\\u0b84\\5\\u043d\\u021f\\2\\u0b84\\u0b85\\5\\u044f\\u0228\\2\\u0b85\")\n buf.write(\"\\u0b86\\5\\u045b\\u022e\\2\\u0b86\\u0b87\\5\\u0445\\u0223\\2\\u0b87\")\n buf.write(\"\\u0b88\\5\\u045b\\u022e\\2\\u0b88\\u0b89\\5\\u0465\\u0233\\2\\u0b89\")\n buf.write(\"\\u0b8a\\5\\u043d\\u021f\\2\\u0b8a\\u0b8b\\5\\u0459\\u022d\\2\\u0b8b\")\n buf.write(\"\\u0b8c\\5\\u0439\\u021d\\2\\u0b8c\\u0b8d\\5\\u0435\\u021b\\2\\u0b8d\")\n buf.write(\"\\u0b8e\\5\\u0453\\u022a\\2\\u0b8e\\u0b8f\\5\\u0445\\u0223\\2\\u0b8f\")\n buf.write(\"\\u0b90\\5\\u044f\\u0228\\2\\u0b90\\u0b91\\5\\u0441\\u0221\\2\\u0b91\")\n buf.write(\"\\u01ee\\3\\2\\2\\2\\u0b92\\u0b93\\5\\u044f\\u0228\\2\\u0b93\\u0b94\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0b94\\u0b95\\5\\u044d\\u0227\\2\\u0b95\\u0b96\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0b96\\u0b97\\5\\u0463\\u0232\\2\\u0b97\\u0b98\")\n buf.write(\"\\5\\u045f\\u0230\\2\\u0b98\\u0b99\\5\\u0435\\u021b\\2\\u0b99\\u0b9a\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0b9a\\u0b9b\\5\\u045d\\u022f\\2\\u0b9b\\u0b9c\")\n 
buf.write(\"\\5\\u043d\\u021f\\2\\u0b9c\\u01f0\\3\\2\\2\\2\\u0b9d\\u0b9e\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0b9e\\u0b9f\\5\\u0451\\u0229\\2\\u0b9f\\u0ba0\\5\\u044d\")\n buf.write(\"\\u0227\\2\\u0ba0\\u0ba1\\5\\u0445\\u0223\\2\\u0ba1\\u0ba2\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0ba2\\u0ba3\\5\\u045f\\u0230\\2\\u0ba3\\u0ba4\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0ba4\\u0ba5\\5\\u044b\\u0226\\2\\u0ba5\\u0ba6\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u0ba6\\u0ba7\\5\\u043d\\u021f\\2\\u0ba7\\u01f2\\3\\2\\2\")\n buf.write(\"\\2\\u0ba8\\u0ba9\\5\\u044f\\u0228\\2\\u0ba9\\u0baa\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0baa\\u0bab\\5\\u044f\\u0228\\2\\u0bab\\u0bac\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0bac\\u01f4\\3\\2\\2\\2\\u0bad\\u0bae\\5\\u044f\\u0228\\2\\u0bae\")\n buf.write(\"\\u0baf\\5\\u0451\\u0229\\2\\u0baf\\u0bb0\\5\\u0451\\u0229\\2\\u0bb0\")\n buf.write(\"\\u0bb1\\5\\u0457\\u022c\\2\\u0bb1\\u0bb2\\5\\u043b\\u021e\\2\\u0bb2\")\n buf.write(\"\\u0bb3\\5\\u043d\\u021f\\2\\u0bb3\\u0bb4\\5\\u0457\\u022c\\2\\u0bb4\")\n buf.write(\"\\u01f6\\3\\2\\2\\2\\u0bb5\\u0bb6\\5\\u044f\\u0228\\2\\u0bb6\\u0bb7\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0bb7\\u0bb8\\5\\u0459\\u022d\\2\\u0bb8\\u0bb9\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u0bb9\\u0bba\\5\\u0443\\u0222\\2\\u0bba\\u0bbb\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0bbb\\u0bbc\\5\\u044d\\u0227\\2\\u0bbc\\u0bbd\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0bbd\\u0bbe\\5\\u0439\\u021d\\2\\u0bbe\\u0bbf\")\n buf.write(\"\\5\\u0443\\u0222\\2\\u0bbf\\u0bc0\\5\\u043d\\u021f\\2\\u0bc0\\u0bc1\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u0bc1\\u0bc2\\5\\u0449\\u0225\\2\\u0bc2\\u01f8\")\n buf.write(\"\\3\\2\\2\\2\\u0bc3\\u0bc4\\5\\u044f\\u0228\\2\\u0bc4\\u0bc5\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0bc5\\u0bc6\\5\\u045b\\u022e\\2\\u0bc6\\u01fa\\3\\2\\2\")\n buf.write(\"\\2\\u0bc7\\u0bc8\\5\\u044f\\u0228\\2\\u0bc8\\u0bc9\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0bc9\\u0bca\\5\\u0461\\u0231\\2\\u0bca\\u0bcb\\5\\u0435\\u021b\")\n 
buf.write(\"\\2\\u0bcb\\u0bcc\\5\\u0445\\u0223\\2\\u0bcc\\u0bcd\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0bcd\\u01fc\\3\\2\\2\\2\\u0bce\\u0bcf\\5\\u044f\\u0228\\2\\u0bcf\")\n buf.write(\"\\u0bd0\\5\\u045d\\u022f\\2\\u0bd0\\u0bd1\\5\\u044b\\u0226\\2\\u0bd1\")\n buf.write(\"\\u0bd2\\5\\u044b\\u0226\\2\\u0bd2\\u01fe\\3\\2\\2\\2\\u0bd3\\u0bd4\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0bd4\\u0bd5\\5\\u045d\\u022f\\2\\u0bd5\\u0bd6\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0bd6\\u0bd7\\5\\u044b\\u0226\\2\\u0bd7\\u0bd8\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0bd8\\u0200\\3\\2\\2\\2\\u0bd9\\u0bda\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0bda\\u0bdb\\5\\u045d\\u022f\\2\\u0bdb\\u0bdc\\5\\u044d\")\n buf.write(\"\\u0227\\2\\u0bdc\\u0bdd\\5\\u0437\\u021c\\2\\u0bdd\\u0bde\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0bde\\u0bdf\\5\\u0457\\u022c\\2\\u0bdf\\u0202\\3\\2\\2\")\n buf.write(\"\\2\\u0be0\\u0be1\\5\\u044f\\u0228\\2\\u0be1\\u0be2\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u0be2\\u0be3\\5\\u044d\\u0227\\2\\u0be3\\u0be4\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0be4\\u0be5\\5\\u0457\\u022c\\2\\u0be5\\u0be6\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0be6\\u0be7\\5\\u0439\\u021d\\2\\u0be7\\u0204\\3\\2\\2\\2\\u0be8\")\n buf.write(\"\\u0be9\\5\\u044f\\u0228\\2\\u0be9\\u0bea\\5\\u045f\\u0230\\2\\u0bea\")\n buf.write(\"\\u0beb\\5\\u0435\\u021b\\2\\u0beb\\u0bec\\5\\u0457\\u022c\\2\\u0bec\")\n buf.write(\"\\u0bed\\5\\u0439\\u021d\\2\\u0bed\\u0bee\\5\\u0443\\u0222\\2\\u0bee\")\n buf.write(\"\\u0bef\\5\\u0435\\u021b\\2\\u0bef\\u0bf0\\5\\u0457\\u022c\\2\\u0bf0\")\n buf.write(\"\\u0bf1\\7\\64\\2\\2\\u0bf1\\u0206\\3\\2\\2\\2\\u0bf2\\u0bf3\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0bf3\\u0bf4\\5\\u0437\\u021c\\2\\u0bf4\\u0bf5\\5\\u0447\")\n buf.write(\"\\u0224\\2\\u0bf5\\u0bf6\\5\\u043d\\u021f\\2\\u0bf6\\u0bf7\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u0bf7\\u0bf8\\5\\u045b\\u022e\\2\\u0bf8\\u0208\\3\\2\\2\")\n buf.write(\"\\2\\u0bf9\\u0bfa\\5\\u0451\\u0229\\2\\u0bfa\\u0bfb\\5\\u043f\\u0220\")\n 
buf.write(\"\\2\\u0bfb\\u020a\\3\\2\\2\\2\\u0bfc\\u0bfd\\5\\u0451\\u0229\\2\\u0bfd\")\n buf.write(\"\\u0bfe\\5\\u043f\\u0220\\2\\u0bfe\\u0bff\\5\\u043f\\u0220\\2\\u0bff\")\n buf.write(\"\\u020c\\3\\2\\2\\2\\u0c00\\u0c01\\5\\u0451\\u0229\\2\\u0c01\\u0c02\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0c02\\u0c03\\5\\u043b\\u021e\\2\\u0c03\\u020e\")\n buf.write(\"\\3\\2\\2\\2\\u0c04\\u0c05\\5\\u0451\\u0229\\2\\u0c05\\u0c06\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u0c06\\u0c07\\5\\u043b\\u021e\\2\\u0c07\\u0210\\3\\2\\2\")\n buf.write(\"\\2\\u0c08\\u0c09\\5\\u0451\\u0229\\2\\u0c09\\u0c0a\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0c0a\\u0212\\3\\2\\2\\2\\u0c0b\\u0c0c\\5\\u0451\\u0229\\2\\u0c0c\")\n buf.write(\"\\u0c0d\\5\\u044f\\u0228\\2\\u0c0d\\u0c0e\\5\\u044b\\u0226\\2\\u0c0e\")\n buf.write(\"\\u0c0f\\5\\u0465\\u0233\\2\\u0c0f\\u0214\\3\\2\\2\\2\\u0c10\\u0c11\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0c11\\u0c12\\5\\u0453\\u022a\\2\\u0c12\\u0c13\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0c13\\u0c14\\5\\u044f\\u0228\\2\\u0c14\\u0216\")\n buf.write(\"\\3\\2\\2\\2\\u0c15\\u0c16\\5\\u0451\\u0229\\2\\u0c16\\u0c17\\5\\u0453\")\n buf.write(\"\\u022a\\2\\u0c17\\u0c18\\5\\u045b\\u022e\\2\\u0c18\\u0c19\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0c19\\u0c1a\\5\\u0451\\u0229\\2\\u0c1a\\u0c1b\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0c1b\\u0218\\3\\2\\2\\2\\u0c1c\\u0c1d\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0c1d\\u0c1e\\5\\u0457\\u022c\\2\\u0c1e\\u021a\\3\\2\\2\\2\\u0c1f\")\n buf.write(\"\\u0c20\\5\\u0451\\u0229\\2\\u0c20\\u0c21\\5\\u0457\\u022c\\2\\u0c21\")\n buf.write(\"\\u0c22\\5\\u0435\\u021b\\2\\u0c22\\u0c23\\5\\u043b\\u021e\\2\\u0c23\")\n buf.write(\"\\u0c24\\5\\u0435\\u021b\\2\\u0c24\\u0c25\\5\\u045b\\u022e\\2\\u0c25\")\n buf.write(\"\\u0c26\\5\\u0435\\u021b\\2\\u0c26\\u021c\\3\\2\\2\\2\\u0c27\\u0c28\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0c28\\u0c29\\5\\u0457\\u022c\\2\\u0c29\\u0c2a\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u0c2a\\u0c2b\\5\\u043d\\u021f\\2\\u0c2b\\u0c2c\")\n 
buf.write(\"\\5\\u0457\\u022c\\2\\u0c2c\\u021e\\3\\2\\2\\2\\u0c2d\\u0c2e\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0c2e\\u0c2f\\5\\u0457\\u022c\\2\\u0c2f\\u0c30\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u0c30\\u0c31\\5\\u0445\\u0223\\2\\u0c31\\u0c32\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0c32\\u0c33\\5\\u0435\\u021b\\2\\u0c33\\u0c34\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u0c34\\u0c35\\5\\u0445\\u0223\\2\\u0c35\\u0c36\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0c36\\u0c37\\5\\u0465\\u0233\\2\\u0c37\\u0220\\3\\2\\2\")\n buf.write(\"\\2\\u0c38\\u0c39\\5\\u0451\\u0229\\2\\u0c39\\u0c3a\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0c3a\\u0c3b\\5\\u043d\\u021f\\2\\u0c3b\\u0c3c\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0c3c\\u0c3d\\5\\u0457\\u022c\\2\\u0c3d\\u0c3e\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0c3e\\u0c3f\\5\\u0457\\u022c\\2\\u0c3f\\u0222\\3\\2\\2\\2\\u0c40\")\n buf.write(\"\\u0c41\\5\\u0451\\u0229\\2\\u0c41\\u0c42\\5\\u045d\\u022f\\2\\u0c42\")\n buf.write(\"\\u0c43\\5\\u045b\\u022e\\2\\u0c43\\u0224\\3\\2\\2\\2\\u0c44\\u0c45\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0c45\\u0c46\\5\\u045d\\u022f\\2\\u0c46\\u0c47\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0c47\\u0c48\\5\\u043d\\u021f\\2\\u0c48\\u0c49\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0c49\\u0226\\3\\2\\2\\2\\u0c4a\\u0c4b\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0c4b\\u0c4c\\5\\u045f\\u0230\\2\\u0c4c\\u0c4d\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0c4d\\u0c4e\\5\\u0457\\u022c\\2\\u0c4e\\u0228\\3\\2\\2\")\n buf.write(\"\\2\\u0c4f\\u0c50\\5\\u0451\\u0229\\2\\u0c50\\u0c51\\5\\u045f\\u0230\")\n buf.write(\"\\2\\u0c51\\u0c52\\5\\u043d\\u021f\\2\\u0c52\\u0c53\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0c53\\u0c54\\5\\u0457\\u022c\\2\\u0c54\\u0c55\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0c55\\u0c56\\5\\u043b\\u021e\\2\\u0c56\\u0c57\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0c57\\u0c58\\5\\u044f\\u0228\\2\\u0c58\\u0c59\\5\\u0441\\u0221\")\n buf.write(\"\\2\\u0c59\\u022a\\3\\2\\2\\2\\u0c5a\\u0c5b\\5\\u0453\\u022a\\2\\u0c5b\")\n 
buf.write(\"\\u0c5c\\5\\u0435\\u021b\\2\\u0c5c\\u0c5d\\5\\u0439\\u021d\\2\\u0c5d\")\n buf.write(\"\\u0c5e\\5\\u0449\\u0225\\2\\u0c5e\\u0c5f\\5\\u0435\\u021b\\2\\u0c5f\")\n buf.write(\"\\u0c60\\5\\u0441\\u0221\\2\\u0c60\\u0c61\\5\\u043d\\u021f\\2\\u0c61\")\n buf.write(\"\\u022c\\3\\2\\2\\2\\u0c62\\u0c63\\5\\u0453\\u022a\\2\\u0c63\\u0c64\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0c64\\u0c65\\5\\u0457\\u022c\\2\\u0c65\\u0c66\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0c66\\u0c67\\5\\u044b\\u0226\\2\\u0c67\\u0c68\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0c68\\u0c69\\5\\u043d\\u021f\\2\\u0c69\\u0c6a\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0c6a\\u0c6b\\7a\\2\\2\\u0c6b\\u0c6c\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0c6c\\u0c6d\\5\\u044f\\u0228\\2\\u0c6d\\u0c6e\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0c6e\\u0c6f\\5\\u0437\\u021c\\2\\u0c6f\\u0c70\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u0c70\\u0c71\\5\\u043d\\u021f\\2\\u0c71\\u022e\\3\\2\\2\")\n buf.write(\"\\2\\u0c72\\u0c73\\5\\u0453\\u022a\\2\\u0c73\\u0c74\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0c74\\u0c75\\5\\u0457\\u022c\\2\\u0c75\\u0c76\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0c76\\u0c77\\5\\u044d\\u0227\\2\\u0c77\\u0c78\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0c78\\u0c79\\5\\u045b\\u022e\\2\\u0c79\\u0c7a\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0c7a\\u0c7b\\5\\u0457\\u022c\\2\\u0c7b\\u0c7c\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0c7c\\u0230\\3\\2\\2\\2\\u0c7d\\u0c7e\\5\\u0453\\u022a\\2\\u0c7e\")\n buf.write(\"\\u0c7f\\5\\u0435\\u021b\\2\\u0c7f\\u0c80\\5\\u0457\\u022c\\2\\u0c80\")\n buf.write(\"\\u0c81\\5\\u043d\\u021f\\2\\u0c81\\u0c82\\5\\u044f\\u0228\\2\\u0c82\")\n buf.write(\"\\u0c83\\5\\u045b\\u022e\\2\\u0c83\\u0232\\3\\2\\2\\2\\u0c84\\u0c85\")\n buf.write(\"\\5\\u0453\\u022a\\2\\u0c85\\u0c86\\5\\u0435\\u021b\\2\\u0c86\\u0c87\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0c87\\u0c88\\5\\u045b\\u022e\\2\\u0c88\\u0c89\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0c89\\u0c8a\\5\\u045b\\u022e\\2\\u0c8a\\u0c8b\")\n 
buf.write(\"\\5\\u0445\\u0223\\2\\u0c8b\\u0c8c\\5\\u0451\\u0229\\2\\u0c8c\\u0c8d\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0c8d\\u0234\\3\\2\\2\\2\\u0c8e\\u0c8f\\5\\u0453\")\n buf.write(\"\\u022a\\2\\u0c8f\\u0c90\\5\\u0435\\u021b\\2\\u0c90\\u0c91\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0c91\\u0c92\\5\\u0459\\u022d\\2\\u0c92\\u0c93\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0c93\\u0c94\\5\\u044f\\u0228\\2\\u0c94\\u0c95\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u0c95\\u0236\\3\\2\\2\\2\\u0c96\\u0c97\\5\\u0453\\u022a\")\n buf.write(\"\\2\\u0c97\\u0c98\\5\\u0435\\u021b\\2\\u0c98\\u0c99\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0c99\\u0c9a\\5\\u0443\\u0222\\2\\u0c9a\\u0238\\3\\2\\2\\2\\u0c9b\")\n buf.write(\"\\u0c9c\\7\\'\\2\\2\\u0c9c\\u0c9d\\5\\u0457\\u022c\\2\\u0c9d\\u0c9e\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0c9e\\u0c9f\\5\\u0461\\u0231\\2\\u0c9f\\u0ca0\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0ca0\\u0ca1\\5\\u0465\\u0233\\2\\u0ca1\\u0ca2\")\n buf.write(\"\\5\\u0453\\u022a\\2\\u0ca2\\u0ca3\\5\\u043d\\u021f\\2\\u0ca3\\u023a\")\n buf.write(\"\\3\\2\\2\\2\\u0ca4\\u0ca5\\7\\'\\2\\2\\u0ca5\\u0ca6\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0ca6\\u0ca7\\5\\u0465\\u0233\\2\\u0ca7\\u0ca8\\5\\u0453\\u022a\")\n buf.write(\"\\2\\u0ca8\\u0ca9\\5\\u043d\\u021f\\2\\u0ca9\\u023c\\3\\2\\2\\2\\u0caa\")\n buf.write(\"\\u0cab\\5\\u0453\\u022a\\2\\u0cab\\u0cac\\5\\u0445\\u0223\\2\\u0cac\")\n buf.write(\"\\u0cad\\5\\u0453\\u022a\\2\\u0cad\\u0cae\\5\\u043d\\u021f\\2\\u0cae\")\n buf.write(\"\\u0caf\\5\\u044b\\u0226\\2\\u0caf\\u0cb0\\5\\u0445\\u0223\\2\\u0cb0\")\n buf.write(\"\\u0cb1\\5\\u044f\\u0228\\2\\u0cb1\\u0cb2\\5\\u043d\\u021f\\2\\u0cb2\")\n buf.write(\"\\u0cb3\\5\\u043b\\u021e\\2\\u0cb3\\u023e\\3\\2\\2\\2\\u0cb4\\u0cb5\")\n buf.write(\"\\5\\u0453\\u022a\\2\\u0cb5\\u0cb6\\5\\u0445\\u0223\\2\\u0cb6\\u0cb7\")\n buf.write(\"\\5\\u045f\\u0230\\2\\u0cb7\\u0cb8\\5\\u0451\\u0229\\2\\u0cb8\\u0cb9\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0cb9\\u0240\\3\\2\\2\\2\\u0cba\\u0cbb\\5\\u0453\")\n 
buf.write(\"\\u022a\\2\\u0cbb\\u0cbc\\5\\u044b\\u0226\\2\\u0cbc\\u0cbd\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0cbd\\u0cbe\\5\\u044f\\u0228\\2\\u0cbe\\u0242\\3\\2\\2\")\n buf.write(\"\\2\\u0cbf\\u0cc0\\5\\u0453\\u022a\\2\\u0cc0\\u0cc1\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0cc1\\u0cc2\\5\\u0459\\u022d\\2\\u0cc2\\u0cc3\\7a\\2\\2\\u0cc3\")\n buf.write(\"\\u0cc4\\5\\u0445\\u0223\\2\\u0cc4\\u0cc5\\5\\u044f\\u0228\\2\\u0cc5\")\n buf.write(\"\\u0cc6\\5\\u045b\\u022e\\2\\u0cc6\\u0cc7\\5\\u043d\\u021f\\2\\u0cc7\")\n buf.write(\"\\u0cc8\\5\\u0441\\u0221\\2\\u0cc8\\u0cc9\\5\\u043d\\u021f\\2\\u0cc9\")\n buf.write(\"\\u0cca\\5\\u0457\\u022c\\2\\u0cca\\u0244\\3\\2\\2\\2\\u0ccb\\u0ccc\")\n buf.write(\"\\5\\u0453\\u022a\\2\\u0ccc\\u0ccd\\5\\u0451\\u0229\\2\\u0ccd\\u0cce\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0cce\\u0ccf\\5\\u0445\\u0223\\2\\u0ccf\\u0cd0\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0cd0\\u0cd1\\5\\u0445\\u0223\\2\\u0cd1\\u0cd2\")\n buf.write(\"\\5\\u045f\\u0230\\2\\u0cd2\\u0cd3\\5\\u043d\\u021f\\2\\u0cd3\\u0246\")\n buf.write(\"\\3\\2\\2\\2\\u0cd4\\u0cd5\\5\\u0453\\u022a\\2\\u0cd5\\u0cd6\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0cd6\\u0cd7\\5\\u0459\\u022d\\2\\u0cd7\\u0cd8\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0cd8\\u0cd9\\5\\u045b\\u022e\\2\\u0cd9\\u0cda\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0cda\\u0cdb\\5\\u045f\\u0230\\2\\u0cdb\\u0cdc\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0cdc\\u0cdd\\5\\u044f\\u0228\\2\\u0cdd\\u0248\\3\\2\\2\")\n buf.write(\"\\2\\u0cde\\u0cdf\\5\\u0453\\u022a\\2\\u0cdf\\u0ce0\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0ce0\\u0ce1\\5\\u0435\\u021b\\2\\u0ce1\\u0ce2\\5\\u0441\\u0221\")\n buf.write(\"\\2\\u0ce2\\u0ce3\\5\\u044d\\u0227\\2\\u0ce3\\u0ce4\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0ce4\\u024a\\3\\2\\2\\2\\u0ce5\\u0ce6\\5\\u0453\\u022a\\2\\u0ce6\")\n buf.write(\"\\u0ce7\\5\\u0457\\u022c\\2\\u0ce7\\u0ce8\\5\\u043d\\u021f\\2\\u0ce8\")\n buf.write(\"\\u0ce9\\5\\u0439\\u021d\\2\\u0ce9\\u0cea\\5\\u043d\\u021f\\2\\u0cea\")\n 
buf.write(\"\\u0ceb\\5\\u043b\\u021e\\2\\u0ceb\\u0cec\\5\\u0445\\u0223\\2\\u0cec\")\n buf.write(\"\\u0ced\\5\\u044f\\u0228\\2\\u0ced\\u0cee\\5\\u0441\\u0221\\2\\u0cee\")\n buf.write(\"\\u024c\\3\\2\\2\\2\\u0cef\\u0cf0\\5\\u0453\\u022a\\2\\u0cf0\\u0cf1\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0cf1\\u0cf2\\5\\u043d\\u021f\\2\\u0cf2\\u0cf3\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u0cf3\\u0cf4\\5\\u0445\\u0223\\2\\u0cf4\\u0cf5\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0cf5\\u0cf6\\5\\u0445\\u0223\\2\\u0cf6\\u0cf7\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0cf7\\u0cf8\\5\\u044f\\u0228\\2\\u0cf8\\u024e\")\n buf.write(\"\\3\\2\\2\\2\\u0cf9\\u0cfa\\5\\u0453\\u022a\\2\\u0cfa\\u0cfb\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0cfb\\u0cfc\\5\\u043d\\u021f\\2\\u0cfc\\u0cfd\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0cfd\\u0cfe\\5\\u043d\\u021f\\2\\u0cfe\\u0cff\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0cff\\u0d00\\5\\u045b\\u022e\\2\\u0d00\\u0250\\3\\2\\2\")\n buf.write(\"\\2\\u0d01\\u0d02\\5\\u0453\\u022a\\2\\u0d02\\u0d03\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0d03\\u0d04\\5\\u0445\\u0223\\2\\u0d04\\u0d05\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0d05\\u0d06\\5\\u0457\\u022c\\2\\u0d06\\u0252\\3\\2\\2\\2\\u0d07\")\n buf.write(\"\\u0d08\\5\\u0453\\u022a\\2\\u0d08\\u0d09\\5\\u0457\\u022c\\2\\u0d09\")\n buf.write(\"\\u0d0a\\5\\u0451\\u0229\\2\\u0d0a\\u0d0b\\5\\u0439\\u021d\\2\\u0d0b\")\n buf.write(\"\\u0d0c\\5\\u043d\\u021f\\2\\u0d0c\\u0d0d\\5\\u043b\\u021e\\2\\u0d0d\")\n buf.write(\"\\u0d0e\\5\\u045d\\u022f\\2\\u0d0e\\u0d0f\\5\\u0457\\u022c\\2\\u0d0f\")\n buf.write(\"\\u0d10\\5\\u043d\\u021f\\2\\u0d10\\u0254\\3\\2\\2\\2\\u0d11\\u0d12\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0d12\\u0d13\\5\\u0435\\u021b\\2\\u0d13\\u0d14\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0d14\\u0d15\\5\\u0459\\u022d\\2\\u0d15\\u0d16\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0d16\\u0256\\3\\2\\2\\2\\u0d17\\u0d18\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0d18\\u0d19\\5\\u0435\\u021b\\2\\u0d19\\u0d1a\\5\\u044f\")\n 
buf.write(\"\\u0228\\2\\u0d1a\\u0d1b\\5\\u0441\\u0221\\2\\u0d1b\\u0d1c\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0d1c\\u0258\\3\\2\\2\\2\\u0d1d\\u0d1e\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0d1e\\u0d1f\\5\\u0435\\u021b\\2\\u0d1f\\u0d20\\5\\u0461\\u0231\")\n buf.write(\"\\2\\u0d20\\u025a\\3\\2\\2\\2\\u0d21\\u0d22\\5\\u0457\\u022c\\2\\u0d22\")\n buf.write(\"\\u0d23\\5\\u043d\\u021f\\2\\u0d23\\u0d24\\5\\u0435\\u021b\\2\\u0d24\")\n buf.write(\"\\u0d25\\5\\u043b\\u021e\\2\\u0d25\\u025c\\3\\2\\2\\2\\u0d26\\u0d27\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0d27\\u0d28\\5\\u043d\\u021f\\2\\u0d28\\u0d29\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0d29\\u0d2a\\5\\u044b\\u0226\\2\\u0d2a\\u025e\")\n buf.write(\"\\3\\2\\2\\2\\u0d2b\\u0d2c\\5\\u0457\\u022c\\2\\u0d2c\\u0d2d\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0d2d\\u0d2e\\5\\u0439\\u021d\\2\\u0d2e\\u0d2f\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0d2f\\u0d30\\5\\u0457\\u022c\\2\\u0d30\\u0d31\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u0d31\\u0260\\3\\2\\2\\2\\u0d32\\u0d33\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0d33\\u0d34\\5\\u043d\\u021f\\2\\u0d34\\u0d35\\5\\u043f\\u0220\")\n buf.write(\"\\2\\u0d35\\u0262\\3\\2\\2\\2\\u0d36\\u0d37\\5\\u0457\\u022c\\2\\u0d37\")\n buf.write(\"\\u0d38\\5\\u043d\\u021f\\2\\u0d38\\u0d39\\5\\u043f\\u0220\\2\\u0d39\")\n buf.write(\"\\u0d3a\\5\\u043d\\u021f\\2\\u0d3a\\u0d3b\\5\\u0457\\u022c\\2\\u0d3b\")\n buf.write(\"\\u0d3c\\5\\u043d\\u021f\\2\\u0d3c\\u0d3d\\5\\u044f\\u0228\\2\\u0d3d\")\n buf.write(\"\\u0d3e\\5\\u0439\\u021d\\2\\u0d3e\\u0d3f\\5\\u043d\\u021f\\2\\u0d3f\")\n buf.write(\"\\u0264\\3\\2\\2\\2\\u0d40\\u0d41\\5\\u0457\\u022c\\2\\u0d41\\u0d42\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0d42\\u0d43\\5\\u043f\\u0220\\2\\u0d43\\u0d44\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0d44\\u0d45\\5\\u0457\\u022c\\2\\u0d45\\u0d46\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0d46\\u0d47\\5\\u044f\\u0228\\2\\u0d47\\u0d48\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u0d48\\u0d49\\5\\u0445\\u0223\\2\\u0d49\\u0d4a\")\n 
buf.write(\"\\5\\u044f\\u0228\\2\\u0d4a\\u0d4b\\5\\u0441\\u0221\\2\\u0d4b\\u0266\")\n buf.write(\"\\3\\2\\2\\2\\u0d4c\\u0d4d\\5\\u0457\\u022c\\2\\u0d4d\\u0d4e\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0d4e\\u0d4f\\5\\u0447\\u0224\\2\\u0d4f\\u0d50\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0d50\\u0d51\\5\\u0439\\u021d\\2\\u0d51\\u0d52\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0d52\\u0268\\3\\2\\2\\2\\u0d53\\u0d54\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0d54\\u0d55\\5\\u043d\\u021f\\2\\u0d55\\u0d56\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0d56\\u0d57\\5\\u0445\\u0223\\2\\u0d57\\u0d58\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0d58\\u0d59\\5\\u0459\\u022d\\2\\u0d59\\u0d5a\\7a\\2\\2\\u0d5a\")\n buf.write(\"\\u0d5b\\5\\u0451\\u0229\\2\\u0d5b\\u0d5c\\5\\u044f\\u0228\\2\\u0d5c\")\n buf.write(\"\\u026a\\3\\2\\2\\2\\u0d5d\\u0d5e\\5\\u0457\\u022c\\2\\u0d5e\\u0d5f\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0d5f\\u0d60\\5\\u044f\\u0228\\2\\u0d60\\u0d61\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0d61\\u0d62\\5\\u044d\\u0227\\2\\u0d62\\u0d63\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0d63\\u026c\\3\\2\\2\\2\\u0d64\\u0d65\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0d65\\u0d66\\5\\u043d\\u021f\\2\\u0d66\\u0d67\\5\\u0453\")\n buf.write(\"\\u022a\\2\\u0d67\\u0d68\\5\\u044b\\u0226\\2\\u0d68\\u0d69\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0d69\\u0d6a\\5\\u0439\\u021d\\2\\u0d6a\\u0d6b\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0d6b\\u026e\\3\\2\\2\\2\\u0d6c\\u0d6d\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0d6d\\u0d6e\\5\\u043d\\u021f\\2\\u0d6e\\u0d6f\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0d6f\\u0d70\\5\\u0453\\u022a\\2\\u0d70\\u0d71\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0d71\\u0d72\\5\\u0439\\u021d\\2\\u0d72\\u0d73\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0d73\\u0270\\3\\2\\2\\2\\u0d74\\u0d75\\5\\u0457\\u022c\\2\\u0d75\")\n buf.write(\"\\u0d76\\5\\u043d\\u021f\\2\\u0d76\\u0d77\\5\\u0459\\u022d\\2\\u0d77\")\n buf.write(\"\\u0d78\\5\\u045b\\u022e\\2\\u0d78\\u0d79\\5\\u0457\\u022c\\2\\u0d79\")\n 
buf.write(\"\\u0d7a\\5\\u0445\\u0223\\2\\u0d7a\\u0d7b\\5\\u0439\\u021d\\2\\u0d7b\")\n buf.write(\"\\u0d7c\\5\\u045b\\u022e\\2\\u0d7c\\u0d7d\\7a\\2\\2\\u0d7d\\u0d7e\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0d7e\\u0d7f\\5\\u043d\\u021f\\2\\u0d7f\\u0d80\")\n buf.write(\"\\5\\u043f\\u0220\\2\\u0d80\\u0d81\\5\\u043d\\u021f\\2\\u0d81\\u0d82\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0d82\\u0d83\\5\\u043d\\u021f\\2\\u0d83\\u0d84\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0d84\\u0d85\\5\\u0439\\u021d\\2\\u0d85\\u0d86\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0d86\\u0d87\\5\\u0459\\u022d\\2\\u0d87\\u0272\")\n buf.write(\"\\3\\2\\2\\2\\u0d88\\u0d89\\5\\u0457\\u022c\\2\\u0d89\\u0d8a\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0d8a\\u0d8b\\5\\u0459\\u022d\\2\\u0d8b\\u0d8c\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u0d8c\\u0d8d\\5\\u044b\\u0226\\2\\u0d8d\\u0d8e\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0d8e\\u0274\\3\\2\\2\\2\\u0d8f\\u0d90\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0d90\\u0d91\\5\\u043d\\u021f\\2\\u0d91\\u0d92\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0d92\\u0d93\\5\\u045d\\u022f\\2\\u0d93\\u0d94\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0d94\\u0d95\\5\\u045b\\u022e\\2\\u0d95\\u0d96\\7a\\2\\2\\u0d96\")\n buf.write(\"\\u0d97\\5\\u0439\\u021d\\2\\u0d97\\u0d98\\5\\u0435\\u021b\\2\\u0d98\")\n buf.write(\"\\u0d99\\5\\u0439\\u021d\\2\\u0d99\\u0d9a\\5\\u0443\\u0222\\2\\u0d9a\")\n buf.write(\"\\u0d9b\\5\\u043d\\u021f\\2\\u0d9b\\u0276\\3\\2\\2\\2\\u0d9c\\u0d9d\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0d9d\\u0d9e\\5\\u043d\\u021f\\2\\u0d9e\\u0d9f\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0d9f\\u0da0\\5\\u045d\\u022f\\2\\u0da0\\u0da1\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0da1\\u0da2\\5\\u044f\\u0228\\2\\u0da2\\u0278\")\n buf.write(\"\\3\\2\\2\\2\\u0da3\\u0da4\\5\\u0457\\u022c\\2\\u0da4\\u0da5\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0da5\\u0da6\\5\\u045b\\u022e\\2\\u0da6\\u0da7\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u0da7\\u0da8\\5\\u0457\\u022c\\2\\u0da8\\u0da9\\5\\u044f\")\n 
buf.write(\"\\u0228\\2\\u0da9\\u0daa\\5\\u0445\\u0223\\2\\u0daa\\u0dab\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0dab\\u0dac\\5\\u0441\\u0221\\2\\u0dac\\u027a\\3\\2\\2\")\n buf.write(\"\\2\\u0dad\\u0dae\\5\\u0457\\u022c\\2\\u0dae\\u0daf\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0daf\\u0db0\\5\\u045d\\u022f\\2\\u0db0\\u0db1\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0db1\\u0db2\\5\\u043d\\u021f\\2\\u0db2\\u027c\\3\\2\\2\\2\\u0db3\")\n buf.write(\"\\u0db4\\5\\u0457\\u022c\\2\\u0db4\\u0db5\\5\\u043d\\u021f\\2\\u0db5\")\n buf.write(\"\\u0db6\\5\\u045f\\u0230\\2\\u0db6\\u0db7\\5\\u043d\\u021f\\2\\u0db7\")\n buf.write(\"\\u0db8\\5\\u0457\\u022c\\2\\u0db8\\u0db9\\5\\u0459\\u022d\\2\\u0db9\")\n buf.write(\"\\u0dba\\5\\u043d\\u021f\\2\\u0dba\\u027e\\3\\2\\2\\2\\u0dbb\\u0dbc\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0dbc\\u0dbd\\5\\u043d\\u021f\\2\\u0dbd\\u0dbe\")\n buf.write(\"\\5\\u045f\\u0230\\2\\u0dbe\\u0dbf\\5\\u0451\\u0229\\2\\u0dbf\\u0dc0\")\n buf.write(\"\\5\\u0449\\u0225\\2\\u0dc0\\u0dc1\\5\\u043d\\u021f\\2\\u0dc1\\u0280\")\n buf.write(\"\\3\\2\\2\\2\\u0dc2\\u0dc3\\5\\u0457\\u022c\\2\\u0dc3\\u0dc4\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0dc4\\u0dc5\\5\\u0441\\u0221\\2\\u0dc5\\u0dc6\\5\\u0443\")\n buf.write(\"\\u0222\\2\\u0dc6\\u0dc7\\5\\u045b\\u022e\\2\\u0dc7\\u0282\\3\\2\\2\")\n buf.write(\"\\2\\u0dc8\\u0dc9\\5\\u0457\\u022c\\2\\u0dc9\\u0dca\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0dca\\u0dcb\\5\\u044b\\u0226\\2\\u0dcb\\u0dcc\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0dcc\\u0dcd\\5\\u0437\\u021c\\2\\u0dcd\\u0dce\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0dce\\u0dcf\\5\\u0439\\u021d\\2\\u0dcf\\u0dd0\\5\\u0449\\u0225\")\n buf.write(\"\\2\\u0dd0\\u0284\\3\\2\\2\\2\\u0dd1\\u0dd2\\5\\u0457\\u022c\\2\\u0dd2\")\n buf.write(\"\\u0dd3\\5\\u0451\\u0229\\2\\u0dd3\\u0dd4\\5\\u044b\\u0226\\2\\u0dd4\")\n buf.write(\"\\u0dd5\\5\\u044b\\u0226\\2\\u0dd5\\u0dd6\\5\\u045d\\u022f\\2\\u0dd6\")\n buf.write(\"\\u0dd7\\5\\u0453\\u022a\\2\\u0dd7\\u0286\\3\\2\\2\\2\\u0dd8\\u0dd9\")\n 
buf.write(\"\\5\\u0457\\u022c\\2\\u0dd9\\u0dda\\5\\u0451\\u0229\\2\\u0dda\\u0ddb\")\n buf.write(\"\\5\\u0461\\u0231\\2\\u0ddb\\u0288\\3\\2\\2\\2\\u0ddc\\u0ddd\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0ddd\\u0dde\\5\\u0451\\u0229\\2\\u0dde\\u0ddf\\5\\u0461\")\n buf.write(\"\\u0231\\2\\u0ddf\\u0de0\\5\\u0445\\u0223\\2\\u0de0\\u0de1\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u0de1\\u028a\\3\\2\\2\\2\\u0de2\\u0de3\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0de3\\u0de4\\5\\u0451\\u0229\\2\\u0de4\\u0de5\\5\\u0461\\u0231\")\n buf.write(\"\\2\\u0de5\\u0de6\\5\\u0459\\u022d\\2\\u0de6\\u028c\\3\\2\\2\\2\\u0de7\")\n buf.write(\"\\u0de8\\5\\u0457\\u022c\\2\\u0de8\\u0de9\\5\\u045d\\u022f\\2\\u0de9\")\n buf.write(\"\\u0dea\\5\\u044b\\u0226\\2\\u0dea\\u0deb\\5\\u043d\\u021f\\2\\u0deb\")\n buf.write(\"\\u0dec\\5\\u0459\\u022d\\2\\u0dec\\u028e\\3\\2\\2\\2\\u0ded\\u0dee\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0dee\\u0def\\5\\u0435\\u021b\\2\\u0def\\u0df0\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u0df0\\u0df1\\5\\u0453\\u022a\\2\\u0df1\\u0df2\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0df2\\u0df3\\5\\u043d\\u021f\\2\\u0df3\\u0290\")\n buf.write(\"\\3\\2\\2\\2\\u0df4\\u0df5\\5\\u0459\\u022d\\2\\u0df5\\u0df6\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0df6\\u0df7\\5\\u045f\\u0230\\2\\u0df7\\u0df8\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0df8\\u0292\\3\\2\\2\\2\\u0df9\\u0dfa\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0dfa\\u0dfb\\5\\u0435\\u021b\\2\\u0dfb\\u0dfc\\5\\u045f\\u0230\")\n buf.write(\"\\2\\u0dfc\\u0dfd\\5\\u043d\\u021f\\2\\u0dfd\\u0dfe\\5\\u0453\\u022a\")\n buf.write(\"\\2\\u0dfe\\u0dff\\5\\u0451\\u0229\\2\\u0dff\\u0e00\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0e00\\u0e01\\5\\u044f\\u0228\\2\\u0e01\\u0e02\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0e02\\u0294\\3\\2\\2\\2\\u0e03\\u0e04\\5\\u0459\\u022d\\2\\u0e04\")\n buf.write(\"\\u0e05\\5\\u0439\\u021d\\2\\u0e05\\u0e06\\5\\u0443\\u0222\\2\\u0e06\")\n buf.write(\"\\u0e07\\5\\u043d\\u021f\\2\\u0e07\\u0e08\\5\\u044d\\u0227\\2\\u0e08\")\n 
buf.write(\"\\u0e09\\5\\u0435\\u021b\\2\\u0e09\\u0296\\3\\2\\2\\2\\u0e0a\\u0e0b\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0e0b\\u0e0c\\5\\u0439\\u021d\\2\\u0e0c\\u0e0d\")\n buf.write(\"\\5\\u0443\\u0222\\2\\u0e0d\\u0e0e\\5\\u043d\\u021f\\2\\u0e0e\\u0e0f\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u0e0f\\u0e10\\5\\u0435\\u021b\\2\\u0e10\\u0e11\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u0e11\\u0e12\\5\\u0443\\u0222\\2\\u0e12\\u0e13\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0e13\\u0e14\\5\\u0439\\u021d\\2\\u0e14\\u0e15\")\n buf.write(\"\\5\\u0449\\u0225\\2\\u0e15\\u0298\\3\\2\\2\\2\\u0e16\\u0e17\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0e17\\u0e18\\5\\u0439\\u021d\\2\\u0e18\\u0e19\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0e19\\u029a\\3\\2\\2\\2\\u0e1a\\u0e1b\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0e1b\\u0e1c\\5\\u043d\\u021f\\2\\u0e1c\\u0e1d\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0e1d\\u0e1e\\5\\u0457\\u022c\\2\\u0e1e\\u0e1f\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u0e1f\\u0e20\\5\\u0443\\u0222\\2\\u0e20\\u029c\\3\\2\\2\\2\\u0e21\")\n buf.write(\"\\u0e22\\5\\u0459\\u022d\\2\\u0e22\\u0e23\\5\\u043d\\u021f\\2\\u0e23\")\n buf.write(\"\\u0e24\\5\\u0439\\u021d\\2\\u0e24\\u0e25\\5\\u0451\\u0229\\2\\u0e25\")\n buf.write(\"\\u0e26\\5\\u044f\\u0228\\2\\u0e26\\u0e27\\5\\u043b\\u021e\\2\\u0e27\")\n buf.write(\"\\u029e\\3\\2\\2\\2\\u0e28\\u0e29\\5\\u0459\\u022d\\2\\u0e29\\u0e2a\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0e2a\\u0e2b\\5\\u043d\\u021f\\2\\u0e2b\\u0e2c\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u0e2c\\u02a0\\3\\2\\2\\2\\u0e2d\\u0e2e\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0e2e\\u0e2f\\5\\u043d\\u021f\\2\\u0e2f\\u0e30\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u0e30\\u0e31\\5\\u044d\\u0227\\2\\u0e31\\u0e32\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0e32\\u0e33\\5\\u044f\\u0228\\2\\u0e33\\u0e34\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0e34\\u02a2\\3\\2\\2\\2\\u0e35\\u0e36\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0e36\\u0e37\\5\\u043d\\u021f\\2\\u0e37\\u0e38\\5\\u044b\\u0226\")\n 
buf.write(\"\\2\\u0e38\\u0e39\\5\\u043d\\u021f\\2\\u0e39\\u0e3a\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u0e3a\\u0e3b\\5\\u045b\\u022e\\2\\u0e3b\\u02a4\\3\\2\\2\\2\\u0e3c\")\n buf.write(\"\\u0e3d\\5\\u0459\\u022d\\2\\u0e3d\\u0e3e\\5\\u043d\\u021f\\2\\u0e3e\")\n buf.write(\"\\u0e3f\\5\\u044b\\u0226\\2\\u0e3f\\u0e40\\5\\u043f\\u0220\\2\\u0e40\")\n buf.write(\"\\u02a6\\3\\2\\2\\2\\u0e41\\u0e42\\5\\u0459\\u022d\\2\\u0e42\\u0e43\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0e43\\u0e44\\5\\u0455\\u022b\\2\\u0e44\\u0e45\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u0e45\\u0e46\\5\\u043d\\u021f\\2\\u0e46\\u0e47\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0e47\\u0e48\\5\\u0439\\u021d\\2\\u0e48\\u0e49\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0e49\\u02a8\\3\\2\\2\\2\\u0e4a\\u0e4b\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0e4b\\u0e4c\\5\\u043d\\u021f\\2\\u0e4c\\u0e4d\\5\\u0455\")\n buf.write(\"\\u022b\\2\\u0e4d\\u0e4e\\5\\u045d\\u022f\\2\\u0e4e\\u0e4f\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0e4f\\u0e50\\5\\u044f\\u0228\\2\\u0e50\\u0e51\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0e51\\u0e52\\5\\u0445\\u0223\\2\\u0e52\\u0e53\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0e53\\u0e54\\5\\u044b\\u0226\\2\\u0e54\\u02aa\\3\\2\\2\")\n buf.write(\"\\2\\u0e55\\u0e56\\5\\u0459\\u022d\\2\\u0e56\\u0e57\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0e57\\u0e58\\5\\u0457\\u022c\\2\\u0e58\\u0e59\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0e59\\u0e5a\\5\\u0435\\u021b\\2\\u0e5a\\u0e5b\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u0e5b\\u0e5c\\5\\u0445\\u0223\\2\\u0e5c\\u0e5d\\5\\u0467\\u0234\")\n buf.write(\"\\2\\u0e5d\\u0e5e\\5\\u0435\\u021b\\2\\u0e5e\\u0e5f\\5\\u0437\\u021c\")\n buf.write(\"\\2\\u0e5f\\u0e60\\5\\u044b\\u0226\\2\\u0e60\\u0e61\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0e61\\u02ac\\3\\2\\2\\2\\u0e62\\u0e63\\5\\u0459\\u022d\\2\\u0e63\")\n buf.write(\"\\u0e64\\5\\u043d\\u021f\\2\\u0e64\\u0e65\\5\\u0457\\u022c\\2\\u0e65\")\n buf.write(\"\\u0e66\\5\\u0445\\u0223\\2\\u0e66\\u0e67\\5\\u0435\\u021b\\2\\u0e67\")\n 
buf.write(\"\\u0e68\\5\\u044b\\u0226\\2\\u0e68\\u0e69\\5\\u044b\\u0226\\2\\u0e69\")\n buf.write(\"\\u0e6a\\5\\u0465\\u0233\\2\\u0e6a\\u0e6b\\7a\\2\\2\\u0e6b\\u0e6c\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u0e6c\\u0e6d\\5\\u043d\\u021f\\2\\u0e6d\\u0e6e\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u0e6e\\u0e6f\\5\\u0459\\u022d\\2\\u0e6f\\u0e70\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0e70\\u0e71\\5\\u0437\\u021c\\2\\u0e71\\u0e72\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0e72\\u0e73\\5\\u043d\\u021f\\2\\u0e73\\u02ae\")\n buf.write(\"\\3\\2\\2\\2\\u0e74\\u0e75\\5\\u0459\\u022d\\2\\u0e75\\u0e76\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0e76\\u0e77\\5\\u0457\\u022c\\2\\u0e77\\u0e78\\5\\u045f\")\n buf.write(\"\\u0230\\2\\u0e78\\u0e79\\5\\u043d\\u021f\\2\\u0e79\\u0e7a\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0e7a\\u0e7b\\5\\u043d\\u021f\\2\\u0e7b\\u0e7c\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0e7c\\u0e7d\\5\\u0457\\u022c\\2\\u0e7d\\u0e7e\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0e7e\\u0e7f\\5\\u0457\\u022c\\2\\u0e7f\\u02b0\\3\\2\\2\")\n buf.write(\"\\2\\u0e80\\u0e81\\5\\u0459\\u022d\\2\\u0e81\\u0e82\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0e82\\u0e83\\5\\u0459\\u022d\\2\\u0e83\\u0e84\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0e84\\u0e85\\5\\u0445\\u0223\\2\\u0e85\\u0e86\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0e86\\u0e87\\5\\u044f\\u0228\\2\\u0e87\\u0e88\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0e88\\u0e89\\5\\u0445\\u0223\\2\\u0e89\\u0e8a\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u0e8a\\u0e8b\\5\\u043d\\u021f\\2\\u0e8b\\u0e8c\\5\\u0467\\u0234\")\n buf.write(\"\\2\\u0e8c\\u0e8d\\5\\u0451\\u0229\\2\\u0e8d\\u0e8e\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0e8e\\u0e8f\\5\\u043d\\u021f\\2\\u0e8f\\u02b2\\3\\2\\2\\2\\u0e90\")\n buf.write(\"\\u0e91\\5\\u0459\\u022d\\2\\u0e91\\u0e92\\5\\u043d\\u021f\\2\\u0e92\")\n buf.write(\"\\u0e93\\5\\u045b\\u022e\\2\\u0e93\\u02b4\\3\\2\\2\\2\\u0e94\\u0e95\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0e95\\u0e96\\5\\u043d\\u021f\\2\\u0e96\\u0e97\")\n 
buf.write(\"\\5\\u045b\\u022e\\2\\u0e97\\u0e98\\5\\u0459\\u022d\\2\\u0e98\\u02b6\")\n buf.write(\"\\3\\2\\2\\2\\u0e99\\u0e9a\\5\\u0459\\u022d\\2\\u0e9a\\u0e9b\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0e9b\\u0e9c\\5\\u045b\\u022e\\2\\u0e9c\\u0e9d\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0e9d\\u0e9e\\5\\u0445\\u0223\\2\\u0e9e\\u0e9f\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0e9f\\u0ea0\\5\\u0441\\u0221\\2\\u0ea0\\u0ea1\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0ea1\\u02b8\\3\\2\\2\\2\\u0ea2\\u0ea3\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0ea3\\u0ea4\\5\\u0443\\u0222\\2\\u0ea4\\u0ea5\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0ea5\\u0ea6\\5\\u0457\\u022c\\2\\u0ea6\\u0ea7\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0ea7\\u02ba\\3\\2\\2\\2\\u0ea8\\u0ea9\\5\\u0459\\u022d\\2\\u0ea9\")\n buf.write(\"\\u0eaa\\5\\u0443\\u0222\\2\\u0eaa\\u0eab\\5\\u0451\\u0229\\2\\u0eab\")\n buf.write(\"\\u0eac\\5\\u0461\\u0231\\2\\u0eac\\u02bc\\3\\2\\2\\2\\u0ead\\u0eae\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0eae\\u0eaf\\5\\u0443\\u0222\\2\\u0eaf\\u0eb0\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u0eb0\\u0eb1\\5\\u045b\\u022e\\2\\u0eb1\\u0eb2\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u0eb2\\u0eb3\\5\\u0451\\u0229\\2\\u0eb3\\u0eb4\")\n buf.write(\"\\5\\u0461\\u0231\\2\\u0eb4\\u0eb5\\5\\u044f\\u0228\\2\\u0eb5\\u02be\")\n buf.write(\"\\3\\2\\2\\2\\u0eb6\\u0eb7\\5\\u0459\\u022d\\2\\u0eb7\\u0eb8\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0eb8\\u0eb9\\5\\u0437\\u021c\\2\\u0eb9\\u0eba\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u0eba\\u0ebb\\5\\u0445\\u0223\\2\\u0ebb\\u0ebc\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u0ebc\\u0ebd\\5\\u0441\\u0221\\2\\u0ebd\\u0ebe\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0ebe\\u02c0\\3\\2\\2\\2\\u0ebf\\u0ec0\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0ec0\\u0ec1\\5\\u0445\\u0223\\2\\u0ec1\\u0ec2\\5\\u0441\\u0221\")\n buf.write(\"\\2\\u0ec2\\u0ec3\\5\\u044f\\u0228\\2\\u0ec3\\u0ec4\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0ec4\\u0ec5\\5\\u0465\\u0233\\2\\u0ec5\\u0ec6\\5\\u0453\\u022a\")\n 
buf.write(\"\\2\\u0ec6\\u0ec7\\5\\u043d\\u021f\\2\\u0ec7\\u02c2\\3\\2\\2\\2\\u0ec8\")\n buf.write(\"\\u0ec9\\5\\u0459\\u022d\\2\\u0ec9\\u0eca\\5\\u0445\\u0223\\2\\u0eca\")\n buf.write(\"\\u0ecb\\5\\u044d\\u0227\\2\\u0ecb\\u0ecc\\5\\u0453\\u022a\\2\\u0ecc\")\n buf.write(\"\\u0ecd\\5\\u044b\\u0226\\2\\u0ecd\\u0ece\\5\\u043d\\u021f\\2\\u0ece\")\n buf.write(\"\\u0ecf\\7a\\2\\2\\u0ecf\\u0ed0\\5\\u0445\\u0223\\2\\u0ed0\\u0ed1\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0ed1\\u0ed2\\5\\u045b\\u022e\\2\\u0ed2\\u0ed3\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0ed3\\u0ed4\\5\\u0441\\u0221\\2\\u0ed4\\u0ed5\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0ed5\\u0ed6\\5\\u0457\\u022c\\2\\u0ed6\\u02c4\")\n buf.write(\"\\3\\2\\2\\2\\u0ed7\\u0ed8\\5\\u0459\\u022d\\2\\u0ed8\\u0ed9\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0ed9\\u0eda\\5\\u044f\\u0228\\2\\u0eda\\u0edb\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u0edb\\u0edc\\5\\u044b\\u0226\\2\\u0edc\\u0edd\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0edd\\u02c6\\3\\2\\2\\2\\u0ede\\u0edf\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0edf\\u0ee0\\5\\u0445\\u0223\\2\\u0ee0\\u0ee1\\5\\u0467\\u0234\")\n buf.write(\"\\2\\u0ee1\\u0ee2\\5\\u043d\\u021f\\2\\u0ee2\\u02c8\\3\\2\\2\\2\\u0ee3\")\n buf.write(\"\\u0ee4\\5\\u0459\\u022d\\2\\u0ee4\\u0ee5\\5\\u0449\\u0225\\2\\u0ee5\")\n buf.write(\"\\u0ee6\\5\\u0445\\u0223\\2\\u0ee6\\u0ee7\\5\\u0453\\u022a\\2\\u0ee7\")\n buf.write(\"\\u02ca\\3\\2\\2\\2\\u0ee8\\u0ee9\\5\\u0459\\u022d\\2\\u0ee9\\u0eea\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u0eea\\u0eeb\\5\\u0435\\u021b\\2\\u0eeb\\u0eec\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u0eec\\u0eed\\5\\u044b\\u0226\\2\\u0eed\\u0eee\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0eee\\u0eef\\5\\u044f\\u0228\\2\\u0eef\\u0ef0\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0ef0\\u02cc\\3\\2\\2\\2\\u0ef1\\u0ef2\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0ef2\\u0ef3\\5\\u044f\\u0228\\2\\u0ef3\\u0ef4\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0ef4\\u0ef5\\5\\u0453\\u022a\\2\\u0ef5\\u0ef6\\5\\u0459\")\n 
buf.write(\"\\u022d\\2\\u0ef6\\u0ef7\\5\\u0443\\u0222\\2\\u0ef7\\u0ef8\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u0ef8\\u0ef9\\5\\u045b\\u022e\\2\\u0ef9\\u02ce\\3\\2\\2\")\n buf.write(\"\\2\\u0efa\\u0efb\\5\\u0459\\u022d\\2\\u0efb\\u0efc\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u0efc\\u0efd\\5\\u044d\\u0227\\2\\u0efd\\u0efe\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0efe\\u02d0\\3\\2\\2\\2\\u0eff\\u0f00\\5\\u0459\\u022d\\2\\u0f00\")\n buf.write(\"\\u0f01\\5\\u0453\\u022a\\2\\u0f01\\u0f02\\5\\u043d\\u021f\\2\\u0f02\")\n buf.write(\"\\u0f03\\5\\u0439\\u021d\\2\\u0f03\\u0f04\\5\\u0445\\u0223\\2\\u0f04\")\n buf.write(\"\\u0f05\\5\\u043f\\u0220\\2\\u0f05\\u0f06\\5\\u0445\\u0223\\2\\u0f06\")\n buf.write(\"\\u0f07\\5\\u0439\\u021d\\2\\u0f07\\u0f08\\5\\u0435\\u021b\\2\\u0f08\")\n buf.write(\"\\u0f09\\5\\u045b\\u022e\\2\\u0f09\\u0f0a\\5\\u0445\\u0223\\2\\u0f0a\")\n buf.write(\"\\u0f0b\\5\\u0451\\u0229\\2\\u0f0b\\u0f0c\\5\\u044f\\u0228\\2\\u0f0c\")\n buf.write(\"\\u02d2\\3\\2\\2\\2\\u0f0d\\u0f0e\\5\\u0459\\u022d\\2\\u0f0e\\u0f0f\")\n buf.write(\"\\5\\u0455\\u022b\\2\\u0f0f\\u0f10\\5\\u044b\\u0226\\2\\u0f10\\u0f11\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u0f11\\u0f12\\5\\u0435\\u021b\\2\\u0f12\\u0f13\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0f13\\u0f14\\5\\u0435\\u021b\\2\\u0f14\\u02d4\")\n buf.write(\"\\3\\2\\2\\2\\u0f15\\u0f16\\5\\u0459\\u022d\\2\\u0f16\\u0f17\\5\\u0455\")\n buf.write(\"\\u022b\\2\\u0f17\\u0f18\\5\\u044b\\u0226\\2\\u0f18\\u0f19\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0f19\\u0f1a\\5\\u0457\\u022c\\2\\u0f1a\\u0f1b\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0f1b\\u0f1c\\5\\u0451\\u0229\\2\\u0f1c\\u0f1d\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u0f1d\\u02d6\\3\\2\\2\\2\\u0f1e\\u0f1f\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0f1f\\u0f20\\5\\u045b\\u022e\\2\\u0f20\\u0f21\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0f21\\u0f22\\5\\u044f\\u0228\\2\\u0f22\\u0f23\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u0f23\\u0f24\\5\\u0435\\u021b\\2\\u0f24\\u0f25\\5\\u044b\\u0226\")\n 
buf.write(\"\\2\\u0f25\\u0f26\\5\\u0451\\u0229\\2\\u0f26\\u0f27\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0f27\\u0f28\\5\\u043d\\u021f\\2\\u0f28\\u02d8\\3\\2\\2\\2\\u0f29\")\n buf.write(\"\\u0f2a\\5\\u0459\\u022d\\2\\u0f2a\\u0f2b\\5\\u045b\\u022e\\2\\u0f2b\")\n buf.write(\"\\u0f2c\\5\\u0435\\u021b\\2\\u0f2c\\u0f2d\\5\\u0457\\u022c\\2\\u0f2d\")\n buf.write(\"\\u0f2e\\5\\u045b\\u022e\\2\\u0f2e\\u02da\\3\\2\\2\\2\\u0f2f\\u0f30\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0f30\\u0f31\\5\\u045b\\u022e\\2\\u0f31\\u0f32\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0f32\\u0f33\\5\\u0457\\u022c\\2\\u0f33\\u0f34\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0f34\\u0f35\\5\\u045d\\u022f\\2\\u0f35\\u0f36\")\n buf.write(\"\\5\\u0453\\u022a\\2\\u0f36\\u02dc\\3\\2\\2\\2\\u0f37\\u0f38\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0f38\\u0f39\\5\\u045b\\u022e\\2\\u0f39\\u0f3a\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0f3a\\u0f3b\\5\\u045b\\u022e\\2\\u0f3b\\u0f3c\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0f3c\\u0f3d\\5\\u044d\\u0227\\2\\u0f3d\\u0f3e\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0f3e\\u0f3f\\5\\u044f\\u0228\\2\\u0f3f\\u0f40\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0f40\\u02de\\3\\2\\2\\2\\u0f41\\u0f42\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0f42\\u0f43\\5\\u045b\\u022e\\2\\u0f43\\u0f44\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u0f44\\u0f45\\5\\u045b\\u022e\\2\\u0f45\\u0f46\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0f46\\u0f47\\5\\u044d\\u0227\\2\\u0f47\\u0f48\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0f48\\u0f49\\5\\u044f\\u0228\\2\\u0f49\\u0f4a\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0f4a\\u0f4b\\7a\\2\\2\\u0f4b\\u0f4c\\5\\u0445\\u0223\\2\\u0f4c\")\n buf.write(\"\\u0f4d\\5\\u043b\\u021e\\2\\u0f4d\\u02e0\\3\\2\\2\\2\\u0f4e\\u0f4f\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0f4f\\u0f50\\5\\u045b\\u022e\\2\\u0f50\\u0f51\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0f51\\u0f52\\5\\u045b\\u022e\\2\\u0f52\\u0f53\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u0f53\\u0f54\\5\\u0439\\u021d\\2\\u0f54\\u02e2\")\n 
buf.write(\"\\3\\2\\2\\2\\u0f55\\u0f56\\5\\u0459\\u022d\\2\\u0f56\\u0f57\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0f57\\u0f58\\5\\u0435\\u021b\\2\\u0f58\\u0f59\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0f59\\u0f5a\\5\\u0445\\u0223\\2\\u0f5a\\u0f5b\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0f5b\\u0f5c\\5\\u045b\\u022e\\2\\u0f5c\\u0f5d\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0f5d\\u0f5e\\5\\u0439\\u021d\\2\\u0f5e\\u0f5f\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0f5f\\u02e4\\3\\2\\2\\2\\u0f60\\u0f61\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u0f61\\u0f62\\5\\u045b\\u022e\\2\\u0f62\\u0f63\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u0f63\\u0f64\\5\\u0445\\u0223\\2\\u0f64\\u0f65\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u0f65\\u0f66\\5\\u0441\\u0221\\2\\u0f66\\u02e6\\3\\2\\2\\2\\u0f67\")\n buf.write(\"\\u0f68\\5\\u0459\\u022d\\2\\u0f68\\u0f69\\5\\u045d\\u022f\\2\\u0f69\")\n buf.write(\"\\u0f6a\\5\\u0437\\u021c\\2\\u0f6a\\u0f6b\\5\\u044d\\u0227\\2\\u0f6b\")\n buf.write(\"\\u0f6c\\5\\u045d\\u022f\\2\\u0f6c\\u0f6d\\5\\u044b\\u0226\\2\\u0f6d\")\n buf.write(\"\\u0f6e\\5\\u045b\\u022e\\2\\u0f6e\\u0f6f\\5\\u0445\\u0223\\2\\u0f6f\")\n buf.write(\"\\u0f70\\5\\u0459\\u022d\\2\\u0f70\\u0f71\\5\\u043d\\u021f\\2\\u0f71\")\n buf.write(\"\\u0f72\\5\\u045b\\u022e\\2\\u0f72\\u02e8\\3\\2\\2\\2\\u0f73\\u0f74\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0f74\\u0f75\\5\\u045d\\u022f\\2\\u0f75\\u0f76\")\n buf.write(\"\\5\\u0437\\u021c\\2\\u0f76\\u0f77\\5\\u0453\\u022a\\2\\u0f77\\u0f78\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0f78\\u0f79\\5\\u0457\\u022c\\2\\u0f79\\u0f7a\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0f7a\\u0f7b\\5\\u0445\\u0223\\2\\u0f7b\\u0f7c\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0f7c\\u0f7d\\5\\u0445\\u0223\\2\\u0f7d\\u0f7e\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u0f7e\\u0f7f\\5\\u044f\\u0228\\2\\u0f7f\\u02ea\")\n buf.write(\"\\3\\2\\2\\2\\u0f80\\u0f81\\5\\u0459\\u022d\\2\\u0f81\\u0f82\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u0f82\\u0f83\\5\\u0437\\u021c\\2\\u0f83\\u0f84\\5\\u0459\")\n 
buf.write(\"\\u022d\\2\\u0f84\\u0f85\\5\\u045b\\u022e\\2\\u0f85\\u0f86\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0f86\\u0f87\\5\\u045b\\u022e\\2\\u0f87\\u0f88\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u0f88\\u0f89\\5\\u045b\\u022e\\2\\u0f89\\u0f8a\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0f8a\\u0f8b\\5\\u0437\\u021c\\2\\u0f8b\\u0f8c\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u0f8c\\u0f8d\\5\\u043d\\u021f\\2\\u0f8d\\u02ec\\3\\2\\2\")\n buf.write(\"\\2\\u0f8e\\u0f8f\\5\\u0459\\u022d\\2\\u0f8f\\u0f90\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u0f90\\u0f91\\5\\u0437\\u021c\\2\\u0f91\\u0f92\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0f92\\u0f93\\5\\u0465\\u0233\\2\\u0f93\\u0f94\\5\\u0453\\u022a\")\n buf.write(\"\\2\\u0f94\\u0f95\\5\\u043d\\u021f\\2\\u0f95\\u02ee\\3\\2\\2\\2\\u0f96\")\n buf.write(\"\\u0f97\\5\\u0459\\u022d\\2\\u0f97\\u0f98\\5\\u045d\\u022f\\2\\u0f98\")\n buf.write(\"\\u0f99\\5\\u0439\\u021d\\2\\u0f99\\u0f9a\\5\\u0439\\u021d\\2\\u0f9a\")\n buf.write(\"\\u0f9b\\5\\u043d\\u021f\\2\\u0f9b\\u0f9c\\5\\u0459\\u022d\\2\\u0f9c\")\n buf.write(\"\\u0f9d\\5\\u0459\\u022d\\2\\u0f9d\\u02f0\\3\\2\\2\\2\\u0f9e\\u0f9f\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0f9f\\u0fa0\\5\\u045d\\u022f\\2\\u0fa0\\u0fa1\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u0fa1\\u0fa2\\5\\u0453\\u022a\\2\\u0fa2\\u0fa3\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u0fa3\\u0fa4\\5\\u044f\\u0228\\2\\u0fa4\\u0fa5\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u0fa5\\u02f2\\3\\2\\2\\2\\u0fa6\\u0fa7\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0fa7\\u0fa8\\5\\u0435\\u021b\\2\\u0fa8\\u0fa9\\5\\u0437\")\n buf.write(\"\\u021c\\2\\u0fa9\\u0faa\\5\\u044b\\u0226\\2\\u0faa\\u0fab\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0fab\\u02f4\\3\\2\\2\\2\\u0fac\\u0fad\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0fad\\u0fae\\5\\u0443\\u0222\\2\\u0fae\\u0faf\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0faf\\u02f6\\3\\2\\2\\2\\u0fb0\\u0fb1\\5\\u045b\\u022e\\2\\u0fb1\")\n buf.write(\"\\u0fb2\\5\\u0443\\u0222\\2\\u0fb2\\u0fb3\\5\\u043d\\u021f\\2\\u0fb3\")\n 
buf.write(\"\\u0fb4\\5\\u044f\\u0228\\2\\u0fb4\\u02f8\\3\\2\\2\\2\\u0fb5\\u0fb6\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0fb6\\u0fb7\\5\\u0445\\u0223\\2\\u0fb7\\u0fb8\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u0fb8\\u0fb9\\5\\u043d\\u021f\\2\\u0fb9\\u02fa\")\n buf.write(\"\\3\\2\\2\\2\\u0fba\\u0fbb\\5\\u045b\\u022e\\2\\u0fbb\\u0fbc\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u0fbc\\u0fbd\\5\\u044d\\u0227\\2\\u0fbd\\u0fbe\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u0fbe\\u0fbf\\5\\u0459\\u022d\\2\\u0fbf\\u0fc0\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0fc0\\u0fc1\\5\\u0435\\u021b\\2\\u0fc1\\u0fc2\\5\\u044d\")\n buf.write(\"\\u0227\\2\\u0fc2\\u0fc3\\5\\u0453\\u022a\\2\\u0fc3\\u02fc\\3\\2\\2\")\n buf.write(\"\\2\\u0fc4\\u0fc5\\5\\u045b\\u022e\\2\\u0fc5\\u0fc6\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u0fc6\\u0fc7\\5\\u044d\\u0227\\2\\u0fc7\\u0fc8\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u0fc8\\u0fc9\\5\\u0459\\u022d\\2\\u0fc9\\u0fca\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0fca\\u0fcb\\5\\u0435\\u021b\\2\\u0fcb\\u0fcc\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u0fcc\\u0fcd\\5\\u0453\\u022a\\2\\u0fcd\\u0fce\\7a\\2\\2\\u0fce\")\n buf.write(\"\\u0fcf\\5\\u044b\\u0226\\2\\u0fcf\\u0fd0\\5\\u045b\\u022e\\2\\u0fd0\")\n buf.write(\"\\u0fd1\\5\\u0467\\u0234\\2\\u0fd1\\u0fd2\\7a\\2\\2\\u0fd2\\u0fd3\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u0fd3\\u0fd4\\5\\u044f\\u0228\\2\\u0fd4\\u0fd5\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u0fd5\\u0fd6\\5\\u0451\\u0229\\2\\u0fd6\\u0fd7\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0fd7\\u0fd8\\5\\u0459\\u022d\\2\\u0fd8\\u0fd9\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0fd9\\u0fda\\5\\u0457\\u022c\\2\\u0fda\\u0fdb\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u0fdb\\u0fdc\\5\\u0445\\u0223\\2\\u0fdc\\u0fdd\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u0fdd\\u0fde\\5\\u043d\\u021f\\2\\u0fde\\u0fdf\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u0fdf\\u02fe\\3\\2\\2\\2\\u0fe0\\u0fe1\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u0fe1\\u0fe2\\5\\u0445\\u0223\\2\\u0fe2\\u0fe3\\5\\u044d\")\n 
buf.write(\"\\u0227\\2\\u0fe3\\u0fe4\\5\\u043d\\u021f\\2\\u0fe4\\u0fe5\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u0fe5\\u0fe6\\5\\u045b\\u022e\\2\\u0fe6\\u0fe7\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u0fe7\\u0fe8\\5\\u044d\\u0227\\2\\u0fe8\\u0fe9\\5\\u0453\")\n buf.write(\"\\u022a\\2\\u0fe9\\u0fea\\7a\\2\\2\\u0fea\\u0feb\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u0feb\\u0fec\\5\\u0467\\u0234\\2\\u0fec\\u0fed\\7a\\2\\2\\u0fed\")\n buf.write(\"\\u0fee\\5\\u045d\\u022f\\2\\u0fee\\u0fef\\5\\u044f\\u0228\\2\\u0fef\")\n buf.write(\"\\u0ff0\\5\\u0439\\u021d\\2\\u0ff0\\u0ff1\\5\\u0451\\u0229\\2\\u0ff1\")\n buf.write(\"\\u0ff2\\5\\u044f\\u0228\\2\\u0ff2\\u0ff3\\5\\u0459\\u022d\\2\\u0ff3\")\n buf.write(\"\\u0ff4\\5\\u045b\\u022e\\2\\u0ff4\\u0ff5\\5\\u0457\\u022c\\2\\u0ff5\")\n buf.write(\"\\u0ff6\\5\\u0435\\u021b\\2\\u0ff6\\u0ff7\\5\\u0445\\u0223\\2\\u0ff7\")\n buf.write(\"\\u0ff8\\5\\u044f\\u0228\\2\\u0ff8\\u0ff9\\5\\u043d\\u021f\\2\\u0ff9\")\n buf.write(\"\\u0ffa\\5\\u043b\\u021e\\2\\u0ffa\\u0300\\3\\2\\2\\2\\u0ffb\\u0ffc\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u0ffc\\u0ffd\\5\\u0445\\u0223\\2\\u0ffd\\u0ffe\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u0ffe\\u0fff\\5\\u043d\\u021f\\2\\u0fff\\u1000\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u1000\\u1001\\5\\u045b\\u022e\\2\\u1001\\u1002\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u1002\\u1003\\5\\u044d\\u0227\\2\\u1003\\u1004\")\n buf.write(\"\\5\\u0453\\u022a\\2\\u1004\\u1005\\7a\\2\\2\\u1005\\u1006\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u1006\\u1007\\5\\u044f\\u0228\\2\\u1007\\u1008\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u1008\\u1009\\5\\u0451\\u0229\\2\\u1009\\u100a\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u100a\\u100b\\5\\u0459\\u022d\\2\\u100b\\u100c\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u100c\\u100d\\5\\u0457\\u022c\\2\\u100d\\u100e\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u100e\\u100f\\5\\u0445\\u0223\\2\\u100f\\u1010\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u1010\\u1011\\5\\u043d\\u021f\\2\\u1011\\u1012\\5\\u043b\")\n 
buf.write(\"\\u021e\\2\\u1012\\u0302\\3\\2\\2\\2\\u1013\\u1014\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u1014\\u1015\\5\\u0445\\u0223\\2\\u1015\\u1016\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u1016\\u1017\\5\\u043d\\u021f\\2\\u1017\\u1018\\5\\u0467\\u0234\")\n buf.write(\"\\2\\u1018\\u1019\\5\\u0451\\u0229\\2\\u1019\\u101a\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u101a\\u101b\\5\\u043d\\u021f\\2\\u101b\\u101c\\7a\\2\\2\\u101c\")\n buf.write(\"\\u101d\\5\\u0435\\u021b\\2\\u101d\\u101e\\5\\u0437\\u021c\\2\\u101e\")\n buf.write(\"\\u101f\\5\\u0437\\u021c\\2\\u101f\\u1020\\5\\u0457\\u022c\\2\\u1020\")\n buf.write(\"\\u0304\\3\\2\\2\\2\\u1021\\u1022\\5\\u045b\\u022e\\2\\u1022\\u1023\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u1023\\u1024\\5\\u044d\\u0227\\2\\u1024\\u1025\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u1025\\u1026\\5\\u0467\\u0234\\2\\u1026\\u1027\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u1027\\u1028\\5\\u044f\\u0228\\2\\u1028\\u1029\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u1029\\u102a\\7a\\2\\2\\u102a\\u102b\\5\\u0443\")\n buf.write(\"\\u0222\\2\\u102b\\u102c\\5\\u0451\\u0229\\2\\u102c\\u102d\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u102d\\u102e\\5\\u0457\\u022c\\2\\u102e\\u0306\\3\\2\\2\")\n buf.write(\"\\2\\u102f\\u1030\\5\\u045b\\u022e\\2\\u1030\\u1031\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u1031\\u1032\\5\\u044d\\u0227\\2\\u1032\\u1033\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u1033\\u1034\\5\\u0467\\u0234\\2\\u1034\\u1035\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u1035\\u1036\\5\\u044f\\u0228\\2\\u1036\\u1037\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u1037\\u1038\\7a\\2\\2\\u1038\\u1039\\5\\u044d\\u0227\\2\\u1039\")\n buf.write(\"\\u103a\\5\\u0445\\u0223\\2\\u103a\\u103b\\5\\u044f\\u0228\\2\\u103b\")\n buf.write(\"\\u103c\\5\\u045d\\u022f\\2\\u103c\\u103d\\5\\u045b\\u022e\\2\\u103d\")\n buf.write(\"\\u103e\\5\\u043d\\u021f\\2\\u103e\\u0308\\3\\2\\2\\2\\u103f\\u1040\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u1040\\u1041\\5\\u0445\\u0223\\2\\u1041\\u1042\")\n 
buf.write(\"\\5\\u044d\\u0227\\2\\u1042\\u1043\\5\\u043d\\u021f\\2\\u1043\\u1044\")\n buf.write(\"\\5\\u0467\\u0234\\2\\u1044\\u1045\\5\\u0451\\u0229\\2\\u1045\\u1046\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u1046\\u1047\\5\\u043d\\u021f\\2\\u1047\\u1048\")\n buf.write(\"\\7a\\2\\2\\u1048\\u1049\\5\\u0457\\u022c\\2\\u1049\\u104a\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u104a\\u104b\\5\\u0441\\u0221\\2\\u104b\\u104c\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u104c\\u104d\\5\\u0451\\u0229\\2\\u104d\\u104e\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u104e\\u030a\\3\\2\\2\\2\\u104f\\u1050\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u1050\\u1051\\5\\u0451\\u0229\\2\\u1051\\u030c\\3\\2\\2\\2\\u1052\")\n buf.write(\"\\u1053\\5\\u045b\\u022e\\2\\u1053\\u1054\\5\\u0457\\u022c\\2\\u1054\")\n buf.write(\"\\u1055\\5\\u0435\\u021b\\2\\u1055\\u1056\\5\\u0445\\u0223\\2\\u1056\")\n buf.write(\"\\u1057\\5\\u044b\\u0226\\2\\u1057\\u1058\\5\\u0445\\u0223\\2\\u1058\")\n buf.write(\"\\u1059\\5\\u044f\\u0228\\2\\u1059\\u105a\\5\\u0441\\u0221\\2\\u105a\")\n buf.write(\"\\u030e\\3\\2\\2\\2\\u105b\\u105c\\5\\u045b\\u022e\\2\\u105c\\u105d\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u105d\\u105e\\5\\u0435\\u021b\\2\\u105e\\u105f\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u105f\\u1060\\5\\u0459\\u022d\\2\\u1060\\u1061\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u1061\\u1062\\5\\u0439\\u021d\\2\\u1062\\u1063\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u1063\\u1064\\5\\u0445\\u0223\\2\\u1064\\u1065\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u1065\\u1066\\5\\u044f\\u0228\\2\\u1066\\u0310\")\n buf.write(\"\\3\\2\\2\\2\\u1067\\u1068\\5\\u045b\\u022e\\2\\u1068\\u1069\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u1069\\u106a\\5\\u0435\\u021b\\2\\u106a\\u106b\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u106b\\u106c\\5\\u0459\\u022d\\2\\u106c\\u106d\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u106d\\u106e\\5\\u0435\\u021b\\2\\u106e\\u106f\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u106f\\u1070\\5\\u043d\\u021f\\2\\u1070\\u0312\\3\\2\\2\")\n 
buf.write(\"\\2\\u1071\\u1072\\5\\u045b\\u022e\\2\\u1072\\u1073\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u1073\\u1074\\5\\u043d\\u021f\\2\\u1074\\u1075\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u1075\\u1076\\5\\u045b\\u022e\\2\\u1076\\u0314\\3\\2\\2\\2\\u1077\")\n buf.write(\"\\u1078\\5\\u045b\\u022e\\2\\u1078\\u1079\\5\\u0457\\u022c\\2\\u1079\")\n buf.write(\"\\u107a\\5\\u0445\\u0223\\2\\u107a\\u107b\\5\\u0441\\u0221\\2\\u107b\")\n buf.write(\"\\u107c\\5\\u0441\\u0221\\2\\u107c\\u107d\\5\\u043d\\u021f\\2\\u107d\")\n buf.write(\"\\u107e\\5\\u0457\\u022c\\2\\u107e\\u0316\\3\\2\\2\\2\\u107f\\u1080\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u1080\\u1081\\5\\u0457\\u022c\\2\\u1081\\u1082\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u1082\\u1083\\5\\u044d\\u0227\\2\\u1083\\u0318\")\n buf.write(\"\\3\\2\\2\\2\\u1084\\u1085\\5\\u045b\\u022e\\2\\u1085\\u1086\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u1086\\u1087\\5\\u045d\\u022f\\2\\u1087\\u1088\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u1088\\u031a\\3\\2\\2\\2\\u1089\\u108a\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u108a\\u108b\\5\\u0457\\u022c\\2\\u108b\\u108c\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u108c\\u108d\\5\\u044f\\u0228\\2\\u108d\\u108e\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u108e\\u108f\\5\\u0435\\u021b\\2\\u108f\\u1090\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u1090\\u1091\\5\\u043d\\u021f\\2\\u1091\\u031c\\3\\2\\2\\2\\u1092\")\n buf.write(\"\\u1093\\5\\u045b\\u022e\\2\\u1093\\u1094\\5\\u0465\\u0233\\2\\u1094\")\n buf.write(\"\\u1095\\5\\u0453\\u022a\\2\\u1095\\u1096\\5\\u043d\\u021f\\2\\u1096\")\n buf.write(\"\\u031e\\3\\2\\2\\2\\u1097\\u1098\\5\\u045d\\u022f\\2\\u1098\\u1099\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u1099\\u109a\\5\\u0437\\u021c\\2\\u109a\\u109b\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u109b\\u109c\\5\\u045d\\u022f\\2\\u109c\\u109d\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u109d\\u109e\\5\\u043b\\u021e\\2\\u109e\\u109f\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u109f\\u10a0\\5\\u043b\\u021e\\2\\u10a0\\u0320\")\n 
buf.write(\"\\3\\2\\2\\2\\u10a1\\u10a2\\5\\u045d\\u022f\\2\\u10a2\\u10a3\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u10a3\\u10a4\\5\\u043b\\u021e\\2\\u10a4\\u10a5\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u10a5\\u10a6\\5\\u0457\\u022c\\2\\u10a6\\u0322\\3\\2\\2\")\n buf.write(\"\\2\\u10a7\\u10a8\\5\\u045d\\u022f\\2\\u10a8\\u10a9\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u10a9\\u10aa\\5\\u0445\\u0223\\2\\u10aa\\u10ab\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u10ab\\u10ac\\5\\u044f\\u0228\\2\\u10ac\\u0324\\3\\2\\2\\2\\u10ad\")\n buf.write(\"\\u10ae\\5\\u045d\\u022f\\2\\u10ae\\u10af\\5\\u044f\\u0228\\2\\u10af\")\n buf.write(\"\\u10b0\\5\\u0445\\u0223\\2\\u10b0\\u10b1\\5\\u0455\\u022b\\2\\u10b1\")\n buf.write(\"\\u10b2\\5\\u045d\\u022f\\2\\u10b2\\u10b3\\5\\u043d\\u021f\\2\\u10b3\")\n buf.write(\"\\u0326\\3\\2\\2\\2\\u10b4\\u10b5\\5\\u045d\\u022f\\2\\u10b5\\u10b6\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u10b6\\u10b7\\5\\u044b\\u0226\\2\\u10b7\\u10b8\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u10b8\\u10b9\\5\\u044d\\u0227\\2\\u10b9\\u10ba\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u10ba\\u10bb\\5\\u045b\\u022e\\2\\u10bb\\u10bc\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u10bc\\u10bd\\5\\u043b\\u021e\\2\\u10bd\\u0328\")\n buf.write(\"\\3\\2\\2\\2\\u10be\\u10bf\\5\\u045d\\u022f\\2\\u10bf\\u10c0\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u10c0\\u10c1\\5\\u0453\\u022a\\2\\u10c1\\u10c2\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u10c2\\u10c3\\5\\u045f\\u0230\\2\\u10c3\\u10c4\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u10c4\\u10c5\\5\\u045b\\u022e\\2\\u10c5\\u032a\\3\\2\\2\")\n buf.write(\"\\2\\u10c6\\u10c7\\5\\u045d\\u022f\\2\\u10c7\\u10c8\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u10c8\\u10c9\\5\\u045b\\u022e\\2\\u10c9\\u10ca\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u10ca\\u10cb\\5\\u044b\\u0226\\2\\u10cb\\u032c\\3\\2\\2\\2\\u10cc\")\n buf.write(\"\\u10cd\\5\\u045d\\u022f\\2\\u10cd\\u10ce\\5\\u0453\\u022a\\2\\u10ce\")\n buf.write(\"\\u10cf\\5\\u043b\\u021e\\2\\u10cf\\u10d0\\5\\u0435\\u021b\\2\\u10d0\")\n 
buf.write(\"\\u10d1\\5\\u045b\\u022e\\2\\u10d1\\u10d2\\5\\u043d\\u021f\\2\\u10d2\")\n buf.write(\"\\u032e\\3\\2\\2\\2\\u10d3\\u10d4\\5\\u045d\\u022f\\2\\u10d4\\u10d5\")\n buf.write(\"\\5\\u0453\\u022a\\2\\u10d5\\u10d6\\5\\u043b\\u021e\\2\\u10d6\\u10d7\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u10d7\\u10d8\\5\\u045b\\u022e\\2\\u10d8\\u10d9\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u10d9\\u10da\\5\\u043b\\u021e\\2\\u10da\\u0330\")\n buf.write(\"\\3\\2\\2\\2\\u10db\\u10dc\\5\\u045d\\u022f\\2\\u10dc\\u10dd\\5\\u0453\")\n buf.write(\"\\u022a\\2\\u10dd\\u10de\\5\\u0459\\u022d\\2\\u10de\\u10df\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u10df\\u10e0\\5\\u0457\\u022c\\2\\u10e0\\u10e1\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u10e1\\u0332\\3\\2\\2\\2\\u10e2\\u10e3\\5\\u045d\\u022f\")\n buf.write(\"\\2\\u10e3\\u10e4\\5\\u0457\\u022c\\2\\u10e4\\u10e5\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u10e5\\u10e6\\5\\u0461\\u0231\\2\\u10e6\\u10e7\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u10e7\\u10e8\\5\\u043b\\u021e\\2\\u10e8\\u0334\\3\\2\\2\\2\\u10e9\")\n buf.write(\"\\u10ea\\5\\u045d\\u022f\\2\\u10ea\\u10eb\\5\\u0459\\u022d\\2\\u10eb\")\n buf.write(\"\\u10ec\\5\\u043d\\u021f\\2\\u10ec\\u0336\\3\\2\\2\\2\\u10ed\\u10ee\")\n buf.write(\"\\5\\u045d\\u022f\\2\\u10ee\\u10ef\\5\\u0459\\u022d\\2\\u10ef\\u10f0\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u10f0\\u10f1\\5\\u044f\\u0228\\2\\u10f1\\u10f2\")\n buf.write(\"\\5\\u0441\\u0221\\2\\u10f2\\u0338\\3\\2\\2\\2\\u10f3\\u10f4\\5\\u045f\")\n buf.write(\"\\u0230\\2\\u10f4\\u10f5\\5\\u0435\\u021b\\2\\u10f5\\u10f6\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u10f6\\u10f7\\5\\u0445\\u0223\\2\\u10f7\\u10f8\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u10f8\\u10f9\\5\\u0435\\u021b\\2\\u10f9\\u10fa\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u10fa\\u10fb\\5\\u043d\\u021f\\2\\u10fb\\u033a\\3\\2\\2\")\n buf.write(\"\\2\\u10fc\\u10fd\\5\\u045f\\u0230\\2\\u10fd\\u10fe\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u10fe\\u10ff\\5\\u044b\\u0226\\2\\u10ff\\u1100\\5\\u045d\\u022f\")\n 
buf.write(\"\\2\\u1100\\u1101\\5\\u043d\\u021f\\2\\u1101\\u033c\\3\\2\\2\\2\\u1102\")\n buf.write(\"\\u1103\\5\\u045f\\u0230\\2\\u1103\\u1104\\5\\u0435\\u021b\\2\\u1104\")\n buf.write(\"\\u1105\\5\\u044b\\u0226\\2\\u1105\\u1106\\5\\u045d\\u022f\\2\\u1106\")\n buf.write(\"\\u1107\\5\\u043d\\u021f\\2\\u1107\\u1108\\5\\u0459\\u022d\\2\\u1108\")\n buf.write(\"\\u033e\\3\\2\\2\\2\\u1109\\u110a\\5\\u045f\\u0230\\2\\u110a\\u110b\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u110b\\u110c\\5\\u0457\\u022c\\2\\u110c\\u110d\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u110d\\u110e\\5\\u0443\\u0222\\2\\u110e\\u110f\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u110f\\u1110\\5\\u0457\\u022c\\2\\u1110\\u0340\")\n buf.write(\"\\3\\2\\2\\2\\u1111\\u1112\\5\\u045f\\u0230\\2\\u1112\\u1113\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u1113\\u1114\\5\\u0457\\u022c\\2\\u1114\\u1115\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u1115\\u1116\\5\\u0443\\u0222\\2\\u1116\\u1117\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u1117\\u1118\\5\\u0457\\u022c\\2\\u1118\\u1119\\7\\64\")\n buf.write(\"\\2\\2\\u1119\\u0342\\3\\2\\2\\2\\u111a\\u111b\\5\\u045f\\u0230\\2\\u111b\")\n buf.write(\"\\u111c\\5\\u0435\\u021b\\2\\u111c\\u111d\\5\\u0457\\u022c\\2\\u111d\")\n buf.write(\"\\u111e\\5\\u0445\\u0223\\2\\u111e\\u111f\\5\\u0435\\u021b\\2\\u111f\")\n buf.write(\"\\u1120\\5\\u0437\\u021c\\2\\u1120\\u1121\\5\\u044b\\u0226\\2\\u1121\")\n buf.write(\"\\u1122\\5\\u043d\\u021f\\2\\u1122\\u0344\\3\\2\\2\\2\\u1123\\u1124\")\n buf.write(\"\\5\\u045f\\u0230\\2\\u1124\\u1125\\5\\u0435\\u021b\\2\\u1125\\u1126\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u1126\\u1127\\5\\u0457\\u022c\\2\\u1127\\u1128\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u1128\\u1129\\5\\u0465\\u0233\\2\\u1129\\u0346\")\n buf.write(\"\\3\\2\\2\\2\\u112a\\u112b\\5\\u045f\\u0230\\2\\u112b\\u112c\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u112c\\u112d\\5\\u0457\\u022c\\2\\u112d\\u112e\\5\\u0465\")\n buf.write(\"\\u0233\\2\\u112e\\u112f\\5\\u0445\\u0223\\2\\u112f\\u1130\\5\\u044f\")\n 
buf.write(\"\\u0228\\2\\u1130\\u1131\\5\\u0441\\u0221\\2\\u1131\\u0348\\3\\2\\2\")\n buf.write(\"\\2\\u1132\\u1133\\5\\u045f\\u0230\\2\\u1133\\u1134\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u1134\\u1135\\5\\u0457\\u022c\\2\\u1135\\u1136\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u1136\\u1137\\5\\u0445\\u0223\\2\\u1137\\u1138\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u1138\\u1139\\5\\u044f\\u0228\\2\\u1139\\u034a\\3\\2\\2\\2\\u113a\")\n buf.write(\"\\u113b\\5\\u045f\\u0230\\2\\u113b\\u113c\\5\\u043d\\u021f\\2\\u113c\")\n buf.write(\"\\u113d\\5\\u0457\\u022c\\2\\u113d\\u113e\\5\\u0459\\u022d\\2\\u113e\")\n buf.write(\"\\u113f\\5\\u0445\\u0223\\2\\u113f\\u1140\\5\\u0451\\u0229\\2\\u1140\")\n buf.write(\"\\u1141\\5\\u044f\\u0228\\2\\u1141\\u1142\\5\\u0459\\u022d\\2\\u1142\")\n buf.write(\"\\u034c\\3\\2\\2\\2\\u1143\\u1144\\5\\u0461\\u0231\\2\\u1144\\u1145\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u1145\\u1146\\5\\u0445\\u0223\\2\\u1146\\u1147\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u1147\\u034e\\3\\2\\2\\2\\u1148\\u1149\\5\\u0461\")\n buf.write(\"\\u0231\\2\\u1149\\u114a\\5\\u0435\\u021b\\2\\u114a\\u114b\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u114b\\u114c\\5\\u044f\\u0228\\2\\u114c\\u114d\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u114d\\u114e\\5\\u044f\\u0228\\2\\u114e\\u114f\\5\\u0441\")\n buf.write(\"\\u0221\\2\\u114f\\u0350\\3\\2\\2\\2\\u1150\\u1151\\5\\u0461\\u0231\")\n buf.write(\"\\2\\u1151\\u1152\\5\\u043d\\u021f\\2\\u1152\\u1153\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u1153\\u1154\\5\\u044b\\u0226\\2\\u1154\\u1155\\5\\u043f\\u0220\")\n buf.write(\"\\2\\u1155\\u1156\\5\\u0451\\u0229\\2\\u1156\\u1157\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u1157\\u1158\\5\\u044d\\u0227\\2\\u1158\\u1159\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u1159\\u115a\\5\\u043b\\u021e\\2\\u115a\\u0352\\3\\2\\2\\2\\u115b\")\n buf.write(\"\\u115c\\5\\u0461\\u0231\\2\\u115c\\u115d\\5\\u0443\\u0222\\2\\u115d\")\n buf.write(\"\\u115e\\5\\u043d\\u021f\\2\\u115e\\u115f\\5\\u044f\\u0228\\2\\u115f\")\n 
buf.write(\"\\u0354\\3\\2\\2\\2\\u1160\\u1161\\5\\u0461\\u0231\\2\\u1161\\u1162\")\n buf.write(\"\\5\\u0443\\u0222\\2\\u1162\\u1163\\5\\u043d\\u021f\\2\\u1163\\u1164\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u1164\\u1165\\5\\u043d\\u021f\\2\\u1165\\u1166\")\n buf.write(\"\\5\\u045f\\u0230\\2\\u1166\\u1167\\5\\u043d\\u021f\\2\\u1167\\u1168\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u1168\\u0356\\3\\2\\2\\2\\u1169\\u116a\\5\\u0461\")\n buf.write(\"\\u0231\\2\\u116a\\u116b\\5\\u0443\\u0222\\2\\u116b\\u116c\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u116c\\u116d\\5\\u0457\\u022c\\2\\u116d\\u116e\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u116e\\u0358\\3\\2\\2\\2\\u116f\\u1170\\5\\u0461\\u0231\")\n buf.write(\"\\2\\u1170\\u1171\\5\\u0443\\u0222\\2\\u1171\\u1172\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u1172\\u1173\\5\\u044b\\u0226\\2\\u1173\\u1174\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u1174\\u035a\\3\\2\\2\\2\\u1175\\u1176\\5\\u0461\\u0231\\2\\u1176\")\n buf.write(\"\\u1177\\5\\u0445\\u0223\\2\\u1177\\u1178\\5\\u045b\\u022e\\2\\u1178\")\n buf.write(\"\\u1179\\5\\u0443\\u0222\\2\\u1179\\u035c\\3\\2\\2\\2\\u117a\\u117b\")\n buf.write(\"\\5\\u0461\\u0231\\2\\u117b\\u117c\\5\\u0445\\u0223\\2\\u117c\\u117d\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u117d\\u117e\\5\\u0443\\u0222\\2\\u117e\\u117f\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u117f\\u1180\\5\\u044f\\u0228\\2\\u1180\\u035e\")\n buf.write(\"\\3\\2\\2\\2\\u1181\\u1182\\5\\u0461\\u0231\\2\\u1182\\u1183\\5\\u0451\")\n buf.write(\"\\u0229\\2\\u1183\\u1184\\5\\u0457\\u022c\\2\\u1184\\u1185\\5\\u0449\")\n buf.write(\"\\u0225\\2\\u1185\\u0360\\3\\2\\2\\2\\u1186\\u1187\\5\\u0461\\u0231\")\n buf.write(\"\\2\\u1187\\u1188\\5\\u0457\\u022c\\2\\u1188\\u1189\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u1189\\u118a\\5\\u045b\\u022e\\2\\u118a\\u118b\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u118b\\u0362\\3\\2\\2\\2\\u118c\\u118d\\5\\u0463\\u0232\\2\\u118d\")\n buf.write(\"\\u118e\\5\\u044d\\u0227\\2\\u118e\\u118f\\5\\u044b\\u0226\\2\\u118f\")\n 
buf.write(\"\\u0364\\3\\2\\2\\2\\u1190\\u1191\\5\\u0463\\u0232\\2\\u1191\\u1192\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u1192\\u1193\\5\\u044b\\u0226\\2\\u1193\\u1194\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u1194\\u1195\\5\\u0441\\u0221\\2\\u1195\\u1196\")\n buf.write(\"\\5\\u0441\\u0221\\2\\u1196\\u0366\\3\\2\\2\\2\\u1197\\u1198\\5\\u0463\")\n buf.write(\"\\u0232\\2\\u1198\\u1199\\5\\u044d\\u0227\\2\\u1199\\u119a\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u119a\\u119b\\5\\u0435\\u021b\\2\\u119b\\u119c\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u119c\\u119d\\5\\u045b\\u022e\\2\\u119d\\u119e\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u119e\\u119f\\5\\u0445\\u0223\\2\\u119f\\u11a0\\5\\u0437\")\n buf.write(\"\\u021c\\2\\u11a0\\u11a1\\5\\u045d\\u022f\\2\\u11a1\\u11a2\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u11a2\\u11a3\\5\\u043d\\u021f\\2\\u11a3\\u11a4\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u11a4\\u0368\\3\\2\\2\\2\\u11a5\\u11a6\\5\\u0463\\u0232\")\n buf.write(\"\\2\\u11a6\\u11a7\\5\\u044d\\u0227\\2\\u11a7\\u11a8\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u11a8\\u11a9\\5\\u0439\\u021d\\2\\u11a9\\u11aa\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u11aa\\u11ab\\5\\u0459\\u022d\\2\\u11ab\\u11ac\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u11ac\\u036a\\3\\2\\2\\2\\u11ad\\u11ae\\5\\u0463\\u0232\\2\\u11ae\")\n buf.write(\"\\u11af\\5\\u044d\\u0227\\2\\u11af\\u11b0\\5\\u044b\\u0226\\2\\u11b0\")\n buf.write(\"\\u11b1\\5\\u0439\\u021d\\2\\u11b1\\u11b2\\5\\u0451\\u0229\\2\\u11b2\")\n buf.write(\"\\u11b3\\5\\u044b\\u0226\\2\\u11b3\\u11b4\\5\\u0435\\u021b\\2\\u11b4\")\n buf.write(\"\\u11b5\\5\\u045b\\u022e\\2\\u11b5\\u11b6\\5\\u045b\\u022e\\2\\u11b6\")\n buf.write(\"\\u11b7\\5\\u045f\\u0230\\2\\u11b7\\u11b8\\5\\u0435\\u021b\\2\\u11b8\")\n buf.write(\"\\u11b9\\5\\u044b\\u0226\\2\\u11b9\\u036c\\3\\2\\2\\2\\u11ba\\u11bb\")\n buf.write(\"\\5\\u0463\\u0232\\2\\u11bb\\u11bc\\5\\u044d\\u0227\\2\\u11bc\\u11bd\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u11bd\\u11be\\5\\u043d\\u021f\\2\\u11be\\u11bf\")\n 
buf.write(\"\\5\\u044b\\u0226\\2\\u11bf\\u11c0\\5\\u043d\\u021f\\2\\u11c0\\u11c1\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u11c1\\u11c2\\5\\u043d\\u021f\\2\\u11c2\\u11c3\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u11c3\\u11c4\\5\\u045b\\u022e\\2\\u11c4\\u036e\")\n buf.write(\"\\3\\2\\2\\2\\u11c5\\u11c6\\5\\u0463\\u0232\\2\\u11c6\\u11c7\\5\\u044d\")\n buf.write(\"\\u0227\\2\\u11c7\\u11c8\\5\\u044b\\u0226\\2\\u11c8\\u11c9\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u11c9\\u11ca\\5\\u0463\\u0232\\2\\u11ca\\u11cb\\5\\u0445\")\n buf.write(\"\\u0223\\2\\u11cb\\u11cc\\5\\u0459\\u022d\\2\\u11cc\\u11cd\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u11cd\\u11ce\\5\\u0459\\u022d\\2\\u11ce\\u0370\\3\\2\\2\")\n buf.write(\"\\2\\u11cf\\u11d0\\5\\u0463\\u0232\\2\\u11d0\\u11d1\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u11d1\\u11d2\\5\\u044b\\u0226\\2\\u11d2\\u11d3\\5\\u043f\\u0220\")\n buf.write(\"\\2\\u11d3\\u11d4\\5\\u0451\\u0229\\2\\u11d4\\u11d5\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u11d5\\u11d6\\5\\u043d\\u021f\\2\\u11d6\\u11d7\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u11d7\\u11d8\\5\\u045b\\u022e\\2\\u11d8\\u0372\\3\\2\\2\\2\\u11d9\")\n buf.write(\"\\u11da\\5\\u0463\\u0232\\2\\u11da\\u11db\\5\\u044d\\u0227\\2\\u11db\")\n buf.write(\"\\u11dc\\5\\u044b\\u0226\\2\\u11dc\\u11dd\\5\\u044f\\u0228\\2\\u11dd\")\n buf.write(\"\\u11de\\5\\u0435\\u021b\\2\\u11de\\u11df\\5\\u044d\\u0227\\2\\u11df\")\n buf.write(\"\\u11e0\\5\\u043d\\u021f\\2\\u11e0\\u11e1\\5\\u0459\\u022d\\2\\u11e1\")\n buf.write(\"\\u11e2\\5\\u0453\\u022a\\2\\u11e2\\u11e3\\5\\u0435\\u021b\\2\\u11e3\")\n buf.write(\"\\u11e4\\5\\u0439\\u021d\\2\\u11e4\\u11e5\\5\\u043d\\u021f\\2\\u11e5\")\n buf.write(\"\\u11e6\\5\\u0459\\u022d\\2\\u11e6\\u0374\\3\\2\\2\\2\\u11e7\\u11e8\")\n buf.write(\"\\5\\u0463\\u0232\\2\\u11e8\\u11e9\\5\\u044d\\u0227\\2\\u11e9\\u11ea\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u11ea\\u11eb\\5\\u0453\\u022a\\2\\u11eb\\u11ec\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u11ec\\u11ed\\5\\u0457\\u022c\\2\\u11ed\\u11ee\")\n 
buf.write(\"\\5\\u0459\\u022d\\2\\u11ee\\u11ef\\5\\u043d\\u021f\\2\\u11ef\\u0376\")\n buf.write(\"\\3\\2\\2\\2\\u11f0\\u11f1\\5\\u0463\\u0232\\2\\u11f1\\u11f2\\5\\u044d\")\n buf.write(\"\\u0227\\2\\u11f2\\u11f3\\5\\u044b\\u0226\\2\\u11f3\\u11f4\\5\\u0453\")\n buf.write(\"\\u022a\\2\\u11f4\\u11f5\\5\\u0445\\u0223\\2\\u11f5\\u0378\\3\\2\\2\")\n buf.write(\"\\2\\u11f6\\u11f7\\5\\u0463\\u0232\\2\\u11f7\\u11f8\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u11f8\\u11f9\\5\\u044b\\u0226\\2\\u11f9\\u11fa\\5\\u0455\\u022b\")\n buf.write(\"\\2\\u11fa\\u11fb\\5\\u045d\\u022f\\2\\u11fb\\u11fc\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u11fc\\u11fd\\5\\u0457\\u022c\\2\\u11fd\\u11fe\\5\\u0465\\u0233\")\n buf.write(\"\\2\\u11fe\\u037a\\3\\2\\2\\2\\u11ff\\u1200\\5\\u0463\\u0232\\2\\u1200\")\n buf.write(\"\\u1201\\5\\u044d\\u0227\\2\\u1201\\u1202\\5\\u044b\\u0226\\2\\u1202\")\n buf.write(\"\\u1203\\5\\u0457\\u022c\\2\\u1203\\u1204\\5\\u0451\\u0229\\2\\u1204\")\n buf.write(\"\\u1205\\5\\u0451\\u0229\\2\\u1205\\u1206\\5\\u045b\\u022e\\2\\u1206\")\n buf.write(\"\\u037c\\3\\2\\2\\2\\u1207\\u1208\\5\\u0463\\u0232\\2\\u1208\\u1209\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u1209\\u120a\\5\\u044b\\u0226\\2\\u120a\\u120b\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u120b\\u120c\\5\\u043d\\u021f\\2\\u120c\\u120d\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u120d\\u120e\\5\\u0445\\u0223\\2\\u120e\\u120f\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u120f\\u1210\\5\\u044b\\u0226\\2\\u1210\\u1211\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u1211\\u1212\\5\\u0467\\u0234\\2\\u1212\\u1213\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u1213\\u037e\\3\\2\\2\\2\\u1214\\u1215\\5\\u0463\")\n buf.write(\"\\u0232\\2\\u1215\\u1216\\5\\u044d\\u0227\\2\\u1216\\u1217\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u1217\\u1218\\5\\u045b\\u022e\\2\\u1218\\u1219\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u1219\\u121a\\5\\u0437\\u021c\\2\\u121a\\u121b\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u121b\\u121c\\5\\u043d\\u021f\\2\\u121c\\u0380\\3\\2\\2\")\n 
buf.write(\"\\2\\u121d\\u121e\\5\\u0465\\u0233\\2\\u121e\\u121f\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u121f\\u1220\\5\\u0435\\u021b\\2\\u1220\\u1221\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u1221\\u0382\\3\\2\\2\\2\\u1222\\u1223\\5\\u0465\\u0233\\2\\u1223\")\n buf.write(\"\\u1224\\5\\u043d\\u021f\\2\\u1224\\u1225\\5\\u0459\\u022d\\2\\u1225\")\n buf.write(\"\\u0384\\3\\2\\2\\2\\u1226\\u1227\\5\\u0465\\u0233\\2\\u1227\\u1228\")\n buf.write(\"\\5\\u044d\\u0227\\2\\u1228\\u1229\\5\\u0445\\u0223\\2\\u1229\\u122a\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u122a\\u122b\\5\\u045b\\u022e\\2\\u122b\\u122c\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u122c\\u122d\\5\\u0457\\u022c\\2\\u122d\\u122e\")\n buf.write(\"\\5\\u045f\\u0230\\2\\u122e\\u122f\\5\\u0435\\u021b\\2\\u122f\\u1230\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u1230\\u1231\\7a\\2\\2\\u1231\\u1232\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u1232\\u1233\\5\\u044f\\u0228\\2\\u1233\\u1234\\5\\u0439\")\n buf.write(\"\\u021d\\2\\u1234\\u1235\\5\\u0451\\u0229\\2\\u1235\\u1236\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u1236\\u1237\\5\\u0459\\u022d\\2\\u1237\\u1238\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u1238\\u1239\\5\\u0457\\u022c\\2\\u1239\\u123a\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u123a\\u123b\\5\\u0445\\u0223\\2\\u123b\\u123c\\5\\u044f\")\n buf.write(\"\\u0228\\2\\u123c\\u123d\\5\\u043d\\u021f\\2\\u123d\\u123e\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u123e\\u0386\\3\\2\\2\\2\\u123f\\u1240\\5\\u0467\\u0234\")\n buf.write(\"\\2\\u1240\\u1241\\5\\u0451\\u0229\\2\\u1241\\u1242\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u1242\\u1243\\5\\u043d\\u021f\\2\\u1243\\u0388\\3\\2\\2\\2\\u1244\")\n buf.write(\"\\u1245\\5\\u0453\\u022a\\2\\u1245\\u1246\\5\\u0457\\u022c\\2\\u1246\")\n buf.write(\"\\u1247\\5\\u043d\\u021f\\2\\u1247\\u1248\\5\\u043b\\u021e\\2\\u1248\")\n buf.write(\"\\u1249\\5\\u0445\\u0223\\2\\u1249\\u124a\\5\\u0439\\u021d\\2\\u124a\")\n buf.write(\"\\u124b\\5\\u045b\\u022e\\2\\u124b\\u124c\\5\\u0445\\u0223\\2\\u124c\")\n 
buf.write(\"\\u124d\\5\\u0451\\u0229\\2\\u124d\\u124e\\5\\u044f\\u0228\\2\\u124e\")\n buf.write(\"\\u038a\\3\\2\\2\\2\\u124f\\u1250\\5\\u0453\\u022a\\2\\u1250\\u1251\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u1251\\u1252\\5\\u043d\\u021f\\2\\u1252\\u1253\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u1253\\u1254\\5\\u0445\\u0223\\2\\u1254\\u1255\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u1255\\u1256\\5\\u045b\\u022e\\2\\u1256\\u1257\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u1257\\u1258\\5\\u0451\\u0229\\2\\u1258\\u1259\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u1259\\u125a\\7a\\2\\2\\u125a\\u125b\\5\\u0437\")\n buf.write(\"\\u021c\\2\\u125b\\u125c\\5\\u0451\\u0229\\2\\u125c\\u125d\\5\\u045d\")\n buf.write(\"\\u022f\\2\\u125d\\u125e\\5\\u044f\\u0228\\2\\u125e\\u125f\\5\\u043b\")\n buf.write(\"\\u021e\\2\\u125f\\u1260\\5\\u0459\\u022d\\2\\u1260\\u038c\\3\\2\\2\")\n buf.write(\"\\2\\u1261\\u1262\\5\\u0453\\u022a\\2\\u1262\\u1263\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u1263\\u1264\\5\\u043d\\u021f\\2\\u1264\\u1265\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u1265\\u1266\\5\\u0445\\u0223\\2\\u1266\\u1267\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u1267\\u1268\\5\\u045b\\u022e\\2\\u1268\\u1269\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u1269\\u126a\\5\\u0451\\u0229\\2\\u126a\\u126b\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u126b\\u126c\\7a\\2\\2\\u126c\\u126d\\5\\u0439\\u021d\\2\\u126d\")\n buf.write(\"\\u126e\\5\\u0451\\u0229\\2\\u126e\\u126f\\5\\u0459\\u022d\\2\\u126f\")\n buf.write(\"\\u1270\\5\\u045b\\u022e\\2\\u1270\\u038e\\3\\2\\2\\2\\u1271\\u1272\")\n buf.write(\"\\5\\u0453\\u022a\\2\\u1272\\u1273\\5\\u0457\\u022c\\2\\u1273\\u1274\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u1274\\u1275\\5\\u043b\\u021e\\2\\u1275\\u1276\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u1276\\u1277\\5\\u0439\\u021d\\2\\u1277\\u1278\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u1278\\u1279\\5\\u0445\\u0223\\2\\u1279\\u127a\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u127a\\u127b\\5\\u044f\\u0228\\2\\u127b\\u127c\")\n 
buf.write(\"\\7a\\2\\2\\u127c\\u127d\\5\\u043b\\u021e\\2\\u127d\\u127e\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u127e\\u127f\\5\\u045b\\u022e\\2\\u127f\\u1280\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u1280\\u1281\\5\\u0445\\u0223\\2\\u1281\\u1282\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u1282\\u1283\\5\\u0459\\u022d\\2\\u1283\\u0390\\3\\2\\2\")\n buf.write(\"\\2\\u1284\\u1285\\5\\u0453\\u022a\\2\\u1285\\u1286\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u1286\\u1287\\5\\u043d\\u021f\\2\\u1287\\u1288\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u1288\\u1289\\5\\u0445\\u0223\\2\\u1289\\u128a\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u128a\\u128b\\5\\u045b\\u022e\\2\\u128b\\u128c\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u128c\\u128d\\5\\u0451\\u0229\\2\\u128d\\u128e\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u128e\\u128f\\7a\\2\\2\\u128f\\u1290\\5\\u0453\\u022a\\2\\u1290\")\n buf.write(\"\\u1291\\5\\u0457\\u022c\\2\\u1291\\u1292\\5\\u0451\\u0229\\2\\u1292\")\n buf.write(\"\\u1293\\5\\u0437\\u021c\\2\\u1293\\u1294\\5\\u0435\\u021b\\2\\u1294\")\n buf.write(\"\\u1295\\5\\u0437\\u021c\\2\\u1295\\u1296\\5\\u0445\\u0223\\2\\u1296\")\n buf.write(\"\\u1297\\5\\u044b\\u0226\\2\\u1297\\u1298\\5\\u0445\\u0223\\2\\u1298\")\n buf.write(\"\\u1299\\5\\u045b\\u022e\\2\\u1299\\u129a\\5\\u0465\\u0233\\2\\u129a\")\n buf.write(\"\\u0392\\3\\2\\2\\2\\u129b\\u129c\\5\\u0453\\u022a\\2\\u129c\\u129d\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u129d\\u129e\\5\\u043d\\u021f\\2\\u129e\\u129f\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u129f\\u12a0\\5\\u0445\\u0223\\2\\u12a0\\u12a1\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u12a1\\u12a2\\5\\u045b\\u022e\\2\\u12a2\\u12a3\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u12a3\\u12a4\\5\\u0451\\u0229\\2\\u12a4\\u12a5\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u12a5\\u12a6\\7a\\2\\2\\u12a6\\u12a7\\5\\u0459\")\n buf.write(\"\\u022d\\2\\u12a7\\u12a8\\5\\u043d\\u021f\\2\\u12a8\\u12a9\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u12a9\\u0394\\3\\2\\2\\2\\u12aa\\u12ab\\5\\u0439\\u021d\")\n 
buf.write(\"\\2\\u12ab\\u12ac\\5\\u045d\\u022f\\2\\u12ac\\u12ad\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u12ad\\u12ae\\5\\u043d\\u021f\\2\\u12ae\\u12af\\7a\\2\\2\\u12af\")\n buf.write(\"\\u12b0\\5\\u043b\\u021e\\2\\u12b0\\u12b1\\5\\u0445\\u0223\\2\\u12b1\")\n buf.write(\"\\u12b2\\5\\u0459\\u022d\\2\\u12b2\\u12b3\\5\\u045b\\u022e\\2\\u12b3\")\n buf.write(\"\\u0396\\3\\2\\2\\2\\u12b4\\u12b5\\5\\u043b\\u021e\\2\\u12b5\\u12b6\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u12b6\\u12b7\\5\\u044f\\u0228\\2\\u12b7\\u12b8\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u12b8\\u12b9\\5\\u043d\\u021f\\2\\u12b9\\u12ba\")\n buf.write(\"\\7a\\2\\2\\u12ba\\u12bb\\5\\u0457\\u022c\\2\\u12bb\\u12bc\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u12bc\\u12bd\\5\\u044f\\u0228\\2\\u12bd\\u12be\\5\\u0449\")\n buf.write(\"\\u0225\\2\\u12be\\u0398\\3\\2\\2\\2\\u12bf\\u12c0\\5\\u044b\\u0226\")\n buf.write(\"\\2\\u12c0\\u12c1\\5\\u0445\\u0223\\2\\u12c1\\u12c2\\5\\u0459\\u022d\")\n buf.write(\"\\2\\u12c2\\u12c3\\5\\u045b\\u022e\\2\\u12c3\\u12c4\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u12c4\\u12c5\\5\\u0441\\u0221\\2\\u12c5\\u12c6\\5\\u0441\\u0221\")\n buf.write(\"\\2\\u12c6\\u039a\\3\\2\\2\\2\\u12c7\\u12c8\\5\\u0453\\u022a\\2\\u12c8\")\n buf.write(\"\\u12c9\\5\\u043d\\u021f\\2\\u12c9\\u12ca\\5\\u0457\\u022c\\2\\u12ca\")\n buf.write(\"\\u12cb\\5\\u0439\\u021d\\2\\u12cb\\u12cc\\5\\u043d\\u021f\\2\\u12cc\")\n buf.write(\"\\u12cd\\5\\u044f\\u0228\\2\\u12cd\\u12ce\\5\\u045b\\u022e\\2\\u12ce\")\n buf.write(\"\\u12cf\\7a\\2\\2\\u12cf\\u12d0\\5\\u0457\\u022c\\2\\u12d0\\u12d1\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u12d1\\u12d2\\5\\u044f\\u0228\\2\\u12d2\\u12d3\")\n buf.write(\"\\5\\u0449\\u0225\\2\\u12d3\\u039c\\3\\2\\2\\2\\u12d4\\u12d5\\5\\u0453\")\n buf.write(\"\\u022a\\2\\u12d5\\u12d6\\5\\u043d\\u021f\\2\\u12d6\\u12d7\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u12d7\\u12d8\\5\\u0439\\u021d\\2\\u12d8\\u12d9\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u12d9\\u12da\\5\\u044f\\u0228\\2\\u12da\\u12db\\5\\u045b\")\n 
buf.write(\"\\u022e\\2\\u12db\\u12dc\\5\\u0445\\u0223\\2\\u12dc\\u12dd\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u12dd\\u12de\\5\\u043d\\u021f\\2\\u12de\\u12df\\7a\\2\")\n buf.write(\"\\2\\u12df\\u12e0\\5\\u0439\\u021d\\2\\u12e0\\u12e1\\5\\u0451\\u0229\")\n buf.write(\"\\2\\u12e1\\u12e2\\5\\u044f\\u0228\\2\\u12e2\\u12e3\\5\\u045b\\u022e\")\n buf.write(\"\\2\\u12e3\\u039e\\3\\2\\2\\2\\u12e4\\u12e5\\5\\u0453\\u022a\\2\\u12e5\")\n buf.write(\"\\u12e6\\5\\u043d\\u021f\\2\\u12e6\\u12e7\\5\\u0457\\u022c\\2\\u12e7\")\n buf.write(\"\\u12e8\\5\\u0439\\u021d\\2\\u12e8\\u12e9\\5\\u043d\\u021f\\2\\u12e9\")\n buf.write(\"\\u12ea\\5\\u044f\\u0228\\2\\u12ea\\u12eb\\5\\u045b\\u022e\\2\\u12eb\")\n buf.write(\"\\u12ec\\5\\u0445\\u0223\\2\\u12ec\\u12ed\\5\\u044b\\u0226\\2\\u12ed\")\n buf.write(\"\\u12ee\\5\\u043d\\u021f\\2\\u12ee\\u12ef\\7a\\2\\2\\u12ef\\u12f0\")\n buf.write(\"\\5\\u043b\\u021e\\2\\u12f0\\u12f1\\5\\u0445\\u0223\\2\\u12f1\\u12f2\")\n buf.write(\"\\5\\u0459\\u022d\\2\\u12f2\\u12f3\\5\\u0439\\u021d\\2\\u12f3\\u03a0\")\n buf.write(\"\\3\\2\\2\\2\\u12f4\\u12f5\\5\\u0457\\u022c\\2\\u12f5\\u12f6\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u12f6\\u12f7\\5\\u044f\\u0228\\2\\u12f7\\u12f8\\5\\u0449\")\n buf.write(\"\\u0225\\2\\u12f8\\u03a2\\3\\2\\2\\2\\u12f9\\u12fa\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u12fa\\u12fb\\5\\u045f\\u0230\\2\\u12fb\\u12fc\\5\\u0441\\u0221\")\n buf.write(\"\\2\\u12fc\\u03a4\\3\\2\\2\\2\\u12fd\\u12fe\\5\\u0439\\u021d\\2\\u12fe\")\n buf.write(\"\\u12ff\\5\\u0451\\u0229\\2\\u12ff\\u1300\\5\\u0457\\u022c\\2\\u1300\")\n buf.write(\"\\u1301\\5\\u0457\\u022c\\2\\u1301\\u03a6\\3\\2\\2\\2\\u1302\\u1303\")\n buf.write(\"\\5\\u044b\\u0226\\2\\u1303\\u1304\\5\\u0435\\u021b\\2\\u1304\\u1305\")\n buf.write(\"\\5\\u0441\\u0221\\2\\u1305\\u03a8\\3\\2\\2\\2\\u1306\\u1307\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u1307\\u1308\\5\\u043d\\u021f\\2\\u1308\\u1309\\5\\u0435\")\n buf.write(\"\\u021b\\2\\u1309\\u130a\\5\\u043b\\u021e\\2\\u130a\\u03aa\\3\\2\\2\")\n 
buf.write(\"\\2\\u130b\\u130c\\5\\u044d\\u0227\\2\\u130c\\u130d\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u130d\\u130e\\5\\u0463\\u0232\\2\\u130e\\u03ac\\3\\2\\2\\2\\u130f\")\n buf.write(\"\\u1310\\5\\u044d\\u0227\\2\\u1310\\u1311\\5\\u043d\\u021f\\2\\u1311\")\n buf.write(\"\\u1312\\5\\u043b\\u021e\\2\\u1312\\u1313\\5\\u0445\\u0223\\2\\u1313\")\n buf.write(\"\\u1314\\5\\u0435\\u021b\\2\\u1314\\u1315\\5\\u044f\\u0228\\2\\u1315\")\n buf.write(\"\\u03ae\\3\\2\\2\\2\\u1316\\u1317\\5\\u044d\\u0227\\2\\u1317\\u1318\")\n buf.write(\"\\5\\u0445\\u0223\\2\\u1318\\u1319\\5\\u044f\\u0228\\2\\u1319\\u03b0\")\n buf.write(\"\\3\\2\\2\\2\\u131a\\u131b\\5\\u044f\\u0228\\2\\u131b\\u131c\\5\\u045b\")\n buf.write(\"\\u022e\\2\\u131c\\u131d\\5\\u0445\\u0223\\2\\u131d\\u131e\\5\\u044b\")\n buf.write(\"\\u0226\\2\\u131e\\u131f\\5\\u043d\\u021f\\2\\u131f\\u03b2\\3\\2\\2\")\n buf.write(\"\\2\\u1320\\u1321\\5\\u0457\\u022c\\2\\u1321\\u1322\\5\\u0435\\u021b\")\n buf.write(\"\\2\\u1322\\u1323\\5\\u045b\\u022e\\2\\u1323\\u1324\\5\\u0445\\u0223\")\n buf.write(\"\\2\\u1324\\u1325\\5\\u0451\\u0229\\2\\u1325\\u1326\\7a\\2\\2\\u1326\")\n buf.write(\"\\u1327\\5\\u045b\\u022e\\2\\u1327\\u1328\\5\\u0451\\u0229\\2\\u1328\")\n buf.write(\"\\u1329\\7a\\2\\2\\u1329\\u132a\\5\\u0457\\u022c\\2\\u132a\\u132b\")\n buf.write(\"\\5\\u043d\\u021f\\2\\u132b\\u132c\\5\\u0453\\u022a\\2\\u132c\\u132d\")\n buf.write(\"\\5\\u0451\\u0229\\2\\u132d\\u132e\\5\\u0457\\u022c\\2\\u132e\\u132f\")\n buf.write(\"\\5\\u045b\\u022e\\2\\u132f\\u03b4\\3\\2\\2\\2\\u1330\\u1331\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u1331\\u1332\\5\\u0451\\u0229\\2\\u1332\\u1333\\5\\u0461\")\n buf.write(\"\\u0231\\2\\u1333\\u1334\\7a\\2\\2\\u1334\\u1335\\5\\u044f\\u0228\")\n buf.write(\"\\2\\u1335\\u1336\\5\\u045d\\u022f\\2\\u1336\\u1337\\5\\u044d\\u0227\")\n buf.write(\"\\2\\u1337\\u1338\\5\\u0437\\u021c\\2\\u1338\\u1339\\5\\u043d\\u021f\")\n buf.write(\"\\2\\u1339\\u133a\\5\\u0457\\u022c\\2\\u133a\\u03b6\\3\\2\\2\\2\\u133b\")\n 
buf.write(\"\\u133c\\5\\u0459\\u022d\\2\\u133c\\u133d\\5\\u045d\\u022f\\2\\u133d\")\n buf.write(\"\\u133e\\5\\u044d\\u0227\\2\\u133e\\u03b8\\3\\2\\2\\2\\u133f\\u1340\")\n buf.write(\"\\5\\u045f\\u0230\\2\\u1340\\u1341\\5\\u0435\\u021b\\2\\u1341\\u1342\")\n buf.write(\"\\5\\u0457\\u022c\\2\\u1342\\u1343\\5\\u0445\\u0223\\2\\u1343\\u1344\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u1344\\u1345\\5\\u044f\\u0228\\2\\u1345\\u1346\")\n buf.write(\"\\5\\u0439\\u021d\\2\\u1346\\u1347\\5\\u043d\\u021f\\2\\u1347\\u03ba\")\n buf.write(\"\\3\\2\\2\\2\\u1348\\u1349\\5\\u0457\\u022c\\2\\u1349\\u134a\\5\\u043d\")\n buf.write(\"\\u021f\\2\\u134a\\u134b\\5\\u0441\\u0221\\2\\u134b\\u134c\\5\\u0457\")\n buf.write(\"\\u022c\\2\\u134c\\u134d\\7a\\2\\2\\u134d\\u03bc\\3\\2\\2\\2\\u134e\")\n buf.write(\"\\u134f\\5\\u0459\\u022d\\2\\u134f\\u1350\\5\\u045b\\u022e\\2\\u1350\")\n buf.write(\"\\u1351\\5\\u043b\\u021e\\2\\u1351\\u1352\\5\\u043b\\u021e\\2\\u1352\")\n buf.write(\"\\u1353\\5\\u043d\\u021f\\2\\u1353\\u1354\\5\\u045f\\u0230\\2\\u1354\")\n buf.write(\"\\u03be\\3\\2\\2\\2\\u1355\\u1356\\5\\u045f\\u0230\\2\\u1356\\u1357\")\n buf.write(\"\\5\\u0435\\u021b\\2\\u1357\\u1358\\5\\u0457\\u022c\\2\\u1358\\u1359\")\n buf.write(\"\\7a\\2\\2\\u1359\\u03c0\\3\\2\\2\\2\\u135a\\u135b\\5\\u0439\\u021d\")\n buf.write(\"\\2\\u135b\\u135c\\5\\u0451\\u0229\\2\\u135c\\u135d\\5\\u045f\\u0230\")\n buf.write(\"\\2\\u135d\\u135e\\5\\u0435\\u021b\\2\\u135e\\u135f\\5\\u0457\\u022c\")\n buf.write(\"\\2\\u135f\\u1360\\7a\\2\\2\\u1360\\u03c2\\3\\2\\2\\2\\u1361\\u1362\")\n buf.write(\"\\5\\u044f\\u0228\\2\\u1362\\u1369\\7)\\2\\2\\u1363\\u1368\\n\\2\\2\")\n buf.write(\"\\2\\u1364\\u1365\\7)\\2\\2\\u1365\\u1368\\7)\\2\\2\\u1366\\u1368\\5\")\n buf.write(\"\\u042d\\u0217\\2\\u1367\\u1363\\3\\2\\2\\2\\u1367\\u1364\\3\\2\\2\\2\")\n buf.write(\"\\u1367\\u1366\\3\\2\\2\\2\\u1368\\u136b\\3\\2\\2\\2\\u1369\\u1367\\3\")\n buf.write(\"\\2\\2\\2\\u1369\\u136a\\3\\2\\2\\2\\u136a\\u136c\\3\\2\\2\\2\\u136b\\u1369\")\n 
buf.write(\"\\3\\2\\2\\2\\u136c\\u136d\\7)\\2\\2\\u136d\\u03c4\\3\\2\\2\\2\\u136e\")\n buf.write(\"\\u1377\\5\\u0437\\u021c\\2\\u136f\\u1373\\7)\\2\\2\\u1370\\u1372\")\n buf.write(\"\\4\\62\\63\\2\\u1371\\u1370\\3\\2\\2\\2\\u1372\\u1375\\3\\2\\2\\2\\u1373\")\n buf.write(\"\\u1371\\3\\2\\2\\2\\u1373\\u1374\\3\\2\\2\\2\\u1374\\u1376\\3\\2\\2\\2\")\n buf.write(\"\\u1375\\u1373\\3\\2\\2\\2\\u1376\\u1378\\7)\\2\\2\\u1377\\u136f\\3\")\n buf.write(\"\\2\\2\\2\\u1378\\u1379\\3\\2\\2\\2\\u1379\\u1377\\3\\2\\2\\2\\u1379\\u137a\")\n buf.write(\"\\3\\2\\2\\2\\u137a\\u03c6\\3\\2\\2\\2\\u137b\\u1384\\5\\u0463\\u0232\")\n buf.write(\"\\2\\u137c\\u1380\\7)\\2\\2\\u137d\\u137f\\t\\3\\2\\2\\u137e\\u137d\")\n buf.write(\"\\3\\2\\2\\2\\u137f\\u1382\\3\\2\\2\\2\\u1380\\u137e\\3\\2\\2\\2\\u1380\")\n buf.write(\"\\u1381\\3\\2\\2\\2\\u1381\\u1383\\3\\2\\2\\2\\u1382\\u1380\\3\\2\\2\\2\")\n buf.write(\"\\u1383\\u1385\\7)\\2\\2\\u1384\\u137c\\3\\2\\2\\2\\u1385\\u1386\\3\")\n buf.write(\"\\2\\2\\2\\u1386\\u1384\\3\\2\\2\\2\\u1386\\u1387\\3\\2\\2\\2\\u1387\\u03c8\")\n buf.write(\"\\3\\2\\2\\2\\u1388\\u1389\\7\\60\\2\\2\\u1389\\u138a\\7\\60\\2\\2\\u138a\")\n buf.write(\"\\u03ca\\3\\2\\2\\2\\u138b\\u138c\\7\\60\\2\\2\\u138c\\u03cc\\3\\2\\2\")\n buf.write(\"\\2\\u138d\\u138e\\5\\u0423\\u0212\\2\\u138e\\u03ce\\3\\2\\2\\2\\u138f\")\n buf.write(\"\\u1398\\5\\u0425\\u0213\\2\\u1390\\u1392\\t\\4\\2\\2\\u1391\\u1393\")\n buf.write(\"\\t\\5\\2\\2\\u1392\\u1391\\3\\2\\2\\2\\u1392\\u1393\\3\\2\\2\\2\\u1393\")\n buf.write(\"\\u1396\\3\\2\\2\\2\\u1394\\u1397\\5\\u0425\\u0213\\2\\u1395\\u1397\")\n buf.write(\"\\5\\u0423\\u0212\\2\\u1396\\u1394\\3\\2\\2\\2\\u1396\\u1395\\3\\2\\2\")\n buf.write(\"\\2\\u1397\\u1399\\3\\2\\2\\2\\u1398\\u1390\\3\\2\\2\\2\\u1398\\u1399\")\n buf.write(\"\\3\\2\\2\\2\\u1399\\u139c\\3\\2\\2\\2\\u139a\\u139d\\5\\u043b\\u021e\")\n buf.write(\"\\2\\u139b\\u139d\\5\\u043f\\u0220\\2\\u139c\\u139a\\3\\2\\2\\2\\u139c\")\n 
buf.write(\"\\u139b\\3\\2\\2\\2\\u139c\\u139d\\3\\2\\2\\2\\u139d\\u03d0\\3\\2\\2\\2\")\n buf.write(\"\\u139e\\u13a5\\7)\\2\\2\\u139f\\u13a4\\n\\2\\2\\2\\u13a0\\u13a1\\7\")\n buf.write(\")\\2\\2\\u13a1\\u13a4\\7)\\2\\2\\u13a2\\u13a4\\5\\u042d\\u0217\\2\\u13a3\")\n buf.write(\"\\u139f\\3\\2\\2\\2\\u13a3\\u13a0\\3\\2\\2\\2\\u13a3\\u13a2\\3\\2\\2\\2\")\n buf.write(\"\\u13a4\\u13a7\\3\\2\\2\\2\\u13a5\\u13a3\\3\\2\\2\\2\\u13a5\\u13a6\\3\")\n buf.write(\"\\2\\2\\2\\u13a6\\u13a8\\3\\2\\2\\2\\u13a7\\u13a5\\3\\2\\2\\2\\u13a8\\u13a9\")\n buf.write(\"\\7)\\2\\2\\u13a9\\u03d2\\3\\2\\2\\2\\u13aa\\u13af\\5\\u0455\\u022b\")\n buf.write(\"\\2\\u13ab\\u13b0\\5\\u03d7\\u01ec\\2\\u13ac\\u13b0\\5\\u03d9\\u01ed\")\n buf.write(\"\\2\\u13ad\\u13b0\\5\\u03db\\u01ee\\2\\u13ae\\u13b0\\5\\u03dd\\u01ef\")\n buf.write(\"\\2\\u13af\\u13ab\\3\\2\\2\\2\\u13af\\u13ac\\3\\2\\2\\2\\u13af\\u13ad\")\n buf.write(\"\\3\\2\\2\\2\\u13af\\u13ae\\3\\2\\2\\2\\u13b0\\u13b1\\3\\2\\2\\2\\u13b1\")\n buf.write(\"\\u13b2\\b\\u01ea\\2\\2\\u13b2\\u03d4\\3\\2\\2\\2\\u13b3\\u13b4\\7)\")\n buf.write(\"\\2\\2\\u13b4\\u03d6\\3\\2\\2\\2\\u13b5\\u13b6\\5\\u03d5\\u01eb\\2\\u13b6\")\n buf.write(\"\\u13ba\\7>\\2\\2\\u13b7\\u13b9\\13\\2\\2\\2\\u13b8\\u13b7\\3\\2\\2\\2\")\n buf.write(\"\\u13b9\\u13bc\\3\\2\\2\\2\\u13ba\\u13bb\\3\\2\\2\\2\\u13ba\\u13b8\\3\")\n buf.write(\"\\2\\2\\2\\u13bb\\u13bd\\3\\2\\2\\2\\u13bc\\u13ba\\3\\2\\2\\2\\u13bd\\u13be\")\n buf.write(\"\\7@\\2\\2\\u13be\\u13bf\\5\\u03d5\\u01eb\\2\\u13bf\\u03d8\\3\\2\\2\")\n buf.write(\"\\2\\u13c0\\u13c1\\5\\u03d5\\u01eb\\2\\u13c1\\u13c5\\7}\\2\\2\\u13c2\")\n buf.write(\"\\u13c4\\13\\2\\2\\2\\u13c3\\u13c2\\3\\2\\2\\2\\u13c4\\u13c7\\3\\2\\2\")\n buf.write(\"\\2\\u13c5\\u13c6\\3\\2\\2\\2\\u13c5\\u13c3\\3\\2\\2\\2\\u13c6\\u13c8\")\n buf.write(\"\\3\\2\\2\\2\\u13c7\\u13c5\\3\\2\\2\\2\\u13c8\\u13c9\\7\\177\\2\\2\\u13c9\")\n buf.write(\"\\u13ca\\5\\u03d5\\u01eb\\2\\u13ca\\u03da\\3\\2\\2\\2\\u13cb\\u13cc\")\n 
buf.write(\"\\5\\u03d5\\u01eb\\2\\u13cc\\u13d0\\7]\\2\\2\\u13cd\\u13cf\\13\\2\\2\")\n buf.write(\"\\2\\u13ce\\u13cd\\3\\2\\2\\2\\u13cf\\u13d2\\3\\2\\2\\2\\u13d0\\u13d1\")\n buf.write(\"\\3\\2\\2\\2\\u13d0\\u13ce\\3\\2\\2\\2\\u13d1\\u13d3\\3\\2\\2\\2\\u13d2\")\n buf.write(\"\\u13d0\\3\\2\\2\\2\\u13d3\\u13d4\\7_\\2\\2\\u13d4\\u13d5\\5\\u03d5\")\n buf.write(\"\\u01eb\\2\\u13d5\\u03dc\\3\\2\\2\\2\\u13d6\\u13d7\\5\\u03d5\\u01eb\")\n buf.write(\"\\2\\u13d7\\u13db\\7*\\2\\2\\u13d8\\u13da\\13\\2\\2\\2\\u13d9\\u13d8\")\n buf.write(\"\\3\\2\\2\\2\\u13da\\u13dd\\3\\2\\2\\2\\u13db\\u13dc\\3\\2\\2\\2\\u13db\")\n buf.write(\"\\u13d9\\3\\2\\2\\2\\u13dc\\u13de\\3\\2\\2\\2\\u13dd\\u13db\\3\\2\\2\\2\")\n buf.write(\"\\u13de\\u13df\\7+\\2\\2\\u13df\\u13e0\\5\\u03d5\\u01eb\\2\\u13e0\")\n buf.write(\"\\u03de\\3\\2\\2\\2\\u13e1\\u13e2\\n\\6\\2\\2\\u13e2\\u03e0\\3\\2\\2\\2\")\n buf.write(\"\\u13e3\\u13e7\\7$\\2\\2\\u13e4\\u13e8\\n\\7\\2\\2\\u13e5\\u13e6\\7\")\n buf.write(\"$\\2\\2\\u13e6\\u13e8\\7$\\2\\2\\u13e7\\u13e4\\3\\2\\2\\2\\u13e7\\u13e5\")\n buf.write(\"\\3\\2\\2\\2\\u13e8\\u13e9\\3\\2\\2\\2\\u13e9\\u13e7\\3\\2\\2\\2\\u13e9\")\n buf.write(\"\\u13ea\\3\\2\\2\\2\\u13ea\\u13eb\\3\\2\\2\\2\\u13eb\\u13ec\\7$\\2\\2\")\n buf.write(\"\\u13ec\\u03e2\\3\\2\\2\\2\\u13ed\\u13ee\\7\\'\\2\\2\\u13ee\\u03e4\\3\")\n buf.write(\"\\2\\2\\2\\u13ef\\u13f0\\7(\\2\\2\\u13f0\\u03e6\\3\\2\\2\\2\\u13f1\\u13f2\")\n buf.write(\"\\7*\\2\\2\\u13f2\\u03e8\\3\\2\\2\\2\\u13f3\\u13f4\\7+\\2\\2\\u13f4\\u03ea\")\n buf.write(\"\\3\\2\\2\\2\\u13f5\\u13f6\\7,\\2\\2\\u13f6\\u13f7\\7,\\2\\2\\u13f7\\u03ec\")\n buf.write(\"\\3\\2\\2\\2\\u13f8\\u13f9\\7,\\2\\2\\u13f9\\u03ee\\3\\2\\2\\2\\u13fa\")\n buf.write(\"\\u13fb\\7-\\2\\2\\u13fb\\u03f0\\3\\2\\2\\2\\u13fc\\u13fd\\7/\\2\\2\\u13fd\")\n buf.write(\"\\u03f2\\3\\2\\2\\2\\u13fe\\u13ff\\7.\\2\\2\\u13ff\\u03f4\\3\\2\\2\\2\")\n buf.write(\"\\u1400\\u1401\\7\\61\\2\\2\\u1401\\u03f6\\3\\2\\2\\2\\u1402\\u1403\")\n 
buf.write(\"\\7B\\2\\2\\u1403\\u03f8\\3\\2\\2\\2\\u1404\\u1405\\7<\\2\\2\\u1405\\u1406\")\n buf.write(\"\\7?\\2\\2\\u1406\\u03fa\\3\\2\\2\\2\\u1407\\u1408\\7<\\2\\2\\u1408\\u140d\")\n buf.write(\"\\5\\u0421\\u0211\\2\\u1409\\u140c\\5\\u0421\\u0211\\2\\u140a\\u140c\")\n buf.write(\"\\t\\b\\2\\2\\u140b\\u1409\\3\\2\\2\\2\\u140b\\u140a\\3\\2\\2\\2\\u140c\")\n buf.write(\"\\u140f\\3\\2\\2\\2\\u140d\\u140b\\3\\2\\2\\2\\u140d\\u140e\\3\\2\\2\\2\")\n buf.write(\"\\u140e\\u1416\\3\\2\\2\\2\\u140f\\u140d\\3\\2\\2\\2\\u1410\\u1411\\7\")\n buf.write(\"<\\2\\2\\u1411\\u1416\\5\\u03e1\\u01f1\\2\\u1412\\u1413\\7<\\2\\2\\u1413\")\n buf.write(\"\\u1416\\5\\u03cd\\u01e7\\2\\u1414\\u1416\\5\\u0411\\u0209\\2\\u1415\")\n buf.write(\"\\u1407\\3\\2\\2\\2\\u1415\\u1410\\3\\2\\2\\2\\u1415\\u1412\\3\\2\\2\\2\")\n buf.write(\"\\u1415\\u1414\\3\\2\\2\\2\\u1416\\u03fc\\3\\2\\2\\2\\u1417\\u1418\\7\")\n buf.write(\"<\\2\\2\\u1418\\u03fe\\3\\2\\2\\2\\u1419\\u141a\\7=\\2\\2\\u141a\\u0400\")\n buf.write(\"\\3\\2\\2\\2\\u141b\\u141c\\7>\\2\\2\\u141c\\u141d\\7?\\2\\2\\u141d\\u0402\")\n buf.write(\"\\3\\2\\2\\2\\u141e\\u141f\\7>\\2\\2\\u141f\\u0404\\3\\2\\2\\2\\u1420\")\n buf.write(\"\\u1421\\7@\\2\\2\\u1421\\u1422\\7?\\2\\2\\u1422\\u0406\\3\\2\\2\\2\\u1423\")\n buf.write(\"\\u1424\\7#\\2\\2\\u1424\\u142c\\7?\\2\\2\\u1425\\u1426\\7>\\2\\2\\u1426\")\n buf.write(\"\\u142c\\7@\\2\\2\\u1427\\u1428\\7`\\2\\2\\u1428\\u142c\\7?\\2\\2\\u1429\")\n buf.write(\"\\u142a\\7\\u0080\\2\\2\\u142a\\u142c\\7?\\2\\2\\u142b\\u1423\\3\\2\")\n buf.write(\"\\2\\2\\u142b\\u1425\\3\\2\\2\\2\\u142b\\u1427\\3\\2\\2\\2\\u142b\\u1429\")\n buf.write(\"\\3\\2\\2\\2\\u142c\\u0408\\3\\2\\2\\2\\u142d\\u142e\\7`\\2\\2\\u142e\")\n buf.write(\"\\u040a\\3\\2\\2\\2\\u142f\\u1430\\7\\u0080\\2\\2\\u1430\\u040c\\3\\2\")\n buf.write(\"\\2\\2\\u1431\\u1432\\7#\\2\\2\\u1432\\u040e\\3\\2\\2\\2\\u1433\\u1434\")\n buf.write(\"\\7@\\2\\2\\u1434\\u0410\\3\\2\\2\\2\\u1435\\u1436\\7A\\2\\2\\u1436\\u0412\")\n 
buf.write(\"\\3\\2\\2\\2\\u1437\\u1438\\7~\\2\\2\\u1438\\u1439\\7~\\2\\2\\u1439\\u0414\")\n buf.write(\"\\3\\2\\2\\2\\u143a\\u143b\\7~\\2\\2\\u143b\\u0416\\3\\2\\2\\2\\u143c\")\n buf.write(\"\\u143d\\7?\\2\\2\\u143d\\u0418\\3\\2\\2\\2\\u143e\\u143f\\7]\\2\\2\\u143f\")\n buf.write(\"\\u041a\\3\\2\\2\\2\\u1440\\u1441\\7_\\2\\2\\u1441\\u041c\\3\\2\\2\\2\")\n buf.write(\"\\u1442\\u1443\\7a\\2\\2\\u1443\\u041e\\3\\2\\2\\2\\u1444\\u1446\\t\")\n buf.write(\"\\t\\2\\2\\u1445\\u1444\\3\\2\\2\\2\\u1446\\u1447\\3\\2\\2\\2\\u1447\\u1445\")\n buf.write(\"\\3\\2\\2\\2\\u1447\\u1448\\3\\2\\2\\2\\u1448\\u1449\\3\\2\\2\\2\\u1449\")\n buf.write(\"\\u144a\\b\\u0210\\3\\2\\u144a\\u0420\\3\\2\\2\\2\\u144b\\u144c\\t\\n\")\n buf.write(\"\\2\\2\\u144c\\u0422\\3\\2\\2\\2\\u144d\\u144f\\4\\62;\\2\\u144e\\u144d\")\n buf.write(\"\\3\\2\\2\\2\\u144f\\u1450\\3\\2\\2\\2\\u1450\\u144e\\3\\2\\2\\2\\u1450\")\n buf.write(\"\\u1451\\3\\2\\2\\2\\u1451\\u0424\\3\\2\\2\\2\\u1452\\u1454\\5\\u03cd\")\n buf.write(\"\\u01e7\\2\\u1453\\u1452\\3\\2\\2\\2\\u1454\\u1457\\3\\2\\2\\2\\u1455\")\n buf.write(\"\\u1453\\3\\2\\2\\2\\u1455\\u1456\\3\\2\\2\\2\\u1456\\u1459\\3\\2\\2\\2\")\n buf.write(\"\\u1457\\u1455\\3\\2\\2\\2\\u1458\\u145a\\7\\60\\2\\2\\u1459\\u1458\")\n buf.write(\"\\3\\2\\2\\2\\u1459\\u145a\\3\\2\\2\\2\\u145a\\u145c\\3\\2\\2\\2\\u145b\")\n buf.write(\"\\u145d\\5\\u03cd\\u01e7\\2\\u145c\\u145b\\3\\2\\2\\2\\u145d\\u145e\")\n buf.write(\"\\3\\2\\2\\2\\u145e\\u145c\\3\\2\\2\\2\\u145e\\u145f\\3\\2\\2\\2\\u145f\")\n buf.write(\"\\u0426\\3\\2\\2\\2\\u1460\\u1461\\7/\\2\\2\\u1461\\u1462\\7/\\2\\2\\u1462\")\n buf.write(\"\\u1466\\3\\2\\2\\2\\u1463\\u1465\\n\\13\\2\\2\\u1464\\u1463\\3\\2\\2\")\n buf.write(\"\\2\\u1465\\u1468\\3\\2\\2\\2\\u1466\\u1464\\3\\2\\2\\2\\u1466\\u1467\")\n buf.write(\"\\3\\2\\2\\2\\u1467\\u146b\\3\\2\\2\\2\\u1468\\u1466\\3\\2\\2\\2\\u1469\")\n buf.write(\"\\u146c\\5\\u042d\\u0217\\2\\u146a\\u146c\\7\\2\\2\\3\\u146b\\u1469\")\n 
buf.write(\"\\3\\2\\2\\2\\u146b\\u146a\\3\\2\\2\\2\\u146c\\u146d\\3\\2\\2\\2\\u146d\")\n buf.write(\"\\u146e\\b\\u0214\\4\\2\\u146e\\u0428\\3\\2\\2\\2\\u146f\\u1470\\7\\61\")\n buf.write(\"\\2\\2\\u1470\\u1471\\7,\\2\\2\\u1471\\u1475\\3\\2\\2\\2\\u1472\\u1474\")\n buf.write(\"\\13\\2\\2\\2\\u1473\\u1472\\3\\2\\2\\2\\u1474\\u1477\\3\\2\\2\\2\\u1475\")\n buf.write(\"\\u1476\\3\\2\\2\\2\\u1475\\u1473\\3\\2\\2\\2\\u1476\\u1478\\3\\2\\2\\2\")\n buf.write(\"\\u1477\\u1475\\3\\2\\2\\2\\u1478\\u1479\\7,\\2\\2\\u1479\\u147a\\7\")\n buf.write(\"\\61\\2\\2\\u147a\\u147b\\3\\2\\2\\2\\u147b\\u147c\\b\\u0215\\4\\2\\u147c\")\n buf.write(\"\\u042a\\3\\2\\2\\2\\u147d\\u147e\\7r\\2\\2\\u147e\\u147f\\7t\\2\\2\\u147f\")\n buf.write(\"\\u1480\\7q\\2\\2\\u1480\\u1481\\7o\\2\\2\\u1481\\u1482\\7r\\2\\2\\u1482\")\n buf.write(\"\\u1483\\7v\\2\\2\\u1483\\u1484\\3\\2\\2\\2\\u1484\\u1488\\5\\u042f\")\n buf.write(\"\\u0218\\2\\u1485\\u1487\\n\\13\\2\\2\\u1486\\u1485\\3\\2\\2\\2\\u1487\")\n buf.write(\"\\u148a\\3\\2\\2\\2\\u1488\\u1486\\3\\2\\2\\2\\u1488\\u1489\\3\\2\\2\\2\")\n buf.write(\"\\u1489\\u148d\\3\\2\\2\\2\\u148a\\u1488\\3\\2\\2\\2\\u148b\\u148e\\5\")\n buf.write(\"\\u042d\\u0217\\2\\u148c\\u148e\\7\\2\\2\\3\\u148d\\u148b\\3\\2\\2\\2\")\n buf.write(\"\\u148d\\u148c\\3\\2\\2\\2\\u148e\\u042c\\3\\2\\2\\2\\u148f\\u1491\\7\")\n buf.write(\"\\17\\2\\2\\u1490\\u148f\\3\\2\\2\\2\\u1490\\u1491\\3\\2\\2\\2\\u1491\")\n buf.write(\"\\u1492\\3\\2\\2\\2\\u1492\\u1493\\7\\f\\2\\2\\u1493\\u042e\\3\\2\\2\\2\")\n buf.write(\"\\u1494\\u1495\\t\\f\\2\\2\\u1495\\u0430\\3\\2\\2\\2\\u1496\\u149b\\5\")\n buf.write(\"\\u0421\\u0211\\2\\u1497\\u149a\\5\\u0421\\u0211\\2\\u1498\\u149a\")\n buf.write(\"\\t\\r\\2\\2\\u1499\\u1497\\3\\2\\2\\2\\u1499\\u1498\\3\\2\\2\\2\\u149a\")\n buf.write(\"\\u149d\\3\\2\\2\\2\\u149b\\u1499\\3\\2\\2\\2\\u149b\\u149c\\3\\2\\2\\2\")\n buf.write(\"\\u149c\\u0432\\3\\2\\2\\2\\u149d\\u149b\\3\\2\\2\\2\\u149e\\u149f\\7\")\n 
buf.write(\"B\\2\\2\\u149f\\u14a0\\7#\\2\\2\\u14a0\\u14a1\\3\\2\\2\\2\\u14a1\\u14a2\")\n buf.write(\"\\b\\u021a\\4\\2\\u14a2\\u0434\\3\\2\\2\\2\\u14a3\\u14a4\\t\\16\\2\\2\")\n buf.write(\"\\u14a4\\u0436\\3\\2\\2\\2\\u14a5\\u14a6\\t\\17\\2\\2\\u14a6\\u0438\")\n buf.write(\"\\3\\2\\2\\2\\u14a7\\u14a8\\t\\20\\2\\2\\u14a8\\u043a\\3\\2\\2\\2\\u14a9\")\n buf.write(\"\\u14aa\\t\\21\\2\\2\\u14aa\\u043c\\3\\2\\2\\2\\u14ab\\u14ac\\t\\4\\2\")\n buf.write(\"\\2\\u14ac\\u043e\\3\\2\\2\\2\\u14ad\\u14ae\\t\\22\\2\\2\\u14ae\\u0440\")\n buf.write(\"\\3\\2\\2\\2\\u14af\\u14b0\\t\\23\\2\\2\\u14b0\\u0442\\3\\2\\2\\2\\u14b1\")\n buf.write(\"\\u14b2\\t\\24\\2\\2\\u14b2\\u0444\\3\\2\\2\\2\\u14b3\\u14b4\\t\\25\\2\")\n buf.write(\"\\2\\u14b4\\u0446\\3\\2\\2\\2\\u14b5\\u14b6\\t\\26\\2\\2\\u14b6\\u0448\")\n buf.write(\"\\3\\2\\2\\2\\u14b7\\u14b8\\t\\27\\2\\2\\u14b8\\u044a\\3\\2\\2\\2\\u14b9\")\n buf.write(\"\\u14ba\\t\\30\\2\\2\\u14ba\\u044c\\3\\2\\2\\2\\u14bb\\u14bc\\t\\31\\2\")\n buf.write(\"\\2\\u14bc\\u044e\\3\\2\\2\\2\\u14bd\\u14be\\t\\32\\2\\2\\u14be\\u0450\")\n buf.write(\"\\3\\2\\2\\2\\u14bf\\u14c0\\t\\33\\2\\2\\u14c0\\u0452\\3\\2\\2\\2\\u14c1\")\n buf.write(\"\\u14c2\\t\\34\\2\\2\\u14c2\\u0454\\3\\2\\2\\2\\u14c3\\u14c4\\t\\35\\2\")\n buf.write(\"\\2\\u14c4\\u0456\\3\\2\\2\\2\\u14c5\\u14c6\\t\\36\\2\\2\\u14c6\\u0458\")\n buf.write(\"\\3\\2\\2\\2\\u14c7\\u14c8\\t\\37\\2\\2\\u14c8\\u045a\\3\\2\\2\\2\\u14c9\")\n buf.write(\"\\u14ca\\t \\2\\2\\u14ca\\u045c\\3\\2\\2\\2\\u14cb\\u14cc\\t!\\2\\2\\u14cc\")\n buf.write(\"\\u045e\\3\\2\\2\\2\\u14cd\\u14ce\\t\\\"\\2\\2\\u14ce\\u0460\\3\\2\\2\\2\")\n buf.write(\"\\u14cf\\u14d0\\t#\\2\\2\\u14d0\\u0462\\3\\2\\2\\2\\u14d1\\u14d2\\t\")\n buf.write(\"$\\2\\2\\u14d2\\u0464\\3\\2\\2\\2\\u14d3\\u14d4\\t%\\2\\2\\u14d4\\u0466\")\n buf.write(\"\\3\\2\\2\\2\\u14d5\\u14d6\\t&\\2\\2\\u14d6\\u0468\\3\\2\\2\\2\\'\\2\\u1367\")\n buf.write(\"\\u1369\\u1373\\u1379\\u1380\\u1386\\u1392\\u1396\\u1398\\u139c\")\n 
buf.write(\"\\u13a3\\u13a5\\u13af\\u13ba\\u13c5\\u13d0\\u13db\\u13e7\\u13e9\")\n buf.write(\"\\u140b\\u140d\\u1415\\u142b\\u1447\\u1450\\u1455\\u1459\\u145e\")\n buf.write(\"\\u1466\\u146b\\u1475\\u1488\\u148d\\u1490\\u1499\\u149b\\5\\t\\u01ea\")\n buf.write(\"\\2\\b\\2\\2\\2\\3\\2\")\n return buf.getvalue()\n\n\nclass PlSqlLexer(Lexer):\n\n atn = ATNDeserializer().deserialize(serializedATN())\n\n decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]\n\n T__0 = 1\n A_LETTER = 2\n ADD = 3\n AFTER = 4\n AGENT = 5\n AGGREGATE = 6\n ALL = 7\n ALTER = 8\n ANALYZE = 9\n AND = 10\n ANY = 11\n ARRAY = 12\n AS = 13\n ASSUME = 14\n ASSERT = 15\n ASC = 16\n ASSOCIATE = 17\n AT = 18\n ATTRIBUTE = 19\n AUDIT = 20\n AUTHID = 21\n AUTO = 22\n AUTOMATIC = 23\n AUTONOMOUS_TRANSACTION = 24\n BATCH = 25\n BEFORE = 26\n BEGIN = 27\n BETWEEN = 28\n BFILE = 29\n BINARY_DOUBLE = 30\n BINARY_FLOAT = 31\n BINARY_INTEGER = 32\n BLOB = 33\n BLOCK = 34\n BODY = 35\n BOOLEAN = 36\n BOTH = 37\n BREADTH = 38\n BULK = 39\n BY = 40\n BYTE = 41\n C_LETTER = 42\n CACHE = 43\n CALL = 44\n CANONICAL = 45\n CASCADE = 46\n CASE = 47\n CAST = 48\n CHAR = 49\n CHAR_CS = 50\n CHARACTER = 51\n CHECK = 52\n CHR = 53\n CLOB = 54\n CLOSE = 55\n CLUSTER = 56\n COLLECT = 57\n COLUMNS = 58\n COMMENT = 59\n COMMIT = 60\n COMMITTED = 61\n COMPATIBILITY = 62\n COMPILE = 63\n COMPOUND = 64\n CONNECT = 65\n CONNECT_BY_ROOT = 66\n CONSTANT = 67\n CONSTRAINT = 68\n CONSTRAINTS = 69\n CONSTRUCTOR = 70\n CONTENT = 71\n CONTEXT = 72\n CONTINUE = 73\n CONVERT = 74\n CORRUPT_XID = 75\n CORRUPT_XID_ALL = 76\n COST = 77\n COUNT = 78\n CREATE = 79\n CROSS = 80\n CUBE = 81\n CURRENT = 82\n CURRENT_USER = 83\n CURSOR = 84\n CUSTOMDATUM = 85\n CYCLE = 86\n DATA = 87\n DATABASE = 88\n DATE = 89\n DAY = 90\n DB_ROLE_CHANGE = 91\n DBTIMEZONE = 92\n DDL = 93\n DEBUG = 94\n DEC = 95\n DECIMAL = 96\n DECLARE = 97\n DECOMPOSE = 98\n DECREMENT = 99\n DEFAULT = 100\n DEFAULTS = 101\n DEFERRED = 102\n DEFINER = 103\n 
DELETE = 104\n DEPTH = 105\n DESC = 106\n DETERMINISTIC = 107\n DIMENSION = 108\n DISABLE = 109\n DISASSOCIATE = 110\n DISTINCT = 111\n DOCUMENT = 112\n DOUBLE = 113\n DROP = 114\n DSINTERVAL_UNCONSTRAINED = 115\n EACH = 116\n ELEMENT = 117\n ELSE = 118\n ELSIF = 119\n EMPTY = 120\n ENABLE = 121\n ENCODING = 122\n END = 123\n ENTITYESCAPING = 124\n ERR = 125\n ERRORS = 126\n ESCAPE = 127\n EVALNAME = 128\n EXCEPT = 129\n EXCEPTION = 130\n EXCEPTION_INIT = 131\n EXCEPTIONS = 132\n EXCLUDE = 133\n EXCLUSIVE = 134\n EXECUTE = 135\n EXISTS = 136\n EXIT = 137\n EXPLAIN = 138\n EXTERNAL = 139\n EXTRACT = 140\n FAILURE = 141\n FALSE = 142\n FETCH = 143\n FINAL = 144\n FIRST = 145\n FIRST_VALUE = 146\n FLOAT = 147\n FOLLOWING = 148\n FOLLOWS = 149\n FOR = 150\n FORALL = 151\n FORCE = 152\n FROM = 153\n FULL = 154\n FUNCTION = 155\n GOTO = 156\n GRANT = 157\n GROUP = 158\n GROUPING = 159\n HASH = 160\n HAVING = 161\n HIDE = 162\n HOUR = 163\n IF = 164\n IGNORE = 165\n IMMEDIATE = 166\n IN = 167\n INCLUDE = 168\n INCLUDING = 169\n INCREMENT = 170\n INDENT = 171\n INDEX = 172\n INDEXED = 173\n INDICATOR = 174\n INDICES = 175\n INFINITE = 176\n INLINE = 177\n INNER = 178\n INOUT = 179\n INSERT = 180\n INSTANTIABLE = 181\n INSTEAD = 182\n INT = 183\n INTEGER = 184\n INTERSECT = 185\n INTERVAL = 186\n INTO = 187\n INVALIDATE = 188\n IS = 189\n ISOLATION = 190\n ITERATE = 191\n JAVA = 192\n JOIN = 193\n KEEP = 194\n LANGUAGE = 195\n LAST = 196\n LAST_VALUE = 197\n LEADING = 198\n LEFT = 199\n LEVEL = 200\n LIBRARY = 201\n LIKE = 202\n LIKE2 = 203\n LIKE4 = 204\n LIKEC = 205\n LIMIT = 206\n LOCAL = 207\n LOCK = 208\n LOCKED = 209\n LOG = 210\n LOGOFF = 211\n LOGON = 212\n LONG = 213\n LOOP = 214\n MAIN = 215\n MAP = 216\n MATCHED = 217\n MAXVALUE = 218\n MEASURES = 219\n MEMBER = 220\n MERGE = 221\n MINUS = 222\n MINUTE = 223\n MINVALUE = 224\n MLSLABEL = 225\n MODE = 226\n MODEL = 227\n MODIFY = 228\n MONTH = 229\n MULTISET = 230\n NAME = 231\n NAN = 232\n NATURAL = 233\n 
NATURALN = 234\n NAV = 235\n NCHAR = 236\n NCHAR_CS = 237\n NCLOB = 238\n NESTED = 239\n NEW = 240\n NO = 241\n NOAUDIT = 242\n NOCACHE = 243\n NOCOPY = 244\n NOCYCLE = 245\n NOENTITYESCAPING = 246\n NOMAXVALUE = 247\n NOMINVALUE = 248\n NONE = 249\n NOORDER = 250\n NOSCHEMACHECK = 251\n NOT = 252\n NOWAIT = 253\n NULL = 254\n NULLS = 255\n NUMBER = 256\n NUMERIC = 257\n NVARCHAR2 = 258\n OBJECT = 259\n OF = 260\n OFF = 261\n OID = 262\n OLD = 263\n ON = 264\n ONLY = 265\n OPEN = 266\n OPTION = 267\n OR = 268\n ORADATA = 269\n ORDER = 270\n ORDINALITY = 271\n OSERROR = 272\n OUT = 273\n OUTER = 274\n OVER = 275\n OVERRIDING = 276\n PACKAGE = 277\n PARALLEL_ENABLE = 278\n PARAMETERS = 279\n PARENT = 280\n PARTITION = 281\n PASSING = 282\n PATH = 283\n PERCENT_ROWTYPE = 284\n PERCENT_TYPE = 285\n PIPELINED = 286\n PIVOT = 287\n PLAN = 288\n PLS_INTEGER = 289\n POSITIVE = 290\n POSITIVEN = 291\n PRAGMA = 292\n PRECEDING = 293\n PRECISION = 294\n PRESENT = 295\n PRIOR = 296\n PROCEDURE = 297\n RAISE = 298\n RANGE = 299\n RAW = 300\n READ = 301\n REAL = 302\n RECORD = 303\n REF = 304\n REFERENCE = 305\n REFERENCING = 306\n REJECT = 307\n RELIES_ON = 308\n RENAME = 309\n REPLACE = 310\n RESPECT = 311\n RESTRICT_REFERENCES = 312\n RESULT = 313\n RESULT_CACHE = 314\n RETURN = 315\n RETURNING = 316\n REUSE = 317\n REVERSE = 318\n REVOKE = 319\n RIGHT = 320\n ROLLBACK = 321\n ROLLUP = 322\n ROW = 323\n ROWID = 324\n ROWS = 325\n RULES = 326\n SAMPLE = 327\n SAVE = 328\n SAVEPOINT = 329\n SCHEMA = 330\n SCHEMACHECK = 331\n SCN = 332\n SEARCH = 333\n SECOND = 334\n SEED = 335\n SEGMENT = 336\n SELECT = 337\n SELF = 338\n SEQUENCE = 339\n SEQUENTIAL = 340\n SERIALIZABLE = 341\n SERIALLY_REUSABLE = 342\n SERVERERROR = 343\n SESSIONTIMEZONE = 344\n SET = 345\n SETS = 346\n SETTINGS = 347\n SHARE = 348\n SHOW = 349\n SHUTDOWN = 350\n SIBLINGS = 351\n SIGNTYPE = 352\n SIMPLE_INTEGER = 353\n SINGLE = 354\n SIZE = 355\n SKIP_ = 356\n SMALLINT = 357\n SNAPSHOT = 358\n SOME = 359\n 
SPECIFICATION = 360\n SQLDATA = 361\n SQLERROR = 362\n STANDALONE = 363\n START = 364\n STARTUP = 365\n STATEMENT = 366\n STATEMENT_ID = 367\n STATIC = 368\n STATISTICS = 369\n STRING = 370\n SUBMULTISET = 371\n SUBPARTITION = 372\n SUBSTITUTABLE = 373\n SUBTYPE = 374\n SUCCESS = 375\n SUSPEND = 376\n TABLE = 377\n THE = 378\n THEN = 379\n TIME = 380\n TIMESTAMP = 381\n TIMESTAMP_LTZ_UNCONSTRAINED = 382\n TIMESTAMP_TZ_UNCONSTRAINED = 383\n TIMESTAMP_UNCONSTRAINED = 384\n TIMEZONE_ABBR = 385\n TIMEZONE_HOUR = 386\n TIMEZONE_MINUTE = 387\n TIMEZONE_REGION = 388\n TO = 389\n TRAILING = 390\n TRANSACTION = 391\n TRANSLATE = 392\n TREAT = 393\n TRIGGER = 394\n TRIM = 395\n TRUE = 396\n TRUNCATE = 397\n TYPE = 398\n UNBOUNDED = 399\n UNDER = 400\n UNION = 401\n UNIQUE = 402\n UNLIMITED = 403\n UNPIVOT = 404\n UNTIL = 405\n UPDATE = 406\n UPDATED = 407\n UPSERT = 408\n UROWID = 409\n USE = 410\n USING = 411\n VALIDATE = 412\n VALUE = 413\n VALUES = 414\n VARCHAR = 415\n VARCHAR2 = 416\n VARIABLE = 417\n VARRAY = 418\n VARYING = 419\n VERSION = 420\n VERSIONS = 421\n WAIT = 422\n WARNING = 423\n WELLFORMED = 424\n WHEN = 425\n WHENEVER = 426\n WHERE = 427\n WHILE = 428\n WITH = 429\n WITHIN = 430\n WORK = 431\n WRITE = 432\n XML = 433\n XMLAGG = 434\n XMLATTRIBUTES = 435\n XMLCAST = 436\n XMLCOLATTVAL = 437\n XMLELEMENT = 438\n XMLEXISTS = 439\n XMLFOREST = 440\n XMLNAMESPACES = 441\n XMLPARSE = 442\n XMLPI = 443\n XMLQUERY = 444\n XMLROOT = 445\n XMLSERIALIZE = 446\n XMLTABLE = 447\n YEAR = 448\n YES = 449\n YMINTERVAL_UNCONSTRAINED = 450\n ZONE = 451\n PREDICTION = 452\n PREDICTION_BOUNDS = 453\n PREDICTION_COST = 454\n PREDICTION_DETAILS = 455\n PREDICTION_PROBABILITY = 456\n PREDICTION_SET = 457\n CUME_DIST = 458\n DENSE_RANK = 459\n LISTAGG = 460\n PERCENT_RANK = 461\n PERCENTILE_CONT = 462\n PERCENTILE_DISC = 463\n RANK = 464\n AVG = 465\n CORR = 466\n LAG = 467\n LEAD = 468\n MAX = 469\n MEDIAN = 470\n MIN = 471\n NTILE = 472\n RATIO_TO_REPORT = 473\n ROW_NUMBER = 
474\n SUM = 475\n VARIANCE = 476\n REGR_ = 477\n STDDEV = 478\n VAR_ = 479\n COVAR_ = 480\n NATIONAL_CHAR_STRING_LIT = 481\n BIT_STRING_LIT = 482\n HEX_STRING_LIT = 483\n DOUBLE_PERIOD = 484\n PERIOD = 485\n UNSIGNED_INTEGER = 486\n APPROXIMATE_NUM_LIT = 487\n CHAR_STRING = 488\n DELIMITED_ID = 489\n PERCENT = 490\n AMPERSAND = 491\n LEFT_PAREN = 492\n RIGHT_PAREN = 493\n DOUBLE_ASTERISK = 494\n ASTERISK = 495\n PLUS_SIGN = 496\n MINUS_SIGN = 497\n COMMA = 498\n SOLIDUS = 499\n AT_SIGN = 500\n ASSIGN_OP = 501\n BINDVAR = 502\n COLON = 503\n SEMICOLON = 504\n LESS_THAN_OR_EQUALS_OP = 505\n LESS_THAN_OP = 506\n GREATER_THAN_OR_EQUALS_OP = 507\n NOT_EQUAL_OP = 508\n CARRET_OPERATOR_PART = 509\n TILDE_OPERATOR_PART = 510\n EXCLAMATION_OPERATOR_PART = 511\n GREATER_THAN_OP = 512\n CONCATENATION_OP = 513\n VERTICAL_BAR = 514\n EQUALS_OP = 515\n LEFT_BRACKET = 516\n RIGHT_BRACKET = 517\n INTRODUCER = 518\n SPACES = 519\n SINGLE_LINE_COMMENT = 520\n MULTI_LINE_COMMENT = 521\n PROMPT = 522\n REGULAR_ID = 523\n ZV = 524\n\n channelNames = [ u\"DEFAULT_TOKEN_CHANNEL\", u\"HIDDEN\" ]\n\n modeNames = [ \"DEFAULT_MODE\" ]\n\n literalNames = [ \"<INVALID>\",\n \"'..'\", \"'.'\", \"'%'\", \"'&'\", \"'('\", \"')'\", \"'**'\", \"'*'\", \"'+'\", \n \"'-'\", \"','\", \"'/'\", \"'@'\", \"':='\", \"':'\", \"';'\", \"'<='\", \"'<'\", \n \"'>='\", \"'^'\", \"'~'\", \"'!'\", \"'>'\", \"'||'\", \"'|'\", \"'='\", \"'['\", \n \"']'\", \"'_'\", \"'@!'\" ]\n\n symbolicNames = [ \"<INVALID>\",\n \"A_LETTER\", \"ADD\", \"AFTER\", \"AGENT\", \"AGGREGATE\", \"ALL\", \"ALTER\", \n \"ANALYZE\", \"AND\", \"ANY\", \"ARRAY\", \"AS\", \"ASSUME\", \"ASSERT\", \n \"ASC\", \"ASSOCIATE\", \"AT\", \"ATTRIBUTE\", \"AUDIT\", \"AUTHID\", \"AUTO\", \n \"AUTOMATIC\", \"AUTONOMOUS_TRANSACTION\", \"BATCH\", \"BEFORE\", \"BEGIN\", \n \"BETWEEN\", \"BFILE\", \"BINARY_DOUBLE\", \"BINARY_FLOAT\", \"BINARY_INTEGER\", \n \"BLOB\", \"BLOCK\", \"BODY\", \"BOOLEAN\", \"BOTH\", \"BREADTH\", \"BULK\", \n \"BY\", \"BYTE\", 
\"C_LETTER\", \"CACHE\", \"CALL\", \"CANONICAL\", \"CASCADE\", \n \"CASE\", \"CAST\", \"CHAR\", \"CHAR_CS\", \"CHARACTER\", \"CHECK\", \"CHR\", \n \"CLOB\", \"CLOSE\", \"CLUSTER\", \"COLLECT\", \"COLUMNS\", \"COMMENT\", \n \"COMMIT\", \"COMMITTED\", \"COMPATIBILITY\", \"COMPILE\", \"COMPOUND\", \n \"CONNECT\", \"CONNECT_BY_ROOT\", \"CONSTANT\", \"CONSTRAINT\", \"CONSTRAINTS\", \n \"CONSTRUCTOR\", \"CONTENT\", \"CONTEXT\", \"CONTINUE\", \"CONVERT\", \n \"CORRUPT_XID\", \"CORRUPT_XID_ALL\", \"COST\", \"COUNT\", \"CREATE\", \n \"CROSS\", \"CUBE\", \"CURRENT\", \"CURRENT_USER\", \"CURSOR\", \"CUSTOMDATUM\", \n \"CYCLE\", \"DATA\", \"DATABASE\", \"DATE\", \"DAY\", \"DB_ROLE_CHANGE\", \n \"DBTIMEZONE\", \"DDL\", \"DEBUG\", \"DEC\", \"DECIMAL\", \"DECLARE\", \"DECOMPOSE\", \n \"DECREMENT\", \"DEFAULT\", \"DEFAULTS\", \"DEFERRED\", \"DEFINER\", \"DELETE\", \n \"DEPTH\", \"DESC\", \"DETERMINISTIC\", \"DIMENSION\", \"DISABLE\", \"DISASSOCIATE\", \n \"DISTINCT\", \"DOCUMENT\", \"DOUBLE\", \"DROP\", \"DSINTERVAL_UNCONSTRAINED\", \n \"EACH\", \"ELEMENT\", \"ELSE\", \"ELSIF\", \"EMPTY\", \"ENABLE\", \"ENCODING\", \n \"END\", \"ENTITYESCAPING\", \"ERR\", \"ERRORS\", \"ESCAPE\", \"EVALNAME\", \n \"EXCEPT\", \"EXCEPTION\", \"EXCEPTION_INIT\", \"EXCEPTIONS\", \"EXCLUDE\", \n \"EXCLUSIVE\", \"EXECUTE\", \"EXISTS\", \"EXIT\", \"EXPLAIN\", \"EXTERNAL\", \n \"EXTRACT\", \"FAILURE\", \"FALSE\", \"FETCH\", \"FINAL\", \"FIRST\", \"FIRST_VALUE\", \n \"FLOAT\", \"FOLLOWING\", \"FOLLOWS\", \"FOR\", \"FORALL\", \"FORCE\", \"FROM\", \n \"FULL\", \"FUNCTION\", \"GOTO\", \"GRANT\", \"GROUP\", \"GROUPING\", \"HASH\", \n \"HAVING\", \"HIDE\", \"HOUR\", \"IF\", \"IGNORE\", \"IMMEDIATE\", \"IN\", \n \"INCLUDE\", \"INCLUDING\", \"INCREMENT\", \"INDENT\", \"INDEX\", \"INDEXED\", \n \"INDICATOR\", \"INDICES\", \"INFINITE\", \"INLINE\", \"INNER\", \"INOUT\", \n \"INSERT\", \"INSTANTIABLE\", \"INSTEAD\", \"INT\", \"INTEGER\", \"INTERSECT\", \n \"INTERVAL\", \"INTO\", \"INVALIDATE\", \"IS\", \"ISOLATION\", 
\"ITERATE\", \n \"JAVA\", \"JOIN\", \"KEEP\", \"LANGUAGE\", \"LAST\", \"LAST_VALUE\", \"LEADING\", \n \"LEFT\", \"LEVEL\", \"LIBRARY\", \"LIKE\", \"LIKE2\", \"LIKE4\", \"LIKEC\", \n \"LIMIT\", \"LOCAL\", \"LOCK\", \"LOCKED\", \"LOG\", \"LOGOFF\", \"LOGON\", \n \"LONG\", \"LOOP\", \"MAIN\", \"MAP\", \"MATCHED\", \"MAXVALUE\", \"MEASURES\", \n \"MEMBER\", \"MERGE\", \"MINUS\", \"MINUTE\", \"MINVALUE\", \"MLSLABEL\", \n \"MODE\", \"MODEL\", \"MODIFY\", \"MONTH\", \"MULTISET\", \"NAME\", \"NAN\", \n \"NATURAL\", \"NATURALN\", \"NAV\", \"NCHAR\", \"NCHAR_CS\", \"NCLOB\", \n \"NESTED\", \"NEW\", \"NO\", \"NOAUDIT\", \"NOCACHE\", \"NOCOPY\", \"NOCYCLE\", \n \"NOENTITYESCAPING\", \"NOMAXVALUE\", \"NOMINVALUE\", \"NONE\", \"NOORDER\", \n \"NOSCHEMACHECK\", \"NOT\", \"NOWAIT\", \"NULL\", \"NULLS\", \"NUMBER\", \n \"NUMERIC\", \"NVARCHAR2\", \"OBJECT\", \"OF\", \"OFF\", \"OID\", \"OLD\", \n \"ON\", \"ONLY\", \"OPEN\", \"OPTION\", \"OR\", \"ORADATA\", \"ORDER\", \"ORDINALITY\", \n \"OSERROR\", \"OUT\", \"OUTER\", \"OVER\", \"OVERRIDING\", \"PACKAGE\", \n \"PARALLEL_ENABLE\", \"PARAMETERS\", \"PARENT\", \"PARTITION\", \"PASSING\", \n \"PATH\", \"PERCENT_ROWTYPE\", \"PERCENT_TYPE\", \"PIPELINED\", \"PIVOT\", \n \"PLAN\", \"PLS_INTEGER\", \"POSITIVE\", \"POSITIVEN\", \"PRAGMA\", \"PRECEDING\", \n \"PRECISION\", \"PRESENT\", \"PRIOR\", \"PROCEDURE\", \"RAISE\", \"RANGE\", \n \"RAW\", \"READ\", \"REAL\", \"RECORD\", \"REF\", \"REFERENCE\", \"REFERENCING\", \n \"REJECT\", \"RELIES_ON\", \"RENAME\", \"REPLACE\", \"RESPECT\", \"RESTRICT_REFERENCES\", \n \"RESULT\", \"RESULT_CACHE\", \"RETURN\", \"RETURNING\", \"REUSE\", \"REVERSE\", \n \"REVOKE\", \"RIGHT\", \"ROLLBACK\", \"ROLLUP\", \"ROW\", \"ROWID\", \"ROWS\", \n \"RULES\", \"SAMPLE\", \"SAVE\", \"SAVEPOINT\", \"SCHEMA\", \"SCHEMACHECK\", \n \"SCN\", \"SEARCH\", \"SECOND\", \"SEED\", \"SEGMENT\", \"SELECT\", \"SELF\", \n \"SEQUENCE\", \"SEQUENTIAL\", \"SERIALIZABLE\", \"SERIALLY_REUSABLE\", \n \"SERVERERROR\", \"SESSIONTIMEZONE\", 
\"SET\", \"SETS\", \"SETTINGS\", \n \"SHARE\", \"SHOW\", \"SHUTDOWN\", \"SIBLINGS\", \"SIGNTYPE\", \"SIMPLE_INTEGER\", \n \"SINGLE\", \"SIZE\", \"SKIP_\", \"SMALLINT\", \"SNAPSHOT\", \"SOME\", \"SPECIFICATION\", \n \"SQLDATA\", \"SQLERROR\", \"STANDALONE\", \"START\", \"STARTUP\", \"STATEMENT\", \n \"STATEMENT_ID\", \"STATIC\", \"STATISTICS\", \"STRING\", \"SUBMULTISET\", \n \"SUBPARTITION\", \"SUBSTITUTABLE\", \"SUBTYPE\", \"SUCCESS\", \"SUSPEND\", \n \"TABLE\", \"THE\", \"THEN\", \"TIME\", \"TIMESTAMP\", \"TIMESTAMP_LTZ_UNCONSTRAINED\", \n \"TIMESTAMP_TZ_UNCONSTRAINED\", \"TIMESTAMP_UNCONSTRAINED\", \"TIMEZONE_ABBR\", \n \"TIMEZONE_HOUR\", \"TIMEZONE_MINUTE\", \"TIMEZONE_REGION\", \"TO\", \n \"TRAILING\", \"TRANSACTION\", \"TRANSLATE\", \"TREAT\", \"TRIGGER\", \n \"TRIM\", \"TRUE\", \"TRUNCATE\", \"TYPE\", \"UNBOUNDED\", \"UNDER\", \"UNION\", \n \"UNIQUE\", \"UNLIMITED\", \"UNPIVOT\", \"UNTIL\", \"UPDATE\", \"UPDATED\", \n \"UPSERT\", \"UROWID\", \"USE\", \"USING\", \"VALIDATE\", \"VALUE\", \"VALUES\", \n \"VARCHAR\", \"VARCHAR2\", \"VARIABLE\", \"VARRAY\", \"VARYING\", \"VERSION\", \n \"VERSIONS\", \"WAIT\", \"WARNING\", \"WELLFORMED\", \"WHEN\", \"WHENEVER\", \n \"WHERE\", \"WHILE\", \"WITH\", \"WITHIN\", \"WORK\", \"WRITE\", \"XML\", \n \"XMLAGG\", \"XMLATTRIBUTES\", \"XMLCAST\", \"XMLCOLATTVAL\", \"XMLELEMENT\", \n \"XMLEXISTS\", \"XMLFOREST\", \"XMLNAMESPACES\", \"XMLPARSE\", \"XMLPI\", \n \"XMLQUERY\", \"XMLROOT\", \"XMLSERIALIZE\", \"XMLTABLE\", \"YEAR\", \"YES\", \n \"YMINTERVAL_UNCONSTRAINED\", \"ZONE\", \"PREDICTION\", \"PREDICTION_BOUNDS\", \n \"PREDICTION_COST\", \"PREDICTION_DETAILS\", \"PREDICTION_PROBABILITY\", \n \"PREDICTION_SET\", \"CUME_DIST\", \"DENSE_RANK\", \"LISTAGG\", \"PERCENT_RANK\", \n \"PERCENTILE_CONT\", \"PERCENTILE_DISC\", \"RANK\", \"AVG\", \"CORR\", \n \"LAG\", \"LEAD\", \"MAX\", \"MEDIAN\", \"MIN\", \"NTILE\", \"RATIO_TO_REPORT\", \n \"ROW_NUMBER\", \"SUM\", \"VARIANCE\", \"REGR_\", \"STDDEV\", \"VAR_\", \n \"COVAR_\", 
\"NATIONAL_CHAR_STRING_LIT\", \"BIT_STRING_LIT\", \"HEX_STRING_LIT\", \n \"DOUBLE_PERIOD\", \"PERIOD\", \"UNSIGNED_INTEGER\", \"APPROXIMATE_NUM_LIT\", \n \"CHAR_STRING\", \"DELIMITED_ID\", \"PERCENT\", \"AMPERSAND\", \"LEFT_PAREN\", \n \"RIGHT_PAREN\", \"DOUBLE_ASTERISK\", \"ASTERISK\", \"PLUS_SIGN\", \"MINUS_SIGN\", \n \"COMMA\", \"SOLIDUS\", \"AT_SIGN\", \"ASSIGN_OP\", \"BINDVAR\", \"COLON\", \n \"SEMICOLON\", \"LESS_THAN_OR_EQUALS_OP\", \"LESS_THAN_OP\", \"GREATER_THAN_OR_EQUALS_OP\", \n \"NOT_EQUAL_OP\", \"CARRET_OPERATOR_PART\", \"TILDE_OPERATOR_PART\", \n \"EXCLAMATION_OPERATOR_PART\", \"GREATER_THAN_OP\", \"CONCATENATION_OP\", \n \"VERTICAL_BAR\", \"EQUALS_OP\", \"LEFT_BRACKET\", \"RIGHT_BRACKET\", \n \"INTRODUCER\", \"SPACES\", \"SINGLE_LINE_COMMENT\", \"MULTI_LINE_COMMENT\", \n \"PROMPT\", \"REGULAR_ID\", \"ZV\" ]\n\n ruleNames = [ \"T__0\", \"A_LETTER\", \"ADD\", \"AFTER\", \"AGENT\", \"AGGREGATE\", \n \"ALL\", \"ALTER\", \"ANALYZE\", \"AND\", \"ANY\", \"ARRAY\", \"AS\", \n \"ASSUME\", \"ASSERT\", \"ASC\", \"ASSOCIATE\", \"AT\", \"ATTRIBUTE\", \n \"AUDIT\", \"AUTHID\", \"AUTO\", \"AUTOMATIC\", \"AUTONOMOUS_TRANSACTION\", \n \"BATCH\", \"BEFORE\", \"BEGIN\", \"BETWEEN\", \"BFILE\", \"BINARY_DOUBLE\", \n \"BINARY_FLOAT\", \"BINARY_INTEGER\", \"BLOB\", \"BLOCK\", \"BODY\", \n \"BOOLEAN\", \"BOTH\", \"BREADTH\", \"BULK\", \"BY\", \"BYTE\", \"C_LETTER\", \n \"CACHE\", \"CALL\", \"CANONICAL\", \"CASCADE\", \"CASE\", \"CAST\", \n \"CHAR\", \"CHAR_CS\", \"CHARACTER\", \"CHECK\", \"CHR\", \"CLOB\", \n \"CLOSE\", \"CLUSTER\", \"COLLECT\", \"COLUMNS\", \"COMMENT\", \"COMMIT\", \n \"COMMITTED\", \"COMPATIBILITY\", \"COMPILE\", \"COMPOUND\", \"CONNECT\", \n \"CONNECT_BY_ROOT\", \"CONSTANT\", \"CONSTRAINT\", \"CONSTRAINTS\", \n \"CONSTRUCTOR\", \"CONTENT\", \"CONTEXT\", \"CONTINUE\", \"CONVERT\", \n \"CORRUPT_XID\", \"CORRUPT_XID_ALL\", \"COST\", \"COUNT\", \"CREATE\", \n \"CROSS\", \"CUBE\", \"CURRENT\", \"CURRENT_USER\", \"CURSOR\", \n \"CUSTOMDATUM\", \"CYCLE\", 
\"DATA\", \"DATABASE\", \"DATE\", \"DAY\", \n \"DB_ROLE_CHANGE\", \"DBTIMEZONE\", \"DDL\", \"DEBUG\", \"DEC\", \n \"DECIMAL\", \"DECLARE\", \"DECOMPOSE\", \"DECREMENT\", \"DEFAULT\", \n \"DEFAULTS\", \"DEFERRED\", \"DEFINER\", \"DELETE\", \"DEPTH\", \n \"DESC\", \"DETERMINISTIC\", \"DIMENSION\", \"DISABLE\", \"DISASSOCIATE\", \n \"DISTINCT\", \"DOCUMENT\", \"DOUBLE\", \"DROP\", \"DSINTERVAL_UNCONSTRAINED\", \n \"EACH\", \"ELEMENT\", \"ELSE\", \"ELSIF\", \"EMPTY\", \"ENABLE\", \n \"ENCODING\", \"END\", \"ENTITYESCAPING\", \"ERR\", \"ERRORS\", \n \"ESCAPE\", \"EVALNAME\", \"EXCEPT\", \"EXCEPTION\", \"EXCEPTION_INIT\", \n \"EXCEPTIONS\", \"EXCLUDE\", \"EXCLUSIVE\", \"EXECUTE\", \"EXISTS\", \n \"EXIT\", \"EXPLAIN\", \"EXTERNAL\", \"EXTRACT\", \"FAILURE\", \"FALSE\", \n \"FETCH\", \"FINAL\", \"FIRST\", \"FIRST_VALUE\", \"FLOAT\", \"FOLLOWING\", \n \"FOLLOWS\", \"FOR\", \"FORALL\", \"FORCE\", \"FROM\", \"FULL\", \"FUNCTION\", \n \"GOTO\", \"GRANT\", \"GROUP\", \"GROUPING\", \"HASH\", \"HAVING\", \n \"HIDE\", \"HOUR\", \"IF\", \"IGNORE\", \"IMMEDIATE\", \"IN\", \"INCLUDE\", \n \"INCLUDING\", \"INCREMENT\", \"INDENT\", \"INDEX\", \"INDEXED\", \n \"INDICATOR\", \"INDICES\", \"INFINITE\", \"INLINE\", \"INNER\", \n \"INOUT\", \"INSERT\", \"INSTANTIABLE\", \"INSTEAD\", \"INT\", \"INTEGER\", \n \"INTERSECT\", \"INTERVAL\", \"INTO\", \"INVALIDATE\", \"IS\", \"ISOLATION\", \n \"ITERATE\", \"JAVA\", \"JOIN\", \"KEEP\", \"LANGUAGE\", \"LAST\", \n \"LAST_VALUE\", \"LEADING\", \"LEFT\", \"LEVEL\", \"LIBRARY\", \"LIKE\", \n \"LIKE2\", \"LIKE4\", \"LIKEC\", \"LIMIT\", \"LOCAL\", \"LOCK\", \"LOCKED\", \n \"LOG\", \"LOGOFF\", \"LOGON\", \"LONG\", \"LOOP\", \"MAIN\", \"MAP\", \n \"MATCHED\", \"MAXVALUE\", \"MEASURES\", \"MEMBER\", \"MERGE\", \n \"MINUS\", \"MINUTE\", \"MINVALUE\", \"MLSLABEL\", \"MODE\", \"MODEL\", \n \"MODIFY\", \"MONTH\", \"MULTISET\", \"NAME\", \"NAN\", \"NATURAL\", \n \"NATURALN\", \"NAV\", \"NCHAR\", \"NCHAR_CS\", \"NCLOB\", \"NESTED\", \n \"NEW\", \"NO\", 
\"NOAUDIT\", \"NOCACHE\", \"NOCOPY\", \"NOCYCLE\", \n \"NOENTITYESCAPING\", \"NOMAXVALUE\", \"NOMINVALUE\", \"NONE\", \n \"NOORDER\", \"NOSCHEMACHECK\", \"NOT\", \"NOWAIT\", \"NULL\", \"NULLS\", \n \"NUMBER\", \"NUMERIC\", \"NVARCHAR2\", \"OBJECT\", \"OF\", \"OFF\", \n \"OID\", \"OLD\", \"ON\", \"ONLY\", \"OPEN\", \"OPTION\", \"OR\", \"ORADATA\", \n \"ORDER\", \"ORDINALITY\", \"OSERROR\", \"OUT\", \"OUTER\", \"OVER\", \n \"OVERRIDING\", \"PACKAGE\", \"PARALLEL_ENABLE\", \"PARAMETERS\", \n \"PARENT\", \"PARTITION\", \"PASSING\", \"PATH\", \"PERCENT_ROWTYPE\", \n \"PERCENT_TYPE\", \"PIPELINED\", \"PIVOT\", \"PLAN\", \"PLS_INTEGER\", \n \"POSITIVE\", \"POSITIVEN\", \"PRAGMA\", \"PRECEDING\", \"PRECISION\", \n \"PRESENT\", \"PRIOR\", \"PROCEDURE\", \"RAISE\", \"RANGE\", \"RAW\", \n \"READ\", \"REAL\", \"RECORD\", \"REF\", \"REFERENCE\", \"REFERENCING\", \n \"REJECT\", \"RELIES_ON\", \"RENAME\", \"REPLACE\", \"RESPECT\", \n \"RESTRICT_REFERENCES\", \"RESULT\", \"RESULT_CACHE\", \"RETURN\", \n \"RETURNING\", \"REUSE\", \"REVERSE\", \"REVOKE\", \"RIGHT\", \"ROLLBACK\", \n \"ROLLUP\", \"ROW\", \"ROWID\", \"ROWS\", \"RULES\", \"SAMPLE\", \"SAVE\", \n \"SAVEPOINT\", \"SCHEMA\", \"SCHEMACHECK\", \"SCN\", \"SEARCH\", \n \"SECOND\", \"SEED\", \"SEGMENT\", \"SELECT\", \"SELF\", \"SEQUENCE\", \n \"SEQUENTIAL\", \"SERIALIZABLE\", \"SERIALLY_REUSABLE\", \"SERVERERROR\", \n \"SESSIONTIMEZONE\", \"SET\", \"SETS\", \"SETTINGS\", \"SHARE\", \n \"SHOW\", \"SHUTDOWN\", \"SIBLINGS\", \"SIGNTYPE\", \"SIMPLE_INTEGER\", \n \"SINGLE\", \"SIZE\", \"SKIP_\", \"SMALLINT\", \"SNAPSHOT\", \"SOME\", \n \"SPECIFICATION\", \"SQLDATA\", \"SQLERROR\", \"STANDALONE\", \n \"START\", \"STARTUP\", \"STATEMENT\", \"STATEMENT_ID\", \"STATIC\", \n \"STATISTICS\", \"STRING\", \"SUBMULTISET\", \"SUBPARTITION\", \n \"SUBSTITUTABLE\", \"SUBTYPE\", \"SUCCESS\", \"SUSPEND\", \"TABLE\", \n \"THE\", \"THEN\", \"TIME\", \"TIMESTAMP\", \"TIMESTAMP_LTZ_UNCONSTRAINED\", \n \"TIMESTAMP_TZ_UNCONSTRAINED\", 
\"TIMESTAMP_UNCONSTRAINED\", \n \"TIMEZONE_ABBR\", \"TIMEZONE_HOUR\", \"TIMEZONE_MINUTE\", \"TIMEZONE_REGION\", \n \"TO\", \"TRAILING\", \"TRANSACTION\", \"TRANSLATE\", \"TREAT\", \n \"TRIGGER\", \"TRIM\", \"TRUE\", \"TRUNCATE\", \"TYPE\", \"UNBOUNDED\", \n \"UNDER\", \"UNION\", \"UNIQUE\", \"UNLIMITED\", \"UNPIVOT\", \"UNTIL\", \n \"UPDATE\", \"UPDATED\", \"UPSERT\", \"UROWID\", \"USE\", \"USING\", \n \"VALIDATE\", \"VALUE\", \"VALUES\", \"VARCHAR\", \"VARCHAR2\", \n \"VARIABLE\", \"VARRAY\", \"VARYING\", \"VERSION\", \"VERSIONS\", \n \"WAIT\", \"WARNING\", \"WELLFORMED\", \"WHEN\", \"WHENEVER\", \"WHERE\", \n \"WHILE\", \"WITH\", \"WITHIN\", \"WORK\", \"WRITE\", \"XML\", \"XMLAGG\", \n \"XMLATTRIBUTES\", \"XMLCAST\", \"XMLCOLATTVAL\", \"XMLELEMENT\", \n \"XMLEXISTS\", \"XMLFOREST\", \"XMLNAMESPACES\", \"XMLPARSE\", \n \"XMLPI\", \"XMLQUERY\", \"XMLROOT\", \"XMLSERIALIZE\", \"XMLTABLE\", \n \"YEAR\", \"YES\", \"YMINTERVAL_UNCONSTRAINED\", \"ZONE\", \"PREDICTION\", \n \"PREDICTION_BOUNDS\", \"PREDICTION_COST\", \"PREDICTION_DETAILS\", \n \"PREDICTION_PROBABILITY\", \"PREDICTION_SET\", \"CUME_DIST\", \n \"DENSE_RANK\", \"LISTAGG\", \"PERCENT_RANK\", \"PERCENTILE_CONT\", \n \"PERCENTILE_DISC\", \"RANK\", \"AVG\", \"CORR\", \"LAG\", \"LEAD\", \n \"MAX\", \"MEDIAN\", \"MIN\", \"NTILE\", \"RATIO_TO_REPORT\", \"ROW_NUMBER\", \n \"SUM\", \"VARIANCE\", \"REGR_\", \"STDDEV\", \"VAR_\", \"COVAR_\", \n \"NATIONAL_CHAR_STRING_LIT\", \"BIT_STRING_LIT\", \"HEX_STRING_LIT\", \n \"DOUBLE_PERIOD\", \"PERIOD\", \"UNSIGNED_INTEGER\", \"APPROXIMATE_NUM_LIT\", \n \"CHAR_STRING\", \"CHAR_STRING_PERL\", \"QUOTE\", \"QS_ANGLE\", \n \"QS_BRACE\", \"QS_BRACK\", \"QS_PAREN\", \"QS_OTHER_CH\", \"DELIMITED_ID\", \n \"PERCENT\", \"AMPERSAND\", \"LEFT_PAREN\", \"RIGHT_PAREN\", \"DOUBLE_ASTERISK\", \n \"ASTERISK\", \"PLUS_SIGN\", \"MINUS_SIGN\", \"COMMA\", \"SOLIDUS\", \n \"AT_SIGN\", \"ASSIGN_OP\", \"BINDVAR\", \"COLON\", \"SEMICOLON\", \n \"LESS_THAN_OR_EQUALS_OP\", \"LESS_THAN_OP\", 
\"GREATER_THAN_OR_EQUALS_OP\", \n \"NOT_EQUAL_OP\", \"CARRET_OPERATOR_PART\", \"TILDE_OPERATOR_PART\", \n \"EXCLAMATION_OPERATOR_PART\", \"GREATER_THAN_OP\", \"QUESTION_MARK\", \n \"CONCATENATION_OP\", \"VERTICAL_BAR\", \"EQUALS_OP\", \"LEFT_BRACKET\", \n \"RIGHT_BRACKET\", \"INTRODUCER\", \"SPACES\", \"SIMPLE_LETTER\", \n \"UNSIGNED_INTEGER_FRAGMENT\", \"FLOAT_FRAGMENT\", \"SINGLE_LINE_COMMENT\", \n \"MULTI_LINE_COMMENT\", \"PROMPT\", \"NEWLINE\", \"SPACE\", \"REGULAR_ID\", \n \"ZV\", \"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\", \"I\", \"J\", \n \"K\", \"L\", \"M\", \"N\", \"O\", \"P\", \"Q\", \"R\", \"S\", \"T\", \"U\", \n \"V\", \"W\", \"X\", \"Y\", \"Z\" ]\n\n grammarFileName = \"PlSql.g4\"\n\n def __init__(self, input=None, output:TextIO = sys.stdout):\n super().__init__(input, output)\n self.checkVersion(\"4.7.2\")\n self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())\n self._actions = None\n self._predicates = None\n\n\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
# a little more thing to be done.
def eval_loop():
while True:
s = input('Please input: ')
if s != 'done':
print(eval(s))
else:
break
eval_loop()
|
normal
|
{
"blob_id": "80969de6924ae5fe6bb8e7f1211e7aca28c63989",
"index": 2615,
"step-1": "<mask token>\n",
"step-2": "def eval_loop():\n while True:\n s = input('Please input: ')\n if s != 'done':\n print(eval(s))\n else:\n break\n\n\n<mask token>\n",
"step-3": "def eval_loop():\n while True:\n s = input('Please input: ')\n if s != 'done':\n print(eval(s))\n else:\n break\n\n\neval_loop()\n",
"step-4": "# a little more thing to be done.\ndef eval_loop():\n\twhile True:\n\t\ts = input('Please input: ')\n\t\tif s != 'done':\n\t\t\tprint(eval(s))\n\t\telse:\n\t\t\tbreak\n\neval_loop()",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def FivePrimeArea(df):
df = df.sort_values(by=['chr', 'end'], ascending=True)
df['FA_start'] = df['gene_start']
df_exon = df[df['type'] == 'exon'].copy()
df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')
df_exon['FA_end'] = df_exon['end']
df_exon = df_exon[['name', 'FA_end']]
df = pd.merge(df, df_exon, how='left', on='name')
df['FA_length'] = df['FA_end'] - df['FA_start']
df = df.drop_duplicates(subset=['name'], keep='first')
return df
<|reserved_special_token_0|>
def getAreas(df):
"""
This function will get the first and last exons for plu and min strand.
Call it area because not necessarily exon.
"""
df_plu = df[df['strand'] == '+']
df_min = df[df['strand'] == '-']
df_plu_FA = FivePrimeArea(df_plu)
df_min_FA = FivePrimeArea(df_min)
df_plu_LA = ThreePrimeArea(df_plu)[['name', 'LA_start', 'LA_end',
'LA_length']]
df_min_LA = ThreePrimeArea(df_min)[['name', 'LA_start', 'LA_end',
'LA_length']]
df_plu = pd.merge(df_plu_FA, df_plu_LA, on='name')
df_min = pd.merge(df_min_FA, df_min_LA, on='name')
df = pd.concat([df_plu, df_min])
return df
def chrDIC(df):
"""This function will take a gtf and return strand specific dictionary of different chrm"""
chr_names = df['chr'].unique().tolist()
d_chr = d_gtf_chr = {chrom: df[df['chr'] == chrom] for chrom in chr_names}
return d_chr
def countInside(df, start, end):
rows_df = df[(start < df['start']) & (df['end'] < end)]
names = rows_df['name'].unique().tolist()
names = ','.join(names)
if len(names) > 0:
return names
else:
return np.nan
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def FivePrimeArea(df):
df = df.sort_values(by=['chr', 'end'], ascending=True)
df['FA_start'] = df['gene_start']
df_exon = df[df['type'] == 'exon'].copy()
df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')
df_exon['FA_end'] = df_exon['end']
df_exon = df_exon[['name', 'FA_end']]
df = pd.merge(df, df_exon, how='left', on='name')
df['FA_length'] = df['FA_end'] - df['FA_start']
df = df.drop_duplicates(subset=['name'], keep='first')
return df
<|reserved_special_token_0|>
def getAreas(df):
"""
This function will get the first and last exons for plu and min strand.
Call it area because not necessarily exon.
"""
df_plu = df[df['strand'] == '+']
df_min = df[df['strand'] == '-']
df_plu_FA = FivePrimeArea(df_plu)
df_min_FA = FivePrimeArea(df_min)
df_plu_LA = ThreePrimeArea(df_plu)[['name', 'LA_start', 'LA_end',
'LA_length']]
df_min_LA = ThreePrimeArea(df_min)[['name', 'LA_start', 'LA_end',
'LA_length']]
df_plu = pd.merge(df_plu_FA, df_plu_LA, on='name')
df_min = pd.merge(df_min_FA, df_min_LA, on='name')
df = pd.concat([df_plu, df_min])
return df
def chrDIC(df):
"""This function will take a gtf and return strand specific dictionary of different chrm"""
chr_names = df['chr'].unique().tolist()
d_chr = d_gtf_chr = {chrom: df[df['chr'] == chrom] for chrom in chr_names}
return d_chr
def countInside(df, start, end):
rows_df = df[(start < df['start']) & (df['end'] < end)]
names = rows_df['name'].unique().tolist()
names = ','.join(names)
if len(names) > 0:
return names
else:
return np.nan
<|reserved_special_token_0|>
def flattenGTF(file_in, file_type, NEXTFLOW=True):
if file_type == 'ENSEMBL':
print(f'Flattening ENSEMBL like genome {file_in}')
my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',
'dot2', 'gene_id']
df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
comment='#', low_memory=False)
df['chr'] = df['chr'].astype(str)
df = df[~df['chr'].str.contains('\\.')]
df.sort_values(by=['chr', 'start'], inplace=True, ascending=True)
fout = f'{file_in[:-4]}_sort.gtf'
df.to_csv(fout, sep='\t', index=None, quoting=csv.QUOTE_NONE,
header=None)
df['name'] = df['gene_id'].str.split(';', expand=True)[0]
df['name'] = df['name'].str.replace('gene_id ', '')
df['name'] = df['name'].str.replace('"', '')
df['type'] = df['type'].astype(str)
df_gene = df[df['type'] == 'gene'].copy()
df_gene['gene_start'] = df_gene['start']
df_gene['gene_end'] = df_gene['end']
df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()
df = pd.merge(df, df_gene, how='left', on='name')
df = getAreas(df)
df['start'] = df['gene_start']
df['end'] = df['gene_end']
if file_type == 'BED':
my_col = ['chr', 'start', 'end', 'name', 'strand']
df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
comment='#', low_memory=False)
df['FA_start'] = df['start']
df['FA_end'] = df['end']
df['LA_start'] = df['start']
df['LA_end'] = df['end']
df['dot'] = '.'
df['dot2'] = '.'
df['source'] = 'NA'
df['type'] = 'NA'
df['gene_id'] = df['name']
if file_type == 'REFSEQGFF':
print(f'Flattening REFSEQGFF like genome')
my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',
'dot2', 'gene_id']
replace_list = [('chr1', 'NC_000001.11'), ('chr2', 'NC_000002.12'),
('chr3', 'NC_000003.12'), ('chr4', 'NC_000004.12'), ('chr5',
'NC_000005.10'), ('chr6', 'NC_000006.12'), ('chr7',
'NC_000007.14'), ('chr8', 'NC_000008.11'), ('chr9',
'NC_000009.12'), ('chr10', 'NC_000010.11'), ('chr11',
'NC_000011.10'), ('chr12', 'NC_000012.12'), ('chr13',
'NC_000013.11'), ('chr14', 'NC_000014.9'), ('chr15',
'NC_000015.10'), ('chr16', 'NC_000016.10'), ('chr17',
'NC_000017.11'), ('chr18', 'NC_000018.10'), ('chr19',
'NC_000019.10'), ('chr20', 'NC_000020.11'), ('chr21',
'NC_000021.9'), ('chr22', 'NC_000022.11'), ('chrX',
'NC_000023.11'), ('chrY', 'NC_000024.10')]
df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
comment='#', low_memory=False)
df = df[df['type'] == 'gene'].copy()
for l in replace_list:
df['chr'] = np.where(df['chr'] == l[1], l[0], df['chr'])
df = df[~df['chr'].str.contains('\\.')]
df['name'] = df['gene_id'].str.split(';', expand=True)[0]
df['name'] = df['name'].str.replace('ID=gene-', '')
df['type'] = df['type'].astype(str)
df_gene = df[df['type'] == 'gene'].copy()
df_gene['gene_start'] = df_gene['start']
df_gene['gene_end'] = df_gene['end']
df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()
df = pd.merge(df, df_gene, how='left', on='name')
df = getAreas(df)
df['start'] = df['gene_start']
df['end'] = df['gene_end']
if file_type == 'REFSEQBED':
my_col = ['chr', 'start', 'end', 'name', 'dot', 'strand', 'start1',
'start2', 'dot2', 'dot3', 'gene_id', 'gene_id2']
df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
comment='#', low_memory=False)
df = df[['chr', 'start', 'end', 'name', 'strand']]
df['FA_start'] = df['start']
df['FA_end'] = df['end']
df['LA_start'] = df['start']
df['LA_end'] = df['end']
df['dot'] = '.'
df['dot2'] = '.'
df['source'] = 'NA'
df['type'] = 'NA'
df['gene_id'] = df['name']
df_plu = df[df['strand'] == '+'].copy()
df_min = df[df['strand'] == '-'].copy()
df_plu, df_plu_inside = removeInside(df_plu)
df_min, df_min_inside = removeInside(df_min)
df_plu.sort_values(by=['chr', 'end'], inplace=True, ascending=False)
df_plu.drop_duplicates(subset=['start', 'chr'], keep='first', inplace=True)
df_min.sort_values(by=['chr', 'start'], inplace=True, ascending=True)
df_min.drop_duplicates(subset=['end', 'chr'], keep='first', inplace=True)
df = pd.concat([df_plu, df_min])
df = df.sort_values(by=['chr', 'end'], ascending=False)
gtf = df[['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',
'dot2', 'gene_id']]
df = df[['chr', 'start', 'end', 'name', 'strand', 'FA_start', 'FA_end',
'LA_start', 'LA_end']]
if NEXTFLOW:
file_in = os.path.basename(file_in)
fout = f'{file_in[:-4]}_flat.txt'
fout2 = f'{file_in[:-4]}_flat.gtf'
fout3 = f'{file_in[:-4]}_flat_CHROMNAMES.txt'
print(f'Outputting flat file {fout}')
df.to_csv(fout, sep='\t', index=None)
gtf.to_csv(fout2, sep='\t', index=None, quoting=csv.QUOTE_NONE, header=None
)
gtf_names = gtf[['chr']].copy()
gtf_names.drop_duplicates(subset=['chr'], keep='first', inplace=True)
gtf_names.to_csv(fout3, sep='\t', index=None)
return df
<|reserved_special_token_0|>
def parse_arguments():
parser = argparse.ArgumentParser(description=
'Flatten gtf or bed to first and last exon file. Options in currently are ENSEMBL, BED'
)
parser.add_argument('--annotation_in', action='store', metavar=
'annotation_in')
parser.add_argument('--file_type', action='store', metavar='file_type',
default='ENSEMBL')
args = parser.parse_args()
return args
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def FivePrimeArea(df):
df = df.sort_values(by=['chr', 'end'], ascending=True)
df['FA_start'] = df['gene_start']
df_exon = df[df['type'] == 'exon'].copy()
df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')
df_exon['FA_end'] = df_exon['end']
df_exon = df_exon[['name', 'FA_end']]
df = pd.merge(df, df_exon, how='left', on='name')
df['FA_length'] = df['FA_end'] - df['FA_start']
df = df.drop_duplicates(subset=['name'], keep='first')
return df
def ThreePrimeArea(df):
df = df.sort_values(by=['chr', 'end'], ascending=False)
df['LA_end'] = df['gene_end']
df_exon = df[df['type'] == 'exon'].copy()
df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')
df_exon['LA_start'] = df_exon['start']
df_exon = df_exon[['name', 'LA_start']]
df = pd.merge(df, df_exon, how='left', on='name')
df['LA_length'] = df['LA_end'] - df['LA_start']
df = df.drop_duplicates(subset=['name'], keep='first')
return df
def getAreas(df):
"""
This function will get the first and last exons for plu and min strand.
Call it area because not necessarily exon.
"""
df_plu = df[df['strand'] == '+']
df_min = df[df['strand'] == '-']
df_plu_FA = FivePrimeArea(df_plu)
df_min_FA = FivePrimeArea(df_min)
df_plu_LA = ThreePrimeArea(df_plu)[['name', 'LA_start', 'LA_end',
'LA_length']]
df_min_LA = ThreePrimeArea(df_min)[['name', 'LA_start', 'LA_end',
'LA_length']]
df_plu = pd.merge(df_plu_FA, df_plu_LA, on='name')
df_min = pd.merge(df_min_FA, df_min_LA, on='name')
df = pd.concat([df_plu, df_min])
return df
def chrDIC(df):
"""This function will take a gtf and return strand specific dictionary of different chrm"""
chr_names = df['chr'].unique().tolist()
d_chr = d_gtf_chr = {chrom: df[df['chr'] == chrom] for chrom in chr_names}
return d_chr
def countInside(df, start, end):
rows_df = df[(start < df['start']) & (df['end'] < end)]
names = rows_df['name'].unique().tolist()
names = ','.join(names)
if len(names) > 0:
return names
else:
return np.nan
def removeInside(df):
d_chr = chrDIC(df)
df['genes_inside'] = df.apply(lambda row: countInside(d_chr[row['chr']],
row['start'], row['end']), axis=1)
df2 = df.dropna(subset=['genes_inside'])
all_names = []
for i in range(len(df2)):
names = df2['genes_inside'].iloc[i]
names = names.split(',')
all_names = all_names + names
inside_genes = list(set(all_names))
l = len(inside_genes)
print(f'Removing {l} genes that are inside other genes')
df_inside = pd.DataFrame(inside_genes, columns=['name'])
df = df[~df['name'].isin(df_inside['name'])].copy()
del df['genes_inside']
return df, df_inside
def flattenGTF(file_in, file_type, NEXTFLOW=True):
if file_type == 'ENSEMBL':
print(f'Flattening ENSEMBL like genome {file_in}')
my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',
'dot2', 'gene_id']
df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
comment='#', low_memory=False)
df['chr'] = df['chr'].astype(str)
df = df[~df['chr'].str.contains('\\.')]
df.sort_values(by=['chr', 'start'], inplace=True, ascending=True)
fout = f'{file_in[:-4]}_sort.gtf'
df.to_csv(fout, sep='\t', index=None, quoting=csv.QUOTE_NONE,
header=None)
df['name'] = df['gene_id'].str.split(';', expand=True)[0]
df['name'] = df['name'].str.replace('gene_id ', '')
df['name'] = df['name'].str.replace('"', '')
df['type'] = df['type'].astype(str)
df_gene = df[df['type'] == 'gene'].copy()
df_gene['gene_start'] = df_gene['start']
df_gene['gene_end'] = df_gene['end']
df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()
df = pd.merge(df, df_gene, how='left', on='name')
df = getAreas(df)
df['start'] = df['gene_start']
df['end'] = df['gene_end']
if file_type == 'BED':
my_col = ['chr', 'start', 'end', 'name', 'strand']
df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
comment='#', low_memory=False)
df['FA_start'] = df['start']
df['FA_end'] = df['end']
df['LA_start'] = df['start']
df['LA_end'] = df['end']
df['dot'] = '.'
df['dot2'] = '.'
df['source'] = 'NA'
df['type'] = 'NA'
df['gene_id'] = df['name']
if file_type == 'REFSEQGFF':
print(f'Flattening REFSEQGFF like genome')
my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',
'dot2', 'gene_id']
replace_list = [('chr1', 'NC_000001.11'), ('chr2', 'NC_000002.12'),
('chr3', 'NC_000003.12'), ('chr4', 'NC_000004.12'), ('chr5',
'NC_000005.10'), ('chr6', 'NC_000006.12'), ('chr7',
'NC_000007.14'), ('chr8', 'NC_000008.11'), ('chr9',
'NC_000009.12'), ('chr10', 'NC_000010.11'), ('chr11',
'NC_000011.10'), ('chr12', 'NC_000012.12'), ('chr13',
'NC_000013.11'), ('chr14', 'NC_000014.9'), ('chr15',
'NC_000015.10'), ('chr16', 'NC_000016.10'), ('chr17',
'NC_000017.11'), ('chr18', 'NC_000018.10'), ('chr19',
'NC_000019.10'), ('chr20', 'NC_000020.11'), ('chr21',
'NC_000021.9'), ('chr22', 'NC_000022.11'), ('chrX',
'NC_000023.11'), ('chrY', 'NC_000024.10')]
df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
comment='#', low_memory=False)
df = df[df['type'] == 'gene'].copy()
for l in replace_list:
df['chr'] = np.where(df['chr'] == l[1], l[0], df['chr'])
df = df[~df['chr'].str.contains('\\.')]
df['name'] = df['gene_id'].str.split(';', expand=True)[0]
df['name'] = df['name'].str.replace('ID=gene-', '')
df['type'] = df['type'].astype(str)
df_gene = df[df['type'] == 'gene'].copy()
df_gene['gene_start'] = df_gene['start']
df_gene['gene_end'] = df_gene['end']
df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()
df = pd.merge(df, df_gene, how='left', on='name')
df = getAreas(df)
df['start'] = df['gene_start']
df['end'] = df['gene_end']
if file_type == 'REFSEQBED':
my_col = ['chr', 'start', 'end', 'name', 'dot', 'strand', 'start1',
'start2', 'dot2', 'dot3', 'gene_id', 'gene_id2']
df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
comment='#', low_memory=False)
df = df[['chr', 'start', 'end', 'name', 'strand']]
df['FA_start'] = df['start']
df['FA_end'] = df['end']
df['LA_start'] = df['start']
df['LA_end'] = df['end']
df['dot'] = '.'
df['dot2'] = '.'
df['source'] = 'NA'
df['type'] = 'NA'
df['gene_id'] = df['name']
df_plu = df[df['strand'] == '+'].copy()
df_min = df[df['strand'] == '-'].copy()
df_plu, df_plu_inside = removeInside(df_plu)
df_min, df_min_inside = removeInside(df_min)
df_plu.sort_values(by=['chr', 'end'], inplace=True, ascending=False)
df_plu.drop_duplicates(subset=['start', 'chr'], keep='first', inplace=True)
df_min.sort_values(by=['chr', 'start'], inplace=True, ascending=True)
df_min.drop_duplicates(subset=['end', 'chr'], keep='first', inplace=True)
df = pd.concat([df_plu, df_min])
df = df.sort_values(by=['chr', 'end'], ascending=False)
gtf = df[['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',
'dot2', 'gene_id']]
df = df[['chr', 'start', 'end', 'name', 'strand', 'FA_start', 'FA_end',
'LA_start', 'LA_end']]
if NEXTFLOW:
file_in = os.path.basename(file_in)
fout = f'{file_in[:-4]}_flat.txt'
fout2 = f'{file_in[:-4]}_flat.gtf'
fout3 = f'{file_in[:-4]}_flat_CHROMNAMES.txt'
print(f'Outputting flat file {fout}')
df.to_csv(fout, sep='\t', index=None)
gtf.to_csv(fout2, sep='\t', index=None, quoting=csv.QUOTE_NONE, header=None
)
gtf_names = gtf[['chr']].copy()
gtf_names.drop_duplicates(subset=['chr'], keep='first', inplace=True)
gtf_names.to_csv(fout3, sep='\t', index=None)
return df
<|reserved_special_token_0|>
def parse_arguments():
parser = argparse.ArgumentParser(description=
'Flatten gtf or bed to first and last exon file. Options in currently are ENSEMBL, BED'
)
parser.add_argument('--annotation_in', action='store', metavar=
'annotation_in')
parser.add_argument('--file_type', action='store', metavar='file_type',
default='ENSEMBL')
args = parser.parse_args()
return args
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def FivePrimeArea(df):
df = df.sort_values(by=['chr', 'end'], ascending=True)
df['FA_start'] = df['gene_start']
df_exon = df[df['type'] == 'exon'].copy()
df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')
df_exon['FA_end'] = df_exon['end']
df_exon = df_exon[['name', 'FA_end']]
df = pd.merge(df, df_exon, how='left', on='name')
df['FA_length'] = df['FA_end'] - df['FA_start']
df = df.drop_duplicates(subset=['name'], keep='first')
return df
def ThreePrimeArea(df):
    """Annotate each gene with its last area: from the start of the
    highest-coordinate exon up to gene_end.

    Expects columns ``chr``, ``end``, ``type``, ``name`` and ``gene_end``;
    returns one row per gene with ``LA_start``, ``LA_end`` and ``LA_length``
    columns added.
    """
    ordered = df.sort_values(by=['chr', 'end'], ascending=False)
    ordered['LA_end'] = ordered['gene_end']
    # After the descending sort, the first 'exon' row per gene is the
    # highest-coordinate exon; its start bounds the last area.
    last_exons = (
        ordered[ordered['type'] == 'exon']
        .copy()
        .drop_duplicates(subset=['name'], keep='first')
    )
    last_exons['LA_start'] = last_exons['start']
    ordered = pd.merge(
        ordered, last_exons[['name', 'LA_start']], how='left', on='name'
    )
    ordered['LA_length'] = ordered['LA_end'] - ordered['LA_start']
    return ordered.drop_duplicates(subset=['name'], keep='first')
def getAreas(df):
    """Attach first-area (FA_*) and last-area (LA_*) columns per gene,
    computed strand by strand.

    Called "area" because the span is not necessarily a single exon.
    Returns the '+'-strand rows followed by the '-'-strand rows.
    """
    halves = []
    for strand in ('+', '-'):
        sub = df[df['strand'] == strand]
        first = FivePrimeArea(sub)
        last = ThreePrimeArea(sub)[
            ['name', 'LA_start', 'LA_end', 'LA_length']
        ]
        halves.append(pd.merge(first, last, on='name'))
    return pd.concat(halves)
def chrDIC(df):
    """Split *df* into per-chromosome sub-frames.

    Returns a dict mapping each distinct value of the ``chr`` column to the
    sub-frame of rows on that chromosome.  (The original docstring said
    "strand specific", but the split is by chromosome only; it also bound an
    unused alias ``d_gtf_chr``, dropped here.)
    """
    return {chrom: df[df['chr'] == chrom] for chrom in df['chr'].unique()}
def countInside(df, start, end):
    """Return a comma-joined string of gene names strictly inside (start, end).

    A row counts as inside when both its own start and end fall strictly
    within the interval.  Returns NaN when no row qualifies.
    """
    contained = df.loc[(df['start'] > start) & (df['end'] < end), 'name']
    joined = ','.join(contained.unique().tolist())
    return joined if joined else np.nan
def removeInside(df):
    """Drop genes that lie entirely inside another gene of the same frame.

    For every row, collects the names of genes strictly contained within its
    [start, end] span on the same chromosome, then filters all such contained
    genes out.  NOTE(review): the input frame gains a transient
    ``genes_inside`` column (callers in this file pass copies, so this is
    currently harmless).

    Returns:
        (filtered_df, df_inside) where ``df_inside`` is a one-column frame
        of the removed gene names (sorted for determinism — the original
        used an unordered ``list(set(...))``).
    """
    d_chr = chrDIC(df)
    df['genes_inside'] = df.apply(
        lambda row: countInside(d_chr[row['chr']], row['start'], row['end']),
        axis=1,
    )
    with_hits = df.dropna(subset=['genes_inside'])
    # Flatten the comma-joined name lists into one de-duplicated collection.
    # (Replaces an index loop with quadratic list concatenation.)
    inside_genes = sorted({
        name
        for joined in with_hits['genes_inside']
        for name in joined.split(',')
    })
    print(f'Removing {len(inside_genes)} genes that are inside other genes')
    df_inside = pd.DataFrame(inside_genes, columns=['name'])
    df = df[~df['name'].isin(df_inside['name'])].copy()
    del df['genes_inside']
    return df, df_inside
def flattenGTF(file_in, file_type, NEXTFLOW=True):
    """Flatten an annotation file to one row per gene with first/last areas.

    Reads the annotation at *file_in* according to *file_type* (one of
    'ENSEMBL', 'BED', 'REFSEQGFF', 'REFSEQBED'), removes genes nested inside
    other genes, de-duplicates overlapping entries per strand, and writes
    three outputs next to the (base)name of the input:
    ``*_flat.txt`` (flat table), ``*_flat.gtf`` and
    ``*_flat_CHROMNAMES.txt`` (distinct chromosome names).
    When NEXTFLOW is True, output paths use only the basename of *file_in*
    (so files land in the working directory).  Returns the flat DataFrame.
    """
    if file_type == 'ENSEMBL':
        print(f'Flattening ENSEMBL like genome {file_in}')
        my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',
            'dot2', 'gene_id']
        df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
            comment='#', low_memory=False)
        df['chr'] = df['chr'].astype(str)
        # Drop patch/scaffold contigs whose names contain a dot.
        df = df[~df['chr'].str.contains('\\.')]
        df.sort_values(by=['chr', 'start'], inplace=True, ascending=True)
        # Side effect: also writes a coordinate-sorted copy of the input GTF.
        fout = f'{file_in[:-4]}_sort.gtf'
        df.to_csv(fout, sep='\t', index=None, quoting=csv.QUOTE_NONE,
            header=None)
        # Extract the bare gene name from the gene_id attribute field.
        df['name'] = df['gene_id'].str.split(';', expand=True)[0]
        df['name'] = df['name'].str.replace('gene_id ', '')
        df['name'] = df['name'].str.replace('"', '')
        df['type'] = df['type'].astype(str)
        # Propagate each gene's full span to all of its feature rows.
        df_gene = df[df['type'] == 'gene'].copy()
        df_gene['gene_start'] = df_gene['start']
        df_gene['gene_end'] = df_gene['end']
        df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()
        df = pd.merge(df, df_gene, how='left', on='name')
        df = getAreas(df)
        df['start'] = df['gene_start']
        df['end'] = df['gene_end']
    if file_type == 'BED':
        # BED input has no exon structure: first/last areas degenerate to
        # the whole feature span, and GTF-only columns get placeholders.
        my_col = ['chr', 'start', 'end', 'name', 'strand']
        df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
            comment='#', low_memory=False)
        df['FA_start'] = df['start']
        df['FA_end'] = df['end']
        df['LA_start'] = df['start']
        df['LA_end'] = df['end']
        df['dot'] = '.'
        df['dot2'] = '.'
        df['source'] = 'NA'
        df['type'] = 'NA'
        df['gene_id'] = df['name']
    if file_type == 'REFSEQGFF':
        print(f'Flattening REFSEQGFF like genome')
        my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',
            'dot2', 'gene_id']
        # Mapping from UCSC-style names to RefSeq accessions (human GRCh38);
        # used below to rename RefSeq chromosomes back to chr1..chrY.
        replace_list = [('chr1', 'NC_000001.11'), ('chr2', 'NC_000002.12'),
            ('chr3', 'NC_000003.12'), ('chr4', 'NC_000004.12'), ('chr5',
            'NC_000005.10'), ('chr6', 'NC_000006.12'), ('chr7',
            'NC_000007.14'), ('chr8', 'NC_000008.11'), ('chr9',
            'NC_000009.12'), ('chr10', 'NC_000010.11'), ('chr11',
            'NC_000011.10'), ('chr12', 'NC_000012.12'), ('chr13',
            'NC_000013.11'), ('chr14', 'NC_000014.9'), ('chr15',
            'NC_000015.10'), ('chr16', 'NC_000016.10'), ('chr17',
            'NC_000017.11'), ('chr18', 'NC_000018.10'), ('chr19',
            'NC_000019.10'), ('chr20', 'NC_000020.11'), ('chr21',
            'NC_000021.9'), ('chr22', 'NC_000022.11'), ('chrX',
            'NC_000023.11'), ('chrY', 'NC_000024.10')]
        df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
            comment='#', low_memory=False)
        # Only gene-level records are kept for this format.
        df = df[df['type'] == 'gene'].copy()
        for l in replace_list:
            df['chr'] = np.where(df['chr'] == l[1], l[0], df['chr'])
        # Any accession not renamed above still contains a dot and is dropped.
        df = df[~df['chr'].str.contains('\\.')]
        df['name'] = df['gene_id'].str.split(';', expand=True)[0]
        df['name'] = df['name'].str.replace('ID=gene-', '')
        df['type'] = df['type'].astype(str)
        df_gene = df[df['type'] == 'gene'].copy()
        df_gene['gene_start'] = df_gene['start']
        df_gene['gene_end'] = df_gene['end']
        df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()
        df = pd.merge(df, df_gene, how='left', on='name')
        # NOTE(review): only gene rows remain here, so getAreas sees no
        # 'exon' rows and FA_end/LA_start come back NaN — confirm intended.
        df = getAreas(df)
        df['start'] = df['gene_start']
        df['end'] = df['gene_end']
    if file_type == 'REFSEQBED':
        # 12-column RefSeq BED; only the first coordinate columns are used,
        # and areas degenerate to the whole feature span as in 'BED'.
        my_col = ['chr', 'start', 'end', 'name', 'dot', 'strand', 'start1',
            'start2', 'dot2', 'dot3', 'gene_id', 'gene_id2']
        df = pd.read_csv(file_in, sep='\t', header=None, names=my_col,
            comment='#', low_memory=False)
        df = df[['chr', 'start', 'end', 'name', 'strand']]
        df['FA_start'] = df['start']
        df['FA_end'] = df['end']
        df['LA_start'] = df['start']
        df['LA_end'] = df['end']
        df['dot'] = '.'
        df['dot2'] = '.'
        df['source'] = 'NA'
        df['type'] = 'NA'
        df['gene_id'] = df['name']
    # Common tail: per strand, drop genes nested inside others, then keep a
    # single gene per shared start ('+') or shared end ('-') coordinate.
    df_plu = df[df['strand'] == '+'].copy()
    df_min = df[df['strand'] == '-'].copy()
    df_plu, df_plu_inside = removeInside(df_plu)
    df_min, df_min_inside = removeInside(df_min)
    df_plu.sort_values(by=['chr', 'end'], inplace=True, ascending=False)
    df_plu.drop_duplicates(subset=['start', 'chr'], keep='first', inplace=True)
    df_min.sort_values(by=['chr', 'start'], inplace=True, ascending=True)
    df_min.drop_duplicates(subset=['end', 'chr'], keep='first', inplace=True)
    df = pd.concat([df_plu, df_min])
    df = df.sort_values(by=['chr', 'end'], ascending=False)
    gtf = df[['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',
        'dot2', 'gene_id']]
    df = df[['chr', 'start', 'end', 'name', 'strand', 'FA_start', 'FA_end',
        'LA_start', 'LA_end']]
    if NEXTFLOW:
        # Write outputs into the working directory (Nextflow staging).
        file_in = os.path.basename(file_in)
    fout = f'{file_in[:-4]}_flat.txt'
    fout2 = f'{file_in[:-4]}_flat.gtf'
    fout3 = f'{file_in[:-4]}_flat_CHROMNAMES.txt'
    print(f'Outputting flat file {fout}')
    df.to_csv(fout, sep='\t', index=None)
    gtf.to_csv(fout2, sep='\t', index=None, quoting=csv.QUOTE_NONE, header=None
        )
    gtf_names = gtf[['chr']].copy()
    gtf_names.drop_duplicates(subset=['chr'], keep='first', inplace=True)
    gtf_names.to_csv(fout3, sep='\t', index=None)
    return df
<|reserved_special_token_0|>
def parse_arguments():
    """Parse CLI options: --annotation_in (input path) and
    --file_type (annotation flavor, default 'ENSEMBL')."""
    cli = argparse.ArgumentParser(description=
        'Flatten gtf or bed to first and last exon file. Options in currently are ENSEMBL, BED'
        )
    cli.add_argument('--annotation_in', action='store',
        metavar='annotation_in')
    cli.add_argument('--file_type', action='store', metavar='file_type',
        default='ENSEMBL')
    return cli.parse_args()
if __name__ == '__main__':
    # Script entry point: read CLI options and flatten the annotation file.
    args = parse_arguments()
    file_in = args.annotation_in
    file_type = args.file_type
    flattenGTF(file_in, file_type)
<|reserved_special_token_1|>
#################################################
### THIS FILE WAS AUTOGENERATED! DO NOT EDIT! ###
#################################################
# file to edit: dev_nb/10_DogcatcherFlatten.ipynb
import pandas as pd
import argparse
import csv
import os
import numpy as np
import string
def FivePrimeArea(df):
df = df.sort_values(by=["chr","end"],ascending=True)
df["FA_start"] = df["gene_start"]
df_exon = df[df["type"]=="exon"].copy()
df_exon = df_exon.drop_duplicates(subset=['name'],keep="first")
df_exon["FA_end"] = df_exon["end"]
df_exon = df_exon[["name","FA_end"]]
df = pd.merge(df,df_exon,how="left",on="name")
df["FA_length"] = df["FA_end"] - df["FA_start"]
df = df.drop_duplicates(subset=['name'],keep="first")
return df
def ThreePrimeArea(df):
df = df.sort_values(by=["chr","end"],ascending=False)
df["LA_end"] = df["gene_end"]
df_exon = df[df["type"]=="exon"].copy()
# Keep first exon
df_exon = df_exon.drop_duplicates(subset=['name'],keep="first")
df_exon["LA_start"] = df_exon["start"]
df_exon = df_exon[["name","LA_start"]]
df = pd.merge(df,df_exon,how="left",on="name")
df["LA_length"] = df["LA_end"] - df["LA_start"]
df = df.drop_duplicates(subset=['name'],keep="first")
return df
def getAreas(df):
"""
This function will get the first and last exons for plu and min strand.
Call it area because not necessarily exon.
"""
df_plu = df[df["strand"]=="+"]
df_min = df[df["strand"]=="-"]
df_plu_FA = FivePrimeArea(df_plu)
df_min_FA = FivePrimeArea(df_min)
df_plu_LA = ThreePrimeArea(df_plu)[["name","LA_start","LA_end","LA_length"]]
df_min_LA = ThreePrimeArea(df_min)[["name","LA_start","LA_end","LA_length"]]
df_plu = pd.merge(df_plu_FA,df_plu_LA,on="name")
df_min = pd.merge(df_min_FA,df_min_LA,on="name")
df = pd.concat([df_plu,df_min])
return df
def chrDIC(df):
"""This function will take a gtf and return strand specific dictionary of different chrm"""
chr_names=df['chr'].unique().tolist()
d_chr = d_gtf_chr = {chrom : df[df["chr"]==chrom] for chrom in chr_names}
return d_chr
def countInside(df, start, end):
rows_df = df[ (start < df["start"]) & (df["end"] < end) ]
names = rows_df['name'].unique().tolist()
names = ",".join(names)
if len(names) >0:
return names
else:
return np.nan
def removeInside(df):
d_chr = chrDIC(df)
df['genes_inside'] = df.apply(lambda row: countInside(d_chr[row['chr']], row["start"], row["end"]), axis=1)
df2 = df.dropna(subset=['genes_inside'])
all_names = []
for i in range(len(df2)):
names = df2["genes_inside"].iloc[i]
names = names.split(",")
all_names = all_names + names
inside_genes = list(set(all_names))
l = len(inside_genes)
print(f"Removing {l} genes that are inside other genes")
df_inside = pd.DataFrame(inside_genes,columns=['name'])
df = df[~df["name"].isin(df_inside["name"])].copy()
del df["genes_inside"]
return df, df_inside
def flattenGTF(file_in,file_type,NEXTFLOW=True):
if file_type == "ENSEMBL":
print(f"Flattening ENSEMBL like genome {file_in}")
my_col = ["chr","source","type","start","end","dot","strand","dot2","gene_id"]
df = pd.read_csv(file_in, sep="\t",header=None,names=my_col, comment="#",low_memory=False)
df["chr"] = df["chr"].astype(str)
df = df[~df["chr"].str.contains("\.") ] # Take out patches
df.sort_values(by=["chr","start"], inplace=True, ascending=True)
fout = f"{file_in[:-4]}_sort.gtf"
df.to_csv(fout,sep="\t", index=None,quoting=csv.QUOTE_NONE, header=None)
df["name"] = df["gene_id"].str.split(';',expand=True)[0]
df["name"] = df["name"].str.replace("gene_id ","")
df["name"] = df["name"].str.replace("\"","")
df["type"] = df["type"].astype(str)
df_gene = df[df["type"]=="gene"].copy()
df_gene["gene_start"] = df_gene["start"]
df_gene["gene_end"] = df_gene["end"]
df_gene = df_gene[["name","gene_start","gene_end"]].copy()
df = pd.merge(df,df_gene,how="left",on="name")
df = getAreas(df)
df["start"] = df["gene_start"]
df["end"] = df["gene_end"]
# df = df[["chr","start","end","strand","name","type"]].copy()
if file_type == "BED":
my_col = ["chr","start","end","name","strand"]
df = pd.read_csv(file_in, sep="\t",header=None,names=my_col, comment="#",low_memory=False)
df["FA_start"] = df["start"]
df["FA_end"] = df["end"]
df["LA_start"] = df["start"]
df["LA_end"] = df["end"]
df["dot"] = "."
df["dot2"] = "."
df["source"] = "NA"
df["type"] = "NA"
df["gene_id"] = df["name"]
if file_type == "REFSEQGFF":
# Chrome numbers are changed. Need to change back to chr1 etc.
# https://www.ncbi.nlm.nih.gov/assembly/GCF_000001405.39#/def_asm_Primary_Assembly
print(f"Flattening REFSEQGFF like genome")
# https://ftp.ncbi.nlm.nih.gov/genomes/refseq/vertebrate_mammalian/Homo_sapiens/reference/
#download this GCF_000001405.39_GRCh38.p13_genomic.gtf.gz
# sort and index in IGV
# NC_000001.11 BestRefSeq gene 11874 14409 . + . gene_id "DDX11L1"; transcript_id ""; db_xref "GeneID:100287102"; db_xref "HGNC:HGNC:37102"; description "DEAD/H-box helicase 11 like 1 (pseudogene)"; gbkey "Gene"; gene "DDX11L1"; gene_biotype "transcribed_pseudogene"; pseudo "true";
my_col = ["chr","source","type","start","end","dot","strand","dot2","gene_id"]
replace_list = [("chr1","NC_000001.11"),
("chr2","NC_000002.12"),
("chr3","NC_000003.12"),
("chr4","NC_000004.12"),
("chr5","NC_000005.10"),
("chr6","NC_000006.12"),
("chr7","NC_000007.14"),
("chr8","NC_000008.11"),
("chr9","NC_000009.12"),
("chr10","NC_000010.11"),
("chr11","NC_000011.10"),
("chr12","NC_000012.12"),
("chr13","NC_000013.11"),
("chr14","NC_000014.9"),
("chr15","NC_000015.10"),
("chr16","NC_000016.10"),
("chr17","NC_000017.11"),
("chr18","NC_000018.10"),
("chr19","NC_000019.10"),
("chr20","NC_000020.11"),
("chr21","NC_000021.9"),
("chr22","NC_000022.11"),
("chrX","NC_000023.11"),
("chrY","NC_000024.10")]
df = pd.read_csv(file_in, sep="\t",header=None,names=my_col, comment="#",low_memory=False)
df = df[df["type"]=="gene"].copy()
# Change NC names to chr
for l in replace_list:
df["chr"] = np.where(df["chr"]==l[1],l[0],df["chr"])
df = df[~df["chr"].str.contains("\.") ] # Take out patches
df["name"] = df["gene_id"].str.split(';',expand=True)[0]
df["name"] = df["name"].str.replace("ID=gene-","")
df["type"] = df["type"].astype(str)
df_gene = df[df["type"]=="gene"].copy()
df_gene["gene_start"] = df_gene["start"]
df_gene["gene_end"] = df_gene["end"]
df_gene = df_gene[["name","gene_start","gene_end"]].copy()
df = pd.merge(df,df_gene,how="left",on="name")
df = getAreas(df)
df["start"] = df["gene_start"]
df["end"] = df["gene_end"]
# df = df[["chr","start","end","strand","name","type"]].copy()
if file_type == "REFSEQBED":
# chr1 11873 14409 NR_046018 0 +
# 14409 14409 0 3 354,109,1189, 0,739,1347,
my_col = ["chr","start","end","name","dot","strand","start1","start2","dot2","dot3","gene_id","gene_id2"]
df = pd.read_csv(file_in, sep="\t",header=None,names=my_col, comment="#",low_memory=False)
df = df[["chr","start","end","name","strand"]]
df["FA_start"] = df["start"]
df["FA_end"] = df["end"]
df["LA_start"] = df["start"]
df["LA_end"] = df["end"]
df["dot"] = "."
df["dot2"] = "."
df["source"] = "NA"
df["type"] = "NA"
df["gene_id"] = df["name"]
df_plu = df[df["strand"]=="+"].copy()
df_min = df[df["strand"]=="-"].copy()
df_plu, df_plu_inside = removeInside(df_plu)
df_min, df_min_inside = removeInside(df_min)
df_plu.sort_values(by=["chr","end"], inplace=True, ascending=False)
df_plu.drop_duplicates(subset=["start","chr"], keep='first', inplace=True)
df_min.sort_values(by=["chr","start"], inplace=True, ascending=True)
df_min.drop_duplicates(subset=["end","chr"], keep='first', inplace=True)
df = pd.concat([df_plu,df_min])
df = df.sort_values(by=["chr","end"],ascending=False)
gtf = df[["chr","source","type","start","end","dot","strand","dot2","gene_id"] ]
df = df[["chr","start","end","name","strand","FA_start","FA_end","LA_start","LA_end"]]
if NEXTFLOW:
file_in = os.path.basename(file_in)
fout = f"{file_in[:-4]}_flat.txt"
fout2 = f"{file_in[:-4]}_flat.gtf"
fout3 = f"{file_in[:-4]}_flat_CHROMNAMES.txt"
print(f"Outputting flat file {fout}")
df.to_csv(fout,sep="\t",index=None)
gtf.to_csv(fout2,sep="\t", index=None,quoting=csv.QUOTE_NONE, header=None)
gtf_names = gtf[["chr"]].copy()
gtf_names.drop_duplicates(subset=["chr"], keep='first', inplace=True)
gtf_names.to_csv(fout3,sep="\t", index=None)
return df
import argparse
def parse_arguments():
    """Parse command-line arguments for the annotation flattener.

    Returns:
        argparse.Namespace with attributes:
            annotation_in: path to the input annotation (gtf/gff/bed) file.
            file_type: annotation flavor string; defaults to "ENSEMBL".
                The flattener also recognizes BED, REFSEQGFF and REFSEQBED.
    """
    parser = argparse.ArgumentParser(description='Flatten gtf or bed to first and last exon file. Options in currently are ENSEMBL, BED')
    # 'store' is argparse's default action, so it is omitted below.
    parser.add_argument('--annotation_in', metavar='annotation_in',
                        help='Path to the annotation file to flatten.')
    parser.add_argument('--file_type', metavar='file_type', default="ENSEMBL",
                        help='Annotation format: ENSEMBL (default), BED, '
                             'REFSEQGFF, or REFSEQBED.')
    args = parser.parse_args()
    return args
if __name__ == "__main__":
    # Command-line entry point: parse the CLI options and flatten the
    # requested annotation file in one step.
    cli_args = parse_arguments()
    flattenGTF(cli_args.annotation_in, cli_args.file_type)
|
flexible
|
{
"blob_id": "5c5922fd3a7a5eec121d94e69bc972089e435175",
"index": 9406,
"step-1": "<mask token>\n\n\ndef FivePrimeArea(df):\n df = df.sort_values(by=['chr', 'end'], ascending=True)\n df['FA_start'] = df['gene_start']\n df_exon = df[df['type'] == 'exon'].copy()\n df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')\n df_exon['FA_end'] = df_exon['end']\n df_exon = df_exon[['name', 'FA_end']]\n df = pd.merge(df, df_exon, how='left', on='name')\n df['FA_length'] = df['FA_end'] - df['FA_start']\n df = df.drop_duplicates(subset=['name'], keep='first')\n return df\n\n\n<mask token>\n\n\ndef getAreas(df):\n \"\"\"\n This function will get the first and last exons for plu and min strand.\n Call it area because not necessarily exon.\n \"\"\"\n df_plu = df[df['strand'] == '+']\n df_min = df[df['strand'] == '-']\n df_plu_FA = FivePrimeArea(df_plu)\n df_min_FA = FivePrimeArea(df_min)\n df_plu_LA = ThreePrimeArea(df_plu)[['name', 'LA_start', 'LA_end',\n 'LA_length']]\n df_min_LA = ThreePrimeArea(df_min)[['name', 'LA_start', 'LA_end',\n 'LA_length']]\n df_plu = pd.merge(df_plu_FA, df_plu_LA, on='name')\n df_min = pd.merge(df_min_FA, df_min_LA, on='name')\n df = pd.concat([df_plu, df_min])\n return df\n\n\ndef chrDIC(df):\n \"\"\"This function will take a gtf and return strand specific dictionary of different chrm\"\"\"\n chr_names = df['chr'].unique().tolist()\n d_chr = d_gtf_chr = {chrom: df[df['chr'] == chrom] for chrom in chr_names}\n return d_chr\n\n\ndef countInside(df, start, end):\n rows_df = df[(start < df['start']) & (df['end'] < end)]\n names = rows_df['name'].unique().tolist()\n names = ','.join(names)\n if len(names) > 0:\n return names\n else:\n return np.nan\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef FivePrimeArea(df):\n df = df.sort_values(by=['chr', 'end'], ascending=True)\n df['FA_start'] = df['gene_start']\n df_exon = df[df['type'] == 'exon'].copy()\n df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')\n df_exon['FA_end'] = df_exon['end']\n df_exon = df_exon[['name', 'FA_end']]\n df = pd.merge(df, df_exon, how='left', on='name')\n df['FA_length'] = df['FA_end'] - df['FA_start']\n df = df.drop_duplicates(subset=['name'], keep='first')\n return df\n\n\n<mask token>\n\n\ndef getAreas(df):\n \"\"\"\n This function will get the first and last exons for plu and min strand.\n Call it area because not necessarily exon.\n \"\"\"\n df_plu = df[df['strand'] == '+']\n df_min = df[df['strand'] == '-']\n df_plu_FA = FivePrimeArea(df_plu)\n df_min_FA = FivePrimeArea(df_min)\n df_plu_LA = ThreePrimeArea(df_plu)[['name', 'LA_start', 'LA_end',\n 'LA_length']]\n df_min_LA = ThreePrimeArea(df_min)[['name', 'LA_start', 'LA_end',\n 'LA_length']]\n df_plu = pd.merge(df_plu_FA, df_plu_LA, on='name')\n df_min = pd.merge(df_min_FA, df_min_LA, on='name')\n df = pd.concat([df_plu, df_min])\n return df\n\n\ndef chrDIC(df):\n \"\"\"This function will take a gtf and return strand specific dictionary of different chrm\"\"\"\n chr_names = df['chr'].unique().tolist()\n d_chr = d_gtf_chr = {chrom: df[df['chr'] == chrom] for chrom in chr_names}\n return d_chr\n\n\ndef countInside(df, start, end):\n rows_df = df[(start < df['start']) & (df['end'] < end)]\n names = rows_df['name'].unique().tolist()\n names = ','.join(names)\n if len(names) > 0:\n return names\n else:\n return np.nan\n\n\n<mask token>\n\n\ndef flattenGTF(file_in, file_type, NEXTFLOW=True):\n if file_type == 'ENSEMBL':\n print(f'Flattening ENSEMBL like genome {file_in}')\n my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',\n 'dot2', 'gene_id']\n df = pd.read_csv(file_in, sep='\\t', header=None, names=my_col,\n comment='#', low_memory=False)\n df['chr'] = 
df['chr'].astype(str)\n df = df[~df['chr'].str.contains('\\\\.')]\n df.sort_values(by=['chr', 'start'], inplace=True, ascending=True)\n fout = f'{file_in[:-4]}_sort.gtf'\n df.to_csv(fout, sep='\\t', index=None, quoting=csv.QUOTE_NONE,\n header=None)\n df['name'] = df['gene_id'].str.split(';', expand=True)[0]\n df['name'] = df['name'].str.replace('gene_id ', '')\n df['name'] = df['name'].str.replace('\"', '')\n df['type'] = df['type'].astype(str)\n df_gene = df[df['type'] == 'gene'].copy()\n df_gene['gene_start'] = df_gene['start']\n df_gene['gene_end'] = df_gene['end']\n df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()\n df = pd.merge(df, df_gene, how='left', on='name')\n df = getAreas(df)\n df['start'] = df['gene_start']\n df['end'] = df['gene_end']\n if file_type == 'BED':\n my_col = ['chr', 'start', 'end', 'name', 'strand']\n df = pd.read_csv(file_in, sep='\\t', header=None, names=my_col,\n comment='#', low_memory=False)\n df['FA_start'] = df['start']\n df['FA_end'] = df['end']\n df['LA_start'] = df['start']\n df['LA_end'] = df['end']\n df['dot'] = '.'\n df['dot2'] = '.'\n df['source'] = 'NA'\n df['type'] = 'NA'\n df['gene_id'] = df['name']\n if file_type == 'REFSEQGFF':\n print(f'Flattening REFSEQGFF like genome')\n my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',\n 'dot2', 'gene_id']\n replace_list = [('chr1', 'NC_000001.11'), ('chr2', 'NC_000002.12'),\n ('chr3', 'NC_000003.12'), ('chr4', 'NC_000004.12'), ('chr5',\n 'NC_000005.10'), ('chr6', 'NC_000006.12'), ('chr7',\n 'NC_000007.14'), ('chr8', 'NC_000008.11'), ('chr9',\n 'NC_000009.12'), ('chr10', 'NC_000010.11'), ('chr11',\n 'NC_000011.10'), ('chr12', 'NC_000012.12'), ('chr13',\n 'NC_000013.11'), ('chr14', 'NC_000014.9'), ('chr15',\n 'NC_000015.10'), ('chr16', 'NC_000016.10'), ('chr17',\n 'NC_000017.11'), ('chr18', 'NC_000018.10'), ('chr19',\n 'NC_000019.10'), ('chr20', 'NC_000020.11'), ('chr21',\n 'NC_000021.9'), ('chr22', 'NC_000022.11'), ('chrX',\n 'NC_000023.11'), ('chrY', 
'NC_000024.10')]\n df = pd.read_csv(file_in, sep='\\t', header=None, names=my_col,\n comment='#', low_memory=False)\n df = df[df['type'] == 'gene'].copy()\n for l in replace_list:\n df['chr'] = np.where(df['chr'] == l[1], l[0], df['chr'])\n df = df[~df['chr'].str.contains('\\\\.')]\n df['name'] = df['gene_id'].str.split(';', expand=True)[0]\n df['name'] = df['name'].str.replace('ID=gene-', '')\n df['type'] = df['type'].astype(str)\n df_gene = df[df['type'] == 'gene'].copy()\n df_gene['gene_start'] = df_gene['start']\n df_gene['gene_end'] = df_gene['end']\n df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()\n df = pd.merge(df, df_gene, how='left', on='name')\n df = getAreas(df)\n df['start'] = df['gene_start']\n df['end'] = df['gene_end']\n if file_type == 'REFSEQBED':\n my_col = ['chr', 'start', 'end', 'name', 'dot', 'strand', 'start1',\n 'start2', 'dot2', 'dot3', 'gene_id', 'gene_id2']\n df = pd.read_csv(file_in, sep='\\t', header=None, names=my_col,\n comment='#', low_memory=False)\n df = df[['chr', 'start', 'end', 'name', 'strand']]\n df['FA_start'] = df['start']\n df['FA_end'] = df['end']\n df['LA_start'] = df['start']\n df['LA_end'] = df['end']\n df['dot'] = '.'\n df['dot2'] = '.'\n df['source'] = 'NA'\n df['type'] = 'NA'\n df['gene_id'] = df['name']\n df_plu = df[df['strand'] == '+'].copy()\n df_min = df[df['strand'] == '-'].copy()\n df_plu, df_plu_inside = removeInside(df_plu)\n df_min, df_min_inside = removeInside(df_min)\n df_plu.sort_values(by=['chr', 'end'], inplace=True, ascending=False)\n df_plu.drop_duplicates(subset=['start', 'chr'], keep='first', inplace=True)\n df_min.sort_values(by=['chr', 'start'], inplace=True, ascending=True)\n df_min.drop_duplicates(subset=['end', 'chr'], keep='first', inplace=True)\n df = pd.concat([df_plu, df_min])\n df = df.sort_values(by=['chr', 'end'], ascending=False)\n gtf = df[['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',\n 'dot2', 'gene_id']]\n df = df[['chr', 'start', 'end', 'name', 'strand', 
'FA_start', 'FA_end',\n 'LA_start', 'LA_end']]\n if NEXTFLOW:\n file_in = os.path.basename(file_in)\n fout = f'{file_in[:-4]}_flat.txt'\n fout2 = f'{file_in[:-4]}_flat.gtf'\n fout3 = f'{file_in[:-4]}_flat_CHROMNAMES.txt'\n print(f'Outputting flat file {fout}')\n df.to_csv(fout, sep='\\t', index=None)\n gtf.to_csv(fout2, sep='\\t', index=None, quoting=csv.QUOTE_NONE, header=None\n )\n gtf_names = gtf[['chr']].copy()\n gtf_names.drop_duplicates(subset=['chr'], keep='first', inplace=True)\n gtf_names.to_csv(fout3, sep='\\t', index=None)\n return df\n\n\n<mask token>\n\n\ndef parse_arguments():\n parser = argparse.ArgumentParser(description=\n 'Flatten gtf or bed to first and last exon file. Options in currently are ENSEMBL, BED'\n )\n parser.add_argument('--annotation_in', action='store', metavar=\n 'annotation_in')\n parser.add_argument('--file_type', action='store', metavar='file_type',\n default='ENSEMBL')\n args = parser.parse_args()\n return args\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef FivePrimeArea(df):\n df = df.sort_values(by=['chr', 'end'], ascending=True)\n df['FA_start'] = df['gene_start']\n df_exon = df[df['type'] == 'exon'].copy()\n df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')\n df_exon['FA_end'] = df_exon['end']\n df_exon = df_exon[['name', 'FA_end']]\n df = pd.merge(df, df_exon, how='left', on='name')\n df['FA_length'] = df['FA_end'] - df['FA_start']\n df = df.drop_duplicates(subset=['name'], keep='first')\n return df\n\n\ndef ThreePrimeArea(df):\n df = df.sort_values(by=['chr', 'end'], ascending=False)\n df['LA_end'] = df['gene_end']\n df_exon = df[df['type'] == 'exon'].copy()\n df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')\n df_exon['LA_start'] = df_exon['start']\n df_exon = df_exon[['name', 'LA_start']]\n df = pd.merge(df, df_exon, how='left', on='name')\n df['LA_length'] = df['LA_end'] - df['LA_start']\n df = df.drop_duplicates(subset=['name'], keep='first')\n return df\n\n\ndef getAreas(df):\n \"\"\"\n This function will get the first and last exons for plu and min strand.\n Call it area because not necessarily exon.\n \"\"\"\n df_plu = df[df['strand'] == '+']\n df_min = df[df['strand'] == '-']\n df_plu_FA = FivePrimeArea(df_plu)\n df_min_FA = FivePrimeArea(df_min)\n df_plu_LA = ThreePrimeArea(df_plu)[['name', 'LA_start', 'LA_end',\n 'LA_length']]\n df_min_LA = ThreePrimeArea(df_min)[['name', 'LA_start', 'LA_end',\n 'LA_length']]\n df_plu = pd.merge(df_plu_FA, df_plu_LA, on='name')\n df_min = pd.merge(df_min_FA, df_min_LA, on='name')\n df = pd.concat([df_plu, df_min])\n return df\n\n\ndef chrDIC(df):\n \"\"\"This function will take a gtf and return strand specific dictionary of different chrm\"\"\"\n chr_names = df['chr'].unique().tolist()\n d_chr = d_gtf_chr = {chrom: df[df['chr'] == chrom] for chrom in chr_names}\n return d_chr\n\n\ndef countInside(df, start, end):\n rows_df = df[(start < df['start']) & (df['end'] < end)]\n names = 
rows_df['name'].unique().tolist()\n names = ','.join(names)\n if len(names) > 0:\n return names\n else:\n return np.nan\n\n\ndef removeInside(df):\n d_chr = chrDIC(df)\n df['genes_inside'] = df.apply(lambda row: countInside(d_chr[row['chr']],\n row['start'], row['end']), axis=1)\n df2 = df.dropna(subset=['genes_inside'])\n all_names = []\n for i in range(len(df2)):\n names = df2['genes_inside'].iloc[i]\n names = names.split(',')\n all_names = all_names + names\n inside_genes = list(set(all_names))\n l = len(inside_genes)\n print(f'Removing {l} genes that are inside other genes')\n df_inside = pd.DataFrame(inside_genes, columns=['name'])\n df = df[~df['name'].isin(df_inside['name'])].copy()\n del df['genes_inside']\n return df, df_inside\n\n\ndef flattenGTF(file_in, file_type, NEXTFLOW=True):\n if file_type == 'ENSEMBL':\n print(f'Flattening ENSEMBL like genome {file_in}')\n my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',\n 'dot2', 'gene_id']\n df = pd.read_csv(file_in, sep='\\t', header=None, names=my_col,\n comment='#', low_memory=False)\n df['chr'] = df['chr'].astype(str)\n df = df[~df['chr'].str.contains('\\\\.')]\n df.sort_values(by=['chr', 'start'], inplace=True, ascending=True)\n fout = f'{file_in[:-4]}_sort.gtf'\n df.to_csv(fout, sep='\\t', index=None, quoting=csv.QUOTE_NONE,\n header=None)\n df['name'] = df['gene_id'].str.split(';', expand=True)[0]\n df['name'] = df['name'].str.replace('gene_id ', '')\n df['name'] = df['name'].str.replace('\"', '')\n df['type'] = df['type'].astype(str)\n df_gene = df[df['type'] == 'gene'].copy()\n df_gene['gene_start'] = df_gene['start']\n df_gene['gene_end'] = df_gene['end']\n df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()\n df = pd.merge(df, df_gene, how='left', on='name')\n df = getAreas(df)\n df['start'] = df['gene_start']\n df['end'] = df['gene_end']\n if file_type == 'BED':\n my_col = ['chr', 'start', 'end', 'name', 'strand']\n df = pd.read_csv(file_in, sep='\\t', header=None, 
names=my_col,\n comment='#', low_memory=False)\n df['FA_start'] = df['start']\n df['FA_end'] = df['end']\n df['LA_start'] = df['start']\n df['LA_end'] = df['end']\n df['dot'] = '.'\n df['dot2'] = '.'\n df['source'] = 'NA'\n df['type'] = 'NA'\n df['gene_id'] = df['name']\n if file_type == 'REFSEQGFF':\n print(f'Flattening REFSEQGFF like genome')\n my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',\n 'dot2', 'gene_id']\n replace_list = [('chr1', 'NC_000001.11'), ('chr2', 'NC_000002.12'),\n ('chr3', 'NC_000003.12'), ('chr4', 'NC_000004.12'), ('chr5',\n 'NC_000005.10'), ('chr6', 'NC_000006.12'), ('chr7',\n 'NC_000007.14'), ('chr8', 'NC_000008.11'), ('chr9',\n 'NC_000009.12'), ('chr10', 'NC_000010.11'), ('chr11',\n 'NC_000011.10'), ('chr12', 'NC_000012.12'), ('chr13',\n 'NC_000013.11'), ('chr14', 'NC_000014.9'), ('chr15',\n 'NC_000015.10'), ('chr16', 'NC_000016.10'), ('chr17',\n 'NC_000017.11'), ('chr18', 'NC_000018.10'), ('chr19',\n 'NC_000019.10'), ('chr20', 'NC_000020.11'), ('chr21',\n 'NC_000021.9'), ('chr22', 'NC_000022.11'), ('chrX',\n 'NC_000023.11'), ('chrY', 'NC_000024.10')]\n df = pd.read_csv(file_in, sep='\\t', header=None, names=my_col,\n comment='#', low_memory=False)\n df = df[df['type'] == 'gene'].copy()\n for l in replace_list:\n df['chr'] = np.where(df['chr'] == l[1], l[0], df['chr'])\n df = df[~df['chr'].str.contains('\\\\.')]\n df['name'] = df['gene_id'].str.split(';', expand=True)[0]\n df['name'] = df['name'].str.replace('ID=gene-', '')\n df['type'] = df['type'].astype(str)\n df_gene = df[df['type'] == 'gene'].copy()\n df_gene['gene_start'] = df_gene['start']\n df_gene['gene_end'] = df_gene['end']\n df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()\n df = pd.merge(df, df_gene, how='left', on='name')\n df = getAreas(df)\n df['start'] = df['gene_start']\n df['end'] = df['gene_end']\n if file_type == 'REFSEQBED':\n my_col = ['chr', 'start', 'end', 'name', 'dot', 'strand', 'start1',\n 'start2', 'dot2', 'dot3', 'gene_id', 
'gene_id2']\n df = pd.read_csv(file_in, sep='\\t', header=None, names=my_col,\n comment='#', low_memory=False)\n df = df[['chr', 'start', 'end', 'name', 'strand']]\n df['FA_start'] = df['start']\n df['FA_end'] = df['end']\n df['LA_start'] = df['start']\n df['LA_end'] = df['end']\n df['dot'] = '.'\n df['dot2'] = '.'\n df['source'] = 'NA'\n df['type'] = 'NA'\n df['gene_id'] = df['name']\n df_plu = df[df['strand'] == '+'].copy()\n df_min = df[df['strand'] == '-'].copy()\n df_plu, df_plu_inside = removeInside(df_plu)\n df_min, df_min_inside = removeInside(df_min)\n df_plu.sort_values(by=['chr', 'end'], inplace=True, ascending=False)\n df_plu.drop_duplicates(subset=['start', 'chr'], keep='first', inplace=True)\n df_min.sort_values(by=['chr', 'start'], inplace=True, ascending=True)\n df_min.drop_duplicates(subset=['end', 'chr'], keep='first', inplace=True)\n df = pd.concat([df_plu, df_min])\n df = df.sort_values(by=['chr', 'end'], ascending=False)\n gtf = df[['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',\n 'dot2', 'gene_id']]\n df = df[['chr', 'start', 'end', 'name', 'strand', 'FA_start', 'FA_end',\n 'LA_start', 'LA_end']]\n if NEXTFLOW:\n file_in = os.path.basename(file_in)\n fout = f'{file_in[:-4]}_flat.txt'\n fout2 = f'{file_in[:-4]}_flat.gtf'\n fout3 = f'{file_in[:-4]}_flat_CHROMNAMES.txt'\n print(f'Outputting flat file {fout}')\n df.to_csv(fout, sep='\\t', index=None)\n gtf.to_csv(fout2, sep='\\t', index=None, quoting=csv.QUOTE_NONE, header=None\n )\n gtf_names = gtf[['chr']].copy()\n gtf_names.drop_duplicates(subset=['chr'], keep='first', inplace=True)\n gtf_names.to_csv(fout3, sep='\\t', index=None)\n return df\n\n\n<mask token>\n\n\ndef parse_arguments():\n parser = argparse.ArgumentParser(description=\n 'Flatten gtf or bed to first and last exon file. 
Options in currently are ENSEMBL, BED'\n )\n parser.add_argument('--annotation_in', action='store', metavar=\n 'annotation_in')\n parser.add_argument('--file_type', action='store', metavar='file_type',\n default='ENSEMBL')\n args = parser.parse_args()\n return args\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef FivePrimeArea(df):\n df = df.sort_values(by=['chr', 'end'], ascending=True)\n df['FA_start'] = df['gene_start']\n df_exon = df[df['type'] == 'exon'].copy()\n df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')\n df_exon['FA_end'] = df_exon['end']\n df_exon = df_exon[['name', 'FA_end']]\n df = pd.merge(df, df_exon, how='left', on='name')\n df['FA_length'] = df['FA_end'] - df['FA_start']\n df = df.drop_duplicates(subset=['name'], keep='first')\n return df\n\n\ndef ThreePrimeArea(df):\n df = df.sort_values(by=['chr', 'end'], ascending=False)\n df['LA_end'] = df['gene_end']\n df_exon = df[df['type'] == 'exon'].copy()\n df_exon = df_exon.drop_duplicates(subset=['name'], keep='first')\n df_exon['LA_start'] = df_exon['start']\n df_exon = df_exon[['name', 'LA_start']]\n df = pd.merge(df, df_exon, how='left', on='name')\n df['LA_length'] = df['LA_end'] - df['LA_start']\n df = df.drop_duplicates(subset=['name'], keep='first')\n return df\n\n\ndef getAreas(df):\n \"\"\"\n This function will get the first and last exons for plu and min strand.\n Call it area because not necessarily exon.\n \"\"\"\n df_plu = df[df['strand'] == '+']\n df_min = df[df['strand'] == '-']\n df_plu_FA = FivePrimeArea(df_plu)\n df_min_FA = FivePrimeArea(df_min)\n df_plu_LA = ThreePrimeArea(df_plu)[['name', 'LA_start', 'LA_end',\n 'LA_length']]\n df_min_LA = ThreePrimeArea(df_min)[['name', 'LA_start', 'LA_end',\n 'LA_length']]\n df_plu = pd.merge(df_plu_FA, df_plu_LA, on='name')\n df_min = pd.merge(df_min_FA, df_min_LA, on='name')\n df = pd.concat([df_plu, df_min])\n return df\n\n\ndef chrDIC(df):\n \"\"\"This function will take a gtf and return strand specific dictionary of different chrm\"\"\"\n chr_names = df['chr'].unique().tolist()\n d_chr = d_gtf_chr = {chrom: df[df['chr'] == chrom] for chrom in chr_names}\n return d_chr\n\n\ndef countInside(df, start, end):\n rows_df = df[(start < df['start']) & (df['end'] < end)]\n names = 
rows_df['name'].unique().tolist()\n names = ','.join(names)\n if len(names) > 0:\n return names\n else:\n return np.nan\n\n\ndef removeInside(df):\n d_chr = chrDIC(df)\n df['genes_inside'] = df.apply(lambda row: countInside(d_chr[row['chr']],\n row['start'], row['end']), axis=1)\n df2 = df.dropna(subset=['genes_inside'])\n all_names = []\n for i in range(len(df2)):\n names = df2['genes_inside'].iloc[i]\n names = names.split(',')\n all_names = all_names + names\n inside_genes = list(set(all_names))\n l = len(inside_genes)\n print(f'Removing {l} genes that are inside other genes')\n df_inside = pd.DataFrame(inside_genes, columns=['name'])\n df = df[~df['name'].isin(df_inside['name'])].copy()\n del df['genes_inside']\n return df, df_inside\n\n\ndef flattenGTF(file_in, file_type, NEXTFLOW=True):\n if file_type == 'ENSEMBL':\n print(f'Flattening ENSEMBL like genome {file_in}')\n my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',\n 'dot2', 'gene_id']\n df = pd.read_csv(file_in, sep='\\t', header=None, names=my_col,\n comment='#', low_memory=False)\n df['chr'] = df['chr'].astype(str)\n df = df[~df['chr'].str.contains('\\\\.')]\n df.sort_values(by=['chr', 'start'], inplace=True, ascending=True)\n fout = f'{file_in[:-4]}_sort.gtf'\n df.to_csv(fout, sep='\\t', index=None, quoting=csv.QUOTE_NONE,\n header=None)\n df['name'] = df['gene_id'].str.split(';', expand=True)[0]\n df['name'] = df['name'].str.replace('gene_id ', '')\n df['name'] = df['name'].str.replace('\"', '')\n df['type'] = df['type'].astype(str)\n df_gene = df[df['type'] == 'gene'].copy()\n df_gene['gene_start'] = df_gene['start']\n df_gene['gene_end'] = df_gene['end']\n df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()\n df = pd.merge(df, df_gene, how='left', on='name')\n df = getAreas(df)\n df['start'] = df['gene_start']\n df['end'] = df['gene_end']\n if file_type == 'BED':\n my_col = ['chr', 'start', 'end', 'name', 'strand']\n df = pd.read_csv(file_in, sep='\\t', header=None, 
names=my_col,\n comment='#', low_memory=False)\n df['FA_start'] = df['start']\n df['FA_end'] = df['end']\n df['LA_start'] = df['start']\n df['LA_end'] = df['end']\n df['dot'] = '.'\n df['dot2'] = '.'\n df['source'] = 'NA'\n df['type'] = 'NA'\n df['gene_id'] = df['name']\n if file_type == 'REFSEQGFF':\n print(f'Flattening REFSEQGFF like genome')\n my_col = ['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',\n 'dot2', 'gene_id']\n replace_list = [('chr1', 'NC_000001.11'), ('chr2', 'NC_000002.12'),\n ('chr3', 'NC_000003.12'), ('chr4', 'NC_000004.12'), ('chr5',\n 'NC_000005.10'), ('chr6', 'NC_000006.12'), ('chr7',\n 'NC_000007.14'), ('chr8', 'NC_000008.11'), ('chr9',\n 'NC_000009.12'), ('chr10', 'NC_000010.11'), ('chr11',\n 'NC_000011.10'), ('chr12', 'NC_000012.12'), ('chr13',\n 'NC_000013.11'), ('chr14', 'NC_000014.9'), ('chr15',\n 'NC_000015.10'), ('chr16', 'NC_000016.10'), ('chr17',\n 'NC_000017.11'), ('chr18', 'NC_000018.10'), ('chr19',\n 'NC_000019.10'), ('chr20', 'NC_000020.11'), ('chr21',\n 'NC_000021.9'), ('chr22', 'NC_000022.11'), ('chrX',\n 'NC_000023.11'), ('chrY', 'NC_000024.10')]\n df = pd.read_csv(file_in, sep='\\t', header=None, names=my_col,\n comment='#', low_memory=False)\n df = df[df['type'] == 'gene'].copy()\n for l in replace_list:\n df['chr'] = np.where(df['chr'] == l[1], l[0], df['chr'])\n df = df[~df['chr'].str.contains('\\\\.')]\n df['name'] = df['gene_id'].str.split(';', expand=True)[0]\n df['name'] = df['name'].str.replace('ID=gene-', '')\n df['type'] = df['type'].astype(str)\n df_gene = df[df['type'] == 'gene'].copy()\n df_gene['gene_start'] = df_gene['start']\n df_gene['gene_end'] = df_gene['end']\n df_gene = df_gene[['name', 'gene_start', 'gene_end']].copy()\n df = pd.merge(df, df_gene, how='left', on='name')\n df = getAreas(df)\n df['start'] = df['gene_start']\n df['end'] = df['gene_end']\n if file_type == 'REFSEQBED':\n my_col = ['chr', 'start', 'end', 'name', 'dot', 'strand', 'start1',\n 'start2', 'dot2', 'dot3', 'gene_id', 
'gene_id2']\n df = pd.read_csv(file_in, sep='\\t', header=None, names=my_col,\n comment='#', low_memory=False)\n df = df[['chr', 'start', 'end', 'name', 'strand']]\n df['FA_start'] = df['start']\n df['FA_end'] = df['end']\n df['LA_start'] = df['start']\n df['LA_end'] = df['end']\n df['dot'] = '.'\n df['dot2'] = '.'\n df['source'] = 'NA'\n df['type'] = 'NA'\n df['gene_id'] = df['name']\n df_plu = df[df['strand'] == '+'].copy()\n df_min = df[df['strand'] == '-'].copy()\n df_plu, df_plu_inside = removeInside(df_plu)\n df_min, df_min_inside = removeInside(df_min)\n df_plu.sort_values(by=['chr', 'end'], inplace=True, ascending=False)\n df_plu.drop_duplicates(subset=['start', 'chr'], keep='first', inplace=True)\n df_min.sort_values(by=['chr', 'start'], inplace=True, ascending=True)\n df_min.drop_duplicates(subset=['end', 'chr'], keep='first', inplace=True)\n df = pd.concat([df_plu, df_min])\n df = df.sort_values(by=['chr', 'end'], ascending=False)\n gtf = df[['chr', 'source', 'type', 'start', 'end', 'dot', 'strand',\n 'dot2', 'gene_id']]\n df = df[['chr', 'start', 'end', 'name', 'strand', 'FA_start', 'FA_end',\n 'LA_start', 'LA_end']]\n if NEXTFLOW:\n file_in = os.path.basename(file_in)\n fout = f'{file_in[:-4]}_flat.txt'\n fout2 = f'{file_in[:-4]}_flat.gtf'\n fout3 = f'{file_in[:-4]}_flat_CHROMNAMES.txt'\n print(f'Outputting flat file {fout}')\n df.to_csv(fout, sep='\\t', index=None)\n gtf.to_csv(fout2, sep='\\t', index=None, quoting=csv.QUOTE_NONE, header=None\n )\n gtf_names = gtf[['chr']].copy()\n gtf_names.drop_duplicates(subset=['chr'], keep='first', inplace=True)\n gtf_names.to_csv(fout3, sep='\\t', index=None)\n return df\n\n\n<mask token>\n\n\ndef parse_arguments():\n parser = argparse.ArgumentParser(description=\n 'Flatten gtf or bed to first and last exon file. 
Options in currently are ENSEMBL, BED'\n )\n parser.add_argument('--annotation_in', action='store', metavar=\n 'annotation_in')\n parser.add_argument('--file_type', action='store', metavar='file_type',\n default='ENSEMBL')\n args = parser.parse_args()\n return args\n\n\nif __name__ == '__main__':\n args = parse_arguments()\n file_in = args.annotation_in\n file_type = args.file_type\n flattenGTF(file_in, file_type)\n",
"step-5": "\n#################################################\n### THIS FILE WAS AUTOGENERATED! DO NOT EDIT! ###\n#################################################\n# file to edit: dev_nb/10_DogcatcherFlatten.ipynb\nimport pandas as pd\nimport argparse\nimport csv\nimport os\n\nimport numpy as np\nimport string\n\ndef FivePrimeArea(df):\n df = df.sort_values(by=[\"chr\",\"end\"],ascending=True)\n df[\"FA_start\"] = df[\"gene_start\"]\n df_exon = df[df[\"type\"]==\"exon\"].copy()\n df_exon = df_exon.drop_duplicates(subset=['name'],keep=\"first\")\n df_exon[\"FA_end\"] = df_exon[\"end\"]\n df_exon = df_exon[[\"name\",\"FA_end\"]]\n df = pd.merge(df,df_exon,how=\"left\",on=\"name\")\n df[\"FA_length\"] = df[\"FA_end\"] - df[\"FA_start\"]\n df = df.drop_duplicates(subset=['name'],keep=\"first\")\n return df\n\n\ndef ThreePrimeArea(df):\n df = df.sort_values(by=[\"chr\",\"end\"],ascending=False)\n df[\"LA_end\"] = df[\"gene_end\"]\n df_exon = df[df[\"type\"]==\"exon\"].copy()\n # Keep first exon\n df_exon = df_exon.drop_duplicates(subset=['name'],keep=\"first\")\n df_exon[\"LA_start\"] = df_exon[\"start\"]\n df_exon = df_exon[[\"name\",\"LA_start\"]]\n df = pd.merge(df,df_exon,how=\"left\",on=\"name\")\n df[\"LA_length\"] = df[\"LA_end\"] - df[\"LA_start\"]\n df = df.drop_duplicates(subset=['name'],keep=\"first\")\n return df\n\n\ndef getAreas(df):\n \"\"\"\n This function will get the first and last exons for plu and min strand.\n Call it area because not necessarily exon.\n \"\"\"\n\n df_plu = df[df[\"strand\"]==\"+\"]\n df_min = df[df[\"strand\"]==\"-\"]\n df_plu_FA = FivePrimeArea(df_plu)\n df_min_FA = FivePrimeArea(df_min)\n df_plu_LA = ThreePrimeArea(df_plu)[[\"name\",\"LA_start\",\"LA_end\",\"LA_length\"]]\n df_min_LA = ThreePrimeArea(df_min)[[\"name\",\"LA_start\",\"LA_end\",\"LA_length\"]]\n df_plu = pd.merge(df_plu_FA,df_plu_LA,on=\"name\")\n df_min = pd.merge(df_min_FA,df_min_LA,on=\"name\")\n df = pd.concat([df_plu,df_min])\n return df\n\n\ndef 
chrDIC(df):\n \"\"\"This function will take a gtf and return strand specific dictionary of different chrm\"\"\"\n chr_names=df['chr'].unique().tolist()\n d_chr = d_gtf_chr = {chrom : df[df[\"chr\"]==chrom] for chrom in chr_names}\n return d_chr\n\ndef countInside(df, start, end):\n rows_df = df[ (start < df[\"start\"]) & (df[\"end\"] < end) ]\n names = rows_df['name'].unique().tolist()\n names = \",\".join(names)\n if len(names) >0:\n return names\n else:\n return np.nan\n\ndef removeInside(df):\n d_chr = chrDIC(df)\n\n df['genes_inside'] = df.apply(lambda row: countInside(d_chr[row['chr']], row[\"start\"], row[\"end\"]), axis=1)\n df2 = df.dropna(subset=['genes_inside'])\n all_names = []\n for i in range(len(df2)):\n names = df2[\"genes_inside\"].iloc[i]\n names = names.split(\",\")\n all_names = all_names + names\n\n inside_genes = list(set(all_names))\n l = len(inside_genes)\n print(f\"Removing {l} genes that are inside other genes\")\n\n df_inside = pd.DataFrame(inside_genes,columns=['name'])\n df = df[~df[\"name\"].isin(df_inside[\"name\"])].copy()\n del df[\"genes_inside\"]\n\n return df, df_inside\n\ndef flattenGTF(file_in,file_type,NEXTFLOW=True):\n if file_type == \"ENSEMBL\":\n print(f\"Flattening ENSEMBL like genome {file_in}\")\n my_col = [\"chr\",\"source\",\"type\",\"start\",\"end\",\"dot\",\"strand\",\"dot2\",\"gene_id\"]\n\n df = pd.read_csv(file_in, sep=\"\\t\",header=None,names=my_col, comment=\"#\",low_memory=False)\n\n df[\"chr\"] = df[\"chr\"].astype(str)\n df = df[~df[\"chr\"].str.contains(\"\\.\") ] # Take out patches\n\n df.sort_values(by=[\"chr\",\"start\"], inplace=True, ascending=True)\n fout = f\"{file_in[:-4]}_sort.gtf\"\n df.to_csv(fout,sep=\"\\t\", index=None,quoting=csv.QUOTE_NONE, header=None)\n\n\n df[\"name\"] = df[\"gene_id\"].str.split(';',expand=True)[0]\n df[\"name\"] = df[\"name\"].str.replace(\"gene_id \",\"\")\n df[\"name\"] = df[\"name\"].str.replace(\"\\\"\",\"\")\n\n df[\"type\"] = df[\"type\"].astype(str)\n\n df_gene = 
df[df[\"type\"]==\"gene\"].copy()\n df_gene[\"gene_start\"] = df_gene[\"start\"]\n df_gene[\"gene_end\"] = df_gene[\"end\"]\n\n df_gene = df_gene[[\"name\",\"gene_start\",\"gene_end\"]].copy()\n df = pd.merge(df,df_gene,how=\"left\",on=\"name\")\n df = getAreas(df)\n df[\"start\"] = df[\"gene_start\"]\n df[\"end\"] = df[\"gene_end\"]\n# df = df[[\"chr\",\"start\",\"end\",\"strand\",\"name\",\"type\"]].copy()\n\n\n if file_type == \"BED\":\n my_col = [\"chr\",\"start\",\"end\",\"name\",\"strand\"]\n df = pd.read_csv(file_in, sep=\"\\t\",header=None,names=my_col, comment=\"#\",low_memory=False)\n df[\"FA_start\"] = df[\"start\"]\n df[\"FA_end\"] = df[\"end\"]\n df[\"LA_start\"] = df[\"start\"]\n df[\"LA_end\"] = df[\"end\"]\n df[\"dot\"] = \".\"\n df[\"dot2\"] = \".\"\n df[\"source\"] = \"NA\"\n df[\"type\"] = \"NA\"\n df[\"gene_id\"] = df[\"name\"]\n\n\n\n\n if file_type == \"REFSEQGFF\":\n\n # Chrome numbers are changed. Need to change back to chr1 etc.\n# https://www.ncbi.nlm.nih.gov/assembly/GCF_000001405.39#/def_asm_Primary_Assembly\n print(f\"Flattening REFSEQGFF like genome\")\n# https://ftp.ncbi.nlm.nih.gov/genomes/refseq/vertebrate_mammalian/Homo_sapiens/reference/\n #download this GCF_000001405.39_GRCh38.p13_genomic.gtf.gz\n # sort and index in IGV\n# NC_000001.11\tBestRefSeq\tgene\t11874\t14409\t.\t+\t.\tgene_id \"DDX11L1\"; transcript_id \"\"; db_xref \"GeneID:100287102\"; db_xref \"HGNC:HGNC:37102\"; description \"DEAD/H-box helicase 11 like 1 (pseudogene)\"; gbkey \"Gene\"; gene \"DDX11L1\"; gene_biotype \"transcribed_pseudogene\"; pseudo \"true\";\n\n\n\n my_col = [\"chr\",\"source\",\"type\",\"start\",\"end\",\"dot\",\"strand\",\"dot2\",\"gene_id\"]\n\n replace_list = [(\"chr1\",\"NC_000001.11\"),\n (\"chr2\",\"NC_000002.12\"),\n (\"chr3\",\"NC_000003.12\"),\n (\"chr4\",\"NC_000004.12\"),\n (\"chr5\",\"NC_000005.10\"),\n (\"chr6\",\"NC_000006.12\"),\n (\"chr7\",\"NC_000007.14\"),\n (\"chr8\",\"NC_000008.11\"),\n (\"chr9\",\"NC_000009.12\"),\n 
(\"chr10\",\"NC_000010.11\"),\n (\"chr11\",\"NC_000011.10\"),\n (\"chr12\",\"NC_000012.12\"),\n (\"chr13\",\"NC_000013.11\"),\n (\"chr14\",\"NC_000014.9\"),\n (\"chr15\",\"NC_000015.10\"),\n (\"chr16\",\"NC_000016.10\"),\n (\"chr17\",\"NC_000017.11\"),\n (\"chr18\",\"NC_000018.10\"),\n (\"chr19\",\"NC_000019.10\"),\n (\"chr20\",\"NC_000020.11\"),\n (\"chr21\",\"NC_000021.9\"),\n (\"chr22\",\"NC_000022.11\"),\n (\"chrX\",\"NC_000023.11\"),\n (\"chrY\",\"NC_000024.10\")]\n\n\n df = pd.read_csv(file_in, sep=\"\\t\",header=None,names=my_col, comment=\"#\",low_memory=False)\n\n df = df[df[\"type\"]==\"gene\"].copy()\n\n # Change NC names to chr\n for l in replace_list:\n df[\"chr\"] = np.where(df[\"chr\"]==l[1],l[0],df[\"chr\"])\n\n df = df[~df[\"chr\"].str.contains(\"\\.\") ] # Take out patches\n\n\n df[\"name\"] = df[\"gene_id\"].str.split(';',expand=True)[0]\n df[\"name\"] = df[\"name\"].str.replace(\"ID=gene-\",\"\")\n\n df[\"type\"] = df[\"type\"].astype(str)\n\n df_gene = df[df[\"type\"]==\"gene\"].copy()\n df_gene[\"gene_start\"] = df_gene[\"start\"]\n df_gene[\"gene_end\"] = df_gene[\"end\"]\n\n df_gene = df_gene[[\"name\",\"gene_start\",\"gene_end\"]].copy()\n df = pd.merge(df,df_gene,how=\"left\",on=\"name\")\n df = getAreas(df)\n df[\"start\"] = df[\"gene_start\"]\n df[\"end\"] = df[\"gene_end\"]\n# df = df[[\"chr\",\"start\",\"end\",\"strand\",\"name\",\"type\"]].copy()\n\n\n\n\n\n\n\n if file_type == \"REFSEQBED\":\n\n# chr1\t11873\t14409\tNR_046018\t0\t+\t\n# 14409\t14409\t0\t3\t354,109,1189,\t0,739,1347,\n\n\n my_col = [\"chr\",\"start\",\"end\",\"name\",\"dot\",\"strand\",\"start1\",\"start2\",\"dot2\",\"dot3\",\"gene_id\",\"gene_id2\"]\n\n df = pd.read_csv(file_in, sep=\"\\t\",header=None,names=my_col, comment=\"#\",low_memory=False)\n df = df[[\"chr\",\"start\",\"end\",\"name\",\"strand\"]]\n df[\"FA_start\"] = df[\"start\"]\n df[\"FA_end\"] = df[\"end\"]\n df[\"LA_start\"] = df[\"start\"]\n df[\"LA_end\"] = df[\"end\"]\n df[\"dot\"] = \".\"\n 
df[\"dot2\"] = \".\"\n df[\"source\"] = \"NA\"\n df[\"type\"] = \"NA\"\n df[\"gene_id\"] = df[\"name\"]\n\n\n\n df_plu = df[df[\"strand\"]==\"+\"].copy()\n df_min = df[df[\"strand\"]==\"-\"].copy()\n\n df_plu, df_plu_inside = removeInside(df_plu)\n df_min, df_min_inside = removeInside(df_min)\n\n df_plu.sort_values(by=[\"chr\",\"end\"], inplace=True, ascending=False)\n df_plu.drop_duplicates(subset=[\"start\",\"chr\"], keep='first', inplace=True)\n\n df_min.sort_values(by=[\"chr\",\"start\"], inplace=True, ascending=True)\n df_min.drop_duplicates(subset=[\"end\",\"chr\"], keep='first', inplace=True)\n\n\n df = pd.concat([df_plu,df_min])\n df = df.sort_values(by=[\"chr\",\"end\"],ascending=False)\n\n\n gtf = df[[\"chr\",\"source\",\"type\",\"start\",\"end\",\"dot\",\"strand\",\"dot2\",\"gene_id\"] ]\n df = df[[\"chr\",\"start\",\"end\",\"name\",\"strand\",\"FA_start\",\"FA_end\",\"LA_start\",\"LA_end\"]]\n\n\n if NEXTFLOW:\n file_in = os.path.basename(file_in)\n\n fout = f\"{file_in[:-4]}_flat.txt\"\n fout2 = f\"{file_in[:-4]}_flat.gtf\"\n fout3 = f\"{file_in[:-4]}_flat_CHROMNAMES.txt\"\n\n\n\n print(f\"Outputting flat file {fout}\")\n df.to_csv(fout,sep=\"\\t\",index=None)\n\n\n gtf.to_csv(fout2,sep=\"\\t\", index=None,quoting=csv.QUOTE_NONE, header=None)\n\n gtf_names = gtf[[\"chr\"]].copy()\n gtf_names.drop_duplicates(subset=[\"chr\"], keep='first', inplace=True)\n\n gtf_names.to_csv(fout3,sep=\"\\t\", index=None)\n\n return df\n\n\nimport argparse\ndef parse_arguments():\n parser = argparse.ArgumentParser(description='Flatten gtf or bed to first and last exon file. 
Options in currently are ENSEMBL, BED')\n parser.add_argument('--annotation_in', action= 'store', metavar='annotation_in')\n parser.add_argument('--file_type', action= 'store', metavar='file_type',default=\"ENSEMBL\")\n args = parser.parse_args()\n return args\n\nif __name__==\"__main__\":\n args = parse_arguments()\n file_in = args.annotation_in\n file_type = args.file_type\n\n flattenGTF(file_in,file_type)\n",
"step-ids": [
4,
6,
8,
9,
11
]
}
|
[
4,
6,
8,
9,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _adjacent(word1, word2):
"""
Returns True if the input words differ by only a single character;
returns False otherwise.
>>> _adjacent('phone','phony')
True
>>> _adjacent('stone','money')
False
"""
<|reserved_special_token_1|>
def word_ladder(start_word, end_word, dictionary_file='words5.dict'):
"""
Returns a list satisfying the following properties:
1. the first element is `start_word`
2. the last element is `end_word`
3. elements at index i and i+1 are `_adjacent`
4. all elements are entries in the `dictionary_file` file
For example, running the command
```
word_ladder('stone','money')
```
may give the output
```
['stone', 'shone', 'phone', 'phony', 'peony', 'penny', 'benny', 'bonny', 'boney', 'money']
```
but the possible outputs are not unique,
so you may also get the output
```
['stone', 'shone', 'shote', 'shots', 'soots', 'hoots', 'hooty', 'hooey', 'honey', 'money']
```
(We cannot use doctests here because the outputs are not unique.)
Whenever it is impossible to generate a word ladder between the two words,
the function returns `None`.
HINT:
See <https://github.com/mikeizbicki/cmc-csci046/issues/472> for a discussion about a common memory management bug that causes the generated word ladders to be too long in some cases.
"""
<|reserved_special_token_0|>
def _adjacent(word1, word2):
"""
Returns True if the input words differ by only a single character;
returns False otherwise.
>>> _adjacent('phone','phony')
True
>>> _adjacent('stone','money')
False
"""
<|reserved_special_token_1|>
def word_ladder(start_word, end_word, dictionary_file='words5.dict'):
"""
Returns a list satisfying the following properties:
1. the first element is `start_word`
2. the last element is `end_word`
3. elements at index i and i+1 are `_adjacent`
4. all elements are entries in the `dictionary_file` file
For example, running the command
```
word_ladder('stone','money')
```
may give the output
```
['stone', 'shone', 'phone', 'phony', 'peony', 'penny', 'benny', 'bonny', 'boney', 'money']
```
but the possible outputs are not unique,
so you may also get the output
```
['stone', 'shone', 'shote', 'shots', 'soots', 'hoots', 'hooty', 'hooey', 'honey', 'money']
```
(We cannot use doctests here because the outputs are not unique.)
Whenever it is impossible to generate a word ladder between the two words,
the function returns `None`.
HINT:
See <https://github.com/mikeizbicki/cmc-csci046/issues/472> for a discussion about a common memory management bug that causes the generated word ladders to be too long in some cases.
"""
def verify_word_ladder(ladder):
"""
Returns True if each entry of the input list is adjacent to its neighbors;
otherwise returns False.
>>> verify_word_ladder(['stone', 'shone', 'phone', 'phony'])
True
>>> verify_word_ladder(['stone', 'shone', 'phony'])
False
"""
def _adjacent(word1, word2):
"""
Returns True if the input words differ by only a single character;
returns False otherwise.
>>> _adjacent('phone','phony')
True
>>> _adjacent('stone','money')
False
"""
<|reserved_special_token_1|>
#!/bin/python3
def word_ladder(start_word, end_word, dictionary_file='words5.dict'):
'''
Returns a list satisfying the following properties:
1. the first element is `start_word`
2. the last element is `end_word`
3. elements at index i and i+1 are `_adjacent`
4. all elements are entries in the `dictionary_file` file
For example, running the command
```
word_ladder('stone','money')
```
may give the output
```
['stone', 'shone', 'phone', 'phony', 'peony', 'penny', 'benny', 'bonny', 'boney', 'money']
```
but the possible outputs are not unique,
so you may also get the output
```
['stone', 'shone', 'shote', 'shots', 'soots', 'hoots', 'hooty', 'hooey', 'honey', 'money']
```
(We cannot use doctests here because the outputs are not unique.)
Whenever it is impossible to generate a word ladder between the two words,
the function returns `None`.
HINT:
See <https://github.com/mikeizbicki/cmc-csci046/issues/472> for a discussion about a common memory management bug that causes the generated word ladders to be too long in some cases.
'''
def verify_word_ladder(ladder):
'''
Returns True if each entry of the input list is adjacent to its neighbors;
otherwise returns False.
>>> verify_word_ladder(['stone', 'shone', 'phone', 'phony'])
True
>>> verify_word_ladder(['stone', 'shone', 'phony'])
False
'''
def _adjacent(word1, word2):
'''
Returns True if the input words differ by only a single character;
returns False otherwise.
>>> _adjacent('phone','phony')
True
>>> _adjacent('stone','money')
False
'''
|
flexible
|
{
"blob_id": "631323e79f4fb32611d7094af92cff8f923fa996",
"index": 303,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef _adjacent(word1, word2):\n \"\"\"\n Returns True if the input words differ by only a single character;\n returns False otherwise.\n\n >>> _adjacent('phone','phony')\n True\n >>> _adjacent('stone','money')\n False\n \"\"\"\n",
"step-3": "def word_ladder(start_word, end_word, dictionary_file='words5.dict'):\n \"\"\"\n Returns a list satisfying the following properties:\n\n 1. the first element is `start_word`\n 2. the last element is `end_word`\n 3. elements at index i and i+1 are `_adjacent`\n 4. all elements are entries in the `dictionary_file` file\n\n For example, running the command\n ```\n word_ladder('stone','money')\n ```\n may give the output\n ```\n ['stone', 'shone', 'phone', 'phony', 'peony', 'penny', 'benny', 'bonny', 'boney', 'money']\n ```\n but the possible outputs are not unique,\n so you may also get the output\n ```\n ['stone', 'shone', 'shote', 'shots', 'soots', 'hoots', 'hooty', 'hooey', 'honey', 'money']\n ```\n (We cannot use doctests here because the outputs are not unique.)\n\n Whenever it is impossible to generate a word ladder between the two words,\n the function returns `None`.\n\n HINT:\n See <https://github.com/mikeizbicki/cmc-csci046/issues/472> for a discussion about a common memory management bug that causes the generated word ladders to be too long in some cases.\n \"\"\"\n\n\n<mask token>\n\n\ndef _adjacent(word1, word2):\n \"\"\"\n Returns True if the input words differ by only a single character;\n returns False otherwise.\n\n >>> _adjacent('phone','phony')\n True\n >>> _adjacent('stone','money')\n False\n \"\"\"\n",
"step-4": "def word_ladder(start_word, end_word, dictionary_file='words5.dict'):\n \"\"\"\n Returns a list satisfying the following properties:\n\n 1. the first element is `start_word`\n 2. the last element is `end_word`\n 3. elements at index i and i+1 are `_adjacent`\n 4. all elements are entries in the `dictionary_file` file\n\n For example, running the command\n ```\n word_ladder('stone','money')\n ```\n may give the output\n ```\n ['stone', 'shone', 'phone', 'phony', 'peony', 'penny', 'benny', 'bonny', 'boney', 'money']\n ```\n but the possible outputs are not unique,\n so you may also get the output\n ```\n ['stone', 'shone', 'shote', 'shots', 'soots', 'hoots', 'hooty', 'hooey', 'honey', 'money']\n ```\n (We cannot use doctests here because the outputs are not unique.)\n\n Whenever it is impossible to generate a word ladder between the two words,\n the function returns `None`.\n\n HINT:\n See <https://github.com/mikeizbicki/cmc-csci046/issues/472> for a discussion about a common memory management bug that causes the generated word ladders to be too long in some cases.\n \"\"\"\n\n\ndef verify_word_ladder(ladder):\n \"\"\"\n Returns True if each entry of the input list is adjacent to its neighbors;\n otherwise returns False.\n\n >>> verify_word_ladder(['stone', 'shone', 'phone', 'phony'])\n True\n >>> verify_word_ladder(['stone', 'shone', 'phony'])\n False\n \"\"\"\n\n\ndef _adjacent(word1, word2):\n \"\"\"\n Returns True if the input words differ by only a single character;\n returns False otherwise.\n\n >>> _adjacent('phone','phony')\n True\n >>> _adjacent('stone','money')\n False\n \"\"\"\n",
"step-5": "#!/bin/python3\n\n\ndef word_ladder(start_word, end_word, dictionary_file='words5.dict'):\n '''\n Returns a list satisfying the following properties:\n\n 1. the first element is `start_word`\n 2. the last element is `end_word`\n 3. elements at index i and i+1 are `_adjacent`\n 4. all elements are entries in the `dictionary_file` file\n\n For example, running the command\n ```\n word_ladder('stone','money')\n ```\n may give the output\n ```\n ['stone', 'shone', 'phone', 'phony', 'peony', 'penny', 'benny', 'bonny', 'boney', 'money']\n ```\n but the possible outputs are not unique,\n so you may also get the output\n ```\n ['stone', 'shone', 'shote', 'shots', 'soots', 'hoots', 'hooty', 'hooey', 'honey', 'money']\n ```\n (We cannot use doctests here because the outputs are not unique.)\n\n Whenever it is impossible to generate a word ladder between the two words,\n the function returns `None`.\n\n HINT:\n See <https://github.com/mikeizbicki/cmc-csci046/issues/472> for a discussion about a common memory management bug that causes the generated word ladders to be too long in some cases.\n '''\n\n\ndef verify_word_ladder(ladder):\n '''\n Returns True if each entry of the input list is adjacent to its neighbors;\n otherwise returns False.\n\n >>> verify_word_ladder(['stone', 'shone', 'phone', 'phony'])\n True\n >>> verify_word_ladder(['stone', 'shone', 'phony'])\n False\n '''\n\n\ndef _adjacent(word1, word2):\n '''\n Returns True if the input words differ by only a single character;\n returns False otherwise.\n\n >>> _adjacent('phone','phony')\n True\n >>> _adjacent('stone','money')\n False\n '''\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Sale(Product):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Sale(Product):
def __init__(self, product_name, quantity, unit_price, attendant, date):
super(Sale, self).__init__(product_name, quantity, unit_price)
self.attendant = attendant
self.date = date
<|reserved_special_token_1|>
from app.models.product import Product
class Sale(Product):
def __init__(self, product_name, quantity, unit_price, attendant, date):
super(Sale, self).__init__(product_name, quantity, unit_price)
self.attendant = attendant
self.date = date
<|reserved_special_token_1|>
# model class for a sale record
from app.models.product import Product
class Sale(Product):
def __init__(self,product_name,quantity,unit_price,attendant,date):
super(Sale, self).__init__(product_name, quantity, unit_price)
self.attendant = attendant
self.date = date
|
flexible
|
{
"blob_id": "8ed14bb9af23055f4689e06df872a1d36185cd09",
"index": 6865,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Sale(Product):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Sale(Product):\n\n def __init__(self, product_name, quantity, unit_price, attendant, date):\n super(Sale, self).__init__(product_name, quantity, unit_price)\n self.attendant = attendant\n self.date = date\n",
"step-4": "from app.models.product import Product\n\n\nclass Sale(Product):\n\n def __init__(self, product_name, quantity, unit_price, attendant, date):\n super(Sale, self).__init__(product_name, quantity, unit_price)\n self.attendant = attendant\n self.date = date\n",
"step-5": "# model class for a sale record\nfrom app.models.product import Product\nclass Sale(Product):\n def __init__(self,product_name,quantity,unit_price,attendant,date):\n super(Sale, self).__init__(product_name, quantity, unit_price)\n self.attendant = attendant\n self.date = date",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def csv_usage():
"""
This function demonstrates how to use csv module to read and write csv files
"""
with open('example.csv', 'r', newline='') as csvfile:
reader_c = csv.reader(csvfile, delimiter=';')
for row in reader_c:
print(', '.join(row))
with open('new-2.csv', 'w', newline='') as csvfile:
writer_c = csv.writer(csvfile, delimiter=',')
writer_c.writerow(['Name', 'Age', 'City'])
writer_c.writerow(['Joe', '25', 'Miami'])
writer_c.writerow(['Nick', '21', 'Mexico'])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def csv_usage():
"""
This function demonstrates how to use csv module to read and write csv files
"""
with open('example.csv', 'r', newline='') as csvfile:
reader_c = csv.reader(csvfile, delimiter=';')
for row in reader_c:
print(', '.join(row))
with open('new-2.csv', 'w', newline='') as csvfile:
writer_c = csv.writer(csvfile, delimiter=',')
writer_c.writerow(['Name', 'Age', 'City'])
writer_c.writerow(['Joe', '25', 'Miami'])
writer_c.writerow(['Nick', '21', 'Mexico'])
if __name__ == '__main__':
csv_usage()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import csv
def csv_usage():
"""
This function demonstrates how to use csv module to read and write csv files
"""
with open('example.csv', 'r', newline='') as csvfile:
reader_c = csv.reader(csvfile, delimiter=';')
for row in reader_c:
print(', '.join(row))
with open('new-2.csv', 'w', newline='') as csvfile:
writer_c = csv.writer(csvfile, delimiter=',')
writer_c.writerow(['Name', 'Age', 'City'])
writer_c.writerow(['Joe', '25', 'Miami'])
writer_c.writerow(['Nick', '21', 'Mexico'])
if __name__ == '__main__':
csv_usage()
<|reserved_special_token_1|>
'''
This module demonstrates how to use some functionality of python built-in csv module
'''
import csv
def csv_usage():
'''
This function demonstrates how to use csv module to read and write csv files
'''
with open('example.csv', 'r', newline='') as csvfile:
reader_c = csv.reader(csvfile, delimiter=';')
for row in reader_c:
print(', '.join(row))
with open('new-2.csv', 'w', newline='') as csvfile:
writer_c = csv.writer(csvfile, delimiter=',')
writer_c.writerow(['Name', 'Age', 'City'])
writer_c.writerow(['Joe', '25', 'Miami'])
writer_c.writerow(['Nick', '21', 'Mexico'])
if __name__ == '__main__':
csv_usage()
|
flexible
|
{
"blob_id": "bcc2977f36ecc775f44ae4251ce230af9abf63ba",
"index": 7362,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef csv_usage():\n \"\"\"\n This function demonstrates how to use csv module to read and write csv files\n \"\"\"\n with open('example.csv', 'r', newline='') as csvfile:\n reader_c = csv.reader(csvfile, delimiter=';')\n for row in reader_c:\n print(', '.join(row))\n with open('new-2.csv', 'w', newline='') as csvfile:\n writer_c = csv.writer(csvfile, delimiter=',')\n writer_c.writerow(['Name', 'Age', 'City'])\n writer_c.writerow(['Joe', '25', 'Miami'])\n writer_c.writerow(['Nick', '21', 'Mexico'])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef csv_usage():\n \"\"\"\n This function demonstrates how to use csv module to read and write csv files\n \"\"\"\n with open('example.csv', 'r', newline='') as csvfile:\n reader_c = csv.reader(csvfile, delimiter=';')\n for row in reader_c:\n print(', '.join(row))\n with open('new-2.csv', 'w', newline='') as csvfile:\n writer_c = csv.writer(csvfile, delimiter=',')\n writer_c.writerow(['Name', 'Age', 'City'])\n writer_c.writerow(['Joe', '25', 'Miami'])\n writer_c.writerow(['Nick', '21', 'Mexico'])\n\n\nif __name__ == '__main__':\n csv_usage()\n",
"step-4": "<mask token>\nimport csv\n\n\ndef csv_usage():\n \"\"\"\n This function demonstrates how to use csv module to read and write csv files\n \"\"\"\n with open('example.csv', 'r', newline='') as csvfile:\n reader_c = csv.reader(csvfile, delimiter=';')\n for row in reader_c:\n print(', '.join(row))\n with open('new-2.csv', 'w', newline='') as csvfile:\n writer_c = csv.writer(csvfile, delimiter=',')\n writer_c.writerow(['Name', 'Age', 'City'])\n writer_c.writerow(['Joe', '25', 'Miami'])\n writer_c.writerow(['Nick', '21', 'Mexico'])\n\n\nif __name__ == '__main__':\n csv_usage()\n",
"step-5": "'''\nThis module demonstrates how to use some functionality of python built-in csv module\n'''\nimport csv\n\ndef csv_usage():\n '''\n This function demonstrates how to use csv module to read and write csv files\n '''\n with open('example.csv', 'r', newline='') as csvfile:\n reader_c = csv.reader(csvfile, delimiter=';')\n for row in reader_c:\n print(', '.join(row))\n\n with open('new-2.csv', 'w', newline='') as csvfile:\n writer_c = csv.writer(csvfile, delimiter=',')\n writer_c.writerow(['Name', 'Age', 'City'])\n writer_c.writerow(['Joe', '25', 'Miami'])\n writer_c.writerow(['Nick', '21', 'Mexico'])\n\nif __name__ == '__main__':\n csv_usage()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
# coding: utf-8
from __future__ import print_function
import uuid
import cgi
import squareconnect
from squareconnect.rest import ApiException
from squareconnect.apis.transactions_api import TransactionsApi
from squareconnect.apis.locations_api import LocationsApi
from squareconnect.apis.customers_api import CustomersApi
# Create instance of FieldStorage
form = cgi.FieldStorage()
# Get data from fields
nonce = form.getvalue('nonce')
# Get amount data
donation = form.getvalue('amount')
boxChecked = form.getvalue('boxChecked')
firstName = form.getvalue('firstname')
lastName = form.getvalue('lastname')
email = form.getvalue('email')
# The access token to use in all Connect API requests. Use your *sandbox* access
# token if you're just testing things out.
squareconnect.configuration.access_token = 'sandbox-sq0atb-kfvpHvEa9Mz2098Nozk1RQ'
# The ID of the business location to associate processed payments with.
# See [Retrieve your business's locations]
# (https://docs.connect.squareup.com/articles/getting-started/#retrievemerchantprofile)
# for an easy way to get your business's location IDs.
# If you're testing things out, use a sandbox location ID.
location_id = 'CBASEGLb1fOhVH4Uvvi1aY_bOawgAQ'
transactions_api_instance = TransactionsApi()
customers_api_instance = CustomersApi()
# Every payment you process with the SDK must have a unique idempotency key.
# If you're unsure whether a particular payment succeeded, you can reattempt
# it with the same idempotency key without worrying about double charging
# the buyer.
idempotency_key = str(uuid.uuid1())
# Monetary amounts are specified in the smallest unit of the applicable currency.
# This amount is in cents. It's also hard-coded for $1.00, which isn't very useful.
amount = {'amount': int(donation) * 100, 'currency': 'USD'}
customersList = []
# Add a customer to file
if boxChecked == "true":
heading = "Recurring Donation"
customerRequest = {'given_name': firstName, 'family_name': lastName, 'email_address': email}
try:
customerResponse = customers_api_instance.create_customer(customerRequest)
except ApiException as e:
print ("customer creation failed")
print (e)
exit()
customer = customerResponse.customer
customerCardRequest = {'card_nonce': nonce}
try:
customerCardResponse = customers_api_instance.create_customer_card(customer.id, customerCardRequest)
except:
print ("customer card creation failed")
exit()
customerCard = customerCardResponse.card
body = {'customer_id': customer.id, 'customer_card_id': customerCard.id, 'idempotency_key': idempotency_key, 'amount_money': amount}
customersList = customers_api_instance.list_customers()
else:
# To learn more about splitting transactions with additional recipients,
# see the Transactions API documentation on our [developer site]
# (https://docs.connect.squareup.com/payments/transactions/overview#mpt-overview).
heading = "One time Donation"
body = {'idempotency_key': idempotency_key, 'card_nonce': nonce, 'amount_money': amount}
# customersList = Non
# The SDK throws an exception if a Connect endpoint responds with anything besides
# a 200-level HTTP code. This block catches any exceptions that occur from the request.
try:
api_response = transactions_api_instance.charge(location_id, body)
res = api_response.transaction
except ApiException as e:
res = "Exception when calling TransactionApi->charge: {}".format(e)
# Display the result
print ('Content-type:text/html\r\n\r\n')
print ('<html>')
print ('<head>')
print ('<title>Square Payment</title>')
print ('</head>')
print ('<body>')
print ('<h2>Result: </h2>')
print( '<h2>{}</h2>'.format(heading))
print ('<p>{}</p>'.format(res))
if customersList:
print( '<h2>Customers stored on File: </h2>')
for customer in customersList.customers:
print ('<p>{}</p>'.format(customer))
print ('</body>')
print ('</html>')
|
normal
|
{
"blob_id": "bb7910af5334641fd2db7146112afaff7a2e42b9",
"index": 565,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif boxChecked == 'true':\n heading = 'Recurring Donation'\n customerRequest = {'given_name': firstName, 'family_name': lastName,\n 'email_address': email}\n try:\n customerResponse = customers_api_instance.create_customer(\n customerRequest)\n except ApiException as e:\n print('customer creation failed')\n print(e)\n exit()\n customer = customerResponse.customer\n customerCardRequest = {'card_nonce': nonce}\n try:\n customerCardResponse = customers_api_instance.create_customer_card(\n customer.id, customerCardRequest)\n except:\n print('customer card creation failed')\n exit()\n customerCard = customerCardResponse.card\n body = {'customer_id': customer.id, 'customer_card_id': customerCard.id,\n 'idempotency_key': idempotency_key, 'amount_money': amount}\n customersList = customers_api_instance.list_customers()\nelse:\n heading = 'One time Donation'\n body = {'idempotency_key': idempotency_key, 'card_nonce': nonce,\n 'amount_money': amount}\ntry:\n api_response = transactions_api_instance.charge(location_id, body)\n res = api_response.transaction\nexcept ApiException as e:\n res = 'Exception when calling TransactionApi->charge: {}'.format(e)\nprint('Content-type:text/html\\r\\n\\r\\n')\nprint('<html>')\nprint('<head>')\nprint('<title>Square Payment</title>')\nprint('</head>')\nprint('<body>')\nprint('<h2>Result: </h2>')\nprint('<h2>{}</h2>'.format(heading))\nprint('<p>{}</p>'.format(res))\nif customersList:\n print('<h2>Customers stored on File: </h2>')\n for customer in customersList.customers:\n print('<p>{}</p>'.format(customer))\nprint('</body>')\nprint('</html>')\n",
"step-3": "<mask token>\nform = cgi.FieldStorage()\nnonce = form.getvalue('nonce')\ndonation = form.getvalue('amount')\nboxChecked = form.getvalue('boxChecked')\nfirstName = form.getvalue('firstname')\nlastName = form.getvalue('lastname')\nemail = form.getvalue('email')\nsquareconnect.configuration.access_token = (\n 'sandbox-sq0atb-kfvpHvEa9Mz2098Nozk1RQ')\nlocation_id = 'CBASEGLb1fOhVH4Uvvi1aY_bOawgAQ'\ntransactions_api_instance = TransactionsApi()\ncustomers_api_instance = CustomersApi()\nidempotency_key = str(uuid.uuid1())\namount = {'amount': int(donation) * 100, 'currency': 'USD'}\ncustomersList = []\nif boxChecked == 'true':\n heading = 'Recurring Donation'\n customerRequest = {'given_name': firstName, 'family_name': lastName,\n 'email_address': email}\n try:\n customerResponse = customers_api_instance.create_customer(\n customerRequest)\n except ApiException as e:\n print('customer creation failed')\n print(e)\n exit()\n customer = customerResponse.customer\n customerCardRequest = {'card_nonce': nonce}\n try:\n customerCardResponse = customers_api_instance.create_customer_card(\n customer.id, customerCardRequest)\n except:\n print('customer card creation failed')\n exit()\n customerCard = customerCardResponse.card\n body = {'customer_id': customer.id, 'customer_card_id': customerCard.id,\n 'idempotency_key': idempotency_key, 'amount_money': amount}\n customersList = customers_api_instance.list_customers()\nelse:\n heading = 'One time Donation'\n body = {'idempotency_key': idempotency_key, 'card_nonce': nonce,\n 'amount_money': amount}\ntry:\n api_response = transactions_api_instance.charge(location_id, body)\n res = api_response.transaction\nexcept ApiException as e:\n res = 'Exception when calling TransactionApi->charge: {}'.format(e)\nprint('Content-type:text/html\\r\\n\\r\\n')\nprint('<html>')\nprint('<head>')\nprint('<title>Square Payment</title>')\nprint('</head>')\nprint('<body>')\nprint('<h2>Result: 
</h2>')\nprint('<h2>{}</h2>'.format(heading))\nprint('<p>{}</p>'.format(res))\nif customersList:\n print('<h2>Customers stored on File: </h2>')\n for customer in customersList.customers:\n print('<p>{}</p>'.format(customer))\nprint('</body>')\nprint('</html>')\n",
"step-4": "from __future__ import print_function\nimport uuid\nimport cgi\nimport squareconnect\nfrom squareconnect.rest import ApiException\nfrom squareconnect.apis.transactions_api import TransactionsApi\nfrom squareconnect.apis.locations_api import LocationsApi\nfrom squareconnect.apis.customers_api import CustomersApi\nform = cgi.FieldStorage()\nnonce = form.getvalue('nonce')\ndonation = form.getvalue('amount')\nboxChecked = form.getvalue('boxChecked')\nfirstName = form.getvalue('firstname')\nlastName = form.getvalue('lastname')\nemail = form.getvalue('email')\nsquareconnect.configuration.access_token = (\n 'sandbox-sq0atb-kfvpHvEa9Mz2098Nozk1RQ')\nlocation_id = 'CBASEGLb1fOhVH4Uvvi1aY_bOawgAQ'\ntransactions_api_instance = TransactionsApi()\ncustomers_api_instance = CustomersApi()\nidempotency_key = str(uuid.uuid1())\namount = {'amount': int(donation) * 100, 'currency': 'USD'}\ncustomersList = []\nif boxChecked == 'true':\n heading = 'Recurring Donation'\n customerRequest = {'given_name': firstName, 'family_name': lastName,\n 'email_address': email}\n try:\n customerResponse = customers_api_instance.create_customer(\n customerRequest)\n except ApiException as e:\n print('customer creation failed')\n print(e)\n exit()\n customer = customerResponse.customer\n customerCardRequest = {'card_nonce': nonce}\n try:\n customerCardResponse = customers_api_instance.create_customer_card(\n customer.id, customerCardRequest)\n except:\n print('customer card creation failed')\n exit()\n customerCard = customerCardResponse.card\n body = {'customer_id': customer.id, 'customer_card_id': customerCard.id,\n 'idempotency_key': idempotency_key, 'amount_money': amount}\n customersList = customers_api_instance.list_customers()\nelse:\n heading = 'One time Donation'\n body = {'idempotency_key': idempotency_key, 'card_nonce': nonce,\n 'amount_money': amount}\ntry:\n api_response = transactions_api_instance.charge(location_id, body)\n res = api_response.transaction\nexcept ApiException 
as e:\n res = 'Exception when calling TransactionApi->charge: {}'.format(e)\nprint('Content-type:text/html\\r\\n\\r\\n')\nprint('<html>')\nprint('<head>')\nprint('<title>Square Payment</title>')\nprint('</head>')\nprint('<body>')\nprint('<h2>Result: </h2>')\nprint('<h2>{}</h2>'.format(heading))\nprint('<p>{}</p>'.format(res))\nif customersList:\n print('<h2>Customers stored on File: </h2>')\n for customer in customersList.customers:\n print('<p>{}</p>'.format(customer))\nprint('</body>')\nprint('</html>')\n",
"step-5": "#!/usr/bin/env python\n# coding: utf-8\nfrom __future__ import print_function\nimport uuid\nimport cgi\n\nimport squareconnect\nfrom squareconnect.rest import ApiException\nfrom squareconnect.apis.transactions_api import TransactionsApi\nfrom squareconnect.apis.locations_api import LocationsApi\nfrom squareconnect.apis.customers_api import CustomersApi\n\n# Create instance of FieldStorage\nform = cgi.FieldStorage()\n\n# Get data from fields\nnonce = form.getvalue('nonce')\n# Get amount data\ndonation = form.getvalue('amount')\n\nboxChecked = form.getvalue('boxChecked')\nfirstName = form.getvalue('firstname')\nlastName = form.getvalue('lastname')\nemail = form.getvalue('email')\n\n\n# The access token to use in all Connect API requests. Use your *sandbox* access\n# token if you're just testing things out.\nsquareconnect.configuration.access_token = 'sandbox-sq0atb-kfvpHvEa9Mz2098Nozk1RQ'\n\n# The ID of the business location to associate processed payments with.\n# See [Retrieve your business's locations]\n# (https://docs.connect.squareup.com/articles/getting-started/#retrievemerchantprofile)\n# for an easy way to get your business's location IDs.\n# If you're testing things out, use a sandbox location ID.\nlocation_id = 'CBASEGLb1fOhVH4Uvvi1aY_bOawgAQ'\n\ntransactions_api_instance = TransactionsApi()\ncustomers_api_instance = CustomersApi()\n\n# Every payment you process with the SDK must have a unique idempotency key.\n# If you're unsure whether a particular payment succeeded, you can reattempt\n# it with the same idempotency key without worrying about double charging\n# the buyer.\nidempotency_key = str(uuid.uuid1())\n\n# Monetary amounts are specified in the smallest unit of the applicable currency.\n# This amount is in cents. 
It's also hard-coded for $1.00, which isn't very useful.\namount = {'amount': int(donation) * 100, 'currency': 'USD'}\n\ncustomersList = []\n\n# Add a customer to file\nif boxChecked == \"true\": \n\theading = \"Recurring Donation\"\n\tcustomerRequest = {'given_name': firstName, 'family_name': lastName, 'email_address': email}\n\n\ttry:\n\t\tcustomerResponse = customers_api_instance.create_customer(customerRequest)\n\texcept ApiException as e:\n\t\tprint (\"customer creation failed\")\n\t\tprint (e)\n\t\texit()\n\n\tcustomer = customerResponse.customer\n\tcustomerCardRequest = {'card_nonce': nonce}\n\n\ttry:\n\t\tcustomerCardResponse = customers_api_instance.create_customer_card(customer.id, customerCardRequest)\n\texcept:\n\t\tprint (\"customer card creation failed\")\n\t\texit()\n\n\tcustomerCard = customerCardResponse.card\n\n\tbody = {'customer_id': customer.id, 'customer_card_id': customerCard.id, 'idempotency_key': idempotency_key, 'amount_money': amount}\n\tcustomersList = customers_api_instance.list_customers()\nelse:\n\t# To learn more about splitting transactions with additional recipients,\n\t# see the Transactions API documentation on our [developer site]\n\t# (https://docs.connect.squareup.com/payments/transactions/overview#mpt-overview).\n\theading = \"One time Donation\"\n\tbody = {'idempotency_key': idempotency_key, 'card_nonce': nonce, 'amount_money': amount}\n\t# customersList = Non\n\n\n# The SDK throws an exception if a Connect endpoint responds with anything besides\n# a 200-level HTTP code. 
This block catches any exceptions that occur from the request.\ntry:\n api_response = transactions_api_instance.charge(location_id, body)\n res = api_response.transaction\nexcept ApiException as e:\n res = \"Exception when calling TransactionApi->charge: {}\".format(e)\n\n# Display the result\nprint ('Content-type:text/html\\r\\n\\r\\n')\nprint ('<html>')\nprint ('<head>')\nprint ('<title>Square Payment</title>')\nprint ('</head>')\nprint ('<body>')\nprint ('<h2>Result: </h2>')\nprint( '<h2>{}</h2>'.format(heading))\nprint ('<p>{}</p>'.format(res))\nif customersList:\n\tprint( '<h2>Customers stored on File: </h2>')\n\tfor customer in customersList.customers:\n\t\tprint ('<p>{}</p>'.format(customer))\n\nprint ('</body>')\nprint ('</html>')\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while i == 0:
num = int(input('What length do you want? '))
password = ''.join(random.sample(s, num))
print(password)
j = 0
while j == 0:
want = input('Do you this password? (yes or no) ')
want.lower()
if want == 'yes':
print('Your Password is ' + password)
break
elif want == 'no':
break
if want == 'yes':
fin = input('Do you want a new password. yes or no? ')
fin.lower()
while j == 0:
if fin == 'yes':
break
elif fin == 'no':
break
if fin == 'no':
print('This is your final password ' + password)
break
<|reserved_special_token_1|>
<|reserved_special_token_0|>
s = (
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()_+=-/.,;'[]{}:<>?"
)
i = 0
fin = ''
while i == 0:
num = int(input('What length do you want? '))
password = ''.join(random.sample(s, num))
print(password)
j = 0
while j == 0:
want = input('Do you this password? (yes or no) ')
want.lower()
if want == 'yes':
print('Your Password is ' + password)
break
elif want == 'no':
break
if want == 'yes':
fin = input('Do you want a new password. yes or no? ')
fin.lower()
while j == 0:
if fin == 'yes':
break
elif fin == 'no':
break
if fin == 'no':
print('This is your final password ' + password)
break
<|reserved_special_token_1|>
import random
s = (
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()_+=-/.,;'[]{}:<>?"
)
i = 0
fin = ''
while i == 0:
num = int(input('What length do you want? '))
password = ''.join(random.sample(s, num))
print(password)
j = 0
while j == 0:
want = input('Do you this password? (yes or no) ')
want.lower()
if want == 'yes':
print('Your Password is ' + password)
break
elif want == 'no':
break
if want == 'yes':
fin = input('Do you want a new password. yes or no? ')
fin.lower()
while j == 0:
if fin == 'yes':
break
elif fin == 'no':
break
if fin == 'no':
print('This is your final password ' + password)
break
<|reserved_special_token_1|>
import random
s = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()_+=-/.,;'[]{}:<>?"
i = 0
fin = ""
while i == 0:
num = int(input("What length do you want? "))
password = "".join(random.sample(s, num))
print(password)
j = 0
while(j ==0):
want = input("Do you this password? (yes or no) ")
want.lower()
if want == "yes":
print("Your Password is " + password)
break
elif want == "no":
break
if want == "yes":
fin = input("Do you want a new password. yes or no? ")
fin.lower()
while j == 0:
if fin == "yes":
break
elif fin == "no":
break
if fin == "no":
print("This is your final password " + password)
break
|
flexible
|
{
"blob_id": "3089dba0956151bd43e443b679ec0b24da644d08",
"index": 3701,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile i == 0:\n num = int(input('What length do you want? '))\n password = ''.join(random.sample(s, num))\n print(password)\n j = 0\n while j == 0:\n want = input('Do you this password? (yes or no) ')\n want.lower()\n if want == 'yes':\n print('Your Password is ' + password)\n break\n elif want == 'no':\n break\n if want == 'yes':\n fin = input('Do you want a new password. yes or no? ')\n fin.lower()\n while j == 0:\n if fin == 'yes':\n break\n elif fin == 'no':\n break\n if fin == 'no':\n print('This is your final password ' + password)\n break\n",
"step-3": "<mask token>\ns = (\n \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()_+=-/.,;'[]{}:<>?\"\n )\ni = 0\nfin = ''\nwhile i == 0:\n num = int(input('What length do you want? '))\n password = ''.join(random.sample(s, num))\n print(password)\n j = 0\n while j == 0:\n want = input('Do you this password? (yes or no) ')\n want.lower()\n if want == 'yes':\n print('Your Password is ' + password)\n break\n elif want == 'no':\n break\n if want == 'yes':\n fin = input('Do you want a new password. yes or no? ')\n fin.lower()\n while j == 0:\n if fin == 'yes':\n break\n elif fin == 'no':\n break\n if fin == 'no':\n print('This is your final password ' + password)\n break\n",
"step-4": "import random\ns = (\n \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()_+=-/.,;'[]{}:<>?\"\n )\ni = 0\nfin = ''\nwhile i == 0:\n num = int(input('What length do you want? '))\n password = ''.join(random.sample(s, num))\n print(password)\n j = 0\n while j == 0:\n want = input('Do you this password? (yes or no) ')\n want.lower()\n if want == 'yes':\n print('Your Password is ' + password)\n break\n elif want == 'no':\n break\n if want == 'yes':\n fin = input('Do you want a new password. yes or no? ')\n fin.lower()\n while j == 0:\n if fin == 'yes':\n break\n elif fin == 'no':\n break\n if fin == 'no':\n print('This is your final password ' + password)\n break\n",
"step-5": "import random\n\ns = \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()_+=-/.,;'[]{}:<>?\"\ni = 0\nfin = \"\"\nwhile i == 0:\n num = int(input(\"What length do you want? \"))\n\n password = \"\".join(random.sample(s, num))\n\n print(password)\n j = 0\n while(j ==0):\n want = input(\"Do you this password? (yes or no) \")\n want.lower()\n if want == \"yes\":\n print(\"Your Password is \" + password)\n break\n elif want == \"no\":\n break\n if want == \"yes\":\n fin = input(\"Do you want a new password. yes or no? \")\n fin.lower()\n while j == 0:\n if fin == \"yes\":\n break\n elif fin == \"no\":\n break\n if fin == \"no\":\n print(\"This is your final password \" + password)\n break",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.shortcuts import render, Http404, HttpResponse, redirect
from django.contrib.auth import authenticate, login
from website.form import UserForm
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from website.models import UserProfile
from website.form import UserForm
import pandas as pd
from pandas import DataFrame
from sqlalchemy import create_engine
from django.contrib.auth.decorators import login_required
import sqlite3
import xlrd
import uuid
def df_to_sql_T_1(filefullpath, sheet, row_name):
    """Upsert organisation rows from a transposed Excel sheet into org_info.

    filefullpath -- absolute path of the uploaded workbook.
    sheet        -- sheet index holding the organisation data.
    row_name     -- label of the row that becomes the header once the
                    sheet is transposed (e.g. "公司资料简介").

    Side effects: renumbers org_id for every existing row, then UPDATEs
    organisations whose ★机构全名 already exists and INSERTs the rest.
    """
    # Read the sheet, drop fully-empty rows/columns, transpose so each
    # organisation becomes one row, and promote `row_name` to the header.
    # NOTE(review): `sheetname=` is the pre-0.21 pandas keyword; newer
    # pandas spells it `sheet_name=` — confirm the installed version.
    excel_df = pd.read_excel(filefullpath, sheetname=sheet)
    excel_df = excel_df.dropna(how="all")
    excel_df = excel_df.dropna(axis=1, how="all")
    excel_df = excel_df.T
    excel_df.columns = excel_df.loc[row_name]
    excel_df = excel_df.drop(row_name, axis=0, inplace=False)
    excel_df.index = range(len(excel_df))
    excel_df.drop_duplicates(subset=['★机构全名'], inplace=True)
    # Load the current contents of org_info (the table must already exist).
    # NOTE(review): database path is hard-coded; should come from settings.
    con = sqlite3.connect(r"C:\Users\K\Desktop\excel-upload-sqlite3\mins\db.sqlite3")
    sql_df = pd.read_sql("SELECT * FROM org_info", con)
    existing_names = sql_df['org_full_name'].tolist()
    sql_number = len(existing_names)
    # Renumber every existing organisation with a stable id: O00001, O00002...
    org_id_number = 0
    for org_full_name in sql_df['org_full_name'].unique():
        org_id_number = org_id_number + 1
        org_id = 'O' + '0' * (5 - len(str(org_id_number))) + str(org_id_number)
        with con:
            cur = con.cursor()
            cur.execute("""UPDATE org_info SET org_id=? WHERE org_full_name=?""", (org_id, org_full_name))
    # Target schema, in the exact order used by the UPDATE statement below.
    column_names = ["org_name", "org_full_name", "reg_code", "reg_time", "found_date", "reg_capital",
                    "real_capital", "region", "profile", "address", "team", "fund_num",
                    "is_qualification", "prize", "team_scale", "investment_idea", "master_strategy",
                    "remark", "asset_mgt_scale", "linkman", "linkman_duty", "linkman_phone",
                    "linkman_email"]
    for name in excel_df['★机构全名'].tolist():
        if name in existing_names:
            # Existing organisation: look up its id and UPDATE in place.
            sql_df = pd.read_sql("SELECT * FROM org_info", con)
            name_dataframe = sql_df[sql_df["org_full_name"] == name]
            org_id = name_dataframe.loc[name_dataframe.last_valid_index(), 'org_id']
            commit_data = excel_df[excel_df["★机构全名"] == name]
            commit_data.columns = column_names
            # Stringify every field in column order.  This also fixes the
            # original bugs where 'team' and 'fund_num' read the wrong
            # columns ('org_name' and 'team') and 'team_scale' stored a
            # Series repr instead of the cell value.
            row = commit_data.iloc[0]
            values = tuple(str(row[c]) for c in column_names) + (org_id,)
            with con:
                cur = con.cursor()
                sql = """UPDATE org_info SET org_name=?, org_full_name=?, reg_code=?, reg_time=?, found_date=?, \
reg_capital=?, real_capital=?, region=?, profile=?, address=?, team=?, fund_num=?, is_qualification=?, \
prize=?, team_scale=?, investment_idea=?, master_strategy=?, remark=?, asset_mgt_scale=?, linkman=?, \
linkman_duty=?, linkman_phone=?, linkman_email=? WHERE org_id=?"""
                cur.execute(sql, values)
        else:
            # New organisation: assign the next sequential id and INSERT.
            sql_number = sql_number + 1
            commit_data = excel_df[excel_df["★机构全名"] == name]
            commit_data.columns = column_names
            commit_data.loc[:, "org_id"] = 'O' + '0' * (5 - len(str(sql_number))) + str(sql_number)
            commit_data.to_sql("org_info", con, if_exists="append", index=False)
def df_to_sql_T_2(filefullpath, sheet, row_name):
    """Upsert fund rows from a transposed Excel sheet into fund_info.

    filefullpath -- absolute path of the uploaded workbook.
    sheet        -- sheet index holding the fund data.
    row_name     -- label of the row that becomes the header once the
                    sheet is transposed (e.g. "基金简介").

    Side effects: renumbers fund_id for every existing row, then UPDATEs
    funds whose ★基金全称 already exists and INSERTs the rest.
    """
    # Read the sheet, drop fully-empty rows/columns, transpose so each
    # fund becomes one row, and promote `row_name` to the header.
    excel_df = pd.read_excel(filefullpath, sheetname=sheet)
    excel_df = excel_df.dropna(how="all")
    excel_df = excel_df.dropna(axis=1, how="all")
    excel_df = excel_df.T
    excel_df.columns = excel_df.loc[row_name]
    excel_df = excel_df.drop(row_name, axis=0, inplace=False)
    excel_df.index = range(len(excel_df))
    excel_df.drop_duplicates(subset=['★基金全称'], inplace=True)
    # Load the current contents of fund_info (the table must already exist).
    con = sqlite3.connect(r"C:\Users\K\Desktop\excel-upload-sqlite3\mins\db.sqlite3")
    sql_df = pd.read_sql("SELECT * FROM fund_info", con)
    existing_names = sql_df['fund_full_name'].tolist()
    sql_number = len(existing_names)
    # Renumber every existing fund with a stable id: F000001, F000002, ...
    fund_id_number = 0
    for fund_full_name in sql_df['fund_full_name'].unique():
        fund_id_number = fund_id_number + 1
        fund_id = 'F' + '0' * (6 - len(str(fund_id_number))) + str(fund_id_number)
        with con:
            cur = con.cursor()
            cur.execute("""UPDATE fund_info SET fund_id=? WHERE fund_full_name=?""", (fund_id, fund_full_name))
    # Target schema, in the exact order used by the UPDATE statement below.
    column_names = ["group", "fund_type_strategy", "reg_code", "foundation_date", "fund_name",
                    "fund_full_name", "fund_manager", "fund_manager_nominal", "fund_stockbroker",
                    "fund_custodian", "fund_member", "fund_type_issuance", "fund_type_structure",
                    "fund_structure", "issue_scale", "asset_scale", "is_main_fund", "fee_pay",
                    "open_date", "locked_time_limit", "duration", "fee_manage", "fee_pay_remark",
                    "fee_redeem", "fee_subscription", "fee_trust", "investment_range",
                    "min_purchase_amount", "min_append_amount", "stop_line", "alert_line",
                    "manager_participation_scale", "investment_idea", "structure_hierarchy", "remark"]
    for name in excel_df['★基金全称'].tolist():
        if name in existing_names:
            # Existing fund: look up its id and UPDATE the row in place.
            sql_df = pd.read_sql("SELECT * FROM fund_info", con)
            name_dataframe = sql_df[sql_df["fund_full_name"] == name]
            fund_id = name_dataframe.loc[name_dataframe.last_valid_index(), 'fund_id']
            commit_data = excel_df[excel_df["★基金全称"] == name]
            commit_data.columns = column_names
            # Stringify every field in column order.  This also fixes the
            # original bug where 'open_date' lacked .values[0] and stored a
            # Series repr instead of the cell value.
            row = commit_data.iloc[0]
            values = tuple(str(row[c]) for c in column_names) + (fund_id,)
            with con:
                cur = con.cursor()
                # 'group' is quoted because it is an SQL reserved word.
                sql = """UPDATE fund_info SET 'group'=?, fund_type_strategy=?, reg_code=?, foundation_date=?, fund_name=?, \
fund_full_name=?, fund_manager=?, fund_manager_nominal=?, fund_stockbroker=?, fund_custodian=?, fund_member=?, \
fund_type_issuance=?, fund_type_structure=?, fund_structure=?, issue_scale=?, asset_scale=?, is_main_fund=?, fee_pay=?, \
open_date=?, locked_time_limit=?, duration=?, fee_manage=?, fee_pay_remark=?, fee_redeem=?, fee_subscription=?, fee_trust=?, \
investment_range=?, min_purchase_amount=?, min_append_amount=?, stop_line=?, alert_line=?, manager_participation_scale=?, \
investment_idea=?, structure_hierarchy=?, remark=? WHERE fund_id=?"""
                cur.execute(sql, values)
        else:
            # New fund: assign the next sequential id and INSERT via to_sql.
            sql_number = sql_number + 1
            commit_data = excel_df[excel_df["★基金全称"] == name]
            commit_data.columns = column_names
            commit_data.loc[:, "fund_id"] = 'F' + '0' * (6 - len(str(sql_number))) + str(sql_number)
            commit_data.to_sql("fund_info", con, if_exists="append", index=False)
def df_to_sql_T_3(filefullpath, sheet, row_name):
    """Upsert fund-manager rows from a transposed Excel sheet into manager_info.

    filefullpath -- absolute path of the uploaded workbook.
    sheet        -- sheet index holding the personnel data.
    row_name     -- label of the row that becomes the header once the
                    sheet is transposed (e.g. "人员简介").

    Side effects: renumbers user_id for every existing row, then UPDATEs
    managers whose ★姓名 already exists and INSERTs the rest.
    """
    # Read the sheet, drop fully-empty rows/columns, transpose so each
    # person becomes one row, and promote `row_name` to the header.
    excel_df = pd.read_excel(filefullpath, sheetname=sheet)
    excel_df = excel_df.dropna(how="all")
    excel_df = excel_df.dropna(axis=1, how="all")
    excel_df = excel_df.T
    excel_df.columns = excel_df.loc[row_name]
    excel_df = excel_df.drop(row_name, axis=0, inplace=False)
    excel_df.index = range(len(excel_df))
    excel_df.drop_duplicates(subset=['★姓名'], inplace=True)
    # Load the current contents of manager_info (the table must exist).
    con = sqlite3.connect(r"C:\Users\K\Desktop\excel-upload-sqlite3\mins\db.sqlite3")
    sql_df = pd.read_sql("SELECT * FROM manager_info", con)
    existing_names = sql_df['user_name'].tolist()
    sql_number = len(existing_names)
    # Renumber every existing manager with a stable id: M00001, M00002, ...
    user_id_number = 0
    for user_name in sql_df['user_name'].unique():
        user_id_number = user_id_number + 1
        user_id = 'M' + '0' * (5 - len(str(user_id_number))) + str(user_id_number)
        with con:
            cur = con.cursor()
            cur.execute("""UPDATE manager_info SET user_id=? WHERE user_name=?""", (user_id, user_name))
    # Target schema, in the exact order used by the UPDATE statement below.
    column_names = ["user_name", "sex", "org_name", "introduction", "photo", "entry_date",
                    "investment_years", "education", "duty", "qualification", "background",
                    "is_fund_qualification", "is_core_member", "resume", "max_asset_mgt_scale",
                    "prize", "remark"]
    for name in excel_df['★姓名'].tolist():
        if name in existing_names:
            # Existing manager: look up the id and UPDATE the row in place.
            sql_df = pd.read_sql("SELECT * FROM manager_info", con)
            name_dataframe = sql_df[sql_df["user_name"] == name]
            user_id = name_dataframe.loc[name_dataframe.last_valid_index(), 'user_id']
            commit_data = excel_df[excel_df["★姓名"] == name]
            commit_data.columns = column_names
            # Stringify every field in column order (replaces 17 repetitive
            # extraction lines; also drops a dead user_id column assignment
            # that was never persisted).
            row = commit_data.iloc[0]
            values = tuple(str(row[c]) for c in column_names) + (user_id,)
            with con:
                cur = con.cursor()
                sql = """UPDATE manager_info SET user_name=?, sex=?, org_name=?, introduction=?, photo=?, \
entry_date=?, investment_years=?, education=?, duty=?, qualification=?, background=?, is_fund_qualification=?, \
is_core_member=?, resume=?, max_asset_mgt_scale=?, prize=?, remark=? WHERE user_id=?"""
                cur.execute(sql, values)
        else:
            # New manager: assign the next sequential id and INSERT.
            sql_number = sql_number + 1
            commit_data = excel_df[excel_df["★姓名"] == name]
            commit_data.columns = column_names
            commit_data.loc[:, "user_id"] = 'M' + '0' * (5 - len(str(sql_number))) + str(sql_number)
            commit_data.to_sql("manager_info", con, if_exists="append", index=False)
def df_to_sql_4(filefullpath, sheet, row_name):
    """Upsert per-date net-asset-value rows from the NAV sheet into fund_nav_data.

    filefullpath -- absolute path of the uploaded workbook.
    sheet        -- sheet index holding the NAV history.
    row_name     -- label of the fund-name column ("基金简称"); merged cells
                    leave blanks below each fund name, so it is forward-filled.

    Side effects: refreshes fund_id from fund_info for existing rows, then
    UPDATEs rows whose (fund_name, statistic_date) pair already exists and
    INSERTs the rest.
    """
    excel_df = pd.read_excel(filefullpath, sheetname=sheet)
    excel_df = excel_df.dropna(how="all")
    # Forward-fill the fund-name column so every NAV row carries its fund.
    excel_df[row_name] = excel_df[row_name].ffill()
    excel_df.index = range(len(excel_df))
    con = sqlite3.connect(r"C:\Users\K\Desktop\excel-upload-sqlite3\mins\db.sqlite3")
    sql_df = pd.read_sql("SELECT * FROM fund_nav_data", con)
    name_list = sql_df['fund_name'].tolist()
    date_list = sql_df['statistic_date'].tolist()
    # Copy each fund's id over from fund_info (populated by df_to_sql_T_2).
    for fund_name in sql_df['fund_name'].unique():
        fund_info_sql_df = pd.read_sql("SELECT * FROM fund_info", con)
        fund_id = fund_info_sql_df.loc[fund_info_sql_df.fund_name == fund_name, 'fund_id'].values[0]
        with con:
            cur = con.cursor()
            cur.execute("""UPDATE fund_nav_data SET fund_id=? WHERE fund_name=?""", (fund_id, fund_name))
    # Target schema; the first two columns form the natural key.
    column_names = ["fund_name", "statistic_date", "nav", "added_nav", "total_share",
                    "total_asset", "total_nav", "is_split", "is_open_date",
                    "split_ratio", "after_tax_bonus"]
    for name in set(excel_df['基金简称'].tolist()):
        date_strings = [str(d) for d in excel_df.loc[excel_df['基金简称'] == name, '净值日期'].tolist()]
        for date in date_strings:
            # NOTE(review): this membership test can false-positive when the
            # name and the date each exist in the table but on different rows.
            if name in name_list and date in date_list:
                # Existing (fund, date) pair: update the stored values.
                commit_data = excel_df[excel_df['基金简称'] == name]
                commit_data.columns = column_names
                # BUG FIX: the original assigned a stale `fund_id` carried
                # over from the loop above (wrong value; NameError on an
                # empty table).  The UPDATE is keyed on name+date, so no id
                # is needed here.  Dates are compared as strings, matching
                # how `date` itself was built.
                row = commit_data[commit_data["statistic_date"].astype(str) == date].iloc[0]
                values = tuple(str(row[c]) for c in column_names[2:]) + (name, date)
                with con:
                    cur = con.cursor()
                    sql = """UPDATE fund_nav_data SET nav=?, added_nav=?, total_share=?, total_asset=?, total_nav=?, is_split=?, is_open_date=?, split_ratio=?, after_tax_bonus=? WHERE fund_name=? AND statistic_date=?"""
                    cur.execute(sql, values)
            else:
                # New record: append the single matching row.
                # BUG FIX: compare the date column as strings here too — the
                # original compared raw cells against the str()-converted
                # date, which selects nothing for datetime cells.
                commit_data = excel_df[(excel_df["基金简称"] == name) &
                                       (excel_df["净值日期"].astype(str) == date)]
                commit_data.columns = column_names
                commit_data.to_sql("fund_nav_data", con, if_exists="append", index=False)
def listing(request):
    """Handle the upload page: save the posted workbook and import its sheets.

    GET renders the empty upload form.  POST (authenticated users only)
    stores the file via UserProfile, then dispatches each of the four data
    sheets to its importer in order.  Unauthenticated or invalid POSTs are
    redirected to the login page.
    """
    context = {}
    if request.method == "POST":
        uf = UserForm(request.POST, request.FILES)
        if request.user.username and uf.is_valid():
            user_upload_file = uf.cleaned_data['user_upload_file']
            # Persist the upload record.
            profile = UserProfile()
            profile.username = request.user.username
            profile.user_upload_file = user_upload_file
            profile.save()
            file_name = request.FILES.get('user_upload_file').name
            # NOTE(review): upload directory is hard-coded; it should come
            # from settings (MEDIA_ROOT) instead.
            path = "C:\\Users\\K\\Desktop\\excel-upload-sqlite3\\mins\\upload\\upload\\"
            filefullpath = path + file_name
            if user_upload_file:
                # Fail fast if the saved file is not a readable workbook.
                xlrd.open_workbook(filefullpath)
                # Sheet index -> (header-row label, importer).  Order
                # matters: fund_info must exist before NAV rows reference it.
                importers = {
                    1: ("公司资料简介", df_to_sql_T_1),
                    2: ("基金简介", df_to_sql_T_2),
                    3: ("人员简介", df_to_sql_T_3),
                    4: ("基金简称", df_to_sql_4),
                }
                for sheet, (row_name, importer) in importers.items():
                    importer(filefullpath, sheet, row_name)
                return HttpResponse('upload ok!')
        else:
            return redirect(to='login')
    else:
        uf = UserForm()
    context['uf'] = uf
    return render(request, 'website/templates/listing.html', context)
def index_login(request):
    """Render the login page; authenticate valid POSTed credentials.

    On success the user is logged in and redirected to the listing view.
    """
    # Instantiate the form up front: the original passed the bare form
    # *class* on GET and raised NameError for any other non-POST method.
    form = AuthenticationForm()
    if request.method == "POST":
        form = AuthenticationForm(data=request.POST)
        if form.is_valid():
            login(request, form.get_user())
            return redirect(to='list')
    return render(request, 'register_login.html', {'form': form})
def index_register(request):
    """Render the registration page and create the account on POST.

    A valid submission saves the new user and redirects to the login view;
    otherwise the bound form (with its errors) is re-rendered.
    """
    context = {}
    if request.method == 'POST':
        form = UserCreationForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect(to='login')
    else:
        # Instantiate the form (the original passed the bare class) so the
        # template always gets a ready unbound form, and so `form` is bound
        # for any non-POST method, avoiding a NameError.
        form = UserCreationForm()
    context['form'] = form
    return render(request, 'register_login.html', context)
|
normal
|
{
"blob_id": "d261efa72e1ab77507a1fd84aa2e462c6969af56",
"index": 6579,
"step-1": "<mask token>\n\n\ndef df_to_sql_T_1(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★机构全名'], inplace=True)\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM org_info'\n sql_df = pd.read_sql(sql, con)\n fund_name_list = sql_df['org_full_name'].tolist()\n sql_number = len(fund_name_list)\n org_id_number = 0\n for org_full_name in sql_df['org_full_name'].unique():\n org_id_number = org_id_number + 1\n org_id = 'O' + '0' * (5 - len(str(org_id_number))) + str(org_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE org_info SET org_id=? WHERE org_full_name=?',\n (org_id, org_full_name))\n excel_name_list = excel_df['★机构全名'].tolist()\n for name in excel_name_list:\n if name in fund_name_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM org_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['org_full_name'] == name]\n org_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'org_id']\n commit_data = excel_df[excel_df['★机构全名'] == name]\n commit_data.columns = ['org_name', 'org_full_name', 'reg_code',\n 'reg_time', 'found_date', 'reg_capital', 'real_capital',\n 'region', 'profile', 'address', 'team', 'fund_num',\n 'is_qualification', 'prize', 'team_scale',\n 'investment_idea', 'master_strategy', 'remark',\n 'asset_mgt_scale', 'linkman', 'linkman_duty',\n 'linkman_phone', 'linkman_email']\n commit_data['org_id'] = str(org_id)\n org_name = str(commit_data.loc[commit_data.org_full_name ==\n name, 'org_name'].values[0])\n org_full_name = 
str(name)\n reg_code = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_code'].values[0])\n reg_time = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_time'].values[0])\n found_date = str(commit_data.loc[commit_data.org_full_name ==\n name, 'found_date'].values[0])\n reg_capital = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_capital'].values[0])\n real_capital = str(commit_data.loc[commit_data.org_full_name ==\n name, 'real_capital'].values[0])\n region = str(commit_data.loc[commit_data.org_full_name == name,\n 'region'].values[0])\n profile = str(commit_data.loc[commit_data.org_full_name == name,\n 'profile'].values[0])\n address = str(commit_data.loc[commit_data.org_full_name == name,\n 'address'].values[0])\n team = str(commit_data.loc[commit_data.org_full_name == name,\n 'org_name'].values[0])\n fund_num = str(commit_data.loc[commit_data.org_full_name ==\n name, 'team'].values[0])\n is_qualification = str(commit_data.loc[commit_data.\n org_full_name == name, 'is_qualification'].values[0])\n prize = str(commit_data.loc[commit_data.org_full_name == name,\n 'prize'].values[0])\n team_scale = str(commit_data.loc[commit_data.org_full_name ==\n name, 'team_scale'])\n investment_idea = str(commit_data.loc[commit_data.org_full_name ==\n name, 'investment_idea'].values[0])\n master_strategy = str(commit_data.loc[commit_data.org_full_name ==\n name, 'master_strategy'].values[0])\n remark = str(commit_data.loc[commit_data.org_full_name == name,\n 'remark'].values[0])\n asset_mgt_scale = str(commit_data.loc[commit_data.org_full_name ==\n name, 'asset_mgt_scale'].values[0])\n linkman = str(commit_data.loc[commit_data.org_full_name == name,\n 'linkman'].values[0])\n linkman_duty = str(commit_data.loc[commit_data.org_full_name ==\n name, 'linkman_duty'].values[0])\n linkman_phone = str(commit_data.loc[commit_data.org_full_name ==\n name, 'linkman_phone'].values[0])\n linkman_email = str(commit_data.loc[commit_data.org_full_name ==\n 
name, 'linkman_email'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n 'UPDATE org_info SET org_name=?, org_full_name=?, reg_code=?, reg_time=?, found_date=?, reg_capital=?, real_capital=?, region=?,profile=?, address=?, team=?, fund_num=?, is_qualification=?, prize=?, team_scale=?, investment_idea=?, master_strategy=?, remark=?, asset_mgt_scale=?, linkman=?, linkman_duty=?, linkman_phone=?, linkman_email=? WHERE org_id=?'\n )\n l = (org_name, org_full_name, reg_code, reg_time,\n found_date, reg_capital, real_capital, region, profile,\n address, team, fund_num, is_qualification, prize,\n team_scale, investment_idea, master_strategy, remark,\n asset_mgt_scale, linkman, linkman_duty, linkman_phone,\n linkman_email, org_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★机构全名'] == name]\n commit_data.columns = ['org_name', 'org_full_name', 'reg_code',\n 'reg_time', 'found_date', 'reg_capital', 'real_capital',\n 'region', 'profile', 'address', 'team', 'fund_num',\n 'is_qualification', 'prize', 'team_scale',\n 'investment_idea', 'master_strategy', 'remark',\n 'asset_mgt_scale', 'linkman', 'linkman_duty',\n 'linkman_phone', 'linkman_email']\n commit_data.loc[:, 'org_id'] = 'O' + '0' * (5 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('org_info', con, if_exists='append', index=False\n )\n print('else')\n\n\ndef df_to_sql_T_2(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★基金全称'], inplace=True)\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM fund_info'\n sql_df = pd.read_sql(sql, 
con)\n fund_name_list = sql_df['fund_full_name'].tolist()\n sql_number = len(fund_name_list)\n fund_id_number = 0\n for fund_full_name in sql_df['fund_full_name'].unique():\n fund_id_number = fund_id_number + 1\n fund_id = 'F' + '0' * (6 - len(str(fund_id_number))) + str(\n fund_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE fund_info SET fund_id=? WHERE fund_full_name=?'\n , (fund_id, fund_full_name))\n excel_name_list = excel_df['★基金全称'].tolist()\n for name in excel_name_list:\n if name in fund_name_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM fund_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['fund_full_name'] == name]\n fund_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'fund_id']\n commit_data = excel_df[excel_df['★基金全称'] == name]\n commit_data.columns = ['group', 'fund_type_strategy',\n 'reg_code', 'foundation_date', 'fund_name',\n 'fund_full_name', 'fund_manager', 'fund_manager_nominal',\n 'fund_stockbroker', 'fund_custodian', 'fund_member',\n 'fund_type_issuance', 'fund_type_structure',\n 'fund_structure', 'issue_scale', 'asset_scale',\n 'is_main_fund', 'fee_pay', 'open_date', 'locked_time_limit',\n 'duration', 'fee_manage', 'fee_pay_remark', 'fee_redeem',\n 'fee_subscription', 'fee_trust', 'investment_range',\n 'min_purchase_amount', 'min_append_amount', 'stop_line',\n 'alert_line', 'manager_participation_scale',\n 'investment_idea', 'structure_hierarchy', 'remark']\n commit_data['fund_id'] = str(fund_id)\n group = str(commit_data.loc[commit_data.fund_full_name == name,\n 'group'].values[0])\n fund_type_strategy = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_strategy'].values[0])\n reg_code = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'reg_code'].values[0])\n foundation_date = str(commit_data.loc[commit_data.\n fund_full_name == name, 'foundation_date'].values[0])\n 
fund_name = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_name'].values[0])\n fund_full_name = str(name)\n fund_manager = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_manager'].values[0])\n fund_manager_nominal = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_manager_nominal'].values[0])\n fund_stockbroker = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_stockbroker'].values[0])\n fund_custodian = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_custodian'].values[0])\n fund_member = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_member'].values[0])\n fund_type_issuance = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_issuance'].values[0])\n fund_type_structure = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_structure'].values[0])\n fund_structure = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_structure'].values[0])\n issue_scale = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'issue_scale'].values[0])\n asset_scale = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'asset_scale'].values[0])\n is_main_fund = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'is_main_fund'].values[0])\n fee_pay = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_pay'].values[0])\n open_date = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'open_date'])\n locked_time_limit = str(commit_data.loc[commit_data.\n fund_full_name == name, 'locked_time_limit'].values[0])\n duration = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'duration'].values[0])\n fee_manage = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_manage'].values[0])\n fee_pay_remark = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_pay_remark'].values[0])\n fee_redeem = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_redeem'].values[0])\n 
fee_subscription = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fee_subscription'].values[0])\n fee_trust = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_trust'].values[0])\n investment_range = str(commit_data.loc[commit_data.\n fund_full_name == name, 'investment_range'].values[0])\n min_purchase_amount = str(commit_data.loc[commit_data.\n fund_full_name == name, 'min_purchase_amount'].values[0])\n min_append_amount = str(commit_data.loc[commit_data.\n fund_full_name == name, 'min_append_amount'].values[0])\n stop_line = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'stop_line'].values[0])\n alert_line = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'alert_line'].values[0])\n manager_participation_scale = str(commit_data.loc[commit_data.\n fund_full_name == name, 'manager_participation_scale'].\n values[0])\n investment_idea = str(commit_data.loc[commit_data.\n fund_full_name == name, 'investment_idea'].values[0])\n structure_hierarchy = str(commit_data.loc[commit_data.\n fund_full_name == name, 'structure_hierarchy'].values[0])\n remark = str(commit_data.loc[commit_data.fund_full_name == name,\n 'remark'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n \"UPDATE fund_info SET 'group'=?, fund_type_strategy=?, reg_code=?, foundation_date=?, fund_name=?, fund_full_name=?, fund_manager=?, fund_manager_nominal=?, fund_stockbroker=?, fund_custodian=?, fund_member=?, fund_type_issuance=?, fund_type_structure=?, fund_structure=?, issue_scale=?, asset_scale=?, is_main_fund=?, fee_pay=?, open_date=?, locked_time_limit=?, duration=?, fee_manage=?, fee_pay_remark=?, fee_redeem=?, fee_subscription=?, fee_trust=?, investment_range=?, min_purchase_amount=?, min_append_amount=?, stop_line=?, alert_line=?, manager_participation_scale=?, investment_idea=?, structure_hierarchy=?, remark=? 
WHERE fund_id=?\"\n )\n l = (group, fund_type_strategy, reg_code, foundation_date,\n fund_name, fund_full_name, fund_manager,\n fund_manager_nominal, fund_stockbroker, fund_custodian,\n fund_member, fund_type_issuance, fund_type_structure,\n fund_structure, issue_scale, asset_scale, is_main_fund,\n fee_pay, open_date, locked_time_limit, duration,\n fee_manage, fee_pay_remark, fee_redeem,\n fee_subscription, fee_trust, investment_range,\n min_purchase_amount, min_append_amount, stop_line,\n alert_line, manager_participation_scale,\n investment_idea, structure_hierarchy, remark, fund_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★基金全称'] == name]\n commit_data.columns = ['group', 'fund_type_strategy',\n 'reg_code', 'foundation_date', 'fund_name',\n 'fund_full_name', 'fund_manager', 'fund_manager_nominal',\n 'fund_stockbroker', 'fund_custodian', 'fund_member',\n 'fund_type_issuance', 'fund_type_structure',\n 'fund_structure', 'issue_scale', 'asset_scale',\n 'is_main_fund', 'fee_pay', 'open_date', 'locked_time_limit',\n 'duration', 'fee_manage', 'fee_pay_remark', 'fee_redeem',\n 'fee_subscription', 'fee_trust', 'investment_range',\n 'min_purchase_amount', 'min_append_amount', 'stop_line',\n 'alert_line', 'manager_participation_scale',\n 'investment_idea', 'structure_hierarchy', 'remark']\n commit_data.loc[:, 'fund_id'] = 'F' + '0' * (6 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('fund_info', con, if_exists='append', index=\n False)\n print('else')\n\n\ndef df_to_sql_T_3(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★姓名'], inplace=True)\n con = 
sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM manager_info'\n sql_df = pd.read_sql(sql, con)\n user_list = sql_df['user_name'].tolist()\n sql_number = len(user_list)\n user_id_number = 0\n for user_name in sql_df['user_name'].unique():\n user_id_number = user_id_number + 1\n user_id = 'M' + '0' * (5 - len(str(user_id_number))) + str(\n user_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE manager_info SET user_id=? WHERE user_name=?',\n (user_id, user_name))\n excel_name_list = excel_df['★姓名'].tolist()\n for name in excel_name_list:\n if name in user_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM manager_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['user_name'] == name]\n user_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'user_id']\n commit_data = excel_df[excel_df['★姓名'] == name]\n commit_data.columns = ['user_name', 'sex', 'org_name',\n 'introduction', 'photo', 'entry_date', 'investment_years',\n 'education', 'duty', 'qualification', 'background',\n 'is_fund_qualification', 'is_core_member', 'resume',\n 'max_asset_mgt_scale', 'prize', 'remark']\n commit_data['user_id'] = str(user_id)\n user_name = str(name)\n sex = str(commit_data.loc[commit_data.user_name == name, 'sex']\n .values[0])\n org_name = str(commit_data.loc[commit_data.user_name == name,\n 'org_name'].values[0])\n introduction = str(commit_data.loc[commit_data.user_name ==\n name, 'introduction'].values[0])\n photo = str(commit_data.loc[commit_data.user_name == name,\n 'photo'].values[0])\n entry_date = str(commit_data.loc[commit_data.user_name == name,\n 'entry_date'].values[0])\n investment_years = str(commit_data.loc[commit_data.user_name ==\n name, 'investment_years'].values[0])\n education = str(commit_data.loc[commit_data.user_name == name,\n 'education'].values[0])\n duty = 
str(commit_data.loc[commit_data.user_name == name,\n 'duty'].values[0])\n qualification = str(commit_data.loc[commit_data.user_name ==\n name, 'qualification'].values[0])\n background = str(commit_data.loc[commit_data.user_name == name,\n 'background'].values[0])\n is_fund_qualification = str(commit_data.loc[commit_data.\n user_name == name, 'is_fund_qualification'].values[0])\n is_core_member = str(commit_data.loc[commit_data.user_name ==\n name, 'is_core_member'].values[0])\n resume = str(commit_data.loc[commit_data.user_name == name,\n 'resume'].values[0])\n max_asset_mgt_scale = str(commit_data.loc[commit_data.user_name ==\n name, 'max_asset_mgt_scale'].values[0])\n prize = str(commit_data.loc[commit_data.user_name == name,\n 'prize'].values[0])\n remark = str(commit_data.loc[commit_data.user_name == name,\n 'remark'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n 'UPDATE manager_info SET user_name=?, sex=?, org_name=?, introduction=?, photo=?, entry_date=?, investment_years=?, education=?, duty=?, qualification=?, background=?, is_fund_qualification=?, is_core_member=?, resume=?, max_asset_mgt_scale=?, prize=?, remark=? 
WHERE user_id=?'\n )\n l = (user_name, sex, org_name, introduction, photo,\n entry_date, investment_years, education, duty,\n qualification, background, is_fund_qualification,\n is_core_member, resume, max_asset_mgt_scale, prize,\n remark, user_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★姓名'] == name]\n commit_data.columns = ['user_name', 'sex', 'org_name',\n 'introduction', 'photo', 'entry_date', 'investment_years',\n 'education', 'duty', 'qualification', 'background',\n 'is_fund_qualification', 'is_core_member', 'resume',\n 'max_asset_mgt_scale', 'prize', 'remark']\n commit_data.loc[:, 'user_id'] = 'M' + '0' * (5 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('manager_info', con, if_exists='append',\n index=False)\n print('else')\n\n\n<mask token>\n\n\ndef listing(request):\n context = {}\n if request.method == 'POST':\n uf = UserForm(request.POST, request.FILES)\n if request.user.username and uf.is_valid():\n user_upload_file = uf.cleaned_data['user_upload_file']\n profile = UserProfile()\n profile.username = request.user.username\n profile.user_upload_file = user_upload_file\n profile.save()\n file_name = request.FILES.get('user_upload_file').name\n path = (\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\upload\\\\upload\\\\'\n )\n filefullpath = path + file_name\n if user_upload_file:\n b = xlrd.open_workbook(filefullpath)\n for sheet in range(1, 5):\n if sheet == 1:\n row_name = '公司资料简介'\n df_to_sql_T_1(filefullpath, sheet, row_name)\n if sheet == 2:\n row_name = '基金简介'\n df_to_sql_T_2(filefullpath, sheet, row_name)\n if sheet == 3:\n row_name = '人员简介'\n df_to_sql_T_3(filefullpath, sheet, row_name)\n if sheet == 4:\n row_name = '基金简称'\n df_to_sql_4(filefullpath, sheet, row_name)\n return HttpResponse('upload ok!')\n else:\n return redirect(to='login')\n else:\n uf = UserForm()\n context['uf'] = uf\n return render(request, 
'website/templates/listing.html', context)\n\n\n<mask token>\n\n\ndef index_register(request):\n context = {}\n if request.method == 'GET':\n form = UserCreationForm\n if request.method == 'POST':\n form = UserCreationForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect(to='login')\n context['form'] = form\n return render(request, 'register_login.html', context)\n",
"step-2": "<mask token>\n\n\ndef df_to_sql_T_1(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★机构全名'], inplace=True)\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM org_info'\n sql_df = pd.read_sql(sql, con)\n fund_name_list = sql_df['org_full_name'].tolist()\n sql_number = len(fund_name_list)\n org_id_number = 0\n for org_full_name in sql_df['org_full_name'].unique():\n org_id_number = org_id_number + 1\n org_id = 'O' + '0' * (5 - len(str(org_id_number))) + str(org_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE org_info SET org_id=? WHERE org_full_name=?',\n (org_id, org_full_name))\n excel_name_list = excel_df['★机构全名'].tolist()\n for name in excel_name_list:\n if name in fund_name_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM org_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['org_full_name'] == name]\n org_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'org_id']\n commit_data = excel_df[excel_df['★机构全名'] == name]\n commit_data.columns = ['org_name', 'org_full_name', 'reg_code',\n 'reg_time', 'found_date', 'reg_capital', 'real_capital',\n 'region', 'profile', 'address', 'team', 'fund_num',\n 'is_qualification', 'prize', 'team_scale',\n 'investment_idea', 'master_strategy', 'remark',\n 'asset_mgt_scale', 'linkman', 'linkman_duty',\n 'linkman_phone', 'linkman_email']\n commit_data['org_id'] = str(org_id)\n org_name = str(commit_data.loc[commit_data.org_full_name ==\n name, 'org_name'].values[0])\n org_full_name = 
str(name)\n reg_code = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_code'].values[0])\n reg_time = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_time'].values[0])\n found_date = str(commit_data.loc[commit_data.org_full_name ==\n name, 'found_date'].values[0])\n reg_capital = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_capital'].values[0])\n real_capital = str(commit_data.loc[commit_data.org_full_name ==\n name, 'real_capital'].values[0])\n region = str(commit_data.loc[commit_data.org_full_name == name,\n 'region'].values[0])\n profile = str(commit_data.loc[commit_data.org_full_name == name,\n 'profile'].values[0])\n address = str(commit_data.loc[commit_data.org_full_name == name,\n 'address'].values[0])\n team = str(commit_data.loc[commit_data.org_full_name == name,\n 'org_name'].values[0])\n fund_num = str(commit_data.loc[commit_data.org_full_name ==\n name, 'team'].values[0])\n is_qualification = str(commit_data.loc[commit_data.\n org_full_name == name, 'is_qualification'].values[0])\n prize = str(commit_data.loc[commit_data.org_full_name == name,\n 'prize'].values[0])\n team_scale = str(commit_data.loc[commit_data.org_full_name ==\n name, 'team_scale'])\n investment_idea = str(commit_data.loc[commit_data.org_full_name ==\n name, 'investment_idea'].values[0])\n master_strategy = str(commit_data.loc[commit_data.org_full_name ==\n name, 'master_strategy'].values[0])\n remark = str(commit_data.loc[commit_data.org_full_name == name,\n 'remark'].values[0])\n asset_mgt_scale = str(commit_data.loc[commit_data.org_full_name ==\n name, 'asset_mgt_scale'].values[0])\n linkman = str(commit_data.loc[commit_data.org_full_name == name,\n 'linkman'].values[0])\n linkman_duty = str(commit_data.loc[commit_data.org_full_name ==\n name, 'linkman_duty'].values[0])\n linkman_phone = str(commit_data.loc[commit_data.org_full_name ==\n name, 'linkman_phone'].values[0])\n linkman_email = str(commit_data.loc[commit_data.org_full_name ==\n 
name, 'linkman_email'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n 'UPDATE org_info SET org_name=?, org_full_name=?, reg_code=?, reg_time=?, found_date=?, reg_capital=?, real_capital=?, region=?,profile=?, address=?, team=?, fund_num=?, is_qualification=?, prize=?, team_scale=?, investment_idea=?, master_strategy=?, remark=?, asset_mgt_scale=?, linkman=?, linkman_duty=?, linkman_phone=?, linkman_email=? WHERE org_id=?'\n )\n l = (org_name, org_full_name, reg_code, reg_time,\n found_date, reg_capital, real_capital, region, profile,\n address, team, fund_num, is_qualification, prize,\n team_scale, investment_idea, master_strategy, remark,\n asset_mgt_scale, linkman, linkman_duty, linkman_phone,\n linkman_email, org_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★机构全名'] == name]\n commit_data.columns = ['org_name', 'org_full_name', 'reg_code',\n 'reg_time', 'found_date', 'reg_capital', 'real_capital',\n 'region', 'profile', 'address', 'team', 'fund_num',\n 'is_qualification', 'prize', 'team_scale',\n 'investment_idea', 'master_strategy', 'remark',\n 'asset_mgt_scale', 'linkman', 'linkman_duty',\n 'linkman_phone', 'linkman_email']\n commit_data.loc[:, 'org_id'] = 'O' + '0' * (5 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('org_info', con, if_exists='append', index=False\n )\n print('else')\n\n\ndef df_to_sql_T_2(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★基金全称'], inplace=True)\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM fund_info'\n sql_df = pd.read_sql(sql, 
con)\n fund_name_list = sql_df['fund_full_name'].tolist()\n sql_number = len(fund_name_list)\n fund_id_number = 0\n for fund_full_name in sql_df['fund_full_name'].unique():\n fund_id_number = fund_id_number + 1\n fund_id = 'F' + '0' * (6 - len(str(fund_id_number))) + str(\n fund_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE fund_info SET fund_id=? WHERE fund_full_name=?'\n , (fund_id, fund_full_name))\n excel_name_list = excel_df['★基金全称'].tolist()\n for name in excel_name_list:\n if name in fund_name_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM fund_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['fund_full_name'] == name]\n fund_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'fund_id']\n commit_data = excel_df[excel_df['★基金全称'] == name]\n commit_data.columns = ['group', 'fund_type_strategy',\n 'reg_code', 'foundation_date', 'fund_name',\n 'fund_full_name', 'fund_manager', 'fund_manager_nominal',\n 'fund_stockbroker', 'fund_custodian', 'fund_member',\n 'fund_type_issuance', 'fund_type_structure',\n 'fund_structure', 'issue_scale', 'asset_scale',\n 'is_main_fund', 'fee_pay', 'open_date', 'locked_time_limit',\n 'duration', 'fee_manage', 'fee_pay_remark', 'fee_redeem',\n 'fee_subscription', 'fee_trust', 'investment_range',\n 'min_purchase_amount', 'min_append_amount', 'stop_line',\n 'alert_line', 'manager_participation_scale',\n 'investment_idea', 'structure_hierarchy', 'remark']\n commit_data['fund_id'] = str(fund_id)\n group = str(commit_data.loc[commit_data.fund_full_name == name,\n 'group'].values[0])\n fund_type_strategy = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_strategy'].values[0])\n reg_code = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'reg_code'].values[0])\n foundation_date = str(commit_data.loc[commit_data.\n fund_full_name == name, 'foundation_date'].values[0])\n 
fund_name = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_name'].values[0])\n fund_full_name = str(name)\n fund_manager = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_manager'].values[0])\n fund_manager_nominal = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_manager_nominal'].values[0])\n fund_stockbroker = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_stockbroker'].values[0])\n fund_custodian = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_custodian'].values[0])\n fund_member = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_member'].values[0])\n fund_type_issuance = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_issuance'].values[0])\n fund_type_structure = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_structure'].values[0])\n fund_structure = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_structure'].values[0])\n issue_scale = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'issue_scale'].values[0])\n asset_scale = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'asset_scale'].values[0])\n is_main_fund = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'is_main_fund'].values[0])\n fee_pay = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_pay'].values[0])\n open_date = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'open_date'])\n locked_time_limit = str(commit_data.loc[commit_data.\n fund_full_name == name, 'locked_time_limit'].values[0])\n duration = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'duration'].values[0])\n fee_manage = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_manage'].values[0])\n fee_pay_remark = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_pay_remark'].values[0])\n fee_redeem = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_redeem'].values[0])\n 
fee_subscription = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fee_subscription'].values[0])\n fee_trust = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_trust'].values[0])\n investment_range = str(commit_data.loc[commit_data.\n fund_full_name == name, 'investment_range'].values[0])\n min_purchase_amount = str(commit_data.loc[commit_data.\n fund_full_name == name, 'min_purchase_amount'].values[0])\n min_append_amount = str(commit_data.loc[commit_data.\n fund_full_name == name, 'min_append_amount'].values[0])\n stop_line = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'stop_line'].values[0])\n alert_line = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'alert_line'].values[0])\n manager_participation_scale = str(commit_data.loc[commit_data.\n fund_full_name == name, 'manager_participation_scale'].\n values[0])\n investment_idea = str(commit_data.loc[commit_data.\n fund_full_name == name, 'investment_idea'].values[0])\n structure_hierarchy = str(commit_data.loc[commit_data.\n fund_full_name == name, 'structure_hierarchy'].values[0])\n remark = str(commit_data.loc[commit_data.fund_full_name == name,\n 'remark'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n \"UPDATE fund_info SET 'group'=?, fund_type_strategy=?, reg_code=?, foundation_date=?, fund_name=?, fund_full_name=?, fund_manager=?, fund_manager_nominal=?, fund_stockbroker=?, fund_custodian=?, fund_member=?, fund_type_issuance=?, fund_type_structure=?, fund_structure=?, issue_scale=?, asset_scale=?, is_main_fund=?, fee_pay=?, open_date=?, locked_time_limit=?, duration=?, fee_manage=?, fee_pay_remark=?, fee_redeem=?, fee_subscription=?, fee_trust=?, investment_range=?, min_purchase_amount=?, min_append_amount=?, stop_line=?, alert_line=?, manager_participation_scale=?, investment_idea=?, structure_hierarchy=?, remark=? 
WHERE fund_id=?\"\n )\n l = (group, fund_type_strategy, reg_code, foundation_date,\n fund_name, fund_full_name, fund_manager,\n fund_manager_nominal, fund_stockbroker, fund_custodian,\n fund_member, fund_type_issuance, fund_type_structure,\n fund_structure, issue_scale, asset_scale, is_main_fund,\n fee_pay, open_date, locked_time_limit, duration,\n fee_manage, fee_pay_remark, fee_redeem,\n fee_subscription, fee_trust, investment_range,\n min_purchase_amount, min_append_amount, stop_line,\n alert_line, manager_participation_scale,\n investment_idea, structure_hierarchy, remark, fund_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★基金全称'] == name]\n commit_data.columns = ['group', 'fund_type_strategy',\n 'reg_code', 'foundation_date', 'fund_name',\n 'fund_full_name', 'fund_manager', 'fund_manager_nominal',\n 'fund_stockbroker', 'fund_custodian', 'fund_member',\n 'fund_type_issuance', 'fund_type_structure',\n 'fund_structure', 'issue_scale', 'asset_scale',\n 'is_main_fund', 'fee_pay', 'open_date', 'locked_time_limit',\n 'duration', 'fee_manage', 'fee_pay_remark', 'fee_redeem',\n 'fee_subscription', 'fee_trust', 'investment_range',\n 'min_purchase_amount', 'min_append_amount', 'stop_line',\n 'alert_line', 'manager_participation_scale',\n 'investment_idea', 'structure_hierarchy', 'remark']\n commit_data.loc[:, 'fund_id'] = 'F' + '0' * (6 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('fund_info', con, if_exists='append', index=\n False)\n print('else')\n\n\ndef df_to_sql_T_3(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★姓名'], inplace=True)\n con = 
sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM manager_info'\n sql_df = pd.read_sql(sql, con)\n user_list = sql_df['user_name'].tolist()\n sql_number = len(user_list)\n user_id_number = 0\n for user_name in sql_df['user_name'].unique():\n user_id_number = user_id_number + 1\n user_id = 'M' + '0' * (5 - len(str(user_id_number))) + str(\n user_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE manager_info SET user_id=? WHERE user_name=?',\n (user_id, user_name))\n excel_name_list = excel_df['★姓名'].tolist()\n for name in excel_name_list:\n if name in user_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM manager_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['user_name'] == name]\n user_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'user_id']\n commit_data = excel_df[excel_df['★姓名'] == name]\n commit_data.columns = ['user_name', 'sex', 'org_name',\n 'introduction', 'photo', 'entry_date', 'investment_years',\n 'education', 'duty', 'qualification', 'background',\n 'is_fund_qualification', 'is_core_member', 'resume',\n 'max_asset_mgt_scale', 'prize', 'remark']\n commit_data['user_id'] = str(user_id)\n user_name = str(name)\n sex = str(commit_data.loc[commit_data.user_name == name, 'sex']\n .values[0])\n org_name = str(commit_data.loc[commit_data.user_name == name,\n 'org_name'].values[0])\n introduction = str(commit_data.loc[commit_data.user_name ==\n name, 'introduction'].values[0])\n photo = str(commit_data.loc[commit_data.user_name == name,\n 'photo'].values[0])\n entry_date = str(commit_data.loc[commit_data.user_name == name,\n 'entry_date'].values[0])\n investment_years = str(commit_data.loc[commit_data.user_name ==\n name, 'investment_years'].values[0])\n education = str(commit_data.loc[commit_data.user_name == name,\n 'education'].values[0])\n duty = 
str(commit_data.loc[commit_data.user_name == name,\n 'duty'].values[0])\n qualification = str(commit_data.loc[commit_data.user_name ==\n name, 'qualification'].values[0])\n background = str(commit_data.loc[commit_data.user_name == name,\n 'background'].values[0])\n is_fund_qualification = str(commit_data.loc[commit_data.\n user_name == name, 'is_fund_qualification'].values[0])\n is_core_member = str(commit_data.loc[commit_data.user_name ==\n name, 'is_core_member'].values[0])\n resume = str(commit_data.loc[commit_data.user_name == name,\n 'resume'].values[0])\n max_asset_mgt_scale = str(commit_data.loc[commit_data.user_name ==\n name, 'max_asset_mgt_scale'].values[0])\n prize = str(commit_data.loc[commit_data.user_name == name,\n 'prize'].values[0])\n remark = str(commit_data.loc[commit_data.user_name == name,\n 'remark'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n 'UPDATE manager_info SET user_name=?, sex=?, org_name=?, introduction=?, photo=?, entry_date=?, investment_years=?, education=?, duty=?, qualification=?, background=?, is_fund_qualification=?, is_core_member=?, resume=?, max_asset_mgt_scale=?, prize=?, remark=? 
WHERE user_id=?'\n )\n l = (user_name, sex, org_name, introduction, photo,\n entry_date, investment_years, education, duty,\n qualification, background, is_fund_qualification,\n is_core_member, resume, max_asset_mgt_scale, prize,\n remark, user_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★姓名'] == name]\n commit_data.columns = ['user_name', 'sex', 'org_name',\n 'introduction', 'photo', 'entry_date', 'investment_years',\n 'education', 'duty', 'qualification', 'background',\n 'is_fund_qualification', 'is_core_member', 'resume',\n 'max_asset_mgt_scale', 'prize', 'remark']\n commit_data.loc[:, 'user_id'] = 'M' + '0' * (5 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('manager_info', con, if_exists='append',\n index=False)\n print('else')\n\n\n<mask token>\n\n\ndef listing(request):\n context = {}\n if request.method == 'POST':\n uf = UserForm(request.POST, request.FILES)\n if request.user.username and uf.is_valid():\n user_upload_file = uf.cleaned_data['user_upload_file']\n profile = UserProfile()\n profile.username = request.user.username\n profile.user_upload_file = user_upload_file\n profile.save()\n file_name = request.FILES.get('user_upload_file').name\n path = (\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\upload\\\\upload\\\\'\n )\n filefullpath = path + file_name\n if user_upload_file:\n b = xlrd.open_workbook(filefullpath)\n for sheet in range(1, 5):\n if sheet == 1:\n row_name = '公司资料简介'\n df_to_sql_T_1(filefullpath, sheet, row_name)\n if sheet == 2:\n row_name = '基金简介'\n df_to_sql_T_2(filefullpath, sheet, row_name)\n if sheet == 3:\n row_name = '人员简介'\n df_to_sql_T_3(filefullpath, sheet, row_name)\n if sheet == 4:\n row_name = '基金简称'\n df_to_sql_4(filefullpath, sheet, row_name)\n return HttpResponse('upload ok!')\n else:\n return redirect(to='login')\n else:\n uf = UserForm()\n context['uf'] = uf\n return render(request, 
'website/templates/listing.html', context)\n\n\ndef index_login(request):\n context = {}\n if request.method == 'GET':\n form = AuthenticationForm\n if request.method == 'POST':\n form = AuthenticationForm(data=request.POST)\n if form.is_valid():\n login(request, form.get_user())\n return redirect(to='list')\n context['form'] = form\n return render(request, 'register_login.html', context)\n\n\ndef index_register(request):\n context = {}\n if request.method == 'GET':\n form = UserCreationForm\n if request.method == 'POST':\n form = UserCreationForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect(to='login')\n context['form'] = form\n return render(request, 'register_login.html', context)\n",
"step-3": "<mask token>\n\n\ndef df_to_sql_T_1(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★机构全名'], inplace=True)\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM org_info'\n sql_df = pd.read_sql(sql, con)\n fund_name_list = sql_df['org_full_name'].tolist()\n sql_number = len(fund_name_list)\n org_id_number = 0\n for org_full_name in sql_df['org_full_name'].unique():\n org_id_number = org_id_number + 1\n org_id = 'O' + '0' * (5 - len(str(org_id_number))) + str(org_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE org_info SET org_id=? WHERE org_full_name=?',\n (org_id, org_full_name))\n excel_name_list = excel_df['★机构全名'].tolist()\n for name in excel_name_list:\n if name in fund_name_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM org_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['org_full_name'] == name]\n org_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'org_id']\n commit_data = excel_df[excel_df['★机构全名'] == name]\n commit_data.columns = ['org_name', 'org_full_name', 'reg_code',\n 'reg_time', 'found_date', 'reg_capital', 'real_capital',\n 'region', 'profile', 'address', 'team', 'fund_num',\n 'is_qualification', 'prize', 'team_scale',\n 'investment_idea', 'master_strategy', 'remark',\n 'asset_mgt_scale', 'linkman', 'linkman_duty',\n 'linkman_phone', 'linkman_email']\n commit_data['org_id'] = str(org_id)\n org_name = str(commit_data.loc[commit_data.org_full_name ==\n name, 'org_name'].values[0])\n org_full_name = 
str(name)\n reg_code = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_code'].values[0])\n reg_time = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_time'].values[0])\n found_date = str(commit_data.loc[commit_data.org_full_name ==\n name, 'found_date'].values[0])\n reg_capital = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_capital'].values[0])\n real_capital = str(commit_data.loc[commit_data.org_full_name ==\n name, 'real_capital'].values[0])\n region = str(commit_data.loc[commit_data.org_full_name == name,\n 'region'].values[0])\n profile = str(commit_data.loc[commit_data.org_full_name == name,\n 'profile'].values[0])\n address = str(commit_data.loc[commit_data.org_full_name == name,\n 'address'].values[0])\n team = str(commit_data.loc[commit_data.org_full_name == name,\n 'org_name'].values[0])\n fund_num = str(commit_data.loc[commit_data.org_full_name ==\n name, 'team'].values[0])\n is_qualification = str(commit_data.loc[commit_data.\n org_full_name == name, 'is_qualification'].values[0])\n prize = str(commit_data.loc[commit_data.org_full_name == name,\n 'prize'].values[0])\n team_scale = str(commit_data.loc[commit_data.org_full_name ==\n name, 'team_scale'])\n investment_idea = str(commit_data.loc[commit_data.org_full_name ==\n name, 'investment_idea'].values[0])\n master_strategy = str(commit_data.loc[commit_data.org_full_name ==\n name, 'master_strategy'].values[0])\n remark = str(commit_data.loc[commit_data.org_full_name == name,\n 'remark'].values[0])\n asset_mgt_scale = str(commit_data.loc[commit_data.org_full_name ==\n name, 'asset_mgt_scale'].values[0])\n linkman = str(commit_data.loc[commit_data.org_full_name == name,\n 'linkman'].values[0])\n linkman_duty = str(commit_data.loc[commit_data.org_full_name ==\n name, 'linkman_duty'].values[0])\n linkman_phone = str(commit_data.loc[commit_data.org_full_name ==\n name, 'linkman_phone'].values[0])\n linkman_email = str(commit_data.loc[commit_data.org_full_name ==\n 
name, 'linkman_email'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n 'UPDATE org_info SET org_name=?, org_full_name=?, reg_code=?, reg_time=?, found_date=?, reg_capital=?, real_capital=?, region=?,profile=?, address=?, team=?, fund_num=?, is_qualification=?, prize=?, team_scale=?, investment_idea=?, master_strategy=?, remark=?, asset_mgt_scale=?, linkman=?, linkman_duty=?, linkman_phone=?, linkman_email=? WHERE org_id=?'\n )\n l = (org_name, org_full_name, reg_code, reg_time,\n found_date, reg_capital, real_capital, region, profile,\n address, team, fund_num, is_qualification, prize,\n team_scale, investment_idea, master_strategy, remark,\n asset_mgt_scale, linkman, linkman_duty, linkman_phone,\n linkman_email, org_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★机构全名'] == name]\n commit_data.columns = ['org_name', 'org_full_name', 'reg_code',\n 'reg_time', 'found_date', 'reg_capital', 'real_capital',\n 'region', 'profile', 'address', 'team', 'fund_num',\n 'is_qualification', 'prize', 'team_scale',\n 'investment_idea', 'master_strategy', 'remark',\n 'asset_mgt_scale', 'linkman', 'linkman_duty',\n 'linkman_phone', 'linkman_email']\n commit_data.loc[:, 'org_id'] = 'O' + '0' * (5 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('org_info', con, if_exists='append', index=False\n )\n print('else')\n\n\ndef df_to_sql_T_2(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★基金全称'], inplace=True)\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM fund_info'\n sql_df = pd.read_sql(sql, 
con)\n fund_name_list = sql_df['fund_full_name'].tolist()\n sql_number = len(fund_name_list)\n fund_id_number = 0\n for fund_full_name in sql_df['fund_full_name'].unique():\n fund_id_number = fund_id_number + 1\n fund_id = 'F' + '0' * (6 - len(str(fund_id_number))) + str(\n fund_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE fund_info SET fund_id=? WHERE fund_full_name=?'\n , (fund_id, fund_full_name))\n excel_name_list = excel_df['★基金全称'].tolist()\n for name in excel_name_list:\n if name in fund_name_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM fund_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['fund_full_name'] == name]\n fund_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'fund_id']\n commit_data = excel_df[excel_df['★基金全称'] == name]\n commit_data.columns = ['group', 'fund_type_strategy',\n 'reg_code', 'foundation_date', 'fund_name',\n 'fund_full_name', 'fund_manager', 'fund_manager_nominal',\n 'fund_stockbroker', 'fund_custodian', 'fund_member',\n 'fund_type_issuance', 'fund_type_structure',\n 'fund_structure', 'issue_scale', 'asset_scale',\n 'is_main_fund', 'fee_pay', 'open_date', 'locked_time_limit',\n 'duration', 'fee_manage', 'fee_pay_remark', 'fee_redeem',\n 'fee_subscription', 'fee_trust', 'investment_range',\n 'min_purchase_amount', 'min_append_amount', 'stop_line',\n 'alert_line', 'manager_participation_scale',\n 'investment_idea', 'structure_hierarchy', 'remark']\n commit_data['fund_id'] = str(fund_id)\n group = str(commit_data.loc[commit_data.fund_full_name == name,\n 'group'].values[0])\n fund_type_strategy = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_strategy'].values[0])\n reg_code = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'reg_code'].values[0])\n foundation_date = str(commit_data.loc[commit_data.\n fund_full_name == name, 'foundation_date'].values[0])\n 
fund_name = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_name'].values[0])\n fund_full_name = str(name)\n fund_manager = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_manager'].values[0])\n fund_manager_nominal = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_manager_nominal'].values[0])\n fund_stockbroker = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_stockbroker'].values[0])\n fund_custodian = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_custodian'].values[0])\n fund_member = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_member'].values[0])\n fund_type_issuance = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_issuance'].values[0])\n fund_type_structure = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_structure'].values[0])\n fund_structure = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_structure'].values[0])\n issue_scale = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'issue_scale'].values[0])\n asset_scale = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'asset_scale'].values[0])\n is_main_fund = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'is_main_fund'].values[0])\n fee_pay = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_pay'].values[0])\n open_date = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'open_date'])\n locked_time_limit = str(commit_data.loc[commit_data.\n fund_full_name == name, 'locked_time_limit'].values[0])\n duration = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'duration'].values[0])\n fee_manage = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_manage'].values[0])\n fee_pay_remark = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_pay_remark'].values[0])\n fee_redeem = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_redeem'].values[0])\n 
fee_subscription = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fee_subscription'].values[0])\n fee_trust = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_trust'].values[0])\n investment_range = str(commit_data.loc[commit_data.\n fund_full_name == name, 'investment_range'].values[0])\n min_purchase_amount = str(commit_data.loc[commit_data.\n fund_full_name == name, 'min_purchase_amount'].values[0])\n min_append_amount = str(commit_data.loc[commit_data.\n fund_full_name == name, 'min_append_amount'].values[0])\n stop_line = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'stop_line'].values[0])\n alert_line = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'alert_line'].values[0])\n manager_participation_scale = str(commit_data.loc[commit_data.\n fund_full_name == name, 'manager_participation_scale'].\n values[0])\n investment_idea = str(commit_data.loc[commit_data.\n fund_full_name == name, 'investment_idea'].values[0])\n structure_hierarchy = str(commit_data.loc[commit_data.\n fund_full_name == name, 'structure_hierarchy'].values[0])\n remark = str(commit_data.loc[commit_data.fund_full_name == name,\n 'remark'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n \"UPDATE fund_info SET 'group'=?, fund_type_strategy=?, reg_code=?, foundation_date=?, fund_name=?, fund_full_name=?, fund_manager=?, fund_manager_nominal=?, fund_stockbroker=?, fund_custodian=?, fund_member=?, fund_type_issuance=?, fund_type_structure=?, fund_structure=?, issue_scale=?, asset_scale=?, is_main_fund=?, fee_pay=?, open_date=?, locked_time_limit=?, duration=?, fee_manage=?, fee_pay_remark=?, fee_redeem=?, fee_subscription=?, fee_trust=?, investment_range=?, min_purchase_amount=?, min_append_amount=?, stop_line=?, alert_line=?, manager_participation_scale=?, investment_idea=?, structure_hierarchy=?, remark=? 
WHERE fund_id=?\"\n )\n l = (group, fund_type_strategy, reg_code, foundation_date,\n fund_name, fund_full_name, fund_manager,\n fund_manager_nominal, fund_stockbroker, fund_custodian,\n fund_member, fund_type_issuance, fund_type_structure,\n fund_structure, issue_scale, asset_scale, is_main_fund,\n fee_pay, open_date, locked_time_limit, duration,\n fee_manage, fee_pay_remark, fee_redeem,\n fee_subscription, fee_trust, investment_range,\n min_purchase_amount, min_append_amount, stop_line,\n alert_line, manager_participation_scale,\n investment_idea, structure_hierarchy, remark, fund_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★基金全称'] == name]\n commit_data.columns = ['group', 'fund_type_strategy',\n 'reg_code', 'foundation_date', 'fund_name',\n 'fund_full_name', 'fund_manager', 'fund_manager_nominal',\n 'fund_stockbroker', 'fund_custodian', 'fund_member',\n 'fund_type_issuance', 'fund_type_structure',\n 'fund_structure', 'issue_scale', 'asset_scale',\n 'is_main_fund', 'fee_pay', 'open_date', 'locked_time_limit',\n 'duration', 'fee_manage', 'fee_pay_remark', 'fee_redeem',\n 'fee_subscription', 'fee_trust', 'investment_range',\n 'min_purchase_amount', 'min_append_amount', 'stop_line',\n 'alert_line', 'manager_participation_scale',\n 'investment_idea', 'structure_hierarchy', 'remark']\n commit_data.loc[:, 'fund_id'] = 'F' + '0' * (6 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('fund_info', con, if_exists='append', index=\n False)\n print('else')\n\n\ndef df_to_sql_T_3(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★姓名'], inplace=True)\n con = 
sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM manager_info'\n sql_df = pd.read_sql(sql, con)\n user_list = sql_df['user_name'].tolist()\n sql_number = len(user_list)\n user_id_number = 0\n for user_name in sql_df['user_name'].unique():\n user_id_number = user_id_number + 1\n user_id = 'M' + '0' * (5 - len(str(user_id_number))) + str(\n user_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE manager_info SET user_id=? WHERE user_name=?',\n (user_id, user_name))\n excel_name_list = excel_df['★姓名'].tolist()\n for name in excel_name_list:\n if name in user_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM manager_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['user_name'] == name]\n user_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'user_id']\n commit_data = excel_df[excel_df['★姓名'] == name]\n commit_data.columns = ['user_name', 'sex', 'org_name',\n 'introduction', 'photo', 'entry_date', 'investment_years',\n 'education', 'duty', 'qualification', 'background',\n 'is_fund_qualification', 'is_core_member', 'resume',\n 'max_asset_mgt_scale', 'prize', 'remark']\n commit_data['user_id'] = str(user_id)\n user_name = str(name)\n sex = str(commit_data.loc[commit_data.user_name == name, 'sex']\n .values[0])\n org_name = str(commit_data.loc[commit_data.user_name == name,\n 'org_name'].values[0])\n introduction = str(commit_data.loc[commit_data.user_name ==\n name, 'introduction'].values[0])\n photo = str(commit_data.loc[commit_data.user_name == name,\n 'photo'].values[0])\n entry_date = str(commit_data.loc[commit_data.user_name == name,\n 'entry_date'].values[0])\n investment_years = str(commit_data.loc[commit_data.user_name ==\n name, 'investment_years'].values[0])\n education = str(commit_data.loc[commit_data.user_name == name,\n 'education'].values[0])\n duty = 
str(commit_data.loc[commit_data.user_name == name,\n 'duty'].values[0])\n qualification = str(commit_data.loc[commit_data.user_name ==\n name, 'qualification'].values[0])\n background = str(commit_data.loc[commit_data.user_name == name,\n 'background'].values[0])\n is_fund_qualification = str(commit_data.loc[commit_data.\n user_name == name, 'is_fund_qualification'].values[0])\n is_core_member = str(commit_data.loc[commit_data.user_name ==\n name, 'is_core_member'].values[0])\n resume = str(commit_data.loc[commit_data.user_name == name,\n 'resume'].values[0])\n max_asset_mgt_scale = str(commit_data.loc[commit_data.user_name ==\n name, 'max_asset_mgt_scale'].values[0])\n prize = str(commit_data.loc[commit_data.user_name == name,\n 'prize'].values[0])\n remark = str(commit_data.loc[commit_data.user_name == name,\n 'remark'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n 'UPDATE manager_info SET user_name=?, sex=?, org_name=?, introduction=?, photo=?, entry_date=?, investment_years=?, education=?, duty=?, qualification=?, background=?, is_fund_qualification=?, is_core_member=?, resume=?, max_asset_mgt_scale=?, prize=?, remark=? 
WHERE user_id=?'\n )\n l = (user_name, sex, org_name, introduction, photo,\n entry_date, investment_years, education, duty,\n qualification, background, is_fund_qualification,\n is_core_member, resume, max_asset_mgt_scale, prize,\n remark, user_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★姓名'] == name]\n commit_data.columns = ['user_name', 'sex', 'org_name',\n 'introduction', 'photo', 'entry_date', 'investment_years',\n 'education', 'duty', 'qualification', 'background',\n 'is_fund_qualification', 'is_core_member', 'resume',\n 'max_asset_mgt_scale', 'prize', 'remark']\n commit_data.loc[:, 'user_id'] = 'M' + '0' * (5 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('manager_info', con, if_exists='append',\n index=False)\n print('else')\n\n\ndef df_to_sql_4(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df[row_name] = excel_df[row_name].ffill()\n excel_df.index = range(len(excel_df))\n print(excel_df)\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM fund_nav_data'\n sql_df = pd.read_sql(sql, con)\n name_list = sql_df['fund_name'].tolist()\n date_list = sql_df['statistic_date'].tolist()\n print('name_list')\n print(name_list)\n print('date_list')\n print(date_list)\n for fund_name in sql_df['fund_name'].unique():\n sql = 'SELECT * FROM fund_info'\n fund_info_sql_df = pd.read_sql(sql, con)\n fund_id = fund_info_sql_df.loc[fund_info_sql_df.fund_name ==\n fund_name, 'fund_id'].values[0]\n with con:\n cur = con.cursor()\n cur.execute('UPDATE fund_nav_data SET fund_id=? 
WHERE fund_name=?',\n (fund_id, fund_name))\n excel_name_list = excel_df['基金简称'].tolist()\n excel_name_list = list(set(excel_name_list))\n print('excel_name_list')\n print(excel_name_list)\n for name in excel_name_list:\n statistic_date_series = excel_df.loc[excel_df['基金简称'] == name, '净值日期']\n excel_date_list = statistic_date_series.tolist()\n excel_date_list = [str(i) for i in excel_date_list]\n print('excel_date_list')\n print(excel_date_list)\n for date in excel_date_list:\n if name in name_list and date in date_list:\n commit_data = excel_df[excel_df['基金简称'] == name]\n print(commit_data.columns)\n commit_data.columns = ['fund_name', 'statistic_date', 'nav',\n 'added_nav', 'total_share', 'total_asset', 'total_nav',\n 'is_split', 'is_open_date', 'split_ratio',\n 'after_tax_bonus']\n commit_data['fund_id'] = str(fund_id)\n fund_name = name\n statistic_date = str(date)\n nav = str(commit_data.loc[commit_data.statistic_date ==\n date, 'nav'].values[0])\n added_nav = str(commit_data.loc[commit_data.statistic_date ==\n date, 'added_nav'].values[0])\n total_share = str(commit_data.loc[commit_data.\n statistic_date == date, 'total_share'].values[0])\n total_asset = str(commit_data.loc[commit_data.\n statistic_date == date, 'total_asset'].values[0])\n total_nav = str(commit_data.loc[commit_data.statistic_date ==\n date, 'total_nav'].values[0])\n is_split = str(commit_data.loc[commit_data.statistic_date ==\n date, 'is_split'].values[0])\n is_open_date = str(commit_data.loc[commit_data.\n statistic_date == date, 'is_open_date'].values[0])\n split_ratio = str(commit_data.loc[commit_data.\n statistic_date == date, 'split_ratio'].values[0])\n after_tax_bonus = str(commit_data.loc[commit_data.\n statistic_date == date, 'after_tax_bonus'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n 'UPDATE fund_nav_data SET nav=?, added_nav=?, total_share=?, total_asset=?, total_nav=?, is_split=?, is_open_date=?, split_ratio=?, after_tax_bonus=? WHERE fund_name=? 
AND statistic_date=?'\n )\n l = (nav, added_nav, total_share, total_asset,\n total_nav, is_split, is_open_date, split_ratio,\n after_tax_bonus, fund_name, statistic_date)\n cur.execute(sql, l)\n print('if')\n else:\n commit_data = excel_df[(excel_df['基金简称'] == name) & (\n excel_df['净值日期'] == date)]\n commit_data.columns = ['fund_name', 'statistic_date', 'nav',\n 'added_nav', 'total_share', 'total_asset', 'total_nav',\n 'is_split', 'is_open_date', 'split_ratio',\n 'after_tax_bonus']\n commit_data.to_sql('fund_nav_data', con, if_exists='append',\n index=False)\n print('else')\n\n\ndef listing(request):\n context = {}\n if request.method == 'POST':\n uf = UserForm(request.POST, request.FILES)\n if request.user.username and uf.is_valid():\n user_upload_file = uf.cleaned_data['user_upload_file']\n profile = UserProfile()\n profile.username = request.user.username\n profile.user_upload_file = user_upload_file\n profile.save()\n file_name = request.FILES.get('user_upload_file').name\n path = (\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\upload\\\\upload\\\\'\n )\n filefullpath = path + file_name\n if user_upload_file:\n b = xlrd.open_workbook(filefullpath)\n for sheet in range(1, 5):\n if sheet == 1:\n row_name = '公司资料简介'\n df_to_sql_T_1(filefullpath, sheet, row_name)\n if sheet == 2:\n row_name = '基金简介'\n df_to_sql_T_2(filefullpath, sheet, row_name)\n if sheet == 3:\n row_name = '人员简介'\n df_to_sql_T_3(filefullpath, sheet, row_name)\n if sheet == 4:\n row_name = '基金简称'\n df_to_sql_4(filefullpath, sheet, row_name)\n return HttpResponse('upload ok!')\n else:\n return redirect(to='login')\n else:\n uf = UserForm()\n context['uf'] = uf\n return render(request, 'website/templates/listing.html', context)\n\n\ndef index_login(request):\n context = {}\n if request.method == 'GET':\n form = AuthenticationForm\n if request.method == 'POST':\n form = AuthenticationForm(data=request.POST)\n if form.is_valid():\n login(request, form.get_user())\n return 
redirect(to='list')\n context['form'] = form\n return render(request, 'register_login.html', context)\n\n\ndef index_register(request):\n context = {}\n if request.method == 'GET':\n form = UserCreationForm\n if request.method == 'POST':\n form = UserCreationForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect(to='login')\n context['form'] = form\n return render(request, 'register_login.html', context)\n",
"step-4": "from django.shortcuts import render, Http404, HttpResponse, redirect\nfrom django.contrib.auth import authenticate, login\nfrom website.form import UserForm\nfrom django.contrib.auth.forms import UserCreationForm, AuthenticationForm\nfrom website.models import UserProfile\nfrom website.form import UserForm\nimport pandas as pd\nfrom pandas import DataFrame\nfrom sqlalchemy import create_engine\nfrom django.contrib.auth.decorators import login_required\nimport sqlite3\nimport xlrd\nimport uuid\n\n\ndef df_to_sql_T_1(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★机构全名'], inplace=True)\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM org_info'\n sql_df = pd.read_sql(sql, con)\n fund_name_list = sql_df['org_full_name'].tolist()\n sql_number = len(fund_name_list)\n org_id_number = 0\n for org_full_name in sql_df['org_full_name'].unique():\n org_id_number = org_id_number + 1\n org_id = 'O' + '0' * (5 - len(str(org_id_number))) + str(org_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE org_info SET org_id=? 
WHERE org_full_name=?',\n (org_id, org_full_name))\n excel_name_list = excel_df['★机构全名'].tolist()\n for name in excel_name_list:\n if name in fund_name_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM org_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['org_full_name'] == name]\n org_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'org_id']\n commit_data = excel_df[excel_df['★机构全名'] == name]\n commit_data.columns = ['org_name', 'org_full_name', 'reg_code',\n 'reg_time', 'found_date', 'reg_capital', 'real_capital',\n 'region', 'profile', 'address', 'team', 'fund_num',\n 'is_qualification', 'prize', 'team_scale',\n 'investment_idea', 'master_strategy', 'remark',\n 'asset_mgt_scale', 'linkman', 'linkman_duty',\n 'linkman_phone', 'linkman_email']\n commit_data['org_id'] = str(org_id)\n org_name = str(commit_data.loc[commit_data.org_full_name ==\n name, 'org_name'].values[0])\n org_full_name = str(name)\n reg_code = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_code'].values[0])\n reg_time = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_time'].values[0])\n found_date = str(commit_data.loc[commit_data.org_full_name ==\n name, 'found_date'].values[0])\n reg_capital = str(commit_data.loc[commit_data.org_full_name ==\n name, 'reg_capital'].values[0])\n real_capital = str(commit_data.loc[commit_data.org_full_name ==\n name, 'real_capital'].values[0])\n region = str(commit_data.loc[commit_data.org_full_name == name,\n 'region'].values[0])\n profile = str(commit_data.loc[commit_data.org_full_name == name,\n 'profile'].values[0])\n address = str(commit_data.loc[commit_data.org_full_name == name,\n 'address'].values[0])\n team = str(commit_data.loc[commit_data.org_full_name == name,\n 'org_name'].values[0])\n fund_num = str(commit_data.loc[commit_data.org_full_name ==\n name, 'team'].values[0])\n is_qualification = 
str(commit_data.loc[commit_data.\n org_full_name == name, 'is_qualification'].values[0])\n prize = str(commit_data.loc[commit_data.org_full_name == name,\n 'prize'].values[0])\n team_scale = str(commit_data.loc[commit_data.org_full_name ==\n name, 'team_scale'])\n investment_idea = str(commit_data.loc[commit_data.org_full_name ==\n name, 'investment_idea'].values[0])\n master_strategy = str(commit_data.loc[commit_data.org_full_name ==\n name, 'master_strategy'].values[0])\n remark = str(commit_data.loc[commit_data.org_full_name == name,\n 'remark'].values[0])\n asset_mgt_scale = str(commit_data.loc[commit_data.org_full_name ==\n name, 'asset_mgt_scale'].values[0])\n linkman = str(commit_data.loc[commit_data.org_full_name == name,\n 'linkman'].values[0])\n linkman_duty = str(commit_data.loc[commit_data.org_full_name ==\n name, 'linkman_duty'].values[0])\n linkman_phone = str(commit_data.loc[commit_data.org_full_name ==\n name, 'linkman_phone'].values[0])\n linkman_email = str(commit_data.loc[commit_data.org_full_name ==\n name, 'linkman_email'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n 'UPDATE org_info SET org_name=?, org_full_name=?, reg_code=?, reg_time=?, found_date=?, reg_capital=?, real_capital=?, region=?,profile=?, address=?, team=?, fund_num=?, is_qualification=?, prize=?, team_scale=?, investment_idea=?, master_strategy=?, remark=?, asset_mgt_scale=?, linkman=?, linkman_duty=?, linkman_phone=?, linkman_email=? 
WHERE org_id=?'\n )\n l = (org_name, org_full_name, reg_code, reg_time,\n found_date, reg_capital, real_capital, region, profile,\n address, team, fund_num, is_qualification, prize,\n team_scale, investment_idea, master_strategy, remark,\n asset_mgt_scale, linkman, linkman_duty, linkman_phone,\n linkman_email, org_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★机构全名'] == name]\n commit_data.columns = ['org_name', 'org_full_name', 'reg_code',\n 'reg_time', 'found_date', 'reg_capital', 'real_capital',\n 'region', 'profile', 'address', 'team', 'fund_num',\n 'is_qualification', 'prize', 'team_scale',\n 'investment_idea', 'master_strategy', 'remark',\n 'asset_mgt_scale', 'linkman', 'linkman_duty',\n 'linkman_phone', 'linkman_email']\n commit_data.loc[:, 'org_id'] = 'O' + '0' * (5 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('org_info', con, if_exists='append', index=False\n )\n print('else')\n\n\ndef df_to_sql_T_2(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★基金全称'], inplace=True)\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM fund_info'\n sql_df = pd.read_sql(sql, con)\n fund_name_list = sql_df['fund_full_name'].tolist()\n sql_number = len(fund_name_list)\n fund_id_number = 0\n for fund_full_name in sql_df['fund_full_name'].unique():\n fund_id_number = fund_id_number + 1\n fund_id = 'F' + '0' * (6 - len(str(fund_id_number))) + str(\n fund_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE fund_info SET fund_id=? 
WHERE fund_full_name=?'\n , (fund_id, fund_full_name))\n excel_name_list = excel_df['★基金全称'].tolist()\n for name in excel_name_list:\n if name in fund_name_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM fund_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['fund_full_name'] == name]\n fund_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'fund_id']\n commit_data = excel_df[excel_df['★基金全称'] == name]\n commit_data.columns = ['group', 'fund_type_strategy',\n 'reg_code', 'foundation_date', 'fund_name',\n 'fund_full_name', 'fund_manager', 'fund_manager_nominal',\n 'fund_stockbroker', 'fund_custodian', 'fund_member',\n 'fund_type_issuance', 'fund_type_structure',\n 'fund_structure', 'issue_scale', 'asset_scale',\n 'is_main_fund', 'fee_pay', 'open_date', 'locked_time_limit',\n 'duration', 'fee_manage', 'fee_pay_remark', 'fee_redeem',\n 'fee_subscription', 'fee_trust', 'investment_range',\n 'min_purchase_amount', 'min_append_amount', 'stop_line',\n 'alert_line', 'manager_participation_scale',\n 'investment_idea', 'structure_hierarchy', 'remark']\n commit_data['fund_id'] = str(fund_id)\n group = str(commit_data.loc[commit_data.fund_full_name == name,\n 'group'].values[0])\n fund_type_strategy = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_strategy'].values[0])\n reg_code = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'reg_code'].values[0])\n foundation_date = str(commit_data.loc[commit_data.\n fund_full_name == name, 'foundation_date'].values[0])\n fund_name = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_name'].values[0])\n fund_full_name = str(name)\n fund_manager = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_manager'].values[0])\n fund_manager_nominal = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_manager_nominal'].values[0])\n fund_stockbroker = 
str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_stockbroker'].values[0])\n fund_custodian = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_custodian'].values[0])\n fund_member = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_member'].values[0])\n fund_type_issuance = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_issuance'].values[0])\n fund_type_structure = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fund_type_structure'].values[0])\n fund_structure = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fund_structure'].values[0])\n issue_scale = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'issue_scale'].values[0])\n asset_scale = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'asset_scale'].values[0])\n is_main_fund = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'is_main_fund'].values[0])\n fee_pay = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_pay'].values[0])\n open_date = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'open_date'])\n locked_time_limit = str(commit_data.loc[commit_data.\n fund_full_name == name, 'locked_time_limit'].values[0])\n duration = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'duration'].values[0])\n fee_manage = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_manage'].values[0])\n fee_pay_remark = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_pay_remark'].values[0])\n fee_redeem = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_redeem'].values[0])\n fee_subscription = str(commit_data.loc[commit_data.\n fund_full_name == name, 'fee_subscription'].values[0])\n fee_trust = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'fee_trust'].values[0])\n investment_range = str(commit_data.loc[commit_data.\n fund_full_name == name, 'investment_range'].values[0])\n min_purchase_amount = str(commit_data.loc[commit_data.\n 
fund_full_name == name, 'min_purchase_amount'].values[0])\n min_append_amount = str(commit_data.loc[commit_data.\n fund_full_name == name, 'min_append_amount'].values[0])\n stop_line = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'stop_line'].values[0])\n alert_line = str(commit_data.loc[commit_data.fund_full_name ==\n name, 'alert_line'].values[0])\n manager_participation_scale = str(commit_data.loc[commit_data.\n fund_full_name == name, 'manager_participation_scale'].\n values[0])\n investment_idea = str(commit_data.loc[commit_data.\n fund_full_name == name, 'investment_idea'].values[0])\n structure_hierarchy = str(commit_data.loc[commit_data.\n fund_full_name == name, 'structure_hierarchy'].values[0])\n remark = str(commit_data.loc[commit_data.fund_full_name == name,\n 'remark'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n \"UPDATE fund_info SET 'group'=?, fund_type_strategy=?, reg_code=?, foundation_date=?, fund_name=?, fund_full_name=?, fund_manager=?, fund_manager_nominal=?, fund_stockbroker=?, fund_custodian=?, fund_member=?, fund_type_issuance=?, fund_type_structure=?, fund_structure=?, issue_scale=?, asset_scale=?, is_main_fund=?, fee_pay=?, open_date=?, locked_time_limit=?, duration=?, fee_manage=?, fee_pay_remark=?, fee_redeem=?, fee_subscription=?, fee_trust=?, investment_range=?, min_purchase_amount=?, min_append_amount=?, stop_line=?, alert_line=?, manager_participation_scale=?, investment_idea=?, structure_hierarchy=?, remark=? 
WHERE fund_id=?\"\n )\n l = (group, fund_type_strategy, reg_code, foundation_date,\n fund_name, fund_full_name, fund_manager,\n fund_manager_nominal, fund_stockbroker, fund_custodian,\n fund_member, fund_type_issuance, fund_type_structure,\n fund_structure, issue_scale, asset_scale, is_main_fund,\n fee_pay, open_date, locked_time_limit, duration,\n fee_manage, fee_pay_remark, fee_redeem,\n fee_subscription, fee_trust, investment_range,\n min_purchase_amount, min_append_amount, stop_line,\n alert_line, manager_participation_scale,\n investment_idea, structure_hierarchy, remark, fund_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★基金全称'] == name]\n commit_data.columns = ['group', 'fund_type_strategy',\n 'reg_code', 'foundation_date', 'fund_name',\n 'fund_full_name', 'fund_manager', 'fund_manager_nominal',\n 'fund_stockbroker', 'fund_custodian', 'fund_member',\n 'fund_type_issuance', 'fund_type_structure',\n 'fund_structure', 'issue_scale', 'asset_scale',\n 'is_main_fund', 'fee_pay', 'open_date', 'locked_time_limit',\n 'duration', 'fee_manage', 'fee_pay_remark', 'fee_redeem',\n 'fee_subscription', 'fee_trust', 'investment_range',\n 'min_purchase_amount', 'min_append_amount', 'stop_line',\n 'alert_line', 'manager_participation_scale',\n 'investment_idea', 'structure_hierarchy', 'remark']\n commit_data.loc[:, 'fund_id'] = 'F' + '0' * (6 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('fund_info', con, if_exists='append', index=\n False)\n print('else')\n\n\ndef df_to_sql_T_3(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df = excel_df.dropna(axis=1, how='all')\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★姓名'], inplace=True)\n con = 
sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM manager_info'\n sql_df = pd.read_sql(sql, con)\n user_list = sql_df['user_name'].tolist()\n sql_number = len(user_list)\n user_id_number = 0\n for user_name in sql_df['user_name'].unique():\n user_id_number = user_id_number + 1\n user_id = 'M' + '0' * (5 - len(str(user_id_number))) + str(\n user_id_number)\n with con:\n cur = con.cursor()\n cur.execute('UPDATE manager_info SET user_id=? WHERE user_name=?',\n (user_id, user_name))\n excel_name_list = excel_df['★姓名'].tolist()\n for name in excel_name_list:\n if name in user_list:\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3'\n )\n sql = 'SELECT * FROM manager_info'\n sql_df = pd.read_sql(sql, con)\n name_dataframe = sql_df[sql_df['user_name'] == name]\n user_id = name_dataframe.loc[name_dataframe.last_valid_index(),\n 'user_id']\n commit_data = excel_df[excel_df['★姓名'] == name]\n commit_data.columns = ['user_name', 'sex', 'org_name',\n 'introduction', 'photo', 'entry_date', 'investment_years',\n 'education', 'duty', 'qualification', 'background',\n 'is_fund_qualification', 'is_core_member', 'resume',\n 'max_asset_mgt_scale', 'prize', 'remark']\n commit_data['user_id'] = str(user_id)\n user_name = str(name)\n sex = str(commit_data.loc[commit_data.user_name == name, 'sex']\n .values[0])\n org_name = str(commit_data.loc[commit_data.user_name == name,\n 'org_name'].values[0])\n introduction = str(commit_data.loc[commit_data.user_name ==\n name, 'introduction'].values[0])\n photo = str(commit_data.loc[commit_data.user_name == name,\n 'photo'].values[0])\n entry_date = str(commit_data.loc[commit_data.user_name == name,\n 'entry_date'].values[0])\n investment_years = str(commit_data.loc[commit_data.user_name ==\n name, 'investment_years'].values[0])\n education = str(commit_data.loc[commit_data.user_name == name,\n 'education'].values[0])\n duty = 
str(commit_data.loc[commit_data.user_name == name,\n 'duty'].values[0])\n qualification = str(commit_data.loc[commit_data.user_name ==\n name, 'qualification'].values[0])\n background = str(commit_data.loc[commit_data.user_name == name,\n 'background'].values[0])\n is_fund_qualification = str(commit_data.loc[commit_data.\n user_name == name, 'is_fund_qualification'].values[0])\n is_core_member = str(commit_data.loc[commit_data.user_name ==\n name, 'is_core_member'].values[0])\n resume = str(commit_data.loc[commit_data.user_name == name,\n 'resume'].values[0])\n max_asset_mgt_scale = str(commit_data.loc[commit_data.user_name ==\n name, 'max_asset_mgt_scale'].values[0])\n prize = str(commit_data.loc[commit_data.user_name == name,\n 'prize'].values[0])\n remark = str(commit_data.loc[commit_data.user_name == name,\n 'remark'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n 'UPDATE manager_info SET user_name=?, sex=?, org_name=?, introduction=?, photo=?, entry_date=?, investment_years=?, education=?, duty=?, qualification=?, background=?, is_fund_qualification=?, is_core_member=?, resume=?, max_asset_mgt_scale=?, prize=?, remark=? 
WHERE user_id=?'\n )\n l = (user_name, sex, org_name, introduction, photo,\n entry_date, investment_years, education, duty,\n qualification, background, is_fund_qualification,\n is_core_member, resume, max_asset_mgt_scale, prize,\n remark, user_id)\n cur.execute(sql, l)\n print('if')\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df['★姓名'] == name]\n commit_data.columns = ['user_name', 'sex', 'org_name',\n 'introduction', 'photo', 'entry_date', 'investment_years',\n 'education', 'duty', 'qualification', 'background',\n 'is_fund_qualification', 'is_core_member', 'resume',\n 'max_asset_mgt_scale', 'prize', 'remark']\n commit_data.loc[:, 'user_id'] = 'M' + '0' * (5 - len(str(\n sql_number))) + str(sql_number)\n commit_data.to_sql('manager_info', con, if_exists='append',\n index=False)\n print('else')\n\n\ndef df_to_sql_4(filefullpath, sheet, row_name):\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how='all')\n excel_df[row_name] = excel_df[row_name].ffill()\n excel_df.index = range(len(excel_df))\n print(excel_df)\n con = sqlite3.connect(\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\db.sqlite3')\n sql = 'SELECT * FROM fund_nav_data'\n sql_df = pd.read_sql(sql, con)\n name_list = sql_df['fund_name'].tolist()\n date_list = sql_df['statistic_date'].tolist()\n print('name_list')\n print(name_list)\n print('date_list')\n print(date_list)\n for fund_name in sql_df['fund_name'].unique():\n sql = 'SELECT * FROM fund_info'\n fund_info_sql_df = pd.read_sql(sql, con)\n fund_id = fund_info_sql_df.loc[fund_info_sql_df.fund_name ==\n fund_name, 'fund_id'].values[0]\n with con:\n cur = con.cursor()\n cur.execute('UPDATE fund_nav_data SET fund_id=? 
WHERE fund_name=?',\n (fund_id, fund_name))\n excel_name_list = excel_df['基金简称'].tolist()\n excel_name_list = list(set(excel_name_list))\n print('excel_name_list')\n print(excel_name_list)\n for name in excel_name_list:\n statistic_date_series = excel_df.loc[excel_df['基金简称'] == name, '净值日期']\n excel_date_list = statistic_date_series.tolist()\n excel_date_list = [str(i) for i in excel_date_list]\n print('excel_date_list')\n print(excel_date_list)\n for date in excel_date_list:\n if name in name_list and date in date_list:\n commit_data = excel_df[excel_df['基金简称'] == name]\n print(commit_data.columns)\n commit_data.columns = ['fund_name', 'statistic_date', 'nav',\n 'added_nav', 'total_share', 'total_asset', 'total_nav',\n 'is_split', 'is_open_date', 'split_ratio',\n 'after_tax_bonus']\n commit_data['fund_id'] = str(fund_id)\n fund_name = name\n statistic_date = str(date)\n nav = str(commit_data.loc[commit_data.statistic_date ==\n date, 'nav'].values[0])\n added_nav = str(commit_data.loc[commit_data.statistic_date ==\n date, 'added_nav'].values[0])\n total_share = str(commit_data.loc[commit_data.\n statistic_date == date, 'total_share'].values[0])\n total_asset = str(commit_data.loc[commit_data.\n statistic_date == date, 'total_asset'].values[0])\n total_nav = str(commit_data.loc[commit_data.statistic_date ==\n date, 'total_nav'].values[0])\n is_split = str(commit_data.loc[commit_data.statistic_date ==\n date, 'is_split'].values[0])\n is_open_date = str(commit_data.loc[commit_data.\n statistic_date == date, 'is_open_date'].values[0])\n split_ratio = str(commit_data.loc[commit_data.\n statistic_date == date, 'split_ratio'].values[0])\n after_tax_bonus = str(commit_data.loc[commit_data.\n statistic_date == date, 'after_tax_bonus'].values[0])\n with con:\n cur = con.cursor()\n sql = (\n 'UPDATE fund_nav_data SET nav=?, added_nav=?, total_share=?, total_asset=?, total_nav=?, is_split=?, is_open_date=?, split_ratio=?, after_tax_bonus=? WHERE fund_name=? 
AND statistic_date=?'\n )\n l = (nav, added_nav, total_share, total_asset,\n total_nav, is_split, is_open_date, split_ratio,\n after_tax_bonus, fund_name, statistic_date)\n cur.execute(sql, l)\n print('if')\n else:\n commit_data = excel_df[(excel_df['基金简称'] == name) & (\n excel_df['净值日期'] == date)]\n commit_data.columns = ['fund_name', 'statistic_date', 'nav',\n 'added_nav', 'total_share', 'total_asset', 'total_nav',\n 'is_split', 'is_open_date', 'split_ratio',\n 'after_tax_bonus']\n commit_data.to_sql('fund_nav_data', con, if_exists='append',\n index=False)\n print('else')\n\n\ndef listing(request):\n context = {}\n if request.method == 'POST':\n uf = UserForm(request.POST, request.FILES)\n if request.user.username and uf.is_valid():\n user_upload_file = uf.cleaned_data['user_upload_file']\n profile = UserProfile()\n profile.username = request.user.username\n profile.user_upload_file = user_upload_file\n profile.save()\n file_name = request.FILES.get('user_upload_file').name\n path = (\n 'C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\upload\\\\upload\\\\'\n )\n filefullpath = path + file_name\n if user_upload_file:\n b = xlrd.open_workbook(filefullpath)\n for sheet in range(1, 5):\n if sheet == 1:\n row_name = '公司资料简介'\n df_to_sql_T_1(filefullpath, sheet, row_name)\n if sheet == 2:\n row_name = '基金简介'\n df_to_sql_T_2(filefullpath, sheet, row_name)\n if sheet == 3:\n row_name = '人员简介'\n df_to_sql_T_3(filefullpath, sheet, row_name)\n if sheet == 4:\n row_name = '基金简称'\n df_to_sql_4(filefullpath, sheet, row_name)\n return HttpResponse('upload ok!')\n else:\n return redirect(to='login')\n else:\n uf = UserForm()\n context['uf'] = uf\n return render(request, 'website/templates/listing.html', context)\n\n\ndef index_login(request):\n context = {}\n if request.method == 'GET':\n form = AuthenticationForm\n if request.method == 'POST':\n form = AuthenticationForm(data=request.POST)\n if form.is_valid():\n login(request, form.get_user())\n return 
redirect(to='list')\n context['form'] = form\n return render(request, 'register_login.html', context)\n\n\ndef index_register(request):\n context = {}\n if request.method == 'GET':\n form = UserCreationForm\n if request.method == 'POST':\n form = UserCreationForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect(to='login')\n context['form'] = form\n return render(request, 'register_login.html', context)\n",
"step-5": "from django.shortcuts import render, Http404, HttpResponse, redirect\nfrom django.contrib.auth import authenticate, login\nfrom website.form import UserForm\nfrom django.contrib.auth.forms import UserCreationForm, AuthenticationForm\nfrom website.models import UserProfile\nfrom website.form import UserForm\nimport pandas as pd\nfrom pandas import DataFrame\nfrom sqlalchemy import create_engine\nfrom django.contrib.auth.decorators import login_required\nimport sqlite3\nimport xlrd\nimport uuid\n\n\ndef df_to_sql_T_1(filefullpath, sheet, row_name):#路径名,sheet为sheet数,row_name为指定行为columns\n #读取存在文件夹中的excel\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how=\"all\")\n excel_df = excel_df.dropna(axis=1, how=\"all\")\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★机构全名'], inplace=True)\n\n #数据库的读取\n con = sqlite3.connect(r\"C:\\Users\\K\\Desktop\\excel-upload-sqlite3\\mins\\db.sqlite3\")\n sql = \"SELECT * FROM org_info\"#!!!注意sql中没有表格会出错\n sql_df = pd.read_sql(sql, con)\n fund_name_list = sql_df['org_full_name'].tolist()\n sql_number = len(fund_name_list)\n\n\n #依次对数据库中的每一行添加一列id\n org_id_number = 0\n for org_full_name in sql_df['org_full_name'].unique():\n org_id_number = org_id_number+1\n org_id = 'O'+'0'*(5-len(str(org_id_number)))+str(org_id_number)\n with con:\n cur = con.cursor()\n cur.execute(\"\"\"UPDATE org_info SET org_id=? 
WHERE org_full_name=?\"\"\", (org_id, org_full_name))\n\n\n #对excel进行读取\n #excel_data = pd.read_excel(filefullpath, sheetname=sheet)\n excel_name_list = excel_df['★机构全名'].tolist()\n for name in excel_name_list:\n if name in fund_name_list:\n #提取数据库中的org_full_name为name的id\n con = sqlite3.connect(r\"C:\\Users\\K\\Desktop\\excel-upload-sqlite3\\mins\\db.sqlite3\")\n sql = \"SELECT * FROM org_info\"\n sql_df = pd.read_sql(sql, con)\n name_dataframe =sql_df[sql_df[\"org_full_name\"] == name]\n org_id = name_dataframe.loc[name_dataframe.last_valid_index(), 'org_id']\n\n #把excel的一行变成dataframe,并且加上id,并上传到数据库\n commit_data = excel_df[excel_df[\"★机构全名\"] == name]\n commit_data.columns = [\"org_name\", \"org_full_name\", \"reg_code\", \"reg_time\", \"found_date\", \"reg_capital\",\n \"real_capital\", \"region\", \"profile\", \"address\", \"team\", \"fund_num\",\n \"is_qualification\", \"prize\", \"team_scale\", \"investment_idea\", \"master_strategy\",\n \"remark\", \"asset_mgt_scale\", \"linkman\", \"linkman_duty\", \"linkman_phone\",\n \"linkman_email\"]\n commit_data[\"org_id\"] = str(org_id)\n\n #把一行表格dataframe提取其中的值\n org_name = str(commit_data.loc[commit_data.org_full_name == name, 'org_name'].values[0])\n org_full_name = str(name)\n reg_code = str(commit_data.loc[commit_data.org_full_name == name, 'reg_code'].values[0])\n reg_time = str(commit_data.loc[commit_data.org_full_name == name, 'reg_time'].values[0])\n found_date = str(commit_data.loc[commit_data.org_full_name == name, 'found_date'].values[0])\n reg_capital = str(commit_data.loc[commit_data.org_full_name == name, 'reg_capital'].values[0])\n real_capital = str(commit_data.loc[commit_data.org_full_name == name, 'real_capital'].values[0])\n region = str(commit_data.loc[commit_data.org_full_name == name, 'region'].values[0])\n profile = str(commit_data.loc[commit_data.org_full_name == name, 'profile'].values[0])\n address = str(commit_data.loc[commit_data.org_full_name == name, 'address'].values[0])\n team = 
str(commit_data.loc[commit_data.org_full_name == name, 'org_name'].values[0])\n fund_num = str(commit_data.loc[commit_data.org_full_name == name, 'team'].values[0])\n is_qualification = str(commit_data.loc[commit_data.org_full_name == name, 'is_qualification'].values[0])\n prize = str(commit_data.loc[commit_data.org_full_name == name, 'prize'].values[0])\n team_scale = str(commit_data.loc[commit_data.org_full_name == name, 'team_scale'])\n investment_idea = str(commit_data.loc[commit_data.org_full_name == name, 'investment_idea'].values[0])\n master_strategy = str(commit_data.loc[commit_data.org_full_name == name, 'master_strategy'].values[0])\n remark = str(commit_data.loc[commit_data.org_full_name == name, 'remark'].values[0])\n asset_mgt_scale = str(commit_data.loc[commit_data.org_full_name == name, 'asset_mgt_scale'].values[0])\n linkman = str(commit_data.loc[commit_data.org_full_name == name, 'linkman'].values[0])\n linkman_duty = str(commit_data.loc[commit_data.org_full_name == name, 'linkman_duty'].values[0])\n linkman_phone = str(commit_data.loc[commit_data.org_full_name == name, 'linkman_phone'].values[0])\n linkman_email = str(commit_data.loc[commit_data.org_full_name == name, 'linkman_email'].values[0])\n # org_name = str(commit_data.loc[index.last_valid_index(), \"org_name\"])\n\n with con:\n cur = con.cursor()\n sql = \"\"\"UPDATE org_info SET org_name=?, org_full_name=?, reg_code=?, reg_time=?, found_date=?, \\\n reg_capital=?, real_capital=?, region=?,profile=?, address=?, team=?, fund_num=?, is_qualification=?, \\\n prize=?, team_scale=?, investment_idea=?, master_strategy=?, remark=?, asset_mgt_scale=?, linkman=?, \\\n linkman_duty=?, linkman_phone=?, linkman_email=? 
WHERE org_id=?\"\"\"\n l = (org_name, org_full_name, reg_code, reg_time, found_date, reg_capital, real_capital, region, profile,\\\n address, team, fund_num, is_qualification, prize, team_scale, investment_idea, master_strategy, remark,\\\n asset_mgt_scale, linkman, linkman_duty, linkman_phone, linkman_email, org_id)\n cur.execute(sql, l)\n print(\"if\")\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df[\"★机构全名\"] == name]\n commit_data.columns = [\"org_name\", \"org_full_name\", \"reg_code\", \"reg_time\", \"found_date\", \"reg_capital\",\n \"real_capital\", \"region\", \"profile\", \"address\", \"team\", \"fund_num\",\n \"is_qualification\", \"prize\", \"team_scale\", \"investment_idea\", \"master_strategy\",\n \"remark\", \"asset_mgt_scale\", \"linkman\", \"linkman_duty\", \"linkman_phone\",\n \"linkman_email\"]\n commit_data.loc[:, \"org_id\"] = 'O'+'0'*(5-len(str(sql_number)))+str(sql_number)\n commit_data.to_sql(\"org_info\", con, if_exists=\"append\", index=False)\n print(\"else\")\n\ndef df_to_sql_T_2(filefullpath, sheet, row_name):#路径名,sheet为sheet数,row_name为指定行为columns\n #读取存在文件夹中的excel\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how=\"all\")\n excel_df = excel_df.dropna(axis=1, how=\"all\")\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]\n excel_df = excel_df.drop(row_name, axis=0, inplace=False)\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★基金全称'], inplace=True)\n\n #数据库的读取\n con = sqlite3.connect(r\"C:\\Users\\K\\Desktop\\excel-upload-sqlite3\\mins\\db.sqlite3\")\n sql = \"SELECT * FROM fund_info\"#!!!注意sql中没有表格会出错\n sql_df = pd.read_sql(sql, con)\n fund_name_list = sql_df['fund_full_name'].tolist()#list\n sql_number = len(fund_name_list)\n\n\n #依次对数据库中的每一行添加一列id\n fund_id_number = 0\n for fund_full_name in sql_df['fund_full_name'].unique():\n fund_id_number = fund_id_number+1\n fund_id = 
'F'+'0'*(6-len(str(fund_id_number)))+str(fund_id_number)\n with con:\n cur = con.cursor()\n cur.execute(\"\"\"UPDATE fund_info SET fund_id=? WHERE fund_full_name=?\"\"\", (fund_id, fund_full_name))\n\n\n #对excel进行读取\n #excel_data = pd.read_excel(filefullpath, sheetname=sheet)\n excel_name_list = excel_df['★基金全称'].tolist()#list\n for name in excel_name_list:\n if name in fund_name_list:\n #提取数据库中的org_full_name为name的id\n con = sqlite3.connect(r\"C:\\Users\\K\\Desktop\\excel-upload-sqlite3\\mins\\db.sqlite3\")\n sql = \"SELECT * FROM fund_info\"\n sql_df = pd.read_sql(sql, con)\n name_dataframe =sql_df[sql_df[\"fund_full_name\"] == name]\n fund_id = name_dataframe.loc[name_dataframe.last_valid_index(), 'fund_id']\n\n #把excel的一行变成dataframe,并且加上id,并上传到数据库\n commit_data = excel_df[excel_df[\"★基金全称\"] == name]\n commit_data.columns = [\"group\", \"fund_type_strategy\", \"reg_code\", \"foundation_date\", \"fund_name\",\n \"fund_full_name\", \"fund_manager\", \"fund_manager_nominal\", \"fund_stockbroker\",\n \"fund_custodian\", \"fund_member\", \"fund_type_issuance\", \"fund_type_structure\",\n \"fund_structure\", \"issue_scale\", \"asset_scale\", \"is_main_fund\", \"fee_pay\",\n \"open_date\", \"locked_time_limit\", \"duration\", \"fee_manage\", \"fee_pay_remark\",\n \"fee_redeem\", \"fee_subscription\", \"fee_trust\", \"investment_range\",\n \"min_purchase_amount\", \"min_append_amount\", \"stop_line\", \"alert_line\",\n \"manager_participation_scale\", \"investment_idea\", \"structure_hierarchy\", \"remark\"]\n commit_data[\"fund_id\"] = str(fund_id)\n\n #把一行表格dataframe提取其中的值\n group = str(commit_data.loc[commit_data.fund_full_name == name, 'group'].values[0])\n fund_type_strategy = str(commit_data.loc[commit_data.fund_full_name == name, 'fund_type_strategy'].values[0])\n reg_code = str(commit_data.loc[commit_data.fund_full_name == name, 'reg_code'].values[0])\n foundation_date = str(commit_data.loc[commit_data.fund_full_name == name, 'foundation_date'].values[0])\n 
fund_name = str(commit_data.loc[commit_data.fund_full_name == name, 'fund_name'].values[0])\n fund_full_name = str(name)\n fund_manager = str(commit_data.loc[commit_data.fund_full_name == name, 'fund_manager'].values[0])\n fund_manager_nominal = str(commit_data.loc[commit_data.fund_full_name == name, 'fund_manager_nominal'].values[0])\n fund_stockbroker = str(commit_data.loc[commit_data.fund_full_name == name, 'fund_stockbroker'].values[0])\n fund_custodian = str(commit_data.loc[commit_data.fund_full_name == name, 'fund_custodian'].values[0])\n fund_member = str(commit_data.loc[commit_data.fund_full_name == name, 'fund_member'].values[0])\n fund_type_issuance = str(commit_data.loc[commit_data.fund_full_name == name, 'fund_type_issuance'].values[0])\n fund_type_structure = str(commit_data.loc[commit_data.fund_full_name == name, 'fund_type_structure'].values[0])\n fund_structure = str(commit_data.loc[commit_data.fund_full_name == name, 'fund_structure'].values[0])\n issue_scale = str(commit_data.loc[commit_data.fund_full_name == name, 'issue_scale'].values[0])\n asset_scale = str(commit_data.loc[commit_data.fund_full_name == name, 'asset_scale'].values[0])\n is_main_fund = str(commit_data.loc[commit_data.fund_full_name == name, 'is_main_fund'].values[0])\n fee_pay = str(commit_data.loc[commit_data.fund_full_name == name, 'fee_pay'].values[0])\n open_date = str(commit_data.loc[commit_data.fund_full_name == name, 'open_date'])\n locked_time_limit = str(commit_data.loc[commit_data.fund_full_name == name, 'locked_time_limit'].values[0])\n duration = str(commit_data.loc[commit_data.fund_full_name == name, 'duration'].values[0])\n fee_manage = str(commit_data.loc[commit_data.fund_full_name == name, 'fee_manage'].values[0])\n fee_pay_remark = str(commit_data.loc[commit_data.fund_full_name == name, 'fee_pay_remark'].values[0])\n fee_redeem = str(commit_data.loc[commit_data.fund_full_name == name, 'fee_redeem'].values[0])\n fee_subscription = 
str(commit_data.loc[commit_data.fund_full_name == name, 'fee_subscription'].values[0])\n fee_trust = str(commit_data.loc[commit_data.fund_full_name == name, 'fee_trust'].values[0])\n investment_range = str(commit_data.loc[commit_data.fund_full_name == name, 'investment_range'].values[0])\n min_purchase_amount = str(commit_data.loc[commit_data.fund_full_name == name, 'min_purchase_amount'].values[0])\n min_append_amount = str(commit_data.loc[commit_data.fund_full_name == name, 'min_append_amount'].values[0])\n stop_line = str(commit_data.loc[commit_data.fund_full_name == name, 'stop_line'].values[0])\n alert_line = str(commit_data.loc[commit_data.fund_full_name == name, 'alert_line'].values[0])\n manager_participation_scale = str(commit_data.loc[commit_data.fund_full_name == name, 'manager_participation_scale'].values[0])\n investment_idea = str(commit_data.loc[commit_data.fund_full_name == name, 'investment_idea'].values[0])\n structure_hierarchy = str(commit_data.loc[commit_data.fund_full_name == name, 'structure_hierarchy'].values[0])\n remark = str(commit_data.loc[commit_data.fund_full_name == name, 'remark'].values[0])\n\n with con:\n cur = con.cursor()\n sql = \"\"\"UPDATE fund_info SET 'group'=?, fund_type_strategy=?, reg_code=?, foundation_date=?, fund_name=?,\\\n fund_full_name=?, fund_manager=?, fund_manager_nominal=?, fund_stockbroker=?, fund_custodian=?, fund_member=?,\\\n fund_type_issuance=?, fund_type_structure=?, fund_structure=?, issue_scale=?, asset_scale=?, is_main_fund=?, fee_pay=?,\\\n open_date=?, locked_time_limit=?, duration=?, fee_manage=?, fee_pay_remark=?, fee_redeem=?, fee_subscription=?, fee_trust=?,\\\n investment_range=?, min_purchase_amount=?, min_append_amount=?, stop_line=?, alert_line=?, manager_participation_scale=?, \\\n investment_idea=?, structure_hierarchy=?, remark=? 
WHERE fund_id=?\"\"\"\n l = (group, fund_type_strategy, reg_code, foundation_date, fund_name, fund_full_name, fund_manager, \\\n fund_manager_nominal, fund_stockbroker, fund_custodian, fund_member, fund_type_issuance, \\\n fund_type_structure, fund_structure, issue_scale, asset_scale, is_main_fund, fee_pay, open_date, \\\n locked_time_limit, duration, fee_manage, fee_pay_remark, fee_redeem, fee_subscription, fee_trust, \\\n investment_range, min_purchase_amount, min_append_amount, stop_line, alert_line, manager_participation_scale, \\\n investment_idea, structure_hierarchy, remark, fund_id)\n cur.execute(sql, l)\n print(\"if\")\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df[\"★基金全称\"] == name]\n commit_data.columns = [\"group\", \"fund_type_strategy\", \"reg_code\", \"foundation_date\", \"fund_name\", \"fund_full_name\", \\\n \"fund_manager\", \"fund_manager_nominal\", \"fund_stockbroker\", \"fund_custodian\", \"fund_member\", \\\n \"fund_type_issuance\", \"fund_type_structure\", \"fund_structure\", \"issue_scale\", \"asset_scale\", \\\n \"is_main_fund\", \"fee_pay\", \"open_date\", \"locked_time_limit\", \"duration\", \"fee_manage\", \\\n \"fee_pay_remark\", \"fee_redeem\", \"fee_subscription\", \"fee_trust\", \"investment_range\", \\\n \"min_purchase_amount\", \"min_append_amount\", \"stop_line\", \"alert_line\", \"manager_participation_scale\", \\\n \"investment_idea\", \"structure_hierarchy\", \"remark\"]\n commit_data.loc[:, \"fund_id\"] = 'F'+'0'*(6-len(str(sql_number)))+str(sql_number)\n commit_data.to_sql(\"fund_info\", con, if_exists=\"append\", index=False)\n print(\"else\")\n\ndef df_to_sql_T_3(filefullpath, sheet, row_name):#路径名,sheet为sheet数,row_name为指定行为columns\n #读取存在文件夹中的excel\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how=\"all\")\n excel_df = excel_df.dropna(axis=1, how=\"all\")\n excel_df = excel_df.T\n excel_df.columns = excel_df.loc[row_name]#把【人员简介】的这一行变成columns这一列\n excel_df 
= excel_df.drop(row_name, axis=0, inplace=False)#去除【人员简介】这一行\n excel_df.index = range(len(excel_df))\n excel_df.drop_duplicates(subset=['★姓名'], inplace=True)\n\n #数据库的读取\n con = sqlite3.connect(r\"C:\\Users\\K\\Desktop\\excel-upload-sqlite3\\mins\\db.sqlite3\")\n sql = \"SELECT * FROM manager_info\"#!!!注意sql中没有表格会出错\n sql_df = pd.read_sql(sql, con)\n user_list = sql_df['user_name'].tolist()#list\n sql_number = len(user_list)\n\n\n #依次对数据库中的每一行添加一列id\n user_id_number = 0\n for user_name in sql_df['user_name'].unique():\n user_id_number = user_id_number+1\n user_id = 'M'+'0'*(5-len(str(user_id_number)))+str(user_id_number)\n with con:\n cur = con.cursor()\n cur.execute(\"\"\"UPDATE manager_info SET user_id=? WHERE user_name=?\"\"\", (user_id, user_name))\n\n\n #对excel进行读取\n #excel_data = pd.read_excel(filefullpath, sheetname=sheet)\n excel_name_list = excel_df['★姓名'].tolist()#list\n for name in excel_name_list:\n if name in user_list:\n #提取数据库中的user_name为name的id\n con = sqlite3.connect(r\"C:\\Users\\K\\Desktop\\excel-upload-sqlite3\\mins\\db.sqlite3\")\n sql = \"SELECT * FROM manager_info\"\n sql_df = pd.read_sql(sql, con)\n name_dataframe =sql_df[sql_df[\"user_name\"] == name]\n user_id = name_dataframe.loc[name_dataframe.last_valid_index(), 'user_id']#loc到最后一个有效的index和fund_id,取出值\n\n #把excel的一行变成dataframe,并且加上id,并上传到数据库\n commit_data = excel_df[excel_df[\"★姓名\"] == name]\n commit_data.columns = [\"user_name\", \"sex\", \"org_name\", \"introduction\", \"photo\", \"entry_date\",\n \"investment_years\", \"education\", \"duty\", \"qualification\", \"background\", \"is_fund_qualification\",\n \"is_core_member\", \"resume\", \"max_asset_mgt_scale\", \"prize\", \"remark\"]\n commit_data[\"user_id\"] = str(user_id)#不需要\n\n #把一行表格dataframe提取其中的值\n user_name = str(name)\n sex = str(commit_data.loc[commit_data.user_name == name, 'sex'].values[0])\n org_name = str(commit_data.loc[commit_data.user_name == name, 'org_name'].values[0])\n introduction = 
str(commit_data.loc[commit_data.user_name == name, 'introduction'].values[0])\n photo = str(commit_data.loc[commit_data.user_name == name, 'photo'].values[0])\n entry_date = str(commit_data.loc[commit_data.user_name == name, 'entry_date'].values[0])\n investment_years = str(commit_data.loc[commit_data.user_name == name, 'investment_years'].values[0])\n education = str(commit_data.loc[commit_data.user_name == name, 'education'].values[0])\n duty = str(commit_data.loc[commit_data.user_name == name, 'duty'].values[0])\n qualification = str(commit_data.loc[commit_data.user_name == name, 'qualification'].values[0])\n background = str(commit_data.loc[commit_data.user_name == name, 'background'].values[0])\n is_fund_qualification = str(commit_data.loc[commit_data.user_name == name, 'is_fund_qualification'].values[0])\n is_core_member = str(commit_data.loc[commit_data.user_name == name, 'is_core_member'].values[0])\n resume = str(commit_data.loc[commit_data.user_name == name, 'resume'].values[0])\n max_asset_mgt_scale = str(commit_data.loc[commit_data.user_name == name, 'max_asset_mgt_scale'].values[0])\n prize = str(commit_data.loc[commit_data.user_name == name, 'prize'].values[0])\n remark = str(commit_data.loc[commit_data.user_name == name, 'remark'].values[0])\n\n with con:\n cur = con.cursor()\n sql = \"\"\"UPDATE manager_info SET user_name=?, sex=?, org_name=?, introduction=?, photo=?, \\\n entry_date=?, investment_years=?, education=?, duty=?, qualification=?, background=?, is_fund_qualification=?, \\\n is_core_member=?, resume=?, max_asset_mgt_scale=?, prize=?, remark=? 
WHERE user_id=?\"\"\"\n l = (user_name, sex, org_name, introduction, photo, entry_date, investment_years, education, \\\n duty, qualification, background, is_fund_qualification, is_core_member, resume, max_asset_mgt_scale, \\\n prize, remark, user_id)\n cur.execute(sql, l)\n print(\"if\")\n else:\n sql_number = sql_number + 1\n commit_data = excel_df[excel_df[\"★姓名\"] == name]\n commit_data.columns = [\"user_name\", \"sex\", \"org_name\", \"introduction\", \"photo\", \"entry_date\", \\\n \"investment_years\", \"education\", \"duty\", \"qualification\", \"background\", \\\n \"is_fund_qualification\", \"is_core_member\", \"resume\", \"max_asset_mgt_scale\", \"prize\", \\\n \"remark\"]\n commit_data.loc[:, \"user_id\"] = 'M'+'0'*(5-len(str(sql_number)))+str(sql_number)\n commit_data.to_sql(\"manager_info\", con, if_exists=\"append\", index=False)\n print(\"else\")\n\ndef df_to_sql_4(filefullpath, sheet, row_name):\n #读取处理文件夹中的excel\n excel_df = pd.read_excel(filefullpath, sheetname=sheet)\n excel_df = excel_df.dropna(how=\"all\")\n #excel_df = excel_df.dropna(axis=1, how=\"all\")\n excel_df[row_name] = excel_df[row_name].ffill()\n excel_df.index = range(len(excel_df))\n print(excel_df)\n\n #数据库的读取\n con = sqlite3.connect(r\"C:\\Users\\K\\Desktop\\excel-upload-sqlite3\\mins\\db.sqlite3\")\n sql = \"SELECT * FROM fund_nav_data\"\n sql_df = pd.read_sql(sql, con)\n name_list = sql_df['fund_name'].tolist()\n date_list = sql_df['statistic_date'].tolist()\n print(\"name_list\")\n #print(type(name_list[0]))\n print(name_list)\n print(\"date_list\")\n #print(type(date_list[0]))\n print(date_list)\n\n #从fund_info数据表中提取出fund_id,加入fund_nav_data数据表中的fund_id\n for fund_name in sql_df['fund_name'].unique():\n sql = \"SELECT * FROM fund_info\"\n fund_info_sql_df = pd.read_sql(sql, con)\n fund_id = fund_info_sql_df.loc[fund_info_sql_df.fund_name == fund_name, 'fund_id'].values[0]\n with con:\n cur = con.cursor()\n cur.execute(\"\"\"UPDATE fund_nav_data SET fund_id=? 
WHERE fund_name=?\"\"\", (fund_id, fund_name))\n\n #对excel_df进行读取\n excel_name_list = excel_df['基金简称'].tolist()\n excel_name_list = list(set(excel_name_list))\n print(\"excel_name_list\")\n #print(type(excel_name_list[0]))\n print(excel_name_list)\n\n for name in excel_name_list:\n statistic_date_series = excel_df.loc[excel_df['基金简称'] == name, '净值日期']\n excel_date_list = statistic_date_series.tolist()\n excel_date_list = [str(i) for i in excel_date_list]\n print(\"excel_date_list\")\n #print(type(excel_date_list[0]))\n print(excel_date_list)\n for date in excel_date_list:\n if name in name_list and date in date_list:\n commit_data = excel_df[excel_df['基金简称'] == name]\n print(commit_data.columns)\n commit_data.columns = [\"fund_name\", \"statistic_date\", \"nav\", \"added_nav\", \"total_share\", \"total_asset\", \"total_nav\", \"is_split\", \"is_open_date\", \"split_ratio\", \"after_tax_bonus\"]\n commit_data[\"fund_id\"] = str(fund_id)\n\n fund_name = name\n statistic_date = str(date)\n nav = str(commit_data.loc[commit_data.statistic_date == date, 'nav'].values[0])\n added_nav = str(commit_data.loc[commit_data.statistic_date == date, 'added_nav'].values[0])\n total_share = str(commit_data.loc[commit_data.statistic_date == date, 'total_share'].values[0])\n total_asset = str(commit_data.loc[commit_data.statistic_date == date, 'total_asset'].values[0])\n total_nav = str(commit_data.loc[commit_data.statistic_date == date, 'total_nav'].values[0])\n is_split = str(commit_data.loc[commit_data.statistic_date == date, 'is_split'].values[0])\n is_open_date = str(commit_data.loc[commit_data.statistic_date == date, 'is_open_date'].values[0])\n split_ratio = str(commit_data.loc[commit_data.statistic_date == date, 'split_ratio'].values[0])\n after_tax_bonus = str(commit_data.loc[commit_data.statistic_date == date, 'after_tax_bonus'].values[0])\n\n with con:\n cur = con.cursor()\n sql = \"\"\"UPDATE fund_nav_data SET nav=?, added_nav=?, total_share=?, total_asset=?, total_nav=?, 
is_split=?, is_open_date=?, split_ratio=?, after_tax_bonus=? WHERE fund_name=? AND statistic_date=?\"\"\"\n l = (nav, added_nav, total_share, total_asset, total_nav, is_split, is_open_date, split_ratio, after_tax_bonus, fund_name, statistic_date)\n cur.execute(sql, l)\n print(\"if\")\n else:\n commit_data = excel_df[(excel_df[\"基金简称\"] == name)&(excel_df[\"净值日期\"] == date)]\n commit_data.columns = [\"fund_name\", \"statistic_date\", \"nav\", \"added_nav\", \"total_share\", \"total_asset\", \"total_nav\", \"is_split\", \"is_open_date\", \"split_ratio\", \"after_tax_bonus\"]\n commit_data.to_sql(\"fund_nav_data\", con, if_exists=\"append\", index=False)\n print(\"else\")\n\ndef listing(request):\n context = {}\n if request.method == \"POST\":\n uf = UserForm(request.POST, request.FILES)\n if request.user.username and uf.is_valid():\n #username = uf.cleaned_data['username']\n user_upload_file = uf.cleaned_data['user_upload_file']\n #写入数据库\n profile = UserProfile()\n profile.username = request.user.username\n profile.user_upload_file = user_upload_file\n profile.save()\n file_name = request.FILES.get('user_upload_file').name\n path = \"C:\\\\Users\\\\K\\\\Desktop\\\\excel-upload-sqlite3\\\\mins\\\\upload\\\\upload\\\\\"\n #C:\\Users\\K\\Desktop\\excel - upload - sqlite3\\excel - upload - sqlite3\\mins\\upload\\upload\\华泰大赛参赛私募基金数据填报模板.xlsx\n filefullpath = path + file_name\n #print(filefullpath)\n if user_upload_file:\n b = xlrd.open_workbook(filefullpath)\n #count = len(b.sheets())#不需要,sheet数都是固定的\n for sheet in range(1, 5):\n if sheet == 1:\n row_name = \"公司资料简介\"\n df_to_sql_T_1(filefullpath, sheet, row_name)\n if sheet == 2:\n row_name = \"基金简介\"\n df_to_sql_T_2(filefullpath, sheet, row_name)\n if sheet == 3:\n row_name = \"人员简介\"\n df_to_sql_T_3(filefullpath, sheet, row_name)\n if sheet == 4:\n row_name = \"基金简称\"\n df_to_sql_4(filefullpath, sheet, row_name)\n return HttpResponse('upload ok!')\n else:\n return redirect(to='login')\n else:\n uf = UserForm()\n 
context['uf'] = uf\n return render(request, 'website/templates/listing.html', context)\n\ndef index_login(request):\n context = {}\n if request.method == \"GET\":\n form = AuthenticationForm\n if request.method == \"POST\":\n form = AuthenticationForm(data=request.POST)\n if form.is_valid():\n login(request, form.get_user())\n return redirect(to='list')\n context['form'] = form\n return render(request, 'register_login.html', context)\n\ndef index_register(request):\n context = {}\n if request.method == 'GET':\n form = UserCreationForm\n if request.method == 'POST':\n form = UserCreationForm(request.POST)\n if form.is_valid():\n form.save()\n return redirect(to='login')\n context['form'] = form\n return render(request, 'register_login.html', context)",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
class BreakoutDeterministic_v4(Environment):
def __init__(self):
self.env = gym.make(EnvironmentName.BREAKOUT_DETERMINISTIC_V4.value)
super(BreakoutDeterministic_v4, self).__init__()
self.action_shape = self.get_action_shape()
self.state_shape = self.get_state_shape()
self.cnn_input_height = self.state_shape[0]
self.cnn_input_width = self.state_shape[1]
self.cnn_input_channels = self.state_shape[2]
self.continuous = False
self.last_ball_lives = -1
self.skipping_state_fq = 3
self.skipping_state_index = 0
<|reserved_special_token_0|>
@staticmethod
def downsample(img):
return img[::2, ::2]
<|reserved_special_token_0|>
def preprocess(self, img):
gray_frame = self.to_grayscale(self.downsample(img))
if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:
state = np.expand_dims(gray_frame, axis=0)
elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:
state = gray_frame.flatten()
else:
state = None
return state
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def action_meanings(self):
action_meanings = self.env.get_action_meanings()
action_meanings.remove('FIRE')
return action_meanings
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def reset(self):
self.env.reset()
next_state, reward, done, info = self.env.step(1)
self.last_ball_lives = info['ale.lives']
info['dead'] = False
return self.preprocess(next_state)
def step(self, action):
if action == 1:
env_action = 2
elif action == 2:
env_action = 3
else:
env_action = 0
next_state, reward, done, info = self.env.step(env_action)
if self.last_ball_lives != info['ale.lives']:
env_action = 1
self.last_ball_lives = info['ale.lives']
next_state, reward, done, info = self.env.step(env_action)
info['dead'] = True
reward = -5.0
adjusted_reward = self.transform_reward(reward)
return self.preprocess(next_state), reward, adjusted_reward, done, info
def render(self):
self.env.render()
def close(self):
self.env.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BreakoutDeterministic_v4(Environment):
def __init__(self):
self.env = gym.make(EnvironmentName.BREAKOUT_DETERMINISTIC_V4.value)
super(BreakoutDeterministic_v4, self).__init__()
self.action_shape = self.get_action_shape()
self.state_shape = self.get_state_shape()
self.cnn_input_height = self.state_shape[0]
self.cnn_input_width = self.state_shape[1]
self.cnn_input_channels = self.state_shape[2]
self.continuous = False
self.last_ball_lives = -1
self.skipping_state_fq = 3
self.skipping_state_index = 0
<|reserved_special_token_0|>
@staticmethod
def downsample(img):
return img[::2, ::2]
<|reserved_special_token_0|>
def preprocess(self, img):
gray_frame = self.to_grayscale(self.downsample(img))
if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:
state = np.expand_dims(gray_frame, axis=0)
elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:
state = gray_frame.flatten()
else:
state = None
return state
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def action_meanings(self):
action_meanings = self.env.get_action_meanings()
action_meanings.remove('FIRE')
return action_meanings
<|reserved_special_token_0|>
def get_action_shape(self):
action_shape = self.env.action_space.n - 1
return action_shape,
def get_action_space(self):
return self.env.action_space
def reset(self):
self.env.reset()
next_state, reward, done, info = self.env.step(1)
self.last_ball_lives = info['ale.lives']
info['dead'] = False
return self.preprocess(next_state)
def step(self, action):
if action == 1:
env_action = 2
elif action == 2:
env_action = 3
else:
env_action = 0
next_state, reward, done, info = self.env.step(env_action)
if self.last_ball_lives != info['ale.lives']:
env_action = 1
self.last_ball_lives = info['ale.lives']
next_state, reward, done, info = self.env.step(env_action)
info['dead'] = True
reward = -5.0
adjusted_reward = self.transform_reward(reward)
return self.preprocess(next_state), reward, adjusted_reward, done, info
def render(self):
self.env.render()
def close(self):
self.env.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BreakoutDeterministic_v4(Environment):
def __init__(self):
self.env = gym.make(EnvironmentName.BREAKOUT_DETERMINISTIC_V4.value)
super(BreakoutDeterministic_v4, self).__init__()
self.action_shape = self.get_action_shape()
self.state_shape = self.get_state_shape()
self.cnn_input_height = self.state_shape[0]
self.cnn_input_width = self.state_shape[1]
self.cnn_input_channels = self.state_shape[2]
self.continuous = False
self.last_ball_lives = -1
self.skipping_state_fq = 3
self.skipping_state_index = 0
@staticmethod
def to_grayscale(img):
r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2]
gray = 0.2989 * r + 0.587 * g + 0.114 * b
return gray
@staticmethod
def downsample(img):
return img[::2, ::2]
@staticmethod
def transform_reward(reward):
return np.sign(reward)
def preprocess(self, img):
gray_frame = self.to_grayscale(self.downsample(img))
if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:
state = np.expand_dims(gray_frame, axis=0)
elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:
state = gray_frame.flatten()
else:
state = None
return state
def get_n_states(self):
if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:
return 1, 105, 80
elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:
return 8400
else:
return None
def get_n_actions(self):
return self.env.action_space.n - 1
@property
def action_meanings(self):
action_meanings = self.env.get_action_meanings()
action_meanings.remove('FIRE')
return action_meanings
def get_state_shape(self):
state_shape = int(self.env.observation_space.shape[0] / 2), int(
self.env.observation_space.shape[1] / 2), 1
return state_shape
def get_action_shape(self):
action_shape = self.env.action_space.n - 1
return action_shape,
def get_action_space(self):
return self.env.action_space
def reset(self):
self.env.reset()
next_state, reward, done, info = self.env.step(1)
self.last_ball_lives = info['ale.lives']
info['dead'] = False
return self.preprocess(next_state)
def step(self, action):
if action == 1:
env_action = 2
elif action == 2:
env_action = 3
else:
env_action = 0
next_state, reward, done, info = self.env.step(env_action)
if self.last_ball_lives != info['ale.lives']:
env_action = 1
self.last_ball_lives = info['ale.lives']
next_state, reward, done, info = self.env.step(env_action)
info['dead'] = True
reward = -5.0
adjusted_reward = self.transform_reward(reward)
return self.preprocess(next_state), reward, adjusted_reward, done, info
def render(self):
self.env.render()
def close(self):
self.env.close()
<|reserved_special_token_1|>
import gym
import numpy as np
from rl_main.conf.names import EnvironmentName, DeepLearningModelName
from rl_main.environments.environment import Environment
from rl_main.main_constants import DEEP_LEARNING_MODEL
class BreakoutDeterministic_v4(Environment):
def __init__(self):
self.env = gym.make(EnvironmentName.BREAKOUT_DETERMINISTIC_V4.value)
super(BreakoutDeterministic_v4, self).__init__()
self.action_shape = self.get_action_shape()
self.state_shape = self.get_state_shape()
self.cnn_input_height = self.state_shape[0]
self.cnn_input_width = self.state_shape[1]
self.cnn_input_channels = self.state_shape[2]
self.continuous = False
self.last_ball_lives = -1
self.skipping_state_fq = 3
self.skipping_state_index = 0
@staticmethod
def to_grayscale(img):
r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2]
gray = 0.2989 * r + 0.587 * g + 0.114 * b
return gray
@staticmethod
def downsample(img):
return img[::2, ::2]
@staticmethod
def transform_reward(reward):
return np.sign(reward)
def preprocess(self, img):
gray_frame = self.to_grayscale(self.downsample(img))
if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:
state = np.expand_dims(gray_frame, axis=0)
elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:
state = gray_frame.flatten()
else:
state = None
return state
def get_n_states(self):
if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:
return 1, 105, 80
elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:
return 8400
else:
return None
def get_n_actions(self):
return self.env.action_space.n - 1
@property
def action_meanings(self):
action_meanings = self.env.get_action_meanings()
action_meanings.remove('FIRE')
return action_meanings
def get_state_shape(self):
state_shape = int(self.env.observation_space.shape[0] / 2), int(
self.env.observation_space.shape[1] / 2), 1
return state_shape
def get_action_shape(self):
action_shape = self.env.action_space.n - 1
return action_shape,
def get_action_space(self):
return self.env.action_space
def reset(self):
self.env.reset()
next_state, reward, done, info = self.env.step(1)
self.last_ball_lives = info['ale.lives']
info['dead'] = False
return self.preprocess(next_state)
def step(self, action):
if action == 1:
env_action = 2
elif action == 2:
env_action = 3
else:
env_action = 0
next_state, reward, done, info = self.env.step(env_action)
if self.last_ball_lives != info['ale.lives']:
env_action = 1
self.last_ball_lives = info['ale.lives']
next_state, reward, done, info = self.env.step(env_action)
info['dead'] = True
reward = -5.0
adjusted_reward = self.transform_reward(reward)
return self.preprocess(next_state), reward, adjusted_reward, done, info
def render(self):
self.env.render()
def close(self):
self.env.close()
<|reserved_special_token_1|>
# https://github.com/openai/gym/blob/master/gym/envs/__init__.py#L449
import gym
import numpy as np
from rl_main.conf.names import EnvironmentName, DeepLearningModelName
from rl_main.environments.environment import Environment
from rl_main.main_constants import DEEP_LEARNING_MODEL
class BreakoutDeterministic_v4(Environment):
def __init__(self):
self.env = gym.make(EnvironmentName.BREAKOUT_DETERMINISTIC_V4.value)
super(BreakoutDeterministic_v4, self).__init__()
self.action_shape = self.get_action_shape()
self.state_shape = self.get_state_shape()
self.cnn_input_height = self.state_shape[0]
self.cnn_input_width = self.state_shape[1]
self.cnn_input_channels = self.state_shape[2]
self.continuous = False
self.last_ball_lives = -1
self.skipping_state_fq = 3
self.skipping_state_index = 0
@staticmethod
def to_grayscale(img):
r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2]
gray = 0.2989 * r + 0.5870 * g + 0.1140 * b
return gray
@staticmethod
def downsample(img):
return img[::2, ::2]
@staticmethod
def transform_reward(reward):
return np.sign(reward)
def preprocess(self, img):
gray_frame = self.to_grayscale(self.downsample(img))
if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:
state = np.expand_dims(gray_frame, axis=0)
elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:
state = gray_frame.flatten()
else:
state = None
return state
def get_n_states(self):
if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:
return 1, 105, 80 # input_channels, input_height, input_width
elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:
return 8400
else:
return None
def get_n_actions(self):
return self.env.action_space.n - 1
@property
def action_meanings(self):
action_meanings = self.env.get_action_meanings()
action_meanings.remove('FIRE')
return action_meanings
def get_state_shape(self):
state_shape = (int(self.env.observation_space.shape[0]/2), int(self.env.observation_space.shape[1]/2), 1)
return state_shape
def get_action_shape(self):
action_shape = self.env.action_space.n - 1
return action_shape,
def get_action_space(self):
return self.env.action_space
def reset(self):
self.env.reset()
next_state, reward, done, info = self.env.step(1)
self.last_ball_lives = info['ale.lives']
info["dead"] = False #if a ball fall down, dead is true
return self.preprocess(next_state)
def step(self, action):
if action == 1:
env_action = 2
elif action == 2:
env_action = 3
else:
env_action = 0
next_state, reward, done, info = self.env.step(env_action)
if self.last_ball_lives != info['ale.lives']:
env_action = 1
self.last_ball_lives = info['ale.lives']
next_state, reward, done, info = self.env.step(env_action)
info["dead"] = True
reward = -5.0
# info["skipping"] = True
# if self.skipping_state_index == self.skipping_state_fq:
# self.skipping_state_index = 0
# info["skipping"] = False
adjusted_reward = self.transform_reward(reward)
# self.skipping_state_index += 1
return self.preprocess(next_state), reward, adjusted_reward, done, info
def render(self):
self.env.render()
def close(self):
self.env.close()
|
flexible
|
{
"blob_id": "05e57ed95427f0de74ea5b0589c5cd56e4a96f73",
"index": 8776,
"step-1": "<mask token>\n\n\nclass BreakoutDeterministic_v4(Environment):\n\n def __init__(self):\n self.env = gym.make(EnvironmentName.BREAKOUT_DETERMINISTIC_V4.value)\n super(BreakoutDeterministic_v4, self).__init__()\n self.action_shape = self.get_action_shape()\n self.state_shape = self.get_state_shape()\n self.cnn_input_height = self.state_shape[0]\n self.cnn_input_width = self.state_shape[1]\n self.cnn_input_channels = self.state_shape[2]\n self.continuous = False\n self.last_ball_lives = -1\n self.skipping_state_fq = 3\n self.skipping_state_index = 0\n <mask token>\n\n @staticmethod\n def downsample(img):\n return img[::2, ::2]\n <mask token>\n\n def preprocess(self, img):\n gray_frame = self.to_grayscale(self.downsample(img))\n if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:\n state = np.expand_dims(gray_frame, axis=0)\n elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:\n state = gray_frame.flatten()\n else:\n state = None\n return state\n <mask token>\n <mask token>\n\n @property\n def action_meanings(self):\n action_meanings = self.env.get_action_meanings()\n action_meanings.remove('FIRE')\n return action_meanings\n <mask token>\n <mask token>\n <mask token>\n\n def reset(self):\n self.env.reset()\n next_state, reward, done, info = self.env.step(1)\n self.last_ball_lives = info['ale.lives']\n info['dead'] = False\n return self.preprocess(next_state)\n\n def step(self, action):\n if action == 1:\n env_action = 2\n elif action == 2:\n env_action = 3\n else:\n env_action = 0\n next_state, reward, done, info = self.env.step(env_action)\n if self.last_ball_lives != info['ale.lives']:\n env_action = 1\n self.last_ball_lives = info['ale.lives']\n next_state, reward, done, info = self.env.step(env_action)\n info['dead'] = True\n reward = -5.0\n adjusted_reward = self.transform_reward(reward)\n return self.preprocess(next_state), reward, adjusted_reward, done, info\n\n def render(self):\n self.env.render()\n\n def close(self):\n 
self.env.close()\n",
"step-2": "<mask token>\n\n\nclass BreakoutDeterministic_v4(Environment):\n\n def __init__(self):\n self.env = gym.make(EnvironmentName.BREAKOUT_DETERMINISTIC_V4.value)\n super(BreakoutDeterministic_v4, self).__init__()\n self.action_shape = self.get_action_shape()\n self.state_shape = self.get_state_shape()\n self.cnn_input_height = self.state_shape[0]\n self.cnn_input_width = self.state_shape[1]\n self.cnn_input_channels = self.state_shape[2]\n self.continuous = False\n self.last_ball_lives = -1\n self.skipping_state_fq = 3\n self.skipping_state_index = 0\n <mask token>\n\n @staticmethod\n def downsample(img):\n return img[::2, ::2]\n <mask token>\n\n def preprocess(self, img):\n gray_frame = self.to_grayscale(self.downsample(img))\n if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:\n state = np.expand_dims(gray_frame, axis=0)\n elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:\n state = gray_frame.flatten()\n else:\n state = None\n return state\n <mask token>\n <mask token>\n\n @property\n def action_meanings(self):\n action_meanings = self.env.get_action_meanings()\n action_meanings.remove('FIRE')\n return action_meanings\n <mask token>\n\n def get_action_shape(self):\n action_shape = self.env.action_space.n - 1\n return action_shape,\n\n def get_action_space(self):\n return self.env.action_space\n\n def reset(self):\n self.env.reset()\n next_state, reward, done, info = self.env.step(1)\n self.last_ball_lives = info['ale.lives']\n info['dead'] = False\n return self.preprocess(next_state)\n\n def step(self, action):\n if action == 1:\n env_action = 2\n elif action == 2:\n env_action = 3\n else:\n env_action = 0\n next_state, reward, done, info = self.env.step(env_action)\n if self.last_ball_lives != info['ale.lives']:\n env_action = 1\n self.last_ball_lives = info['ale.lives']\n next_state, reward, done, info = self.env.step(env_action)\n info['dead'] = True\n reward = -5.0\n adjusted_reward = self.transform_reward(reward)\n return 
self.preprocess(next_state), reward, adjusted_reward, done, info\n\n def render(self):\n self.env.render()\n\n def close(self):\n self.env.close()\n",
"step-3": "<mask token>\n\n\nclass BreakoutDeterministic_v4(Environment):\n\n def __init__(self):\n self.env = gym.make(EnvironmentName.BREAKOUT_DETERMINISTIC_V4.value)\n super(BreakoutDeterministic_v4, self).__init__()\n self.action_shape = self.get_action_shape()\n self.state_shape = self.get_state_shape()\n self.cnn_input_height = self.state_shape[0]\n self.cnn_input_width = self.state_shape[1]\n self.cnn_input_channels = self.state_shape[2]\n self.continuous = False\n self.last_ball_lives = -1\n self.skipping_state_fq = 3\n self.skipping_state_index = 0\n\n @staticmethod\n def to_grayscale(img):\n r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2]\n gray = 0.2989 * r + 0.587 * g + 0.114 * b\n return gray\n\n @staticmethod\n def downsample(img):\n return img[::2, ::2]\n\n @staticmethod\n def transform_reward(reward):\n return np.sign(reward)\n\n def preprocess(self, img):\n gray_frame = self.to_grayscale(self.downsample(img))\n if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:\n state = np.expand_dims(gray_frame, axis=0)\n elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:\n state = gray_frame.flatten()\n else:\n state = None\n return state\n\n def get_n_states(self):\n if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:\n return 1, 105, 80\n elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:\n return 8400\n else:\n return None\n\n def get_n_actions(self):\n return self.env.action_space.n - 1\n\n @property\n def action_meanings(self):\n action_meanings = self.env.get_action_meanings()\n action_meanings.remove('FIRE')\n return action_meanings\n\n def get_state_shape(self):\n state_shape = int(self.env.observation_space.shape[0] / 2), int(\n self.env.observation_space.shape[1] / 2), 1\n return state_shape\n\n def get_action_shape(self):\n action_shape = self.env.action_space.n - 1\n return action_shape,\n\n def get_action_space(self):\n return self.env.action_space\n\n def reset(self):\n self.env.reset()\n 
next_state, reward, done, info = self.env.step(1)\n self.last_ball_lives = info['ale.lives']\n info['dead'] = False\n return self.preprocess(next_state)\n\n def step(self, action):\n if action == 1:\n env_action = 2\n elif action == 2:\n env_action = 3\n else:\n env_action = 0\n next_state, reward, done, info = self.env.step(env_action)\n if self.last_ball_lives != info['ale.lives']:\n env_action = 1\n self.last_ball_lives = info['ale.lives']\n next_state, reward, done, info = self.env.step(env_action)\n info['dead'] = True\n reward = -5.0\n adjusted_reward = self.transform_reward(reward)\n return self.preprocess(next_state), reward, adjusted_reward, done, info\n\n def render(self):\n self.env.render()\n\n def close(self):\n self.env.close()\n",
"step-4": "import gym\nimport numpy as np\nfrom rl_main.conf.names import EnvironmentName, DeepLearningModelName\nfrom rl_main.environments.environment import Environment\nfrom rl_main.main_constants import DEEP_LEARNING_MODEL\n\n\nclass BreakoutDeterministic_v4(Environment):\n\n def __init__(self):\n self.env = gym.make(EnvironmentName.BREAKOUT_DETERMINISTIC_V4.value)\n super(BreakoutDeterministic_v4, self).__init__()\n self.action_shape = self.get_action_shape()\n self.state_shape = self.get_state_shape()\n self.cnn_input_height = self.state_shape[0]\n self.cnn_input_width = self.state_shape[1]\n self.cnn_input_channels = self.state_shape[2]\n self.continuous = False\n self.last_ball_lives = -1\n self.skipping_state_fq = 3\n self.skipping_state_index = 0\n\n @staticmethod\n def to_grayscale(img):\n r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2]\n gray = 0.2989 * r + 0.587 * g + 0.114 * b\n return gray\n\n @staticmethod\n def downsample(img):\n return img[::2, ::2]\n\n @staticmethod\n def transform_reward(reward):\n return np.sign(reward)\n\n def preprocess(self, img):\n gray_frame = self.to_grayscale(self.downsample(img))\n if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:\n state = np.expand_dims(gray_frame, axis=0)\n elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:\n state = gray_frame.flatten()\n else:\n state = None\n return state\n\n def get_n_states(self):\n if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:\n return 1, 105, 80\n elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:\n return 8400\n else:\n return None\n\n def get_n_actions(self):\n return self.env.action_space.n - 1\n\n @property\n def action_meanings(self):\n action_meanings = self.env.get_action_meanings()\n action_meanings.remove('FIRE')\n return action_meanings\n\n def get_state_shape(self):\n state_shape = int(self.env.observation_space.shape[0] / 2), int(\n self.env.observation_space.shape[1] / 2), 1\n return state_shape\n\n 
def get_action_shape(self):\n action_shape = self.env.action_space.n - 1\n return action_shape,\n\n def get_action_space(self):\n return self.env.action_space\n\n def reset(self):\n self.env.reset()\n next_state, reward, done, info = self.env.step(1)\n self.last_ball_lives = info['ale.lives']\n info['dead'] = False\n return self.preprocess(next_state)\n\n def step(self, action):\n if action == 1:\n env_action = 2\n elif action == 2:\n env_action = 3\n else:\n env_action = 0\n next_state, reward, done, info = self.env.step(env_action)\n if self.last_ball_lives != info['ale.lives']:\n env_action = 1\n self.last_ball_lives = info['ale.lives']\n next_state, reward, done, info = self.env.step(env_action)\n info['dead'] = True\n reward = -5.0\n adjusted_reward = self.transform_reward(reward)\n return self.preprocess(next_state), reward, adjusted_reward, done, info\n\n def render(self):\n self.env.render()\n\n def close(self):\n self.env.close()\n",
"step-5": "# https://github.com/openai/gym/blob/master/gym/envs/__init__.py#L449\nimport gym\nimport numpy as np\n\nfrom rl_main.conf.names import EnvironmentName, DeepLearningModelName\nfrom rl_main.environments.environment import Environment\nfrom rl_main.main_constants import DEEP_LEARNING_MODEL\n\n\nclass BreakoutDeterministic_v4(Environment):\n def __init__(self):\n self.env = gym.make(EnvironmentName.BREAKOUT_DETERMINISTIC_V4.value)\n super(BreakoutDeterministic_v4, self).__init__()\n self.action_shape = self.get_action_shape()\n self.state_shape = self.get_state_shape()\n\n self.cnn_input_height = self.state_shape[0]\n self.cnn_input_width = self.state_shape[1]\n self.cnn_input_channels = self.state_shape[2]\n self.continuous = False\n\n self.last_ball_lives = -1\n\n self.skipping_state_fq = 3\n self.skipping_state_index = 0\n\n @staticmethod\n def to_grayscale(img):\n r, g, b = img[:, :, 0], img[:, :, 1], img[:, :, 2]\n gray = 0.2989 * r + 0.5870 * g + 0.1140 * b\n return gray\n\n @staticmethod\n def downsample(img):\n return img[::2, ::2]\n\n @staticmethod\n def transform_reward(reward):\n return np.sign(reward)\n\n def preprocess(self, img):\n gray_frame = self.to_grayscale(self.downsample(img))\n\n if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:\n state = np.expand_dims(gray_frame, axis=0)\n elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:\n state = gray_frame.flatten()\n else:\n state = None\n\n return state\n\n def get_n_states(self):\n if DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticCNN:\n return 1, 105, 80 # input_channels, input_height, input_width\n elif DEEP_LEARNING_MODEL == DeepLearningModelName.ActorCriticMLP:\n return 8400\n else:\n return None\n\n def get_n_actions(self):\n return self.env.action_space.n - 1\n\n @property\n def action_meanings(self):\n action_meanings = self.env.get_action_meanings()\n action_meanings.remove('FIRE')\n return action_meanings\n\n def get_state_shape(self):\n 
state_shape = (int(self.env.observation_space.shape[0]/2), int(self.env.observation_space.shape[1]/2), 1)\n return state_shape\n\n def get_action_shape(self):\n action_shape = self.env.action_space.n - 1\n return action_shape,\n\n def get_action_space(self):\n return self.env.action_space\n\n def reset(self):\n self.env.reset()\n next_state, reward, done, info = self.env.step(1)\n self.last_ball_lives = info['ale.lives']\n info[\"dead\"] = False #if a ball fall down, dead is true\n\n return self.preprocess(next_state)\n\n def step(self, action):\n if action == 1:\n env_action = 2\n elif action == 2:\n env_action = 3\n else:\n env_action = 0\n\n next_state, reward, done, info = self.env.step(env_action)\n\n if self.last_ball_lives != info['ale.lives']:\n env_action = 1\n self.last_ball_lives = info['ale.lives']\n next_state, reward, done, info = self.env.step(env_action)\n info[\"dead\"] = True\n reward = -5.0\n\n # info[\"skipping\"] = True\n # if self.skipping_state_index == self.skipping_state_fq:\n # self.skipping_state_index = 0\n # info[\"skipping\"] = False\n\n adjusted_reward = self.transform_reward(reward)\n\n # self.skipping_state_index += 1\n\n return self.preprocess(next_state), reward, adjusted_reward, done, info\n\n def render(self):\n self.env.render()\n\n def close(self):\n self.env.close()\n",
"step-ids": [
9,
11,
16,
17,
18
]
}
|
[
9,
11,
16,
17,
18
] |
"""added Trail.Geometry without srid
Revision ID: 56afb969b589
Revises: 2cf6c7c1f0d7
Create Date: 2014-12-05 18:13:55.512637
"""
# revision identifiers, used by Alembic.
revision = '56afb969b589'
down_revision = '2cf6c7c1f0d7'
from alembic import op
import sqlalchemy as sa
import flask_admin
import geoalchemy2
def upgrade():
### commands auto generated by Alembic - please adjust! ###
#with op.batch_alter_table('POI', schema=None) as batch_op:
# batch_op.drop_index('idx_POI_point')
with op.batch_alter_table('trail', schema=None) as batch_op:
batch_op.add_column(sa.Column('geom', geoalchemy2.types.Geometry(geometry_type='MULTILINESTRING'), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('trail', schema=None) as batch_op:
batch_op.drop_column('geom')
#with op.batch_alter_table('POI', schema=None) as batch_op:
# batch_op.create_index('idx_POI_point', ['point'], unique=False)
### end Alembic commands ###
|
normal
|
{
"blob_id": "d724b4f57cf7683d6b6385bf991ed23a5dd8208f",
"index": 3881,
"step-1": "<mask token>\n\n\ndef upgrade():\n with op.batch_alter_table('trail', schema=None) as batch_op:\n batch_op.add_column(sa.Column('geom', geoalchemy2.types.Geometry(\n geometry_type='MULTILINESTRING'), nullable=True))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef upgrade():\n with op.batch_alter_table('trail', schema=None) as batch_op:\n batch_op.add_column(sa.Column('geom', geoalchemy2.types.Geometry(\n geometry_type='MULTILINESTRING'), nullable=True))\n\n\ndef downgrade():\n with op.batch_alter_table('trail', schema=None) as batch_op:\n batch_op.drop_column('geom')\n",
"step-3": "<mask token>\nrevision = '56afb969b589'\ndown_revision = '2cf6c7c1f0d7'\n<mask token>\n\n\ndef upgrade():\n with op.batch_alter_table('trail', schema=None) as batch_op:\n batch_op.add_column(sa.Column('geom', geoalchemy2.types.Geometry(\n geometry_type='MULTILINESTRING'), nullable=True))\n\n\ndef downgrade():\n with op.batch_alter_table('trail', schema=None) as batch_op:\n batch_op.drop_column('geom')\n",
"step-4": "<mask token>\nrevision = '56afb969b589'\ndown_revision = '2cf6c7c1f0d7'\nfrom alembic import op\nimport sqlalchemy as sa\nimport flask_admin\nimport geoalchemy2\n\n\ndef upgrade():\n with op.batch_alter_table('trail', schema=None) as batch_op:\n batch_op.add_column(sa.Column('geom', geoalchemy2.types.Geometry(\n geometry_type='MULTILINESTRING'), nullable=True))\n\n\ndef downgrade():\n with op.batch_alter_table('trail', schema=None) as batch_op:\n batch_op.drop_column('geom')\n",
"step-5": "\"\"\"added Trail.Geometry without srid\n\nRevision ID: 56afb969b589\nRevises: 2cf6c7c1f0d7\nCreate Date: 2014-12-05 18:13:55.512637\n\n\"\"\"\n\n# revision identifiers, used by Alembic.\nrevision = '56afb969b589'\ndown_revision = '2cf6c7c1f0d7'\n\nfrom alembic import op\nimport sqlalchemy as sa\nimport flask_admin\nimport geoalchemy2\n\n\ndef upgrade():\n ### commands auto generated by Alembic - please adjust! ###\n #with op.batch_alter_table('POI', schema=None) as batch_op:\n # batch_op.drop_index('idx_POI_point')\n\n with op.batch_alter_table('trail', schema=None) as batch_op:\n batch_op.add_column(sa.Column('geom', geoalchemy2.types.Geometry(geometry_type='MULTILINESTRING'), nullable=True))\n\n ### end Alembic commands ###\n\n\ndef downgrade():\n ### commands auto generated by Alembic - please adjust! ###\n with op.batch_alter_table('trail', schema=None) as batch_op:\n batch_op.drop_column('geom')\n\n #with op.batch_alter_table('POI', schema=None) as batch_op:\n # batch_op.create_index('idx_POI_point', ['point'], unique=False)\n\n ### end Alembic commands ###\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import collections
s = [('yellow', 1), ('blue', 2), ('yellow', 3), ('blue', 4), ('red', 1)]
d = collections.defaultdict(list)
d2 = {'test':121}
for k, v in s:
d[k].append(v)
d['test'].append('value')
print list(d.items())
print d
print d['blue']
print type(d)
print type(d2)
|
normal
|
{
"blob_id": "15a894e6f94fc62b97d1614a4213f21331ef12a0",
"index": 7843,
"step-1": "import collections\ns = [('yellow', 1), ('blue', 2), ('yellow', 3), ('blue', 4), ('red', 1)]\n\nd = collections.defaultdict(list)\nd2 = {'test':121}\nfor k, v in s:\n d[k].append(v)\n\nd['test'].append('value')\n\nprint list(d.items())\nprint d\nprint d['blue']\nprint type(d)\nprint type(d2)",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#encoding:utf-8
from flask import Flask
import config
from flask_rabbitmq import Queue, RabbitMQ
app = Flask(__name__)
app.config.from_object(config)
queue = Queue()
mq = RabbitMQ(app, queue)
from app import demo
|
normal
|
{
"blob_id": "ccf9c389a65d1420e87deec2100e37bccdcb5539",
"index": 6323,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp.config.from_object(config)\n<mask token>\n",
"step-3": "<mask token>\napp = Flask(__name__)\napp.config.from_object(config)\nqueue = Queue()\nmq = RabbitMQ(app, queue)\n<mask token>\n",
"step-4": "from flask import Flask\nimport config\nfrom flask_rabbitmq import Queue, RabbitMQ\napp = Flask(__name__)\napp.config.from_object(config)\nqueue = Queue()\nmq = RabbitMQ(app, queue)\nfrom app import demo\n",
"step-5": "#encoding:utf-8\nfrom flask import Flask\nimport config\nfrom flask_rabbitmq import Queue, RabbitMQ\n\napp = Flask(__name__)\napp.config.from_object(config)\n\nqueue = Queue()\nmq = RabbitMQ(app, queue)\n\nfrom app import demo\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from hierarchical_envs.pb_envs.gym_locomotion_envs import InsectBulletEnv
import argparse
import joblib
import tensorflow as tf
from rllab.misc.console import query_yes_no
# from rllab.sampler.utils import rollout
#from pybullet_my_envs.gym_locomotion_envs import Ant6BulletEnv, AntBulletEnv, SwimmerBulletEnv
from hierarchical_envs.pb_envs.gym_locomotion_envs import InsectBulletEnv, AntBulletEnv, SwimmerBulletEnv
from rllab.envs.gym_wrapper import GymEnv
import numpy as np
from rllab.misc import tensor_utils
import time
north_x = 0
north_y = 1e3
def simple_high(states):
x, y, tx, ty = states
if x < tx - 50:
return 0
if x > tx + 50:
return np.pi
if y < ty:
return np.pi / 2
return -np.pi / 2
def rollout(env, pi_low, pi_high, tx=700, ty=0, max_path_length=np.inf, animated=False, speedup=1,
always_return_paths=False):
observations = []
actions = []
rewards = []
agent_infos = []
env_infos = []
o = env.reset()
x, y, z = env.robot.body_xyz
r, p, yaw = env.robot.body_rpy
target_theta = np.arctan2(
ty - y,
tx - x)
angle_to_target = target_theta - yaw
print('direction: ', o[0], o[1])
path_length = 0
if animated:
env.render()
while path_length < max_path_length:
a_high = pi_high([x, y, tx, ty])
feed_o[0] = np.cos(a_high) # get direction
feed_o[1] = np.sin(a_high)
a, agent_info = agent.get_action(feed_o)
next_o, r, d, env_info = env.step(a)
observations.append(env.observation_space.flatten(o))
rewards.append(r)
actions.append(env.action_space.flatten(a))
agent_infos.append(agent_info)
env_infos.append(env_info)
path_length += 1
if d:
break
o = next_o
if animated:
env.render()
timestep = 0.05
time.sleep(timestep / speedup)
if animated and not always_return_paths:
return
return dict(
observations=tensor_utils.stack_tensor_list(observations),
actions=tensor_utils.stack_tensor_list(actions),
rewards=tensor_utils.stack_tensor_list(rewards),
agent_infos=tensor_utils.stack_tensor_dict_list(agent_infos),
env_infos=tensor_utils.stack_tensor_dict_list(env_infos),
)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('low_level', type=str,
help='path to lower_level policy')
parser.add_argument('--max_path_length', type=int, default=500,
help='Max length of rollout')
parser.add_argument('--speedup', type=float, default=1,
help='Speedup')
args = parser.parse_args()
data = joblib.load(args.file)
pi_low = data['policy']
pi_high = simple_high
env = GymEnv(InsectBulletEnv(render=True, d=0.75, r_init=None, d_angle=True))
while True:
path = rollout(env, pi_low, pi_high, max_path_length=args.max_path_length,
animated=True, speedup=args.speedup)
if not query_yes_no('Continue simulation?'):
break
|
normal
|
{
"blob_id": "e85f203e71c8fdad86bd82b19104263cca72caf1",
"index": 4817,
"step-1": "from hierarchical_envs.pb_envs.gym_locomotion_envs import InsectBulletEnv\nimport argparse\n\nimport joblib\nimport tensorflow as tf\n\nfrom rllab.misc.console import query_yes_no\n# from rllab.sampler.utils import rollout\n#from pybullet_my_envs.gym_locomotion_envs import Ant6BulletEnv, AntBulletEnv, SwimmerBulletEnv\nfrom hierarchical_envs.pb_envs.gym_locomotion_envs import InsectBulletEnv, AntBulletEnv, SwimmerBulletEnv\nfrom rllab.envs.gym_wrapper import GymEnv\n\nimport numpy as np\nfrom rllab.misc import tensor_utils\nimport time\n\nnorth_x = 0\nnorth_y = 1e3\n\ndef simple_high(states):\n\tx, y, tx, ty = states\n\tif x < tx - 50:\n\t\treturn 0\n\tif x > tx + 50:\n\t\treturn np.pi\n\tif y < ty:\n\t\treturn np.pi / 2\n\treturn -np.pi / 2\n\n\ndef rollout(env, pi_low, pi_high, tx=700, ty=0, max_path_length=np.inf, animated=False, speedup=1,\n always_return_paths=False):\n observations = []\n actions = []\n rewards = []\n agent_infos = []\n env_infos = []\n o = env.reset()\n x, y, z = env.robot.body_xyz\n r, p, yaw = env.robot.body_rpy\n target_theta = np.arctan2(\n ty - y,\n tx - x)\n angle_to_target = target_theta - yaw\n \n print('direction: ', o[0], o[1])\n path_length = 0\n if animated:\n env.render()\n while path_length < max_path_length:\n a_high = pi_high([x, y, tx, ty]) \n feed_o[0] = np.cos(a_high) # get direction\n feed_o[1] = np.sin(a_high)\n a, agent_info = agent.get_action(feed_o)\n next_o, r, d, env_info = env.step(a)\n observations.append(env.observation_space.flatten(o))\n rewards.append(r)\n actions.append(env.action_space.flatten(a))\n agent_infos.append(agent_info)\n env_infos.append(env_info)\n path_length += 1\n if d:\n break\n o = next_o\n if animated:\n env.render()\n timestep = 0.05\n time.sleep(timestep / speedup)\n if animated and not always_return_paths:\n return\n\n return dict(\n observations=tensor_utils.stack_tensor_list(observations),\n actions=tensor_utils.stack_tensor_list(actions),\n 
rewards=tensor_utils.stack_tensor_list(rewards),\n agent_infos=tensor_utils.stack_tensor_dict_list(agent_infos),\n env_infos=tensor_utils.stack_tensor_dict_list(env_infos),\n )\n\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument('low_level', type=str,\n help='path to lower_level policy')\n parser.add_argument('--max_path_length', type=int, default=500,\n help='Max length of rollout')\n parser.add_argument('--speedup', type=float, default=1,\n help='Speedup')\n args = parser.parse_args()\n\n data = joblib.load(args.file)\n pi_low = data['policy']\n pi_high = simple_high\n\tenv = GymEnv(InsectBulletEnv(render=True, d=0.75, r_init=None, d_angle=True))\n while True:\n path = rollout(env, pi_low, pi_high, max_path_length=args.max_path_length,\n animated=True, speedup=args.speedup)\n if not query_yes_no('Continue simulation?'):\n break\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def find_max_three(a, b, c):
return find_max(a, find_max(b, c))
<|reserved_special_token_1|>
def find_max(a, b):
if a > b:
return a
return b
def find_max_three(a, b, c):
return find_max(a, find_max(b, c))
|
flexible
|
{
"blob_id": "71dc429033b159f6ed806358f2286b4315e842d9",
"index": 9617,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef find_max_three(a, b, c):\n return find_max(a, find_max(b, c))\n",
"step-3": "def find_max(a, b):\n if a > b:\n return a\n return b\n\n\ndef find_max_three(a, b, c):\n return find_max(a, find_max(b, c))\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#!d:\python_projects\env2\scripts\python.exe
# EASY-INSTALL-DEV-SCRIPT: 'Django==2.1.dev20180209010235','django-admin.py'
__requires__ = 'Django==2.1.dev20180209010235'
__import__('pkg_resources').require('Django==2.1.dev20180209010235')
__file__ = 'D:\\python_projects\\ENV2\\django\\django\\bin\\django-admin.py'
exec(compile(open(__file__).read(), __file__, 'exec'))
|
normal
|
{
"blob_id": "4bbf0a0fadc506ad3674912f1885525a94b5b1e9",
"index": 2807,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__import__('pkg_resources').require('Django==2.1.dev20180209010235')\n<mask token>\nexec(compile(open(__file__).read(), __file__, 'exec'))\n",
"step-3": "__requires__ = 'Django==2.1.dev20180209010235'\n__import__('pkg_resources').require('Django==2.1.dev20180209010235')\n__file__ = 'D:\\\\python_projects\\\\ENV2\\\\django\\\\django\\\\bin\\\\django-admin.py'\nexec(compile(open(__file__).read(), __file__, 'exec'))\n",
"step-4": "#!d:\\python_projects\\env2\\scripts\\python.exe\n# EASY-INSTALL-DEV-SCRIPT: 'Django==2.1.dev20180209010235','django-admin.py'\n__requires__ = 'Django==2.1.dev20180209010235'\n__import__('pkg_resources').require('Django==2.1.dev20180209010235')\n__file__ = 'D:\\\\python_projects\\\\ENV2\\\\django\\\\django\\\\bin\\\\django-admin.py'\nexec(compile(open(__file__).read(), __file__, 'exec'))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class store:
<|reserved_special_token_0|>
def add(self, iname, itq, iup):
i = item(iname, itq, iup)
self.dic[iname] = [itq, iup]
def callbill(self, rname, rq):
for i in range(0, len(self.dic)):
if rname in self.dic.keys():
if self.dic[rname][0] == 0:
return None
elif self.dic[rname][0] >= rq:
tem = self.dic[rname][1] * rq
self.dic[rname][0] = self.dic[rname][0] - rq
return tem
elif self.dic[rname][0] < rq:
tem = self.dic[rname][1] * self.dic[rname][0]
self.dic[rname][0] = 0
return tem
else:
return None
def pri(self):
for i in self.dic.keys():
print(i, self.dic.get(i)[0])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class item:
<|reserved_special_token_0|>
class store:
def __init__(self, dic):
self.dic = dic
def add(self, iname, itq, iup):
i = item(iname, itq, iup)
self.dic[iname] = [itq, iup]
def callbill(self, rname, rq):
for i in range(0, len(self.dic)):
if rname in self.dic.keys():
if self.dic[rname][0] == 0:
return None
elif self.dic[rname][0] >= rq:
tem = self.dic[rname][1] * rq
self.dic[rname][0] = self.dic[rname][0] - rq
return tem
elif self.dic[rname][0] < rq:
tem = self.dic[rname][1] * self.dic[rname][0]
self.dic[rname][0] = 0
return tem
else:
return None
def pri(self):
for i in self.dic.keys():
print(i, self.dic.get(i)[0])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class item:
def __init__(self, iname, itq, iup):
self.iname = iname
self.itq = itq
self.iup = iup
class store:
def __init__(self, dic):
self.dic = dic
def add(self, iname, itq, iup):
i = item(iname, itq, iup)
self.dic[iname] = [itq, iup]
def callbill(self, rname, rq):
for i in range(0, len(self.dic)):
if rname in self.dic.keys():
if self.dic[rname][0] == 0:
return None
elif self.dic[rname][0] >= rq:
tem = self.dic[rname][1] * rq
self.dic[rname][0] = self.dic[rname][0] - rq
return tem
elif self.dic[rname][0] < rq:
tem = self.dic[rname][1] * self.dic[rname][0]
self.dic[rname][0] = 0
return tem
else:
return None
def pri(self):
for i in self.dic.keys():
print(i, self.dic.get(i)[0])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class item:
def __init__(self, iname, itq, iup):
self.iname = iname
self.itq = itq
self.iup = iup
class store:
def __init__(self, dic):
self.dic = dic
def add(self, iname, itq, iup):
i = item(iname, itq, iup)
self.dic[iname] = [itq, iup]
def callbill(self, rname, rq):
for i in range(0, len(self.dic)):
if rname in self.dic.keys():
if self.dic[rname][0] == 0:
return None
elif self.dic[rname][0] >= rq:
tem = self.dic[rname][1] * rq
self.dic[rname][0] = self.dic[rname][0] - rq
return tem
elif self.dic[rname][0] < rq:
tem = self.dic[rname][1] * self.dic[rname][0]
self.dic[rname][0] = 0
return tem
else:
return None
def pri(self):
for i in self.dic.keys():
print(i, self.dic.get(i)[0])
n = int(input())
dic = {}
s = store(dic)
for i in range(0, n):
iname = input()
iup = int(input())
itq = int(input())
s.add(iname, itq, iup)
r = int(input())
for i in range(0, r):
rname = input()
rq = int(input())
print('Bill of item', rname, '=', s.callbill(rname, rq))
s.pri()
<|reserved_special_token_1|>
class item():
def __init__(self,iname,itq,iup):
self.iname = iname
self.itq = itq
self.iup = iup
class store():
def __init__(self,dic):
self.dic = dic
def add(self,iname,itq,iup):
i = item(iname,itq,iup)
self.dic[iname]=[itq,iup]
def callbill(self,rname,rq):
#print("ih"self.dic[rname][0],,self.dic[rname][1])
for i in range(0,len(self.dic)):
if rname in self.dic.keys():
#print(self.dic.keys(ranme))
if(self.dic[rname][0]==0):
return(None)
elif(self.dic[rname][0]>=rq):
tem = self.dic[rname][1]*rq
self.dic[rname][0] = self.dic[rname][0]-rq
return(tem)
elif(self.dic[rname][0]<rq):
tem = self.dic[rname][1]*self.dic[rname][0]
self.dic[rname][0] = 0
return(tem)
else:
return(None)
def pri(self):
for i in self.dic.keys():
print(i,(self.dic.get(i))[0])
n = int(input())
dic = {}
s = store(dic)
for i in range (0,n):
iname = input()
iup = int(input())
itq = int(input())
s.add(iname,itq,iup)
#s.pri()
r = int(input())
for i in range(0,r):
rname = input()
rq = int(input())
print("Bill of item",rname,"=",s.callbill(rname,rq))
s.pri()
|
flexible
|
{
"blob_id": "b11210e73b403bc7a9ee24a53201ab2366ec1808",
"index": 7106,
"step-1": "<mask token>\n\n\nclass store:\n <mask token>\n\n def add(self, iname, itq, iup):\n i = item(iname, itq, iup)\n self.dic[iname] = [itq, iup]\n\n def callbill(self, rname, rq):\n for i in range(0, len(self.dic)):\n if rname in self.dic.keys():\n if self.dic[rname][0] == 0:\n return None\n elif self.dic[rname][0] >= rq:\n tem = self.dic[rname][1] * rq\n self.dic[rname][0] = self.dic[rname][0] - rq\n return tem\n elif self.dic[rname][0] < rq:\n tem = self.dic[rname][1] * self.dic[rname][0]\n self.dic[rname][0] = 0\n return tem\n else:\n return None\n\n def pri(self):\n for i in self.dic.keys():\n print(i, self.dic.get(i)[0])\n\n\n<mask token>\n",
"step-2": "class item:\n <mask token>\n\n\nclass store:\n\n def __init__(self, dic):\n self.dic = dic\n\n def add(self, iname, itq, iup):\n i = item(iname, itq, iup)\n self.dic[iname] = [itq, iup]\n\n def callbill(self, rname, rq):\n for i in range(0, len(self.dic)):\n if rname in self.dic.keys():\n if self.dic[rname][0] == 0:\n return None\n elif self.dic[rname][0] >= rq:\n tem = self.dic[rname][1] * rq\n self.dic[rname][0] = self.dic[rname][0] - rq\n return tem\n elif self.dic[rname][0] < rq:\n tem = self.dic[rname][1] * self.dic[rname][0]\n self.dic[rname][0] = 0\n return tem\n else:\n return None\n\n def pri(self):\n for i in self.dic.keys():\n print(i, self.dic.get(i)[0])\n\n\n<mask token>\n",
"step-3": "class item:\n\n def __init__(self, iname, itq, iup):\n self.iname = iname\n self.itq = itq\n self.iup = iup\n\n\nclass store:\n\n def __init__(self, dic):\n self.dic = dic\n\n def add(self, iname, itq, iup):\n i = item(iname, itq, iup)\n self.dic[iname] = [itq, iup]\n\n def callbill(self, rname, rq):\n for i in range(0, len(self.dic)):\n if rname in self.dic.keys():\n if self.dic[rname][0] == 0:\n return None\n elif self.dic[rname][0] >= rq:\n tem = self.dic[rname][1] * rq\n self.dic[rname][0] = self.dic[rname][0] - rq\n return tem\n elif self.dic[rname][0] < rq:\n tem = self.dic[rname][1] * self.dic[rname][0]\n self.dic[rname][0] = 0\n return tem\n else:\n return None\n\n def pri(self):\n for i in self.dic.keys():\n print(i, self.dic.get(i)[0])\n\n\n<mask token>\n",
"step-4": "class item:\n\n def __init__(self, iname, itq, iup):\n self.iname = iname\n self.itq = itq\n self.iup = iup\n\n\nclass store:\n\n def __init__(self, dic):\n self.dic = dic\n\n def add(self, iname, itq, iup):\n i = item(iname, itq, iup)\n self.dic[iname] = [itq, iup]\n\n def callbill(self, rname, rq):\n for i in range(0, len(self.dic)):\n if rname in self.dic.keys():\n if self.dic[rname][0] == 0:\n return None\n elif self.dic[rname][0] >= rq:\n tem = self.dic[rname][1] * rq\n self.dic[rname][0] = self.dic[rname][0] - rq\n return tem\n elif self.dic[rname][0] < rq:\n tem = self.dic[rname][1] * self.dic[rname][0]\n self.dic[rname][0] = 0\n return tem\n else:\n return None\n\n def pri(self):\n for i in self.dic.keys():\n print(i, self.dic.get(i)[0])\n\n\nn = int(input())\ndic = {}\ns = store(dic)\nfor i in range(0, n):\n iname = input()\n iup = int(input())\n itq = int(input())\n s.add(iname, itq, iup)\nr = int(input())\nfor i in range(0, r):\n rname = input()\n rq = int(input())\n print('Bill of item', rname, '=', s.callbill(rname, rq))\ns.pri()\n",
"step-5": "class item():\r\n def __init__(self,iname,itq,iup):\r\n self.iname = iname\r\n self.itq = itq\r\n self.iup = iup\r\n\r\nclass store():\r\n def __init__(self,dic):\r\n self.dic = dic\r\n\r\n def add(self,iname,itq,iup):\r\n i = item(iname,itq,iup)\r\n self.dic[iname]=[itq,iup]\r\n\r\n\r\n def callbill(self,rname,rq):\r\n #print(\"ih\"self.dic[rname][0],,self.dic[rname][1])\r\n for i in range(0,len(self.dic)):\r\n if rname in self.dic.keys():\r\n #print(self.dic.keys(ranme))\r\n if(self.dic[rname][0]==0):\r\n return(None)\r\n elif(self.dic[rname][0]>=rq):\r\n tem = self.dic[rname][1]*rq\r\n self.dic[rname][0] = self.dic[rname][0]-rq\r\n return(tem)\r\n elif(self.dic[rname][0]<rq):\r\n tem = self.dic[rname][1]*self.dic[rname][0]\r\n self.dic[rname][0] = 0\r\n return(tem)\r\n else:\r\n return(None)\r\n\r\n def pri(self):\r\n for i in self.dic.keys():\r\n print(i,(self.dic.get(i))[0])\r\n\r\n\r\nn = int(input())\r\ndic = {}\r\ns = store(dic)\r\nfor i in range (0,n):\r\n iname = input()\r\n iup = int(input())\r\n itq = int(input())\r\n s.add(iname,itq,iup)\r\n#s.pri()\r\n\r\nr = int(input())\r\nfor i in range(0,r):\r\n rname = input()\r\n rq = int(input())\r\n print(\"Bill of item\",rname,\"=\",s.callbill(rname,rq))\r\ns.pri()\r\n",
"step-ids": [
4,
6,
7,
9,
10
]
}
|
[
4,
6,
7,
9,
10
] |
<|reserved_special_token_0|>
class Renderer(base.Renderer):
render = ViewPageTemplateFile('twitterportlet.pt')
def __init__(self, context, request, view, manager, data):
self.context = context
self.request = request
self.view = view
self.manager = manager
self.data = data
def contents(self):
return self.data
class AddForm(base.AddForm):
form_fields = form.Fields(IContentNavigation)
label = u'Add Twitter Portlet'
description = ''
def create(self, data):
assignment = Assignment()
form.applyChanges(assignment, self.form_fields, data)
return assignment
class EditForm(base.EditForm):
form_fields = form.Fields(IContentNavigation)
label = u'Edit Twitter Portlet'
description = ''
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Assignment(base.Assignment):
implements(IContentNavigation)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Renderer(base.Renderer):
render = ViewPageTemplateFile('twitterportlet.pt')
def __init__(self, context, request, view, manager, data):
self.context = context
self.request = request
self.view = view
self.manager = manager
self.data = data
def contents(self):
return self.data
class AddForm(base.AddForm):
form_fields = form.Fields(IContentNavigation)
label = u'Add Twitter Portlet'
description = ''
def create(self, data):
assignment = Assignment()
form.applyChanges(assignment, self.form_fields, data)
return assignment
class EditForm(base.EditForm):
form_fields = form.Fields(IContentNavigation)
label = u'Edit Twitter Portlet'
description = ''
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class IContentNavigation(IPortletDataProvider):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Assignment(base.Assignment):
implements(IContentNavigation)
def __init__(self, portlet_header=None, twitter_username=None,
twitter_widgetId=None):
self.portlet_header = portlet_header
self.twitter_username = twitter_username
self.twitter_widgetId = twitter_widgetId
@property
def title(self):
return self.portlet_header
class Renderer(base.Renderer):
render = ViewPageTemplateFile('twitterportlet.pt')
def __init__(self, context, request, view, manager, data):
self.context = context
self.request = request
self.view = view
self.manager = manager
self.data = data
def contents(self):
return self.data
class AddForm(base.AddForm):
form_fields = form.Fields(IContentNavigation)
label = u'Add Twitter Portlet'
description = ''
def create(self, data):
assignment = Assignment()
form.applyChanges(assignment, self.form_fields, data)
return assignment
class EditForm(base.EditForm):
form_fields = form.Fields(IContentNavigation)
label = u'Edit Twitter Portlet'
description = ''
<|reserved_special_token_1|>
from five import grok
from zope.formlib import form
from zope import schema
from zope.interface import implements
from zope.component import getMultiAdapter
from plone.app.portlets.portlets import base
from plone.memoize.instance import memoize
from plone.portlets.interfaces import IPortletDataProvider
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.CMFCore.utils import getToolByName
class IContentNavigation(IPortletDataProvider):
portlet_header = schema.TextLine(title=u'Portlet Header', default=
u'TWITTER FEED', required=False)
twitter_username = schema.TextLine(title=u'Twitter Username', default=
u'isclimatechange')
twitter_widgetId = schema.TextLine(title=u'Twitter Widget ID', default=
u'565570873433006080')
class Assignment(base.Assignment):
implements(IContentNavigation)
def __init__(self, portlet_header=None, twitter_username=None,
twitter_widgetId=None):
self.portlet_header = portlet_header
self.twitter_username = twitter_username
self.twitter_widgetId = twitter_widgetId
@property
def title(self):
return self.portlet_header
class Renderer(base.Renderer):
render = ViewPageTemplateFile('twitterportlet.pt')
def __init__(self, context, request, view, manager, data):
self.context = context
self.request = request
self.view = view
self.manager = manager
self.data = data
def contents(self):
return self.data
class AddForm(base.AddForm):
form_fields = form.Fields(IContentNavigation)
label = u'Add Twitter Portlet'
description = ''
def create(self, data):
assignment = Assignment()
form.applyChanges(assignment, self.form_fields, data)
return assignment
class EditForm(base.EditForm):
form_fields = form.Fields(IContentNavigation)
label = u'Edit Twitter Portlet'
description = ''
<|reserved_special_token_1|>
from five import grok
from zope.formlib import form
from zope import schema
from zope.interface import implements
from zope.component import getMultiAdapter
from plone.app.portlets.portlets import base
from plone.memoize.instance import memoize
from plone.portlets.interfaces import IPortletDataProvider
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.CMFCore.utils import getToolByName
#grok.templatedir('templates')
class IContentNavigation(IPortletDataProvider):
portlet_header = schema.TextLine(
title = u"Portlet Header",
default = u"TWITTER FEED",
required = False
)
twitter_username = schema.TextLine(
title = u"Twitter Username",
default = u"isclimatechange"
)
twitter_widgetId = schema.TextLine(
title = u"Twitter Widget ID",
default = u"565570873433006080"
)
class Assignment(base.Assignment):
implements(IContentNavigation)
def __init__(self,portlet_header=None, twitter_username= None, twitter_widgetId=None):
self.portlet_header = portlet_header
self.twitter_username = twitter_username
self.twitter_widgetId = twitter_widgetId
@property
def title(self):
return self.portlet_header
class Renderer(base.Renderer):
render = ViewPageTemplateFile('twitterportlet.pt')
def __init__(self, context, request, view, manager, data):
self.context = context
self.request = request
self.view = view
self.manager = manager
self.data = data
def contents(self):
return self.data
class AddForm(base.AddForm):
form_fields = form.Fields(IContentNavigation)
label = u"Add Twitter Portlet"
description = ''
def create(self, data):
assignment = Assignment()
form.applyChanges(assignment, self.form_fields, data)
return assignment
class EditForm(base.EditForm):
form_fields = form.Fields(IContentNavigation)
label = u"Edit Twitter Portlet"
description = ''
|
flexible
|
{
"blob_id": "214585956e44ce006db0702fd23692b11459f9e1",
"index": 7664,
"step-1": "<mask token>\n\n\nclass Renderer(base.Renderer):\n render = ViewPageTemplateFile('twitterportlet.pt')\n\n def __init__(self, context, request, view, manager, data):\n self.context = context\n self.request = request\n self.view = view\n self.manager = manager\n self.data = data\n\n def contents(self):\n return self.data\n\n\nclass AddForm(base.AddForm):\n form_fields = form.Fields(IContentNavigation)\n label = u'Add Twitter Portlet'\n description = ''\n\n def create(self, data):\n assignment = Assignment()\n form.applyChanges(assignment, self.form_fields, data)\n return assignment\n\n\nclass EditForm(base.EditForm):\n form_fields = form.Fields(IContentNavigation)\n label = u'Edit Twitter Portlet'\n description = ''\n",
"step-2": "<mask token>\n\n\nclass Assignment(base.Assignment):\n implements(IContentNavigation)\n <mask token>\n <mask token>\n\n\nclass Renderer(base.Renderer):\n render = ViewPageTemplateFile('twitterportlet.pt')\n\n def __init__(self, context, request, view, manager, data):\n self.context = context\n self.request = request\n self.view = view\n self.manager = manager\n self.data = data\n\n def contents(self):\n return self.data\n\n\nclass AddForm(base.AddForm):\n form_fields = form.Fields(IContentNavigation)\n label = u'Add Twitter Portlet'\n description = ''\n\n def create(self, data):\n assignment = Assignment()\n form.applyChanges(assignment, self.form_fields, data)\n return assignment\n\n\nclass EditForm(base.EditForm):\n form_fields = form.Fields(IContentNavigation)\n label = u'Edit Twitter Portlet'\n description = ''\n",
"step-3": "<mask token>\n\n\nclass IContentNavigation(IPortletDataProvider):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Assignment(base.Assignment):\n implements(IContentNavigation)\n\n def __init__(self, portlet_header=None, twitter_username=None,\n twitter_widgetId=None):\n self.portlet_header = portlet_header\n self.twitter_username = twitter_username\n self.twitter_widgetId = twitter_widgetId\n\n @property\n def title(self):\n return self.portlet_header\n\n\nclass Renderer(base.Renderer):\n render = ViewPageTemplateFile('twitterportlet.pt')\n\n def __init__(self, context, request, view, manager, data):\n self.context = context\n self.request = request\n self.view = view\n self.manager = manager\n self.data = data\n\n def contents(self):\n return self.data\n\n\nclass AddForm(base.AddForm):\n form_fields = form.Fields(IContentNavigation)\n label = u'Add Twitter Portlet'\n description = ''\n\n def create(self, data):\n assignment = Assignment()\n form.applyChanges(assignment, self.form_fields, data)\n return assignment\n\n\nclass EditForm(base.EditForm):\n form_fields = form.Fields(IContentNavigation)\n label = u'Edit Twitter Portlet'\n description = ''\n",
"step-4": "from five import grok\nfrom zope.formlib import form\nfrom zope import schema\nfrom zope.interface import implements\nfrom zope.component import getMultiAdapter\nfrom plone.app.portlets.portlets import base\nfrom plone.memoize.instance import memoize\nfrom plone.portlets.interfaces import IPortletDataProvider\nfrom Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\nfrom Products.CMFCore.utils import getToolByName\n\n\nclass IContentNavigation(IPortletDataProvider):\n portlet_header = schema.TextLine(title=u'Portlet Header', default=\n u'TWITTER FEED', required=False)\n twitter_username = schema.TextLine(title=u'Twitter Username', default=\n u'isclimatechange')\n twitter_widgetId = schema.TextLine(title=u'Twitter Widget ID', default=\n u'565570873433006080')\n\n\nclass Assignment(base.Assignment):\n implements(IContentNavigation)\n\n def __init__(self, portlet_header=None, twitter_username=None,\n twitter_widgetId=None):\n self.portlet_header = portlet_header\n self.twitter_username = twitter_username\n self.twitter_widgetId = twitter_widgetId\n\n @property\n def title(self):\n return self.portlet_header\n\n\nclass Renderer(base.Renderer):\n render = ViewPageTemplateFile('twitterportlet.pt')\n\n def __init__(self, context, request, view, manager, data):\n self.context = context\n self.request = request\n self.view = view\n self.manager = manager\n self.data = data\n\n def contents(self):\n return self.data\n\n\nclass AddForm(base.AddForm):\n form_fields = form.Fields(IContentNavigation)\n label = u'Add Twitter Portlet'\n description = ''\n\n def create(self, data):\n assignment = Assignment()\n form.applyChanges(assignment, self.form_fields, data)\n return assignment\n\n\nclass EditForm(base.EditForm):\n form_fields = form.Fields(IContentNavigation)\n label = u'Edit Twitter Portlet'\n description = ''\n",
"step-5": "from five import grok\nfrom zope.formlib import form\nfrom zope import schema\nfrom zope.interface import implements\nfrom zope.component import getMultiAdapter\nfrom plone.app.portlets.portlets import base\nfrom plone.memoize.instance import memoize\nfrom plone.portlets.interfaces import IPortletDataProvider\nfrom Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\nfrom Products.CMFCore.utils import getToolByName\n\n\n#grok.templatedir('templates')\n\nclass IContentNavigation(IPortletDataProvider):\n \n portlet_header = schema.TextLine(\n title = u\"Portlet Header\",\n default = u\"TWITTER FEED\",\n required = False\n )\n\n twitter_username = schema.TextLine(\n title = u\"Twitter Username\",\n default = u\"isclimatechange\"\n )\n\n twitter_widgetId = schema.TextLine(\n title = u\"Twitter Widget ID\",\n default = u\"565570873433006080\"\n )\n\nclass Assignment(base.Assignment):\n implements(IContentNavigation)\n \n \n def __init__(self,portlet_header=None, twitter_username= None, twitter_widgetId=None):\n self.portlet_header = portlet_header\n self.twitter_username = twitter_username\n self.twitter_widgetId = twitter_widgetId\n \n @property\n def title(self):\n return self.portlet_header\n \n\nclass Renderer(base.Renderer):\n render = ViewPageTemplateFile('twitterportlet.pt')\n \n \n def __init__(self, context, request, view, manager, data):\n self.context = context\n self.request = request\n self.view = view\n self.manager = manager\n self.data = data\n \n \n def contents(self):\n return self.data\n\n \n \n\nclass AddForm(base.AddForm):\n form_fields = form.Fields(IContentNavigation)\n label = u\"Add Twitter Portlet\"\n description = ''\n \n def create(self, data):\n assignment = Assignment()\n form.applyChanges(assignment, self.form_fields, data)\n return assignment\n \n\nclass EditForm(base.EditForm):\n form_fields = form.Fields(IContentNavigation)\n label = u\"Edit Twitter Portlet\"\n description = ''\n",
"step-ids": [
9,
10,
13,
15,
16
]
}
|
[
9,
10,
13,
15,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def f(p_arg, *s_args, **kw_args):
return s_args[0] + kw_args['py'] + p_arg
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def f(p_arg, *s_args, **kw_args):
return s_args[0] + kw_args['py'] + p_arg
r = f(3, 2, py=1)
<|reserved_special_token_1|>
def f(p_arg, *s_args, **kw_args):
return (s_args[0] + kw_args['py'])+p_arg
r = f(3, 2, py = 1) ## value r => 6
|
flexible
|
{
"blob_id": "4a913cfdbddb2f6b5098395814f5fc1203192b9a",
"index": 4847,
"step-1": "<mask token>\n",
"step-2": "def f(p_arg, *s_args, **kw_args):\n return s_args[0] + kw_args['py'] + p_arg\n\n\n<mask token>\n",
"step-3": "def f(p_arg, *s_args, **kw_args):\n return s_args[0] + kw_args['py'] + p_arg\n\n\nr = f(3, 2, py=1)\n",
"step-4": "\r\ndef f(p_arg, *s_args, **kw_args):\r\n return (s_args[0] + kw_args['py'])+p_arg\r\nr = f(3, 2, py = 1) ## value r => 6\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
n = int(input())
a = [list(map(int, input().split())) for _ in range(n)]
b = [list(map(int, input().split())) for _ in range(n)]
a = np.array(a)
b = np.array(b)
print(np.dot(a, b))
|
normal
|
{
"blob_id": "17b8fec5583f2544bd02a2409528082fa1dc2a1e",
"index": 4107,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(np.dot(a, b))\n",
"step-3": "<mask token>\nn = int(input())\na = [list(map(int, input().split())) for _ in range(n)]\nb = [list(map(int, input().split())) for _ in range(n)]\na = np.array(a)\nb = np.array(b)\nprint(np.dot(a, b))\n",
"step-4": "import numpy as np\nn = int(input())\na = [list(map(int, input().split())) for _ in range(n)]\nb = [list(map(int, input().split())) for _ in range(n)]\na = np.array(a)\nb = np.array(b)\nprint(np.dot(a, b))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if wht == 0:
print('wht is', wht)
else:
print('whtsdsb')
<|reserved_special_token_0|>
print('BMI=', bmi)
if bmi < 18.5:
print('too light')
elif bmi < 25:
print('normal')
elif bmi < 30:
print('over')
else:
print('wanghantangshidssb')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
wht = 2
if wht == 0:
print('wht is', wht)
else:
print('whtsdsb')
wei = float(input('wei='))
hei = float(input('hei='))
bmi = wei * 0.45259227 / (hei * 0.0254) ** 2
print('BMI=', bmi)
if bmi < 18.5:
print('too light')
elif bmi < 25:
print('normal')
elif bmi < 30:
print('over')
else:
print('wanghantangshidssb')
<|reserved_special_token_1|>
'''
BMI=weight*0.45259227/(hei*0.0254)**
'''
wht=2
if wht==0:
print("wht is",wht)
else:
print("whtsdsb")
#今天也完成了100波比跳
wei=float(input("wei="))
hei=float(input("hei="))
bmi=(wei*0.45259227)/((hei*0.0254)**2)
print("BMI=",bmi)
if bmi<18.5:
print("too light")
elif bmi<25:
print("normal")
elif bmi<30:
print("over")
else:
print("wanghantangshidssb")
|
flexible
|
{
"blob_id": "48d0bfdc607a4605ef82f5c7dc7fd6fc85c4255f",
"index": 377,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif wht == 0:\n print('wht is', wht)\nelse:\n print('whtsdsb')\n<mask token>\nprint('BMI=', bmi)\nif bmi < 18.5:\n print('too light')\nelif bmi < 25:\n print('normal')\nelif bmi < 30:\n print('over')\nelse:\n print('wanghantangshidssb')\n",
"step-3": "<mask token>\nwht = 2\nif wht == 0:\n print('wht is', wht)\nelse:\n print('whtsdsb')\nwei = float(input('wei='))\nhei = float(input('hei='))\nbmi = wei * 0.45259227 / (hei * 0.0254) ** 2\nprint('BMI=', bmi)\nif bmi < 18.5:\n print('too light')\nelif bmi < 25:\n print('normal')\nelif bmi < 30:\n print('over')\nelse:\n print('wanghantangshidssb')\n",
"step-4": "\n'''\nBMI=weight*0.45259227/(hei*0.0254)**\n'''\nwht=2\nif wht==0:\n print(\"wht is\",wht)\nelse:\n print(\"whtsdsb\")\n#今天也完成了100波比跳\nwei=float(input(\"wei=\"))\nhei=float(input(\"hei=\"))\nbmi=(wei*0.45259227)/((hei*0.0254)**2)\nprint(\"BMI=\",bmi)\nif bmi<18.5:\n print(\"too light\")\nelif bmi<25:\n print(\"normal\")\nelif bmi<30:\n print(\"over\")\nelse:\n print(\"wanghantangshidssb\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from rest_framework import permissions
class AdminUrlUserPermission(permissions.BasePermission):
def has_permission(self, request, view):
return (request.user.is_authenticated
and (request.user.role == 'admin'
or request.user.is_superuser))
def has_object_permission(self, request, view, obj):
return (request.user.role == 'admin'
or request.user.is_superuser)
class ReadOnly(permissions.BasePermission):
def has_permission(self, request, view):
return request.method in permissions.SAFE_METHODS
class AuthorModeratorAdminOrReadOnly(permissions.BasePermission):
def has_permission(self, request, view):
is_safe = request.method in permissions.SAFE_METHODS
is_auth = request.user.is_authenticated
return is_safe or is_auth
def has_object_permission(self, request, view, obj):
is_safe = request.method in permissions.SAFE_METHODS
is_author = obj.author == request.user
is_privileged = None
if request.user.is_authenticated:
is_privileged = request.user.role in ('moderator', 'admin')
return is_author or is_safe or is_privileged
|
normal
|
{
"blob_id": "4549f26cf8051535f9d3486d111fc7afe7514dea",
"index": 5674,
"step-1": "<mask token>\n\n\nclass AdminUrlUserPermission(permissions.BasePermission):\n <mask token>\n <mask token>\n\n\nclass ReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.method in permissions.SAFE_METHODS\n\n\nclass AuthorModeratorAdminOrReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n is_safe = request.method in permissions.SAFE_METHODS\n is_auth = request.user.is_authenticated\n return is_safe or is_auth\n\n def has_object_permission(self, request, view, obj):\n is_safe = request.method in permissions.SAFE_METHODS\n is_author = obj.author == request.user\n is_privileged = None\n if request.user.is_authenticated:\n is_privileged = request.user.role in ('moderator', 'admin')\n return is_author or is_safe or is_privileged\n",
"step-2": "<mask token>\n\n\nclass AdminUrlUserPermission(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.user.is_authenticated and (request.user.role ==\n 'admin' or request.user.is_superuser)\n <mask token>\n\n\nclass ReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.method in permissions.SAFE_METHODS\n\n\nclass AuthorModeratorAdminOrReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n is_safe = request.method in permissions.SAFE_METHODS\n is_auth = request.user.is_authenticated\n return is_safe or is_auth\n\n def has_object_permission(self, request, view, obj):\n is_safe = request.method in permissions.SAFE_METHODS\n is_author = obj.author == request.user\n is_privileged = None\n if request.user.is_authenticated:\n is_privileged = request.user.role in ('moderator', 'admin')\n return is_author or is_safe or is_privileged\n",
"step-3": "<mask token>\n\n\nclass AdminUrlUserPermission(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.user.is_authenticated and (request.user.role ==\n 'admin' or request.user.is_superuser)\n\n def has_object_permission(self, request, view, obj):\n return request.user.role == 'admin' or request.user.is_superuser\n\n\nclass ReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.method in permissions.SAFE_METHODS\n\n\nclass AuthorModeratorAdminOrReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n is_safe = request.method in permissions.SAFE_METHODS\n is_auth = request.user.is_authenticated\n return is_safe or is_auth\n\n def has_object_permission(self, request, view, obj):\n is_safe = request.method in permissions.SAFE_METHODS\n is_author = obj.author == request.user\n is_privileged = None\n if request.user.is_authenticated:\n is_privileged = request.user.role in ('moderator', 'admin')\n return is_author or is_safe or is_privileged\n",
"step-4": "from rest_framework import permissions\n\n\nclass AdminUrlUserPermission(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.user.is_authenticated and (request.user.role ==\n 'admin' or request.user.is_superuser)\n\n def has_object_permission(self, request, view, obj):\n return request.user.role == 'admin' or request.user.is_superuser\n\n\nclass ReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n return request.method in permissions.SAFE_METHODS\n\n\nclass AuthorModeratorAdminOrReadOnly(permissions.BasePermission):\n\n def has_permission(self, request, view):\n is_safe = request.method in permissions.SAFE_METHODS\n is_auth = request.user.is_authenticated\n return is_safe or is_auth\n\n def has_object_permission(self, request, view, obj):\n is_safe = request.method in permissions.SAFE_METHODS\n is_author = obj.author == request.user\n is_privileged = None\n if request.user.is_authenticated:\n is_privileged = request.user.role in ('moderator', 'admin')\n return is_author or is_safe or is_privileged\n",
"step-5": "from rest_framework import permissions\n\n\nclass AdminUrlUserPermission(permissions.BasePermission):\n def has_permission(self, request, view):\n return (request.user.is_authenticated\n and (request.user.role == 'admin'\n or request.user.is_superuser))\n\n def has_object_permission(self, request, view, obj):\n return (request.user.role == 'admin'\n or request.user.is_superuser)\n\n\nclass ReadOnly(permissions.BasePermission):\n def has_permission(self, request, view):\n return request.method in permissions.SAFE_METHODS\n\n\nclass AuthorModeratorAdminOrReadOnly(permissions.BasePermission):\n def has_permission(self, request, view):\n is_safe = request.method in permissions.SAFE_METHODS\n is_auth = request.user.is_authenticated\n return is_safe or is_auth\n\n def has_object_permission(self, request, view, obj):\n is_safe = request.method in permissions.SAFE_METHODS\n is_author = obj.author == request.user\n is_privileged = None\n if request.user.is_authenticated:\n is_privileged = request.user.role in ('moderator', 'admin')\n return is_author or is_safe or is_privileged\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
#
# @lc app=leetcode id=14 lang=python3
#
# [14] Longest Common Prefix
#
# @lc code=start
class Solution:
def longestCommonPrefix(self, strs: List[str]) -> str:
pass
# At the moment I just wanna test my workspace so it's working tomorrow it's time for the problems
# @lc code=end
|
normal
|
{
"blob_id": "401c6b09edf593e00aecf5bbb1b2201effc9e78c",
"index": 7384,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def longestCommonPrefix(self, strs: List[str]) ->str:\n pass\n",
"step-4": "#\n# @lc app=leetcode id=14 lang=python3\n#\n# [14] Longest Common Prefix\n#\n\n# @lc code=start\nclass Solution:\n def longestCommonPrefix(self, strs: List[str]) -> str:\n pass\n # At the moment I just wanna test my workspace so it's working tomorrow it's time for the problems\n \n# @lc code=end\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import hashlib
import os
def fileMD(self):
salt_ = os.urandom(32).hex()
hash_object = hashlib.md5()
hash_object.update(('%s%s' % (salt_, self.theFile)).encode('utf-8'))
print("MD5 Hash: "+hash_object.hexdigest())
|
normal
|
{
"blob_id": "bc9718fa57046888961d1b5245abefa8f752e983",
"index": 8103,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef fileMD(self):\n salt_ = os.urandom(32).hex()\n hash_object = hashlib.md5()\n hash_object.update(('%s%s' % (salt_, self.theFile)).encode('utf-8'))\n print('MD5 Hash: ' + hash_object.hexdigest())\n",
"step-3": "import hashlib\nimport os\n\n\ndef fileMD(self):\n salt_ = os.urandom(32).hex()\n hash_object = hashlib.md5()\n hash_object.update(('%s%s' % (salt_, self.theFile)).encode('utf-8'))\n print('MD5 Hash: ' + hash_object.hexdigest())\n",
"step-4": "import hashlib\nimport os\n\n\ndef fileMD(self):\n salt_ = os.urandom(32).hex()\n hash_object = hashlib.md5()\n hash_object.update(('%s%s' % (salt_, self.theFile)).encode('utf-8'))\n print(\"MD5 Hash: \"+hash_object.hexdigest())",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def get_cat_fact():
myFacts = [
'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'
,
'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'
,
'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'
, 'Tylenol and chocolate are both poisionous to cats.',
'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'
,
'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'
,
'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'
,
'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'
, 'A domestic cat can run at speeds of 30 mph.',
'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'
,
'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. Humans only have 34.'
,
'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'
,
'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'
, 'Cats see six times better in the dark and at night than humans.',
"The cat's tail is used to maintain balance.",
'Cats have 300 million neurons; dogs have about 160 million',
'Both humans and cats have identical regions in the brain responsible for emotion.'
,
'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'
,
"An adult lion's roar can be heard up to five miles (eight kilometers) away."
,
'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'
,
'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'
,
"A cat's normal temperature varies around 101 degrees Fahrenheit.",
'Unlike other cats, lions have a tuft of hair at the end of their tails.'
,
'Cats don’t have sweat glands over their bodies like humans do. Instead, they sweat only through their paws.'
, 'The average cat food meal is the equivalent to about five mice.',
'The first official cat show in the UK was organised at Crystal Palace in 1871.'
,
'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'
]
fact = myFacts[random.ranint(0, len(myFacts) - 1)]
return fact
@ask.launch
def start_skill():
welcome_message = 'Hello there, would you like to hear a cat fact?'
return question(welcome_message)
<|reserved_special_token_0|>
@ask.intent('NoIntent')
def no_intent():
bye_text = 'Ok! Have a wonderful day!'
return statement(bye_text)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_cat_fact():
myFacts = [
'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'
,
'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'
,
'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'
, 'Tylenol and chocolate are both poisionous to cats.',
'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'
,
'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'
,
'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'
,
'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'
, 'A domestic cat can run at speeds of 30 mph.',
'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'
,
'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. Humans only have 34.'
,
'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'
,
'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'
, 'Cats see six times better in the dark and at night than humans.',
"The cat's tail is used to maintain balance.",
'Cats have 300 million neurons; dogs have about 160 million',
'Both humans and cats have identical regions in the brain responsible for emotion.'
,
'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'
,
"An adult lion's roar can be heard up to five miles (eight kilometers) away."
,
'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'
,
'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'
,
"A cat's normal temperature varies around 101 degrees Fahrenheit.",
'Unlike other cats, lions have a tuft of hair at the end of their tails.'
,
'Cats don’t have sweat glands over their bodies like humans do. Instead, they sweat only through their paws.'
, 'The average cat food meal is the equivalent to about five mice.',
'The first official cat show in the UK was organised at Crystal Palace in 1871.'
,
'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'
]
fact = myFacts[random.ranint(0, len(myFacts) - 1)]
return fact
@ask.launch
def start_skill():
welcome_message = 'Hello there, would you like to hear a cat fact?'
return question(welcome_message)
@ask.intent('YesIntent')
def share_headlines():
fact = get_cat_fact()
cat_fact = 'Did you know, ' + fact
return statement(cat_fact)
@ask.intent('NoIntent')
def no_intent():
bye_text = 'Ok! Have a wonderful day!'
return statement(bye_text)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_cat_fact():
myFacts = [
'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'
,
'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'
,
'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'
, 'Tylenol and chocolate are both poisionous to cats.',
'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'
,
'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'
,
'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'
,
'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'
, 'A domestic cat can run at speeds of 30 mph.',
'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'
,
'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. Humans only have 34.'
,
'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'
,
'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'
, 'Cats see six times better in the dark and at night than humans.',
"The cat's tail is used to maintain balance.",
'Cats have 300 million neurons; dogs have about 160 million',
'Both humans and cats have identical regions in the brain responsible for emotion.'
,
'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'
,
"An adult lion's roar can be heard up to five miles (eight kilometers) away."
,
'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'
,
'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'
,
"A cat's normal temperature varies around 101 degrees Fahrenheit.",
'Unlike other cats, lions have a tuft of hair at the end of their tails.'
,
'Cats don’t have sweat glands over their bodies like humans do. Instead, they sweat only through their paws.'
, 'The average cat food meal is the equivalent to about five mice.',
'The first official cat show in the UK was organised at Crystal Palace in 1871.'
,
'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'
]
fact = myFacts[random.ranint(0, len(myFacts) - 1)]
return fact
@ask.launch
def start_skill():
welcome_message = 'Hello there, would you like to hear a cat fact?'
return question(welcome_message)
@ask.intent('YesIntent')
def share_headlines():
fact = get_cat_fact()
cat_fact = 'Did you know, ' + fact
return statement(cat_fact)
@ask.intent('NoIntent')
def no_intent():
bye_text = 'Ok! Have a wonderful day!'
return statement(bye_text)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
from flask import Flask
from flask_ask import Ask, statement, question, session
import random
app = Flask(__name__)
ask = Ask(app, '/')
def get_cat_fact():
    """Return a single cat fact chosen uniformly at random.

    Returns:
        str: one fact from the hard-coded list below.
    """
    myFacts = [
        'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.',
        'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.',
        'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.',
        'Tylenol and chocolate are both poisionous to cats.',
        'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.',
        'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.',
        'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.',
        'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.',
        'A domestic cat can run at speeds of 30 mph.',
        'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.',
        'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. Humans only have 34.',
        'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.',
        'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more',
        'Cats see six times better in the dark and at night than humans.',
        "The cat's tail is used to maintain balance.",
        'Cats have 300 million neurons; dogs have about 160 million',
        'Both humans and cats have identical regions in the brain responsible for emotion.',
        'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.',
        "An adult lion's roar can be heard up to five miles (eight kilometers) away.",
        'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.',
        'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).',
        "A cat's normal temperature varies around 101 degrees Fahrenheit.",
        'Unlike other cats, lions have a tuft of hair at the end of their tails.',
        'Cats don’t have sweat glands over their bodies like humans do. Instead, they sweat only through their paws.',
        'The average cat food meal is the equivalent to about five mice.',
        'The first official cat show in the UK was organised at Crystal Palace in 1871.',
        'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.',
    ]
    # BUG FIX: the original indexed with random.ranint(...), which does not
    # exist (the stdlib name is random.randint) and raised AttributeError on
    # every call.  random.choice is the idiomatic replacement.
    return random.choice(myFacts)
@ask.launch
def start_skill():
    """Launch handler: open the session with a yes/no prompt."""
    prompt = 'Hello there, would you like to hear a cat fact?'
    return question(prompt)
@ask.intent('YesIntent')
def share_headlines():
    """'Yes' handler: speak one random cat fact and end the session."""
    return statement('Did you know, ' + get_cat_fact())
@ask.intent('NoIntent')
def no_intent():
    """'No' handler: say goodbye and end the session."""
    farewell = 'Ok! Have a wonderful day!'
    return statement(farewell)
# Start the Flask development server when run directly (debug for local use).
if __name__ == '__main__':
    app.run(debug=True)
<|reserved_special_token_1|>
from flask import Flask
from flask_ask import Ask, statement, question, session
# import json, requests
import random
# Flask application plus the Flask-Ask extension that routes Alexa requests to "/".
app = Flask(__name__)
ask = Ask(app, "/")
def get_cat_fact():
    """Return a single cat fact chosen uniformly at random.

    Returns:
        str: one fact from the hard-coded list below.
    """
    myFacts = [
        "Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.",
        "The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.",
        "A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.",
        "Tylenol and chocolate are both poisionous to cats.",
        "Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.",
        "It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.",
        "A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.",
        "Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.",
        "A domestic cat can run at speeds of 30 mph.",
        "Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.",
        "A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. Humans only have 34.",
        "The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.",
        "Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more",
        "Cats see six times better in the dark and at night than humans.",
        "The cat's tail is used to maintain balance.",
        "Cats have 300 million neurons; dogs have about 160 million",
        "Both humans and cats have identical regions in the brain responsible for emotion.",
        "The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.",
        "An adult lion's roar can be heard up to five miles (eight kilometers) away.",
        "You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.",
        "The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).",
        "A cat's normal temperature varies around 101 degrees Fahrenheit.",
        "Unlike other cats, lions have a tuft of hair at the end of their tails.",
        "Cats don’t have sweat glands over their bodies like humans do. Instead, they sweat only through their paws.",
        "The average cat food meal is the equivalent to about five mice.",
        "The first official cat show in the UK was organised at Crystal Palace in 1871.",
        "In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.",
    ]
    # BUG FIX: random.ranint does not exist (it raised AttributeError at
    # runtime); random.randint is the correct stdlib name.
    fact = myFacts[random.randint(0, len(myFacts) - 1)]
    return fact
@ask.launch
def start_skill():
    """Session launch: greet the user and ask whether they want a cat fact."""
    return question('Hello there, would you like to hear a cat fact?')
@ask.intent("YesIntent")
def share_headlines():
    """Yes-intent handler: deliver one random cat fact as the final response."""
    spoken_fact = 'Did you know, ' + get_cat_fact()
    return statement(spoken_fact)
@ask.intent("NoIntent")
def no_intent():
    """No-intent handler: respond with a goodbye and close the session."""
    return statement('Ok! Have a wonderful day!')
# Start the Flask development server when run directly (debug for local use).
if __name__ == '__main__':
    app.run(debug=True)
|
flexible
|
{
"blob_id": "77971b088a7e076e3bf6d7aa320981a50e7756ce",
"index": 429,
"step-1": "<mask token>\n\n\ndef get_cat_fact():\n myFacts = [\n 'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'\n ,\n 'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'\n ,\n 'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'\n , 'Tylenol and chocolate are both poisionous to cats.',\n 'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'\n ,\n 'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'\n ,\n 'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'\n ,\n 'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'\n , 'A domestic cat can run at speeds of 30 mph.',\n 'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'\n ,\n 'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. 
Humans only have 34.'\n ,\n 'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'\n ,\n 'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'\n , 'Cats see six times better in the dark and at night than humans.',\n \"The cat's tail is used to maintain balance.\",\n 'Cats have 300 million neurons; dogs have about 160 million',\n 'Both humans and cats have identical regions in the brain responsible for emotion.'\n ,\n 'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'\n ,\n \"An adult lion's roar can be heard up to five miles (eight kilometers) away.\"\n ,\n 'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'\n ,\n 'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'\n ,\n \"A cat's normal temperature varies around 101 degrees Fahrenheit.\",\n 'Unlike other cats, lions have a tuft of hair at the end of their tails.'\n ,\n 'Cats don’t have sweat glands over their bodies like humans do. Instead, they sweat only through their paws.'\n , 'The average cat food meal is the equivalent to about five mice.',\n 'The first official cat show in the UK was organised at Crystal Palace in 1871.'\n ,\n 'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'\n ]\n fact = myFacts[random.ranint(0, len(myFacts) - 1)]\n return fact\n\n\n@ask.launch\ndef start_skill():\n welcome_message = 'Hello there, would you like to hear a cat fact?'\n return question(welcome_message)\n\n\n<mask token>\n\n\n@ask.intent('NoIntent')\ndef no_intent():\n bye_text = 'Ok! 
Have a wonderful day!'\n return statement(bye_text)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_cat_fact():\n myFacts = [\n 'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'\n ,\n 'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'\n ,\n 'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'\n , 'Tylenol and chocolate are both poisionous to cats.',\n 'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'\n ,\n 'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'\n ,\n 'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'\n ,\n 'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'\n , 'A domestic cat can run at speeds of 30 mph.',\n 'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'\n ,\n 'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. 
Humans only have 34.'\n ,\n 'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'\n ,\n 'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'\n , 'Cats see six times better in the dark and at night than humans.',\n \"The cat's tail is used to maintain balance.\",\n 'Cats have 300 million neurons; dogs have about 160 million',\n 'Both humans and cats have identical regions in the brain responsible for emotion.'\n ,\n 'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'\n ,\n \"An adult lion's roar can be heard up to five miles (eight kilometers) away.\"\n ,\n 'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'\n ,\n 'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'\n ,\n \"A cat's normal temperature varies around 101 degrees Fahrenheit.\",\n 'Unlike other cats, lions have a tuft of hair at the end of their tails.'\n ,\n 'Cats don’t have sweat glands over their bodies like humans do. 
Instead, they sweat only through their paws.'\n , 'The average cat food meal is the equivalent to about five mice.',\n 'The first official cat show in the UK was organised at Crystal Palace in 1871.'\n ,\n 'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'\n ]\n fact = myFacts[random.ranint(0, len(myFacts) - 1)]\n return fact\n\n\n@ask.launch\ndef start_skill():\n welcome_message = 'Hello there, would you like to hear a cat fact?'\n return question(welcome_message)\n\n\n@ask.intent('YesIntent')\ndef share_headlines():\n fact = get_cat_fact()\n cat_fact = 'Did you know, ' + fact\n return statement(cat_fact)\n\n\n@ask.intent('NoIntent')\ndef no_intent():\n bye_text = 'Ok! Have a wonderful day!'\n return statement(bye_text)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_cat_fact():\n myFacts = [\n 'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'\n ,\n 'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'\n ,\n 'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'\n , 'Tylenol and chocolate are both poisionous to cats.',\n 'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'\n ,\n 'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'\n ,\n 'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'\n ,\n 'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'\n , 'A domestic cat can run at speeds of 30 mph.',\n 'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'\n ,\n 'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. 
Humans only have 34.'\n ,\n 'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'\n ,\n 'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'\n , 'Cats see six times better in the dark and at night than humans.',\n \"The cat's tail is used to maintain balance.\",\n 'Cats have 300 million neurons; dogs have about 160 million',\n 'Both humans and cats have identical regions in the brain responsible for emotion.'\n ,\n 'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'\n ,\n \"An adult lion's roar can be heard up to five miles (eight kilometers) away.\"\n ,\n 'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'\n ,\n 'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'\n ,\n \"A cat's normal temperature varies around 101 degrees Fahrenheit.\",\n 'Unlike other cats, lions have a tuft of hair at the end of their tails.'\n ,\n 'Cats don’t have sweat glands over their bodies like humans do. 
Instead, they sweat only through their paws.'\n , 'The average cat food meal is the equivalent to about five mice.',\n 'The first official cat show in the UK was organised at Crystal Palace in 1871.'\n ,\n 'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'\n ]\n fact = myFacts[random.ranint(0, len(myFacts) - 1)]\n return fact\n\n\n@ask.launch\ndef start_skill():\n welcome_message = 'Hello there, would you like to hear a cat fact?'\n return question(welcome_message)\n\n\n@ask.intent('YesIntent')\ndef share_headlines():\n fact = get_cat_fact()\n cat_fact = 'Did you know, ' + fact\n return statement(cat_fact)\n\n\n@ask.intent('NoIntent')\ndef no_intent():\n bye_text = 'Ok! Have a wonderful day!'\n return statement(bye_text)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "from flask import Flask\nfrom flask_ask import Ask, statement, question, session\nimport random\napp = Flask(__name__)\nask = Ask(app, '/')\n\n\ndef get_cat_fact():\n myFacts = [\n 'Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.'\n ,\n 'The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.'\n ,\n 'A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.'\n , 'Tylenol and chocolate are both poisionous to cats.',\n 'Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.'\n ,\n 'It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.'\n ,\n 'A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.'\n ,\n 'Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.'\n , 'A domestic cat can run at speeds of 30 mph.',\n 'Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.'\n ,\n 'A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. 
Humans only have 34.'\n ,\n 'The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.'\n ,\n 'Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more'\n , 'Cats see six times better in the dark and at night than humans.',\n \"The cat's tail is used to maintain balance.\",\n 'Cats have 300 million neurons; dogs have about 160 million',\n 'Both humans and cats have identical regions in the brain responsible for emotion.'\n ,\n 'The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.'\n ,\n \"An adult lion's roar can be heard up to five miles (eight kilometers) away.\"\n ,\n 'You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.'\n ,\n 'The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).'\n ,\n \"A cat's normal temperature varies around 101 degrees Fahrenheit.\",\n 'Unlike other cats, lions have a tuft of hair at the end of their tails.'\n ,\n 'Cats don’t have sweat glands over their bodies like humans do. 
Instead, they sweat only through their paws.'\n , 'The average cat food meal is the equivalent to about five mice.',\n 'The first official cat show in the UK was organised at Crystal Palace in 1871.'\n ,\n 'In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.'\n ]\n fact = myFacts[random.ranint(0, len(myFacts) - 1)]\n return fact\n\n\n@ask.launch\ndef start_skill():\n welcome_message = 'Hello there, would you like to hear a cat fact?'\n return question(welcome_message)\n\n\n@ask.intent('YesIntent')\ndef share_headlines():\n fact = get_cat_fact()\n cat_fact = 'Did you know, ' + fact\n return statement(cat_fact)\n\n\n@ask.intent('NoIntent')\ndef no_intent():\n bye_text = 'Ok! Have a wonderful day!'\n return statement(bye_text)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "from flask import Flask\nfrom flask_ask import Ask, statement, question, session\n# import json, requests\nimport random\n\n\napp = Flask(__name__)\nask = Ask(app, \"/\")\n\ndef get_cat_fact():\n myFacts = [\n \"Cats should not be fed tuna exclusively, as it lacks taurine, an essential nutrient required for good feline health. Make sure you have the proper Pet supplies to keep your cat happy and healthy.\",\n \"The strongest climber among the big cats, a leopard can carry prey twice its weight up a tree.\",\n \"A cat’s hearing is better than a dog’s. A cat can hear high-frequency sounds up to two octaves higher than a human.\",\n \"Tylenol and chocolate are both poisionous to cats.\",\n \"Cats have 30 teeth (12 incisors, 10 premolars, 4 canines, and 4 molars), while dogs have 42. Kittens have baby teeth, which are replaced by permanent teeth around the age of 7 months.\",\n \"It has been scientifically proven that owning cats is good for our health and can decrease the occurrence of high blood pressure and other illnesses.\",\n \"A cat can’t climb head first down a tree because every claw on a cat’s paw points the same way. To get down from a tree, a cat must back down.\",\n \"Cats are subject to gum disease and to dental caries. They should have their teeth cleaned by the vet or the cat dentist once a year.\",\n \"A domestic cat can run at speeds of 30 mph.\",\n \"Cat families usually play best in even numbers. Cats and kittens should be aquired in pairs whenever possible.\",\n \"A cat’s back is extremely flexible because it has up to 53 loosely fitting vertebrae. 
Humans only have 34.\",\n \"The claws on the cat’s back paws aren’t as sharp as the claws on the front paws because the claws in the back don’t retract and, consequently, become worn.\",\n \"Cat paws act as tempetature regulators, shock absorbers, hunting and grooming tools, sensors, and more\",\n \"Cats see six times better in the dark and at night than humans.\",\n \"The cat's tail is used to maintain balance.\",\n \"Cats have 300 million neurons; dogs have about 160 million\",\n \"Both humans and cats have identical regions in the brain responsible for emotion.\",\n \"The lightest cat on record is a blue point Himalayan called Tinker Toy, who weighed 1 pound, 6 ounces (616 g). Tinker Toy was 2.75 inches (7 cm) tall and 7.5 inches (19 cm) long.\",\n \"An adult lion's roar can be heard up to five miles (eight kilometers) away.\",\n \"You check your cats pulse on the inside of the back thigh, where the leg joins to the body. Normal for cats: 110-170 beats per minute.\",\n \"The largest cat breed is the Ragdoll. Male Ragdolls weigh between 12 and 20 lbs (5.4-9.0 k). Females weigh between 10 and 15 lbs (4.5-6.8 k).\",\n \"A cat's normal temperature varies around 101 degrees Fahrenheit.\",\n \"Unlike other cats, lions have a tuft of hair at the end of their tails.\",\n \"Cats don’t have sweat glands over their bodies like humans do. 
Instead, they sweat only through their paws.\",\n \"The average cat food meal is the equivalent to about five mice.\",\n \"The first official cat show in the UK was organised at Crystal Palace in 1871.\",\n \"In just seven years, a single pair of cats and their offspring could produce a staggering total of 420,000 kittens.\"\n ]\n fact = myFacts[random.ranint(0,len(myFacts)-1)]\n return fact\n\n\n@ask.launch\ndef start_skill():\n welcome_message = 'Hello there, would you like to hear a cat fact?'\n return question(welcome_message)\n\n@ask.intent(\"YesIntent\")\ndef share_headlines():\n fact = get_cat_fact()\n cat_fact = 'Did you know, ' + fact\n return statement(cat_fact)\n\n@ask.intent(\"NoIntent\")\ndef no_intent():\n bye_text = 'Ok! Have a wonderful day!'\n return statement(bye_text)\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-ids": [
3,
4,
5,
7,
8
]
}
|
[
3,
4,
5,
7,
8
] |
<|reserved_special_token_0|>
def eval_genome(genome, config):
    """Play one game of Atari Pong with a NEAT genome and return its fitness.

    The genome is turned into a feed-forward network that is fed a
    downsampled grayscale frame each step; the network output selects one
    of the two paddle actions.

    Args:
        genome: NEAT genome to evaluate.
        config: neat.Config used to build the phenotype network.

    Returns:
        float: fitness derived from the final scores and elapsed wall time.
    """
    net = neat.nn.FeedForwardNetwork.create(genome, config)
    env.reset()
    # Take one random action just to obtain an initial frame (and its shape).
    ob, _, _, _ = env.step(env.action_space.sample())
    # Downsample each frame dimension by 8x to shrink the network input.
    inx = int(ob.shape[0] / 8)
    iny = int(ob.shape[1] / 8)
    # (Removed dead locals from the original: `fitnesses = []` was never
    # used and `fitness = 0.0` was unconditionally overwritten below.)
    score1 = 0
    score2 = 0
    done = False
    start_time = time.time()
    series_of_keys = []
    series_of_nnOut = []
    while not done:
        env.render()
        ob = cv2.resize(ob, (inx, iny))
        ob = cv2.cvtColor(ob, cv2.COLOR_BGR2GRAY)
        ob = np.reshape(ob, (inx, iny))
        imgarray = np.ndarray.flatten(ob)
        # Rescale pixel intensities from [0, 254] into [-1, +1] for the net.
        imgarray = np.interp(imgarray, (0, 254), (-1, +1))
        nnOut = net.activate(imgarray)
        # NOTE(review): with more than one output node only the last one
        # decides the action; this assumes a single-output network.
        for o in nnOut:
            if o > 0.0:
                keys = [1, 0]
            else:
                keys = [0, 1]
        # Button layout: 4 unused slots, the two paddle buttons, 2 unused.
        actions = [0] * 4 + keys + [0] * 2
        series_of_keys.append(keys)
        series_of_nnOut.append(nnOut)
        ob, rew, done, info = env.step(actions)
        score1 = info['score1']
        score2 = info['score2']
        # End the episode early once either side reaches 20 points.
        if score1 > 19 or score2 > 19:
            done = True
    print(series_of_keys)
    run_time = time.time() - start_time
    # NOTE(review): this evaluates as score2 - (score1 / (run_time - 2)).
    # If the intent was to scale the score *difference* by time it should be
    # (score2 - score1) / (run_time - 2) -- confirm before changing.
    fitness = score2 - score1 / (run_time - 2)
    return fitness
def eval_genomes(genomes, config):
    """Sequentially assign a fitness to every (id, genome) pair."""
    for _genome_id, g in genomes:
        g.fitness = eval_genome(g, config)
def run():
    """Train a NEAT population on Pong, save the winner, and plot results.

    Reads the NEAT configuration from 'pong_config' next to this file,
    evolves the population with a 10-worker parallel evaluator, pickles the
    best genome to 'winner-feedforward', and renders statistics/network
    diagrams via the helper `visualize` module.
    """
    # Resolve the config file relative to this script, not the CWD.
    local_dir = os.path.dirname(__file__)
    config_path = os.path.join(local_dir, 'pong_config')
    config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat
        .DefaultSpeciesSet, neat.DefaultStagnation, config_path)
    pop = neat.Population(config)
    # Collect per-generation statistics and mirror progress to stdout.
    stats = neat.StatisticsReporter()
    pop.add_reporter(stats)
    pop.add_reporter(neat.StdOutReporter(True))
    # Evaluate genomes in 10 worker processes using eval_genome.
    pe = neat.ParallelEvaluator(10, eval_genome)
    winner = pop.run(pe.evaluate)
    # Persist the best genome for later replay.
    with open('winner-feedforward', 'wb') as f:
        pickle.dump(winner, f)
    print(winner)
    # NOTE(review): `visualize` is not among the imports visible in this
    # file -- confirm the helper module is importable at runtime.
    visualize.plot_stats(stats, ylog=True, view=True, filename=
        'feedforward-fitness.svg')
    visualize.plot_species(stats, view=True, filename=
        'feedforward-speciation.svg')
    # NOTE(review): these labels (x, dx, theta, dtheta) look like cart-pole
    # names rather than Pong's pixel inputs -- likely copied from an example.
    node_names = {(-1): 'x', (-2): 'dx', (-3): 'theta', (-4): 'dtheta', (0):
        'control'}
    visualize.draw_net(config, winner, True, node_names=node_names)
    visualize.draw_net(config, winner, view=True, node_names=node_names,
        filename='winner-feedforward.gv')
    visualize.draw_net(config, winner, view=True, node_names=node_names,
        filename='winner-feedforward-enabled.gv', show_disabled=False)
    visualize.draw_net(config, winner, view=True, node_names=node_names,
        filename='winner-feedforward-enabled-pruned.gv', show_disabled=
        False, prune_unused=True)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def eval_genome(genome, config):
    """Play one game of Atari Pong with a NEAT genome and return its fitness.

    The genome is turned into a feed-forward network that is fed a
    downsampled grayscale frame each step; the network output selects one
    of the two paddle actions.

    Args:
        genome: NEAT genome to evaluate.
        config: neat.Config used to build the phenotype network.

    Returns:
        float: fitness derived from the final scores and elapsed wall time.
    """
    net = neat.nn.FeedForwardNetwork.create(genome, config)
    env.reset()
    # Take one random action just to obtain an initial frame (and its shape).
    ob, _, _, _ = env.step(env.action_space.sample())
    # Downsample each frame dimension by 8x to shrink the network input.
    inx = int(ob.shape[0] / 8)
    iny = int(ob.shape[1] / 8)
    # (Removed dead locals from the original: `fitnesses = []` was never
    # used and `fitness = 0.0` was unconditionally overwritten below.)
    score1 = 0
    score2 = 0
    done = False
    start_time = time.time()
    series_of_keys = []
    series_of_nnOut = []
    while not done:
        env.render()
        ob = cv2.resize(ob, (inx, iny))
        ob = cv2.cvtColor(ob, cv2.COLOR_BGR2GRAY)
        ob = np.reshape(ob, (inx, iny))
        imgarray = np.ndarray.flatten(ob)
        # Rescale pixel intensities from [0, 254] into [-1, +1] for the net.
        imgarray = np.interp(imgarray, (0, 254), (-1, +1))
        nnOut = net.activate(imgarray)
        # NOTE(review): with more than one output node only the last one
        # decides the action; this assumes a single-output network.
        for o in nnOut:
            if o > 0.0:
                keys = [1, 0]
            else:
                keys = [0, 1]
        # Button layout: 4 unused slots, the two paddle buttons, 2 unused.
        actions = [0] * 4 + keys + [0] * 2
        series_of_keys.append(keys)
        series_of_nnOut.append(nnOut)
        ob, rew, done, info = env.step(actions)
        score1 = info['score1']
        score2 = info['score2']
        # End the episode early once either side reaches 20 points.
        if score1 > 19 or score2 > 19:
            done = True
    print(series_of_keys)
    run_time = time.time() - start_time
    # NOTE(review): this evaluates as score2 - (score1 / (run_time - 2)).
    # If the intent was to scale the score *difference* by time it should be
    # (score2 - score1) / (run_time - 2) -- confirm before changing.
    fitness = score2 - score1 / (run_time - 2)
    return fitness
def eval_genomes(genomes, config):
    """Sequentially assign a fitness to every (id, genome) pair."""
    for _genome_id, g in genomes:
        g.fitness = eval_genome(g, config)
def run():
    """Train a NEAT population on Pong, save the winner, and plot results.

    Reads the NEAT configuration from 'pong_config' next to this file,
    evolves the population with a 10-worker parallel evaluator, pickles the
    best genome to 'winner-feedforward', and renders statistics/network
    diagrams via the helper `visualize` module.
    """
    # Resolve the config file relative to this script, not the CWD.
    local_dir = os.path.dirname(__file__)
    config_path = os.path.join(local_dir, 'pong_config')
    config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat
        .DefaultSpeciesSet, neat.DefaultStagnation, config_path)
    pop = neat.Population(config)
    # Collect per-generation statistics and mirror progress to stdout.
    stats = neat.StatisticsReporter()
    pop.add_reporter(stats)
    pop.add_reporter(neat.StdOutReporter(True))
    # Evaluate genomes in 10 worker processes using eval_genome.
    pe = neat.ParallelEvaluator(10, eval_genome)
    winner = pop.run(pe.evaluate)
    # Persist the best genome for later replay.
    with open('winner-feedforward', 'wb') as f:
        pickle.dump(winner, f)
    print(winner)
    # NOTE(review): `visualize` is not among the imports visible in this
    # file -- confirm the helper module is importable at runtime.
    visualize.plot_stats(stats, ylog=True, view=True, filename=
        'feedforward-fitness.svg')
    visualize.plot_species(stats, view=True, filename=
        'feedforward-speciation.svg')
    # NOTE(review): these labels (x, dx, theta, dtheta) look like cart-pole
    # names rather than Pong's pixel inputs -- likely copied from an example.
    node_names = {(-1): 'x', (-2): 'dx', (-3): 'theta', (-4): 'dtheta', (0):
        'control'}
    visualize.draw_net(config, winner, True, node_names=node_names)
    visualize.draw_net(config, winner, view=True, node_names=node_names,
        filename='winner-feedforward.gv')
    visualize.draw_net(config, winner, view=True, node_names=node_names,
        filename='winner-feedforward-enabled.gv', show_disabled=False)
    visualize.draw_net(config, winner, view=True, node_names=node_names,
        filename='winner-feedforward-enabled-pruned.gv', show_disabled=
        False, prune_unused=True)
# Kick off NEAT training only when executed as a script.
if __name__ == '__main__':
    run()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Global Gym Retro environment for Atari 2600 Pong, shared by all evaluations.
env = retro.make(game='Pong-Atari2600')
def eval_genome(genome, config):
    """Play one game of Atari Pong with a NEAT genome and return its fitness.

    The genome is turned into a feed-forward network that is fed a
    downsampled grayscale frame each step; the network output selects one
    of the two paddle actions.

    Args:
        genome: NEAT genome to evaluate.
        config: neat.Config used to build the phenotype network.

    Returns:
        float: fitness derived from the final scores and elapsed wall time.
    """
    net = neat.nn.FeedForwardNetwork.create(genome, config)
    env.reset()
    # Take one random action just to obtain an initial frame (and its shape).
    ob, _, _, _ = env.step(env.action_space.sample())
    # Downsample each frame dimension by 8x to shrink the network input.
    inx = int(ob.shape[0] / 8)
    iny = int(ob.shape[1] / 8)
    # (Removed dead locals from the original: `fitnesses = []` was never
    # used and `fitness = 0.0` was unconditionally overwritten below.)
    score1 = 0
    score2 = 0
    done = False
    start_time = time.time()
    series_of_keys = []
    series_of_nnOut = []
    while not done:
        env.render()
        ob = cv2.resize(ob, (inx, iny))
        ob = cv2.cvtColor(ob, cv2.COLOR_BGR2GRAY)
        ob = np.reshape(ob, (inx, iny))
        imgarray = np.ndarray.flatten(ob)
        # Rescale pixel intensities from [0, 254] into [-1, +1] for the net.
        imgarray = np.interp(imgarray, (0, 254), (-1, +1))
        nnOut = net.activate(imgarray)
        # NOTE(review): with more than one output node only the last one
        # decides the action; this assumes a single-output network.
        for o in nnOut:
            if o > 0.0:
                keys = [1, 0]
            else:
                keys = [0, 1]
        # Button layout: 4 unused slots, the two paddle buttons, 2 unused.
        actions = [0] * 4 + keys + [0] * 2
        series_of_keys.append(keys)
        series_of_nnOut.append(nnOut)
        ob, rew, done, info = env.step(actions)
        score1 = info['score1']
        score2 = info['score2']
        # End the episode early once either side reaches 20 points.
        if score1 > 19 or score2 > 19:
            done = True
    print(series_of_keys)
    run_time = time.time() - start_time
    # NOTE(review): this evaluates as score2 - (score1 / (run_time - 2)).
    # If the intent was to scale the score *difference* by time it should be
    # (score2 - score1) / (run_time - 2) -- confirm before changing.
    fitness = score2 - score1 / (run_time - 2)
    return fitness
def eval_genomes(genomes, config):
    """Sequentially assign a fitness to every (id, genome) pair."""
    for _genome_id, g in genomes:
        g.fitness = eval_genome(g, config)
def run():
local_dir = os.path.dirname(__file__)
config_path = os.path.join(local_dir, 'pong_config')
config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat
.DefaultSpeciesSet, neat.DefaultStagnation, config_path)
pop = neat.Population(config)
stats = neat.StatisticsReporter()
pop.add_reporter(stats)
pop.add_reporter(neat.StdOutReporter(True))
pe = neat.ParallelEvaluator(10, eval_genome)
winner = pop.run(pe.evaluate)
with open('winner-feedforward', 'wb') as f:
pickle.dump(winner, f)
print(winner)
visualize.plot_stats(stats, ylog=True, view=True, filename=
'feedforward-fitness.svg')
visualize.plot_species(stats, view=True, filename=
'feedforward-speciation.svg')
node_names = {(-1): 'x', (-2): 'dx', (-3): 'theta', (-4): 'dtheta', (0):
'control'}
visualize.draw_net(config, winner, True, node_names=node_names)
visualize.draw_net(config, winner, view=True, node_names=node_names,
filename='winner-feedforward.gv')
visualize.draw_net(config, winner, view=True, node_names=node_names,
filename='winner-feedforward-enabled.gv', show_disabled=False)
visualize.draw_net(config, winner, view=True, node_names=node_names,
filename='winner-feedforward-enabled-pruned.gv', show_disabled=
False, prune_unused=True)
if __name__ == '__main__':
run()
<|reserved_special_token_1|>
import retro
import numpy as np
import neat
import pickle
import os
import multiprocessing
import cv2
import time
env = retro.make(game='Pong-Atari2600')
def eval_genome(genome, config):
    """Play one episode of Pong with the genome's network and return a fitness.

    Fitness is derived from the final score and the elapsed wall time; the
    episode ends as soon as either side passes 19 points.  Uses the
    module-level ``env`` (Pong-Atari2600), so calls must not share one
    environment across threads.
    """
    net = neat.nn.FeedForwardNetwork.create(genome, config)
    env.reset()
    # Take one random step just to obtain an initial observation frame.
    ob, _, _, _ = env.step(env.action_space.sample())
    # Downsample the frame 8x per dimension to shrink the network input.
    inx = int(ob.shape[0] / 8)
    iny = int(ob.shape[1] / 8)
    fitnesses = []  # NOTE(review): never used - leftover from an earlier version
    score1 = 0
    score2 = 0
    fitness = 0.0
    done = False
    start_time = time.time()
    series_of_keys = []   # history of chosen key pairs (debugging)
    series_of_nnOut = []  # history of raw network outputs (debugging)
    while not done:
        env.render()
        # Preprocess: shrink, grayscale, flatten, rescale pixels to [-1, 1].
        # NOTE(review): cv2.resize takes dsize as (width, height); with a
        # non-square Atari frame inx/iny look swapped here - the array is
        # flattened right after, so only the pixel ordering is affected.
        ob = cv2.resize(ob, (inx, iny))
        ob = cv2.cvtColor(ob, cv2.COLOR_BGR2GRAY)
        ob = np.reshape(ob, (inx, iny))
        imgarray = np.ndarray.flatten(ob)
        imgarray = np.interp(imgarray, (0, 254), (-1, +1))
        nnOut = net.activate(imgarray)
        # Map the network output to an up/down key pair; if the net had more
        # than one output only the last would decide (NOTE(review): confirm
        # the config declares a single output node).
        for o in nnOut:
            if o > 0.0:
                keys = [1, 0]
            else:
                keys = [0, 1]
        # Embed the key pair into the 8-slot action vector; slots 4-5
        # presumably map to up/down for Pong-Atari2600 - TODO confirm.
        actions = [0] * 4 + keys + [0] * 2
        series_of_keys.append(keys)
        series_of_nnOut.append(nnOut)
        ob, rew, done, info = env.step(actions)
        score1 = info['score1']
        score2 = info['score2']
        # Cut the episode short once either player is about to win.
        if score1 > 19 or score2 > 19:
            done = True
    print(series_of_keys)
    run_time = time.time() - start_time
    # NOTE(review): precedence makes this score2 - (score1 / (run_time - 2));
    # if (score2 - score1) / time was intended, parenthesize.  Also a
    # run_time near 2 seconds divides by ~0.
    fitness = score2 - score1 / (run_time - 2)
    return fitness
def eval_genomes(genomes, config):
    """Serial fitness evaluation: score every (id, genome) pair in place.

    Non-parallel counterpart of neat.ParallelEvaluator; run() uses the
    parallel path, so this is kept as a drop-in fallback.
    """
    for _genome_id, candidate in genomes:
        candidate.fitness = eval_genome(candidate, config)
def run():
    """Train a NEAT population on Pong and save/visualize the best genome.

    Reads 'pong_config' from this script's directory, evolves with 10
    parallel workers, pickles the winner to 'winner-feedforward', then
    renders stats/species/network plots.

    NOTE(review): ``visualize`` is never imported in this file, so every
    visualize.* call below raises NameError after training completes -
    import the neat-python examples' visualize module (or guard the calls).
    NOTE(review): the node_names labels (x/dx/theta/dtheta) look copied
    from the cart-pole example and do not match Pong's pixel inputs.
    """
    # Resolve the NEAT config relative to this script, not the CWD.
    local_dir = os.path.dirname(__file__)
    config_path = os.path.join(local_dir, 'pong_config')
    config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat
        .DefaultSpeciesSet, neat.DefaultStagnation, config_path)
    pop = neat.Population(config)
    stats = neat.StatisticsReporter()
    pop.add_reporter(stats)
    pop.add_reporter(neat.StdOutReporter(True))
    # 10 worker processes; pop.run is given no generation limit, so it runs
    # until the config's fitness threshold is reached - confirm in pong_config.
    pe = neat.ParallelEvaluator(10, eval_genome)
    winner = pop.run(pe.evaluate)
    # Persist the champion genome for later replay.
    with open('winner-feedforward', 'wb') as f:
        pickle.dump(winner, f)
    print(winner)
    visualize.plot_stats(stats, ylog=True, view=True, filename=
        'feedforward-fitness.svg')
    visualize.plot_species(stats, view=True, filename=
        'feedforward-speciation.svg')
    node_names = {(-1): 'x', (-2): 'dx', (-3): 'theta', (-4): 'dtheta', (0):
        'control'}
    visualize.draw_net(config, winner, True, node_names=node_names)
    visualize.draw_net(config, winner, view=True, node_names=node_names,
        filename='winner-feedforward.gv')
    visualize.draw_net(config, winner, view=True, node_names=node_names,
        filename='winner-feedforward-enabled.gv', show_disabled=False)
    visualize.draw_net(config, winner, view=True, node_names=node_names,
        filename='winner-feedforward-enabled-pruned.gv', show_disabled=
        False, prune_unused=True)
if __name__ == '__main__':
run()
<|reserved_special_token_1|>
import retro # pip install gym-retro
import numpy as np # pip install numpy
#import cv2 # pip install opencv-python
import neat # pip install neat-python
import pickle # pip install cloudpickle
import os
import multiprocessing
import cv2
import time
env = retro.make(game='Pong-Atari2600')
def eval_genome(genome, config):
net = neat.nn.FeedForwardNetwork.create(genome, config)
env.reset()
ob, _, _, _ = env.step(env.action_space.sample())
inx = int(ob.shape[0]/8)
iny = int(ob.shape[1]/8)
fitnesses = []
score1=0
score2=0
# Run the given simulation for up to num_steps time steps.
fitness = 0.0
done = False
start_time=time.time()
series_of_keys=[]
series_of_nnOut=[]
while not done:
env.render()
ob = cv2.resize(ob, (inx, iny))
ob = cv2.cvtColor(ob, cv2.COLOR_BGR2GRAY)
ob = np.reshape(ob, (inx, iny))
imgarray = np.ndarray.flatten(ob)
imgarray = np.interp(imgarray, (0, 254), (-1, +1))
nnOut = net.activate(imgarray)
for o in nnOut:
if o > 0.:
keys = [1, 0]
else:
keys = [0, 1]
actions=[0]*4+keys+[0]*2
series_of_keys.append(keys)
series_of_nnOut.append(nnOut)
ob, rew, done, info = env.step(actions)
score1=info['score1']
score2=info['score2']
if score1 >19 or score2 >19:
done = True
print(series_of_keys)
# print(series_of_actions)
run_time=time.time()-start_time
fitness=score2-score1/(run_time-2)
return fitness
def eval_genomes(genomes, config):
for genome_id, genome in genomes:
genome.fitness = eval_genome(genome, config)
def run():
# Load the config file, which is assumed to live in
# the same directory as this script.
local_dir = os.path.dirname(__file__)
config_path = os.path.join(local_dir, 'pong_config')
config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction,
neat.DefaultSpeciesSet, neat.DefaultStagnation,
config_path)
pop = neat.Population(config)
stats = neat.StatisticsReporter()
pop.add_reporter(stats)
pop.add_reporter(neat.StdOutReporter(True))
pe = neat.ParallelEvaluator(10, eval_genome)
winner = pop.run(pe.evaluate)
# Save the winner.
with open('winner-feedforward', 'wb') as f:
pickle.dump(winner, f)
print(winner)
visualize.plot_stats(stats, ylog=True, view=True, filename="feedforward-fitness.svg")
visualize.plot_species(stats, view=True, filename="feedforward-speciation.svg")
node_names = {-1: 'x', -2: 'dx', -3: 'theta', -4: 'dtheta', 0: 'control'}
visualize.draw_net(config, winner, True, node_names=node_names)
visualize.draw_net(config, winner, view=True, node_names=node_names,
filename="winner-feedforward.gv")
visualize.draw_net(config, winner, view=True, node_names=node_names,
filename="winner-feedforward-enabled.gv", show_disabled=False)
visualize.draw_net(config, winner, view=True, node_names=node_names,
filename="winner-feedforward-enabled-pruned.gv", show_disabled=False, prune_unused=True)
if __name__ == '__main__':
run()
|
flexible
|
{
"blob_id": "36e350e0d578e169efaafb9e311566d71d6bc59e",
"index": 1438,
"step-1": "<mask token>\n\n\ndef eval_genome(genome, config):\n net = neat.nn.FeedForwardNetwork.create(genome, config)\n env.reset()\n ob, _, _, _ = env.step(env.action_space.sample())\n inx = int(ob.shape[0] / 8)\n iny = int(ob.shape[1] / 8)\n fitnesses = []\n score1 = 0\n score2 = 0\n fitness = 0.0\n done = False\n start_time = time.time()\n series_of_keys = []\n series_of_nnOut = []\n while not done:\n env.render()\n ob = cv2.resize(ob, (inx, iny))\n ob = cv2.cvtColor(ob, cv2.COLOR_BGR2GRAY)\n ob = np.reshape(ob, (inx, iny))\n imgarray = np.ndarray.flatten(ob)\n imgarray = np.interp(imgarray, (0, 254), (-1, +1))\n nnOut = net.activate(imgarray)\n for o in nnOut:\n if o > 0.0:\n keys = [1, 0]\n else:\n keys = [0, 1]\n actions = [0] * 4 + keys + [0] * 2\n series_of_keys.append(keys)\n series_of_nnOut.append(nnOut)\n ob, rew, done, info = env.step(actions)\n score1 = info['score1']\n score2 = info['score2']\n if score1 > 19 or score2 > 19:\n done = True\n print(series_of_keys)\n run_time = time.time() - start_time\n fitness = score2 - score1 / (run_time - 2)\n return fitness\n\n\ndef eval_genomes(genomes, config):\n for genome_id, genome in genomes:\n genome.fitness = eval_genome(genome, config)\n\n\ndef run():\n local_dir = os.path.dirname(__file__)\n config_path = os.path.join(local_dir, 'pong_config')\n config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat\n .DefaultSpeciesSet, neat.DefaultStagnation, config_path)\n pop = neat.Population(config)\n stats = neat.StatisticsReporter()\n pop.add_reporter(stats)\n pop.add_reporter(neat.StdOutReporter(True))\n pe = neat.ParallelEvaluator(10, eval_genome)\n winner = pop.run(pe.evaluate)\n with open('winner-feedforward', 'wb') as f:\n pickle.dump(winner, f)\n print(winner)\n visualize.plot_stats(stats, ylog=True, view=True, filename=\n 'feedforward-fitness.svg')\n visualize.plot_species(stats, view=True, filename=\n 'feedforward-speciation.svg')\n node_names = {(-1): 'x', (-2): 'dx', (-3): 'theta', 
(-4): 'dtheta', (0):\n 'control'}\n visualize.draw_net(config, winner, True, node_names=node_names)\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward.gv')\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward-enabled.gv', show_disabled=False)\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward-enabled-pruned.gv', show_disabled=\n False, prune_unused=True)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef eval_genome(genome, config):\n net = neat.nn.FeedForwardNetwork.create(genome, config)\n env.reset()\n ob, _, _, _ = env.step(env.action_space.sample())\n inx = int(ob.shape[0] / 8)\n iny = int(ob.shape[1] / 8)\n fitnesses = []\n score1 = 0\n score2 = 0\n fitness = 0.0\n done = False\n start_time = time.time()\n series_of_keys = []\n series_of_nnOut = []\n while not done:\n env.render()\n ob = cv2.resize(ob, (inx, iny))\n ob = cv2.cvtColor(ob, cv2.COLOR_BGR2GRAY)\n ob = np.reshape(ob, (inx, iny))\n imgarray = np.ndarray.flatten(ob)\n imgarray = np.interp(imgarray, (0, 254), (-1, +1))\n nnOut = net.activate(imgarray)\n for o in nnOut:\n if o > 0.0:\n keys = [1, 0]\n else:\n keys = [0, 1]\n actions = [0] * 4 + keys + [0] * 2\n series_of_keys.append(keys)\n series_of_nnOut.append(nnOut)\n ob, rew, done, info = env.step(actions)\n score1 = info['score1']\n score2 = info['score2']\n if score1 > 19 or score2 > 19:\n done = True\n print(series_of_keys)\n run_time = time.time() - start_time\n fitness = score2 - score1 / (run_time - 2)\n return fitness\n\n\ndef eval_genomes(genomes, config):\n for genome_id, genome in genomes:\n genome.fitness = eval_genome(genome, config)\n\n\ndef run():\n local_dir = os.path.dirname(__file__)\n config_path = os.path.join(local_dir, 'pong_config')\n config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat\n .DefaultSpeciesSet, neat.DefaultStagnation, config_path)\n pop = neat.Population(config)\n stats = neat.StatisticsReporter()\n pop.add_reporter(stats)\n pop.add_reporter(neat.StdOutReporter(True))\n pe = neat.ParallelEvaluator(10, eval_genome)\n winner = pop.run(pe.evaluate)\n with open('winner-feedforward', 'wb') as f:\n pickle.dump(winner, f)\n print(winner)\n visualize.plot_stats(stats, ylog=True, view=True, filename=\n 'feedforward-fitness.svg')\n visualize.plot_species(stats, view=True, filename=\n 'feedforward-speciation.svg')\n node_names = {(-1): 'x', (-2): 'dx', (-3): 'theta', 
(-4): 'dtheta', (0):\n 'control'}\n visualize.draw_net(config, winner, True, node_names=node_names)\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward.gv')\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward-enabled.gv', show_disabled=False)\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward-enabled-pruned.gv', show_disabled=\n False, prune_unused=True)\n\n\nif __name__ == '__main__':\n run()\n",
"step-3": "<mask token>\nenv = retro.make(game='Pong-Atari2600')\n\n\ndef eval_genome(genome, config):\n net = neat.nn.FeedForwardNetwork.create(genome, config)\n env.reset()\n ob, _, _, _ = env.step(env.action_space.sample())\n inx = int(ob.shape[0] / 8)\n iny = int(ob.shape[1] / 8)\n fitnesses = []\n score1 = 0\n score2 = 0\n fitness = 0.0\n done = False\n start_time = time.time()\n series_of_keys = []\n series_of_nnOut = []\n while not done:\n env.render()\n ob = cv2.resize(ob, (inx, iny))\n ob = cv2.cvtColor(ob, cv2.COLOR_BGR2GRAY)\n ob = np.reshape(ob, (inx, iny))\n imgarray = np.ndarray.flatten(ob)\n imgarray = np.interp(imgarray, (0, 254), (-1, +1))\n nnOut = net.activate(imgarray)\n for o in nnOut:\n if o > 0.0:\n keys = [1, 0]\n else:\n keys = [0, 1]\n actions = [0] * 4 + keys + [0] * 2\n series_of_keys.append(keys)\n series_of_nnOut.append(nnOut)\n ob, rew, done, info = env.step(actions)\n score1 = info['score1']\n score2 = info['score2']\n if score1 > 19 or score2 > 19:\n done = True\n print(series_of_keys)\n run_time = time.time() - start_time\n fitness = score2 - score1 / (run_time - 2)\n return fitness\n\n\ndef eval_genomes(genomes, config):\n for genome_id, genome in genomes:\n genome.fitness = eval_genome(genome, config)\n\n\ndef run():\n local_dir = os.path.dirname(__file__)\n config_path = os.path.join(local_dir, 'pong_config')\n config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat\n .DefaultSpeciesSet, neat.DefaultStagnation, config_path)\n pop = neat.Population(config)\n stats = neat.StatisticsReporter()\n pop.add_reporter(stats)\n pop.add_reporter(neat.StdOutReporter(True))\n pe = neat.ParallelEvaluator(10, eval_genome)\n winner = pop.run(pe.evaluate)\n with open('winner-feedforward', 'wb') as f:\n pickle.dump(winner, f)\n print(winner)\n visualize.plot_stats(stats, ylog=True, view=True, filename=\n 'feedforward-fitness.svg')\n visualize.plot_species(stats, view=True, filename=\n 'feedforward-speciation.svg')\n node_names = 
{(-1): 'x', (-2): 'dx', (-3): 'theta', (-4): 'dtheta', (0):\n 'control'}\n visualize.draw_net(config, winner, True, node_names=node_names)\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward.gv')\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward-enabled.gv', show_disabled=False)\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward-enabled-pruned.gv', show_disabled=\n False, prune_unused=True)\n\n\nif __name__ == '__main__':\n run()\n",
"step-4": "import retro\nimport numpy as np\nimport neat\nimport pickle\nimport os\nimport multiprocessing\nimport cv2\nimport time\nenv = retro.make(game='Pong-Atari2600')\n\n\ndef eval_genome(genome, config):\n net = neat.nn.FeedForwardNetwork.create(genome, config)\n env.reset()\n ob, _, _, _ = env.step(env.action_space.sample())\n inx = int(ob.shape[0] / 8)\n iny = int(ob.shape[1] / 8)\n fitnesses = []\n score1 = 0\n score2 = 0\n fitness = 0.0\n done = False\n start_time = time.time()\n series_of_keys = []\n series_of_nnOut = []\n while not done:\n env.render()\n ob = cv2.resize(ob, (inx, iny))\n ob = cv2.cvtColor(ob, cv2.COLOR_BGR2GRAY)\n ob = np.reshape(ob, (inx, iny))\n imgarray = np.ndarray.flatten(ob)\n imgarray = np.interp(imgarray, (0, 254), (-1, +1))\n nnOut = net.activate(imgarray)\n for o in nnOut:\n if o > 0.0:\n keys = [1, 0]\n else:\n keys = [0, 1]\n actions = [0] * 4 + keys + [0] * 2\n series_of_keys.append(keys)\n series_of_nnOut.append(nnOut)\n ob, rew, done, info = env.step(actions)\n score1 = info['score1']\n score2 = info['score2']\n if score1 > 19 or score2 > 19:\n done = True\n print(series_of_keys)\n run_time = time.time() - start_time\n fitness = score2 - score1 / (run_time - 2)\n return fitness\n\n\ndef eval_genomes(genomes, config):\n for genome_id, genome in genomes:\n genome.fitness = eval_genome(genome, config)\n\n\ndef run():\n local_dir = os.path.dirname(__file__)\n config_path = os.path.join(local_dir, 'pong_config')\n config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat\n .DefaultSpeciesSet, neat.DefaultStagnation, config_path)\n pop = neat.Population(config)\n stats = neat.StatisticsReporter()\n pop.add_reporter(stats)\n pop.add_reporter(neat.StdOutReporter(True))\n pe = neat.ParallelEvaluator(10, eval_genome)\n winner = pop.run(pe.evaluate)\n with open('winner-feedforward', 'wb') as f:\n pickle.dump(winner, f)\n print(winner)\n visualize.plot_stats(stats, ylog=True, view=True, filename=\n 
'feedforward-fitness.svg')\n visualize.plot_species(stats, view=True, filename=\n 'feedforward-speciation.svg')\n node_names = {(-1): 'x', (-2): 'dx', (-3): 'theta', (-4): 'dtheta', (0):\n 'control'}\n visualize.draw_net(config, winner, True, node_names=node_names)\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward.gv')\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward-enabled.gv', show_disabled=False)\n visualize.draw_net(config, winner, view=True, node_names=node_names,\n filename='winner-feedforward-enabled-pruned.gv', show_disabled=\n False, prune_unused=True)\n\n\nif __name__ == '__main__':\n run()\n",
"step-5": "import retro # pip install gym-retro\r\nimport numpy as np # pip install numpy\r\n#import cv2 # pip install opencv-python\r\nimport neat # pip install neat-python\r\nimport pickle # pip install cloudpickle\r\nimport os\r\nimport multiprocessing\r\nimport cv2\r\nimport time\r\n\r\nenv = retro.make(game='Pong-Atari2600')\r\n\r\n\r\n\r\ndef eval_genome(genome, config):\r\n net = neat.nn.FeedForwardNetwork.create(genome, config)\r\n\r\n\r\n env.reset()\r\n ob, _, _, _ = env.step(env.action_space.sample())\r\n inx = int(ob.shape[0]/8)\r\n iny = int(ob.shape[1]/8)\r\n fitnesses = []\r\n\r\n\r\n score1=0\r\n score2=0\r\n # Run the given simulation for up to num_steps time steps.\r\n fitness = 0.0\r\n done = False\r\n start_time=time.time()\r\n series_of_keys=[]\r\n series_of_nnOut=[]\r\n while not done:\r\n env.render()\r\n\r\n\r\n ob = cv2.resize(ob, (inx, iny))\r\n ob = cv2.cvtColor(ob, cv2.COLOR_BGR2GRAY)\r\n ob = np.reshape(ob, (inx, iny))\r\n imgarray = np.ndarray.flatten(ob)\r\n imgarray = np.interp(imgarray, (0, 254), (-1, +1))\r\n nnOut = net.activate(imgarray)\r\n\r\n\r\n for o in nnOut:\r\n if o > 0.:\r\n keys = [1, 0]\r\n else:\r\n keys = [0, 1]\r\n actions=[0]*4+keys+[0]*2\r\n\r\n series_of_keys.append(keys)\r\n series_of_nnOut.append(nnOut)\r\n\r\n ob, rew, done, info = env.step(actions)\r\n\r\n score1=info['score1']\r\n score2=info['score2']\r\n\r\n\r\n if score1 >19 or score2 >19:\r\n done = True\r\n\r\n print(series_of_keys)\r\n# print(series_of_actions)\r\n run_time=time.time()-start_time\r\n\r\n fitness=score2-score1/(run_time-2)\r\n return fitness\r\n\r\ndef eval_genomes(genomes, config):\r\n for genome_id, genome in genomes:\r\n genome.fitness = eval_genome(genome, config)\r\n\r\n\r\ndef run():\r\n # Load the config file, which is assumed to live in\r\n # the same directory as this script.\r\n local_dir = os.path.dirname(__file__)\r\n config_path = os.path.join(local_dir, 'pong_config')\r\n config = neat.Config(neat.DefaultGenome, 
neat.DefaultReproduction,\r\n neat.DefaultSpeciesSet, neat.DefaultStagnation,\r\n config_path)\r\n\r\n pop = neat.Population(config)\r\n stats = neat.StatisticsReporter()\r\n pop.add_reporter(stats)\r\n pop.add_reporter(neat.StdOutReporter(True))\r\n\r\n pe = neat.ParallelEvaluator(10, eval_genome)\r\n winner = pop.run(pe.evaluate)\r\n\r\n # Save the winner.\r\n with open('winner-feedforward', 'wb') as f:\r\n pickle.dump(winner, f)\r\n\r\n print(winner)\r\n\r\n visualize.plot_stats(stats, ylog=True, view=True, filename=\"feedforward-fitness.svg\")\r\n visualize.plot_species(stats, view=True, filename=\"feedforward-speciation.svg\")\r\n\r\n node_names = {-1: 'x', -2: 'dx', -3: 'theta', -4: 'dtheta', 0: 'control'}\r\n visualize.draw_net(config, winner, True, node_names=node_names)\r\n\r\n visualize.draw_net(config, winner, view=True, node_names=node_names,\r\n filename=\"winner-feedforward.gv\")\r\n visualize.draw_net(config, winner, view=True, node_names=node_names,\r\n filename=\"winner-feedforward-enabled.gv\", show_disabled=False)\r\n visualize.draw_net(config, winner, view=True, node_names=node_names,\r\n filename=\"winner-feedforward-enabled-pruned.gv\", show_disabled=False, prune_unused=True)\r\n\r\n\r\nif __name__ == '__main__':\r\n run()\r\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# Ask the user for their name, then greet them (prompt and greeting are in
# Russian: "Enter your name" / "Good day").
name = input("Введите ваше имя ")
print("Добрый день,", name)
|
normal
|
{
"blob_id": "e44c4b2c3b60d34d4540ec2d3a782c777c52fbc0",
"index": 8662,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('Добрый день,', name)\n",
"step-3": "name = input('Введите ваше имя ')\nprint('Добрый день,', name)\n",
"step-4": "name = input(\"Введите ваше имя \")\nprint(\"Добрый день,\", name)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Volume(bt.Strategy):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Volume(bt.Strategy):
<|reserved_special_token_0|>
def __init__(self):
self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,
period=self.params.avg_volume_period) >= self.params.ratio
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Volume(bt.Strategy):
params = ('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25)
def __init__(self):
self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,
period=self.params.avg_volume_period) >= self.params.ratio
def next(self):
self.step_date = self.data.datetime.date().strftime('%Y-%m-%d')
self.today = datetime.now().strftime('%Y-%m-%d')
if self.mysignal and self.step_date == self.today:
TelegramBot.send(
'{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.'.
format(self.params.ticker, self.params.avg_volume_period))
<|reserved_special_token_1|>
import math
import backtrader as bt
from datetime import datetime
from bots.TelegramBot import TelegramBot
import logging
class Volume(bt.Strategy):
params = ('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25)
def __init__(self):
self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,
period=self.params.avg_volume_period) >= self.params.ratio
def next(self):
self.step_date = self.data.datetime.date().strftime('%Y-%m-%d')
self.today = datetime.now().strftime('%Y-%m-%d')
if self.mysignal and self.step_date == self.today:
TelegramBot.send(
'{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.'.
format(self.params.ticker, self.params.avg_volume_period))
<|reserved_special_token_1|>
import math
import backtrader as bt
from datetime import datetime
from bots.TelegramBot import TelegramBot
import logging
class Volume(bt.Strategy):
    """Backtrader strategy that flags unusually high trading volume.

    Sends a Telegram alert when the current bar's volume is at least
    ``ratio`` times the ``avg_volume_period``-bar average volume for
    ``ticker``, and only for today's bar (not historical backfill).
    """
    # (name, default) tuples; backtrader exposes these as self.params.<name>.
    params = (('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25))

    def __init__(self):
        # NOTE(review): this builds a lazy backtrader "line" expression, not
        # a plain bool - it is evaluated per bar when truth-tested in next().
        self.mysignal = (self.data.volume / bt.ind.Average(self.data.volume, period=self.params.avg_volume_period)) >= self.params.ratio
    def next(self):
        self.step_date = self.data.datetime.date().strftime("%Y-%m-%d")  # date of the bar being processed
        self.today = datetime.now().strftime("%Y-%m-%d")  # wall-clock date (local time)
        # Alert only when the signal is on AND the bar is today's, so
        # replaying history never triggers notifications.
        if self.mysignal and self.step_date == self.today:
            TelegramBot.send("{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.".format(self.params.ticker, self.params.avg_volume_period))
|
flexible
|
{
"blob_id": "acbe9a9501c6a8532249496f327c2470c1d2f8e0",
"index": 898,
"step-1": "<mask token>\n\n\nclass Volume(bt.Strategy):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Volume(bt.Strategy):\n <mask token>\n\n def __init__(self):\n self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,\n period=self.params.avg_volume_period) >= self.params.ratio\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Volume(bt.Strategy):\n params = ('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25)\n\n def __init__(self):\n self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,\n period=self.params.avg_volume_period) >= self.params.ratio\n\n def next(self):\n self.step_date = self.data.datetime.date().strftime('%Y-%m-%d')\n self.today = datetime.now().strftime('%Y-%m-%d')\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\n '{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.'.\n format(self.params.ticker, self.params.avg_volume_period))\n",
"step-4": "import math\nimport backtrader as bt\nfrom datetime import datetime\nfrom bots.TelegramBot import TelegramBot\nimport logging\n\n\nclass Volume(bt.Strategy):\n params = ('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25)\n\n def __init__(self):\n self.mysignal = self.data.volume / bt.ind.Average(self.data.volume,\n period=self.params.avg_volume_period) >= self.params.ratio\n\n def next(self):\n self.step_date = self.data.datetime.date().strftime('%Y-%m-%d')\n self.today = datetime.now().strftime('%Y-%m-%d')\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\n '{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.'.\n format(self.params.ticker, self.params.avg_volume_period))\n",
"step-5": "import math\nimport backtrader as bt\nfrom datetime import datetime\nfrom bots.TelegramBot import TelegramBot\nimport logging\nclass Volume(bt.Strategy):\n params = (('avg_volume_period', 10), ('ticker', 'hpg'), ('ratio', 1.25))\n\n def __init__(self):\n self.mysignal = (self.data.volume / bt.ind.Average(self.data.volume, period=self.params.avg_volume_period)) >= self.params.ratio\n def next(self):\n self.step_date = self.data.datetime.date().strftime(\"%Y-%m-%d\")\n self.today = datetime.now().strftime(\"%Y-%m-%d\")\n if self.mysignal and self.step_date == self.today:\n TelegramBot.send(\"{} - KLGD lớn hơn KLGD trung bình {} ngày gần nhất.\".format(self.params.ticker, self.params.avg_volume_period))\n ",
"step-ids": [
1,
2,
4,
5,
6
]
}
|
[
1,
2,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
db.users.create_index([('names', pymongo.ASCENDING)])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
MyClient = MongoClient()
db = MyClient.mydatabase
users = db.users
db.users.create_index([('names', pymongo.ASCENDING)])
<|reserved_special_token_1|>
from enum import unique
import pymongo
from pymongo import MongoClient
MyClient = MongoClient()
db = MyClient.mydatabase
users = db.users
db.users.create_index([('names', pymongo.ASCENDING)])
<|reserved_special_token_1|>
# Minimal PyMongo setup: connect to a local MongoDB and create an index on
# the "users" collection. Without an index, MongoDB scans documents one by
# one within each collection.
from enum import unique  # NOTE(review): unused - nothing here references `unique`
import pymongo
from pymongo import MongoClient
MyClient = MongoClient() # connects to the default host and port (localhost:27017)
db = MyClient.mydatabase # database handle; created lazily on first write
users = db.users # collection handle (also created lazily); NOTE(review): unused below
db.users.create_index([("names" ,pymongo.ASCENDING)]) # ascending single-field index on "names"
|
flexible
|
{
"blob_id": "31f302775ef19a07137622ef9d33495cc2a8eed2",
"index": 5775,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndb.users.create_index([('names', pymongo.ASCENDING)])\n",
"step-3": "<mask token>\nMyClient = MongoClient()\ndb = MyClient.mydatabase\nusers = db.users\ndb.users.create_index([('names', pymongo.ASCENDING)])\n",
"step-4": "from enum import unique\nimport pymongo\nfrom pymongo import MongoClient\nMyClient = MongoClient()\ndb = MyClient.mydatabase\nusers = db.users\ndb.users.create_index([('names', pymongo.ASCENDING)])\n",
"step-5": "#pymongo and mongo DB search is like by line inside in a document then it moves to the other document\nfrom enum import unique\n\nimport pymongo\nfrom pymongo import MongoClient\n\nMyClient = MongoClient() # again this is connecting to deault host and port\n\ndb = MyClient.mydatabase #db is a variable to store the database my database\n\nusers = db.users #this is the table\n\ndb.users.create_index([(\"names\" ,pymongo.ASCENDING)]) #to create an in dex for a whole row\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from __future__ import division
# Static quote table: key "FROM-TO" -> units of TO per one unit of FROM.
rates = { "GBP-EUR":1.10, "EUR-USD":1.11, "GBP-USD":1.22, "GBP-YEN": 129.36 }
def find(rates, fx):
	"""Return the quoted rate for the pair *fx* (e.g. "GBP-EUR").

	Returns -1 when the pair is not in the table, so the caller can try
	the inverse pair instead.
	"""
	try:
		return rates[fx]
	except KeyError:
		# Was a bare except:; only a missing key means "pair not quoted" -
		# any other error should propagate instead of being masked as -1.
		return -1
def getInputs():
	"""Prompt until the user supplies a positive amount and two 3-letter
	currency codes; returns (amount, from_ccy, to_ccy).

	Python 2 only (raw_input / print statements).  Entering -999 in any
	field returns the sentinel (0, "", "") to break out of the recursion.
	NOTE(review): repeated bad input recurses and can eventually hit the
	recursion limit - a while loop would be safer.
	"""
	amount = raw_input("Enter amount: ")
	firstCurrency = raw_input("Enter Currency To Convert From: ")
	secCurrency = raw_input("Enter Currency To Convert To: ")
	try:
		fAmount = float(amount)
		sFirst = str(firstCurrency)
		sSecond = str(secCurrency)
		if fAmount>0 and len(sFirst)==3 and len(sSecond)==3:
			return fAmount, sFirst, sSecond
	except Exception as e:
		print e
	else:
		# Reached only when no exception was raised but the validation
		# above failed (non-positive amount or wrong code length).
		print "Please specify a positive number and a Currency Symbol e.g. USD"
	if amount=="-999" or firstCurrency=="-999" or secCurrency=="-999":
		return 0,"",""		#Sentinel to escape the recursion
	return getInputs()
def main():
	"""Convert a user-supplied amount; returns the formatted result string.

	Tries the direct pair first, then the inverse (1/rate).  When both
	lookups miss, 1 / -1 evaluates to -1.0, so the second check below
	still catches the "not found" case.  Python 2 (print statement).
	"""
	amount,currency1,currency2 = getInputs()
	rate = find(rates,currency1 + "-" + currency2)
	if rate == -1:
		rate = 1/ find(rates,currency2 + "-" + currency1) 	#Try the other way around
	if rate == -1:
		print "Currency Pair Not Found"		#Neither way works, wrong inputs
		return main()
	return "{} {} converted to {} is: {:.2f}".format(amount,currency1,currency2,rate*amount)
print main()
|
normal
|
{
"blob_id": "c664257d64b269002964ce95c05f132e563a65d4",
"index": 8736,
"step-1": "from __future__ import division\n\nrates = { \"GBP-EUR\":1.10, \"EUR-USD\":1.11, \"GBP-USD\":1.22, \"GBP-YEN\": 129.36 }\n\ndef find(rates, fx):\n\ttry:\n\t\treturn rates[fx]\n\texcept:\n\t\treturn -1\n\n\ndef getInputs():\n\tamount = raw_input(\"Enter amount: \")\n\tfirstCurrency = raw_input(\"Enter Currency To Convert From: \")\n\tsecCurrency = raw_input(\"Enter Currency To Convert To: \")\n\ttry:\n\t\tfAmount = float(amount)\n\t\tsFirst = str(firstCurrency)\n\t\tsSecond = str(secCurrency) \n\t\tif fAmount>0 and len(sFirst)==3 and len(sSecond)==3:\t\t\n\t\t\treturn fAmount, sFirst, sSecond\t\n\texcept Exception as e:\n\t\tprint e\n\telse:\n\t\tprint \"Please specify a positive number and a Currency Symbol e.g. USD\"\n\tif amount==\"-999\" or firstCurrency==\"-999\" or secCurrency==\"-999\":\n\t\treturn 0,\"\",\"\"\t\t#Something to escape the recursion\n\treturn getInputs()\n\n\ndef main():\n\tamount,currency1,currency2 = getInputs()\n\t\n\trate = find(rates,currency1 + \"-\" + currency2)\t\n\tif rate == -1:\n\t\trate = 1/ find(rates,currency2 + \"-\" + currency1) \t#Try the other way around\n\tif rate == -1:\n\t\tprint \"Currency Pair Not Found\"\t\t#Neither way works, wrong inputs\n\t\treturn main()\n\treturn \"{} {} converted to {} is: {:.2f}\".format(amount,currency1,currency2,rate*amount)\n\nprint main()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import times_series_learning as tsl
import numpy as np
import time
import datetime as dt
import sortedcontainers
import pandas as pd
from collections import defaultdict
class ServerProfileLearning(object):
    def __init__(self, data, parameters, distribution, distribution_period, level_threshold,
                 processus=True, moving_window=60,train_mode=True, verbose=False):
        """Profile learner for a single server.

        :param data: DataFrame of this server's logs; column 1 is taken as
            the hostname (see ``self.hostname`` below) - confirm schema.
        :param parameters: base parameter row, broadcast to shape
            (label_number + 1, 4) - see parameters in times_series_learning.
        :param distribution: shared distance distribution, common to all
            servers/clusters (sorted containers - be careful).
        :param distribution_period: slot length used to bucket metrics; also
            sizes ``self.measures`` (see initdict()).
        :param level_threshold: level considered for outliers.
        :param processus: forwarded to TimesSeriesLearning.
        :param moving_window: window size (unused in the code shown here).
        :param train_mode: when True, distributions are updated while
            processing distances.
        :param verbose: verbosity flag for distance computations.
        """
        self.label_number = 1 #len(np.unique(data['label'].values)) - clustering disabled, single cluster
        self.label = 1 #np.unique(data['label'].values)
        self.data = data
        self.parameters = np.ones((self.label_number + 1, 4)) * parameters # see parameters in times_series_learning
        self.data_prep = None
        self.hostname = self.data.iloc[0, 1]  # assumes column 1 holds the hostname - TODO confirm
        self.server_profile = dict()  # TimesSeriesLearning objects keyed by "<hostname>_<cluster>"
        self.distribution = distribution # shared distance-list distribution (sorted containers - be careful)
        self.distribution_period = distribution_period # period over which metrics are computed
        self.level_threshold = level_threshold # level we consider for outliers
        self.verbose = verbose
        self.processus = processus
        self.moving_window = moving_window
        self.train_mode = train_mode
        self.measures = self.initdict()  # per-slot Area_Difference / Max_Spread accumulators
        self.timestamp_anomaly = pd.DataFrame(columns=['Timestamp','Area_Difference'])  # anomaly log
def initdict(self):
d = defaultdict(dict)
for i in range(int((24*6*60)/self.distribution_period)+1):
d[i] = {}
d[i]['Area_Difference'] = []
d[i]['Max_Spread'] = []
return d
# sortedcontainers.SortedDict(sortedcontainers.SortedList())
def preprocess_data(self, data):
data_prep = data.drop(self.data.columns[1:len(self.data.columns) - 1], axis=1)
data_prep = data_prep.groupby(['label'])
return data_prep
def set_profile(self):
t0 = time.time()
t = tsl.TimesSeriesLearning(self.parameters[0, :],
self.distribution_period,
self.level_threshold, self.timestamp_anomaly, self.processus)
t.set_profile(self.data)
self.server_profile[self.hostname + "_general"] = t
#self.data_prep = self.preprocess_data(self.data)
# i = 0
# for k, v in self.data_prep:
# t = tsl.TimesSeriesLearning(self.parameters[i, :],
# self.distribution_period, self.level_threshold, self.processus)
# t.set_profile(v)
# self.server_profile[self.hostname + "_" + str(k)] = t
# print('cluster number ' + str(k) + ' of hostname: ' + self.hostname)
# i += 1
print("Learning Server" + self.hostname + " Done in " + str(time.time() - t0))
# Process distance and update distribution
def process_distance(self, streaming_data):
t0 = time.time()
cluster_name = self.hostname + "_general"
t = self.server_profile[cluster_name]
anomaly, max_spread, min_spread, d, date, threshold, quant = t.compute_distance_profile(streaming_data,
self.distribution,
self.measures,
self.train_mode,
self.verbose)
#streaming_data_prep = self.preprocess_data(streaming_data)
# for k, v in streaming_data_prep:
# cluster_name = self.hostname + "_" + str(k)
# if cluster_name in self.server_profile.keys():
# t = self.server_profile[cluster_name]
# anomaly, max_spread, min_spread, d, date, threshold, quant = t.compute_distance_profile(v,
# self.distribution,
# self.train_mode,
# self.verbose)
# #if anomaly:
# # break
# else:
# print('cluster: ',k)
# print("Logs does not belong to any cluster")
# break
#print("stream proccessed in :", time.time()-t0)
return anomaly, max_spread, min_spread, d, date, threshold, quant
# def simulate_streaming(self, streaming_data,date_start):
# streaming_data.index = pd.to_datetime(streaming_data.timestamp, format='%Y-%m-%d %H:%M:%S')
# streaming_data = streaming_data.sort_index()
# data_list = []
# date = streaming_data.index[0]
# while date < streaming_data.index[-1]:
# data_to_add = streaming_data.loc[date.isoformat():
# (date + dt.timedelta(minutes=self.parameters[2, 0]))].reset_index(drop=True)
# if data_to_add.shape[0]>0:
# data_list.append(data_to_add)
# date += dt.timedelta(minutes=self.parameters[0, 2])
#
# return data
|
normal
|
{
"blob_id": "53dd753356d8a8d60975c8f4cdaf20de66c2db46",
"index": 3486,
"step-1": "<mask token>\n\n\nclass ServerProfileLearning(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def process_distance(self, streaming_data):\n t0 = time.time()\n cluster_name = self.hostname + '_general'\n t = self.server_profile[cluster_name]\n anomaly, max_spread, min_spread, d, date, threshold, quant = (t.\n compute_distance_profile(streaming_data, self.distribution,\n self.measures, self.train_mode, self.verbose))\n return anomaly, max_spread, min_spread, d, date, threshold, quant\n",
"step-2": "<mask token>\n\n\nclass ServerProfileLearning(object):\n <mask token>\n <mask token>\n\n def preprocess_data(self, data):\n data_prep = data.drop(self.data.columns[1:len(self.data.columns) - \n 1], axis=1)\n data_prep = data_prep.groupby(['label'])\n return data_prep\n <mask token>\n\n def process_distance(self, streaming_data):\n t0 = time.time()\n cluster_name = self.hostname + '_general'\n t = self.server_profile[cluster_name]\n anomaly, max_spread, min_spread, d, date, threshold, quant = (t.\n compute_distance_profile(streaming_data, self.distribution,\n self.measures, self.train_mode, self.verbose))\n return anomaly, max_spread, min_spread, d, date, threshold, quant\n",
"step-3": "<mask token>\n\n\nclass ServerProfileLearning(object):\n <mask token>\n\n def initdict(self):\n d = defaultdict(dict)\n for i in range(int(24 * 6 * 60 / self.distribution_period) + 1):\n d[i] = {}\n d[i]['Area_Difference'] = []\n d[i]['Max_Spread'] = []\n return d\n\n def preprocess_data(self, data):\n data_prep = data.drop(self.data.columns[1:len(self.data.columns) - \n 1], axis=1)\n data_prep = data_prep.groupby(['label'])\n return data_prep\n <mask token>\n\n def process_distance(self, streaming_data):\n t0 = time.time()\n cluster_name = self.hostname + '_general'\n t = self.server_profile[cluster_name]\n anomaly, max_spread, min_spread, d, date, threshold, quant = (t.\n compute_distance_profile(streaming_data, self.distribution,\n self.measures, self.train_mode, self.verbose))\n return anomaly, max_spread, min_spread, d, date, threshold, quant\n",
"step-4": "<mask token>\n\n\nclass ServerProfileLearning(object):\n <mask token>\n\n def initdict(self):\n d = defaultdict(dict)\n for i in range(int(24 * 6 * 60 / self.distribution_period) + 1):\n d[i] = {}\n d[i]['Area_Difference'] = []\n d[i]['Max_Spread'] = []\n return d\n\n def preprocess_data(self, data):\n data_prep = data.drop(self.data.columns[1:len(self.data.columns) - \n 1], axis=1)\n data_prep = data_prep.groupby(['label'])\n return data_prep\n\n def set_profile(self):\n t0 = time.time()\n t = tsl.TimesSeriesLearning(self.parameters[0, :], self.\n distribution_period, self.level_threshold, self.\n timestamp_anomaly, self.processus)\n t.set_profile(self.data)\n self.server_profile[self.hostname + '_general'] = t\n print('Learning Server' + self.hostname + ' Done in ' + str(time.\n time() - t0))\n\n def process_distance(self, streaming_data):\n t0 = time.time()\n cluster_name = self.hostname + '_general'\n t = self.server_profile[cluster_name]\n anomaly, max_spread, min_spread, d, date, threshold, quant = (t.\n compute_distance_profile(streaming_data, self.distribution,\n self.measures, self.train_mode, self.verbose))\n return anomaly, max_spread, min_spread, d, date, threshold, quant\n",
"step-5": "import times_series_learning as tsl\nimport numpy as np\nimport time\nimport datetime as dt\nimport sortedcontainers\nimport pandas as pd\nfrom collections import defaultdict\n\n\nclass ServerProfileLearning(object):\n\n def __init__(self, data, parameters, distribution, distribution_period, level_threshold,\n processus=True, moving_window=60,train_mode=True, verbose=False):\n self.label_number = 1 #len(np.unique(data['label'].values))\n self.label = 1 #np.unique(data['label'].values)\n self.data = data\n self.parameters = np.ones((self.label_number + 1, 4)) * parameters # see parameters in times_series_learning\n self.data_prep = None\n self.hostname = self.data.iloc[0, 1]\n self.server_profile = dict()\n self.distribution = distribution # distribution of distance list same for all servers all clusters be carefull sorted containers\n self.distribution_period = distribution_period # distribution period where we compute metrics\n self.level_threshold = level_threshold # level we consider for outliers\n self.verbose = verbose\n self.processus = processus\n self.moving_window = moving_window\n self.train_mode = train_mode\n self.measures = self.initdict()\n self.timestamp_anomaly = pd.DataFrame(columns=['Timestamp','Area_Difference'])\n\n def initdict(self):\n d = defaultdict(dict)\n for i in range(int((24*6*60)/self.distribution_period)+1):\n d[i] = {}\n d[i]['Area_Difference'] = []\n d[i]['Max_Spread'] = []\n return d\n\n\n # sortedcontainers.SortedDict(sortedcontainers.SortedList())\n\n def preprocess_data(self, data):\n data_prep = data.drop(self.data.columns[1:len(self.data.columns) - 1], axis=1)\n data_prep = data_prep.groupby(['label'])\n return data_prep\n\n def set_profile(self):\n t0 = time.time()\n t = tsl.TimesSeriesLearning(self.parameters[0, :],\n self.distribution_period,\n self.level_threshold, self.timestamp_anomaly, self.processus)\n t.set_profile(self.data)\n self.server_profile[self.hostname + \"_general\"] = t\n #self.data_prep = 
self.preprocess_data(self.data)\n # i = 0\n # for k, v in self.data_prep:\n # t = tsl.TimesSeriesLearning(self.parameters[i, :],\n # self.distribution_period, self.level_threshold, self.processus)\n # t.set_profile(v)\n # self.server_profile[self.hostname + \"_\" + str(k)] = t\n # print('cluster number ' + str(k) + ' of hostname: ' + self.hostname)\n # i += 1\n print(\"Learning Server\" + self.hostname + \" Done in \" + str(time.time() - t0))\n\n # Process distance and update distribution\n def process_distance(self, streaming_data):\n t0 = time.time()\n cluster_name = self.hostname + \"_general\"\n t = self.server_profile[cluster_name]\n anomaly, max_spread, min_spread, d, date, threshold, quant = t.compute_distance_profile(streaming_data,\n self.distribution,\n self.measures,\n self.train_mode,\n self.verbose)\n #streaming_data_prep = self.preprocess_data(streaming_data)\n # for k, v in streaming_data_prep:\n # cluster_name = self.hostname + \"_\" + str(k)\n # if cluster_name in self.server_profile.keys():\n # t = self.server_profile[cluster_name]\n # anomaly, max_spread, min_spread, d, date, threshold, quant = t.compute_distance_profile(v,\n # self.distribution,\n # self.train_mode,\n # self.verbose)\n # #if anomaly:\n # # break\n # else:\n # print('cluster: ',k)\n # print(\"Logs does not belong to any cluster\")\n # break\n #print(\"stream proccessed in :\", time.time()-t0)\n return anomaly, max_spread, min_spread, d, date, threshold, quant\n\n # def simulate_streaming(self, streaming_data,date_start):\n # streaming_data.index = pd.to_datetime(streaming_data.timestamp, format='%Y-%m-%d %H:%M:%S')\n # streaming_data = streaming_data.sort_index()\n # data_list = []\n # date = streaming_data.index[0]\n # while date < streaming_data.index[-1]:\n # data_to_add = streaming_data.loc[date.isoformat():\n # (date + dt.timedelta(minutes=self.parameters[2, 0]))].reset_index(drop=True)\n # if data_to_add.shape[0]>0:\n # data_list.append(data_to_add)\n # date += 
dt.timedelta(minutes=self.parameters[0, 2])\n #\n # return data\n",
"step-ids": [
2,
3,
4,
5,
8
]
}
|
[
2,
3,
4,
5,
8
] |
<|reserved_special_token_0|>
@app.route('/', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
file_url = photos.url(filename)
path, label, element = model(file_url)
result = []
for el in path:
img = Image.fromarray((el * 255).astype(np.uint8))
file_object = io.BytesIO()
img.save(file_object, 'jpeg', quality=100)
figdata_jgp = base64.b64encode(file_object.getvalue())
result.append(figdata_jgp.decode('ascii'))
return render_template('display.html', image=file_url, label=
element, results=zip(result, label))
return render_template('index.html')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.insert(1, 'script')
<|reserved_special_token_0|>
configure_uploads(app, photos)
patch_request_class(app)
@app.route('/', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
file_url = photos.url(filename)
path, label, element = model(file_url)
result = []
for el in path:
img = Image.fromarray((el * 255).astype(np.uint8))
file_object = io.BytesIO()
img.save(file_object, 'jpeg', quality=100)
figdata_jgp = base64.b64encode(file_object.getvalue())
result.append(figdata_jgp.decode('ascii'))
return render_template('display.html', image=file_url, label=
element, results=zip(result, label))
return render_template('index.html')
app.run(threaded=False)
render_template('index.html')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.insert(1, 'script')
<|reserved_special_token_0|>
app = Flask(__name__)
app.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')
photos = UploadSet('photos', IMAGES)
configure_uploads(app, photos)
patch_request_class(app)
@app.route('/', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
file_url = photos.url(filename)
path, label, element = model(file_url)
result = []
for el in path:
img = Image.fromarray((el * 255).astype(np.uint8))
file_object = io.BytesIO()
img.save(file_object, 'jpeg', quality=100)
figdata_jgp = base64.b64encode(file_object.getvalue())
result.append(figdata_jgp.decode('ascii'))
return render_template('display.html', image=file_url, label=
element, results=zip(result, label))
return render_template('index.html')
app.run(threaded=False)
render_template('index.html')
<|reserved_special_token_1|>
import os
from flask import Flask, request, render_template, url_for
from flask_uploads import UploadSet, configure_uploads, IMAGES, patch_request_class
import sys
sys.path.insert(1, 'script')
from backend import model
import io
from PIL import Image
import base64
import numpy as np
app = Flask(__name__)
app.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')
photos = UploadSet('photos', IMAGES)
configure_uploads(app, photos)
patch_request_class(app)
@app.route('/', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
file_url = photos.url(filename)
path, label, element = model(file_url)
result = []
for el in path:
img = Image.fromarray((el * 255).astype(np.uint8))
file_object = io.BytesIO()
img.save(file_object, 'jpeg', quality=100)
figdata_jgp = base64.b64encode(file_object.getvalue())
result.append(figdata_jgp.decode('ascii'))
return render_template('display.html', image=file_url, label=
element, results=zip(result, label))
return render_template('index.html')
app.run(threaded=False)
render_template('index.html')
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
import os
from flask import Flask, request,render_template,url_for
from flask_uploads import UploadSet, configure_uploads, IMAGES, patch_request_class
import sys
sys.path.insert(1, 'script')
from backend import model
import io
from PIL import Image
import base64
import numpy as np
app = Flask(__name__)
app.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')
photos = UploadSet('photos', IMAGES)
configure_uploads(app, photos)
patch_request_class(app)
@app.route('/', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST' and 'photo' in request.files:
filename = photos.save(request.files['photo'])
file_url = photos.url(filename)
path,label,element = model(file_url)
result = []
for el in path :
img = Image.fromarray((el * 255).astype(np.uint8))
file_object = io.BytesIO()
img.save(file_object, 'jpeg',quality=100)
figdata_jgp = base64.b64encode(file_object.getvalue())
result.append(figdata_jgp.decode('ascii'))
return render_template('display.html',image = file_url,label = element, results=zip(result,label))
return render_template('index.html')
app.run(threaded=False)
render_template('index.html')
|
flexible
|
{
"blob_id": "93d0d73d56b04bba505265958fccff229f5eaf49",
"index": 872,
"step-1": "<mask token>\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.path.insert(1, 'script')\n<mask token>\nconfigure_uploads(app, photos)\npatch_request_class(app)\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"step-3": "<mask token>\nsys.path.insert(1, 'script')\n<mask token>\napp = Flask(__name__)\napp.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')\nphotos = UploadSet('photos', IMAGES)\nconfigure_uploads(app, photos)\npatch_request_class(app)\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"step-4": "import os\nfrom flask import Flask, request, render_template, url_for\nfrom flask_uploads import UploadSet, configure_uploads, IMAGES, patch_request_class\nimport sys\nsys.path.insert(1, 'script')\nfrom backend import model\nimport io\nfrom PIL import Image\nimport base64\nimport numpy as np\napp = Flask(__name__)\napp.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')\nphotos = UploadSet('photos', IMAGES)\nconfigure_uploads(app, photos)\npatch_request_class(app)\n\n\n@app.route('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path, label, element = model(file_url)\n result = []\n for el in path:\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg', quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html', image=file_url, label=\n element, results=zip(result, label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"step-5": "\n# -*- coding: utf-8 -*-\nimport os\nfrom flask import Flask, request,render_template,url_for\nfrom flask_uploads import UploadSet, configure_uploads, IMAGES, patch_request_class\nimport sys\nsys.path.insert(1, 'script')\nfrom backend import model\nimport io\nfrom PIL import Image\nimport base64\nimport numpy as np\n\n\n\n\napp = Flask(__name__)\napp.config['UPLOADED_PHOTOS_DEST'] = os.path.realpath('images')\n\n\n\nphotos = UploadSet('photos', IMAGES)\nconfigure_uploads(app, photos)\npatch_request_class(app) \n\n@app.route('/', methods=['GET', 'POST'])\ndef upload_file():\n if request.method == 'POST' and 'photo' in request.files:\n filename = photos.save(request.files['photo'])\n file_url = photos.url(filename)\n path,label,element = model(file_url)\n result = []\n for el in path :\n img = Image.fromarray((el * 255).astype(np.uint8))\n file_object = io.BytesIO()\n img.save(file_object, 'jpeg',quality=100)\n figdata_jgp = base64.b64encode(file_object.getvalue())\n result.append(figdata_jgp.decode('ascii'))\n return render_template('display.html',image = file_url,label = element, results=zip(result,label))\n return render_template('index.html')\n\n\napp.run(threaded=False)\nrender_template('index.html')\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def application(environ, start_response):
"""AJAX scripts for email templates."""
request = DRequest(environ)
resp = None
try:
Db.start_transaction()
form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)
args = form['args'].value
req = json.loads(args)
support = SupportSession(key=request.support_key())
handler = handlers[req['command']]
resp = Response(json.dumps(handler(request, req)))
Db.finish_transaction()
except SupportSessionExpired:
Db.cancel_transaction()
resp = Response(json.dumps({'Error': 'Session Expired'}))
except DbError as e:
Db.cancel_transaction()
resp = Response(json.dumps({'Error': e.args[0]}))
except Exception as e:
Db.cancel_transaction()
import traceback
traceback.print_exc()
resp = Response(json.dumps({'Error': 'Internal Error'}))
request.cookie_freshen(resp)
resp.headers['content-type'] = 'application/json'
resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'
return resp(environ, start_response)
def get(request, req):
return db.Support.get_all()
<|reserved_special_token_0|>
def delete(request, req):
return db.Support.delete(req['support_id'])
def add(request, req):
return db.Support.new()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def application(environ, start_response):
"""AJAX scripts for email templates."""
request = DRequest(environ)
resp = None
try:
Db.start_transaction()
form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)
args = form['args'].value
req = json.loads(args)
support = SupportSession(key=request.support_key())
handler = handlers[req['command']]
resp = Response(json.dumps(handler(request, req)))
Db.finish_transaction()
except SupportSessionExpired:
Db.cancel_transaction()
resp = Response(json.dumps({'Error': 'Session Expired'}))
except DbError as e:
Db.cancel_transaction()
resp = Response(json.dumps({'Error': e.args[0]}))
except Exception as e:
Db.cancel_transaction()
import traceback
traceback.print_exc()
resp = Response(json.dumps({'Error': 'Internal Error'}))
request.cookie_freshen(resp)
resp.headers['content-type'] = 'application/json'
resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'
return resp(environ, start_response)
def get(request, req):
return db.Support.get_all()
def edit(request, req):
return db.Support.edit(req)
def delete(request, req):
return db.Support.delete(req['support_id'])
def add(request, req):
return db.Support.new()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def application(environ, start_response):
"""AJAX scripts for email templates."""
request = DRequest(environ)
resp = None
try:
Db.start_transaction()
form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)
args = form['args'].value
req = json.loads(args)
support = SupportSession(key=request.support_key())
handler = handlers[req['command']]
resp = Response(json.dumps(handler(request, req)))
Db.finish_transaction()
except SupportSessionExpired:
Db.cancel_transaction()
resp = Response(json.dumps({'Error': 'Session Expired'}))
except DbError as e:
Db.cancel_transaction()
resp = Response(json.dumps({'Error': e.args[0]}))
except Exception as e:
Db.cancel_transaction()
import traceback
traceback.print_exc()
resp = Response(json.dumps({'Error': 'Internal Error'}))
request.cookie_freshen(resp)
resp.headers['content-type'] = 'application/json'
resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'
return resp(environ, start_response)
def get(request, req):
return db.Support.get_all()
def edit(request, req):
return db.Support.edit(req)
def delete(request, req):
return db.Support.delete(req['support_id'])
def add(request, req):
return db.Support.new()
handlers = {'get': get, 'edit': edit, 'delete': delete, 'add': add}
<|reserved_special_token_1|>
from werkzeug.wrappers import Response
from p.DRequest import DRequest
from db.Support import SupportSession
from db.Exceptions import DbError, SupportSessionExpired
import db.Db as Db
import db.Support
import cgi
import simplejson as json
def application(environ, start_response):
"""AJAX scripts for email templates."""
request = DRequest(environ)
resp = None
try:
Db.start_transaction()
form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)
args = form['args'].value
req = json.loads(args)
support = SupportSession(key=request.support_key())
handler = handlers[req['command']]
resp = Response(json.dumps(handler(request, req)))
Db.finish_transaction()
except SupportSessionExpired:
Db.cancel_transaction()
resp = Response(json.dumps({'Error': 'Session Expired'}))
except DbError as e:
Db.cancel_transaction()
resp = Response(json.dumps({'Error': e.args[0]}))
except Exception as e:
Db.cancel_transaction()
import traceback
traceback.print_exc()
resp = Response(json.dumps({'Error': 'Internal Error'}))
request.cookie_freshen(resp)
resp.headers['content-type'] = 'application/json'
resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'
return resp(environ, start_response)
def get(request, req):
return db.Support.get_all()
def edit(request, req):
return db.Support.edit(req)
def delete(request, req):
return db.Support.delete(req['support_id'])
def add(request, req):
return db.Support.new()
handlers = {'get': get, 'edit': edit, 'delete': delete, 'add': add}
<|reserved_special_token_1|>
# $Header: //depot/cs/s/ajax_support.wsgi#10 $
from werkzeug.wrappers import Response
from p.DRequest import DRequest
from db.Support import SupportSession
from db.Exceptions import DbError, SupportSessionExpired
import db.Db as Db
import db.Support
import cgi
import simplejson as json
def application(environ, start_response):
"""AJAX scripts for email templates."""
request = DRequest(environ)
resp = None
try :
Db.start_transaction()
form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)
args = form['args'].value
req = json.loads(args)
support = SupportSession(key=request.support_key())
handler = handlers[req['command']]
resp = Response(json.dumps(handler(request, req)))
Db.finish_transaction()
except SupportSessionExpired:
Db.cancel_transaction()
resp = Response(json.dumps({ 'Error': 'Session Expired' }))
except DbError as e:
Db.cancel_transaction()
resp = Response(json.dumps({ 'Error': e.args[0]}))
except Exception as e:
Db.cancel_transaction()
import traceback
traceback.print_exc()
resp = Response(json.dumps({ 'Error': "Internal Error"}))
request.cookie_freshen(resp)
resp.headers['content-type'] = 'application/json'
resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'
return resp(environ, start_response)
def get(request, req):
return db.Support.get_all()
def edit(request, req):
return db.Support.edit(req);
def delete(request, req):
return db.Support.delete(req['support_id'])
def add(request, req):
return db.Support.new()
handlers = { 'get': get, 'edit': edit, 'delete': delete, 'add': add }
|
flexible
|
{
"blob_id": "be58862b66708c9de8cf7642c9de52ec744b079e",
"index": 805,
"step-1": "<mask token>\n\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n request = DRequest(environ)\n resp = None\n try:\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': 'Session Expired'}))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({'Error': 'Internal Error'}))\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\n\n<mask token>\n\n\ndef delete(request, req):\n return db.Support.delete(req['support_id'])\n\n\ndef add(request, req):\n return db.Support.new()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n request = DRequest(environ)\n resp = None\n try:\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': 'Session Expired'}))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({'Error': 'Internal Error'}))\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\n\ndef edit(request, req):\n return db.Support.edit(req)\n\n\ndef delete(request, req):\n return db.Support.delete(req['support_id'])\n\n\ndef add(request, req):\n return db.Support.new()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n request = DRequest(environ)\n resp = None\n try:\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': 'Session Expired'}))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({'Error': 'Internal Error'}))\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\n\ndef edit(request, req):\n return db.Support.edit(req)\n\n\ndef delete(request, req):\n return db.Support.delete(req['support_id'])\n\n\ndef add(request, req):\n return db.Support.new()\n\n\nhandlers = {'get': get, 'edit': edit, 'delete': delete, 'add': add}\n",
"step-4": "from werkzeug.wrappers import Response\nfrom p.DRequest import DRequest\nfrom db.Support import SupportSession\nfrom db.Exceptions import DbError, SupportSessionExpired\nimport db.Db as Db\nimport db.Support\nimport cgi\nimport simplejson as json\n\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n request = DRequest(environ)\n resp = None\n try:\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': 'Session Expired'}))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({'Error': 'Internal Error'}))\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\n\ndef edit(request, req):\n return db.Support.edit(req)\n\n\ndef delete(request, req):\n return db.Support.delete(req['support_id'])\n\n\ndef add(request, req):\n return db.Support.new()\n\n\nhandlers = {'get': get, 'edit': edit, 'delete': delete, 'add': add}\n",
"step-5": "# $Header: //depot/cs/s/ajax_support.wsgi#10 $\nfrom werkzeug.wrappers import Response\nfrom p.DRequest import DRequest\nfrom db.Support import SupportSession\nfrom db.Exceptions import DbError, SupportSessionExpired\nimport db.Db as Db\nimport db.Support\n\nimport cgi\nimport simplejson as json\n\ndef application(environ, start_response):\n \"\"\"AJAX scripts for email templates.\"\"\"\n\n request = DRequest(environ)\n\n resp = None\n\n try :\n Db.start_transaction()\n form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)\n args = form['args'].value\n req = json.loads(args)\n\n support = SupportSession(key=request.support_key())\n handler = handlers[req['command']]\n resp = Response(json.dumps(handler(request, req)))\n Db.finish_transaction()\n\n except SupportSessionExpired:\n Db.cancel_transaction()\n resp = Response(json.dumps({ 'Error': 'Session Expired' }))\n except DbError as e:\n Db.cancel_transaction()\n resp = Response(json.dumps({ 'Error': e.args[0]}))\n except Exception as e:\n Db.cancel_transaction()\n import traceback\n traceback.print_exc()\n resp = Response(json.dumps({ 'Error': \"Internal Error\"}))\n\n request.cookie_freshen(resp)\n resp.headers['content-type'] = 'application/json'\n resp.headers['cache-control'] = 'no-cache, must-revalidate, no-store'\n return resp(environ, start_response)\n\n\ndef get(request, req):\n return db.Support.get_all()\n\ndef edit(request, req):\n return db.Support.edit(req);\n\ndef delete(request, req):\n return db.Support.delete(req['support_id'])\n\ndef add(request, req):\n return db.Support.new()\n\n\nhandlers = { 'get': get, 'edit': edit, 'delete': delete, 'add': add }\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class HtmlDownload(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class HtmlDownload(object):
<|reserved_special_token_0|>
def html_download(city, keyWords, pages):
paras = {'jl': city, 'kw': keyWords, 'pages': pages, 'isadv': 0}
url = 'http://sou.zhaopin.com/jobs/searchresult.ashx?' + urlencode(
paras)
response = requests.get(url)
if response.status_code == 200:
return response.text
else:
return None
<|reserved_special_token_1|>
class HtmlDownload(object):
"""docstring for HtmlDownload"""
def html_download(city, keyWords, pages):
paras = {'jl': city, 'kw': keyWords, 'pages': pages, 'isadv': 0}
url = 'http://sou.zhaopin.com/jobs/searchresult.ashx?' + urlencode(
paras)
response = requests.get(url)
if response.status_code == 200:
return response.text
else:
return None
<|reserved_special_token_1|>
# coding=utf-8
class HtmlDownload(object):
"""docstring for HtmlDownload"""
def html_download(city, keyWords, pages):
# root URL
paras = {
'jl': city,
'kw': keyWords,
'pages': pages,
'isadv': 0
}
url = "http://sou.zhaopin.com/jobs/searchresult.ashx?" + urlencode(paras)
response = requests.get(url)
if response.status_code == 200:
return response.text
else:
return None
|
flexible
|
{
"blob_id": "e33aca56e4c9f82779278e836308c2e22d3356e2",
"index": 3770,
"step-1": "<mask token>\n",
"step-2": "class HtmlDownload(object):\n <mask token>\n <mask token>\n",
"step-3": "class HtmlDownload(object):\n <mask token>\n\n def html_download(city, keyWords, pages):\n paras = {'jl': city, 'kw': keyWords, 'pages': pages, 'isadv': 0}\n url = 'http://sou.zhaopin.com/jobs/searchresult.ashx?' + urlencode(\n paras)\n response = requests.get(url)\n if response.status_code == 200:\n return response.text\n else:\n return None\n",
"step-4": "class HtmlDownload(object):\n \"\"\"docstring for HtmlDownload\"\"\"\n\n def html_download(city, keyWords, pages):\n paras = {'jl': city, 'kw': keyWords, 'pages': pages, 'isadv': 0}\n url = 'http://sou.zhaopin.com/jobs/searchresult.ashx?' + urlencode(\n paras)\n response = requests.get(url)\n if response.status_code == 200:\n return response.text\n else:\n return None\n",
"step-5": "# coding=utf-8\nclass HtmlDownload(object):\n\t\"\"\"docstring for HtmlDownload\"\"\"\n\n\tdef html_download(city, keyWords, pages):\n # root URL\n\t paras = {\n\t 'jl': city,\n\t 'kw': keyWords,\n\t 'pages': pages,\n\t 'isadv': 0\n\t }\n\t url = \"http://sou.zhaopin.com/jobs/searchresult.ashx?\" + urlencode(paras)\n\t response = requests.get(url)\n\t if response.status_code == 200:\n\t return response.text\n\t else:\n\t return None",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class SlackEvent:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def time_stamp(self):
pass
@property
def channel(self):
pass
<|reserved_special_token_0|>
@property
def event_time_stamp(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def user_id(self):
pass
@property
def bot_id(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def files(self):
pass
@property
def message(self):
pass
<|reserved_special_token_1|>
class SlackEvent:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def time_stamp(self):
pass
@property
def channel(self):
pass
<|reserved_special_token_0|>
@property
def event_time_stamp(self):
pass
@property
def channel_type(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def user_id(self):
pass
@property
def bot_id(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def item_channel(self):
pass
@property
def files(self):
pass
@property
def message(self):
pass
<|reserved_special_token_1|>
class SlackEvent:
@property
def client_msg_id(self):
pass
<|reserved_special_token_0|>
@property
def subtype(self):
pass
@property
def text(self):
pass
@property
def time_stamp(self):
pass
@property
def channel(self):
pass
@property
def channel_id(self):
pass
@property
def event_time_stamp(self):
pass
@property
def channel_type(self):
pass
<|reserved_special_token_0|>
@property
def user(self):
pass
@property
def user_id(self):
pass
@property
def bot_id(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def item_channel(self):
pass
@property
def files(self):
pass
@property
def message(self):
pass
<|reserved_special_token_1|>
class SlackEvent:
@property
def client_msg_id(self):
pass
@property
def type(self):
pass
@property
def subtype(self):
pass
@property
def text(self):
pass
@property
def time_stamp(self):
pass
@property
def channel(self):
pass
@property
def channel_id(self):
pass
@property
def event_time_stamp(self):
pass
@property
def channel_type(self):
pass
<|reserved_special_token_0|>
@property
def user(self):
pass
@property
def user_id(self):
pass
@property
def bot_id(self):
pass
@property
def actions(self):
pass
<|reserved_special_token_0|>
@property
def item_channel(self):
pass
@property
def files(self):
pass
@property
def message(self):
pass
<|reserved_special_token_1|>
class SlackEvent:
@property
def client_msg_id(self):
pass
@property
def type(self):
pass
@property
def subtype(self):
pass
@property
def text(self):
pass
@property
def time_stamp(self):
pass
@property
def channel(self):
pass
@property
def channel_id(self):
pass
@property
def event_time_stamp(self):
pass
@property
def channel_type(self):
pass
@property
def thread_time_stamp(self):
pass
@property
def user(self):
pass
@property
def user_id(self):
pass
@property
def bot_id(self):
pass
@property
def actions(self):
pass
@property
def item(self):
pass
@property
def item_channel(self):
pass
@property
def files(self):
pass
@property
def message(self):
pass
|
flexible
|
{
"blob_id": "4a4745f202275e45fd78c12431e355fd59ac964a",
"index": 6722,
"step-1": "class SlackEvent:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def time_stamp(self):\n pass\n\n @property\n def channel(self):\n pass\n <mask token>\n\n @property\n def event_time_stamp(self):\n pass\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def user_id(self):\n pass\n\n @property\n def bot_id(self):\n pass\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def files(self):\n pass\n\n @property\n def message(self):\n pass\n",
"step-2": "class SlackEvent:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def time_stamp(self):\n pass\n\n @property\n def channel(self):\n pass\n <mask token>\n\n @property\n def event_time_stamp(self):\n pass\n\n @property\n def channel_type(self):\n pass\n <mask token>\n <mask token>\n\n @property\n def user_id(self):\n pass\n\n @property\n def bot_id(self):\n pass\n <mask token>\n <mask token>\n\n @property\n def item_channel(self):\n pass\n\n @property\n def files(self):\n pass\n\n @property\n def message(self):\n pass\n",
"step-3": "class SlackEvent:\n\n @property\n def client_msg_id(self):\n pass\n <mask token>\n\n @property\n def subtype(self):\n pass\n\n @property\n def text(self):\n pass\n\n @property\n def time_stamp(self):\n pass\n\n @property\n def channel(self):\n pass\n\n @property\n def channel_id(self):\n pass\n\n @property\n def event_time_stamp(self):\n pass\n\n @property\n def channel_type(self):\n pass\n <mask token>\n\n @property\n def user(self):\n pass\n\n @property\n def user_id(self):\n pass\n\n @property\n def bot_id(self):\n pass\n <mask token>\n <mask token>\n\n @property\n def item_channel(self):\n pass\n\n @property\n def files(self):\n pass\n\n @property\n def message(self):\n pass\n",
"step-4": "class SlackEvent:\n\n @property\n def client_msg_id(self):\n pass\n\n @property\n def type(self):\n pass\n\n @property\n def subtype(self):\n pass\n\n @property\n def text(self):\n pass\n\n @property\n def time_stamp(self):\n pass\n\n @property\n def channel(self):\n pass\n\n @property\n def channel_id(self):\n pass\n\n @property\n def event_time_stamp(self):\n pass\n\n @property\n def channel_type(self):\n pass\n <mask token>\n\n @property\n def user(self):\n pass\n\n @property\n def user_id(self):\n pass\n\n @property\n def bot_id(self):\n pass\n\n @property\n def actions(self):\n pass\n <mask token>\n\n @property\n def item_channel(self):\n pass\n\n @property\n def files(self):\n pass\n\n @property\n def message(self):\n pass\n",
"step-5": "class SlackEvent:\r\n @property\r\n def client_msg_id(self):\r\n pass\r\n\r\n @property\r\n def type(self):\r\n pass\r\n\r\n @property\r\n def subtype(self):\r\n pass\r\n\r\n @property\r\n def text(self):\r\n pass\r\n\r\n @property\r\n def time_stamp(self):\r\n pass\r\n\r\n @property\r\n def channel(self):\r\n pass\r\n\r\n @property\r\n def channel_id(self):\r\n pass\r\n\r\n @property\r\n def event_time_stamp(self):\r\n pass\r\n\r\n @property\r\n def channel_type(self):\r\n pass\r\n\r\n @property\r\n def thread_time_stamp(self):\r\n pass\r\n\r\n @property\r\n def user(self):\r\n pass\r\n\r\n @property\r\n def user_id(self):\r\n pass\r\n\r\n @property\r\n def bot_id(self):\r\n pass\r\n\r\n @property\r\n def actions(self):\r\n pass\r\n\r\n @property\r\n def item(self):\r\n pass\r\n\r\n @property\r\n def item_channel(self):\r\n pass\r\n\r\n @property\r\n def files(self):\r\n pass\r\n\r\n @property\r\n def message(self):\r\n pass\r\n",
"step-ids": [
8,
10,
15,
17,
20
]
}
|
[
8,
10,
15,
17,
20
] |
# dealing with the packet fragments and their reconsttruction
import logging
# shut up scapy
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy.all import *
conf.verb=0
from collections import OrderedDict
pkt_frag_loads = OrderedDict()
def get_load(pkt):
ack = str(pkt[TCP].ack)
seq = str(pkt[TCP].seq)
src_ip_port = str(pkt[IP].src) + ':' + str(pkt[TCP].sport)
dst_ip_port = str(pkt[IP].dst) + ':' + str(pkt[TCP].dport)
#create full load from load fragments
load = pkt[Raw].load
pkt_frag_loads = frag_remover(ack, load)
pkt_frag_loads[src_ip_port] = frag_joiner(ack, src_ip_port, load)
full_load = pkt_frag_loads[src_ip_port][ack]
return full_load
def frag_remover(ack, load):
'''
Keep the FILO OrderedDict of frag loads from getting too large
3 points of limit:
Number of ip_ports < 50
Number of acks per ip:port < 25
Number of chars in load < 5000
'''
global pkt_frag_loads
# Keep the number of IP:port mappings below 50
# last=False pops the oldest item rather than the latest
while len(pkt_frag_loads) > 50:
pkt_frag_loads.popitem(last=False)
# Loop through a deep copy dict but modify the original dict
copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)
for ip_port in copy_pkt_frag_loads:
if len(copy_pkt_frag_loads[ip_port]) > 0:
# Keep 25 ack:load's per ip:port
while len(copy_pkt_frag_loads[ip_port]) > 25:
pkt_frag_loads[ip_port].popitem(last=False)
# Recopy the new dict to prevent KeyErrors for modifying dict in loop
copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)
for ip_port in copy_pkt_frag_loads:
# Keep the load less than 75,000 chars
for ack in copy_pkt_frag_loads[ip_port]:
# If load > 5000 chars, just keep the last 200 chars
if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:
pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][-200:]
return pkt_frag_loads
def frag_joiner(ack, src_ip_port, load):
'''
Keep a store of previous fragments in an OrderedDict named pkt_frag_loads
'''
global pkt_frag_loads
for ip_port in pkt_frag_loads:
if src_ip_port == ip_port:
if ack in pkt_frag_loads[src_ip_port]:
# Make pkt_frag_loads[src_ip_port][ack] = full load
old_load = pkt_frag_loads[src_ip_port][ack]
concat_load = old_load + load
return OrderedDict([(ack, concat_load)])
return OrderedDict([(ack, load)])
|
normal
|
{
"blob_id": "3e0bc91b81d0f503b78c9ac685b05b7ecb754e28",
"index": 3460,
"step-1": "<mask token>\n\n\ndef get_load(pkt):\n ack = str(pkt[TCP].ack)\n seq = str(pkt[TCP].seq)\n src_ip_port = str(pkt[IP].src) + ':' + str(pkt[TCP].sport)\n dst_ip_port = str(pkt[IP].dst) + ':' + str(pkt[TCP].dport)\n load = pkt[Raw].load\n pkt_frag_loads = frag_remover(ack, load)\n pkt_frag_loads[src_ip_port] = frag_joiner(ack, src_ip_port, load)\n full_load = pkt_frag_loads[src_ip_port][ack]\n return full_load\n\n\ndef frag_remover(ack, load):\n \"\"\"\n Keep the FILO OrderedDict of frag loads from getting too large\n 3 points of limit:\n Number of ip_ports < 50\n Number of acks per ip:port < 25\n Number of chars in load < 5000\n \"\"\"\n global pkt_frag_loads\n while len(pkt_frag_loads) > 50:\n pkt_frag_loads.popitem(last=False)\n copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)\n for ip_port in copy_pkt_frag_loads:\n if len(copy_pkt_frag_loads[ip_port]) > 0:\n while len(copy_pkt_frag_loads[ip_port]) > 25:\n pkt_frag_loads[ip_port].popitem(last=False)\n copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)\n for ip_port in copy_pkt_frag_loads:\n for ack in copy_pkt_frag_loads[ip_port]:\n if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:\n pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][\n -200:]\n return pkt_frag_loads\n\n\ndef frag_joiner(ack, src_ip_port, load):\n \"\"\"\n Keep a store of previous fragments in an OrderedDict named pkt_frag_loads\n \"\"\"\n global pkt_frag_loads\n for ip_port in pkt_frag_loads:\n if src_ip_port == ip_port:\n if ack in pkt_frag_loads[src_ip_port]:\n old_load = pkt_frag_loads[src_ip_port][ack]\n concat_load = old_load + load\n return OrderedDict([(ack, concat_load)])\n return OrderedDict([(ack, load)])\n",
"step-2": "<mask token>\nlogging.getLogger('scapy.runtime').setLevel(logging.ERROR)\n<mask token>\n\n\ndef get_load(pkt):\n ack = str(pkt[TCP].ack)\n seq = str(pkt[TCP].seq)\n src_ip_port = str(pkt[IP].src) + ':' + str(pkt[TCP].sport)\n dst_ip_port = str(pkt[IP].dst) + ':' + str(pkt[TCP].dport)\n load = pkt[Raw].load\n pkt_frag_loads = frag_remover(ack, load)\n pkt_frag_loads[src_ip_port] = frag_joiner(ack, src_ip_port, load)\n full_load = pkt_frag_loads[src_ip_port][ack]\n return full_load\n\n\ndef frag_remover(ack, load):\n \"\"\"\n Keep the FILO OrderedDict of frag loads from getting too large\n 3 points of limit:\n Number of ip_ports < 50\n Number of acks per ip:port < 25\n Number of chars in load < 5000\n \"\"\"\n global pkt_frag_loads\n while len(pkt_frag_loads) > 50:\n pkt_frag_loads.popitem(last=False)\n copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)\n for ip_port in copy_pkt_frag_loads:\n if len(copy_pkt_frag_loads[ip_port]) > 0:\n while len(copy_pkt_frag_loads[ip_port]) > 25:\n pkt_frag_loads[ip_port].popitem(last=False)\n copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)\n for ip_port in copy_pkt_frag_loads:\n for ack in copy_pkt_frag_loads[ip_port]:\n if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:\n pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][\n -200:]\n return pkt_frag_loads\n\n\ndef frag_joiner(ack, src_ip_port, load):\n \"\"\"\n Keep a store of previous fragments in an OrderedDict named pkt_frag_loads\n \"\"\"\n global pkt_frag_loads\n for ip_port in pkt_frag_loads:\n if src_ip_port == ip_port:\n if ack in pkt_frag_loads[src_ip_port]:\n old_load = pkt_frag_loads[src_ip_port][ack]\n concat_load = old_load + load\n return OrderedDict([(ack, concat_load)])\n return OrderedDict([(ack, load)])\n",
"step-3": "<mask token>\nlogging.getLogger('scapy.runtime').setLevel(logging.ERROR)\n<mask token>\nconf.verb = 0\n<mask token>\npkt_frag_loads = OrderedDict()\n\n\ndef get_load(pkt):\n ack = str(pkt[TCP].ack)\n seq = str(pkt[TCP].seq)\n src_ip_port = str(pkt[IP].src) + ':' + str(pkt[TCP].sport)\n dst_ip_port = str(pkt[IP].dst) + ':' + str(pkt[TCP].dport)\n load = pkt[Raw].load\n pkt_frag_loads = frag_remover(ack, load)\n pkt_frag_loads[src_ip_port] = frag_joiner(ack, src_ip_port, load)\n full_load = pkt_frag_loads[src_ip_port][ack]\n return full_load\n\n\ndef frag_remover(ack, load):\n \"\"\"\n Keep the FILO OrderedDict of frag loads from getting too large\n 3 points of limit:\n Number of ip_ports < 50\n Number of acks per ip:port < 25\n Number of chars in load < 5000\n \"\"\"\n global pkt_frag_loads\n while len(pkt_frag_loads) > 50:\n pkt_frag_loads.popitem(last=False)\n copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)\n for ip_port in copy_pkt_frag_loads:\n if len(copy_pkt_frag_loads[ip_port]) > 0:\n while len(copy_pkt_frag_loads[ip_port]) > 25:\n pkt_frag_loads[ip_port].popitem(last=False)\n copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)\n for ip_port in copy_pkt_frag_loads:\n for ack in copy_pkt_frag_loads[ip_port]:\n if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:\n pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][\n -200:]\n return pkt_frag_loads\n\n\ndef frag_joiner(ack, src_ip_port, load):\n \"\"\"\n Keep a store of previous fragments in an OrderedDict named pkt_frag_loads\n \"\"\"\n global pkt_frag_loads\n for ip_port in pkt_frag_loads:\n if src_ip_port == ip_port:\n if ack in pkt_frag_loads[src_ip_port]:\n old_load = pkt_frag_loads[src_ip_port][ack]\n concat_load = old_load + load\n return OrderedDict([(ack, concat_load)])\n return OrderedDict([(ack, load)])\n",
"step-4": "import logging\nlogging.getLogger('scapy.runtime').setLevel(logging.ERROR)\nfrom scapy.all import *\nconf.verb = 0\nfrom collections import OrderedDict\npkt_frag_loads = OrderedDict()\n\n\ndef get_load(pkt):\n ack = str(pkt[TCP].ack)\n seq = str(pkt[TCP].seq)\n src_ip_port = str(pkt[IP].src) + ':' + str(pkt[TCP].sport)\n dst_ip_port = str(pkt[IP].dst) + ':' + str(pkt[TCP].dport)\n load = pkt[Raw].load\n pkt_frag_loads = frag_remover(ack, load)\n pkt_frag_loads[src_ip_port] = frag_joiner(ack, src_ip_port, load)\n full_load = pkt_frag_loads[src_ip_port][ack]\n return full_load\n\n\ndef frag_remover(ack, load):\n \"\"\"\n Keep the FILO OrderedDict of frag loads from getting too large\n 3 points of limit:\n Number of ip_ports < 50\n Number of acks per ip:port < 25\n Number of chars in load < 5000\n \"\"\"\n global pkt_frag_loads\n while len(pkt_frag_loads) > 50:\n pkt_frag_loads.popitem(last=False)\n copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)\n for ip_port in copy_pkt_frag_loads:\n if len(copy_pkt_frag_loads[ip_port]) > 0:\n while len(copy_pkt_frag_loads[ip_port]) > 25:\n pkt_frag_loads[ip_port].popitem(last=False)\n copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)\n for ip_port in copy_pkt_frag_loads:\n for ack in copy_pkt_frag_loads[ip_port]:\n if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:\n pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][\n -200:]\n return pkt_frag_loads\n\n\ndef frag_joiner(ack, src_ip_port, load):\n \"\"\"\n Keep a store of previous fragments in an OrderedDict named pkt_frag_loads\n \"\"\"\n global pkt_frag_loads\n for ip_port in pkt_frag_loads:\n if src_ip_port == ip_port:\n if ack in pkt_frag_loads[src_ip_port]:\n old_load = pkt_frag_loads[src_ip_port][ack]\n concat_load = old_load + load\n return OrderedDict([(ack, concat_load)])\n return OrderedDict([(ack, load)])\n",
"step-5": "# dealing with the packet fragments and their reconsttruction \n\nimport logging\n# shut up scapy\nlogging.getLogger(\"scapy.runtime\").setLevel(logging.ERROR)\nfrom scapy.all import *\nconf.verb=0\nfrom collections import OrderedDict\n\n\npkt_frag_loads = OrderedDict()\n\ndef get_load(pkt):\n ack = str(pkt[TCP].ack)\n seq = str(pkt[TCP].seq)\n src_ip_port = str(pkt[IP].src) + ':' + str(pkt[TCP].sport)\n dst_ip_port = str(pkt[IP].dst) + ':' + str(pkt[TCP].dport)\n\n #create full load from load fragments\t\n load = pkt[Raw].load\n pkt_frag_loads = frag_remover(ack, load)\n pkt_frag_loads[src_ip_port] = frag_joiner(ack, src_ip_port, load)\n full_load = pkt_frag_loads[src_ip_port][ack]\n\n return full_load\n\ndef frag_remover(ack, load):\n '''\n Keep the FILO OrderedDict of frag loads from getting too large\n 3 points of limit:\n Number of ip_ports < 50\n Number of acks per ip:port < 25\n Number of chars in load < 5000\n '''\n\n global pkt_frag_loads\n\n # Keep the number of IP:port mappings below 50\n # last=False pops the oldest item rather than the latest\n while len(pkt_frag_loads) > 50:\n pkt_frag_loads.popitem(last=False)\n\n # Loop through a deep copy dict but modify the original dict\n copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)\n for ip_port in copy_pkt_frag_loads:\n if len(copy_pkt_frag_loads[ip_port]) > 0:\n # Keep 25 ack:load's per ip:port\n while len(copy_pkt_frag_loads[ip_port]) > 25:\n pkt_frag_loads[ip_port].popitem(last=False)\n\n # Recopy the new dict to prevent KeyErrors for modifying dict in loop\n copy_pkt_frag_loads = copy.deepcopy(pkt_frag_loads)\n for ip_port in copy_pkt_frag_loads:\n # Keep the load less than 75,000 chars\n for ack in copy_pkt_frag_loads[ip_port]:\n # If load > 5000 chars, just keep the last 200 chars\n if len(copy_pkt_frag_loads[ip_port][ack]) > 5000:\n pkt_frag_loads[ip_port][ack] = pkt_frag_loads[ip_port][ack][-200:]\n return pkt_frag_loads\n\ndef frag_joiner(ack, src_ip_port, load):\n '''\n Keep a store 
of previous fragments in an OrderedDict named pkt_frag_loads\n '''\n\n global pkt_frag_loads\n\n for ip_port in pkt_frag_loads:\n if src_ip_port == ip_port:\n if ack in pkt_frag_loads[src_ip_port]:\n # Make pkt_frag_loads[src_ip_port][ack] = full load\n old_load = pkt_frag_loads[src_ip_port][ack]\n concat_load = old_load + load\n return OrderedDict([(ack, concat_load)])\n\n return OrderedDict([(ack, load)])\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from flask import render_template, request, redirect, url_for, send_file
from flask_app import app
import re
import os
from werkzeug.utils import secure_filename
import numpy as np
import cv2 as cv
from flask_mail import Message, Mail
file_path_file = open('flask_app/file_path.txt', 'r')
vars = file_path_file.readlines()
def get_req_var(var):
result = 0
for s in vars:
s = re.search("((?<=" + var + ">).+)", s)
if s:
result = s[0]
break
return result
image_path = get_req_var("IMAGE_UPLOADS")
app.config["IMAGE_UPLOADS"] = image_path
app.config["ALLOWED_IMAGE_EXTENSIONS"] = ["JPEG", "JPG"]
app.config['MAX_CONTENT_LENGTH'] = 50 * 1024 * 1024
app.config["MAX_IMAGE_FILESIZE"] = 50 * 1024 * 1024
# for mail
app.config['MAIL_SERVER'] = 'smtp.gmail.com'
app.config['MAIL_PORT'] = 465
app.config['MAIL_USERNAME'] = get_req_var("MAIL_USERNAME")
app.config['MAIL_PASSWORD'] = get_req_var("MAIL_PASSWORD")
app.config['MAIL_USE_TLS'] = False
app.config['MAIL_USE_SSL'] = True
# for mail
@app.route('/')
def home():
return render_template('index.html')
def allowed_image_filesize(filesize):
if int(filesize) <= app.config["MAX_IMAGE_FILESIZE"]:
return True
else:
return False
def allowed_image(filename):
# We only want files with a . in the filename
if "." not in filename:
return False
# Split the extension from the filename
ext = filename.rsplit(".", 1)[1]
# Check if the extension is in ALLOWED_IMAGE_EXTENSIONS
if ext.upper() in app.config["ALLOWED_IMAGE_EXTENSIONS"]:
return True
else:
return False
@app.route('/success')
def success(filename):
return render_template('success.html', filename=filename)
@app.route('/display/<filename>')
def display_image(filename):
filename = 'uploaded_images/' + filename
return redirect(url_for('static', filename=filename), code=301)
@app.route("/download/<filename>")
def download_image(filename):
filename = 'static/uploaded_images/' + filename
return send_file(filename, as_attachment=True)
@app.route("/send-mail/<filename>")
def send_mail(filename):
filename = 'static/uploaded_images/' + filename
mail = Mail(app)
mail.init_app(app)
msg = Message(
"Sent from flask_app",
sender=app.config["MAIL_USERNAME"],
recipients=["adithyasuresh201@gmail.com",
"adithyansraj20@gmail.com", app.config["MAIL_USERNAME"]],
)
with app.open_resource(filename) as fp:
msg.attach("image.jpg", "image/jpg", fp.read())
mail.send(msg)
return render_template("mail_sent.html")
@app.route("/upload-image", methods=["GET", "POST"])
def upload_image():
    """Accept a JPEG upload, re-encode it at quality 80 and show a success page.

    Every request first empties the upload directory, so only the most
    recently uploaded image is kept on disk.
    """
    for stale in os.listdir(image_path):
        os.remove(os.path.join(image_path, stale))
        print(f"file {stale}")
    print(image_path)
    if request.method == "POST":
        if request.files:
            # Client-side code stores the size in a cookie; reject oversized
            # files before reading the body.
            if "filesize" in request.cookies:
                if not allowed_image_filesize(request.cookies["filesize"]):
                    return redirect(request.url)

            # Bug fix: request.files["image"] raised KeyError when the form
            # field was missing; .get() lets us redirect gracefully instead.
            image = request.files.get("image")
            if image is None or image.filename == "":
                return redirect(request.url)

            if allowed_image(image.filename):
                filename = secure_filename(image.filename)
                # Decode the raw upload with OpenCV, then re-encode as JPEG
                # at a fixed quality to normalise (and usually shrink) it.
                img = np.fromfile(image, np.uint8)
                img = cv.imdecode(img, cv.IMREAD_COLOR)
                quality = 80
                quality_param = [int(cv.IMWRITE_JPEG_QUALITY), quality]
                img_path = app.config["IMAGE_UPLOADS"] + "/" + filename
                cv.imwrite(img_path, img, quality_param)
                return render_template('success.html', filename=filename)
            else:
                return redirect(request.url)
    return render_template("upload_image.html")
|
normal
|
{
"blob_id": "e30aaf1616a107662924da3671b179a1887974f7",
"index": 2404,
"step-1": "<mask token>\n\n\ndef get_req_var(var):\n result = 0\n for s in vars:\n s = re.search('((?<=' + var + '>).+)', s)\n if s:\n result = s[0]\n break\n return result\n\n\n<mask token>\n\n\n@app.route('/')\ndef home():\n return render_template('index.html')\n\n\n<mask token>\n\n\ndef allowed_image(filename):\n if '.' not in filename:\n return False\n ext = filename.rsplit('.', 1)[1]\n if ext.upper() in app.config['ALLOWED_IMAGE_EXTENSIONS']:\n return True\n else:\n return False\n\n\n<mask token>\n\n\n@app.route('/display/<filename>')\ndef display_image(filename):\n filename = 'uploaded_images/' + filename\n return redirect(url_for('static', filename=filename), code=301)\n\n\n@app.route('/download/<filename>')\ndef download_image(filename):\n filename = 'static/uploaded_images/' + filename\n return send_file(filename, as_attachment=True)\n\n\n@app.route('/send-mail/<filename>')\ndef send_mail(filename):\n filename = 'static/uploaded_images/' + filename\n mail = Mail(app)\n mail.init_app(app)\n msg = Message('Sent from flask_app', sender=app.config['MAIL_USERNAME'],\n recipients=['adithyasuresh201@gmail.com',\n 'adithyansraj20@gmail.com', app.config['MAIL_USERNAME']])\n with app.open_resource(filename) as fp:\n msg.attach('image.jpg', 'image/jpg', fp.read())\n mail.send(msg)\n return render_template('mail_sent.html')\n\n\n@app.route('/upload-image', methods=['GET', 'POST'])\ndef upload_image():\n for f in os.listdir(image_path):\n os.remove(os.path.join(image_path, f))\n print(f'file {f}')\n print(image_path)\n if request.method == 'POST':\n if request.files:\n if 'filesize' in request.cookies:\n if not allowed_image_filesize(request.cookies['filesize']):\n return redirect(request.url)\n image = request.files['image']\n if image.filename == '':\n return redirect(request.url)\n if allowed_image(image.filename):\n filename = secure_filename(image.filename)\n img = np.fromfile(image, np.uint8)\n img = cv.imdecode(img, cv.IMREAD_COLOR)\n quality = 80\n 
quality_param = [int(cv.IMWRITE_JPEG_QUALITY), quality]\n img_path = app.config['IMAGE_UPLOADS'] + '/' + filename\n cv.imwrite(img_path, img, quality_param)\n return render_template('success.html', filename=filename)\n else:\n return redirect(request.url)\n return render_template('upload_image.html')\n",
"step-2": "<mask token>\n\n\ndef get_req_var(var):\n result = 0\n for s in vars:\n s = re.search('((?<=' + var + '>).+)', s)\n if s:\n result = s[0]\n break\n return result\n\n\n<mask token>\n\n\n@app.route('/')\ndef home():\n return render_template('index.html')\n\n\ndef allowed_image_filesize(filesize):\n if int(filesize) <= app.config['MAX_IMAGE_FILESIZE']:\n return True\n else:\n return False\n\n\ndef allowed_image(filename):\n if '.' not in filename:\n return False\n ext = filename.rsplit('.', 1)[1]\n if ext.upper() in app.config['ALLOWED_IMAGE_EXTENSIONS']:\n return True\n else:\n return False\n\n\n<mask token>\n\n\n@app.route('/display/<filename>')\ndef display_image(filename):\n filename = 'uploaded_images/' + filename\n return redirect(url_for('static', filename=filename), code=301)\n\n\n@app.route('/download/<filename>')\ndef download_image(filename):\n filename = 'static/uploaded_images/' + filename\n return send_file(filename, as_attachment=True)\n\n\n@app.route('/send-mail/<filename>')\ndef send_mail(filename):\n filename = 'static/uploaded_images/' + filename\n mail = Mail(app)\n mail.init_app(app)\n msg = Message('Sent from flask_app', sender=app.config['MAIL_USERNAME'],\n recipients=['adithyasuresh201@gmail.com',\n 'adithyansraj20@gmail.com', app.config['MAIL_USERNAME']])\n with app.open_resource(filename) as fp:\n msg.attach('image.jpg', 'image/jpg', fp.read())\n mail.send(msg)\n return render_template('mail_sent.html')\n\n\n@app.route('/upload-image', methods=['GET', 'POST'])\ndef upload_image():\n for f in os.listdir(image_path):\n os.remove(os.path.join(image_path, f))\n print(f'file {f}')\n print(image_path)\n if request.method == 'POST':\n if request.files:\n if 'filesize' in request.cookies:\n if not allowed_image_filesize(request.cookies['filesize']):\n return redirect(request.url)\n image = request.files['image']\n if image.filename == '':\n return redirect(request.url)\n if allowed_image(image.filename):\n filename = 
secure_filename(image.filename)\n img = np.fromfile(image, np.uint8)\n img = cv.imdecode(img, cv.IMREAD_COLOR)\n quality = 80\n quality_param = [int(cv.IMWRITE_JPEG_QUALITY), quality]\n img_path = app.config['IMAGE_UPLOADS'] + '/' + filename\n cv.imwrite(img_path, img, quality_param)\n return render_template('success.html', filename=filename)\n else:\n return redirect(request.url)\n return render_template('upload_image.html')\n",
"step-3": "<mask token>\nfile_path_file = open('flask_app/file_path.txt', 'r')\nvars = file_path_file.readlines()\n\n\ndef get_req_var(var):\n result = 0\n for s in vars:\n s = re.search('((?<=' + var + '>).+)', s)\n if s:\n result = s[0]\n break\n return result\n\n\nimage_path = get_req_var('IMAGE_UPLOADS')\napp.config['IMAGE_UPLOADS'] = image_path\napp.config['ALLOWED_IMAGE_EXTENSIONS'] = ['JPEG', 'JPG']\napp.config['MAX_CONTENT_LENGTH'] = 50 * 1024 * 1024\napp.config['MAX_IMAGE_FILESIZE'] = 50 * 1024 * 1024\napp.config['MAIL_SERVER'] = 'smtp.gmail.com'\napp.config['MAIL_PORT'] = 465\napp.config['MAIL_USERNAME'] = get_req_var('MAIL_USERNAME')\napp.config['MAIL_PASSWORD'] = get_req_var('MAIL_PASSWORD')\napp.config['MAIL_USE_TLS'] = False\napp.config['MAIL_USE_SSL'] = True\n\n\n@app.route('/')\ndef home():\n return render_template('index.html')\n\n\ndef allowed_image_filesize(filesize):\n if int(filesize) <= app.config['MAX_IMAGE_FILESIZE']:\n return True\n else:\n return False\n\n\ndef allowed_image(filename):\n if '.' 
not in filename:\n return False\n ext = filename.rsplit('.', 1)[1]\n if ext.upper() in app.config['ALLOWED_IMAGE_EXTENSIONS']:\n return True\n else:\n return False\n\n\n@app.route('/success')\ndef success(filename):\n return render_template('success.html', filename=filename)\n\n\n@app.route('/display/<filename>')\ndef display_image(filename):\n filename = 'uploaded_images/' + filename\n return redirect(url_for('static', filename=filename), code=301)\n\n\n@app.route('/download/<filename>')\ndef download_image(filename):\n filename = 'static/uploaded_images/' + filename\n return send_file(filename, as_attachment=True)\n\n\n@app.route('/send-mail/<filename>')\ndef send_mail(filename):\n filename = 'static/uploaded_images/' + filename\n mail = Mail(app)\n mail.init_app(app)\n msg = Message('Sent from flask_app', sender=app.config['MAIL_USERNAME'],\n recipients=['adithyasuresh201@gmail.com',\n 'adithyansraj20@gmail.com', app.config['MAIL_USERNAME']])\n with app.open_resource(filename) as fp:\n msg.attach('image.jpg', 'image/jpg', fp.read())\n mail.send(msg)\n return render_template('mail_sent.html')\n\n\n@app.route('/upload-image', methods=['GET', 'POST'])\ndef upload_image():\n for f in os.listdir(image_path):\n os.remove(os.path.join(image_path, f))\n print(f'file {f}')\n print(image_path)\n if request.method == 'POST':\n if request.files:\n if 'filesize' in request.cookies:\n if not allowed_image_filesize(request.cookies['filesize']):\n return redirect(request.url)\n image = request.files['image']\n if image.filename == '':\n return redirect(request.url)\n if allowed_image(image.filename):\n filename = secure_filename(image.filename)\n img = np.fromfile(image, np.uint8)\n img = cv.imdecode(img, cv.IMREAD_COLOR)\n quality = 80\n quality_param = [int(cv.IMWRITE_JPEG_QUALITY), quality]\n img_path = app.config['IMAGE_UPLOADS'] + '/' + filename\n cv.imwrite(img_path, img, quality_param)\n return render_template('success.html', filename=filename)\n else:\n return 
redirect(request.url)\n return render_template('upload_image.html')\n",
"step-4": "from flask import render_template, request, redirect, url_for, send_file\nfrom flask_app import app\nimport re\nimport os\nfrom werkzeug.utils import secure_filename\nimport numpy as np\nimport cv2 as cv\nfrom flask_mail import Message, Mail\nfile_path_file = open('flask_app/file_path.txt', 'r')\nvars = file_path_file.readlines()\n\n\ndef get_req_var(var):\n result = 0\n for s in vars:\n s = re.search('((?<=' + var + '>).+)', s)\n if s:\n result = s[0]\n break\n return result\n\n\nimage_path = get_req_var('IMAGE_UPLOADS')\napp.config['IMAGE_UPLOADS'] = image_path\napp.config['ALLOWED_IMAGE_EXTENSIONS'] = ['JPEG', 'JPG']\napp.config['MAX_CONTENT_LENGTH'] = 50 * 1024 * 1024\napp.config['MAX_IMAGE_FILESIZE'] = 50 * 1024 * 1024\napp.config['MAIL_SERVER'] = 'smtp.gmail.com'\napp.config['MAIL_PORT'] = 465\napp.config['MAIL_USERNAME'] = get_req_var('MAIL_USERNAME')\napp.config['MAIL_PASSWORD'] = get_req_var('MAIL_PASSWORD')\napp.config['MAIL_USE_TLS'] = False\napp.config['MAIL_USE_SSL'] = True\n\n\n@app.route('/')\ndef home():\n return render_template('index.html')\n\n\ndef allowed_image_filesize(filesize):\n if int(filesize) <= app.config['MAX_IMAGE_FILESIZE']:\n return True\n else:\n return False\n\n\ndef allowed_image(filename):\n if '.' 
not in filename:\n return False\n ext = filename.rsplit('.', 1)[1]\n if ext.upper() in app.config['ALLOWED_IMAGE_EXTENSIONS']:\n return True\n else:\n return False\n\n\n@app.route('/success')\ndef success(filename):\n return render_template('success.html', filename=filename)\n\n\n@app.route('/display/<filename>')\ndef display_image(filename):\n filename = 'uploaded_images/' + filename\n return redirect(url_for('static', filename=filename), code=301)\n\n\n@app.route('/download/<filename>')\ndef download_image(filename):\n filename = 'static/uploaded_images/' + filename\n return send_file(filename, as_attachment=True)\n\n\n@app.route('/send-mail/<filename>')\ndef send_mail(filename):\n filename = 'static/uploaded_images/' + filename\n mail = Mail(app)\n mail.init_app(app)\n msg = Message('Sent from flask_app', sender=app.config['MAIL_USERNAME'],\n recipients=['adithyasuresh201@gmail.com',\n 'adithyansraj20@gmail.com', app.config['MAIL_USERNAME']])\n with app.open_resource(filename) as fp:\n msg.attach('image.jpg', 'image/jpg', fp.read())\n mail.send(msg)\n return render_template('mail_sent.html')\n\n\n@app.route('/upload-image', methods=['GET', 'POST'])\ndef upload_image():\n for f in os.listdir(image_path):\n os.remove(os.path.join(image_path, f))\n print(f'file {f}')\n print(image_path)\n if request.method == 'POST':\n if request.files:\n if 'filesize' in request.cookies:\n if not allowed_image_filesize(request.cookies['filesize']):\n return redirect(request.url)\n image = request.files['image']\n if image.filename == '':\n return redirect(request.url)\n if allowed_image(image.filename):\n filename = secure_filename(image.filename)\n img = np.fromfile(image, np.uint8)\n img = cv.imdecode(img, cv.IMREAD_COLOR)\n quality = 80\n quality_param = [int(cv.IMWRITE_JPEG_QUALITY), quality]\n img_path = app.config['IMAGE_UPLOADS'] + '/' + filename\n cv.imwrite(img_path, img, quality_param)\n return render_template('success.html', filename=filename)\n else:\n return 
redirect(request.url)\n return render_template('upload_image.html')\n",
"step-5": "from flask import render_template, request, redirect, url_for, send_file\nfrom flask_app import app\nimport re\nimport os\nfrom werkzeug.utils import secure_filename\nimport numpy as np\nimport cv2 as cv\nfrom flask_mail import Message, Mail\n\nfile_path_file = open('flask_app/file_path.txt', 'r')\nvars = file_path_file.readlines()\n\n\ndef get_req_var(var):\n result = 0\n for s in vars:\n s = re.search(\"((?<=\" + var + \">).+)\", s)\n if s:\n result = s[0]\n break\n return result\n\n\nimage_path = get_req_var(\"IMAGE_UPLOADS\")\napp.config[\"IMAGE_UPLOADS\"] = image_path\napp.config[\"ALLOWED_IMAGE_EXTENSIONS\"] = [\"JPEG\", \"JPG\"]\napp.config['MAX_CONTENT_LENGTH'] = 50 * 1024 * 1024\napp.config[\"MAX_IMAGE_FILESIZE\"] = 50 * 1024 * 1024\n\n# for mail\napp.config['MAIL_SERVER'] = 'smtp.gmail.com'\napp.config['MAIL_PORT'] = 465\napp.config['MAIL_USERNAME'] = get_req_var(\"MAIL_USERNAME\")\napp.config['MAIL_PASSWORD'] = get_req_var(\"MAIL_PASSWORD\")\napp.config['MAIL_USE_TLS'] = False\napp.config['MAIL_USE_SSL'] = True\n# for mail\n\n\n@app.route('/')\ndef home():\n return render_template('index.html')\n\n\ndef allowed_image_filesize(filesize):\n if int(filesize) <= app.config[\"MAX_IMAGE_FILESIZE\"]:\n return True\n else:\n return False\n\n\ndef allowed_image(filename):\n # We only want files with a . 
in the filename\n if \".\" not in filename:\n return False\n # Split the extension from the filename\n ext = filename.rsplit(\".\", 1)[1]\n # Check if the extension is in ALLOWED_IMAGE_EXTENSIONS\n if ext.upper() in app.config[\"ALLOWED_IMAGE_EXTENSIONS\"]:\n return True\n else:\n return False\n\n\n@app.route('/success')\ndef success(filename):\n return render_template('success.html', filename=filename)\n\n\n@app.route('/display/<filename>')\ndef display_image(filename):\n filename = 'uploaded_images/' + filename\n return redirect(url_for('static', filename=filename), code=301)\n\n\n@app.route(\"/download/<filename>\")\ndef download_image(filename):\n filename = 'static/uploaded_images/' + filename\n return send_file(filename, as_attachment=True)\n\n\n@app.route(\"/send-mail/<filename>\")\ndef send_mail(filename):\n filename = 'static/uploaded_images/' + filename\n mail = Mail(app)\n mail.init_app(app)\n msg = Message(\n \"Sent from flask_app\",\n sender=app.config[\"MAIL_USERNAME\"],\n recipients=[\"adithyasuresh201@gmail.com\",\n \"adithyansraj20@gmail.com\", app.config[\"MAIL_USERNAME\"]],\n )\n with app.open_resource(filename) as fp:\n msg.attach(\"image.jpg\", \"image/jpg\", fp.read())\n mail.send(msg)\n return render_template(\"mail_sent.html\")\n\n\n@app.route(\"/upload-image\", methods=[\"GET\", \"POST\"])\ndef upload_image():\n # cwd = os.path.join(os.getcwd(), image_path)\n # print(cwd)\n for f in os.listdir(image_path):\n os.remove(os.path.join(image_path, f))\n print(f\"file {f}\")\n print(image_path)\n if request.method == \"POST\":\n if request.files:\n if \"filesize\" in request.cookies:\n if not allowed_image_filesize(request.cookies[\"filesize\"]):\n return redirect(request.url)\n\n image = request.files[\"image\"]\n\n if image.filename == \"\":\n return redirect(request.url)\n\n if allowed_image(image.filename):\n filename = secure_filename(image.filename)\n img = np.fromfile(image, np.uint8)\n img = cv.imdecode(img, cv.IMREAD_COLOR)\n quality = 
80\n quality_param = [int(cv.IMWRITE_JPEG_QUALITY), quality]\n img_path = app.config[\"IMAGE_UPLOADS\"] + \"/\" + filename\n cv.imwrite(img_path, img, quality_param)\n return render_template('success.html', filename=filename)\n\n else:\n return redirect(request.url)\n return render_template(\"upload_image.html\")\n",
"step-ids": [
7,
8,
10,
11,
12
]
}
|
[
7,
8,
10,
11,
12
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def get_value(li, row, column):
    """Safe 2-D lookup: li[row][column], or 0 when the cell is outside the grid."""
    # Negative indices would wrap around in Python, so treat them as misses.
    if row >= 0 and column >= 0:
        try:
            return li[row][column]
        except IndexError:
            return 0
    return 0
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def get_value(li, row, column):
    """Return li[row][column], or 0 if (row, column) falls outside the grid."""
    # Reject negative indices explicitly: Python would otherwise wrap them
    # around to the end of the list instead of reporting a miss.
    if row < 0 or column < 0:
        return 0
    try:
        return li[row][column]
    except IndexError:
        # A row or column beyond the grid bounds counts as "no cell" -> 0.
        return 0
<|reserved_special_token_0|>
# NOTE(review): `n` and `results` are defined earlier in this (partially
# masked) variant — presumably n = int(input()) and results = {}; confirm.
for asdf in range(n):
    table = []
    title, rows, columns = input().split()
    rows = int(rows)
    columns = int(columns)
    for r in range(rows):
        table.append([int(x) for x in input().split()])
    # Order cell indices by ascending cell value so each cell is processed
    # only after every strictly smaller cell.
    flattened = [j for sub in table for j in sub]
    sort = sorted(range(len(flattened)), key=lambda k: flattened[k])
    distance = [[(0) for i in range(columns)] for j in range(rows)]
    maxdist = 0
    for i in sort:
        r = i // columns
        c = i % columns
        # w/x/y/z zero out the up/down/left/right neighbour when it holds
        # the same value as the current cell (equal cells break the chain).
        w = 1
        x = 1
        y = 1
        z = 1
        if get_value(table, r, c) == get_value(table, r - 1, c):
            w = 0
        if get_value(table, r, c) == get_value(table, r + 1, c):
            x = 0
        if get_value(table, r, c) == get_value(table, r, c - 1):
            y = 0
        if get_value(table, r, c) == get_value(table, r, c + 1):
            z = 0
        # DP: 1 + best chain length among already-processed (smaller-valued)
        # unequal neighbours; larger-valued neighbours still hold distance 0.
        distance[r][c] = max(max(get_value(distance, r - 1, c) * w,
            get_value(distance, r + 1, c) * x), max(get_value(distance, r,
            c - 1) * y, get_value(distance, r, c + 1) * z)) + 1
        if distance[r][c] > maxdist:
            maxdist = distance[r][c]
    results[title] = maxdist
for key in results:
    print(key + ': ' + str(results[key]))
<|reserved_special_token_1|>
def get_value(li, row, column):
    # Bounds-checked 2-D lookup: returns li[row][column], or 0 when the cell
    # lies outside the grid (negative indices would otherwise wrap around).
    if row < 0 or column < 0:
        return 0
    try:
        return li[row][column]
    except IndexError:
        return 0
# Read `n` test cases; `results` maps each table's title to its answer.
n = int(input())
results = {}
for asdf in range(n):
    table = []
    title, rows, columns = input().split()
    rows = int(rows)
    columns = int(columns)
    for r in range(rows):
        table.append([int(x) for x in input().split()])
    # Order cell indices by ascending cell value so each cell is processed
    # only after every strictly smaller cell.
    flattened = [j for sub in table for j in sub]
    sort = sorted(range(len(flattened)), key=lambda k: flattened[k])
    distance = [[(0) for i in range(columns)] for j in range(rows)]
    maxdist = 0
    for i in sort:
        r = i // columns
        c = i % columns
        # w/x/y/z zero out the up/down/left/right neighbour when it holds the
        # same value as the current cell (equal cells break the chain).
        w = 1
        x = 1
        y = 1
        z = 1
        if get_value(table, r, c) == get_value(table, r - 1, c):
            w = 0
        if get_value(table, r, c) == get_value(table, r + 1, c):
            x = 0
        if get_value(table, r, c) == get_value(table, r, c - 1):
            y = 0
        if get_value(table, r, c) == get_value(table, r, c + 1):
            z = 0
        # DP: 1 + best chain length among already-processed (smaller-valued)
        # unequal neighbours; larger-valued neighbours still hold distance 0.
        distance[r][c] = max(max(get_value(distance, r - 1, c) * w,
            get_value(distance, r + 1, c) * x), max(get_value(distance, r,
            c - 1) * y, get_value(distance, r, c + 1) * z)) + 1
        if distance[r][c] > maxdist:
            maxdist = distance[r][c]
    results[title] = maxdist
for key in results:
    print(key + ': ' + str(results[key]))
<|reserved_special_token_1|>
def get_value(li, row, column):
    # Bounds-checked 2-D lookup: 0 for any cell outside the grid; the explicit
    # negative check prevents Python's index wrap-around.
    if row < 0 or column < 0:
        return 0
    try:
        return li[row][column]
    except IndexError:
        return 0
# For each of n test cases, compute the longest chain of adjacent cells with
# strictly differing values via value-ordered DP; results maps title -> answer.
n = int(input())
results = {}
for asdf in range(n):
    table = []
    title, rows, columns = input().split()
    rows = int(rows)
    columns = int(columns)

    for r in range(rows):
        table.append([int(x) for x in input().split()])

    # Order cell indices by ascending cell value so smaller cells are
    # processed (and assigned distances) before larger ones.
    flattened = [j for sub in table for j in sub]

    sort = sorted(range(len(flattened)), key=lambda k: flattened[k])

    distance = [[0 for i in range(columns)] for j in range(rows)]
    #print(sort)
    maxdist = 0
    for i in sort:
        r = i//columns
        c = i % columns
        #print(r)
        #print(c)
        # w/x/y/z mask out a neighbour that holds the same value as the
        # current cell (equal neighbours may not extend the chain).
        w = 1
        x = 1
        y = 1
        z = 1
        if get_value(table, r, c) == get_value(table, r-1, c):
            w = 0
        if get_value(table, r, c) == get_value(table, r+1, c):
            x = 0
        if get_value(table, r, c) == get_value(table, r, c-1):
            y = 0
        if get_value(table, r, c) == get_value(table, r, c+1):
            z = 0
        #print(distance)
        # DP step: 1 + best distance among already-processed unequal
        # neighbours; unprocessed (larger) neighbours still hold 0.
        distance[r][c] = max(max(get_value(distance, r-1, c)*w, get_value(distance, r+1, c)*x),
                             max(get_value(distance, r, c-1)*y, get_value(distance, r, c+1)*z)) + 1
        if distance[r][c] > maxdist:
            maxdist = distance[r][c]
    results[title] = maxdist

for key in results:
    print(key + ": " + str(results[key]))
|
flexible
|
{
"blob_id": "badbfdbdeb8b4fd40b1c44bf7dcff6457a0c8795",
"index": 7162,
"step-1": "<mask token>\n",
"step-2": "def get_value(li, row, column):\n if row < 0 or column < 0:\n return 0\n try:\n return li[row][column]\n except IndexError:\n return 0\n\n\n<mask token>\n",
"step-3": "def get_value(li, row, column):\n if row < 0 or column < 0:\n return 0\n try:\n return li[row][column]\n except IndexError:\n return 0\n\n\n<mask token>\nfor asdf in range(n):\n table = []\n title, rows, columns = input().split()\n rows = int(rows)\n columns = int(columns)\n for r in range(rows):\n table.append([int(x) for x in input().split()])\n flattened = [j for sub in table for j in sub]\n sort = sorted(range(len(flattened)), key=lambda k: flattened[k])\n distance = [[(0) for i in range(columns)] for j in range(rows)]\n maxdist = 0\n for i in sort:\n r = i // columns\n c = i % columns\n w = 1\n x = 1\n y = 1\n z = 1\n if get_value(table, r, c) == get_value(table, r - 1, c):\n w = 0\n if get_value(table, r, c) == get_value(table, r + 1, c):\n x = 0\n if get_value(table, r, c) == get_value(table, r, c - 1):\n y = 0\n if get_value(table, r, c) == get_value(table, r, c + 1):\n z = 0\n distance[r][c] = max(max(get_value(distance, r - 1, c) * w, \n get_value(distance, r + 1, c) * x), max(get_value(distance, r, \n c - 1) * y, get_value(distance, r, c + 1) * z)) + 1\n if distance[r][c] > maxdist:\n maxdist = distance[r][c]\n results[title] = maxdist\nfor key in results:\n print(key + ': ' + str(results[key]))\n",
"step-4": "def get_value(li, row, column):\n if row < 0 or column < 0:\n return 0\n try:\n return li[row][column]\n except IndexError:\n return 0\n\n\nn = int(input())\nresults = {}\nfor asdf in range(n):\n table = []\n title, rows, columns = input().split()\n rows = int(rows)\n columns = int(columns)\n for r in range(rows):\n table.append([int(x) for x in input().split()])\n flattened = [j for sub in table for j in sub]\n sort = sorted(range(len(flattened)), key=lambda k: flattened[k])\n distance = [[(0) for i in range(columns)] for j in range(rows)]\n maxdist = 0\n for i in sort:\n r = i // columns\n c = i % columns\n w = 1\n x = 1\n y = 1\n z = 1\n if get_value(table, r, c) == get_value(table, r - 1, c):\n w = 0\n if get_value(table, r, c) == get_value(table, r + 1, c):\n x = 0\n if get_value(table, r, c) == get_value(table, r, c - 1):\n y = 0\n if get_value(table, r, c) == get_value(table, r, c + 1):\n z = 0\n distance[r][c] = max(max(get_value(distance, r - 1, c) * w, \n get_value(distance, r + 1, c) * x), max(get_value(distance, r, \n c - 1) * y, get_value(distance, r, c + 1) * z)) + 1\n if distance[r][c] > maxdist:\n maxdist = distance[r][c]\n results[title] = maxdist\nfor key in results:\n print(key + ': ' + str(results[key]))\n",
"step-5": "def get_value(li, row, column):\r\n if row < 0 or column < 0:\r\n return 0\r\n try:\r\n return li[row][column]\r\n except IndexError:\r\n return 0\r\n\r\n\r\nn = int(input())\r\nresults = {}\r\nfor asdf in range(n):\r\n table = []\r\n title, rows, columns = input().split()\r\n rows = int(rows)\r\n columns = int(columns)\r\n\r\n for r in range(rows):\r\n table.append([int(x) for x in input().split()])\r\n\r\n flattened = [j for sub in table for j in sub]\r\n\r\n sort = sorted(range(len(flattened)), key=lambda k: flattened[k])\r\n\r\n distance = [[0 for i in range(columns)] for j in range(rows)]\r\n #print(sort)\r\n maxdist = 0\r\n for i in sort:\r\n r = i//columns\r\n c = i % columns\r\n #print(r)\r\n #print(c)\r\n w = 1\r\n x = 1\r\n y = 1\r\n z = 1\r\n if get_value(table, r, c) == get_value(table, r-1, c):\r\n w = 0\r\n if get_value(table, r, c) == get_value(table, r+1, c):\r\n x = 0\r\n if get_value(table, r, c) == get_value(table, r, c-1):\r\n y = 0\r\n if get_value(table, r, c) == get_value(table, r, c+1):\r\n z = 0\r\n #print(distance)\r\n distance[r][c] = max(max(get_value(distance, r-1, c)*w, get_value(distance, r+1, c)*x),\r\n max(get_value(distance, r, c-1)*y, get_value(distance, r, c+1)*z)) + 1\r\n if distance[r][c] > maxdist:\r\n maxdist = distance[r][c]\r\n results[title] = maxdist\r\n\r\nfor key in results:\r\n print(key + \": \" + str(results[key])) \r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class BaseStrategy(BaseConsumer):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
    @abstractmethod
    def calculate_signals(self):
        """Provide the mechanism to calculate a list of signals"""
        # Abstract hook: concrete strategies must override this to emit
        # Signal events each heartbeat; the base implementation only raises.
        raise NotImplementedError('Should implement calculate_signals()\n' +
            "By calling this method to calculate 'Signal' Events")
<|reserved_special_token_0|>
<|reserved_special_token_0|>
    def on_hard_stop(self, symbol):
        # Hook called when *symbol* hits its hard drawdown stop; no-op by default.
        pass

    def on_rebalance(self, symbol):
        # Hook called when *symbol* is rebalanced; no-op by default.
        pass

    def has_position(self, symbol):
        # True when the tracked Position object for *symbol* reports an open position.
        return self.pos[symbol].has_position

    def has_open_orders(self, symbol):
        # True when *symbol* has orders submitted but not yet filled/cancelled.
        return self.pos[symbol].has_open_orders

    def has_long(self, symbol):
        # True when the current position in *symbol* is long.
        return self.pos[symbol].has_long
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@property
def total_cost(self):
return sum(pos.cost for pos in self.pos.values())
@property
def total_bp(self):
if self.fixed_allocation:
return self.allocation
else:
return self.nav
<|reserved_special_token_0|>
    def start(self):
        """Block until the consumer is RUNNING, then kick off the event loop.

        Builds a tqdm progress bar sized to the expected tick count
        (business days in [start_dt, end_dt] times ticks per day), then
        publishes 'warmup' followed by 'next' to begin signal calculation.
        """
        # Poll every 2s until the messaging layer reports RUNNING.
        while self.status != 'RUNNING':
            time.sleep(2)
        self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,
            self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(
            np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.
            value))
        logger.info('Warming up Strategy')
        self.basic_publish('warmup', sender=self.id)
        logger.info('Really Starting up calculating Signals')
        self.basic_publish('next', sender=self.id)
    def on_ack_reg_feed(self, oid, body):
        # Data feed acknowledged our registration; mark that dependency satisfied.
        self.required['feed'] = True

    def on_ack_reg_exe(self, oid, body):
        # Execution handler acknowledged our registration; mark it satisfied.
        self.required['exe'] = True
<|reserved_special_token_0|>
<|reserved_special_token_0|>
    def on_eod(self, oid, body):
        """Handle the End-of-Data event: finish the progress bar, deregister
        from the data feed and execution handler, then stop this consumer."""
        # Jump the bar to 100% in case the estimate overshot the real tick count.
        self._pbar.update(self._pbar.total - self._pbar.n)
        self._pbar.close()
        self.basic_publish('dereg-feed', sender=self.id)
        self.basic_publish('dereg-exe', sender=self.id)
        self._stop()
    def on_fill(self, oid, body):
        """Upon filled order
        - update strategy's position, spot position reversion
        - update holding time
        - update position quantity
        Parameter:
        ----------
        fill (Fill Event)
        """
        logger.info('Consuming filled Order')
        fill = body['fill']
        # Let the per-symbol Position update its own quantity/holding state.
        self.pos[fill.symbol].on_fill(fill)
        Q = fill.quantity
        K, D, C = fill.fill_cost, fill.fill_type, fill.commission
        # D.value is presumably the trade direction sign (+1 buy / -1 sell),
        # so `cost` is signed cash outflow — TODO confirm against FillEvent.
        cost = D.value * K * Q
        self.commission += C
        # Cash decreases by the signed trade cost plus commission.
        self.cash -= cost + C
    def on_market(self, oid, body):
        """On market event
        - update information for each existing position
        - generate orders for rebalancing()
        - the strategy will calculate signal(s)
        - and publish them to the exchange for processing
        - then a "done" will be published to indicate
        the strategy is finished doing everything this heartbeat
        - so then the risk manager will collect all signals
        before sending orders for execution
        Parameter:
        ----------
        ticks (Market Event)
        """
        # Ignore ticks published at a different frequency than this strategy's.
        if body['freq'] != self.freq:
            return
        ticks = body['ticks']
        self._update_data(ticks)
        # During warmup we only accumulate data; no signals are generated.
        if self.t >= self.warmup:
            self._calculate_signals()
        equity = self.total_bp
        # NOTE(review): 'avaliable_bp' (sic) is defined outside this view;
        # name kept as-is to match the attribute elsewhere in the class.
        bp = copy(self.avaliable_bp)
        for S, pos in self.pos.items():
            for order, lvl in pos.generate_orders(equity):
                # Deduct the buying power each order consumed before sizing
                # the next one, so orders cannot collectively overspend.
                used_bp = self.on_order(order, lvl, bp)
                bp -= used_bp
        self._pbar.update(1)
        if self.t >= self.warmup:
            self._save_positions()
<|reserved_special_token_0|>
    def generate_signal(self, symbol, signal_type, **kws):
        """Generate a signal that will stored at Strategy level
        - Then all signals will be batch processed
        Parameter
        ---------
        symbol: str, the target symbol for the signal
        signal_type: {LONG, SHORT, EXIT}
        kws: additional arguments passes to the SignalEvent class
        - especially the `strength` for percentage of portfolio
        - if not passed, the default `pct_portfolio` will be used
        """
        # Delegates to the per-symbol Position; lvl='normal' presumably marks
        # user-generated signals vs internal rebalance/stop ones — confirm.
        self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)
<|reserved_special_token_0|>
    def _update_data(self, ticks):
        """Update the existing state of strategies
        - based on given market observation
        Note:
        -----
        1. It will always be called before calculating the new signal
        2. this will be called no matter strategy is in warmup period or not
        because the warmup period is used for gathering necessary data
        """
        self.ticks = ticks
        # Advance the internal tick counter (compared against self.warmup).
        self.t += 1
        # Push each symbol's slice of the tick to its Position tracker.
        for S, pos in self.pos.items():
            pos._update_data(ticks[S])
        # Finally give the concrete subclass a chance to update its own state.
        self.update_data(ticks)
def _save_positions(self):
output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':
self.cash, 'commission': self.commission, 'nav': self.nav}
for k, v in self.pos.items():
output[str(k) + '_quantity'] = v.quantity
output[str(k) + '_mv'] = v.mv
self._hist.append(output)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BaseStrategy(BaseConsumer):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@abstractmethod
def calculate_signals(self):
"""Provide the mechanism to calculate a list of signals"""
raise NotImplementedError('Should implement calculate_signals()\n' +
"By calling this method to calculate 'Signal' Events")
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def on_hard_stop(self, symbol):
pass
def on_rebalance(self, symbol):
pass
def has_position(self, symbol):
return self.pos[symbol].has_position
def has_open_orders(self, symbol):
return self.pos[symbol].has_open_orders
def has_long(self, symbol):
return self.pos[symbol].has_long
<|reserved_special_token_0|>
    @property
    def nav(self):
        """Net Account Value / Net Liquidating Value"""
        # Sum of every position's market value plus uninvested cash.
        return sum(pos.mv for pos in self.pos.values()) + self.cash
@property
def total_cost(self):
return sum(pos.cost for pos in self.pos.values())
@property
def total_bp(self):
if self.fixed_allocation:
return self.allocation
else:
return self.nav
<|reserved_special_token_0|>
def start(self):
while self.status != 'RUNNING':
time.sleep(2)
self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,
self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(
np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.
value))
logger.info('Warming up Strategy')
self.basic_publish('warmup', sender=self.id)
logger.info('Really Starting up calculating Signals')
self.basic_publish('next', sender=self.id)
def on_ack_reg_feed(self, oid, body):
self.required['feed'] = True
def on_ack_reg_exe(self, oid, body):
self.required['exe'] = True
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def on_eod(self, oid, body):
"""Handlering End of Data Event"""
self._pbar.update(self._pbar.total - self._pbar.n)
self._pbar.close()
self.basic_publish('dereg-feed', sender=self.id)
self.basic_publish('dereg-exe', sender=self.id)
self._stop()
def on_fill(self, oid, body):
"""Upon filled order
- update strategy's position, spot position reversion
- update holding time
- update position quantity
Parameter:
----------
fill (Fill Event)
"""
logger.info('Consuming filled Order')
fill = body['fill']
self.pos[fill.symbol].on_fill(fill)
Q = fill.quantity
K, D, C = fill.fill_cost, fill.fill_type, fill.commission
cost = D.value * K * Q
self.commission += C
self.cash -= cost + C
def on_market(self, oid, body):
"""On market event
- update information for each existing poistion
- generate orders for rebalancing()
- the strategy will calculate signal(s)
- and publish them to the exchange for processing
- then a "done" will be published to indicate
the strategy is finish doing everything this heartbeat
- so then the risk manager will collect all signals
before sending order for execution
Parameter:
----------
ticks (Market Event)
"""
if body['freq'] != self.freq:
return
ticks = body['ticks']
self._update_data(ticks)
if self.t >= self.warmup:
self._calculate_signals()
equity = self.total_bp
bp = copy(self.avaliable_bp)
for S, pos in self.pos.items():
for order, lvl in pos.generate_orders(equity):
used_bp = self.on_order(order, lvl, bp)
bp -= used_bp
self._pbar.update(1)
if self.t >= self.warmup:
self._save_positions()
<|reserved_special_token_0|>
def generate_signal(self, symbol, signal_type, **kws):
"""Generate a signal that will stored at Strategy level
- Then all signals will be batch processed
Parameter
---------
symbol: str, the target symbol for the signal
signal_type: {LONG, SHORT, EXIT}
kws: additional arguments passes to the SignalEvent class
- especially the `strength` for percentage of portfolio
- if not passed, the default `pct_portfolio` will be used
"""
self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)
<|reserved_special_token_0|>
def _update_data(self, ticks):
"""Update the existing state of strategies
- based on given market observation
Note:
-----
1. It will always be called before calculating the new signal
2. this will be called no matter strategy is in warmup period or not
becuase warmup period is used for gathering nessceary data
"""
self.ticks = ticks
self.t += 1
for S, pos in self.pos.items():
pos._update_data(ticks[S])
self.update_data(ticks)
def _save_positions(self):
output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':
self.cash, 'commission': self.commission, 'nav': self.nav}
for k, v in self.pos.items():
output[str(k) + '_quantity'] = v.quantity
output[str(k) + '_mv'] = v.mv
self._hist.append(output)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BaseStrategy(BaseConsumer):
<|reserved_special_token_0|>
def __init__(self, symbol_list, allocation, freq, positions, start, end,
warmup=0, fixed_allocation=True, batch_size=10000):
"""
Parameter:
----------
symbol_list (list): A list of Contract perm_tick (for data)
allocation (float): Dollar amount that this strategy is able to use
freq (conf.FREQ): Data Frequency type for this strategy (for data)
positions (dict of dict):
A dictionary with perm_tick and a dictionary of arguments
- pct_portfolio (float): percentage of the allocation
- rebalance (int): # of days to rebalance to pct_portfolio
- hard_stop (float): hard drawdown gate to close position
warmup (int): # of days to warmup the strategy
env_type (string): {'BACKTEST', 'PAPPER', 'LIVE'}
which environment to run the startegy
start, end (datetime):
Only for backtesting to specificy the range of data to test
"""
n = ceil(freq.one_day)
num_pos = len(positions)
self.symbol_list = symbol_list
self.freq = freq
self.warmup = warmup * n
if start:
self.start_dt = clean_timestamp(start)
if end:
self.end_dt = clean_timestamp(end) + pd.DateOffset(seconds=-1,
days=1)
self.allocation = allocation
self.cash = allocation
self.commission = 0
self.fixed_allocation = fixed_allocation
pos_dict = {}
for perm_tick, v in positions.items():
if perm_tick not in self.symbol_list:
self.symbol_list.append(perm_tick)
pos = Position(perm_tick, pct_portfolio=v.get('pct_portfolio',
1 / num_pos), rebalance=v.get('rebalance', 0) * n,
hard_stop=v.get('hard_stop', 0))
pos_dict[perm_tick] = pos
self.pos = pos_dict
self.t = 0
self._hist = []
self.batch_size = batch_size
super().__init__(comp_type='STGY', required=['feed', 'exe'])
@abstractmethod
def calculate_signals(self):
"""Provide the mechanism to calculate a list of signals"""
raise NotImplementedError('Should implement calculate_signals()\n' +
"By calling this method to calculate 'Signal' Events")
def subscriptions(self):
return [('ack-reg-feed', self.id, self.on_ack_reg_feed), (
'ack-dereg_feed', self.id, self.on_ack_dereg_feed), (
'ack-reg-exe', self.id, self.on_ack_reg_exe), ('ack-dereg-exe',
self.id, self.on_ack_dereg_exe), ('eod', self.id, self.on_eod),
('tick', self.id, self.on_market), ('fill', self.id, self.on_fill)]
def update_data(self, ticks):
pass
def on_hard_stop(self, symbol):
pass
def on_rebalance(self, symbol):
pass
def has_position(self, symbol):
return self.pos[symbol].has_position
def has_open_orders(self, symbol):
return self.pos[symbol].has_open_orders
def has_long(self, symbol):
return self.pos[symbol].has_long
def has_short(self, symbol):
return self.pos[symbol].has_short
@property
def nav(self):
"""Net Account Value / Net Liquidating Value"""
return sum(pos.mv for pos in self.pos.values()) + self.cash
@property
def total_cost(self):
return sum(pos.cost for pos in self.pos.values())
@property
def total_bp(self):
if self.fixed_allocation:
return self.allocation
else:
return self.nav
@property
def avaliable_bp(self):
return self.total_bp - self.total_cost
def start(self):
while self.status != 'RUNNING':
time.sleep(2)
self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,
self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(
np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.
value))
logger.info('Warming up Strategy')
self.basic_publish('warmup', sender=self.id)
logger.info('Really Starting up calculating Signals')
self.basic_publish('next', sender=self.id)
def on_ack_reg_feed(self, oid, body):
self.required['feed'] = True
def on_ack_reg_exe(self, oid, body):
self.required['exe'] = True
def on_ack_dereg_feed(self, oid, body):
self.required['feed'] = False
def on_ack_dereg_exe(self, oid, body):
self.required['exe'] = False
def on_eod(self, oid, body):
"""Handlering End of Data Event"""
self._pbar.update(self._pbar.total - self._pbar.n)
self._pbar.close()
self.basic_publish('dereg-feed', sender=self.id)
self.basic_publish('dereg-exe', sender=self.id)
self._stop()
def on_fill(self, oid, body):
"""Upon filled order
- update strategy's position, spot position reversion
- update holding time
- update position quantity
Parameter:
----------
fill (Fill Event)
"""
logger.info('Consuming filled Order')
fill = body['fill']
self.pos[fill.symbol].on_fill(fill)
Q = fill.quantity
K, D, C = fill.fill_cost, fill.fill_type, fill.commission
cost = D.value * K * Q
self.commission += C
self.cash -= cost + C
def on_market(self, oid, body):
"""On market event
- update information for each existing poistion
- generate orders for rebalancing()
- the strategy will calculate signal(s)
- and publish them to the exchange for processing
- then a "done" will be published to indicate
the strategy is finish doing everything this heartbeat
- so then the risk manager will collect all signals
before sending order for execution
Parameter:
----------
ticks (Market Event)
"""
if body['freq'] != self.freq:
return
ticks = body['ticks']
self._update_data(ticks)
if self.t >= self.warmup:
self._calculate_signals()
equity = self.total_bp
bp = copy(self.avaliable_bp)
for S, pos in self.pos.items():
for order, lvl in pos.generate_orders(equity):
used_bp = self.on_order(order, lvl, bp)
bp -= used_bp
self._pbar.update(1)
if self.t >= self.warmup:
self._save_positions()
def on_order(self, order, lvl, bp):
"""Handling new order
- Orders are generated from signals
- will have to check currently avaliable buying power before publish
Parameter:
---------
order (Order Event)
lvl (str): Level of urgency for the order
This flag will be used to call corresponding callback
bp (float): The amount of avaliable buying power
Return:
-------
used buying power (float)
"""
S = order.symbol
need_bp = order.quantity * self.ticks[S].close
if need_bp <= bp:
used_bp = need_bp
if lvl == 'hard_stop':
self.on_hard_stop(S)
elif lvl == 'rebalance':
self.on_rebalance(S)
self.pos[order.symbol].confirm_order(order)
logger.info('Publish Order={} for Strategy={}'.format(order,
self.id))
self.basic_publish('order', sender=self.id, order=order)
else:
used_bp = 0
return used_bp
def generate_signal(self, symbol, signal_type, **kws):
"""Generate a signal that will stored at Strategy level
- Then all signals will be batch processed
Parameter
---------
symbol: str, the target symbol for the signal
signal_type: {LONG, SHORT, EXIT}
kws: additional arguments passes to the SignalEvent class
- especially the `strength` for percentage of portfolio
- if not passed, the default `pct_portfolio` will be used
"""
self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)
def _calculate_signals(self):
for pos in self.pos.values():
pos._calculate_signals()
self.calculate_signals()
def _update_data(self, ticks):
"""Update the existing state of strategies
- based on given market observation
Note:
-----
1. It will always be called before calculating the new signal
2. this will be called no matter strategy is in warmup period or not
becuase warmup period is used for gathering nessceary data
"""
self.ticks = ticks
self.t += 1
for S, pos in self.pos.items():
pos._update_data(ticks[S])
self.update_data(ticks)
def _save_positions(self):
output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':
self.cash, 'commission': self.commission, 'nav': self.nav}
for k, v in self.pos.items():
output[str(k) + '_quantity'] = v.quantity
output[str(k) + '_mv'] = v.mv
self._hist.append(output)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logger = logging.getLogger('Strategy')
class BaseStrategy(BaseConsumer):
"""Strategy is an abstract base class providing an interface for
all subsequent (inherited) strategy handling objects.
Goal
----
The goal of a (derived) Strategy object
- based on the inbound 'Tick', calcualte signals
- 'Signal' is at the symbol level which will be published
Note
----
This is designed to work both with historic and live data as
the Strategy object is agnostic to the data source,
since it obtains the 'Tick' object from MarketEvent message
"""
def __init__(self, symbol_list, allocation, freq, positions, start, end,
warmup=0, fixed_allocation=True, batch_size=10000):
"""
Parameter:
----------
symbol_list (list): A list of Contract perm_tick (for data)
allocation (float): Dollar amount that this strategy is able to use
freq (conf.FREQ): Data Frequency type for this strategy (for data)
positions (dict of dict):
A dictionary with perm_tick and a dictionary of arguments
- pct_portfolio (float): percentage of the allocation
- rebalance (int): # of days to rebalance to pct_portfolio
- hard_stop (float): hard drawdown gate to close position
warmup (int): # of days to warmup the strategy
env_type (string): {'BACKTEST', 'PAPPER', 'LIVE'}
which environment to run the startegy
start, end (datetime):
Only for backtesting to specificy the range of data to test
"""
n = ceil(freq.one_day)
num_pos = len(positions)
self.symbol_list = symbol_list
self.freq = freq
self.warmup = warmup * n
if start:
self.start_dt = clean_timestamp(start)
if end:
self.end_dt = clean_timestamp(end) + pd.DateOffset(seconds=-1,
days=1)
self.allocation = allocation
self.cash = allocation
self.commission = 0
self.fixed_allocation = fixed_allocation
pos_dict = {}
for perm_tick, v in positions.items():
if perm_tick not in self.symbol_list:
self.symbol_list.append(perm_tick)
pos = Position(perm_tick, pct_portfolio=v.get('pct_portfolio',
1 / num_pos), rebalance=v.get('rebalance', 0) * n,
hard_stop=v.get('hard_stop', 0))
pos_dict[perm_tick] = pos
self.pos = pos_dict
self.t = 0
self._hist = []
self.batch_size = batch_size
super().__init__(comp_type='STGY', required=['feed', 'exe'])
@abstractmethod
def calculate_signals(self):
"""Provide the mechanism to calculate a list of signals"""
raise NotImplementedError('Should implement calculate_signals()\n' +
"By calling this method to calculate 'Signal' Events")
def subscriptions(self):
return [('ack-reg-feed', self.id, self.on_ack_reg_feed), (
'ack-dereg_feed', self.id, self.on_ack_dereg_feed), (
'ack-reg-exe', self.id, self.on_ack_reg_exe), ('ack-dereg-exe',
self.id, self.on_ack_dereg_exe), ('eod', self.id, self.on_eod),
('tick', self.id, self.on_market), ('fill', self.id, self.on_fill)]
def update_data(self, ticks):
pass
def on_hard_stop(self, symbol):
pass
def on_rebalance(self, symbol):
pass
def has_position(self, symbol):
return self.pos[symbol].has_position
def has_open_orders(self, symbol):
return self.pos[symbol].has_open_orders
def has_long(self, symbol):
return self.pos[symbol].has_long
def has_short(self, symbol):
return self.pos[symbol].has_short
@property
def nav(self):
"""Net Account Value / Net Liquidating Value"""
return sum(pos.mv for pos in self.pos.values()) + self.cash
@property
def total_cost(self):
return sum(pos.cost for pos in self.pos.values())
@property
def total_bp(self):
if self.fixed_allocation:
return self.allocation
else:
return self.nav
@property
def avaliable_bp(self):
return self.total_bp - self.total_cost
def start(self):
while self.status != 'RUNNING':
time.sleep(2)
self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,
self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(
np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.
value))
logger.info('Warming up Strategy')
self.basic_publish('warmup', sender=self.id)
logger.info('Really Starting up calculating Signals')
self.basic_publish('next', sender=self.id)
def on_ack_reg_feed(self, oid, body):
self.required['feed'] = True
def on_ack_reg_exe(self, oid, body):
self.required['exe'] = True
def on_ack_dereg_feed(self, oid, body):
self.required['feed'] = False
def on_ack_dereg_exe(self, oid, body):
self.required['exe'] = False
def on_eod(self, oid, body):
"""Handlering End of Data Event"""
self._pbar.update(self._pbar.total - self._pbar.n)
self._pbar.close()
self.basic_publish('dereg-feed', sender=self.id)
self.basic_publish('dereg-exe', sender=self.id)
self._stop()
def on_fill(self, oid, body):
"""Upon filled order
- update strategy's position, spot position reversion
- update holding time
- update position quantity
Parameter:
----------
fill (Fill Event)
"""
logger.info('Consuming filled Order')
fill = body['fill']
self.pos[fill.symbol].on_fill(fill)
Q = fill.quantity
K, D, C = fill.fill_cost, fill.fill_type, fill.commission
cost = D.value * K * Q
self.commission += C
self.cash -= cost + C
def on_market(self, oid, body):
"""On market event
- update information for each existing poistion
- generate orders for rebalancing()
- the strategy will calculate signal(s)
- and publish them to the exchange for processing
- then a "done" will be published to indicate
the strategy is finish doing everything this heartbeat
- so then the risk manager will collect all signals
before sending order for execution
Parameter:
----------
ticks (Market Event)
"""
if body['freq'] != self.freq:
return
ticks = body['ticks']
self._update_data(ticks)
if self.t >= self.warmup:
self._calculate_signals()
equity = self.total_bp
bp = copy(self.avaliable_bp)
for S, pos in self.pos.items():
for order, lvl in pos.generate_orders(equity):
used_bp = self.on_order(order, lvl, bp)
bp -= used_bp
self._pbar.update(1)
if self.t >= self.warmup:
self._save_positions()
def on_order(self, order, lvl, bp):
"""Handling new order
- Orders are generated from signals
- will have to check currently avaliable buying power before publish
Parameter:
---------
order (Order Event)
lvl (str): Level of urgency for the order
This flag will be used to call corresponding callback
bp (float): The amount of avaliable buying power
Return:
-------
used buying power (float)
"""
S = order.symbol
need_bp = order.quantity * self.ticks[S].close
if need_bp <= bp:
used_bp = need_bp
if lvl == 'hard_stop':
self.on_hard_stop(S)
elif lvl == 'rebalance':
self.on_rebalance(S)
self.pos[order.symbol].confirm_order(order)
logger.info('Publish Order={} for Strategy={}'.format(order,
self.id))
self.basic_publish('order', sender=self.id, order=order)
else:
used_bp = 0
return used_bp
def generate_signal(self, symbol, signal_type, **kws):
"""Generate a signal that will stored at Strategy level
- Then all signals will be batch processed
Parameter
---------
symbol: str, the target symbol for the signal
signal_type: {LONG, SHORT, EXIT}
kws: additional arguments passes to the SignalEvent class
- especially the `strength` for percentage of portfolio
- if not passed, the default `pct_portfolio` will be used
"""
self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)
def _calculate_signals(self):
for pos in self.pos.values():
pos._calculate_signals()
self.calculate_signals()
def _update_data(self, ticks):
"""Update the existing state of strategies
- based on given market observation
Note:
-----
1. It will always be called before calculating the new signal
2. this will be called no matter strategy is in warmup period or not
becuase warmup period is used for gathering nessceary data
"""
self.ticks = ticks
self.t += 1
for S, pos in self.pos.items():
pos._update_data(ticks[S])
self.update_data(ticks)
def _save_positions(self):
output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':
self.cash, 'commission': self.commission, 'nav': self.nav}
for k, v in self.pos.items():
output[str(k) + '_quantity'] = v.quantity
output[str(k) + '_mv'] = v.mv
self._hist.append(output)
<|reserved_special_token_1|>
import logging, numpy as np, time, pandas as pd
from abc import abstractmethod
from kombu import binding
from tqdm import tqdm
from functools import lru_cache
from threading import Thread
from math import ceil
from copy import copy
from .pos import Position
from .base import BaseConsumer
from .event import SignalEventPct, OrderEvent
from .conf import LONG, SHORT, EXIT, MKT, BUY, SELL, LOCAL_TZ
from .util import clean_timestamp
from .errors import OverFilling
logger = logging.getLogger('Strategy')
class BaseStrategy(BaseConsumer):
    """Strategy is an abstract base class providing an interface for
    all subsequent (inherited) strategy handling objects.

    Goal
    ----
    The goal of a (derived) Strategy object
        - based on the inbound 'Tick', calculate signals
        - 'Signal' is at the symbol level which will be published

    Note
    ----
    This is designed to work both with historic and live data as
    the Strategy object is agnostic to the data source,
    since it obtains the 'Tick' object from MarketEvent message
    """
    def __init__(
            self, symbol_list, allocation, freq, positions,
            start, end, warmup=0, fixed_allocation=True,
            batch_size=10000
    ):
        """
        Parameter:
        ----------
        symbol_list (list): A list of Contract perm_tick (for data)
        allocation (float): Dollar amount that this strategy is able to use
        freq (conf.FREQ): Data Frequency type for this strategy (for data)
        positions (dict of dict):
            A dictionary with perm_tick and a dictionary of arguments
            - pct_portfolio (float): percentage of the allocation
            - rebalance (int): # of days to rebalance to pct_portfolio
            - hard_stop (float): hard drawdown gate to close position
        warmup (int): # of days to warm up the strategy before trading
        fixed_allocation (bool): if True, buying power is pinned to the
            initial `allocation`; otherwise it floats with the account NAV
        start, end (datetime):
            Only for backtesting to specify the range of data to test
        batch_size (int): stored on the instance; not consumed in this class
        """
        # number of ticks per trading day at this data frequency
        n = ceil(freq.one_day)
        num_pos = len(positions)
        # getting necessary parameters
        self.symbol_list = symbol_list
        self.freq = freq
        # warmup is supplied in days; convert to a tick count
        self.warmup = warmup * n
        if start:
            self.start_dt = clean_timestamp(start)
        if end:
            # push `end` to 23:59:59 of the same day so the final session
            # is fully included in the backtest range
            self.end_dt = clean_timestamp(end) + pd.DateOffset(seconds=-1, days=1)
        # allocation parameters for tracking portfolio
        self.allocation = allocation
        self.cash = allocation
        self.commission = 0
        self.fixed_allocation = fixed_allocation
        pos_dict = {}
        for perm_tick, v in positions.items():
            # to hold a position we must also subscribe to its market ticks
            if perm_tick not in self.symbol_list:
                self.symbol_list.append(perm_tick)
            pos = Position(
                perm_tick,
                # default to an equal-weight split across all positions
                pct_portfolio=v.get('pct_portfolio', 1/num_pos),
                # rebalance cadence is supplied in days; convert to ticks
                rebalance=v.get('rebalance', 0) * n,
                hard_stop=v.get('hard_stop', 0),
            )
            pos_dict[perm_tick] = pos
        self.pos = pos_dict
        # tick counter: starts at 0 and increments on every market tick
        self.t = 0
        self._hist = []
        self.batch_size = batch_size
        super().__init__(comp_type='STGY', required=['feed', 'exe'])

    @abstractmethod
    def calculate_signals(self):
        """Provide the mechanism to calculate a list of signals"""
        raise NotImplementedError(
            "Should implement calculate_signals()\n" + \
            "By calling this method to calculate 'Signal' Events"
        )

    def subscriptions(self):
        """Declare the (topic, routing-id, callback) bindings this consumer needs."""
        # NOTE(review): 'ack-dereg_feed' uses an underscore while every other
        # topic here is dash-separated (cf. 'ack-dereg-exe'); if the publisher
        # emits 'ack-dereg-feed', this binding never fires and required['feed']
        # is never cleared -- confirm the broker-side topic name.
        return [
            ('ack-reg-feed', self.id, self.on_ack_reg_feed),
            ('ack-dereg_feed', self.id, self.on_ack_dereg_feed),
            ('ack-reg-exe', self.id, self.on_ack_reg_exe),
            ('ack-dereg-exe', self.id, self.on_ack_dereg_exe),
            ('eod', self.id, self.on_eod),
            ('tick', self.id, self.on_market),
            ('fill', self.id, self.on_fill),
        ]

    def update_data(self, ticks):
        # subclass hook: called at the end of every _update_data() tick
        pass

    def on_hard_stop(self, symbol):
        # subclass hook: called when a hard-stop order is about to be published
        pass

    def on_rebalance(self, symbol):
        # subclass hook: called when a rebalance order is about to be published
        pass

    def has_position(self, symbol):
        """True if there is currently an open position in `symbol`."""
        return self.pos[symbol].has_position

    def has_open_orders(self, symbol):
        """True if `symbol` has outstanding (unfilled) orders."""
        return self.pos[symbol].has_open_orders

    def has_long(self, symbol):
        """True if the position in `symbol` is long."""
        return self.pos[symbol].has_long

    def has_short(self, symbol):
        """True if the position in `symbol` is short."""
        return self.pos[symbol].has_short

    @property
    def nav(self):
        """Net Account Value / Net Liquidating Value"""
        return sum(pos.mv for pos in self.pos.values()) + self.cash

    @property
    def total_cost(self):
        # total cost basis currently committed across all positions
        return sum(pos.cost for pos in self.pos.values())

    @property
    def total_bp(self):
        """Total buying power: the fixed initial allocation, or floating NAV."""
        if self.fixed_allocation:
            return self.allocation
        else:
            return self.nav

    @property
    def avaliable_bp(self):
        """Buying power not yet committed to positions.

        NOTE: the misspelled name ('avaliable') is kept -- it is part of the
        public API and renaming it would break external callers.
        """
        return self.total_bp - self.total_cost

    def start(self):
        """Block until the component handshake completes, then start the run."""
        # busy-wait until registration with feed/exe is acknowledged
        while self.status != 'RUNNING':
            time.sleep(2)
        # setting up progress bar: total ticks = business days * ticks-per-day
        self._pbar = tqdm(
            total=int(np.ceil(
                pd.bdate_range(self.start_dt, self.end_dt).size
                * np.ceil(self.freq.one_day)
            )),
            miniters=int(np.ceil(self.freq.one_day)),
            unit=' tick<{}>'.format(self.freq.value),
        )
        # publish event to get started
        logger.info('Warming up Strategy')
        self.basic_publish('warmup', sender=self.id)
        logger.info('Really Starting up calculating Signals')
        self.basic_publish('next', sender=self.id)

    def on_ack_reg_feed(self, oid, body):
        # data feed acknowledged our registration
        self.required['feed'] = True

    def on_ack_reg_exe(self, oid, body):
        # execution handler acknowledged our registration
        self.required['exe'] = True

    def on_ack_dereg_feed(self, oid, body):
        # data feed acknowledged our de-registration
        self.required['feed'] = False

    def on_ack_dereg_exe(self, oid, body):
        # execution handler acknowledged our de-registration
        self.required['exe'] = False

    def on_eod(self, oid, body):
        """Handling the End of Data event: finish the bar, de-register, stop."""
        # jump the progress bar straight to completion before closing it
        self._pbar.update(self._pbar.total - self._pbar.n)
        self._pbar.close()
        self.basic_publish('dereg-feed', sender=self.id)
        self.basic_publish('dereg-exe', sender=self.id)
        self._stop()

    def on_fill(self, oid, body):
        """Upon filled order
        - update strategy's position, spot position reversion
        - update holding time
        - update position quantity

        Parameter:
        ----------
        fill (Fill Event)
        """
        logger.info('Consuming filled Order')
        fill = body['fill']
        # update the position first
        self.pos[fill.symbol].on_fill(fill)
        # getting data from the fill event
        Q = fill.quantity
        K, D, C = fill.fill_cost, fill.fill_type, fill.commission
        # D.value signs the cash flow by fill direction -- presumably
        # +1 for buys / -1 for sells; confirm against the FillType enum
        cost = D.value * K * Q
        self.commission += C
        self.cash -= cost + C

    def on_market(self, oid, body):
        """On market event
        - update information for each existing position
        - generate orders for rebalancing
        - the strategy will calculate signal(s)
            - and publish them to the exchange for processing
        - then a "done" will be published to indicate
          the strategy is finished doing everything this heartbeat
        - so then the risk manager will collect all signals
          before sending orders for execution

        Parameter:
        ----------
        ticks (Market Event)
        """
        # ignore ticks published at other data frequencies
        if body['freq'] != self.freq: return
        ticks = body['ticks']
        self._update_data(ticks)
        if self.t >= self.warmup:
            self._calculate_signals()
            # publish generated signals
            equity = self.total_bp
            bp = copy(self.avaliable_bp)  # snapshot of buying power this tick
            for S, pos in self.pos.items():
                for order, lvl in pos.generate_orders(equity):
                    used_bp = self.on_order(order, lvl, bp)
                    bp -= used_bp
        # save old strategy performance history
        self._pbar.update(1)
        # disabled heartbeat chaining, kept for reference:
        # if ticks.timestamp >= self.start_dt:
        #     self.basic_publish('next', sender=self.id)
        if self.t >= self.warmup:
            self._save_positions()

    def on_order(self, order, lvl, bp):
        """Handling new order
        - Orders are generated from signals
        - will have to check currently available buying power before publish

        Parameter:
        ---------
        order (Order Event)
        lvl (str): Level of urgency for the order
            This flag will be used to call the corresponding callback
        bp (float): The amount of available buying power

        Return:
        -------
        used buying power (float); 0 when the order is dropped for lack of bp
        """
        S = order.symbol
        # notional required for the order, marked at the latest close
        need_bp = order.quantity * self.ticks[S].close
        if need_bp <= bp:  # have enough buying power to place order
            used_bp = need_bp
            if lvl == 'hard_stop':
                self.on_hard_stop(S)
            elif lvl == 'rebalance':
                self.on_rebalance(S)
            self.pos[order.symbol].confirm_order(order)
            logger.info(
                'Publish Order={} for Strategy={}'
                .format(order, self.id)
            )
            self.basic_publish('order', sender=self.id, order=order)
        else:
            # order is silently dropped when buying power is insufficient
            used_bp = 0
        return used_bp

    def generate_signal(self, symbol, signal_type, **kws):
        """Generate a signal that will be stored at Strategy level
        - Then all signals will be batch processed

        Parameter
        ---------
        symbol: str, the target symbol for the signal
        signal_type: {LONG, SHORT, EXIT}
        kws: additional arguments passed to the SignalEvent class
            - especially the `strength` for percentage of portfolio
            - if not passed, the default `pct_portfolio` will be used
        """
        self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)

    def _calculate_signals(self):
        # refresh position-level signals before the user strategy runs
        for pos in self.pos.values():
            pos._calculate_signals()
        self.calculate_signals()

    def _update_data(self, ticks):
        """Update the existing state of strategies
        - based on the given market observation

        Note:
        -----
        1. It will always be called before calculating the new signal
        2. this will be called whether or not the strategy is in its warmup
           period, because the warmup period is used for gathering necessary data
        """
        self.ticks = ticks
        self.t += 1
        for S, pos in self.pos.items():
            pos._update_data(ticks[S])
        self.update_data(ticks)

    def _save_positions(self):
        # snapshot account-level and per-position state for this tick
        output = {
            'timestamp': self.ticks.timestamp, 't': self.t,
            'cash': self.cash, 'commission': self.commission,
            'nav': self.nav,
        }
        for k, v in self.pos.items():
            output[str(k)+'_quantity'] = v.quantity
            output[str(k)+'_mv'] = v.mv
        self._hist.append(output)
|
flexible
|
{
"blob_id": "76d166bc227986863db77aa784be3de8110437ff",
"index": 530,
"step-1": "<mask token>\n\n\nclass BaseStrategy(BaseConsumer):\n <mask token>\n <mask token>\n\n @abstractmethod\n def calculate_signals(self):\n \"\"\"Provide the mechanism to calculate a list of signals\"\"\"\n raise NotImplementedError('Should implement calculate_signals()\\n' +\n \"By calling this method to calculate 'Signal' Events\")\n <mask token>\n <mask token>\n\n def on_hard_stop(self, symbol):\n pass\n\n def on_rebalance(self, symbol):\n pass\n\n def has_position(self, symbol):\n return self.pos[symbol].has_position\n\n def has_open_orders(self, symbol):\n return self.pos[symbol].has_open_orders\n\n def has_long(self, symbol):\n return self.pos[symbol].has_long\n <mask token>\n <mask token>\n\n @property\n def total_cost(self):\n return sum(pos.cost for pos in self.pos.values())\n\n @property\n def total_bp(self):\n if self.fixed_allocation:\n return self.allocation\n else:\n return self.nav\n <mask token>\n\n def start(self):\n while self.status != 'RUNNING':\n time.sleep(2)\n self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,\n self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(\n np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.\n value))\n logger.info('Warming up Strategy')\n self.basic_publish('warmup', sender=self.id)\n logger.info('Really Starting up calculating Signals')\n self.basic_publish('next', sender=self.id)\n\n def on_ack_reg_feed(self, oid, body):\n self.required['feed'] = True\n\n def on_ack_reg_exe(self, oid, body):\n self.required['exe'] = True\n <mask token>\n <mask token>\n\n def on_eod(self, oid, body):\n \"\"\"Handlering End of Data Event\"\"\"\n self._pbar.update(self._pbar.total - self._pbar.n)\n self._pbar.close()\n self.basic_publish('dereg-feed', sender=self.id)\n self.basic_publish('dereg-exe', sender=self.id)\n self._stop()\n\n def on_fill(self, oid, body):\n \"\"\"Upon filled order\n\t\t- update strategy's position, spot position reversion\n\t\t- update holding time\n\t\t- update 
position quantity\n\n\t\tParameter:\n\t\t----------\n\t\tfill (Fill Event)\n\t\t\"\"\"\n logger.info('Consuming filled Order')\n fill = body['fill']\n self.pos[fill.symbol].on_fill(fill)\n Q = fill.quantity\n K, D, C = fill.fill_cost, fill.fill_type, fill.commission\n cost = D.value * K * Q\n self.commission += C\n self.cash -= cost + C\n\n def on_market(self, oid, body):\n \"\"\"On market event\n\t\t- update information for each existing poistion\n\t\t- generate orders for rebalancing()\n\t\t- the strategy will calculate signal(s)\n\t\t- and publish them to the exchange for processing\n\t\t- then a \"done\" will be published to indicate\n\t\t\tthe strategy is finish doing everything this heartbeat\n\t\t- so then the risk manager will collect all signals\n\t\t\tbefore sending order for execution\n\n\t\tParameter:\n\t\t----------\n\t\tticks (Market Event)\n\t\t\"\"\"\n if body['freq'] != self.freq:\n return\n ticks = body['ticks']\n self._update_data(ticks)\n if self.t >= self.warmup:\n self._calculate_signals()\n equity = self.total_bp\n bp = copy(self.avaliable_bp)\n for S, pos in self.pos.items():\n for order, lvl in pos.generate_orders(equity):\n used_bp = self.on_order(order, lvl, bp)\n bp -= used_bp\n self._pbar.update(1)\n if self.t >= self.warmup:\n self._save_positions()\n <mask token>\n\n def generate_signal(self, symbol, signal_type, **kws):\n \"\"\"Generate a signal that will stored at Strategy level\n\t\t- Then all signals will be batch processed\n\n\t\tParameter\n\t\t---------\n\t\tsymbol: str, the target symbol for the signal\n\t\tsignal_type: {LONG, SHORT, EXIT}\n\t\tkws: additional arguments passes to the SignalEvent class\n\t\t\t- especially the `strength` for percentage of portfolio\n\t\t\t- if not passed, the default `pct_portfolio` will be used\n\t\t\"\"\"\n self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)\n <mask token>\n\n def _update_data(self, ticks):\n \"\"\"Update the existing state of strategies\n\t\t- based on given 
market observation\n\n\t\tNote:\n\t\t-----\n\t\t1. It will always be called before calculating the new signal\n\t\t2. this will be called no matter strategy is in warmup period or not\n\t\t\tbecuase warmup period is used for gathering nessceary data\n\t\t\"\"\"\n self.ticks = ticks\n self.t += 1\n for S, pos in self.pos.items():\n pos._update_data(ticks[S])\n self.update_data(ticks)\n\n def _save_positions(self):\n output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':\n self.cash, 'commission': self.commission, 'nav': self.nav}\n for k, v in self.pos.items():\n output[str(k) + '_quantity'] = v.quantity\n output[str(k) + '_mv'] = v.mv\n self._hist.append(output)\n",
"step-2": "<mask token>\n\n\nclass BaseStrategy(BaseConsumer):\n <mask token>\n <mask token>\n\n @abstractmethod\n def calculate_signals(self):\n \"\"\"Provide the mechanism to calculate a list of signals\"\"\"\n raise NotImplementedError('Should implement calculate_signals()\\n' +\n \"By calling this method to calculate 'Signal' Events\")\n <mask token>\n <mask token>\n\n def on_hard_stop(self, symbol):\n pass\n\n def on_rebalance(self, symbol):\n pass\n\n def has_position(self, symbol):\n return self.pos[symbol].has_position\n\n def has_open_orders(self, symbol):\n return self.pos[symbol].has_open_orders\n\n def has_long(self, symbol):\n return self.pos[symbol].has_long\n <mask token>\n\n @property\n def nav(self):\n \"\"\"Net Account Value / Net Liquidating Value\"\"\"\n return sum(pos.mv for pos in self.pos.values()) + self.cash\n\n @property\n def total_cost(self):\n return sum(pos.cost for pos in self.pos.values())\n\n @property\n def total_bp(self):\n if self.fixed_allocation:\n return self.allocation\n else:\n return self.nav\n <mask token>\n\n def start(self):\n while self.status != 'RUNNING':\n time.sleep(2)\n self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,\n self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(\n np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.\n value))\n logger.info('Warming up Strategy')\n self.basic_publish('warmup', sender=self.id)\n logger.info('Really Starting up calculating Signals')\n self.basic_publish('next', sender=self.id)\n\n def on_ack_reg_feed(self, oid, body):\n self.required['feed'] = True\n\n def on_ack_reg_exe(self, oid, body):\n self.required['exe'] = True\n <mask token>\n <mask token>\n\n def on_eod(self, oid, body):\n \"\"\"Handlering End of Data Event\"\"\"\n self._pbar.update(self._pbar.total - self._pbar.n)\n self._pbar.close()\n self.basic_publish('dereg-feed', sender=self.id)\n self.basic_publish('dereg-exe', sender=self.id)\n self._stop()\n\n def on_fill(self, oid, 
body):\n \"\"\"Upon filled order\n\t\t- update strategy's position, spot position reversion\n\t\t- update holding time\n\t\t- update position quantity\n\n\t\tParameter:\n\t\t----------\n\t\tfill (Fill Event)\n\t\t\"\"\"\n logger.info('Consuming filled Order')\n fill = body['fill']\n self.pos[fill.symbol].on_fill(fill)\n Q = fill.quantity\n K, D, C = fill.fill_cost, fill.fill_type, fill.commission\n cost = D.value * K * Q\n self.commission += C\n self.cash -= cost + C\n\n def on_market(self, oid, body):\n \"\"\"On market event\n\t\t- update information for each existing poistion\n\t\t- generate orders for rebalancing()\n\t\t- the strategy will calculate signal(s)\n\t\t- and publish them to the exchange for processing\n\t\t- then a \"done\" will be published to indicate\n\t\t\tthe strategy is finish doing everything this heartbeat\n\t\t- so then the risk manager will collect all signals\n\t\t\tbefore sending order for execution\n\n\t\tParameter:\n\t\t----------\n\t\tticks (Market Event)\n\t\t\"\"\"\n if body['freq'] != self.freq:\n return\n ticks = body['ticks']\n self._update_data(ticks)\n if self.t >= self.warmup:\n self._calculate_signals()\n equity = self.total_bp\n bp = copy(self.avaliable_bp)\n for S, pos in self.pos.items():\n for order, lvl in pos.generate_orders(equity):\n used_bp = self.on_order(order, lvl, bp)\n bp -= used_bp\n self._pbar.update(1)\n if self.t >= self.warmup:\n self._save_positions()\n <mask token>\n\n def generate_signal(self, symbol, signal_type, **kws):\n \"\"\"Generate a signal that will stored at Strategy level\n\t\t- Then all signals will be batch processed\n\n\t\tParameter\n\t\t---------\n\t\tsymbol: str, the target symbol for the signal\n\t\tsignal_type: {LONG, SHORT, EXIT}\n\t\tkws: additional arguments passes to the SignalEvent class\n\t\t\t- especially the `strength` for percentage of portfolio\n\t\t\t- if not passed, the default `pct_portfolio` will be used\n\t\t\"\"\"\n self.pos[symbol]._generate_signal(signal_type, 
lvl='normal', **kws)\n <mask token>\n\n def _update_data(self, ticks):\n \"\"\"Update the existing state of strategies\n\t\t- based on given market observation\n\n\t\tNote:\n\t\t-----\n\t\t1. It will always be called before calculating the new signal\n\t\t2. this will be called no matter strategy is in warmup period or not\n\t\t\tbecuase warmup period is used for gathering nessceary data\n\t\t\"\"\"\n self.ticks = ticks\n self.t += 1\n for S, pos in self.pos.items():\n pos._update_data(ticks[S])\n self.update_data(ticks)\n\n def _save_positions(self):\n output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':\n self.cash, 'commission': self.commission, 'nav': self.nav}\n for k, v in self.pos.items():\n output[str(k) + '_quantity'] = v.quantity\n output[str(k) + '_mv'] = v.mv\n self._hist.append(output)\n",
"step-3": "<mask token>\n\n\nclass BaseStrategy(BaseConsumer):\n <mask token>\n\n def __init__(self, symbol_list, allocation, freq, positions, start, end,\n warmup=0, fixed_allocation=True, batch_size=10000):\n \"\"\"\n\t\tParameter:\n\t\t----------\n\t\tsymbol_list (list): A list of Contract perm_tick (for data)\n\t\tallocation (float): Dollar amount that this strategy is able to use\n\t\tfreq (conf.FREQ): Data Frequency type for this strategy (for data)\n\t\tpositions (dict of dict):\n\t\t\tA dictionary with perm_tick and a dictionary of arguments\n\n\t\t\t- pct_portfolio (float): percentage of the allocation\n\t\t\t- rebalance (int): # of days to rebalance to pct_portfolio\n\t\t\t- hard_stop (float): hard drawdown gate to close position\n\t\twarmup (int): # of days to warmup the strategy\n\t\tenv_type (string): {'BACKTEST', 'PAPPER', 'LIVE'}\n\t\t\twhich environment to run the startegy\n\t\tstart, end (datetime):\n\t\t\tOnly for backtesting to specificy the range of data to test\n\t\t\"\"\"\n n = ceil(freq.one_day)\n num_pos = len(positions)\n self.symbol_list = symbol_list\n self.freq = freq\n self.warmup = warmup * n\n if start:\n self.start_dt = clean_timestamp(start)\n if end:\n self.end_dt = clean_timestamp(end) + pd.DateOffset(seconds=-1,\n days=1)\n self.allocation = allocation\n self.cash = allocation\n self.commission = 0\n self.fixed_allocation = fixed_allocation\n pos_dict = {}\n for perm_tick, v in positions.items():\n if perm_tick not in self.symbol_list:\n self.symbol_list.append(perm_tick)\n pos = Position(perm_tick, pct_portfolio=v.get('pct_portfolio', \n 1 / num_pos), rebalance=v.get('rebalance', 0) * n,\n hard_stop=v.get('hard_stop', 0))\n pos_dict[perm_tick] = pos\n self.pos = pos_dict\n self.t = 0\n self._hist = []\n self.batch_size = batch_size\n super().__init__(comp_type='STGY', required=['feed', 'exe'])\n\n @abstractmethod\n def calculate_signals(self):\n \"\"\"Provide the mechanism to calculate a list of signals\"\"\"\n raise 
NotImplementedError('Should implement calculate_signals()\\n' +\n \"By calling this method to calculate 'Signal' Events\")\n\n def subscriptions(self):\n return [('ack-reg-feed', self.id, self.on_ack_reg_feed), (\n 'ack-dereg_feed', self.id, self.on_ack_dereg_feed), (\n 'ack-reg-exe', self.id, self.on_ack_reg_exe), ('ack-dereg-exe',\n self.id, self.on_ack_dereg_exe), ('eod', self.id, self.on_eod),\n ('tick', self.id, self.on_market), ('fill', self.id, self.on_fill)]\n\n def update_data(self, ticks):\n pass\n\n def on_hard_stop(self, symbol):\n pass\n\n def on_rebalance(self, symbol):\n pass\n\n def has_position(self, symbol):\n return self.pos[symbol].has_position\n\n def has_open_orders(self, symbol):\n return self.pos[symbol].has_open_orders\n\n def has_long(self, symbol):\n return self.pos[symbol].has_long\n\n def has_short(self, symbol):\n return self.pos[symbol].has_short\n\n @property\n def nav(self):\n \"\"\"Net Account Value / Net Liquidating Value\"\"\"\n return sum(pos.mv for pos in self.pos.values()) + self.cash\n\n @property\n def total_cost(self):\n return sum(pos.cost for pos in self.pos.values())\n\n @property\n def total_bp(self):\n if self.fixed_allocation:\n return self.allocation\n else:\n return self.nav\n\n @property\n def avaliable_bp(self):\n return self.total_bp - self.total_cost\n\n def start(self):\n while self.status != 'RUNNING':\n time.sleep(2)\n self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,\n self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(\n np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.\n value))\n logger.info('Warming up Strategy')\n self.basic_publish('warmup', sender=self.id)\n logger.info('Really Starting up calculating Signals')\n self.basic_publish('next', sender=self.id)\n\n def on_ack_reg_feed(self, oid, body):\n self.required['feed'] = True\n\n def on_ack_reg_exe(self, oid, body):\n self.required['exe'] = True\n\n def on_ack_dereg_feed(self, oid, body):\n 
self.required['feed'] = False\n\n def on_ack_dereg_exe(self, oid, body):\n self.required['exe'] = False\n\n def on_eod(self, oid, body):\n \"\"\"Handlering End of Data Event\"\"\"\n self._pbar.update(self._pbar.total - self._pbar.n)\n self._pbar.close()\n self.basic_publish('dereg-feed', sender=self.id)\n self.basic_publish('dereg-exe', sender=self.id)\n self._stop()\n\n def on_fill(self, oid, body):\n \"\"\"Upon filled order\n\t\t- update strategy's position, spot position reversion\n\t\t- update holding time\n\t\t- update position quantity\n\n\t\tParameter:\n\t\t----------\n\t\tfill (Fill Event)\n\t\t\"\"\"\n logger.info('Consuming filled Order')\n fill = body['fill']\n self.pos[fill.symbol].on_fill(fill)\n Q = fill.quantity\n K, D, C = fill.fill_cost, fill.fill_type, fill.commission\n cost = D.value * K * Q\n self.commission += C\n self.cash -= cost + C\n\n def on_market(self, oid, body):\n \"\"\"On market event\n\t\t- update information for each existing poistion\n\t\t- generate orders for rebalancing()\n\t\t- the strategy will calculate signal(s)\n\t\t- and publish them to the exchange for processing\n\t\t- then a \"done\" will be published to indicate\n\t\t\tthe strategy is finish doing everything this heartbeat\n\t\t- so then the risk manager will collect all signals\n\t\t\tbefore sending order for execution\n\n\t\tParameter:\n\t\t----------\n\t\tticks (Market Event)\n\t\t\"\"\"\n if body['freq'] != self.freq:\n return\n ticks = body['ticks']\n self._update_data(ticks)\n if self.t >= self.warmup:\n self._calculate_signals()\n equity = self.total_bp\n bp = copy(self.avaliable_bp)\n for S, pos in self.pos.items():\n for order, lvl in pos.generate_orders(equity):\n used_bp = self.on_order(order, lvl, bp)\n bp -= used_bp\n self._pbar.update(1)\n if self.t >= self.warmup:\n self._save_positions()\n\n def on_order(self, order, lvl, bp):\n \"\"\"Handling new order\n\t\t- Orders are generated from signals\n\t\t- will have to check currently avaliable buying power 
before publish\n\n\t\tParameter:\n\t\t---------\n\t\torder (Order Event)\n\t\tlvl (str): Level of urgency for the order\n\t\t\tThis flag will be used to call corresponding callback\n\t\tbp (float): The amount of avaliable buying power\n\n\t\tReturn:\n\t\t-------\n\t\tused buying power (float)\n\t\t\"\"\"\n S = order.symbol\n need_bp = order.quantity * self.ticks[S].close\n if need_bp <= bp:\n used_bp = need_bp\n if lvl == 'hard_stop':\n self.on_hard_stop(S)\n elif lvl == 'rebalance':\n self.on_rebalance(S)\n self.pos[order.symbol].confirm_order(order)\n logger.info('Publish Order={} for Strategy={}'.format(order,\n self.id))\n self.basic_publish('order', sender=self.id, order=order)\n else:\n used_bp = 0\n return used_bp\n\n def generate_signal(self, symbol, signal_type, **kws):\n \"\"\"Generate a signal that will stored at Strategy level\n\t\t- Then all signals will be batch processed\n\n\t\tParameter\n\t\t---------\n\t\tsymbol: str, the target symbol for the signal\n\t\tsignal_type: {LONG, SHORT, EXIT}\n\t\tkws: additional arguments passes to the SignalEvent class\n\t\t\t- especially the `strength` for percentage of portfolio\n\t\t\t- if not passed, the default `pct_portfolio` will be used\n\t\t\"\"\"\n self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)\n\n def _calculate_signals(self):\n for pos in self.pos.values():\n pos._calculate_signals()\n self.calculate_signals()\n\n def _update_data(self, ticks):\n \"\"\"Update the existing state of strategies\n\t\t- based on given market observation\n\n\t\tNote:\n\t\t-----\n\t\t1. It will always be called before calculating the new signal\n\t\t2. 
this will be called no matter strategy is in warmup period or not\n\t\t\tbecuase warmup period is used for gathering nessceary data\n\t\t\"\"\"\n self.ticks = ticks\n self.t += 1\n for S, pos in self.pos.items():\n pos._update_data(ticks[S])\n self.update_data(ticks)\n\n def _save_positions(self):\n output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':\n self.cash, 'commission': self.commission, 'nav': self.nav}\n for k, v in self.pos.items():\n output[str(k) + '_quantity'] = v.quantity\n output[str(k) + '_mv'] = v.mv\n self._hist.append(output)\n",
"step-4": "<mask token>\nlogger = logging.getLogger('Strategy')\n\n\nclass BaseStrategy(BaseConsumer):\n \"\"\"Strategy is an abstract base class providing an interface for\n\tall subsequent (inherited) strategy handling objects.\n\n\tGoal\n\t----\n\tThe goal of a (derived) Strategy object \n\t- based on the inbound 'Tick', calcualte signals\n\t- 'Signal' is at the symbol level which will be published\n\n\tNote\n\t----\n\tThis is designed to work both with historic and live data as\n\tthe Strategy object is agnostic to the data source,\n\tsince it obtains the 'Tick' object from MarketEvent message\n\t\"\"\"\n\n def __init__(self, symbol_list, allocation, freq, positions, start, end,\n warmup=0, fixed_allocation=True, batch_size=10000):\n \"\"\"\n\t\tParameter:\n\t\t----------\n\t\tsymbol_list (list): A list of Contract perm_tick (for data)\n\t\tallocation (float): Dollar amount that this strategy is able to use\n\t\tfreq (conf.FREQ): Data Frequency type for this strategy (for data)\n\t\tpositions (dict of dict):\n\t\t\tA dictionary with perm_tick and a dictionary of arguments\n\n\t\t\t- pct_portfolio (float): percentage of the allocation\n\t\t\t- rebalance (int): # of days to rebalance to pct_portfolio\n\t\t\t- hard_stop (float): hard drawdown gate to close position\n\t\twarmup (int): # of days to warmup the strategy\n\t\tenv_type (string): {'BACKTEST', 'PAPPER', 'LIVE'}\n\t\t\twhich environment to run the startegy\n\t\tstart, end (datetime):\n\t\t\tOnly for backtesting to specificy the range of data to test\n\t\t\"\"\"\n n = ceil(freq.one_day)\n num_pos = len(positions)\n self.symbol_list = symbol_list\n self.freq = freq\n self.warmup = warmup * n\n if start:\n self.start_dt = clean_timestamp(start)\n if end:\n self.end_dt = clean_timestamp(end) + pd.DateOffset(seconds=-1,\n days=1)\n self.allocation = allocation\n self.cash = allocation\n self.commission = 0\n self.fixed_allocation = fixed_allocation\n pos_dict = {}\n for perm_tick, v in positions.items():\n if 
perm_tick not in self.symbol_list:\n self.symbol_list.append(perm_tick)\n pos = Position(perm_tick, pct_portfolio=v.get('pct_portfolio', \n 1 / num_pos), rebalance=v.get('rebalance', 0) * n,\n hard_stop=v.get('hard_stop', 0))\n pos_dict[perm_tick] = pos\n self.pos = pos_dict\n self.t = 0\n self._hist = []\n self.batch_size = batch_size\n super().__init__(comp_type='STGY', required=['feed', 'exe'])\n\n @abstractmethod\n def calculate_signals(self):\n \"\"\"Provide the mechanism to calculate a list of signals\"\"\"\n raise NotImplementedError('Should implement calculate_signals()\\n' +\n \"By calling this method to calculate 'Signal' Events\")\n\n def subscriptions(self):\n return [('ack-reg-feed', self.id, self.on_ack_reg_feed), (\n 'ack-dereg_feed', self.id, self.on_ack_dereg_feed), (\n 'ack-reg-exe', self.id, self.on_ack_reg_exe), ('ack-dereg-exe',\n self.id, self.on_ack_dereg_exe), ('eod', self.id, self.on_eod),\n ('tick', self.id, self.on_market), ('fill', self.id, self.on_fill)]\n\n def update_data(self, ticks):\n pass\n\n def on_hard_stop(self, symbol):\n pass\n\n def on_rebalance(self, symbol):\n pass\n\n def has_position(self, symbol):\n return self.pos[symbol].has_position\n\n def has_open_orders(self, symbol):\n return self.pos[symbol].has_open_orders\n\n def has_long(self, symbol):\n return self.pos[symbol].has_long\n\n def has_short(self, symbol):\n return self.pos[symbol].has_short\n\n @property\n def nav(self):\n \"\"\"Net Account Value / Net Liquidating Value\"\"\"\n return sum(pos.mv for pos in self.pos.values()) + self.cash\n\n @property\n def total_cost(self):\n return sum(pos.cost for pos in self.pos.values())\n\n @property\n def total_bp(self):\n if self.fixed_allocation:\n return self.allocation\n else:\n return self.nav\n\n @property\n def avaliable_bp(self):\n return self.total_bp - self.total_cost\n\n def start(self):\n while self.status != 'RUNNING':\n time.sleep(2)\n self._pbar = tqdm(total=int(np.ceil(pd.bdate_range(self.start_dt,\n 
self.end_dt).size * np.ceil(self.freq.one_day))), miniters=int(\n np.ceil(self.freq.one_day)), unit=' tick<{}>'.format(self.freq.\n value))\n logger.info('Warming up Strategy')\n self.basic_publish('warmup', sender=self.id)\n logger.info('Really Starting up calculating Signals')\n self.basic_publish('next', sender=self.id)\n\n def on_ack_reg_feed(self, oid, body):\n self.required['feed'] = True\n\n def on_ack_reg_exe(self, oid, body):\n self.required['exe'] = True\n\n def on_ack_dereg_feed(self, oid, body):\n self.required['feed'] = False\n\n def on_ack_dereg_exe(self, oid, body):\n self.required['exe'] = False\n\n def on_eod(self, oid, body):\n \"\"\"Handlering End of Data Event\"\"\"\n self._pbar.update(self._pbar.total - self._pbar.n)\n self._pbar.close()\n self.basic_publish('dereg-feed', sender=self.id)\n self.basic_publish('dereg-exe', sender=self.id)\n self._stop()\n\n def on_fill(self, oid, body):\n \"\"\"Upon filled order\n\t\t- update strategy's position, spot position reversion\n\t\t- update holding time\n\t\t- update position quantity\n\n\t\tParameter:\n\t\t----------\n\t\tfill (Fill Event)\n\t\t\"\"\"\n logger.info('Consuming filled Order')\n fill = body['fill']\n self.pos[fill.symbol].on_fill(fill)\n Q = fill.quantity\n K, D, C = fill.fill_cost, fill.fill_type, fill.commission\n cost = D.value * K * Q\n self.commission += C\n self.cash -= cost + C\n\n def on_market(self, oid, body):\n \"\"\"On market event\n\t\t- update information for each existing poistion\n\t\t- generate orders for rebalancing()\n\t\t- the strategy will calculate signal(s)\n\t\t- and publish them to the exchange for processing\n\t\t- then a \"done\" will be published to indicate\n\t\t\tthe strategy is finish doing everything this heartbeat\n\t\t- so then the risk manager will collect all signals\n\t\t\tbefore sending order for execution\n\n\t\tParameter:\n\t\t----------\n\t\tticks (Market Event)\n\t\t\"\"\"\n if body['freq'] != self.freq:\n return\n ticks = body['ticks']\n 
self._update_data(ticks)\n if self.t >= self.warmup:\n self._calculate_signals()\n equity = self.total_bp\n bp = copy(self.avaliable_bp)\n for S, pos in self.pos.items():\n for order, lvl in pos.generate_orders(equity):\n used_bp = self.on_order(order, lvl, bp)\n bp -= used_bp\n self._pbar.update(1)\n if self.t >= self.warmup:\n self._save_positions()\n\n def on_order(self, order, lvl, bp):\n \"\"\"Handling new order\n\t\t- Orders are generated from signals\n\t\t- will have to check currently avaliable buying power before publish\n\n\t\tParameter:\n\t\t---------\n\t\torder (Order Event)\n\t\tlvl (str): Level of urgency for the order\n\t\t\tThis flag will be used to call corresponding callback\n\t\tbp (float): The amount of avaliable buying power\n\n\t\tReturn:\n\t\t-------\n\t\tused buying power (float)\n\t\t\"\"\"\n S = order.symbol\n need_bp = order.quantity * self.ticks[S].close\n if need_bp <= bp:\n used_bp = need_bp\n if lvl == 'hard_stop':\n self.on_hard_stop(S)\n elif lvl == 'rebalance':\n self.on_rebalance(S)\n self.pos[order.symbol].confirm_order(order)\n logger.info('Publish Order={} for Strategy={}'.format(order,\n self.id))\n self.basic_publish('order', sender=self.id, order=order)\n else:\n used_bp = 0\n return used_bp\n\n def generate_signal(self, symbol, signal_type, **kws):\n \"\"\"Generate a signal that will stored at Strategy level\n\t\t- Then all signals will be batch processed\n\n\t\tParameter\n\t\t---------\n\t\tsymbol: str, the target symbol for the signal\n\t\tsignal_type: {LONG, SHORT, EXIT}\n\t\tkws: additional arguments passes to the SignalEvent class\n\t\t\t- especially the `strength` for percentage of portfolio\n\t\t\t- if not passed, the default `pct_portfolio` will be used\n\t\t\"\"\"\n self.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)\n\n def _calculate_signals(self):\n for pos in self.pos.values():\n pos._calculate_signals()\n self.calculate_signals()\n\n def _update_data(self, ticks):\n \"\"\"Update the existing 
state of strategies\n\t\t- based on given market observation\n\n\t\tNote:\n\t\t-----\n\t\t1. It will always be called before calculating the new signal\n\t\t2. this will be called no matter strategy is in warmup period or not\n\t\t\tbecuase warmup period is used for gathering nessceary data\n\t\t\"\"\"\n self.ticks = ticks\n self.t += 1\n for S, pos in self.pos.items():\n pos._update_data(ticks[S])\n self.update_data(ticks)\n\n def _save_positions(self):\n output = {'timestamp': self.ticks.timestamp, 't': self.t, 'cash':\n self.cash, 'commission': self.commission, 'nav': self.nav}\n for k, v in self.pos.items():\n output[str(k) + '_quantity'] = v.quantity\n output[str(k) + '_mv'] = v.mv\n self._hist.append(output)\n",
"step-5": "import logging, numpy as np, time, pandas as pd\n\nfrom abc import abstractmethod\nfrom kombu import binding\nfrom tqdm import tqdm\nfrom functools import lru_cache\nfrom threading import Thread\nfrom math import ceil\nfrom copy import copy\n\nfrom .pos import Position\nfrom .base import BaseConsumer\nfrom .event import SignalEventPct, OrderEvent\nfrom .conf import LONG, SHORT, EXIT, MKT, BUY, SELL, LOCAL_TZ\nfrom .util import clean_timestamp\nfrom .errors import OverFilling\n\nlogger = logging.getLogger('Strategy')\n\n\n\nclass BaseStrategy(BaseConsumer):\n\t\"\"\"Strategy is an abstract base class providing an interface for\n\tall subsequent (inherited) strategy handling objects.\n\n\tGoal\n\t----\n\tThe goal of a (derived) Strategy object \n\t- based on the inbound 'Tick', calcualte signals\n\t- 'Signal' is at the symbol level which will be published\n\n\tNote\n\t----\n\tThis is designed to work both with historic and live data as\n\tthe Strategy object is agnostic to the data source,\n\tsince it obtains the 'Tick' object from MarketEvent message\n\t\"\"\"\n\tdef __init__(\n\t\tself, symbol_list, allocation, freq, positions,\n\t\tstart, end, warmup=0, fixed_allocation=True,\n\t\tbatch_size=10000\n\t):\n\t\t\"\"\"\n\t\tParameter:\n\t\t----------\n\t\tsymbol_list (list): A list of Contract perm_tick (for data)\n\t\tallocation (float): Dollar amount that this strategy is able to use\n\t\tfreq (conf.FREQ): Data Frequency type for this strategy (for data)\n\t\tpositions (dict of dict):\n\t\t\tA dictionary with perm_tick and a dictionary of arguments\n\n\t\t\t- pct_portfolio (float): percentage of the allocation\n\t\t\t- rebalance (int): # of days to rebalance to pct_portfolio\n\t\t\t- hard_stop (float): hard drawdown gate to close position\n\t\twarmup (int): # of days to warmup the strategy\n\t\tenv_type (string): {'BACKTEST', 'PAPPER', 'LIVE'}\n\t\t\twhich environment to run the startegy\n\t\tstart, end (datetime):\n\t\t\tOnly for backtesting to specificy 
the range of data to test\n\t\t\"\"\"\n\t\tn = ceil(freq.one_day)\n\t\tnum_pos = len(positions)\n\n\t\t# getting neccesary parameters\n\t\tself.symbol_list = symbol_list\n\t\tself.freq = freq\n\t\tself.warmup = warmup * n\n\n\t\tif start:\n\t\t\tself.start_dt = clean_timestamp(start)\n\n\t\tif end:\n\t\t\tself.end_dt = clean_timestamp(end) + pd.DateOffset(seconds=-1, days=1)\n\n\n\t\t# allocation parameters for tracking portfolio\n\t\tself.allocation = allocation\n\t\tself.cash = allocation\n\t\tself.commission = 0\n\t\tself.fixed_allocation = fixed_allocation\n\n\t\tpos_dict = {}\n\t\tfor perm_tick, v in positions.items():\n\t\t\t# want to have position, must know its market ticks for decision\n\t\t\tif perm_tick not in self.symbol_list:\n\t\t\t\tself.symbol_list.append(perm_tick)\n\n\t\t\tpos = Position(\n\t\t\t\tperm_tick,\n\t\t\t\tpct_portfolio=v.get('pct_portfolio', 1/num_pos),\n\t\t\t\trebalance=v.get('rebalance', 0) * n,\n\t\t\t\thard_stop=v.get('hard_stop', 0),\n\t\t\t)\n\t\t\tpos_dict[perm_tick] = pos\n\t\tself.pos = pos_dict\n\n\t\t# starting is always 0, it will increment itself every market tick\n\t\tself.t = 0\n\t\tself._hist = []\n\t\tself.batch_size = batch_size\n\n\t\tsuper().__init__(comp_type='STGY', required=['feed', 'exe'])\n\n\n\t@abstractmethod\n\tdef calculate_signals(self):\n\t\t\"\"\"Provide the mechanism to calculate a list of signals\"\"\"\n\t\traise NotImplementedError(\n\t\t\t\"Should implement calculate_signals()\\n\" + \\\n\t\t\t\"By calling this method to calculate 'Signal' Events\"\n\t\t)\n\n\tdef subscriptions(self):\n\t\treturn [\n\t\t\t('ack-reg-feed', self.id, self.on_ack_reg_feed),\n\t\t\t('ack-dereg_feed', self.id, self.on_ack_dereg_feed),\n\t\t\t('ack-reg-exe', self.id, self.on_ack_reg_exe),\n\t\t\t('ack-dereg-exe', self.id, self.on_ack_dereg_exe),\n\t\t\t('eod', self.id, self.on_eod),\n\t\t\t('tick', self.id, self.on_market),\n\t\t\t('fill', self.id, self.on_fill),\n\t\t]\n\t\t\n\tdef update_data(self, 
ticks):\n\t\tpass\n\n\tdef on_hard_stop(self, symbol):\n\t\tpass\n\n\tdef on_rebalance(self, symbol):\n\t\tpass\n\n\tdef has_position(self, symbol):\n\t\treturn self.pos[symbol].has_position\n\n\tdef has_open_orders(self, symbol):\n\t\treturn self.pos[symbol].has_open_orders\n\n\tdef has_long(self, symbol):\n\t\treturn self.pos[symbol].has_long\n\n\tdef has_short(self, symbol):\n\t\treturn self.pos[symbol].has_short\n\n\t@property\n\tdef nav(self):\n\t\t\"\"\"Net Account Value / Net Liquidating Value\"\"\"\n\t\treturn sum(pos.mv for pos in self.pos.values()) + self.cash\n\n\t@property\n\tdef total_cost(self):\n\t\treturn sum(pos.cost for pos in self.pos.values())\n\n\t@property\n\tdef total_bp(self):\n\t\tif self.fixed_allocation:\n\t\t\treturn self.allocation\n\t\telse:\n\t\t\treturn self.nav\n\n\t@property\n\tdef avaliable_bp(self):\n\t\treturn self.total_bp - self.total_cost\n\n\tdef start(self):\n\t\twhile self.status != 'RUNNING':\t\n\t\t\ttime.sleep(2)\n\n\t\t# setting up progress bar\n\t\tself._pbar = tqdm(\n\t\t\ttotal=int(np.ceil(\n\t\t\t\tpd.bdate_range(self.start_dt, self.end_dt).size\n\t\t\t\t* np.ceil(self.freq.one_day)\n\t\t\t)),\n\t\t\tminiters=int(np.ceil(self.freq.one_day)),\n\t\t\tunit=' tick<{}>'.format(self.freq.value),\n\t\t)\n\n\t\t# publish event to get started\n\t\tlogger.info('Warming up Strategy')\n\t\tself.basic_publish('warmup', sender=self.id)\n\t\tlogger.info('Really Starting up calculating Signals')\n\t\tself.basic_publish('next', sender=self.id)\n\n\n\tdef on_ack_reg_feed(self, oid, body):\n\t\tself.required['feed'] = True\n\n\tdef on_ack_reg_exe(self, oid, body):\n\t\tself.required['exe'] = True\n\n\tdef on_ack_dereg_feed(self, oid, body):\n\t\tself.required['feed'] = False\n\n\tdef on_ack_dereg_exe(self, oid, body):\n\t\tself.required['exe'] = False\n\n\n\tdef on_eod(self, oid, body):\n\t\t\"\"\"Handlering End of Data Event\"\"\"\n\t\tself._pbar.update(self._pbar.total - 
self._pbar.n)\n\t\tself._pbar.close()\n\n\t\tself.basic_publish('dereg-feed', sender=self.id)\n\t\tself.basic_publish('dereg-exe', sender=self.id)\n\n\t\tself._stop()\n\n\n\tdef on_fill(self, oid, body):\n\t\t\"\"\"Upon filled order\n\t\t- update strategy's position, spot position reversion\n\t\t- update holding time\n\t\t- update position quantity\n\n\t\tParameter:\n\t\t----------\n\t\tfill (Fill Event)\n\t\t\"\"\"\n\t\tlogger.info('Consuming filled Order')\n\t\tfill = body['fill']\n\n\t\t# update the position first\n\t\tself.pos[fill.symbol].on_fill(fill)\n\n\t\t# getting data from the fill event\n\t\tQ = fill.quantity\n\t\tK, D, C = fill.fill_cost, fill.fill_type, fill.commission\n\n\t\tcost = D.value * K * Q\n\n\t\tself.commission += C\n\t\tself.cash -= cost + C\n\n\n\tdef on_market(self, oid, body):\n\t\t\"\"\"On market event\n\t\t- update information for each existing poistion\n\t\t- generate orders for rebalancing()\n\t\t- the strategy will calculate signal(s)\n\t\t- and publish them to the exchange for processing\n\t\t- then a \"done\" will be published to indicate\n\t\t\tthe strategy is finish doing everything this heartbeat\n\t\t- so then the risk manager will collect all signals\n\t\t\tbefore sending order for execution\n\n\t\tParameter:\n\t\t----------\n\t\tticks (Market Event)\n\t\t\"\"\"\n\t\tif body['freq'] != self.freq: return\n\n\t\tticks = body['ticks']\n\t\tself._update_data(ticks)\n\n\t\tif self.t >= self.warmup:\n\t\t\tself._calculate_signals()\n\n\t\t\t# publish generated signals\n\t\t\tequity = self.total_bp\n\t\t\tbp = copy(self.avaliable_bp) # current snap_shot of buying power\n\t\t\tfor S, pos in self.pos.items():\n\t\t\t\tfor order, lvl in pos.generate_orders(equity):\n\t\t\t\t\tused_bp = self.on_order(order, lvl, bp)\n\t\t\t\t\tbp -= used_bp\n\t\t\t\t\n\t\t\t# save old strategy performance history\n\t\t\tself._pbar.update(1)\n\t\t\n\t\t# if ticks.timestamp >= self.start_dt:\n\t\t\t# self.basic_publish('next', sender=self.id)\n\n\t\tif 
self.t >= self.warmup:\n\t\t\tself._save_positions()\n\n\n\tdef on_order(self, order, lvl, bp):\n\t\t\"\"\"Handling new order\n\t\t- Orders are generated from signals\n\t\t- will have to check currently avaliable buying power before publish\n\n\t\tParameter:\n\t\t---------\n\t\torder (Order Event)\n\t\tlvl (str): Level of urgency for the order\n\t\t\tThis flag will be used to call corresponding callback\n\t\tbp (float): The amount of avaliable buying power\n\n\t\tReturn:\n\t\t-------\n\t\tused buying power (float)\n\t\t\"\"\"\n\t\tS = order.symbol\n\n\t\tneed_bp = order.quantity * self.ticks[S].close\n\t\tif need_bp <= bp: # have enough buying power to place order\n\t\t\tused_bp = need_bp\n\n\t\t\tif lvl == 'hard_stop':\n\t\t\t\tself.on_hard_stop(S)\n\t\t\telif lvl == 'rebalance':\n\t\t\t\tself.on_rebalance(S)\n\n\t\t\tself.pos[order.symbol].confirm_order(order)\n\t\t\tlogger.info(\n\t\t\t\t'Publish Order={} for Strategy={}'\n\t\t\t\t.format(order, self.id)\n\t\t\t)\n\t\t\tself.basic_publish('order', sender=self.id, order=order)\n\t\telse:\n\t\t\tused_bp = 0\n\t\treturn used_bp\n\n\n\tdef generate_signal(self, symbol, signal_type, **kws):\n\t\t\"\"\"Generate a signal that will stored at Strategy level\n\t\t- Then all signals will be batch processed\n\n\t\tParameter\n\t\t---------\n\t\tsymbol: str, the target symbol for the signal\n\t\tsignal_type: {LONG, SHORT, EXIT}\n\t\tkws: additional arguments passes to the SignalEvent class\n\t\t\t- especially the `strength` for percentage of portfolio\n\t\t\t- if not passed, the default `pct_portfolio` will be used\n\t\t\"\"\"\n\t\tself.pos[symbol]._generate_signal(signal_type, lvl='normal', **kws)\n\n\n\tdef _calculate_signals(self):\n\t\t# update existing position information\n\t\tfor pos in self.pos.values():\n\t\t\tpos._calculate_signals()\n\n\t\tself.calculate_signals()\n\n\n\tdef _update_data(self, ticks):\n\t\t\"\"\"Update the existing state of strategies\n\t\t- based on given market 
observation\n\n\t\tNote:\n\t\t-----\n\t\t1. It will always be called before calculating the new signal\n\t\t2. this will be called no matter strategy is in warmup period or not\n\t\t\tbecuase warmup period is used for gathering nessceary data\n\t\t\"\"\"\n\t\tself.ticks = ticks\n\t\tself.t += 1\n\n\t\tfor S, pos in self.pos.items():\n\t\t\tpos._update_data(ticks[S])\n\n\t\tself.update_data(ticks)\n\n\n\tdef _save_positions(self):\n\t\toutput = {\n\t\t\t'timestamp': self.ticks.timestamp, 't': self.t,\n\t\t\t'cash': self.cash, 'commission': self.commission,\n\t\t\t'nav': self.nav,\n\t\t}\n\t\tfor k, v in self.pos.items():\n\t\t\toutput[str(k)+'_quantity'] = v.quantity\n\t\t\toutput[str(k)+'_mv'] = v.mv\n\n\t\tself._hist.append(output)\n\t\t",
"step-ids": [
18,
19,
28,
30,
32
]
}
|
[
18,
19,
28,
30,
32
] |
<|reserved_special_token_0|>
class OptbenchRun(MeasurementSource):
def __init__(self, optbench_scenario: str, query: int):
self._executor: Optional[Executor] = None
self._optbench_scenario = optbench_scenario
self._query = query
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=
name_with_query)
class OptbenchTPCH(Scenario):
"""Run optbench TPCH for optimizer benchmarks"""
QUERY = 1
def init(self) ->List[Action]:
return [OptbenchInit('tpch')]
def benchmark(self) ->MeasurementSource:
return OptbenchRun('tpch', self.QUERY)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class OptbenchInit(Action):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class OptbenchRun(MeasurementSource):
def __init__(self, optbench_scenario: str, query: int):
self._executor: Optional[Executor] = None
self._optbench_scenario = optbench_scenario
self._query = query
def run(self, executor: Optional[Executor]=None) ->List[Timestamp]:
assert not (executor is None and self._executor is None)
assert not (executor is not None and self._executor is not None)
e = executor or self._executor
queries = materialize.optbench.sql.parse_from_file(Path(
f'misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql'
))
assert 1 <= self._query <= len(queries)
query = queries[self._query - 1]
explain_query = materialize.optbench.sql.Query(query).explain(timing
=True)
explain_output = materialize.optbench.sql.ExplainOutput(e.
_composition.sql_query(explain_query)[0][0])
timestamps = [0, float(explain_output.optimization_time()) / 3]
return timestamps
<|reserved_special_token_0|>
@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=
name_with_query)
class OptbenchTPCH(Scenario):
"""Run optbench TPCH for optimizer benchmarks"""
QUERY = 1
def init(self) ->List[Action]:
return [OptbenchInit('tpch')]
def benchmark(self) ->MeasurementSource:
return OptbenchRun('tpch', self.QUERY)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class OptbenchInit(Action):
def __init__(self, scenario: str, no_indexes: bool=False) ->None:
self._executor: Optional[Executor] = None
self._scenario = scenario
self._no_indexes = no_indexes
def run(self, executor: Optional[Executor]=None) ->None:
e = executor or self._executor
statements = materialize.optbench.sql.parse_from_file(Path(
f'misc/python/materialize/optbench/schema/{self._scenario}.sql'))
if self._no_indexes:
idx_re = re.compile('(create|create\\s+default|drop)\\s+index\\s+')
statements = [statement for statement in statements if not
idx_re.match(statement.lower())]
e._composition.sql('\n'.join(statements))
class OptbenchRun(MeasurementSource):
def __init__(self, optbench_scenario: str, query: int):
self._executor: Optional[Executor] = None
self._optbench_scenario = optbench_scenario
self._query = query
def run(self, executor: Optional[Executor]=None) ->List[Timestamp]:
assert not (executor is None and self._executor is None)
assert not (executor is not None and self._executor is not None)
e = executor or self._executor
queries = materialize.optbench.sql.parse_from_file(Path(
f'misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql'
))
assert 1 <= self._query <= len(queries)
query = queries[self._query - 1]
explain_query = materialize.optbench.sql.Query(query).explain(timing
=True)
explain_output = materialize.optbench.sql.ExplainOutput(e.
_composition.sql_query(explain_query)[0][0])
timestamps = [0, float(explain_output.optimization_time()) / 3]
return timestamps
def name_with_query(cls: Type['OptbenchTPCH'], num: int, params_dict: Dict
) ->str:
return f"OptbenchTPCHQ{params_dict['QUERY']:02d}"
@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=
name_with_query)
class OptbenchTPCH(Scenario):
"""Run optbench TPCH for optimizer benchmarks"""
QUERY = 1
def init(self) ->List[Action]:
return [OptbenchInit('tpch')]
def benchmark(self) ->MeasurementSource:
return OptbenchRun('tpch', self.QUERY)
<|reserved_special_token_1|>
import re
from pathlib import Path
from typing import Dict, List, Optional, Type
from parameterized import parameterized_class
import materialize.optbench
import materialize.optbench.sql
from materialize.feature_benchmark.action import Action
from materialize.feature_benchmark.executor import Executor
from materialize.feature_benchmark.measurement_source import MeasurementSource, Timestamp
from materialize.feature_benchmark.scenario import Scenario
class OptbenchInit(Action):
def __init__(self, scenario: str, no_indexes: bool=False) ->None:
self._executor: Optional[Executor] = None
self._scenario = scenario
self._no_indexes = no_indexes
def run(self, executor: Optional[Executor]=None) ->None:
e = executor or self._executor
statements = materialize.optbench.sql.parse_from_file(Path(
f'misc/python/materialize/optbench/schema/{self._scenario}.sql'))
if self._no_indexes:
idx_re = re.compile('(create|create\\s+default|drop)\\s+index\\s+')
statements = [statement for statement in statements if not
idx_re.match(statement.lower())]
e._composition.sql('\n'.join(statements))
class OptbenchRun(MeasurementSource):
def __init__(self, optbench_scenario: str, query: int):
self._executor: Optional[Executor] = None
self._optbench_scenario = optbench_scenario
self._query = query
def run(self, executor: Optional[Executor]=None) ->List[Timestamp]:
assert not (executor is None and self._executor is None)
assert not (executor is not None and self._executor is not None)
e = executor or self._executor
queries = materialize.optbench.sql.parse_from_file(Path(
f'misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql'
))
assert 1 <= self._query <= len(queries)
query = queries[self._query - 1]
explain_query = materialize.optbench.sql.Query(query).explain(timing
=True)
explain_output = materialize.optbench.sql.ExplainOutput(e.
_composition.sql_query(explain_query)[0][0])
timestamps = [0, float(explain_output.optimization_time()) / 3]
return timestamps
def name_with_query(cls: Type['OptbenchTPCH'], num: int, params_dict: Dict
) ->str:
return f"OptbenchTPCHQ{params_dict['QUERY']:02d}"
@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=
name_with_query)
class OptbenchTPCH(Scenario):
"""Run optbench TPCH for optimizer benchmarks"""
QUERY = 1
def init(self) ->List[Action]:
return [OptbenchInit('tpch')]
def benchmark(self) ->MeasurementSource:
return OptbenchRun('tpch', self.QUERY)
<|reserved_special_token_1|>
# Copyright Materialize, Inc. and contributors. All rights reserved.
#
# Use of this software is governed by the Business Source License
# included in the LICENSE file at the root of this repository.
#
# As of the Change Date specified in that file, in accordance with
# the Business Source License, use of this software will be governed
# by the Apache License, Version 2.0.
import re
from pathlib import Path
from typing import Dict, List, Optional, Type
from parameterized import parameterized_class # type: ignore
import materialize.optbench
import materialize.optbench.sql
from materialize.feature_benchmark.action import Action
from materialize.feature_benchmark.executor import Executor
from materialize.feature_benchmark.measurement_source import (
MeasurementSource,
Timestamp,
)
from materialize.feature_benchmark.scenario import Scenario
class OptbenchInit(Action):
def __init__(self, scenario: str, no_indexes: bool = False) -> None:
self._executor: Optional[Executor] = None
self._scenario = scenario
self._no_indexes = no_indexes
def run(self, executor: Optional[Executor] = None) -> None:
e = executor or self._executor
statements = materialize.optbench.sql.parse_from_file(
Path(f"misc/python/materialize/optbench/schema/{self._scenario}.sql")
)
if self._no_indexes:
idx_re = re.compile(r"(create|create\s+default|drop)\s+index\s+")
statements = [
statement
for statement in statements
if not idx_re.match(statement.lower())
]
e._composition.sql("\n".join(statements)) # type: ignore
class OptbenchRun(MeasurementSource):
def __init__(self, optbench_scenario: str, query: int):
self._executor: Optional[Executor] = None
self._optbench_scenario = optbench_scenario
self._query = query
def run(self, executor: Optional[Executor] = None) -> List[Timestamp]:
assert not (executor is None and self._executor is None)
assert not (executor is not None and self._executor is not None)
e = executor or self._executor
queries = materialize.optbench.sql.parse_from_file(
Path(
f"misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql"
)
)
assert 1 <= self._query <= len(queries)
query = queries[self._query - 1]
explain_query = materialize.optbench.sql.Query(query).explain(timing=True)
explain_output = materialize.optbench.sql.ExplainOutput(
e._composition.sql_query(explain_query)[0][0] # type: ignore
)
# Optimization time is in microseconds, divide by 3 to get a more readable number (still in wrong unit)
timestamps = [0, float(explain_output.optimization_time()) / 3] # type: ignore
return timestamps
def name_with_query(cls: Type["OptbenchTPCH"], num: int, params_dict: Dict) -> str:
return f"OptbenchTPCHQ{params_dict['QUERY']:02d}"
@parameterized_class(
[{"QUERY": i} for i in range(1, 23)], class_name_func=name_with_query
)
class OptbenchTPCH(Scenario):
"""Run optbench TPCH for optimizer benchmarks"""
QUERY = 1
def init(self) -> List[Action]:
return [OptbenchInit("tpch")]
def benchmark(self) -> MeasurementSource:
return OptbenchRun("tpch", self.QUERY)
|
flexible
|
{
"blob_id": "97ca134ffce404f4b2bc7352d4aac73a7bb764bd",
"index": 5708,
"step-1": "<mask token>\n\n\nclass OptbenchRun(MeasurementSource):\n\n def __init__(self, optbench_scenario: str, query: int):\n self._executor: Optional[Executor] = None\n self._optbench_scenario = optbench_scenario\n self._query = query\n <mask token>\n\n\n<mask token>\n\n\n@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=\n name_with_query)\nclass OptbenchTPCH(Scenario):\n \"\"\"Run optbench TPCH for optimizer benchmarks\"\"\"\n QUERY = 1\n\n def init(self) ->List[Action]:\n return [OptbenchInit('tpch')]\n\n def benchmark(self) ->MeasurementSource:\n return OptbenchRun('tpch', self.QUERY)\n",
"step-2": "<mask token>\n\n\nclass OptbenchInit(Action):\n <mask token>\n <mask token>\n\n\nclass OptbenchRun(MeasurementSource):\n\n def __init__(self, optbench_scenario: str, query: int):\n self._executor: Optional[Executor] = None\n self._optbench_scenario = optbench_scenario\n self._query = query\n\n def run(self, executor: Optional[Executor]=None) ->List[Timestamp]:\n assert not (executor is None and self._executor is None)\n assert not (executor is not None and self._executor is not None)\n e = executor or self._executor\n queries = materialize.optbench.sql.parse_from_file(Path(\n f'misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql'\n ))\n assert 1 <= self._query <= len(queries)\n query = queries[self._query - 1]\n explain_query = materialize.optbench.sql.Query(query).explain(timing\n =True)\n explain_output = materialize.optbench.sql.ExplainOutput(e.\n _composition.sql_query(explain_query)[0][0])\n timestamps = [0, float(explain_output.optimization_time()) / 3]\n return timestamps\n\n\n<mask token>\n\n\n@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=\n name_with_query)\nclass OptbenchTPCH(Scenario):\n \"\"\"Run optbench TPCH for optimizer benchmarks\"\"\"\n QUERY = 1\n\n def init(self) ->List[Action]:\n return [OptbenchInit('tpch')]\n\n def benchmark(self) ->MeasurementSource:\n return OptbenchRun('tpch', self.QUERY)\n",
"step-3": "<mask token>\n\n\nclass OptbenchInit(Action):\n\n def __init__(self, scenario: str, no_indexes: bool=False) ->None:\n self._executor: Optional[Executor] = None\n self._scenario = scenario\n self._no_indexes = no_indexes\n\n def run(self, executor: Optional[Executor]=None) ->None:\n e = executor or self._executor\n statements = materialize.optbench.sql.parse_from_file(Path(\n f'misc/python/materialize/optbench/schema/{self._scenario}.sql'))\n if self._no_indexes:\n idx_re = re.compile('(create|create\\\\s+default|drop)\\\\s+index\\\\s+')\n statements = [statement for statement in statements if not\n idx_re.match(statement.lower())]\n e._composition.sql('\\n'.join(statements))\n\n\nclass OptbenchRun(MeasurementSource):\n\n def __init__(self, optbench_scenario: str, query: int):\n self._executor: Optional[Executor] = None\n self._optbench_scenario = optbench_scenario\n self._query = query\n\n def run(self, executor: Optional[Executor]=None) ->List[Timestamp]:\n assert not (executor is None and self._executor is None)\n assert not (executor is not None and self._executor is not None)\n e = executor or self._executor\n queries = materialize.optbench.sql.parse_from_file(Path(\n f'misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql'\n ))\n assert 1 <= self._query <= len(queries)\n query = queries[self._query - 1]\n explain_query = materialize.optbench.sql.Query(query).explain(timing\n =True)\n explain_output = materialize.optbench.sql.ExplainOutput(e.\n _composition.sql_query(explain_query)[0][0])\n timestamps = [0, float(explain_output.optimization_time()) / 3]\n return timestamps\n\n\ndef name_with_query(cls: Type['OptbenchTPCH'], num: int, params_dict: Dict\n ) ->str:\n return f\"OptbenchTPCHQ{params_dict['QUERY']:02d}\"\n\n\n@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=\n name_with_query)\nclass OptbenchTPCH(Scenario):\n \"\"\"Run optbench TPCH for optimizer benchmarks\"\"\"\n QUERY = 1\n\n def init(self) 
->List[Action]:\n return [OptbenchInit('tpch')]\n\n def benchmark(self) ->MeasurementSource:\n return OptbenchRun('tpch', self.QUERY)\n",
"step-4": "import re\nfrom pathlib import Path\nfrom typing import Dict, List, Optional, Type\nfrom parameterized import parameterized_class\nimport materialize.optbench\nimport materialize.optbench.sql\nfrom materialize.feature_benchmark.action import Action\nfrom materialize.feature_benchmark.executor import Executor\nfrom materialize.feature_benchmark.measurement_source import MeasurementSource, Timestamp\nfrom materialize.feature_benchmark.scenario import Scenario\n\n\nclass OptbenchInit(Action):\n\n def __init__(self, scenario: str, no_indexes: bool=False) ->None:\n self._executor: Optional[Executor] = None\n self._scenario = scenario\n self._no_indexes = no_indexes\n\n def run(self, executor: Optional[Executor]=None) ->None:\n e = executor or self._executor\n statements = materialize.optbench.sql.parse_from_file(Path(\n f'misc/python/materialize/optbench/schema/{self._scenario}.sql'))\n if self._no_indexes:\n idx_re = re.compile('(create|create\\\\s+default|drop)\\\\s+index\\\\s+')\n statements = [statement for statement in statements if not\n idx_re.match(statement.lower())]\n e._composition.sql('\\n'.join(statements))\n\n\nclass OptbenchRun(MeasurementSource):\n\n def __init__(self, optbench_scenario: str, query: int):\n self._executor: Optional[Executor] = None\n self._optbench_scenario = optbench_scenario\n self._query = query\n\n def run(self, executor: Optional[Executor]=None) ->List[Timestamp]:\n assert not (executor is None and self._executor is None)\n assert not (executor is not None and self._executor is not None)\n e = executor or self._executor\n queries = materialize.optbench.sql.parse_from_file(Path(\n f'misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql'\n ))\n assert 1 <= self._query <= len(queries)\n query = queries[self._query - 1]\n explain_query = materialize.optbench.sql.Query(query).explain(timing\n =True)\n explain_output = materialize.optbench.sql.ExplainOutput(e.\n _composition.sql_query(explain_query)[0][0])\n 
timestamps = [0, float(explain_output.optimization_time()) / 3]\n return timestamps\n\n\ndef name_with_query(cls: Type['OptbenchTPCH'], num: int, params_dict: Dict\n ) ->str:\n return f\"OptbenchTPCHQ{params_dict['QUERY']:02d}\"\n\n\n@parameterized_class([{'QUERY': i} for i in range(1, 23)], class_name_func=\n name_with_query)\nclass OptbenchTPCH(Scenario):\n \"\"\"Run optbench TPCH for optimizer benchmarks\"\"\"\n QUERY = 1\n\n def init(self) ->List[Action]:\n return [OptbenchInit('tpch')]\n\n def benchmark(self) ->MeasurementSource:\n return OptbenchRun('tpch', self.QUERY)\n",
"step-5": "# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\n\nimport re\nfrom pathlib import Path\nfrom typing import Dict, List, Optional, Type\n\nfrom parameterized import parameterized_class # type: ignore\n\nimport materialize.optbench\nimport materialize.optbench.sql\nfrom materialize.feature_benchmark.action import Action\nfrom materialize.feature_benchmark.executor import Executor\nfrom materialize.feature_benchmark.measurement_source import (\n MeasurementSource,\n Timestamp,\n)\nfrom materialize.feature_benchmark.scenario import Scenario\n\n\nclass OptbenchInit(Action):\n def __init__(self, scenario: str, no_indexes: bool = False) -> None:\n self._executor: Optional[Executor] = None\n self._scenario = scenario\n self._no_indexes = no_indexes\n\n def run(self, executor: Optional[Executor] = None) -> None:\n e = executor or self._executor\n statements = materialize.optbench.sql.parse_from_file(\n Path(f\"misc/python/materialize/optbench/schema/{self._scenario}.sql\")\n )\n if self._no_indexes:\n idx_re = re.compile(r\"(create|create\\s+default|drop)\\s+index\\s+\")\n statements = [\n statement\n for statement in statements\n if not idx_re.match(statement.lower())\n ]\n e._composition.sql(\"\\n\".join(statements)) # type: ignore\n\n\nclass OptbenchRun(MeasurementSource):\n def __init__(self, optbench_scenario: str, query: int):\n self._executor: Optional[Executor] = None\n self._optbench_scenario = optbench_scenario\n self._query = query\n\n def run(self, executor: Optional[Executor] = None) -> List[Timestamp]:\n assert not (executor is None and self._executor is None)\n assert not (executor is not None and self._executor is not 
None)\n e = executor or self._executor\n\n queries = materialize.optbench.sql.parse_from_file(\n Path(\n f\"misc/python/materialize/optbench/workload/{self._optbench_scenario}.sql\"\n )\n )\n assert 1 <= self._query <= len(queries)\n query = queries[self._query - 1]\n explain_query = materialize.optbench.sql.Query(query).explain(timing=True)\n explain_output = materialize.optbench.sql.ExplainOutput(\n e._composition.sql_query(explain_query)[0][0] # type: ignore\n )\n # Optimization time is in microseconds, divide by 3 to get a more readable number (still in wrong unit)\n timestamps = [0, float(explain_output.optimization_time()) / 3] # type: ignore\n return timestamps\n\n\ndef name_with_query(cls: Type[\"OptbenchTPCH\"], num: int, params_dict: Dict) -> str:\n return f\"OptbenchTPCHQ{params_dict['QUERY']:02d}\"\n\n\n@parameterized_class(\n [{\"QUERY\": i} for i in range(1, 23)], class_name_func=name_with_query\n)\nclass OptbenchTPCH(Scenario):\n \"\"\"Run optbench TPCH for optimizer benchmarks\"\"\"\n\n QUERY = 1\n\n def init(self) -> List[Action]:\n return [OptbenchInit(\"tpch\")]\n\n def benchmark(self) -> MeasurementSource:\n return OptbenchRun(\"tpch\", self.QUERY)\n",
"step-ids": [
7,
9,
12,
13,
14
]
}
|
[
7,
9,
12,
13,
14
] |
'''
swea 2806 N-Queen
'''
def nqueen(depth, n, history):
global cnt
if depth == n:
cnt += 1
else:
for i in range(n):
if i not in history:
for index, value in enumerate(history):
if abs(depth - index) == abs(i - value):
break
else:
history.append(i)
nqueen(depth + 1, n, history)
history.remove(i)
for t in range(int(input())):
cnt = 0
nqueen(0, int(input()), [])
print("#{} {}".format(t+1, cnt))
|
normal
|
{
"blob_id": "b35686f7feec2c4a905007f3c105b6fa05b87297",
"index": 5365,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef nqueen(depth, n, history):\n global cnt\n if depth == n:\n cnt += 1\n else:\n for i in range(n):\n if i not in history:\n for index, value in enumerate(history):\n if abs(depth - index) == abs(i - value):\n break\n else:\n history.append(i)\n nqueen(depth + 1, n, history)\n history.remove(i)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef nqueen(depth, n, history):\n global cnt\n if depth == n:\n cnt += 1\n else:\n for i in range(n):\n if i not in history:\n for index, value in enumerate(history):\n if abs(depth - index) == abs(i - value):\n break\n else:\n history.append(i)\n nqueen(depth + 1, n, history)\n history.remove(i)\n\n\nfor t in range(int(input())):\n cnt = 0\n nqueen(0, int(input()), [])\n print('#{} {}'.format(t + 1, cnt))\n",
"step-4": "'''\nswea 2806 N-Queen\n'''\ndef nqueen(depth, n, history):\n global cnt\n if depth == n:\n cnt += 1\n else:\n for i in range(n):\n if i not in history:\n for index, value in enumerate(history):\n if abs(depth - index) == abs(i - value):\n break\n else:\n history.append(i)\n nqueen(depth + 1, n, history)\n history.remove(i)\n\n\nfor t in range(int(input())):\n cnt = 0\n nqueen(0, int(input()), [])\n print(\"#{} {}\".format(t+1, cnt))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('app', '0005_alter_users_is_active')]
operations = [migrations.AlterModelManagers(name='users', managers=[])]
<|reserved_special_token_1|>
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('app', '0005_alter_users_is_active')]
operations = [migrations.AlterModelManagers(name='users', managers=[])]
<|reserved_special_token_1|>
# Generated by Django 3.2.7 on 2021-10-01 08:36
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('app', '0005_alter_users_is_active'),
]
operations = [
migrations.AlterModelManagers(
name='users',
managers=[
],
),
]
|
flexible
|
{
"blob_id": "6670295241516664e30c7db5cd3b5e2fb6c4fb05",
"index": 1985,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('app', '0005_alter_users_is_active')]\n operations = [migrations.AlterModelManagers(name='users', managers=[])]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('app', '0005_alter_users_is_active')]\n operations = [migrations.AlterModelManagers(name='users', managers=[])]\n",
"step-5": "# Generated by Django 3.2.7 on 2021-10-01 08:36\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('app', '0005_alter_users_is_active'),\n ]\n\n operations = [\n migrations.AlterModelManagers(\n name='users',\n managers=[\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from types import *
class Tokenizer:
    """Lexer for simple arithmetic expressions.

    Each call to :meth:`next` consumes one token from the buffer and
    returns a token object (``Symbol``, ``OpenParen``, ``CloseParen``,
    ``Number``, ``Undefined`` or ``Eof``) declared in the sibling
    ``types`` module.
    """

    def __init__(self, buf):
        self.buf = buf    # full input text
        self.index = 0    # cursor position within ``buf``

    def token(self):
        """Return the character currently under the cursor."""
        return self.buf[self.index]

    def move(self, value):
        """Advance the cursor by ``value`` characters."""
        self.index += value

    def skip_whitespaces(self):
        """Move the cursor past any run of whitespace characters."""
        while self.index < len(self.buf) and self.token().isspace():
            self.move(1)

    def next(self):
        """Consume and return the next token; ``Eof()`` at end of input."""
        self.skip_whitespaces()
        if self.index >= len(self.buf):
            return Eof()
        ch = self.token()
        if ch in '+-*/':
            self.move(1)
            return Symbol(ch)
        if ch == '(':
            self.move(1)
            return OpenParen()
        if ch == ')':
            self.move(1)
            return CloseParen()
        if ch.isnumeric():
            # Accumulate a multi-digit decimal literal.
            number = 0
            while self.index < len(self.buf) and self.token().isnumeric():
                number = number * 10 + int(self.token())
                self.move(1)
            return Number(number)
        # Any other character becomes an Undefined token.
        self.move(1)
        return Undefined(ch)
|
normal
|
{
"blob_id": "282bccf20cfb114e31c5465c110819796bf81bc0",
"index": 9318,
"step-1": "<mask token>\n\n\nclass Tokenizer:\n\n def __init__(self, buf):\n self.buf = buf\n self.index = 0\n <mask token>\n <mask token>\n\n def skip_whitespaces(self):\n while self.index < len(self.buf) and self.token().isspace():\n self.move(1)\n\n def next(self):\n self.skip_whitespaces()\n if self.index < len(self.buf):\n if self.token() == '+':\n self.move(1)\n return Symbol('+')\n elif self.token() == '-':\n self.move(1)\n return Symbol('-')\n elif self.token() == '*':\n self.move(1)\n return Symbol('*')\n elif self.token() == '/':\n self.move(1)\n return Symbol('/')\n elif self.token() == '(':\n self.move(1)\n return OpenParen()\n elif self.token() == ')':\n self.move(1)\n return CloseParen()\n elif self.token().isnumeric():\n number = int(self.token())\n self.move(1)\n while self.index < len(self.buf) and self.token().isnumeric():\n number = number * 10 + int(self.token())\n self.move(1)\n return Number(number)\n else:\n char = self.token()\n self.move(1)\n return Undefined(char)\n else:\n return Eof()\n",
"step-2": "<mask token>\n\n\nclass Tokenizer:\n\n def __init__(self, buf):\n self.buf = buf\n self.index = 0\n <mask token>\n\n def move(self, value):\n self.index += value\n\n def skip_whitespaces(self):\n while self.index < len(self.buf) and self.token().isspace():\n self.move(1)\n\n def next(self):\n self.skip_whitespaces()\n if self.index < len(self.buf):\n if self.token() == '+':\n self.move(1)\n return Symbol('+')\n elif self.token() == '-':\n self.move(1)\n return Symbol('-')\n elif self.token() == '*':\n self.move(1)\n return Symbol('*')\n elif self.token() == '/':\n self.move(1)\n return Symbol('/')\n elif self.token() == '(':\n self.move(1)\n return OpenParen()\n elif self.token() == ')':\n self.move(1)\n return CloseParen()\n elif self.token().isnumeric():\n number = int(self.token())\n self.move(1)\n while self.index < len(self.buf) and self.token().isnumeric():\n number = number * 10 + int(self.token())\n self.move(1)\n return Number(number)\n else:\n char = self.token()\n self.move(1)\n return Undefined(char)\n else:\n return Eof()\n",
"step-3": "<mask token>\n\n\nclass Tokenizer:\n\n def __init__(self, buf):\n self.buf = buf\n self.index = 0\n\n def token(self):\n return self.buf[self.index]\n\n def move(self, value):\n self.index += value\n\n def skip_whitespaces(self):\n while self.index < len(self.buf) and self.token().isspace():\n self.move(1)\n\n def next(self):\n self.skip_whitespaces()\n if self.index < len(self.buf):\n if self.token() == '+':\n self.move(1)\n return Symbol('+')\n elif self.token() == '-':\n self.move(1)\n return Symbol('-')\n elif self.token() == '*':\n self.move(1)\n return Symbol('*')\n elif self.token() == '/':\n self.move(1)\n return Symbol('/')\n elif self.token() == '(':\n self.move(1)\n return OpenParen()\n elif self.token() == ')':\n self.move(1)\n return CloseParen()\n elif self.token().isnumeric():\n number = int(self.token())\n self.move(1)\n while self.index < len(self.buf) and self.token().isnumeric():\n number = number * 10 + int(self.token())\n self.move(1)\n return Number(number)\n else:\n char = self.token()\n self.move(1)\n return Undefined(char)\n else:\n return Eof()\n",
"step-4": "from types import *\n\n\nclass Tokenizer:\n\n def __init__(self, buf):\n self.buf = buf\n self.index = 0\n\n def token(self):\n return self.buf[self.index]\n\n def move(self, value):\n self.index += value\n\n def skip_whitespaces(self):\n while self.index < len(self.buf) and self.token().isspace():\n self.move(1)\n\n def next(self):\n self.skip_whitespaces()\n if self.index < len(self.buf):\n if self.token() == '+':\n self.move(1)\n return Symbol('+')\n elif self.token() == '-':\n self.move(1)\n return Symbol('-')\n elif self.token() == '*':\n self.move(1)\n return Symbol('*')\n elif self.token() == '/':\n self.move(1)\n return Symbol('/')\n elif self.token() == '(':\n self.move(1)\n return OpenParen()\n elif self.token() == ')':\n self.move(1)\n return CloseParen()\n elif self.token().isnumeric():\n number = int(self.token())\n self.move(1)\n while self.index < len(self.buf) and self.token().isnumeric():\n number = number * 10 + int(self.token())\n self.move(1)\n return Number(number)\n else:\n char = self.token()\n self.move(1)\n return Undefined(char)\n else:\n return Eof()\n",
"step-5": "from types import *\n\nclass Tokenizer:\n def __init__(self, buf):\n self.buf = buf\n self.index = 0\n\n def token(self):\n return self.buf[self.index]\n\n def move(self, value):\n self.index += value\n\n def skip_whitespaces(self):\n while self.index < len(self.buf) and self.token().isspace():\n self.move(1)\n\n def next(self):\n self.skip_whitespaces()\n\n if self.index < len(self.buf):\n if self.token() == '+':\n self.move(1)\n return Symbol('+')\n elif self.token() == '-':\n self.move(1)\n return Symbol('-')\n elif self.token() == '*':\n self.move(1)\n return Symbol('*')\n elif self.token() == '/':\n self.move(1)\n return Symbol('/')\n elif self.token() == '(':\n self.move(1)\n return OpenParen()\n elif self.token() == ')':\n self.move(1)\n return CloseParen()\n else:\n if self.token().isnumeric():\n number = int(self.token())\n self.move(1)\n while self.index < len(self.buf) and self.token().isnumeric():\n number = number * 10 + int(self.token())\n self.move(1)\n return Number(number)\n else:\n char = self.token()\n self.move(1)\n return Undefined(char)\n else:\n return Eof()\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
#!/usr/bin/env python3
import collections
import glob
import os
import pandas as pd
import numpy as np
import torch.nn.functional as F
import PIL.Image as Image
from inference.base_image_utils import get_scale_size, image2batch, choose_center_full_size_crop_params
from inference.metrics.fid.fid_score import _compute_statistics_of_images, \
calculate_frechet_distance
from inference.metrics.fid.inception import InceptionV3
from inference.metrics.lpips import LPIPSLossWrapper
from inference.perspective import load_video_frames_from_folder, FlowPredictor
from inference.segmentation import SegmentationModule
from inference.encode_and_animate import calc_segmentation_posterior_error, sum_dicts
from inference.metrics.ssim import SSIM
import constants
# Segmentation class ids treated as "movable" when building motion masks.
# NOTE(review): presumably ADE20k ids for sky/water — confirm against the
# class list of the ade20k-resnet50dilated-ppm_deepsup checkpoint.
MOVABLE_CLASSES = [2, 21]
def calc_optical_flow_metrics(flow_predictor, frames, movable_mask):
    """Estimate apparent motion magnitude inside the movable region.

    :param flow_predictor: model exposing ``predict_flow`` over frames scaled
        to [-1, 1]; its output at index 1 holds the flow field.
    :param frames: video tensor with values in [0, 1].
    :param movable_mask: boolean mask selecting movable pixels.
    :return: dict with key ``flow_l2`` — L2 norm of the per-axis mean absolute
        flow over the mask, or NaN when the mask is empty.
    """
    if not movable_mask.any():
        # No movable pixels: the flow statistic is undefined.
        return {'flow_l2': float('nan')}
    assert not (frames < 0).any() and not (frames > 1).any()
    # The flow network expects inputs in [-1, 1].
    flow_field = flow_predictor.predict_flow(frames * 2 - 1)[1]
    horizontal = flow_field[:, [0]]
    vertical = flow_field[:, [1]]
    mean_abs_x = float(horizontal[movable_mask.expand_as(horizontal)].abs().mean())
    mean_abs_y = float(vertical[movable_mask.expand_as(vertical)].abs().mean())
    return {'flow_l2': (mean_abs_x ** 2 + mean_abs_y ** 2) ** 0.5}
def batch2pil(batch):
    """Convert a [-1, 1] image batch of shape (N, C, H, W) to PIL images.

    Rescales values from [-1, 1] to [0, 255], clamps, moves channels last
    (N, H, W, C), and wraps each frame in a ``PIL.Image``.
    """
    rescaled = (batch.permute(0, 2, 3, 1) / 2 + 0.5) * 255
    frames = rescaled.clamp(0, 255).cpu().numpy().astype('uint8')
    return [Image.fromarray(frame) for frame in frames]
def main(args):
    """Compute quality metrics for generated and animated frames/videos.

    Pipeline (all paths and sizes come from ``args``):
      1. FID between generated still images and ground-truth images.
      2. Per-video optical-flow and masked-LPIPS metrics for generated videos.
      3. The same per-video metrics for videos animated from real images,
         plus per-timestep FID between animated frames and the real inputs.
    All rows are collected into a DataFrame and written as TSV to
    ``args.outpath``.

    NOTE(review): assumes a CUDA device is available — every model and
    most tensors are moved with ``.cuda()``.
    """
    segmentation_network = SegmentationModule(os.path.expandvars(args.segm_network)).cuda()
    segmentation_network.eval()
    lpips_criterion = LPIPSLossWrapper(args.lpips_network).cuda()
    flow_predictor = FlowPredictor(os.path.expandvars(args.flow_network))

    all_metrics = []      # one dict of metric values per video (plus a global row)
    all_metrics_idx = []  # row labels, parallel to all_metrics

    # load generated images
    gen_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.gen_images), '*.jpg')))
    gen_frames_as_img = []
    for fname in gen_frame_paths:
        frame = Image.open(fname).convert('RGB')
        # image2batch yields [-1, 1]; shift into [0, 1] for the checks below.
        frame_batch = image2batch(frame).cuda() / 2 + 0.5
        assert not (frame_batch < 0).any() and not (frame_batch > 1).any()
        frame_img = batch2pil(frame_batch)[0]
        gen_frames_as_img.append(frame_img)

    # load gt-images, scale, crop and segment
    gt_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.gt_images), '*.jpg')))
    gt_frames_as_img = []
    for fname in gt_frame_paths:
        frame = Image.open(fname).convert('RGB')
        frame = frame.resize(get_scale_size(args.resolution, frame.size))
        frame_batch = image2batch(frame).cuda() / 2 + 0.5
        assert not (frame_batch < 0).any() and not (frame_batch > 1).any()
        # Resize again on the tensor, then take a centered square crop so the
        # gt frames match the generated-frame geometry.
        scaled_size = get_scale_size(args.resolution, frame_batch.shape[2:])
        frame_batch = F.interpolate(frame_batch, size=scaled_size, mode='bilinear', align_corners=False)
        crop_y1, crop_y2, crop_x1, crop_x2 = choose_center_full_size_crop_params(*frame_batch.shape[2:])
        frame_batch = frame_batch[:, :, crop_y1:crop_y2, crop_x1:crop_x2]
        frame_img = batch2pil(frame_batch)[0]
        gt_frames_as_img.append(frame_img)

    # compute FID between generated images and gt
    print('Calculating FID for images...')
    block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]
    fid_model = InceptionV3([block_idx]).cuda()
    fid_gt_means, fid_gt_std = _compute_statistics_of_images(gt_frames_as_img, fid_model,
                                                             batch_size=args.batch,
                                                             dims=2048, cuda=True, keep_size=False)
    fid_gen_means, fid_gen_std = _compute_statistics_of_images(gen_frames_as_img, fid_model,
                                                               batch_size=args.batch,
                                                               dims=2048, cuda=True, keep_size=False)
    # ``fid`` accumulates all FID scores and is appended as one global row at the end.
    fid = dict()
    fid['fid_images'] = float(calculate_frechet_distance(fid_gt_means, fid_gt_std, fid_gen_means, fid_gen_std))

    # load generated videos
    for src_path in sorted(glob.glob(os.path.join(args.gen_videos, '*'))):
        if not os.path.isdir(src_path):
            continue
        print(f'Processing {src_path}')
        if src_path.endswith('/'):
            src_path = src_path[:-1]
        vname = os.path.basename(src_path)
        frames = load_video_frames_from_folder(src_path, frame_template=args.frametemplate) / 2 + 0.5
        assert not (frames < 0).any() and not (frames > 1).any()
        # get mask from the first frame
        cur_segm_scores = segmentation_network.predict(frames[:1].cuda(), imgSizes=[args.resolution])
        cur_segm_proba = F.softmax(cur_segm_scores, dim=1)
        # A pixel is movable when its best movable-class score beats its best
        # immovable-class score.
        movable_scores = cur_segm_proba[:, MOVABLE_CLASSES].max(1, keepdim=True)[0]
        immovable_scores = cur_segm_proba[:, [c for c in range(cur_segm_proba.shape[1])
                                              if c not in MOVABLE_CLASSES]].max(1, keepdim=True)[0]
        shift_mask = (movable_scores > immovable_scores).float()
        print('Flow metrics...')
        flow_metrics = calc_optical_flow_metrics(flow_predictor, frames, shift_mask > 0)
        print('LPIPS metrics...')
        cur_metrics = collections.defaultdict(float)
        lpips = []
        # Start at 1: compare every later frame to frame 0, masking out the
        # movable region so only background drift is measured.
        for l in range(1, frames.shape[0], args.batch):
            r = min(l + args.batch, frames.shape[0])
            lpips.append(float(lpips_criterion(frames[l:r].cuda() * (1 - shift_mask), frames[0].cuda() * (1 - shift_mask))))
        cur_metrics['lpips_gen'] = np.mean(lpips)
        sum_dicts(cur_metrics, flow_metrics)
        all_metrics.append(cur_metrics)
        all_metrics_idx.append(vname)

    # load real images, from which the videos were generated, scale, crop and segment
    real_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.real_images), '*.jpg')))
    real_frames_as_img = []
    real_frames_with_segm = {}  # file stem -> (cropped tensor, segmentation proba)
    for fname in real_frame_paths:
        frame = Image.open(fname).convert('RGB')
        frame = frame.resize(get_scale_size(args.resolution, frame.size))
        # check the interval of stored numbers: 0..1 || -1..1 || 0..255
        frame_batch = image2batch(frame).cuda()
        # Min-max normalize into [0, 1] regardless of the stored value range.
        frame_batch = (frame_batch - frame_batch.min()) / (frame_batch.max() - frame_batch.min())
        assert not (frame_batch < 0).any() and not (frame_batch > 1).any()
        scaled_size = get_scale_size(args.resolution, frame_batch.shape[2:])
        frame_batch = F.interpolate(frame_batch, size=scaled_size, mode='bilinear', align_corners=False)
        crop_y1, crop_y2, crop_x1, crop_x2 = choose_center_full_size_crop_params(*frame_batch.shape[2:])
        frame_batch = frame_batch[:, :, crop_y1:crop_y2, crop_x1:crop_x2]
        frame_img = batch2pil(frame_batch)[0]
        real_frames_as_img.append(frame_img)
        cur_segm_scores = segmentation_network.predict(frame_batch, imgSizes=[args.resolution])
        cur_segm_proba = F.softmax(cur_segm_scores, dim=1)
        f_id = os.path.splitext(os.path.basename(fname))[0]
        real_frames_with_segm[f_id] = (frame_batch, cur_segm_proba)

    # load videos -- animated real images
    animated_frames_by_i = collections.defaultdict(list)  # frame index -> PIL frames across videos
    for src_path in sorted(glob.glob(os.path.join(args.animated_images, '*'))):
        if not os.path.isdir(src_path):
            continue
        print(f'Processing {src_path}')
        if src_path.endswith('/'):
            src_path = src_path[:-1]
        vname = os.path.basename(src_path)
        frames = load_video_frames_from_folder(src_path, frame_template=args.frametemplate) / 2 + 0.5
        assert not (frames < 0).any() and not (frames > 1).any()
        # Bucket frames by timestep for the per-timestep FID below.
        for i, fr in enumerate(batch2pil(frames)):
            animated_frames_by_i[i].append(fr)
        # Match the video back to its source image by filename prefix.
        cur_real_frame = None
        cur_real_segm_proba = None
        for frname, (fr, segm) in real_frames_with_segm.items():
            if vname.startswith(frname):
                cur_real_frame = fr
                cur_real_segm_proba = segm
                break
        assert cur_real_frame is not None, (vname, real_frames_with_segm.keys())
        movable_scores = cur_real_segm_proba[:, MOVABLE_CLASSES].max(1, keepdim=True)[0]
        immovable_scores = cur_real_segm_proba[:, [c for c in range(cur_real_segm_proba.shape[1])
                                                   if c not in MOVABLE_CLASSES]].max(1, keepdim=True)[0]
        shift_mask = (movable_scores > immovable_scores).float()
        print('Flow metrics...')
        flow_metrics = calc_optical_flow_metrics(flow_predictor, frames, shift_mask > 0)
        print('LPIPS metrics...')
        cur_metrics = collections.defaultdict(float)
        # NOTE(review): frames[:1] stays on CPU while cur_real_frame was moved
        # to CUDA above — confirm lpips_criterion tolerates mixed devices.
        cur_metrics['lpips_1_frame'] = float(lpips_criterion(frames[:1], cur_real_frame))
        lpips = []
        for l in range(0, frames.shape[0], args.batch):
            r = min(l + args.batch, frames.shape[0])
            lpips.append(float(lpips_criterion(frames[l:r].cuda() * (1 - shift_mask), cur_real_frame.cuda() * (1 - shift_mask))))
        cur_metrics['lpips_anim'] = np.mean(lpips)
        sum_dicts(cur_metrics, flow_metrics)
        all_metrics.append(cur_metrics)
        all_metrics_idx.append(vname)

    print('Calculating FID...')
    block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]
    fid_model = InceptionV3([block_idx]).cuda()
    fid_real_means, fid_real_std = _compute_statistics_of_images(real_frames_as_img, fid_model,
                                                                 batch_size=args.batch,
                                                                 dims=2048, cuda=True, keep_size=False)
    # Per-timestep FID: animated frames at step i vs. the real input images,
    # evaluated only every ``skipframe`` steps to save time.
    for i, cur_gen_frames in animated_frames_by_i.items():
        if i % args.skipframe != 0:
            continue
        cur_fid_means, cur_fid_std = _compute_statistics_of_images(cur_gen_frames, fid_model,
                                                                   batch_size=args.batch,
                                                                   dims=2048, cuda=True, keep_size=False)
        fid[f'fid_{i}'] = float(calculate_frechet_distance(fid_real_means, fid_real_std,
                                                           cur_fid_means, cur_fid_std))
    all_metrics.append(fid)
    all_metrics_idx.append('global_metrics')

    os.makedirs(os.path.dirname(args.outpath), exist_ok=True)
    sum_metrics = pd.DataFrame(all_metrics, index=all_metrics_idx)
    sum_metrics.to_csv(args.outpath, sep='\t')
if __name__ == '__main__':
    # CLI wiring: defaults reproduce the standard results/ layout; pretrained
    # model paths resolve under constants.RESULT_DIR.
    import argparse
    aparser = argparse.ArgumentParser()
    aparser.add_argument('--outpath', type=str, default='results/metrics.csv', help='Path to file to write metrics to')
    aparser.add_argument('--gen-images', type=str, default='results/generated/256/images', help='Path to generated images')
    aparser.add_argument('--gt-images', type=str, default='results/gt_images', help='Path to gt-images')
    aparser.add_argument('--gen-videos', type=str, default='results/generated/256/noise',
                         help='Path to generated videos (separate folder with frames for each video)')
    aparser.add_argument('--animated-images', type=str,
                         default='results/encode_and_animate_results/test_images/02_eoif',
                         help='Path to animated images (separate folder with frames for each video)')
    aparser.add_argument('--real-images', type=str, default='results/test_images', help='Path to real input images')
    aparser.add_argument('--frametemplate', type=str,
                         default='{:05}.jpg',
                         help='Template to generate frame file names')
    aparser.add_argument('--resolution', type=int, default=256, help='Resolution of generated frames')
    aparser.add_argument('--skipframe', type=int, default=10, help='How many frames to skip before evaluating FID')
    aparser.add_argument('--batch', type=int, default=69, help='Batch size for FID and LPIPS calculation')
    aparser.add_argument('--segm-network', type=str,
                         default=os.path.join(constants.RESULT_DIR, 'pretrained_models/ade20k-resnet50dilated-ppm_deepsup'),
                         help='Path to ade20k-resnet50dilated-ppm_deepsup')
    aparser.add_argument('--flow-network', type=str,
                         default=os.path.join(constants.RESULT_DIR, 'pretrained_models/SuperSloMo.ckpt'),
                         help='Path to SuperSloMo.ckpt')
    aparser.add_argument('--lpips-network', type=str,
                         default=os.path.join(constants.RESULT_DIR, 'pretrained_models/lpips_models/vgg.pth'),
                         help='Path to vgg.pth')
    main(aparser.parse_args())
|
normal
|
{
"blob_id": "f6846bfc6c4d803cedaf37e079e01188733938c7",
"index": 8249,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef calc_optical_flow_metrics(flow_predictor, frames, movable_mask):\n if not movable_mask.any():\n return dict(flow_l2=float('nan'))\n assert not (frames < 0).any() and not (frames > 1).any()\n flows = flow_predictor.predict_flow(frames * 2 - 1)[1]\n flows_x, flows_y = flows[:, [0]], flows[:, [1]]\n flow_x_median = float(flows_x[movable_mask.expand_as(flows_x)].abs().mean()\n )\n flow_y_median = float(flows_y[movable_mask.expand_as(flows_y)].abs().mean()\n )\n result = dict(flow_l2=(flow_x_median ** 2 + flow_y_median ** 2) ** 0.5)\n return result\n\n\ndef batch2pil(batch):\n np_batch = ((batch.permute(0, 2, 3, 1) / 2 + 0.5) * 255).clamp(0, 255).cpu(\n ).numpy().astype('uint8')\n return [Image.fromarray(ar) for ar in np_batch]\n\n\ndef main(args):\n segmentation_network = SegmentationModule(os.path.expandvars(args.\n segm_network)).cuda()\n segmentation_network.eval()\n lpips_criterion = LPIPSLossWrapper(args.lpips_network).cuda()\n flow_predictor = FlowPredictor(os.path.expandvars(args.flow_network))\n all_metrics = []\n all_metrics_idx = []\n gen_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.\n gen_images), '*.jpg')))\n gen_frames_as_img = []\n for fname in gen_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame_batch = image2batch(frame).cuda() / 2 + 0.5\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n frame_img = batch2pil(frame_batch)[0]\n gen_frames_as_img.append(frame_img)\n gt_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.\n gt_images), '*.jpg')))\n gt_frames_as_img = []\n for fname in gt_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame = frame.resize(get_scale_size(args.resolution, frame.size))\n frame_batch = image2batch(frame).cuda() / 2 + 0.5\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n scaled_size = get_scale_size(args.resolution, frame_batch.shape[2:])\n frame_batch = F.interpolate(frame_batch, 
size=scaled_size, mode=\n 'bilinear', align_corners=False)\n crop_y1, crop_y2, crop_x1, crop_x2 = (\n choose_center_full_size_crop_params(*frame_batch.shape[2:]))\n frame_batch = frame_batch[:, :, crop_y1:crop_y2, crop_x1:crop_x2]\n frame_img = batch2pil(frame_batch)[0]\n gt_frames_as_img.append(frame_img)\n print('Calculating FID for images...')\n block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]\n fid_model = InceptionV3([block_idx]).cuda()\n fid_gt_means, fid_gt_std = _compute_statistics_of_images(gt_frames_as_img,\n fid_model, batch_size=args.batch, dims=2048, cuda=True, keep_size=False\n )\n fid_gen_means, fid_gen_std = _compute_statistics_of_images(\n gen_frames_as_img, fid_model, batch_size=args.batch, dims=2048,\n cuda=True, keep_size=False)\n fid = dict()\n fid['fid_images'] = float(calculate_frechet_distance(fid_gt_means,\n fid_gt_std, fid_gen_means, fid_gen_std))\n for src_path in sorted(glob.glob(os.path.join(args.gen_videos, '*'))):\n if not os.path.isdir(src_path):\n continue\n print(f'Processing {src_path}')\n if src_path.endswith('/'):\n src_path = src_path[:-1]\n vname = os.path.basename(src_path)\n frames = load_video_frames_from_folder(src_path, frame_template=\n args.frametemplate) / 2 + 0.5\n assert not (frames < 0).any() and not (frames > 1).any()\n cur_segm_scores = segmentation_network.predict(frames[:1].cuda(),\n imgSizes=[args.resolution])\n cur_segm_proba = F.softmax(cur_segm_scores, dim=1)\n movable_scores = cur_segm_proba[:, MOVABLE_CLASSES].max(1, keepdim=True\n )[0]\n immovable_scores = cur_segm_proba[:, [c for c in range(\n cur_segm_proba.shape[1]) if c not in MOVABLE_CLASSES]].max(1,\n keepdim=True)[0]\n shift_mask = (movable_scores > immovable_scores).float()\n print('Flow metrics...')\n flow_metrics = calc_optical_flow_metrics(flow_predictor, frames, \n shift_mask > 0)\n print('LPIPS metrics...')\n cur_metrics = collections.defaultdict(float)\n lpips = []\n for l in range(1, frames.shape[0], args.batch):\n r = min(l + args.batch, 
frames.shape[0])\n lpips.append(float(lpips_criterion(frames[l:r].cuda() * (1 -\n shift_mask), frames[0].cuda() * (1 - shift_mask))))\n cur_metrics['lpips_gen'] = np.mean(lpips)\n sum_dicts(cur_metrics, flow_metrics)\n all_metrics.append(cur_metrics)\n all_metrics_idx.append(vname)\n real_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.\n real_images), '*.jpg')))\n real_frames_as_img = []\n real_frames_with_segm = {}\n for fname in real_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame = frame.resize(get_scale_size(args.resolution, frame.size))\n frame_batch = image2batch(frame).cuda()\n frame_batch = (frame_batch - frame_batch.min()) / (frame_batch.max(\n ) - frame_batch.min())\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n scaled_size = get_scale_size(args.resolution, frame_batch.shape[2:])\n frame_batch = F.interpolate(frame_batch, size=scaled_size, mode=\n 'bilinear', align_corners=False)\n crop_y1, crop_y2, crop_x1, crop_x2 = (\n choose_center_full_size_crop_params(*frame_batch.shape[2:]))\n frame_batch = frame_batch[:, :, crop_y1:crop_y2, crop_x1:crop_x2]\n frame_img = batch2pil(frame_batch)[0]\n real_frames_as_img.append(frame_img)\n cur_segm_scores = segmentation_network.predict(frame_batch,\n imgSizes=[args.resolution])\n cur_segm_proba = F.softmax(cur_segm_scores, dim=1)\n f_id = os.path.splitext(os.path.basename(fname))[0]\n real_frames_with_segm[f_id] = frame_batch, cur_segm_proba\n animated_frames_by_i = collections.defaultdict(list)\n for src_path in sorted(glob.glob(os.path.join(args.animated_images, '*'))):\n if not os.path.isdir(src_path):\n continue\n print(f'Processing {src_path}')\n if src_path.endswith('/'):\n src_path = src_path[:-1]\n vname = os.path.basename(src_path)\n frames = load_video_frames_from_folder(src_path, frame_template=\n args.frametemplate) / 2 + 0.5\n assert not (frames < 0).any() and not (frames > 1).any()\n for i, fr in enumerate(batch2pil(frames)):\n 
animated_frames_by_i[i].append(fr)\n cur_real_frame = None\n cur_real_segm_proba = None\n for frname, (fr, segm) in real_frames_with_segm.items():\n if vname.startswith(frname):\n cur_real_frame = fr\n cur_real_segm_proba = segm\n break\n assert cur_real_frame is not None, (vname, real_frames_with_segm.keys()\n )\n movable_scores = cur_real_segm_proba[:, MOVABLE_CLASSES].max(1,\n keepdim=True)[0]\n immovable_scores = cur_real_segm_proba[:, [c for c in range(\n cur_real_segm_proba.shape[1]) if c not in MOVABLE_CLASSES]].max(\n 1, keepdim=True)[0]\n shift_mask = (movable_scores > immovable_scores).float()\n print('Flow metrics...')\n flow_metrics = calc_optical_flow_metrics(flow_predictor, frames, \n shift_mask > 0)\n print('LPIPS metrics...')\n cur_metrics = collections.defaultdict(float)\n cur_metrics['lpips_1_frame'] = float(lpips_criterion(frames[:1],\n cur_real_frame))\n lpips = []\n for l in range(0, frames.shape[0], args.batch):\n r = min(l + args.batch, frames.shape[0])\n lpips.append(float(lpips_criterion(frames[l:r].cuda() * (1 -\n shift_mask), cur_real_frame.cuda() * (1 - shift_mask))))\n cur_metrics['lpips_anim'] = np.mean(lpips)\n sum_dicts(cur_metrics, flow_metrics)\n all_metrics.append(cur_metrics)\n all_metrics_idx.append(vname)\n print('Calculating FID...')\n block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]\n fid_model = InceptionV3([block_idx]).cuda()\n fid_real_means, fid_real_std = _compute_statistics_of_images(\n real_frames_as_img, fid_model, batch_size=args.batch, dims=2048,\n cuda=True, keep_size=False)\n for i, cur_gen_frames in animated_frames_by_i.items():\n if i % args.skipframe != 0:\n continue\n cur_fid_means, cur_fid_std = _compute_statistics_of_images(\n cur_gen_frames, fid_model, batch_size=args.batch, dims=2048,\n cuda=True, keep_size=False)\n fid[f'fid_{i}'] = float(calculate_frechet_distance(fid_real_means,\n fid_real_std, cur_fid_means, cur_fid_std))\n all_metrics.append(fid)\n all_metrics_idx.append('global_metrics')\n 
os.makedirs(os.path.dirname(args.outpath), exist_ok=True)\n sum_metrics = pd.DataFrame(all_metrics, index=all_metrics_idx)\n sum_metrics.to_csv(args.outpath, sep='\\t')\n\n\nif __name__ == '__main__':\n import argparse\n aparser = argparse.ArgumentParser()\n aparser.add_argument('--outpath', type=str, default=\n 'results/metrics.csv', help='Path to file to write metrics to')\n aparser.add_argument('--gen-images', type=str, default=\n 'results/generated/256/images', help='Path to generated images')\n aparser.add_argument('--gt-images', type=str, default=\n 'results/gt_images', help='Path to gt-images')\n aparser.add_argument('--gen-videos', type=str, default=\n 'results/generated/256/noise', help=\n 'Path to generated videos (separate folder with frames for each video)'\n )\n aparser.add_argument('--animated-images', type=str, default=\n 'results/encode_and_animate_results/test_images/02_eoif', help=\n 'Path to animated images (separate folder with frames for each video)')\n aparser.add_argument('--real-images', type=str, default=\n 'results/test_images', help='Path to real input images')\n aparser.add_argument('--frametemplate', type=str, default='{:05}.jpg',\n help='Template to generate frame file names')\n aparser.add_argument('--resolution', type=int, default=256, help=\n 'Resolution of generated frames')\n aparser.add_argument('--skipframe', type=int, default=10, help=\n 'How many frames to skip before evaluating FID')\n aparser.add_argument('--batch', type=int, default=69, help=\n 'Batch size for FID and LPIPS calculation')\n aparser.add_argument('--segm-network', type=str, default=os.path.join(\n constants.RESULT_DIR,\n 'pretrained_models/ade20k-resnet50dilated-ppm_deepsup'), help=\n 'Path to ade20k-resnet50dilated-ppm_deepsup')\n aparser.add_argument('--flow-network', type=str, default=os.path.join(\n constants.RESULT_DIR, 'pretrained_models/SuperSloMo.ckpt'), help=\n 'Path to SuperSloMo.ckpt')\n aparser.add_argument('--lpips-network', type=str, 
default=os.path.join(\n constants.RESULT_DIR, 'pretrained_models/lpips_models/vgg.pth'),\n help='Path to vgg.pth')\n main(aparser.parse_args())\n",
"step-3": "<mask token>\nMOVABLE_CLASSES = [2, 21]\n\n\ndef calc_optical_flow_metrics(flow_predictor, frames, movable_mask):\n if not movable_mask.any():\n return dict(flow_l2=float('nan'))\n assert not (frames < 0).any() and not (frames > 1).any()\n flows = flow_predictor.predict_flow(frames * 2 - 1)[1]\n flows_x, flows_y = flows[:, [0]], flows[:, [1]]\n flow_x_median = float(flows_x[movable_mask.expand_as(flows_x)].abs().mean()\n )\n flow_y_median = float(flows_y[movable_mask.expand_as(flows_y)].abs().mean()\n )\n result = dict(flow_l2=(flow_x_median ** 2 + flow_y_median ** 2) ** 0.5)\n return result\n\n\ndef batch2pil(batch):\n np_batch = ((batch.permute(0, 2, 3, 1) / 2 + 0.5) * 255).clamp(0, 255).cpu(\n ).numpy().astype('uint8')\n return [Image.fromarray(ar) for ar in np_batch]\n\n\ndef main(args):\n segmentation_network = SegmentationModule(os.path.expandvars(args.\n segm_network)).cuda()\n segmentation_network.eval()\n lpips_criterion = LPIPSLossWrapper(args.lpips_network).cuda()\n flow_predictor = FlowPredictor(os.path.expandvars(args.flow_network))\n all_metrics = []\n all_metrics_idx = []\n gen_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.\n gen_images), '*.jpg')))\n gen_frames_as_img = []\n for fname in gen_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame_batch = image2batch(frame).cuda() / 2 + 0.5\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n frame_img = batch2pil(frame_batch)[0]\n gen_frames_as_img.append(frame_img)\n gt_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.\n gt_images), '*.jpg')))\n gt_frames_as_img = []\n for fname in gt_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame = frame.resize(get_scale_size(args.resolution, frame.size))\n frame_batch = image2batch(frame).cuda() / 2 + 0.5\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n scaled_size = get_scale_size(args.resolution, frame_batch.shape[2:])\n frame_batch = 
F.interpolate(frame_batch, size=scaled_size, mode=\n 'bilinear', align_corners=False)\n crop_y1, crop_y2, crop_x1, crop_x2 = (\n choose_center_full_size_crop_params(*frame_batch.shape[2:]))\n frame_batch = frame_batch[:, :, crop_y1:crop_y2, crop_x1:crop_x2]\n frame_img = batch2pil(frame_batch)[0]\n gt_frames_as_img.append(frame_img)\n print('Calculating FID for images...')\n block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]\n fid_model = InceptionV3([block_idx]).cuda()\n fid_gt_means, fid_gt_std = _compute_statistics_of_images(gt_frames_as_img,\n fid_model, batch_size=args.batch, dims=2048, cuda=True, keep_size=False\n )\n fid_gen_means, fid_gen_std = _compute_statistics_of_images(\n gen_frames_as_img, fid_model, batch_size=args.batch, dims=2048,\n cuda=True, keep_size=False)\n fid = dict()\n fid['fid_images'] = float(calculate_frechet_distance(fid_gt_means,\n fid_gt_std, fid_gen_means, fid_gen_std))\n for src_path in sorted(glob.glob(os.path.join(args.gen_videos, '*'))):\n if not os.path.isdir(src_path):\n continue\n print(f'Processing {src_path}')\n if src_path.endswith('/'):\n src_path = src_path[:-1]\n vname = os.path.basename(src_path)\n frames = load_video_frames_from_folder(src_path, frame_template=\n args.frametemplate) / 2 + 0.5\n assert not (frames < 0).any() and not (frames > 1).any()\n cur_segm_scores = segmentation_network.predict(frames[:1].cuda(),\n imgSizes=[args.resolution])\n cur_segm_proba = F.softmax(cur_segm_scores, dim=1)\n movable_scores = cur_segm_proba[:, MOVABLE_CLASSES].max(1, keepdim=True\n )[0]\n immovable_scores = cur_segm_proba[:, [c for c in range(\n cur_segm_proba.shape[1]) if c not in MOVABLE_CLASSES]].max(1,\n keepdim=True)[0]\n shift_mask = (movable_scores > immovable_scores).float()\n print('Flow metrics...')\n flow_metrics = calc_optical_flow_metrics(flow_predictor, frames, \n shift_mask > 0)\n print('LPIPS metrics...')\n cur_metrics = collections.defaultdict(float)\n lpips = []\n for l in range(1, frames.shape[0], 
args.batch):\n r = min(l + args.batch, frames.shape[0])\n lpips.append(float(lpips_criterion(frames[l:r].cuda() * (1 -\n shift_mask), frames[0].cuda() * (1 - shift_mask))))\n cur_metrics['lpips_gen'] = np.mean(lpips)\n sum_dicts(cur_metrics, flow_metrics)\n all_metrics.append(cur_metrics)\n all_metrics_idx.append(vname)\n real_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.\n real_images), '*.jpg')))\n real_frames_as_img = []\n real_frames_with_segm = {}\n for fname in real_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame = frame.resize(get_scale_size(args.resolution, frame.size))\n frame_batch = image2batch(frame).cuda()\n frame_batch = (frame_batch - frame_batch.min()) / (frame_batch.max(\n ) - frame_batch.min())\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n scaled_size = get_scale_size(args.resolution, frame_batch.shape[2:])\n frame_batch = F.interpolate(frame_batch, size=scaled_size, mode=\n 'bilinear', align_corners=False)\n crop_y1, crop_y2, crop_x1, crop_x2 = (\n choose_center_full_size_crop_params(*frame_batch.shape[2:]))\n frame_batch = frame_batch[:, :, crop_y1:crop_y2, crop_x1:crop_x2]\n frame_img = batch2pil(frame_batch)[0]\n real_frames_as_img.append(frame_img)\n cur_segm_scores = segmentation_network.predict(frame_batch,\n imgSizes=[args.resolution])\n cur_segm_proba = F.softmax(cur_segm_scores, dim=1)\n f_id = os.path.splitext(os.path.basename(fname))[0]\n real_frames_with_segm[f_id] = frame_batch, cur_segm_proba\n animated_frames_by_i = collections.defaultdict(list)\n for src_path in sorted(glob.glob(os.path.join(args.animated_images, '*'))):\n if not os.path.isdir(src_path):\n continue\n print(f'Processing {src_path}')\n if src_path.endswith('/'):\n src_path = src_path[:-1]\n vname = os.path.basename(src_path)\n frames = load_video_frames_from_folder(src_path, frame_template=\n args.frametemplate) / 2 + 0.5\n assert not (frames < 0).any() and not (frames > 1).any()\n for i, fr in 
enumerate(batch2pil(frames)):\n animated_frames_by_i[i].append(fr)\n cur_real_frame = None\n cur_real_segm_proba = None\n for frname, (fr, segm) in real_frames_with_segm.items():\n if vname.startswith(frname):\n cur_real_frame = fr\n cur_real_segm_proba = segm\n break\n assert cur_real_frame is not None, (vname, real_frames_with_segm.keys()\n )\n movable_scores = cur_real_segm_proba[:, MOVABLE_CLASSES].max(1,\n keepdim=True)[0]\n immovable_scores = cur_real_segm_proba[:, [c for c in range(\n cur_real_segm_proba.shape[1]) if c not in MOVABLE_CLASSES]].max(\n 1, keepdim=True)[0]\n shift_mask = (movable_scores > immovable_scores).float()\n print('Flow metrics...')\n flow_metrics = calc_optical_flow_metrics(flow_predictor, frames, \n shift_mask > 0)\n print('LPIPS metrics...')\n cur_metrics = collections.defaultdict(float)\n cur_metrics['lpips_1_frame'] = float(lpips_criterion(frames[:1],\n cur_real_frame))\n lpips = []\n for l in range(0, frames.shape[0], args.batch):\n r = min(l + args.batch, frames.shape[0])\n lpips.append(float(lpips_criterion(frames[l:r].cuda() * (1 -\n shift_mask), cur_real_frame.cuda() * (1 - shift_mask))))\n cur_metrics['lpips_anim'] = np.mean(lpips)\n sum_dicts(cur_metrics, flow_metrics)\n all_metrics.append(cur_metrics)\n all_metrics_idx.append(vname)\n print('Calculating FID...')\n block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]\n fid_model = InceptionV3([block_idx]).cuda()\n fid_real_means, fid_real_std = _compute_statistics_of_images(\n real_frames_as_img, fid_model, batch_size=args.batch, dims=2048,\n cuda=True, keep_size=False)\n for i, cur_gen_frames in animated_frames_by_i.items():\n if i % args.skipframe != 0:\n continue\n cur_fid_means, cur_fid_std = _compute_statistics_of_images(\n cur_gen_frames, fid_model, batch_size=args.batch, dims=2048,\n cuda=True, keep_size=False)\n fid[f'fid_{i}'] = float(calculate_frechet_distance(fid_real_means,\n fid_real_std, cur_fid_means, cur_fid_std))\n all_metrics.append(fid)\n 
all_metrics_idx.append('global_metrics')\n os.makedirs(os.path.dirname(args.outpath), exist_ok=True)\n sum_metrics = pd.DataFrame(all_metrics, index=all_metrics_idx)\n sum_metrics.to_csv(args.outpath, sep='\\t')\n\n\nif __name__ == '__main__':\n import argparse\n aparser = argparse.ArgumentParser()\n aparser.add_argument('--outpath', type=str, default=\n 'results/metrics.csv', help='Path to file to write metrics to')\n aparser.add_argument('--gen-images', type=str, default=\n 'results/generated/256/images', help='Path to generated images')\n aparser.add_argument('--gt-images', type=str, default=\n 'results/gt_images', help='Path to gt-images')\n aparser.add_argument('--gen-videos', type=str, default=\n 'results/generated/256/noise', help=\n 'Path to generated videos (separate folder with frames for each video)'\n )\n aparser.add_argument('--animated-images', type=str, default=\n 'results/encode_and_animate_results/test_images/02_eoif', help=\n 'Path to animated images (separate folder with frames for each video)')\n aparser.add_argument('--real-images', type=str, default=\n 'results/test_images', help='Path to real input images')\n aparser.add_argument('--frametemplate', type=str, default='{:05}.jpg',\n help='Template to generate frame file names')\n aparser.add_argument('--resolution', type=int, default=256, help=\n 'Resolution of generated frames')\n aparser.add_argument('--skipframe', type=int, default=10, help=\n 'How many frames to skip before evaluating FID')\n aparser.add_argument('--batch', type=int, default=69, help=\n 'Batch size for FID and LPIPS calculation')\n aparser.add_argument('--segm-network', type=str, default=os.path.join(\n constants.RESULT_DIR,\n 'pretrained_models/ade20k-resnet50dilated-ppm_deepsup'), help=\n 'Path to ade20k-resnet50dilated-ppm_deepsup')\n aparser.add_argument('--flow-network', type=str, default=os.path.join(\n constants.RESULT_DIR, 'pretrained_models/SuperSloMo.ckpt'), help=\n 'Path to SuperSloMo.ckpt')\n 
aparser.add_argument('--lpips-network', type=str, default=os.path.join(\n constants.RESULT_DIR, 'pretrained_models/lpips_models/vgg.pth'),\n help='Path to vgg.pth')\n main(aparser.parse_args())\n",
"step-4": "import collections\nimport glob\nimport os\nimport pandas as pd\nimport numpy as np\nimport torch.nn.functional as F\nimport PIL.Image as Image\nfrom inference.base_image_utils import get_scale_size, image2batch, choose_center_full_size_crop_params\nfrom inference.metrics.fid.fid_score import _compute_statistics_of_images, calculate_frechet_distance\nfrom inference.metrics.fid.inception import InceptionV3\nfrom inference.metrics.lpips import LPIPSLossWrapper\nfrom inference.perspective import load_video_frames_from_folder, FlowPredictor\nfrom inference.segmentation import SegmentationModule\nfrom inference.encode_and_animate import calc_segmentation_posterior_error, sum_dicts\nfrom inference.metrics.ssim import SSIM\nimport constants\nMOVABLE_CLASSES = [2, 21]\n\n\ndef calc_optical_flow_metrics(flow_predictor, frames, movable_mask):\n if not movable_mask.any():\n return dict(flow_l2=float('nan'))\n assert not (frames < 0).any() and not (frames > 1).any()\n flows = flow_predictor.predict_flow(frames * 2 - 1)[1]\n flows_x, flows_y = flows[:, [0]], flows[:, [1]]\n flow_x_median = float(flows_x[movable_mask.expand_as(flows_x)].abs().mean()\n )\n flow_y_median = float(flows_y[movable_mask.expand_as(flows_y)].abs().mean()\n )\n result = dict(flow_l2=(flow_x_median ** 2 + flow_y_median ** 2) ** 0.5)\n return result\n\n\ndef batch2pil(batch):\n np_batch = ((batch.permute(0, 2, 3, 1) / 2 + 0.5) * 255).clamp(0, 255).cpu(\n ).numpy().astype('uint8')\n return [Image.fromarray(ar) for ar in np_batch]\n\n\ndef main(args):\n segmentation_network = SegmentationModule(os.path.expandvars(args.\n segm_network)).cuda()\n segmentation_network.eval()\n lpips_criterion = LPIPSLossWrapper(args.lpips_network).cuda()\n flow_predictor = FlowPredictor(os.path.expandvars(args.flow_network))\n all_metrics = []\n all_metrics_idx = []\n gen_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.\n gen_images), '*.jpg')))\n gen_frames_as_img = []\n for fname in 
gen_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame_batch = image2batch(frame).cuda() / 2 + 0.5\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n frame_img = batch2pil(frame_batch)[0]\n gen_frames_as_img.append(frame_img)\n gt_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.\n gt_images), '*.jpg')))\n gt_frames_as_img = []\n for fname in gt_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame = frame.resize(get_scale_size(args.resolution, frame.size))\n frame_batch = image2batch(frame).cuda() / 2 + 0.5\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n scaled_size = get_scale_size(args.resolution, frame_batch.shape[2:])\n frame_batch = F.interpolate(frame_batch, size=scaled_size, mode=\n 'bilinear', align_corners=False)\n crop_y1, crop_y2, crop_x1, crop_x2 = (\n choose_center_full_size_crop_params(*frame_batch.shape[2:]))\n frame_batch = frame_batch[:, :, crop_y1:crop_y2, crop_x1:crop_x2]\n frame_img = batch2pil(frame_batch)[0]\n gt_frames_as_img.append(frame_img)\n print('Calculating FID for images...')\n block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]\n fid_model = InceptionV3([block_idx]).cuda()\n fid_gt_means, fid_gt_std = _compute_statistics_of_images(gt_frames_as_img,\n fid_model, batch_size=args.batch, dims=2048, cuda=True, keep_size=False\n )\n fid_gen_means, fid_gen_std = _compute_statistics_of_images(\n gen_frames_as_img, fid_model, batch_size=args.batch, dims=2048,\n cuda=True, keep_size=False)\n fid = dict()\n fid['fid_images'] = float(calculate_frechet_distance(fid_gt_means,\n fid_gt_std, fid_gen_means, fid_gen_std))\n for src_path in sorted(glob.glob(os.path.join(args.gen_videos, '*'))):\n if not os.path.isdir(src_path):\n continue\n print(f'Processing {src_path}')\n if src_path.endswith('/'):\n src_path = src_path[:-1]\n vname = os.path.basename(src_path)\n frames = load_video_frames_from_folder(src_path, frame_template=\n args.frametemplate) / 2 + 0.5\n assert not 
(frames < 0).any() and not (frames > 1).any()\n cur_segm_scores = segmentation_network.predict(frames[:1].cuda(),\n imgSizes=[args.resolution])\n cur_segm_proba = F.softmax(cur_segm_scores, dim=1)\n movable_scores = cur_segm_proba[:, MOVABLE_CLASSES].max(1, keepdim=True\n )[0]\n immovable_scores = cur_segm_proba[:, [c for c in range(\n cur_segm_proba.shape[1]) if c not in MOVABLE_CLASSES]].max(1,\n keepdim=True)[0]\n shift_mask = (movable_scores > immovable_scores).float()\n print('Flow metrics...')\n flow_metrics = calc_optical_flow_metrics(flow_predictor, frames, \n shift_mask > 0)\n print('LPIPS metrics...')\n cur_metrics = collections.defaultdict(float)\n lpips = []\n for l in range(1, frames.shape[0], args.batch):\n r = min(l + args.batch, frames.shape[0])\n lpips.append(float(lpips_criterion(frames[l:r].cuda() * (1 -\n shift_mask), frames[0].cuda() * (1 - shift_mask))))\n cur_metrics['lpips_gen'] = np.mean(lpips)\n sum_dicts(cur_metrics, flow_metrics)\n all_metrics.append(cur_metrics)\n all_metrics_idx.append(vname)\n real_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.\n real_images), '*.jpg')))\n real_frames_as_img = []\n real_frames_with_segm = {}\n for fname in real_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame = frame.resize(get_scale_size(args.resolution, frame.size))\n frame_batch = image2batch(frame).cuda()\n frame_batch = (frame_batch - frame_batch.min()) / (frame_batch.max(\n ) - frame_batch.min())\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n scaled_size = get_scale_size(args.resolution, frame_batch.shape[2:])\n frame_batch = F.interpolate(frame_batch, size=scaled_size, mode=\n 'bilinear', align_corners=False)\n crop_y1, crop_y2, crop_x1, crop_x2 = (\n choose_center_full_size_crop_params(*frame_batch.shape[2:]))\n frame_batch = frame_batch[:, :, crop_y1:crop_y2, crop_x1:crop_x2]\n frame_img = batch2pil(frame_batch)[0]\n real_frames_as_img.append(frame_img)\n cur_segm_scores = 
segmentation_network.predict(frame_batch,\n imgSizes=[args.resolution])\n cur_segm_proba = F.softmax(cur_segm_scores, dim=1)\n f_id = os.path.splitext(os.path.basename(fname))[0]\n real_frames_with_segm[f_id] = frame_batch, cur_segm_proba\n animated_frames_by_i = collections.defaultdict(list)\n for src_path in sorted(glob.glob(os.path.join(args.animated_images, '*'))):\n if not os.path.isdir(src_path):\n continue\n print(f'Processing {src_path}')\n if src_path.endswith('/'):\n src_path = src_path[:-1]\n vname = os.path.basename(src_path)\n frames = load_video_frames_from_folder(src_path, frame_template=\n args.frametemplate) / 2 + 0.5\n assert not (frames < 0).any() and not (frames > 1).any()\n for i, fr in enumerate(batch2pil(frames)):\n animated_frames_by_i[i].append(fr)\n cur_real_frame = None\n cur_real_segm_proba = None\n for frname, (fr, segm) in real_frames_with_segm.items():\n if vname.startswith(frname):\n cur_real_frame = fr\n cur_real_segm_proba = segm\n break\n assert cur_real_frame is not None, (vname, real_frames_with_segm.keys()\n )\n movable_scores = cur_real_segm_proba[:, MOVABLE_CLASSES].max(1,\n keepdim=True)[0]\n immovable_scores = cur_real_segm_proba[:, [c for c in range(\n cur_real_segm_proba.shape[1]) if c not in MOVABLE_CLASSES]].max(\n 1, keepdim=True)[0]\n shift_mask = (movable_scores > immovable_scores).float()\n print('Flow metrics...')\n flow_metrics = calc_optical_flow_metrics(flow_predictor, frames, \n shift_mask > 0)\n print('LPIPS metrics...')\n cur_metrics = collections.defaultdict(float)\n cur_metrics['lpips_1_frame'] = float(lpips_criterion(frames[:1],\n cur_real_frame))\n lpips = []\n for l in range(0, frames.shape[0], args.batch):\n r = min(l + args.batch, frames.shape[0])\n lpips.append(float(lpips_criterion(frames[l:r].cuda() * (1 -\n shift_mask), cur_real_frame.cuda() * (1 - shift_mask))))\n cur_metrics['lpips_anim'] = np.mean(lpips)\n sum_dicts(cur_metrics, flow_metrics)\n all_metrics.append(cur_metrics)\n 
all_metrics_idx.append(vname)\n print('Calculating FID...')\n block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]\n fid_model = InceptionV3([block_idx]).cuda()\n fid_real_means, fid_real_std = _compute_statistics_of_images(\n real_frames_as_img, fid_model, batch_size=args.batch, dims=2048,\n cuda=True, keep_size=False)\n for i, cur_gen_frames in animated_frames_by_i.items():\n if i % args.skipframe != 0:\n continue\n cur_fid_means, cur_fid_std = _compute_statistics_of_images(\n cur_gen_frames, fid_model, batch_size=args.batch, dims=2048,\n cuda=True, keep_size=False)\n fid[f'fid_{i}'] = float(calculate_frechet_distance(fid_real_means,\n fid_real_std, cur_fid_means, cur_fid_std))\n all_metrics.append(fid)\n all_metrics_idx.append('global_metrics')\n os.makedirs(os.path.dirname(args.outpath), exist_ok=True)\n sum_metrics = pd.DataFrame(all_metrics, index=all_metrics_idx)\n sum_metrics.to_csv(args.outpath, sep='\\t')\n\n\nif __name__ == '__main__':\n import argparse\n aparser = argparse.ArgumentParser()\n aparser.add_argument('--outpath', type=str, default=\n 'results/metrics.csv', help='Path to file to write metrics to')\n aparser.add_argument('--gen-images', type=str, default=\n 'results/generated/256/images', help='Path to generated images')\n aparser.add_argument('--gt-images', type=str, default=\n 'results/gt_images', help='Path to gt-images')\n aparser.add_argument('--gen-videos', type=str, default=\n 'results/generated/256/noise', help=\n 'Path to generated videos (separate folder with frames for each video)'\n )\n aparser.add_argument('--animated-images', type=str, default=\n 'results/encode_and_animate_results/test_images/02_eoif', help=\n 'Path to animated images (separate folder with frames for each video)')\n aparser.add_argument('--real-images', type=str, default=\n 'results/test_images', help='Path to real input images')\n aparser.add_argument('--frametemplate', type=str, default='{:05}.jpg',\n help='Template to generate frame file names')\n 
aparser.add_argument('--resolution', type=int, default=256, help=\n 'Resolution of generated frames')\n aparser.add_argument('--skipframe', type=int, default=10, help=\n 'How many frames to skip before evaluating FID')\n aparser.add_argument('--batch', type=int, default=69, help=\n 'Batch size for FID and LPIPS calculation')\n aparser.add_argument('--segm-network', type=str, default=os.path.join(\n constants.RESULT_DIR,\n 'pretrained_models/ade20k-resnet50dilated-ppm_deepsup'), help=\n 'Path to ade20k-resnet50dilated-ppm_deepsup')\n aparser.add_argument('--flow-network', type=str, default=os.path.join(\n constants.RESULT_DIR, 'pretrained_models/SuperSloMo.ckpt'), help=\n 'Path to SuperSloMo.ckpt')\n aparser.add_argument('--lpips-network', type=str, default=os.path.join(\n constants.RESULT_DIR, 'pretrained_models/lpips_models/vgg.pth'),\n help='Path to vgg.pth')\n main(aparser.parse_args())\n",
"step-5": "#!/usr/bin/env python3\nimport collections\nimport glob\nimport os\n\nimport pandas as pd\nimport numpy as np\nimport torch.nn.functional as F\nimport PIL.Image as Image\n\nfrom inference.base_image_utils import get_scale_size, image2batch, choose_center_full_size_crop_params\nfrom inference.metrics.fid.fid_score import _compute_statistics_of_images, \\\n calculate_frechet_distance\nfrom inference.metrics.fid.inception import InceptionV3\nfrom inference.metrics.lpips import LPIPSLossWrapper\nfrom inference.perspective import load_video_frames_from_folder, FlowPredictor\nfrom inference.segmentation import SegmentationModule\nfrom inference.encode_and_animate import calc_segmentation_posterior_error, sum_dicts\nfrom inference.metrics.ssim import SSIM\nimport constants\n\n\nMOVABLE_CLASSES = [2, 21]\n\n\ndef calc_optical_flow_metrics(flow_predictor, frames, movable_mask):\n if not movable_mask.any():\n return dict(flow_l2=float('nan'))\n\n assert not (frames < 0).any() and not (frames > 1).any()\n flows = flow_predictor.predict_flow(frames * 2 - 1)[1]\n flows_x, flows_y = flows[:, [0]], flows[:, [1]]\n flow_x_median = float(flows_x[movable_mask.expand_as(flows_x)].abs().mean())\n flow_y_median = float(flows_y[movable_mask.expand_as(flows_y)].abs().mean())\n\n result = dict(flow_l2=(flow_x_median ** 2 + flow_y_median ** 2) ** 0.5)\n\n return result\n\n\ndef batch2pil(batch):\n np_batch = ((batch.permute(0, 2, 3, 1) / 2 + 0.5) * 255).clamp(0, 255).cpu().numpy().astype('uint8')\n return [Image.fromarray(ar) for ar in np_batch]\n\n\ndef main(args):\n segmentation_network = SegmentationModule(os.path.expandvars(args.segm_network)).cuda()\n segmentation_network.eval()\n\n lpips_criterion = LPIPSLossWrapper(args.lpips_network).cuda()\n flow_predictor = FlowPredictor(os.path.expandvars(args.flow_network))\n\n all_metrics = []\n all_metrics_idx = []\n\n # load generated images\n gen_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.gen_images), 
'*.jpg')))\n gen_frames_as_img = []\n for fname in gen_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame_batch = image2batch(frame).cuda() / 2 + 0.5\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n frame_img = batch2pil(frame_batch)[0]\n gen_frames_as_img.append(frame_img)\n\n # load gt-images, scale, crop and segment\n gt_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.gt_images), '*.jpg')))\n gt_frames_as_img = []\n for fname in gt_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame = frame.resize(get_scale_size(args.resolution, frame.size))\n\n frame_batch = image2batch(frame).cuda() / 2 + 0.5\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n scaled_size = get_scale_size(args.resolution, frame_batch.shape[2:])\n frame_batch = F.interpolate(frame_batch, size=scaled_size, mode='bilinear', align_corners=False)\n\n crop_y1, crop_y2, crop_x1, crop_x2 = choose_center_full_size_crop_params(*frame_batch.shape[2:])\n frame_batch = frame_batch[:, :, crop_y1:crop_y2, crop_x1:crop_x2]\n\n frame_img = batch2pil(frame_batch)[0]\n gt_frames_as_img.append(frame_img)\n\n # compute FID between generated images and gt\n print('Calculating FID for images...')\n block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]\n fid_model = InceptionV3([block_idx]).cuda()\n fid_gt_means, fid_gt_std = _compute_statistics_of_images(gt_frames_as_img, fid_model,\n batch_size=args.batch,\n dims=2048, cuda=True, keep_size=False)\n fid_gen_means, fid_gen_std = _compute_statistics_of_images(gen_frames_as_img, fid_model,\n batch_size=args.batch,\n dims=2048, cuda=True, keep_size=False)\n fid = dict()\n fid['fid_images'] = float(calculate_frechet_distance(fid_gt_means, fid_gt_std, fid_gen_means, fid_gen_std))\n\n # load generated videos\n for src_path in sorted(glob.glob(os.path.join(args.gen_videos, '*'))):\n if not os.path.isdir(src_path):\n continue\n print(f'Processing {src_path}')\n if src_path.endswith('/'):\n 
src_path = src_path[:-1]\n\n vname = os.path.basename(src_path)\n frames = load_video_frames_from_folder(src_path, frame_template=args.frametemplate) / 2 + 0.5\n assert not (frames < 0).any() and not (frames > 1).any()\n\n # get mask from the first frame\n cur_segm_scores = segmentation_network.predict(frames[:1].cuda(), imgSizes=[args.resolution])\n cur_segm_proba = F.softmax(cur_segm_scores, dim=1)\n \n movable_scores = cur_segm_proba[:, MOVABLE_CLASSES].max(1, keepdim=True)[0]\n immovable_scores = cur_segm_proba[:, [c for c in range(cur_segm_proba.shape[1])\n if c not in MOVABLE_CLASSES]].max(1, keepdim=True)[0]\n shift_mask = (movable_scores > immovable_scores).float()\n\n print('Flow metrics...')\n flow_metrics = calc_optical_flow_metrics(flow_predictor, frames, shift_mask > 0)\n\n print('LPIPS metrics...')\n cur_metrics = collections.defaultdict(float)\n lpips = []\n for l in range(1, frames.shape[0], args.batch):\n r = min(l + args.batch, frames.shape[0])\n lpips.append(float(lpips_criterion(frames[l:r].cuda() * (1 - shift_mask), frames[0].cuda() * (1 - shift_mask))))\n cur_metrics['lpips_gen'] = np.mean(lpips)\n sum_dicts(cur_metrics, flow_metrics)\n\n all_metrics.append(cur_metrics)\n all_metrics_idx.append(vname)\n\n # load real images, from which the videos were generated, scale, crop and segment\n real_frame_paths = list(glob.glob(os.path.join(os.path.expandvars(args.real_images), '*.jpg')))\n real_frames_as_img = []\n real_frames_with_segm = {}\n for fname in real_frame_paths:\n frame = Image.open(fname).convert('RGB')\n frame = frame.resize(get_scale_size(args.resolution, frame.size))\n\n # check the interval of stored numbers: 0..1 || -1..1 || 0..255\n frame_batch = image2batch(frame).cuda()\n frame_batch = (frame_batch - frame_batch.min()) / (frame_batch.max() - frame_batch.min())\n assert not (frame_batch < 0).any() and not (frame_batch > 1).any()\n scaled_size = get_scale_size(args.resolution, frame_batch.shape[2:])\n frame_batch = 
F.interpolate(frame_batch, size=scaled_size, mode='bilinear', align_corners=False)\n\n crop_y1, crop_y2, crop_x1, crop_x2 = choose_center_full_size_crop_params(*frame_batch.shape[2:])\n frame_batch = frame_batch[:, :, crop_y1:crop_y2, crop_x1:crop_x2]\n\n frame_img = batch2pil(frame_batch)[0]\n real_frames_as_img.append(frame_img)\n\n cur_segm_scores = segmentation_network.predict(frame_batch, imgSizes=[args.resolution])\n cur_segm_proba = F.softmax(cur_segm_scores, dim=1)\n f_id = os.path.splitext(os.path.basename(fname))[0]\n real_frames_with_segm[f_id] = (frame_batch, cur_segm_proba)\n\n # load videos -- animated real images\n animated_frames_by_i = collections.defaultdict(list)\n\n for src_path in sorted(glob.glob(os.path.join(args.animated_images, '*'))):\n if not os.path.isdir(src_path):\n continue\n print(f'Processing {src_path}')\n if src_path.endswith('/'):\n src_path = src_path[:-1]\n\n vname = os.path.basename(src_path)\n frames = load_video_frames_from_folder(src_path, frame_template=args.frametemplate) / 2 + 0.5\n assert not (frames < 0).any() and not (frames > 1).any()\n\n for i, fr in enumerate(batch2pil(frames)):\n animated_frames_by_i[i].append(fr)\n\n cur_real_frame = None\n cur_real_segm_proba = None\n for frname, (fr, segm) in real_frames_with_segm.items():\n if vname.startswith(frname):\n cur_real_frame = fr\n cur_real_segm_proba = segm\n break\n assert cur_real_frame is not None, (vname, real_frames_with_segm.keys())\n\n movable_scores = cur_real_segm_proba[:, MOVABLE_CLASSES].max(1, keepdim=True)[0]\n immovable_scores = cur_real_segm_proba[:, [c for c in range(cur_real_segm_proba.shape[1])\n if c not in MOVABLE_CLASSES]].max(1, keepdim=True)[0]\n shift_mask = (movable_scores > immovable_scores).float()\n\n print('Flow metrics...')\n flow_metrics = calc_optical_flow_metrics(flow_predictor, frames, shift_mask > 0)\n\n print('LPIPS metrics...')\n cur_metrics = collections.defaultdict(float)\n cur_metrics['lpips_1_frame'] = 
float(lpips_criterion(frames[:1], cur_real_frame))\n\n lpips = []\n for l in range(0, frames.shape[0], args.batch):\n r = min(l + args.batch, frames.shape[0])\n lpips.append(float(lpips_criterion(frames[l:r].cuda() * (1 - shift_mask), cur_real_frame.cuda() * (1 - shift_mask))))\n cur_metrics['lpips_anim'] = np.mean(lpips)\n\n sum_dicts(cur_metrics, flow_metrics)\n\n all_metrics.append(cur_metrics)\n all_metrics_idx.append(vname)\n\n print('Calculating FID...')\n block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]\n fid_model = InceptionV3([block_idx]).cuda()\n fid_real_means, fid_real_std = _compute_statistics_of_images(real_frames_as_img, fid_model,\n batch_size=args.batch,\n dims=2048, cuda=True, keep_size=False)\n for i, cur_gen_frames in animated_frames_by_i.items():\n if i % args.skipframe != 0:\n continue\n cur_fid_means, cur_fid_std = _compute_statistics_of_images(cur_gen_frames, fid_model,\n batch_size=args.batch,\n dims=2048, cuda=True, keep_size=False)\n fid[f'fid_{i}'] = float(calculate_frechet_distance(fid_real_means, fid_real_std,\n cur_fid_means, cur_fid_std))\n\n all_metrics.append(fid)\n all_metrics_idx.append('global_metrics')\n\n os.makedirs(os.path.dirname(args.outpath), exist_ok=True)\n sum_metrics = pd.DataFrame(all_metrics, index=all_metrics_idx)\n sum_metrics.to_csv(args.outpath, sep='\\t')\n\n\nif __name__ == '__main__':\n import argparse\n\n aparser = argparse.ArgumentParser()\n aparser.add_argument('--outpath', type=str, default='results/metrics.csv', help='Path to file to write metrics to')\n aparser.add_argument('--gen-images', type=str, default='results/generated/256/images', help='Path to generated images')\n aparser.add_argument('--gt-images', type=str, default='results/gt_images', help='Path to gt-images')\n aparser.add_argument('--gen-videos', type=str, default='results/generated/256/noise', \n help='Path to generated videos (separate folder with frames for each video)')\n aparser.add_argument('--animated-images', type=str,\n 
default='results/encode_and_animate_results/test_images/02_eoif', \n help='Path to animated images (separate folder with frames for each video)')\n aparser.add_argument('--real-images', type=str, default='results/test_images', help='Path to real input images')\n aparser.add_argument('--frametemplate', type=str,\n default='{:05}.jpg',\n help='Template to generate frame file names')\n aparser.add_argument('--resolution', type=int, default=256, help='Resolution of generated frames')\n aparser.add_argument('--skipframe', type=int, default=10, help='How many frames to skip before evaluating FID')\n aparser.add_argument('--batch', type=int, default=69, help='Batch size for FID and LPIPS calculation')\n aparser.add_argument('--segm-network', type=str,\n default=os.path.join(constants.RESULT_DIR, 'pretrained_models/ade20k-resnet50dilated-ppm_deepsup'),\n help='Path to ade20k-resnet50dilated-ppm_deepsup')\n aparser.add_argument('--flow-network', type=str,\n default=os.path.join(constants.RESULT_DIR, 'pretrained_models/SuperSloMo.ckpt'),\n help='Path to SuperSloMo.ckpt')\n aparser.add_argument('--lpips-network', type=str,\n default=os.path.join(constants.RESULT_DIR, 'pretrained_models/lpips_models/vgg.pth'),\n help='Path to vgg.pth')\n\n main(aparser.parse_args())\n\n",
"step-ids": [
0,
4,
5,
6,
7
]
}
|
[
0,
4,
5,
6,
7
] |
#
# Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
__all__ = ['lenet_mnist']
class Lenet(nn.Module):
    """LeNet-style CNN for MNIST (1x28x28 input, 10 classes) that injects
    Gaussian weight noise into the final fully-connected layer on every
    forward pass (noise std = ``scale`` * current std of the fc2 weights).
    """

    def __init__(self):
        super(Lenet, self).__init__()
        self.conv1 = nn.Conv2d(1, 20, 5)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(20, 50, 5)
        # 28 -> conv5 -> 24 -> pool -> 12 -> conv5 -> 8 -> pool -> 4,
        # so the flattened feature size is 50 * 4 * 4 = 800.
        self.fc1 = nn.Linear(800, 500)
        self.fc2 = nn.Linear(500, 10)

    def forward(self, x):
        """Run a forward pass, applying a fresh noise draw to fc2's weights.

        The original implementation overwrote ``self.fc2.weight`` with the
        noisy copy on every call, so noise accumulated across forward passes
        and any optimizer holding a reference to the old parameter was
        silently detached. Here the noisy weight is used functionally via
        ``F.linear`` and the stored parameter is left untouched, so each pass
        sees a single, independent perturbation.

        Args:
            x: input batch of shape (N, 1, 28, 28).

        Returns:
            Tensor of shape (N, 10) of non-negative activations.
        """
        layer_w = self.fc2.weight
        scale = 0.17  # relative noise level: noise std = scale * weight std
        sigma = layer_w.std().detach()  # detached: noise is not part of the graph
        # Draw noise on the same device/dtype as the weights — no hard-coded
        # CUDA transfer and no numpy round-trip.
        noise = torch.randn_like(layer_w) * (scale * sigma)
        noisy_w = layer_w + noise

        x = self.pool(F.relu(self.conv1(x)))
        x = self.pool(F.relu(self.conv2(x)))
        x = x.view(-1, 800)
        x = F.relu(self.fc1(x))
        # NOTE(review): ReLU on the output layer zeroes negative logits; kept
        # to preserve the original model's behaviour — confirm this is intended
        # before training with cross-entropy.
        x = F.relu(F.linear(x, noisy_w, self.fc2.bias))
        return x
def lenet_mnist():
    """Factory returning a freshly initialised :class:`Lenet` for MNIST."""
    return Lenet()
|
normal
|
{
"blob_id": "a38a5010c9edbed0929da225b4288396bb0d814e",
"index": 6989,
"step-1": "<mask token>\n\n\nclass Lenet(nn.Module):\n <mask token>\n\n def forward(self, x):\n layer_w = self.fc2.weight\n sigma = layer_w.std().data.cpu().numpy()\n layer_w_numpy = layer_w.data.cpu().numpy()\n scale = 0.17\n noise = np.random.normal(0, scale * sigma, layer_w.size())\n w_noise = np.add(layer_w_numpy, noise)\n w_noise_tensor = torch.tensor(w_noise)\n w_noise_tensor = w_noise_tensor.to('cuda')\n w_noise = torch.nn.Parameter(w_noise_tensor.float())\n self.fc2.weight = w_noise\n x = self.pool(F.relu(self.conv1(x)))\n x = self.pool(F.relu(self.conv2(x)))\n x = x.view(-1, 800)\n x = F.relu(self.fc1(x))\n x = F.relu(self.fc2(x))\n return x\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Lenet(nn.Module):\n\n def __init__(self):\n super(Lenet, self).__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.pool = nn.MaxPool2d(2, 2)\n self.conv2 = nn.Conv2d(20, 50, 5)\n self.fc1 = nn.Linear(800, 500)\n self.fc2 = nn.Linear(500, 10)\n\n def forward(self, x):\n layer_w = self.fc2.weight\n sigma = layer_w.std().data.cpu().numpy()\n layer_w_numpy = layer_w.data.cpu().numpy()\n scale = 0.17\n noise = np.random.normal(0, scale * sigma, layer_w.size())\n w_noise = np.add(layer_w_numpy, noise)\n w_noise_tensor = torch.tensor(w_noise)\n w_noise_tensor = w_noise_tensor.to('cuda')\n w_noise = torch.nn.Parameter(w_noise_tensor.float())\n self.fc2.weight = w_noise\n x = self.pool(F.relu(self.conv1(x)))\n x = self.pool(F.relu(self.conv2(x)))\n x = x.view(-1, 800)\n x = F.relu(self.fc1(x))\n x = F.relu(self.fc2(x))\n return x\n\n\ndef lenet_mnist():\n model = Lenet()\n return model\n",
"step-3": "<mask token>\n__all__ = ['lenet_mnist']\n\n\nclass Lenet(nn.Module):\n\n def __init__(self):\n super(Lenet, self).__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.pool = nn.MaxPool2d(2, 2)\n self.conv2 = nn.Conv2d(20, 50, 5)\n self.fc1 = nn.Linear(800, 500)\n self.fc2 = nn.Linear(500, 10)\n\n def forward(self, x):\n layer_w = self.fc2.weight\n sigma = layer_w.std().data.cpu().numpy()\n layer_w_numpy = layer_w.data.cpu().numpy()\n scale = 0.17\n noise = np.random.normal(0, scale * sigma, layer_w.size())\n w_noise = np.add(layer_w_numpy, noise)\n w_noise_tensor = torch.tensor(w_noise)\n w_noise_tensor = w_noise_tensor.to('cuda')\n w_noise = torch.nn.Parameter(w_noise_tensor.float())\n self.fc2.weight = w_noise\n x = self.pool(F.relu(self.conv1(x)))\n x = self.pool(F.relu(self.conv2(x)))\n x = x.view(-1, 800)\n x = F.relu(self.fc1(x))\n x = F.relu(self.fc2(x))\n return x\n\n\ndef lenet_mnist():\n model = Lenet()\n return model\n",
"step-4": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport numpy as np\n__all__ = ['lenet_mnist']\n\n\nclass Lenet(nn.Module):\n\n def __init__(self):\n super(Lenet, self).__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.pool = nn.MaxPool2d(2, 2)\n self.conv2 = nn.Conv2d(20, 50, 5)\n self.fc1 = nn.Linear(800, 500)\n self.fc2 = nn.Linear(500, 10)\n\n def forward(self, x):\n layer_w = self.fc2.weight\n sigma = layer_w.std().data.cpu().numpy()\n layer_w_numpy = layer_w.data.cpu().numpy()\n scale = 0.17\n noise = np.random.normal(0, scale * sigma, layer_w.size())\n w_noise = np.add(layer_w_numpy, noise)\n w_noise_tensor = torch.tensor(w_noise)\n w_noise_tensor = w_noise_tensor.to('cuda')\n w_noise = torch.nn.Parameter(w_noise_tensor.float())\n self.fc2.weight = w_noise\n x = self.pool(F.relu(self.conv1(x)))\n x = self.pool(F.relu(self.conv2(x)))\n x = x.view(-1, 800)\n x = F.relu(self.fc1(x))\n x = F.relu(self.fc2(x))\n return x\n\n\ndef lenet_mnist():\n model = Lenet()\n return model\n",
"step-5": "#\n# Copyright (c) 2018 Intel Corporation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport numpy as np \n\n__all__ = ['lenet_mnist']\n\nclass Lenet(nn.Module):\n def __init__(self):\n super(Lenet, self).__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.pool = nn.MaxPool2d(2, 2)\n self.conv2 = nn.Conv2d(20, 50, 5)\n self.fc1 = nn.Linear(800, 500)\n self.fc2 = nn.Linear(500, 10)\n\n def forward(self, x):\n #print(\"weights sizes\")\n #print(self.conv1.weight.size())\n layer_w = self.fc2.weight\n sigma = layer_w.std().data.cpu().numpy()\n layer_w_numpy = layer_w.data.cpu().numpy()\n scale = 0.17\n noise = np.random.normal(0, scale*sigma, layer_w.size())\n w_noise = np.add(layer_w_numpy, noise)\n w_noise_tensor = torch.tensor(w_noise)\n #print(w_noise_tensor.size())\n w_noise_tensor = w_noise_tensor.to('cuda')\n w_noise = torch.nn.Parameter(w_noise_tensor.float())\n self.fc2.weight = w_noise \n #print(\"---------------------\")\n #print(self.conv2.weight.size())\n #print(\"---------------------\")\n #print(self.fc1.weight.size())\n #print(\"---------------------\")\n #print(self.fc2.weight.size())\n #print(\"---------------------\")\n x = self.pool(F.relu(self.conv1(x)))\n x = self.pool(F.relu(self.conv2(x)))\n x = x.view(-1, 800)\n x = F.relu(self.fc1(x))\n x = F.relu(self.fc2(x))\n #x = nn.Threshold(0.2, 0.0)#ActivationZeroThreshold(x)\n return x\n\ndef lenet_mnist():\n 
model = Lenet()\n return model\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
from mesa.visualization.modules import CanvasGrid
from mesa.visualization.ModularVisualization import ModularServer
from mesa.visualization.modules import ChartModule
from mesa.batchrunner import BatchRunner
from agentPortrayal import agent_portrayal
import metrics
from matplotlib import pyplot as plt
from ArchitectureModel import MASArchitecture
import os
import random
import sys
# Run-mode / experiment configuration flags.
runBatch = True  # True: headless BatchRunner parameter sweep; False: launch the browser visualisation server
architecture = 'Inter-Firm'  # label for the architecture variant under test — appears unused below; presumably informational, confirm
saveResults = True  # when batch-running, pickle the model/agent dataframes under results/test_<n>
if __name__ == '__main__':

    dir_path = os.path.dirname(os.path.realpath(__file__))

    if runBatch:
        # Headless sweep: every combination of the variable parameters is run
        # `iterations` times for up to `max_steps` steps per run.
        fixed_params = {'width': 60, 'height': 60, 'splitSize': 1, 'distributed': True,
                        'verbose': False, 'searchSize': 1, 'batchRun': True}
        variable_params = {'quantity': [10, 20, 50, 80, 100, 120, 150],
                           'ordersPerWeek': [1, 5, 20, 40, 80, 120]}

        batch_run = BatchRunner(
            MASArchitecture,
            variable_params,
            fixed_params,
            iterations=10,
            max_steps=800,
            # Model-level metrics collected at the end of every run.
            model_reporters={
                'Utilisation': metrics.machineUtilisation,
                'CompleteOrders': metrics.ordersComplete,
                'AverageOrderWaitTime': metrics.averageOrderWaitTime,
                'TotalMessagesSent': metrics.totalMessagesSent,
                'AverageMessagesSent': metrics.averageMessagesSent,
                'SuccessfulOrders': metrics.successfulOrders,
                'noProposalOrders': metrics.noProposalOrders,
                'OutsourcedOrders': metrics.outsourcedOrders,
                'LateOrders': metrics.lateOrders,
                'WIPBacklog': metrics.totalWIPSize,
                'MaxMessagesSentOrder': metrics.maxMessagesSentFromOrder,
                'MaxMessagesReceivedOrder': metrics.maxMessagesReceivedByOrder,
                'MaxMessagesSentFactory': metrics.maxMessagesSentFromFactory,
                'MaxMessagesReceivedFactory': metrics.maxMessagesReceivedByFactory,
                'AverageSatisfactionScore': metrics.averageSatisfactionScore,
                'AverageSuccessfulSatisfactionScore': metrics.averageSuccessfulSatisfactionScore,
                'CheapOrdersWithCheapMachines': metrics.cheapOrdersWithCheapMachines,
                'AsapOrdersWithFastMachines': metrics.asapOrdersWithFastMachines,
                'AverageSuccessfulPrice': metrics.averageSuccessfulOrderPrice,
                'AverageSuccessfulOrderPriceCheap': metrics.averageSuccessfulOrderPriceCheap,
                'AverageSuccessfulOrderPriceNeutral': metrics.averageSuccessfulOrderPriceNeutral,
                'AverageSuccessfulOrderPriceAsap': metrics.averageSuccessfulOrderPriceAsap,
                'AverageSuccessfulMakespan': metrics.averageSuccessfulOrderMakeSpan,
                'AverageSuccessfulOrderMakespanCheap': metrics.averageSuccessfulOrderMakespanCheap,
                'AverageSuccessfulOrderMakespanNeutral': metrics.averageSuccessfulOrderMakespanNeutral,
                'AverageSuccessfulOrderMakespanAsap': metrics.averageSuccessfulOrderMakespanAsap,
                'SuccessfulAsapOrders': metrics.percentageOfSuccessfulASAPOrders,
                'SuccessfulCheapOrders': metrics.percentageOfSuccessfulCheapOrders,
                'SuccessfulNeutralOrders': metrics.percentageOfSuccessfulNeutralOrders,
            },
            agent_reporters={
                'id': 'unique_id',
                # TODO: add in other agent reports that you would like to use
            }
        )

        batch_run.run_all()
        model_data = batch_run.get_model_vars_dataframe()
        agent_data = batch_run.get_agent_vars_dataframe()

        if saveResults:
            # Write results into the first free results/test_<n> directory
            # next to this script.
            number = 0
            while os.path.exists('{}/results/test_{}'.format(dir_path, number)):
                number += 1
            # TODO: maybe make a text file that describes the test that has been run
            os.makedirs('{}/results/test_{}'.format(dir_path, number))
            model_data.to_pickle('{}/results/test_{}/model_data.pkl'.format(dir_path, number))
            agent_data.to_pickle('{}/results/test_{}/agent_data.pkl'.format(dir_path, number))
    else:
        # Interactive browser visualisation of a single model run.
        grid = CanvasGrid(agent_portrayal, 60, 60, 600, 600)

        chart = ChartModule([{'Label': 'Utilisation', 'Color': 'Black'}], data_collector_name='datacollector')
        chart2 = ChartModule([{'Label': 'Complete Orders', 'Color': 'Black'}], data_collector_name='datacollector')
        chart3 = ChartModule([{'Label': 'Average Order Wait Time', 'Color': 'Red'}], data_collector_name='datacollector')
        chart4 = ChartModule([{'Label': 'Total Messages Sent', 'Color': 'Red'}], data_collector_name='datacollector')
        averageMessagesSentChart = ChartModule([{'Label': 'Average Messages Sent', 'Color': 'Red'}], data_collector_name='datacollector')
        chart5 = ChartModule([{'Label': 'Successful Orders', 'Color': 'Green'}], data_collector_name='datacollector')
        chart6 = ChartModule([{'Label': 'Outsourced Orders', 'Color': 'Blue'}], data_collector_name='datacollector')
        chart7 = ChartModule([{'Label': 'Late Orders', 'Color': 'Red'}], data_collector_name='datacollector')
        chart8 = ChartModule([{'Label': 'WIP Backlog', 'Color': 'Blue'}], data_collector_name='datacollector')
        chart9 = ChartModule([{'Label': 'Max Messages Sent - Order', 'Color': 'Blue'}], data_collector_name='datacollector')
        chart10 = ChartModule([{'Label': 'Max Messages Received - Order', 'Color': 'Blue'}], data_collector_name='datacollector')
        chart11 = ChartModule([{'Label': 'Max Messages Sent - Factory', 'Color': 'Red'}], data_collector_name='datacollector')
        chart12 = ChartModule([{'Label': 'Max Messages Received - Factory', 'Color': 'Red'}], data_collector_name='datacollector')
        chart13 = ChartModule([{'Label': 'Average satisfaction score', 'Color': 'Blue'}], data_collector_name='datacollector')
        chart14 = ChartModule([{'Label': 'Average successful satisfaction score', 'Color': 'Blue'}], data_collector_name='datacollector')
        chart15 = ChartModule([{'Label': '% Cheap orders with cheap machines', 'Color': 'Green'}], data_collector_name='datacollector')
        chart16 = ChartModule([{'Label': '% Asap orders with fast machines', 'Color': 'Green'}], data_collector_name='datacollector')
        chart17 = ChartModule([{'Label': 'Average successful price', 'Color': 'Blue'}], data_collector_name='datacollector')
        chart18 = ChartModule([{'Label': 'Average successful price Cheap', 'Color': 'Blue'}], data_collector_name='datacollector')
        chart19 = ChartModule([{'Label': 'Average successful price Neutral', 'Color': 'Blue'}], data_collector_name='datacollector')
        chart20 = ChartModule([{'Label': 'Average successful price Asap', 'Color': 'Blue'}], data_collector_name='datacollector')
        chart21 = ChartModule([{'Label': 'Average successful makespan', 'Color': 'Green'}], data_collector_name='datacollector')
        chart22 = ChartModule([{'Label': 'Average successful makespan Cheap', 'Color': 'Green'}], data_collector_name='datacollector')
        chart23 = ChartModule([{'Label': 'Average successful makespan Neutral', 'Color': 'Green'}], data_collector_name='datacollector')
        chart24 = ChartModule([{'Label': 'Average successful makespan Asap', 'Color': 'Green'}], data_collector_name='datacollector')
        chart25 = ChartModule([{'Label': 'Successful Cheap Orders', 'Color': 'Red'}], data_collector_name='datacollector')
        chart26 = ChartModule([{'Label': 'Successful Neutral Orders', 'Color': 'Red'}], data_collector_name='datacollector')
        chart27 = ChartModule([{'Label': 'Successful Asap Orders', 'Color': 'Red'}], data_collector_name='datacollector')
        noProposalOrdersChart = ChartModule([{'Label': 'Orders that received no proposals', 'Color': 'Red'}], data_collector_name='datacollector')

        server = ModularServer(
            MASArchitecture,
            [grid, chart, chart2, chart3, chart4, averageMessagesSentChart, chart5,
             noProposalOrdersChart, chart6, chart7, chart8, chart9, chart10, chart11,
             chart12, chart13, chart14, chart15, chart16, chart17, chart18, chart19,
             chart20, chart21, chart22, chart23, chart24, chart25, chart26, chart27],
            'Festo-Fetch.ai',
            {'width': 60, 'height': 60, 'distributed': True, 'quantity': 10, 'splitSize': 1,
             'newOrderProbability': 5, 'verbose': True, 'ordersPerWeek': 40,
             'model_reporters_dict': {
                 'Utilisation': metrics.machineUtilisation,
                 'Complete Orders': metrics.ordersComplete,
                 'Average Order Wait Time': metrics.averageOrderWaitTime,
                 'Successful Orders': metrics.successfulOrders,
                 'Total Messages Sent': metrics.totalMessagesSent,
                 'Average Messages Sent': metrics.averageMessagesSent,
                 'Late Orders': metrics.lateOrders,
                 'WIP Backlog': metrics.totalWIPSize,
                 'Max Messages Sent - Order': metrics.maxMessagesSentFromOrder,
                 'Max Messages Received - Order': metrics.maxMessagesReceivedByOrder,
                 'Max Messages Sent - Factory': metrics.maxMessagesSentFromFactory,
                 'Max Messages Received - Factory': metrics.maxMessagesReceivedByFactory,
                 'Outsourced Orders': metrics.outsourcedOrders,
                 'Orders that received no proposals': metrics.noProposalOrders,
                 'Average successful satisfaction score': metrics.averageSuccessfulSatisfactionScore,
                 'Average satisfaction score': metrics.averageSatisfactionScore,
                 '% Cheap orders with cheap machines': metrics.cheapOrdersWithCheapMachines,
                 '% Asap orders with fast machines': metrics.asapOrdersWithFastMachines,
                 'Average successful price': metrics.averageSuccessfulOrderPrice,
                 'Average successful price Cheap': metrics.averageSuccessfulOrderPriceCheap,
                 'Average successful price Neutral': metrics.averageSuccessfulOrderPriceNeutral,
                 'Average successful price Asap': metrics.averageSuccessfulOrderPriceAsap,
                 'Average successful makespan': metrics.averageSuccessfulOrderMakeSpan,
                 'Average successful makespan Cheap': metrics.averageSuccessfulOrderMakespanCheap,
                 'Average successful makespan Neutral': metrics.averageSuccessfulOrderMakespanNeutral,
                 'Average successful makespan Asap': metrics.averageSuccessfulOrderMakespanAsap,
                 # BUG FIX: these three labels were previously wired to the wrong
                 # metric functions (shifted by one: Cheap->ASAP, Neutral->Cheap,
                 # Asap->Neutral), disagreeing with the batch branch above.
                 'Successful Cheap Orders': metrics.percentageOfSuccessfulCheapOrders,
                 'Successful Neutral Orders': metrics.percentageOfSuccessfulNeutralOrders,
                 'Successful Asap Orders': metrics.percentageOfSuccessfulASAPOrders,
             }})

        server.port = 8521
        server.launch()
|
normal
|
{
"blob_id": "57b51ea36e9e2a095cf7e9646db2cc400cc72b83",
"index": 1082,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n dir_path = os.path.dirname(os.path.realpath(__file__))\n if runBatch:\n fixed_params = {'width': 60, 'height': 60, 'splitSize': 1,\n 'distributed': True, 'verbose': False, 'searchSize': 1,\n 'batchRun': True}\n variable_params = {'quantity': [10, 20, 50, 80, 100, 120, 150],\n 'ordersPerWeek': [1, 5, 20, 40, 80, 120]}\n batch_run = BatchRunner(MASArchitecture, variable_params,\n fixed_params, iterations=10, max_steps=800, model_reporters={\n 'Utilisation': metrics.machineUtilisation, 'CompleteOrders':\n metrics.ordersComplete, 'AverageOrderWaitTime': metrics.\n averageOrderWaitTime, 'TotalMessagesSent': metrics.\n totalMessagesSent, 'AverageMessagesSent': metrics.\n averageMessagesSent, 'SuccessfulOrders': metrics.\n successfulOrders, 'noProposalOrders': metrics.noProposalOrders,\n 'OutsourcedOrders': metrics.outsourcedOrders, 'LateOrders':\n metrics.lateOrders, 'WIPBacklog': metrics.totalWIPSize,\n 'MaxMessagesSentOrder': metrics.maxMessagesSentFromOrder,\n 'MaxMessagesReceivedOrder': metrics.maxMessagesReceivedByOrder,\n 'MaxMessagesSentFactory': metrics.maxMessagesSentFromFactory,\n 'MaxMessagesReceivedFactory': metrics.\n maxMessagesReceivedByFactory, 'AverageSatisfactionScore':\n metrics.averageSatisfactionScore,\n 'AverageSuccessfulSatisfactionScore': metrics.\n averageSuccessfulSatisfactionScore,\n 'CheapOrdersWithCheapMachines': metrics.\n cheapOrdersWithCheapMachines, 'AsapOrdersWithFastMachines':\n metrics.asapOrdersWithFastMachines, 'AverageSuccessfulPrice':\n metrics.averageSuccessfulOrderPrice,\n 'AverageSuccessfulOrderPriceCheap': metrics.\n averageSuccessfulOrderPriceCheap,\n 'AverageSuccessfulOrderPriceNeutral': metrics.\n averageSuccessfulOrderPriceNeutral,\n 'AverageSuccessfulOrderPriceAsap': metrics.\n averageSuccessfulOrderPriceAsap, 'AverageSuccessfulMakespan':\n metrics.averageSuccessfulOrderMakeSpan,\n 'AverageSuccessfulOrderMakespanCheap': metrics.\n averageSuccessfulOrderMakespanCheap,\n 
'AverageSuccessfulOrderMakespanNeutral': metrics.\n averageSuccessfulOrderMakespanNeutral,\n 'AverageSuccessfulOrderMakespanAsap': metrics.\n averageSuccessfulOrderMakespanAsap, 'SuccessfulAsapOrders':\n metrics.percentageOfSuccessfulASAPOrders,\n 'SuccessfulCheapOrders': metrics.\n percentageOfSuccessfulCheapOrders, 'SuccessfulNeutralOrders':\n metrics.percentageOfSuccessfulNeutralOrders}, agent_reporters={\n 'id': 'unique_id'})\n batch_run.run_all()\n model_data = batch_run.get_model_vars_dataframe()\n agent_data = batch_run.get_agent_vars_dataframe()\n if saveResults:\n number = 0\n while os.path.exists('{}/results/test_{}'.format(dir_path, number)\n ) == True:\n number += 1\n os.makedirs('{}/results/test_{}'.format(dir_path, number))\n model_data.to_pickle('{}/results/test_{}/model_data.pkl'.format\n (dir_path, number))\n agent_data.to_pickle('{}/results/test_{}/agent_data.pkl'.format\n (dir_path, number))\n else:\n grid = CanvasGrid(agent_portrayal, 60, 60, 600, 600)\n chart = ChartModule([{'Label': 'Utilisation', 'Color': 'Black'}],\n data_collector_name='datacollector')\n chart2 = ChartModule([{'Label': 'Complete Orders', 'Color': 'Black'\n }], data_collector_name='datacollector')\n chart3 = ChartModule([{'Label': 'Average Order Wait Time', 'Color':\n 'Red'}], data_collector_name='datacollector')\n chart4 = ChartModule([{'Label': 'Total Messages Sent', 'Color':\n 'Red'}], data_collector_name='datacollector')\n averageMessagesSentChart = ChartModule([{'Label':\n 'Average Messages Sent', 'Color': 'Red'}], data_collector_name=\n 'datacollector')\n chart5 = ChartModule([{'Label': 'Successful Orders', 'Color':\n 'Green'}], data_collector_name='datacollector')\n chart6 = ChartModule([{'Label': 'Outsourced Orders', 'Color':\n 'Blue'}], data_collector_name='datacollector')\n chart7 = ChartModule([{'Label': 'Late Orders', 'Color': 'Red'}],\n data_collector_name='datacollector')\n chart8 = ChartModule([{'Label': 'WIP Backlog', 'Color': 'Blue'}],\n 
data_collector_name='datacollector')\n chart9 = ChartModule([{'Label': 'Max Messages Sent - Order',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart10 = ChartModule([{'Label': 'Max Messages Received - Order',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart11 = ChartModule([{'Label': 'Max Messages Sent - Factory',\n 'Color': 'Red'}], data_collector_name='datacollector')\n chart12 = ChartModule([{'Label': 'Max Messages Received - Factory',\n 'Color': 'Red'}], data_collector_name='datacollector')\n chart13 = ChartModule([{'Label': 'Average satisfaction score',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart14 = ChartModule([{'Label':\n 'Average successful satisfaction score', 'Color': 'Blue'}],\n data_collector_name='datacollector')\n chart15 = ChartModule([{'Label':\n '% Cheap orders with cheap machines', 'Color': 'Green'}],\n data_collector_name='datacollector')\n chart16 = ChartModule([{'Label': '% Asap orders with fast machines',\n 'Color': 'Green'}], data_collector_name='datacollector')\n chart17 = ChartModule([{'Label': 'Average successful price',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart18 = ChartModule([{'Label': 'Average successful price Cheap',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart19 = ChartModule([{'Label': 'Average successful price Neutral',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart20 = ChartModule([{'Label': 'Average successful price Asap',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart21 = ChartModule([{'Label': 'Average successful makespan',\n 'Color': 'Green'}], data_collector_name='datacollector')\n chart22 = ChartModule([{'Label':\n 'Average successful makespan Cheap', 'Color': 'Green'}],\n data_collector_name='datacollector')\n chart23 = ChartModule([{'Label':\n 'Average successful makespan Neutral', 'Color': 'Green'}],\n data_collector_name='datacollector')\n chart24 = ChartModule([{'Label': 'Average 
successful makespan Asap',\n 'Color': 'Green'}], data_collector_name='datacollector')\n chart25 = ChartModule([{'Label': 'Successful Cheap Orders', 'Color':\n 'Red'}], data_collector_name='datacollector')\n chart26 = ChartModule([{'Label': 'Successful Neutral Orders',\n 'Color': 'Red'}], data_collector_name='datacollector')\n chart27 = ChartModule([{'Label': 'Successful Asap Orders', 'Color':\n 'Red'}], data_collector_name='datacollector')\n noProposalOrdersChart = ChartModule([{'Label':\n 'Orders that received no proposals', 'Color': 'Red'}],\n data_collector_name='datacollector')\n server = ModularServer(MASArchitecture, [grid, chart, chart2,\n chart3, chart4, averageMessagesSentChart, chart5,\n noProposalOrdersChart, chart6, chart7, chart8, chart9, chart10,\n chart11, chart12, chart13, chart14, chart15, chart16, chart17,\n chart18, chart19, chart20, chart21, chart22, chart23, chart24,\n chart25, chart26, chart27], 'Festo-Fetch.ai', {'width': 60,\n 'height': 60, 'distributed': True, 'quantity': 10, 'splitSize':\n 1, 'newOrderProbability': 5, 'verbose': True, 'ordersPerWeek': \n 40, 'model_reporters_dict': {'Utilisation': metrics.\n machineUtilisation, 'Complete Orders': metrics.ordersComplete,\n 'Average Order Wait Time': metrics.averageOrderWaitTime,\n 'Successful Orders': metrics.successfulOrders,\n 'Total Messages Sent': metrics.totalMessagesSent,\n 'Average Messages Sent': metrics.averageMessagesSent,\n 'Late Orders': metrics.lateOrders, 'WIP Backlog': metrics.\n totalWIPSize, 'Max Messages Sent - Order': metrics.\n maxMessagesSentFromOrder, 'Max Messages Received - Order':\n metrics.maxMessagesReceivedByOrder,\n 'Max Messages Sent - Factory': metrics.\n maxMessagesSentFromFactory, 'Max Messages Received - Factory':\n metrics.maxMessagesReceivedByFactory, 'Outsourced Orders':\n metrics.outsourcedOrders, 'Orders that received no proposals':\n metrics.noProposalOrders,\n 'Average successful satisfaction score': metrics.\n averageSuccessfulSatisfactionScore,\n 
'Average satisfaction score': metrics.averageSatisfactionScore,\n '% Cheap orders with cheap machines': metrics.\n cheapOrdersWithCheapMachines,\n '% Asap orders with fast machines': metrics.\n asapOrdersWithFastMachines, 'Average successful price': metrics\n .averageSuccessfulOrderPrice, 'Average successful price Cheap':\n metrics.averageSuccessfulOrderPriceCheap,\n 'Average successful price Neutral': metrics.\n averageSuccessfulOrderPriceNeutral,\n 'Average successful price Asap': metrics.\n averageSuccessfulOrderPriceAsap, 'Average successful makespan':\n metrics.averageSuccessfulOrderMakeSpan,\n 'Average successful makespan Cheap': metrics.\n averageSuccessfulOrderMakespanCheap,\n 'Average successful makespan Neutral': metrics.\n averageSuccessfulOrderMakespanNeutral,\n 'Average successful makespan Asap': metrics.\n averageSuccessfulOrderMakespanAsap, 'Successful Cheap Orders':\n metrics.percentageOfSuccessfulASAPOrders,\n 'Successful Neutral Orders': metrics.\n percentageOfSuccessfulCheapOrders, 'Successful Asap Orders':\n metrics.percentageOfSuccessfulNeutralOrders}})\n server.port = 8521\n server.launch()\n",
"step-3": "<mask token>\nrunBatch = True\narchitecture = 'Inter-Firm'\nsaveResults = True\nif __name__ == '__main__':\n dir_path = os.path.dirname(os.path.realpath(__file__))\n if runBatch:\n fixed_params = {'width': 60, 'height': 60, 'splitSize': 1,\n 'distributed': True, 'verbose': False, 'searchSize': 1,\n 'batchRun': True}\n variable_params = {'quantity': [10, 20, 50, 80, 100, 120, 150],\n 'ordersPerWeek': [1, 5, 20, 40, 80, 120]}\n batch_run = BatchRunner(MASArchitecture, variable_params,\n fixed_params, iterations=10, max_steps=800, model_reporters={\n 'Utilisation': metrics.machineUtilisation, 'CompleteOrders':\n metrics.ordersComplete, 'AverageOrderWaitTime': metrics.\n averageOrderWaitTime, 'TotalMessagesSent': metrics.\n totalMessagesSent, 'AverageMessagesSent': metrics.\n averageMessagesSent, 'SuccessfulOrders': metrics.\n successfulOrders, 'noProposalOrders': metrics.noProposalOrders,\n 'OutsourcedOrders': metrics.outsourcedOrders, 'LateOrders':\n metrics.lateOrders, 'WIPBacklog': metrics.totalWIPSize,\n 'MaxMessagesSentOrder': metrics.maxMessagesSentFromOrder,\n 'MaxMessagesReceivedOrder': metrics.maxMessagesReceivedByOrder,\n 'MaxMessagesSentFactory': metrics.maxMessagesSentFromFactory,\n 'MaxMessagesReceivedFactory': metrics.\n maxMessagesReceivedByFactory, 'AverageSatisfactionScore':\n metrics.averageSatisfactionScore,\n 'AverageSuccessfulSatisfactionScore': metrics.\n averageSuccessfulSatisfactionScore,\n 'CheapOrdersWithCheapMachines': metrics.\n cheapOrdersWithCheapMachines, 'AsapOrdersWithFastMachines':\n metrics.asapOrdersWithFastMachines, 'AverageSuccessfulPrice':\n metrics.averageSuccessfulOrderPrice,\n 'AverageSuccessfulOrderPriceCheap': metrics.\n averageSuccessfulOrderPriceCheap,\n 'AverageSuccessfulOrderPriceNeutral': metrics.\n averageSuccessfulOrderPriceNeutral,\n 'AverageSuccessfulOrderPriceAsap': metrics.\n averageSuccessfulOrderPriceAsap, 'AverageSuccessfulMakespan':\n metrics.averageSuccessfulOrderMakeSpan,\n 
'AverageSuccessfulOrderMakespanCheap': metrics.\n averageSuccessfulOrderMakespanCheap,\n 'AverageSuccessfulOrderMakespanNeutral': metrics.\n averageSuccessfulOrderMakespanNeutral,\n 'AverageSuccessfulOrderMakespanAsap': metrics.\n averageSuccessfulOrderMakespanAsap, 'SuccessfulAsapOrders':\n metrics.percentageOfSuccessfulASAPOrders,\n 'SuccessfulCheapOrders': metrics.\n percentageOfSuccessfulCheapOrders, 'SuccessfulNeutralOrders':\n metrics.percentageOfSuccessfulNeutralOrders}, agent_reporters={\n 'id': 'unique_id'})\n batch_run.run_all()\n model_data = batch_run.get_model_vars_dataframe()\n agent_data = batch_run.get_agent_vars_dataframe()\n if saveResults:\n number = 0\n while os.path.exists('{}/results/test_{}'.format(dir_path, number)\n ) == True:\n number += 1\n os.makedirs('{}/results/test_{}'.format(dir_path, number))\n model_data.to_pickle('{}/results/test_{}/model_data.pkl'.format\n (dir_path, number))\n agent_data.to_pickle('{}/results/test_{}/agent_data.pkl'.format\n (dir_path, number))\n else:\n grid = CanvasGrid(agent_portrayal, 60, 60, 600, 600)\n chart = ChartModule([{'Label': 'Utilisation', 'Color': 'Black'}],\n data_collector_name='datacollector')\n chart2 = ChartModule([{'Label': 'Complete Orders', 'Color': 'Black'\n }], data_collector_name='datacollector')\n chart3 = ChartModule([{'Label': 'Average Order Wait Time', 'Color':\n 'Red'}], data_collector_name='datacollector')\n chart4 = ChartModule([{'Label': 'Total Messages Sent', 'Color':\n 'Red'}], data_collector_name='datacollector')\n averageMessagesSentChart = ChartModule([{'Label':\n 'Average Messages Sent', 'Color': 'Red'}], data_collector_name=\n 'datacollector')\n chart5 = ChartModule([{'Label': 'Successful Orders', 'Color':\n 'Green'}], data_collector_name='datacollector')\n chart6 = ChartModule([{'Label': 'Outsourced Orders', 'Color':\n 'Blue'}], data_collector_name='datacollector')\n chart7 = ChartModule([{'Label': 'Late Orders', 'Color': 'Red'}],\n data_collector_name='datacollector')\n 
chart8 = ChartModule([{'Label': 'WIP Backlog', 'Color': 'Blue'}],\n data_collector_name='datacollector')\n chart9 = ChartModule([{'Label': 'Max Messages Sent - Order',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart10 = ChartModule([{'Label': 'Max Messages Received - Order',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart11 = ChartModule([{'Label': 'Max Messages Sent - Factory',\n 'Color': 'Red'}], data_collector_name='datacollector')\n chart12 = ChartModule([{'Label': 'Max Messages Received - Factory',\n 'Color': 'Red'}], data_collector_name='datacollector')\n chart13 = ChartModule([{'Label': 'Average satisfaction score',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart14 = ChartModule([{'Label':\n 'Average successful satisfaction score', 'Color': 'Blue'}],\n data_collector_name='datacollector')\n chart15 = ChartModule([{'Label':\n '% Cheap orders with cheap machines', 'Color': 'Green'}],\n data_collector_name='datacollector')\n chart16 = ChartModule([{'Label': '% Asap orders with fast machines',\n 'Color': 'Green'}], data_collector_name='datacollector')\n chart17 = ChartModule([{'Label': 'Average successful price',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart18 = ChartModule([{'Label': 'Average successful price Cheap',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart19 = ChartModule([{'Label': 'Average successful price Neutral',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart20 = ChartModule([{'Label': 'Average successful price Asap',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart21 = ChartModule([{'Label': 'Average successful makespan',\n 'Color': 'Green'}], data_collector_name='datacollector')\n chart22 = ChartModule([{'Label':\n 'Average successful makespan Cheap', 'Color': 'Green'}],\n data_collector_name='datacollector')\n chart23 = ChartModule([{'Label':\n 'Average successful makespan Neutral', 'Color': 'Green'}],\n 
data_collector_name='datacollector')\n chart24 = ChartModule([{'Label': 'Average successful makespan Asap',\n 'Color': 'Green'}], data_collector_name='datacollector')\n chart25 = ChartModule([{'Label': 'Successful Cheap Orders', 'Color':\n 'Red'}], data_collector_name='datacollector')\n chart26 = ChartModule([{'Label': 'Successful Neutral Orders',\n 'Color': 'Red'}], data_collector_name='datacollector')\n chart27 = ChartModule([{'Label': 'Successful Asap Orders', 'Color':\n 'Red'}], data_collector_name='datacollector')\n noProposalOrdersChart = ChartModule([{'Label':\n 'Orders that received no proposals', 'Color': 'Red'}],\n data_collector_name='datacollector')\n server = ModularServer(MASArchitecture, [grid, chart, chart2,\n chart3, chart4, averageMessagesSentChart, chart5,\n noProposalOrdersChart, chart6, chart7, chart8, chart9, chart10,\n chart11, chart12, chart13, chart14, chart15, chart16, chart17,\n chart18, chart19, chart20, chart21, chart22, chart23, chart24,\n chart25, chart26, chart27], 'Festo-Fetch.ai', {'width': 60,\n 'height': 60, 'distributed': True, 'quantity': 10, 'splitSize':\n 1, 'newOrderProbability': 5, 'verbose': True, 'ordersPerWeek': \n 40, 'model_reporters_dict': {'Utilisation': metrics.\n machineUtilisation, 'Complete Orders': metrics.ordersComplete,\n 'Average Order Wait Time': metrics.averageOrderWaitTime,\n 'Successful Orders': metrics.successfulOrders,\n 'Total Messages Sent': metrics.totalMessagesSent,\n 'Average Messages Sent': metrics.averageMessagesSent,\n 'Late Orders': metrics.lateOrders, 'WIP Backlog': metrics.\n totalWIPSize, 'Max Messages Sent - Order': metrics.\n maxMessagesSentFromOrder, 'Max Messages Received - Order':\n metrics.maxMessagesReceivedByOrder,\n 'Max Messages Sent - Factory': metrics.\n maxMessagesSentFromFactory, 'Max Messages Received - Factory':\n metrics.maxMessagesReceivedByFactory, 'Outsourced Orders':\n metrics.outsourcedOrders, 'Orders that received no proposals':\n metrics.noProposalOrders,\n 'Average 
successful satisfaction score': metrics.\n averageSuccessfulSatisfactionScore,\n 'Average satisfaction score': metrics.averageSatisfactionScore,\n '% Cheap orders with cheap machines': metrics.\n cheapOrdersWithCheapMachines,\n '% Asap orders with fast machines': metrics.\n asapOrdersWithFastMachines, 'Average successful price': metrics\n .averageSuccessfulOrderPrice, 'Average successful price Cheap':\n metrics.averageSuccessfulOrderPriceCheap,\n 'Average successful price Neutral': metrics.\n averageSuccessfulOrderPriceNeutral,\n 'Average successful price Asap': metrics.\n averageSuccessfulOrderPriceAsap, 'Average successful makespan':\n metrics.averageSuccessfulOrderMakeSpan,\n 'Average successful makespan Cheap': metrics.\n averageSuccessfulOrderMakespanCheap,\n 'Average successful makespan Neutral': metrics.\n averageSuccessfulOrderMakespanNeutral,\n 'Average successful makespan Asap': metrics.\n averageSuccessfulOrderMakespanAsap, 'Successful Cheap Orders':\n metrics.percentageOfSuccessfulASAPOrders,\n 'Successful Neutral Orders': metrics.\n percentageOfSuccessfulCheapOrders, 'Successful Asap Orders':\n metrics.percentageOfSuccessfulNeutralOrders}})\n server.port = 8521\n server.launch()\n",
"step-4": "from mesa.visualization.modules import CanvasGrid\nfrom mesa.visualization.ModularVisualization import ModularServer\nfrom mesa.visualization.modules import ChartModule\nfrom mesa.batchrunner import BatchRunner\nfrom agentPortrayal import agent_portrayal\nimport metrics\nfrom matplotlib import pyplot as plt\nfrom ArchitectureModel import MASArchitecture\nimport os\nimport random\nimport sys\nrunBatch = True\narchitecture = 'Inter-Firm'\nsaveResults = True\nif __name__ == '__main__':\n dir_path = os.path.dirname(os.path.realpath(__file__))\n if runBatch:\n fixed_params = {'width': 60, 'height': 60, 'splitSize': 1,\n 'distributed': True, 'verbose': False, 'searchSize': 1,\n 'batchRun': True}\n variable_params = {'quantity': [10, 20, 50, 80, 100, 120, 150],\n 'ordersPerWeek': [1, 5, 20, 40, 80, 120]}\n batch_run = BatchRunner(MASArchitecture, variable_params,\n fixed_params, iterations=10, max_steps=800, model_reporters={\n 'Utilisation': metrics.machineUtilisation, 'CompleteOrders':\n metrics.ordersComplete, 'AverageOrderWaitTime': metrics.\n averageOrderWaitTime, 'TotalMessagesSent': metrics.\n totalMessagesSent, 'AverageMessagesSent': metrics.\n averageMessagesSent, 'SuccessfulOrders': metrics.\n successfulOrders, 'noProposalOrders': metrics.noProposalOrders,\n 'OutsourcedOrders': metrics.outsourcedOrders, 'LateOrders':\n metrics.lateOrders, 'WIPBacklog': metrics.totalWIPSize,\n 'MaxMessagesSentOrder': metrics.maxMessagesSentFromOrder,\n 'MaxMessagesReceivedOrder': metrics.maxMessagesReceivedByOrder,\n 'MaxMessagesSentFactory': metrics.maxMessagesSentFromFactory,\n 'MaxMessagesReceivedFactory': metrics.\n maxMessagesReceivedByFactory, 'AverageSatisfactionScore':\n metrics.averageSatisfactionScore,\n 'AverageSuccessfulSatisfactionScore': metrics.\n averageSuccessfulSatisfactionScore,\n 'CheapOrdersWithCheapMachines': metrics.\n cheapOrdersWithCheapMachines, 'AsapOrdersWithFastMachines':\n metrics.asapOrdersWithFastMachines, 'AverageSuccessfulPrice':\n 
metrics.averageSuccessfulOrderPrice,\n 'AverageSuccessfulOrderPriceCheap': metrics.\n averageSuccessfulOrderPriceCheap,\n 'AverageSuccessfulOrderPriceNeutral': metrics.\n averageSuccessfulOrderPriceNeutral,\n 'AverageSuccessfulOrderPriceAsap': metrics.\n averageSuccessfulOrderPriceAsap, 'AverageSuccessfulMakespan':\n metrics.averageSuccessfulOrderMakeSpan,\n 'AverageSuccessfulOrderMakespanCheap': metrics.\n averageSuccessfulOrderMakespanCheap,\n 'AverageSuccessfulOrderMakespanNeutral': metrics.\n averageSuccessfulOrderMakespanNeutral,\n 'AverageSuccessfulOrderMakespanAsap': metrics.\n averageSuccessfulOrderMakespanAsap, 'SuccessfulAsapOrders':\n metrics.percentageOfSuccessfulASAPOrders,\n 'SuccessfulCheapOrders': metrics.\n percentageOfSuccessfulCheapOrders, 'SuccessfulNeutralOrders':\n metrics.percentageOfSuccessfulNeutralOrders}, agent_reporters={\n 'id': 'unique_id'})\n batch_run.run_all()\n model_data = batch_run.get_model_vars_dataframe()\n agent_data = batch_run.get_agent_vars_dataframe()\n if saveResults:\n number = 0\n while os.path.exists('{}/results/test_{}'.format(dir_path, number)\n ) == True:\n number += 1\n os.makedirs('{}/results/test_{}'.format(dir_path, number))\n model_data.to_pickle('{}/results/test_{}/model_data.pkl'.format\n (dir_path, number))\n agent_data.to_pickle('{}/results/test_{}/agent_data.pkl'.format\n (dir_path, number))\n else:\n grid = CanvasGrid(agent_portrayal, 60, 60, 600, 600)\n chart = ChartModule([{'Label': 'Utilisation', 'Color': 'Black'}],\n data_collector_name='datacollector')\n chart2 = ChartModule([{'Label': 'Complete Orders', 'Color': 'Black'\n }], data_collector_name='datacollector')\n chart3 = ChartModule([{'Label': 'Average Order Wait Time', 'Color':\n 'Red'}], data_collector_name='datacollector')\n chart4 = ChartModule([{'Label': 'Total Messages Sent', 'Color':\n 'Red'}], data_collector_name='datacollector')\n averageMessagesSentChart = ChartModule([{'Label':\n 'Average Messages Sent', 'Color': 'Red'}], 
data_collector_name=\n 'datacollector')\n chart5 = ChartModule([{'Label': 'Successful Orders', 'Color':\n 'Green'}], data_collector_name='datacollector')\n chart6 = ChartModule([{'Label': 'Outsourced Orders', 'Color':\n 'Blue'}], data_collector_name='datacollector')\n chart7 = ChartModule([{'Label': 'Late Orders', 'Color': 'Red'}],\n data_collector_name='datacollector')\n chart8 = ChartModule([{'Label': 'WIP Backlog', 'Color': 'Blue'}],\n data_collector_name='datacollector')\n chart9 = ChartModule([{'Label': 'Max Messages Sent - Order',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart10 = ChartModule([{'Label': 'Max Messages Received - Order',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart11 = ChartModule([{'Label': 'Max Messages Sent - Factory',\n 'Color': 'Red'}], data_collector_name='datacollector')\n chart12 = ChartModule([{'Label': 'Max Messages Received - Factory',\n 'Color': 'Red'}], data_collector_name='datacollector')\n chart13 = ChartModule([{'Label': 'Average satisfaction score',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart14 = ChartModule([{'Label':\n 'Average successful satisfaction score', 'Color': 'Blue'}],\n data_collector_name='datacollector')\n chart15 = ChartModule([{'Label':\n '% Cheap orders with cheap machines', 'Color': 'Green'}],\n data_collector_name='datacollector')\n chart16 = ChartModule([{'Label': '% Asap orders with fast machines',\n 'Color': 'Green'}], data_collector_name='datacollector')\n chart17 = ChartModule([{'Label': 'Average successful price',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart18 = ChartModule([{'Label': 'Average successful price Cheap',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart19 = ChartModule([{'Label': 'Average successful price Neutral',\n 'Color': 'Blue'}], data_collector_name='datacollector')\n chart20 = ChartModule([{'Label': 'Average successful price Asap',\n 'Color': 'Blue'}], 
data_collector_name='datacollector')\n chart21 = ChartModule([{'Label': 'Average successful makespan',\n 'Color': 'Green'}], data_collector_name='datacollector')\n chart22 = ChartModule([{'Label':\n 'Average successful makespan Cheap', 'Color': 'Green'}],\n data_collector_name='datacollector')\n chart23 = ChartModule([{'Label':\n 'Average successful makespan Neutral', 'Color': 'Green'}],\n data_collector_name='datacollector')\n chart24 = ChartModule([{'Label': 'Average successful makespan Asap',\n 'Color': 'Green'}], data_collector_name='datacollector')\n chart25 = ChartModule([{'Label': 'Successful Cheap Orders', 'Color':\n 'Red'}], data_collector_name='datacollector')\n chart26 = ChartModule([{'Label': 'Successful Neutral Orders',\n 'Color': 'Red'}], data_collector_name='datacollector')\n chart27 = ChartModule([{'Label': 'Successful Asap Orders', 'Color':\n 'Red'}], data_collector_name='datacollector')\n noProposalOrdersChart = ChartModule([{'Label':\n 'Orders that received no proposals', 'Color': 'Red'}],\n data_collector_name='datacollector')\n server = ModularServer(MASArchitecture, [grid, chart, chart2,\n chart3, chart4, averageMessagesSentChart, chart5,\n noProposalOrdersChart, chart6, chart7, chart8, chart9, chart10,\n chart11, chart12, chart13, chart14, chart15, chart16, chart17,\n chart18, chart19, chart20, chart21, chart22, chart23, chart24,\n chart25, chart26, chart27], 'Festo-Fetch.ai', {'width': 60,\n 'height': 60, 'distributed': True, 'quantity': 10, 'splitSize':\n 1, 'newOrderProbability': 5, 'verbose': True, 'ordersPerWeek': \n 40, 'model_reporters_dict': {'Utilisation': metrics.\n machineUtilisation, 'Complete Orders': metrics.ordersComplete,\n 'Average Order Wait Time': metrics.averageOrderWaitTime,\n 'Successful Orders': metrics.successfulOrders,\n 'Total Messages Sent': metrics.totalMessagesSent,\n 'Average Messages Sent': metrics.averageMessagesSent,\n 'Late Orders': metrics.lateOrders, 'WIP Backlog': metrics.\n totalWIPSize, 'Max Messages 
Sent - Order': metrics.\n maxMessagesSentFromOrder, 'Max Messages Received - Order':\n metrics.maxMessagesReceivedByOrder,\n 'Max Messages Sent - Factory': metrics.\n maxMessagesSentFromFactory, 'Max Messages Received - Factory':\n metrics.maxMessagesReceivedByFactory, 'Outsourced Orders':\n metrics.outsourcedOrders, 'Orders that received no proposals':\n metrics.noProposalOrders,\n 'Average successful satisfaction score': metrics.\n averageSuccessfulSatisfactionScore,\n 'Average satisfaction score': metrics.averageSatisfactionScore,\n '% Cheap orders with cheap machines': metrics.\n cheapOrdersWithCheapMachines,\n '% Asap orders with fast machines': metrics.\n asapOrdersWithFastMachines, 'Average successful price': metrics\n .averageSuccessfulOrderPrice, 'Average successful price Cheap':\n metrics.averageSuccessfulOrderPriceCheap,\n 'Average successful price Neutral': metrics.\n averageSuccessfulOrderPriceNeutral,\n 'Average successful price Asap': metrics.\n averageSuccessfulOrderPriceAsap, 'Average successful makespan':\n metrics.averageSuccessfulOrderMakeSpan,\n 'Average successful makespan Cheap': metrics.\n averageSuccessfulOrderMakespanCheap,\n 'Average successful makespan Neutral': metrics.\n averageSuccessfulOrderMakespanNeutral,\n 'Average successful makespan Asap': metrics.\n averageSuccessfulOrderMakespanAsap, 'Successful Cheap Orders':\n metrics.percentageOfSuccessfulASAPOrders,\n 'Successful Neutral Orders': metrics.\n percentageOfSuccessfulCheapOrders, 'Successful Asap Orders':\n metrics.percentageOfSuccessfulNeutralOrders}})\n server.port = 8521\n server.launch()\n",
"step-5": "from mesa.visualization.modules import CanvasGrid\nfrom mesa.visualization.ModularVisualization import ModularServer\nfrom mesa.visualization.modules import ChartModule\nfrom mesa.batchrunner import BatchRunner\nfrom agentPortrayal import agent_portrayal\nimport metrics\nfrom matplotlib import pyplot as plt\nfrom ArchitectureModel import MASArchitecture\nimport os\nimport random\nimport sys\n\nrunBatch = True\narchitecture = 'Inter-Firm'\nsaveResults = True\n\n\n\nif __name__ == '__main__':\n\n dir_path = os.path.dirname(os.path.realpath(__file__))\n \n\n if(runBatch):\n fixed_params = {'width': 60, 'height': 60,'splitSize':1,'distributed':True,'verbose':False,'searchSize':1,'batchRun':True}\n\n variable_params = {'quantity':[10,20,50,80,100,120,150],'ordersPerWeek':[1,5,20,40,80,120]}\n\n batch_run = BatchRunner(\n MASArchitecture,\n variable_params,\n fixed_params,\n iterations=10,\n max_steps=800,\n model_reporters={\n \"Utilisation\": metrics.machineUtilisation,\n \"CompleteOrders\": metrics.ordersComplete,\n 'AverageOrderWaitTime': metrics.averageOrderWaitTime,\n 'TotalMessagesSent': metrics.totalMessagesSent, \n 'AverageMessagesSent': metrics.averageMessagesSent, \n \"SuccessfulOrders\":metrics.successfulOrders,\n \"noProposalOrders\":metrics.noProposalOrders,\n 'OutsourcedOrders': metrics.outsourcedOrders,\n 'LateOrders':metrics.lateOrders,\n 'WIPBacklog':metrics.totalWIPSize, \n 'MaxMessagesSentOrder': metrics.maxMessagesSentFromOrder, \n 'MaxMessagesReceivedOrder': metrics.maxMessagesReceivedByOrder,\n 'MaxMessagesSentFactory': metrics.maxMessagesSentFromFactory, \n 'MaxMessagesReceivedFactory': metrics.maxMessagesReceivedByFactory,\n \n 'AverageSatisfactionScore':metrics.averageSatisfactionScore,\n 'AverageSuccessfulSatisfactionScore':metrics.averageSuccessfulSatisfactionScore,\n 'CheapOrdersWithCheapMachines':metrics.cheapOrdersWithCheapMachines,\n 'AsapOrdersWithFastMachines':metrics.asapOrdersWithFastMachines,\n \n 'AverageSuccessfulPrice': 
metrics.averageSuccessfulOrderPrice,\n 'AverageSuccessfulOrderPriceCheap':metrics.averageSuccessfulOrderPriceCheap,\n 'AverageSuccessfulOrderPriceNeutral':metrics.averageSuccessfulOrderPriceNeutral,\n 'AverageSuccessfulOrderPriceAsap':metrics.averageSuccessfulOrderPriceAsap,\n \n 'AverageSuccessfulMakespan': metrics.averageSuccessfulOrderMakeSpan,\n 'AverageSuccessfulOrderMakespanCheap':metrics.averageSuccessfulOrderMakespanCheap,\n 'AverageSuccessfulOrderMakespanNeutral':metrics.averageSuccessfulOrderMakespanNeutral,\n 'AverageSuccessfulOrderMakespanAsap':metrics.averageSuccessfulOrderMakespanAsap,\n\n 'SuccessfulAsapOrders':metrics.percentageOfSuccessfulASAPOrders,\n 'SuccessfulCheapOrders':metrics.percentageOfSuccessfulCheapOrders,\n 'SuccessfulNeutralOrders':metrics.percentageOfSuccessfulNeutralOrders\n },\n agent_reporters={\n 'id':'unique_id',\n # # TODO: add in other agent reports that you would like to use\n }\n )\n\n batch_run.run_all()\n\n model_data = batch_run.get_model_vars_dataframe()\n agent_data = batch_run.get_agent_vars_dataframe()\n\n \n # Save results\n if(saveResults):\n number = 0\n ### CHANGE PATH TO WHERE YOU WANT RESULTS TO BE SAVED\n while (os.path.exists('{}/results/test_{}'.format(dir_path,number)) == True):\n number += 1\n\n # TODO: maybe make a text file that describes the test that has been run\n os.makedirs(\n '{}/results/test_{}'.format(dir_path,number))\n\n model_data.to_pickle(\n '{}/results/test_{}/model_data.pkl'.format(dir_path,number))\n agent_data.to_pickle(\n '{}/results/test_{}/agent_data.pkl'.format(dir_path,number))\n\n \n \n else:\n # TODO: rename all of these\n grid = CanvasGrid(agent_portrayal, 60, 60, 600, 600)\n chart = ChartModule([{'Label': 'Utilisation', \"Color\": 'Black'}],data_collector_name='datacollector')\n chart2 = ChartModule([{'Label': 'Complete Orders', 'Color': 'Black'}], data_collector_name='datacollector')\n chart3 = ChartModule([{'Label': 'Average Order Wait Time','Color': 'Red'}], 
data_collector_name='datacollector')\n chart4 = ChartModule([{'Label': 'Total Messages Sent','Color': 'Red'}], data_collector_name='datacollector')\n averageMessagesSentChart = ChartModule([{'Label': 'Average Messages Sent','Color': 'Red'}], data_collector_name='datacollector')\n chart5 = ChartModule([{'Label': 'Successful Orders','Color': 'Green'}], data_collector_name='datacollector')\n chart6 = ChartModule([{'Label': 'Outsourced Orders','Color': 'Blue'}], data_collector_name='datacollector')\n chart7 = ChartModule([{'Label': 'Late Orders','Color': 'Red'}], data_collector_name='datacollector')\n chart8 = ChartModule([{'Label': 'WIP Backlog','Color': 'Blue'}], data_collector_name='datacollector')\n chart9 = ChartModule([{'Label': 'Max Messages Sent - Order','Color': 'Blue'}], data_collector_name='datacollector')\n chart10 = ChartModule([{'Label': 'Max Messages Received - Order','Color': 'Blue'}], data_collector_name='datacollector')\n chart11 = ChartModule([{'Label': 'Max Messages Sent - Factory','Color': 'Red'}], data_collector_name='datacollector')\n chart12 = ChartModule([{'Label': 'Max Messages Received - Factory','Color': 'Red'}], data_collector_name='datacollector')\n \n \n\n chart13 = ChartModule([{'Label': 'Average satisfaction score','Color': 'Blue'}], data_collector_name='datacollector')\n chart14 = ChartModule([{'Label': 'Average successful satisfaction score','Color': 'Blue'}], data_collector_name='datacollector')\n chart15 = ChartModule([{'Label': '% Cheap orders with cheap machines','Color': 'Green'}], data_collector_name='datacollector')\n chart16 = ChartModule([{'Label': '% Asap orders with fast machines','Color': 'Green'}], data_collector_name='datacollector')\n\n chart17 = ChartModule([{'Label': 'Average successful price','Color': 'Blue'}], data_collector_name='datacollector')\n chart18 = ChartModule([{'Label': 'Average successful price Cheap','Color': 'Blue'}], data_collector_name='datacollector')\n chart19 = ChartModule([{'Label': 'Average 
successful price Neutral','Color': 'Blue'}], data_collector_name='datacollector')\n chart20 = ChartModule([{'Label': 'Average successful price Asap','Color': 'Blue'}], data_collector_name='datacollector')\n\n chart21 = ChartModule([{'Label': 'Average successful makespan','Color': 'Green'}], data_collector_name='datacollector')\n chart22 = ChartModule([{'Label': 'Average successful makespan Cheap','Color': 'Green'}], data_collector_name='datacollector')\n chart23 = ChartModule([{'Label': 'Average successful makespan Neutral','Color': 'Green'}], data_collector_name='datacollector')\n chart24 = ChartModule([{'Label': 'Average successful makespan Asap','Color': 'Green'}], data_collector_name='datacollector')\n\n chart25 = ChartModule([{'Label': 'Successful Cheap Orders','Color': 'Red'}], data_collector_name='datacollector')\n chart26 = ChartModule([{'Label': 'Successful Neutral Orders','Color': 'Red'}], data_collector_name='datacollector')\n chart27 = ChartModule([{'Label': 'Successful Asap Orders','Color': 'Red'}], data_collector_name='datacollector')\n noProposalOrdersChart = ChartModule([{'Label': 'Orders that received no proposals','Color': 'Red'}], data_collector_name='datacollector')\n\n\n\n \n\n \n server = ModularServer(MASArchitecture,\n [grid,\n chart,\n chart2,\n chart3,\n chart4,\n averageMessagesSentChart,\n chart5, \n noProposalOrdersChart,\n chart6,\n chart7, \n chart8, chart9, chart10,chart11, chart12,\n chart13,chart14,\n chart15,\n chart16,chart17,\n chart18, chart19, chart20,chart21,chart22,chart23,chart24,chart25,chart26,chart27\n ],\n 'Festo-Fetch.ai',\n\n {'width': 60, 'height': 60, 'distributed':True,'quantity':10,'splitSize':1,'newOrderProbability':5,'verbose':True,'ordersPerWeek':40,\n 'model_reporters_dict': {\n \"Utilisation\": metrics.machineUtilisation,\n \"Complete Orders\": metrics.ordersComplete,\n 'Average Order Wait Time': metrics.averageOrderWaitTime, \n \"Successful Orders\":metrics.successfulOrders,\n 'Total Messages Sent': 
metrics.totalMessagesSent, \n 'Average Messages Sent': metrics.averageMessagesSent, \n 'Late Orders':metrics.lateOrders,\n 'WIP Backlog':metrics.totalWIPSize, \n 'Max Messages Sent - Order': metrics.maxMessagesSentFromOrder, \n 'Max Messages Received - Order': metrics.maxMessagesReceivedByOrder,\n 'Max Messages Sent - Factory': metrics.maxMessagesSentFromFactory, \n 'Max Messages Received - Factory': metrics.maxMessagesReceivedByFactory,\n 'Outsourced Orders': metrics.outsourcedOrders,\n 'Orders that received no proposals':metrics.noProposalOrders,\n \n 'Average successful satisfaction score':metrics.averageSuccessfulSatisfactionScore,\n 'Average satisfaction score':metrics.averageSatisfactionScore,\n '% Cheap orders with cheap machines':metrics.cheapOrdersWithCheapMachines,\n '% Asap orders with fast machines':metrics.asapOrdersWithFastMachines,\n\n 'Average successful price': metrics.averageSuccessfulOrderPrice,\n\n 'Average successful price Cheap':metrics.averageSuccessfulOrderPriceCheap,\n 'Average successful price Neutral':metrics.averageSuccessfulOrderPriceNeutral,\n 'Average successful price Asap':metrics.averageSuccessfulOrderPriceAsap,\n \n 'Average successful makespan': metrics.averageSuccessfulOrderMakeSpan,\n\n 'Average successful makespan Cheap':metrics.averageSuccessfulOrderMakespanCheap,\n 'Average successful makespan Neutral':metrics.averageSuccessfulOrderMakespanNeutral,\n 'Average successful makespan Asap':metrics.averageSuccessfulOrderMakespanAsap,\n \n 'Successful Cheap Orders':metrics.percentageOfSuccessfulASAPOrders,\n 'Successful Neutral Orders':metrics.percentageOfSuccessfulCheapOrders,\n 'Successful Asap Orders':metrics.percentageOfSuccessfulNeutralOrders\n\n }})\n\n server.port = 8521\n server.launch()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
conf = {'PROJECT': 'WCCIA', 'NAS_FOLDER':
'Q:\\GROUPS\\CORP_JGS_DSE\\ATI\\quotations', 'DB_SERVER': '10.0.36.129',
'DB_PORT': '34000/'}
|
flexible
|
{
"blob_id": "fbce185671267bd70cf7b91696867b72dfcc8d5b",
"index": 1585,
"step-1": "<mask token>\n",
"step-2": "conf = {'PROJECT': 'WCCIA', 'NAS_FOLDER':\n 'Q:\\\\GROUPS\\\\CORP_JGS_DSE\\\\ATI\\\\quotations', 'DB_SERVER': '10.0.36.129',\n 'DB_PORT': '34000/'}\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import abc
try:
import cPickle as pickle
except ImportError:
import pickle
from typing import *
# Generic type variable: the python object type a given serializer handles.
T = TypeVar('T')
class BaseSerializer(Generic[T]):
    """
    The serializer is responsible for converting complex python data types
    into primitive types that can be sent over zmq ports via msgpack.
    """
    # Used within the `MultiSerializer` to embed which serializer to use for
    # round-trip data serialization.
    signature = None  # type: str

    @abc.abstractmethod
    def serialize(self, data):
        """
        Serialize a python object to transport over zmq.

        Parameters
        ----------
        data : T

        Returns
        -------
        Any
        """
        raise NotImplementedError

    @abc.abstractmethod
    def deserialize(self, data):
        """
        Deserialize a python object. Counter of `serialize`.

        Parameters
        ----------
        data : Any

        Returns
        -------
        T
        """
        # Fix: previously this *returned* the NotImplementedError class
        # instead of raising it, silently handing callers an exception type
        # as a value. Raise, matching `serialize` above.
        raise NotImplementedError
class MultiSerializer(BaseSerializer):
    """
    Serializer with multiple sub-serializers that can register methods to
    claim certain python objects.

    All serialized objects (besides lists, tuples, sets, dicts) are
    represented as a tuple of (serializer.signature, serialized_value). This
    is so data can be properly decoded on the remote side.

    Register new sub-serializers using the register decorator:

        @MultiSerializer.register(lambda x: isinstance(x, MyCls))
        class MyClsSerializer(BaseSerializer):
            ...
    """

    # Registered (claim_func, serializer_cls) pairs, shared class-wide.
    _registered = []

    @classmethod
    def register(cls, claim_func):
        """
        Decorator for registering a callable to serialize certain types.

        Parameters
        ----------
        claim_func : Callable[[Any], bool]

        Returns
        -------
        Callable[[T], T]
        """
        def _deco(serializer):
            # Insert at the front so later (more specific) registrations
            # take precedence over earlier catch-all ones.
            cls._registered.insert(0, (claim_func, serializer))
            return serializer
        return _deco

    def __init__(self):
        self._serializers = {}  # type: Dict[str, BaseSerializer]
        self._claims = []  # type: List[Tuple[str, Callable[[Any], bool]]]
        for claim_func, serializerCls in self._registered:
            assert serializerCls.signature is not None, \
                'Populate the serializer.signature attribute.'
            assert serializerCls.signature not in self._serializers, \
                'Existing serializer with signature ' \
                '{!r}'.format(serializerCls.signature)
            serializer = serializerCls()
            self._claims.append((serializerCls.signature, claim_func))
            self._serializers[serializerCls.signature] = serializer

    def serialize(self, data):
        """
        Recursively serialize `data`, dispatching to the first registered
        sub-serializer whose claim function accepts it.

        Parameters
        ----------
        data : Any

        Returns
        -------
        Any

        Raises
        ------
        ValueError
            If no registered serializer claims `data`.
        """
        # Containers are walked recursively so every leaf value carries its
        # own (signature, value) envelope.
        if isinstance(data, (list, tuple, set)):
            return type(data)(self.serialize(x) for x in data)
        elif isinstance(data, MutableMapping):
            return type(data)({self.serialize(k): self.serialize(v)
                               for k, v in data.items()})
        for name, claim_func in self._claims:
            if claim_func(data):
                return name, self._serializers[name].serialize(data)
        raise ValueError('No serializer found for {!r}'.format(data))

    def deserialize(self, payload):
        """
        Deserialize a payload produced by `serialize`.

        Parameters
        ----------
        payload : Any

        Returns
        -------
        Any

        Raises
        ------
        NotImplementedError
            If `payload` is neither a known container nor a
            (signature, data) envelope.
        """
        if not payload:
            return payload
        # A 2-item sequence whose head is a known signature is a serialized
        # leaf value. (Membership test directly on the dict; the previous
        # inner `if signature not in self._serializers` re-check was
        # unreachable given this guard and has been removed.)
        if isinstance(payload, (tuple, list)) \
                and len(payload) == 2 \
                and payload[0] in self._serializers:
            signature, data = payload
            return self._serializers[signature].deserialize(data)
        if isinstance(payload, (list, tuple, set)):
            return type(payload)(self.deserialize(x) for x in payload)
        elif isinstance(payload, MutableMapping):
            return type(payload)({self.deserialize(k): self.deserialize(v)
                                  for k, v in payload.items()})
        else:
            raise NotImplementedError
@MultiSerializer.register(lambda x: True)
class PickleSerializer(BaseSerializer):
    """Fallback serializer that pickles arbitrary python objects.

    Registered with a catch-all claim function, so it handles any value no
    more specific serializer has claimed.
    """

    signature = '_p'

    def serialize(self, data):
        """Pickle `data` using the highest protocol available (-1)."""
        return pickle.dumps(data, protocol=-1)

    def deserialize(self, data):
        """Restore an object previously produced by `serialize`."""
        return pickle.loads(data)
@MultiSerializer.register(lambda x: isinstance(x, Exception))
class ExceptionSerializer(BaseSerializer):
    """Round-trips Exception instances via pickle.

    Kept separate from the generic pickler so exceptions carry their own
    signature and can be recognized on the remote side.
    """

    signature = '_e'

    def serialize(self, data):
        """Pickle the exception using the highest protocol available (-1)."""
        return pickle.dumps(data, protocol=-1)

    def deserialize(self, data):
        """Rebuild the exception previously produced by `serialize`."""
        return pickle.loads(data)
# Primitive types passed through untouched. `unicode` only exists on
# Python 2; referencing it unguarded in the claim lambda raised NameError
# under Python 3 the first time a basic value was serialized. Resolve the
# tuple once at import time instead.
try:
    _BASIC_TYPES = (str, unicode, bytes, int, float)  # noqa: F821 (py2)
except NameError:  # Python 3: str is already unicode.
    _BASIC_TYPES = (str, bytes, int, float)


@MultiSerializer.register(lambda x: isinstance(x, _BASIC_TYPES))
class BasicSerializer(BaseSerializer):
    """
    Basic serialization of simple python types.

    These values are already transport-friendly, so serialization is the
    identity function in both directions.
    """
    signature = '_b'

    def serialize(self, data):
        """Return `data` unchanged."""
        return data

    def deserialize(self, data):
        """Return `data` unchanged."""
        return data
class Encoder(object):
    """
    Handles how args and kwargs are encoded over zmq ports.

    By default zerorpc does not support passing kwargs to remote methods.
    This class works around that: args and kwargs are combined into a single
    serialized payload that is deconstructed again on the remote side.
    """

    _default_serializer = PickleSerializer

    def __init__(self, serializer=None):
        # Fall back to the default (pickle-based) serializer when none is
        # supplied by the caller.
        self.serializer = (serializer if serializer is not None
                           else self._default_serializer())

    def encode(self, *args, **kwargs):
        """
        Encode args and kwargs as a single serialized payload.

        Parameters
        ----------
        args : *Any
        kwargs : **Any

        Returns
        -------
        Tuple[Tuple[Any, ...], Dict[Any, Any]]
        """
        serialize = self.serializer.serialize
        return serialize(args), serialize(kwargs)

    def decode(self, *payload):
        """
        Decode encoded args and kwargs. Counter of `encode`.

        Parameters
        ----------
        payload : Tuple[Tuple[Any, ...], Dict[Any, Any]]

        Returns
        -------
        Tuple[Tuple[Any, ...], Dict[Any, Any]]
        """
        # An empty payload decodes to "no args, no kwargs".
        if not payload:
            return (), {}
        args, kwargs = payload
        deserialize = self.serializer.deserialize
        return deserialize(args), deserialize(kwargs)
|
normal
|
{
"blob_id": "94f5fa411f8a41985caaf4eb7ab1cb4e45439405",
"index": 1524,
"step-1": "<mask token>\n\n\n@MultiSerializer.register(lambda x: True)\nclass PickleSerializer(BaseSerializer):\n <mask token>\n <mask token>\n <mask token>\n\n def deserialize(self, data):\n return pickle.loads(data)\n\n\n@MultiSerializer.register(lambda x: isinstance(x, Exception))\nclass ExceptionSerializer(BaseSerializer):\n \"\"\"\n Exception serialization.\n \"\"\"\n signature = '_e'\n\n def serialize(self, data):\n return pickle.dumps(data, -1)\n\n def deserialize(self, data):\n return pickle.loads(data)\n\n\n@MultiSerializer.register(lambda x: isinstance(x, (str, unicode, bytes, int,\n float)))\nclass BasicSerializer(BaseSerializer):\n \"\"\"\n Basic serialization of simple python types.\n \"\"\"\n signature = '_b'\n\n def serialize(self, data):\n return data\n\n def deserialize(self, data):\n return data\n\n\nclass Encoder(object):\n \"\"\"\n Handles how args and kwargs are encoded over zmq ports.\n\n By default zerorpc does not support passing kwargs to remote methods.\n This class is used to fix that so args are kwargs are combined into a\n single args payload that is then deconstructed on the remote side.\n \"\"\"\n _default_serializer = PickleSerializer\n\n def __init__(self, serializer=None):\n if serializer is None:\n serializer = self._default_serializer()\n self.serializer = serializer\n\n def encode(self, *args, **kwargs):\n \"\"\"\n Encode args and kwargs as a single serialized payload.\n\n Parameters\n ----------\n args : *Any\n kwargs : **Any\n\n Returns\n -------\n Tuple[Tuple[Any, ...], Dict[Any, Any]]\n \"\"\"\n return self.serializer.serialize(args), self.serializer.serialize(\n kwargs)\n\n def decode(self, *payload):\n \"\"\"\n Decode encoded args and kwargs.\n\n Parameters\n ----------\n payload : Tuple[Tuple[Any, ...], Dict[Any, Any]]\n\n Returns\n -------\n Tuple[Tuple[Any, ...], Dict[Any, Any]]\n \"\"\"\n if not payload:\n return (), {}\n args, kwargs = payload\n return self.serializer.deserialize(args), self.serializer.deserialize(\n 
kwargs)\n",
"step-2": "<mask token>\n\n\nclass MultiSerializer(BaseSerializer):\n <mask token>\n <mask token>\n\n @classmethod\n def register(cls, claim_func):\n \"\"\"\n Decorator for registering a callable to serialize certain types.\n\n Parameters\n ----------\n claim_func : Callable[Any, bool]\n\n Returns\n -------\n Callable[[T], T]\n \"\"\"\n\n def _deco(serializer):\n cls._registered.insert(0, (claim_func, serializer))\n return serializer\n return _deco\n\n def __init__(self):\n self._serializers = {}\n self._claims = []\n for claim_func, serializerCls in self._registered:\n assert serializerCls.signature is not None, 'Populate the serializer.signature attribute.'\n assert serializerCls.signature not in self._serializers, 'Existing serializer with signature {!r}'.format(\n serializerCls.signature)\n serializer = serializerCls()\n self._claims.append((serializerCls.signature, claim_func))\n self._serializers[serializerCls.signature] = serializer\n\n def serialize(self, data):\n if isinstance(data, (list, tuple, set)):\n return type(data)(self.serialize(x) for x in data)\n elif isinstance(data, MutableMapping):\n return type(data)({self.serialize(k): self.serialize(v) for k,\n v in data.items()})\n for name, claim_func in self._claims:\n if claim_func(data):\n return name, self._serializers[name].serialize(data)\n raise ValueError('No serializer found for {!r}'.format(data))\n\n def deserialize(self, payload):\n if not payload:\n return payload\n if isinstance(payload, (tuple, list)) and len(payload\n ) == 2 and payload[0] in self._serializers.keys():\n signature, data = payload\n if signature not in self._serializers:\n raise ValueError('No deserializer found for {!r}'.format(data))\n return self._serializers[signature].deserialize(data)\n if isinstance(payload, (list, tuple, set)):\n return type(payload)(self.deserialize(x) for x in payload)\n elif isinstance(payload, MutableMapping):\n return type(payload)({self.deserialize(k): self.deserialize(v) for\n k, v in 
payload.items()})\n else:\n raise NotImplementedError\n\n\n@MultiSerializer.register(lambda x: True)\nclass PickleSerializer(BaseSerializer):\n \"\"\"\n Pickle serialization of python objects over the zmq ports.\n \"\"\"\n signature = '_p'\n\n def serialize(self, data):\n return pickle.dumps(data, -1)\n\n def deserialize(self, data):\n return pickle.loads(data)\n\n\n@MultiSerializer.register(lambda x: isinstance(x, Exception))\nclass ExceptionSerializer(BaseSerializer):\n \"\"\"\n Exception serialization.\n \"\"\"\n signature = '_e'\n\n def serialize(self, data):\n return pickle.dumps(data, -1)\n\n def deserialize(self, data):\n return pickle.loads(data)\n\n\n@MultiSerializer.register(lambda x: isinstance(x, (str, unicode, bytes, int,\n float)))\nclass BasicSerializer(BaseSerializer):\n \"\"\"\n Basic serialization of simple python types.\n \"\"\"\n signature = '_b'\n\n def serialize(self, data):\n return data\n\n def deserialize(self, data):\n return data\n\n\nclass Encoder(object):\n \"\"\"\n Handles how args and kwargs are encoded over zmq ports.\n\n By default zerorpc does not support passing kwargs to remote methods.\n This class is used to fix that so args are kwargs are combined into a\n single args payload that is then deconstructed on the remote side.\n \"\"\"\n _default_serializer = PickleSerializer\n\n def __init__(self, serializer=None):\n if serializer is None:\n serializer = self._default_serializer()\n self.serializer = serializer\n\n def encode(self, *args, **kwargs):\n \"\"\"\n Encode args and kwargs as a single serialized payload.\n\n Parameters\n ----------\n args : *Any\n kwargs : **Any\n\n Returns\n -------\n Tuple[Tuple[Any, ...], Dict[Any, Any]]\n \"\"\"\n return self.serializer.serialize(args), self.serializer.serialize(\n kwargs)\n\n def decode(self, *payload):\n \"\"\"\n Decode encoded args and kwargs.\n\n Parameters\n ----------\n payload : Tuple[Tuple[Any, ...], Dict[Any, Any]]\n\n Returns\n -------\n Tuple[Tuple[Any, ...], Dict[Any, 
Any]]\n \"\"\"\n if not payload:\n return (), {}\n args, kwargs = payload\n return self.serializer.deserialize(args), self.serializer.deserialize(\n kwargs)\n",
"step-3": "<mask token>\n\n\nclass BaseSerializer(Generic[T]):\n <mask token>\n signature = None\n\n @abc.abstractmethod\n def serialize(self, data):\n \"\"\"\n Serialize a python object to transport over zmq.\n\n Parameters\n ----------\n data : T\n\n Returns\n -------\n Any\n \"\"\"\n raise NotImplementedError\n\n @abc.abstractmethod\n def deserialize(self, data):\n \"\"\"\n Deserialize a python object. Counter of `serialize`.\n\n Parameters\n ----------\n data : Any\n\n Returns\n -------\n T\n \"\"\"\n return NotImplementedError\n\n\nclass MultiSerializer(BaseSerializer):\n \"\"\"\n Serializer with multple sub-serializers that can register methods to claim\n certain python objects.\n\n All serialized objects (besides list, tuples, sets, dicts) are represented\n as a tuple of (serializer.signature, serialized_value). This is so data\n can be properly decoded on the remote side.\n\n Register new sub-serializers using the register decorator:\n\n @MultiSerializer.register(lamba x: isinstance(x, MyCls))\n class MyClsSerializer(BaseSerializer):\n ...\n \"\"\"\n _registered = []\n\n @classmethod\n def register(cls, claim_func):\n \"\"\"\n Decorator for registering a callable to serialize certain types.\n\n Parameters\n ----------\n claim_func : Callable[Any, bool]\n\n Returns\n -------\n Callable[[T], T]\n \"\"\"\n\n def _deco(serializer):\n cls._registered.insert(0, (claim_func, serializer))\n return serializer\n return _deco\n\n def __init__(self):\n self._serializers = {}\n self._claims = []\n for claim_func, serializerCls in self._registered:\n assert serializerCls.signature is not None, 'Populate the serializer.signature attribute.'\n assert serializerCls.signature not in self._serializers, 'Existing serializer with signature {!r}'.format(\n serializerCls.signature)\n serializer = serializerCls()\n self._claims.append((serializerCls.signature, claim_func))\n self._serializers[serializerCls.signature] = serializer\n\n def serialize(self, data):\n if 
isinstance(data, (list, tuple, set)):\n return type(data)(self.serialize(x) for x in data)\n elif isinstance(data, MutableMapping):\n return type(data)({self.serialize(k): self.serialize(v) for k,\n v in data.items()})\n for name, claim_func in self._claims:\n if claim_func(data):\n return name, self._serializers[name].serialize(data)\n raise ValueError('No serializer found for {!r}'.format(data))\n\n def deserialize(self, payload):\n if not payload:\n return payload\n if isinstance(payload, (tuple, list)) and len(payload\n ) == 2 and payload[0] in self._serializers.keys():\n signature, data = payload\n if signature not in self._serializers:\n raise ValueError('No deserializer found for {!r}'.format(data))\n return self._serializers[signature].deserialize(data)\n if isinstance(payload, (list, tuple, set)):\n return type(payload)(self.deserialize(x) for x in payload)\n elif isinstance(payload, MutableMapping):\n return type(payload)({self.deserialize(k): self.deserialize(v) for\n k, v in payload.items()})\n else:\n raise NotImplementedError\n\n\n@MultiSerializer.register(lambda x: True)\nclass PickleSerializer(BaseSerializer):\n \"\"\"\n Pickle serialization of python objects over the zmq ports.\n \"\"\"\n signature = '_p'\n\n def serialize(self, data):\n return pickle.dumps(data, -1)\n\n def deserialize(self, data):\n return pickle.loads(data)\n\n\n@MultiSerializer.register(lambda x: isinstance(x, Exception))\nclass ExceptionSerializer(BaseSerializer):\n \"\"\"\n Exception serialization.\n \"\"\"\n signature = '_e'\n\n def serialize(self, data):\n return pickle.dumps(data, -1)\n\n def deserialize(self, data):\n return pickle.loads(data)\n\n\n@MultiSerializer.register(lambda x: isinstance(x, (str, unicode, bytes, int,\n float)))\nclass BasicSerializer(BaseSerializer):\n \"\"\"\n Basic serialization of simple python types.\n \"\"\"\n signature = '_b'\n\n def serialize(self, data):\n return data\n\n def deserialize(self, data):\n return data\n\n\nclass 
Encoder(object):\n \"\"\"\n Handles how args and kwargs are encoded over zmq ports.\n\n By default zerorpc does not support passing kwargs to remote methods.\n This class is used to fix that so args are kwargs are combined into a\n single args payload that is then deconstructed on the remote side.\n \"\"\"\n _default_serializer = PickleSerializer\n\n def __init__(self, serializer=None):\n if serializer is None:\n serializer = self._default_serializer()\n self.serializer = serializer\n\n def encode(self, *args, **kwargs):\n \"\"\"\n Encode args and kwargs as a single serialized payload.\n\n Parameters\n ----------\n args : *Any\n kwargs : **Any\n\n Returns\n -------\n Tuple[Tuple[Any, ...], Dict[Any, Any]]\n \"\"\"\n return self.serializer.serialize(args), self.serializer.serialize(\n kwargs)\n\n def decode(self, *payload):\n \"\"\"\n Decode encoded args and kwargs.\n\n Parameters\n ----------\n payload : Tuple[Tuple[Any, ...], Dict[Any, Any]]\n\n Returns\n -------\n Tuple[Tuple[Any, ...], Dict[Any, Any]]\n \"\"\"\n if not payload:\n return (), {}\n args, kwargs = payload\n return self.serializer.deserialize(args), self.serializer.deserialize(\n kwargs)\n",
"step-4": "<mask token>\n\n\nclass BaseSerializer(Generic[T]):\n \"\"\"\n The serializer is responsible for converting complex python data types\n into primitive types that can be sent over zmq ports via msgpack.\n \"\"\"\n signature = None\n\n @abc.abstractmethod\n def serialize(self, data):\n \"\"\"\n Serialize a python object to transport over zmq.\n\n Parameters\n ----------\n data : T\n\n Returns\n -------\n Any\n \"\"\"\n raise NotImplementedError\n\n @abc.abstractmethod\n def deserialize(self, data):\n \"\"\"\n Deserialize a python object. Counter of `serialize`.\n\n Parameters\n ----------\n data : Any\n\n Returns\n -------\n T\n \"\"\"\n return NotImplementedError\n\n\nclass MultiSerializer(BaseSerializer):\n \"\"\"\n Serializer with multple sub-serializers that can register methods to claim\n certain python objects.\n\n All serialized objects (besides list, tuples, sets, dicts) are represented\n as a tuple of (serializer.signature, serialized_value). This is so data\n can be properly decoded on the remote side.\n\n Register new sub-serializers using the register decorator:\n\n @MultiSerializer.register(lamba x: isinstance(x, MyCls))\n class MyClsSerializer(BaseSerializer):\n ...\n \"\"\"\n _registered = []\n\n @classmethod\n def register(cls, claim_func):\n \"\"\"\n Decorator for registering a callable to serialize certain types.\n\n Parameters\n ----------\n claim_func : Callable[Any, bool]\n\n Returns\n -------\n Callable[[T], T]\n \"\"\"\n\n def _deco(serializer):\n cls._registered.insert(0, (claim_func, serializer))\n return serializer\n return _deco\n\n def __init__(self):\n self._serializers = {}\n self._claims = []\n for claim_func, serializerCls in self._registered:\n assert serializerCls.signature is not None, 'Populate the serializer.signature attribute.'\n assert serializerCls.signature not in self._serializers, 'Existing serializer with signature {!r}'.format(\n serializerCls.signature)\n serializer = serializerCls()\n 
self._claims.append((serializerCls.signature, claim_func))\n self._serializers[serializerCls.signature] = serializer\n\n def serialize(self, data):\n if isinstance(data, (list, tuple, set)):\n return type(data)(self.serialize(x) for x in data)\n elif isinstance(data, MutableMapping):\n return type(data)({self.serialize(k): self.serialize(v) for k,\n v in data.items()})\n for name, claim_func in self._claims:\n if claim_func(data):\n return name, self._serializers[name].serialize(data)\n raise ValueError('No serializer found for {!r}'.format(data))\n\n def deserialize(self, payload):\n if not payload:\n return payload\n if isinstance(payload, (tuple, list)) and len(payload\n ) == 2 and payload[0] in self._serializers.keys():\n signature, data = payload\n if signature not in self._serializers:\n raise ValueError('No deserializer found for {!r}'.format(data))\n return self._serializers[signature].deserialize(data)\n if isinstance(payload, (list, tuple, set)):\n return type(payload)(self.deserialize(x) for x in payload)\n elif isinstance(payload, MutableMapping):\n return type(payload)({self.deserialize(k): self.deserialize(v) for\n k, v in payload.items()})\n else:\n raise NotImplementedError\n\n\n@MultiSerializer.register(lambda x: True)\nclass PickleSerializer(BaseSerializer):\n \"\"\"\n Pickle serialization of python objects over the zmq ports.\n \"\"\"\n signature = '_p'\n\n def serialize(self, data):\n return pickle.dumps(data, -1)\n\n def deserialize(self, data):\n return pickle.loads(data)\n\n\n@MultiSerializer.register(lambda x: isinstance(x, Exception))\nclass ExceptionSerializer(BaseSerializer):\n \"\"\"\n Exception serialization.\n \"\"\"\n signature = '_e'\n\n def serialize(self, data):\n return pickle.dumps(data, -1)\n\n def deserialize(self, data):\n return pickle.loads(data)\n\n\n@MultiSerializer.register(lambda x: isinstance(x, (str, unicode, bytes, int,\n float)))\nclass BasicSerializer(BaseSerializer):\n \"\"\"\n Basic serialization of simple python 
types.\n \"\"\"\n signature = '_b'\n\n def serialize(self, data):\n return data\n\n def deserialize(self, data):\n return data\n\n\nclass Encoder(object):\n \"\"\"\n Handles how args and kwargs are encoded over zmq ports.\n\n By default zerorpc does not support passing kwargs to remote methods.\n This class is used to fix that so args are kwargs are combined into a\n single args payload that is then deconstructed on the remote side.\n \"\"\"\n _default_serializer = PickleSerializer\n\n def __init__(self, serializer=None):\n if serializer is None:\n serializer = self._default_serializer()\n self.serializer = serializer\n\n def encode(self, *args, **kwargs):\n \"\"\"\n Encode args and kwargs as a single serialized payload.\n\n Parameters\n ----------\n args : *Any\n kwargs : **Any\n\n Returns\n -------\n Tuple[Tuple[Any, ...], Dict[Any, Any]]\n \"\"\"\n return self.serializer.serialize(args), self.serializer.serialize(\n kwargs)\n\n def decode(self, *payload):\n \"\"\"\n Decode encoded args and kwargs.\n\n Parameters\n ----------\n payload : Tuple[Tuple[Any, ...], Dict[Any, Any]]\n\n Returns\n -------\n Tuple[Tuple[Any, ...], Dict[Any, Any]]\n \"\"\"\n if not payload:\n return (), {}\n args, kwargs = payload\n return self.serializer.deserialize(args), self.serializer.deserialize(\n kwargs)\n",
"step-5": "import abc\n\ntry:\n import cPickle as pickle\nexcept ImportError:\n import pickle\n\nfrom typing import *\n\n\nT = TypeVar('T')\n\n\nclass BaseSerializer(Generic[T]):\n \"\"\"\n The serializer is responsible for converting complex python data types\n into primitive types that can be sent over zmq ports via msgpack.\n \"\"\"\n # Used within the `MultiSerializer` to embed which serializer to use for\n # round-trip data serialization.\n signature = None # type: str\n\n @abc.abstractmethod\n def serialize(self, data):\n \"\"\"\n Serialize a python object to transport over zmq.\n\n Parameters\n ----------\n data : T\n\n Returns\n -------\n Any\n \"\"\"\n raise NotImplementedError\n\n @abc.abstractmethod\n def deserialize(self, data):\n \"\"\"\n Deserialize a python object. Counter of `serialize`.\n\n Parameters\n ----------\n data : Any\n\n Returns\n -------\n T\n \"\"\"\n return NotImplementedError\n\n\nclass MultiSerializer(BaseSerializer):\n \"\"\"\n Serializer with multple sub-serializers that can register methods to claim\n certain python objects.\n\n All serialized objects (besides list, tuples, sets, dicts) are represented\n as a tuple of (serializer.signature, serialized_value). 
This is so data\n can be properly decoded on the remote side.\n\n Register new sub-serializers using the register decorator:\n\n @MultiSerializer.register(lamba x: isinstance(x, MyCls))\n class MyClsSerializer(BaseSerializer):\n ...\n \"\"\"\n\n _registered = []\n\n @classmethod\n def register(cls, claim_func):\n \"\"\"\n Decorator for registering a callable to serialize certain types.\n\n Parameters\n ----------\n claim_func : Callable[Any, bool]\n\n Returns\n -------\n Callable[[T], T]\n \"\"\"\n def _deco(serializer):\n cls._registered.insert(0, (claim_func, serializer))\n return serializer\n return _deco\n\n def __init__(self):\n self._serializers = {} # type: Dict[str, BaseSerializer]\n self._claims = [] # type: List[Tuple[str, Callable[[Any], bool]]]\n for claim_func, serializerCls in self._registered:\n assert serializerCls.signature is not None, \\\n 'Populate the serializer.signature attribute.'\n assert serializerCls.signature not in self._serializers, \\\n 'Existing serializer with signature ' \\\n '{!r}'.format(serializerCls.signature)\n serializer = serializerCls()\n self._claims.append((serializerCls.signature, claim_func))\n self._serializers[serializerCls.signature] = serializer\n\n def serialize(self, data):\n if isinstance(data, (list, tuple, set)):\n return type(data)(self.serialize(x) for x in data)\n elif isinstance(data, MutableMapping):\n return type(data)({self.serialize(k): self.serialize(v)\n for k, v in data.items()})\n for name, claim_func in self._claims:\n if claim_func(data):\n return name, self._serializers[name].serialize(data)\n raise ValueError('No serializer found for {!r}'.format(data))\n\n def deserialize(self, payload):\n if not payload:\n return payload\n if isinstance(payload, (tuple, list)) \\\n and len(payload) == 2 \\\n and payload[0] in self._serializers.keys():\n signature, data = payload\n if signature not in self._serializers:\n raise ValueError('No deserializer found for {!r}'.format(data))\n return 
self._serializers[signature].deserialize(data)\n if isinstance(payload, (list, tuple, set)):\n return type(payload)(self.deserialize(x) for x in payload)\n elif isinstance(payload, MutableMapping):\n return type(payload)({self.deserialize(k): self.deserialize(v)\n for k, v in payload.items()})\n else:\n raise NotImplementedError\n\n\n@MultiSerializer.register(lambda x: True)\nclass PickleSerializer(BaseSerializer):\n \"\"\"\n Pickle serialization of python objects over the zmq ports.\n \"\"\"\n signature = '_p'\n\n def serialize(self, data):\n return pickle.dumps(data, -1)\n\n def deserialize(self, data):\n return pickle.loads(data)\n\n\n@MultiSerializer.register(lambda x: isinstance(x, Exception))\nclass ExceptionSerializer(BaseSerializer):\n \"\"\"\n Exception serialization.\n \"\"\"\n signature = '_e'\n\n def serialize(self, data):\n return pickle.dumps(data, -1)\n\n def deserialize(self, data):\n return pickle.loads(data)\n\n\n@MultiSerializer.register(\n lambda x: isinstance(x, (str, unicode, bytes, int, float)))\nclass BasicSerializer(BaseSerializer):\n \"\"\"\n Basic serialization of simple python types.\n \"\"\"\n signature = '_b'\n\n def serialize(self, data):\n return data\n\n def deserialize(self, data):\n return data\n\n\nclass Encoder(object):\n \"\"\"\n Handles how args and kwargs are encoded over zmq ports.\n\n By default zerorpc does not support passing kwargs to remote methods.\n This class is used to fix that so args are kwargs are combined into a\n single args payload that is then deconstructed on the remote side.\n \"\"\"\n _default_serializer = PickleSerializer\n\n def __init__(self, serializer=None):\n if serializer is None:\n serializer = self._default_serializer()\n self.serializer = serializer\n\n def encode(self, *args, **kwargs):\n \"\"\"\n Encode args and kwargs as a single serialized payload.\n\n Parameters\n ----------\n args : *Any\n kwargs : **Any\n\n Returns\n -------\n Tuple[Tuple[Any, ...], Dict[Any, Any]]\n \"\"\"\n return 
self.serializer.serialize(args), \\\n self.serializer.serialize(kwargs)\n\n def decode(self, *payload):\n \"\"\"\n Decode encoded args and kwargs.\n\n Parameters\n ----------\n payload : Tuple[Tuple[Any, ...], Dict[Any, Any]]\n\n Returns\n -------\n Tuple[Tuple[Any, ...], Dict[Any, Any]]\n \"\"\"\n if not payload:\n return (), {}\n args, kwargs = payload\n return self.serializer.deserialize(args), \\\n self.serializer.deserialize(kwargs)\n",
"step-ids": [
18,
26,
32,
33,
37
]
}
|
[
18,
26,
32,
33,
37
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.