code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
__author__ = 'tcaruso'
# !/usr/bin/env python
# -*- coding: utf-8 -*-
import glob
import fnmatch
import os
import sys
import warnings
from shutil import rmtree
from setuptools import find_packages, setup, Command
from collections import namedtuple
# Locate pip's parse_requirements across pip versions:
# pip >= 10 moved it under pip._internal; older pip exposes it at pip.req.
# If neither import works, fail loudly with enough context to debug.
try:
    from pip._internal.req import parse_requirements
except ImportError:
    from pip.req import parse_requirements
except Exception:
    from pip import __version__ as __pip_version__
    # BUGFIX: sys.version_info contains ints, so ''.join(sys.version_info)
    # raised TypeError and masked the real error; stringify each component.
    msg = """Sorry, could not install due to a pip import error. Please open an issue on the repo
    with this message and the error so it can be addressed.
    pip version: {}
    python version: {}
    """.format(__pip_version__, '.'.join(map(str, sys.version_info)))
    raise EnvironmentError(msg)
# Absolute directory containing this setup.py; used to locate dist/ for cleanup.
here = os.path.abspath(os.path.dirname(__file__))
# ------------------------------------------------
# Package meta-data.
# PACKAGE_NAME is the name of the package directory and the import path. If you use my_package then when installed, you
# will import the package like `import my_package`.
PACKAGE_NAME = 'socket_wait'
DESCRIPTION = 'Listen on a port until a connection is received.'
URL = 'https://github.com/tomplex/socket_wait'
EMAIL = 'carusot42@gmail.com'
AUTHOR = 'Tom Caruso'
# The minimum Python version required, as a comparable (major, minor, micro) tuple.
REQUIRES_PYTHON = (2, 7, 0)
# PYPI_NAME is the distribution name on PyPI. Here it is simply PACKAGE_NAME
# with no prefix applied (the template's collision-avoidance prefix is unused).
PYPI_NAME = '{}'.format(PACKAGE_NAME)
# NOTE(review): the original template comment about a requirements file was
# truncated; no requirements file is read anywhere in this script.
# ------------------------------------------------
# Check Python version we're installing against. Bail if it's not correct. This will blow up both when we build the
# package and when someone tries to install it.
if sys.version_info < REQUIRES_PYTHON:
    # Raise if we're trying to install on an unsupported Python version
    raise Exception("Package {} requires python >= {}.".format(PYPI_NAME, '.'.join(map(str, REQUIRES_PYTHON))))
# Re-bind REQUIRES_PYTHON as a specifier string (e.g. '>=2.7.0').
REQUIRES_PYTHON = '>=' + '.'.join(map(str, REQUIRES_PYTHON))
# ------------------------------------------------
# Version gathering: import __version__ from the package itself so the version
# number lives in exactly one place.
about = {}
from socket_wait import __version__
about['__version__'] = __version__
class UploadCommand(Command):
    """Support setup.py upload.

    Custom setuptools command (invoked as ``python setup.py upload``) that
    removes any previous build artifacts, builds sdist + universal wheel,
    and uploads them to PyPI via twine. Exits the interpreter when done.
    """
    # Shown by `setup.py --help-commands`.
    description = 'Build and publish the package.'
    # This command accepts no command-line options.
    user_options = []
    def initialize_options(self):
        # Required by the Command interface; nothing to initialize.
        pass
    def finalize_options(self):
        # Required by the Command interface; nothing to validate.
        pass
    @staticmethod
    def status(s):
        """Prints things in bold."""
        # ANSI escape codes: \033[1m = bold, \033[0m = reset.
        print('\033[1m{0}\033[0m'.format(s))
    def run(self):
        # Remove any previous dist/ directory; ignore the error if it
        # does not exist.
        try:
            self.status('Removing previous builds…')
            rmtree(os.path.join(here, 'dist'))
        except OSError:
            pass
        self.status("Installing required build packages...")
        # Each step shells out to the same interpreter running this script.
        os.system('{0} -m pip install wheel twine'.format(sys.executable))
        self.status('Building Source and Wheel (universal) distribution…')
        os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))
        self.status('Uploading the package to pypi via Twine…')
        os.system('{0} -m twine upload dist/* '.format(sys.executable))
        # Stop here so setuptools does not continue with a normal build.
        sys.exit()
setup(
    name=PYPI_NAME,
    version=about['__version__'],
    description=DESCRIPTION,
    author=AUTHOR,
    author_email=EMAIL,
    url=URL,
    # Single-module distribution: ships socket_wait.py, not a package dir.
    py_modules=['socket_wait'],
    include_package_data=True,
    # If your package has a CLI component, specify it in entry_points.
    # for example, if you want it to be called like "mycli" from the command line, and the command line entry
    # point lives in the somepackage/cli.py file, in the function main, you'd construct it like this:
    entry_points={
        'console_scripts': ['socket_wait=socket_wait:cli'],
    },
    classifiers=[
        # Trove classifiers
        # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: Implementation :: CPython',
    ],
    # setup.py publish support: enables `python setup.py upload`.
    cmdclass={
        'upload': UploadCommand,
    },
)
|
normal
|
{
"blob_id": "58438a1fb0b9e620717ba262c25a43bfbf6b8824",
"index": 8100,
"step-1": "<mask token>\n\n\nclass UploadCommand(Command):\n <mask token>\n description = 'Build and publish the package.'\n user_options = []\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n @staticmethod\n def status(s):\n \"\"\"Prints things in bold.\"\"\"\n print('\\x1b[1m{0}\\x1b[0m'.format(s))\n\n def run(self):\n try:\n self.status('Removing previous builds…')\n rmtree(os.path.join(here, 'dist'))\n except OSError:\n pass\n self.status('Installing required build packages...')\n os.system('{0} -m pip install wheel twine'.format(sys.executable))\n self.status('Building Source and Wheel (universal) distribution…')\n os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.\n executable))\n self.status('Uploading the package to pypi via Twine…')\n os.system('{0} -m twine upload dist/* '.format(sys.executable))\n sys.exit()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass UploadCommand(Command):\n \"\"\"Support setup.py upload.\"\"\"\n description = 'Build and publish the package.'\n user_options = []\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n @staticmethod\n def status(s):\n \"\"\"Prints things in bold.\"\"\"\n print('\\x1b[1m{0}\\x1b[0m'.format(s))\n\n def run(self):\n try:\n self.status('Removing previous builds…')\n rmtree(os.path.join(here, 'dist'))\n except OSError:\n pass\n self.status('Installing required build packages...')\n os.system('{0} -m pip install wheel twine'.format(sys.executable))\n self.status('Building Source and Wheel (universal) distribution…')\n os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.\n executable))\n self.status('Uploading the package to pypi via Twine…')\n os.system('{0} -m twine upload dist/* '.format(sys.executable))\n sys.exit()\n\n\n<mask token>\n",
"step-3": "__author__ = 'tcaruso'\n<mask token>\ntry:\n from pip._internal.req import parse_requirements\nexcept ImportError:\n from pip.req import parse_requirements\nexcept Exception:\n from pip import __version__ as __pip_version__\n msg = (\n \"\"\"Sorry, could not install due to a pip import error. Please open an issue on the repo \n with this message and the error so it can be addressed.\n\n pip version: {}\n python version: {}\n\n \"\"\"\n .format(__pip_version__, '.'.join(sys.version_info)))\n raise EnvironmentError(msg)\nhere = os.path.abspath(os.path.dirname(__file__))\nPACKAGE_NAME = 'socket_wait'\nDESCRIPTION = 'Listen on a port until a connection is received.'\nURL = 'https://github.com/tomplex/socket_wait'\nEMAIL = 'carusot42@gmail.com'\nAUTHOR = 'Tom Caruso'\nREQUIRES_PYTHON = 2, 7, 0\nPYPI_NAME = '{}'.format(PACKAGE_NAME)\nif sys.version_info < REQUIRES_PYTHON:\n raise Exception('Package {} requires python >= {}.'.format(PYPI_NAME,\n '.'.join(map(str, REQUIRES_PYTHON))))\nREQUIRES_PYTHON = '>=' + '.'.join(map(str, REQUIRES_PYTHON))\nabout = {}\n<mask token>\nabout['__version__'] = __version__\n\n\nclass UploadCommand(Command):\n \"\"\"Support setup.py upload.\"\"\"\n description = 'Build and publish the package.'\n user_options = []\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n @staticmethod\n def status(s):\n \"\"\"Prints things in bold.\"\"\"\n print('\\x1b[1m{0}\\x1b[0m'.format(s))\n\n def run(self):\n try:\n self.status('Removing previous builds…')\n rmtree(os.path.join(here, 'dist'))\n except OSError:\n pass\n self.status('Installing required build packages...')\n os.system('{0} -m pip install wheel twine'.format(sys.executable))\n self.status('Building Source and Wheel (universal) distribution…')\n os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.\n executable))\n self.status('Uploading the package to pypi via Twine…')\n os.system('{0} -m twine upload dist/* '.format(sys.executable))\n 
sys.exit()\n\n\nsetup(name=PYPI_NAME, version=about['__version__'], description=DESCRIPTION,\n author=AUTHOR, author_email=EMAIL, url=URL, py_modules=['socket_wait'],\n include_package_data=True, entry_points={'console_scripts': [\n 'socket_wait=socket_wait:cli']}, classifiers=[\n 'Programming Language :: Python', 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: Implementation :: CPython'],\n cmdclass={'upload': UploadCommand})\n",
"step-4": "__author__ = 'tcaruso'\nimport glob\nimport fnmatch\nimport os\nimport sys\nimport warnings\nfrom shutil import rmtree\nfrom setuptools import find_packages, setup, Command\nfrom collections import namedtuple\ntry:\n from pip._internal.req import parse_requirements\nexcept ImportError:\n from pip.req import parse_requirements\nexcept Exception:\n from pip import __version__ as __pip_version__\n msg = (\n \"\"\"Sorry, could not install due to a pip import error. Please open an issue on the repo \n with this message and the error so it can be addressed.\n\n pip version: {}\n python version: {}\n\n \"\"\"\n .format(__pip_version__, '.'.join(sys.version_info)))\n raise EnvironmentError(msg)\nhere = os.path.abspath(os.path.dirname(__file__))\nPACKAGE_NAME = 'socket_wait'\nDESCRIPTION = 'Listen on a port until a connection is received.'\nURL = 'https://github.com/tomplex/socket_wait'\nEMAIL = 'carusot42@gmail.com'\nAUTHOR = 'Tom Caruso'\nREQUIRES_PYTHON = 2, 7, 0\nPYPI_NAME = '{}'.format(PACKAGE_NAME)\nif sys.version_info < REQUIRES_PYTHON:\n raise Exception('Package {} requires python >= {}.'.format(PYPI_NAME,\n '.'.join(map(str, REQUIRES_PYTHON))))\nREQUIRES_PYTHON = '>=' + '.'.join(map(str, REQUIRES_PYTHON))\nabout = {}\nfrom socket_wait import __version__\nabout['__version__'] = __version__\n\n\nclass UploadCommand(Command):\n \"\"\"Support setup.py upload.\"\"\"\n description = 'Build and publish the package.'\n user_options = []\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n @staticmethod\n def status(s):\n \"\"\"Prints things in bold.\"\"\"\n print('\\x1b[1m{0}\\x1b[0m'.format(s))\n\n def run(self):\n try:\n self.status('Removing previous builds…')\n rmtree(os.path.join(here, 'dist'))\n except OSError:\n pass\n self.status('Installing required build packages...')\n os.system('{0} -m pip install wheel twine'.format(sys.executable))\n self.status('Building Source and Wheel (universal) distribution…')\n os.system('{0} 
setup.py sdist bdist_wheel --universal'.format(sys.\n executable))\n self.status('Uploading the package to pypi via Twine…')\n os.system('{0} -m twine upload dist/* '.format(sys.executable))\n sys.exit()\n\n\nsetup(name=PYPI_NAME, version=about['__version__'], description=DESCRIPTION,\n author=AUTHOR, author_email=EMAIL, url=URL, py_modules=['socket_wait'],\n include_package_data=True, entry_points={'console_scripts': [\n 'socket_wait=socket_wait:cli']}, classifiers=[\n 'Programming Language :: Python', 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: Implementation :: CPython'],\n cmdclass={'upload': UploadCommand})\n",
"step-5": "__author__ = 'tcaruso'\n\n# !/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport glob\nimport fnmatch\nimport os\nimport sys\nimport warnings\nfrom shutil import rmtree\nfrom setuptools import find_packages, setup, Command\nfrom collections import namedtuple\n\ntry:\n from pip._internal.req import parse_requirements\nexcept ImportError:\n from pip.req import parse_requirements\nexcept Exception:\n from pip import __version__ as __pip_version__\n\n msg = \"\"\"Sorry, could not install due to a pip import error. Please open an issue on the repo \n with this message and the error so it can be addressed.\n\n pip version: {}\n python version: {}\n\n \"\"\".format(__pip_version__, '.'.join(sys.version_info))\n raise EnvironmentError(msg)\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\n# ------------------------------------------------\n\n# Package meta-data.\n# PACKAGE_NAME is the name of the package directory and the import path. If you use my_package then when installed, you\n# will import the package like `import my_package`.\nPACKAGE_NAME = 'socket_wait'\nDESCRIPTION = 'Listen on a port until a connection is received.'\nURL = 'https://github.com/tomplex/socket_wait'\nEMAIL = 'carusot42@gmail.com'\nAUTHOR = 'Tom Caruso'\n# The minimum Python version required\nREQUIRES_PYTHON = (2, 7, 0)\n# PYPI_NAME is the name of the package on pypi. We'll default to pbvt_{PACKAGE_NAME} so we avoid name collisions\n# with PyPI. You'll use this name to install the package.\nPYPI_NAME = '{}'.format(PACKAGE_NAME)\n# Specify the name of the requirements file we should use. If there is none, then just leave it as is. We'll detect\n\n# ------------------------------------------------\n# Check Python version we're installing against. Bail if it's not correct. 
This will blow up both when we build the\n# package and when someone tries to install it.\n\nif sys.version_info < REQUIRES_PYTHON:\n # Raise if we're trying to install on an unsupported Python version\n raise Exception(\"Package {} requires python >= {}.\".format(PYPI_NAME, '.'.join(map(str, REQUIRES_PYTHON))))\n\nREQUIRES_PYTHON = '>=' + '.'.join(map(str, REQUIRES_PYTHON))\n\n\n# ------------------------------------------------\n# Requirements gathering.\n\n\nabout = {}\nfrom socket_wait import __version__\n\nabout['__version__'] = __version__\n\n\nclass UploadCommand(Command):\n \"\"\"Support setup.py upload.\"\"\"\n\n description = 'Build and publish the package.'\n user_options = []\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n @staticmethod\n def status(s):\n \"\"\"Prints things in bold.\"\"\"\n print('\\033[1m{0}\\033[0m'.format(s))\n\n def run(self):\n try:\n self.status('Removing previous builds…')\n rmtree(os.path.join(here, 'dist'))\n except OSError:\n pass\n\n self.status(\"Installing required build packages...\")\n os.system('{0} -m pip install wheel twine'.format(sys.executable))\n\n self.status('Building Source and Wheel (universal) distribution…')\n os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))\n\n self.status('Uploading the package to pypi via Twine…')\n os.system('{0} -m twine upload dist/* '.format(sys.executable))\n\n sys.exit()\n\n\nsetup(\n name=PYPI_NAME,\n version=about['__version__'],\n description=DESCRIPTION,\n author=AUTHOR,\n author_email=EMAIL,\n url=URL,\n py_modules=['socket_wait'],\n include_package_data=True,\n # If your package has a CLI component, specify it in entry_points.\n # for example, if you want it to be called like \"mycli\" from the command line, and the command line entry\n # point lives in the somepackage/cli.py file, in the function main, you'd construct it like this:\n entry_points={\n 'console_scripts': ['socket_wait=socket_wait:cli'],\n },\n\n 
classifiers=[\n # Trove classifiers\n # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: Implementation :: CPython',\n ],\n # setup.py publish support.\n cmdclass={\n 'upload': UploadCommand,\n },\n)\n",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
import os
# Previous parameter set, kept for reference (FILTER_LENGTH used to be a literal 50):
#defaults = {"N":20, "K":3, "POP_SIZE":200, "MUT_RATE":.05, "TOURNAMENT_SIZE":2, "SELECTION":0, "CHANGE_RATE":100000, "MAX_GENS": 5000, "FILTER_LENGTH":50}
# Default parameters for every run. A value may name another key (e.g.
# FILTER_LENGTH -> "POP_SIZE"); such aliases are resolved per condition below.
defaults = {"N":20, "K":3, "POP_SIZE":200, "MUT_RATE":.05, "TOURNAMENT_SIZE":2, "SELECTION":0, "CHANGE_RATE":100000, "MAX_GENS": 5000, "FILTER_LENGTH":"POP_SIZE"}
# Each dict overrides a subset of defaults; {} is the baseline condition.
conditions = [{},{"K":10}, {"N":100, "MUT_RATE":.01}, {"MUT_RATE":.005}, {"MUT_RATE": .1}, {"POP_SIZE":20}, {"POP_SIZE":2000}, {"SELECTION":1}, {"SELECTION":1, "FILTER_LENGTH":1000}, {"CHANGE_RATE":500}, {"CHANGE_RATE":500, "CHANGE_TYPE":1}]
# Global seed counter, incremented once per replicate so each generated
# command line gets a unique seed.
seed = 0
for condition in conditions:
    print(condition)
    # command[1] is a placeholder for the seed; overwritten per replicate below.
    command = ["./nk_oee -MODES_RESOLUTION 10 -SEED", seed]
    dir_name = []
    # Fill in unspecified parameters from defaults. NOTE: this mutates the
    # dicts stored in `conditions` in place.
    for var in defaults:
        if var not in condition:
            condition[var] = defaults[var]
    for var in condition:
        # Resolve alias values: while the value is itself a key of this
        # condition (e.g. FILTER_LENGTH == "POP_SIZE"), replace it with
        # that key's (already-resolved or literal) value.
        while condition[var] in condition:
            condition[var] = condition[condition[var]]
        command.append("-"+var)
        dir_name.append("".join(var.split("_"))) # Underscores in variable names will screw up parsing later
        val = str(condition[var])
        command.append(val)
        dir_name.append(val)
    # Directory name encodes the full parameter set, underscore-joined.
    str_dir_name = "_".join(dir_name)
    if not os.path.exists(str_dir_name):
        os.mkdir(str_dir_name)
    # 30 replicates per condition; a replicate whose command.sh already
    # exists is assumed complete and is skipped (seed is NOT advanced for it).
    for i in range(30):
        if os.path.exists(str_dir_name+"/"+str(i)+"/command.sh"):
            continue
        seed += 1
        command[1] = str(seed)
        print(command)
        # NOTE(review): assumes the replicate dir is absent whenever its
        # command.sh is absent; a dir without command.sh makes mkdir raise.
        os.mkdir(str_dir_name+"/"+str(i))
        with open(str_dir_name+"/"+str(i)+"/command.sh", "w") as infile:
            infile.write(" ".join(command))
|
normal
|
{
"blob_id": "a826f33361ec59824f3c4a83d01e94c6b307b0a9",
"index": 9144,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor condition in conditions:\n print(condition)\n command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n command.append('-' + var)\n dir_name.append(''.join(var.split('_')))\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n str_dir_name = '_'.join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n for i in range(30):\n if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name + '/' + str(i))\n with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:\n infile.write(' '.join(command))\n",
"step-3": "<mask token>\ndefaults = {'N': 20, 'K': 3, 'POP_SIZE': 200, 'MUT_RATE': 0.05,\n 'TOURNAMENT_SIZE': 2, 'SELECTION': 0, 'CHANGE_RATE': 100000, 'MAX_GENS':\n 5000, 'FILTER_LENGTH': 'POP_SIZE'}\nconditions = [{}, {'K': 10}, {'N': 100, 'MUT_RATE': 0.01}, {'MUT_RATE': \n 0.005}, {'MUT_RATE': 0.1}, {'POP_SIZE': 20}, {'POP_SIZE': 2000}, {\n 'SELECTION': 1}, {'SELECTION': 1, 'FILTER_LENGTH': 1000}, {\n 'CHANGE_RATE': 500}, {'CHANGE_RATE': 500, 'CHANGE_TYPE': 1}]\nseed = 0\nfor condition in conditions:\n print(condition)\n command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n command.append('-' + var)\n dir_name.append(''.join(var.split('_')))\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n str_dir_name = '_'.join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n for i in range(30):\n if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name + '/' + str(i))\n with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:\n infile.write(' '.join(command))\n",
"step-4": "import os\ndefaults = {'N': 20, 'K': 3, 'POP_SIZE': 200, 'MUT_RATE': 0.05,\n 'TOURNAMENT_SIZE': 2, 'SELECTION': 0, 'CHANGE_RATE': 100000, 'MAX_GENS':\n 5000, 'FILTER_LENGTH': 'POP_SIZE'}\nconditions = [{}, {'K': 10}, {'N': 100, 'MUT_RATE': 0.01}, {'MUT_RATE': \n 0.005}, {'MUT_RATE': 0.1}, {'POP_SIZE': 20}, {'POP_SIZE': 2000}, {\n 'SELECTION': 1}, {'SELECTION': 1, 'FILTER_LENGTH': 1000}, {\n 'CHANGE_RATE': 500}, {'CHANGE_RATE': 500, 'CHANGE_TYPE': 1}]\nseed = 0\nfor condition in conditions:\n print(condition)\n command = ['./nk_oee -MODES_RESOLUTION 10 -SEED', seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n command.append('-' + var)\n dir_name.append(''.join(var.split('_')))\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n str_dir_name = '_'.join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n for i in range(30):\n if os.path.exists(str_dir_name + '/' + str(i) + '/command.sh'):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name + '/' + str(i))\n with open(str_dir_name + '/' + str(i) + '/command.sh', 'w') as infile:\n infile.write(' '.join(command))\n",
"step-5": "import os\n\n\n#defaults = {\"N\":20, \"K\":3, \"POP_SIZE\":200, \"MUT_RATE\":.05, \"TOURNAMENT_SIZE\":2, \"SELECTION\":0, \"CHANGE_RATE\":100000, \"MAX_GENS\": 5000, \"FILTER_LENGTH\":50}\ndefaults = {\"N\":20, \"K\":3, \"POP_SIZE\":200, \"MUT_RATE\":.05, \"TOURNAMENT_SIZE\":2, \"SELECTION\":0, \"CHANGE_RATE\":100000, \"MAX_GENS\": 5000, \"FILTER_LENGTH\":\"POP_SIZE\"}\nconditions = [{},{\"K\":10}, {\"N\":100, \"MUT_RATE\":.01}, {\"MUT_RATE\":.005}, {\"MUT_RATE\": .1}, {\"POP_SIZE\":20}, {\"POP_SIZE\":2000}, {\"SELECTION\":1}, {\"SELECTION\":1, \"FILTER_LENGTH\":1000}, {\"CHANGE_RATE\":500}, {\"CHANGE_RATE\":500, \"CHANGE_TYPE\":1}]\n\nseed = 0\n\nfor condition in conditions:\n print(condition)\n command = [\"./nk_oee -MODES_RESOLUTION 10 -SEED\", seed]\n dir_name = []\n for var in defaults:\n if var not in condition:\n condition[var] = defaults[var]\n\n for var in condition:\n while condition[var] in condition:\n condition[var] = condition[condition[var]]\n\n command.append(\"-\"+var)\n dir_name.append(\"\".join(var.split(\"_\"))) # Underscores in variable names will screw up parsing later\n val = str(condition[var])\n command.append(val)\n dir_name.append(val)\n\n \n str_dir_name = \"_\".join(dir_name)\n if not os.path.exists(str_dir_name):\n os.mkdir(str_dir_name)\n \n for i in range(30):\n if os.path.exists(str_dir_name+\"/\"+str(i)+\"/command.sh\"):\n continue\n seed += 1\n command[1] = str(seed)\n print(command)\n os.mkdir(str_dir_name+\"/\"+str(i))\n with open(str_dir_name+\"/\"+str(i)+\"/command.sh\", \"w\") as infile:\n infile.write(\" \".join(command))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from lilaclib import *
def pre_build():
    """Rewrite the PKGBUILD's _tag line for the new upstream tag, then bump
    pkgver/pkgrel from the dash-to-dot form of the version."""
    version = _G.newver.removeprefix('amd-drm-fixes-')
    replacement = "_tag='amd-drm-fixes-" + version + "'"
    for current in edit_file('PKGBUILD'):
        # edit_file yields each line; printing writes it back to the file.
        print(replacement if current.startswith('_tag') else current)
    update_pkgver_and_pkgrel(version.replace("-", "."))
def post_build():
    """Commit the updated PKGBUILD and push the result to the AUR."""
    git_add_files('PKGBUILD')
    git_commit()
    update_aur_repo()
#if __name__ == '__main__':
# single_main()
|
normal
|
{
"blob_id": "32eff306444966fab47815fcbae4aefb6769d29b",
"index": 9684,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef post_build():\n git_add_files('PKGBUILD')\n git_commit()\n update_aur_repo()\n",
"step-3": "<mask token>\n\n\ndef pre_build():\n newver = _G.newver.removeprefix('amd-drm-fixes-')\n for line in edit_file('PKGBUILD'):\n if line.startswith('_tag'):\n line = \"_tag='amd-drm-fixes-\" + newver + \"'\"\n print(line)\n newver2 = newver.replace('-', '.')\n update_pkgver_and_pkgrel(newver2)\n\n\ndef post_build():\n git_add_files('PKGBUILD')\n git_commit()\n update_aur_repo()\n",
"step-4": "from lilaclib import *\n\n\ndef pre_build():\n newver = _G.newver.removeprefix('amd-drm-fixes-')\n for line in edit_file('PKGBUILD'):\n if line.startswith('_tag'):\n line = \"_tag='amd-drm-fixes-\" + newver + \"'\"\n print(line)\n newver2 = newver.replace('-', '.')\n update_pkgver_and_pkgrel(newver2)\n\n\ndef post_build():\n git_add_files('PKGBUILD')\n git_commit()\n update_aur_repo()\n",
"step-5": "from lilaclib import *\n\ndef pre_build():\n newver = _G.newver.removeprefix('amd-drm-fixes-')\n\n for line in edit_file('PKGBUILD'):\n if line.startswith('_tag'):\n line = \"_tag='amd-drm-fixes-\" + newver + \"'\"\n print(line)\n newver2 = newver.replace(\"-\",\".\")\n update_pkgver_and_pkgrel(newver2)\n\ndef post_build():\n git_add_files('PKGBUILD')\n git_commit()\n update_aur_repo()\n\n#if __name__ == '__main__':\n# single_main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def scan_files(dir, pattern):
fileList = []
for root, subFolders, files in os.walk(dir):
for file in files:
if fnmatch.fnmatch(file, pattern):
fileList.append(os.path.join(root, file))
return fileList
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',
datefmt='%Y-%m-%d,%H:%M:%S', level=logging.DEBUG)
def scan_files(dir, pattern):
fileList = []
for root, subFolders, files in os.walk(dir):
for file in files:
if fnmatch.fnmatch(file, pattern):
fileList.append(os.path.join(root, file))
return fileList
if not os.path.exists('dist'):
os.makedirs('dist')
<|reserved_special_token_0|>
os.chdir('..\\..')
<|reserved_special_token_0|>
if os.path.exists(distPath):
shutil.rmtree(distPath)
for data in scanData:
for file in scan_files(data[0], data[1]):
if data[3]:
newSubPath = file
else:
newSubPath = os.path.basename(file)
newPath = os.path.relpath(os.path.join(distPath, data[2], newSubPath))
newDir = os.path.dirname(newPath)
if not os.path.exists(newDir):
os.makedirs(newDir)
logging.info('Copying %s to %s' % (file, newPath))
shutil.copyfile(file, newPath)
logging.info('Creating archive')
<|reserved_special_token_0|>
for root, subFolders, files in os.walk(distPath):
for file in files:
newPath = os.path.join(root, file).replace(distPath, '')
bundleZip.write(os.path.join(root, file), newPath)
bundleZip.close()
logging.info('Done')
os.chdir(currentDir)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',
datefmt='%Y-%m-%d,%H:%M:%S', level=logging.DEBUG)
def scan_files(dir, pattern):
fileList = []
for root, subFolders, files in os.walk(dir):
for file in files:
if fnmatch.fnmatch(file, pattern):
fileList.append(os.path.join(root, file))
return fileList
if not os.path.exists('dist'):
os.makedirs('dist')
currentDir = os.getcwd()
os.chdir('..\\..')
distPath = os.path.join(currentDir, 'bundle')
scanData = [['WSN\\simulations', '*.ned', '', True], ['WSN\\simulations',
'*.xml', '', True], ['WSN\\simulations', '*.exe', '', True], [
'WSN\\simulations', '*.ini', '', True], ['WSN\\src', '*.ned', '', True],
['WSN\\src', '*.dll', '', True], ['MiXiM\\src', '*.ned', '', True], [
'MiXiM\\src', '*.dll', '', True], ['MiXiM\\src\\base', '*.dll', 'lib',
False], ['MiXiM\\src\\modules', '*.dll', 'lib', False], [os.path.join(
currentDir, 'lib'), '*.dll', 'lib', False]]
if os.path.exists(distPath):
shutil.rmtree(distPath)
for data in scanData:
for file in scan_files(data[0], data[1]):
if data[3]:
newSubPath = file
else:
newSubPath = os.path.basename(file)
newPath = os.path.relpath(os.path.join(distPath, data[2], newSubPath))
newDir = os.path.dirname(newPath)
if not os.path.exists(newDir):
os.makedirs(newDir)
logging.info('Copying %s to %s' % (file, newPath))
shutil.copyfile(file, newPath)
logging.info('Creating archive')
bundleZip = zipfile.ZipFile(os.path.join(currentDir, 'dist', 'bundle.zip'),
'w', zipfile.ZIP_DEFLATED)
for root, subFolders, files in os.walk(distPath):
for file in files:
newPath = os.path.join(root, file).replace(distPath, '')
bundleZip.write(os.path.join(root, file), newPath)
bundleZip.close()
logging.info('Done')
os.chdir(currentDir)
<|reserved_special_token_1|>
import os, sys, shutil
import fnmatch, logging, zipfile
logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',
datefmt='%Y-%m-%d,%H:%M:%S', level=logging.DEBUG)
def scan_files(dir, pattern):
fileList = []
for root, subFolders, files in os.walk(dir):
for file in files:
if fnmatch.fnmatch(file, pattern):
fileList.append(os.path.join(root, file))
return fileList
if not os.path.exists('dist'):
os.makedirs('dist')
currentDir = os.getcwd()
os.chdir('..\\..')
distPath = os.path.join(currentDir, 'bundle')
scanData = [['WSN\\simulations', '*.ned', '', True], ['WSN\\simulations',
'*.xml', '', True], ['WSN\\simulations', '*.exe', '', True], [
'WSN\\simulations', '*.ini', '', True], ['WSN\\src', '*.ned', '', True],
['WSN\\src', '*.dll', '', True], ['MiXiM\\src', '*.ned', '', True], [
'MiXiM\\src', '*.dll', '', True], ['MiXiM\\src\\base', '*.dll', 'lib',
False], ['MiXiM\\src\\modules', '*.dll', 'lib', False], [os.path.join(
currentDir, 'lib'), '*.dll', 'lib', False]]
if os.path.exists(distPath):
shutil.rmtree(distPath)
for data in scanData:
for file in scan_files(data[0], data[1]):
if data[3]:
newSubPath = file
else:
newSubPath = os.path.basename(file)
newPath = os.path.relpath(os.path.join(distPath, data[2], newSubPath))
newDir = os.path.dirname(newPath)
if not os.path.exists(newDir):
os.makedirs(newDir)
logging.info('Copying %s to %s' % (file, newPath))
shutil.copyfile(file, newPath)
logging.info('Creating archive')
bundleZip = zipfile.ZipFile(os.path.join(currentDir, 'dist', 'bundle.zip'),
'w', zipfile.ZIP_DEFLATED)
for root, subFolders, files in os.walk(distPath):
for file in files:
newPath = os.path.join(root, file).replace(distPath, '')
bundleZip.write(os.path.join(root, file), newPath)
bundleZip.close()
logging.info('Done')
os.chdir(currentDir)
<|reserved_special_token_1|>
import os, sys, shutil
import fnmatch, logging, zipfile
logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s', datefmt='%Y-%m-%d,%H:%M:%S', level=logging.DEBUG)
def scan_files(dir, pattern):
    """Recursively collect paths under *dir* whose basename matches the
    fnmatch *pattern* (e.g. '*.dll'), in os.walk order."""
    return [
        os.path.join(root, name)
        for root, _subdirs, names in os.walk(dir)
        for name in names
        if fnmatch.fnmatch(name, pattern)
    ]
# Ensure the output directory for the final archive exists.
if (not os.path.exists('dist')):
    os.makedirs('dist')
currentDir = os.getcwd() # save current dir
os.chdir('..\\..') # go to root of simulation
distPath = os.path.join(currentDir, 'bundle') # where to put files
# Each entry: [source dir, filename glob, destination subdir, keep relative path?]
# When the last flag is True the file's path relative to the CWD is preserved
# under distPath; when False only the basename is copied into the subdir.
# NOTE(review): paths are Windows-specific (backslashes); this script is not
# portable to POSIX as written.
scanData = [
    ['WSN\\simulations', '*.ned', '', True],
    ['WSN\\simulations', '*.xml', '', True],
    ['WSN\\simulations', '*.exe', '', True],
    ['WSN\\simulations', '*.ini', '', True],
    ['WSN\\src', '*.ned', '', True],
    ['WSN\\src', '*.dll', '', True],
    ['MiXiM\\src', '*.ned', '', True],
    ['MiXiM\\src', '*.dll', '', True],
    ['MiXiM\\src\\base', '*.dll', 'lib', False],
    ['MiXiM\\src\\modules', '*.dll', 'lib', False],
    [os.path.join(currentDir, 'lib'), '*.dll', 'lib', False],
]
# remove old bundle
if (os.path.exists(distPath)):
    shutil.rmtree(distPath)
# copy neccessary files
for data in scanData:
    for file in scan_files(data[0], data[1]):
        if (data[3]):
            newSubPath = file
        else:
            newSubPath = os.path.basename(file)
        newPath = os.path.relpath(os.path.join(distPath, data[2], newSubPath))
        newDir = os.path.dirname(newPath)
        # Create the destination directory on first use.
        if (not os.path.exists(newDir)):
            os.makedirs(newDir)
        logging.info('Copying %s to %s' % (file, newPath))
        shutil.copyfile(file, newPath)
logging.info("Creating archive")
# Zip the staged bundle/ tree into dist/bundle.zip (deflate-compressed).
bundleZip = zipfile.ZipFile(os.path.join(currentDir, 'dist', "bundle.zip"), 'w', zipfile.ZIP_DEFLATED)
for root, subFolders, files in os.walk(distPath):
    for file in files:
        # make path relative to distPath
        newPath = os.path.join(root, file).replace(distPath, '')
        # add files to zip
        bundleZip.write(os.path.join(root, file), newPath)
bundleZip.close()
logging.info("Done")
os.chdir(currentDir) # go back
|
flexible
|
{
"blob_id": "187c2a56ba9360b89c8ded09861091e2deedf32e",
"index": 7783,
"step-1": "<mask token>\n\n\ndef scan_files(dir, pattern):\n fileList = []\n for root, subFolders, files in os.walk(dir):\n for file in files:\n if fnmatch.fnmatch(file, pattern):\n fileList.append(os.path.join(root, file))\n return fileList\n\n\n<mask token>\n",
"step-2": "<mask token>\nlogging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',\n datefmt='%Y-%m-%d,%H:%M:%S', level=logging.DEBUG)\n\n\ndef scan_files(dir, pattern):\n fileList = []\n for root, subFolders, files in os.walk(dir):\n for file in files:\n if fnmatch.fnmatch(file, pattern):\n fileList.append(os.path.join(root, file))\n return fileList\n\n\nif not os.path.exists('dist'):\n os.makedirs('dist')\n<mask token>\nos.chdir('..\\\\..')\n<mask token>\nif os.path.exists(distPath):\n shutil.rmtree(distPath)\nfor data in scanData:\n for file in scan_files(data[0], data[1]):\n if data[3]:\n newSubPath = file\n else:\n newSubPath = os.path.basename(file)\n newPath = os.path.relpath(os.path.join(distPath, data[2], newSubPath))\n newDir = os.path.dirname(newPath)\n if not os.path.exists(newDir):\n os.makedirs(newDir)\n logging.info('Copying %s to %s' % (file, newPath))\n shutil.copyfile(file, newPath)\nlogging.info('Creating archive')\n<mask token>\nfor root, subFolders, files in os.walk(distPath):\n for file in files:\n newPath = os.path.join(root, file).replace(distPath, '')\n bundleZip.write(os.path.join(root, file), newPath)\nbundleZip.close()\nlogging.info('Done')\nos.chdir(currentDir)\n",
"step-3": "<mask token>\nlogging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',\n datefmt='%Y-%m-%d,%H:%M:%S', level=logging.DEBUG)\n\n\ndef scan_files(dir, pattern):\n fileList = []\n for root, subFolders, files in os.walk(dir):\n for file in files:\n if fnmatch.fnmatch(file, pattern):\n fileList.append(os.path.join(root, file))\n return fileList\n\n\nif not os.path.exists('dist'):\n os.makedirs('dist')\ncurrentDir = os.getcwd()\nos.chdir('..\\\\..')\ndistPath = os.path.join(currentDir, 'bundle')\nscanData = [['WSN\\\\simulations', '*.ned', '', True], ['WSN\\\\simulations',\n '*.xml', '', True], ['WSN\\\\simulations', '*.exe', '', True], [\n 'WSN\\\\simulations', '*.ini', '', True], ['WSN\\\\src', '*.ned', '', True],\n ['WSN\\\\src', '*.dll', '', True], ['MiXiM\\\\src', '*.ned', '', True], [\n 'MiXiM\\\\src', '*.dll', '', True], ['MiXiM\\\\src\\\\base', '*.dll', 'lib', \n False], ['MiXiM\\\\src\\\\modules', '*.dll', 'lib', False], [os.path.join(\n currentDir, 'lib'), '*.dll', 'lib', False]]\nif os.path.exists(distPath):\n shutil.rmtree(distPath)\nfor data in scanData:\n for file in scan_files(data[0], data[1]):\n if data[3]:\n newSubPath = file\n else:\n newSubPath = os.path.basename(file)\n newPath = os.path.relpath(os.path.join(distPath, data[2], newSubPath))\n newDir = os.path.dirname(newPath)\n if not os.path.exists(newDir):\n os.makedirs(newDir)\n logging.info('Copying %s to %s' % (file, newPath))\n shutil.copyfile(file, newPath)\nlogging.info('Creating archive')\nbundleZip = zipfile.ZipFile(os.path.join(currentDir, 'dist', 'bundle.zip'),\n 'w', zipfile.ZIP_DEFLATED)\nfor root, subFolders, files in os.walk(distPath):\n for file in files:\n newPath = os.path.join(root, file).replace(distPath, '')\n bundleZip.write(os.path.join(root, file), newPath)\nbundleZip.close()\nlogging.info('Done')\nos.chdir(currentDir)\n",
"step-4": "import os, sys, shutil\nimport fnmatch, logging, zipfile\nlogging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',\n datefmt='%Y-%m-%d,%H:%M:%S', level=logging.DEBUG)\n\n\ndef scan_files(dir, pattern):\n fileList = []\n for root, subFolders, files in os.walk(dir):\n for file in files:\n if fnmatch.fnmatch(file, pattern):\n fileList.append(os.path.join(root, file))\n return fileList\n\n\nif not os.path.exists('dist'):\n os.makedirs('dist')\ncurrentDir = os.getcwd()\nos.chdir('..\\\\..')\ndistPath = os.path.join(currentDir, 'bundle')\nscanData = [['WSN\\\\simulations', '*.ned', '', True], ['WSN\\\\simulations',\n '*.xml', '', True], ['WSN\\\\simulations', '*.exe', '', True], [\n 'WSN\\\\simulations', '*.ini', '', True], ['WSN\\\\src', '*.ned', '', True],\n ['WSN\\\\src', '*.dll', '', True], ['MiXiM\\\\src', '*.ned', '', True], [\n 'MiXiM\\\\src', '*.dll', '', True], ['MiXiM\\\\src\\\\base', '*.dll', 'lib', \n False], ['MiXiM\\\\src\\\\modules', '*.dll', 'lib', False], [os.path.join(\n currentDir, 'lib'), '*.dll', 'lib', False]]\nif os.path.exists(distPath):\n shutil.rmtree(distPath)\nfor data in scanData:\n for file in scan_files(data[0], data[1]):\n if data[3]:\n newSubPath = file\n else:\n newSubPath = os.path.basename(file)\n newPath = os.path.relpath(os.path.join(distPath, data[2], newSubPath))\n newDir = os.path.dirname(newPath)\n if not os.path.exists(newDir):\n os.makedirs(newDir)\n logging.info('Copying %s to %s' % (file, newPath))\n shutil.copyfile(file, newPath)\nlogging.info('Creating archive')\nbundleZip = zipfile.ZipFile(os.path.join(currentDir, 'dist', 'bundle.zip'),\n 'w', zipfile.ZIP_DEFLATED)\nfor root, subFolders, files in os.walk(distPath):\n for file in files:\n newPath = os.path.join(root, file).replace(distPath, '')\n bundleZip.write(os.path.join(root, file), newPath)\nbundleZip.close()\nlogging.info('Done')\nos.chdir(currentDir)\n",
"step-5": "import os, sys, shutil \r\nimport fnmatch, logging, zipfile\r\n\r\nlogging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s', datefmt='%Y-%m-%d,%H:%M:%S', level=logging.DEBUG)\r\n\r\ndef scan_files(dir, pattern):\r\n fileList = []\r\n for root, subFolders, files in os.walk(dir):\r\n for file in files:\r\n if fnmatch.fnmatch(file, pattern):\r\n fileList.append(os.path.join(root,file))\r\n return fileList\r\n\r\nif (not os.path.exists('dist')):\r\n os.makedirs('dist')\r\n \r\ncurrentDir = os.getcwd() # save current dir\r\nos.chdir('..\\\\..') # go to root of simulation\r\n\r\ndistPath = os.path.join(currentDir, 'bundle') # where to put files\r\nscanData = [\r\n ['WSN\\\\simulations', '*.ned', '', True],\r\n ['WSN\\\\simulations', '*.xml', '', True],\r\n ['WSN\\\\simulations', '*.exe', '', True],\r\n ['WSN\\\\simulations', '*.ini', '', True],\r\n ['WSN\\\\src', '*.ned', '', True],\r\n ['WSN\\\\src', '*.dll', '', True],\r\n ['MiXiM\\\\src', '*.ned', '', True],\r\n ['MiXiM\\\\src', '*.dll', '', True],\r\n ['MiXiM\\\\src\\\\base', '*.dll', 'lib', False],\r\n ['MiXiM\\\\src\\\\modules', '*.dll', 'lib', False],\r\n [os.path.join(currentDir, 'lib'), '*.dll', 'lib', False],\r\n]\r\n\r\n# remove old bundle\r\nif (os.path.exists(distPath)):\r\n shutil.rmtree(distPath)\r\n\r\n# copy neccessary files\r\nfor data in scanData:\r\n \r\n for file in scan_files(data[0], data[1]):\r\n \r\n if (data[3]):\r\n newSubPath = file \r\n else:\r\n newSubPath = os.path.basename(file)\r\n \r\n newPath = os.path.relpath(os.path.join(distPath, data[2], newSubPath))\r\n newDir = os.path.dirname(newPath)\r\n \r\n if (not os.path.exists(newDir)):\r\n os.makedirs(newDir)\r\n \r\n logging.info('Copying %s to %s' % (file, newPath))\r\n shutil.copyfile(file, newPath)\r\n\r\nlogging.info(\"Creating archive\")\r\nbundleZip = zipfile.ZipFile(os.path.join(currentDir, 'dist', \"bundle.zip\"), 'w', zipfile.ZIP_DEFLATED)\r\nfor root, subFolders, files in os.walk(distPath):\r\n for file in 
files:\r\n # make path relative to distPath\r\n newPath = os.path.join(root, file).replace(distPath, '')\r\n # add files to zip\r\n bundleZip.write(os.path.join(root, file), newPath)\r\nbundleZip.close()\r\nlogging.info(\"Done\")\r\n\r\nos.chdir(currentDir) # go back",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework_swagger.views import get_swagger_view

# Swagger UI for browsing the whole API.
schema_view = get_swagger_view(title='Pastebin API')

# Root URL configuration. NOTE(review): the catch-all r'^' include sits
# before r'^api/'; Django falls through to later patterns only when the
# included URLconf has no match — confirm 'o.urls' does not shadow 'api/'.
urlpatterns = [
    url(r'^admin/', admin.site.urls),   # Django admin site
    url(r'^doc_u/', schema_view),       # swagger API docs
    url(r'^', include('o.urls', )),     # main application routes
    url(r'^api/', include('restapi.urls', namespace='res')),  # REST API
]
|
normal
|
{
"blob_id": "891588327046e26acb9a691fa8bb9a99420712d6",
"index": 913,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nschema_view = get_swagger_view(title='Pastebin API')\nurlpatterns = [url('^admin/', admin.site.urls), url('^doc_u/', schema_view),\n url('^', include('o.urls')), url('^api/', include('restapi.urls',\n namespace='res'))]\n",
"step-3": "from django.conf.urls import url, include\nfrom django.contrib import admin\nfrom rest_framework_swagger.views import get_swagger_view\nschema_view = get_swagger_view(title='Pastebin API')\nurlpatterns = [url('^admin/', admin.site.urls), url('^doc_u/', schema_view),\n url('^', include('o.urls')), url('^api/', include('restapi.urls',\n namespace='res'))]\n",
"step-4": "from django.conf.urls import url, include\nfrom django.contrib import admin\n\nfrom rest_framework_swagger.views import get_swagger_view\nschema_view = get_swagger_view(title='Pastebin API')\n\nurlpatterns = [\n url(r'^admin/', admin.site.urls),\n url(r'^doc_u/', schema_view),\n url(r'^', include('o.urls', )),\n url(r'^api/', include('restapi.urls', namespace='res')),\n\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class AlignmentProfile:
def __init__(self, width, df, identifier):
self.ident = identifier
self.profile = np.zeros((5, width))
self.repre_sq = ''
self.seq_alignments = None
self.seq_align_counter = -1
self.calculate_profile(df)
def calculate_profile(self, df):
self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.
shape[1], dtype=np.int8)) for index in df.index])
unwrapped_sq = df['sq'].str.split('', expand=True)
unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],
unwrapped_sq.columns[-1]])
counts = np.stack(df['count'].values)
for base in bases:
a = unwrapped_sq != base
newX = np.ma.array(counts, mask=a)
new_counts = newX.sum(axis=0)
self.profile[bases[base], :] += new_counts
maxs = np.argmax(self.profile, axis=0)
self.repre_sq = ''.join([rev_bases[x] for x in maxs])
def add_sequence(self, new_sq, new_counts, nice, sq_index):
offset = re.search(nice['target_aligned'].replace('-', ''), self.
repre_sq).start(0)
x = self.profile
insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]
for i, index in enumerate(insertions):
if x.shape[1] >= index:
value = 0
else:
value = x[:, index].sum()
x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)
self.seq_alignments.insert(loc=int(index + offset), column=self
.seq_align_counter, value=1)
self.seq_align_counter -= 1
aligned_query = np.array(list(nice['query_aligned']))
deletions = np.where(aligned_query == '-')[0]
for i, index in enumerate(deletions):
value = new_counts[index]
new_counts = np.insert(new_counts, index, value, axis=0)
i = offset
for base, count in zip(aligned_query, new_counts):
x[bases[base], i] += count
i += 1
self.profile = x
added_alignment = -np.ones(self.profile.shape[1])
for i, char in enumerate(nice['target_aligned']):
if char == '-':
added_alignment[offset + i] = 1
else:
added_alignment[offset + i] = 0
self.seq_alignments.loc[-1] = [sq_index, *added_alignment]
self.seq_alignments.index = self.seq_alignments.index + 1
maxs = np.argmax(self.profile, axis=0)
self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=
'-'])
<|reserved_special_token_0|>
def cluster_group(df_group, l, dst=dst_func):
sqs = df_group.reset_index()['sq']
n = len(sqs)
if n <= 1:
return np.zeros(n)
dst_matrix = np.zeros((n, n))
for i in range(n):
for j in range(i):
d = dst(sqs[i], sqs[j])
dst_matrix[i, j] = d
dst_matrix[j, i] = d
model = AgglomerativeClustering(distance_threshold=threshold * l,
n_clusters=None, linkage='complete', affinity='precomputed')
clusters = model.fit_predict(dst_matrix)
return clusters
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AlignmentProfile:
def __init__(self, width, df, identifier):
self.ident = identifier
self.profile = np.zeros((5, width))
self.repre_sq = ''
self.seq_alignments = None
self.seq_align_counter = -1
self.calculate_profile(df)
def calculate_profile(self, df):
self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.
shape[1], dtype=np.int8)) for index in df.index])
unwrapped_sq = df['sq'].str.split('', expand=True)
unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],
unwrapped_sq.columns[-1]])
counts = np.stack(df['count'].values)
for base in bases:
a = unwrapped_sq != base
newX = np.ma.array(counts, mask=a)
new_counts = newX.sum(axis=0)
self.profile[bases[base], :] += new_counts
maxs = np.argmax(self.profile, axis=0)
self.repre_sq = ''.join([rev_bases[x] for x in maxs])
def add_sequence(self, new_sq, new_counts, nice, sq_index):
offset = re.search(nice['target_aligned'].replace('-', ''), self.
repre_sq).start(0)
x = self.profile
insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]
for i, index in enumerate(insertions):
if x.shape[1] >= index:
value = 0
else:
value = x[:, index].sum()
x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)
self.seq_alignments.insert(loc=int(index + offset), column=self
.seq_align_counter, value=1)
self.seq_align_counter -= 1
aligned_query = np.array(list(nice['query_aligned']))
deletions = np.where(aligned_query == '-')[0]
for i, index in enumerate(deletions):
value = new_counts[index]
new_counts = np.insert(new_counts, index, value, axis=0)
i = offset
for base, count in zip(aligned_query, new_counts):
x[bases[base], i] += count
i += 1
self.profile = x
added_alignment = -np.ones(self.profile.shape[1])
for i, char in enumerate(nice['target_aligned']):
if char == '-':
added_alignment[offset + i] = 1
else:
added_alignment[offset + i] = 0
self.seq_alignments.loc[-1] = [sq_index, *added_alignment]
self.seq_alignments.index = self.seq_alignments.index + 1
maxs = np.argmax(self.profile, axis=0)
self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=
'-'])
def dst_func(x, y):
return (np.array(x) != np.array(y)).sum()
def read_alignment(filename):
for line in open(filename):
sq, count = line.strip('\n').split(';')
yield sq, np.array([int(x) for x in count.split(',')]), count
def cluster_group(df_group, l, dst=dst_func):
sqs = df_group.reset_index()['sq']
n = len(sqs)
if n <= 1:
return np.zeros(n)
dst_matrix = np.zeros((n, n))
for i in range(n):
for j in range(i):
d = dst(sqs[i], sqs[j])
dst_matrix[i, j] = d
dst_matrix[j, i] = d
model = AgglomerativeClustering(distance_threshold=threshold * l,
n_clusters=None, linkage='complete', affinity='precomputed')
clusters = model.fit_predict(dst_matrix)
return clusters
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
parser.add_argument('--pools', default=4, type=int, help=
'Number of threads to use in aligning. Default 4. Optional.')
parser.add_argument('--misses', default=5, type=float, help=
'Number of allowed substitutions/insertions/deletions in aligning a sequence of length k. For longer sequences, this is scaled. '
)
parser.add_argument('--aligned', default=None, type=str, help=
'Path to the output aligned directory. Required.')
parser.add_argument('--overview', default=None, type=str, help=
'Path to the output description csv. Required. Pairs with <--aligned> directory.'
)
parser.add_argument('--k', default=-1, type=int, help=
'Size of the k-mer created by BCALM. Required.')
parser.add_argument('--input', default=None, type=str, help=
'Path to the input file.')
parser.set_defaults(all_sqs_result=False)
<|reserved_special_token_0|>
class AlignmentProfile:
def __init__(self, width, df, identifier):
self.ident = identifier
self.profile = np.zeros((5, width))
self.repre_sq = ''
self.seq_alignments = None
self.seq_align_counter = -1
self.calculate_profile(df)
def calculate_profile(self, df):
self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.
shape[1], dtype=np.int8)) for index in df.index])
unwrapped_sq = df['sq'].str.split('', expand=True)
unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],
unwrapped_sq.columns[-1]])
counts = np.stack(df['count'].values)
for base in bases:
a = unwrapped_sq != base
newX = np.ma.array(counts, mask=a)
new_counts = newX.sum(axis=0)
self.profile[bases[base], :] += new_counts
maxs = np.argmax(self.profile, axis=0)
self.repre_sq = ''.join([rev_bases[x] for x in maxs])
def add_sequence(self, new_sq, new_counts, nice, sq_index):
offset = re.search(nice['target_aligned'].replace('-', ''), self.
repre_sq).start(0)
x = self.profile
insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]
for i, index in enumerate(insertions):
if x.shape[1] >= index:
value = 0
else:
value = x[:, index].sum()
x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)
self.seq_alignments.insert(loc=int(index + offset), column=self
.seq_align_counter, value=1)
self.seq_align_counter -= 1
aligned_query = np.array(list(nice['query_aligned']))
deletions = np.where(aligned_query == '-')[0]
for i, index in enumerate(deletions):
value = new_counts[index]
new_counts = np.insert(new_counts, index, value, axis=0)
i = offset
for base, count in zip(aligned_query, new_counts):
x[bases[base], i] += count
i += 1
self.profile = x
added_alignment = -np.ones(self.profile.shape[1])
for i, char in enumerate(nice['target_aligned']):
if char == '-':
added_alignment[offset + i] = 1
else:
added_alignment[offset + i] = 0
self.seq_alignments.loc[-1] = [sq_index, *added_alignment]
self.seq_alignments.index = self.seq_alignments.index + 1
maxs = np.argmax(self.profile, axis=0)
self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=
'-'])
def dst_func(x, y):
return (np.array(x) != np.array(y)).sum()
def read_alignment(filename):
for line in open(filename):
sq, count = line.strip('\n').split(';')
yield sq, np.array([int(x) for x in count.split(',')]), count
def cluster_group(df_group, l, dst=dst_func):
sqs = df_group.reset_index()['sq']
n = len(sqs)
if n <= 1:
return np.zeros(n)
dst_matrix = np.zeros((n, n))
for i in range(n):
for j in range(i):
d = dst(sqs[i], sqs[j])
dst_matrix[i, j] = d
dst_matrix[j, i] = d
model = AgglomerativeClustering(distance_threshold=threshold * l,
n_clusters=None, linkage='complete', affinity='precomputed')
clusters = model.fit_predict(dst_matrix)
return clusters
<|reserved_special_token_0|>
if args.aligned is None:
output_profile_dir = aligned_sqs_file + '_profiles'
else:
output_profile_dir = args.aligned
if args.overview is None:
output_csv_file = aligned_sqs_file + '_overview.csv'
else:
output_csv_file = args.overview
<|reserved_special_token_0|>
for cluster, cluster_df in df_group.groupby(by='cluster'):
alignment = AlignmentProfile(longest, cluster_df, global_alignment_ident_no
)
alignments[global_alignment_ident_no] = alignment
global_alignment_ident_no += 1
against.append(alignment)
<|reserved_special_token_0|>
with Bar('Processing length groups...', max=len(unique_lengths) - 1) as bar:
for length in unique_lengths[1:]:
bar.next()
df_group = groups.get_group(length).copy()
def getDistanceAndAlignment(sq):
maxval = np.floor(threshold * len(sq))
min = np.inf
min_target = None
if maxval < 1:
return min, min_target
for target in against:
align_res = edlib.align(sq, target.repre_sq, mode='HW',
task='distance', k=maxval)
if align_res['editDistance'] != -1:
if min > align_res['editDistance']:
if align_res['editDistance'] == 0:
return align_res['editDistance'], target.ident
min = align_res['editDistance']
min_target = target
if min_target is not None:
min_target = min_target.ident
return min, min_target
x = length * threshold
if length * threshold >= 1:
with Pool(pools) as pool:
result = pool.map(getDistanceAndAlignment, df_group['sq'])
df_group['aligned'] = result
aligned = df_group[df_group['aligned'] != (np.inf, None)]
for index, row in aligned.iterrows():
to = alignments[row['aligned'][1]]
align_res = edlib.align(row.sq, to.repre_sq, mode='HW',
task='path')
nice = edlib.getNiceAlignment(align_res, row.sq, to.repre_sq)
to.add_sequence(row.sq, row['count'], nice, index)
unaligned = df_group[df_group['aligned'] == (np.inf, None)].copy()
clusters = cluster_group(unaligned, length)
unaligned['cluster'] = clusters
for cluster, cluster_df in unaligned.groupby(by='cluster'):
alignment = AlignmentProfile(length, cluster_df,
global_alignment_ident_no)
alignments[global_alignment_ident_no] = alignment
global_alignment_ident_no += 1
against.append(alignment)
else:
df_group['aligned'] = [(np.inf, None) for _ in range(len(df_group))
]
unaligned = df_group.copy()
unaligned['cluster'] = list(range(len(unaligned)))
s = time.time()
for i, row in unaligned.iterrows():
cluster_df = pd.DataFrame(row).T
alignment = AlignmentProfile(length, cluster_df,
global_alignment_ident_no)
alignments[global_alignment_ident_no] = alignment
global_alignment_ident_no += 1
against.append(alignment)
print(f'{aligned_sqs_file} elapsed: {time.time() - start}')
print(f'{aligned_sqs_file} writing...')
os.makedirs(output_profile_dir, exist_ok=True)
for alignment in against:
filename = f'{output_profile_dir}/{alignment.ident}.prf'
np.save(filename, alignment.profile)
<|reserved_special_token_0|>
for alignment in against:
itemized = alignment.seq_alignments
num_cols = itemized.columns[1:]
for col in num_cols:
itemized[col] = itemized[col].astype(int).apply(str)
itemized['alignment_actual'] = itemized[num_cols].agg(','.join, axis=1)
itemized = itemized.drop(columns=num_cols)
itemized.columns = ['index_df', 'alignment_actual']
itemized['alignment'] = alignment.ident
all_alignments.append(itemized)
<|reserved_special_token_0|>
merged.drop(columns=['count', 'index_df']).to_csv(output_csv_file, index=False)
print(f'{aligned_sqs_file} done')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
parser = argparse.ArgumentParser()
parser.add_argument('--pools', default=4, type=int, help=
'Number of threads to use in aligning. Default 4. Optional.')
parser.add_argument('--misses', default=5, type=float, help=
'Number of allowed substitutions/insertions/deletions in aligning a sequence of length k. For longer sequences, this is scaled. '
)
parser.add_argument('--aligned', default=None, type=str, help=
'Path to the output aligned directory. Required.')
parser.add_argument('--overview', default=None, type=str, help=
'Path to the output description csv. Required. Pairs with <--aligned> directory.'
)
parser.add_argument('--k', default=-1, type=int, help=
'Size of the k-mer created by BCALM. Required.')
parser.add_argument('--input', default=None, type=str, help=
'Path to the input file.')
parser.set_defaults(all_sqs_result=False)
args = parser.parse_args([] if '__file__' not in globals() else None)
bases = dict(A=0, C=1, G=2, T=3)
bases['-'] = 4
rev_bases = {v: k for k, v in bases.items()}
global_alignment_ident_no = 0
operations = {'.': 0, '-': 1, '|': 0}
class AlignmentProfile:
def __init__(self, width, df, identifier):
self.ident = identifier
self.profile = np.zeros((5, width))
self.repre_sq = ''
self.seq_alignments = None
self.seq_align_counter = -1
self.calculate_profile(df)
def calculate_profile(self, df):
self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.
shape[1], dtype=np.int8)) for index in df.index])
unwrapped_sq = df['sq'].str.split('', expand=True)
unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],
unwrapped_sq.columns[-1]])
counts = np.stack(df['count'].values)
for base in bases:
a = unwrapped_sq != base
newX = np.ma.array(counts, mask=a)
new_counts = newX.sum(axis=0)
self.profile[bases[base], :] += new_counts
maxs = np.argmax(self.profile, axis=0)
self.repre_sq = ''.join([rev_bases[x] for x in maxs])
def add_sequence(self, new_sq, new_counts, nice, sq_index):
offset = re.search(nice['target_aligned'].replace('-', ''), self.
repre_sq).start(0)
x = self.profile
insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]
for i, index in enumerate(insertions):
if x.shape[1] >= index:
value = 0
else:
value = x[:, index].sum()
x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)
self.seq_alignments.insert(loc=int(index + offset), column=self
.seq_align_counter, value=1)
self.seq_align_counter -= 1
aligned_query = np.array(list(nice['query_aligned']))
deletions = np.where(aligned_query == '-')[0]
for i, index in enumerate(deletions):
value = new_counts[index]
new_counts = np.insert(new_counts, index, value, axis=0)
i = offset
for base, count in zip(aligned_query, new_counts):
x[bases[base], i] += count
i += 1
self.profile = x
added_alignment = -np.ones(self.profile.shape[1])
for i, char in enumerate(nice['target_aligned']):
if char == '-':
added_alignment[offset + i] = 1
else:
added_alignment[offset + i] = 0
self.seq_alignments.loc[-1] = [sq_index, *added_alignment]
self.seq_alignments.index = self.seq_alignments.index + 1
maxs = np.argmax(self.profile, axis=0)
self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=
'-'])
def dst_func(x, y):
return (np.array(x) != np.array(y)).sum()
def read_alignment(filename):
for line in open(filename):
sq, count = line.strip('\n').split(';')
yield sq, np.array([int(x) for x in count.split(',')]), count
def cluster_group(df_group, l, dst=dst_func):
sqs = df_group.reset_index()['sq']
n = len(sqs)
if n <= 1:
return np.zeros(n)
dst_matrix = np.zeros((n, n))
for i in range(n):
for j in range(i):
d = dst(sqs[i], sqs[j])
dst_matrix[i, j] = d
dst_matrix[j, i] = d
model = AgglomerativeClustering(distance_threshold=threshold * l,
n_clusters=None, linkage='complete', affinity='precomputed')
clusters = model.fit_predict(dst_matrix)
return clusters
aligned_sqs_file = args.input
k = args.k
misses = args.misses
pools = args.pools
threshold = misses / k
if args.aligned is None:
output_profile_dir = aligned_sqs_file + '_profiles'
else:
output_profile_dir = args.aligned
if args.overview is None:
output_csv_file = aligned_sqs_file + '_overview.csv'
else:
output_csv_file = args.overview
df = pd.DataFrame(read_alignment(aligned_sqs_file))
df.columns = ['sq', 'count', 'str_count']
df['length'] = df['sq'].str.len()
groups = df.groupby(by='length')
unique_lengths = df['length'].sort_values(ascending=False).unique()
against = []
longest = unique_lengths[0]
df_group = groups.get_group(longest).copy()
clusters = cluster_group(df_group, longest)
df_group['cluster'] = clusters
alignments = {}
for cluster, cluster_df in df_group.groupby(by='cluster'):
alignment = AlignmentProfile(longest, cluster_df, global_alignment_ident_no
)
alignments[global_alignment_ident_no] = alignment
global_alignment_ident_no += 1
against.append(alignment)
start = time.time()
with Bar('Processing length groups...', max=len(unique_lengths) - 1) as bar:
for length in unique_lengths[1:]:
bar.next()
df_group = groups.get_group(length).copy()
def getDistanceAndAlignment(sq):
maxval = np.floor(threshold * len(sq))
min = np.inf
min_target = None
if maxval < 1:
return min, min_target
for target in against:
align_res = edlib.align(sq, target.repre_sq, mode='HW',
task='distance', k=maxval)
if align_res['editDistance'] != -1:
if min > align_res['editDistance']:
if align_res['editDistance'] == 0:
return align_res['editDistance'], target.ident
min = align_res['editDistance']
min_target = target
if min_target is not None:
min_target = min_target.ident
return min, min_target
x = length * threshold
if length * threshold >= 1:
with Pool(pools) as pool:
result = pool.map(getDistanceAndAlignment, df_group['sq'])
df_group['aligned'] = result
aligned = df_group[df_group['aligned'] != (np.inf, None)]
for index, row in aligned.iterrows():
to = alignments[row['aligned'][1]]
align_res = edlib.align(row.sq, to.repre_sq, mode='HW',
task='path')
nice = edlib.getNiceAlignment(align_res, row.sq, to.repre_sq)
to.add_sequence(row.sq, row['count'], nice, index)
unaligned = df_group[df_group['aligned'] == (np.inf, None)].copy()
clusters = cluster_group(unaligned, length)
unaligned['cluster'] = clusters
for cluster, cluster_df in unaligned.groupby(by='cluster'):
alignment = AlignmentProfile(length, cluster_df,
global_alignment_ident_no)
alignments[global_alignment_ident_no] = alignment
global_alignment_ident_no += 1
against.append(alignment)
else:
df_group['aligned'] = [(np.inf, None) for _ in range(len(df_group))
]
unaligned = df_group.copy()
unaligned['cluster'] = list(range(len(unaligned)))
s = time.time()
for i, row in unaligned.iterrows():
cluster_df = pd.DataFrame(row).T
alignment = AlignmentProfile(length, cluster_df,
global_alignment_ident_no)
alignments[global_alignment_ident_no] = alignment
global_alignment_ident_no += 1
against.append(alignment)
print(f'{aligned_sqs_file} elapsed: {time.time() - start}')
print(f'{aligned_sqs_file} writing...')
os.makedirs(output_profile_dir, exist_ok=True)
for alignment in against:
filename = f'{output_profile_dir}/{alignment.ident}.prf'
np.save(filename, alignment.profile)
all_alignments = []
for alignment in against:
itemized = alignment.seq_alignments
num_cols = itemized.columns[1:]
for col in num_cols:
itemized[col] = itemized[col].astype(int).apply(str)
itemized['alignment_actual'] = itemized[num_cols].agg(','.join, axis=1)
itemized = itemized.drop(columns=num_cols)
itemized.columns = ['index_df', 'alignment_actual']
itemized['alignment'] = alignment.ident
all_alignments.append(itemized)
all_alignments = pd.concat(all_alignments)
merged = pd.merge(all_alignments, df, left_on='index_df', right_index=True)
merged.drop(columns=['count', 'index_df']).to_csv(output_csv_file, index=False)
print(f'{aligned_sqs_file} done')
<|reserved_special_token_1|>
import os
import re
import time
import numpy as np
import pandas as pd
from sklearn.cluster import AgglomerativeClustering
import math
import edlib
from progress.bar import IncrementalBar as Bar
from multiprocessing import Pool
import argparse
# ---- command-line interface -------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument("--pools",
                    default=4,
                    type=int,
                    help="Number of threads to use in aligning. Default 4. Optional."
                    )
parser.add_argument("--misses",
                    default=5,
                    type=float,
                    help="Number of allowed substitutions/insertions/deletions in aligning a sequence of length k. "
                         "For longer sequences, this is scaled. "
                    )
parser.add_argument("--aligned",
                    default=None,
                    type=str,
                    help="Path to the output aligned directory. Required."
                    )
parser.add_argument("--overview",
                    default=None,
                    type=str,
                    help="Path to the output description csv. Required. Pairs with <--aligned> directory."
                    )
parser.add_argument("--k",
                    default=-1,
                    type=int,
                    help="Size of the k-mer created by BCALM. Required."
                    )
parser.add_argument("--input",
                    default=None,
                    type=str,
                    help="Path to the input file."
                    )
parser.set_defaults(all_sqs_result=False)
# Parse an empty argv when __file__ is absent (interactive/notebook run)
# so the defaults above are used instead of the host interpreter's argv.
args = parser.parse_args([] if "__file__" not in globals() else None)

# Base character -> profile row index; row 4 represents a gap ('-').
bases = dict(A=0, C=1, G=2, T=3)
bases['-'] = 4
# Reverse lookup: profile row index -> base character.
rev_bases = {v: k for k, v in bases.items()}
# Running identifier handed to each newly created AlignmentProfile.
global_alignment_ident_no = 0
# NOTE(review): unused in the visible code; the keys look like alignment
# markers (match '|', mismatch '.', gap '-') — confirm before removing.
operations = {
    '.': 0,
    '-': 1,
    '|': 0
}
class AlignmentProfile:
    """Positional base-count profile for one cluster of aligned sequences.

    `profile` is a 5 x width count matrix whose rows are indexed by the
    module-level `bases` table (A, C, G, T, '-'); `repre_sq` is the
    column-wise argmax consensus used as the target when further
    sequences are aligned against this profile.
    """

    def __init__(self, width, df, identifier):
        self.ident = identifier              # numeric id of this profile
        self.profile = np.zeros((5, width))  # per-base, per-column counts
        self.repre_sq = ""                   # consensus sequence
        self.seq_alignments = None           # pandas df: one row per member sequence
        self.seq_align_counter = -1          # negative labels for inserted gap columns
        self.calculate_profile(df)

    def calculate_profile(self, df):
        """Initialise the count matrix and consensus from the cluster df.

        Expects a string column 'sq' and a column 'count' holding one
        per-position count vector per sequence (all the same length).
        """
        # One row per member sequence: (source df index, zeros per column).
        self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.shape[1], dtype=np.int8)) for index in df.index])
        # Explode each sequence into one character per column; str.split('')
        # produces empty strings at both ends, hence the two dropped columns.
        unwrapped_sq = df['sq'].str.split('', expand=True)
        unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0], unwrapped_sq.columns[-1]])
        counts = np.stack(df['count'].values)
        for base in bases:
            # Mask positions that are not `base`, then accumulate the
            # surviving counts into that base's profile row.
            a = unwrapped_sq != base
            newX = np.ma.array(counts, mask=a)
            new_counts = newX.sum(axis=0)
            self.profile[bases[base], :] += new_counts
        # Consensus: most frequent base in every column.
        maxs = np.argmax(self.profile, axis=0)
        self.repre_sq = "".join([rev_bases[x] for x in maxs])

    def add_sequence(self, new_sq, new_counts, nice, sq_index):
        """Fold one aligned sequence into the profile in place.

        `nice` is edlib.getNiceAlignment output ('query_aligned' /
        'target_aligned'); `sq_index` is the sequence's row index in the
        source dataframe. Updates the count matrix, the per-sequence
        alignment table, and the consensus.
        """
        # Profile column where the aligned target region begins.
        offset = re.search(nice['target_aligned'].replace('-', ''), self.repre_sq).start(0)
        x = self.profile
        # '-' in the aligned target = insertion: widen the profile by one column.
        insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]
        for i, index in enumerate(insertions):
            # NOTE(review): `>=` makes this branch taken almost always, so
            # `value` is effectively always 0 — looks like `<=` (a bounds
            # check before reading column `index`) was intended; confirm.
            if x.shape[1] >= index:
                value = 0
            else:
                value = x[:, index].sum()
            x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)
            # Record the new gap column for every existing member sequence.
            self.seq_alignments.insert(loc=int(index + offset), column=self.seq_align_counter, value=1)
            self.seq_align_counter -= 1
        # '-' in the aligned query = deletion: duplicate the neighbouring
        # count so new_counts stays aligned with the widened profile.
        aligned_query = np.array(list(nice['query_aligned']))
        deletions = np.where(aligned_query == '-')[0]
        for i, index in enumerate(deletions):
            value = new_counts[index]
            new_counts = np.insert(new_counts, index, value, axis=0)
        # Accumulate the new sequence's counts column by column.
        i = offset
        for base, count in zip(aligned_query, new_counts):
            x[bases[base], i] += count
            i += 1
        self.profile = x
        # Per-column record for this sequence: -1 outside the aligned
        # window, 1 where the target had a gap, 0 elsewhere.
        added_alignment = -np.ones(self.profile.shape[1])
        for i, char in enumerate(nice['target_aligned']):
            if char == '-':
                added_alignment[offset + i] = 1
            else:
                added_alignment[offset + i] = 0
        self.seq_alignments.loc[-1] = [sq_index, *added_alignment]  # append a row
        self.seq_alignments.index = self.seq_alignments.index + 1   # renumber from 0
        # Recompute the consensus; gap columns are dropped from the string.
        maxs = np.argmax(self.profile, axis=0)
        self.repre_sq = "".join([rev_bases[x] for x in maxs if rev_bases[x] != '-'])
def dst_func(x, y):
    """Element-wise Hamming distance between two equal-length sequences.

    Inputs are expanded with list() so strings are compared character by
    character. The previous version passed the raw arguments to
    np.array; for strings that builds a 0-d array, so the comparison was
    whole-string equality and the "distance" collapsed to 0 or 1, which
    corrupted the pairwise distance matrix used by cluster_group.
    Assumes len(x) == len(y) (callers group sequences by length).
    """
    return (np.array(list(x)) != np.array(list(y))).sum()
def read_alignment(filename):
    """Yield (sequence, counts array, raw count string) per input line.

    Each line has the form ``SEQ;c1,c2,...`` — a sequence and its
    comma-separated per-position counts.
    """
    with open(filename) as handle:
        for record in handle:
            sequence, raw_counts = record.strip('\n').split(';')
            counts = np.array([int(c) for c in raw_counts.split(',')])
            yield sequence, counts, raw_counts
def cluster_group(df_group, l, dst=dst_func):
    """Cluster one length group of sequences by pairwise distance.

    A symmetric pairwise distance matrix is filled with `dst`, then
    complete-linkage agglomerative clustering is run with a distance
    cutoff of ``threshold * l`` (module-level ``threshold``). Returns one
    cluster label per sequence; groups of 0 or 1 sequences are trivially
    a single cluster.
    """
    sqs = df_group.reset_index()['sq']
    n = len(sqs)
    if n <= 1:
        return np.zeros(n)
    dst_matrix = np.zeros((n, n))
    # Fill the upper triangle once and mirror it.
    for row in range(n):
        for col in range(row + 1, n):
            d = dst(sqs[col], sqs[row])
            dst_matrix[row, col] = dst_matrix[col, row] = d
    model = AgglomerativeClustering(
        distance_threshold=threshold * l,
        n_clusters=None,
        linkage='complete',
        affinity='precomputed',
    )
    return model.fit_predict(dst_matrix)
# Unpack the CLI configuration into module-level settings.
aligned_sqs_file = args.input
k = args.k
misses = args.misses
pools = args.pools
# Allowed error fraction per aligned base (misses over the k-mer length).
threshold = misses / k
# Default the output locations next to the input file when not given.
output_profile_dir = args.aligned if args.aligned is not None else aligned_sqs_file + "_profiles"
output_csv_file = args.overview if args.overview is not None else aligned_sqs_file + "_overview.csv"
# Read the aligned sequences into a DataFrame: one row per sequence, with
# its per-position counts (numpy array) and the raw count string.
df = pd.DataFrame(read_alignment(aligned_sqs_file))
df.columns = ['sq', 'count', 'str_count']
df['length'] = df['sq'].str.len()
# df['alignment'] = -1 # every aligned sq has an alignment identification
# Sequences are processed by length, longest first: the longest group seeds
# the profiles, and shorter sequences are aligned against them below.
groups = df.groupby(by='length')
unique_lengths = df['length'].sort_values(ascending=False).unique()
against = []
longest = unique_lengths[0]
df_group = groups.get_group(longest).copy()
clusters = cluster_group(df_group, longest)
df_group['cluster'] = clusters
# Maps profile identifier -> AlignmentProfile (same objects as `against`).
alignments = {
}
# Seed one AlignmentProfile per cluster of the longest sequences.
# `global_alignment_ident_no` is a module-level counter handing out idents.
for cluster, cluster_df in df_group.groupby(by='cluster'):
    alignment = AlignmentProfile(longest, cluster_df, global_alignment_ident_no)
    alignments[global_alignment_ident_no] = alignment
    global_alignment_ident_no += 1
    against.append(alignment)
    # df.loc[df['sq'].isin(cluster_df['sq']), 'alignment'] = alignment.ident
# to each sequence
start = time.time()
# print(df.groupby(by='length').get_group(longest))
# print("running on shorter")
# Main pass: for every shorter length group, try to align each sequence to
# an existing profile; whatever cannot be aligned is clustered into new
# profiles, which then become targets for the next (shorter) group.
with Bar("Processing length groups...", max=len(unique_lengths) - 1) as bar:
    for length in unique_lengths[1:]:
        bar.next()
        df_group = groups.get_group(length).copy()

        # Returns (edit_distance, profile_ident) for the best-matching
        # profile in `against`, or (inf, None) when nothing is within the
        # scaled threshold. Defined inside the loop so it closes over the
        # current `against`; NOTE(review): passing a nested function to
        # Pool.map relies on fork-based pickling -- presumably this script
        # targets Unix; verify before running under spawn (Windows/macOS).
        def getDistanceAndAlignment(sq):
            # this is a fallback, it should not happen
            maxval = np.floor(threshold * len(sq))

            # NOTE(review): `min` shadows the builtin within this function.
            min = np.inf
            min_target = None

            if maxval < 1:
                return min,min_target

            for target in against:
                # k=maxval caps the search; editDistance is -1 when no
                # alignment within k exists.
                align_res = edlib.align(sq, target.repre_sq, mode='HW', task='distance', k=maxval)
                if align_res['editDistance'] != -1:
                    if min > align_res['editDistance']:
                        if align_res['editDistance'] == 0:
                            # Perfect hit -- no better match possible.
                            return align_res['editDistance'], target.ident

                        min = align_res['editDistance']
                        min_target = target

            if min_target is not None:
                min_target = min_target.ident

            return min, min_target

        # NOTE(review): `x` is computed but never used below.
        x = length * threshold
        if length * threshold >= 1:
            # try align
            with Pool(pools) as pool:
                result = pool.map(getDistanceAndAlignment, df_group['sq'])
            df_group['aligned'] = result

            # Merge successfully aligned sequences into their profiles,
            # re-aligning with task='path' to get the full alignment.
            aligned = df_group[df_group['aligned'] != (np.inf, None)]
            for index, row in aligned.iterrows():
                to = alignments[row['aligned'][1]]
                align_res = edlib.align(row.sq, to.repre_sq, mode='HW', task='path')
                nice = edlib.getNiceAlignment(align_res, row.sq, to.repre_sq)
                to.add_sequence(row.sq, row['count'], nice, index)
                # df.loc[df['sq'] == row.sq, 'alignment'] = to.ident

            # Cluster the leftovers into brand-new profiles and add them
            # to `against` so later (shorter) groups can target them.
            unaligned = df_group[df_group['aligned'] == (np.inf, None)].copy()
            clusters = cluster_group(unaligned, length)
            unaligned['cluster'] = clusters

            for cluster, cluster_df in unaligned.groupby(by='cluster'):
                alignment = AlignmentProfile(length, cluster_df, global_alignment_ident_no)
                alignments[global_alignment_ident_no] = alignment
                global_alignment_ident_no += 1
                against.append(alignment)
        else:
            # Scaled threshold below one edit: neither clustering nor
            # alignment is meaningful, so every sequence becomes its own
            # single-row profile.
            df_group["aligned"] = [(np.inf, None) for _ in range(len(df_group))]
            unaligned = df_group.copy()
            unaligned["cluster"] = list(range(len(unaligned)))
            # print(f"pseudoclustering elapsed: {time.time() - s}")

            s = time.time()
            for i, row in unaligned.iterrows():
                cluster_df = pd.DataFrame(row).T
                alignment = AlignmentProfile(length, cluster_df, global_alignment_ident_no)
                alignments[global_alignment_ident_no] = alignment
                global_alignment_ident_no += 1
                against.append(alignment)
            # print(f"alignment elapsed: {time.time() - s}")
# Report timing, then persist the profiles and the per-sequence overview.
print(f"{aligned_sqs_file} elapsed: {time.time() - start}")
print(f"{aligned_sqs_file} writing...")

os.makedirs(output_profile_dir, exist_ok=True)
# One file per profile, named by its identifier.
for alignment in against:
    filename = f"{output_profile_dir}/{alignment.ident}.prf"
    # NOTE(review): np.save appends ".npy" automatically, so the file on
    # disk is "<ident>.prf.npy" -- confirm downstream readers expect that.
    np.save(filename, alignment.profile)

# Build a readable per-sequence alignment table. In `seq_alignments` the
# first column is the source DataFrame index; the remaining columns hold
# per-position flags which are stringified and comma-joined here.
all_alignments = []
for alignment in against:
    itemized = alignment.seq_alignments
    num_cols = itemized.columns[1:]
    # index_col = itemized.columns[0]
    # translate to sth readable
    for col in num_cols:
        itemized[col] = itemized[col].astype(int).apply(str)

    itemized['alignment_actual'] = itemized[num_cols].agg(','.join, axis=1) # todo maybe cigar?
    itemized = itemized.drop(columns=num_cols)
    itemized.columns = ['index_df', 'alignment_actual']
    itemized['alignment'] = alignment.ident
    all_alignments.append(itemized)

all_alignments = pd.concat(all_alignments)
# Join alignment info back onto the original sequences by DataFrame index.
merged = pd.merge(all_alignments, df, left_on='index_df', right_index=True)

# write sequences in df (the numpy 'count' column is dropped; 'str_count'
# keeps the raw text form)
merged.drop(columns=['count', 'index_df']).to_csv(output_csv_file, index=False)
print(f"{aligned_sqs_file} done")
|
flexible
|
{
"blob_id": "7ae328bcfdec2d17fceb5d707f13cf495fde4469",
"index": 7490,
"step-1": "<mask token>\n\n\nclass AlignmentProfile:\n\n def __init__(self, width, df, identifier):\n self.ident = identifier\n self.profile = np.zeros((5, width))\n self.repre_sq = ''\n self.seq_alignments = None\n self.seq_align_counter = -1\n self.calculate_profile(df)\n\n def calculate_profile(self, df):\n self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.\n shape[1], dtype=np.int8)) for index in df.index])\n unwrapped_sq = df['sq'].str.split('', expand=True)\n unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],\n unwrapped_sq.columns[-1]])\n counts = np.stack(df['count'].values)\n for base in bases:\n a = unwrapped_sq != base\n newX = np.ma.array(counts, mask=a)\n new_counts = newX.sum(axis=0)\n self.profile[bases[base], :] += new_counts\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs])\n\n def add_sequence(self, new_sq, new_counts, nice, sq_index):\n offset = re.search(nice['target_aligned'].replace('-', ''), self.\n repre_sq).start(0)\n x = self.profile\n insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]\n for i, index in enumerate(insertions):\n if x.shape[1] >= index:\n value = 0\n else:\n value = x[:, index].sum()\n x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)\n self.seq_alignments.insert(loc=int(index + offset), column=self\n .seq_align_counter, value=1)\n self.seq_align_counter -= 1\n aligned_query = np.array(list(nice['query_aligned']))\n deletions = np.where(aligned_query == '-')[0]\n for i, index in enumerate(deletions):\n value = new_counts[index]\n new_counts = np.insert(new_counts, index, value, axis=0)\n i = offset\n for base, count in zip(aligned_query, new_counts):\n x[bases[base], i] += count\n i += 1\n self.profile = x\n added_alignment = -np.ones(self.profile.shape[1])\n for i, char in enumerate(nice['target_aligned']):\n if char == '-':\n added_alignment[offset + i] = 1\n else:\n added_alignment[offset + i] = 0\n 
self.seq_alignments.loc[-1] = [sq_index, *added_alignment]\n self.seq_alignments.index = self.seq_alignments.index + 1\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=\n '-'])\n\n\n<mask token>\n\n\ndef cluster_group(df_group, l, dst=dst_func):\n sqs = df_group.reset_index()['sq']\n n = len(sqs)\n if n <= 1:\n return np.zeros(n)\n dst_matrix = np.zeros((n, n))\n for i in range(n):\n for j in range(i):\n d = dst(sqs[i], sqs[j])\n dst_matrix[i, j] = d\n dst_matrix[j, i] = d\n model = AgglomerativeClustering(distance_threshold=threshold * l,\n n_clusters=None, linkage='complete', affinity='precomputed')\n clusters = model.fit_predict(dst_matrix)\n return clusters\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass AlignmentProfile:\n\n def __init__(self, width, df, identifier):\n self.ident = identifier\n self.profile = np.zeros((5, width))\n self.repre_sq = ''\n self.seq_alignments = None\n self.seq_align_counter = -1\n self.calculate_profile(df)\n\n def calculate_profile(self, df):\n self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.\n shape[1], dtype=np.int8)) for index in df.index])\n unwrapped_sq = df['sq'].str.split('', expand=True)\n unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],\n unwrapped_sq.columns[-1]])\n counts = np.stack(df['count'].values)\n for base in bases:\n a = unwrapped_sq != base\n newX = np.ma.array(counts, mask=a)\n new_counts = newX.sum(axis=0)\n self.profile[bases[base], :] += new_counts\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs])\n\n def add_sequence(self, new_sq, new_counts, nice, sq_index):\n offset = re.search(nice['target_aligned'].replace('-', ''), self.\n repre_sq).start(0)\n x = self.profile\n insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]\n for i, index in enumerate(insertions):\n if x.shape[1] >= index:\n value = 0\n else:\n value = x[:, index].sum()\n x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)\n self.seq_alignments.insert(loc=int(index + offset), column=self\n .seq_align_counter, value=1)\n self.seq_align_counter -= 1\n aligned_query = np.array(list(nice['query_aligned']))\n deletions = np.where(aligned_query == '-')[0]\n for i, index in enumerate(deletions):\n value = new_counts[index]\n new_counts = np.insert(new_counts, index, value, axis=0)\n i = offset\n for base, count in zip(aligned_query, new_counts):\n x[bases[base], i] += count\n i += 1\n self.profile = x\n added_alignment = -np.ones(self.profile.shape[1])\n for i, char in enumerate(nice['target_aligned']):\n if char == '-':\n added_alignment[offset + i] = 1\n else:\n added_alignment[offset + i] = 0\n 
self.seq_alignments.loc[-1] = [sq_index, *added_alignment]\n self.seq_alignments.index = self.seq_alignments.index + 1\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=\n '-'])\n\n\ndef dst_func(x, y):\n return (np.array(x) != np.array(y)).sum()\n\n\ndef read_alignment(filename):\n for line in open(filename):\n sq, count = line.strip('\\n').split(';')\n yield sq, np.array([int(x) for x in count.split(',')]), count\n\n\ndef cluster_group(df_group, l, dst=dst_func):\n sqs = df_group.reset_index()['sq']\n n = len(sqs)\n if n <= 1:\n return np.zeros(n)\n dst_matrix = np.zeros((n, n))\n for i in range(n):\n for j in range(i):\n d = dst(sqs[i], sqs[j])\n dst_matrix[i, j] = d\n dst_matrix[j, i] = d\n model = AgglomerativeClustering(distance_threshold=threshold * l,\n n_clusters=None, linkage='complete', affinity='precomputed')\n clusters = model.fit_predict(dst_matrix)\n return clusters\n\n\n<mask token>\n",
"step-3": "<mask token>\nparser.add_argument('--pools', default=4, type=int, help=\n 'Number of threads to use in aligning. Default 4. Optional.')\nparser.add_argument('--misses', default=5, type=float, help=\n 'Number of allowed substitutions/insertions/deletions in aligning a sequence of length k. For longer sequences, this is scaled. '\n )\nparser.add_argument('--aligned', default=None, type=str, help=\n 'Path to the output aligned directory. Required.')\nparser.add_argument('--overview', default=None, type=str, help=\n 'Path to the output description csv. Required. Pairs with <--aligned> directory.'\n )\nparser.add_argument('--k', default=-1, type=int, help=\n 'Size of the k-mer created by BCALM. Required.')\nparser.add_argument('--input', default=None, type=str, help=\n 'Path to the input file.')\nparser.set_defaults(all_sqs_result=False)\n<mask token>\n\n\nclass AlignmentProfile:\n\n def __init__(self, width, df, identifier):\n self.ident = identifier\n self.profile = np.zeros((5, width))\n self.repre_sq = ''\n self.seq_alignments = None\n self.seq_align_counter = -1\n self.calculate_profile(df)\n\n def calculate_profile(self, df):\n self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.\n shape[1], dtype=np.int8)) for index in df.index])\n unwrapped_sq = df['sq'].str.split('', expand=True)\n unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],\n unwrapped_sq.columns[-1]])\n counts = np.stack(df['count'].values)\n for base in bases:\n a = unwrapped_sq != base\n newX = np.ma.array(counts, mask=a)\n new_counts = newX.sum(axis=0)\n self.profile[bases[base], :] += new_counts\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs])\n\n def add_sequence(self, new_sq, new_counts, nice, sq_index):\n offset = re.search(nice['target_aligned'].replace('-', ''), self.\n repre_sq).start(0)\n x = self.profile\n insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]\n for i, index in 
enumerate(insertions):\n if x.shape[1] >= index:\n value = 0\n else:\n value = x[:, index].sum()\n x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)\n self.seq_alignments.insert(loc=int(index + offset), column=self\n .seq_align_counter, value=1)\n self.seq_align_counter -= 1\n aligned_query = np.array(list(nice['query_aligned']))\n deletions = np.where(aligned_query == '-')[0]\n for i, index in enumerate(deletions):\n value = new_counts[index]\n new_counts = np.insert(new_counts, index, value, axis=0)\n i = offset\n for base, count in zip(aligned_query, new_counts):\n x[bases[base], i] += count\n i += 1\n self.profile = x\n added_alignment = -np.ones(self.profile.shape[1])\n for i, char in enumerate(nice['target_aligned']):\n if char == '-':\n added_alignment[offset + i] = 1\n else:\n added_alignment[offset + i] = 0\n self.seq_alignments.loc[-1] = [sq_index, *added_alignment]\n self.seq_alignments.index = self.seq_alignments.index + 1\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=\n '-'])\n\n\ndef dst_func(x, y):\n return (np.array(x) != np.array(y)).sum()\n\n\ndef read_alignment(filename):\n for line in open(filename):\n sq, count = line.strip('\\n').split(';')\n yield sq, np.array([int(x) for x in count.split(',')]), count\n\n\ndef cluster_group(df_group, l, dst=dst_func):\n sqs = df_group.reset_index()['sq']\n n = len(sqs)\n if n <= 1:\n return np.zeros(n)\n dst_matrix = np.zeros((n, n))\n for i in range(n):\n for j in range(i):\n d = dst(sqs[i], sqs[j])\n dst_matrix[i, j] = d\n dst_matrix[j, i] = d\n model = AgglomerativeClustering(distance_threshold=threshold * l,\n n_clusters=None, linkage='complete', affinity='precomputed')\n clusters = model.fit_predict(dst_matrix)\n return clusters\n\n\n<mask token>\nif args.aligned is None:\n output_profile_dir = aligned_sqs_file + '_profiles'\nelse:\n output_profile_dir = args.aligned\nif args.overview is None:\n output_csv_file = 
aligned_sqs_file + '_overview.csv'\nelse:\n output_csv_file = args.overview\n<mask token>\nfor cluster, cluster_df in df_group.groupby(by='cluster'):\n alignment = AlignmentProfile(longest, cluster_df, global_alignment_ident_no\n )\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\n<mask token>\nwith Bar('Processing length groups...', max=len(unique_lengths) - 1) as bar:\n for length in unique_lengths[1:]:\n bar.next()\n df_group = groups.get_group(length).copy()\n\n def getDistanceAndAlignment(sq):\n maxval = np.floor(threshold * len(sq))\n min = np.inf\n min_target = None\n if maxval < 1:\n return min, min_target\n for target in against:\n align_res = edlib.align(sq, target.repre_sq, mode='HW',\n task='distance', k=maxval)\n if align_res['editDistance'] != -1:\n if min > align_res['editDistance']:\n if align_res['editDistance'] == 0:\n return align_res['editDistance'], target.ident\n min = align_res['editDistance']\n min_target = target\n if min_target is not None:\n min_target = min_target.ident\n return min, min_target\n x = length * threshold\n if length * threshold >= 1:\n with Pool(pools) as pool:\n result = pool.map(getDistanceAndAlignment, df_group['sq'])\n df_group['aligned'] = result\n aligned = df_group[df_group['aligned'] != (np.inf, None)]\n for index, row in aligned.iterrows():\n to = alignments[row['aligned'][1]]\n align_res = edlib.align(row.sq, to.repre_sq, mode='HW',\n task='path')\n nice = edlib.getNiceAlignment(align_res, row.sq, to.repre_sq)\n to.add_sequence(row.sq, row['count'], nice, index)\n unaligned = df_group[df_group['aligned'] == (np.inf, None)].copy()\n clusters = cluster_group(unaligned, length)\n unaligned['cluster'] = clusters\n for cluster, cluster_df in unaligned.groupby(by='cluster'):\n alignment = AlignmentProfile(length, cluster_df,\n global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n 
against.append(alignment)\n else:\n df_group['aligned'] = [(np.inf, None) for _ in range(len(df_group))\n ]\n unaligned = df_group.copy()\n unaligned['cluster'] = list(range(len(unaligned)))\n s = time.time()\n for i, row in unaligned.iterrows():\n cluster_df = pd.DataFrame(row).T\n alignment = AlignmentProfile(length, cluster_df,\n global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\nprint(f'{aligned_sqs_file} elapsed: {time.time() - start}')\nprint(f'{aligned_sqs_file} writing...')\nos.makedirs(output_profile_dir, exist_ok=True)\nfor alignment in against:\n filename = f'{output_profile_dir}/{alignment.ident}.prf'\n np.save(filename, alignment.profile)\n<mask token>\nfor alignment in against:\n itemized = alignment.seq_alignments\n num_cols = itemized.columns[1:]\n for col in num_cols:\n itemized[col] = itemized[col].astype(int).apply(str)\n itemized['alignment_actual'] = itemized[num_cols].agg(','.join, axis=1)\n itemized = itemized.drop(columns=num_cols)\n itemized.columns = ['index_df', 'alignment_actual']\n itemized['alignment'] = alignment.ident\n all_alignments.append(itemized)\n<mask token>\nmerged.drop(columns=['count', 'index_df']).to_csv(output_csv_file, index=False)\nprint(f'{aligned_sqs_file} done')\n",
"step-4": "<mask token>\nparser = argparse.ArgumentParser()\nparser.add_argument('--pools', default=4, type=int, help=\n 'Number of threads to use in aligning. Default 4. Optional.')\nparser.add_argument('--misses', default=5, type=float, help=\n 'Number of allowed substitutions/insertions/deletions in aligning a sequence of length k. For longer sequences, this is scaled. '\n )\nparser.add_argument('--aligned', default=None, type=str, help=\n 'Path to the output aligned directory. Required.')\nparser.add_argument('--overview', default=None, type=str, help=\n 'Path to the output description csv. Required. Pairs with <--aligned> directory.'\n )\nparser.add_argument('--k', default=-1, type=int, help=\n 'Size of the k-mer created by BCALM. Required.')\nparser.add_argument('--input', default=None, type=str, help=\n 'Path to the input file.')\nparser.set_defaults(all_sqs_result=False)\nargs = parser.parse_args([] if '__file__' not in globals() else None)\nbases = dict(A=0, C=1, G=2, T=3)\nbases['-'] = 4\nrev_bases = {v: k for k, v in bases.items()}\nglobal_alignment_ident_no = 0\noperations = {'.': 0, '-': 1, '|': 0}\n\n\nclass AlignmentProfile:\n\n def __init__(self, width, df, identifier):\n self.ident = identifier\n self.profile = np.zeros((5, width))\n self.repre_sq = ''\n self.seq_alignments = None\n self.seq_align_counter = -1\n self.calculate_profile(df)\n\n def calculate_profile(self, df):\n self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.\n shape[1], dtype=np.int8)) for index in df.index])\n unwrapped_sq = df['sq'].str.split('', expand=True)\n unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],\n unwrapped_sq.columns[-1]])\n counts = np.stack(df['count'].values)\n for base in bases:\n a = unwrapped_sq != base\n newX = np.ma.array(counts, mask=a)\n new_counts = newX.sum(axis=0)\n self.profile[bases[base], :] += new_counts\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs])\n\n def 
add_sequence(self, new_sq, new_counts, nice, sq_index):\n offset = re.search(nice['target_aligned'].replace('-', ''), self.\n repre_sq).start(0)\n x = self.profile\n insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]\n for i, index in enumerate(insertions):\n if x.shape[1] >= index:\n value = 0\n else:\n value = x[:, index].sum()\n x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)\n self.seq_alignments.insert(loc=int(index + offset), column=self\n .seq_align_counter, value=1)\n self.seq_align_counter -= 1\n aligned_query = np.array(list(nice['query_aligned']))\n deletions = np.where(aligned_query == '-')[0]\n for i, index in enumerate(deletions):\n value = new_counts[index]\n new_counts = np.insert(new_counts, index, value, axis=0)\n i = offset\n for base, count in zip(aligned_query, new_counts):\n x[bases[base], i] += count\n i += 1\n self.profile = x\n added_alignment = -np.ones(self.profile.shape[1])\n for i, char in enumerate(nice['target_aligned']):\n if char == '-':\n added_alignment[offset + i] = 1\n else:\n added_alignment[offset + i] = 0\n self.seq_alignments.loc[-1] = [sq_index, *added_alignment]\n self.seq_alignments.index = self.seq_alignments.index + 1\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=\n '-'])\n\n\ndef dst_func(x, y):\n return (np.array(x) != np.array(y)).sum()\n\n\ndef read_alignment(filename):\n for line in open(filename):\n sq, count = line.strip('\\n').split(';')\n yield sq, np.array([int(x) for x in count.split(',')]), count\n\n\ndef cluster_group(df_group, l, dst=dst_func):\n sqs = df_group.reset_index()['sq']\n n = len(sqs)\n if n <= 1:\n return np.zeros(n)\n dst_matrix = np.zeros((n, n))\n for i in range(n):\n for j in range(i):\n d = dst(sqs[i], sqs[j])\n dst_matrix[i, j] = d\n dst_matrix[j, i] = d\n model = AgglomerativeClustering(distance_threshold=threshold * l,\n n_clusters=None, linkage='complete', affinity='precomputed')\n 
clusters = model.fit_predict(dst_matrix)\n return clusters\n\n\naligned_sqs_file = args.input\nk = args.k\nmisses = args.misses\npools = args.pools\nthreshold = misses / k\nif args.aligned is None:\n output_profile_dir = aligned_sqs_file + '_profiles'\nelse:\n output_profile_dir = args.aligned\nif args.overview is None:\n output_csv_file = aligned_sqs_file + '_overview.csv'\nelse:\n output_csv_file = args.overview\ndf = pd.DataFrame(read_alignment(aligned_sqs_file))\ndf.columns = ['sq', 'count', 'str_count']\ndf['length'] = df['sq'].str.len()\ngroups = df.groupby(by='length')\nunique_lengths = df['length'].sort_values(ascending=False).unique()\nagainst = []\nlongest = unique_lengths[0]\ndf_group = groups.get_group(longest).copy()\nclusters = cluster_group(df_group, longest)\ndf_group['cluster'] = clusters\nalignments = {}\nfor cluster, cluster_df in df_group.groupby(by='cluster'):\n alignment = AlignmentProfile(longest, cluster_df, global_alignment_ident_no\n )\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\nstart = time.time()\nwith Bar('Processing length groups...', max=len(unique_lengths) - 1) as bar:\n for length in unique_lengths[1:]:\n bar.next()\n df_group = groups.get_group(length).copy()\n\n def getDistanceAndAlignment(sq):\n maxval = np.floor(threshold * len(sq))\n min = np.inf\n min_target = None\n if maxval < 1:\n return min, min_target\n for target in against:\n align_res = edlib.align(sq, target.repre_sq, mode='HW',\n task='distance', k=maxval)\n if align_res['editDistance'] != -1:\n if min > align_res['editDistance']:\n if align_res['editDistance'] == 0:\n return align_res['editDistance'], target.ident\n min = align_res['editDistance']\n min_target = target\n if min_target is not None:\n min_target = min_target.ident\n return min, min_target\n x = length * threshold\n if length * threshold >= 1:\n with Pool(pools) as pool:\n result = pool.map(getDistanceAndAlignment, df_group['sq'])\n 
df_group['aligned'] = result\n aligned = df_group[df_group['aligned'] != (np.inf, None)]\n for index, row in aligned.iterrows():\n to = alignments[row['aligned'][1]]\n align_res = edlib.align(row.sq, to.repre_sq, mode='HW',\n task='path')\n nice = edlib.getNiceAlignment(align_res, row.sq, to.repre_sq)\n to.add_sequence(row.sq, row['count'], nice, index)\n unaligned = df_group[df_group['aligned'] == (np.inf, None)].copy()\n clusters = cluster_group(unaligned, length)\n unaligned['cluster'] = clusters\n for cluster, cluster_df in unaligned.groupby(by='cluster'):\n alignment = AlignmentProfile(length, cluster_df,\n global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\n else:\n df_group['aligned'] = [(np.inf, None) for _ in range(len(df_group))\n ]\n unaligned = df_group.copy()\n unaligned['cluster'] = list(range(len(unaligned)))\n s = time.time()\n for i, row in unaligned.iterrows():\n cluster_df = pd.DataFrame(row).T\n alignment = AlignmentProfile(length, cluster_df,\n global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\nprint(f'{aligned_sqs_file} elapsed: {time.time() - start}')\nprint(f'{aligned_sqs_file} writing...')\nos.makedirs(output_profile_dir, exist_ok=True)\nfor alignment in against:\n filename = f'{output_profile_dir}/{alignment.ident}.prf'\n np.save(filename, alignment.profile)\nall_alignments = []\nfor alignment in against:\n itemized = alignment.seq_alignments\n num_cols = itemized.columns[1:]\n for col in num_cols:\n itemized[col] = itemized[col].astype(int).apply(str)\n itemized['alignment_actual'] = itemized[num_cols].agg(','.join, axis=1)\n itemized = itemized.drop(columns=num_cols)\n itemized.columns = ['index_df', 'alignment_actual']\n itemized['alignment'] = alignment.ident\n all_alignments.append(itemized)\nall_alignments = pd.concat(all_alignments)\nmerged = 
pd.merge(all_alignments, df, left_on='index_df', right_index=True)\nmerged.drop(columns=['count', 'index_df']).to_csv(output_csv_file, index=False)\nprint(f'{aligned_sqs_file} done')\n",
"step-5": "import os\nimport re\nimport time\nimport numpy as np\nimport pandas as pd\nfrom sklearn.cluster import AgglomerativeClustering\nimport math\nimport edlib\nfrom progress.bar import IncrementalBar as Bar\nfrom multiprocessing import Pool\nimport argparse\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--pools\",\n default=4,\n type=int,\n help=\"Number of threads to use in aligning. Default 4. Optional.\"\n )\nparser.add_argument(\"--misses\",\n default=5,\n type=float,\n help=\"Number of allowed substitutions/insertions/deletions in aligning a sequence of length k. \"\n \"For longer sequences, this is scaled. \"\n )\nparser.add_argument(\"--aligned\",\n default=None,\n type=str,\n help=\"Path to the output aligned directory. Required.\"\n )\nparser.add_argument(\"--overview\",\n default=None,\n type=str,\n help=\"Path to the output description csv. Required. Pairs with <--aligned> directory.\"\n )\nparser.add_argument(\"--k\",\n default=-1,\n type=int,\n help=\"Size of the k-mer created by BCALM. Required.\"\n )\nparser.add_argument(\"--input\",\n default=None,\n type=str,\n help=\"Path to the input file.\"\n )\nparser.set_defaults(all_sqs_result=False)\n\nargs = parser.parse_args([] if \"__file__\" not in globals() else None)\n\nbases = dict(A=0, C=1, G=2, T=3)\nbases['-'] = 4\nrev_bases = {v: k for k, v in bases.items()}\nglobal_alignment_ident_no = 0\n\n\noperations = {\n '.' 
: 0,\n '-' : 1,\n '|' : 0\n}\n\n\nclass AlignmentProfile:\n def __init__(self, width, df, identifier):\n self.ident = identifier\n\n self.profile = np.zeros((5, width))\n self.repre_sq = \"\"\n self.seq_alignments = None # this will be a pandas df\n self.seq_align_counter = -1\n\n self.calculate_profile(df)\n\n def calculate_profile(self, df):\n self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.shape[1], dtype=np.int8)) for index in df.index])\n\n unwrapped_sq = df['sq'].str.split('', expand=True)\n unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0], unwrapped_sq.columns[-1]])\n\n counts = np.stack(df['count'].values)\n\n for base in bases:\n a = unwrapped_sq != base\n newX = np.ma.array(counts, mask=a)\n new_counts = newX.sum(axis=0)\n self.profile[bases[base], :] += new_counts\n\n # repre_sq\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = \"\".join([rev_bases[x] for x in maxs])\n\n def add_sequence(self, new_sq, new_counts, nice, sq_index):\n offset = re.search(nice['target_aligned'].replace('-', ''), self.repre_sq).start(0)\n x = self.profile\n # padding with the following number of observed positions (sum of all bases)\n\n # pad profile with insertions\n insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]\n for i, index in enumerate(insertions):\n if x.shape[1] >= index:\n value = 0\n else:\n value = x[:, index].sum()\n x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)\n self.seq_alignments.insert(loc=int(index+offset), column=self.seq_align_counter, value=1)\n self.seq_align_counter -= 1\n\n # pad new counts with deletions\n aligned_query = np.array(list(nice['query_aligned']))\n deletions = np.where(aligned_query == '-')[0]\n for i, index in enumerate(deletions):\n value = new_counts[index]\n new_counts = np.insert(new_counts, index, value, axis=0)\n\n i = offset\n for base, count in zip(aligned_query, new_counts):\n x[bases[base], i] += count\n i += 1\n\n self.profile = x\n\n # store 
new sequence alignment\n added_alignment = -np.ones(self.profile.shape[1])\n for i, char in enumerate(nice['target_aligned']):\n if char == '-':\n added_alignment[offset + i] = 1\n else:\n added_alignment[offset + i] = 0\n self.seq_alignments.loc[-1] = [sq_index, *added_alignment] # adding a row\n self.seq_alignments.index = self.seq_alignments.index + 1 # shifting index\n\n # recalculate repre_sq -- the most probable one\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = \"\".join([rev_bases[x] for x in maxs if rev_bases[x] != '-']) # '-' is removed from the sq\n\n\ndef dst_func(x, y):\n return (np.array(x) != np.array(y)).sum()\n\n\ndef read_alignment(filename):\n for line in open(filename):\n sq, count = line.strip('\\n').split(';')\n yield sq, np.array([int(x) for x in count.split(',')]), count\n\n\ndef cluster_group(df_group, l, dst=dst_func):\n sqs = df_group.reset_index()['sq']\n n = len(sqs)\n\n if n <= 1:\n return np.zeros(n)\n\n dst_matrix = np.zeros((n, n))\n\n for i in range(n):\n for j in range(i):\n d = dst(sqs[i], sqs[j])\n dst_matrix[i, j] = d\n dst_matrix[j, i] = d\n\n model = AgglomerativeClustering(distance_threshold=threshold * l,\n n_clusters=None,\n linkage='complete',\n affinity='precomputed')\n clusters = model.fit_predict(dst_matrix)\n return clusters\n\n\naligned_sqs_file = args.input\nk = args.k\nmisses = args.misses\npools = args.pools\n\nthreshold = misses / k\nif args.aligned is None:\n output_profile_dir = aligned_sqs_file + \"_profiles\"\nelse:\n output_profile_dir = args.aligned\n\nif args.overview is None:\n output_csv_file = aligned_sqs_file + \"_overview.csv\"\nelse:\n output_csv_file = args.overview\n\n# read\ndf = pd.DataFrame(read_alignment(aligned_sqs_file))\ndf.columns = ['sq', 'count', 'str_count']\ndf['length'] = df['sq'].str.len()\n# df['alignment'] = -1 # every aligned sq has an alignment identification\ngroups = df.groupby(by='length')\n\nunique_lengths = 
df['length'].sort_values(ascending=False).unique()\n\nagainst = []\n\nlongest = unique_lengths[0]\ndf_group = groups.get_group(longest).copy()\n\nclusters = cluster_group(df_group, longest)\ndf_group['cluster'] = clusters\n\nalignments = {\n}\n\nfor cluster, cluster_df in df_group.groupby(by='cluster'):\n alignment = AlignmentProfile(longest, cluster_df, global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n\n global_alignment_ident_no += 1\n against.append(alignment)\n\n # df.loc[df['sq'].isin(cluster_df['sq']), 'alignment'] = alignment.ident\n\n # to each sequence\n\n\nstart = time.time()\n\n# print(df.groupby(by='length').get_group(longest))\n# print(\"running on shorter\")\n\nwith Bar(\"Processing length groups...\", max=len(unique_lengths) - 1) as bar:\n for length in unique_lengths[1:]:\n bar.next()\n df_group = groups.get_group(length).copy()\n\n def getDistanceAndAlignment(sq):\n # this is a fallback, it should not happen\n maxval = np.floor(threshold * len(sq))\n\n min = np.inf\n min_target = None\n\n if maxval < 1:\n return min,min_target\n\n for target in against:\n align_res = edlib.align(sq, target.repre_sq, mode='HW', task='distance', k=maxval)\n if align_res['editDistance'] != -1:\n if min > align_res['editDistance']:\n if align_res['editDistance'] == 0:\n return align_res['editDistance'], target.ident\n\n min = align_res['editDistance']\n min_target = target\n\n if min_target is not None:\n min_target = min_target.ident\n\n return min, min_target\n\n x = length * threshold\n if length * threshold >= 1:\n # try align\n with Pool(pools) as pool:\n result = pool.map(getDistanceAndAlignment, df_group['sq'])\n df_group['aligned'] = result\n\n # add aligned to profiles\n aligned = df_group[df_group['aligned'] != (np.inf, None)]\n for index, row in aligned.iterrows():\n to = alignments[row['aligned'][1]]\n align_res = edlib.align(row.sq, to.repre_sq, mode='HW', task='path')\n nice = edlib.getNiceAlignment(align_res, row.sq, 
to.repre_sq)\n to.add_sequence(row.sq, row['count'], nice, index)\n # df.loc[df['sq'] == row.sq, 'alignment'] = to.ident\n\n # cluster unaligned, add to against\n unaligned = df_group[df_group['aligned'] == (np.inf, None)].copy()\n clusters = cluster_group(unaligned, length)\n unaligned['cluster'] = clusters\n\n for cluster, cluster_df in unaligned.groupby(by='cluster'):\n alignment = AlignmentProfile(length, cluster_df, global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\n else:\n # threshold is less than one, no clustering nor alignment takes place\n df_group[\"aligned\"] = [(np.inf, None) for _ in range(len(df_group))]\n unaligned = df_group.copy()\n unaligned[\"cluster\"] = list(range(len(unaligned)))\n # print(f\"pseudoclustering elapsed: {time.time() - s}\")\n\n s = time.time()\n for i, row in unaligned.iterrows():\n cluster_df = pd.DataFrame(row).T\n alignment = AlignmentProfile(length, cluster_df, global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\n # print(f\"alignment elapsed: {time.time() - s}\")\n\n\nprint(f\"{aligned_sqs_file} elapsed: {time.time() - start}\")\nprint(f\"{aligned_sqs_file} writing...\")\n\n\nos.makedirs(output_profile_dir, exist_ok=True)\nfor alignment in against:\n filename = f\"{output_profile_dir}/{alignment.ident}.prf\"\n np.save(filename, alignment.profile)\n\n# get actual alignment for each sq\nall_alignments = []\nfor alignment in against:\n itemized = alignment.seq_alignments\n num_cols = itemized.columns[1:]\n # index_col = itemized.columns[0]\n # translate to sth readable\n for col in num_cols:\n itemized[col] = itemized[col].astype(int).apply(str)\n\n itemized['alignment_actual'] = itemized[num_cols].agg(','.join, axis=1) # todo maybe cigar?\n itemized = itemized.drop(columns=num_cols)\n itemized.columns = ['index_df', 'alignment_actual']\n 
itemized['alignment'] = alignment.ident\n all_alignments.append(itemized)\n\nall_alignments = pd.concat(all_alignments)\nmerged = pd.merge(all_alignments, df, left_on='index_df', right_index=True)\n\n\n# write sequences in df\nmerged.drop(columns=['count', 'index_df']).to_csv(output_csv_file, index=False)\nprint(f\"{aligned_sqs_file} done\")\n",
"step-ids": [
5,
7,
8,
9,
11
]
}
|
[
5,
7,
8,
9,
11
] |
<|reserved_special_token_0|>
class UpdateProduct(GenericAPIView):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get(self, request, *args, **kwargs):
data = self.get_queryset()
extract_sp = self.extract_filter_data(Product_Specification.objects
.values('name', 'value').filter(product=data.id))
extract_img = self.extract_filter_data(Product_Image.objects.values
('image').filter(product=data.id))
if data:
return Response(self.get_data({'product': data, 'specific':
extract_sp, 'img': extract_img}))
else:
return Response({'errors': False}, status=HTTP_404_NOT_FOUND)
<|reserved_special_token_0|>
def get_extra_data(self, id):
extra_data = {}
pl = Product_Platform.objects.values('platform').filter(product=id)
col = Product_Color.objects.values('color').filter(product=id)
siz = Product_Size.objects.values('size', 'type_size').filter(product
=id)
recom = Product_Recommended_Use.objects.values('recommended_use'
).filter(product=id)
terms = Product_Terms_Condition.objects.values('terms_condition'
).filter(product=id)
if pl.exists():
extra_data['platform'] = self.extract_filter_data(pl)
if col.exists():
extra_data['color'] = self.extract_filter_data(col)
if siz.exists():
extra_data['size'] = self.extract_filter_data(siz)
if recom.exists():
extra_data['recom_use'] = self.extract_filter_data(recom)
if terms.exists():
extra_data['term_condition'] = self.extract_filter_data(terms)
if extra_data:
return extra_data
else:
return False
def get_queryset(self):
try:
return Product.objects.get(id=self.kwargs['pk'])
except:
return False
def put(self, request, *args, **kwargs):
self._product_obj = self.get_queryset()
data = self.prepare_data(self.request.data, self.request.FILES)
main = self.validate_main_data(data)
if 'errors' in main:
return Response(main['errors'], status=HTTP_400_BAD_REQUEST)
else:
extra = self.validate_extra_data(data)
if extra:
if 'errors' in extra:
return Response(extra['errors'], status=
HTTP_400_BAD_REQUEST)
else:
main = self.update_main_data(data, main)
self.update_extra_data(data, extra)
return Response(self.get_data(main))
self.update_extra_data(data, False)
main = self.update_main_data(data, main)
return Response(self.get_data(main))
def get_data(self, main):
return {'user': User.objects.values('id', 'username').get(username=
'root'), 'name': main['product'].title, 'brand': main['product'
].brand.id, 'quantity': main['product'].quantity, 'price': main
['product'].price, 'currency': main['product'].currency,
'condition': main['product'].condition, 'description': main[
'product'].description, 'brands': self._brands, 'conditions':
type_condition_choices, 'currencys': type_currency_choices,
'colors': color_choices, 'sizes': type_size_choices, 'specific':
self.extract_filter_data(main['specific']), 'images': self.
extract_filter_data(main['img']), 'extra_data': self.
get_extra_data(main['product'].id)}
def prepare_data(self, data, img_data=None):
from json import loads
data = data['data']
data = loads(data)
data['img_current'] = {i.split('_')[2]: data['img_current'][i] for
i in data['img_current']}
if len(img_data) > 0:
img = {i.split('_')[1]: img_data[i] for i in img_data}
data['images'] = img
return data
def update_main_data(self, data, ser_data):
pro = ser_data['product'].update(self._product_obj, data)
for i in data['specific']:
if 'current' in i:
if i['current'] != i['name']:
ser_data['specific'].update(Product_Specification.
objects.get(product=self._product_obj.id, name=i[
'current']), i)
else:
i['product'] = self._product_obj
ser_data['specific'].create(i)
if 'images' in data:
img = data['images']
for i in img['images']:
ser_data['image'].update(Product_Image.objects.get(product=
self._product_obj.id, image=img['current'][i]), img[
'images'][i])
return {'product': pro, 'specific': Product_Specification.objects.
values('name', 'value').filter(product=pro.id), 'img':
Product_Image.objects.values('image').filter(product=pro.id)}
def update_extra_data(self, data, ser_data):
extra_d = {}
if ser_data and 'color' in ser_data:
if 'current' in data['color']:
if data['color']['current'] != data['color']['color']:
Product_Color.objects.filter(product=self._product_obj.id
).delete()
for i in data['color']['color']:
ser_data['color'].create({'product': self.
_product_obj, 'color': i})
else:
for i in data['color']['color']:
ser_data['color'].create({'product': self._product_obj,
'color': i})
else:
col = Product_Color.objects.filter(product=self._product_obj.id)
if col.exists():
col.delete()
if ser_data and 'size' in ser_data:
siz = data['size']['size'][0]
typ = data['size']['size'][1]
if 'current' in data['size']:
cur_siz = data['size']['current'][0]
cur_typ = data['size']['current'][1]
if siz != cur_siz:
ser_data['size'].update(Product_Size.objects.get(
product=self._product_obj.id), {'size': siz,
'type_size': typ})
elif typ != cur_typ:
ser_data['size'].update(Product_Size.objects.get(
product=self._product_obj.id), {'size': siz,
'type_size': typ})
else:
ser_data['size'].create({'product': self._product_obj,
'size': siz, 'type_size': typ})
else:
siz = Product_Size.objects.filter(product=self._product_obj.id)
if siz.exists():
siz.delete()
if ser_data and 'platform' in ser_data:
if 'platform_current' in data:
if data['platform_current'] != data['platform']:
extra_d['platform'] = ser_data['platform'].update(
Product_Platform.objects.get(product=self.
_product_obj.id), data['platform'])
else:
extra_d['platform'] = ser_data['platform'].create({
'product': self._product_obj, 'platform': data['platform']}
)
else:
pl = Product_Platform.objects.filter(product=self._product_obj.id)
if pl.exists():
pl.delete()
if ser_data and 'recom_use' in ser_data:
if 'recom_use_current' in data:
if data['recom_use_current'] != data['recom_use']:
extra_d['recom_use'] = ser_data['recom_use'].update(
Product_Recommended_Use.objects.get(product=self.
_product_obj.id), data['recom_use'])
else:
extra_d['recom_use'] = ser_data['recom_use'].create({
'product': self._product_obj, 'recommended_use': data[
'recom_use']})
else:
recom = Product_Recommended_Use.objects.filter(product=self.
_product_obj.id)
if recom.exists():
recom.delete()
if ser_data and 'term_condition' in ser_data:
if 'term_condition_current' in data:
if data['term_condition_current'] != data['term_condition']:
extra_d['term_condition'] = ser_data['term_condition'
].update(Product_Terms_Condition.objects.get(
product=self._product_obj.id), data['term_condition'])
else:
extra_d['term_condition'] = ser_data['term_condition'].create({
'product': self._product_obj, 'terms_condition': data[
'term_condition']})
else:
terms = Product_Terms_Condition.objects.filter(product=self.
_product_obj.id)
if terms.exists():
terms.delete()
extra_d['color'] = Product_Color.objects.filter(product=self.
_product_obj.id)
extra_d['size'] = Product_Size.objects.filter(product=self.
_product_obj.id)
return extra_d
def validate_main_data(self, data):
pro_ser = UpdateSerializer(instance=self._product_obj, data=data)
ser_data = {}
if pro_ser.is_valid():
ser_data['product'] = pro_ser
sp = self.validate_specification(self._product_obj, data[
'specific'])
if isinstance(sp, SpecificationSerializer):
ser_data['specific'] = sp
if 'images' in data:
data['images'] = {'images': data['images'], 'current':
data['img_current']}
img = self.validate_image(self._product_obj, data['images']
)
if isinstance(img, ImageSerializer):
ser_data['image'] = img
return ser_data
else:
return {'errors': img}
else:
return ser_data
else:
return {'errors': sp}
else:
return {'errors': pro_ser.errors}
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def validate_size(self, data):
size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})
if not size.is_valid():
return size.errors
return size
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UpdateProduct(GenericAPIView):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get(self, request, *args, **kwargs):
data = self.get_queryset()
extract_sp = self.extract_filter_data(Product_Specification.objects
.values('name', 'value').filter(product=data.id))
extract_img = self.extract_filter_data(Product_Image.objects.values
('image').filter(product=data.id))
if data:
return Response(self.get_data({'product': data, 'specific':
extract_sp, 'img': extract_img}))
else:
return Response({'errors': False}, status=HTTP_404_NOT_FOUND)
<|reserved_special_token_0|>
def get_extra_data(self, id):
extra_data = {}
pl = Product_Platform.objects.values('platform').filter(product=id)
col = Product_Color.objects.values('color').filter(product=id)
siz = Product_Size.objects.values('size', 'type_size').filter(product
=id)
recom = Product_Recommended_Use.objects.values('recommended_use'
).filter(product=id)
terms = Product_Terms_Condition.objects.values('terms_condition'
).filter(product=id)
if pl.exists():
extra_data['platform'] = self.extract_filter_data(pl)
if col.exists():
extra_data['color'] = self.extract_filter_data(col)
if siz.exists():
extra_data['size'] = self.extract_filter_data(siz)
if recom.exists():
extra_data['recom_use'] = self.extract_filter_data(recom)
if terms.exists():
extra_data['term_condition'] = self.extract_filter_data(terms)
if extra_data:
return extra_data
else:
return False
def get_queryset(self):
try:
return Product.objects.get(id=self.kwargs['pk'])
except:
return False
def put(self, request, *args, **kwargs):
self._product_obj = self.get_queryset()
data = self.prepare_data(self.request.data, self.request.FILES)
main = self.validate_main_data(data)
if 'errors' in main:
return Response(main['errors'], status=HTTP_400_BAD_REQUEST)
else:
extra = self.validate_extra_data(data)
if extra:
if 'errors' in extra:
return Response(extra['errors'], status=
HTTP_400_BAD_REQUEST)
else:
main = self.update_main_data(data, main)
self.update_extra_data(data, extra)
return Response(self.get_data(main))
self.update_extra_data(data, False)
main = self.update_main_data(data, main)
return Response(self.get_data(main))
def get_data(self, main):
return {'user': User.objects.values('id', 'username').get(username=
'root'), 'name': main['product'].title, 'brand': main['product'
].brand.id, 'quantity': main['product'].quantity, 'price': main
['product'].price, 'currency': main['product'].currency,
'condition': main['product'].condition, 'description': main[
'product'].description, 'brands': self._brands, 'conditions':
type_condition_choices, 'currencys': type_currency_choices,
'colors': color_choices, 'sizes': type_size_choices, 'specific':
self.extract_filter_data(main['specific']), 'images': self.
extract_filter_data(main['img']), 'extra_data': self.
get_extra_data(main['product'].id)}
def prepare_data(self, data, img_data=None):
from json import loads
data = data['data']
data = loads(data)
data['img_current'] = {i.split('_')[2]: data['img_current'][i] for
i in data['img_current']}
if len(img_data) > 0:
img = {i.split('_')[1]: img_data[i] for i in img_data}
data['images'] = img
return data
def update_main_data(self, data, ser_data):
pro = ser_data['product'].update(self._product_obj, data)
for i in data['specific']:
if 'current' in i:
if i['current'] != i['name']:
ser_data['specific'].update(Product_Specification.
objects.get(product=self._product_obj.id, name=i[
'current']), i)
else:
i['product'] = self._product_obj
ser_data['specific'].create(i)
if 'images' in data:
img = data['images']
for i in img['images']:
ser_data['image'].update(Product_Image.objects.get(product=
self._product_obj.id, image=img['current'][i]), img[
'images'][i])
return {'product': pro, 'specific': Product_Specification.objects.
values('name', 'value').filter(product=pro.id), 'img':
Product_Image.objects.values('image').filter(product=pro.id)}
def update_extra_data(self, data, ser_data):
extra_d = {}
if ser_data and 'color' in ser_data:
if 'current' in data['color']:
if data['color']['current'] != data['color']['color']:
Product_Color.objects.filter(product=self._product_obj.id
).delete()
for i in data['color']['color']:
ser_data['color'].create({'product': self.
_product_obj, 'color': i})
else:
for i in data['color']['color']:
ser_data['color'].create({'product': self._product_obj,
'color': i})
else:
col = Product_Color.objects.filter(product=self._product_obj.id)
if col.exists():
col.delete()
if ser_data and 'size' in ser_data:
siz = data['size']['size'][0]
typ = data['size']['size'][1]
if 'current' in data['size']:
cur_siz = data['size']['current'][0]
cur_typ = data['size']['current'][1]
if siz != cur_siz:
ser_data['size'].update(Product_Size.objects.get(
product=self._product_obj.id), {'size': siz,
'type_size': typ})
elif typ != cur_typ:
ser_data['size'].update(Product_Size.objects.get(
product=self._product_obj.id), {'size': siz,
'type_size': typ})
else:
ser_data['size'].create({'product': self._product_obj,
'size': siz, 'type_size': typ})
else:
siz = Product_Size.objects.filter(product=self._product_obj.id)
if siz.exists():
siz.delete()
if ser_data and 'platform' in ser_data:
if 'platform_current' in data:
if data['platform_current'] != data['platform']:
extra_d['platform'] = ser_data['platform'].update(
Product_Platform.objects.get(product=self.
_product_obj.id), data['platform'])
else:
extra_d['platform'] = ser_data['platform'].create({
'product': self._product_obj, 'platform': data['platform']}
)
else:
pl = Product_Platform.objects.filter(product=self._product_obj.id)
if pl.exists():
pl.delete()
if ser_data and 'recom_use' in ser_data:
if 'recom_use_current' in data:
if data['recom_use_current'] != data['recom_use']:
extra_d['recom_use'] = ser_data['recom_use'].update(
Product_Recommended_Use.objects.get(product=self.
_product_obj.id), data['recom_use'])
else:
extra_d['recom_use'] = ser_data['recom_use'].create({
'product': self._product_obj, 'recommended_use': data[
'recom_use']})
else:
recom = Product_Recommended_Use.objects.filter(product=self.
_product_obj.id)
if recom.exists():
recom.delete()
if ser_data and 'term_condition' in ser_data:
if 'term_condition_current' in data:
if data['term_condition_current'] != data['term_condition']:
extra_d['term_condition'] = ser_data['term_condition'
].update(Product_Terms_Condition.objects.get(
product=self._product_obj.id), data['term_condition'])
else:
extra_d['term_condition'] = ser_data['term_condition'].create({
'product': self._product_obj, 'terms_condition': data[
'term_condition']})
else:
terms = Product_Terms_Condition.objects.filter(product=self.
_product_obj.id)
if terms.exists():
terms.delete()
extra_d['color'] = Product_Color.objects.filter(product=self.
_product_obj.id)
extra_d['size'] = Product_Size.objects.filter(product=self.
_product_obj.id)
return extra_d
def validate_main_data(self, data):
pro_ser = UpdateSerializer(instance=self._product_obj, data=data)
ser_data = {}
if pro_ser.is_valid():
ser_data['product'] = pro_ser
sp = self.validate_specification(self._product_obj, data[
'specific'])
if isinstance(sp, SpecificationSerializer):
ser_data['specific'] = sp
if 'images' in data:
data['images'] = {'images': data['images'], 'current':
data['img_current']}
img = self.validate_image(self._product_obj, data['images']
)
if isinstance(img, ImageSerializer):
ser_data['image'] = img
return ser_data
else:
return {'errors': img}
else:
return ser_data
else:
return {'errors': sp}
else:
return {'errors': pro_ser.errors}
def validate_extra_data(self, data):
ser_data = {}
if 'color' in data:
col = self.validate_color(data['color']['color'])
if isinstance(col, ColorSerializer):
ser_data['color'] = col
else:
return {'errors': col}
if 'size' in data:
siz = self.validate_size(data['size']['size'])
if isinstance(siz, SizeSerializer):
ser_data['size'] = siz
else:
return {'errors': siz}
if 'platform' in data:
pl = PlatformSerializer(data={'platform': data['platform']})
if pl.is_valid():
ser_data['platform'] = pl
else:
return {'errors': pl.errors}
if 'recom_use' in data:
recom = RecommendedUseSerializer(data={'recommended_use': data[
'recom_use']})
if recom.is_valid():
ser_data['recom_use'] = recom
else:
return {'errors': recom.errors}
if 'term_condition' in data:
term = TermConditionSerializer(data={'terms_condition': data[
'term_condition']})
if term.is_valid():
ser_data['term_condition'] = term
else:
return {'errors': term.errors}
if ser_data:
return ser_data
else:
return False
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def validate_color(self, data):
for i in data:
col = ColorSerializer(data={'color': i})
if not col.is_valid():
return col.errors
return col
def validate_size(self, data):
size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})
if not size.is_valid():
return size.errors
return size
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UpdateProduct(GenericAPIView):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get(self, request, *args, **kwargs):
data = self.get_queryset()
extract_sp = self.extract_filter_data(Product_Specification.objects
.values('name', 'value').filter(product=data.id))
extract_img = self.extract_filter_data(Product_Image.objects.values
('image').filter(product=data.id))
if data:
return Response(self.get_data({'product': data, 'specific':
extract_sp, 'img': extract_img}))
else:
return Response({'errors': False}, status=HTTP_404_NOT_FOUND)
def extract_filter_data(self, data):
arr = []
for i in data:
arr.append(i)
return arr
def get_extra_data(self, id):
extra_data = {}
pl = Product_Platform.objects.values('platform').filter(product=id)
col = Product_Color.objects.values('color').filter(product=id)
siz = Product_Size.objects.values('size', 'type_size').filter(product
=id)
recom = Product_Recommended_Use.objects.values('recommended_use'
).filter(product=id)
terms = Product_Terms_Condition.objects.values('terms_condition'
).filter(product=id)
if pl.exists():
extra_data['platform'] = self.extract_filter_data(pl)
if col.exists():
extra_data['color'] = self.extract_filter_data(col)
if siz.exists():
extra_data['size'] = self.extract_filter_data(siz)
if recom.exists():
extra_data['recom_use'] = self.extract_filter_data(recom)
if terms.exists():
extra_data['term_condition'] = self.extract_filter_data(terms)
if extra_data:
return extra_data
else:
return False
def get_queryset(self):
try:
return Product.objects.get(id=self.kwargs['pk'])
except:
return False
def put(self, request, *args, **kwargs):
self._product_obj = self.get_queryset()
data = self.prepare_data(self.request.data, self.request.FILES)
main = self.validate_main_data(data)
if 'errors' in main:
return Response(main['errors'], status=HTTP_400_BAD_REQUEST)
else:
extra = self.validate_extra_data(data)
if extra:
if 'errors' in extra:
return Response(extra['errors'], status=
HTTP_400_BAD_REQUEST)
else:
main = self.update_main_data(data, main)
self.update_extra_data(data, extra)
return Response(self.get_data(main))
self.update_extra_data(data, False)
main = self.update_main_data(data, main)
return Response(self.get_data(main))
def get_data(self, main):
return {'user': User.objects.values('id', 'username').get(username=
'root'), 'name': main['product'].title, 'brand': main['product'
].brand.id, 'quantity': main['product'].quantity, 'price': main
['product'].price, 'currency': main['product'].currency,
'condition': main['product'].condition, 'description': main[
'product'].description, 'brands': self._brands, 'conditions':
type_condition_choices, 'currencys': type_currency_choices,
'colors': color_choices, 'sizes': type_size_choices, 'specific':
self.extract_filter_data(main['specific']), 'images': self.
extract_filter_data(main['img']), 'extra_data': self.
get_extra_data(main['product'].id)}
def prepare_data(self, data, img_data=None):
from json import loads
data = data['data']
data = loads(data)
data['img_current'] = {i.split('_')[2]: data['img_current'][i] for
i in data['img_current']}
if len(img_data) > 0:
img = {i.split('_')[1]: img_data[i] for i in img_data}
data['images'] = img
return data
def update_main_data(self, data, ser_data):
pro = ser_data['product'].update(self._product_obj, data)
for i in data['specific']:
if 'current' in i:
if i['current'] != i['name']:
ser_data['specific'].update(Product_Specification.
objects.get(product=self._product_obj.id, name=i[
'current']), i)
else:
i['product'] = self._product_obj
ser_data['specific'].create(i)
if 'images' in data:
img = data['images']
for i in img['images']:
ser_data['image'].update(Product_Image.objects.get(product=
self._product_obj.id, image=img['current'][i]), img[
'images'][i])
return {'product': pro, 'specific': Product_Specification.objects.
values('name', 'value').filter(product=pro.id), 'img':
Product_Image.objects.values('image').filter(product=pro.id)}
def update_extra_data(self, data, ser_data):
extra_d = {}
if ser_data and 'color' in ser_data:
if 'current' in data['color']:
if data['color']['current'] != data['color']['color']:
Product_Color.objects.filter(product=self._product_obj.id
).delete()
for i in data['color']['color']:
ser_data['color'].create({'product': self.
_product_obj, 'color': i})
else:
for i in data['color']['color']:
ser_data['color'].create({'product': self._product_obj,
'color': i})
else:
col = Product_Color.objects.filter(product=self._product_obj.id)
if col.exists():
col.delete()
if ser_data and 'size' in ser_data:
siz = data['size']['size'][0]
typ = data['size']['size'][1]
if 'current' in data['size']:
cur_siz = data['size']['current'][0]
cur_typ = data['size']['current'][1]
if siz != cur_siz:
ser_data['size'].update(Product_Size.objects.get(
product=self._product_obj.id), {'size': siz,
'type_size': typ})
elif typ != cur_typ:
ser_data['size'].update(Product_Size.objects.get(
product=self._product_obj.id), {'size': siz,
'type_size': typ})
else:
ser_data['size'].create({'product': self._product_obj,
'size': siz, 'type_size': typ})
else:
siz = Product_Size.objects.filter(product=self._product_obj.id)
if siz.exists():
siz.delete()
if ser_data and 'platform' in ser_data:
if 'platform_current' in data:
if data['platform_current'] != data['platform']:
extra_d['platform'] = ser_data['platform'].update(
Product_Platform.objects.get(product=self.
_product_obj.id), data['platform'])
else:
extra_d['platform'] = ser_data['platform'].create({
'product': self._product_obj, 'platform': data['platform']}
)
else:
pl = Product_Platform.objects.filter(product=self._product_obj.id)
if pl.exists():
pl.delete()
if ser_data and 'recom_use' in ser_data:
if 'recom_use_current' in data:
if data['recom_use_current'] != data['recom_use']:
extra_d['recom_use'] = ser_data['recom_use'].update(
Product_Recommended_Use.objects.get(product=self.
_product_obj.id), data['recom_use'])
else:
extra_d['recom_use'] = ser_data['recom_use'].create({
'product': self._product_obj, 'recommended_use': data[
'recom_use']})
else:
recom = Product_Recommended_Use.objects.filter(product=self.
_product_obj.id)
if recom.exists():
recom.delete()
if ser_data and 'term_condition' in ser_data:
if 'term_condition_current' in data:
if data['term_condition_current'] != data['term_condition']:
extra_d['term_condition'] = ser_data['term_condition'
].update(Product_Terms_Condition.objects.get(
product=self._product_obj.id), data['term_condition'])
else:
extra_d['term_condition'] = ser_data['term_condition'].create({
'product': self._product_obj, 'terms_condition': data[
'term_condition']})
else:
terms = Product_Terms_Condition.objects.filter(product=self.
_product_obj.id)
if terms.exists():
terms.delete()
extra_d['color'] = Product_Color.objects.filter(product=self.
_product_obj.id)
extra_d['size'] = Product_Size.objects.filter(product=self.
_product_obj.id)
return extra_d
def validate_main_data(self, data):
pro_ser = UpdateSerializer(instance=self._product_obj, data=data)
ser_data = {}
if pro_ser.is_valid():
ser_data['product'] = pro_ser
sp = self.validate_specification(self._product_obj, data[
'specific'])
if isinstance(sp, SpecificationSerializer):
ser_data['specific'] = sp
if 'images' in data:
data['images'] = {'images': data['images'], 'current':
data['img_current']}
img = self.validate_image(self._product_obj, data['images']
)
if isinstance(img, ImageSerializer):
ser_data['image'] = img
return ser_data
else:
return {'errors': img}
else:
return ser_data
else:
return {'errors': sp}
else:
return {'errors': pro_ser.errors}
def validate_extra_data(self, data):
ser_data = {}
if 'color' in data:
col = self.validate_color(data['color']['color'])
if isinstance(col, ColorSerializer):
ser_data['color'] = col
else:
return {'errors': col}
if 'size' in data:
siz = self.validate_size(data['size']['size'])
if isinstance(siz, SizeSerializer):
ser_data['size'] = siz
else:
return {'errors': siz}
if 'platform' in data:
pl = PlatformSerializer(data={'platform': data['platform']})
if pl.is_valid():
ser_data['platform'] = pl
else:
return {'errors': pl.errors}
if 'recom_use' in data:
recom = RecommendedUseSerializer(data={'recommended_use': data[
'recom_use']})
if recom.is_valid():
ser_data['recom_use'] = recom
else:
return {'errors': recom.errors}
if 'term_condition' in data:
term = TermConditionSerializer(data={'terms_condition': data[
'term_condition']})
if term.is_valid():
ser_data['term_condition'] = term
else:
return {'errors': term.errors}
if ser_data:
return ser_data
else:
return False
def validate_specification(self, pro, data):
for i in data:
sp = SpecificationSerializer(data={'name': i['name'], 'value':
i['value']})
if not sp.is_valid():
return sp.errors
return sp
def validate_image(self, pro, data):
for i in data['images']:
img = ImageSerializer(data={'image': data['images'][i]})
if not img.is_valid():
return img.errors
return img
def validate_color(self, data):
for i in data:
col = ColorSerializer(data={'color': i})
if not col.is_valid():
return col.errors
return col
def validate_size(self, data):
size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})
if not size.is_valid():
return size.errors
return size
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UpdateProduct(GenericAPIView):
serializer_class = UpdateSerializer
_product_obj = None
_brands = Brand.objects.values('id', 'name')
def get(self, request, *args, **kwargs):
data = self.get_queryset()
extract_sp = self.extract_filter_data(Product_Specification.objects
.values('name', 'value').filter(product=data.id))
extract_img = self.extract_filter_data(Product_Image.objects.values
('image').filter(product=data.id))
if data:
return Response(self.get_data({'product': data, 'specific':
extract_sp, 'img': extract_img}))
else:
return Response({'errors': False}, status=HTTP_404_NOT_FOUND)
def extract_filter_data(self, data):
arr = []
for i in data:
arr.append(i)
return arr
def get_extra_data(self, id):
extra_data = {}
pl = Product_Platform.objects.values('platform').filter(product=id)
col = Product_Color.objects.values('color').filter(product=id)
siz = Product_Size.objects.values('size', 'type_size').filter(product
=id)
recom = Product_Recommended_Use.objects.values('recommended_use'
).filter(product=id)
terms = Product_Terms_Condition.objects.values('terms_condition'
).filter(product=id)
if pl.exists():
extra_data['platform'] = self.extract_filter_data(pl)
if col.exists():
extra_data['color'] = self.extract_filter_data(col)
if siz.exists():
extra_data['size'] = self.extract_filter_data(siz)
if recom.exists():
extra_data['recom_use'] = self.extract_filter_data(recom)
if terms.exists():
extra_data['term_condition'] = self.extract_filter_data(terms)
if extra_data:
return extra_data
else:
return False
def get_queryset(self):
try:
return Product.objects.get(id=self.kwargs['pk'])
except:
return False
def put(self, request, *args, **kwargs):
self._product_obj = self.get_queryset()
data = self.prepare_data(self.request.data, self.request.FILES)
main = self.validate_main_data(data)
if 'errors' in main:
return Response(main['errors'], status=HTTP_400_BAD_REQUEST)
else:
extra = self.validate_extra_data(data)
if extra:
if 'errors' in extra:
return Response(extra['errors'], status=
HTTP_400_BAD_REQUEST)
else:
main = self.update_main_data(data, main)
self.update_extra_data(data, extra)
return Response(self.get_data(main))
self.update_extra_data(data, False)
main = self.update_main_data(data, main)
return Response(self.get_data(main))
def get_data(self, main):
return {'user': User.objects.values('id', 'username').get(username=
'root'), 'name': main['product'].title, 'brand': main['product'
].brand.id, 'quantity': main['product'].quantity, 'price': main
['product'].price, 'currency': main['product'].currency,
'condition': main['product'].condition, 'description': main[
'product'].description, 'brands': self._brands, 'conditions':
type_condition_choices, 'currencys': type_currency_choices,
'colors': color_choices, 'sizes': type_size_choices, 'specific':
self.extract_filter_data(main['specific']), 'images': self.
extract_filter_data(main['img']), 'extra_data': self.
get_extra_data(main['product'].id)}
def prepare_data(self, data, img_data=None):
from json import loads
data = data['data']
data = loads(data)
data['img_current'] = {i.split('_')[2]: data['img_current'][i] for
i in data['img_current']}
if len(img_data) > 0:
img = {i.split('_')[1]: img_data[i] for i in img_data}
data['images'] = img
return data
    def update_main_data(self, data, ser_data):
        """Persist the core product fields, specifications and images.

        `ser_data` holds the validated serializers from
        validate_main_data(); returns the saved product plus fresh
        querysets of its specifications and images.
        """
        # Save the main product fields through the validated serializer.
        pro = ser_data['product'].update(self._product_obj, data)
        for i in data['specific']:
            # Entries carrying a 'current' key edit an existing row (looked
            # up by its previous name); everything else is a new row.
            if 'current' in i:
                if i['current'] != i['name']:
                    ser_data['specific'].update(Product_Specification.
                        objects.get(product=self._product_obj.id, name=i[
                        'current']), i)
            else:
                i['product'] = self._product_obj
                ser_data['specific'].create(i)
        if 'images' in data:
            img = data['images']
            # Replace each stored image (keyed via img['current']) with its
            # newly uploaded counterpart.
            for i in img['images']:
                ser_data['image'].update(Product_Image.objects.get(product=
                    self._product_obj.id, image=img['current'][i]), img[
                    'images'][i])
        return {'product': pro, 'specific': Product_Specification.objects.
            values('name', 'value').filter(product=pro.id), 'img':
            Product_Image.objects.values('image').filter(product=pro.id)}
    def update_extra_data(self, data, ser_data):
        """Synchronize the optional related rows (color, size, platform,
        recommended use, terms & conditions).

        For each category: if a validated serializer is present in
        `ser_data`, create/update the rows; otherwise delete whatever rows
        exist (the category was removed from the product). When called with
        ser_data=False every category is treated as removed.
        """
        extra_d = {}
        # --- colors: full delete-and-recreate when the set changed -------
        if ser_data and 'color' in ser_data:
            if 'current' in data['color']:
                if data['color']['current'] != data['color']['color']:
                    Product_Color.objects.filter(product=self._product_obj.id
                        ).delete()
                    for i in data['color']['color']:
                        ser_data['color'].create({'product': self.
                            _product_obj, 'color': i})
            else:
                for i in data['color']['color']:
                    ser_data['color'].create({'product': self._product_obj,
                        'color': i})
        else:
            col = Product_Color.objects.filter(product=self._product_obj.id)
            if col.exists():
                col.delete()
        # --- size: update in place when either value changed --------------
        if ser_data and 'size' in ser_data:
            siz = data['size']['size'][0]
            typ = data['size']['size'][1]
            if 'current' in data['size']:
                cur_siz = data['size']['current'][0]
                cur_typ = data['size']['current'][1]
                if siz != cur_siz:
                    ser_data['size'].update(Product_Size.objects.get(
                        product=self._product_obj.id), {'size': siz,
                        'type_size': typ})
                elif typ != cur_typ:
                    ser_data['size'].update(Product_Size.objects.get(
                        product=self._product_obj.id), {'size': siz,
                        'type_size': typ})
            else:
                ser_data['size'].create({'product': self._product_obj,
                    'size': siz, 'type_size': typ})
        else:
            siz = Product_Size.objects.filter(product=self._product_obj.id)
            if siz.exists():
                siz.delete()
        # --- platform ------------------------------------------------------
        if ser_data and 'platform' in ser_data:
            if 'platform_current' in data:
                if data['platform_current'] != data['platform']:
                    extra_d['platform'] = ser_data['platform'].update(
                        Product_Platform.objects.get(product=self.
                        _product_obj.id), data['platform'])
            else:
                extra_d['platform'] = ser_data['platform'].create({
                    'product': self._product_obj, 'platform': data['platform']}
                    )
        else:
            pl = Product_Platform.objects.filter(product=self._product_obj.id)
            if pl.exists():
                pl.delete()
        # --- recommended use ----------------------------------------------
        if ser_data and 'recom_use' in ser_data:
            if 'recom_use_current' in data:
                if data['recom_use_current'] != data['recom_use']:
                    extra_d['recom_use'] = ser_data['recom_use'].update(
                        Product_Recommended_Use.objects.get(product=self.
                        _product_obj.id), data['recom_use'])
            else:
                extra_d['recom_use'] = ser_data['recom_use'].create({
                    'product': self._product_obj, 'recommended_use': data[
                    'recom_use']})
        else:
            recom = Product_Recommended_Use.objects.filter(product=self.
                _product_obj.id)
            if recom.exists():
                recom.delete()
        # --- terms & conditions -------------------------------------------
        if ser_data and 'term_condition' in ser_data:
            if 'term_condition_current' in data:
                if data['term_condition_current'] != data['term_condition']:
                    extra_d['term_condition'] = ser_data['term_condition'
                        ].update(Product_Terms_Condition.objects.get(
                        product=self._product_obj.id), data['term_condition'])
            else:
                extra_d['term_condition'] = ser_data['term_condition'].create({
                    'product': self._product_obj, 'terms_condition': data[
                    'term_condition']})
        else:
            terms = Product_Terms_Condition.objects.filter(product=self.
                _product_obj.id)
            if terms.exists():
                terms.delete()
        # Always report the current color/size rows back to the caller.
        extra_d['color'] = Product_Color.objects.filter(product=self.
            _product_obj.id)
        extra_d['size'] = Product_Size.objects.filter(product=self.
            _product_obj.id)
        return extra_d
def validate_main_data(self, data):
pro_ser = UpdateSerializer(instance=self._product_obj, data=data)
ser_data = {}
if pro_ser.is_valid():
ser_data['product'] = pro_ser
sp = self.validate_specification(self._product_obj, data[
'specific'])
if isinstance(sp, SpecificationSerializer):
ser_data['specific'] = sp
if 'images' in data:
data['images'] = {'images': data['images'], 'current':
data['img_current']}
img = self.validate_image(self._product_obj, data['images']
)
if isinstance(img, ImageSerializer):
ser_data['image'] = img
return ser_data
else:
return {'errors': img}
else:
return ser_data
else:
return {'errors': sp}
else:
return {'errors': pro_ser.errors}
def validate_extra_data(self, data):
ser_data = {}
if 'color' in data:
col = self.validate_color(data['color']['color'])
if isinstance(col, ColorSerializer):
ser_data['color'] = col
else:
return {'errors': col}
if 'size' in data:
siz = self.validate_size(data['size']['size'])
if isinstance(siz, SizeSerializer):
ser_data['size'] = siz
else:
return {'errors': siz}
if 'platform' in data:
pl = PlatformSerializer(data={'platform': data['platform']})
if pl.is_valid():
ser_data['platform'] = pl
else:
return {'errors': pl.errors}
if 'recom_use' in data:
recom = RecommendedUseSerializer(data={'recommended_use': data[
'recom_use']})
if recom.is_valid():
ser_data['recom_use'] = recom
else:
return {'errors': recom.errors}
if 'term_condition' in data:
term = TermConditionSerializer(data={'terms_condition': data[
'term_condition']})
if term.is_valid():
ser_data['term_condition'] = term
else:
return {'errors': term.errors}
if ser_data:
return ser_data
else:
return False
def validate_specification(self, pro, data):
for i in data:
sp = SpecificationSerializer(data={'name': i['name'], 'value':
i['value']})
if not sp.is_valid():
return sp.errors
return sp
def validate_image(self, pro, data):
for i in data['images']:
img = ImageSerializer(data={'image': data['images'][i]})
if not img.is_valid():
return img.errors
return img
def validate_color(self, data):
for i in data:
col = ColorSerializer(data={'color': i})
if not col.is_valid():
return col.errors
return col
def validate_size(self, data):
size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})
if not size.is_valid():
return size.errors
return size
<|reserved_special_token_1|>
from rest_framework.generics import GenericAPIView
from rest_framework.response import Response
from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND
from ...models.brand import Brand
from ...models.product import type_currency_choices, type_condition_choices, User, Product
from ...models.product_color import color_choices, Product_Color
from ...models.product_size import type_size_choices, Product_Size
from ...models.product_image import Product_Image
from ...models.product_specification import Product_Specification
from ...models.product_platform import Product_Platform
from ...models.product_recommended_use import Product_Recommended_Use
from ...models.product_terms_condition import Product_Terms_Condition
from ...serilaizers.products.updateSerializer import UpdateSerializer
from ...serilaizers.products.specificationSerializer import SpecificationSerializer
from ...serilaizers.products.imageSerializer import ImageSerializer
from ...serilaizers.products.colorSerializer import ColorSerializer
from ...serilaizers.products.platformSerializer import PlatformSerializer
from ...serilaizers.products.recommendedUseSerializer import RecommendedUseSerializer
from ...serilaizers.products.sizeSerializer import SizeSerializer
from ...serilaizers.products.termConditionSerializer import TermConditionSerializer
class UpdateProduct(GenericAPIView):
    """Retrieve (GET) and update (PUT) a product together with its related
    specifications, images, colors, sizes, platform, recommended use and
    terms & conditions records."""
    serializer_class = UpdateSerializer
    # Product row being edited; populated per request by get()/put().
    _product_obj = None
    # Shared brands queryset; QuerySets are lazy, so this re-evaluates on
    # each response render.
    _brands = Brand.objects.values("id", "name")

    def get(self, request, *args, **kwargs):
        """Return the product's editable data, or 404 when it is missing."""
        data = self.get_queryset()
        # Bug fix: bail out *before* dereferencing data.id — previously a
        # missing product returned False and `False.id` raised
        # AttributeError instead of producing the intended 404.
        if not data:
            return Response({"errors": False}, status=HTTP_404_NOT_FOUND)
        extract_sp = self.extract_filter_data(Product_Specification.objects.values(
            "name", "value").filter(product=data.id))
        extract_img = self.extract_filter_data(
            Product_Image.objects.values('image').filter(product=data.id))
        return Response(self.get_data({
            "product": data,
            "specific": extract_sp,
            "img": extract_img
        }))

    def extract_filter_data(self, data):
        """Materialize a queryset (or any iterable) into a plain list."""
        return list(data)

    def get_extra_data(self, id):
        """Collect the optional related rows for product `id`.

        Returns a dict holding only the categories that actually have rows,
        or False when the product has no extra data at all.
        """
        extra_data = {}
        pl = Product_Platform.objects.values(
            'platform').filter(product=id)
        col = Product_Color.objects.values('color').filter(product=id)
        siz = Product_Size.objects.values(
            'size', 'type_size').filter(product=id)
        recom = Product_Recommended_Use.objects.values(
            'recommended_use').filter(product=id)
        terms = Product_Terms_Condition.objects.values(
            'terms_condition').filter(product=id)
        if pl.exists():
            extra_data['platform'] = self.extract_filter_data(pl)
        if col.exists():
            extra_data['color'] = self.extract_filter_data(col)
        if siz.exists():
            extra_data['size'] = self.extract_filter_data(siz)
        if recom.exists():
            extra_data['recom_use'] = self.extract_filter_data(recom)
        if terms.exists():
            extra_data['term_condition'] = self.extract_filter_data(terms)
        if extra_data:
            return extra_data
        return False

    def get_queryset(self):
        """Return the Product for kwargs['pk'], or False when not found."""
        try:
            return Product.objects.get(id=self.kwargs['pk'])
        except Exception:
            # Bug fix: narrowed from a bare `except:`, which also swallowed
            # SystemExit and KeyboardInterrupt.
            return False

    def put(self, request, *args, **kwargs):
        """Validate and persist the submitted product update."""
        self._product_obj = self.get_queryset()
        data = self.prepare_data(self.request.data, self.request.FILES)
        main = self.validate_main_data(data)
        if 'errors' in main:
            return Response(main['errors'], status=HTTP_400_BAD_REQUEST)
        extra = self.validate_extra_data(data)
        if extra:
            if 'errors' in extra:
                return Response(extra['errors'], status=HTTP_400_BAD_REQUEST)
            # Persist the main data first, then the optional sections.
            main = self.update_main_data(data, main)
            self.update_extra_data(data, extra)
            return Response(self.get_data(main))
        # No optional data submitted: clear any stale rows, then save.
        self.update_extra_data(data, False)
        main = self.update_main_data(data, main)
        return Response(self.get_data(main))

    def get_data(self, main):
        """Assemble the response payload: saved product fields plus the
        choice lists the edit form needs to render."""
        return {
            "user": User.objects.values('id', 'username').get(username="root"),
            "name": main['product'].title,
            "brand": main['product'].brand.id,
            "quantity": main['product'].quantity,
            "price": main['product'].price,
            "currency": main['product'].currency,
            "condition": main['product'].condition,
            "description": main['product'].description,
            "brands": self._brands,
            "conditions": type_condition_choices,
            "currencys": type_currency_choices,
            "colors": color_choices,
            "sizes": type_size_choices,
            "specific": self.extract_filter_data(main['specific']),
            "images": self.extract_filter_data(main['img']),
            "extra_data": self.get_extra_data(main['product'].id)
        }

    def prepare_data(self, data, img_data=None):
        """Decode the JSON 'data' field of the request and merge any
        uploaded image files.

        `img_current` entries are re-keyed by the numeric suffix of their
        field name (`img_current_<n>`), and uploaded files (`img_<n>`) are
        stored under 'images'. Returns the combined dict.
        """
        from json import loads
        data = data['data']
        data = loads(data)
        data['img_current'] = {
            i.split("_")[2]: data['img_current'][i] for i in data['img_current']}
        # Bug fix: img_data defaults to None and `len(None)` raised
        # TypeError; a truthiness test covers None and empty dicts alike.
        if img_data:
            img = {i.split("_")[1]: img_data[i] for i in img_data}
            data['images'] = img
        return data

    def update_main_data(self, data, ser_data):
        """Persist the core product fields, specifications and images.

        `ser_data` holds the validated serializers from
        validate_main_data(); returns the saved product plus fresh
        querysets of its specifications and images.
        """
        pro = ser_data['product'].update(self._product_obj, data)
        for i in data['specific']:
            # Entries carrying a 'current' key edit an existing row (looked
            # up by its previous name); everything else is a new row.
            if 'current' in i:
                if i['current'] != i['name']:
                    ser_data['specific'].update(Product_Specification.objects.get(
                        product=self._product_obj.id, name=i['current']), i)
            else:
                i['product'] = self._product_obj
                ser_data['specific'].create(i)
        if 'images' in data:
            img = data['images']
            # Replace each stored image (keyed via img['current']) with its
            # newly uploaded counterpart.
            for i in img['images']:
                ser_data['image'].update(
                    Product_Image.objects.get(
                        product=self._product_obj.id,
                        image=img['current'][i]), img['images'][i])
        return {
            "product": pro,
            "specific": Product_Specification.objects.values('name', 'value').filter(product=pro.id),
            "img": Product_Image.objects.values('image').filter(product=pro.id)
        }

    def update_extra_data(self, data, ser_data):
        """Synchronize the optional related rows (color, size, platform,
        recommended use, terms & conditions).

        For each category: if a validated serializer is present in
        `ser_data`, create/update the rows; otherwise delete whatever rows
        exist. Calling with ser_data=False treats every category as
        removed.
        """
        extra_d = {}
        # Colors: delete-and-recreate whenever the submitted set changed.
        if ser_data and ('color' in ser_data):
            if 'current' in data['color']:
                if data['color']['current'] != data['color']['color']:
                    Product_Color.objects.filter(
                        product=self._product_obj.id).delete()
                    for i in data['color']['color']:
                        ser_data['color'].create(
                            {"product": self._product_obj, 'color': i})
            else:
                for i in data['color']['color']:
                    ser_data['color'].create(
                        {"product": self._product_obj, 'color': i})
        else:
            col = Product_Color.objects.filter(
                product=self._product_obj.id)
            if col.exists():
                col.delete()
        # Size: update in place when either the size or its type changed.
        if ser_data and ('size' in ser_data):
            siz = data['size']['size'][0]
            typ = data['size']['size'][1]
            if 'current' in data['size']:
                cur_siz = data['size']['current'][0]
                cur_typ = data['size']['current'][1]
                if siz != cur_siz:
                    ser_data['size'].update(Product_Size.objects.get(
                        product=self._product_obj.id), {"size": siz, "type_size": typ})
                elif typ != cur_typ:
                    ser_data['size'].update(Product_Size.objects.get(
                        product=self._product_obj.id), {"size": siz, "type_size": typ})
            else:
                ser_data['size'].create(
                    {"product": self._product_obj, "size": siz, "type_size": typ})
        else:
            siz = Product_Size.objects.filter(
                product=self._product_obj.id)
            if siz.exists():
                siz.delete()
        # Platform.
        if ser_data and ('platform' in ser_data):
            if 'platform_current' in data:
                if data['platform_current'] != data['platform']:
                    extra_d['platform'] = ser_data['platform'].update(Product_Platform.objects.get(
                        product=self._product_obj.id), data['platform'])
            else:
                extra_d['platform'] = ser_data['platform'].create(
                    {"product": self._product_obj, "platform": data['platform']})
        else:
            pl = Product_Platform.objects.filter(
                product=self._product_obj.id)
            if pl.exists():
                pl.delete()
        # Recommended use.
        if ser_data and ('recom_use' in ser_data):
            if 'recom_use_current' in data:
                if data['recom_use_current'] != data['recom_use']:
                    extra_d['recom_use'] = ser_data['recom_use'].update(Product_Recommended_Use.objects.get(
                        product=self._product_obj.id), data['recom_use'])
            else:
                extra_d['recom_use'] = ser_data['recom_use'].create(
                    {"product": self._product_obj, "recommended_use": data['recom_use']})
        else:
            recom = Product_Recommended_Use.objects.filter(
                product=self._product_obj.id)
            if recom.exists():
                recom.delete()
        # Terms & conditions.
        if ser_data and ('term_condition' in ser_data):
            if 'term_condition_current' in data:
                if data['term_condition_current'] != data['term_condition']:
                    extra_d['term_condition'] = ser_data['term_condition'].update(
                        Product_Terms_Condition.objects.get(product=self._product_obj.id), data['term_condition'])
            else:
                extra_d['term_condition'] = ser_data['term_condition'].create(
                    {"product": self._product_obj, "terms_condition": data['term_condition']})
        else:
            terms = Product_Terms_Condition.objects.filter(
                product=self._product_obj.id)
            if terms.exists():
                terms.delete()
        # Always report the current color/size rows back to the caller.
        extra_d['color'] = Product_Color.objects.filter(
            product=self._product_obj.id)
        extra_d['size'] = Product_Size.objects.filter(
            product=self._product_obj.id)
        return extra_d

    def validate_main_data(self, data):
        """Validate the core product payload.

        On success returns a dict of bound serializers ('product',
        'specific' and optionally 'image'); on failure returns
        {'errors': ...} for the first failing section.
        """
        pro_ser = UpdateSerializer(instance=self._product_obj, data=data)
        ser_data = {}
        if not pro_ser.is_valid():
            return {"errors": pro_ser.errors}
        ser_data['product'] = pro_ser
        sp = self.validate_specification(
            self._product_obj, data['specific'])
        if not isinstance(sp, SpecificationSerializer):
            return {"errors": sp}
        ser_data['specific'] = sp
        if 'images' not in data:
            return ser_data
        # Pair the uploads with the filenames they replace before checking.
        data['images'] = {"images": data['images'],
                          'current': data['img_current']}
        img = self.validate_image(
            self._product_obj, data['images'])
        if not isinstance(img, ImageSerializer):
            return {"errors": img}
        ser_data['image'] = img
        return ser_data

    def validate_extra_data(self, data):
        """Validate whichever optional sections are present in the payload.

        Returns a dict of bound serializers, {'errors': ...} on the first
        failure, or False when no optional section was submitted.
        """
        ser_data = {}
        if 'color' in data:
            col = self.validate_color(data['color']['color'])
            if not isinstance(col, ColorSerializer):
                return {"errors": col}
            ser_data['color'] = col
        if 'size' in data:
            siz = self.validate_size(data['size']['size'])
            if not isinstance(siz, SizeSerializer):
                return {"errors": siz}
            ser_data['size'] = siz
        if 'platform' in data:
            pl = PlatformSerializer(data={"platform": data['platform']})
            if not pl.is_valid():
                return {"errors": pl.errors}
            ser_data['platform'] = pl
        if 'recom_use' in data:
            recom = RecommendedUseSerializer(
                data={"recommended_use": data['recom_use']})
            if not recom.is_valid():
                return {"errors": recom.errors}
            ser_data['recom_use'] = recom
        if 'term_condition' in data:
            term = TermConditionSerializer(
                data={"terms_condition": data['term_condition']})
            if not term.is_valid():
                return {"errors": term.errors}
            ser_data['term_condition'] = term
        return ser_data or False

    def validate_specification(self, pro, data):
        """Validate each name/value pair; returns the first failing
        serializer's errors, the last valid serializer, or None when
        `data` is empty."""
        # Bug fix: initialize sp to avoid UnboundLocalError on empty input.
        sp = None
        for i in data:
            sp = SpecificationSerializer(
                data={"name": i['name'], "value": i['value']})
            if not sp.is_valid():
                return sp.errors
        return sp

    def validate_image(self, pro, data):
        """Validate each uploaded image; returns the first failing
        serializer's errors, the last valid serializer, or None when there
        are no uploads."""
        # Bug fix: initialize img to avoid UnboundLocalError on empty input.
        img = None
        for i in data['images']:
            img = ImageSerializer(data={"image": data['images'][i]})
            if not img.is_valid():
                return img.errors
        return img

    def validate_color(self, data):
        """Validate each color; returns the first failing serializer's
        errors, the last valid serializer, or None when `data` is empty."""
        # Bug fix: initialize col to avoid UnboundLocalError on empty input.
        col = None
        for i in data:
            col = ColorSerializer(data={"color": i})
            if not col.is_valid():
                return col.errors
        return col

    def validate_size(self, data):
        """Validate a (size, type_size) pair; return the bound serializer
        on success or its errors on failure."""
        size = SizeSerializer(data={"size": data[0],
                                    "type_size": data[1]})
        if not size.is_valid():
            return size.errors
        return size
|
flexible
|
{
"blob_id": "47e9b73fc7f6b3c8295e78d0cdb5aa51ca4c5f8d",
"index": 8140,
"step-1": "<mask token>\n\n\nclass UpdateProduct(GenericAPIView):\n <mask token>\n <mask token>\n <mask token>\n\n def get(self, request, *args, **kwargs):\n data = self.get_queryset()\n extract_sp = self.extract_filter_data(Product_Specification.objects\n .values('name', 'value').filter(product=data.id))\n extract_img = self.extract_filter_data(Product_Image.objects.values\n ('image').filter(product=data.id))\n if data:\n return Response(self.get_data({'product': data, 'specific':\n extract_sp, 'img': extract_img}))\n else:\n return Response({'errors': False}, status=HTTP_404_NOT_FOUND)\n <mask token>\n\n def get_extra_data(self, id):\n extra_data = {}\n pl = Product_Platform.objects.values('platform').filter(product=id)\n col = Product_Color.objects.values('color').filter(product=id)\n siz = Product_Size.objects.values('size', 'type_size').filter(product\n =id)\n recom = Product_Recommended_Use.objects.values('recommended_use'\n ).filter(product=id)\n terms = Product_Terms_Condition.objects.values('terms_condition'\n ).filter(product=id)\n if pl.exists():\n extra_data['platform'] = self.extract_filter_data(pl)\n if col.exists():\n extra_data['color'] = self.extract_filter_data(col)\n if siz.exists():\n extra_data['size'] = self.extract_filter_data(siz)\n if recom.exists():\n extra_data['recom_use'] = self.extract_filter_data(recom)\n if terms.exists():\n extra_data['term_condition'] = self.extract_filter_data(terms)\n if extra_data:\n return extra_data\n else:\n return False\n\n def get_queryset(self):\n try:\n return Product.objects.get(id=self.kwargs['pk'])\n except:\n return False\n\n def put(self, request, *args, **kwargs):\n self._product_obj = self.get_queryset()\n data = self.prepare_data(self.request.data, self.request.FILES)\n main = self.validate_main_data(data)\n if 'errors' in main:\n return Response(main['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n extra = self.validate_extra_data(data)\n if extra:\n if 'errors' in extra:\n return 
Response(extra['errors'], status=\n HTTP_400_BAD_REQUEST)\n else:\n main = self.update_main_data(data, main)\n self.update_extra_data(data, extra)\n return Response(self.get_data(main))\n self.update_extra_data(data, False)\n main = self.update_main_data(data, main)\n return Response(self.get_data(main))\n\n def get_data(self, main):\n return {'user': User.objects.values('id', 'username').get(username=\n 'root'), 'name': main['product'].title, 'brand': main['product'\n ].brand.id, 'quantity': main['product'].quantity, 'price': main\n ['product'].price, 'currency': main['product'].currency,\n 'condition': main['product'].condition, 'description': main[\n 'product'].description, 'brands': self._brands, 'conditions':\n type_condition_choices, 'currencys': type_currency_choices,\n 'colors': color_choices, 'sizes': type_size_choices, 'specific':\n self.extract_filter_data(main['specific']), 'images': self.\n extract_filter_data(main['img']), 'extra_data': self.\n get_extra_data(main['product'].id)}\n\n def prepare_data(self, data, img_data=None):\n from json import loads\n data = data['data']\n data = loads(data)\n data['img_current'] = {i.split('_')[2]: data['img_current'][i] for\n i in data['img_current']}\n if len(img_data) > 0:\n img = {i.split('_')[1]: img_data[i] for i in img_data}\n data['images'] = img\n return data\n\n def update_main_data(self, data, ser_data):\n pro = ser_data['product'].update(self._product_obj, data)\n for i in data['specific']:\n if 'current' in i:\n if i['current'] != i['name']:\n ser_data['specific'].update(Product_Specification.\n objects.get(product=self._product_obj.id, name=i[\n 'current']), i)\n else:\n i['product'] = self._product_obj\n ser_data['specific'].create(i)\n if 'images' in data:\n img = data['images']\n for i in img['images']:\n ser_data['image'].update(Product_Image.objects.get(product=\n self._product_obj.id, image=img['current'][i]), img[\n 'images'][i])\n return {'product': pro, 'specific': 
Product_Specification.objects.\n values('name', 'value').filter(product=pro.id), 'img':\n Product_Image.objects.values('image').filter(product=pro.id)}\n\n def update_extra_data(self, data, ser_data):\n extra_d = {}\n if ser_data and 'color' in ser_data:\n if 'current' in data['color']:\n if data['color']['current'] != data['color']['color']:\n Product_Color.objects.filter(product=self._product_obj.id\n ).delete()\n for i in data['color']['color']:\n ser_data['color'].create({'product': self.\n _product_obj, 'color': i})\n else:\n for i in data['color']['color']:\n ser_data['color'].create({'product': self._product_obj,\n 'color': i})\n else:\n col = Product_Color.objects.filter(product=self._product_obj.id)\n if col.exists():\n col.delete()\n if ser_data and 'size' in ser_data:\n siz = data['size']['size'][0]\n typ = data['size']['size'][1]\n if 'current' in data['size']:\n cur_siz = data['size']['current'][0]\n cur_typ = data['size']['current'][1]\n if siz != cur_siz:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n elif typ != cur_typ:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n else:\n ser_data['size'].create({'product': self._product_obj,\n 'size': siz, 'type_size': typ})\n else:\n siz = Product_Size.objects.filter(product=self._product_obj.id)\n if siz.exists():\n siz.delete()\n if ser_data and 'platform' in ser_data:\n if 'platform_current' in data:\n if data['platform_current'] != data['platform']:\n extra_d['platform'] = ser_data['platform'].update(\n Product_Platform.objects.get(product=self.\n _product_obj.id), data['platform'])\n else:\n extra_d['platform'] = ser_data['platform'].create({\n 'product': self._product_obj, 'platform': data['platform']}\n )\n else:\n pl = Product_Platform.objects.filter(product=self._product_obj.id)\n if pl.exists():\n pl.delete()\n if ser_data and 'recom_use' in ser_data:\n 
if 'recom_use_current' in data:\n if data['recom_use_current'] != data['recom_use']:\n extra_d['recom_use'] = ser_data['recom_use'].update(\n Product_Recommended_Use.objects.get(product=self.\n _product_obj.id), data['recom_use'])\n else:\n extra_d['recom_use'] = ser_data['recom_use'].create({\n 'product': self._product_obj, 'recommended_use': data[\n 'recom_use']})\n else:\n recom = Product_Recommended_Use.objects.filter(product=self.\n _product_obj.id)\n if recom.exists():\n recom.delete()\n if ser_data and 'term_condition' in ser_data:\n if 'term_condition_current' in data:\n if data['term_condition_current'] != data['term_condition']:\n extra_d['term_condition'] = ser_data['term_condition'\n ].update(Product_Terms_Condition.objects.get(\n product=self._product_obj.id), data['term_condition'])\n else:\n extra_d['term_condition'] = ser_data['term_condition'].create({\n 'product': self._product_obj, 'terms_condition': data[\n 'term_condition']})\n else:\n terms = Product_Terms_Condition.objects.filter(product=self.\n _product_obj.id)\n if terms.exists():\n terms.delete()\n extra_d['color'] = Product_Color.objects.filter(product=self.\n _product_obj.id)\n extra_d['size'] = Product_Size.objects.filter(product=self.\n _product_obj.id)\n return extra_d\n\n def validate_main_data(self, data):\n pro_ser = UpdateSerializer(instance=self._product_obj, data=data)\n ser_data = {}\n if pro_ser.is_valid():\n ser_data['product'] = pro_ser\n sp = self.validate_specification(self._product_obj, data[\n 'specific'])\n if isinstance(sp, SpecificationSerializer):\n ser_data['specific'] = sp\n if 'images' in data:\n data['images'] = {'images': data['images'], 'current':\n data['img_current']}\n img = self.validate_image(self._product_obj, data['images']\n )\n if isinstance(img, ImageSerializer):\n ser_data['image'] = img\n return ser_data\n else:\n return {'errors': img}\n else:\n return ser_data\n else:\n return {'errors': sp}\n else:\n return {'errors': pro_ser.errors}\n <mask 
token>\n <mask token>\n <mask token>\n <mask token>\n\n def validate_size(self, data):\n size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})\n if not size.is_valid():\n return size.errors\n return size\n",
"step-2": "<mask token>\n\n\nclass UpdateProduct(GenericAPIView):\n <mask token>\n <mask token>\n <mask token>\n\n def get(self, request, *args, **kwargs):\n data = self.get_queryset()\n extract_sp = self.extract_filter_data(Product_Specification.objects\n .values('name', 'value').filter(product=data.id))\n extract_img = self.extract_filter_data(Product_Image.objects.values\n ('image').filter(product=data.id))\n if data:\n return Response(self.get_data({'product': data, 'specific':\n extract_sp, 'img': extract_img}))\n else:\n return Response({'errors': False}, status=HTTP_404_NOT_FOUND)\n <mask token>\n\n def get_extra_data(self, id):\n extra_data = {}\n pl = Product_Platform.objects.values('platform').filter(product=id)\n col = Product_Color.objects.values('color').filter(product=id)\n siz = Product_Size.objects.values('size', 'type_size').filter(product\n =id)\n recom = Product_Recommended_Use.objects.values('recommended_use'\n ).filter(product=id)\n terms = Product_Terms_Condition.objects.values('terms_condition'\n ).filter(product=id)\n if pl.exists():\n extra_data['platform'] = self.extract_filter_data(pl)\n if col.exists():\n extra_data['color'] = self.extract_filter_data(col)\n if siz.exists():\n extra_data['size'] = self.extract_filter_data(siz)\n if recom.exists():\n extra_data['recom_use'] = self.extract_filter_data(recom)\n if terms.exists():\n extra_data['term_condition'] = self.extract_filter_data(terms)\n if extra_data:\n return extra_data\n else:\n return False\n\n def get_queryset(self):\n try:\n return Product.objects.get(id=self.kwargs['pk'])\n except:\n return False\n\n def put(self, request, *args, **kwargs):\n self._product_obj = self.get_queryset()\n data = self.prepare_data(self.request.data, self.request.FILES)\n main = self.validate_main_data(data)\n if 'errors' in main:\n return Response(main['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n extra = self.validate_extra_data(data)\n if extra:\n if 'errors' in extra:\n return 
Response(extra['errors'], status=\n HTTP_400_BAD_REQUEST)\n else:\n main = self.update_main_data(data, main)\n self.update_extra_data(data, extra)\n return Response(self.get_data(main))\n self.update_extra_data(data, False)\n main = self.update_main_data(data, main)\n return Response(self.get_data(main))\n\n def get_data(self, main):\n return {'user': User.objects.values('id', 'username').get(username=\n 'root'), 'name': main['product'].title, 'brand': main['product'\n ].brand.id, 'quantity': main['product'].quantity, 'price': main\n ['product'].price, 'currency': main['product'].currency,\n 'condition': main['product'].condition, 'description': main[\n 'product'].description, 'brands': self._brands, 'conditions':\n type_condition_choices, 'currencys': type_currency_choices,\n 'colors': color_choices, 'sizes': type_size_choices, 'specific':\n self.extract_filter_data(main['specific']), 'images': self.\n extract_filter_data(main['img']), 'extra_data': self.\n get_extra_data(main['product'].id)}\n\n def prepare_data(self, data, img_data=None):\n from json import loads\n data = data['data']\n data = loads(data)\n data['img_current'] = {i.split('_')[2]: data['img_current'][i] for\n i in data['img_current']}\n if len(img_data) > 0:\n img = {i.split('_')[1]: img_data[i] for i in img_data}\n data['images'] = img\n return data\n\n def update_main_data(self, data, ser_data):\n pro = ser_data['product'].update(self._product_obj, data)\n for i in data['specific']:\n if 'current' in i:\n if i['current'] != i['name']:\n ser_data['specific'].update(Product_Specification.\n objects.get(product=self._product_obj.id, name=i[\n 'current']), i)\n else:\n i['product'] = self._product_obj\n ser_data['specific'].create(i)\n if 'images' in data:\n img = data['images']\n for i in img['images']:\n ser_data['image'].update(Product_Image.objects.get(product=\n self._product_obj.id, image=img['current'][i]), img[\n 'images'][i])\n return {'product': pro, 'specific': 
Product_Specification.objects.\n values('name', 'value').filter(product=pro.id), 'img':\n Product_Image.objects.values('image').filter(product=pro.id)}\n\n def update_extra_data(self, data, ser_data):\n extra_d = {}\n if ser_data and 'color' in ser_data:\n if 'current' in data['color']:\n if data['color']['current'] != data['color']['color']:\n Product_Color.objects.filter(product=self._product_obj.id\n ).delete()\n for i in data['color']['color']:\n ser_data['color'].create({'product': self.\n _product_obj, 'color': i})\n else:\n for i in data['color']['color']:\n ser_data['color'].create({'product': self._product_obj,\n 'color': i})\n else:\n col = Product_Color.objects.filter(product=self._product_obj.id)\n if col.exists():\n col.delete()\n if ser_data and 'size' in ser_data:\n siz = data['size']['size'][0]\n typ = data['size']['size'][1]\n if 'current' in data['size']:\n cur_siz = data['size']['current'][0]\n cur_typ = data['size']['current'][1]\n if siz != cur_siz:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n elif typ != cur_typ:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n else:\n ser_data['size'].create({'product': self._product_obj,\n 'size': siz, 'type_size': typ})\n else:\n siz = Product_Size.objects.filter(product=self._product_obj.id)\n if siz.exists():\n siz.delete()\n if ser_data and 'platform' in ser_data:\n if 'platform_current' in data:\n if data['platform_current'] != data['platform']:\n extra_d['platform'] = ser_data['platform'].update(\n Product_Platform.objects.get(product=self.\n _product_obj.id), data['platform'])\n else:\n extra_d['platform'] = ser_data['platform'].create({\n 'product': self._product_obj, 'platform': data['platform']}\n )\n else:\n pl = Product_Platform.objects.filter(product=self._product_obj.id)\n if pl.exists():\n pl.delete()\n if ser_data and 'recom_use' in ser_data:\n 
if 'recom_use_current' in data:\n if data['recom_use_current'] != data['recom_use']:\n extra_d['recom_use'] = ser_data['recom_use'].update(\n Product_Recommended_Use.objects.get(product=self.\n _product_obj.id), data['recom_use'])\n else:\n extra_d['recom_use'] = ser_data['recom_use'].create({\n 'product': self._product_obj, 'recommended_use': data[\n 'recom_use']})\n else:\n recom = Product_Recommended_Use.objects.filter(product=self.\n _product_obj.id)\n if recom.exists():\n recom.delete()\n if ser_data and 'term_condition' in ser_data:\n if 'term_condition_current' in data:\n if data['term_condition_current'] != data['term_condition']:\n extra_d['term_condition'] = ser_data['term_condition'\n ].update(Product_Terms_Condition.objects.get(\n product=self._product_obj.id), data['term_condition'])\n else:\n extra_d['term_condition'] = ser_data['term_condition'].create({\n 'product': self._product_obj, 'terms_condition': data[\n 'term_condition']})\n else:\n terms = Product_Terms_Condition.objects.filter(product=self.\n _product_obj.id)\n if terms.exists():\n terms.delete()\n extra_d['color'] = Product_Color.objects.filter(product=self.\n _product_obj.id)\n extra_d['size'] = Product_Size.objects.filter(product=self.\n _product_obj.id)\n return extra_d\n\n def validate_main_data(self, data):\n pro_ser = UpdateSerializer(instance=self._product_obj, data=data)\n ser_data = {}\n if pro_ser.is_valid():\n ser_data['product'] = pro_ser\n sp = self.validate_specification(self._product_obj, data[\n 'specific'])\n if isinstance(sp, SpecificationSerializer):\n ser_data['specific'] = sp\n if 'images' in data:\n data['images'] = {'images': data['images'], 'current':\n data['img_current']}\n img = self.validate_image(self._product_obj, data['images']\n )\n if isinstance(img, ImageSerializer):\n ser_data['image'] = img\n return ser_data\n else:\n return {'errors': img}\n else:\n return ser_data\n else:\n return {'errors': sp}\n else:\n return {'errors': pro_ser.errors}\n\n def 
validate_extra_data(self, data):\n ser_data = {}\n if 'color' in data:\n col = self.validate_color(data['color']['color'])\n if isinstance(col, ColorSerializer):\n ser_data['color'] = col\n else:\n return {'errors': col}\n if 'size' in data:\n siz = self.validate_size(data['size']['size'])\n if isinstance(siz, SizeSerializer):\n ser_data['size'] = siz\n else:\n return {'errors': siz}\n if 'platform' in data:\n pl = PlatformSerializer(data={'platform': data['platform']})\n if pl.is_valid():\n ser_data['platform'] = pl\n else:\n return {'errors': pl.errors}\n if 'recom_use' in data:\n recom = RecommendedUseSerializer(data={'recommended_use': data[\n 'recom_use']})\n if recom.is_valid():\n ser_data['recom_use'] = recom\n else:\n return {'errors': recom.errors}\n if 'term_condition' in data:\n term = TermConditionSerializer(data={'terms_condition': data[\n 'term_condition']})\n if term.is_valid():\n ser_data['term_condition'] = term\n else:\n return {'errors': term.errors}\n if ser_data:\n return ser_data\n else:\n return False\n <mask token>\n <mask token>\n\n def validate_color(self, data):\n for i in data:\n col = ColorSerializer(data={'color': i})\n if not col.is_valid():\n return col.errors\n return col\n\n def validate_size(self, data):\n size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})\n if not size.is_valid():\n return size.errors\n return size\n",
"step-3": "<mask token>\n\n\nclass UpdateProduct(GenericAPIView):\n <mask token>\n <mask token>\n <mask token>\n\n def get(self, request, *args, **kwargs):\n data = self.get_queryset()\n extract_sp = self.extract_filter_data(Product_Specification.objects\n .values('name', 'value').filter(product=data.id))\n extract_img = self.extract_filter_data(Product_Image.objects.values\n ('image').filter(product=data.id))\n if data:\n return Response(self.get_data({'product': data, 'specific':\n extract_sp, 'img': extract_img}))\n else:\n return Response({'errors': False}, status=HTTP_404_NOT_FOUND)\n\n def extract_filter_data(self, data):\n arr = []\n for i in data:\n arr.append(i)\n return arr\n\n def get_extra_data(self, id):\n extra_data = {}\n pl = Product_Platform.objects.values('platform').filter(product=id)\n col = Product_Color.objects.values('color').filter(product=id)\n siz = Product_Size.objects.values('size', 'type_size').filter(product\n =id)\n recom = Product_Recommended_Use.objects.values('recommended_use'\n ).filter(product=id)\n terms = Product_Terms_Condition.objects.values('terms_condition'\n ).filter(product=id)\n if pl.exists():\n extra_data['platform'] = self.extract_filter_data(pl)\n if col.exists():\n extra_data['color'] = self.extract_filter_data(col)\n if siz.exists():\n extra_data['size'] = self.extract_filter_data(siz)\n if recom.exists():\n extra_data['recom_use'] = self.extract_filter_data(recom)\n if terms.exists():\n extra_data['term_condition'] = self.extract_filter_data(terms)\n if extra_data:\n return extra_data\n else:\n return False\n\n def get_queryset(self):\n try:\n return Product.objects.get(id=self.kwargs['pk'])\n except:\n return False\n\n def put(self, request, *args, **kwargs):\n self._product_obj = self.get_queryset()\n data = self.prepare_data(self.request.data, self.request.FILES)\n main = self.validate_main_data(data)\n if 'errors' in main:\n return Response(main['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n extra = 
self.validate_extra_data(data)\n if extra:\n if 'errors' in extra:\n return Response(extra['errors'], status=\n HTTP_400_BAD_REQUEST)\n else:\n main = self.update_main_data(data, main)\n self.update_extra_data(data, extra)\n return Response(self.get_data(main))\n self.update_extra_data(data, False)\n main = self.update_main_data(data, main)\n return Response(self.get_data(main))\n\n def get_data(self, main):\n return {'user': User.objects.values('id', 'username').get(username=\n 'root'), 'name': main['product'].title, 'brand': main['product'\n ].brand.id, 'quantity': main['product'].quantity, 'price': main\n ['product'].price, 'currency': main['product'].currency,\n 'condition': main['product'].condition, 'description': main[\n 'product'].description, 'brands': self._brands, 'conditions':\n type_condition_choices, 'currencys': type_currency_choices,\n 'colors': color_choices, 'sizes': type_size_choices, 'specific':\n self.extract_filter_data(main['specific']), 'images': self.\n extract_filter_data(main['img']), 'extra_data': self.\n get_extra_data(main['product'].id)}\n\n def prepare_data(self, data, img_data=None):\n from json import loads\n data = data['data']\n data = loads(data)\n data['img_current'] = {i.split('_')[2]: data['img_current'][i] for\n i in data['img_current']}\n if len(img_data) > 0:\n img = {i.split('_')[1]: img_data[i] for i in img_data}\n data['images'] = img\n return data\n\n def update_main_data(self, data, ser_data):\n pro = ser_data['product'].update(self._product_obj, data)\n for i in data['specific']:\n if 'current' in i:\n if i['current'] != i['name']:\n ser_data['specific'].update(Product_Specification.\n objects.get(product=self._product_obj.id, name=i[\n 'current']), i)\n else:\n i['product'] = self._product_obj\n ser_data['specific'].create(i)\n if 'images' in data:\n img = data['images']\n for i in img['images']:\n ser_data['image'].update(Product_Image.objects.get(product=\n self._product_obj.id, image=img['current'][i]), img[\n 
'images'][i])\n return {'product': pro, 'specific': Product_Specification.objects.\n values('name', 'value').filter(product=pro.id), 'img':\n Product_Image.objects.values('image').filter(product=pro.id)}\n\n def update_extra_data(self, data, ser_data):\n extra_d = {}\n if ser_data and 'color' in ser_data:\n if 'current' in data['color']:\n if data['color']['current'] != data['color']['color']:\n Product_Color.objects.filter(product=self._product_obj.id\n ).delete()\n for i in data['color']['color']:\n ser_data['color'].create({'product': self.\n _product_obj, 'color': i})\n else:\n for i in data['color']['color']:\n ser_data['color'].create({'product': self._product_obj,\n 'color': i})\n else:\n col = Product_Color.objects.filter(product=self._product_obj.id)\n if col.exists():\n col.delete()\n if ser_data and 'size' in ser_data:\n siz = data['size']['size'][0]\n typ = data['size']['size'][1]\n if 'current' in data['size']:\n cur_siz = data['size']['current'][0]\n cur_typ = data['size']['current'][1]\n if siz != cur_siz:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n elif typ != cur_typ:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n else:\n ser_data['size'].create({'product': self._product_obj,\n 'size': siz, 'type_size': typ})\n else:\n siz = Product_Size.objects.filter(product=self._product_obj.id)\n if siz.exists():\n siz.delete()\n if ser_data and 'platform' in ser_data:\n if 'platform_current' in data:\n if data['platform_current'] != data['platform']:\n extra_d['platform'] = ser_data['platform'].update(\n Product_Platform.objects.get(product=self.\n _product_obj.id), data['platform'])\n else:\n extra_d['platform'] = ser_data['platform'].create({\n 'product': self._product_obj, 'platform': data['platform']}\n )\n else:\n pl = Product_Platform.objects.filter(product=self._product_obj.id)\n if pl.exists():\n 
pl.delete()\n if ser_data and 'recom_use' in ser_data:\n if 'recom_use_current' in data:\n if data['recom_use_current'] != data['recom_use']:\n extra_d['recom_use'] = ser_data['recom_use'].update(\n Product_Recommended_Use.objects.get(product=self.\n _product_obj.id), data['recom_use'])\n else:\n extra_d['recom_use'] = ser_data['recom_use'].create({\n 'product': self._product_obj, 'recommended_use': data[\n 'recom_use']})\n else:\n recom = Product_Recommended_Use.objects.filter(product=self.\n _product_obj.id)\n if recom.exists():\n recom.delete()\n if ser_data and 'term_condition' in ser_data:\n if 'term_condition_current' in data:\n if data['term_condition_current'] != data['term_condition']:\n extra_d['term_condition'] = ser_data['term_condition'\n ].update(Product_Terms_Condition.objects.get(\n product=self._product_obj.id), data['term_condition'])\n else:\n extra_d['term_condition'] = ser_data['term_condition'].create({\n 'product': self._product_obj, 'terms_condition': data[\n 'term_condition']})\n else:\n terms = Product_Terms_Condition.objects.filter(product=self.\n _product_obj.id)\n if terms.exists():\n terms.delete()\n extra_d['color'] = Product_Color.objects.filter(product=self.\n _product_obj.id)\n extra_d['size'] = Product_Size.objects.filter(product=self.\n _product_obj.id)\n return extra_d\n\n def validate_main_data(self, data):\n pro_ser = UpdateSerializer(instance=self._product_obj, data=data)\n ser_data = {}\n if pro_ser.is_valid():\n ser_data['product'] = pro_ser\n sp = self.validate_specification(self._product_obj, data[\n 'specific'])\n if isinstance(sp, SpecificationSerializer):\n ser_data['specific'] = sp\n if 'images' in data:\n data['images'] = {'images': data['images'], 'current':\n data['img_current']}\n img = self.validate_image(self._product_obj, data['images']\n )\n if isinstance(img, ImageSerializer):\n ser_data['image'] = img\n return ser_data\n else:\n return {'errors': img}\n else:\n return ser_data\n else:\n return {'errors': 
sp}\n else:\n return {'errors': pro_ser.errors}\n\n def validate_extra_data(self, data):\n ser_data = {}\n if 'color' in data:\n col = self.validate_color(data['color']['color'])\n if isinstance(col, ColorSerializer):\n ser_data['color'] = col\n else:\n return {'errors': col}\n if 'size' in data:\n siz = self.validate_size(data['size']['size'])\n if isinstance(siz, SizeSerializer):\n ser_data['size'] = siz\n else:\n return {'errors': siz}\n if 'platform' in data:\n pl = PlatformSerializer(data={'platform': data['platform']})\n if pl.is_valid():\n ser_data['platform'] = pl\n else:\n return {'errors': pl.errors}\n if 'recom_use' in data:\n recom = RecommendedUseSerializer(data={'recommended_use': data[\n 'recom_use']})\n if recom.is_valid():\n ser_data['recom_use'] = recom\n else:\n return {'errors': recom.errors}\n if 'term_condition' in data:\n term = TermConditionSerializer(data={'terms_condition': data[\n 'term_condition']})\n if term.is_valid():\n ser_data['term_condition'] = term\n else:\n return {'errors': term.errors}\n if ser_data:\n return ser_data\n else:\n return False\n\n def validate_specification(self, pro, data):\n for i in data:\n sp = SpecificationSerializer(data={'name': i['name'], 'value':\n i['value']})\n if not sp.is_valid():\n return sp.errors\n return sp\n\n def validate_image(self, pro, data):\n for i in data['images']:\n img = ImageSerializer(data={'image': data['images'][i]})\n if not img.is_valid():\n return img.errors\n return img\n\n def validate_color(self, data):\n for i in data:\n col = ColorSerializer(data={'color': i})\n if not col.is_valid():\n return col.errors\n return col\n\n def validate_size(self, data):\n size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})\n if not size.is_valid():\n return size.errors\n return size\n",
"step-4": "<mask token>\n\n\nclass UpdateProduct(GenericAPIView):\n serializer_class = UpdateSerializer\n _product_obj = None\n _brands = Brand.objects.values('id', 'name')\n\n def get(self, request, *args, **kwargs):\n data = self.get_queryset()\n extract_sp = self.extract_filter_data(Product_Specification.objects\n .values('name', 'value').filter(product=data.id))\n extract_img = self.extract_filter_data(Product_Image.objects.values\n ('image').filter(product=data.id))\n if data:\n return Response(self.get_data({'product': data, 'specific':\n extract_sp, 'img': extract_img}))\n else:\n return Response({'errors': False}, status=HTTP_404_NOT_FOUND)\n\n def extract_filter_data(self, data):\n arr = []\n for i in data:\n arr.append(i)\n return arr\n\n def get_extra_data(self, id):\n extra_data = {}\n pl = Product_Platform.objects.values('platform').filter(product=id)\n col = Product_Color.objects.values('color').filter(product=id)\n siz = Product_Size.objects.values('size', 'type_size').filter(product\n =id)\n recom = Product_Recommended_Use.objects.values('recommended_use'\n ).filter(product=id)\n terms = Product_Terms_Condition.objects.values('terms_condition'\n ).filter(product=id)\n if pl.exists():\n extra_data['platform'] = self.extract_filter_data(pl)\n if col.exists():\n extra_data['color'] = self.extract_filter_data(col)\n if siz.exists():\n extra_data['size'] = self.extract_filter_data(siz)\n if recom.exists():\n extra_data['recom_use'] = self.extract_filter_data(recom)\n if terms.exists():\n extra_data['term_condition'] = self.extract_filter_data(terms)\n if extra_data:\n return extra_data\n else:\n return False\n\n def get_queryset(self):\n try:\n return Product.objects.get(id=self.kwargs['pk'])\n except:\n return False\n\n def put(self, request, *args, **kwargs):\n self._product_obj = self.get_queryset()\n data = self.prepare_data(self.request.data, self.request.FILES)\n main = self.validate_main_data(data)\n if 'errors' in main:\n return 
Response(main['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n extra = self.validate_extra_data(data)\n if extra:\n if 'errors' in extra:\n return Response(extra['errors'], status=\n HTTP_400_BAD_REQUEST)\n else:\n main = self.update_main_data(data, main)\n self.update_extra_data(data, extra)\n return Response(self.get_data(main))\n self.update_extra_data(data, False)\n main = self.update_main_data(data, main)\n return Response(self.get_data(main))\n\n def get_data(self, main):\n return {'user': User.objects.values('id', 'username').get(username=\n 'root'), 'name': main['product'].title, 'brand': main['product'\n ].brand.id, 'quantity': main['product'].quantity, 'price': main\n ['product'].price, 'currency': main['product'].currency,\n 'condition': main['product'].condition, 'description': main[\n 'product'].description, 'brands': self._brands, 'conditions':\n type_condition_choices, 'currencys': type_currency_choices,\n 'colors': color_choices, 'sizes': type_size_choices, 'specific':\n self.extract_filter_data(main['specific']), 'images': self.\n extract_filter_data(main['img']), 'extra_data': self.\n get_extra_data(main['product'].id)}\n\n def prepare_data(self, data, img_data=None):\n from json import loads\n data = data['data']\n data = loads(data)\n data['img_current'] = {i.split('_')[2]: data['img_current'][i] for\n i in data['img_current']}\n if len(img_data) > 0:\n img = {i.split('_')[1]: img_data[i] for i in img_data}\n data['images'] = img\n return data\n\n def update_main_data(self, data, ser_data):\n pro = ser_data['product'].update(self._product_obj, data)\n for i in data['specific']:\n if 'current' in i:\n if i['current'] != i['name']:\n ser_data['specific'].update(Product_Specification.\n objects.get(product=self._product_obj.id, name=i[\n 'current']), i)\n else:\n i['product'] = self._product_obj\n ser_data['specific'].create(i)\n if 'images' in data:\n img = data['images']\n for i in img['images']:\n 
ser_data['image'].update(Product_Image.objects.get(product=\n self._product_obj.id, image=img['current'][i]), img[\n 'images'][i])\n return {'product': pro, 'specific': Product_Specification.objects.\n values('name', 'value').filter(product=pro.id), 'img':\n Product_Image.objects.values('image').filter(product=pro.id)}\n\n def update_extra_data(self, data, ser_data):\n extra_d = {}\n if ser_data and 'color' in ser_data:\n if 'current' in data['color']:\n if data['color']['current'] != data['color']['color']:\n Product_Color.objects.filter(product=self._product_obj.id\n ).delete()\n for i in data['color']['color']:\n ser_data['color'].create({'product': self.\n _product_obj, 'color': i})\n else:\n for i in data['color']['color']:\n ser_data['color'].create({'product': self._product_obj,\n 'color': i})\n else:\n col = Product_Color.objects.filter(product=self._product_obj.id)\n if col.exists():\n col.delete()\n if ser_data and 'size' in ser_data:\n siz = data['size']['size'][0]\n typ = data['size']['size'][1]\n if 'current' in data['size']:\n cur_siz = data['size']['current'][0]\n cur_typ = data['size']['current'][1]\n if siz != cur_siz:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n elif typ != cur_typ:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {'size': siz,\n 'type_size': typ})\n else:\n ser_data['size'].create({'product': self._product_obj,\n 'size': siz, 'type_size': typ})\n else:\n siz = Product_Size.objects.filter(product=self._product_obj.id)\n if siz.exists():\n siz.delete()\n if ser_data and 'platform' in ser_data:\n if 'platform_current' in data:\n if data['platform_current'] != data['platform']:\n extra_d['platform'] = ser_data['platform'].update(\n Product_Platform.objects.get(product=self.\n _product_obj.id), data['platform'])\n else:\n extra_d['platform'] = ser_data['platform'].create({\n 'product': self._product_obj, 'platform': 
data['platform']}\n )\n else:\n pl = Product_Platform.objects.filter(product=self._product_obj.id)\n if pl.exists():\n pl.delete()\n if ser_data and 'recom_use' in ser_data:\n if 'recom_use_current' in data:\n if data['recom_use_current'] != data['recom_use']:\n extra_d['recom_use'] = ser_data['recom_use'].update(\n Product_Recommended_Use.objects.get(product=self.\n _product_obj.id), data['recom_use'])\n else:\n extra_d['recom_use'] = ser_data['recom_use'].create({\n 'product': self._product_obj, 'recommended_use': data[\n 'recom_use']})\n else:\n recom = Product_Recommended_Use.objects.filter(product=self.\n _product_obj.id)\n if recom.exists():\n recom.delete()\n if ser_data and 'term_condition' in ser_data:\n if 'term_condition_current' in data:\n if data['term_condition_current'] != data['term_condition']:\n extra_d['term_condition'] = ser_data['term_condition'\n ].update(Product_Terms_Condition.objects.get(\n product=self._product_obj.id), data['term_condition'])\n else:\n extra_d['term_condition'] = ser_data['term_condition'].create({\n 'product': self._product_obj, 'terms_condition': data[\n 'term_condition']})\n else:\n terms = Product_Terms_Condition.objects.filter(product=self.\n _product_obj.id)\n if terms.exists():\n terms.delete()\n extra_d['color'] = Product_Color.objects.filter(product=self.\n _product_obj.id)\n extra_d['size'] = Product_Size.objects.filter(product=self.\n _product_obj.id)\n return extra_d\n\n def validate_main_data(self, data):\n pro_ser = UpdateSerializer(instance=self._product_obj, data=data)\n ser_data = {}\n if pro_ser.is_valid():\n ser_data['product'] = pro_ser\n sp = self.validate_specification(self._product_obj, data[\n 'specific'])\n if isinstance(sp, SpecificationSerializer):\n ser_data['specific'] = sp\n if 'images' in data:\n data['images'] = {'images': data['images'], 'current':\n data['img_current']}\n img = self.validate_image(self._product_obj, data['images']\n )\n if isinstance(img, ImageSerializer):\n 
ser_data['image'] = img\n return ser_data\n else:\n return {'errors': img}\n else:\n return ser_data\n else:\n return {'errors': sp}\n else:\n return {'errors': pro_ser.errors}\n\n def validate_extra_data(self, data):\n ser_data = {}\n if 'color' in data:\n col = self.validate_color(data['color']['color'])\n if isinstance(col, ColorSerializer):\n ser_data['color'] = col\n else:\n return {'errors': col}\n if 'size' in data:\n siz = self.validate_size(data['size']['size'])\n if isinstance(siz, SizeSerializer):\n ser_data['size'] = siz\n else:\n return {'errors': siz}\n if 'platform' in data:\n pl = PlatformSerializer(data={'platform': data['platform']})\n if pl.is_valid():\n ser_data['platform'] = pl\n else:\n return {'errors': pl.errors}\n if 'recom_use' in data:\n recom = RecommendedUseSerializer(data={'recommended_use': data[\n 'recom_use']})\n if recom.is_valid():\n ser_data['recom_use'] = recom\n else:\n return {'errors': recom.errors}\n if 'term_condition' in data:\n term = TermConditionSerializer(data={'terms_condition': data[\n 'term_condition']})\n if term.is_valid():\n ser_data['term_condition'] = term\n else:\n return {'errors': term.errors}\n if ser_data:\n return ser_data\n else:\n return False\n\n def validate_specification(self, pro, data):\n for i in data:\n sp = SpecificationSerializer(data={'name': i['name'], 'value':\n i['value']})\n if not sp.is_valid():\n return sp.errors\n return sp\n\n def validate_image(self, pro, data):\n for i in data['images']:\n img = ImageSerializer(data={'image': data['images'][i]})\n if not img.is_valid():\n return img.errors\n return img\n\n def validate_color(self, data):\n for i in data:\n col = ColorSerializer(data={'color': i})\n if not col.is_valid():\n return col.errors\n return col\n\n def validate_size(self, data):\n size = SizeSerializer(data={'size': data[0], 'type_size': data[1]})\n if not size.is_valid():\n return size.errors\n return size\n",
"step-5": "from rest_framework.generics import GenericAPIView\nfrom rest_framework.response import Response\nfrom rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND\nfrom ...models.brand import Brand\nfrom ...models.product import type_currency_choices, type_condition_choices, User, Product\nfrom ...models.product_color import color_choices, Product_Color\nfrom ...models.product_size import type_size_choices, Product_Size\nfrom ...models.product_image import Product_Image\nfrom ...models.product_specification import Product_Specification\nfrom ...models.product_platform import Product_Platform\nfrom ...models.product_recommended_use import Product_Recommended_Use\nfrom ...models.product_terms_condition import Product_Terms_Condition\nfrom ...serilaizers.products.updateSerializer import UpdateSerializer\nfrom ...serilaizers.products.specificationSerializer import SpecificationSerializer\nfrom ...serilaizers.products.imageSerializer import ImageSerializer\nfrom ...serilaizers.products.colorSerializer import ColorSerializer\nfrom ...serilaizers.products.platformSerializer import PlatformSerializer\nfrom ...serilaizers.products.recommendedUseSerializer import RecommendedUseSerializer\nfrom ...serilaizers.products.sizeSerializer import SizeSerializer\nfrom ...serilaizers.products.termConditionSerializer import TermConditionSerializer\n\n\nclass UpdateProduct(GenericAPIView):\n serializer_class = UpdateSerializer\n _product_obj = None\n _brands = Brand.objects.values(\"id\", \"name\")\n def get(self, request, *args, **kwargs):\n\n data = self.get_queryset()\n extract_sp = self.extract_filter_data(Product_Specification.objects.values(\n \"name\", \"value\").filter(product=data.id))\n extract_img = self.extract_filter_data(\n Product_Image.objects.values('image').filter(product=data.id))\n if data:\n return Response(self.get_data({\n \"product\": data,\n \"specific\": extract_sp,\n \"img\": extract_img\n }))\n else:\n return Response({\"errors\": False}, 
status=HTTP_404_NOT_FOUND)\n\n def extract_filter_data(self, data):\n arr = []\n for i in data:\n arr.append(i)\n return arr\n\n def get_extra_data(self, id):\n\n extra_data = {}\n pl = Product_Platform.objects.values(\n 'platform').filter(product=id)\n col = Product_Color.objects.values('color').filter(product=id)\n siz = Product_Size.objects.values(\n 'size', 'type_size').filter(product=id)\n recom = Product_Recommended_Use.objects.values(\n 'recommended_use').filter(product=id)\n terms = Product_Terms_Condition.objects.values(\n 'terms_condition').filter(product=id)\n\n if pl.exists():\n extra_data['platform'] = self.extract_filter_data(pl)\n if col.exists():\n extra_data['color'] = self.extract_filter_data(col)\n if siz.exists():\n extra_data['size'] = self.extract_filter_data(siz)\n if recom.exists():\n extra_data['recom_use'] = self.extract_filter_data(recom)\n if terms.exists():\n extra_data['term_condition'] = self.extract_filter_data(terms)\n\n if extra_data:\n return extra_data\n else:\n return False\n\n def get_queryset(self):\n try:\n return Product.objects.get(id=self.kwargs['pk'])\n except:\n return False\n\n def put(self, request, *args, **kwargs):\n\n self._product_obj = self.get_queryset()\n data = self.prepare_data(self.request.data, self.request.FILES)\n main = self.validate_main_data(data)\n if 'errors' in main:\n return Response(main['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n extra = self.validate_extra_data(data)\n if extra:\n if 'errors' in extra:\n return Response(extra['errors'], status=HTTP_400_BAD_REQUEST)\n else:\n main = self.update_main_data(data, main)\n self.update_extra_data(data, extra)\n return Response(self.get_data(main))\n\n self.update_extra_data(data, False)\n main = self.update_main_data(data, main)\n return Response(self.get_data(main))\n\n def get_data(self, main):\n\n return {\n \"user\": User.objects.values('id', 'username').get(username=\"root\"),\n \"name\": main['product'].title,\n \"brand\": 
main['product'].brand.id,\n \"quantity\": main['product'].quantity,\n \"price\": main['product'].price,\n \"currency\": main['product'].currency,\n \"condition\": main['product'].condition,\n \"description\": main['product'].description,\n \"brands\": self._brands,\n \"conditions\": type_condition_choices,\n \"currencys\": type_currency_choices,\n \"colors\": color_choices,\n \"sizes\": type_size_choices,\n \"specific\": self.extract_filter_data(main['specific']),\n \"images\": self.extract_filter_data(main['img']),\n \"extra_data\": self.get_extra_data(main['product'].id)\n }\n\n def prepare_data(self, data, img_data=None):\n # prepared the data extract all data from request and loads using json\n # extract images from request files and\n # return data as a dict\n\n from json import loads\n\n data = data['data']\n data = loads(data)\n data['img_current'] = {\n i.split(\"_\")[2]: data['img_current'][i] for i in data['img_current']}\n if len(img_data) > 0:\n img = {i.split(\"_\")[1]: img_data[i] for i in img_data}\n data['images'] = img\n\n return data\n\n def update_main_data(self, data, ser_data):\n\n pro = ser_data['product'].update(self._product_obj, data)\n\n for i in data['specific']:\n if 'current' in i:\n if i['current'] != i['name']:\n ser_data['specific'].update(Product_Specification.objects.get(\n product=self._product_obj.id, name=i['current']), i)\n else:\n i['product'] = self._product_obj\n ser_data['specific'].create(i)\n\n if 'images' in data:\n img = data['images']\n for i in img['images']:\n ser_data['image'].update(\n Product_Image.objects.get(\n product=self._product_obj.id,\n image=img['current'][i]), img['images'][i])\n return {\n \"product\": pro,\n \"specific\": Product_Specification.objects.values('name', 'value').filter(product=pro.id),\n \"img\": Product_Image.objects.values('image').filter(product=pro.id)\n }\n\n def update_extra_data(self, data, ser_data):\n extra_d = {}\n\n if ser_data and ('color' in ser_data):\n\n if 'current' in 
data['color']:\n if data['color']['current'] != data['color']['color']:\n Product_Color.objects.filter(\n product=self._product_obj.id).delete()\n for i in data['color']['color']:\n ser_data['color'].create(\n {\"product\": self._product_obj, 'color': i})\n\n else:\n for i in data['color']['color']:\n ser_data['color'].create(\n {\"product\": self._product_obj, 'color': i})\n else:\n col = Product_Color.objects.filter(\n product=self._product_obj.id)\n if col.exists():\n col.delete()\n\n if ser_data and ('size' in ser_data):\n siz = data['size']['size'][0]\n typ = data['size']['size'][1]\n if 'current' in data['size']:\n cur_siz = data['size']['current'][0]\n cur_typ = data['size']['current'][1]\n if siz != cur_siz:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {\"size\": siz, \"type_size\": typ})\n elif typ != cur_typ:\n ser_data['size'].update(Product_Size.objects.get(\n product=self._product_obj.id), {\"size\": siz, \"type_size\": typ})\n else:\n ser_data['size'].create(\n {\"product\": self._product_obj, \"size\": siz, \"type_size\": typ})\n else:\n siz = Product_Size.objects.filter(\n product=self._product_obj.id)\n if siz.exists():\n siz.delete()\n\n if ser_data and ('platform' in ser_data):\n if 'platform_current' in data:\n if data['platform_current'] != data['platform']:\n extra_d['platform'] = ser_data['platform'].update(Product_Platform.objects.get(\n product=self._product_obj.id), data['platform'])\n else:\n extra_d['platform'] = ser_data['platform'].create(\n {\"product\": self._product_obj, \"platform\": data['platform']})\n else:\n pl = Product_Platform.objects.filter(\n product=self._product_obj.id)\n if pl.exists():\n pl.delete()\n\n if ser_data and ('recom_use' in ser_data):\n\n if 'recom_use_current' in data:\n if data['recom_use_current'] != data['recom_use']:\n extra_d['recom_use'] = ser_data['recom_use'].update(Product_Recommended_Use.objects.get(\n product=self._product_obj.id), data['recom_use'])\n 
else:\n\n extra_d['recom_use'] = ser_data['recom_use'].create(\n {\"product\": self._product_obj, \"recommended_use\": data['recom_use']})\n else:\n recom = Product_Recommended_Use.objects.filter(\n product=self._product_obj.id)\n if recom.exists():\n recom.delete()\n\n if ser_data and ('term_condition' in ser_data):\n if 'term_condition_current' in data:\n if data['term_condition_current'] != data['term_condition']:\n extra_d['term_condition'] = ser_data['term_condition'].update(\n Product_Terms_Condition.objects.get(product=self._product_obj.id), data['term_condition'])\n else:\n extra_d['term_condition'] = ser_data['term_condition'].create(\n {\"product\": self._product_obj, \"terms_condition\": data['term_condition']})\n else:\n terms = Product_Terms_Condition.objects.filter(\n product=self._product_obj.id)\n if terms.exists():\n terms.delete()\n\n extra_d['color'] = Product_Color.objects.filter(\n product=self._product_obj.id)\n extra_d['size'] = Product_Size.objects.filter(\n product=self._product_obj.id)\n\n return extra_d\n\n def validate_main_data(self, data):\n\n pro_ser = UpdateSerializer(instance=self._product_obj, data=data)\n ser_data = {}\n if pro_ser.is_valid():\n ser_data['product'] = pro_ser\n sp = self.validate_specification(\n self._product_obj, data['specific'])\n if isinstance(sp, SpecificationSerializer):\n ser_data['specific'] = sp\n if 'images' in data:\n data['images'] = {\"images\": data['images'],\n 'current': data['img_current']}\n img = self.validate_image(\n self._product_obj, data['images'])\n if isinstance(img, ImageSerializer):\n ser_data['image'] = img\n return ser_data\n else:\n return{\"errors\": img}\n else:\n return ser_data\n else:\n return {\"errors\": sp} # return error\n else:\n return {\"errors\": pro_ser.errors}\n\n def validate_extra_data(self, data):\n\n ser_data = {}\n if 'color' in data:\n col = self.validate_color(data['color']['color'])\n if isinstance(col, ColorSerializer):\n ser_data['color'] = col\n else:\n 
return {\"errors\": col}\n if 'size' in data:\n siz = self.validate_size(data['size']['size'])\n if isinstance(siz, SizeSerializer):\n ser_data['size'] = siz\n else:\n return {\"errors\": siz}\n\n if 'platform' in data:\n pl = PlatformSerializer(data={\"platform\": data['platform']})\n if pl.is_valid():\n ser_data['platform'] = pl\n else:\n return {\"errors\": pl.errors}\n\n if 'recom_use' in data:\n recom = RecommendedUseSerializer(\n data={\"recommended_use\": data['recom_use']})\n if recom.is_valid():\n ser_data['recom_use'] = recom\n else:\n return {\"errors\": recom.errors}\n\n if 'term_condition' in data:\n term = TermConditionSerializer(\n data={\"terms_condition\": data['term_condition']})\n if term.is_valid():\n ser_data['term_condition'] = term\n else:\n return {\"errors\": term.errors}\n\n if ser_data:\n return ser_data\n else:\n return False\n\n def validate_specification(self, pro, data):\n\n for i in data:\n sp = SpecificationSerializer(\n data={\"name\": i['name'], \"value\": i['value']})\n if not sp.is_valid():\n return sp.errors\n return sp\n\n def validate_image(self, pro, data):\n\n for i in data['images']:\n img = ImageSerializer(data={\"image\": data['images'][i]})\n if not img.is_valid():\n return img.errors\n\n return img\n\n def validate_color(self, data):\n\n for i in data:\n col = ColorSerializer(data={\"color\": i})\n if not col.is_valid():\n return col.errors\n\n return col\n\n def validate_size(self, data):\n\n size = SizeSerializer(data={\"size\": data[0],\n \"type_size\": data[1]})\n if not size.is_valid():\n return size.errors\n\n return size\n",
"step-ids": [
11,
13,
16,
17,
19
]
}
|
[
11,
13,
16,
17,
19
] |
import os
from flask import Flask, jsonify, request, abort, make_response
from flask_sqlalchemy import SQLAlchemy
from .models import User
from .config import app_config
app = Flask(__name__)
# Pick the configuration object by FLASK_ENV; falls back to 'production'
# when the environment variable is unset.
app.config.from_object(app_config[os.getenv('FLASK_ENV', 'production')])
# SQLAlchemy session/engine bound to this app's configuration.
db = SQLAlchemy(app)
@app.route('/api/v1/users/<int:user_id>', methods=['GET'])
def get_user(user_id):
    """Return the serialized user with the given id, or 404 if absent."""
    user = User.query.filter_by(id=user_id).first()
    if user is None:
        # .first() returns None for a missing row. The previous bare
        # ``except:`` also converted unrelated failures (DB down, bad
        # serialize property) into a misleading 404.
        abort(404)
    return jsonify({'user': user.serialize})
@app.route('/api/v1/users', methods=['POST'])
def create_user():
    """Create a user from a JSON body with 'firstName' and 'lastName'.

    Returns the serialized user with HTTP 201, or aborts with 400 when
    the payload is missing or lacks either required key.
    """
    # Bug fix: the original condition read ``not 'firstName'`` which is
    # always False (non-empty string literal is truthy), so a payload
    # missing firstName was never rejected.
    if (not request.json or 'firstName' not in request.json
            or 'lastName' not in request.json):
        abort(400)
    body = request.get_json()
    user = User(body['firstName'], body['lastName'])
    db.session.add(user)
    db.session.commit()
    return jsonify({'user': user.serialize}), 201
@app.errorhandler(404)
def not_found(error):
    """Render 404 responses as a small JSON error object."""
    payload = jsonify({'error': 'Not found'})
    return make_response(payload, 404)
@app.errorhandler(400)
def bad_request(error):
    """Render 400 responses as JSON.

    Renamed from ``not_found``: the module defined three handlers under
    the same name, each rebinding the previous one. Flask registers the
    function via the decorator, so the rename is safe for routing.
    """
    return make_response(jsonify({'error': 'Bad Request'}), 400)
@app.errorhandler(405)
def method_not_allowed(error):
    """Render 405 responses as JSON.

    Renamed from ``not_found`` to stop shadowing the 404 handler's name;
    Flask keeps its own reference via the decorator, so behavior is
    unchanged.
    """
    return make_response(jsonify({'error': 'Method Not Allowed'}), 405)
|
normal
|
{
"blob_id": "f4519fa82ffc6bf945c7bb36d3761a708a06f641",
"index": 5933,
"step-1": "<mask token>\n\n\n@app.route('/api/v1/users/<int:user_id>', methods=['GET'])\ndef get_user(user_id):\n try:\n user = User.query.filter_by(id=user_id).first()\n return jsonify({'user': user.serialize})\n except:\n abort(404)\n\n\n@app.route('/api/v1/users', methods=['POST'])\ndef create_user():\n if not request.json or not 'firstName' or not 'lastName' in request.json:\n abort(400)\n user = User(request.get_json()['firstName'], request.get_json()['lastName']\n )\n db.session.add(user)\n db.session.commit()\n return jsonify({'user': user.serialize}), 201\n\n\n@app.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\n<mask token>\n\n\n@app.errorhandler(405)\ndef not_found(error):\n return make_response(jsonify({'error': 'Method Not Allowed'}), 405)\n",
"step-2": "<mask token>\napp.config.from_object(app_config[os.getenv('FLASK_ENV', 'production')])\n<mask token>\n\n\n@app.route('/api/v1/users/<int:user_id>', methods=['GET'])\ndef get_user(user_id):\n try:\n user = User.query.filter_by(id=user_id).first()\n return jsonify({'user': user.serialize})\n except:\n abort(404)\n\n\n@app.route('/api/v1/users', methods=['POST'])\ndef create_user():\n if not request.json or not 'firstName' or not 'lastName' in request.json:\n abort(400)\n user = User(request.get_json()['firstName'], request.get_json()['lastName']\n )\n db.session.add(user)\n db.session.commit()\n return jsonify({'user': user.serialize}), 201\n\n\n@app.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\n@app.errorhandler(400)\ndef not_found(error):\n return make_response(jsonify({'error': 'Bad Request'}), 400)\n\n\n@app.errorhandler(405)\ndef not_found(error):\n return make_response(jsonify({'error': 'Method Not Allowed'}), 405)\n",
"step-3": "<mask token>\napp = Flask(__name__)\napp.config.from_object(app_config[os.getenv('FLASK_ENV', 'production')])\ndb = SQLAlchemy(app)\n\n\n@app.route('/api/v1/users/<int:user_id>', methods=['GET'])\ndef get_user(user_id):\n try:\n user = User.query.filter_by(id=user_id).first()\n return jsonify({'user': user.serialize})\n except:\n abort(404)\n\n\n@app.route('/api/v1/users', methods=['POST'])\ndef create_user():\n if not request.json or not 'firstName' or not 'lastName' in request.json:\n abort(400)\n user = User(request.get_json()['firstName'], request.get_json()['lastName']\n )\n db.session.add(user)\n db.session.commit()\n return jsonify({'user': user.serialize}), 201\n\n\n@app.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\n@app.errorhandler(400)\ndef not_found(error):\n return make_response(jsonify({'error': 'Bad Request'}), 400)\n\n\n@app.errorhandler(405)\ndef not_found(error):\n return make_response(jsonify({'error': 'Method Not Allowed'}), 405)\n",
"step-4": "import os\nfrom flask import Flask, jsonify, request, abort, make_response\nfrom flask_sqlalchemy import SQLAlchemy\nfrom .models import User\nfrom .config import app_config\napp = Flask(__name__)\napp.config.from_object(app_config[os.getenv('FLASK_ENV', 'production')])\ndb = SQLAlchemy(app)\n\n\n@app.route('/api/v1/users/<int:user_id>', methods=['GET'])\ndef get_user(user_id):\n try:\n user = User.query.filter_by(id=user_id).first()\n return jsonify({'user': user.serialize})\n except:\n abort(404)\n\n\n@app.route('/api/v1/users', methods=['POST'])\ndef create_user():\n if not request.json or not 'firstName' or not 'lastName' in request.json:\n abort(400)\n user = User(request.get_json()['firstName'], request.get_json()['lastName']\n )\n db.session.add(user)\n db.session.commit()\n return jsonify({'user': user.serialize}), 201\n\n\n@app.errorhandler(404)\ndef not_found(error):\n return make_response(jsonify({'error': 'Not found'}), 404)\n\n\n@app.errorhandler(400)\ndef not_found(error):\n return make_response(jsonify({'error': 'Bad Request'}), 400)\n\n\n@app.errorhandler(405)\ndef not_found(error):\n return make_response(jsonify({'error': 'Method Not Allowed'}), 405)\n",
"step-5": null,
"step-ids": [
4,
6,
7,
8
]
}
|
[
4,
6,
7,
8
] |
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Post(models.Model):
    """A user-authored post with optional image and like/dislike voting."""
    title = models.CharField(max_length=40)
    content = models.TextField()
    # NOTE(review): auto_now refreshes this timestamp on *every* save;
    # for a publication date auto_now_add is usually intended -- confirm.
    date_published = models.DateTimeField(auto_now=True)
    author = models.ForeignKey(User, on_delete=models.CASCADE)
    img = models.ImageField(upload_to='post_img', null=True, blank=True)
    # Distinct related_name values give User non-clashing reverse
    # accessors (user.like_user / user.dislike_user).
    like = models.ManyToManyField(User, related_name='like_user', blank=True)
    dislike = models.ManyToManyField(User, related_name='dislike_user',blank=True)
    def __str__(self):
        return self.title
class Comment(models.Model):
    """A top-level comment left by a user on a Post."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    post = models.ForeignKey(Post, on_delete=models.CASCADE)
    comment_box = models.TextField()
    # auto_now: rewritten on every save, so this reflects the last edit.
    date_comment = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.user.username
class Comment_to_comment(models.Model):
    """A reply attached to an existing Comment (one level of nesting)."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    from_comment = models.ForeignKey(Comment, on_delete=models.CASCADE)
    comment = models.TextField()
    date_comment = models.DateTimeField(auto_now=True)
    def __str__(self):
        # Displays the parent comment's text, not this reply's own text.
        return self.from_comment.comment_box
class Points(models.Model):
    """An integer point value associated with a Post."""
    post = models.ForeignKey(Post, on_delete=models.CASCADE)
    point = models.IntegerField(default=0)
|
normal
|
{
"blob_id": "1257b90781a213ca8e07f67a33b8e847d0525653",
"index": 9354,
"step-1": "<mask token>\n\n\nclass Comment(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.user.username\n\n\nclass Comment_to_comment(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n from_comment = models.ForeignKey(Comment, on_delete=models.CASCADE)\n comment = models.TextField()\n date_comment = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.from_comment.comment_box\n\n\nclass Points(models.Model):\n post = models.ForeignKey(Post, on_delete=models.CASCADE)\n point = models.IntegerField(default=0)\n",
"step-2": "<mask token>\n\n\nclass Post(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.title\n\n\nclass Comment(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n post = models.ForeignKey(Post, on_delete=models.CASCADE)\n comment_box = models.TextField()\n date_comment = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.user.username\n\n\nclass Comment_to_comment(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n from_comment = models.ForeignKey(Comment, on_delete=models.CASCADE)\n comment = models.TextField()\n date_comment = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.from_comment.comment_box\n\n\nclass Points(models.Model):\n post = models.ForeignKey(Post, on_delete=models.CASCADE)\n point = models.IntegerField(default=0)\n",
"step-3": "<mask token>\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=40)\n content = models.TextField()\n date_published = models.DateTimeField(auto_now=True)\n author = models.ForeignKey(User, on_delete=models.CASCADE)\n img = models.ImageField(upload_to='post_img', null=True, blank=True)\n like = models.ManyToManyField(User, related_name='like_user', blank=True)\n dislike = models.ManyToManyField(User, related_name='dislike_user',\n blank=True)\n\n def __str__(self):\n return self.title\n\n\nclass Comment(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n post = models.ForeignKey(Post, on_delete=models.CASCADE)\n comment_box = models.TextField()\n date_comment = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.user.username\n\n\nclass Comment_to_comment(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n from_comment = models.ForeignKey(Comment, on_delete=models.CASCADE)\n comment = models.TextField()\n date_comment = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.from_comment.comment_box\n\n\nclass Points(models.Model):\n post = models.ForeignKey(Post, on_delete=models.CASCADE)\n point = models.IntegerField(default=0)\n",
"step-4": "from django.db import models\nfrom django.contrib.auth.models import User\n\n\nclass Post(models.Model):\n title = models.CharField(max_length=40)\n content = models.TextField()\n date_published = models.DateTimeField(auto_now=True)\n author = models.ForeignKey(User, on_delete=models.CASCADE)\n img = models.ImageField(upload_to='post_img', null=True, blank=True)\n like = models.ManyToManyField(User, related_name='like_user', blank=True)\n dislike = models.ManyToManyField(User, related_name='dislike_user',\n blank=True)\n\n def __str__(self):\n return self.title\n\n\nclass Comment(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n post = models.ForeignKey(Post, on_delete=models.CASCADE)\n comment_box = models.TextField()\n date_comment = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.user.username\n\n\nclass Comment_to_comment(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n from_comment = models.ForeignKey(Comment, on_delete=models.CASCADE)\n comment = models.TextField()\n date_comment = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.from_comment.comment_box\n\n\nclass Points(models.Model):\n post = models.ForeignKey(Post, on_delete=models.CASCADE)\n point = models.IntegerField(default=0)\n",
"step-5": "from django.db import models\nfrom django.contrib.auth.models import User\n\n# Create your models here.\nclass Post(models.Model):\n title = models.CharField(max_length=40)\n content = models.TextField()\n date_published = models.DateTimeField(auto_now=True)\n author = models.ForeignKey(User, on_delete=models.CASCADE)\n img = models.ImageField(upload_to='post_img', null=True, blank=True)\n like = models.ManyToManyField(User, related_name='like_user', blank=True)\n dislike = models.ManyToManyField(User, related_name='dislike_user',blank=True)\n\n def __str__(self):\n return self.title\n\nclass Comment(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n post = models.ForeignKey(Post, on_delete=models.CASCADE)\n comment_box = models.TextField()\n date_comment = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.user.username\n\nclass Comment_to_comment(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n from_comment = models.ForeignKey(Comment, on_delete=models.CASCADE)\n comment = models.TextField()\n date_comment = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.from_comment.comment_box\n\nclass Points(models.Model):\n post = models.ForeignKey(Post, on_delete=models.CASCADE)\n point = models.IntegerField(default=0)\n ",
"step-ids": [
7,
10,
11,
12,
13
]
}
|
[
7,
10,
11,
12,
13
] |
import numpy as np
# Interrogative/auxiliary stop words stripped from questions before matching.
er = ['why','who','how','where','which','what','when','was','were','did','do','does','is','are','many','much']
qst = []   # parsed questions: one list of keyword strings per question
txt = None  # numpy column vector (n, 1) of lowercased sentences from the text
ans = None  # numpy column vector (m, 1) of candidate answers (split on ';')
fnd = []   # per-keyword lists of sentence indices that contain the keyword
def chek_qst(qst):
    """Strip stop/question words (global ``er``) from a question.

    Mutates ``qst`` in place and also returns it, matching the original
    contract (callers reassign the return value).
    """
    global er
    # Bug fix: the original removed items from ``qst`` while iterating
    # the same list, which skips the element immediately after each
    # removal (e.g. two adjacent stop words left the second in place).
    qst[:] = [word for word in qst if word not in er]
    return qst
def search_word(qst):
    """Record, for each question keyword, which sentences contain it.

    For every keyword in ``qst``, collects the indices of rows in the
    global ``txt`` matrix whose text contains the keyword (substring
    match), and appends that index list to the global ``fnd``
    accumulator. Keywords with no matching sentence are skipped.
    """
    global txt
    for word in qst:
        hits = [idx for idx, row in enumerate(txt)
                if any(word in cell for cell in row)]
        if hits:
            fnd.append(hits)
def read():
    """Read the text, five questions, and the answer list from stdin.

    Populates the module globals: ``txt`` (sentence column vector),
    ``qst`` (array of keyword lists, stop words removed), and ``ans``
    (candidate-answer column vector).
    """
    global txt
    global qst
    global ans
    sentences = input().lower().split('.')
    txt = np.array(sentences).reshape(len(sentences), 1)
    for _ in range(5):
        qst.append(input().lower().replace('?', '').split())
    split_quest()
    qst = np.array(qst)
    candidates = input().lower().split(';')
    ans = np.array(candidates).reshape(len(candidates), 1)
def split_quest():
    """Strip stop words from every stored question, in place."""
    for idx, question in enumerate(qst):
        qst[idx] = chek_qst(question)
def find_answer(fnd):
    """Print the best answer for one question.

    Strategy 1: if some keyword matched exactly one sentence, scan the
    candidate answers and print the first candidate whose text occurs in
    that sentence. Strategy 2 (fallback): take the keyword with the
    fewest matching sentences, score each of those sentences by how many
    other keywords also matched it, and print the top-scoring sentence
    verbatim.

    NOTE(review): if no keyword matches exactly one sentence, ``answer``
    stays None and ``txt[answer]`` relies on numpy's None-indexing,
    which is probably unintended -- confirm against expected inputs.
    """
    flag = False           # becomes True once an answer has been printed
    answer = None          # index of a uniquely-matched sentence, if any
    global ans
    temp_min = []          # lengths of each keyword's match list (strategy 2)
    for i in fnd:
        if len(i) == 1:
            # A keyword appearing in exactly one sentence pins it down.
            answer = i[0]
            # print(str(txt[answer][0]))
    for i in ans:
        for j in i:
            if j in txt[answer][0]:
                # print('from first :: ',j,'\n',answer)
                print(j)
                flag = True
            if flag:
                break
        if flag:
            break
    if not flag:
        # Strategy 2: rarest keyword first, then vote across keywords.
        for i in fnd:
            temp_min.append(len(i))
        temp_min = np.array(temp_min)
        temp_min = temp_min.argmin()
        # print(temp)
        p = []             # votes: how many other keywords share each sentence
        for i in fnd[temp_min]:
            count = 0
            for j,h in enumerate(fnd):
                if fnd[temp_min] != h:
                    if i in h:
                        count +=1
            p.append(count)
        p = np.array(p)
        # print('from second :: ',str(txt[fnd[temp_min][p.argmax()]][0]))
        print(str(txt[fnd[temp_min][p.argmax()]][0]))
        # for i in ans:
        #     for j in i:
        #         if j in txt[fnd[temp_min][p.argmax()]][0]:
        #             print(j)
        #             # break
        #             break
# Entry point: read the text, the five questions and the candidate
# answers from stdin, then answer each question in turn.
read()
for i,qst_num in enumerate(qst):
    fnd = []   # reset the per-question match accumulator (module global)
    search_word(qst_num)
    # print('\n',fnd)
    find_answer(fnd)
    # fnd = np.array(fnd).reshape(len(fnd))
    # print('questin #{}'.format(i+1),fnd,'\n')
    # print(str(txt[find_answer(fnd)][0]))
# print(ans)
# print('\n',qst)
# print('\n\n',[(i,j[0]) for i,j in enumerate(txt)])
# print('\n\n',[(i,j[0]) for i,j in enumerate(ans)])
'''Zebras are several species of African equids (horse family) united by their distinctive black and white stripes. Their stripes come in different patterns, unique to each individual. They are generally social animals that live in small harems to large herds. Unlike their closest relatives, horses and donkeys, zebras have never been truly domesticated. There are three species of zebras: the plains zebra, the Grévy's zebra and the mountain zebra. The plains zebra and the mountain zebra belong to the subgenus Hippotigris, but Grévy's zebra is the sole species of subgenus Dolichohippus. The latter resembles an ass, to which it is closely related, while the former two are more horse-like. All three belong to the genus Equus, along with other living equids. The unique stripes of zebras make them one of the animals most familiar to people. They occur in a variety of habitats, such as grasslands, savannas, woodlands, thorny scrublands, mountains, and coastal hills. However, various anthropogenic factors have had a severe impact on zebra populations, in particular hunting for skins and habitat destruction. Grévy's zebra and the mountain zebra are endangered. While plains zebras are much more plentiful, one subspecies, the quagga, became extinct in the late 19th century – though there is currently a plan, called the Quagga Project, that aims to breed zebras that are phenotypically similar to the quagga in a process called breeding back.
Which Zebras are endangered?
What is the aim of the Quagga Project?
Which animals are some of their closest relatives?
Which are the three species of zebras?
Which subgenus do the plains zebra and the mountain zebra belong to?
subgenus Hippotigris;the plains zebra, the Grévy's zebra and the mountain zebra;horses and donkeys;aims to breed zebras that are phenotypically similar to the quagga;Grévy's zebra and the mountain zebra
'''
|
normal
|
{
"blob_id": "d30129248f5245560ee0d3ee786e118427e169d7",
"index": 4616,
"step-1": "<mask token>\n\n\ndef search_word(qst):\n global txt\n for h in qst:\n temp = []\n for n, l in enumerate(txt):\n if [n for i, j in enumerate(l) if h in j] != []:\n temp.append(n)\n if temp != []:\n fnd.append(temp)\n\n\n<mask token>\n\n\ndef split_quest():\n for i in range(len(qst)):\n qst[i] = chek_qst(qst[i])\n\n\ndef find_answer(fnd):\n flag = False\n answer = None\n global ans\n temp_min = []\n for i in fnd:\n if len(i) == 1:\n answer = i[0]\n for i in ans:\n for j in i:\n if j in txt[answer][0]:\n print(j)\n flag = True\n if flag:\n break\n if flag:\n break\n if not flag:\n for i in fnd:\n temp_min.append(len(i))\n temp_min = np.array(temp_min)\n temp_min = temp_min.argmin()\n p = []\n for i in fnd[temp_min]:\n count = 0\n for j, h in enumerate(fnd):\n if fnd[temp_min] != h:\n if i in h:\n count += 1\n p.append(count)\n p = np.array(p)\n print(str(txt[fnd[temp_min][p.argmax()]][0]))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef chek_qst(qst):\n global er\n for h in er:\n for i in qst:\n if i == h:\n qst.remove(i)\n return qst\n\n\ndef search_word(qst):\n global txt\n for h in qst:\n temp = []\n for n, l in enumerate(txt):\n if [n for i, j in enumerate(l) if h in j] != []:\n temp.append(n)\n if temp != []:\n fnd.append(temp)\n\n\ndef read():\n global txt\n global qst\n global ans\n txt = np.array(input().lower().split('.'))\n txt = txt.reshape(len(txt), 1)\n for i in range(5):\n qst.append(input().lower().replace('?', '').split())\n split_quest()\n qst = np.array(qst)\n ans = np.array(input().lower().split(';'))\n ans = ans.reshape(len(ans), 1)\n\n\ndef split_quest():\n for i in range(len(qst)):\n qst[i] = chek_qst(qst[i])\n\n\ndef find_answer(fnd):\n flag = False\n answer = None\n global ans\n temp_min = []\n for i in fnd:\n if len(i) == 1:\n answer = i[0]\n for i in ans:\n for j in i:\n if j in txt[answer][0]:\n print(j)\n flag = True\n if flag:\n break\n if flag:\n break\n if not flag:\n for i in fnd:\n temp_min.append(len(i))\n temp_min = np.array(temp_min)\n temp_min = temp_min.argmin()\n p = []\n for i in fnd[temp_min]:\n count = 0\n for j, h in enumerate(fnd):\n if fnd[temp_min] != h:\n if i in h:\n count += 1\n p.append(count)\n p = np.array(p)\n print(str(txt[fnd[temp_min][p.argmax()]][0]))\n\n\nread()\nfor i, qst_num in enumerate(qst):\n fnd = []\n search_word(qst_num)\n find_answer(fnd)\n<mask token>\n",
"step-3": "<mask token>\ner = ['why', 'who', 'how', 'where', 'which', 'what', 'when', 'was', 'were',\n 'did', 'do', 'does', 'is', 'are', 'many', 'much']\nqst = []\ntxt = None\nans = None\nfnd = []\n\n\ndef chek_qst(qst):\n global er\n for h in er:\n for i in qst:\n if i == h:\n qst.remove(i)\n return qst\n\n\ndef search_word(qst):\n global txt\n for h in qst:\n temp = []\n for n, l in enumerate(txt):\n if [n for i, j in enumerate(l) if h in j] != []:\n temp.append(n)\n if temp != []:\n fnd.append(temp)\n\n\ndef read():\n global txt\n global qst\n global ans\n txt = np.array(input().lower().split('.'))\n txt = txt.reshape(len(txt), 1)\n for i in range(5):\n qst.append(input().lower().replace('?', '').split())\n split_quest()\n qst = np.array(qst)\n ans = np.array(input().lower().split(';'))\n ans = ans.reshape(len(ans), 1)\n\n\ndef split_quest():\n for i in range(len(qst)):\n qst[i] = chek_qst(qst[i])\n\n\ndef find_answer(fnd):\n flag = False\n answer = None\n global ans\n temp_min = []\n for i in fnd:\n if len(i) == 1:\n answer = i[0]\n for i in ans:\n for j in i:\n if j in txt[answer][0]:\n print(j)\n flag = True\n if flag:\n break\n if flag:\n break\n if not flag:\n for i in fnd:\n temp_min.append(len(i))\n temp_min = np.array(temp_min)\n temp_min = temp_min.argmin()\n p = []\n for i in fnd[temp_min]:\n count = 0\n for j, h in enumerate(fnd):\n if fnd[temp_min] != h:\n if i in h:\n count += 1\n p.append(count)\n p = np.array(p)\n print(str(txt[fnd[temp_min][p.argmax()]][0]))\n\n\nread()\nfor i, qst_num in enumerate(qst):\n fnd = []\n search_word(qst_num)\n find_answer(fnd)\n<mask token>\n",
"step-4": "import numpy as np\ner = ['why', 'who', 'how', 'where', 'which', 'what', 'when', 'was', 'were',\n 'did', 'do', 'does', 'is', 'are', 'many', 'much']\nqst = []\ntxt = None\nans = None\nfnd = []\n\n\ndef chek_qst(qst):\n global er\n for h in er:\n for i in qst:\n if i == h:\n qst.remove(i)\n return qst\n\n\ndef search_word(qst):\n global txt\n for h in qst:\n temp = []\n for n, l in enumerate(txt):\n if [n for i, j in enumerate(l) if h in j] != []:\n temp.append(n)\n if temp != []:\n fnd.append(temp)\n\n\ndef read():\n global txt\n global qst\n global ans\n txt = np.array(input().lower().split('.'))\n txt = txt.reshape(len(txt), 1)\n for i in range(5):\n qst.append(input().lower().replace('?', '').split())\n split_quest()\n qst = np.array(qst)\n ans = np.array(input().lower().split(';'))\n ans = ans.reshape(len(ans), 1)\n\n\ndef split_quest():\n for i in range(len(qst)):\n qst[i] = chek_qst(qst[i])\n\n\ndef find_answer(fnd):\n flag = False\n answer = None\n global ans\n temp_min = []\n for i in fnd:\n if len(i) == 1:\n answer = i[0]\n for i in ans:\n for j in i:\n if j in txt[answer][0]:\n print(j)\n flag = True\n if flag:\n break\n if flag:\n break\n if not flag:\n for i in fnd:\n temp_min.append(len(i))\n temp_min = np.array(temp_min)\n temp_min = temp_min.argmin()\n p = []\n for i in fnd[temp_min]:\n count = 0\n for j, h in enumerate(fnd):\n if fnd[temp_min] != h:\n if i in h:\n count += 1\n p.append(count)\n p = np.array(p)\n print(str(txt[fnd[temp_min][p.argmax()]][0]))\n\n\nread()\nfor i, qst_num in enumerate(qst):\n fnd = []\n search_word(qst_num)\n find_answer(fnd)\n<mask token>\n",
"step-5": "import numpy as np\n\ner = ['why','who','how','where','which','what','when','was','were','did','do','does','is','are','many','much']\nqst = []\ntxt = None\nans = None\nfnd = []\n\n\ndef chek_qst(qst):\n global er\n for h in er:\n for i in qst:\n if i == h:\n qst.remove(i)\n # qst = np.delete(qst, ([i for i, j in enumerate(qst) if h in j]))\n return qst\n\ndef search_word(qst):\n global txt\n for h in qst:\n temp = []\n for n,l in enumerate(txt):\n if [n for i,j in enumerate(l) if h in j] != []:\n temp.append(n)\n # temp = np.array(temp)\n if temp != []:\n fnd.append(temp)\n\ndef read():\n global txt\n global qst\n global ans\n txt = np.array((input().lower()).split('.'))\n txt = txt.reshape(len(txt), 1)\n for i in range(5):\n qst.append((input().lower()).replace('?','').split())\n\n split_quest()\n qst = np.array(qst)\n ans = np.array((input().lower()).split(';'))\n ans = ans.reshape(len(ans), 1)\n\ndef split_quest():\n for i in range(len(qst)):\n qst[i] = chek_qst(qst[i])\n\ndef find_answer(fnd):\n flag = False\n answer = None\n global ans\n temp_min = []\n for i in fnd:\n if len(i) == 1:\n answer = i[0]\n # print(str(txt[answer][0]))\n for i in ans:\n for j in i:\n if j in txt[answer][0]:\n # print('from first :: ',j,'\\n',answer)\n print(j)\n flag = True\n if flag:\n break\n if flag:\n break\n\n if not flag:\n for i in fnd:\n temp_min.append(len(i))\n temp_min = np.array(temp_min)\n temp_min = temp_min.argmin()\n # print(temp)\n p = []\n for i in fnd[temp_min]:\n count = 0\n for j,h in enumerate(fnd):\n if fnd[temp_min] != h:\n if i in h:\n count +=1\n p.append(count)\n p = np.array(p)\n # print('from second :: ',str(txt[fnd[temp_min][p.argmax()]][0]))\n print(str(txt[fnd[temp_min][p.argmax()]][0]))\n # for i in ans:\n # for j in i:\n # if j in txt[fnd[temp_min][p.argmax()]][0]:\n # print(j)\n # # break\n # break\n\n\n\nread()\nfor i,qst_num in enumerate(qst):\n fnd = []\n search_word(qst_num)\n # print('\\n',fnd)\n find_answer(fnd)\n # fnd = 
np.array(fnd).reshape(len(fnd))\n # print('questin #{}'.format(i+1),fnd,'\\n')\n # print(str(txt[find_answer(fnd)][0]))\n\n# print(ans)\n# print('\\n',qst)\n# print('\\n\\n',[(i,j[0]) for i,j in enumerate(txt)])\n# print('\\n\\n',[(i,j[0]) for i,j in enumerate(ans)])\n\n\n\n\n\n'''Zebras are several species of African equids (horse family) united by their distinctive black and white stripes. Their stripes come in different patterns, unique to each individual. They are generally social animals that live in small harems to large herds. Unlike their closest relatives, horses and donkeys, zebras have never been truly domesticated. There are three species of zebras: the plains zebra, the Grévy's zebra and the mountain zebra. The plains zebra and the mountain zebra belong to the subgenus Hippotigris, but Grévy's zebra is the sole species of subgenus Dolichohippus. The latter resembles an ass, to which it is closely related, while the former two are more horse-like. All three belong to the genus Equus, along with other living equids. The unique stripes of zebras make them one of the animals most familiar to people. They occur in a variety of habitats, such as grasslands, savannas, woodlands, thorny scrublands, mountains, and coastal hills. However, various anthropogenic factors have had a severe impact on zebra populations, in particular hunting for skins and habitat destruction. Grévy's zebra and the mountain zebra are endangered. 
While plains zebras are much more plentiful, one subspecies, the quagga, became extinct in the late 19th century – though there is currently a plan, called the Quagga Project, that aims to breed zebras that are phenotypically similar to the quagga in a process called breeding back.\nWhich Zebras are endangered?\nWhat is the aim of the Quagga Project?\nWhich animals are some of their closest relatives?\nWhich are the three species of zebras?\nWhich subgenus do the plains zebra and the mountain zebra belong to?\nsubgenus Hippotigris;the plains zebra, the Grévy's zebra and the mountain zebra;horses and donkeys;aims to breed zebras that are phenotypically similar to the quagga;Grévy's zebra and the mountain zebra\n'''",
"step-ids": [
3,
6,
7,
8,
9
]
}
|
[
3,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
def fit(x, iters=1000, eps=1e-06):
    """
    Fit a 2-parameter Weibull distribution to the given data using
    maximum-likelihood estimation (Newton-Raphson on the shape parameter).

    :param x: 1d-ndarray of samples from an (unknown) distribution.
              Each value must satisfy x > 0 (the log is taken below).
    :param iters: Maximum number of Newton-Raphson iterations.
    :param eps: Stopping criterion. Iteration stops if the change of the
                shape estimate between two iterations is smaller than eps.
    :return: Tuple (shape, scale), which can be (NaN, NaN) if a fit is
             impossible. Impossible fits may be due to 0-values in x.
    """
    ln_x = np.log(x)
    k = 1.0  # initial guess for the shape parameter
    k_t_1 = k  # shape estimate from the previous iteration
    for t in range(iters):
        x_k = x ** k
        x_k_ln_x = x_k * ln_x
        ff = np.sum(x_k_ln_x)
        fg = np.sum(x_k)
        # f(k): derivative of the log-likelihood w.r.t. k; zero at the MLE.
        f = ff / fg - np.mean(ln_x) - 1.0 / k
        ff_prime = np.sum(x_k_ln_x * ln_x)
        fg_prime = ff
        # f'(k): second derivative of the log-likelihood, for the Newton step.
        f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)
        # Newton-Raphson update: k <- k - f(k)/f'(k)
        k -= f / f_prime
        if np.isnan(f):
            return np.nan, np.nan
        if abs(k - k_t_1) < eps:
            break
        k_t_1 = k
    # Given the fitted shape k, the scale MLE has a closed form.
    lam = np.mean(x ** k) ** (1.0 / k)
    return k, lam
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def fit(x, iters=1000, eps=1e-06):
    """
    Fit a 2-parameter Weibull distribution to samples via maximum-likelihood
    estimation, solving for the shape parameter with Newton-Raphson.

    :param x: 1d-ndarray of samples; every value must be > 0.
    :param iters: Maximum number of Newton-Raphson iterations.
    :param eps: Convergence threshold on the change of the shape estimate.
    :return: (shape, scale) tuple, or (NaN, NaN) when the iteration
             diverges (e.g. because x contains zeros).
    """
    log_samples = np.log(x)
    shape = 1.0
    prev_shape = shape
    for _ in range(iters):
        powered = x ** shape
        powered_logs = powered * log_samples
        numer = np.sum(powered_logs)
        denom = np.sum(powered)
        # Derivative of the log-likelihood w.r.t. the shape parameter.
        grad = numer / denom - np.mean(log_samples) - 1.0 / shape
        numer_prime = np.sum(powered_logs * log_samples)
        denom_prime = numer
        # Second derivative, used as the Newton step denominator.
        curv = (numer_prime / denom
                - numer / denom * denom_prime / denom
                + 1.0 / (shape * shape))
        shape = shape - grad / curv
        if np.isnan(grad):
            return np.nan, np.nan
        if abs(shape - prev_shape) < eps:
            break
        prev_shape = shape
    # The scale parameter has a closed form once the shape is known.
    scale = np.mean(x ** shape) ** (1.0 / shape)
    return shape, scale
def my_test():
    """
    Smoke test: draw 100k samples from a Weibull distribution with shape 2.0
    scaled by 2, recover the parameters with fit(), print them, and plot the
    fitted PDF over a normalized histogram of the samples.
    """
    weibull = np.random.weibull(2.0, 100000)
    x = 2 * weibull  # scale the unit-scale samples to scale = 2.0
    mle_shape, mle_scale = fit(x)
    x.sort()
    print(mle_shape)
    print(mle_scale)
    grid = np.linspace(0, x.max(), 10)
    ydata = stats.weibull_min.pdf(grid, mle_shape, 0, mle_scale)
    plt.plot(grid, ydata, '-')
    # density=True replaces normed=True, which was deprecated and then
    # removed in matplotlib 3.1 -- the old keyword makes this call raise.
    plt.hist(x, bins=grid, density=True, alpha=0.5)
    plt.show()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def fit(x, iters=1000, eps=1e-06):
    """
    Fit a 2-parameter Weibull distribution to the given data using
    maximum-likelihood estimation (Newton-Raphson on the shape parameter).

    :param x: 1d-ndarray of samples from an (unknown) distribution.
              Each value must satisfy x > 0 (the log is taken below).
    :param iters: Maximum number of Newton-Raphson iterations.
    :param eps: Stopping criterion. Iteration stops if the change of the
                shape estimate between two iterations is smaller than eps.
    :return: Tuple (shape, scale), which can be (NaN, NaN) if a fit is
             impossible. Impossible fits may be due to 0-values in x.
    """
    ln_x = np.log(x)
    k = 1.0  # initial guess for the shape parameter
    k_t_1 = k  # shape estimate from the previous iteration
    for t in range(iters):
        x_k = x ** k
        x_k_ln_x = x_k * ln_x
        ff = np.sum(x_k_ln_x)
        fg = np.sum(x_k)
        # f(k): derivative of the log-likelihood w.r.t. k; zero at the MLE.
        f = ff / fg - np.mean(ln_x) - 1.0 / k
        ff_prime = np.sum(x_k_ln_x * ln_x)
        fg_prime = ff
        # f'(k): second derivative of the log-likelihood, for the Newton step.
        f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)
        # Newton-Raphson update: k <- k - f(k)/f'(k)
        k -= f / f_prime
        if np.isnan(f):
            return np.nan, np.nan
        if abs(k - k_t_1) < eps:
            break
        k_t_1 = k
    # Given the fitted shape k, the scale MLE has a closed form.
    lam = np.mean(x ** k) ** (1.0 / k)
    return k, lam
def my_test():
    """
    Smoke test: sample 100k points from a Weibull(shape=2.0) scaled by 2,
    recover the parameters with fit(), print them, and plot the fitted PDF
    over a histogram of the samples.
    """
    weibull = np.random.weibull(2.0, 100000)
    x = 2 * weibull
    mle_shape, mle_scale = fit(x)
    x.sort()
    print(mle_shape)
    print(mle_scale)
    ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,
        mle_scale)
    plt.plot(np.linspace(0, x.max(), 10), ydata, '-')
    # NOTE(review): the normed= keyword was removed in matplotlib >= 3.1;
    # on current matplotlib this call raises and should use density=True.
    plt.hist(x, bins=np.linspace(0, x.max(), 10), normed=True, alpha=0.5)
    plt.show()
if __name__ == '__main__':
    my_test()
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
def fit(x, iters=1000, eps=1e-06):
    """
    Fit a 2-parameter Weibull distribution to the given data using
    maximum-likelihood estimation (Newton-Raphson on the shape parameter).

    :param x: 1d-ndarray of samples from an (unknown) distribution.
              Each value must satisfy x > 0 (the log is taken below).
    :param iters: Maximum number of Newton-Raphson iterations.
    :param eps: Stopping criterion. Iteration stops if the change of the
                shape estimate between two iterations is smaller than eps.
    :return: Tuple (shape, scale), which can be (NaN, NaN) if a fit is
             impossible. Impossible fits may be due to 0-values in x.
    """
    ln_x = np.log(x)
    k = 1.0  # initial guess for the shape parameter
    k_t_1 = k  # shape estimate from the previous iteration
    for t in range(iters):
        x_k = x ** k
        x_k_ln_x = x_k * ln_x
        ff = np.sum(x_k_ln_x)
        fg = np.sum(x_k)
        # f(k): derivative of the log-likelihood w.r.t. k; zero at the MLE.
        f = ff / fg - np.mean(ln_x) - 1.0 / k
        ff_prime = np.sum(x_k_ln_x * ln_x)
        fg_prime = ff
        # f'(k): second derivative of the log-likelihood, for the Newton step.
        f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)
        # Newton-Raphson update: k <- k - f(k)/f'(k)
        k -= f / f_prime
        if np.isnan(f):
            return np.nan, np.nan
        if abs(k - k_t_1) < eps:
            break
        k_t_1 = k
    # Given the fitted shape k, the scale MLE has a closed form.
    lam = np.mean(x ** k) ** (1.0 / k)
    return k, lam
def my_test():
    """
    Smoke test: draw 100k samples from a Weibull distribution with shape 2.0
    scaled by 2, recover the parameters with fit(), print them, and plot the
    fitted PDF over a normalized histogram of the samples.
    """
    weibull = np.random.weibull(2.0, 100000)
    x = 2 * weibull  # scale the unit-scale samples to scale = 2.0
    mle_shape, mle_scale = fit(x)
    x.sort()
    print(mle_shape)
    print(mle_scale)
    grid = np.linspace(0, x.max(), 10)
    ydata = stats.weibull_min.pdf(grid, mle_shape, 0, mle_scale)
    plt.plot(grid, ydata, '-')
    # density=True replaces normed=True, which was deprecated and then
    # removed in matplotlib 3.1 -- the old keyword makes this call raise.
    plt.hist(x, bins=grid, density=True, alpha=0.5)
    plt.show()


if __name__ == '__main__':
    my_test()
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
def fit(x, iters=1000, eps=1e-6):
    """
    Fit a 2-parameter Weibull distribution to the given data using
    maximum-likelihood estimation (Newton-Raphson on the shape parameter).

    :param x: 1d-ndarray of samples from an (unknown) distribution.
              Each value must satisfy x > 0 (the log is taken below).
    :param iters: Maximum number of Newton-Raphson iterations.
    :param eps: Stopping criterion. Iteration stops if the change of the
                shape estimate between two iterations is smaller than eps.
    :return: Tuple (shape, scale), which can be (NaN, NaN) if a fit is
             impossible. Impossible fits may be due to 0-values in x.
    """
    # fit k (the shape parameter) via MLE
    ln_x = np.log(x)
    k = 1.
    k_t_1 = k  # shape estimate from the previous iteration
    for t in range(iters):
        x_k = x ** k
        x_k_ln_x = x_k * ln_x
        ff = np.sum(x_k_ln_x)
        fg = np.sum(x_k)
        # f(k): derivative of the log-likelihood w.r.t. k; zero at the MLE.
        f = ff / fg - np.mean(ln_x) - (1. / k)
        # Calculate second derivative d^2f/dk^2
        ff_prime = np.sum(x_k_ln_x * ln_x)
        fg_prime = ff
        f_prime = (ff_prime / fg - (ff / fg * fg_prime / fg)) + (
            1. / (k * k))
        # Newton-Raphson method k = k - f(k;x)/f'(k;x)
        k -= f / f_prime
        if np.isnan(f):
            return np.nan, np.nan
        if abs(k - k_t_1) < eps:
            break
        k_t_1 = k
    # Given the fitted shape k, the scale MLE has a closed form.
    lam = np.mean(x ** k) ** (1.0 / k)
    return k, lam
def my_test():
    """
    Smoke test: sample 100k points from a Weibull(shape=2.0) scaled by 2,
    recover the parameters with fit(), print them, and plot the fitted PDF
    over a histogram of the samples.
    """
    weibull = np.random.weibull(2.0, 100000)
    x = 2 * weibull
    mle_shape, mle_scale = fit(x)
    x.sort()
    print(mle_shape)
    print(mle_scale)
    # p0, p1, p2 = stats.weibull_min.fit(x, floc=0)
    # print(p0, p1, p2)
    ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,
        mle_scale)
    plt.plot(np.linspace(0, x.max(), 10), ydata, '-')
    # NOTE(review): the normed= keyword was removed in matplotlib >= 3.1;
    # on current matplotlib this call raises and should use density=True.
    plt.hist(x, bins=np.linspace(0, x.max(), 10), normed=True, alpha=0.5)
    plt.show()
if __name__ == '__main__':
    my_test()
|
flexible
|
{
"blob_id": "b10d3d8d0ded0d2055c1abdaf40a97abd4cb2cb8",
"index": 1631,
"step-1": "<mask token>\n\n\ndef fit(x, iters=1000, eps=1e-06):\n \"\"\"\n Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.\n :param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.\n :param iters: Maximum number of iterations\n :param eps: Stopping criterion. Fit is stopped ff the change within two iterations is smaller than eps.\n :return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.\n Impossible fits may be due to 0-values in x.\n \"\"\"\n ln_x = np.log(x)\n k = 1.0\n k_t_1 = k\n for t in range(iters):\n x_k = x ** k\n x_k_ln_x = x_k * ln_x\n ff = np.sum(x_k_ln_x)\n fg = np.sum(x_k)\n f = ff / fg - np.mean(ln_x) - 1.0 / k\n ff_prime = np.sum(x_k_ln_x * ln_x)\n fg_prime = ff\n f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)\n k -= f / f_prime\n if np.isnan(f):\n return np.nan, np.nan\n if abs(k - k_t_1) < eps:\n break\n k_t_1 = k\n lam = np.mean(x ** k) ** (1.0 / k)\n return k, lam\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef fit(x, iters=1000, eps=1e-06):\n \"\"\"\n Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.\n :param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.\n :param iters: Maximum number of iterations\n :param eps: Stopping criterion. Fit is stopped ff the change within two iterations is smaller than eps.\n :return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.\n Impossible fits may be due to 0-values in x.\n \"\"\"\n ln_x = np.log(x)\n k = 1.0\n k_t_1 = k\n for t in range(iters):\n x_k = x ** k\n x_k_ln_x = x_k * ln_x\n ff = np.sum(x_k_ln_x)\n fg = np.sum(x_k)\n f = ff / fg - np.mean(ln_x) - 1.0 / k\n ff_prime = np.sum(x_k_ln_x * ln_x)\n fg_prime = ff\n f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)\n k -= f / f_prime\n if np.isnan(f):\n return np.nan, np.nan\n if abs(k - k_t_1) < eps:\n break\n k_t_1 = k\n lam = np.mean(x ** k) ** (1.0 / k)\n return k, lam\n\n\ndef my_test():\n weibull = np.random.weibull(2.0, 100000)\n x = 2 * weibull\n mle_shape, mle_scale = fit(x)\n x.sort()\n print(mle_shape)\n print(mle_scale)\n ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,\n mle_scale)\n plt.plot(np.linspace(0, x.max(), 10), ydata, '-')\n plt.hist(x, bins=np.linspace(0, x.max(), 10), normed=True, alpha=0.5)\n plt.show()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef fit(x, iters=1000, eps=1e-06):\n \"\"\"\n Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.\n :param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.\n :param iters: Maximum number of iterations\n :param eps: Stopping criterion. Fit is stopped ff the change within two iterations is smaller than eps.\n :return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.\n Impossible fits may be due to 0-values in x.\n \"\"\"\n ln_x = np.log(x)\n k = 1.0\n k_t_1 = k\n for t in range(iters):\n x_k = x ** k\n x_k_ln_x = x_k * ln_x\n ff = np.sum(x_k_ln_x)\n fg = np.sum(x_k)\n f = ff / fg - np.mean(ln_x) - 1.0 / k\n ff_prime = np.sum(x_k_ln_x * ln_x)\n fg_prime = ff\n f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)\n k -= f / f_prime\n if np.isnan(f):\n return np.nan, np.nan\n if abs(k - k_t_1) < eps:\n break\n k_t_1 = k\n lam = np.mean(x ** k) ** (1.0 / k)\n return k, lam\n\n\ndef my_test():\n weibull = np.random.weibull(2.0, 100000)\n x = 2 * weibull\n mle_shape, mle_scale = fit(x)\n x.sort()\n print(mle_shape)\n print(mle_scale)\n ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,\n mle_scale)\n plt.plot(np.linspace(0, x.max(), 10), ydata, '-')\n plt.hist(x, bins=np.linspace(0, x.max(), 10), normed=True, alpha=0.5)\n plt.show()\n\n\nif __name__ == '__main__':\n my_test()\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy import stats\n\n\ndef fit(x, iters=1000, eps=1e-06):\n \"\"\"\n Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.\n :param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.\n :param iters: Maximum number of iterations\n :param eps: Stopping criterion. Fit is stopped ff the change within two iterations is smaller than eps.\n :return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.\n Impossible fits may be due to 0-values in x.\n \"\"\"\n ln_x = np.log(x)\n k = 1.0\n k_t_1 = k\n for t in range(iters):\n x_k = x ** k\n x_k_ln_x = x_k * ln_x\n ff = np.sum(x_k_ln_x)\n fg = np.sum(x_k)\n f = ff / fg - np.mean(ln_x) - 1.0 / k\n ff_prime = np.sum(x_k_ln_x * ln_x)\n fg_prime = ff\n f_prime = ff_prime / fg - ff / fg * fg_prime / fg + 1.0 / (k * k)\n k -= f / f_prime\n if np.isnan(f):\n return np.nan, np.nan\n if abs(k - k_t_1) < eps:\n break\n k_t_1 = k\n lam = np.mean(x ** k) ** (1.0 / k)\n return k, lam\n\n\ndef my_test():\n weibull = np.random.weibull(2.0, 100000)\n x = 2 * weibull\n mle_shape, mle_scale = fit(x)\n x.sort()\n print(mle_shape)\n print(mle_scale)\n ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,\n mle_scale)\n plt.plot(np.linspace(0, x.max(), 10), ydata, '-')\n plt.hist(x, bins=np.linspace(0, x.max(), 10), normed=True, alpha=0.5)\n plt.show()\n\n\nif __name__ == '__main__':\n my_test()\n",
"step-5": "import numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom scipy import stats\r\n\r\n\r\ndef fit(x, iters=1000, eps=1e-6):\r\n \"\"\"\r\n Fits a 2-parameter Weibull distribution to the given data using maximum-likelihood estimation.\r\n :param x: 1d-ndarray of samples from an (unknown) distribution. Each value must satisfy x > 0.\r\n :param iters: Maximum number of iterations\r\n :param eps: Stopping criterion. Fit is stopped ff the change within two iterations is smaller than eps.\r\n :return: Tuple (Shape, Scale) which can be (NaN, NaN) if a fit is impossible.\r\n Impossible fits may be due to 0-values in x.\r\n \"\"\"\r\n # fit k via MLE\r\n ln_x = np.log(x)\r\n k = 1.\r\n k_t_1 = k\r\n\r\n for t in range(iters):\r\n x_k = x ** k\r\n x_k_ln_x = x_k * ln_x\r\n ff = np.sum(x_k_ln_x)\r\n fg = np.sum(x_k)\r\n f = ff / fg - np.mean(ln_x) - (1. / k)\r\n\r\n # Calculate second derivative d^2f/dk^2\r\n ff_prime = np.sum(x_k_ln_x * ln_x)\r\n fg_prime = ff\r\n f_prime = (ff_prime / fg - (ff / fg * fg_prime / fg)) + (\r\n 1. / (k * k))\r\n\r\n # Newton-Raphson method k = k - f(k;x)/f'(k;x)\r\n k -= f / f_prime\r\n\r\n if np.isnan(f):\r\n return np.nan, np.nan\r\n if abs(k - k_t_1) < eps:\r\n break\r\n\r\n k_t_1 = k\r\n\r\n lam = np.mean(x ** k) ** (1.0 / k)\r\n\r\n return k, lam\r\n\r\n\r\ndef my_test():\r\n weibull = np.random.weibull(2.0, 100000)\r\n x = 2 * weibull\r\n mle_shape, mle_scale = fit(x)\r\n x.sort()\r\n print(mle_shape)\r\n print(mle_scale)\r\n # p0, p1, p2 = stats.weibull_min.fit(x, floc=0)\r\n # print(p0, p1, p2)\r\n ydata = stats.weibull_min.pdf(np.linspace(0, x.max(), 10), mle_shape, 0,\r\n mle_scale)\r\n plt.plot(np.linspace(0, x.max(), 10), ydata, '-')\r\n plt.hist(x, bins=np.linspace(0, x.max(), 10), normed=True, alpha=0.5)\r\n plt.show()\r\n\r\n\r\nif __name__ == '__main__':\r\n my_test()\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""Scans all files in this project for FIXME and TODO comments and writes them to todos.txt
has to be invoked while being in myLambda/ and not in e.g. myLambda/src"""
import sys
import os
import re
files = []
searchFiles = []
# get all subdirs and its files
for root, dirs, f in os.walk('./'):
files.append((root, f))
# build filepaths out of dir and filename
for f in files[0][1]:
searchFiles.append(files[0][0] + str(f)) # we're in ./ so we can just concat directory's name with filename
for i in range(1,len(files)): # we're in subdirs so we have to add '/' to get real paths
for f in files[i][1]:
searchFiles.append(files[i][0] + '/'+ f)
files = searchFiles
#remove unwanted files
blacklist = ['./todos.txt', './todoGen.py'] + \
[f for f in files if '__' in f or f[-3:] =='pyc' or '.git' in f]
for b in blacklist:
files.remove(b)
print 'searching:'
map(lambda x: sys.stdout.write(x + '\n'), files)
TODO = re.compile('TODO.*') # everything after TODO in one line
FIXME = re.compile('FIXME.*') # everything after FIXME in one line
# gather todos and fixmes
todos = []
fixmes = []
for f in files:
with open(f) as fi:
lineNumber = 0
for line in fi:
lineNumber +=1
todo = re.search(TODO, line)
fixme = re.search(FIXME, line)
if todo:
todos.append((todo.group(0), f, lineNumber))
elif fixme:
fixmes.append((fixme.group(0), f, lineNumber))
f = open('todos.txt', 'w') #write fixmes and todos to todos.txt
f.write('#TODO#\n')
if todos == []:
f.write('All todos are done')
else:
for i in range(len(todos)):
f.write('\t' + todos[i][0] + ' in file ' + todos[i][1] + ' in line ' + str(todos[i][2]) + '\n')
f.write('#FIXME#\n')
if fixmes == []:
f.write('Nothing to fix')
else:
for i in range(len(fixmes)):
f.write('\t'+ fixmes[i][0] + ' in file ' + fixmes[i][1] + ' in line ' + str(fixmes[i][2]) + '\n')
print "Done"
|
normal
|
{
"blob_id": "3bc6091d822fa197dcce3cd75fa9755dc9f93592",
"index": 7520,
"step-1": "\"\"\"Scans all files in this project for FIXME and TODO comments and writes them to todos.txt\nhas to be invoked while being in myLambda/ and not in e.g. myLambda/src\"\"\"\nimport sys\nimport os\nimport re\nfiles = []\nsearchFiles = []\n\n# get all subdirs and its files\nfor root, dirs, f in os.walk('./'):\n\tfiles.append((root, f))\n\n# build filepaths out of dir and filename\nfor f in files[0][1]:\n\tsearchFiles.append(files[0][0] + str(f))\t# we're in ./ so we can just concat directory's name with filename\n\nfor i in range(1,len(files)):\t# we're in subdirs so we have to add '/' to get real paths\n\tfor f in files[i][1]:\n\t\tsearchFiles.append(files[i][0] + '/'+ f)\nfiles = searchFiles\n\n#remove unwanted files\nblacklist = ['./todos.txt', './todoGen.py'] + \\\n\t\t[f for f in files if '__' in f or f[-3:] =='pyc' or '.git' in f]\nfor b in blacklist:\n\tfiles.remove(b)\n\nprint 'searching:'\nmap(lambda x: sys.stdout.write(x + '\\n'), files)\n\nTODO = re.compile('TODO.*')\t# everything after TODO in one line\nFIXME = re.compile('FIXME.*') # everything after FIXME in one line\n\n# gather todos and fixmes\ntodos = []\nfixmes = []\nfor f in files:\n\twith open(f) as fi:\n\t\tlineNumber = 0\n\t\tfor line in fi:\n\t\t\tlineNumber +=1\n\t\t\ttodo = re.search(TODO, line)\n\t\t\tfixme = re.search(FIXME, line)\n\t\t\tif todo:\n\t\t\t\ttodos.append((todo.group(0), f, lineNumber))\n\t\t\telif fixme:\n\t\t\t\tfixmes.append((fixme.group(0), f, lineNumber))\n\n\nf = open('todos.txt', 'w')\t\t#write fixmes and todos to todos.txt\n\nf.write('#TODO#\\n')\n\nif todos == []:\n\tf.write('All todos are done')\nelse:\n\tfor i in range(len(todos)):\n\t\tf.write('\\t' + todos[i][0] + ' in file ' + todos[i][1] + ' in line ' + str(todos[i][2]) + '\\n')\n\nf.write('#FIXME#\\n')\n\nif fixmes == []:\n\tf.write('Nothing to fix')\nelse:\n\tfor i in range(len(fixmes)):\n\t\tf.write('\\t'+ fixmes[i][0] + ' in file ' + fixmes[i][1] + ' in line ' + str(fixmes[i][2]) + '\\n')\n\nprint 
\"Done\"\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Route table for this app: landing page plus the signup/login views.
urlpatterns = [
    path('', Home.as_view(), name='home'),
    path('signup', Signup.as_view(), name='signup'),
    path('login', Login.as_view(), name='login'),
]
<|reserved_special_token_1|>
from django.urls import path
from .views.home import Home
from .views.signup import Signup
from .views.login import Login
# Route table for this app: landing page plus the signup/login views.
urlpatterns = [
    path('', Home.as_view(), name='home'),
    path('signup', Signup.as_view(), name='signup'),
    path('login', Login.as_view(), name='login'),
]
<|reserved_special_token_1|>
from django.urls import path
from .views.home import Home
from .views.signup import Signup
from .views.login import Login
# Route table for this app: landing page plus the signup/login views.
urlpatterns = [
    path('', Home.as_view(), name='home'),
    path('signup', Signup.as_view(), name='signup'),
    path('login', Login.as_view(), name='login'),
]
|
flexible
|
{
"blob_id": "979a387e29867818ffad7291511ff0be40dee118",
"index": 1938,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('', Home.as_view(), name='home'), path('signup', Signup\n .as_view(), name='signup'), path('login', Login.as_view(), name='login')]\n",
"step-3": "from django.urls import path\nfrom .views.home import Home\nfrom .views.signup import Signup\nfrom .views.login import Login\nurlpatterns = [path('', Home.as_view(), name='home'), path('signup', Signup\n .as_view(), name='signup'), path('login', Login.as_view(), name='login')]\n",
"step-4": "from django.urls import path\nfrom .views.home import Home\nfrom .views.signup import Signup\nfrom .views.login import Login\nurlpatterns = [\n path('', Home.as_view(), name='home'),\n path('signup', Signup.as_view(), name='signup'),\n path('login', Login.as_view(), name='login'),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import os
import time
import pickle
from configparser import ConfigParser
from slackbot import bot
from slackbot.bot import Bot
from slackbot.bot import listen_to
from elasticsearch_dsl.connections import connections
from okcom_tokenizer.tokenizers import CCEmojiJieba, UniGram
from marginalbear_elastic.query import post_multifield_query
from marginalbear_elastic.utils import concat_tokens
from marginalbear_elastic.ranking import avg_pmi
# Retrieval/ranking knobs: number of candidate titles fetched from the
# Elasticsearch 'post' index, and number of ranked answers put in a reply.
top_title = 100
top_response = 15
# NOTE(review): realpath(__name__) resolves the module *name* relative to the
# current working directory, so this only works when the process is started
# from the project root -- realpath(__file__) may have been intended; confirm.
package_dir = os.path.dirname(os.path.realpath(__name__))
config = ConfigParser()
config.read(package_dir + '/chatbot_apps/config.ini')
# Hand the Slack API token to the slackbot framework and remember the one
# channel this bot is allowed to answer in.
bot.settings.API_TOKEN = config.get('slack', 'slack_token')
SLACK_CHANNEL = config.get('slack', 'slack_channel')
@listen_to(r'(.*)')
def receive_question(message, question_string):
    """Answer any message posted in the configured Slack channel.

    Tokenizes the text with both the CCEmojiJieba and unigram tokenizers,
    queries the 'post' index for candidate titles, ranks their comments by
    average PMI, and replies with the top-scoring responses.
    """
    # Only respond in the single channel this bot is configured for.
    if message._body['channel'] == SLACK_CHANNEL:
        try:
            query_ccjieba = ccjieba.cut(question_string.strip())
            query_unigram = unigram.cut(question_string.strip())
            results = post_multifield_query(client,
                                            index='post',
                                            query_ccjieba=concat_tokens(query_ccjieba, pos=False),
                                            query_unigram=concat_tokens(query_unigram, pos=False),
                                            top=top_title)
            ans = avg_pmi(query_unigram, results, pairs_cnt, total_pairs_cnt, tokenizer='unigram')
            ans_string = '\n'.join(['<{:.3f}> <title:{}> comment: {}'.format(score, title, comment) for score, comment, title in ans[:top_response]])
            message.send(ans_string)
        except Exception as err:
            # Best-effort: print and swallow so one bad message does not
            # kill the bot's event loop.
            print(err)
def main():
    """Start the slackbot event loop and block until it exits."""
    Bot().run()
if __name__ == '__main__':
    # Module-level resources read by receive_question above.
    client = connections.create_connection()
    ccjieba = CCEmojiJieba()
    unigram = UniGram()
    t = time.time()
    print('Loading unigram pmi pickle')
    # pairs_cnt maps unigram token pairs to co-occurrence counts; the total
    # is the normalizer used for PMI scoring.
    with open(package_dir + '/data/pmi_pickle/pmi_unigram.pickle', 'rb') as f:
        pairs_cnt = dict(pickle.load(f))
    total_pairs_cnt = sum(pairs_cnt.values())
    print('Pickle loaded in {:.5f}s'.format(time.time() - t))
    main()
|
normal
|
{
"blob_id": "3630f83e7e6a10f42e96f8bd6fa9714232d9176b",
"index": 4552,
"step-1": "<mask token>\n\n\n@listen_to('(.*)')\ndef receive_question(message, question_string):\n if message._body['channel'] == SLACK_CHANNEL:\n try:\n query_ccjieba = ccjieba.cut(question_string.strip())\n query_unigram = unigram.cut(question_string.strip())\n results = post_multifield_query(client, index='post',\n query_ccjieba=concat_tokens(query_ccjieba, pos=False),\n query_unigram=concat_tokens(query_unigram, pos=False), top=\n top_title)\n ans = avg_pmi(query_unigram, results, pairs_cnt,\n total_pairs_cnt, tokenizer='unigram')\n ans_string = '\\n'.join(['<{:.3f}> <title:{}> comment: {}'.\n format(score, title, comment) for score, comment, title in\n ans[:top_response]])\n message.send(ans_string)\n except Exception as err:\n print(err)\n\n\ndef main():\n bot = Bot()\n bot.run()\n\n\n<mask token>\n",
"step-2": "<mask token>\nconfig.read(package_dir + '/chatbot_apps/config.ini')\n<mask token>\n\n\n@listen_to('(.*)')\ndef receive_question(message, question_string):\n if message._body['channel'] == SLACK_CHANNEL:\n try:\n query_ccjieba = ccjieba.cut(question_string.strip())\n query_unigram = unigram.cut(question_string.strip())\n results = post_multifield_query(client, index='post',\n query_ccjieba=concat_tokens(query_ccjieba, pos=False),\n query_unigram=concat_tokens(query_unigram, pos=False), top=\n top_title)\n ans = avg_pmi(query_unigram, results, pairs_cnt,\n total_pairs_cnt, tokenizer='unigram')\n ans_string = '\\n'.join(['<{:.3f}> <title:{}> comment: {}'.\n format(score, title, comment) for score, comment, title in\n ans[:top_response]])\n message.send(ans_string)\n except Exception as err:\n print(err)\n\n\ndef main():\n bot = Bot()\n bot.run()\n\n\nif __name__ == '__main__':\n client = connections.create_connection()\n ccjieba = CCEmojiJieba()\n unigram = UniGram()\n t = time.time()\n print('Loading unigram pmi pickle')\n with open(package_dir + '/data/pmi_pickle/pmi_unigram.pickle', 'rb') as f:\n pairs_cnt = dict(pickle.load(f))\n total_pairs_cnt = sum(pairs_cnt.values())\n print('Pickle loaded in {:.5f}s'.format(time.time() - t))\n main()\n",
"step-3": "<mask token>\ntop_title = 100\ntop_response = 15\npackage_dir = os.path.dirname(os.path.realpath(__name__))\nconfig = ConfigParser()\nconfig.read(package_dir + '/chatbot_apps/config.ini')\nbot.settings.API_TOKEN = config.get('slack', 'slack_token')\nSLACK_CHANNEL = config.get('slack', 'slack_channel')\n\n\n@listen_to('(.*)')\ndef receive_question(message, question_string):\n if message._body['channel'] == SLACK_CHANNEL:\n try:\n query_ccjieba = ccjieba.cut(question_string.strip())\n query_unigram = unigram.cut(question_string.strip())\n results = post_multifield_query(client, index='post',\n query_ccjieba=concat_tokens(query_ccjieba, pos=False),\n query_unigram=concat_tokens(query_unigram, pos=False), top=\n top_title)\n ans = avg_pmi(query_unigram, results, pairs_cnt,\n total_pairs_cnt, tokenizer='unigram')\n ans_string = '\\n'.join(['<{:.3f}> <title:{}> comment: {}'.\n format(score, title, comment) for score, comment, title in\n ans[:top_response]])\n message.send(ans_string)\n except Exception as err:\n print(err)\n\n\ndef main():\n bot = Bot()\n bot.run()\n\n\nif __name__ == '__main__':\n client = connections.create_connection()\n ccjieba = CCEmojiJieba()\n unigram = UniGram()\n t = time.time()\n print('Loading unigram pmi pickle')\n with open(package_dir + '/data/pmi_pickle/pmi_unigram.pickle', 'rb') as f:\n pairs_cnt = dict(pickle.load(f))\n total_pairs_cnt = sum(pairs_cnt.values())\n print('Pickle loaded in {:.5f}s'.format(time.time() - t))\n main()\n",
"step-4": "import os\nimport time\nimport pickle\nfrom configparser import ConfigParser\nfrom slackbot import bot\nfrom slackbot.bot import Bot\nfrom slackbot.bot import listen_to\nfrom elasticsearch_dsl.connections import connections\nfrom okcom_tokenizer.tokenizers import CCEmojiJieba, UniGram\nfrom marginalbear_elastic.query import post_multifield_query\nfrom marginalbear_elastic.utils import concat_tokens\nfrom marginalbear_elastic.ranking import avg_pmi\ntop_title = 100\ntop_response = 15\npackage_dir = os.path.dirname(os.path.realpath(__name__))\nconfig = ConfigParser()\nconfig.read(package_dir + '/chatbot_apps/config.ini')\nbot.settings.API_TOKEN = config.get('slack', 'slack_token')\nSLACK_CHANNEL = config.get('slack', 'slack_channel')\n\n\n@listen_to('(.*)')\ndef receive_question(message, question_string):\n if message._body['channel'] == SLACK_CHANNEL:\n try:\n query_ccjieba = ccjieba.cut(question_string.strip())\n query_unigram = unigram.cut(question_string.strip())\n results = post_multifield_query(client, index='post',\n query_ccjieba=concat_tokens(query_ccjieba, pos=False),\n query_unigram=concat_tokens(query_unigram, pos=False), top=\n top_title)\n ans = avg_pmi(query_unigram, results, pairs_cnt,\n total_pairs_cnt, tokenizer='unigram')\n ans_string = '\\n'.join(['<{:.3f}> <title:{}> comment: {}'.\n format(score, title, comment) for score, comment, title in\n ans[:top_response]])\n message.send(ans_string)\n except Exception as err:\n print(err)\n\n\ndef main():\n bot = Bot()\n bot.run()\n\n\nif __name__ == '__main__':\n client = connections.create_connection()\n ccjieba = CCEmojiJieba()\n unigram = UniGram()\n t = time.time()\n print('Loading unigram pmi pickle')\n with open(package_dir + '/data/pmi_pickle/pmi_unigram.pickle', 'rb') as f:\n pairs_cnt = dict(pickle.load(f))\n total_pairs_cnt = sum(pairs_cnt.values())\n print('Pickle loaded in {:.5f}s'.format(time.time() - t))\n main()\n",
"step-5": "import os\nimport time\nimport pickle\nfrom configparser import ConfigParser\n\nfrom slackbot import bot\nfrom slackbot.bot import Bot\nfrom slackbot.bot import listen_to\nfrom elasticsearch_dsl.connections import connections\n\nfrom okcom_tokenizer.tokenizers import CCEmojiJieba, UniGram\nfrom marginalbear_elastic.query import post_multifield_query\nfrom marginalbear_elastic.utils import concat_tokens\nfrom marginalbear_elastic.ranking import avg_pmi\n\n\ntop_title = 100\ntop_response = 15\n\npackage_dir = os.path.dirname(os.path.realpath(__name__))\nconfig = ConfigParser()\nconfig.read(package_dir + '/chatbot_apps/config.ini')\nbot.settings.API_TOKEN = config.get('slack', 'slack_token')\nSLACK_CHANNEL = config.get('slack', 'slack_channel')\n\n\n@listen_to(r'(.*)')\ndef receive_question(message, question_string):\n if message._body['channel'] == SLACK_CHANNEL:\n try:\n query_ccjieba = ccjieba.cut(question_string.strip())\n query_unigram = unigram.cut(question_string.strip())\n results = post_multifield_query(client,\n index='post',\n query_ccjieba=concat_tokens(query_ccjieba, pos=False),\n query_unigram=concat_tokens(query_unigram, pos=False),\n top=top_title)\n ans = avg_pmi(query_unigram, results, pairs_cnt, total_pairs_cnt, tokenizer='unigram')\n ans_string = '\\n'.join(['<{:.3f}> <title:{}> comment: {}'.format(score, title, comment) for score, comment, title in ans[:top_response]])\n message.send(ans_string)\n except Exception as err:\n print(err)\n\n\ndef main():\n bot = Bot()\n bot.run()\n\n\nif __name__ == '__main__':\n client = connections.create_connection()\n ccjieba = CCEmojiJieba()\n unigram = UniGram()\n t = time.time()\n print('Loading unigram pmi pickle')\n with open(package_dir + '/data/pmi_pickle/pmi_unigram.pickle', 'rb') as f:\n pairs_cnt = dict(pickle.load(f))\n total_pairs_cnt = sum(pairs_cnt.values())\n print('Pickle loaded in {:.5f}s'.format(time.time() - t))\n main()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def removdup():
train = pd.read_csv('C:\\Users\\Lenovo\\zqrbtest\\data.csv')
train = train['titlec']
train = set(train)
data = pd.DataFrame(list(train), columns=['titlec'])
data.to_csv('redup.csv', index=False, encoding='utf_8_sig')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
importlib.reload(sys)
<|reserved_special_token_0|>
jieba.load_userdict('newdict.txt')
<|reserved_special_token_0|>
def removdup():
train = pd.read_csv('C:\\Users\\Lenovo\\zqrbtest\\data.csv')
train = train['titlec']
train = set(train)
data = pd.DataFrame(list(train), columns=['titlec'])
data.to_csv('redup.csv', index=False, encoding='utf_8_sig')
if __name__ == '__main__':
def stopwordslist(filepath):
stopwords = [line.strip() for line in open(filepath, 'r', encoding=
'utf-8').read().split('\n')]
rs2 = []
return stopwords
def seg_sentence(sentence):
sentence_seged = jieba.cut(sentence.strip())
stopwords = stopwordslist('stop.txt')
outstr = ''
for word in sentence_seged:
if word not in stopwords:
if word != '\t':
outstr += word
outstr += ' '
return outstr
inputs = open('redup.csv', 'r', encoding='utf-8')
outputs = open('hel.csv', 'w', encoding='utf-8')
for line in inputs:
line_seg = seg_sentence(line)
outputs.write(line_seg + '\n')
outputs.close()
inputs.close()
if __name__ == '__main__':
aResult = removdup()
csvfile = open('wordCount.csv', 'w', newline='', encoding='utf_8_sig')
spamwriter = csv.writer(csvfile)
word_list = []
key_list = []
for line in open('hel.csv', 'r', encoding='UTF-8'):
item = line.strip('\n\r').split('\t')
tags = jieba.analyse.extract_tags(item[0])
for t in tags:
word_list.append(t)
word_dict = {}
with open('result3.txt', 'w') as wf2:
for item in word_list:
if item not in word_dict:
word_dict[item] = 1
else:
word_dict[item] += 1
orderList = list(word_dict.values())
orderList.sort(reverse=True)
for i in range(len(orderList)):
for key in word_dict:
if word_dict[key] == orderList[i]:
wf2.write(key + ' ' + str(word_dict[key]) + '\n')
key_list.append(key)
word_dict[key] = 0
for i in range(len(key_list)):
spamwriter.writerow((key_list[i], orderList[i]))
csvfile.close()
rf_path = 'wordcount.csv'
title = ['keyut', 'fre']
r2g = pd.read_csv(rf_path, header=None)
insertRow = pd.DataFrame([title])
r2g = insertRow.append(r2g, ignore_index=True)
df = r2g.to_csv('wordcount-1.csv', header=None, index=None, encoding=
'utf_8_sig')
a = pd.read_csv('wordcount-1.csv')
a.set_index('keyut')
b = pd.read_csv('total.csv', encoding='utf_8_sig', engine='python')
b.set_index('keyut')
c = pd.merge(b, a, on='keyut', how='left')
c.to_csv('collection.csv', encoding='utf_8_sig')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
importlib.reload(sys)
<|reserved_special_token_0|>
jieba.load_userdict('newdict.txt')
d = path.dirname(__file__)
filepath = 'C:\\Users\\Lenovo\\zqrbtest\\redup.csv'
def removdup():
train = pd.read_csv('C:\\Users\\Lenovo\\zqrbtest\\data.csv')
train = train['titlec']
train = set(train)
data = pd.DataFrame(list(train), columns=['titlec'])
data.to_csv('redup.csv', index=False, encoding='utf_8_sig')
if __name__ == '__main__':
def stopwordslist(filepath):
stopwords = [line.strip() for line in open(filepath, 'r', encoding=
'utf-8').read().split('\n')]
rs2 = []
return stopwords
def seg_sentence(sentence):
sentence_seged = jieba.cut(sentence.strip())
stopwords = stopwordslist('stop.txt')
outstr = ''
for word in sentence_seged:
if word not in stopwords:
if word != '\t':
outstr += word
outstr += ' '
return outstr
inputs = open('redup.csv', 'r', encoding='utf-8')
outputs = open('hel.csv', 'w', encoding='utf-8')
for line in inputs:
line_seg = seg_sentence(line)
outputs.write(line_seg + '\n')
outputs.close()
inputs.close()
if __name__ == '__main__':
aResult = removdup()
csvfile = open('wordCount.csv', 'w', newline='', encoding='utf_8_sig')
spamwriter = csv.writer(csvfile)
word_list = []
key_list = []
for line in open('hel.csv', 'r', encoding='UTF-8'):
item = line.strip('\n\r').split('\t')
tags = jieba.analyse.extract_tags(item[0])
for t in tags:
word_list.append(t)
word_dict = {}
with open('result3.txt', 'w') as wf2:
for item in word_list:
if item not in word_dict:
word_dict[item] = 1
else:
word_dict[item] += 1
orderList = list(word_dict.values())
orderList.sort(reverse=True)
for i in range(len(orderList)):
for key in word_dict:
if word_dict[key] == orderList[i]:
wf2.write(key + ' ' + str(word_dict[key]) + '\n')
key_list.append(key)
word_dict[key] = 0
for i in range(len(key_list)):
spamwriter.writerow((key_list[i], orderList[i]))
csvfile.close()
rf_path = 'wordcount.csv'
title = ['keyut', 'fre']
r2g = pd.read_csv(rf_path, header=None)
insertRow = pd.DataFrame([title])
r2g = insertRow.append(r2g, ignore_index=True)
df = r2g.to_csv('wordcount-1.csv', header=None, index=None, encoding=
'utf_8_sig')
a = pd.read_csv('wordcount-1.csv')
a.set_index('keyut')
b = pd.read_csv('total.csv', encoding='utf_8_sig', engine='python')
b.set_index('keyut')
c = pd.merge(b, a, on='keyut', how='left')
c.to_csv('collection.csv', encoding='utf_8_sig')
<|reserved_special_token_1|>
import jieba.analyse as analyse
from collections import Counter
import time
from os import path
import jieba
import importlib, sys
importlib.reload(sys)
import csv
import pandas as pd
from pandas import DataFrame
jieba.load_userdict('newdict.txt')
d = path.dirname(__file__)
filepath = 'C:\\Users\\Lenovo\\zqrbtest\\redup.csv'
def removdup():
train = pd.read_csv('C:\\Users\\Lenovo\\zqrbtest\\data.csv')
train = train['titlec']
train = set(train)
data = pd.DataFrame(list(train), columns=['titlec'])
data.to_csv('redup.csv', index=False, encoding='utf_8_sig')
if __name__ == '__main__':
def stopwordslist(filepath):
stopwords = [line.strip() for line in open(filepath, 'r', encoding=
'utf-8').read().split('\n')]
rs2 = []
return stopwords
def seg_sentence(sentence):
sentence_seged = jieba.cut(sentence.strip())
stopwords = stopwordslist('stop.txt')
outstr = ''
for word in sentence_seged:
if word not in stopwords:
if word != '\t':
outstr += word
outstr += ' '
return outstr
inputs = open('redup.csv', 'r', encoding='utf-8')
outputs = open('hel.csv', 'w', encoding='utf-8')
for line in inputs:
line_seg = seg_sentence(line)
outputs.write(line_seg + '\n')
outputs.close()
inputs.close()
if __name__ == '__main__':
aResult = removdup()
csvfile = open('wordCount.csv', 'w', newline='', encoding='utf_8_sig')
spamwriter = csv.writer(csvfile)
word_list = []
key_list = []
for line in open('hel.csv', 'r', encoding='UTF-8'):
item = line.strip('\n\r').split('\t')
tags = jieba.analyse.extract_tags(item[0])
for t in tags:
word_list.append(t)
word_dict = {}
with open('result3.txt', 'w') as wf2:
for item in word_list:
if item not in word_dict:
word_dict[item] = 1
else:
word_dict[item] += 1
orderList = list(word_dict.values())
orderList.sort(reverse=True)
for i in range(len(orderList)):
for key in word_dict:
if word_dict[key] == orderList[i]:
wf2.write(key + ' ' + str(word_dict[key]) + '\n')
key_list.append(key)
word_dict[key] = 0
for i in range(len(key_list)):
spamwriter.writerow((key_list[i], orderList[i]))
csvfile.close()
rf_path = 'wordcount.csv'
title = ['keyut', 'fre']
r2g = pd.read_csv(rf_path, header=None)
insertRow = pd.DataFrame([title])
r2g = insertRow.append(r2g, ignore_index=True)
df = r2g.to_csv('wordcount-1.csv', header=None, index=None, encoding=
'utf_8_sig')
a = pd.read_csv('wordcount-1.csv')
a.set_index('keyut')
b = pd.read_csv('total.csv', encoding='utf_8_sig', engine='python')
b.set_index('keyut')
c = pd.merge(b, a, on='keyut', how='left')
c.to_csv('collection.csv', encoding='utf_8_sig')
<|reserved_special_token_1|>
#!/usr/bin/env python
#coding:utf-8
import jieba.analyse as analyse
from collections import Counter
import time
from os import path
import jieba
import importlib, sys
importlib.reload(sys)
import csv
import pandas as pd
from pandas import DataFrame
jieba.load_userdict("newdict.txt")
d = path.dirname(__file__)
filepath = r'C:\Users\Lenovo\zqrbtest\redup.csv'
def removdup():
train = pd.read_csv(r'C:\Users\Lenovo\zqrbtest\data.csv')
train = train['titlec']
train = set(train)
data = pd.DataFrame(list(train), columns=['titlec'])
data.to_csv('redup.csv', index=False, encoding='utf_8_sig')
if __name__ == "__main__":
def stopwordslist(filepath):
stopwords = [line.strip()for line in open(filepath, 'r', encoding='utf-8').read().split('\n')]
rs2 = []
return stopwords
def seg_sentence (sentence):
sentence_seged = jieba.cut(sentence.strip())
stopwords = stopwordslist('stop.txt')
outstr = ''
for word in sentence_seged:
if word not in stopwords:
if word != '\t':
outstr += word
outstr += " "
return outstr
inputs = open('redup.csv', 'r', encoding='utf-8')
outputs = open('hel.csv', 'w', encoding='utf-8')
for line in inputs:
line_seg = seg_sentence(line)
outputs.write(line_seg + '\n')
outputs.close()
inputs.close()
if __name__ == "__main__":
aResult = removdup()
csvfile = open('wordCount.csv', 'w', newline='', encoding='utf_8_sig')
spamwriter = csv.writer(csvfile)
word_list = []
key_list = []
for line in open('hel.csv', 'r', encoding='UTF-8'):
item = line.strip('\n\r').split('\t')
tags = jieba.analyse.extract_tags(item[0])
for t in tags:
word_list.append(t)
word_dict = {}
with open("result3.txt", 'w') as wf2:
for item in word_list:
if item not in word_dict:
word_dict[item] = 1
else:
word_dict[item] += 1
orderList = list(word_dict.values())
orderList.sort(reverse=True)
for i in range(len(orderList)):
for key in word_dict:
if word_dict[key] == orderList[i]:
wf2.write(key + ' ' + str(word_dict[key]) + '\n')
key_list.append(key)
word_dict[key] = 0
for i in range(len(key_list)):
spamwriter.writerow((key_list[i], orderList[i]))
csvfile.close()
rf_path = 'wordcount.csv'
title = ['keyut', 'fre']
r2g = pd.read_csv(rf_path, header=None)
insertRow = pd.DataFrame([title])
r2g = insertRow.append(r2g, ignore_index=True)
df = r2g.to_csv('wordcount-1.csv', header=None, index=None, encoding='utf_8_sig')
a = pd.read_csv('wordcount-1.csv')
a.set_index('keyut')
b = pd.read_csv('total.csv', encoding='utf_8_sig', engine='python')
b.set_index('keyut')
c = pd.merge(b, a, on='keyut', how='left')
c.to_csv('collection.csv', encoding='utf_8_sig')
|
flexible
|
{
"blob_id": "6f3aa4e1309745265bb9d79df5f5a352e54493f9",
"index": 6313,
"step-1": "<mask token>\n\n\ndef removdup():\n train = pd.read_csv('C:\\\\Users\\\\Lenovo\\\\zqrbtest\\\\data.csv')\n train = train['titlec']\n train = set(train)\n data = pd.DataFrame(list(train), columns=['titlec'])\n data.to_csv('redup.csv', index=False, encoding='utf_8_sig')\n\n\n<mask token>\n",
"step-2": "<mask token>\nimportlib.reload(sys)\n<mask token>\njieba.load_userdict('newdict.txt')\n<mask token>\n\n\ndef removdup():\n train = pd.read_csv('C:\\\\Users\\\\Lenovo\\\\zqrbtest\\\\data.csv')\n train = train['titlec']\n train = set(train)\n data = pd.DataFrame(list(train), columns=['titlec'])\n data.to_csv('redup.csv', index=False, encoding='utf_8_sig')\n\n\nif __name__ == '__main__':\n\n def stopwordslist(filepath):\n stopwords = [line.strip() for line in open(filepath, 'r', encoding=\n 'utf-8').read().split('\\n')]\n rs2 = []\n return stopwords\n\n def seg_sentence(sentence):\n sentence_seged = jieba.cut(sentence.strip())\n stopwords = stopwordslist('stop.txt')\n outstr = ''\n for word in sentence_seged:\n if word not in stopwords:\n if word != '\\t':\n outstr += word\n outstr += ' '\n return outstr\n inputs = open('redup.csv', 'r', encoding='utf-8')\n outputs = open('hel.csv', 'w', encoding='utf-8')\n for line in inputs:\n line_seg = seg_sentence(line)\n outputs.write(line_seg + '\\n')\n outputs.close()\n inputs.close()\nif __name__ == '__main__':\n aResult = removdup()\n csvfile = open('wordCount.csv', 'w', newline='', encoding='utf_8_sig')\n spamwriter = csv.writer(csvfile)\n word_list = []\n key_list = []\n for line in open('hel.csv', 'r', encoding='UTF-8'):\n item = line.strip('\\n\\r').split('\\t')\n tags = jieba.analyse.extract_tags(item[0])\n for t in tags:\n word_list.append(t)\n word_dict = {}\n with open('result3.txt', 'w') as wf2:\n for item in word_list:\n if item not in word_dict:\n word_dict[item] = 1\n else:\n word_dict[item] += 1\n orderList = list(word_dict.values())\n orderList.sort(reverse=True)\n for i in range(len(orderList)):\n for key in word_dict:\n if word_dict[key] == orderList[i]:\n wf2.write(key + ' ' + str(word_dict[key]) + '\\n')\n key_list.append(key)\n word_dict[key] = 0\n for i in range(len(key_list)):\n spamwriter.writerow((key_list[i], orderList[i]))\n csvfile.close()\n rf_path = 'wordcount.csv'\n title = ['keyut', 
'fre']\n r2g = pd.read_csv(rf_path, header=None)\n insertRow = pd.DataFrame([title])\n r2g = insertRow.append(r2g, ignore_index=True)\n df = r2g.to_csv('wordcount-1.csv', header=None, index=None, encoding=\n 'utf_8_sig')\n a = pd.read_csv('wordcount-1.csv')\n a.set_index('keyut')\n b = pd.read_csv('total.csv', encoding='utf_8_sig', engine='python')\n b.set_index('keyut')\n c = pd.merge(b, a, on='keyut', how='left')\n c.to_csv('collection.csv', encoding='utf_8_sig')\n",
"step-3": "<mask token>\nimportlib.reload(sys)\n<mask token>\njieba.load_userdict('newdict.txt')\nd = path.dirname(__file__)\nfilepath = 'C:\\\\Users\\\\Lenovo\\\\zqrbtest\\\\redup.csv'\n\n\ndef removdup():\n train = pd.read_csv('C:\\\\Users\\\\Lenovo\\\\zqrbtest\\\\data.csv')\n train = train['titlec']\n train = set(train)\n data = pd.DataFrame(list(train), columns=['titlec'])\n data.to_csv('redup.csv', index=False, encoding='utf_8_sig')\n\n\nif __name__ == '__main__':\n\n def stopwordslist(filepath):\n stopwords = [line.strip() for line in open(filepath, 'r', encoding=\n 'utf-8').read().split('\\n')]\n rs2 = []\n return stopwords\n\n def seg_sentence(sentence):\n sentence_seged = jieba.cut(sentence.strip())\n stopwords = stopwordslist('stop.txt')\n outstr = ''\n for word in sentence_seged:\n if word not in stopwords:\n if word != '\\t':\n outstr += word\n outstr += ' '\n return outstr\n inputs = open('redup.csv', 'r', encoding='utf-8')\n outputs = open('hel.csv', 'w', encoding='utf-8')\n for line in inputs:\n line_seg = seg_sentence(line)\n outputs.write(line_seg + '\\n')\n outputs.close()\n inputs.close()\nif __name__ == '__main__':\n aResult = removdup()\n csvfile = open('wordCount.csv', 'w', newline='', encoding='utf_8_sig')\n spamwriter = csv.writer(csvfile)\n word_list = []\n key_list = []\n for line in open('hel.csv', 'r', encoding='UTF-8'):\n item = line.strip('\\n\\r').split('\\t')\n tags = jieba.analyse.extract_tags(item[0])\n for t in tags:\n word_list.append(t)\n word_dict = {}\n with open('result3.txt', 'w') as wf2:\n for item in word_list:\n if item not in word_dict:\n word_dict[item] = 1\n else:\n word_dict[item] += 1\n orderList = list(word_dict.values())\n orderList.sort(reverse=True)\n for i in range(len(orderList)):\n for key in word_dict:\n if word_dict[key] == orderList[i]:\n wf2.write(key + ' ' + str(word_dict[key]) + '\\n')\n key_list.append(key)\n word_dict[key] = 0\n for i in range(len(key_list)):\n spamwriter.writerow((key_list[i], 
orderList[i]))\n csvfile.close()\n rf_path = 'wordcount.csv'\n title = ['keyut', 'fre']\n r2g = pd.read_csv(rf_path, header=None)\n insertRow = pd.DataFrame([title])\n r2g = insertRow.append(r2g, ignore_index=True)\n df = r2g.to_csv('wordcount-1.csv', header=None, index=None, encoding=\n 'utf_8_sig')\n a = pd.read_csv('wordcount-1.csv')\n a.set_index('keyut')\n b = pd.read_csv('total.csv', encoding='utf_8_sig', engine='python')\n b.set_index('keyut')\n c = pd.merge(b, a, on='keyut', how='left')\n c.to_csv('collection.csv', encoding='utf_8_sig')\n",
"step-4": "import jieba.analyse as analyse\nfrom collections import Counter\nimport time\nfrom os import path\nimport jieba\nimport importlib, sys\nimportlib.reload(sys)\nimport csv\nimport pandas as pd\nfrom pandas import DataFrame\njieba.load_userdict('newdict.txt')\nd = path.dirname(__file__)\nfilepath = 'C:\\\\Users\\\\Lenovo\\\\zqrbtest\\\\redup.csv'\n\n\ndef removdup():\n train = pd.read_csv('C:\\\\Users\\\\Lenovo\\\\zqrbtest\\\\data.csv')\n train = train['titlec']\n train = set(train)\n data = pd.DataFrame(list(train), columns=['titlec'])\n data.to_csv('redup.csv', index=False, encoding='utf_8_sig')\n\n\nif __name__ == '__main__':\n\n def stopwordslist(filepath):\n stopwords = [line.strip() for line in open(filepath, 'r', encoding=\n 'utf-8').read().split('\\n')]\n rs2 = []\n return stopwords\n\n def seg_sentence(sentence):\n sentence_seged = jieba.cut(sentence.strip())\n stopwords = stopwordslist('stop.txt')\n outstr = ''\n for word in sentence_seged:\n if word not in stopwords:\n if word != '\\t':\n outstr += word\n outstr += ' '\n return outstr\n inputs = open('redup.csv', 'r', encoding='utf-8')\n outputs = open('hel.csv', 'w', encoding='utf-8')\n for line in inputs:\n line_seg = seg_sentence(line)\n outputs.write(line_seg + '\\n')\n outputs.close()\n inputs.close()\nif __name__ == '__main__':\n aResult = removdup()\n csvfile = open('wordCount.csv', 'w', newline='', encoding='utf_8_sig')\n spamwriter = csv.writer(csvfile)\n word_list = []\n key_list = []\n for line in open('hel.csv', 'r', encoding='UTF-8'):\n item = line.strip('\\n\\r').split('\\t')\n tags = jieba.analyse.extract_tags(item[0])\n for t in tags:\n word_list.append(t)\n word_dict = {}\n with open('result3.txt', 'w') as wf2:\n for item in word_list:\n if item not in word_dict:\n word_dict[item] = 1\n else:\n word_dict[item] += 1\n orderList = list(word_dict.values())\n orderList.sort(reverse=True)\n for i in range(len(orderList)):\n for key in word_dict:\n if word_dict[key] == orderList[i]:\n 
wf2.write(key + ' ' + str(word_dict[key]) + '\\n')\n key_list.append(key)\n word_dict[key] = 0\n for i in range(len(key_list)):\n spamwriter.writerow((key_list[i], orderList[i]))\n csvfile.close()\n rf_path = 'wordcount.csv'\n title = ['keyut', 'fre']\n r2g = pd.read_csv(rf_path, header=None)\n insertRow = pd.DataFrame([title])\n r2g = insertRow.append(r2g, ignore_index=True)\n df = r2g.to_csv('wordcount-1.csv', header=None, index=None, encoding=\n 'utf_8_sig')\n a = pd.read_csv('wordcount-1.csv')\n a.set_index('keyut')\n b = pd.read_csv('total.csv', encoding='utf_8_sig', engine='python')\n b.set_index('keyut')\n c = pd.merge(b, a, on='keyut', how='left')\n c.to_csv('collection.csv', encoding='utf_8_sig')\n",
"step-5": "#!/usr/bin/env python\r\n#coding:utf-8\r\nimport jieba.analyse as analyse\r\nfrom collections import Counter\r\nimport time\r\nfrom os import path\r\nimport jieba\r\nimport importlib, sys\r\nimportlib.reload(sys)\r\nimport csv\r\nimport pandas as pd\r\nfrom pandas import DataFrame\r\n\r\njieba.load_userdict(\"newdict.txt\")\r\nd = path.dirname(__file__)\r\nfilepath = r'C:\\Users\\Lenovo\\zqrbtest\\redup.csv'\r\n\r\ndef removdup():\r\n train = pd.read_csv(r'C:\\Users\\Lenovo\\zqrbtest\\data.csv')\r\n train = train['titlec']\r\n train = set(train)\r\n data = pd.DataFrame(list(train), columns=['titlec'])\r\n data.to_csv('redup.csv', index=False, encoding='utf_8_sig')\r\n \r\nif __name__ == \"__main__\":\r\n def stopwordslist(filepath):\r\n stopwords = [line.strip()for line in open(filepath, 'r', encoding='utf-8').read().split('\\n')]\r\n rs2 = []\r\n return stopwords\r\n def seg_sentence (sentence):\r\n sentence_seged = jieba.cut(sentence.strip())\r\n stopwords = stopwordslist('stop.txt')\r\n outstr = ''\r\n for word in sentence_seged:\r\n if word not in stopwords:\r\n if word != '\\t':\r\n outstr += word\r\n outstr += \" \"\r\n return outstr\r\n inputs = open('redup.csv', 'r', encoding='utf-8')\r\n outputs = open('hel.csv', 'w', encoding='utf-8')\r\n for line in inputs:\r\n line_seg = seg_sentence(line)\r\n outputs.write(line_seg + '\\n')\r\n outputs.close()\r\n inputs.close()\r\n\r\n\r\nif __name__ == \"__main__\":\r\n aResult = removdup()\r\n csvfile = open('wordCount.csv', 'w', newline='', encoding='utf_8_sig')\r\n spamwriter = csv.writer(csvfile)\r\n word_list = []\r\n key_list = []\r\n for line in open('hel.csv', 'r', encoding='UTF-8'):\r\n item = line.strip('\\n\\r').split('\\t')\r\n tags = jieba.analyse.extract_tags(item[0])\r\n for t in tags:\r\n word_list.append(t)\r\n\r\n word_dict = {}\r\n with open(\"result3.txt\", 'w') as wf2:\r\n for item in word_list:\r\n if item not in word_dict:\r\n word_dict[item] = 1\r\n else:\r\n word_dict[item] += 
1\r\n\r\n orderList = list(word_dict.values())\r\n orderList.sort(reverse=True)\r\n for i in range(len(orderList)):\r\n\r\n for key in word_dict:\r\n if word_dict[key] == orderList[i]:\r\n wf2.write(key + ' ' + str(word_dict[key]) + '\\n')\r\n key_list.append(key)\r\n word_dict[key] = 0\r\n\r\n for i in range(len(key_list)):\r\n spamwriter.writerow((key_list[i], orderList[i]))\r\n csvfile.close()\r\n \r\n rf_path = 'wordcount.csv'\r\n title = ['keyut', 'fre']\r\n\r\n r2g = pd.read_csv(rf_path, header=None)\r\n insertRow = pd.DataFrame([title])\r\n r2g = insertRow.append(r2g, ignore_index=True)\r\n df = r2g.to_csv('wordcount-1.csv', header=None, index=None, encoding='utf_8_sig')\r\n\r\n a = pd.read_csv('wordcount-1.csv')\r\n a.set_index('keyut')\r\n b = pd.read_csv('total.csv', encoding='utf_8_sig', engine='python')\r\n b.set_index('keyut')\r\n c = pd.merge(b, a, on='keyut', how='left')\r\n c.to_csv('collection.csv', encoding='utf_8_sig')\r\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
admin.site.register(UserProfileInfo)
<|reserved_special_token_1|>
from django.contrib import admin
from basic_app.models import UserProfileInfo
admin.site.register(UserProfileInfo)
<|reserved_special_token_1|>
from django.contrib import admin
from basic_app.models import UserProfileInfo
admin.site.register(UserProfileInfo)
# we do not need to register User() default form since it comes
# with the default admin site in Django itself.
|
flexible
|
{
"blob_id": "624212a1d73ff3a3b3092ffa27912a6ae25a2484",
"index": 6826,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nadmin.site.register(UserProfileInfo)\n",
"step-3": "from django.contrib import admin\nfrom basic_app.models import UserProfileInfo\nadmin.site.register(UserProfileInfo)\n",
"step-4": "from django.contrib import admin\nfrom basic_app.models import UserProfileInfo\n\nadmin.site.register(UserProfileInfo)\n\n# we do not need to register User() default form since it comes\n# with the default admin site in Django itself.\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Rouge(Character):
def special_attack1(self, opponent, hitdamage_callback, specatt_callback):
pass
def special_attack2(self, opponent, hitdamage_callback, specatt_callback):
pass
<|reserved_special_token_0|>
def regen_resource(self):
pass
def full_resource(self):
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Rouge(Character):
def special_attack1(self, opponent, hitdamage_callback, specatt_callback):
pass
def special_attack2(self, opponent, hitdamage_callback, specatt_callback):
pass
def heal(self, target):
pass
def regen_resource(self):
pass
def full_resource(self):
pass
<|reserved_special_token_1|>
__author__ = 'Jager'
<|reserved_special_token_0|>
class Rouge(Character):
def special_attack1(self, opponent, hitdamage_callback, specatt_callback):
pass
def special_attack2(self, opponent, hitdamage_callback, specatt_callback):
pass
def heal(self, target):
pass
def regen_resource(self):
pass
def full_resource(self):
pass
<|reserved_special_token_1|>
__author__ = 'Jager'
from char import Character
class Rouge(Character):
def special_attack1(self, opponent, hitdamage_callback, specatt_callback):
pass
def special_attack2(self, opponent, hitdamage_callback, specatt_callback):
pass
def heal(self, target):
pass
def regen_resource(self):
pass
def full_resource(self):
pass
<|reserved_special_token_1|>
__author__ = 'Jager'
from char import Character
class Rouge (Character):
def special_attack1(self, opponent, hitdamage_callback, specatt_callback):
pass # hook method
def special_attack2(self, opponent, hitdamage_callback, specatt_callback):
pass # hook method
def heal(self, target):
pass # hook method
def regen_resource(self):
pass # hook method
def full_resource(self):
pass
|
flexible
|
{
"blob_id": "36991c3191ba48b1b9dbd843e279f8fe124f1339",
"index": 73,
"step-1": "<mask token>\n\n\nclass Rouge(Character):\n\n def special_attack1(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def special_attack2(self, opponent, hitdamage_callback, specatt_callback):\n pass\n <mask token>\n\n def regen_resource(self):\n pass\n\n def full_resource(self):\n pass\n",
"step-2": "<mask token>\n\n\nclass Rouge(Character):\n\n def special_attack1(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def special_attack2(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def heal(self, target):\n pass\n\n def regen_resource(self):\n pass\n\n def full_resource(self):\n pass\n",
"step-3": "__author__ = 'Jager'\n<mask token>\n\n\nclass Rouge(Character):\n\n def special_attack1(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def special_attack2(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def heal(self, target):\n pass\n\n def regen_resource(self):\n pass\n\n def full_resource(self):\n pass\n",
"step-4": "__author__ = 'Jager'\nfrom char import Character\n\n\nclass Rouge(Character):\n\n def special_attack1(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def special_attack2(self, opponent, hitdamage_callback, specatt_callback):\n pass\n\n def heal(self, target):\n pass\n\n def regen_resource(self):\n pass\n\n def full_resource(self):\n pass\n",
"step-5": "__author__ = 'Jager'\nfrom char import Character\n\nclass Rouge (Character):\n\n def special_attack1(self, opponent, hitdamage_callback, specatt_callback):\n pass # hook method\n\n def special_attack2(self, opponent, hitdamage_callback, specatt_callback):\n pass # hook method\n\n def heal(self, target):\n pass # hook method\n\n def regen_resource(self):\n pass # hook method\n\n\n def full_resource(self):\n pass",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mock
from oslo_concurrency import processutils as putils
import six
from cinder import context
from cinder import exception
from cinder.tests.unit.targets import targets_fixture as tf
from cinder import utils
from cinder.volume.targets import iet
class TestIetAdmDriver(tf.TargetDriverFixture):
    """Unit tests for the iSCSI Enterprise Target (IET) admin driver.

    Command execution and file access are mocked throughout, so these
    tests never touch a real ``ietadm`` binary or its config files.
    """

    def setUp(self):
        # Build a fresh IetAdm target against the fixture's configuration.
        super(TestIetAdmDriver, self).setUp()
        self.target = iet.IetAdm(root_helper=utils.get_root_helper(),
                                 configuration=self.configuration)

    def test_get_target(self):
        """_get_target parses the tid for an IQN out of a proc-style file."""
        tmp_file = six.StringIO()
        tmp_file.write(
            'tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n'  # noqa
            ' sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n'  # noqa
            ' cid:0 ip:10.9.8.7 state:active hd:none dd:none')
        tmp_file.seek(0)
        with mock.patch('six.moves.builtins.open') as mock_open:
            mock_open.return_value = contextlib.closing(tmp_file)
            self.assertEqual('1',
                             self.target._get_target(
                                 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'  # noqa
                             ))

            # Test the failure case: Failed to handle the config file
            mock_open.side_effect = MemoryError()
            self.assertRaises(MemoryError,
                              self.target._get_target,
                              '')

    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',
                return_value=0)
    @mock.patch('cinder.utils.execute')
    @mock.patch('os.path.exists', return_value=True)
    @mock.patch('cinder.utils.temporary_chown')
    @mock.patch.object(iet, 'LOG')
    def test_create_iscsi_target(self, mock_log, mock_chown, mock_exists,
                                 mock_execute, mock_get_targ):
        """create_iscsi_target succeeds, and maps both chown and execute
        failures to ISCSITargetCreateFailed."""
        mock_execute.return_value = ('', '')
        tmp_file = six.StringIO()
        with mock.patch('six.moves.builtins.open') as mock_open:
            mock_open.return_value = contextlib.closing(tmp_file)
            self.assertEqual(
                0,
                self.target.create_iscsi_target(
                    self.test_vol,
                    0,
                    0,
                    self.fake_volumes_dir))
            self.assertTrue(mock_execute.called)
            self.assertTrue(mock_open.called)
            self.assertTrue(mock_get_targ.called)

            # Test the failure case: Failed to chown the config file
            mock_open.side_effect = putils.ProcessExecutionError
            self.assertRaises(exception.ISCSITargetCreateFailed,
                              self.target.create_iscsi_target,
                              self.test_vol,
                              0,
                              0,
                              self.fake_volumes_dir)

            # Test the failure case: Failed to set new auth
            mock_execute.side_effect = putils.ProcessExecutionError
            self.assertRaises(exception.ISCSITargetCreateFailed,
                              self.target.create_iscsi_target,
                              self.test_vol,
                              0,
                              0,
                              self.fake_volumes_dir)

    @mock.patch('cinder.utils.execute')
    @mock.patch('os.path.exists', return_value=True)
    def test_update_config_file_failure(self, mock_exists, mock_execute):
        """update_config_file raises when the conf file cannot be written."""
        # Test the failure case: conf file does not exist
        mock_exists.return_value = False
        mock_execute.side_effect = putils.ProcessExecutionError
        self.assertRaises(exception.ISCSITargetCreateFailed,
                          self.target.update_config_file,
                          self.test_vol,
                          0,
                          self.fake_volumes_dir,
                          "foo bar")

    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',
                return_value=1)
    @mock.patch('cinder.utils.execute')
    def test_create_iscsi_target_already_exists(self, mock_execute,
                                                mock_get_targ):
        """Creating a target that already exists returns the existing tid."""
        mock_execute.return_value = ('fake out', 'fake err')
        self.assertEqual(
            1,
            self.target.create_iscsi_target(
                self.test_vol,
                1,
                0,
                self.fake_volumes_dir))
        self.assertTrue(mock_get_targ.called)
        self.assertTrue(mock_execute.called)

    @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',
                return_value=None)
    @mock.patch('os.path.exists', return_value=False)
    @mock.patch('cinder.utils.execute')
    def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):
        """remove_iscsi_target issues an ietadm delete and maps failures to
        ISCSITargetRemoveFailed."""
        # Test the normal case
        self.target.remove_iscsi_target(1,
                                        0,
                                        self.testvol['id'],
                                        self.testvol['name'])
        mock_execute.assert_any_call('ietadm',
                                     '--op',
                                     'delete',
                                     '--tid=1',
                                     run_as_root=True)

        # Test the failure case: putils.ProcessExecutionError
        mock_execute.side_effect = putils.ProcessExecutionError
        self.assertRaises(exception.ISCSITargetRemoveFailed,
                          self.target.remove_iscsi_target,
                          1,
                          0,
                          self.testvol['id'],
                          self.testvol['name'])

    def test_find_sid_cid_for_target(self):
        """_find_sid_cid_for_target parses (sid, cid) from the session file."""
        tmp_file = six.StringIO()
        tmp_file.write(
            'tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n'  # noqa
            ' sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n'  # noqa
            ' cid:0 ip:10.9.8.7 state:active hd:none dd:none')
        tmp_file.seek(0)
        with mock.patch('six.moves.builtins.open') as mock_open:
            mock_open.return_value = contextlib.closing(tmp_file)
            self.assertEqual(('844427031282176', '0'),
                             self.target._find_sid_cid_for_target(
                                 '1',
                                 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45',  # noqa
                                 'volume-83c2e877-feed-46be-8435-77884fe55b45'  # noqa
                             ))

    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',
                return_value=1)
    @mock.patch('cinder.utils.execute')
    @mock.patch.object(iet.IetAdm, '_get_target_chap_auth')
    def test_create_export(self, mock_get_chap, mock_execute,
                           mock_get_targ):
        """create_export returns the portal location plus the CHAP auth."""
        mock_execute.return_value = ('', '')
        mock_get_chap.return_value = ('QZJbisGmn9AL954FNF4D',
                                      'P68eE7u9eFqDGexd28DQ')
        expected_result = {'location': '10.9.8.7:3260,1 '
                           'iqn.2010-10.org.openstack:testvol 0',
                           'auth': 'CHAP '
                           'QZJbisGmn9AL954FNF4D P68eE7u9eFqDGexd28DQ'}
        ctxt = context.get_admin_context()
        self.assertEqual(expected_result,
                         self.target.create_export(ctxt,
                                                   self.testvol,
                                                   self.fake_volumes_dir))
        self.assertTrue(mock_execute.called)

    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',
                return_value=None)
    @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',
                return_value=1)
    def test_ensure_export(self, mock_get_targetm, mock_get_chap):
        """ensure_export recreates the iSCSI target with stored parameters."""
        ctxt = context.get_admin_context()
        with mock.patch.object(self.target, 'create_iscsi_target'):
            self.target.ensure_export(ctxt,
                                      self.testvol,
                                      self.fake_volumes_dir)
            self.target.create_iscsi_target.assert_called_once_with(
                'iqn.2010-10.org.openstack:testvol',
                1, 0, self.fake_volumes_dir, None,
                portals_ips=[self.configuration.iscsi_ip_address],
                portals_port=int(self.configuration.iscsi_port),
                check_exit_code=False,
                old_name=None)
|
normal
|
{
"blob_id": "932502c93dd7dfc095adfe2ab88b4404396d9845",
"index": 8680,
"step-1": "<mask token>\n\n\nclass TestIetAdmDriver(tf.TargetDriverFixture):\n <mask token>\n\n def test_get_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual('1', self.target._get_target(\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n ))\n mock_open.side_effect = MemoryError()\n self.assertRaises(MemoryError, self.target._get_target, '')\n <mask token>\n\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n def test_update_config_file_failure(self, mock_exists, mock_execute):\n mock_exists.return_value = False\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.target.\n update_config_file, self.test_vol, 0, self.fake_volumes_dir,\n 'foo bar')\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TestIetAdmDriver(tf.TargetDriverFixture):\n\n def setUp(self):\n super(TestIetAdmDriver, self).setUp()\n self.target = iet.IetAdm(root_helper=utils.get_root_helper(),\n configuration=self.configuration)\n\n def test_get_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual('1', self.target._get_target(\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n ))\n mock_open.side_effect = MemoryError()\n self.assertRaises(MemoryError, self.target._get_target, '')\n <mask token>\n\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n def test_update_config_file_failure(self, mock_exists, mock_execute):\n mock_exists.return_value = False\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.target.\n update_config_file, self.test_vol, 0, self.fake_volumes_dir,\n 'foo bar')\n <mask token>\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',\n return_value=None)\n @mock.patch('os.path.exists', return_value=False)\n @mock.patch('cinder.utils.execute')\n def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):\n self.target.remove_iscsi_target(1, 0, self.testvol['id'], self.\n testvol['name'])\n mock_execute.assert_any_call('ietadm', '--op', 'delete', '--tid=1',\n run_as_root=True)\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetRemoveFailed, self.target.\n remove_iscsi_target, 1, 0, self.testvol['id'], self.testvol['name']\n )\n\n def 
test_find_sid_cid_for_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual(('844427031282176', '0'), self.target.\n _find_sid_cid_for_target('1',\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n , 'volume-83c2e877-feed-46be-8435-77884fe55b45'))\n <mask token>\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',\n return_value=None)\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)\n def test_ensure_export(self, mock_get_targetm, mock_get_chap):\n ctxt = context.get_admin_context()\n with mock.patch.object(self.target, 'create_iscsi_target'):\n self.target.ensure_export(ctxt, self.testvol, self.fake_volumes_dir\n )\n self.target.create_iscsi_target.assert_called_once_with(\n 'iqn.2010-10.org.openstack:testvol', 1, 0, self.\n fake_volumes_dir, None, portals_ips=[self.configuration.\n iscsi_ip_address], portals_port=int(self.configuration.\n iscsi_port), check_exit_code=False, old_name=None)\n",
"step-3": "<mask token>\n\n\nclass TestIetAdmDriver(tf.TargetDriverFixture):\n\n def setUp(self):\n super(TestIetAdmDriver, self).setUp()\n self.target = iet.IetAdm(root_helper=utils.get_root_helper(),\n configuration=self.configuration)\n\n def test_get_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual('1', self.target._get_target(\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n ))\n mock_open.side_effect = MemoryError()\n self.assertRaises(MemoryError, self.target._get_target, '')\n <mask token>\n\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n def test_update_config_file_failure(self, mock_exists, mock_execute):\n mock_exists.return_value = False\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.target.\n update_config_file, self.test_vol, 0, self.fake_volumes_dir,\n 'foo bar')\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)\n @mock.patch('cinder.utils.execute')\n def test_create_iscsi_target_already_exists(self, mock_execute,\n mock_get_targ):\n mock_execute.return_value = 'fake out', 'fake err'\n self.assertEqual(1, self.target.create_iscsi_target(self.test_vol, \n 1, 0, self.fake_volumes_dir))\n self.assertTrue(mock_get_targ.called)\n self.assertTrue(mock_execute.called)\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',\n return_value=None)\n @mock.patch('os.path.exists', return_value=False)\n @mock.patch('cinder.utils.execute')\n def test_remove_iscsi_target(self, mock_execute, mock_exists, 
mock_find):\n self.target.remove_iscsi_target(1, 0, self.testvol['id'], self.\n testvol['name'])\n mock_execute.assert_any_call('ietadm', '--op', 'delete', '--tid=1',\n run_as_root=True)\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetRemoveFailed, self.target.\n remove_iscsi_target, 1, 0, self.testvol['id'], self.testvol['name']\n )\n\n def test_find_sid_cid_for_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual(('844427031282176', '0'), self.target.\n _find_sid_cid_for_target('1',\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n , 'volume-83c2e877-feed-46be-8435-77884fe55b45'))\n <mask token>\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',\n return_value=None)\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)\n def test_ensure_export(self, mock_get_targetm, mock_get_chap):\n ctxt = context.get_admin_context()\n with mock.patch.object(self.target, 'create_iscsi_target'):\n self.target.ensure_export(ctxt, self.testvol, self.fake_volumes_dir\n )\n self.target.create_iscsi_target.assert_called_once_with(\n 'iqn.2010-10.org.openstack:testvol', 1, 0, self.\n fake_volumes_dir, None, portals_ips=[self.configuration.\n iscsi_ip_address], portals_port=int(self.configuration.\n iscsi_port), check_exit_code=False, old_name=None)\n",
"step-4": "<mask token>\n\n\nclass TestIetAdmDriver(tf.TargetDriverFixture):\n\n def setUp(self):\n super(TestIetAdmDriver, self).setUp()\n self.target = iet.IetAdm(root_helper=utils.get_root_helper(),\n configuration=self.configuration)\n\n def test_get_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual('1', self.target._get_target(\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n ))\n mock_open.side_effect = MemoryError()\n self.assertRaises(MemoryError, self.target._get_target, '')\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=0)\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n @mock.patch('cinder.utils.temporary_chown')\n @mock.patch.object(iet, 'LOG')\n def test_create_iscsi_target(self, mock_log, mock_chown, mock_exists,\n mock_execute, mock_get_targ):\n mock_execute.return_value = '', ''\n tmp_file = six.StringIO()\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual(0, self.target.create_iscsi_target(self.\n test_vol, 0, 0, self.fake_volumes_dir))\n self.assertTrue(mock_execute.called)\n self.assertTrue(mock_open.called)\n self.assertTrue(mock_get_targ.called)\n mock_open.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.\n target.create_iscsi_target, self.test_vol, 0, 0, self.\n fake_volumes_dir)\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.\n target.create_iscsi_target, 
self.test_vol, 0, 0, self.\n fake_volumes_dir)\n\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n def test_update_config_file_failure(self, mock_exists, mock_execute):\n mock_exists.return_value = False\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed, self.target.\n update_config_file, self.test_vol, 0, self.fake_volumes_dir,\n 'foo bar')\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)\n @mock.patch('cinder.utils.execute')\n def test_create_iscsi_target_already_exists(self, mock_execute,\n mock_get_targ):\n mock_execute.return_value = 'fake out', 'fake err'\n self.assertEqual(1, self.target.create_iscsi_target(self.test_vol, \n 1, 0, self.fake_volumes_dir))\n self.assertTrue(mock_get_targ.called)\n self.assertTrue(mock_execute.called)\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',\n return_value=None)\n @mock.patch('os.path.exists', return_value=False)\n @mock.patch('cinder.utils.execute')\n def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):\n self.target.remove_iscsi_target(1, 0, self.testvol['id'], self.\n testvol['name'])\n mock_execute.assert_any_call('ietadm', '--op', 'delete', '--tid=1',\n run_as_root=True)\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetRemoveFailed, self.target.\n remove_iscsi_target, 1, 0, self.testvol['id'], self.testvol['name']\n )\n\n def test_find_sid_cid_for_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n \"\"\"tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\n sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\n cid:0 ip:10.9.8.7 state:active hd:none dd:none\"\"\"\n )\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n 
self.assertEqual(('844427031282176', '0'), self.target.\n _find_sid_cid_for_target('1',\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45'\n , 'volume-83c2e877-feed-46be-8435-77884fe55b45'))\n <mask token>\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',\n return_value=None)\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target', return_value=1)\n def test_ensure_export(self, mock_get_targetm, mock_get_chap):\n ctxt = context.get_admin_context()\n with mock.patch.object(self.target, 'create_iscsi_target'):\n self.target.ensure_export(ctxt, self.testvol, self.fake_volumes_dir\n )\n self.target.create_iscsi_target.assert_called_once_with(\n 'iqn.2010-10.org.openstack:testvol', 1, 0, self.\n fake_volumes_dir, None, portals_ips=[self.configuration.\n iscsi_ip_address], portals_port=int(self.configuration.\n iscsi_port), check_exit_code=False, old_name=None)\n",
"step-5": "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport contextlib\n\nimport mock\nfrom oslo_concurrency import processutils as putils\nimport six\n\nfrom cinder import context\nfrom cinder import exception\n\nfrom cinder.tests.unit.targets import targets_fixture as tf\nfrom cinder import utils\nfrom cinder.volume.targets import iet\n\n\nclass TestIetAdmDriver(tf.TargetDriverFixture):\n\n def setUp(self):\n super(TestIetAdmDriver, self).setUp()\n self.target = iet.IetAdm(root_helper=utils.get_root_helper(),\n configuration=self.configuration)\n\n def test_get_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n 'tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\\n' # noqa\n ' sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\\n' # noqa\n ' cid:0 ip:10.9.8.7 state:active hd:none dd:none')\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual('1',\n self.target._get_target(\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45' # noqa\n ))\n\n # Test the failure case: Failed to handle the config file\n mock_open.side_effect = MemoryError()\n self.assertRaises(MemoryError,\n self.target._get_target,\n '')\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',\n return_value=0)\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n 
@mock.patch('cinder.utils.temporary_chown')\n @mock.patch.object(iet, 'LOG')\n def test_create_iscsi_target(self, mock_log, mock_chown, mock_exists,\n mock_execute, mock_get_targ):\n mock_execute.return_value = ('', '')\n tmp_file = six.StringIO()\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual(\n 0,\n self.target.create_iscsi_target(\n self.test_vol,\n 0,\n 0,\n self.fake_volumes_dir))\n self.assertTrue(mock_execute.called)\n self.assertTrue(mock_open.called)\n self.assertTrue(mock_get_targ.called)\n\n # Test the failure case: Failed to chown the config file\n mock_open.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed,\n self.target.create_iscsi_target,\n self.test_vol,\n 0,\n 0,\n self.fake_volumes_dir)\n\n # Test the failure case: Failed to set new auth\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed,\n self.target.create_iscsi_target,\n self.test_vol,\n 0,\n 0,\n self.fake_volumes_dir)\n\n @mock.patch('cinder.utils.execute')\n @mock.patch('os.path.exists', return_value=True)\n def test_update_config_file_failure(self, mock_exists, mock_execute):\n # Test the failure case: conf file does not exist\n mock_exists.return_value = False\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetCreateFailed,\n self.target.update_config_file,\n self.test_vol,\n 0,\n self.fake_volumes_dir,\n \"foo bar\")\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',\n return_value=1)\n @mock.patch('cinder.utils.execute')\n def test_create_iscsi_target_already_exists(self, mock_execute,\n mock_get_targ):\n mock_execute.return_value = ('fake out', 'fake err')\n self.assertEqual(\n 1,\n self.target.create_iscsi_target(\n self.test_vol,\n 1,\n 0,\n self.fake_volumes_dir))\n self.assertTrue(mock_get_targ.called)\n 
self.assertTrue(mock_execute.called)\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._find_sid_cid_for_target',\n return_value=None)\n @mock.patch('os.path.exists', return_value=False)\n @mock.patch('cinder.utils.execute')\n def test_remove_iscsi_target(self, mock_execute, mock_exists, mock_find):\n\n # Test the normal case\n self.target.remove_iscsi_target(1,\n 0,\n self.testvol['id'],\n self.testvol['name'])\n mock_execute.assert_any_call('ietadm',\n '--op',\n 'delete',\n '--tid=1',\n run_as_root=True)\n\n # Test the failure case: putils.ProcessExecutionError\n mock_execute.side_effect = putils.ProcessExecutionError\n self.assertRaises(exception.ISCSITargetRemoveFailed,\n self.target.remove_iscsi_target,\n 1,\n 0,\n self.testvol['id'],\n self.testvol['name'])\n\n def test_find_sid_cid_for_target(self):\n tmp_file = six.StringIO()\n tmp_file.write(\n 'tid:1 name:iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45\\n' # noqa\n ' sid:844427031282176 initiator:iqn.1994-05.com.redhat:5a6894679665\\n' # noqa\n ' cid:0 ip:10.9.8.7 state:active hd:none dd:none')\n tmp_file.seek(0)\n with mock.patch('six.moves.builtins.open') as mock_open:\n mock_open.return_value = contextlib.closing(tmp_file)\n self.assertEqual(('844427031282176', '0'),\n self.target._find_sid_cid_for_target(\n '1',\n 'iqn.2010-10.org.openstack:volume-83c2e877-feed-46be-8435-77884fe55b45', # noqa\n 'volume-83c2e877-feed-46be-8435-77884fe55b45' # noqa\n ))\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',\n return_value=1)\n @mock.patch('cinder.utils.execute')\n @mock.patch.object(iet.IetAdm, '_get_target_chap_auth')\n def test_create_export(self, mock_get_chap, mock_execute,\n mock_get_targ):\n mock_execute.return_value = ('', '')\n mock_get_chap.return_value = ('QZJbisGmn9AL954FNF4D',\n 'P68eE7u9eFqDGexd28DQ')\n expected_result = {'location': '10.9.8.7:3260,1 '\n 'iqn.2010-10.org.openstack:testvol 0',\n 'auth': 'CHAP '\n 'QZJbisGmn9AL954FNF4D P68eE7u9eFqDGexd28DQ'}\n 
ctxt = context.get_admin_context()\n self.assertEqual(expected_result,\n self.target.create_export(ctxt,\n self.testvol,\n self.fake_volumes_dir))\n self.assertTrue(mock_execute.called)\n\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target_chap_auth',\n return_value=None)\n @mock.patch('cinder.volume.targets.iet.IetAdm._get_target',\n return_value=1)\n def test_ensure_export(self, mock_get_targetm, mock_get_chap):\n ctxt = context.get_admin_context()\n with mock.patch.object(self.target, 'create_iscsi_target'):\n self.target.ensure_export(ctxt,\n self.testvol,\n self.fake_volumes_dir)\n self.target.create_iscsi_target.assert_called_once_with(\n 'iqn.2010-10.org.openstack:testvol',\n 1, 0, self.fake_volumes_dir, None,\n portals_ips=[self.configuration.iscsi_ip_address],\n portals_port=int(self.configuration.iscsi_port),\n check_exit_code=False,\n old_name=None)\n",
"step-ids": [
3,
7,
8,
9,
12
]
}
|
[
3,
7,
8,
9,
12
] |
#Charlie Quinn if.py
#Check < in an 'if' statement
#use a 'while' loop to make testing easier
def income_input(prompt_message):
    """Prompt the user with *prompt_message* and return their reply as a float."""
    # The appended space keeps the cursor off the end of the question text.
    return float(input(prompt_message + ' '))
# Progressive rate table: (exclusive upper bound, rate). Incomes below the
# bound get that rate; anything past the last bound falls into the top rate.
_BRACKETS = [
    (15001, .005),
    (17501, .006),
    (22401, .010),
    (47301, .014),
    (75001, .018),
    (112450, .022),
    (277000, .027),
]

do_again = 'y'
while do_again == 'y':
    income = income_input("\nHow much did you make this year?: ")

    rate = .03  # top bracket, unless a lower bound matches below
    for bound, bracket_rate in _BRACKETS:
        if income < bound:
            rate = bracket_rate
            break

    taxesdue = income * rate
    print("Income: ", income, "\nRate: ", rate, "\nTaxes Due: ", taxesdue)

    # Any answer other than 'y' ends the loop.
    do_again = input("\nAnother one? (y or no) ")
|
normal
|
{
"blob_id": "d5acde6c6139833c6631a2d88a181cd019d3d2da",
"index": 5747,
"step-1": "<mask token>\n",
"step-2": "def income_input(prompt_message):\n prompt = prompt_message + ' '\n temp = input(prompt)\n return float(temp)\n\n\n<mask token>\n",
"step-3": "def income_input(prompt_message):\n prompt = prompt_message + ' '\n temp = input(prompt)\n return float(temp)\n\n\n<mask token>\nwhile do_again == 'y':\n income = income_input('\\nHow much did you make this year?: ')\n if income < 15001:\n rate = 0.005\n elif income < 17501:\n rate = 0.006\n elif income < 22401:\n rate = 0.01\n elif income < 47301:\n rate = 0.014\n elif income < 75001:\n rate = 0.018\n elif income < 112450:\n rate = 0.022\n elif income < 277000:\n rate = 0.027\n else:\n rate = 0.03\n taxesdue = income * rate\n print('Income: ', income, '\\nRate: ', rate, '\\nTaxes Due: ', taxesdue)\n do_again = input('\\nAnother one? (y or no) ')\n",
"step-4": "def income_input(prompt_message):\n prompt = prompt_message + ' '\n temp = input(prompt)\n return float(temp)\n\n\ndo_again = 'y'\nwhile do_again == 'y':\n income = income_input('\\nHow much did you make this year?: ')\n if income < 15001:\n rate = 0.005\n elif income < 17501:\n rate = 0.006\n elif income < 22401:\n rate = 0.01\n elif income < 47301:\n rate = 0.014\n elif income < 75001:\n rate = 0.018\n elif income < 112450:\n rate = 0.022\n elif income < 277000:\n rate = 0.027\n else:\n rate = 0.03\n taxesdue = income * rate\n print('Income: ', income, '\\nRate: ', rate, '\\nTaxes Due: ', taxesdue)\n do_again = input('\\nAnother one? (y or no) ')\n",
"step-5": "#Charlie Quinn if.py\n#Check < in an 'if' statement\n#use a 'while' loop to make testing easier\n\ndef income_input(prompt_message):\n\n prompt = prompt_message + ' '\n temp = input(prompt)\n #get input from user\n return float(temp)\n\n\ndo_again = 'y'\n\n\nwhile do_again =='y':\n income = income_input(\"\\nHow much did you make this year?: \")\n \n if income < 15001:\n rate = .005\n elif income < 17501:\n rate = .006\n elif income < 22401:\n rate = .010\n elif income < 47301:\n rate = .014\n elif income < 75001:\n rate = .018\n elif income < 112450:\n rate = .022\n elif income < 277000:\n rate = .027\n else:\n rate = .03\n\n taxesdue = income *rate\n\n print(\"Income: \",income,\"\\nRate: \",rate,\"\\nTaxes Due: \",taxesdue)\n\n #loop will end when you type in an n\n do_again = input(\"\\nAnother one? (y or no) \")\n\n \n\n\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import queue
import sys
import logging
from superai.common import InitLog
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
# Convert 2-D grid coordinates to a 1-D index.
def hwToidx(x: int, y: int, weight: int):
    """Flatten grid coordinates (x, y) into a row-major 1-D index."""
    row_offset = y * weight
    return row_offset + x
# Convert a 1-D index back to 2-D grid coordinates.
def idxTohw(idx, weight: int):
    """Convert a row-major 1-D index into [x, y] grid coordinates."""
    y, x = divmod(idx, weight)
    return [x, y]
# Map a 10x10-cell index to pixel coordinates [x, y].
def idxToXY(idx, cellw: int):
    """Map a cell index on a grid of width *cellw* to pixel coords [x, y].

    Each cell is 10x10 pixels, so the grid position is scaled by 10.
    """
    cell_x, cell_y = idxTohw(idx, cellw)
    return [cell_x * 10, cell_y * 10]
# Directed graph.
class Graph:
    """Directed graph over V vertices stored as adjacency lists.

    Vertex ids are flattened row-major indices of a 2-D grid of width W
    (see hwToidx/idxTohw); W is kept so callers can map ids back to (x, y).
    """

    def __init__(self, V: int, W: int):
        # Number of vertices.
        self.V = V
        # Number of edges added so far.
        self.E = 0
        # Adjacency lists: adj[v] holds the vertices reachable from v.
        # One fresh list per vertex (never a shared list).
        self.adj = [[] for _ in range(V)]
        # Grid width (the 1-D vertex ids represent a 2-D grid).
        self.W = W

    def AddEdge(self, v: int, w: int):
        """Add the directed edge v -> w."""
        self.adj[v].append(w)
        self.E += 1

    def __str__(self):
        # Renamed the accumulator: the original local was called ``str``,
        # shadowing the builtin inside this method.
        out = ""
        for idx, nears in enumerate(self.adj):
            out += "idx: {} nears: {}\n".format(idx, nears)
        return out
# Breadth-first search paths from a single source.
class BreadthFirstPaths:
    """Breadth-first search from source s over a Graph.

    After construction, HasPathTo/PathTo answer reachability and
    fewest-edge paths from s.
    """

    def __init__(self, g: Graph, s: int):
        # marked[v] is True once v has been reached from s.
        self.marked = [False] * g.V
        # edgeTo[v] is the predecessor of v on the discovered path.
        self.edgeTo = [0] * g.V
        self.s = s
        self.bfs(g, self.s)

    def bfs(self, g: Graph, s: int):
        """Expand the frontier one edge at a time from s."""
        q = queue.Queue()
        # Bug fix: mark the source up front. Previously s was never marked,
        # so HasPathTo(s) was False and a back-edge to s could re-enqueue it
        # and clobber edgeTo[s].
        self.marked[s] = True
        q.put(s)
        while q.qsize() != 0:
            v = q.get()
            for w in g.adj[v]:
                # First time w is reached: record the edge and enqueue it.
                if not self.marked[w]:
                    self.edgeTo[w] = v
                    self.marked[w] = True
                    q.put(w)
                    # print(w, idxTohw(w, 6))

    def HasPathTo(self, v: int):
        """Return True if v is reachable from the source."""
        return self.marked[v]

    def PathTo(self, v: int) -> [int]:
        """Return the path from v back to the source (v first, s last).

        Returns an empty list when v is unreachable.
        """
        result = []
        if not self.HasPathTo(v):
            return result
        x = v
        while x != self.s:
            result.append(x)
            x = self.edgeTo[x]
        result.append(self.s)
        return result
# Manhattan (L1) distance.
def manhattanDistance(x, y):
    """Return the Manhattan (L1) distance between coordinate sequences x and y."""
    return sum(abs(a - b) for a, b in zip(x, y))
# Squared Euclidean distance (note: no square root is taken).
def dist_between(a, b):
    """Return the *squared* Euclidean distance between 2-D points a and b.

    No square root is taken, so this is suitable only for comparing
    relative distances.
    """
    dx = b[0] - a[0]
    dy = b[1] - a[1]
    return dx * dx + dy * dy
# A* search over 4-neighbour grid moves.
class AStarPaths:
    """A* search over a 4-neighbour grid Graph from start to end.

    Manhattan distance is used both as the step cost between neighbouring
    cells and as the (admissible) heuristic toward the goal.
    """

    def __init__(self, g: Graph, start: int, end: int):
        # Vertices already fully expanded.
        self.closedSet = []
        # Frontier of discovered-but-unexpanded vertices.
        self.openSet = [start]
        self.start = start
        self.end = end
        # edgeTo[w]: predecessor of w on the cheapest path found so far.
        self.edgeTo = [0] * g.V
        self.marked = [False] * g.V
        # gScore[v]: best known actual cost from start to v.
        self.gScore = [sys.maxsize] * g.V
        self.gScore[start] = 0
        # fScore[v]: gScore[v] plus heuristic estimate from v to end.
        self.fScore = [sys.maxsize] * g.V
        self.fScore[start] = manhattanDistance(idxTohw(start, g.W), idxTohw(end, g.W))
        self.astar(g)

    def astar(self, g: Graph):
        """Run A* until the goal is expanded or the frontier empties."""
        while len(self.openSet) > 0:
            # Expand the frontier vertex with the lowest estimated total cost.
            # (Linear min over the open set; fine for small grids.)
            current = min(self.openSet, key=lambda s: self.fScore[s])
            if current == self.end:
                return
            self.openSet.remove(current)
            self.closedSet.append(current)
            for w in g.adj[current]:
                if w in self.closedSet:
                    continue
                # Actual cost to w through current: step cost is the
                # Manhattan distance between the two cells.
                tentativegScore = self.gScore[current] + manhattanDistance(idxTohw(current, g.W),
                                                                           idxTohw(w, g.W))
                if tentativegScore < self.gScore[w]:
                    # Cheaper route to w found: record it and re-estimate.
                    self.edgeTo[w] = current
                    self.marked[w] = True
                    print("edgeTo ({}) -> ({})".format(idxTohw(current, g.W), idxTohw(w, g.W)))
                    self.gScore[w] = tentativegScore
                    self.fScore[w] = self.gScore[w] + manhattanDistance(idxTohw(w, g.W), idxTohw(self.end, g.W))
                    print("fScore[%d] manhattan: %d" % (w, self.fScore[w]))
                    if w not in self.openSet:
                        self.openSet.append(w)

    def HasPathTo(self, v: int):
        # NOTE(review): the start vertex itself is never marked, so
        # HasPathTo(start) returns False — confirm whether callers rely on it.
        return self.marked[v]

    def PathTo(self, v: int):
        """Return the path from v back to start (v first, start last);
        empty list if v was never reached."""
        result = []
        if not self.HasPathTo(v):
            return result
        x = v
        while x != self.start:
            result.append(x)
            x = self.edgeTo[x]
        result.append(self.start)
        return result
def main():
    """Build the demo 6x4 grid graph and run BFS and A* searches over it."""
    InitLog()

    # 24 vertices laid out as a 6-wide, 4-high grid.
    graph = Graph(24, 6)

    # Directed edges, given as ((x1, y1), (x2, y2)) grid coordinates.
    # Insertion order matters: it fixes the adjacency-list order and hence
    # the traversal order of BFS / A*.
    edges = [
        ((2, 0), (2, 1)),
        ((1, 1), (1, 2)),
        ((2, 1), (2, 0)),
        ((2, 1), (3, 1)),
        ((3, 1), (2, 1)),
        ((3, 1), (4, 1)),
        ((4, 1), (4, 2)),
        ((4, 1), (3, 1)),
        ((0, 2), (1, 2)),
        ((1, 2), (1, 3)),
        ((1, 2), (0, 2)),
        ((1, 2), (1, 1)),
        ((1, 2), (2, 2)),
        ((2, 2), (2, 3)),
        ((2, 2), (1, 2)),
        ((2, 2), (3, 2)),
        ((3, 2), (3, 3)),
        ((3, 2), (2, 2)),
        ((3, 2), (4, 2)),
        ((4, 2), (4, 3)),
        ((4, 2), (3, 2)),
        ((4, 2), (4, 1)),
        ((4, 2), (5, 2)),
        ((5, 2), (5, 3)),
        ((5, 2), (4, 2)),
        ((1, 3), (1, 2)),
        ((2, 3), (2, 2)),
        ((2, 3), (3, 3)),
        ((3, 3), (2, 3)),
        ((3, 3), (3, 2)),
        ((4, 3), (5, 3)),
        ((4, 3), (4, 2)),
        ((5, 3), (4, 3)),
        ((5, 3), (5, 2)),
    ]
    for (x1, y1), (x2, y2) in edges:
        graph.AddEdge(hwToidx(x1, y1, 6), hwToidx(x2, y2, 6))

    print("图:")
    print(graph)

    print("广度优先:")
    bfs = BreadthFirstPaths(graph, 2)
    for v in reversed(bfs.PathTo(hwToidx(1, 1, 6))):
        print(v, idxTohw(v, 6))

    print("\na*寻径")
    astar = AStarPaths(graph, 2, 7)
    for v in reversed(astar.PathTo(hwToidx(1, 1, 6))):
        print(v, idxTohw(v, 6))


if __name__ == "__main__":
    main()
|
normal
|
{
"blob_id": "b6d8a918659f733919fe3bb4be9037e36ad32386",
"index": 272,
"step-1": "<mask token>\n\n\nclass Graph:\n\n def __init__(self, V: int, W: int):\n self.V = V\n self.E = 0\n self.adj = []\n self.W = W\n for i in range(V):\n nears = []\n self.adj.append(nears)\n\n def AddEdge(self, v: int, w: int):\n self.adj[v].append(w)\n self.E += 1\n\n def __str__(self):\n str = ''\n for idx, nears in enumerate(self.adj):\n str += 'idx: {} nears: {}\\n'.format(idx, nears)\n return str\n\n\nclass BreadthFirstPaths:\n\n def __init__(self, g: Graph, s: int):\n self.marked = [False] * g.V\n self.edgeTo = [0] * g.V\n self.s = s\n self.bfs(g, self.s)\n\n def bfs(self, g: Graph, s: int):\n q = queue.Queue()\n q.put(s)\n while q.qsize() != 0:\n v = q.get()\n for w in g.adj[v]:\n if not self.marked[w]:\n self.edgeTo[w] = v\n self.marked[w] = True\n q.put(w)\n\n def HasPathTo(self, v: int):\n return self.marked[v]\n\n def PathTo(self, v: int) ->[int]:\n result = []\n if not self.HasPathTo(v):\n return result\n x = v\n while x != self.s:\n result.append(x)\n x = self.edgeTo[x]\n result.append(self.s)\n return result\n\n\n<mask token>\n\n\nclass AStarPaths:\n\n def __init__(self, g: Graph, start: int, end: int):\n self.closedSet = []\n self.openSet = [start]\n self.start = start\n self.end = end\n self.edgeTo = [0] * g.V\n self.marked = [False] * g.V\n self.gScore = [sys.maxsize] * g.V\n self.gScore[start] = 0\n self.fScore = [sys.maxsize] * g.V\n self.fScore[start] = manhattanDistance(idxTohw(start, g.W), idxTohw\n (end, g.W))\n self.astar(g)\n\n def astar(self, g: Graph):\n while len(self.openSet) > 0:\n current = min(self.openSet, key=lambda s: self.fScore[s])\n if current == self.end:\n return\n self.openSet.remove(current)\n self.closedSet.append(current)\n for w in g.adj[current]:\n if w in self.closedSet:\n continue\n tentativegScore = self.gScore[current] + manhattanDistance(\n idxTohw(current, g.W), idxTohw(w, g.W))\n if tentativegScore < self.gScore[w]:\n self.edgeTo[w] = current\n self.marked[w] = True\n print('edgeTo ({}) -> 
({})'.format(idxTohw(current, g.W\n ), idxTohw(w, g.W)))\n self.gScore[w] = tentativegScore\n self.fScore[w] = self.gScore[w] + manhattanDistance(idxTohw\n (w, g.W), idxTohw(self.end, g.W))\n print('fScore[%d] manhattan: %d' % (w, self.fScore[w]))\n if w not in self.openSet:\n self.openSet.append(w)\n\n def HasPathTo(self, v: int):\n return self.marked[v]\n\n def PathTo(self, v: int):\n result = []\n if not self.HasPathTo(v):\n return result\n x = v\n while x != self.start:\n result.append(x)\n x = self.edgeTo[x]\n result.append(self.start)\n return result\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef hwToidx(x: int, y: int, weight: int):\n return y * weight + x\n\n\ndef idxTohw(idx, weight: int):\n return [idx % weight, idx // weight]\n\n\ndef idxToXY(idx, cellw: int):\n curpoint = idxTohw(idx, cellw)\n curpoint[0], curpoint[1] = curpoint[0] * 10, curpoint[1] * 10\n return curpoint\n\n\nclass Graph:\n\n def __init__(self, V: int, W: int):\n self.V = V\n self.E = 0\n self.adj = []\n self.W = W\n for i in range(V):\n nears = []\n self.adj.append(nears)\n\n def AddEdge(self, v: int, w: int):\n self.adj[v].append(w)\n self.E += 1\n\n def __str__(self):\n str = ''\n for idx, nears in enumerate(self.adj):\n str += 'idx: {} nears: {}\\n'.format(idx, nears)\n return str\n\n\nclass BreadthFirstPaths:\n\n def __init__(self, g: Graph, s: int):\n self.marked = [False] * g.V\n self.edgeTo = [0] * g.V\n self.s = s\n self.bfs(g, self.s)\n\n def bfs(self, g: Graph, s: int):\n q = queue.Queue()\n q.put(s)\n while q.qsize() != 0:\n v = q.get()\n for w in g.adj[v]:\n if not self.marked[w]:\n self.edgeTo[w] = v\n self.marked[w] = True\n q.put(w)\n\n def HasPathTo(self, v: int):\n return self.marked[v]\n\n def PathTo(self, v: int) ->[int]:\n result = []\n if not self.HasPathTo(v):\n return result\n x = v\n while x != self.s:\n result.append(x)\n x = self.edgeTo[x]\n result.append(self.s)\n return result\n\n\n<mask token>\n\n\nclass AStarPaths:\n\n def __init__(self, g: Graph, start: int, end: int):\n self.closedSet = []\n self.openSet = [start]\n self.start = start\n self.end = end\n self.edgeTo = [0] * g.V\n self.marked = [False] * g.V\n self.gScore = [sys.maxsize] * g.V\n self.gScore[start] = 0\n self.fScore = [sys.maxsize] * g.V\n self.fScore[start] = manhattanDistance(idxTohw(start, g.W), idxTohw\n (end, g.W))\n self.astar(g)\n\n def astar(self, g: Graph):\n while len(self.openSet) > 0:\n current = min(self.openSet, key=lambda s: self.fScore[s])\n if current == self.end:\n return\n self.openSet.remove(current)\n self.closedSet.append(current)\n 
for w in g.adj[current]:\n if w in self.closedSet:\n continue\n tentativegScore = self.gScore[current] + manhattanDistance(\n idxTohw(current, g.W), idxTohw(w, g.W))\n if tentativegScore < self.gScore[w]:\n self.edgeTo[w] = current\n self.marked[w] = True\n print('edgeTo ({}) -> ({})'.format(idxTohw(current, g.W\n ), idxTohw(w, g.W)))\n self.gScore[w] = tentativegScore\n self.fScore[w] = self.gScore[w] + manhattanDistance(idxTohw\n (w, g.W), idxTohw(self.end, g.W))\n print('fScore[%d] manhattan: %d' % (w, self.fScore[w]))\n if w not in self.openSet:\n self.openSet.append(w)\n\n def HasPathTo(self, v: int):\n return self.marked[v]\n\n def PathTo(self, v: int):\n result = []\n if not self.HasPathTo(v):\n return result\n x = v\n while x != self.start:\n result.append(x)\n x = self.edgeTo[x]\n result.append(self.start)\n return result\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef hwToidx(x: int, y: int, weight: int):\n return y * weight + x\n\n\ndef idxTohw(idx, weight: int):\n return [idx % weight, idx // weight]\n\n\ndef idxToXY(idx, cellw: int):\n curpoint = idxTohw(idx, cellw)\n curpoint[0], curpoint[1] = curpoint[0] * 10, curpoint[1] * 10\n return curpoint\n\n\nclass Graph:\n\n def __init__(self, V: int, W: int):\n self.V = V\n self.E = 0\n self.adj = []\n self.W = W\n for i in range(V):\n nears = []\n self.adj.append(nears)\n\n def AddEdge(self, v: int, w: int):\n self.adj[v].append(w)\n self.E += 1\n\n def __str__(self):\n str = ''\n for idx, nears in enumerate(self.adj):\n str += 'idx: {} nears: {}\\n'.format(idx, nears)\n return str\n\n\nclass BreadthFirstPaths:\n\n def __init__(self, g: Graph, s: int):\n self.marked = [False] * g.V\n self.edgeTo = [0] * g.V\n self.s = s\n self.bfs(g, self.s)\n\n def bfs(self, g: Graph, s: int):\n q = queue.Queue()\n q.put(s)\n while q.qsize() != 0:\n v = q.get()\n for w in g.adj[v]:\n if not self.marked[w]:\n self.edgeTo[w] = v\n self.marked[w] = True\n q.put(w)\n\n def HasPathTo(self, v: int):\n return self.marked[v]\n\n def PathTo(self, v: int) ->[int]:\n result = []\n if not self.HasPathTo(v):\n return result\n x = v\n while x != self.s:\n result.append(x)\n x = self.edgeTo[x]\n result.append(self.s)\n return result\n\n\n<mask token>\n\n\ndef dist_between(a, b):\n return (b[0] - a[0]) ** 2 + (b[1] - a[1]) ** 2\n\n\nclass AStarPaths:\n\n def __init__(self, g: Graph, start: int, end: int):\n self.closedSet = []\n self.openSet = [start]\n self.start = start\n self.end = end\n self.edgeTo = [0] * g.V\n self.marked = [False] * g.V\n self.gScore = [sys.maxsize] * g.V\n self.gScore[start] = 0\n self.fScore = [sys.maxsize] * g.V\n self.fScore[start] = manhattanDistance(idxTohw(start, g.W), idxTohw\n (end, g.W))\n self.astar(g)\n\n def astar(self, g: Graph):\n while len(self.openSet) > 0:\n current = min(self.openSet, key=lambda s: self.fScore[s])\n if current == 
self.end:\n return\n self.openSet.remove(current)\n self.closedSet.append(current)\n for w in g.adj[current]:\n if w in self.closedSet:\n continue\n tentativegScore = self.gScore[current] + manhattanDistance(\n idxTohw(current, g.W), idxTohw(w, g.W))\n if tentativegScore < self.gScore[w]:\n self.edgeTo[w] = current\n self.marked[w] = True\n print('edgeTo ({}) -> ({})'.format(idxTohw(current, g.W\n ), idxTohw(w, g.W)))\n self.gScore[w] = tentativegScore\n self.fScore[w] = self.gScore[w] + manhattanDistance(idxTohw\n (w, g.W), idxTohw(self.end, g.W))\n print('fScore[%d] manhattan: %d' % (w, self.fScore[w]))\n if w not in self.openSet:\n self.openSet.append(w)\n\n def HasPathTo(self, v: int):\n return self.marked[v]\n\n def PathTo(self, v: int):\n result = []\n if not self.HasPathTo(v):\n return result\n x = v\n while x != self.start:\n result.append(x)\n x = self.edgeTo[x]\n result.append(self.start)\n return result\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef hwToidx(x: int, y: int, weight: int):\n return y * weight + x\n\n\ndef idxTohw(idx, weight: int):\n return [idx % weight, idx // weight]\n\n\ndef idxToXY(idx, cellw: int):\n curpoint = idxTohw(idx, cellw)\n curpoint[0], curpoint[1] = curpoint[0] * 10, curpoint[1] * 10\n return curpoint\n\n\nclass Graph:\n\n def __init__(self, V: int, W: int):\n self.V = V\n self.E = 0\n self.adj = []\n self.W = W\n for i in range(V):\n nears = []\n self.adj.append(nears)\n\n def AddEdge(self, v: int, w: int):\n self.adj[v].append(w)\n self.E += 1\n\n def __str__(self):\n str = ''\n for idx, nears in enumerate(self.adj):\n str += 'idx: {} nears: {}\\n'.format(idx, nears)\n return str\n\n\nclass BreadthFirstPaths:\n\n def __init__(self, g: Graph, s: int):\n self.marked = [False] * g.V\n self.edgeTo = [0] * g.V\n self.s = s\n self.bfs(g, self.s)\n\n def bfs(self, g: Graph, s: int):\n q = queue.Queue()\n q.put(s)\n while q.qsize() != 0:\n v = q.get()\n for w in g.adj[v]:\n if not self.marked[w]:\n self.edgeTo[w] = v\n self.marked[w] = True\n q.put(w)\n\n def HasPathTo(self, v: int):\n return self.marked[v]\n\n def PathTo(self, v: int) ->[int]:\n result = []\n if not self.HasPathTo(v):\n return result\n x = v\n while x != self.s:\n result.append(x)\n x = self.edgeTo[x]\n result.append(self.s)\n return result\n\n\ndef manhattanDistance(x, y):\n return sum(map(lambda i, j: abs(i - j), x, y))\n\n\ndef dist_between(a, b):\n return (b[0] - a[0]) ** 2 + (b[1] - a[1]) ** 2\n\n\nclass AStarPaths:\n\n def __init__(self, g: Graph, start: int, end: int):\n self.closedSet = []\n self.openSet = [start]\n self.start = start\n self.end = end\n self.edgeTo = [0] * g.V\n self.marked = [False] * g.V\n self.gScore = [sys.maxsize] * g.V\n self.gScore[start] = 0\n self.fScore = [sys.maxsize] * g.V\n self.fScore[start] = manhattanDistance(idxTohw(start, g.W), idxTohw\n (end, g.W))\n self.astar(g)\n\n def astar(self, g: Graph):\n while len(self.openSet) > 0:\n current = 
min(self.openSet, key=lambda s: self.fScore[s])\n if current == self.end:\n return\n self.openSet.remove(current)\n self.closedSet.append(current)\n for w in g.adj[current]:\n if w in self.closedSet:\n continue\n tentativegScore = self.gScore[current] + manhattanDistance(\n idxTohw(current, g.W), idxTohw(w, g.W))\n if tentativegScore < self.gScore[w]:\n self.edgeTo[w] = current\n self.marked[w] = True\n print('edgeTo ({}) -> ({})'.format(idxTohw(current, g.W\n ), idxTohw(w, g.W)))\n self.gScore[w] = tentativegScore\n self.fScore[w] = self.gScore[w] + manhattanDistance(idxTohw\n (w, g.W), idxTohw(self.end, g.W))\n print('fScore[%d] manhattan: %d' % (w, self.fScore[w]))\n if w not in self.openSet:\n self.openSet.append(w)\n\n def HasPathTo(self, v: int):\n return self.marked[v]\n\n def PathTo(self, v: int):\n result = []\n if not self.HasPathTo(v):\n return result\n x = v\n while x != self.start:\n result.append(x)\n x = self.edgeTo[x]\n result.append(self.start)\n return result\n\n\n<mask token>\n",
"step-5": "import queue\nimport sys\nimport logging\n\nfrom superai.common import InitLog\n\nlogger = logging.getLogger(__name__)\n\n\n# 2维到1维\ndef hwToidx(x: int, y: int, weight: int):\n return y * weight + x\n\n\n# 1维到2维\ndef idxTohw(idx, weight: int):\n return [idx % weight, idx // weight]\n\n\n# 10x10 cell idx 到 [x,y]\ndef idxToXY(idx, cellw: int):\n curpoint = idxTohw(idx, cellw)\n curpoint[0], curpoint[1] = curpoint[0] * 10, curpoint[1] * 10\n return curpoint\n\n\n# 有向图\nclass Graph:\n def __init__(self, V: int, W: int):\n # 顶点数量\n self.V = V\n\n # 边数量\n self.E = 0\n\n # 邻接表\n self.adj = []\n\n # 宽度 (虽然是一维的但是表示是二维的)\n self.W = W\n\n for i in range(V):\n nears = []\n self.adj.append(nears)\n\n def AddEdge(self, v: int, w: int):\n self.adj[v].append(w)\n self.E += 1\n\n def __str__(self):\n str = \"\"\n for idx, nears in enumerate(self.adj):\n str += \"idx: {} nears: {}\\n\".format(idx, nears)\n return str\n\n\n# bfs\nclass BreadthFirstPaths:\n def __init__(self, g: Graph, s: int):\n self.marked = [False] * g.V\n self.edgeTo = [0] * g.V\n self.s = s\n\n self.bfs(g, self.s)\n\n def bfs(self, g: Graph, s: int):\n q = queue.Queue()\n q.put(s)\n while q.qsize() != 0:\n v = q.get()\n\n for w in g.adj[v]:\n # 这个路径没有经过\n if not self.marked[w]:\n self.edgeTo[w] = v\n self.marked[w] = True\n q.put(w)\n\n # print(w, idxTohw(w, 6))\n\n def HasPathTo(self, v: int):\n return self.marked[v]\n\n def PathTo(self, v: int) -> [int]:\n result = []\n if not self.HasPathTo(v):\n return result\n x = v\n while x != self.s:\n result.append(x)\n x = self.edgeTo[x]\n result.append(self.s)\n return result\n\n\n# 曼哈顿距离\ndef manhattanDistance(x, y):\n return sum(map(lambda i, j: abs(i - j), x, y))\n\n\n# 欧几里得距离\ndef dist_between(a, b):\n return (b[0] - a[0]) ** 2 + (b[1] - a[1]) ** 2\n\n\n# a* 4方位\nclass AStarPaths:\n def __init__(self, g: Graph, start: int, end: int):\n self.closedSet = []\n self.openSet = [start]\n\n self.start = start\n self.end = end\n\n self.edgeTo = [0] * g.V\n 
self.marked = [False] * g.V\n\n # 实际距离\n self.gScore = [sys.maxsize] * g.V\n self.gScore[start] = 0\n\n # 估算到终点的距离\n self.fScore = [sys.maxsize] * g.V\n self.fScore[start] = manhattanDistance(idxTohw(start, g.W), idxTohw(end, g.W))\n\n self.astar(g)\n\n def astar(self, g: Graph):\n while len(self.openSet) > 0:\n current = min(self.openSet, key=lambda s: self.fScore[s])\n\n if current == self.end:\n return\n self.openSet.remove(current)\n self.closedSet.append(current)\n for w in g.adj[current]:\n if w in self.closedSet:\n continue\n # 实际距离\n tentativegScore = self.gScore[current] + manhattanDistance(idxTohw(current, g.W),\n idxTohw(w, g.W))\n if tentativegScore < self.gScore[w]:\n self.edgeTo[w] = current\n self.marked[w] = True\n\n print(\"edgeTo ({}) -> ({})\".format(idxTohw(current, g.W), idxTohw(w, g.W)))\n self.gScore[w] = tentativegScore\n self.fScore[w] = self.gScore[w] + manhattanDistance(idxTohw(w, g.W), idxTohw(self.end, g.W))\n\n print(\"fScore[%d] manhattan: %d\" % (w, self.fScore[w]))\n\n if w not in self.openSet:\n self.openSet.append(w)\n\n def HasPathTo(self, v: int):\n return self.marked[v]\n\n def PathTo(self, v: int):\n result = []\n if not self.HasPathTo(v):\n return result\n\n x = v\n while x != self.start:\n result.append(x)\n x = self.edgeTo[x]\n result.append(self.start)\n return result\n\n\ndef main():\n InitLog()\n\n # 0,1,2,3,4 ... 一共12个顶点. 
width=6,height=4\n graph = Graph(24, 6)\n\n graph.AddEdge(hwToidx(2, 0, 6), hwToidx(2, 1, 6))\n\n graph.AddEdge(hwToidx(1, 1, 6), hwToidx(1, 2, 6))\n\n graph.AddEdge(hwToidx(2, 1, 6), hwToidx(2, 0, 6))\n graph.AddEdge(hwToidx(2, 1, 6), hwToidx(3, 1, 6))\n\n graph.AddEdge(hwToidx(3, 1, 6), hwToidx(2, 1, 6))\n graph.AddEdge(hwToidx(3, 1, 6), hwToidx(4, 1, 6))\n\n graph.AddEdge(hwToidx(4, 1, 6), hwToidx(4, 2, 6))\n graph.AddEdge(hwToidx(4, 1, 6), hwToidx(3, 1, 6))\n\n graph.AddEdge(hwToidx(0, 2, 6), hwToidx(1, 2, 6))\n\n graph.AddEdge(hwToidx(1, 2, 6), hwToidx(1, 3, 6))\n graph.AddEdge(hwToidx(1, 2, 6), hwToidx(0, 2, 6))\n graph.AddEdge(hwToidx(1, 2, 6), hwToidx(1, 1, 6))\n graph.AddEdge(hwToidx(1, 2, 6), hwToidx(2, 2, 6))\n\n graph.AddEdge(hwToidx(2, 2, 6), hwToidx(2, 3, 6))\n graph.AddEdge(hwToidx(2, 2, 6), hwToidx(1, 2, 6))\n graph.AddEdge(hwToidx(2, 2, 6), hwToidx(3, 2, 6))\n graph.AddEdge(hwToidx(3, 2, 6), hwToidx(3, 3, 6))\n graph.AddEdge(hwToidx(3, 2, 6), hwToidx(2, 2, 6))\n graph.AddEdge(hwToidx(3, 2, 6), hwToidx(4, 2, 6))\n graph.AddEdge(hwToidx(4, 2, 6), hwToidx(4, 3, 6))\n graph.AddEdge(hwToidx(4, 2, 6), hwToidx(3, 2, 6))\n graph.AddEdge(hwToidx(4, 2, 6), hwToidx(4, 1, 6))\n graph.AddEdge(hwToidx(4, 2, 6), hwToidx(5, 2, 6))\n graph.AddEdge(hwToidx(5, 2, 6), hwToidx(5, 3, 6))\n graph.AddEdge(hwToidx(5, 2, 6), hwToidx(4, 2, 6))\n graph.AddEdge(hwToidx(1, 3, 6), hwToidx(1, 2, 6))\n graph.AddEdge(hwToidx(2, 3, 6), hwToidx(2, 2, 6))\n graph.AddEdge(hwToidx(2, 3, 6), hwToidx(3, 3, 6))\n graph.AddEdge(hwToidx(3, 3, 6), hwToidx(2, 3, 6))\n graph.AddEdge(hwToidx(3, 3, 6), hwToidx(3, 2, 6))\n graph.AddEdge(hwToidx(4, 3, 6), hwToidx(5, 3, 6))\n graph.AddEdge(hwToidx(4, 3, 6), hwToidx(4, 2, 6))\n graph.AddEdge(hwToidx(5, 3, 6), hwToidx(4, 3, 6))\n graph.AddEdge(hwToidx(5, 3, 6), hwToidx(5, 2, 6))\n\n print(\"图:\")\n print(graph)\n\n print(\"广度优先:\")\n bfs = BreadthFirstPaths(graph, 2)\n paths = bfs.PathTo(hwToidx(1, 1, 6))\n for v in reversed(paths):\n print(v, 
idxTohw(v, 6))\n\n # print(manhattanDistance([0, 2], [1, 2]))\n\n print(\"\\na*寻径\")\n astar = AStarPaths(graph, 2, 7)\n paths = astar.PathTo(hwToidx(1, 1, 6))\n for v in reversed(paths):\n print(v, idxTohw(v, 6))\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
14,
17,
18,
19,
24
]
}
|
[
14,
17,
18,
19,
24
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app_name = 'authors'
urlpatterns = [path('polls/', PollsList.as_view()), path('polls/create',
PollsCreate.as_view()), path('polls/<int:pk>', SinglePollsView.as_view(
)), path('answers/', PollsAnswer.as_view())]
<|reserved_special_token_1|>
from django.urls import path
from .views import PollsList, SinglePollsView, PollsCreate, PollsAnswer
app_name = 'authors'
urlpatterns = [path('polls/', PollsList.as_view()), path('polls/create',
PollsCreate.as_view()), path('polls/<int:pk>', SinglePollsView.as_view(
)), path('answers/', PollsAnswer.as_view())]
<|reserved_special_token_1|>
from django.urls import path
from .views import PollsList, SinglePollsView, PollsCreate, PollsAnswer
# app_name namespaces these routes so reverse() look-ups can use
# 'authors:<name>' later.
app_name = "authors"
# URL routes for the polls API: list, create, detail (by primary key),
# and answers.
urlpatterns = [
    path('polls/', PollsList.as_view()),
    path('polls/create', PollsCreate.as_view()),
    path('polls/<int:pk>', SinglePollsView.as_view()),
    path('answers/', PollsAnswer.as_view()),
]
|
flexible
|
{
"blob_id": "64ac007faeebe0e71ba0060e74fa07154e6291e2",
"index": 6053,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'authors'\nurlpatterns = [path('polls/', PollsList.as_view()), path('polls/create',\n PollsCreate.as_view()), path('polls/<int:pk>', SinglePollsView.as_view(\n )), path('answers/', PollsAnswer.as_view())]\n",
"step-3": "from django.urls import path\nfrom .views import PollsList, SinglePollsView, PollsCreate, PollsAnswer\napp_name = 'authors'\nurlpatterns = [path('polls/', PollsList.as_view()), path('polls/create',\n PollsCreate.as_view()), path('polls/<int:pk>', SinglePollsView.as_view(\n )), path('answers/', PollsAnswer.as_view())]\n",
"step-4": "from django.urls import path\nfrom .views import PollsList, SinglePollsView, PollsCreate, PollsAnswer\napp_name = \"authors\"\n# app_name will help us do a reverse look-up latter.\nurlpatterns = [\n path('polls/', PollsList.as_view()),\n path('polls/create', PollsCreate.as_view()),\n path('polls/<int:pk>', SinglePollsView.as_view()),\n path('answers/', PollsAnswer.as_view()),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def convert(o):
if isinstance(o, np.generic):
return o.item()
raise TypeError
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def convert(o):
if isinstance(o, np.generic):
return o.item()
raise TypeError
<|reserved_special_token_0|>
for d in dirs:
outfile = 'ocr/' + d + '.json'
if os.path.isfile(outfile):
print('found ' + outfile + ', skipping')
continue
files = glob.glob(basedir + d + '/*.png')
ocr = {}
for f in files:
i = f.split('_')[-2]
img = cv2.imread(f)
results = []
for reader in readers:
results = results + reader.readtext(img)
h = list(filter(lambda result: len(result) > 2 and len(result[1]) >
0 and result[2] >= 0.1, results))
if len(h) > 0:
ocr[i] = h
with open(outfile, 'w') as f:
json.dump(ocr, f, indent=1, default=convert)
print(d)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def convert(o):
if isinstance(o, np.generic):
return o.item()
raise TypeError
readers = [easyocr.Reader(['la', 'en', 'de', 'fr', 'es', 'cs', 'is'], gpu=
False)]
basedir = 'keyframes/'
dirs = os.listdir(basedir)
for d in dirs:
outfile = 'ocr/' + d + '.json'
if os.path.isfile(outfile):
print('found ' + outfile + ', skipping')
continue
files = glob.glob(basedir + d + '/*.png')
ocr = {}
for f in files:
i = f.split('_')[-2]
img = cv2.imread(f)
results = []
for reader in readers:
results = results + reader.readtext(img)
h = list(filter(lambda result: len(result) > 2 and len(result[1]) >
0 and result[2] >= 0.1, results))
if len(h) > 0:
ocr[i] = h
with open(outfile, 'w') as f:
json.dump(ocr, f, indent=1, default=convert)
print(d)
<|reserved_special_token_1|>
import easyocr
import cv2
import json
import numpy as np
import os
import os.path
import glob
def convert(o):
if isinstance(o, np.generic):
return o.item()
raise TypeError
readers = [easyocr.Reader(['la', 'en', 'de', 'fr', 'es', 'cs', 'is'], gpu=
False)]
basedir = 'keyframes/'
dirs = os.listdir(basedir)
for d in dirs:
outfile = 'ocr/' + d + '.json'
if os.path.isfile(outfile):
print('found ' + outfile + ', skipping')
continue
files = glob.glob(basedir + d + '/*.png')
ocr = {}
for f in files:
i = f.split('_')[-2]
img = cv2.imread(f)
results = []
for reader in readers:
results = results + reader.readtext(img)
h = list(filter(lambda result: len(result) > 2 and len(result[1]) >
0 and result[2] >= 0.1, results))
if len(h) > 0:
ocr[i] = h
with open(outfile, 'w') as f:
json.dump(ocr, f, indent=1, default=convert)
print(d)
<|reserved_special_token_1|>
import easyocr
import cv2
import json
import numpy as np
import os
import os.path
import glob
def convert(o):
    """`json.dump` default= hook: unwrap numpy scalar types to plain Python
    values; raise TypeError for anything else (as json expects)."""
    if not isinstance(o, np.generic):
        raise TypeError
    return o.item()
# OCR readers to run over every keyframe image. Additional single-language
# readers are left commented out and can be enabled as needed.
readers = [
    easyocr.Reader(['la', 'en', 'de', 'fr', 'es', 'cs', 'is'], gpu = False),
    #easyocr.Reader(['ch_tra'], gpu = False),
    #easyocr.Reader(['fa'], gpu = False),
    #easyocr.Reader(['hi'], gpu = False),
    #easyocr.Reader(['ja'], gpu = False),
    #easyocr.Reader(['ko'], gpu = False),
    #easyocr.Reader(['th'], gpu = False),
]
# Input layout: one sub-directory of PNG keyframes per video.
basedir = "keyframes/"
dirs = os.listdir(basedir)
# Write one JSON file of OCR results per keyframe directory.
for d in dirs:
    outfile = 'ocr/' + d + '.json'
    # resume support: skip directories that already have a result file
    if os.path.isfile(outfile):
        print("found " + outfile + ", skipping")
        continue
    files = glob.glob(basedir + d + "/*.png")
    ocr = {}
    for f in files:
        # keyframe id: second-to-last underscore-separated token of the
        # path — TODO confirm against the keyframe naming scheme
        i = f.split("_")[-2]
        img = cv2.imread(f)
        results = []
        for reader in readers:
            results = results + reader.readtext(img)
        # keep only detections with non-empty text and confidence >= 0.1
        h = list(filter(lambda result : len(result) > 2 and len(result[1]) > 0 and result[2] >= 0.1, results))
        if len(h) > 0:
            ocr[i] = h
    with open(outfile,'w') as f:
        # `convert` unwraps numpy scalars that easyocr returns
        json.dump(ocr, f, indent=1, default=convert)
    print(d)
|
flexible
|
{
"blob_id": "7057b882ca1ce2c08e9ba7add5f115636b9b319e",
"index": 8745,
"step-1": "<mask token>\n\n\ndef convert(o):\n if isinstance(o, np.generic):\n return o.item()\n raise TypeError\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef convert(o):\n if isinstance(o, np.generic):\n return o.item()\n raise TypeError\n\n\n<mask token>\nfor d in dirs:\n outfile = 'ocr/' + d + '.json'\n if os.path.isfile(outfile):\n print('found ' + outfile + ', skipping')\n continue\n files = glob.glob(basedir + d + '/*.png')\n ocr = {}\n for f in files:\n i = f.split('_')[-2]\n img = cv2.imread(f)\n results = []\n for reader in readers:\n results = results + reader.readtext(img)\n h = list(filter(lambda result: len(result) > 2 and len(result[1]) >\n 0 and result[2] >= 0.1, results))\n if len(h) > 0:\n ocr[i] = h\n with open(outfile, 'w') as f:\n json.dump(ocr, f, indent=1, default=convert)\n print(d)\n",
"step-3": "<mask token>\n\n\ndef convert(o):\n if isinstance(o, np.generic):\n return o.item()\n raise TypeError\n\n\nreaders = [easyocr.Reader(['la', 'en', 'de', 'fr', 'es', 'cs', 'is'], gpu=\n False)]\nbasedir = 'keyframes/'\ndirs = os.listdir(basedir)\nfor d in dirs:\n outfile = 'ocr/' + d + '.json'\n if os.path.isfile(outfile):\n print('found ' + outfile + ', skipping')\n continue\n files = glob.glob(basedir + d + '/*.png')\n ocr = {}\n for f in files:\n i = f.split('_')[-2]\n img = cv2.imread(f)\n results = []\n for reader in readers:\n results = results + reader.readtext(img)\n h = list(filter(lambda result: len(result) > 2 and len(result[1]) >\n 0 and result[2] >= 0.1, results))\n if len(h) > 0:\n ocr[i] = h\n with open(outfile, 'w') as f:\n json.dump(ocr, f, indent=1, default=convert)\n print(d)\n",
"step-4": "import easyocr\nimport cv2\nimport json\nimport numpy as np\nimport os\nimport os.path\nimport glob\n\n\ndef convert(o):\n if isinstance(o, np.generic):\n return o.item()\n raise TypeError\n\n\nreaders = [easyocr.Reader(['la', 'en', 'de', 'fr', 'es', 'cs', 'is'], gpu=\n False)]\nbasedir = 'keyframes/'\ndirs = os.listdir(basedir)\nfor d in dirs:\n outfile = 'ocr/' + d + '.json'\n if os.path.isfile(outfile):\n print('found ' + outfile + ', skipping')\n continue\n files = glob.glob(basedir + d + '/*.png')\n ocr = {}\n for f in files:\n i = f.split('_')[-2]\n img = cv2.imread(f)\n results = []\n for reader in readers:\n results = results + reader.readtext(img)\n h = list(filter(lambda result: len(result) > 2 and len(result[1]) >\n 0 and result[2] >= 0.1, results))\n if len(h) > 0:\n ocr[i] = h\n with open(outfile, 'w') as f:\n json.dump(ocr, f, indent=1, default=convert)\n print(d)\n",
"step-5": "import easyocr\r\nimport cv2\r\nimport json\r\nimport numpy as np\r\nimport os\r\nimport os.path\r\nimport glob\r\n\r\ndef convert(o):\r\n if isinstance(o, np.generic): return o.item() \r\n raise TypeError\r\n\r\nreaders = [\r\n easyocr.Reader(['la', 'en', 'de', 'fr', 'es', 'cs', 'is'], gpu = False),\r\n #easyocr.Reader(['ch_tra'], gpu = False),\r\n #easyocr.Reader(['fa'], gpu = False),\r\n #easyocr.Reader(['hi'], gpu = False), \r\n #easyocr.Reader(['ja'], gpu = False), \r\n #easyocr.Reader(['ko'], gpu = False),\r\n #easyocr.Reader(['th'], gpu = False),\r\n]\r\n\r\nbasedir = \"keyframes/\"\r\n\r\ndirs = os.listdir(basedir)\r\n\r\n\r\nfor d in dirs:\r\n\r\n outfile = 'ocr/' + d + '.json'\r\n if os.path.isfile(outfile):\r\n print(\"found \" + outfile + \", skipping\")\r\n continue\r\n \r\n files = glob.glob(basedir + d + \"/*.png\")\r\n \r\n ocr = {}\r\n\r\n for f in files:\r\n i = f.split(\"_\")[-2]\r\n img = cv2.imread(f)\r\n \r\n results = []\r\n for reader in readers:\r\n results = results + reader.readtext(img)\r\n \r\n h = list(filter(lambda result : len(result) > 2 and len(result[1]) > 0 and result[2] >= 0.1, results))\r\n \r\n if len(h) > 0:\r\n ocr[i] = h\r\n \r\n with open(outfile,'w') as f: \r\n json.dump(ocr, f, indent=1, default=convert)\r\n \r\n print(d)\r\n \r\n ",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/usr/bin/env python
# encoding: utf-8
import multiprocessing
import time
import sys
def daemon():
    """Target for the daemon process: announce start, sleep 2s, announce exit.

    Flushes stdout after each print so output from the child is visible
    immediately.
    """
    proc = multiprocessing.current_process()
    print('Starting:', proc.name, proc.pid)
    sys.stdout.flush()
    time.sleep(2)
    print('Exiting :', proc.name, proc.pid)
    sys.stdout.flush()
def non_daemon():
    """Target for the non-daemon process: announce start and exit immediately.

    Flushes stdout after each print so output from the child is visible
    immediately.
    """
    proc = multiprocessing.current_process()
    print('Starting:', proc.name, proc.pid)
    sys.stdout.flush()
    print('Exiting :', proc.name, proc.pid)
    sys.stdout.flush()
def main1():
    """Start a daemon and a non-daemon child without joining either.

    Because nothing joins the daemon child, it is terminated when the
    parent process exits.
    """
    daemon_proc = multiprocessing.Process(name="daemon_process", target=daemon)
    normal_proc = multiprocessing.Process(name="no_daemon_process", target=non_daemon)

    # .daemon defaults to False for both before we set it explicitly
    print("daemon_process default daemon value: %s" % daemon_proc.daemon)
    print("no_daemon_process default daemon value: %s" % normal_proc.daemon)

    daemon_proc.daemon = True
    normal_proc.daemon = False

    daemon_proc.start()
    time.sleep(1)
    normal_proc.start()
def main2():
    """Same as main1, but join both children so the parent waits for them."""
    d = multiprocessing.Process(name="daemon_process", target=daemon)
    n = multiprocessing.Process(name="no_daemon_process", target=non_daemon)
    print("daemon_process default daemon value: %s" % d.daemon)
    print("no_daemon_process default daemon value: %s" % n.daemon)
    d.daemon = True
    n.daemon = False
    d.start()
    time.sleep(1)
    n.start()
    # Block the parent process until the child exits.
    # Experimentally, the order of the join calls is independent of the order
    # in which the children finish; the only constraint is that the parent
    # does not continue until every joined child has exited.
    d.join()
    n.join()
def main3():
    """Demonstrate join() with a timeout: the parent resumes after 1 second
    even though the daemon child (which sleeps 2 seconds) is still alive."""
    d = multiprocessing.Process(name='daemon', target=daemon)
    d.daemon = True
    n = multiprocessing.Process(name='non-daemon', target=non_daemon)
    n.daemon = False
    d.start()
    n.start()
    # join() accepts a timeout argument: once it elapses, join returns
    # whether or not the child process has finished.
    d.join(1)
    # Child d is still running here, but the parent continues anyway.
    print('d.is_alive()', d.is_alive())
    n.join()
# Entry point: run one of the three demos (only main3 is enabled).
if __name__ == "__main__":
    # main1()
    # main2()
    main3()
|
normal
|
{
"blob_id": "9bb6fd6fbe212bdc29e2d1ec37fa6ec6ca9a9469",
"index": 1060,
"step-1": "<mask token>\n\n\ndef daemon():\n p = multiprocessing.current_process()\n print('Starting:', p.name, p.pid)\n sys.stdout.flush()\n time.sleep(2)\n print('Exiting :', p.name, p.pid)\n sys.stdout.flush()\n\n\n<mask token>\n\n\ndef main2():\n d = multiprocessing.Process(name='daemon_process', target=daemon)\n n = multiprocessing.Process(name='no_daemon_process', target=non_daemon)\n print('daemon_process default daemon value: %s' % d.daemon)\n print('no_daemon_process default daemon value: %s' % n.daemon)\n d.daemon = True\n n.daemon = False\n d.start()\n time.sleep(1)\n n.start()\n d.join()\n n.join()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef daemon():\n p = multiprocessing.current_process()\n print('Starting:', p.name, p.pid)\n sys.stdout.flush()\n time.sleep(2)\n print('Exiting :', p.name, p.pid)\n sys.stdout.flush()\n\n\ndef non_daemon():\n p = multiprocessing.current_process()\n print('Starting:', p.name, p.pid)\n sys.stdout.flush()\n print('Exiting :', p.name, p.pid)\n sys.stdout.flush()\n\n\ndef main1():\n d = multiprocessing.Process(name='daemon_process', target=daemon)\n n = multiprocessing.Process(name='no_daemon_process', target=non_daemon)\n print('daemon_process default daemon value: %s' % d.daemon)\n print('no_daemon_process default daemon value: %s' % n.daemon)\n d.daemon = True\n n.daemon = False\n d.start()\n time.sleep(1)\n n.start()\n\n\ndef main2():\n d = multiprocessing.Process(name='daemon_process', target=daemon)\n n = multiprocessing.Process(name='no_daemon_process', target=non_daemon)\n print('daemon_process default daemon value: %s' % d.daemon)\n print('no_daemon_process default daemon value: %s' % n.daemon)\n d.daemon = True\n n.daemon = False\n d.start()\n time.sleep(1)\n n.start()\n d.join()\n n.join()\n\n\ndef main3():\n d = multiprocessing.Process(name='daemon', target=daemon)\n d.daemon = True\n n = multiprocessing.Process(name='non-daemon', target=non_daemon)\n n.daemon = False\n d.start()\n n.start()\n d.join(1)\n print('d.is_alive()', d.is_alive())\n n.join()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef daemon():\n p = multiprocessing.current_process()\n print('Starting:', p.name, p.pid)\n sys.stdout.flush()\n time.sleep(2)\n print('Exiting :', p.name, p.pid)\n sys.stdout.flush()\n\n\ndef non_daemon():\n p = multiprocessing.current_process()\n print('Starting:', p.name, p.pid)\n sys.stdout.flush()\n print('Exiting :', p.name, p.pid)\n sys.stdout.flush()\n\n\ndef main1():\n d = multiprocessing.Process(name='daemon_process', target=daemon)\n n = multiprocessing.Process(name='no_daemon_process', target=non_daemon)\n print('daemon_process default daemon value: %s' % d.daemon)\n print('no_daemon_process default daemon value: %s' % n.daemon)\n d.daemon = True\n n.daemon = False\n d.start()\n time.sleep(1)\n n.start()\n\n\ndef main2():\n d = multiprocessing.Process(name='daemon_process', target=daemon)\n n = multiprocessing.Process(name='no_daemon_process', target=non_daemon)\n print('daemon_process default daemon value: %s' % d.daemon)\n print('no_daemon_process default daemon value: %s' % n.daemon)\n d.daemon = True\n n.daemon = False\n d.start()\n time.sleep(1)\n n.start()\n d.join()\n n.join()\n\n\ndef main3():\n d = multiprocessing.Process(name='daemon', target=daemon)\n d.daemon = True\n n = multiprocessing.Process(name='non-daemon', target=non_daemon)\n n.daemon = False\n d.start()\n n.start()\n d.join(1)\n print('d.is_alive()', d.is_alive())\n n.join()\n\n\nif __name__ == '__main__':\n main3()\n",
"step-4": "import multiprocessing\nimport time\nimport sys\n\n\ndef daemon():\n p = multiprocessing.current_process()\n print('Starting:', p.name, p.pid)\n sys.stdout.flush()\n time.sleep(2)\n print('Exiting :', p.name, p.pid)\n sys.stdout.flush()\n\n\ndef non_daemon():\n p = multiprocessing.current_process()\n print('Starting:', p.name, p.pid)\n sys.stdout.flush()\n print('Exiting :', p.name, p.pid)\n sys.stdout.flush()\n\n\ndef main1():\n d = multiprocessing.Process(name='daemon_process', target=daemon)\n n = multiprocessing.Process(name='no_daemon_process', target=non_daemon)\n print('daemon_process default daemon value: %s' % d.daemon)\n print('no_daemon_process default daemon value: %s' % n.daemon)\n d.daemon = True\n n.daemon = False\n d.start()\n time.sleep(1)\n n.start()\n\n\ndef main2():\n d = multiprocessing.Process(name='daemon_process', target=daemon)\n n = multiprocessing.Process(name='no_daemon_process', target=non_daemon)\n print('daemon_process default daemon value: %s' % d.daemon)\n print('no_daemon_process default daemon value: %s' % n.daemon)\n d.daemon = True\n n.daemon = False\n d.start()\n time.sleep(1)\n n.start()\n d.join()\n n.join()\n\n\ndef main3():\n d = multiprocessing.Process(name='daemon', target=daemon)\n d.daemon = True\n n = multiprocessing.Process(name='non-daemon', target=non_daemon)\n n.daemon = False\n d.start()\n n.start()\n d.join(1)\n print('d.is_alive()', d.is_alive())\n n.join()\n\n\nif __name__ == '__main__':\n main3()\n",
"step-5": "#!/usr/bin/env python\n# encoding: utf-8\n\nimport multiprocessing\nimport time\nimport sys\n\n\ndef daemon():\n p = multiprocessing.current_process()\n print('Starting:', p.name, p.pid)\n sys.stdout.flush()\n time.sleep(2)\n print('Exiting :', p.name, p.pid)\n sys.stdout.flush()\n\n\ndef non_daemon():\n p = multiprocessing.current_process()\n print('Starting:', p.name, p.pid)\n sys.stdout.flush()\n print('Exiting :', p.name, p.pid)\n sys.stdout.flush()\n\n\ndef main1():\n d = multiprocessing.Process(name=\"daemon_process\", target=daemon)\n n = multiprocessing.Process(name=\"no_daemon_process\", target=non_daemon)\n print(\"daemon_process default daemon value: %s\" % d.daemon)\n print(\"no_daemon_process default daemon value: %s\" % n.daemon)\n d.daemon = True\n n.daemon = False\n d.start()\n time.sleep(1)\n n.start()\n\n\ndef main2():\n d = multiprocessing.Process(name=\"daemon_process\", target=daemon)\n n = multiprocessing.Process(name=\"no_daemon_process\", target=non_daemon)\n print(\"daemon_process default daemon value: %s\" % d.daemon)\n print(\"no_daemon_process default daemon value: %s\" % n.daemon)\n d.daemon = True\n n.daemon = False\n d.start()\n time.sleep(1)\n n.start()\n # 阻塞父进程,直到子进程结束为止。\n # 从实验来看,子进程结束和join的先后顺序无关。\n # 唯一的限制是父进程需要等所有join的子进程结束后,才会继续向下执行。\n d.join()\n n.join()\n\n\ndef main3():\n d = multiprocessing.Process(name='daemon', target=daemon)\n d.daemon = True\n n = multiprocessing.Process(name='non-daemon', target=non_daemon)\n n.daemon = False\n d.start()\n n.start()\n # join接受一个timeout的参数,意思就是如果超过了timeout的时间,不管子进程是否结束,join函数也会直接返回。\n d.join(1)\n # 可以看到子进程d仍然未结束,但是父进程已经继续执行了。\n print('d.is_alive()', d.is_alive())\n n.join()\n\n\nif __name__ == \"__main__\":\n # main1()\n # main2()\n main3()\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
import data
import sub_vgg19
import time
import tensorflow as tf
# Model handles built in the project-local sub_vgg19 module:
# vgg19_syn is driven via Keras fit()/evaluate() below, while vgg19_asy is
# driven via train()/evaluate() taking input_fn callables -- presumably an
# Estimator-style model; TODO confirm against sub_vgg19.
model_syn = sub_vgg19.vgg19_syn
model_asy = sub_vgg19.vgg19_asy
# Train/test splits are loaded at import time by the project-local data module.
train_x = data.train_x
train_y = data.train_y
test_x = data.test_x
test_y = data.test_y
def input_fn(images, labels, epochs, batch_size):
    """Build a tf.data pipeline over (images, labels).

    Repeats the full dataset `epochs` times, then groups examples into
    batches of `batch_size`.

    Args:
        images: array-like of input examples.
        labels: array-like of targets aligned with `images`.
        epochs: number of passes over the data.
        batch_size: number of examples per batch.

    Returns:
        A tf.data.Dataset yielding (images, labels) batches.
    """
    # Renamed from `data`: the original local shadowed the module-level
    # `data` import, which is confusing even though it was harmless here.
    dataset = tf.data.Dataset.from_tensor_slices((images, labels))
    dataset = dataset.repeat(epochs).batch(batch_size)
    return dataset
# Hyperparameters for this run.
epochs = 30
batch_size = 32 * 8
# Toggle between the synchronous (Keras fit/evaluate) path and the
# asynchronous (train/evaluate with input_fn callables) path.
syn = True
# Wall-clock timer covering training plus evaluation.
time1 = time.time()
if syn :
    model_syn.fit(train_x, train_y, epochs=epochs, batch_size = batch_size)
    test_loss, test_acc = model_syn.evaluate(test_x, test_y, verbose=2, batch_size = 1000) # test batch size fixed at 1000
else:
    model_asy.train(lambda: input_fn(train_x,
                                     train_y,
                                     epochs=epochs,
                                     batch_size=batch_size))
    acc = model_asy.evaluate(lambda: input_fn(test_x,
                                              test_y,
                                              epochs=epochs,
                                              batch_size=1000)) # test batch size fixed at 1000
    print("acc", acc)
print("총 걸린 시간 :", time.time() - time1)  # message reads "total elapsed time"
|
normal
|
{
"blob_id": "ef6f91af5f500745fdcc23947a7e1764061c608c",
"index": 2368,
"step-1": "<mask token>\n\n\ndef input_fn(images, labels, epochs, batch_size):\n data = tf.data.Dataset.from_tensor_slices((images, labels))\n data = data.repeat(epochs).batch(batch_size)\n return data\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef input_fn(images, labels, epochs, batch_size):\n data = tf.data.Dataset.from_tensor_slices((images, labels))\n data = data.repeat(epochs).batch(batch_size)\n return data\n\n\n<mask token>\nif syn:\n model_syn.fit(train_x, train_y, epochs=epochs, batch_size=batch_size)\n test_loss, test_acc = model_syn.evaluate(test_x, test_y, verbose=2,\n batch_size=1000)\nelse:\n model_asy.train(lambda : input_fn(train_x, train_y, epochs=epochs,\n batch_size=batch_size))\n acc = model_asy.evaluate(lambda : input_fn(test_x, test_y, epochs=\n epochs, batch_size=1000))\n print('acc', acc)\nprint('총 걸린 시간 :', time.time() - time1)\n",
"step-3": "<mask token>\nmodel_syn = sub_vgg19.vgg19_syn\nmodel_asy = sub_vgg19.vgg19_asy\ntrain_x = data.train_x\ntrain_y = data.train_y\ntest_x = data.test_x\ntest_y = data.test_y\n\n\ndef input_fn(images, labels, epochs, batch_size):\n data = tf.data.Dataset.from_tensor_slices((images, labels))\n data = data.repeat(epochs).batch(batch_size)\n return data\n\n\nepochs = 30\nbatch_size = 32 * 8\nsyn = True\ntime1 = time.time()\nif syn:\n model_syn.fit(train_x, train_y, epochs=epochs, batch_size=batch_size)\n test_loss, test_acc = model_syn.evaluate(test_x, test_y, verbose=2,\n batch_size=1000)\nelse:\n model_asy.train(lambda : input_fn(train_x, train_y, epochs=epochs,\n batch_size=batch_size))\n acc = model_asy.evaluate(lambda : input_fn(test_x, test_y, epochs=\n epochs, batch_size=1000))\n print('acc', acc)\nprint('총 걸린 시간 :', time.time() - time1)\n",
"step-4": "import data\nimport sub_vgg19\nimport time\nimport tensorflow as tf\nmodel_syn = sub_vgg19.vgg19_syn\nmodel_asy = sub_vgg19.vgg19_asy\ntrain_x = data.train_x\ntrain_y = data.train_y\ntest_x = data.test_x\ntest_y = data.test_y\n\n\ndef input_fn(images, labels, epochs, batch_size):\n data = tf.data.Dataset.from_tensor_slices((images, labels))\n data = data.repeat(epochs).batch(batch_size)\n return data\n\n\nepochs = 30\nbatch_size = 32 * 8\nsyn = True\ntime1 = time.time()\nif syn:\n model_syn.fit(train_x, train_y, epochs=epochs, batch_size=batch_size)\n test_loss, test_acc = model_syn.evaluate(test_x, test_y, verbose=2,\n batch_size=1000)\nelse:\n model_asy.train(lambda : input_fn(train_x, train_y, epochs=epochs,\n batch_size=batch_size))\n acc = model_asy.evaluate(lambda : input_fn(test_x, test_y, epochs=\n epochs, batch_size=1000))\n print('acc', acc)\nprint('총 걸린 시간 :', time.time() - time1)\n",
"step-5": "import data\nimport sub_vgg19\nimport time\nimport tensorflow as tf\n\nmodel_syn = sub_vgg19.vgg19_syn\nmodel_asy = sub_vgg19.vgg19_asy\ntrain_x = data.train_x\ntrain_y = data.train_y\ntest_x = data.test_x\ntest_y = data.test_y\n\n\ndef input_fn(images, labels, epochs, batch_size):\n data = tf.data.Dataset.from_tensor_slices((images, labels))\n data = data.repeat(epochs).batch(batch_size)\n return data\n\n\n\nepochs = 30\nbatch_size = 32 * 8\n\nsyn = True\n\ntime1 = time.time()\nif syn :\n model_syn.fit(train_x, train_y, epochs=epochs, batch_size = batch_size)\n test_loss, test_acc = model_syn.evaluate(test_x, test_y, verbose=2, batch_size = 1000) # test는 size 1000으로 고정\nelse:\n\n model_asy.train(lambda: input_fn(train_x,\n train_y,\n epochs=epochs,\n batch_size=batch_size))\n\n acc = model_asy.evaluate(lambda: input_fn(test_x,\n test_y,\n epochs=epochs,\n batch_size=1000)) # test는 size 1000으로 고정\n print(\"acc\", acc)\n\nprint(\"총 걸린 시간 :\", time.time() - time1)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/usr/bin/env python3
def main():
    """Do nothing; all of this script's work happens in handle_result."""
def handle_result(args, result, target_window_id, boss):
    """Navigate based on args[1]: 'next'/'previous' cycle tabs first, and
    the argument is then always forwarded to neighboring_window.

    NOTE(review): neighboring_window is invoked even for the tab
    directions 'next'/'previous' -- confirm that is intentional.
    """
    direction = args[1]
    tab_step = {"next": 1, "previous": -1}.get(direction)
    if tab_step is not None:
        boss.active_tab_manager.next_tab(tab_step)
    boss.active_tab.neighboring_window(direction)


handle_result.no_ui = True
|
normal
|
{
"blob_id": "3a7f9bf5420b2d3587f1988c35f2f88bd2fa2b32",
"index": 2771,
"step-1": "<mask token>\n",
"step-2": "def main():\n pass\n\n\n<mask token>\n",
"step-3": "def main():\n pass\n\n\ndef handle_result(args, result, target_window_id, boss):\n if args[1] == 'next':\n boss.active_tab_manager.next_tab(1)\n elif args[1] == 'previous':\n boss.active_tab_manager.next_tab(-1)\n boss.active_tab.neighboring_window(args[1])\n\n\n<mask token>\n",
"step-4": "def main():\n pass\n\n\ndef handle_result(args, result, target_window_id, boss):\n if args[1] == 'next':\n boss.active_tab_manager.next_tab(1)\n elif args[1] == 'previous':\n boss.active_tab_manager.next_tab(-1)\n boss.active_tab.neighboring_window(args[1])\n\n\nhandle_result.no_ui = True\n",
"step-5": "#!/usr/bin/env python3\n\ndef main():\n pass\n\n\ndef handle_result(args, result, target_window_id, boss):\n if args[1] == \"next\":\n boss.active_tab_manager.next_tab(1)\n elif args[1] == \"previous\":\n boss.active_tab_manager.next_tab(-1)\n\n boss.active_tab.neighboring_window(args[1])\n\nhandle_result.no_ui = True\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import re
import cgi
import os
import urllib
import urllib2
from time import sleep
from google.appengine.api import taskqueue
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.api import urlfetch
from google.appengine.api import backends
from google.appengine.api import logservice
# Disable every automatic log-flush trigger; logs are flushed manually (or
# not at all) instead of per request.
logservice.AUTOFLUSH_EVERY_SECONDS = None
logservice.AUTOFLUSH_EVERY_BYTES = None
logservice.AUTOFLUSH_ENABLED = False
# Default Microsoft Web N-gram service parameters; each can be overridden
# per request through the m/d/ng/pp query parameters in mainpage.get.
MONTH = "jun09"
NGRAM = "3"
PROB = "jp"  # probability type -- presumably "joint probability"; confirm with service docs
DATASET = "bing-body"
# Lookup endpoint (one score per posted line; scores appear to be log10
# probabilities, since mainpage raises 10**score) and generation endpoint.
REQUESTURL = "http://web-ngram.research.microsoft.com/rest/lookup.svc/"+DATASET+"/"+MONTH+"/"+NGRAM+"/"+PROB+"?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e"
GENURL = "http://web-ngram.research.microsoft.com/rest/lookup.svc/"+DATASET+"/"+MONTH+"/"+NGRAM+"/gen?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e"
class lexicon0(db.Model):
    """Datastore model pairing a word with a list of known variants.

    NOTE(review): declared (and keyed by lexicon_key) but never read or
    written in the visible handlers -- possibly dead code.
    """
    word = db.StringProperty(required = True)
    # indexed=False: stored but not queryable, which reduces write cost.
    known = db.StringListProperty(indexed = False)
def lexicon_key(lexicon_name=None):
    """Return the datastore ancestor key for `lexicon_name` ('default' if falsy)."""
    name = lexicon_name if lexicon_name else 'default'
    return db.Key.from_path('lexicon0', name)
def combination(wordlist,t):
    """Merge adjacent tokens while doing so raises the query's score.

    Builds every single-merge variant of the query (tokens i and i+1 joined
    with no space) plus the query itself, scores them all with getjp1, and
    -- if any variant outranks the unmerged query -- applies every
    better-scoring merge right-to-left and recurses on the merged list.

    Args:
        wordlist: list of query tokens.  NOTE(review): mutated in place by
            the merge step below (`cache` aliases it).
        t: pass 0 on the first (top-level) call; it is then replaced by the
            original query's score and threaded through the recursion.

    Returns:
        (best_query, best_score, original_query_score) tuple.
    """
    tempc = wordlist
    # All single-merge variants of the token list.
    combinationqueryset = [listtostr(tempc[:i] +
                           ["%s%s"%(tempc[i],tempc[i+1])] +
                           tempc[i+2:] ) for i in range(0, len(tempc)-1)]
    cquery = listtostr(tempc)
    combinationqueryset.append(cquery)
    # (query, score) pairs sorted best-first by the service's scores.
    results = getjp1('',combinationqueryset,'')
    dictionary = dict(results)
    # Rank of the unmerged query within the sorted results.
    x = results.index((cquery,dictionary[cquery]))
    # First call only: remember the original query's score for the return.
    if (t==0): t = dictionary[cquery]
    if (results[0][0] == cquery):
        # No merge beats the current query -- recursion bottoms out here.
        return (cquery,results[0][1],t)
    else:
        # NOTE(review): dictionary and x are recomputed identically here;
        # redundant but harmless.
        dictionary = dict(results)
        x = results.index((cquery,dictionary[cquery]))
        # Variant-list indices of every merge that outranked the query,
        # applied in descending order so deletions don't shift later slots.
        y = list()
        for i in range(x):
            y.append(combinationqueryset.index(results[i][0]))
        y.sort(reverse = True)
        cache = wordlist
        for z in y:
            cache[z] += cache[z+1]
            del cache[z+1]
        return combination(cache,t)
def spacesplits(wordlist):
    """Repeatedly split one token at the single position that most improves
    the query's score; stop when no split beats the current query.

    Returns a (query, score) pair for the best variant found.
    """
    query = listtostr(wordlist)
    # Every variant obtained by cutting exactly one token in two, followed
    # by the unmodified query itself.
    candidates = []
    for idx in range(len(wordlist)):
        token = wordlist[idx]
        for cut in range(1, len(token)):
            variant = wordlist[:idx] + [token[:cut], token[cut:]] + wordlist[idx + 1:]
            candidates.append(listtostr(variant))
    candidates.append(query)
    ranked = getjp1('', candidates, '')
    best_query, best_score = ranked[0]
    if best_query == query:
        # The unsplit query already scores best -- done.
        return (query, best_score)
    # A split won; recurse on its tokenisation.
    return spacesplits(best_query.split())
def getjp(before,wordlist,after):
    """Score each candidate in `wordlist`, framed by `before`/`after`
    context, via the n-gram lookup service; normalise by word count.

    Each candidate becomes one request line.  NOTE(review): because the
    context strings are '' at the call sites, every line after the first
    is posted with a leading space -- apparently accepted by the service;
    do not "clean" this without checking the payload format.

    Returns (candidate, score) pairs sorted best-first.  Duplicate
    candidates collapse in the intermediate dict.
    """
    global REQUESTURL
    wordli = wordlist
    string = ''
    for x in wordli:
        string += before+" "+str(x)+" "+after+"\n"
    # strip() removes only the outermost whitespace (first line's leading
    # space and the final newline); interior lines keep leading spaces.
    string = string.strip()
    jps = list()
    # Service returns one score per posted line, whitespace-separated.
    jps = urllib2.urlopen(
        urllib2.Request(REQUESTURL,str(string))).read().split()
    for i in range(len(jps)):
        # Normalise by the count of words longer than one character so
        # longer candidates are not penalised for having more terms.
        jps[i] = float(jps[i])/(querylength(wordli[i]))
    dictionary = dict(zip(wordli,jps))
    return sorted(dictionary.iteritems(), key = lambda entity:entity[1], reverse = True)
def getjp1(before,wordlist,after):
    """Score each candidate in `wordlist` via the n-gram lookup service,
    with NO word-count normalisation (contrast getjp).

    Same payload quirk as getjp: with '' context, every posted line after
    the first carries a leading space.  Returns (candidate, score) pairs
    sorted best-first; duplicate candidates collapse in the dict.
    """
    global REQUESTURL
    string = ''
    for x in wordlist:
        string += before+" "+str(x)+" "+after+"\n"
    # Only outermost whitespace removed; interior leading spaces survive.
    string = string.strip()
    jps = list()
    # One raw score per posted line, whitespace-separated.
    jps = urllib2.urlopen(
        urllib2.Request(REQUESTURL,str(string))).read().split()
    for i in range(len(jps)):
        jps[i] = float(jps[i])
    dictionary = dict(zip(wordlist,jps))
    return sorted(dictionary.iteritems(), key = lambda entity:entity[1], reverse = True)
class mainpage(webapp.RequestHandler):
    """Spelling-correction handler (served at /maintest -- see the routing
    table comment): scores merge/split/per-word-correction variants of ?q=
    and writes "candidate<TAB>probability" lines, best-first.
    """
    def get(self):
        # Optional query params override the module-level service config;
        # NOTE(review): these mutate process-wide globals, so one request
        # can change settings for all subsequent requests.
        global MONTH,DATASET,NGRAM,PROB,REQUESTURL,GENURL
        if len(self.request.get('m')):
            MONTH = str(self.request.get('m'))
        if len(self.request.get('d')):
            DATASET = str(self.request.get('d'))
        if len(self.request.get('ng')):
            NGRAM = str(self.request.get('ng'))
        if len(self.request.get('pp')):
            PROB = str(self.request.get('pp'))
        # Rebuild both endpoint URLs from the (possibly updated) settings.
        REQUESTURL = "http://web-ngram.research.microsoft.com/rest/lookup.svc/"+DATASET+"/"+MONTH+"/"+NGRAM+"/"+PROB+"?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e"
        GENURL = "http://web-ngram.research.microsoft.com/rest/lookup.svc/"+DATASET+"/"+MONTH+"/"+NGRAM+"/gen?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e"
        query = str(self.request.get('q'))
        wordlist = query.strip().split()
        dictionary = dict()
        # Each candidate generator is wrapped in a bare except that falls
        # back to the raw query, so a service failure degrades gracefully
        # instead of 500ing.  NOTE(review): bare excepts hide real bugs.
        try:
            # Best adjacent-token-merge variant (combination mutates
            # wordlist in place, hence the re-split below).
            cquery = combination(wordlist,0)[0]
        except:
            cquery = query
        try:
            wordlist = query.strip().split()
            # Best token-split variant.
            squery = spacesplits(wordlist)[0]
        except:
            squery = query
        # Gather per-word spell-correction variants for the original query
        # and for each distinct merge/split winner; values are placeholder
        # zeros -- only the keys (candidate strings) matter.
        try: dictionary.update(getdictionary(wordlist))
        except:
            dictionary.update({query:0})
        try:
            if (query != cquery): dictionary.update(getdictionary(cquery.split()))
        except: dictionary.update({cquery:0})
        try:
            if (query != squery): dictionary.update(getdictionary(squery.split()))
        except: dictionary.update({squery:0})
        finallist = dictionary.keys()
        self.response.headers['Content-Type'] = 'text/plain'
        try:
            # Score all candidates, convert log10 scores back to linear
            # probabilities, normalise to sum to 1, and emit one
            # "candidate<TAB>probability" line each.
            result = getjp('',finallist,'')
            final = list()
            for i in range(len(result)):
                final.append(10**((result[i][1])))
            printresult = normalize(final)
            for i in range(len(printresult)):
                self.response.out.write(str(result[i][0])+"\t"+printresult[i]+"\n")
        except:
            # Last-ditch fallback: echo the query with probability 1.
            self.response.out.write(query+"\t"+str(1))
class maintest(webapp.RequestHandler):
    """Debug handler that echoes the currently configured service URLs
    (served at /mainpage -- see the routing table comment)."""

    def get(self):
        global MONTH, DATASET, NGRAM, PROB, REQUESTURL, GENURL
        self.response.headers['Content-Type'] = 'text/plain'
        # Lookup URL on its own line, then the generation URL.
        self.response.out.write(REQUESTURL + "\n")
        self.response.out.write(GENURL)
def getdictionary(wordelist):
    """Fan out one async spell-check request per word and collect candidate
    query rewrites.

    For each word, POSTs the word plus up to three words of context on each
    side to the external wordspellcheck service.  For every word whose
    returned correction differs from the original, builds candidate queries
    that substitute a growing slice of corrections starting at that word.

    Returns:
        dict mapping candidate query string -> 0 (placeholder; callers use
        only the keys).
    """
    global MONTH,DATASET,NGRAM,PROB
    dictionaryy = dict()
    rpcs = []
    # Issue all fetches asynchronously before reading any result.
    for i in range(len(wordelist)):
        # Context window: up to 3 words before and after word i.
        if i<3: t=0
        else: t = i-3
        form_fields = {
            "word": wordelist[i],
            "before": listtostr(wordelist[t:i]),
            "after": listtostr(wordelist[i+1:i+4]),
            "m": MONTH,
            "d": DATASET,
            "ng": NGRAM,
            "pp": PROB
        }
        formdata = urllib.urlencode(form_fields)
        rpc = urlfetch.create_rpc()
        url = "http://timetest.forbackend.appspot.com/wordspellcheck"
        #rpc.callback = create_callback(rpc)
        urlfetch.make_fetch_call(rpc,
                                 url,
                                 payload = formdata,
                                 method = urlfetch.POST)
        rpcs.append(rpc)
    # Block on each RPC in issue order; resultts[i] is the correction for
    # wordelist[i] (the raw response body).
    resultts = list()
    for rpc in rpcs:
        result = rpc.get_result()
        resultts.append(result.content)
    #self.response.out.write(results)
    #self.response.out.write(wordee)
    # Always include the unmodified query as a candidate.
    dictionaryy[listtostr(wordelist)] = 0
    for i in range(len(wordelist)):
        if resultts[i] == wordelist[i]: continue
        else:
            # Substitute corrections for the slice [i, j) for every j >= i,
            # producing progressively longer corrected prefixes.
            for j in range(i,len(wordelist)+1):
                pp = listtostr(wordelist[:i]+resultts[i:j]+wordelist[j:])
                dictionaryy[pp] = 0
    return dictionaryy
class splittest(webapp.RequestHandler):
    """Debug handler: runs the adjacent-token merge pass on ?q= and dumps
    the resulting (query, score, original_score) tuple."""

    def get(self):
        raw_query = self.request.get('q')
        tokens = raw_query.split()
        merged = combination(tokens, 0)
        self.response.out.write(merged)
def querylength(query):
    """Return the number of words in `query` longer than one character,
    never less than 1.

    Used by getjp to normalise a candidate's joint score without letting
    single-letter tokens dilute the per-word average.
    """
    significant = sum(1 for token in query.split() if len(token) > 1)
    return significant if significant else 1
def listtostr(wordlist):
    """Join `wordlist` into a single space-separated string.

    Exactly equivalent to the original concatenate-then-strip loop: one
    space between tokens, with any leading whitespace of the first token
    and trailing whitespace of the last stripped from the ends.
    """
    # str.join is linear; the original repeated `+=` loop was quadratic.
    return " ".join(wordlist).strip()
#def create_callback(rpc):
def normalize(problist):
    """Scale `problist` so the values sum to 1 and format each as a string
    rounded to three decimal places.

    Args:
        problist: non-empty list of non-negative numbers (linear
            probabilities); must not sum to zero.

    Returns:
        List of strings in the same order as `problist`.

    Raises:
        ZeroDivisionError: if the values sum to zero (unchanged from the
            original behaviour; callers pass 10**score values, so tot > 0).
    """
    # float() guards against integer truncation under Python 2 division
    # if a caller ever passes ints.
    tot = float(sum(problist))
    return [str(round(p / tot, 3)) for p in problist]
# URL routing.  NOTE(review): the handler names are deliberately swapped --
# per the author's inline comment, /mainpage (the official submission
# endpoint) is served by maintest and /maintest by mainpage.  Confirm
# before "fixing" the apparent mismatch.
application = webapp.WSGIApplication([
    ('/mainpage',maintest),#### the main speller is in main page web handler as i submitted maintest as the official submission i changed this
    ('/maintest',mainpage),
    ('/split',splittest)],
    debug = True)
def main():
    # CGI-style entry point: hand the WSGI application to the App Engine
    # runtime helper.
    run_wsgi_app(application)
if __name__ == '__main__':
    main()
|
normal
|
{
"blob_id": "c8a6a8633f863e0350157346106a747096d26939",
"index": 9912,
"step-1": "<mask token>\n\n\nclass lexicon0(db.Model):\n word = db.StringProperty(required=True)\n known = db.StringListProperty(indexed=False)\n\n\n<mask token>\n\n\ndef getjp(before, wordlist, after):\n global REQUESTURL\n wordli = wordlist\n string = ''\n for x in wordli:\n string += before + ' ' + str(x) + ' ' + after + '\\n'\n string = string.strip()\n jps = list()\n jps = urllib2.urlopen(urllib2.Request(REQUESTURL, str(string))).read(\n ).split()\n for i in range(len(jps)):\n jps[i] = float(jps[i]) / querylength(wordli[i])\n dictionary = dict(zip(wordli, jps))\n return sorted(dictionary.iteritems(), key=lambda entity: entity[1],\n reverse=True)\n\n\ndef getjp1(before, wordlist, after):\n global REQUESTURL\n string = ''\n for x in wordlist:\n string += before + ' ' + str(x) + ' ' + after + '\\n'\n string = string.strip()\n jps = list()\n jps = urllib2.urlopen(urllib2.Request(REQUESTURL, str(string))).read(\n ).split()\n for i in range(len(jps)):\n jps[i] = float(jps[i])\n dictionary = dict(zip(wordlist, jps))\n return sorted(dictionary.iteritems(), key=lambda entity: entity[1],\n reverse=True)\n\n\nclass mainpage(webapp.RequestHandler):\n\n def get(self):\n global MONTH, DATASET, NGRAM, PROB, REQUESTURL, GENURL\n if len(self.request.get('m')):\n MONTH = str(self.request.get('m'))\n if len(self.request.get('d')):\n DATASET = str(self.request.get('d'))\n if len(self.request.get('ng')):\n NGRAM = str(self.request.get('ng'))\n if len(self.request.get('pp')):\n PROB = str(self.request.get('pp'))\n REQUESTURL = (\n 'http://web-ngram.research.microsoft.com/rest/lookup.svc/' +\n DATASET + '/' + MONTH + '/' + NGRAM + '/' + PROB +\n '?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e')\n GENURL = (\n 'http://web-ngram.research.microsoft.com/rest/lookup.svc/' +\n DATASET + '/' + MONTH + '/' + NGRAM +\n '/gen?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e')\n query = str(self.request.get('q'))\n wordlist = query.strip().split()\n dictionary = dict()\n try:\n cquery = combination(wordlist, 
0)[0]\n except:\n cquery = query\n try:\n wordlist = query.strip().split()\n squery = spacesplits(wordlist)[0]\n except:\n squery = query\n try:\n dictionary.update(getdictionary(wordlist))\n except:\n dictionary.update({query: 0})\n try:\n if query != cquery:\n dictionary.update(getdictionary(cquery.split()))\n except:\n dictionary.update({cquery: 0})\n try:\n if query != squery:\n dictionary.update(getdictionary(squery.split()))\n except:\n dictionary.update({squery: 0})\n finallist = dictionary.keys()\n self.response.headers['Content-Type'] = 'text/plain'\n try:\n result = getjp('', finallist, '')\n final = list()\n for i in range(len(result)):\n final.append(10 ** result[i][1])\n printresult = normalize(final)\n for i in range(len(printresult)):\n self.response.out.write(str(result[i][0]) + '\\t' +\n printresult[i] + '\\n')\n except:\n self.response.out.write(query + '\\t' + str(1))\n\n\nclass maintest(webapp.RequestHandler):\n\n def get(self):\n global MONTH, DATASET, NGRAM, PROB, REQUESTURL, GENURL\n self.response.headers['Content-Type'] = 'text/plain'\n self.response.out.write(REQUESTURL + '\\n')\n self.response.out.write(GENURL)\n\n\n<mask token>\n\n\nclass splittest(webapp.RequestHandler):\n\n def get(self):\n query = self.request.get('q')\n wordlist = query.split()\n splitted = combination(wordlist, 0)\n self.response.out.write(splitted)\n\n\n<mask token>\n\n\ndef listtostr(wordlist):\n string = ''\n for word in wordlist:\n string += word + ' '\n string = string.strip()\n return string\n\n\ndef normalize(problist):\n tot = 0\n for x in problist:\n tot += x\n returnlist = list()\n for i in range(len(problist)):\n returnlist.append(str(round(problist[i] / tot, 3)))\n return returnlist\n\n\n<mask token>\n\n\ndef main():\n run_wsgi_app(application)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass lexicon0(db.Model):\n word = db.StringProperty(required=True)\n known = db.StringListProperty(indexed=False)\n\n\ndef lexicon_key(lexicon_name=None):\n return db.Key.from_path('lexicon0', lexicon_name or 'default')\n\n\n<mask token>\n\n\ndef getjp(before, wordlist, after):\n global REQUESTURL\n wordli = wordlist\n string = ''\n for x in wordli:\n string += before + ' ' + str(x) + ' ' + after + '\\n'\n string = string.strip()\n jps = list()\n jps = urllib2.urlopen(urllib2.Request(REQUESTURL, str(string))).read(\n ).split()\n for i in range(len(jps)):\n jps[i] = float(jps[i]) / querylength(wordli[i])\n dictionary = dict(zip(wordli, jps))\n return sorted(dictionary.iteritems(), key=lambda entity: entity[1],\n reverse=True)\n\n\ndef getjp1(before, wordlist, after):\n global REQUESTURL\n string = ''\n for x in wordlist:\n string += before + ' ' + str(x) + ' ' + after + '\\n'\n string = string.strip()\n jps = list()\n jps = urllib2.urlopen(urllib2.Request(REQUESTURL, str(string))).read(\n ).split()\n for i in range(len(jps)):\n jps[i] = float(jps[i])\n dictionary = dict(zip(wordlist, jps))\n return sorted(dictionary.iteritems(), key=lambda entity: entity[1],\n reverse=True)\n\n\nclass mainpage(webapp.RequestHandler):\n\n def get(self):\n global MONTH, DATASET, NGRAM, PROB, REQUESTURL, GENURL\n if len(self.request.get('m')):\n MONTH = str(self.request.get('m'))\n if len(self.request.get('d')):\n DATASET = str(self.request.get('d'))\n if len(self.request.get('ng')):\n NGRAM = str(self.request.get('ng'))\n if len(self.request.get('pp')):\n PROB = str(self.request.get('pp'))\n REQUESTURL = (\n 'http://web-ngram.research.microsoft.com/rest/lookup.svc/' +\n DATASET + '/' + MONTH + '/' + NGRAM + '/' + PROB +\n '?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e')\n GENURL = (\n 'http://web-ngram.research.microsoft.com/rest/lookup.svc/' +\n DATASET + '/' + MONTH + '/' + NGRAM +\n '/gen?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e')\n query = 
str(self.request.get('q'))\n wordlist = query.strip().split()\n dictionary = dict()\n try:\n cquery = combination(wordlist, 0)[0]\n except:\n cquery = query\n try:\n wordlist = query.strip().split()\n squery = spacesplits(wordlist)[0]\n except:\n squery = query\n try:\n dictionary.update(getdictionary(wordlist))\n except:\n dictionary.update({query: 0})\n try:\n if query != cquery:\n dictionary.update(getdictionary(cquery.split()))\n except:\n dictionary.update({cquery: 0})\n try:\n if query != squery:\n dictionary.update(getdictionary(squery.split()))\n except:\n dictionary.update({squery: 0})\n finallist = dictionary.keys()\n self.response.headers['Content-Type'] = 'text/plain'\n try:\n result = getjp('', finallist, '')\n final = list()\n for i in range(len(result)):\n final.append(10 ** result[i][1])\n printresult = normalize(final)\n for i in range(len(printresult)):\n self.response.out.write(str(result[i][0]) + '\\t' +\n printresult[i] + '\\n')\n except:\n self.response.out.write(query + '\\t' + str(1))\n\n\nclass maintest(webapp.RequestHandler):\n\n def get(self):\n global MONTH, DATASET, NGRAM, PROB, REQUESTURL, GENURL\n self.response.headers['Content-Type'] = 'text/plain'\n self.response.out.write(REQUESTURL + '\\n')\n self.response.out.write(GENURL)\n\n\n<mask token>\n\n\nclass splittest(webapp.RequestHandler):\n\n def get(self):\n query = self.request.get('q')\n wordlist = query.split()\n splitted = combination(wordlist, 0)\n self.response.out.write(splitted)\n\n\n<mask token>\n\n\ndef listtostr(wordlist):\n string = ''\n for word in wordlist:\n string += word + ' '\n string = string.strip()\n return string\n\n\ndef normalize(problist):\n tot = 0\n for x in problist:\n tot += x\n returnlist = list()\n for i in range(len(problist)):\n returnlist.append(str(round(problist[i] / tot, 3)))\n return returnlist\n\n\n<mask token>\n\n\ndef main():\n run_wsgi_app(application)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass lexicon0(db.Model):\n word = db.StringProperty(required=True)\n known = db.StringListProperty(indexed=False)\n\n\ndef lexicon_key(lexicon_name=None):\n return db.Key.from_path('lexicon0', lexicon_name or 'default')\n\n\ndef combination(wordlist, t):\n tempc = wordlist\n combinationqueryset = [listtostr(tempc[:i] + ['%s%s' % (tempc[i], tempc\n [i + 1])] + tempc[i + 2:]) for i in range(0, len(tempc) - 1)]\n cquery = listtostr(tempc)\n combinationqueryset.append(cquery)\n results = getjp1('', combinationqueryset, '')\n dictionary = dict(results)\n x = results.index((cquery, dictionary[cquery]))\n if t == 0:\n t = dictionary[cquery]\n if results[0][0] == cquery:\n return cquery, results[0][1], t\n else:\n dictionary = dict(results)\n x = results.index((cquery, dictionary[cquery]))\n y = list()\n for i in range(x):\n y.append(combinationqueryset.index(results[i][0]))\n y.sort(reverse=True)\n cache = wordlist\n for z in y:\n cache[z] += cache[z + 1]\n del cache[z + 1]\n return combination(cache, t)\n\n\n<mask token>\n\n\ndef getjp(before, wordlist, after):\n global REQUESTURL\n wordli = wordlist\n string = ''\n for x in wordli:\n string += before + ' ' + str(x) + ' ' + after + '\\n'\n string = string.strip()\n jps = list()\n jps = urllib2.urlopen(urllib2.Request(REQUESTURL, str(string))).read(\n ).split()\n for i in range(len(jps)):\n jps[i] = float(jps[i]) / querylength(wordli[i])\n dictionary = dict(zip(wordli, jps))\n return sorted(dictionary.iteritems(), key=lambda entity: entity[1],\n reverse=True)\n\n\ndef getjp1(before, wordlist, after):\n global REQUESTURL\n string = ''\n for x in wordlist:\n string += before + ' ' + str(x) + ' ' + after + '\\n'\n string = string.strip()\n jps = list()\n jps = urllib2.urlopen(urllib2.Request(REQUESTURL, str(string))).read(\n ).split()\n for i in range(len(jps)):\n jps[i] = float(jps[i])\n dictionary = dict(zip(wordlist, jps))\n return sorted(dictionary.iteritems(), key=lambda entity: 
entity[1],\n reverse=True)\n\n\nclass mainpage(webapp.RequestHandler):\n\n def get(self):\n global MONTH, DATASET, NGRAM, PROB, REQUESTURL, GENURL\n if len(self.request.get('m')):\n MONTH = str(self.request.get('m'))\n if len(self.request.get('d')):\n DATASET = str(self.request.get('d'))\n if len(self.request.get('ng')):\n NGRAM = str(self.request.get('ng'))\n if len(self.request.get('pp')):\n PROB = str(self.request.get('pp'))\n REQUESTURL = (\n 'http://web-ngram.research.microsoft.com/rest/lookup.svc/' +\n DATASET + '/' + MONTH + '/' + NGRAM + '/' + PROB +\n '?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e')\n GENURL = (\n 'http://web-ngram.research.microsoft.com/rest/lookup.svc/' +\n DATASET + '/' + MONTH + '/' + NGRAM +\n '/gen?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e')\n query = str(self.request.get('q'))\n wordlist = query.strip().split()\n dictionary = dict()\n try:\n cquery = combination(wordlist, 0)[0]\n except:\n cquery = query\n try:\n wordlist = query.strip().split()\n squery = spacesplits(wordlist)[0]\n except:\n squery = query\n try:\n dictionary.update(getdictionary(wordlist))\n except:\n dictionary.update({query: 0})\n try:\n if query != cquery:\n dictionary.update(getdictionary(cquery.split()))\n except:\n dictionary.update({cquery: 0})\n try:\n if query != squery:\n dictionary.update(getdictionary(squery.split()))\n except:\n dictionary.update({squery: 0})\n finallist = dictionary.keys()\n self.response.headers['Content-Type'] = 'text/plain'\n try:\n result = getjp('', finallist, '')\n final = list()\n for i in range(len(result)):\n final.append(10 ** result[i][1])\n printresult = normalize(final)\n for i in range(len(printresult)):\n self.response.out.write(str(result[i][0]) + '\\t' +\n printresult[i] + '\\n')\n except:\n self.response.out.write(query + '\\t' + str(1))\n\n\nclass maintest(webapp.RequestHandler):\n\n def get(self):\n global MONTH, DATASET, NGRAM, PROB, REQUESTURL, GENURL\n self.response.headers['Content-Type'] = 'text/plain'\n 
self.response.out.write(REQUESTURL + '\\n')\n self.response.out.write(GENURL)\n\n\ndef getdictionary(wordelist):\n global MONTH, DATASET, NGRAM, PROB\n dictionaryy = dict()\n rpcs = []\n for i in range(len(wordelist)):\n if i < 3:\n t = 0\n else:\n t = i - 3\n form_fields = {'word': wordelist[i], 'before': listtostr(wordelist[\n t:i]), 'after': listtostr(wordelist[i + 1:i + 4]), 'm': MONTH,\n 'd': DATASET, 'ng': NGRAM, 'pp': PROB}\n formdata = urllib.urlencode(form_fields)\n rpc = urlfetch.create_rpc()\n url = 'http://timetest.forbackend.appspot.com/wordspellcheck'\n urlfetch.make_fetch_call(rpc, url, payload=formdata, method=\n urlfetch.POST)\n rpcs.append(rpc)\n resultts = list()\n for rpc in rpcs:\n result = rpc.get_result()\n resultts.append(result.content)\n dictionaryy[listtostr(wordelist)] = 0\n for i in range(len(wordelist)):\n if resultts[i] == wordelist[i]:\n continue\n else:\n for j in range(i, len(wordelist) + 1):\n pp = listtostr(wordelist[:i] + resultts[i:j] + wordelist[j:])\n dictionaryy[pp] = 0\n return dictionaryy\n\n\nclass splittest(webapp.RequestHandler):\n\n def get(self):\n query = self.request.get('q')\n wordlist = query.split()\n splitted = combination(wordlist, 0)\n self.response.out.write(splitted)\n\n\ndef querylength(query):\n liste = query.split()\n counte = 0\n for x in liste:\n if len(x) > 1:\n counte += 1\n if counte == 0:\n return 1\n else:\n return counte\n\n\ndef listtostr(wordlist):\n string = ''\n for word in wordlist:\n string += word + ' '\n string = string.strip()\n return string\n\n\ndef normalize(problist):\n tot = 0\n for x in problist:\n tot += x\n returnlist = list()\n for i in range(len(problist)):\n returnlist.append(str(round(problist[i] / tot, 3)))\n return returnlist\n\n\n<mask token>\n\n\ndef main():\n run_wsgi_app(application)\n\n\n<mask token>\n",
"step-4": "import re\nimport cgi\nimport os\nimport urllib\nimport urllib2\nfrom time import sleep\nfrom google.appengine.api import taskqueue\nfrom google.appengine.ext import webapp\nfrom google.appengine.ext.webapp.util import run_wsgi_app\nfrom google.appengine.ext import db\nfrom google.appengine.api import urlfetch\nfrom google.appengine.api import backends\nfrom google.appengine.api import logservice\nlogservice.AUTOFLUSH_EVERY_SECONDS = None\nlogservice.AUTOFLUSH_EVERY_BYTES = None\nlogservice.AUTOFLUSH_ENABLED = False\nMONTH = 'jun09'\nNGRAM = '3'\nPROB = 'jp'\nDATASET = 'bing-body'\nREQUESTURL = ('http://web-ngram.research.microsoft.com/rest/lookup.svc/' +\n DATASET + '/' + MONTH + '/' + NGRAM + '/' + PROB +\n '?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e')\nGENURL = ('http://web-ngram.research.microsoft.com/rest/lookup.svc/' +\n DATASET + '/' + MONTH + '/' + NGRAM +\n '/gen?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e')\n\n\nclass lexicon0(db.Model):\n word = db.StringProperty(required=True)\n known = db.StringListProperty(indexed=False)\n\n\ndef lexicon_key(lexicon_name=None):\n return db.Key.from_path('lexicon0', lexicon_name or 'default')\n\n\ndef combination(wordlist, t):\n tempc = wordlist\n combinationqueryset = [listtostr(tempc[:i] + ['%s%s' % (tempc[i], tempc\n [i + 1])] + tempc[i + 2:]) for i in range(0, len(tempc) - 1)]\n cquery = listtostr(tempc)\n combinationqueryset.append(cquery)\n results = getjp1('', combinationqueryset, '')\n dictionary = dict(results)\n x = results.index((cquery, dictionary[cquery]))\n if t == 0:\n t = dictionary[cquery]\n if results[0][0] == cquery:\n return cquery, results[0][1], t\n else:\n dictionary = dict(results)\n x = results.index((cquery, dictionary[cquery]))\n y = list()\n for i in range(x):\n y.append(combinationqueryset.index(results[i][0]))\n y.sort(reverse=True)\n cache = wordlist\n for z in y:\n cache[z] += cache[z + 1]\n del cache[z + 1]\n return combination(cache, t)\n\n\ndef spacesplits(wordlist):\n temps = 
wordlist\n query = listtostr(temps)\n strings = []\n for i in range(len(temps)):\n for y in range(1, len(temps[i])):\n strings.append(listtostr(temps[:i] + list([temps[i][:y], temps[\n i][y:]]) + temps[i + 1:]))\n strings.append(query)\n results = getjp1('', strings, '')\n if results[0][0] == query:\n return query, results[0][1]\n else:\n return spacesplits(results[0][0].split())\n\n\ndef getjp(before, wordlist, after):\n global REQUESTURL\n wordli = wordlist\n string = ''\n for x in wordli:\n string += before + ' ' + str(x) + ' ' + after + '\\n'\n string = string.strip()\n jps = list()\n jps = urllib2.urlopen(urllib2.Request(REQUESTURL, str(string))).read(\n ).split()\n for i in range(len(jps)):\n jps[i] = float(jps[i]) / querylength(wordli[i])\n dictionary = dict(zip(wordli, jps))\n return sorted(dictionary.iteritems(), key=lambda entity: entity[1],\n reverse=True)\n\n\ndef getjp1(before, wordlist, after):\n global REQUESTURL\n string = ''\n for x in wordlist:\n string += before + ' ' + str(x) + ' ' + after + '\\n'\n string = string.strip()\n jps = list()\n jps = urllib2.urlopen(urllib2.Request(REQUESTURL, str(string))).read(\n ).split()\n for i in range(len(jps)):\n jps[i] = float(jps[i])\n dictionary = dict(zip(wordlist, jps))\n return sorted(dictionary.iteritems(), key=lambda entity: entity[1],\n reverse=True)\n\n\nclass mainpage(webapp.RequestHandler):\n\n def get(self):\n global MONTH, DATASET, NGRAM, PROB, REQUESTURL, GENURL\n if len(self.request.get('m')):\n MONTH = str(self.request.get('m'))\n if len(self.request.get('d')):\n DATASET = str(self.request.get('d'))\n if len(self.request.get('ng')):\n NGRAM = str(self.request.get('ng'))\n if len(self.request.get('pp')):\n PROB = str(self.request.get('pp'))\n REQUESTURL = (\n 'http://web-ngram.research.microsoft.com/rest/lookup.svc/' +\n DATASET + '/' + MONTH + '/' + NGRAM + '/' + PROB +\n '?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e')\n GENURL = (\n 'http://web-ngram.research.microsoft.com/rest/lookup.svc/' +\n 
DATASET + '/' + MONTH + '/' + NGRAM +\n '/gen?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e')\n query = str(self.request.get('q'))\n wordlist = query.strip().split()\n dictionary = dict()\n try:\n cquery = combination(wordlist, 0)[0]\n except:\n cquery = query\n try:\n wordlist = query.strip().split()\n squery = spacesplits(wordlist)[0]\n except:\n squery = query\n try:\n dictionary.update(getdictionary(wordlist))\n except:\n dictionary.update({query: 0})\n try:\n if query != cquery:\n dictionary.update(getdictionary(cquery.split()))\n except:\n dictionary.update({cquery: 0})\n try:\n if query != squery:\n dictionary.update(getdictionary(squery.split()))\n except:\n dictionary.update({squery: 0})\n finallist = dictionary.keys()\n self.response.headers['Content-Type'] = 'text/plain'\n try:\n result = getjp('', finallist, '')\n final = list()\n for i in range(len(result)):\n final.append(10 ** result[i][1])\n printresult = normalize(final)\n for i in range(len(printresult)):\n self.response.out.write(str(result[i][0]) + '\\t' +\n printresult[i] + '\\n')\n except:\n self.response.out.write(query + '\\t' + str(1))\n\n\nclass maintest(webapp.RequestHandler):\n\n def get(self):\n global MONTH, DATASET, NGRAM, PROB, REQUESTURL, GENURL\n self.response.headers['Content-Type'] = 'text/plain'\n self.response.out.write(REQUESTURL + '\\n')\n self.response.out.write(GENURL)\n\n\ndef getdictionary(wordelist):\n global MONTH, DATASET, NGRAM, PROB\n dictionaryy = dict()\n rpcs = []\n for i in range(len(wordelist)):\n if i < 3:\n t = 0\n else:\n t = i - 3\n form_fields = {'word': wordelist[i], 'before': listtostr(wordelist[\n t:i]), 'after': listtostr(wordelist[i + 1:i + 4]), 'm': MONTH,\n 'd': DATASET, 'ng': NGRAM, 'pp': PROB}\n formdata = urllib.urlencode(form_fields)\n rpc = urlfetch.create_rpc()\n url = 'http://timetest.forbackend.appspot.com/wordspellcheck'\n urlfetch.make_fetch_call(rpc, url, payload=formdata, method=\n urlfetch.POST)\n rpcs.append(rpc)\n resultts = list()\n for rpc 
in rpcs:\n result = rpc.get_result()\n resultts.append(result.content)\n dictionaryy[listtostr(wordelist)] = 0\n for i in range(len(wordelist)):\n if resultts[i] == wordelist[i]:\n continue\n else:\n for j in range(i, len(wordelist) + 1):\n pp = listtostr(wordelist[:i] + resultts[i:j] + wordelist[j:])\n dictionaryy[pp] = 0\n return dictionaryy\n\n\nclass splittest(webapp.RequestHandler):\n\n def get(self):\n query = self.request.get('q')\n wordlist = query.split()\n splitted = combination(wordlist, 0)\n self.response.out.write(splitted)\n\n\ndef querylength(query):\n liste = query.split()\n counte = 0\n for x in liste:\n if len(x) > 1:\n counte += 1\n if counte == 0:\n return 1\n else:\n return counte\n\n\ndef listtostr(wordlist):\n string = ''\n for word in wordlist:\n string += word + ' '\n string = string.strip()\n return string\n\n\ndef normalize(problist):\n tot = 0\n for x in problist:\n tot += x\n returnlist = list()\n for i in range(len(problist)):\n returnlist.append(str(round(problist[i] / tot, 3)))\n return returnlist\n\n\napplication = webapp.WSGIApplication([('/mainpage', maintest), ('/maintest',\n mainpage), ('/split', splittest)], debug=True)\n\n\ndef main():\n run_wsgi_app(application)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "\n\nimport re\nimport cgi\nimport os\nimport urllib\nimport urllib2\n\nfrom time import sleep\n\nfrom google.appengine.api import taskqueue\nfrom google.appengine.ext import webapp\nfrom google.appengine.ext.webapp.util import run_wsgi_app\nfrom google.appengine.ext import db\nfrom google.appengine.api import urlfetch\nfrom google.appengine.api import backends\nfrom google.appengine.api import logservice\nlogservice.AUTOFLUSH_EVERY_SECONDS = None\nlogservice.AUTOFLUSH_EVERY_BYTES = None\nlogservice.AUTOFLUSH_ENABLED = False\n\nMONTH = \"jun09\"\nNGRAM = \"3\"\nPROB = \"jp\"\nDATASET = \"bing-body\"\nREQUESTURL = \"http://web-ngram.research.microsoft.com/rest/lookup.svc/\"+DATASET+\"/\"+MONTH+\"/\"+NGRAM+\"/\"+PROB+\"?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e\"\nGENURL = \"http://web-ngram.research.microsoft.com/rest/lookup.svc/\"+DATASET+\"/\"+MONTH+\"/\"+NGRAM+\"/gen?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e\"\n\n\nclass lexicon0(db.Model):\n word = db.StringProperty(required = True)\n known = db.StringListProperty(indexed = False)\n\ndef lexicon_key(lexicon_name=None):\n return db.Key.from_path('lexicon0', lexicon_name or 'default')\n\n\ndef combination(wordlist,t):#argument t is to notify that it is the main query while using cobination for first time\n tempc = wordlist\n combinationqueryset = [listtostr(tempc[:i] +\n [\"%s%s\"%(tempc[i],tempc[i+1])] +\n tempc[i+2:] ) for i in range(0, len(tempc)-1)]\n cquery = listtostr(tempc)\n combinationqueryset.append(cquery)\n results = getjp1('',combinationqueryset,'')\n dictionary = dict(results)\n x = results.index((cquery,dictionary[cquery]))\n if (t==0): t = dictionary[cquery]\n if (results[0][0] == cquery):\n return (cquery,results[0][1],t)\n else:\n dictionary = dict(results)\n x = results.index((cquery,dictionary[cquery]))\n y = list()\n for i in range(x):\n y.append(combinationqueryset.index(results[i][0]))\n y.sort(reverse = True)\n cache = wordlist\n for z in y:\n cache[z] += cache[z+1]\n del cache[z+1]\n 
return combination(cache,t)\n \ndef spacesplits(wordlist):\n temps = wordlist\n query = listtostr(temps)\n strings = []\n for i in range(len(temps)):\n for y in range(1,len(temps[i])):\n strings.append(listtostr(temps[:i]+list([temps[i][:y],temps[i][y:]])+temps[i+1:]))\n strings.append(query) \n results = getjp1('',strings,'')\n if (results[0][0] == query):\n return (query,results[0][1])\n else:\n return spacesplits(results[0][0].split())\n\n\n\ndef getjp(before,wordlist,after): \n global REQUESTURL\n wordli = wordlist\n string = ''\n for x in wordli:\n string += before+\" \"+str(x)+\" \"+after+\"\\n\"\n string = string.strip()\n jps = list()\n jps = urllib2.urlopen(\n urllib2.Request(REQUESTURL,str(string))).read().split()\n for i in range(len(jps)):\n jps[i] = float(jps[i])/(querylength(wordli[i]))\n dictionary = dict(zip(wordli,jps))\n return sorted(dictionary.iteritems(), key = lambda entity:entity[1], reverse = True)\n\ndef getjp1(before,wordlist,after): \n global REQUESTURL\n string = ''\n for x in wordlist:\n string += before+\" \"+str(x)+\" \"+after+\"\\n\"\n string = string.strip()\n jps = list()\n jps = urllib2.urlopen(\n urllib2.Request(REQUESTURL,str(string))).read().split()\n for i in range(len(jps)):\n jps[i] = float(jps[i])\n dictionary = dict(zip(wordlist,jps))\n return sorted(dictionary.iteritems(), key = lambda entity:entity[1], reverse = True)\n\nclass mainpage(webapp.RequestHandler):\n def get(self):\n global MONTH,DATASET,NGRAM,PROB,REQUESTURL,GENURL\n if len(self.request.get('m')):\n MONTH = str(self.request.get('m'))\n if len(self.request.get('d')):\n DATASET = str(self.request.get('d'))\n if len(self.request.get('ng')):\n NGRAM = str(self.request.get('ng'))\n if len(self.request.get('pp')):\n PROB = str(self.request.get('pp'))\n REQUESTURL = \"http://web-ngram.research.microsoft.com/rest/lookup.svc/\"+DATASET+\"/\"+MONTH+\"/\"+NGRAM+\"/\"+PROB+\"?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e\" \n GENURL = 
\"http://web-ngram.research.microsoft.com/rest/lookup.svc/\"+DATASET+\"/\"+MONTH+\"/\"+NGRAM+\"/gen?u=888b8bfe-a203-43c6-a303-ab8e8d47b38e\"\n query = str(self.request.get('q'))\n wordlist = query.strip().split()\n dictionary = dict()\n try:\n cquery = combination(wordlist,0)[0]\n except:\n cquery = query\n try:\n wordlist = query.strip().split()\n squery = spacesplits(wordlist)[0]\n except:\n squery = query\n try: dictionary.update(getdictionary(wordlist))\n except:\n dictionary.update({query:0})\n try:\n if (query != cquery): dictionary.update(getdictionary(cquery.split()))\n except: dictionary.update({cquery:0})\n try:\n if (query != squery): dictionary.update(getdictionary(squery.split()))\n except: dictionary.update({squery:0})\n finallist = dictionary.keys()\n self.response.headers['Content-Type'] = 'text/plain'\n try:\n result = getjp('',finallist,'')\n final = list()\n for i in range(len(result)):\n final.append(10**((result[i][1])))\n printresult = normalize(final)\n for i in range(len(printresult)):\n self.response.out.write(str(result[i][0])+\"\\t\"+printresult[i]+\"\\n\")\n except:\n self.response.out.write(query+\"\\t\"+str(1))\n \n\n \nclass maintest(webapp.RequestHandler):\n def get(self):\n global MONTH,DATASET,NGRAM,PROB,REQUESTURL,GENURL\n self.response.headers['Content-Type'] = 'text/plain'\n self.response.out.write(REQUESTURL+\"\\n\")\n self.response.out.write(GENURL)\n \n\n\ndef getdictionary(wordelist):\n global MONTH,DATASET,NGRAM,PROB\n dictionaryy = dict()\n rpcs = []\n for i in range(len(wordelist)):\n if i<3: t=0\n else: t = i-3\n form_fields = {\n \"word\": wordelist[i],\n \"before\": listtostr(wordelist[t:i]),\n \"after\": listtostr(wordelist[i+1:i+4]),\n \"m\": MONTH,\n \"d\": DATASET,\n \"ng\": NGRAM,\n \"pp\": PROB\n }\n formdata = urllib.urlencode(form_fields)\n rpc = urlfetch.create_rpc()\n url = \"http://timetest.forbackend.appspot.com/wordspellcheck\"\n #rpc.callback = create_callback(rpc)\n urlfetch.make_fetch_call(rpc,\n url,\n 
payload = formdata,\n method = urlfetch.POST)\n rpcs.append(rpc)\n resultts = list()\n for rpc in rpcs:\n result = rpc.get_result()\n resultts.append(result.content)\n #self.response.out.write(results)\n #self.response.out.write(wordee)\n dictionaryy[listtostr(wordelist)] = 0\n for i in range(len(wordelist)):\n if resultts[i] == wordelist[i]: continue\n else:\n for j in range(i,len(wordelist)+1):\n pp = listtostr(wordelist[:i]+resultts[i:j]+wordelist[j:])\n dictionaryy[pp] = 0\n return dictionaryy\n\n \nclass splittest(webapp.RequestHandler):\n def get(self):\n query = self.request.get('q')\n wordlist = query.split()\n splitted = combination(wordlist,0)\n self.response.out.write(splitted)\n\ndef querylength(query):\n liste = query.split()\n counte = 0\n for x in liste:\n if len(x)>1: counte += 1\n if counte == 0: return 1\n else: return counte\n\ndef listtostr(wordlist):\n string = ''\n for word in wordlist:\n string += word+\" \"\n string = string.strip()\n return string\n#def create_callback(rpc):\n \ndef normalize(problist):\n tot = 0\n for x in problist:\n tot += x\n returnlist = list()\n for i in range(len(problist)):\n returnlist.append(str(round((problist[i]/tot),3)))\n return returnlist\n \napplication = webapp.WSGIApplication([\n ('/mainpage',maintest),#### the main speller is in main page web handler as i submitted maintest as the official submission i changed this\n ('/maintest',mainpage),\n ('/split',splittest)],\n debug = True)\n\ndef main():\n run_wsgi_app(application)\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
13,
14,
17,
21,
22
]
}
|
[
13,
14,
17,
21,
22
] |
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    # (119ms)
    def isSubtree(self, s, t):
        """Return True if tree *t* occurs as a subtree of tree *s*.

        :type s: TreeNode
        :type t: TreeNode
        :rtype: bool

        Serializes both trees with a pre-order traversal, marking null
        children with "$" and prefixing every value with "," so a value
        like 2 cannot falsely match inside 12; *t* is a subtree of *s*
        iff t's serialization is a substring of s's.
        """
        def traverse(root, now):
            if not root:
                now.append("$")
                return
            # str() replaces the Python-2-only backtick repr syntax
            # (`root.val`), which is a SyntaxError on Python 3.
            now.append(str(root.val))
            traverse(root.left, now)
            traverse(root.right, now)
        s_list, t_list = [], []
        traverse(s, s_list)
        traverse(t, t_list)
        s_str, t_str = "," + ",".join(s_list), "," + ",".join(t_list)
        return t_str in s_str
|
normal
|
{
"blob_id": "5ac4dd62d8e56c7baf38f9fe9f8b4a5034f1cb80",
"index": 192,
"step-1": "# Definition for a binary tree node.\n# class TreeNode(object):\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\n\nclass Solution(object):\n# (119ms)\n def isSubtree(self, s, t):\n \"\"\"\n :type s: TreeNode\n :type t: TreeNode\n :rtype: bool\n \"\"\"\n def traverse(root, now):\n if not root:\n now.append(\"$\")\n return\n now.append(`root.val`)\n traverse(root.left, now)\n traverse(root.right, now)\n s_list, t_list = [], []\n traverse(s, s_list)\n traverse(t, t_list)\n s_str, t_str= \",\" + \",\".join(s_list), \",\" + \",\".join(t_list)\n return t_str in s_str",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def swap(a, b):
print(a, b)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def swap(a, b):
print(a, b)
<|reserved_special_token_0|>
print('the vaalues after swaping the variables are below:')
print('the value of a is : ', a)
print('the value of b is : ', b)
<|reserved_special_token_1|>
def swap(a, b):
print(a, b)
a = input('enter a value 1 : ')
b = input('enter b value 2 : ')
a, b = b, a
print('the vaalues after swaping the variables are below:')
print('the value of a is : ', a)
print('the value of b is : ', b)
<|reserved_special_token_1|>
# NOTE(review): swap() only echoes its arguments and is never called;
# the actual swap below is done with tuple assignment.  Confirm whether
# the function is dead code before removing it.
def swap(a,b):
    print(a,b)
# input() returns strings; the values are swapped, not converted.
a=input("enter a value 1 : ")
b=input("enter b value 2 : ")
a,b=b,a
print("the vaalues after swaping the variables are below:")
print("the value of a is : ",a)
print("the value of b is : ",b)
|
flexible
|
{
"blob_id": "4fbe4d474e10e08eafee3bcc6173f8cd6b797dde",
"index": 3203,
"step-1": "<mask token>\n",
"step-2": "def swap(a, b):\n print(a, b)\n\n\n<mask token>\n",
"step-3": "def swap(a, b):\n print(a, b)\n\n\n<mask token>\nprint('the vaalues after swaping the variables are below:')\nprint('the value of a is : ', a)\nprint('the value of b is : ', b)\n",
"step-4": "def swap(a, b):\n print(a, b)\n\n\na = input('enter a value 1 : ')\nb = input('enter b value 2 : ')\na, b = b, a\nprint('the vaalues after swaping the variables are below:')\nprint('the value of a is : ', a)\nprint('the value of b is : ', b)\n",
"step-5": "def swap(a,b):\n print(a,b)\na=input(\"enter a value 1 : \")\nb=input(\"enter b value 2 : \")\na,b=b,a\nprint(\"the vaalues after swaping the variables are below:\")\nprint(\"the value of a is : \",a)\nprint(\"the value of b is : \",b)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
doc.updateStatus()
if script.run_script(doc, script.id) != False:
if doc.naming('richiesta') != 'integrazione':
doc.sendThisMail('rigetta')
script.run_script(doc, script.id, suffix='post')
<|reserved_special_token_1|>
doc = state_change.object
doc.updateStatus()
if script.run_script(doc, script.id) != False:
if doc.naming('richiesta') != 'integrazione':
doc.sendThisMail('rigetta')
script.run_script(doc, script.id, suffix='post')
<|reserved_special_token_1|>
## Script (Python) "after_rigetta"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=state_change
##title=
##
# Workflow after-transition script run when a document is rejected
# ("rigetta").  `state_change` and `script` are bound by Zope via the
# ##bind directives above.
doc = state_change.object

# Refresh the workflow status stored on the PlominoDocument.
doc.updateStatus()

if script.run_script(doc, script.id) != False:

    #### OTHER CODE HERE ####

    # 1. Send the rejection e-mail -- skipped when the request type is
    #    "integrazione" (a request for supplementary documents).
    if doc.naming('richiesta') != 'integrazione':
        doc.sendThisMail('rigetta')

    script.run_script(doc, script.id, suffix='post')

#### SCRIPT ENDS HERE ####
|
flexible
|
{
"blob_id": "096d82e1f9e8832f6605d23c8bb324e045b6b14f",
"index": 7393,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndoc.updateStatus()\nif script.run_script(doc, script.id) != False:\n if doc.naming('richiesta') != 'integrazione':\n doc.sendThisMail('rigetta')\n script.run_script(doc, script.id, suffix='post')\n",
"step-3": "doc = state_change.object\ndoc.updateStatus()\nif script.run_script(doc, script.id) != False:\n if doc.naming('richiesta') != 'integrazione':\n doc.sendThisMail('rigetta')\n script.run_script(doc, script.id, suffix='post')\n",
"step-4": "## Script (Python) \"after_rigetta\"\n##bind container=container\n##bind context=context\n##bind namespace=\n##bind script=script\n##bind subpath=traverse_subpath\n##parameters=state_change\n##title=\n##\ndoc = state_change.object\n\n#Aggiornamento dello stato su plominoDocument\ndoc.updateStatus()\n\nif script.run_script(doc, script.id) != False:\n\n #### OTHER CODE HERE ####\n\n # 1. INVIO MAIL RIGETTO\n if doc.naming('richiesta') != 'integrazione':\n doc.sendThisMail('rigetta')\n\n script.run_script(doc, script.id, suffix='post')\n\n#### SCRIPT ENDS HERE ####\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import datetime
import json
import re
import time
import discord
from utils.ext import standards as std, checks, context, logs
# Regex sources for the automod content filters (compiled once at import time).
# Fix: made the patterns raw strings — the originals relied on invalid escape
# sequences like '\.' and '\/' in plain strings, which raise
# DeprecationWarning/SyntaxWarning on modern CPython.
DISCORD_INVITE = r'(discord(app\.com\/invite|\.com(\/invite)?|\.gg)\/?[a-zA-Z0-9-]{2,32})'
EXTERNAL_LINK = r'((https?:\/\/(www\.)?|www\.)[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6})'
EVERYONE_MENTION = '@(here|everyone)'

discordRegex = re.compile(DISCORD_INVITE, re.IGNORECASE)
linkRegex = re.compile(EXTERNAL_LINK, re.IGNORECASE)
everyoneRegex = re.compile(EVERYONE_MENTION)
def findWord(word):
    """Return a case-insensitive whole-word search callable for *word*.

    The returned function takes a string and returns a match object when
    *word* occurs bounded by word boundaries, else None.
    """
    pattern = re.compile(r'\b({0})\b'.format(word), flags=re.IGNORECASE)
    return pattern.search
async def managePunishment(ctx, punishment, reason):
    """Apply an immediate automod punishment and log it.

    punishment codes (from the guild config): 1 = kick, 2 = permanent ban,
    3 = temp ban, 4 = temp mute. Any other value only deletes the message
    and emits the log embeds. Each action is skipped silently when the bot
    lacks the required permission.
    """
    # Always remove the offending message first.
    await ctx.message.delete()
    user: discord.Member = ctx.author
    # Truncate the quoted message so it fits in an embed field (1024-char limit).
    msg = ctx.message.content if len(ctx.message.content) < 1015 else f'{ctx.message.content[:1015]}...'
    reason = f'Automoderation: {reason}'
    # One embed for the mod-log channel, one DM-style embed for the punished user.
    embed: discord.Embed = std.getBaseModEmbed(reason, ctx.author, ctx.me)
    userEmbed: discord.Embed = std.getBaseModEmbed(reason)
    userEmbed.add_field(name=f'{std.folder_emoji} **Server**', value=ctx.guild.name, inline=False)
    userEmbed.add_field(name=f'{std.list_emoji} **__Message__**', value=msg, inline=False)
    embed.add_field(name=f'{std.channel_emoji} **__Channel__**', value=ctx.channel.mention, inline=False)
    embed.add_field(name=f'{std.list_emoji} **__Message__**', value=msg, inline=False)
    # Per-guild punishment durations and mute role.
    data = await ctx.bot.db.fetchrow('SELECT bantime, mutetime, muterole FROM automod.config WHERE sid = $1', ctx.guild.id)
    if punishment == 1:
        # Kick.
        if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):
            embed.title = 'AUTOMODERATION [KICK]'
            userEmbed.title = 'AUTOMODERATION [KICK]'
            await ctx.guild.kick(user, reason=reason)
    elif punishment == 2:
        # Permanent ban.
        if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):
            embed.title = 'AUTOMODERATION [BAN]'
            userEmbed.title = 'AUTOMODERATION [BAN]'
            await ctx.guild.ban(user, reason=reason)
    elif punishment == 3:
        # Temp ban: schedule an unban timer (type 0) in extra.timers.
        # NOTE(review): assumes data['bantime'] is set when punishment 3 is
        # configured — a NULL bantime would raise here; confirm config invariant.
        if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):
            embed.title = 'AUTOMODERATION [TEMPBAN]'
            userEmbed.title = 'AUTOMODERATION [TEMPBAN]'
            unixTime = time.time() + data['bantime']
            embed.add_field(name=f'{std.date_emoji} **__Entbann__**', value=datetime.datetime.fromtimestamp(unixTime).strftime('%d. %m. %Y um %H:%M:%S'))
            await ctx.db.execute('INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)',
                                 ctx.guild.id, user.id, 0, unixTime, json.dumps({'reason': reason}))
            await ctx.guild.ban(user, reason=reason)
    elif punishment == 4:
        # Temp mute: add the configured mute role and schedule an unmute timer (type 1).
        if checks.hasPermsByName(ctx, ctx.me, 'manage_roles'):
            muteRole = ctx.guild.get_role(data['muterole'])
            if muteRole is None:
                # No usable mute role configured — nothing to do, no log either.
                return
            embed.title = 'AUTOMODERATION [TEMPMUTE]'
            userEmbed.title = 'AUTOMODERATION [TEMPMUTE]'
            unixTime = time.time() + data['mutetime']
            embed.add_field(name=f'{std.date_emoji} **__Entmute__**', value=datetime.datetime.fromtimestamp(unixTime).strftime('%d. %m. %Y um %H:%M:%S'))
            await ctx.db.execute('INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)',
                                 ctx.guild.id, user.id, 1, unixTime, json.dumps({'reason': reason}))
            await user.add_roles(muteRole, reason=reason)
    # Emit both embeds to the configured log targets.
    await logs.createEmbedLog(ctx=ctx, modEmbed=embed, userEmbed=userEmbed, member=user, ignoreMMSG=True, ignoreNoLogging=True)
async def add_points(ctx: context, addPoints, modType, user: discord.Member = None):
    """Add automod points to a user and escalate when the guild threshold is hit.

    Records *addPoints* for the punished user (the message author, or *user*
    when a moderator applies points manually), sums the user's points from the
    last 30 days, and — when the total reaches the guild's maxpoints — applies
    the configured action: 1 = kick, 2 = ban, 3 = temp ban, 4 = temp mute.
    Always deletes the triggering message and emits log embeds.

    Fix: the permanent-ban branch (action == 2) previously checked the
    'kick_members' permission before banning; it now checks 'ban_members',
    consistent with managePunishment and the temp-ban branch.
    """
    await ctx.message.delete()
    if user is not None:
        punishedUser: discord.Member = user
    else:
        # Fixed annotation: ctx.author is a Member, not a Message.
        punishedUser: discord.Member = ctx.author
    # Record the new points entry.
    await ctx.bot.db.execute(
        'INSERT INTO automod.users (uid, sid, points, time, reason) VALUES ($1, $2, $3, $4, $5)',
        punishedUser.id, ctx.guild.id, addPoints, time.time(), f'Automoderation: {modType}')
    # Sum only points younger than 30 days (2592000 s).
    points = await ctx.bot.db.fetchval('SELECT sum(points) FROM automod.users WHERE uid = $1 AND sid = $2 AND $3 - time < 2592000', punishedUser.id, ctx.guild.id, time.time())
    data = await ctx.bot.db.fetchrow("SELECT action, maxpoints, muterole, mutetime, bantime FROM automod.config WHERE sid = $1", ctx.guild.id)
    msg: discord.Message = ctx.message
    action = data['action']
    maxPoints = data['maxpoints']
    # Default temp durations of 24 h, overridden by guild config when set.
    unixTimeMute = unixTimeBan = time.time() + 86400
    if data['mutetime']:
        unixTimeMute: float = time.time() + data['mutetime']
    if data['bantime']:
        unixTimeBan: float = time.time() + data['bantime']
    # Truncate the quoted message so it fits in an embed field (1024-char limit).
    message = msg.content if len(msg.content) < 1015 else f'{ctx.message.content[:1015]}...'
    # One embed for the mod-log channel, one DM-style embed for the punished user.
    embed: discord.Embed = std.getBaseModEmbed(f'{modType} [+{addPoints}]', punishedUser)
    userEmbed: discord.Embed = std.getBaseModEmbed(f'{modType} [+{addPoints}]')
    userEmbed.add_field(name=f'{std.folder_emoji} **Server**', value=ctx.guild.name)
    embed.title = f'AUTOMODERATION [LOG]'
    userEmbed.title = f'AUTOMODERATION [LOG]'
    if user is not None:
        # Manual points: name the moderator who applied them.
        embed.add_field(name=f'{std.supporter_emoji} **__Moderator__**', value=ctx.author.mention, inline=False)
    embed.add_field(name=f'{std.channel_emoji} **__Channel__**', value=ctx.channel.mention, inline=False)
    embed.add_field(name=f'{std.invite_emoji} **__Punkte__**', value=f'{points}/{maxPoints}', inline=False)
    userEmbed.add_field(name=f'{std.invite_emoji} **__Punkte__**', value=f'{points}/{maxPoints}', inline=False)
    if user is None:
        # Automatic points: quote the offending message.
        userEmbed.add_field(name=f'{std.list_emoji} **__Message__**', value=message, inline=False)
        embed.add_field(name=f'{std.list_emoji} **__Message__**', value=message, inline=False)
    if points >= maxPoints:
        if action is None:
            embed.title = 'AUTOMODERATION [LOG]'
        if action == 1:
            # Kick and reset the user's points.
            if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):
                embed.title = 'AUTOMODERATION [KICK]'
                await punishedUser.kick(reason="Automoderation")
                await ctx.bot.db.execute("DELETE FROM automod.users WHERE uid = $1 AND sid = $2", punishedUser.id, msg.guild.id)
            else:
                return
        if action == 2:
            # Permanent ban and reset the user's points.
            if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):
                embed.title = 'AUTOMODERATION [BAN]'
                await punishedUser.ban(reason="Automoderation")
                await ctx.bot.db.execute("DELETE FROM automod.users WHERE uid = $1 AND sid = $2", punishedUser.id, msg.guild.id)
            else:
                return
        if action == 3:
            # Temp ban: schedule an unban timer (type 0).
            # NOTE(review): unlike the other branches this one does not clear
            # the user's points — confirm whether that is intentional.
            if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):
                embed.add_field(name=f'{std.date_emoji} **__Entbann__**', value=datetime.datetime.fromtimestamp(unixTimeBan).strftime('%d. %m. %Y um %H:%M:%S'))
                embed.title = 'AUTOMODERATION [TEMPBAN]'
                await punishedUser.ban(reason="Automoderation: Punktesystem")
                await ctx.db.execute('INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)',
                                     ctx.guild.id, punishedUser.id, 0, unixTimeBan, json.dumps({'reason': 'Automoderation: Punktesystem'}))
            else:
                return
        if action == 4:
            # Temp mute: add the mute role, reset points, schedule an unmute timer (type 1).
            if checks.hasPermsByName(ctx, ctx.me, 'manage_roles'):
                muteRole = ctx.guild.get_role(data['muterole'])
                if muteRole is None:
                    return
                embed.add_field(name=f'{std.date_emoji} **__Entmute__**', value=datetime.datetime.fromtimestamp(unixTimeMute).strftime('%d. %m. %Y um %H:%M:%S'))
                embed.title = 'AUTOMODERATION [TEMPMUTE]'
                await punishedUser.add_roles(muteRole, reason='Automoderation')
                await ctx.bot.db.execute("DELETE FROM automod.users WHERE uid = $1 AND sid = $2", punishedUser.id, msg.guild.id)
                await ctx.db.execute('INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)',
                                     ctx.guild.id, punishedUser.id, 1, unixTimeMute, json.dumps({'reason': 'Automoderation: Punktesystem'}))
            else:
                return
    # Emit both embeds to the configured log targets.
    await logs.createEmbedLog(ctx=ctx, modEmbed=embed, userEmbed=userEmbed, member=punishedUser, ignoreNoLogging=True, ignoreMMSG=True)
async def automod(ctx):
    """Run all automod filters against the message in *ctx*.

    Checks, in order: blacklisted words, Discord invites, external links
    (invite/link checks are mutually exclusive via elif), excessive caps,
    and mention spam. Each filter either adds points (state/config value 5)
    or applies a direct punishment via managePunishment. Per-filter channel
    whitelists and checks.ignores_automod short-circuit the filter.

    Fix: the mention-spam punishment previously reported the reason 'Caps';
    it now correctly reports 'Mentions' (matching the points path).
    """
    bot = ctx.bot
    guild: discord.Guild = ctx.guild
    msg: discord.Message = ctx.message
    channel: discord.TextChannel = ctx.channel
    blState = await bot.get(guild.id, 'state')
    # Automod disabled for this guild — nothing to do.
    if not await bot.get(guild.id, 'automod'):
        return
    # --- Blacklisted words ---
    if blState:
        words = await bot.get(guild.id, 'words')
        if words:
            for word in words:
                if findWord(word)(msg.content.lower()):
                    if not await checks.ignores_automod(ctx):
                        data = await bot.db.fetchrow('SELECT points, whitelist FROM automod.blacklist WHERE sid = $1', guild.id)
                        if data['whitelist'] is not None:
                            if channel.id in data['whitelist']:
                                return
                        if blState == 5:
                            return await add_points(ctx, data['points'], 'Blacklisted Word')
                        else:
                            return await managePunishment(ctx, blState, 'Blacklisted Word')
    # --- Discord invites ---
    if discordRegex.findall(msg.content):
        if await checks.ignores_automod(ctx):
            return
        data = await bot.db.fetchrow("SELECT state, whitelist, partner, points FROM automod.invites WHERE sid = $1", guild.id)
        if not data:
            return
        if not (state := data['state']):
            return
        if data['whitelist'] is not None:
            if channel.id in data['whitelist']:
                return
        # Invites to this guild and to configured partner guilds are allowed.
        whitelistedServers = [guild.id]
        if partner := data['partner']:
            whitelistedServers.extend([int(guildID) for guildID in partner])
        hasInvite: bool = False
        for invite in discordRegex.findall(msg.content):
            try:
                invite = await bot.fetch_invite(invite[0])
            except discord.NotFound:
                # Dead invite — ignore it.
                continue
            except discord.Forbidden:
                # Cannot resolve the invite, so its target is unknown — treat it
                # as a violation rather than letting it through.
                if state == 5:
                    return await add_points(ctx, data['points'], 'Invite')
                else:
                    return await managePunishment(ctx, state, 'Invite')
            if invite.guild.id not in whitelistedServers:
                hasInvite = True
                break
        if hasInvite:
            if state == 5:
                return await add_points(ctx, data['points'], 'Invite')
            else:
                return await managePunishment(ctx, state, 'Invite')
    # --- External links (only checked when no invite matched) ---
    elif linkRegex.findall(msg.content):
        if await checks.ignores_automod(ctx):
            return
        data = await bot.db.fetchrow('SELECT points, state, links, whitelist, iswhitelist FROM automod.links WHERE sid = $1', guild.id)
        if not data:
            return
        if not (state := data['state']):
            return
        if data['whitelist'] is not None:
            if channel.id in data['whitelist']:
                return
        # Base link list; meaning flips with iswhitelist (allow-list vs block-list).
        links = ['discord.gg', 'discord.com', 'discordapp.com', 'plyoox.net']
        if (linksData := data['links']) is not None:
            links.extend(linksData)
        linksObj = linkRegex.findall(msg.content)
        for linkObj in linksObj:
            # Strip the scheme/www prefix (group 1) from the full match (group 0).
            link = linkObj[0].replace(linkObj[1], '')
            if data['iswhitelist']:
                # Allow-list mode: punish links NOT in the list.
                if link not in links:
                    if state == 5:
                        return await add_points(ctx, data['points'], 'Link')
                    else:
                        return await managePunishment(ctx, state, 'Link')
            else:
                # Block-list mode: punish links IN the list.
                if link in links:
                    if state == 5:
                        return await add_points(ctx, data['points'], 'Link')
                    else:
                        return await managePunishment(ctx, state, 'Link')
    # --- Excessive capital letters ---
    if not msg.clean_content.islower() and len(msg.content) > 15:
        if await checks.ignores_automod(ctx):
            return
        lenCaps = len(re.findall(r'[A-ZÄÖÜ]', msg.clean_content))
        percent = lenCaps / len(msg.content)
        # More than 70 % uppercase counts as caps spam.
        if percent > 0.7:
            data = await bot.db.fetchrow("SELECT points, state, whitelist FROM automod.caps WHERE sid = $1", msg.guild.id)
            if not data:
                return
            if not (state := data['state']):
                return
            if data['whitelist'] is not None:
                if channel.id in data['whitelist']:
                    return
            if state == 5:
                return await add_points(ctx, data['points'], 'Caps')
            else:
                return await managePunishment(ctx, state, 'Caps')
    # --- Mention spam ---
    if len(msg.raw_mentions) + len(msg.raw_role_mentions) + len(everyoneRegex.findall(msg.content)) >= 3:
        if await checks.ignores_automod(ctx):
            return
        # Self-mentions do not count toward the limit.
        lenMentions = sum(m != ctx.author.id for m in msg.raw_mentions) + len(msg.raw_role_mentions)
        data = await bot.db.fetchrow(
            "SELECT state, points, count, whitelist, everyone FROM automod.mentions WHERE sid = $1",
            guild.id)
        if not data:
            return
        if not (state := data['state']):
            return
        if data['whitelist'] is not None:
            if channel.id in data['whitelist']:
                return
        if data['everyone']:
            # Optionally count @everyone/@here toward the limit.
            lenMentions += len(everyoneRegex.findall(msg.content))
        if lenMentions >= data['count']:
            if state == 5:
                return await add_points(ctx, data['points'], 'Mentions')
            else:
                return await managePunishment(ctx, state, 'Mentions')
|
normal
|
{
"blob_id": "10c9566503c43e806ca89e03955312c510092859",
"index": 5346,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef findWord(word):\n return re.compile('\\\\b({0})\\\\b'.format(word), flags=re.IGNORECASE).search\n\n\nasync def managePunishment(ctx, punishment, reason):\n await ctx.message.delete()\n user: discord.Member = ctx.author\n msg = ctx.message.content if len(ctx.message.content\n ) < 1015 else f'{ctx.message.content[:1015]}...'\n reason = f'Automoderation: {reason}'\n embed: discord.Embed = std.getBaseModEmbed(reason, ctx.author, ctx.me)\n userEmbed: discord.Embed = std.getBaseModEmbed(reason)\n userEmbed.add_field(name=f'{std.folder_emoji} **Server**', value=ctx.\n guild.name, inline=False)\n userEmbed.add_field(name=f'{std.list_emoji} **__Message__**', value=msg,\n inline=False)\n embed.add_field(name=f'{std.channel_emoji} **__Channel__**', value=ctx.\n channel.mention, inline=False)\n embed.add_field(name=f'{std.list_emoji} **__Message__**', value=msg,\n inline=False)\n data = await ctx.bot.db.fetchrow(\n 'SELECT bantime, mutetime, muterole FROM automod.config WHERE sid = $1'\n , ctx.guild.id)\n if punishment == 1:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [KICK]'\n userEmbed.title = 'AUTOMODERATION [KICK]'\n await ctx.guild.kick(user, reason=reason)\n elif punishment == 2:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.title = 'AUTOMODERATION [BAN]'\n userEmbed.title = 'AUTOMODERATION [BAN]'\n await ctx.guild.ban(user, reason=reason)\n elif punishment == 3:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.title = 'AUTOMODERATION [TEMPBAN]'\n userEmbed.title = 'AUTOMODERATION [TEMPBAN]'\n unixTime = time.time() + data['bantime']\n embed.add_field(name=f'{std.date_emoji} **__Entbann__**', value\n =datetime.datetime.fromtimestamp(unixTime).strftime(\n '%d. %m. 
%Y um %H:%M:%S'))\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, user.id, 0, unixTime, json.dumps({'reason':\n reason}))\n await ctx.guild.ban(user, reason=reason)\n elif punishment == 4:\n if checks.hasPermsByName(ctx, ctx.me, 'manage_roles'):\n muteRole = ctx.guild.get_role(data['muterole'])\n if muteRole is None:\n return\n embed.title = 'AUTOMODERATION [TEMPMUTE]'\n userEmbed.title = 'AUTOMODERATION [TEMPMUTE]'\n unixTime = time.time() + data['mutetime']\n embed.add_field(name=f'{std.date_emoji} **__Entmute__**', value\n =datetime.datetime.fromtimestamp(unixTime).strftime(\n '%d. %m. %Y um %H:%M:%S'))\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, user.id, 1, unixTime, json.dumps({'reason':\n reason}))\n await user.add_roles(muteRole, reason=reason)\n await logs.createEmbedLog(ctx=ctx, modEmbed=embed, userEmbed=userEmbed,\n member=user, ignoreMMSG=True, ignoreNoLogging=True)\n\n\nasync def add_points(ctx: context, addPoints, modType, user: discord.Member\n =None):\n await ctx.message.delete()\n if user is not None:\n punishedUser: discord.Member = user\n else:\n punishedUser: discord.Message = ctx.author\n await ctx.bot.db.execute(\n 'INSERT INTO automod.users (uid, sid, points, time, reason) VALUES ($1, $2, $3, $4, $5)'\n , punishedUser.id, ctx.guild.id, addPoints, time.time(),\n f'Automoderation: {modType}')\n points = await ctx.bot.db.fetchval(\n 'SELECT sum(points) FROM automod.users WHERE uid = $1 AND sid = $2 AND $3 - time < 2592000'\n , punishedUser.id, ctx.guild.id, time.time())\n data = await ctx.bot.db.fetchrow(\n 'SELECT action, maxpoints, muterole, mutetime, bantime FROM automod.config WHERE sid = $1'\n , ctx.guild.id)\n msg: discord.Message = ctx.message\n action = data['action']\n maxPoints = data['maxpoints']\n unixTimeMute = unixTimeBan = time.time() + 86400\n if data['mutetime']:\n 
unixTimeMute: float = time.time() + data['mutetime']\n if data['bantime']:\n unixTimeBan: float = time.time() + data['bantime']\n message = msg.content if len(msg.content\n ) < 1015 else f'{ctx.message.content[:1015]}...'\n embed: discord.Embed = std.getBaseModEmbed(f'{modType} [+{addPoints}]',\n punishedUser)\n userEmbed: discord.Embed = std.getBaseModEmbed(f'{modType} [+{addPoints}]')\n userEmbed.add_field(name=f'{std.folder_emoji} **Server**', value=ctx.\n guild.name)\n embed.title = f'AUTOMODERATION [LOG]'\n userEmbed.title = f'AUTOMODERATION [LOG]'\n if user is not None:\n embed.add_field(name=f'{std.supporter_emoji} **__Moderator__**',\n value=ctx.author.mention, inline=False)\n embed.add_field(name=f'{std.channel_emoji} **__Channel__**', value=ctx.\n channel.mention, inline=False)\n embed.add_field(name=f'{std.invite_emoji} **__Punkte__**', value=\n f'{points}/{maxPoints}', inline=False)\n userEmbed.add_field(name=f'{std.invite_emoji} **__Punkte__**', value=\n f'{points}/{maxPoints}', inline=False)\n if user is None:\n userEmbed.add_field(name=f'{std.list_emoji} **__Message__**', value\n =message, inline=False)\n embed.add_field(name=f'{std.list_emoji} **__Message__**', value=\n message, inline=False)\n if points >= maxPoints:\n if action is None:\n embed.title = 'AUTOMODERATION [LOG]'\n if action == 1:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [KICK]'\n await punishedUser.kick(reason='Automoderation')\n await ctx.bot.db.execute(\n 'DELETE FROM automod.users WHERE uid = $1 AND sid = $2',\n punishedUser.id, msg.guild.id)\n else:\n return\n if action == 2:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [BAN]'\n await punishedUser.ban(reason='Automoderation')\n await ctx.bot.db.execute(\n 'DELETE FROM automod.users WHERE uid = $1 AND sid = $2',\n punishedUser.id, msg.guild.id)\n else:\n return\n if action == 3:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n 
embed.add_field(name=f'{std.date_emoji} **__Entbann__**',\n value=datetime.datetime.fromtimestamp(unixTimeBan).\n strftime('%d. %m. %Y um %H:%M:%S'))\n embed.title = 'AUTOMODERATION [TEMPBAN]'\n await punishedUser.ban(reason='Automoderation: Punktesystem')\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, punishedUser.id, 0, unixTimeBan, json.\n dumps({'reason': 'Automoderation: Punktesystem'}))\n else:\n return\n if action == 4:\n if checks.hasPermsByName(ctx, ctx.me, 'manage_roles'):\n muteRole = ctx.guild.get_role(data['muterole'])\n if muteRole is None:\n return\n embed.add_field(name=f'{std.date_emoji} **__Entmute__**',\n value=datetime.datetime.fromtimestamp(unixTimeMute).\n strftime('%d. %m. %Y um %H:%M:%S'))\n embed.title = 'AUTOMODERATION [TEMPMUTE]'\n await punishedUser.add_roles(muteRole, reason='Automoderation')\n await ctx.bot.db.execute(\n 'DELETE FROM automod.users WHERE uid = $1 AND sid = $2',\n punishedUser.id, msg.guild.id)\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, punishedUser.id, 1, unixTimeMute, json.\n dumps({'reason': 'Automoderation: Punktesystem'}))\n else:\n return\n await logs.createEmbedLog(ctx=ctx, modEmbed=embed, userEmbed=userEmbed,\n member=punishedUser, ignoreNoLogging=True, ignoreMMSG=True)\n\n\nasync def automod(ctx):\n bot = ctx.bot\n guild: discord.Guild = ctx.guild\n msg: discord.Message = ctx.message\n channel: discord.TextChannel = ctx.channel\n blState = await bot.get(guild.id, 'state')\n if not await bot.get(guild.id, 'automod'):\n return\n if blState:\n words = await bot.get(guild.id, 'words')\n if words:\n for word in words:\n if findWord(word)(msg.content.lower()):\n if not await checks.ignores_automod(ctx):\n data = await bot.db.fetchrow(\n 'SELECT points, whitelist FROM automod.blacklist WHERE sid = $1'\n , guild.id)\n if data['whitelist'] is not None:\n 
if channel.id in data['whitelist']:\n return\n if blState == 5:\n return await add_points(ctx, data['points'],\n 'Blacklisted Word')\n else:\n return await managePunishment(ctx, blState,\n 'Blacklisted Word')\n if discordRegex.findall(msg.content):\n if await checks.ignores_automod(ctx):\n return\n data = await bot.db.fetchrow(\n 'SELECT state, whitelist, partner, points FROM automod.invites WHERE sid = $1'\n , guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n whitelistedServers = [guild.id]\n if (partner := data['partner']):\n whitelistedServers.extend([int(guildID) for guildID in partner])\n hasInvite: bool = False\n for invite in discordRegex.findall(msg.content):\n try:\n invite = await bot.fetch_invite(invite[0])\n except discord.NotFound:\n continue\n except discord.Forbidden:\n if state == 5:\n return await add_points(ctx, data['points'], 'Invite')\n else:\n return await managePunishment(ctx, state, 'Invite')\n if invite.guild.id not in whitelistedServers:\n hasInvite = True\n break\n if hasInvite:\n if state == 5:\n return await add_points(ctx, data['points'], 'Invite')\n else:\n return await managePunishment(ctx, state, 'Invite')\n elif linkRegex.findall(msg.content):\n if await checks.ignores_automod(ctx):\n return\n data = await bot.db.fetchrow(\n 'SELECT points, state, links, whitelist, iswhitelist FROM automod.links WHERE sid = $1'\n , guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n links = ['discord.gg', 'discord.com', 'discordapp.com', 'plyoox.net']\n if (linksData := data['links']) is not None:\n links.extend(linksData)\n linksObj = linkRegex.findall(msg.content)\n for linkObj in linksObj:\n link = linkObj[0].replace(linkObj[1], '')\n if data['iswhitelist']:\n if link not in links:\n if state == 5:\n return await 
add_points(ctx, data['points'], 'Link')\n else:\n return await managePunishment(ctx, state, 'Link')\n elif link in links:\n if state == 5:\n return await add_points(ctx, data['points'], 'Link')\n else:\n return await managePunishment(ctx, state, 'Link')\n if not msg.clean_content.islower() and len(msg.content) > 15:\n if await checks.ignores_automod(ctx):\n return\n lenCaps = len(re.findall('[A-ZÄÖÜ]', msg.clean_content))\n percent = lenCaps / len(msg.content)\n if percent > 0.7:\n data = await bot.db.fetchrow(\n 'SELECT points, state, whitelist FROM automod.caps WHERE sid = $1'\n , msg.guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n if state == 5:\n return await add_points(ctx, data['points'], 'Caps')\n else:\n return await managePunishment(ctx, state, 'Caps')\n if len(msg.raw_mentions) + len(msg.raw_role_mentions) + len(everyoneRegex\n .findall(msg.content)) >= 3:\n if await checks.ignores_automod(ctx):\n return\n lenMentions = sum(m != ctx.author.id for m in msg.raw_mentions) + len(\n msg.raw_role_mentions)\n data = await bot.db.fetchrow(\n 'SELECT state, points, count, whitelist, everyone FROM automod.mentions WHERE sid = $1'\n , guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n if data['everyone']:\n lenMentions += len(everyoneRegex.findall(msg.content))\n if lenMentions >= data['count']:\n if state == 5:\n return await add_points(ctx, data['points'], 'Mentions')\n else:\n return await managePunishment(ctx, state, 'Caps')\n",
"step-3": "<mask token>\nDISCORD_INVITE = (\n '(discord(app\\\\.com\\\\/invite|\\\\.com(\\\\/invite)?|\\\\.gg)\\\\/?[a-zA-Z0-9-]{2,32})'\n )\nEXTERNAL_LINK = (\n '((https?:\\\\/\\\\/(www\\\\.)?|www\\\\.)[-a-zA-Z0-9@:%._\\\\+~#=]{1,256}\\\\.[a-zA-Z0-9()]{1,6})'\n )\nEVERYONE_MENTION = '@(here|everyone)'\ndiscordRegex = re.compile(DISCORD_INVITE, re.IGNORECASE)\nlinkRegex = re.compile(EXTERNAL_LINK, re.IGNORECASE)\neveryoneRegex = re.compile(EVERYONE_MENTION)\n\n\ndef findWord(word):\n return re.compile('\\\\b({0})\\\\b'.format(word), flags=re.IGNORECASE).search\n\n\nasync def managePunishment(ctx, punishment, reason):\n await ctx.message.delete()\n user: discord.Member = ctx.author\n msg = ctx.message.content if len(ctx.message.content\n ) < 1015 else f'{ctx.message.content[:1015]}...'\n reason = f'Automoderation: {reason}'\n embed: discord.Embed = std.getBaseModEmbed(reason, ctx.author, ctx.me)\n userEmbed: discord.Embed = std.getBaseModEmbed(reason)\n userEmbed.add_field(name=f'{std.folder_emoji} **Server**', value=ctx.\n guild.name, inline=False)\n userEmbed.add_field(name=f'{std.list_emoji} **__Message__**', value=msg,\n inline=False)\n embed.add_field(name=f'{std.channel_emoji} **__Channel__**', value=ctx.\n channel.mention, inline=False)\n embed.add_field(name=f'{std.list_emoji} **__Message__**', value=msg,\n inline=False)\n data = await ctx.bot.db.fetchrow(\n 'SELECT bantime, mutetime, muterole FROM automod.config WHERE sid = $1'\n , ctx.guild.id)\n if punishment == 1:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [KICK]'\n userEmbed.title = 'AUTOMODERATION [KICK]'\n await ctx.guild.kick(user, reason=reason)\n elif punishment == 2:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.title = 'AUTOMODERATION [BAN]'\n userEmbed.title = 'AUTOMODERATION [BAN]'\n await ctx.guild.ban(user, reason=reason)\n elif punishment == 3:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.title = 
'AUTOMODERATION [TEMPBAN]'\n userEmbed.title = 'AUTOMODERATION [TEMPBAN]'\n unixTime = time.time() + data['bantime']\n embed.add_field(name=f'{std.date_emoji} **__Entbann__**', value\n =datetime.datetime.fromtimestamp(unixTime).strftime(\n '%d. %m. %Y um %H:%M:%S'))\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, user.id, 0, unixTime, json.dumps({'reason':\n reason}))\n await ctx.guild.ban(user, reason=reason)\n elif punishment == 4:\n if checks.hasPermsByName(ctx, ctx.me, 'manage_roles'):\n muteRole = ctx.guild.get_role(data['muterole'])\n if muteRole is None:\n return\n embed.title = 'AUTOMODERATION [TEMPMUTE]'\n userEmbed.title = 'AUTOMODERATION [TEMPMUTE]'\n unixTime = time.time() + data['mutetime']\n embed.add_field(name=f'{std.date_emoji} **__Entmute__**', value\n =datetime.datetime.fromtimestamp(unixTime).strftime(\n '%d. %m. %Y um %H:%M:%S'))\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, user.id, 1, unixTime, json.dumps({'reason':\n reason}))\n await user.add_roles(muteRole, reason=reason)\n await logs.createEmbedLog(ctx=ctx, modEmbed=embed, userEmbed=userEmbed,\n member=user, ignoreMMSG=True, ignoreNoLogging=True)\n\n\nasync def add_points(ctx: context, addPoints, modType, user: discord.Member\n =None):\n await ctx.message.delete()\n if user is not None:\n punishedUser: discord.Member = user\n else:\n punishedUser: discord.Message = ctx.author\n await ctx.bot.db.execute(\n 'INSERT INTO automod.users (uid, sid, points, time, reason) VALUES ($1, $2, $3, $4, $5)'\n , punishedUser.id, ctx.guild.id, addPoints, time.time(),\n f'Automoderation: {modType}')\n points = await ctx.bot.db.fetchval(\n 'SELECT sum(points) FROM automod.users WHERE uid = $1 AND sid = $2 AND $3 - time < 2592000'\n , punishedUser.id, ctx.guild.id, time.time())\n data = await ctx.bot.db.fetchrow(\n 'SELECT action, maxpoints, 
muterole, mutetime, bantime FROM automod.config WHERE sid = $1'\n , ctx.guild.id)\n msg: discord.Message = ctx.message\n action = data['action']\n maxPoints = data['maxpoints']\n unixTimeMute = unixTimeBan = time.time() + 86400\n if data['mutetime']:\n unixTimeMute: float = time.time() + data['mutetime']\n if data['bantime']:\n unixTimeBan: float = time.time() + data['bantime']\n message = msg.content if len(msg.content\n ) < 1015 else f'{ctx.message.content[:1015]}...'\n embed: discord.Embed = std.getBaseModEmbed(f'{modType} [+{addPoints}]',\n punishedUser)\n userEmbed: discord.Embed = std.getBaseModEmbed(f'{modType} [+{addPoints}]')\n userEmbed.add_field(name=f'{std.folder_emoji} **Server**', value=ctx.\n guild.name)\n embed.title = f'AUTOMODERATION [LOG]'\n userEmbed.title = f'AUTOMODERATION [LOG]'\n if user is not None:\n embed.add_field(name=f'{std.supporter_emoji} **__Moderator__**',\n value=ctx.author.mention, inline=False)\n embed.add_field(name=f'{std.channel_emoji} **__Channel__**', value=ctx.\n channel.mention, inline=False)\n embed.add_field(name=f'{std.invite_emoji} **__Punkte__**', value=\n f'{points}/{maxPoints}', inline=False)\n userEmbed.add_field(name=f'{std.invite_emoji} **__Punkte__**', value=\n f'{points}/{maxPoints}', inline=False)\n if user is None:\n userEmbed.add_field(name=f'{std.list_emoji} **__Message__**', value\n =message, inline=False)\n embed.add_field(name=f'{std.list_emoji} **__Message__**', value=\n message, inline=False)\n if points >= maxPoints:\n if action is None:\n embed.title = 'AUTOMODERATION [LOG]'\n if action == 1:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [KICK]'\n await punishedUser.kick(reason='Automoderation')\n await ctx.bot.db.execute(\n 'DELETE FROM automod.users WHERE uid = $1 AND sid = $2',\n punishedUser.id, msg.guild.id)\n else:\n return\n if action == 2:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [BAN]'\n await 
punishedUser.ban(reason='Automoderation')\n await ctx.bot.db.execute(\n 'DELETE FROM automod.users WHERE uid = $1 AND sid = $2',\n punishedUser.id, msg.guild.id)\n else:\n return\n if action == 3:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.add_field(name=f'{std.date_emoji} **__Entbann__**',\n value=datetime.datetime.fromtimestamp(unixTimeBan).\n strftime('%d. %m. %Y um %H:%M:%S'))\n embed.title = 'AUTOMODERATION [TEMPBAN]'\n await punishedUser.ban(reason='Automoderation: Punktesystem')\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, punishedUser.id, 0, unixTimeBan, json.\n dumps({'reason': 'Automoderation: Punktesystem'}))\n else:\n return\n if action == 4:\n if checks.hasPermsByName(ctx, ctx.me, 'manage_roles'):\n muteRole = ctx.guild.get_role(data['muterole'])\n if muteRole is None:\n return\n embed.add_field(name=f'{std.date_emoji} **__Entmute__**',\n value=datetime.datetime.fromtimestamp(unixTimeMute).\n strftime('%d. %m. 
%Y um %H:%M:%S'))\n embed.title = 'AUTOMODERATION [TEMPMUTE]'\n await punishedUser.add_roles(muteRole, reason='Automoderation')\n await ctx.bot.db.execute(\n 'DELETE FROM automod.users WHERE uid = $1 AND sid = $2',\n punishedUser.id, msg.guild.id)\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, punishedUser.id, 1, unixTimeMute, json.\n dumps({'reason': 'Automoderation: Punktesystem'}))\n else:\n return\n await logs.createEmbedLog(ctx=ctx, modEmbed=embed, userEmbed=userEmbed,\n member=punishedUser, ignoreNoLogging=True, ignoreMMSG=True)\n\n\nasync def automod(ctx):\n bot = ctx.bot\n guild: discord.Guild = ctx.guild\n msg: discord.Message = ctx.message\n channel: discord.TextChannel = ctx.channel\n blState = await bot.get(guild.id, 'state')\n if not await bot.get(guild.id, 'automod'):\n return\n if blState:\n words = await bot.get(guild.id, 'words')\n if words:\n for word in words:\n if findWord(word)(msg.content.lower()):\n if not await checks.ignores_automod(ctx):\n data = await bot.db.fetchrow(\n 'SELECT points, whitelist FROM automod.blacklist WHERE sid = $1'\n , guild.id)\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n if blState == 5:\n return await add_points(ctx, data['points'],\n 'Blacklisted Word')\n else:\n return await managePunishment(ctx, blState,\n 'Blacklisted Word')\n if discordRegex.findall(msg.content):\n if await checks.ignores_automod(ctx):\n return\n data = await bot.db.fetchrow(\n 'SELECT state, whitelist, partner, points FROM automod.invites WHERE sid = $1'\n , guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n whitelistedServers = [guild.id]\n if (partner := data['partner']):\n whitelistedServers.extend([int(guildID) for guildID in partner])\n hasInvite: bool = False\n for invite in 
discordRegex.findall(msg.content):\n try:\n invite = await bot.fetch_invite(invite[0])\n except discord.NotFound:\n continue\n except discord.Forbidden:\n if state == 5:\n return await add_points(ctx, data['points'], 'Invite')\n else:\n return await managePunishment(ctx, state, 'Invite')\n if invite.guild.id not in whitelistedServers:\n hasInvite = True\n break\n if hasInvite:\n if state == 5:\n return await add_points(ctx, data['points'], 'Invite')\n else:\n return await managePunishment(ctx, state, 'Invite')\n elif linkRegex.findall(msg.content):\n if await checks.ignores_automod(ctx):\n return\n data = await bot.db.fetchrow(\n 'SELECT points, state, links, whitelist, iswhitelist FROM automod.links WHERE sid = $1'\n , guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n links = ['discord.gg', 'discord.com', 'discordapp.com', 'plyoox.net']\n if (linksData := data['links']) is not None:\n links.extend(linksData)\n linksObj = linkRegex.findall(msg.content)\n for linkObj in linksObj:\n link = linkObj[0].replace(linkObj[1], '')\n if data['iswhitelist']:\n if link not in links:\n if state == 5:\n return await add_points(ctx, data['points'], 'Link')\n else:\n return await managePunishment(ctx, state, 'Link')\n elif link in links:\n if state == 5:\n return await add_points(ctx, data['points'], 'Link')\n else:\n return await managePunishment(ctx, state, 'Link')\n if not msg.clean_content.islower() and len(msg.content) > 15:\n if await checks.ignores_automod(ctx):\n return\n lenCaps = len(re.findall('[A-ZÄÖÜ]', msg.clean_content))\n percent = lenCaps / len(msg.content)\n if percent > 0.7:\n data = await bot.db.fetchrow(\n 'SELECT points, state, whitelist FROM automod.caps WHERE sid = $1'\n , msg.guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n if state 
== 5:\n return await add_points(ctx, data['points'], 'Caps')\n else:\n return await managePunishment(ctx, state, 'Caps')\n if len(msg.raw_mentions) + len(msg.raw_role_mentions) + len(everyoneRegex\n .findall(msg.content)) >= 3:\n if await checks.ignores_automod(ctx):\n return\n lenMentions = sum(m != ctx.author.id for m in msg.raw_mentions) + len(\n msg.raw_role_mentions)\n data = await bot.db.fetchrow(\n 'SELECT state, points, count, whitelist, everyone FROM automod.mentions WHERE sid = $1'\n , guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n if data['everyone']:\n lenMentions += len(everyoneRegex.findall(msg.content))\n if lenMentions >= data['count']:\n if state == 5:\n return await add_points(ctx, data['points'], 'Mentions')\n else:\n return await managePunishment(ctx, state, 'Caps')\n",
"step-4": "import datetime\nimport json\nimport re\nimport time\nimport discord\nfrom utils.ext import standards as std, checks, context, logs\nDISCORD_INVITE = (\n '(discord(app\\\\.com\\\\/invite|\\\\.com(\\\\/invite)?|\\\\.gg)\\\\/?[a-zA-Z0-9-]{2,32})'\n )\nEXTERNAL_LINK = (\n '((https?:\\\\/\\\\/(www\\\\.)?|www\\\\.)[-a-zA-Z0-9@:%._\\\\+~#=]{1,256}\\\\.[a-zA-Z0-9()]{1,6})'\n )\nEVERYONE_MENTION = '@(here|everyone)'\ndiscordRegex = re.compile(DISCORD_INVITE, re.IGNORECASE)\nlinkRegex = re.compile(EXTERNAL_LINK, re.IGNORECASE)\neveryoneRegex = re.compile(EVERYONE_MENTION)\n\n\ndef findWord(word):\n return re.compile('\\\\b({0})\\\\b'.format(word), flags=re.IGNORECASE).search\n\n\nasync def managePunishment(ctx, punishment, reason):\n await ctx.message.delete()\n user: discord.Member = ctx.author\n msg = ctx.message.content if len(ctx.message.content\n ) < 1015 else f'{ctx.message.content[:1015]}...'\n reason = f'Automoderation: {reason}'\n embed: discord.Embed = std.getBaseModEmbed(reason, ctx.author, ctx.me)\n userEmbed: discord.Embed = std.getBaseModEmbed(reason)\n userEmbed.add_field(name=f'{std.folder_emoji} **Server**', value=ctx.\n guild.name, inline=False)\n userEmbed.add_field(name=f'{std.list_emoji} **__Message__**', value=msg,\n inline=False)\n embed.add_field(name=f'{std.channel_emoji} **__Channel__**', value=ctx.\n channel.mention, inline=False)\n embed.add_field(name=f'{std.list_emoji} **__Message__**', value=msg,\n inline=False)\n data = await ctx.bot.db.fetchrow(\n 'SELECT bantime, mutetime, muterole FROM automod.config WHERE sid = $1'\n , ctx.guild.id)\n if punishment == 1:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [KICK]'\n userEmbed.title = 'AUTOMODERATION [KICK]'\n await ctx.guild.kick(user, reason=reason)\n elif punishment == 2:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.title = 'AUTOMODERATION [BAN]'\n userEmbed.title = 'AUTOMODERATION [BAN]'\n await ctx.guild.ban(user, 
reason=reason)\n elif punishment == 3:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.title = 'AUTOMODERATION [TEMPBAN]'\n userEmbed.title = 'AUTOMODERATION [TEMPBAN]'\n unixTime = time.time() + data['bantime']\n embed.add_field(name=f'{std.date_emoji} **__Entbann__**', value\n =datetime.datetime.fromtimestamp(unixTime).strftime(\n '%d. %m. %Y um %H:%M:%S'))\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, user.id, 0, unixTime, json.dumps({'reason':\n reason}))\n await ctx.guild.ban(user, reason=reason)\n elif punishment == 4:\n if checks.hasPermsByName(ctx, ctx.me, 'manage_roles'):\n muteRole = ctx.guild.get_role(data['muterole'])\n if muteRole is None:\n return\n embed.title = 'AUTOMODERATION [TEMPMUTE]'\n userEmbed.title = 'AUTOMODERATION [TEMPMUTE]'\n unixTime = time.time() + data['mutetime']\n embed.add_field(name=f'{std.date_emoji} **__Entmute__**', value\n =datetime.datetime.fromtimestamp(unixTime).strftime(\n '%d. %m. 
%Y um %H:%M:%S'))\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, user.id, 1, unixTime, json.dumps({'reason':\n reason}))\n await user.add_roles(muteRole, reason=reason)\n await logs.createEmbedLog(ctx=ctx, modEmbed=embed, userEmbed=userEmbed,\n member=user, ignoreMMSG=True, ignoreNoLogging=True)\n\n\nasync def add_points(ctx: context, addPoints, modType, user: discord.Member\n =None):\n await ctx.message.delete()\n if user is not None:\n punishedUser: discord.Member = user\n else:\n punishedUser: discord.Message = ctx.author\n await ctx.bot.db.execute(\n 'INSERT INTO automod.users (uid, sid, points, time, reason) VALUES ($1, $2, $3, $4, $5)'\n , punishedUser.id, ctx.guild.id, addPoints, time.time(),\n f'Automoderation: {modType}')\n points = await ctx.bot.db.fetchval(\n 'SELECT sum(points) FROM automod.users WHERE uid = $1 AND sid = $2 AND $3 - time < 2592000'\n , punishedUser.id, ctx.guild.id, time.time())\n data = await ctx.bot.db.fetchrow(\n 'SELECT action, maxpoints, muterole, mutetime, bantime FROM automod.config WHERE sid = $1'\n , ctx.guild.id)\n msg: discord.Message = ctx.message\n action = data['action']\n maxPoints = data['maxpoints']\n unixTimeMute = unixTimeBan = time.time() + 86400\n if data['mutetime']:\n unixTimeMute: float = time.time() + data['mutetime']\n if data['bantime']:\n unixTimeBan: float = time.time() + data['bantime']\n message = msg.content if len(msg.content\n ) < 1015 else f'{ctx.message.content[:1015]}...'\n embed: discord.Embed = std.getBaseModEmbed(f'{modType} [+{addPoints}]',\n punishedUser)\n userEmbed: discord.Embed = std.getBaseModEmbed(f'{modType} [+{addPoints}]')\n userEmbed.add_field(name=f'{std.folder_emoji} **Server**', value=ctx.\n guild.name)\n embed.title = f'AUTOMODERATION [LOG]'\n userEmbed.title = f'AUTOMODERATION [LOG]'\n if user is not None:\n embed.add_field(name=f'{std.supporter_emoji} **__Moderator__**',\n value=ctx.author.mention, 
inline=False)\n embed.add_field(name=f'{std.channel_emoji} **__Channel__**', value=ctx.\n channel.mention, inline=False)\n embed.add_field(name=f'{std.invite_emoji} **__Punkte__**', value=\n f'{points}/{maxPoints}', inline=False)\n userEmbed.add_field(name=f'{std.invite_emoji} **__Punkte__**', value=\n f'{points}/{maxPoints}', inline=False)\n if user is None:\n userEmbed.add_field(name=f'{std.list_emoji} **__Message__**', value\n =message, inline=False)\n embed.add_field(name=f'{std.list_emoji} **__Message__**', value=\n message, inline=False)\n if points >= maxPoints:\n if action is None:\n embed.title = 'AUTOMODERATION [LOG]'\n if action == 1:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [KICK]'\n await punishedUser.kick(reason='Automoderation')\n await ctx.bot.db.execute(\n 'DELETE FROM automod.users WHERE uid = $1 AND sid = $2',\n punishedUser.id, msg.guild.id)\n else:\n return\n if action == 2:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [BAN]'\n await punishedUser.ban(reason='Automoderation')\n await ctx.bot.db.execute(\n 'DELETE FROM automod.users WHERE uid = $1 AND sid = $2',\n punishedUser.id, msg.guild.id)\n else:\n return\n if action == 3:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.add_field(name=f'{std.date_emoji} **__Entbann__**',\n value=datetime.datetime.fromtimestamp(unixTimeBan).\n strftime('%d. %m. 
%Y um %H:%M:%S'))\n embed.title = 'AUTOMODERATION [TEMPBAN]'\n await punishedUser.ban(reason='Automoderation: Punktesystem')\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, punishedUser.id, 0, unixTimeBan, json.\n dumps({'reason': 'Automoderation: Punktesystem'}))\n else:\n return\n if action == 4:\n if checks.hasPermsByName(ctx, ctx.me, 'manage_roles'):\n muteRole = ctx.guild.get_role(data['muterole'])\n if muteRole is None:\n return\n embed.add_field(name=f'{std.date_emoji} **__Entmute__**',\n value=datetime.datetime.fromtimestamp(unixTimeMute).\n strftime('%d. %m. %Y um %H:%M:%S'))\n embed.title = 'AUTOMODERATION [TEMPMUTE]'\n await punishedUser.add_roles(muteRole, reason='Automoderation')\n await ctx.bot.db.execute(\n 'DELETE FROM automod.users WHERE uid = $1 AND sid = $2',\n punishedUser.id, msg.guild.id)\n await ctx.db.execute(\n 'INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)'\n , ctx.guild.id, punishedUser.id, 1, unixTimeMute, json.\n dumps({'reason': 'Automoderation: Punktesystem'}))\n else:\n return\n await logs.createEmbedLog(ctx=ctx, modEmbed=embed, userEmbed=userEmbed,\n member=punishedUser, ignoreNoLogging=True, ignoreMMSG=True)\n\n\nasync def automod(ctx):\n bot = ctx.bot\n guild: discord.Guild = ctx.guild\n msg: discord.Message = ctx.message\n channel: discord.TextChannel = ctx.channel\n blState = await bot.get(guild.id, 'state')\n if not await bot.get(guild.id, 'automod'):\n return\n if blState:\n words = await bot.get(guild.id, 'words')\n if words:\n for word in words:\n if findWord(word)(msg.content.lower()):\n if not await checks.ignores_automod(ctx):\n data = await bot.db.fetchrow(\n 'SELECT points, whitelist FROM automod.blacklist WHERE sid = $1'\n , guild.id)\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n if blState == 5:\n return await add_points(ctx, data['points'],\n 'Blacklisted 
Word')\n else:\n return await managePunishment(ctx, blState,\n 'Blacklisted Word')\n if discordRegex.findall(msg.content):\n if await checks.ignores_automod(ctx):\n return\n data = await bot.db.fetchrow(\n 'SELECT state, whitelist, partner, points FROM automod.invites WHERE sid = $1'\n , guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n whitelistedServers = [guild.id]\n if (partner := data['partner']):\n whitelistedServers.extend([int(guildID) for guildID in partner])\n hasInvite: bool = False\n for invite in discordRegex.findall(msg.content):\n try:\n invite = await bot.fetch_invite(invite[0])\n except discord.NotFound:\n continue\n except discord.Forbidden:\n if state == 5:\n return await add_points(ctx, data['points'], 'Invite')\n else:\n return await managePunishment(ctx, state, 'Invite')\n if invite.guild.id not in whitelistedServers:\n hasInvite = True\n break\n if hasInvite:\n if state == 5:\n return await add_points(ctx, data['points'], 'Invite')\n else:\n return await managePunishment(ctx, state, 'Invite')\n elif linkRegex.findall(msg.content):\n if await checks.ignores_automod(ctx):\n return\n data = await bot.db.fetchrow(\n 'SELECT points, state, links, whitelist, iswhitelist FROM automod.links WHERE sid = $1'\n , guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n links = ['discord.gg', 'discord.com', 'discordapp.com', 'plyoox.net']\n if (linksData := data['links']) is not None:\n links.extend(linksData)\n linksObj = linkRegex.findall(msg.content)\n for linkObj in linksObj:\n link = linkObj[0].replace(linkObj[1], '')\n if data['iswhitelist']:\n if link not in links:\n if state == 5:\n return await add_points(ctx, data['points'], 'Link')\n else:\n return await managePunishment(ctx, state, 'Link')\n elif link in links:\n if state == 
5:\n return await add_points(ctx, data['points'], 'Link')\n else:\n return await managePunishment(ctx, state, 'Link')\n if not msg.clean_content.islower() and len(msg.content) > 15:\n if await checks.ignores_automod(ctx):\n return\n lenCaps = len(re.findall('[A-ZÄÖÜ]', msg.clean_content))\n percent = lenCaps / len(msg.content)\n if percent > 0.7:\n data = await bot.db.fetchrow(\n 'SELECT points, state, whitelist FROM automod.caps WHERE sid = $1'\n , msg.guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n if state == 5:\n return await add_points(ctx, data['points'], 'Caps')\n else:\n return await managePunishment(ctx, state, 'Caps')\n if len(msg.raw_mentions) + len(msg.raw_role_mentions) + len(everyoneRegex\n .findall(msg.content)) >= 3:\n if await checks.ignores_automod(ctx):\n return\n lenMentions = sum(m != ctx.author.id for m in msg.raw_mentions) + len(\n msg.raw_role_mentions)\n data = await bot.db.fetchrow(\n 'SELECT state, points, count, whitelist, everyone FROM automod.mentions WHERE sid = $1'\n , guild.id)\n if not data:\n return\n if not (state := data['state']):\n return\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n if data['everyone']:\n lenMentions += len(everyoneRegex.findall(msg.content))\n if lenMentions >= data['count']:\n if state == 5:\n return await add_points(ctx, data['points'], 'Mentions')\n else:\n return await managePunishment(ctx, state, 'Caps')\n",
"step-5": "import datetime\nimport json\nimport re\nimport time\n\nimport discord\n\nfrom utils.ext import standards as std, checks, context, logs\n\nDISCORD_INVITE = '(discord(app\\.com\\/invite|\\.com(\\/invite)?|\\.gg)\\/?[a-zA-Z0-9-]{2,32})'\nEXTERNAL_LINK = '((https?:\\/\\/(www\\.)?|www\\.)[-a-zA-Z0-9@:%._\\+~#=]{1,256}\\.[a-zA-Z0-9()]{1,6})'\nEVERYONE_MENTION = '@(here|everyone)'\ndiscordRegex = re.compile(DISCORD_INVITE, re.IGNORECASE)\nlinkRegex = re.compile(EXTERNAL_LINK, re.IGNORECASE)\neveryoneRegex = re.compile(EVERYONE_MENTION)\n\n\ndef findWord(word):\n return re.compile(r'\\b({0})\\b'.format(word), flags=re.IGNORECASE).search\n\n\nasync def managePunishment(ctx, punishment, reason):\n await ctx.message.delete()\n user: discord.Member = ctx.author\n msg = ctx.message.content if len(ctx.message.content) < 1015 else f'{ctx.message.content[:1015]}...'\n reason = f'Automoderation: {reason}'\n\n embed: discord.Embed = std.getBaseModEmbed(reason, ctx.author, ctx.me)\n userEmbed: discord.Embed = std.getBaseModEmbed(reason)\n userEmbed.add_field(name=f'{std.folder_emoji} **Server**', value=ctx.guild.name, inline=False)\n userEmbed.add_field(name=f'{std.list_emoji} **__Message__**', value=msg, inline=False)\n embed.add_field(name=f'{std.channel_emoji} **__Channel__**', value=ctx.channel.mention, inline=False)\n embed.add_field(name=f'{std.list_emoji} **__Message__**', value=msg, inline=False)\n\n data = await ctx.bot.db.fetchrow('SELECT bantime, mutetime, muterole FROM automod.config WHERE sid = $1', ctx.guild.id)\n\n if punishment == 1:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [KICK]'\n userEmbed.title = 'AUTOMODERATION [KICK]'\n await ctx.guild.kick(user, reason=reason)\n elif punishment == 2:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.title = 'AUTOMODERATION [BAN]'\n userEmbed.title = 'AUTOMODERATION [BAN]'\n await ctx.guild.ban(user, reason=reason)\n elif punishment == 3:\n if 
checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.title = 'AUTOMODERATION [TEMPBAN]'\n userEmbed.title = 'AUTOMODERATION [TEMPBAN]'\n unixTime = time.time() + data['bantime']\n embed.add_field(name=f'{std.date_emoji} **__Entbann__**', value=datetime.datetime.fromtimestamp(unixTime).strftime('%d. %m. %Y um %H:%M:%S'))\n await ctx.db.execute('INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)',\n ctx.guild.id, user.id, 0, unixTime, json.dumps({'reason': reason}))\n await ctx.guild.ban(user, reason=reason)\n elif punishment == 4:\n if checks.hasPermsByName(ctx, ctx.me, 'manage_roles'):\n muteRole = ctx.guild.get_role(data['muterole'])\n if muteRole is None:\n return\n\n embed.title = 'AUTOMODERATION [TEMPMUTE]'\n userEmbed.title = 'AUTOMODERATION [TEMPMUTE]'\n unixTime = time.time() + data['mutetime']\n embed.add_field(name=f'{std.date_emoji} **__Entmute__**', value=datetime.datetime.fromtimestamp(unixTime).strftime('%d. %m. %Y um %H:%M:%S'))\n await ctx.db.execute('INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)',\n ctx.guild.id, user.id, 1, unixTime, json.dumps({'reason': reason}))\n await user.add_roles(muteRole, reason=reason)\n\n await logs.createEmbedLog(ctx=ctx, modEmbed=embed, userEmbed=userEmbed, member=user, ignoreMMSG=True, ignoreNoLogging=True)\n\n\nasync def add_points(ctx: context, addPoints, modType, user: discord.Member = None):\n await ctx.message.delete()\n\n if user is not None:\n punishedUser: discord.Member = user\n else:\n punishedUser: discord.Message = ctx.author\n\n await ctx.bot.db.execute(\n 'INSERT INTO automod.users (uid, sid, points, time, reason) VALUES ($1, $2, $3, $4, $5)',\n punishedUser.id, ctx.guild.id, addPoints, time.time(), f'Automoderation: {modType}')\n\n points = await ctx.bot.db.fetchval('SELECT sum(points) FROM automod.users WHERE uid = $1 AND sid = $2 AND $3 - time < 2592000', punishedUser.id, ctx.guild.id, time.time())\n data = await 
ctx.bot.db.fetchrow(\"SELECT action, maxpoints, muterole, mutetime, bantime FROM automod.config WHERE sid = $1\", ctx.guild.id)\n msg: discord.Message = ctx.message\n\n action = data['action']\n maxPoints = data['maxpoints']\n unixTimeMute = unixTimeBan = time.time() + 86400\n\n if data['mutetime']:\n unixTimeMute: float = time.time() + data['mutetime']\n if data['bantime']:\n unixTimeBan: float = time.time() + data['bantime']\n\n message = msg.content if len(msg.content) < 1015 else f'{ctx.message.content[:1015]}...'\n\n embed: discord.Embed = std.getBaseModEmbed(f'{modType} [+{addPoints}]', punishedUser)\n userEmbed: discord.Embed = std.getBaseModEmbed(f'{modType} [+{addPoints}]')\n userEmbed.add_field(name=f'{std.folder_emoji} **Server**', value=ctx.guild.name)\n embed.title = f'AUTOMODERATION [LOG]'\n userEmbed.title = f'AUTOMODERATION [LOG]'\n if user is not None:\n embed.add_field(name=f'{std.supporter_emoji} **__Moderator__**', value=ctx.author.mention, inline=False)\n embed.add_field(name=f'{std.channel_emoji} **__Channel__**', value=ctx.channel.mention, inline=False)\n embed.add_field(name=f'{std.invite_emoji} **__Punkte__**', value=f'{points}/{maxPoints}', inline=False)\n userEmbed.add_field(name=f'{std.invite_emoji} **__Punkte__**', value=f'{points}/{maxPoints}', inline=False)\n if user is None:\n userEmbed.add_field(name=f'{std.list_emoji} **__Message__**', value=message, inline=False)\n embed.add_field(name=f'{std.list_emoji} **__Message__**', value=message, inline=False)\n\n if points >= maxPoints:\n if action is None:\n embed.title = 'AUTOMODERATION [LOG]'\n\n if action == 1:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 'AUTOMODERATION [KICK]'\n await punishedUser.kick(reason=\"Automoderation\")\n await ctx.bot.db.execute(\"DELETE FROM automod.users WHERE uid = $1 AND sid = $2\", punishedUser.id, msg.guild.id)\n else:\n return\n\n if action == 2:\n if checks.hasPermsByName(ctx, ctx.me, 'kick_members'):\n embed.title = 
'AUTOMODERATION [BAN]'\n await punishedUser.ban(reason=\"Automoderation\")\n await ctx.bot.db.execute(\"DELETE FROM automod.users WHERE uid = $1 AND sid = $2\", punishedUser.id, msg.guild.id)\n else:\n return\n\n if action == 3:\n if checks.hasPermsByName(ctx, ctx.me, 'ban_members'):\n embed.add_field(name=f'{std.date_emoji} **__Entbann__**', value=datetime.datetime.fromtimestamp(unixTimeBan).strftime('%d. %m. %Y um %H:%M:%S'))\n embed.title = 'AUTOMODERATION [TEMPBAN]'\n await punishedUser.ban(reason=\"Automoderation: Punktesystem\")\n await ctx.db.execute('INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)',\n ctx.guild.id, punishedUser.id, 0, unixTimeBan, json.dumps({'reason': 'Automoderation: Punktesystem'}))\n else:\n return\n if action == 4:\n if checks.hasPermsByName(ctx, ctx.me, 'manage_roles'):\n muteRole = ctx.guild.get_role(data['muterole'])\n if muteRole is None:\n return\n\n embed.add_field(name=f'{std.date_emoji} **__Entmute__**', value=datetime.datetime.fromtimestamp(unixTimeMute).strftime('%d. %m. 
%Y um %H:%M:%S'))\n embed.title = 'AUTOMODERATION [TEMPMUTE]'\n await punishedUser.add_roles(muteRole, reason='Automoderation')\n await ctx.bot.db.execute(\"DELETE FROM automod.users WHERE uid = $1 AND sid = $2\", punishedUser.id, msg.guild.id)\n await ctx.db.execute('INSERT INTO extra.timers (sid, objid, type, time, data) VALUES ($1, $2, $3, $4, $5)',\n ctx.guild.id, punishedUser.id, 1, unixTimeMute, json.dumps({'reason': 'Automoderation: Punktesystem'}))\n else:\n return\n await logs.createEmbedLog(ctx=ctx, modEmbed=embed, userEmbed=userEmbed, member=punishedUser, ignoreNoLogging=True, ignoreMMSG=True)\n\n\nasync def automod(ctx):\n bot = ctx.bot\n guild: discord.Guild = ctx.guild\n msg: discord.Message = ctx.message\n channel: discord.TextChannel = ctx.channel\n blState = await bot.get(guild.id, 'state')\n\n if not await bot.get(guild.id, 'automod'):\n return\n\n if blState:\n words = await bot.get(guild.id, 'words')\n if words:\n for word in words:\n if findWord(word)(msg.content.lower()):\n if not await checks.ignores_automod(ctx):\n data = await bot.db.fetchrow('SELECT points, whitelist FROM automod.blacklist WHERE sid = $1', guild.id)\n\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n\n if blState == 5:\n return await add_points(ctx, data['points'], 'Blacklisted Word')\n else:\n return await managePunishment(ctx, blState, 'Blacklisted Word')\n\n if discordRegex.findall(msg.content):\n if await checks.ignores_automod(ctx):\n return\n\n data = await bot.db.fetchrow(\"SELECT state, whitelist, partner, points FROM automod.invites WHERE sid = $1\", guild.id)\n if not data:\n return\n\n if not (state := data['state']):\n return\n\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n\n whitelistedServers = [guild.id]\n if partner := data['partner']:\n whitelistedServers.extend([int(guildID) for guildID in partner])\n\n hasInvite: bool = False\n for invite in discordRegex.findall(msg.content):\n 
try:\n invite = await bot.fetch_invite(invite[0])\n except discord.NotFound:\n continue\n\n except discord.Forbidden:\n if state == 5:\n return await add_points(ctx, data['points'], 'Invite')\n else:\n return await managePunishment(ctx, state, 'Invite')\n\n if invite.guild.id not in whitelistedServers:\n hasInvite = True\n break\n\n if hasInvite:\n if state == 5:\n return await add_points(ctx, data['points'], 'Invite')\n else:\n return await managePunishment(ctx, state, 'Invite')\n\n\n elif linkRegex.findall(msg.content):\n if await checks.ignores_automod(ctx):\n return\n\n data = await bot.db.fetchrow('SELECT points, state, links, whitelist, iswhitelist FROM automod.links WHERE sid = $1', guild.id)\n if not data:\n return\n\n if not (state := data['state']):\n return\n\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n\n links = ['discord.gg', 'discord.com', 'discordapp.com', 'plyoox.net']\n if (linksData := data['links']) is not None:\n links.extend(linksData)\n\n linksObj = linkRegex.findall(msg.content)\n for linkObj in linksObj:\n link = linkObj[0].replace(linkObj[1], '')\n if data['iswhitelist']:\n if link not in links:\n if state == 5:\n return await add_points(ctx, data['points'], 'Link')\n else:\n return await managePunishment(ctx, state, 'Link')\n else:\n if link in links:\n if state == 5:\n return await add_points(ctx, data['points'], 'Link')\n else:\n return await managePunishment(ctx, state, 'Link')\n\n\n if not msg.clean_content.islower() and len(msg.content) > 15:\n if await checks.ignores_automod(ctx):\n return\n\n lenCaps = len(re.findall(r'[A-ZÄÖÜ]', msg.clean_content))\n percent = lenCaps / len(msg.content)\n if percent > 0.7:\n data = await bot.db.fetchrow(\"SELECT points, state, whitelist FROM automod.caps WHERE sid = $1\", msg.guild.id)\n if not data:\n return\n\n if not (state := data['state']):\n return\n\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n\n if state == 5:\n 
return await add_points(ctx, data['points'], 'Caps')\n else:\n return await managePunishment(ctx, state, 'Caps')\n\n if len(msg.raw_mentions) + len(msg.raw_role_mentions) + len(everyoneRegex.findall(msg.content)) >= 3:\n if await checks.ignores_automod(ctx):\n return\n\n lenMentions = sum(m != ctx.author.id for m in msg.raw_mentions) + len(msg.raw_role_mentions)\n data = await bot.db.fetchrow(\n \"SELECT state, points, count, whitelist, everyone FROM automod.mentions WHERE sid = $1\",\n guild.id)\n if not data:\n return\n\n if not (state := data['state']):\n return\n\n if data['whitelist'] is not None:\n if channel.id in data['whitelist']:\n return\n\n if data['everyone']:\n lenMentions += len(everyoneRegex.findall(msg.content))\n\n if lenMentions >= data['count']:\n if state == 5:\n return await add_points(ctx, data['points'], 'Mentions')\n else:\n return await managePunishment(ctx, state, 'Caps')\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
import numpy as np
import csv
class PriceTracker:
    """Abstract price source; concrete trackers (e.g. CsvTracker) implement getValue."""
    def __init__(self):
        pass
    def getValue(self, i):
        # i: lookback offset in rows/periods from the current position (0 = current).
        # Subclasses return the price at that offset; this base does nothing.
        pass
class CsvTracker:
    """Price source backed by a CSV file; column index 1 of each row is the price."""
    def __init__(self, csv_file):
        # Start 61 rows in so lookbacks of up to 60 periods have history available.
        self.current_row = 61
        self.csv_file = csv.reader(csv_file, delimiter=',')
        # Materialize the whole file up front; getValue then indexes in memory.
        self.csv_file_content = [row for row in self.csv_file]
    def getValue(self, i):
        """Return the price (as a string) |i| rows before the current row."""
        # Offsets are always interpreted as lookbacks, regardless of sign.
        target = self.current_row - abs(i)
        return self.csv_file_content[target][1]
    def increment_time(self, i):
        """Advance the current position by i rows."""
        self.current_row = self.current_row + i
class BuyBot:
    """Decides whether or not we want to buy. Another class will poll us.

    Combines several windowed heuristics over a price tracker into one
    weighted score that poll() prints alongside the current price.
    """
    def __init__(self, price_tracker):
        # price_tracker: object exposing getValue(i) and a current_row attribute
        # (see CsvTracker). The old dead `self.price_tracker = None` pre-assignment
        # was removed.
        self.price_tracker = price_tracker
        self.heuristics = [
            BuyDerivativeWindow(60, self.price_tracker),
            BuyDerivativeWindow(15, self.price_tracker),
            BuyPriceWindow(60, self.price_tracker),
            BuyPriceWindow(15, self.price_tracker),
        ]
        # One weight per heuristic, in the same order as self.heuristics.
        self.heuristic_weights = [0.25, 0.25, 0.25, 0.25]
    def poll(self):
        """Evaluate every heuristic once and print the weighted result."""
        current_price = self.price_tracker.getValue(0)
        result = 0
        for i, heuristic in enumerate(self.heuristics):
            # Evaluate once per heuristic; the original called getCurrentValue()
            # twice (once for the log line, once for the sum).
            value = heuristic.getCurrentValue()
            print("For Heuristic %s at time %s output = %s"
                  % (i, self.price_tracker.current_row, value))
            result += value * self.heuristic_weights[i]
        print("\tresult = %s and price = %s" % (result, current_price))
class BuyPriceWindow:
    """Signals how far the most recent windowed price sits below the window mean."""
    def __init__(self, window, price_tracker):
        # window: number of past prices to sample from the tracker.
        self.window = window
        self.price_tracker = price_tracker
    def getCurrentValue(self):
        """Return mean(window prices) minus the newest of those prices."""
        # Sample oldest-first: offsets window, window-1, ..., 1.
        samples = [
            float(self.price_tracker.getValue(self.window - k))
            for k in range(0, self.window)
        ]
        return np.average(samples) - samples[-1]
class BuyDerivativeWindow:
    """Averages the first differences (discrete derivatives) of prices in a window."""
    def __init__(self, window, price_tracker):
        # window: number of past prices to sample (yields window - 1 derivatives).
        self.window = window
        self.price_tracker = price_tracker
    def getCurrentValue(self):
        """Return the mean price-to-price change across the window.

        Samples are oldest-first, so a positive result means prices have been
        rising on average. The unused `last_derivative_value` local from the
        original was removed.
        """
        price_values = [self.price_tracker.getValue(self.window - i)
                        for i in range(0, self.window)]
        # Pairwise first differences; getValue may return strings, so coerce here.
        derivative_values = [float(nxt) - float(prev)
                             for prev, nxt in zip(price_values, price_values[1:])]
        return np.average(derivative_values)
def main():
    """Run a 60-step simulation of the buy bot over the hourly bitcoin CSV."""
    file_path = './data/hourlybitcoin.csv'
    # 'with' closes the file handle the original leaked; CsvTracker caches
    # the CSV content in __init__ (see csv_file_content), so closing the
    # handle afterwards is safe -- TODO confirm against CsvTracker.
    with open(file_path, 'r') as csv_file:
        price_tracker = CsvTracker(csv_file)
    buy_bot = BuyBot(price_tracker)
    for i in range(0, 60):
        buy_bot.poll()
        price_tracker.increment_time(1)
# Script entry point: run the simulation only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
|
normal
|
{
"blob_id": "eb827998f1ba75ffb95751ddb2b31d4d0e54358b",
"index": 8273,
"step-1": "import numpy as np\nimport csv\n\nclass PriceTracker:\n\n def __init__(self):\n pass\n\n def getValue(self, i):\n pass\n\n\nclass CsvTracker:\n def __init__(self, csv_file):\n self.current_row = 61\n self.csv_file_content = []\n self.csv_file = csv.reader(csv_file, delimiter =',')\n for line in self.csv_file:\n self.csv_file_content.append(line)\n pass\n\n def getValue(self, i):\n if i > 0:\n i = i * -1\n\n desired_row = self.current_row + i\n row = self.csv_file_content[desired_row]\n return row[1]\n\n def increment_time(self, i):\n self.current_row += i\n\nclass BuyBot:\n\n \"\"\"\n @type PriceTracker\n \"\"\"\n\n\n \"\"\"\n Decides whether or not we want to buy. Another class will poll us\n \"\"\"\n def __init__(self, price_tracker):\n self.price_tracker = None\n self.heuristics = []\n self.heuristic_weights = [0.25, 0.25, 0.25, 0.25]\n self.price_tracker = price_tracker\n self.heuristics.append(BuyDerivativeWindow(60, self.price_tracker))\n self.heuristics.append(BuyDerivativeWindow(15, self.price_tracker))\n self.heuristics.append(BuyPriceWindow(60, self.price_tracker))\n self.heuristics.append(BuyPriceWindow(15, self.price_tracker))\n\n def poll(self):\n current_price = self.price_tracker.getValue(0)\n result = 0\n i = 0\n for heuristic in self.heuristics:\n print \"For Heuristic %s at time %s ouptut = %s\" % (i, self.price_tracker.current_row, heuristic.getCurrentValue())\n result += heuristic.getCurrentValue() * self.heuristic_weights[i]\n i = i+1\n print \"\\tresult = %s and price = %s\" % (result, current_price)\n\nclass BuyPriceWindow:\n def __init__(self, window, price_tracker):\n self.window = window\n self.price_tracker = price_tracker\n\n def getCurrentValue(self):\n price_values = []\n\n #store the values\n for i in range(0, self.window):\n price_values.append(float(self.price_tracker.getValue(self.window - i)))\n\n average = np.average(price_values)\n\n return average - price_values[-1]\n\n\nclass BuyDerivativeWindow:\n\n def 
__init__(self, window, price_tracker):\n self.window = window\n self.price_tracker = price_tracker\n\n def getCurrentValue(self):\n price_values = []\n derivative_values = []\n\n #store the values\n for i in range(0, self.window):\n price_values.append(self.price_tracker.getValue(self.window - i))\n\n #compute the derivatives\n length = len(price_values) - 1\n\n for i in range(0, length):\n derivative_values.append(float(price_values[i + 1]) - float(price_values[i]))\n\n average_derivative_value = np.average(derivative_values)\n last_derivative_value = derivative_values[-1]\n\n return average_derivative_value\n\ndef main():\n print 'asdf'\n file_path = './data/hourlybitcoin.csv'\n price_tracker = CsvTracker(open(file_path, 'r'))\n buy_bot = BuyBot(price_tracker)\n\n for i in range(0, 60):\n buy_bot.poll()\n price_tracker.increment_time(1)\n pass\n\nif __name__ == \"__main__\":\n main()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# SPDX-License-Identifier: Apache-2.0
# Copyright (C) 2020 ifm electronic gmbh
#
# THE PROGRAM IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND.
#
"""
This module provides the recording control GUI service for the nexxT framework.
"""
import logging
from pathlib import Path
from nexxT.Qt.QtCore import Qt, QStorageInfo
from nexxT.Qt.QtGui import QAction, QIcon, QTextOption
from nexxT.Qt.QtWidgets import QApplication, QStyle, QWidget, QBoxLayout, QToolBar, QFileDialog
from nexxT.core.Utils import assertMainThread, ElidedLabel
from nexxT.core.Exceptions import PropertyCollectionPropertyNotFound
from nexxT.interface import Services
from nexxT.services.SrvRecordingControl import MVCRecordingControlBase
logger = logging.getLogger(__name__)
class MVCRecordingControlGUI(MVCRecordingControlBase):
    """
    This service implements a GUI frontend for the recording service
    """

    def __init__(self, config):
        """
        Build the recording menu, toolbar and dock widget and wire them to
        the recording-control service and configuration signals.

        :param config: the nexxT configuration object; its configLoaded /
                       configAboutToSave signals drive state restore / save
        """
        assertMainThread()
        super().__init__(config)
        # state
        self._directory = str(Path('.').absolute())
        # gui
        srv = Services.getService("MainWindow")
        config.configLoaded.connect(self._restoreState)
        config.configAboutToSave.connect(self._saveState)
        self._config = config
        recMenu = srv.menuBar().addMenu("&Recording")
        style = QApplication.style()
        # Prefer theme icons, falling back to bundled / standard icons.
        self.actStart = QAction(QIcon.fromTheme("media-record", QIcon(":icons/media-record.svg")),
                                "Start Recording", self)
        self.actStop = QAction(QIcon.fromTheme("media-playback-stop", style.standardIcon(QStyle.SP_MediaStop)),
                               "Stop Recording", self)
        self.actSetDir = QAction(QIcon.fromTheme("document-open-folder", style.standardIcon(QStyle.SP_DirIcon)),
                                 "Choose directory ...", self)
        # All actions start disabled; _supportedFeaturesChanged enables them
        # once a recording-capable filter becomes active.
        self.actStart.setEnabled(False)
        self.actStop.setEnabled(False)
        self.actSetDir.setEnabled(False)
        self.actStart.triggered.connect(self._startTriggered)
        self.actStop.triggered.connect(self._stopTriggered)
        self.actSetDir.triggered.connect(self._setDir)
        recMenu.addAction(self.actStart)
        recMenu.addAction(self.actStop)
        recMenu.addAction(self.actSetDir)
        self.dockWidget = srv.newDockWidget("RecordingControl", None, Qt.LeftDockWidgetArea,
                                            defaultLoc="PlaybackControl")
        self.dockWidgetContents = QWidget(self.dockWidget)
        self.dockWidget.setWidget(self.dockWidgetContents)
        toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents)
        toolLayout.setContentsMargins(0, 0, 0, 0)
        toolBar = QToolBar()
        toolLayout.addWidget(toolBar)
        toolBar.addAction(self.actStart)
        toolBar.addAction(self.actStop)
        toolBar.addAction(self.actSetDir)
        # Elided, non-wrapping labels keep long paths / status lines on a
        # single line each.
        self._directoryLabel = ElidedLabel(self._directory, parent=self.dockWidgetContents)
        to = self._directoryLabel.textOption()
        to.setWrapMode(QTextOption.NoWrap)
        self._directoryLabel.setTextOption(to)
        self._directoryLabel.setElideMode(Qt.ElideMiddle)
        self._statusLabel = ElidedLabel("(disabled)", parent=self.dockWidgetContents)
        to = self._statusLabel.textOption()
        to.setWrapMode(QTextOption.NoWrap)
        self._statusLabel.setTextOption(to)
        self._statusLabel.setElideMode(Qt.ElideMiddle)
        toolLayout.addWidget(self._directoryLabel)
        toolLayout.addWidget(self._statusLabel, stretch=100)
        #toolLayout.addStretch(100)
        self.statusUpdate.connect(self._onUpdateStatus)
        self.notifyError.connect(self._onNotifyError)

    def _startTriggered(self):
        """Start recording into the configured directory and toggle actions."""
        self.startRecording(self._directory)
        self.actStart.setEnabled(False)
        self.actStop.setEnabled(True)

    def _stopTriggered(self):
        """Stop the current recording and toggle the actions back."""
        self.stopRecording()
        self.actStart.setEnabled(True)
        self.actStop.setEnabled(False)

    def _setDir(self):
        """Let the user pick the recording target directory via a dialog."""
        tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,
                                                caption="Select recording target directory",
                                                dir=self._directory)
        # An empty string means the user cancelled the dialog.
        if tdir != "" and tdir is not None:
            self._directory = str(Path(tdir).absolute())
            self._directoryLabel.setText(self._directory)

    def _supportedFeaturesChanged(self, featureset):
        """
        Enable or disable the GUI depending on whether recording is
        currently supported (non-empty featureset).

        The actSetDir enabled state doubles as the "currently enabled"
        flag, so each branch only fires on an actual transition.
        """
        if len(featureset) > 0 and not self.actSetDir.isEnabled():
            self.actStart.setEnabled(True)
            self.actStop.setEnabled(False)
            self.actSetDir.setEnabled(True)
            self._statusLabel.setText("inactive")
        elif len(featureset) == 0 and self.actSetDir.isEnabled():
            self.actStart.setEnabled(False)
            self.actStop.setEnabled(False)
            self.actSetDir.setEnabled(False)
            self._statusLabel.setText("(disabled)")

    def _onUpdateStatus(self, _, file, length, bytesWritten):
        """
        Update this file's line in the multi-line status label.

        :param file: path of the file being recorded
        :param length: recording duration in seconds; negative means unknown
        :param bytesWritten: bytes written so far; negative means unknown
        """
        # The status label holds one line per file being recorded.
        lines = self._statusLabel.text().split("\n")
        # Negative values are the "unknown" encoding used by the service.
        if length < 0:
            length = None
        if bytesWritten < 0:
            bytesWritten = None
        updated = False
        # Human-readable "bytes written" string.
        if bytesWritten is None:
            bw = "??"
        elif bytesWritten < 1024:
            bw = f"{bytesWritten:3d} bytes"
        elif bytesWritten < 1024*1024:
            bw = f"{bytesWritten/1024:.1f} kb"
        elif bytesWritten < 1024*1024*1024:
            bw = f"{bytesWritten/1024/1024:.1f} Mb"
        else:
            bw = f"{bytesWritten/1024/1024/1024:.1f} Gb"
        # Human-readable duration string.
        if length is None:
            sl = "?? s"
        elif length < 60:
            sl = f"{length:.1f} sec"
        else:
            sl = f"{length/60:.1f} min"
        # Estimate the remaining recording time from the free space on the
        # target volume, assuming the observed byte rate stays constant.
        bytesAv = QStorageInfo(file).bytesAvailable()
        if length is not None and bytesWritten is not None and bytesAv >= 0 and bytesWritten > 0:
            timeAv = length*bytesAv/bytesWritten - length
            if timeAv < 60:
                av = f"{timeAv:.1f} sec"
            elif timeAv < 3600:
                av = f"{timeAv/60:.1f} min"
            else:
                av = "> 1 hour"
        else:
            av = "?? s"
        if length is not None or bytesWritten is not None:
            newl = Path(file).name + ": " + sl + " | " + bw + " R: " + av
        else:
            newl = None
        if newl is not None:
            # Replace this file's existing line, or append a new one
            # (dropping the "inactive" placeholder if present).
            for i, l in enumerate(lines):
                if l.startswith(Path(file).name + ":"):
                    updated = True
                    lines[i] = newl
                    break
            if not updated:
                lines.append(newl)
                if lines[0] == "inactive":
                    lines = lines[1:]
        else:
            # No data at all for this file -> remove its line.
            toDel = None
            for i, l in enumerate(lines):
                if l.startswith(Path(file).name + ":"):
                    toDel = i
                    break
            if toDel is not None:
                lines = lines[:toDel] + lines[toDel+1:]
            if len(lines) == 0:
                lines.append("inactive")
        self._statusLabel.setText("\n".join(lines))

    def _onNotifyError(self, originFilter, errorDesc):
        """
        Show an error line for the given filter in the status label,
        replacing any previous line belonging to that filter.
        """
        lines = self._statusLabel.text().split("\n")
        newl = originFilter.objectName() + ": " + "ERROR: " + errorDesc
        updated = False
        for i, l in enumerate(lines):
            if l.startswith(originFilter.objectName() + ":"):
                updated = True
                lines[i] = newl
                break
        if not updated:
            lines.append(newl)
            if lines[0] == "inactive":
                lines = lines[1:]
        self._statusLabel.setText("\n".join(lines))

    def _defineProperties(self):
        """Ensure the guiState property for the target directory is defined."""
        propertyCollection = self._config.guiState()
        propertyCollection.defineProperty("RecordingControl_directory",
                                          str(Path('.').absolute()),
                                          "Target directory for recordings")

    def _saveState(self):
        """
        Saves the state of the playback control

        :return:
        """
        assertMainThread()
        self._defineProperties()
        propertyCollection = self._config.guiState()
        try:
            propertyCollection.setProperty("RecordingControl_directory", self._directory)
        except PropertyCollectionPropertyNotFound:
            pass

    def _restoreState(self):
        """
        Restores the state of the playback control from the given property collection

        :return:
        """
        assertMainThread()
        self._defineProperties()
        propertyCollection = self._config.guiState()
        logger.debug("before restore dir=%s", self._directory)
        d = propertyCollection.getProperty("RecordingControl_directory")
        # Only restore directories that still exist on this machine.
        if Path(d).exists():
            self._directory = d
            self._directoryLabel.setText(self._directory)
        logger.debug("after restore dir=%s", self._directory)
|
normal
|
{
"blob_id": "3e4771d074218fb0a77332ee61a4cc49f1c301b7",
"index": 9356,
"step-1": "<mask token>\n\n\nclass MVCRecordingControlGUI(MVCRecordingControlBase):\n <mask token>\n\n def __init__(self, config):\n assertMainThread()\n super().__init__(config)\n self._directory = str(Path('.').absolute())\n srv = Services.getService('MainWindow')\n config.configLoaded.connect(self._restoreState)\n config.configAboutToSave.connect(self._saveState)\n self._config = config\n recMenu = srv.menuBar().addMenu('&Recording')\n style = QApplication.style()\n self.actStart = QAction(QIcon.fromTheme('media-record', QIcon(\n ':icons/media-record.svg')), 'Start Recording', self)\n self.actStop = QAction(QIcon.fromTheme('media-playback-stop', style\n .standardIcon(QStyle.SP_MediaStop)), 'Stop Recording', self)\n self.actSetDir = QAction(QIcon.fromTheme('document-open-folder',\n style.standardIcon(QStyle.SP_DirIcon)), 'Choose directory ...',\n self)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self.actStart.triggered.connect(self._startTriggered)\n self.actStop.triggered.connect(self._stopTriggered)\n self.actSetDir.triggered.connect(self._setDir)\n recMenu.addAction(self.actStart)\n recMenu.addAction(self.actStop)\n recMenu.addAction(self.actSetDir)\n self.dockWidget = srv.newDockWidget('RecordingControl', None, Qt.\n LeftDockWidgetArea, defaultLoc='PlaybackControl')\n self.dockWidgetContents = QWidget(self.dockWidget)\n self.dockWidget.setWidget(self.dockWidgetContents)\n toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents\n )\n toolLayout.setContentsMargins(0, 0, 0, 0)\n toolBar = QToolBar()\n toolLayout.addWidget(toolBar)\n toolBar.addAction(self.actStart)\n toolBar.addAction(self.actStop)\n toolBar.addAction(self.actSetDir)\n self._directoryLabel = ElidedLabel(self._directory, parent=self.\n dockWidgetContents)\n to = self._directoryLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._directoryLabel.setTextOption(to)\n 
self._directoryLabel.setElideMode(Qt.ElideMiddle)\n self._statusLabel = ElidedLabel('(disabled)', parent=self.\n dockWidgetContents)\n to = self._statusLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._statusLabel.setTextOption(to)\n self._statusLabel.setElideMode(Qt.ElideMiddle)\n toolLayout.addWidget(self._directoryLabel)\n toolLayout.addWidget(self._statusLabel, stretch=100)\n self.statusUpdate.connect(self._onUpdateStatus)\n self.notifyError.connect(self._onNotifyError)\n\n def _startTriggered(self):\n self.startRecording(self._directory)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(True)\n\n def _stopTriggered(self):\n self.stopRecording()\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n\n def _setDir(self):\n tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,\n caption='Select recording target directory', dir=self._directory)\n if tdir != '' and tdir is not None:\n self._directory = str(Path(tdir).absolute())\n self._directoryLabel.setText(self._directory)\n <mask token>\n <mask token>\n <mask token>\n\n def _defineProperties(self):\n propertyCollection = self._config.guiState()\n propertyCollection.defineProperty('RecordingControl_directory', str\n (Path('.').absolute()), 'Target directory for recordings')\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass MVCRecordingControlGUI(MVCRecordingControlBase):\n <mask token>\n\n def __init__(self, config):\n assertMainThread()\n super().__init__(config)\n self._directory = str(Path('.').absolute())\n srv = Services.getService('MainWindow')\n config.configLoaded.connect(self._restoreState)\n config.configAboutToSave.connect(self._saveState)\n self._config = config\n recMenu = srv.menuBar().addMenu('&Recording')\n style = QApplication.style()\n self.actStart = QAction(QIcon.fromTheme('media-record', QIcon(\n ':icons/media-record.svg')), 'Start Recording', self)\n self.actStop = QAction(QIcon.fromTheme('media-playback-stop', style\n .standardIcon(QStyle.SP_MediaStop)), 'Stop Recording', self)\n self.actSetDir = QAction(QIcon.fromTheme('document-open-folder',\n style.standardIcon(QStyle.SP_DirIcon)), 'Choose directory ...',\n self)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self.actStart.triggered.connect(self._startTriggered)\n self.actStop.triggered.connect(self._stopTriggered)\n self.actSetDir.triggered.connect(self._setDir)\n recMenu.addAction(self.actStart)\n recMenu.addAction(self.actStop)\n recMenu.addAction(self.actSetDir)\n self.dockWidget = srv.newDockWidget('RecordingControl', None, Qt.\n LeftDockWidgetArea, defaultLoc='PlaybackControl')\n self.dockWidgetContents = QWidget(self.dockWidget)\n self.dockWidget.setWidget(self.dockWidgetContents)\n toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents\n )\n toolLayout.setContentsMargins(0, 0, 0, 0)\n toolBar = QToolBar()\n toolLayout.addWidget(toolBar)\n toolBar.addAction(self.actStart)\n toolBar.addAction(self.actStop)\n toolBar.addAction(self.actSetDir)\n self._directoryLabel = ElidedLabel(self._directory, parent=self.\n dockWidgetContents)\n to = self._directoryLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._directoryLabel.setTextOption(to)\n 
self._directoryLabel.setElideMode(Qt.ElideMiddle)\n self._statusLabel = ElidedLabel('(disabled)', parent=self.\n dockWidgetContents)\n to = self._statusLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._statusLabel.setTextOption(to)\n self._statusLabel.setElideMode(Qt.ElideMiddle)\n toolLayout.addWidget(self._directoryLabel)\n toolLayout.addWidget(self._statusLabel, stretch=100)\n self.statusUpdate.connect(self._onUpdateStatus)\n self.notifyError.connect(self._onNotifyError)\n\n def _startTriggered(self):\n self.startRecording(self._directory)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(True)\n\n def _stopTriggered(self):\n self.stopRecording()\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n\n def _setDir(self):\n tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,\n caption='Select recording target directory', dir=self._directory)\n if tdir != '' and tdir is not None:\n self._directory = str(Path(tdir).absolute())\n self._directoryLabel.setText(self._directory)\n\n def _supportedFeaturesChanged(self, featureset):\n if len(featureset) > 0 and not self.actSetDir.isEnabled():\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(True)\n self._statusLabel.setText('inactive')\n elif len(featureset) == 0 and self.actSetDir.isEnabled():\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self._statusLabel.setText('(disabled)')\n <mask token>\n <mask token>\n\n def _defineProperties(self):\n propertyCollection = self._config.guiState()\n propertyCollection.defineProperty('RecordingControl_directory', str\n (Path('.').absolute()), 'Target directory for recordings')\n <mask token>\n\n def _restoreState(self):\n \"\"\"\n Restores the state of the playback control from the given property collection\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n 
logger.debug('before restore dir=%s', self._directory)\n d = propertyCollection.getProperty('RecordingControl_directory')\n if Path(d).exists():\n self._directory = d\n self._directoryLabel.setText(self._directory)\n logger.debug('after restore dir=%s', self._directory)\n",
"step-3": "<mask token>\n\n\nclass MVCRecordingControlGUI(MVCRecordingControlBase):\n <mask token>\n\n def __init__(self, config):\n assertMainThread()\n super().__init__(config)\n self._directory = str(Path('.').absolute())\n srv = Services.getService('MainWindow')\n config.configLoaded.connect(self._restoreState)\n config.configAboutToSave.connect(self._saveState)\n self._config = config\n recMenu = srv.menuBar().addMenu('&Recording')\n style = QApplication.style()\n self.actStart = QAction(QIcon.fromTheme('media-record', QIcon(\n ':icons/media-record.svg')), 'Start Recording', self)\n self.actStop = QAction(QIcon.fromTheme('media-playback-stop', style\n .standardIcon(QStyle.SP_MediaStop)), 'Stop Recording', self)\n self.actSetDir = QAction(QIcon.fromTheme('document-open-folder',\n style.standardIcon(QStyle.SP_DirIcon)), 'Choose directory ...',\n self)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self.actStart.triggered.connect(self._startTriggered)\n self.actStop.triggered.connect(self._stopTriggered)\n self.actSetDir.triggered.connect(self._setDir)\n recMenu.addAction(self.actStart)\n recMenu.addAction(self.actStop)\n recMenu.addAction(self.actSetDir)\n self.dockWidget = srv.newDockWidget('RecordingControl', None, Qt.\n LeftDockWidgetArea, defaultLoc='PlaybackControl')\n self.dockWidgetContents = QWidget(self.dockWidget)\n self.dockWidget.setWidget(self.dockWidgetContents)\n toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents\n )\n toolLayout.setContentsMargins(0, 0, 0, 0)\n toolBar = QToolBar()\n toolLayout.addWidget(toolBar)\n toolBar.addAction(self.actStart)\n toolBar.addAction(self.actStop)\n toolBar.addAction(self.actSetDir)\n self._directoryLabel = ElidedLabel(self._directory, parent=self.\n dockWidgetContents)\n to = self._directoryLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._directoryLabel.setTextOption(to)\n 
self._directoryLabel.setElideMode(Qt.ElideMiddle)\n self._statusLabel = ElidedLabel('(disabled)', parent=self.\n dockWidgetContents)\n to = self._statusLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._statusLabel.setTextOption(to)\n self._statusLabel.setElideMode(Qt.ElideMiddle)\n toolLayout.addWidget(self._directoryLabel)\n toolLayout.addWidget(self._statusLabel, stretch=100)\n self.statusUpdate.connect(self._onUpdateStatus)\n self.notifyError.connect(self._onNotifyError)\n\n def _startTriggered(self):\n self.startRecording(self._directory)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(True)\n\n def _stopTriggered(self):\n self.stopRecording()\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n\n def _setDir(self):\n tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,\n caption='Select recording target directory', dir=self._directory)\n if tdir != '' and tdir is not None:\n self._directory = str(Path(tdir).absolute())\n self._directoryLabel.setText(self._directory)\n\n def _supportedFeaturesChanged(self, featureset):\n if len(featureset) > 0 and not self.actSetDir.isEnabled():\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(True)\n self._statusLabel.setText('inactive')\n elif len(featureset) == 0 and self.actSetDir.isEnabled():\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self._statusLabel.setText('(disabled)')\n <mask token>\n <mask token>\n\n def _defineProperties(self):\n propertyCollection = self._config.guiState()\n propertyCollection.defineProperty('RecordingControl_directory', str\n (Path('.').absolute()), 'Target directory for recordings')\n\n def _saveState(self):\n \"\"\"\n Saves the state of the playback control\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n try:\n 
propertyCollection.setProperty('RecordingControl_directory',\n self._directory)\n except PropertyCollectionPropertyNotFound:\n pass\n\n def _restoreState(self):\n \"\"\"\n Restores the state of the playback control from the given property collection\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n logger.debug('before restore dir=%s', self._directory)\n d = propertyCollection.getProperty('RecordingControl_directory')\n if Path(d).exists():\n self._directory = d\n self._directoryLabel.setText(self._directory)\n logger.debug('after restore dir=%s', self._directory)\n",
"step-4": "<mask token>\n\n\nclass MVCRecordingControlGUI(MVCRecordingControlBase):\n \"\"\"\n This service implements a GUI frontend for the recording service\n \"\"\"\n\n def __init__(self, config):\n assertMainThread()\n super().__init__(config)\n self._directory = str(Path('.').absolute())\n srv = Services.getService('MainWindow')\n config.configLoaded.connect(self._restoreState)\n config.configAboutToSave.connect(self._saveState)\n self._config = config\n recMenu = srv.menuBar().addMenu('&Recording')\n style = QApplication.style()\n self.actStart = QAction(QIcon.fromTheme('media-record', QIcon(\n ':icons/media-record.svg')), 'Start Recording', self)\n self.actStop = QAction(QIcon.fromTheme('media-playback-stop', style\n .standardIcon(QStyle.SP_MediaStop)), 'Stop Recording', self)\n self.actSetDir = QAction(QIcon.fromTheme('document-open-folder',\n style.standardIcon(QStyle.SP_DirIcon)), 'Choose directory ...',\n self)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self.actStart.triggered.connect(self._startTriggered)\n self.actStop.triggered.connect(self._stopTriggered)\n self.actSetDir.triggered.connect(self._setDir)\n recMenu.addAction(self.actStart)\n recMenu.addAction(self.actStop)\n recMenu.addAction(self.actSetDir)\n self.dockWidget = srv.newDockWidget('RecordingControl', None, Qt.\n LeftDockWidgetArea, defaultLoc='PlaybackControl')\n self.dockWidgetContents = QWidget(self.dockWidget)\n self.dockWidget.setWidget(self.dockWidgetContents)\n toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents\n )\n toolLayout.setContentsMargins(0, 0, 0, 0)\n toolBar = QToolBar()\n toolLayout.addWidget(toolBar)\n toolBar.addAction(self.actStart)\n toolBar.addAction(self.actStop)\n toolBar.addAction(self.actSetDir)\n self._directoryLabel = ElidedLabel(self._directory, parent=self.\n dockWidgetContents)\n to = self._directoryLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n 
self._directoryLabel.setTextOption(to)\n self._directoryLabel.setElideMode(Qt.ElideMiddle)\n self._statusLabel = ElidedLabel('(disabled)', parent=self.\n dockWidgetContents)\n to = self._statusLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._statusLabel.setTextOption(to)\n self._statusLabel.setElideMode(Qt.ElideMiddle)\n toolLayout.addWidget(self._directoryLabel)\n toolLayout.addWidget(self._statusLabel, stretch=100)\n self.statusUpdate.connect(self._onUpdateStatus)\n self.notifyError.connect(self._onNotifyError)\n\n def _startTriggered(self):\n self.startRecording(self._directory)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(True)\n\n def _stopTriggered(self):\n self.stopRecording()\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n\n def _setDir(self):\n tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,\n caption='Select recording target directory', dir=self._directory)\n if tdir != '' and tdir is not None:\n self._directory = str(Path(tdir).absolute())\n self._directoryLabel.setText(self._directory)\n\n def _supportedFeaturesChanged(self, featureset):\n if len(featureset) > 0 and not self.actSetDir.isEnabled():\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(True)\n self._statusLabel.setText('inactive')\n elif len(featureset) == 0 and self.actSetDir.isEnabled():\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self._statusLabel.setText('(disabled)')\n\n def _onUpdateStatus(self, _, file, length, bytesWritten):\n lines = self._statusLabel.text().split('\\n')\n if length < 0:\n length = None\n if bytesWritten < 0:\n bytesWritten = None\n updated = False\n if bytesWritten is None:\n bw = '??'\n elif bytesWritten < 1024:\n bw = f'{bytesWritten:3d} bytes'\n elif bytesWritten < 1024 * 1024:\n bw = f'{bytesWritten / 1024:.1f} kb'\n elif bytesWritten < 1024 * 1024 * 1024:\n bw = f'{bytesWritten / 1024 / 
1024:.1f} Mb'\n else:\n bw = f'{bytesWritten / 1024 / 1024 / 1024:.1f} Gb'\n if length is None:\n sl = '?? s'\n elif length < 60:\n sl = f'{length:.1f} sec'\n else:\n sl = f'{length / 60:.1f} min'\n bytesAv = QStorageInfo(file).bytesAvailable()\n if (length is not None and bytesWritten is not None and bytesAv >= \n 0 and bytesWritten > 0):\n timeAv = length * bytesAv / bytesWritten - length\n if timeAv < 60:\n av = f'{timeAv:.1f} sec'\n elif timeAv < 3600:\n av = f'{timeAv / 60:.1f} min'\n else:\n av = '> 1 hour'\n else:\n av = '?? s'\n if length is not None or bytesWritten is not None:\n newl = Path(file).name + ': ' + sl + ' | ' + bw + ' R: ' + av\n else:\n newl = None\n if newl is not None:\n for i, l in enumerate(lines):\n if l.startswith(Path(file).name + ':'):\n updated = True\n lines[i] = newl\n break\n if not updated:\n lines.append(newl)\n if lines[0] == 'inactive':\n lines = lines[1:]\n else:\n toDel = None\n for i, l in enumerate(lines):\n if l.startswith(Path(file).name + ':'):\n toDel = i\n break\n if toDel is not None:\n lines = lines[:toDel] + lines[toDel + 1:]\n if len(lines) == 0:\n lines.append('inactive')\n self._statusLabel.setText('\\n'.join(lines))\n\n def _onNotifyError(self, originFilter, errorDesc):\n lines = self._statusLabel.text().split('\\n')\n newl = originFilter.objectName() + ': ' + 'ERROR: ' + errorDesc\n updated = False\n for i, l in enumerate(lines):\n if l.startswith(originFilter.objectName() + ':'):\n updated = True\n lines[i] = newl\n break\n if not updated:\n lines.append(newl)\n if lines[0] == 'inactive':\n lines = lines[1:]\n self._statusLabel.setText('\\n'.join(lines))\n\n def _defineProperties(self):\n propertyCollection = self._config.guiState()\n propertyCollection.defineProperty('RecordingControl_directory', str\n (Path('.').absolute()), 'Target directory for recordings')\n\n def _saveState(self):\n \"\"\"\n Saves the state of the playback control\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n 
propertyCollection = self._config.guiState()\n try:\n propertyCollection.setProperty('RecordingControl_directory',\n self._directory)\n except PropertyCollectionPropertyNotFound:\n pass\n\n def _restoreState(self):\n \"\"\"\n Restores the state of the playback control from the given property collection\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n logger.debug('before restore dir=%s', self._directory)\n d = propertyCollection.getProperty('RecordingControl_directory')\n if Path(d).exists():\n self._directory = d\n self._directoryLabel.setText(self._directory)\n logger.debug('after restore dir=%s', self._directory)\n",
"step-5": "# SPDX-License-Identifier: Apache-2.0\n# Copyright (C) 2020 ifm electronic gmbh\n#\n# THE PROGRAM IS PROVIDED \"AS IS\" WITHOUT WARRANTY OF ANY KIND.\n#\n\n\"\"\"\nThis module provides the recording control GUI service for the nexxT framework.\n\"\"\"\n\nimport logging\nfrom pathlib import Path\nfrom nexxT.Qt.QtCore import Qt, QStorageInfo\nfrom nexxT.Qt.QtGui import QAction, QIcon, QTextOption\nfrom nexxT.Qt.QtWidgets import QApplication, QStyle, QWidget, QBoxLayout, QToolBar, QFileDialog\nfrom nexxT.core.Utils import assertMainThread, ElidedLabel\nfrom nexxT.core.Exceptions import PropertyCollectionPropertyNotFound\nfrom nexxT.interface import Services\nfrom nexxT.services.SrvRecordingControl import MVCRecordingControlBase\n\nlogger = logging.getLogger(__name__)\n\nclass MVCRecordingControlGUI(MVCRecordingControlBase):\n \"\"\"\n This service implements a GUI frontend for the recording service\n \"\"\"\n\n def __init__(self, config):\n assertMainThread()\n super().__init__(config)\n\n # state\n self._directory = str(Path('.').absolute())\n\n # gui\n srv = Services.getService(\"MainWindow\")\n config.configLoaded.connect(self._restoreState)\n config.configAboutToSave.connect(self._saveState)\n self._config = config\n recMenu = srv.menuBar().addMenu(\"&Recording\")\n style = QApplication.style()\n self.actStart = QAction(QIcon.fromTheme(\"media-record\", QIcon(\":icons/media-record.svg\")),\n \"Start Recording\", self)\n self.actStop = QAction(QIcon.fromTheme(\"media-playback-stop\", style.standardIcon(QStyle.SP_MediaStop)),\n \"Stop Recording\", self)\n self.actSetDir = QAction(QIcon.fromTheme(\"document-open-folder\", style.standardIcon(QStyle.SP_DirIcon)),\n \"Choose directory ...\", self)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n\n self.actStart.triggered.connect(self._startTriggered)\n self.actStop.triggered.connect(self._stopTriggered)\n self.actSetDir.triggered.connect(self._setDir)\n\n 
recMenu.addAction(self.actStart)\n recMenu.addAction(self.actStop)\n recMenu.addAction(self.actSetDir)\n\n self.dockWidget = srv.newDockWidget(\"RecordingControl\", None, Qt.LeftDockWidgetArea,\n defaultLoc=\"PlaybackControl\")\n self.dockWidgetContents = QWidget(self.dockWidget)\n self.dockWidget.setWidget(self.dockWidgetContents)\n toolLayout = QBoxLayout(QBoxLayout.TopToBottom, self.dockWidgetContents)\n toolLayout.setContentsMargins(0, 0, 0, 0)\n toolBar = QToolBar()\n toolLayout.addWidget(toolBar)\n toolBar.addAction(self.actStart)\n toolBar.addAction(self.actStop)\n toolBar.addAction(self.actSetDir)\n\n self._directoryLabel = ElidedLabel(self._directory, parent=self.dockWidgetContents)\n to = self._directoryLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._directoryLabel.setTextOption(to)\n self._directoryLabel.setElideMode(Qt.ElideMiddle)\n\n self._statusLabel = ElidedLabel(\"(disabled)\", parent=self.dockWidgetContents)\n to = self._statusLabel.textOption()\n to.setWrapMode(QTextOption.NoWrap)\n self._statusLabel.setTextOption(to)\n self._statusLabel.setElideMode(Qt.ElideMiddle)\n\n toolLayout.addWidget(self._directoryLabel)\n toolLayout.addWidget(self._statusLabel, stretch=100)\n #toolLayout.addStretch(100)\n\n self.statusUpdate.connect(self._onUpdateStatus)\n self.notifyError.connect(self._onNotifyError)\n\n def _startTriggered(self):\n self.startRecording(self._directory)\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(True)\n\n def _stopTriggered(self):\n self.stopRecording()\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n\n def _setDir(self):\n tdir = QFileDialog.getExistingDirectory(parent=self.dockWidget,\n caption=\"Select recording target directory\",\n dir=self._directory)\n if tdir != \"\" and tdir is not None:\n self._directory = str(Path(tdir).absolute())\n self._directoryLabel.setText(self._directory)\n\n def _supportedFeaturesChanged(self, featureset):\n if len(featureset) > 0 and not 
self.actSetDir.isEnabled():\n self.actStart.setEnabled(True)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(True)\n self._statusLabel.setText(\"inactive\")\n elif len(featureset) == 0 and self.actSetDir.isEnabled():\n self.actStart.setEnabled(False)\n self.actStop.setEnabled(False)\n self.actSetDir.setEnabled(False)\n self._statusLabel.setText(\"(disabled)\")\n\n def _onUpdateStatus(self, _, file, length, bytesWritten):\n lines = self._statusLabel.text().split(\"\\n\")\n if length < 0:\n length = None\n if bytesWritten < 0:\n bytesWritten = None\n updated = False\n\n if bytesWritten is None:\n bw = \"??\"\n elif bytesWritten < 1024:\n bw = f\"{bytesWritten:3d} bytes\"\n elif bytesWritten < 1024*1024:\n bw = f\"{bytesWritten/1024:.1f} kb\"\n elif bytesWritten < 1024*1024*1024:\n bw = f\"{bytesWritten/1024/1024:.1f} Mb\"\n else:\n bw = f\"{bytesWritten/1024/1024/1024:.1f} Gb\"\n\n if length is None:\n sl = \"?? s\"\n elif length < 60:\n sl = f\"{length:.1f} sec\"\n else:\n sl = f\"{length/60:.1f} min\"\n\n bytesAv = QStorageInfo(file).bytesAvailable()\n if length is not None and bytesWritten is not None and bytesAv >= 0 and bytesWritten > 0:\n timeAv = length*bytesAv/bytesWritten - length\n if timeAv < 60:\n av = f\"{timeAv:.1f} sec\"\n elif timeAv < 3600:\n av = f\"{timeAv/60:.1f} min\"\n else:\n av = \"> 1 hour\"\n else:\n av = \"?? 
s\"\n\n if length is not None or bytesWritten is not None:\n newl = Path(file).name + \": \" + sl + \" | \" + bw + \" R: \" + av\n else:\n newl = None\n\n if newl is not None:\n for i, l in enumerate(lines):\n if l.startswith(Path(file).name + \":\"):\n updated = True\n lines[i] = newl\n break\n if not updated:\n lines.append(newl)\n if lines[0] == \"inactive\":\n lines = lines[1:]\n else:\n toDel = None\n for i, l in enumerate(lines):\n if l.startswith(Path(file).name + \":\"):\n toDel = i\n break\n if toDel is not None:\n lines = lines[:toDel] + lines[toDel+1:]\n if len(lines) == 0:\n lines.append(\"inactive\")\n\n self._statusLabel.setText(\"\\n\".join(lines))\n\n def _onNotifyError(self, originFilter, errorDesc):\n lines = self._statusLabel.text().split(\"\\n\")\n newl = originFilter.objectName() + \": \" + \"ERROR: \" + errorDesc\n updated = False\n for i, l in enumerate(lines):\n if l.startswith(originFilter.objectName() + \":\"):\n updated = True\n lines[i] = newl\n break\n if not updated:\n lines.append(newl)\n if lines[0] == \"inactive\":\n lines = lines[1:]\n self._statusLabel.setText(\"\\n\".join(lines))\n\n def _defineProperties(self):\n propertyCollection = self._config.guiState()\n propertyCollection.defineProperty(\"RecordingControl_directory\",\n str(Path('.').absolute()),\n \"Target directory for recordings\")\n\n\n def _saveState(self):\n \"\"\"\n Saves the state of the playback control\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n try:\n propertyCollection.setProperty(\"RecordingControl_directory\", self._directory)\n except PropertyCollectionPropertyNotFound:\n pass\n\n def _restoreState(self):\n \"\"\"\n Restores the state of the playback control from the given property collection\n\n :return:\n \"\"\"\n assertMainThread()\n self._defineProperties()\n propertyCollection = self._config.guiState()\n logger.debug(\"before restore dir=%s\", self._directory)\n d = 
propertyCollection.getProperty(\"RecordingControl_directory\")\n if Path(d).exists():\n self._directory = d\n self._directoryLabel.setText(self._directory)\n logger.debug(\"after restore dir=%s\", self._directory)\n",
"step-ids": [
6,
8,
9,
12,
15
]
}
|
[
6,
8,
9,
12,
15
] |
__author__ = "Sarah Hazell Pickering (sarah.pickering@anu.edu.au)"
__date__ = "2018-11-15"
""" QC and Trimming with fastp
Trimming and QC with fastp.
Then subsampling of reads via seqtk.
Now starts with a sample/sample.file structure.
Number of reads to sample is can be supplied via pairs_to_sample
parameter of the sub_sample rule. Additional options can be passed
to fastp via the 'extra' parameter of the fastp rule.
Now can do multiple subsampling runs from the same script
"""
import random
#configfile: "config.json"
RAW = config["raw_dir"]
NOTATION = config["pair_notation"]
QC = "output_data/qc/"
SUB = QC + "subsamples/"
NO_READS = 50000
FASTP_PARAMS = ""
rule all:
version:
"3.0"
input:
expand(SUB + str(NO_READS) + "/{sample}_{notation}{pair}_subsample.fastq.gz",
sample = config["samples"],
notation = NOTATION,
pair = ["1", "2"])
rule fastp_simple_dir:
"""For use when all fastq files are in a single directory. """
version:
"3.5"
input:
r1_raw = RAW + "{sample}_{notation}1_001.fastq.gz",
r2_raw = RAW + "{sample}_{notation}2_001.fastq.gz"
params:
extra = FASTP_PARAMS,
html = QC + "{sample}.html",
json = QC + "{sample}.json"
#onda: "envs/fastp.yml"
output:
r1 = QC + "trimmed/{sample}_{notation}1_trim.fastq.gz",
r2 = QC + "trimmed/{sample}_{notation}2_trim.fastq.gz"
shell:
"fastp -i {input.r1_raw} -I {input.r2_raw} "
"-o {output.r1} -O {output.r2} "
"-h {params.html} -j {params.json} "
"{params.extra}"
rule fastp_twotier_dirs:
"""For use when fastq files are distributed between directories
of the same name. e.g. base_dir/{sample}/{sample}.fastq
"""
version:
"3.5"
params:
extra = FASTP_PARAMS,
html = QC + "{sample}.html",
json = QC + "{sample}.json"
input:
r1_raw = RAW + "{sample}/{sample}_{notation}1_001.fastq.gz",
r2_raw = RAW + "{sample}/{sample}_R2_001.fastq.gz"
#conda: "envs/fastp.yml"
output:
r1 = QC + "trimmed/{sample}_{notation}1_trim.fastq.gz",
r2 = QC + "trimmed/{sample}_{notation}2_trim.fastq.gz"
shell:
"fastp -i {input.r1_raw} -I {input.r2_raw} "
"-o {output.r1} -O {output.r2} "
"-h {params.html} -j {params.json} "
"{params.extra}"
rule sub_sample:
version:
"4.0"
input:
trim_reads1 = QC + "trimmed/{sample}_{notation}1_trim.fastq.gz",
trim_reads2 = QC + "trimmed/{sample}_{notation}2_trim.fastq.gz"
params:
pairs_to_sample = NO_READS
#conda: "envs/fastp.yml"
output:
subsample1 = SUB + str(NO_READS) + "/{sample}_{notation}1_subsample.fastq",
subsample2 = SUB + str(NO_READS) + "/{sample}_{notation}2_subsample.fastq"
run:
seed = random.randrange(10*len(config["samples"]))
shell("seqtk sample -s{seed} {input.trim_reads1} {params.pairs_to_sample} "
" > {output.subsample1} \n"
"seqtk sample -s{seed} {input.trim_reads2} {params.pairs_to_sample} "
" > {output.subsample2} ")
rule zipper:
input:
SUB + str(NO_READS) + "/{sample}_{notation}{pair}_subsample.fastq"
output:
SUB + str(NO_READS) + "/{sample}_{notation}{pair}_subsample.fastq.gz"
shell:
"gzip {input}"
|
normal
|
{
"blob_id": "655e6531dc21dcdf8fa827184444cee483492b81",
"index": 7715,
"step-1": "__author__ = \"Sarah Hazell Pickering (sarah.pickering@anu.edu.au)\"\n__date__ = \"2018-11-15\"\n\n\"\"\" QC and Trimming with fastp\n\n Trimming and QC with fastp.\n Then subsampling of reads via seqtk.\n\n Now starts with a sample/sample.file structure.\n\n Number of reads to sample is can be supplied via pairs_to_sample\n parameter of the sub_sample rule. Additional options can be passed\n to fastp via the 'extra' parameter of the fastp rule.\n\n Now can do multiple subsampling runs from the same script\n\"\"\"\n\nimport random\n\n#configfile: \"config.json\"\n\nRAW = config[\"raw_dir\"]\nNOTATION = config[\"pair_notation\"]\nQC = \"output_data/qc/\"\nSUB = QC + \"subsamples/\"\nNO_READS = 50000\nFASTP_PARAMS = \"\"\n\nrule all:\n version:\n \"3.0\"\n input:\n expand(SUB + str(NO_READS) + \"/{sample}_{notation}{pair}_subsample.fastq.gz\",\n sample = config[\"samples\"],\n notation = NOTATION,\n pair = [\"1\", \"2\"])\n \nrule fastp_simple_dir:\n \"\"\"For use when all fastq files are in a single directory. \"\"\"\n version:\n \"3.5\"\n input:\n r1_raw = RAW + \"{sample}_{notation}1_001.fastq.gz\",\n r2_raw = RAW + \"{sample}_{notation}2_001.fastq.gz\"\n params:\n extra = FASTP_PARAMS, \n html = QC + \"{sample}.html\",\n json = QC + \"{sample}.json\"\n #onda: \"envs/fastp.yml\"\n output:\n r1 = QC + \"trimmed/{sample}_{notation}1_trim.fastq.gz\",\n r2 = QC + \"trimmed/{sample}_{notation}2_trim.fastq.gz\"\n shell:\n \"fastp -i {input.r1_raw} -I {input.r2_raw} \"\n \"-o {output.r1} -O {output.r2} \"\n \"-h {params.html} -j {params.json} \"\n \"{params.extra}\"\n\nrule fastp_twotier_dirs:\n \"\"\"For use when fastq files are distributed between directories\n of the same name. e.g. 
base_dir/{sample}/{sample}.fastq\n \"\"\"\n version:\n \"3.5\"\n params:\n extra = FASTP_PARAMS, \n html = QC + \"{sample}.html\",\n json = QC + \"{sample}.json\"\n input:\n r1_raw = RAW + \"{sample}/{sample}_{notation}1_001.fastq.gz\",\n r2_raw = RAW + \"{sample}/{sample}_R2_001.fastq.gz\"\n #conda: \"envs/fastp.yml\"\n output:\n r1 = QC + \"trimmed/{sample}_{notation}1_trim.fastq.gz\",\n r2 = QC + \"trimmed/{sample}_{notation}2_trim.fastq.gz\"\n shell:\n \"fastp -i {input.r1_raw} -I {input.r2_raw} \"\n \"-o {output.r1} -O {output.r2} \"\n \"-h {params.html} -j {params.json} \"\n \"{params.extra}\"\n\nrule sub_sample:\n version:\n \"4.0\"\n input:\n trim_reads1 = QC + \"trimmed/{sample}_{notation}1_trim.fastq.gz\",\n trim_reads2 = QC + \"trimmed/{sample}_{notation}2_trim.fastq.gz\"\n params:\n pairs_to_sample = NO_READS\n #conda: \"envs/fastp.yml\"\n output:\n subsample1 = SUB + str(NO_READS) + \"/{sample}_{notation}1_subsample.fastq\",\n subsample2 = SUB + str(NO_READS) + \"/{sample}_{notation}2_subsample.fastq\"\n run:\n seed = random.randrange(10*len(config[\"samples\"]))\n shell(\"seqtk sample -s{seed} {input.trim_reads1} {params.pairs_to_sample} \"\n \" > {output.subsample1} \\n\"\n \"seqtk sample -s{seed} {input.trim_reads2} {params.pairs_to_sample} \"\n \" > {output.subsample2} \")\n\nrule zipper:\n input:\n SUB + str(NO_READS) + \"/{sample}_{notation}{pair}_subsample.fastq\"\n output:\n SUB + str(NO_READS) + \"/{sample}_{notation}{pair}_subsample.fastq.gz\"\n shell:\n \"gzip {input}\"\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class Test(unittest.TestCase):
<|reserved_special_token_0|>
def test(self):
workflow_input = {'result_type': 'posts'}
wf = WeiboOnline()
r = wf.run(workflow_input)
print(json.dumps(r, ensure_ascii=False, indent=2))
def tearDown(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Test(unittest.TestCase):
def setUp(self):
pass
def test(self):
workflow_input = {'result_type': 'posts'}
wf = WeiboOnline()
r = wf.run(workflow_input)
print(json.dumps(r, ensure_ascii=False, indent=2))
def tearDown(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Test(unittest.TestCase):
def setUp(self):
pass
def test(self):
workflow_input = {'result_type': 'posts'}
wf = WeiboOnline()
r = wf.run(workflow_input)
print(json.dumps(r, ensure_ascii=False, indent=2))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
import json
import unittest
from music_focus.workflows.weibo_online import WeiboOnline
class Test(unittest.TestCase):
def setUp(self):
pass
def test(self):
workflow_input = {'result_type': 'posts'}
wf = WeiboOnline()
r = wf.run(workflow_input)
print(json.dumps(r, ensure_ascii=False, indent=2))
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
flexible
|
{
"blob_id": "7088f7233b67dcb855482a76d304aacc1a26abad",
"index": 3790,
"step-1": "<mask token>\n\n\nclass Test(unittest.TestCase):\n <mask token>\n\n def test(self):\n workflow_input = {'result_type': 'posts'}\n wf = WeiboOnline()\n r = wf.run(workflow_input)\n print(json.dumps(r, ensure_ascii=False, indent=2))\n\n def tearDown(self):\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Test(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def test(self):\n workflow_input = {'result_type': 'posts'}\n wf = WeiboOnline()\n r = wf.run(workflow_input)\n print(json.dumps(r, ensure_ascii=False, indent=2))\n\n def tearDown(self):\n pass\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Test(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def test(self):\n workflow_input = {'result_type': 'posts'}\n wf = WeiboOnline()\n r = wf.run(workflow_input)\n print(json.dumps(r, ensure_ascii=False, indent=2))\n\n def tearDown(self):\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import json\nimport unittest\nfrom music_focus.workflows.weibo_online import WeiboOnline\n\n\nclass Test(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def test(self):\n workflow_input = {'result_type': 'posts'}\n wf = WeiboOnline()\n r = wf.run(workflow_input)\n print(json.dumps(r, ensure_ascii=False, indent=2))\n\n def tearDown(self):\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": null,
"step-ids": [
3,
4,
5,
6
]
}
|
[
3,
4,
5,
6
] |
from tkinter import *
import psycopg2
import sys
import pprint
import Base_de_datos
import MergeSort
class Cliente:
def __init__(self,id=None,nombre=None):
self.id=id
self.nombre=nombre
def ingresar(self):
self.ventanaIngresar= Toplevel()
self.ventanaIngresar.geometry("570x400")
self.ventanaIngresar.title("Cliente")
img = PhotoImage(file="C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png")
imagen= Label(self.ventanaIngresar, image=img)
imagen.pack()
Label(self.ventanaIngresar, text="Cliente",font=("Cambria",14)).place(x=5,y=0)
Label(self.ventanaIngresar, text="Id: ",font=("Cambria",11)).place(x=0,y=30)
Label(self.ventanaIngresar, text="Nombre: ",font=("Cambria",11)).place(x=0,y=60)
self.id=StringVar()
Entry(self.ventanaIngresar, textvariable=self.id).place(x=30,y=30)
self.nombre=StringVar()
Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65,y=60)
Button(self.ventanaIngresar,text="Guardar",font=("Cambria",11),
width=15,command=self.BD).place(x=420,y=5)
#Button(self.ventanaIngresar,text="Modificar",font=("Cambria",11),
# width=15).place(x=420,y=365)
Button(self.ventanaIngresar,text="Mostrar",font=("Cambria",11),
width=15,command=self.Mostrar).place(x=0,y=365)
Button(self.ventanaIngresar,text="Ordenar",font=("Cambria",11),
width=15, command=self.ordenamiento).place(x=220,y=365)
self.ventanaIngresar.mainloop()
def BD(self):
conectar=Base_de_datos.BaseDeDatos()
comando="INSERT INTO public.cliente(id, nombre) VALUES('"+self.id.get()+"','"+self.nombre.get()+"')"
print(comando)
conectar.cursor.execute(comando)
def Mostrar(self):
comando="SELECT * FROM cliente;"
conectar=Base_de_datos.BaseDeDatos()
conectar.cursor.execute(comando)
Scroll=Scrollbar(self.ventanaIngresar, orient=VERTICAL)
self.listbox=Listbox(self.ventanaIngresar, font=("Cambria",9), borderwidth=0, yscrollcommand=Scroll.set,height=15,relief="sunken",width=60)
self.listbox.place(x=5, y=90)
Scroll.config(command=self.listbox.yview)
Scroll.pack(side=RIGHT, fill=Y)
for dato1, dato2 in enumerate(conectar.cursor.fetchall()):
self.listbox.insert(0, "Id: {}".format(dato2[0]))
self.listbox.insert(1, "Nombre: {}".format(dato2[1]))
self.listbox.insert(2, " ")
def ordenamiento(self):
comando="SELECT id FROM cliente;"
conectar=Base_de_datos.BaseDeDatos()
conectar.cursor.execute(comando)
rows= conectar.cursor.fetchall()
ordenar=MergeSort.merge_sort(rows)
print(ordenar)
|
normal
|
{
"blob_id": "63d9aa55463123f32fd608ada83e555be4b5fe2c",
"index": 6946,
"step-1": "<mask token>\n\n\nclass Cliente:\n <mask token>\n <mask token>\n\n def BD(self):\n conectar = Base_de_datos.BaseDeDatos()\n comando = (\"INSERT INTO public.cliente(id, nombre) VALUES('\" + self\n .id.get() + \"','\" + self.nombre.get() + \"')\")\n print(comando)\n conectar.cursor.execute(comando)\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Cliente:\n\n def __init__(self, id=None, nombre=None):\n self.id = id\n self.nombre = nombre\n\n def ingresar(self):\n self.ventanaIngresar = Toplevel()\n self.ventanaIngresar.geometry('570x400')\n self.ventanaIngresar.title('Cliente')\n img = PhotoImage(file=\n 'C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png')\n imagen = Label(self.ventanaIngresar, image=img)\n imagen.pack()\n Label(self.ventanaIngresar, text='Cliente', font=('Cambria', 14)\n ).place(x=5, y=0)\n Label(self.ventanaIngresar, text='Id: ', font=('Cambria', 11)).place(x\n =0, y=30)\n Label(self.ventanaIngresar, text='Nombre: ', font=('Cambria', 11)\n ).place(x=0, y=60)\n self.id = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.id).place(x=30, y=30)\n self.nombre = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65, y=60)\n Button(self.ventanaIngresar, text='Guardar', font=('Cambria', 11),\n width=15, command=self.BD).place(x=420, y=5)\n Button(self.ventanaIngresar, text='Mostrar', font=('Cambria', 11),\n width=15, command=self.Mostrar).place(x=0, y=365)\n Button(self.ventanaIngresar, text='Ordenar', font=('Cambria', 11),\n width=15, command=self.ordenamiento).place(x=220, y=365)\n self.ventanaIngresar.mainloop()\n\n def BD(self):\n conectar = Base_de_datos.BaseDeDatos()\n comando = (\"INSERT INTO public.cliente(id, nombre) VALUES('\" + self\n .id.get() + \"','\" + self.nombre.get() + \"')\")\n print(comando)\n conectar.cursor.execute(comando)\n <mask token>\n\n def ordenamiento(self):\n comando = 'SELECT id FROM cliente;'\n conectar = Base_de_datos.BaseDeDatos()\n conectar.cursor.execute(comando)\n rows = conectar.cursor.fetchall()\n ordenar = MergeSort.merge_sort(rows)\n print(ordenar)\n",
"step-3": "<mask token>\n\n\nclass Cliente:\n\n def __init__(self, id=None, nombre=None):\n self.id = id\n self.nombre = nombre\n\n def ingresar(self):\n self.ventanaIngresar = Toplevel()\n self.ventanaIngresar.geometry('570x400')\n self.ventanaIngresar.title('Cliente')\n img = PhotoImage(file=\n 'C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png')\n imagen = Label(self.ventanaIngresar, image=img)\n imagen.pack()\n Label(self.ventanaIngresar, text='Cliente', font=('Cambria', 14)\n ).place(x=5, y=0)\n Label(self.ventanaIngresar, text='Id: ', font=('Cambria', 11)).place(x\n =0, y=30)\n Label(self.ventanaIngresar, text='Nombre: ', font=('Cambria', 11)\n ).place(x=0, y=60)\n self.id = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.id).place(x=30, y=30)\n self.nombre = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65, y=60)\n Button(self.ventanaIngresar, text='Guardar', font=('Cambria', 11),\n width=15, command=self.BD).place(x=420, y=5)\n Button(self.ventanaIngresar, text='Mostrar', font=('Cambria', 11),\n width=15, command=self.Mostrar).place(x=0, y=365)\n Button(self.ventanaIngresar, text='Ordenar', font=('Cambria', 11),\n width=15, command=self.ordenamiento).place(x=220, y=365)\n self.ventanaIngresar.mainloop()\n\n def BD(self):\n conectar = Base_de_datos.BaseDeDatos()\n comando = (\"INSERT INTO public.cliente(id, nombre) VALUES('\" + self\n .id.get() + \"','\" + self.nombre.get() + \"')\")\n print(comando)\n conectar.cursor.execute(comando)\n\n def Mostrar(self):\n comando = 'SELECT * FROM cliente;'\n conectar = Base_de_datos.BaseDeDatos()\n conectar.cursor.execute(comando)\n Scroll = Scrollbar(self.ventanaIngresar, orient=VERTICAL)\n self.listbox = Listbox(self.ventanaIngresar, font=('Cambria', 9),\n borderwidth=0, yscrollcommand=Scroll.set, height=15, relief=\n 'sunken', width=60)\n self.listbox.place(x=5, y=90)\n Scroll.config(command=self.listbox.yview)\n Scroll.pack(side=RIGHT, fill=Y)\n for dato1, dato2 in 
enumerate(conectar.cursor.fetchall()):\n self.listbox.insert(0, 'Id: {}'.format(dato2[0]))\n self.listbox.insert(1, 'Nombre: {}'.format(dato2[1]))\n self.listbox.insert(2, ' ')\n\n def ordenamiento(self):\n comando = 'SELECT id FROM cliente;'\n conectar = Base_de_datos.BaseDeDatos()\n conectar.cursor.execute(comando)\n rows = conectar.cursor.fetchall()\n ordenar = MergeSort.merge_sort(rows)\n print(ordenar)\n",
"step-4": "from tkinter import *\nimport psycopg2\nimport sys\nimport pprint\nimport Base_de_datos\nimport MergeSort\n\n\nclass Cliente:\n\n def __init__(self, id=None, nombre=None):\n self.id = id\n self.nombre = nombre\n\n def ingresar(self):\n self.ventanaIngresar = Toplevel()\n self.ventanaIngresar.geometry('570x400')\n self.ventanaIngresar.title('Cliente')\n img = PhotoImage(file=\n 'C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png')\n imagen = Label(self.ventanaIngresar, image=img)\n imagen.pack()\n Label(self.ventanaIngresar, text='Cliente', font=('Cambria', 14)\n ).place(x=5, y=0)\n Label(self.ventanaIngresar, text='Id: ', font=('Cambria', 11)).place(x\n =0, y=30)\n Label(self.ventanaIngresar, text='Nombre: ', font=('Cambria', 11)\n ).place(x=0, y=60)\n self.id = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.id).place(x=30, y=30)\n self.nombre = StringVar()\n Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65, y=60)\n Button(self.ventanaIngresar, text='Guardar', font=('Cambria', 11),\n width=15, command=self.BD).place(x=420, y=5)\n Button(self.ventanaIngresar, text='Mostrar', font=('Cambria', 11),\n width=15, command=self.Mostrar).place(x=0, y=365)\n Button(self.ventanaIngresar, text='Ordenar', font=('Cambria', 11),\n width=15, command=self.ordenamiento).place(x=220, y=365)\n self.ventanaIngresar.mainloop()\n\n def BD(self):\n conectar = Base_de_datos.BaseDeDatos()\n comando = (\"INSERT INTO public.cliente(id, nombre) VALUES('\" + self\n .id.get() + \"','\" + self.nombre.get() + \"')\")\n print(comando)\n conectar.cursor.execute(comando)\n\n def Mostrar(self):\n comando = 'SELECT * FROM cliente;'\n conectar = Base_de_datos.BaseDeDatos()\n conectar.cursor.execute(comando)\n Scroll = Scrollbar(self.ventanaIngresar, orient=VERTICAL)\n self.listbox = Listbox(self.ventanaIngresar, font=('Cambria', 9),\n borderwidth=0, yscrollcommand=Scroll.set, height=15, relief=\n 'sunken', width=60)\n self.listbox.place(x=5, y=90)\n 
Scroll.config(command=self.listbox.yview)\n Scroll.pack(side=RIGHT, fill=Y)\n for dato1, dato2 in enumerate(conectar.cursor.fetchall()):\n self.listbox.insert(0, 'Id: {}'.format(dato2[0]))\n self.listbox.insert(1, 'Nombre: {}'.format(dato2[1]))\n self.listbox.insert(2, ' ')\n\n def ordenamiento(self):\n comando = 'SELECT id FROM cliente;'\n conectar = Base_de_datos.BaseDeDatos()\n conectar.cursor.execute(comando)\n rows = conectar.cursor.fetchall()\n ordenar = MergeSort.merge_sort(rows)\n print(ordenar)\n",
"step-5": "from tkinter import *\r\nimport psycopg2\r\nimport sys\r\nimport pprint\r\nimport Base_de_datos\r\nimport MergeSort\r\n\r\nclass Cliente:\r\n def __init__(self,id=None,nombre=None):\r\n self.id=id\r\n self.nombre=nombre\r\n def ingresar(self):\r\n self.ventanaIngresar= Toplevel()\r\n self.ventanaIngresar.geometry(\"570x400\")\r\n self.ventanaIngresar.title(\"Cliente\")\r\n img = PhotoImage(file=\"C:/Users/checo/Desktop/41-INVERSION-MEDIOS-DIGITALES.png\")\r\n imagen= Label(self.ventanaIngresar, image=img)\r\n imagen.pack()\r\n Label(self.ventanaIngresar, text=\"Cliente\",font=(\"Cambria\",14)).place(x=5,y=0)\r\n Label(self.ventanaIngresar, text=\"Id: \",font=(\"Cambria\",11)).place(x=0,y=30)\r\n Label(self.ventanaIngresar, text=\"Nombre: \",font=(\"Cambria\",11)).place(x=0,y=60)\r\n\r\n self.id=StringVar()\r\n Entry(self.ventanaIngresar, textvariable=self.id).place(x=30,y=30)\r\n self.nombre=StringVar()\r\n Entry(self.ventanaIngresar, textvariable=self.nombre).place(x=65,y=60) \r\n \r\n Button(self.ventanaIngresar,text=\"Guardar\",font=(\"Cambria\",11),\r\n width=15,command=self.BD).place(x=420,y=5)\r\n \r\n #Button(self.ventanaIngresar,text=\"Modificar\",font=(\"Cambria\",11),\r\n # width=15).place(x=420,y=365)\r\n \r\n Button(self.ventanaIngresar,text=\"Mostrar\",font=(\"Cambria\",11),\r\n width=15,command=self.Mostrar).place(x=0,y=365)\r\n \r\n Button(self.ventanaIngresar,text=\"Ordenar\",font=(\"Cambria\",11),\r\n width=15, command=self.ordenamiento).place(x=220,y=365)\r\n \r\n self.ventanaIngresar.mainloop()\r\n \r\n def BD(self):\r\n conectar=Base_de_datos.BaseDeDatos()\r\n comando=\"INSERT INTO public.cliente(id, nombre) VALUES('\"+self.id.get()+\"','\"+self.nombre.get()+\"')\"\r\n print(comando)\r\n conectar.cursor.execute(comando)\r\n def Mostrar(self):\r\n comando=\"SELECT * FROM cliente;\"\r\n conectar=Base_de_datos.BaseDeDatos()\r\n conectar.cursor.execute(comando)\r\n Scroll=Scrollbar(self.ventanaIngresar, orient=VERTICAL)\r\n 
self.listbox=Listbox(self.ventanaIngresar, font=(\"Cambria\",9), borderwidth=0, yscrollcommand=Scroll.set,height=15,relief=\"sunken\",width=60)\r\n self.listbox.place(x=5, y=90)\r\n Scroll.config(command=self.listbox.yview)\r\n Scroll.pack(side=RIGHT, fill=Y)\r\n for dato1, dato2 in enumerate(conectar.cursor.fetchall()):\r\n self.listbox.insert(0, \"Id: {}\".format(dato2[0]))\r\n self.listbox.insert(1, \"Nombre: {}\".format(dato2[1]))\r\n self.listbox.insert(2, \" \")\r\n def ordenamiento(self):\r\n comando=\"SELECT id FROM cliente;\"\r\n conectar=Base_de_datos.BaseDeDatos()\r\n conectar.cursor.execute(comando)\r\n rows= conectar.cursor.fetchall()\r\n ordenar=MergeSort.merge_sort(rows)\r\n print(ordenar)\r\n\r\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def read_lookup_table(hole_cards, lookup_table):
"""
Reads the preflop lookup table preflop_EHSs.txt.
Args:
hole_cards: list of int (deuces cards)
lookup_table: read from preflop_EHSs.txt
Return:
tuple (float, float): EHS, EHS^2
"""
sorted_hole = sorted(hole_cards)
sorted_hole.reverse()
card_strings = [Card.int_to_str(card) for card in sorted_hole]
if card_strings[0][1] != card_strings[1][1]:
suited = False
else:
suited = True
card_strings[0] = card_strings[0][0] + 'd'
if suited:
card_strings[1] = card_strings[1][0] + 'd'
else:
card_strings[1] = card_strings[1][0] + 's'
card_strings = tuple(card_strings)
return lookup_table[card_strings]
<|reserved_special_token_1|>
from deuces.card import Card
from deuces.deck import Deck
from fast_utils.hand_strength.original_HS import *
from fast_utils.hand_strength.nn_HS import encode_hs
from fast_utils.expected_hand_strength.nn_EHS import *
from keras.models import load_model
def read_lookup_table(hole_cards, lookup_table):
"""
Reads the preflop lookup table preflop_EHSs.txt.
Args:
hole_cards: list of int (deuces cards)
lookup_table: read from preflop_EHSs.txt
Return:
tuple (float, float): EHS, EHS^2
"""
sorted_hole = sorted(hole_cards)
sorted_hole.reverse()
card_strings = [Card.int_to_str(card) for card in sorted_hole]
if card_strings[0][1] != card_strings[1][1]:
suited = False
else:
suited = True
card_strings[0] = card_strings[0][0] + 'd'
if suited:
card_strings[1] = card_strings[1][0] + 'd'
else:
card_strings[1] = card_strings[1][0] + 's'
card_strings = tuple(card_strings)
return lookup_table[card_strings]
|
flexible
|
{
"blob_id": "8503998fc881f47dc695d3ea4c7f56fa65a96e8a",
"index": 2874,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef read_lookup_table(hole_cards, lookup_table):\n \"\"\"\n Reads the preflop lookup table preflop_EHSs.txt.\n Args: \n hole_cards: list of int (deuces cards)\n lookup_table: read from preflop_EHSs.txt\n Return:\n tuple (float, float): EHS, EHS^2\n \"\"\"\n sorted_hole = sorted(hole_cards)\n sorted_hole.reverse()\n card_strings = [Card.int_to_str(card) for card in sorted_hole]\n if card_strings[0][1] != card_strings[1][1]:\n suited = False\n else:\n suited = True\n card_strings[0] = card_strings[0][0] + 'd'\n if suited:\n card_strings[1] = card_strings[1][0] + 'd'\n else:\n card_strings[1] = card_strings[1][0] + 's'\n card_strings = tuple(card_strings)\n return lookup_table[card_strings]\n",
"step-3": "from deuces.card import Card\nfrom deuces.deck import Deck\nfrom fast_utils.hand_strength.original_HS import *\nfrom fast_utils.hand_strength.nn_HS import encode_hs\nfrom fast_utils.expected_hand_strength.nn_EHS import *\nfrom keras.models import load_model\n\n\ndef read_lookup_table(hole_cards, lookup_table):\n \"\"\"\n Reads the preflop lookup table preflop_EHSs.txt.\n Args: \n hole_cards: list of int (deuces cards)\n lookup_table: read from preflop_EHSs.txt\n Return:\n tuple (float, float): EHS, EHS^2\n \"\"\"\n sorted_hole = sorted(hole_cards)\n sorted_hole.reverse()\n card_strings = [Card.int_to_str(card) for card in sorted_hole]\n if card_strings[0][1] != card_strings[1][1]:\n suited = False\n else:\n suited = True\n card_strings[0] = card_strings[0][0] + 'd'\n if suited:\n card_strings[1] = card_strings[1][0] + 'd'\n else:\n card_strings[1] = card_strings[1][0] + 's'\n card_strings = tuple(card_strings)\n return lookup_table[card_strings]\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#!/usr/bin/env python
from django import template
from django.conf import settings
from django.utils.html import format_html
register = template.Library()
@register.simple_tag
def website_title():
return settings.WEBSITE_TITLE
def split_page(result_obj):
"""
分页模块,后台传入一个分页结果集就可以
:param result_obj:
:return:
"""
return_str = "<nav>"
return_str += "<ul class='pagination pull-right'>"
if result_obj.has_previous():
return_str += "<li>"
return_str += "<a href='?page=" + str(result_obj.previous_page_number()) + "' aria-label='Previous'>"
return_str += "<span aria-hidden='true'>«</span>"
return_str += "</a></li>"
for i in result_obj.paginator.page_range:
# print(i,result_obj.paginator.page_range,result_obj.number)
hide_page_num = abs(result_obj.number - i)
if hide_page_num <= 3: # 3为当前页前后显示多少个
return_str += "<li "
if i == result_obj.number:
return_str += "class='active'><a href='?page=" + str(i) + "'>" + str(i) + "</a></li>"
else:
return_str += "><a href='?page=" + str(i) + "'>" + str(i) + "</a></li>"
if result_obj.has_next():
return_str += "<li><a href='?page=" + str(result_obj.next_page_number()) + "' aria-label='Next'>"
return_str += "<span aria-hidden='true'>»</span></a></li></ul></nav>"
#return format_html(return_str)
return return_str
@register.simple_tag
def test(string):
return string
|
normal
|
{
"blob_id": "c2c51dcd05c21e91e591de25fc2de034c88c48a1",
"index": 9052,
"step-1": "<mask token>\n\n\ndef split_page(result_obj):\n \"\"\"\n 分页模块,后台传入一个分页结果集就可以\n :param result_obj:\n :return:\n \"\"\"\n return_str = '<nav>'\n return_str += \"<ul class='pagination pull-right'>\"\n if result_obj.has_previous():\n return_str += '<li>'\n return_str += \"<a href='?page=\" + str(result_obj.previous_page_number()\n ) + \"' aria-label='Previous'>\"\n return_str += \"<span aria-hidden='true'>«</span>\"\n return_str += '</a></li>'\n for i in result_obj.paginator.page_range:\n hide_page_num = abs(result_obj.number - i)\n if hide_page_num <= 3:\n return_str += '<li '\n if i == result_obj.number:\n return_str += \"class='active'><a href='?page=\" + str(i\n ) + \"'>\" + str(i) + '</a></li>'\n else:\n return_str += \"><a href='?page=\" + str(i) + \"'>\" + str(i\n ) + '</a></li>'\n if result_obj.has_next():\n return_str += \"<li><a href='?page=\" + str(result_obj.next_page_number()\n ) + \"' aria-label='Next'>\"\n return_str += (\n \"<span aria-hidden='true'>»</span></a></li></ul></nav>\")\n return return_str\n\n\n@register.simple_tag\ndef test(string):\n return string\n",
"step-2": "<mask token>\n\n\n@register.simple_tag\ndef website_title():\n return settings.WEBSITE_TITLE\n\n\ndef split_page(result_obj):\n \"\"\"\n 分页模块,后台传入一个分页结果集就可以\n :param result_obj:\n :return:\n \"\"\"\n return_str = '<nav>'\n return_str += \"<ul class='pagination pull-right'>\"\n if result_obj.has_previous():\n return_str += '<li>'\n return_str += \"<a href='?page=\" + str(result_obj.previous_page_number()\n ) + \"' aria-label='Previous'>\"\n return_str += \"<span aria-hidden='true'>«</span>\"\n return_str += '</a></li>'\n for i in result_obj.paginator.page_range:\n hide_page_num = abs(result_obj.number - i)\n if hide_page_num <= 3:\n return_str += '<li '\n if i == result_obj.number:\n return_str += \"class='active'><a href='?page=\" + str(i\n ) + \"'>\" + str(i) + '</a></li>'\n else:\n return_str += \"><a href='?page=\" + str(i) + \"'>\" + str(i\n ) + '</a></li>'\n if result_obj.has_next():\n return_str += \"<li><a href='?page=\" + str(result_obj.next_page_number()\n ) + \"' aria-label='Next'>\"\n return_str += (\n \"<span aria-hidden='true'>»</span></a></li></ul></nav>\")\n return return_str\n\n\n@register.simple_tag\ndef test(string):\n return string\n",
"step-3": "<mask token>\nregister = template.Library()\n\n\n@register.simple_tag\ndef website_title():\n return settings.WEBSITE_TITLE\n\n\ndef split_page(result_obj):\n \"\"\"\n 分页模块,后台传入一个分页结果集就可以\n :param result_obj:\n :return:\n \"\"\"\n return_str = '<nav>'\n return_str += \"<ul class='pagination pull-right'>\"\n if result_obj.has_previous():\n return_str += '<li>'\n return_str += \"<a href='?page=\" + str(result_obj.previous_page_number()\n ) + \"' aria-label='Previous'>\"\n return_str += \"<span aria-hidden='true'>«</span>\"\n return_str += '</a></li>'\n for i in result_obj.paginator.page_range:\n hide_page_num = abs(result_obj.number - i)\n if hide_page_num <= 3:\n return_str += '<li '\n if i == result_obj.number:\n return_str += \"class='active'><a href='?page=\" + str(i\n ) + \"'>\" + str(i) + '</a></li>'\n else:\n return_str += \"><a href='?page=\" + str(i) + \"'>\" + str(i\n ) + '</a></li>'\n if result_obj.has_next():\n return_str += \"<li><a href='?page=\" + str(result_obj.next_page_number()\n ) + \"' aria-label='Next'>\"\n return_str += (\n \"<span aria-hidden='true'>»</span></a></li></ul></nav>\")\n return return_str\n\n\n@register.simple_tag\ndef test(string):\n return string\n",
"step-4": "from django import template\nfrom django.conf import settings\nfrom django.utils.html import format_html\nregister = template.Library()\n\n\n@register.simple_tag\ndef website_title():\n return settings.WEBSITE_TITLE\n\n\ndef split_page(result_obj):\n \"\"\"\n 分页模块,后台传入一个分页结果集就可以\n :param result_obj:\n :return:\n \"\"\"\n return_str = '<nav>'\n return_str += \"<ul class='pagination pull-right'>\"\n if result_obj.has_previous():\n return_str += '<li>'\n return_str += \"<a href='?page=\" + str(result_obj.previous_page_number()\n ) + \"' aria-label='Previous'>\"\n return_str += \"<span aria-hidden='true'>«</span>\"\n return_str += '</a></li>'\n for i in result_obj.paginator.page_range:\n hide_page_num = abs(result_obj.number - i)\n if hide_page_num <= 3:\n return_str += '<li '\n if i == result_obj.number:\n return_str += \"class='active'><a href='?page=\" + str(i\n ) + \"'>\" + str(i) + '</a></li>'\n else:\n return_str += \"><a href='?page=\" + str(i) + \"'>\" + str(i\n ) + '</a></li>'\n if result_obj.has_next():\n return_str += \"<li><a href='?page=\" + str(result_obj.next_page_number()\n ) + \"' aria-label='Next'>\"\n return_str += (\n \"<span aria-hidden='true'>»</span></a></li></ul></nav>\")\n return return_str\n\n\n@register.simple_tag\ndef test(string):\n return string\n",
"step-5": "#!/usr/bin/env python\nfrom django import template\nfrom django.conf import settings\nfrom django.utils.html import format_html\n\n\nregister = template.Library()\n\n@register.simple_tag\ndef website_title():\n return settings.WEBSITE_TITLE\n\n\ndef split_page(result_obj):\n \"\"\"\n 分页模块,后台传入一个分页结果集就可以\n :param result_obj:\n :return:\n \"\"\"\n return_str = \"<nav>\"\n return_str += \"<ul class='pagination pull-right'>\"\n if result_obj.has_previous():\n return_str += \"<li>\"\n return_str += \"<a href='?page=\" + str(result_obj.previous_page_number()) + \"' aria-label='Previous'>\"\n return_str += \"<span aria-hidden='true'>«</span>\"\n return_str += \"</a></li>\"\n\n for i in result_obj.paginator.page_range:\n # print(i,result_obj.paginator.page_range,result_obj.number)\n hide_page_num = abs(result_obj.number - i)\n if hide_page_num <= 3: # 3为当前页前后显示多少个\n return_str += \"<li \"\n if i == result_obj.number:\n return_str += \"class='active'><a href='?page=\" + str(i) + \"'>\" + str(i) + \"</a></li>\"\n else:\n return_str += \"><a href='?page=\" + str(i) + \"'>\" + str(i) + \"</a></li>\"\n\n if result_obj.has_next():\n return_str += \"<li><a href='?page=\" + str(result_obj.next_page_number()) + \"' aria-label='Next'>\"\n return_str += \"<span aria-hidden='true'>»</span></a></li></ul></nav>\"\n\n #return format_html(return_str)\n return return_str\n\n\n@register.simple_tag\ndef test(string):\n return string\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class BusRoute(Base):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class BusRoutePos(Base):
__tablename__ = 'bus_route_pos'
id = Column(Integer, primary_key=True)
route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=
False)
lat = Column(String)
lon = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusPos(Base):
__tablename__ = 'bus_pos'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer, ForeignKey('bus_lines.line_id'), nullable
=False)
bus_internal_id = Column(Integer)
lat = Column(String)
lon = Column(String)
orientation = Column(Integer)
timestamp = Column(Integer)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusStop(Base):
__tablename__ = 'bus_stops'
id = Column(Integer, primary_key=True)
route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=
False)
lat = Column(String)
lon = Column(String)
stop_code = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusTrip(Base):
__tablename__ = 'bus_trip'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer)
bus_internal_id = Column(Integer)
route_id = Column(Integer)
last_updated = Column(DateTime, default=datetime.utcnow)
last_pos_timestamp = Column(Integer, default=0)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BusLine(Base):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class BusRoute(Base):
__tablename__ = 'bus_routes'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer)
route_id = Column(Integer)
route_description = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusRoutePos(Base):
__tablename__ = 'bus_route_pos'
id = Column(Integer, primary_key=True)
route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=
False)
lat = Column(String)
lon = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusPos(Base):
__tablename__ = 'bus_pos'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer, ForeignKey('bus_lines.line_id'), nullable
=False)
bus_internal_id = Column(Integer)
lat = Column(String)
lon = Column(String)
orientation = Column(Integer)
timestamp = Column(Integer)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusStop(Base):
__tablename__ = 'bus_stops'
id = Column(Integer, primary_key=True)
route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=
False)
lat = Column(String)
lon = Column(String)
stop_code = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusTrip(Base):
__tablename__ = 'bus_trip'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer)
bus_internal_id = Column(Integer)
route_id = Column(Integer)
last_updated = Column(DateTime, default=datetime.utcnow)
last_pos_timestamp = Column(Integer, default=0)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BusLine(Base):
__tablename__ = 'bus_lines'
id = Column(Integer, primary_key=True)
line_id = Column(Integer)
line_description = Column(String)
class BusRoute(Base):
__tablename__ = 'bus_routes'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer)
route_id = Column(Integer)
route_description = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusRoutePos(Base):
__tablename__ = 'bus_route_pos'
id = Column(Integer, primary_key=True)
route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=
False)
lat = Column(String)
lon = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusPos(Base):
__tablename__ = 'bus_pos'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer, ForeignKey('bus_lines.line_id'), nullable
=False)
bus_internal_id = Column(Integer)
lat = Column(String)
lon = Column(String)
orientation = Column(Integer)
timestamp = Column(Integer)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusStop(Base):
__tablename__ = 'bus_stops'
id = Column(Integer, primary_key=True)
route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=
False)
lat = Column(String)
lon = Column(String)
stop_code = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusTrip(Base):
__tablename__ = 'bus_trip'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer)
bus_internal_id = Column(Integer)
route_id = Column(Integer)
last_updated = Column(DateTime, default=datetime.utcnow)
last_pos_timestamp = Column(Integer, default=0)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
Base = declarative_base()
class BusLine(Base):
__tablename__ = 'bus_lines'
id = Column(Integer, primary_key=True)
line_id = Column(Integer)
line_description = Column(String)
class BusRoute(Base):
__tablename__ = 'bus_routes'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer)
route_id = Column(Integer)
route_description = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusRoutePos(Base):
__tablename__ = 'bus_route_pos'
id = Column(Integer, primary_key=True)
route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=
False)
lat = Column(String)
lon = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusPos(Base):
__tablename__ = 'bus_pos'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer, ForeignKey('bus_lines.line_id'), nullable
=False)
bus_internal_id = Column(Integer)
lat = Column(String)
lon = Column(String)
orientation = Column(Integer)
timestamp = Column(Integer)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusStop(Base):
__tablename__ = 'bus_stops'
id = Column(Integer, primary_key=True)
route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=
False)
lat = Column(String)
lon = Column(String)
stop_code = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusTrip(Base):
__tablename__ = 'bus_trip'
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer)
bus_internal_id = Column(Integer)
route_id = Column(Integer)
last_updated = Column(DateTime, default=datetime.utcnow)
last_pos_timestamp = Column(Integer, default=0)
<|reserved_special_token_1|>
from datetime import datetime
from sqlalchemy import Column, Integer, String, ForeignKey, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
Base = declarative_base()
class BusLine(Base):
__tablename__ = "bus_lines"
id = Column(Integer, primary_key=True)
line_id = Column(Integer)
line_description = Column(String)
class BusRoute(Base):
__tablename__ = "bus_routes"
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer)
route_id = Column(Integer)
route_description = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusRoutePos(Base):
__tablename__ = "bus_route_pos"
id = Column(Integer, primary_key=True)
route_id = Column(Integer, ForeignKey("bus_routes.route_id"), nullable=False)
lat = Column(String)
lon = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusPos(Base):
__tablename__ = "bus_pos"
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer, ForeignKey("bus_lines.line_id"), nullable=False)
bus_internal_id = Column(Integer)
lat = Column(String)
lon = Column(String)
orientation = Column(Integer)
timestamp = Column(Integer)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusStop(Base):
__tablename__ = "bus_stops"
id = Column(Integer, primary_key=True)
route_id = Column(Integer, ForeignKey("bus_routes.route_id"), nullable=False)
lat = Column(String)
lon = Column(String)
stop_code = Column(String)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class BusTrip(Base):
__tablename__ = "bus_trip"
id = Column(Integer, primary_key=True)
bus_line_id = Column(Integer)
bus_internal_id = Column(Integer)
route_id = Column(Integer)
last_updated = Column(DateTime, default=datetime.utcnow)
last_pos_timestamp = Column(Integer, default=0)
|
flexible
|
{
"blob_id": "9e896d935cc57e580ed46cd501b41053bbaab38f",
"index": 6490,
"step-1": "<mask token>\n\n\nclass BusRoute(Base):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass BusRoutePos(Base):\n __tablename__ = 'bus_route_pos'\n id = Column(Integer, primary_key=True)\n route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=\n False)\n lat = Column(String)\n lon = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusPos(Base):\n __tablename__ = 'bus_pos'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer, ForeignKey('bus_lines.line_id'), nullable\n =False)\n bus_internal_id = Column(Integer)\n lat = Column(String)\n lon = Column(String)\n orientation = Column(Integer)\n timestamp = Column(Integer)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusStop(Base):\n __tablename__ = 'bus_stops'\n id = Column(Integer, primary_key=True)\n route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=\n False)\n lat = Column(String)\n lon = Column(String)\n stop_code = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusTrip(Base):\n __tablename__ = 'bus_trip'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer)\n bus_internal_id = Column(Integer)\n route_id = Column(Integer)\n last_updated = Column(DateTime, default=datetime.utcnow)\n last_pos_timestamp = Column(Integer, default=0)\n",
"step-2": "<mask token>\n\n\nclass BusLine(Base):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass BusRoute(Base):\n __tablename__ = 'bus_routes'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer)\n route_id = Column(Integer)\n route_description = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusRoutePos(Base):\n __tablename__ = 'bus_route_pos'\n id = Column(Integer, primary_key=True)\n route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=\n False)\n lat = Column(String)\n lon = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusPos(Base):\n __tablename__ = 'bus_pos'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer, ForeignKey('bus_lines.line_id'), nullable\n =False)\n bus_internal_id = Column(Integer)\n lat = Column(String)\n lon = Column(String)\n orientation = Column(Integer)\n timestamp = Column(Integer)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusStop(Base):\n __tablename__ = 'bus_stops'\n id = Column(Integer, primary_key=True)\n route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=\n False)\n lat = Column(String)\n lon = Column(String)\n stop_code = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusTrip(Base):\n __tablename__ = 'bus_trip'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer)\n bus_internal_id = Column(Integer)\n route_id = Column(Integer)\n last_updated = Column(DateTime, default=datetime.utcnow)\n last_pos_timestamp = Column(Integer, default=0)\n",
"step-3": "<mask token>\n\n\nclass BusLine(Base):\n __tablename__ = 'bus_lines'\n id = Column(Integer, primary_key=True)\n line_id = Column(Integer)\n line_description = Column(String)\n\n\nclass BusRoute(Base):\n __tablename__ = 'bus_routes'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer)\n route_id = Column(Integer)\n route_description = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusRoutePos(Base):\n __tablename__ = 'bus_route_pos'\n id = Column(Integer, primary_key=True)\n route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=\n False)\n lat = Column(String)\n lon = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusPos(Base):\n __tablename__ = 'bus_pos'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer, ForeignKey('bus_lines.line_id'), nullable\n =False)\n bus_internal_id = Column(Integer)\n lat = Column(String)\n lon = Column(String)\n orientation = Column(Integer)\n timestamp = Column(Integer)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusStop(Base):\n __tablename__ = 'bus_stops'\n id = Column(Integer, primary_key=True)\n route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=\n False)\n lat = Column(String)\n lon = Column(String)\n stop_code = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusTrip(Base):\n __tablename__ = 'bus_trip'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer)\n bus_internal_id = Column(Integer)\n route_id = Column(Integer)\n last_updated = Column(DateTime, default=datetime.utcnow)\n last_pos_timestamp = Column(Integer, default=0)\n",
"step-4": "<mask token>\nBase = declarative_base()\n\n\nclass BusLine(Base):\n __tablename__ = 'bus_lines'\n id = Column(Integer, primary_key=True)\n line_id = Column(Integer)\n line_description = Column(String)\n\n\nclass BusRoute(Base):\n __tablename__ = 'bus_routes'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer)\n route_id = Column(Integer)\n route_description = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusRoutePos(Base):\n __tablename__ = 'bus_route_pos'\n id = Column(Integer, primary_key=True)\n route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=\n False)\n lat = Column(String)\n lon = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusPos(Base):\n __tablename__ = 'bus_pos'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer, ForeignKey('bus_lines.line_id'), nullable\n =False)\n bus_internal_id = Column(Integer)\n lat = Column(String)\n lon = Column(String)\n orientation = Column(Integer)\n timestamp = Column(Integer)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusStop(Base):\n __tablename__ = 'bus_stops'\n id = Column(Integer, primary_key=True)\n route_id = Column(Integer, ForeignKey('bus_routes.route_id'), nullable=\n False)\n lat = Column(String)\n lon = Column(String)\n stop_code = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusTrip(Base):\n __tablename__ = 'bus_trip'\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer)\n bus_internal_id = Column(Integer)\n route_id = Column(Integer)\n last_updated = Column(DateTime, default=datetime.utcnow)\n last_pos_timestamp = Column(Integer, default=0)\n",
"step-5": "from datetime import datetime\n\nfrom sqlalchemy import Column, Integer, String, ForeignKey, DateTime\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import relationship\n\nBase = declarative_base()\n\n\nclass BusLine(Base):\n __tablename__ = \"bus_lines\"\n id = Column(Integer, primary_key=True)\n line_id = Column(Integer)\n line_description = Column(String)\n\n\nclass BusRoute(Base):\n __tablename__ = \"bus_routes\"\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer)\n route_id = Column(Integer)\n route_description = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusRoutePos(Base):\n __tablename__ = \"bus_route_pos\"\n id = Column(Integer, primary_key=True)\n route_id = Column(Integer, ForeignKey(\"bus_routes.route_id\"), nullable=False)\n lat = Column(String)\n lon = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusPos(Base):\n __tablename__ = \"bus_pos\"\n id = Column(Integer, primary_key=True)\n bus_line_id = Column(Integer, ForeignKey(\"bus_lines.line_id\"), nullable=False)\n bus_internal_id = Column(Integer)\n lat = Column(String)\n lon = Column(String)\n orientation = Column(Integer)\n timestamp = Column(Integer)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusStop(Base):\n __tablename__ = \"bus_stops\"\n id = Column(Integer, primary_key=True)\n route_id = Column(Integer, ForeignKey(\"bus_routes.route_id\"), nullable=False)\n lat = Column(String)\n lon = Column(String)\n stop_code = Column(String)\n\n def as_dict(self):\n return {c.name: getattr(self, c.name) for c in self.__table__.columns}\n\n\nclass BusTrip(Base):\n __tablename__ = \"bus_trip\"\n id = Column(Integer, primary_key=True)\n\n bus_line_id = Column(Integer)\n bus_internal_id = Column(Integer)\n\n route_id = 
Column(Integer)\n last_updated = Column(DateTime, default=datetime.utcnow)\n last_pos_timestamp = Column(Integer, default=0)\n",
"step-ids": [
12,
15,
16,
17,
19
]
}
|
[
12,
15,
16,
17,
19
] |
<|reserved_special_token_0|>
def cut_rod2(price, n):
val = [(0) for x in range(n + 1)]
val[0] = 0
for i in range(1, n + 1):
max_val = -1
for j in range(i):
max_val = max(max_val, price[j] + val[i - j - 1])
val[i] = max_val
return val[n]
<|reserved_special_token_0|>
def rodCut(price, n):
if n <= 0:
return 0
max_val = -1
for i in range(n):
max_val = max(max_val, price[i] + rodCut(price, n - 1 - i))
return max_val
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def cut_rod(price, n):
if n <= 0:
return 0
max_val = -1
val = 0
for i in range(0, n):
val = price[i] + cut_rod(price, n - i - 1)
if max_val < val:
max_val = val
return max_val
def cut_rod2(price, n):
val = [(0) for x in range(n + 1)]
val[0] = 0
for i in range(1, n + 1):
max_val = -1
for j in range(i):
max_val = max(max_val, price[j] + val[i - j - 1])
val[i] = max_val
return val[n]
<|reserved_special_token_0|>
def rodCut(price, n):
if n <= 0:
return 0
max_val = -1
for i in range(n):
max_val = max(max_val, price[i] + rodCut(price, n - 1 - i))
return max_val
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def cut_rod(price, n):
if n <= 0:
return 0
max_val = -1
val = 0
for i in range(0, n):
val = price[i] + cut_rod(price, n - i - 1)
if max_val < val:
max_val = val
return max_val
def cut_rod2(price, n):
val = [(0) for x in range(n + 1)]
val[0] = 0
for i in range(1, n + 1):
max_val = -1
for j in range(i):
max_val = max(max_val, price[j] + val[i - j - 1])
val[i] = max_val
return val[n]
<|reserved_special_token_0|>
print('Maximum Obtainable Value is', cut_rod2([2, 5, 7, 3, 9], 5))
def rodCut(price, n):
if n <= 0:
return 0
max_val = -1
for i in range(n):
max_val = max(max_val, price[i] + rodCut(price, n - 1 - i))
return max_val
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def cut_rod(price, n):
if n <= 0:
return 0
max_val = -1
val = 0
for i in range(0, n):
val = price[i] + cut_rod(price, n - i - 1)
if max_val < val:
max_val = val
return max_val
def cut_rod2(price, n):
val = [(0) for x in range(n + 1)]
val[0] = 0
for i in range(1, n + 1):
max_val = -1
for j in range(i):
max_val = max(max_val, price[j] + val[i - j - 1])
val[i] = max_val
return val[n]
arr = [1, 5, 8, 9, 10, 17, 17, 20]
arr1 = [3, 5, 8, 9, 10, 17, 17, 20]
arr2 = [5, 5, 8, 9, 10, 17, 17, 20]
size = len(arr)
print('Maximum Obtainable Value is', cut_rod2([2, 5, 7, 3, 9], 5))
def rodCut(price, n):
if n <= 0:
return 0
max_val = -1
for i in range(n):
max_val = max(max_val, price[i] + rodCut(price, n - 1 - i))
return max_val
<|reserved_special_token_1|>
# Cutting a Rod | DP-13
# Difficulty Level : Medium
# Last Updated : 13 Nov, 2020
# Given a rod of length n inches and an array of prices that contains prices of all pieces of size smaller than n. Determine the maximum value obtainable by cutting up the rod and selling the pieces. For example, if length of the rod is 8 and the values of different pieces are given as following, then the maximum obtainable value is 22 (by cutting in two pieces of lengths 2 and 6)
# length | 1 2 3 4 5 6 7 8
# --------------------------------------------
# price | 1 5 8 9 10 17 17 20
# And if the prices are as following, then the maximum obtainable value is 24 (by cutting in eight pieces of length 1)
# length | 1 2 3 4 5 6 7 8
# --------------------------------------------
# price | 3 5 8 9 10 17 17 20
import numpy as np
def cut_rod(price, n):
if n <= 0:
return 0
max_val = -1
val = 0
for i in range(0, n):
val = price[i] + cut_rod(price, n - i - 1)
if max_val < val:
max_val = val
# print("i:", i, "n:", n, "max_val:", max_val)
return max_val
def cut_rod2(price, n):
val = [0 for x in range(n+1)]
val[0] = 0
for i in range(1, n+1):
max_val = -1
for j in range(i):
max_val = max(max_val, price[j] + val[i-j-1])
# print("i:", i, "j:", j, "max_val:", max_val, "val:", val)
val[i] = max_val
# print("i:", i, "val:", val)
return val[n]
# Driver code
arr = [1, 5, 8, 9, 10, 17, 17, 20]
arr1 = [3, 5, 8, 9, 10, 17, 17, 20]
arr2 = [5, 5, 8, 9, 10, 17, 17, 20]
size = len(arr)
# print("Maximum Obtainable Value is", cut_rod(arr1, size))
# print("Maximum Obtainable Value is", cut_rod2(arr1, size))
print("Maximum Obtainable Value is", cut_rod2([2, 5, 7, 3, 9], 5))
def rodCut(price, n):
    # Duplicate of the naive recursive solution defined earlier in this
    # file (re-implementation exercise); price[i] is the price of a piece
    # of length i + 1.  Exponential time, no memoisation.
    if n <= 0:
        return 0
    max_val = -1
    # val = 0
    for i in range(n):
        # val = price[i] + rodCut(price, n-1-i)
        max_val = max(max_val, price[i] + rodCut(price, n-1-i))
    return max_val
# print("Maximum Obtainable Value is", rodCut(arr1, size))
|
flexible
|
{
"blob_id": "9cca73ebdf2b05fe29c14dc63ec1b1a7c917b085",
"index": 6508,
"step-1": "<mask token>\n\n\ndef cut_rod2(price, n):\n val = [(0) for x in range(n + 1)]\n val[0] = 0\n for i in range(1, n + 1):\n max_val = -1\n for j in range(i):\n max_val = max(max_val, price[j] + val[i - j - 1])\n val[i] = max_val\n return val[n]\n\n\n<mask token>\n\n\ndef rodCut(price, n):\n if n <= 0:\n return 0\n max_val = -1\n for i in range(n):\n max_val = max(max_val, price[i] + rodCut(price, n - 1 - i))\n return max_val\n",
"step-2": "<mask token>\n\n\ndef cut_rod(price, n):\n if n <= 0:\n return 0\n max_val = -1\n val = 0\n for i in range(0, n):\n val = price[i] + cut_rod(price, n - i - 1)\n if max_val < val:\n max_val = val\n return max_val\n\n\ndef cut_rod2(price, n):\n val = [(0) for x in range(n + 1)]\n val[0] = 0\n for i in range(1, n + 1):\n max_val = -1\n for j in range(i):\n max_val = max(max_val, price[j] + val[i - j - 1])\n val[i] = max_val\n return val[n]\n\n\n<mask token>\n\n\ndef rodCut(price, n):\n if n <= 0:\n return 0\n max_val = -1\n for i in range(n):\n max_val = max(max_val, price[i] + rodCut(price, n - 1 - i))\n return max_val\n",
"step-3": "<mask token>\n\n\ndef cut_rod(price, n):\n if n <= 0:\n return 0\n max_val = -1\n val = 0\n for i in range(0, n):\n val = price[i] + cut_rod(price, n - i - 1)\n if max_val < val:\n max_val = val\n return max_val\n\n\ndef cut_rod2(price, n):\n val = [(0) for x in range(n + 1)]\n val[0] = 0\n for i in range(1, n + 1):\n max_val = -1\n for j in range(i):\n max_val = max(max_val, price[j] + val[i - j - 1])\n val[i] = max_val\n return val[n]\n\n\n<mask token>\nprint('Maximum Obtainable Value is', cut_rod2([2, 5, 7, 3, 9], 5))\n\n\ndef rodCut(price, n):\n if n <= 0:\n return 0\n max_val = -1\n for i in range(n):\n max_val = max(max_val, price[i] + rodCut(price, n - 1 - i))\n return max_val\n",
"step-4": "<mask token>\n\n\ndef cut_rod(price, n):\n if n <= 0:\n return 0\n max_val = -1\n val = 0\n for i in range(0, n):\n val = price[i] + cut_rod(price, n - i - 1)\n if max_val < val:\n max_val = val\n return max_val\n\n\ndef cut_rod2(price, n):\n val = [(0) for x in range(n + 1)]\n val[0] = 0\n for i in range(1, n + 1):\n max_val = -1\n for j in range(i):\n max_val = max(max_val, price[j] + val[i - j - 1])\n val[i] = max_val\n return val[n]\n\n\narr = [1, 5, 8, 9, 10, 17, 17, 20]\narr1 = [3, 5, 8, 9, 10, 17, 17, 20]\narr2 = [5, 5, 8, 9, 10, 17, 17, 20]\nsize = len(arr)\nprint('Maximum Obtainable Value is', cut_rod2([2, 5, 7, 3, 9], 5))\n\n\ndef rodCut(price, n):\n if n <= 0:\n return 0\n max_val = -1\n for i in range(n):\n max_val = max(max_val, price[i] + rodCut(price, n - 1 - i))\n return max_val\n",
"step-5": "# Cutting a Rod | DP-13\n# Difficulty Level : Medium\n# Last Updated : 13 Nov, 2020\n\n# Given a rod of length n inches and an array of prices that contains prices of all pieces of size smaller than n. Determine the maximum value obtainable by cutting up the rod and selling the pieces. For example, if length of the rod is 8 and the values of different pieces are given as following, then the maximum obtainable value is 22 (by cutting in two pieces of lengths 2 and 6)\n\n# length | 1 2 3 4 5 6 7 8\n# --------------------------------------------\n# price | 1 5 8 9 10 17 17 20\n# And if the prices are as following, then the maximum obtainable value is 24 (by cutting in eight pieces of length 1)\n\n# length | 1 2 3 4 5 6 7 8\n# --------------------------------------------\n# price | 3 5 8 9 10 17 17 20\n\nimport numpy as np\n\n\ndef cut_rod(price, n):\n if n <= 0:\n return 0\n max_val = -1\n\n val = 0\n for i in range(0, n):\n val = price[i] + cut_rod(price, n - i - 1)\n if max_val < val:\n max_val = val\n # print(\"i:\", i, \"n:\", n, \"max_val:\", max_val)\n return max_val\n\n\ndef cut_rod2(price, n):\n val = [0 for x in range(n+1)]\n val[0] = 0\n\n for i in range(1, n+1):\n max_val = -1\n for j in range(i):\n max_val = max(max_val, price[j] + val[i-j-1])\n # print(\"i:\", i, \"j:\", j, \"max_val:\", max_val, \"val:\", val)\n val[i] = max_val\n # print(\"i:\", i, \"val:\", val)\n\n return val[n]\n\n\n# Driver code\narr = [1, 5, 8, 9, 10, 17, 17, 20]\narr1 = [3, 5, 8, 9, 10, 17, 17, 20]\narr2 = [5, 5, 8, 9, 10, 17, 17, 20]\nsize = len(arr)\n# print(\"Maximum Obtainable Value is\", cut_rod(arr1, size))\n# print(\"Maximum Obtainable Value is\", cut_rod2(arr1, size))\nprint(\"Maximum Obtainable Value is\", cut_rod2([2, 5, 7, 3, 9], 5))\n\n\ndef rodCut(price, n):\n if n <= 0:\n return 0\n max_val = -1\n\n # val = 0\n for i in range(n):\n # val = price[i] + rodCut(price, n-1-i)\n max_val = max(max_val, price[i] + rodCut(price, n-1-i))\n\n return max_val\n\n\n# 
print(\"Maximum Obtainable Value is\", rodCut(arr1, size))\n",
"step-ids": [
2,
3,
4,
5,
7
]
}
|
[
2,
3,
4,
5,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Fragment of the FourierDB demo (setup lines masked out of this dataset
# slice); db/coll/coll2 are defined in the fuller copies elsewhere in the
# file.
db.add_collection(coll)
db.add_collection(coll2)
# NOTE(review): pickle.dump into open('') is broken — empty path and the
# default read-only text mode; pickle needs a writable binary handle.
pickle.dump(db, open(''))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
console = Console()
doc = FourierDocument({'bar': 'eggs', 'xyz': 'spam'})
doc2 = FourierDocument({'a': 'foo', 'b': 'bar'})
doc3 = FourierDocument({'abc': 'xyz'})
doc4 = FourierDocument({(1): 2, (3): 4, (5): 6})
doc5 = FourierDocument({'hello': [1, 2, 3, 4, 5, 6, 7, 8, 9]})
FOURIER_DIR = Path.home() / '.fourier'
FOURIER_LOGS = FOURIER_DIR / 'logs'
FOURIER_DBS = FOURIER_DIR / 'databases'
coll = FourierCollection('coll', doc, doc2)
coll2 = FourierCollection('coll2', doc3, doc4, doc5)
db = FourierDB('db')
db.add_collection(coll)
db.add_collection(coll2)
pickle.dump(db, open(''))
<|reserved_special_token_1|>
import pickle
from pathlib import Path
from rich.console import Console
from fourierdb import FourierDocument, FourierCollection, FourierDB
# Demo: build a small FourierDB (two collections, five documents) and
# attempt to pickle it.
console = Console()
# Sample documents covering string, integer and list payloads.
doc = FourierDocument({'bar': 'eggs', 'xyz': 'spam'})
doc2 = FourierDocument({'a': 'foo', 'b': 'bar'})
doc3 = FourierDocument({'abc': 'xyz'})
doc4 = FourierDocument({(1): 2, (3): 4, (5): 6})
doc5 = FourierDocument({'hello': [1, 2, 3, 4, 5, 6, 7, 8, 9]})
# Path constants for FourierDB state (not referenced below).
FOURIER_DIR = Path.home() / '.fourier'
FOURIER_LOGS = FOURIER_DIR / 'logs'
FOURIER_DBS = FOURIER_DIR / 'databases'
coll = FourierCollection('coll', doc, doc2)
coll2 = FourierCollection('coll2', doc3, doc4, doc5)
db = FourierDB('db')
db.add_collection(coll)
db.add_collection(coll2)
# NOTE(review): open('') cannot work — empty filename and default
# read-only text mode; pickle.dump needs a writable binary file.
pickle.dump(db, open(''))
<|reserved_special_token_1|>
# Demo script: build a small FourierDB with two collections of sample
# documents and persist it with pickle under ~/.fourier/databases.
import pickle
from pathlib import Path
from rich.console import Console
from fourierdb import FourierDocument, FourierCollection, FourierDB

console = Console()

# Sample documents covering string, integer and list payloads.
doc = FourierDocument({"bar": "eggs", "xyz": "spam"})
doc2 = FourierDocument({"a": "foo", "b": "bar"})
doc3 = FourierDocument({"abc": "xyz"})
doc4 = FourierDocument({1: 2, 3: 4, 5: 6})
doc5 = FourierDocument({"hello": [1, 2, 3, 4, 5, 6, 7, 8, 9]})
FOURIER_DIR = Path.home() / ".fourier"
FOURIER_LOGS = FOURIER_DIR / "logs"
FOURIER_DBS = FOURIER_DIR / "databases"
coll = FourierCollection("coll", doc, doc2)
coll2 = FourierCollection("coll2", doc3, doc4, doc5)

db = FourierDB("db")

db.add_collection(coll)
db.add_collection(coll2)

# BUG FIX: the original did pickle.dump(db, open("")), which raises
# FileNotFoundError (empty path) and would be a read-only *text* handle
# anyway — pickle.dump requires a writable binary file.  Serialize into
# the databases directory instead, creating it first, and close the
# handle deterministically.
FOURIER_DBS.mkdir(parents=True, exist_ok=True)
with (FOURIER_DBS / "db.pickle").open("wb") as fh:  # TODO confirm intended filename
    pickle.dump(db, fh)
|
flexible
|
{
"blob_id": "f15f96658130ac9bba748a518371ad80d9772fbc",
"index": 4121,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndb.add_collection(coll)\ndb.add_collection(coll2)\npickle.dump(db, open(''))\n",
"step-3": "<mask token>\nconsole = Console()\ndoc = FourierDocument({'bar': 'eggs', 'xyz': 'spam'})\ndoc2 = FourierDocument({'a': 'foo', 'b': 'bar'})\ndoc3 = FourierDocument({'abc': 'xyz'})\ndoc4 = FourierDocument({(1): 2, (3): 4, (5): 6})\ndoc5 = FourierDocument({'hello': [1, 2, 3, 4, 5, 6, 7, 8, 9]})\nFOURIER_DIR = Path.home() / '.fourier'\nFOURIER_LOGS = FOURIER_DIR / 'logs'\nFOURIER_DBS = FOURIER_DIR / 'databases'\ncoll = FourierCollection('coll', doc, doc2)\ncoll2 = FourierCollection('coll2', doc3, doc4, doc5)\ndb = FourierDB('db')\ndb.add_collection(coll)\ndb.add_collection(coll2)\npickle.dump(db, open(''))\n",
"step-4": "import pickle\nfrom pathlib import Path\nfrom rich.console import Console\nfrom fourierdb import FourierDocument, FourierCollection, FourierDB\nconsole = Console()\ndoc = FourierDocument({'bar': 'eggs', 'xyz': 'spam'})\ndoc2 = FourierDocument({'a': 'foo', 'b': 'bar'})\ndoc3 = FourierDocument({'abc': 'xyz'})\ndoc4 = FourierDocument({(1): 2, (3): 4, (5): 6})\ndoc5 = FourierDocument({'hello': [1, 2, 3, 4, 5, 6, 7, 8, 9]})\nFOURIER_DIR = Path.home() / '.fourier'\nFOURIER_LOGS = FOURIER_DIR / 'logs'\nFOURIER_DBS = FOURIER_DIR / 'databases'\ncoll = FourierCollection('coll', doc, doc2)\ncoll2 = FourierCollection('coll2', doc3, doc4, doc5)\ndb = FourierDB('db')\ndb.add_collection(coll)\ndb.add_collection(coll2)\npickle.dump(db, open(''))\n",
"step-5": "import pickle\nfrom pathlib import Path\nfrom rich.console import Console\nfrom fourierdb import FourierDocument, FourierCollection, FourierDB\n\nconsole = Console()\n\ndoc = FourierDocument({\"bar\": \"eggs\", \"xyz\": \"spam\"})\ndoc2 = FourierDocument({\"a\": \"foo\", \"b\": \"bar\"})\ndoc3 = FourierDocument({\"abc\": \"xyz\"})\ndoc4 = FourierDocument({1: 2, 3: 4, 5: 6})\ndoc5 = FourierDocument({\"hello\": [1, 2, 3, 4, 5, 6, 7, 8, 9]})\nFOURIER_DIR = Path.home() / \".fourier\"\nFOURIER_LOGS = FOURIER_DIR / \"logs\"\nFOURIER_DBS = FOURIER_DIR / \"databases\"\ncoll = FourierCollection(\"coll\", doc, doc2)\ncoll2 = FourierCollection(\"coll2\", doc3, doc4, doc5)\n\ndb = FourierDB(\"db\")\n\ndb.add_collection(coll)\ndb.add_collection(coll2)\n\npickle.dump(db, open(\"\"))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# flush (straight) check for a poker hand
def IsContinuous(numbers):
    """Return True if the hand can form a straight.

    Zeros act as jokers that may fill any gap between cards; any
    duplicate non-zero card makes a straight impossible.  Sorts the
    input list in place.
    """
    if not numbers:
        return False
    numbers.sort()
    jokers = numbers.count(0)
    # Walk adjacent non-zero cards, accumulating the gaps between them.
    gaps = 0
    left = jokers
    for right in range(jokers + 1, len(numbers)):
        if numbers[right] == numbers[left]:
            return False  # duplicate non-zero card
        gaps += numbers[right] - numbers[left] - 1
        left = right
    # Every gap must be fillable by a joker.
    return gaps <= jokers
|
normal
|
{
"blob_id": "68a776d7fccc8d8496a944baff51d2a862fc7d31",
"index": 1259,
"step-1": "<mask token>\n",
"step-2": "def IsContinuous(numbers):\n if not numbers or len(numbers) < 1:\n return False\n numbers.sort()\n number_of_zero = 0\n number_of_gap = 0\n for i in range(len(numbers)):\n if numbers[i] == 0:\n number_of_zero += 1\n small = number_of_zero\n big = small + 1\n while big < len(numbers):\n if numbers[small] == numbers[big]:\n return False\n number_of_gap += numbers[big] - numbers[small] - 1\n small = big\n big += 1\n if number_of_gap <= number_of_zero:\n return True\n else:\n return False\n",
"step-3": "# flush in poker\ndef IsContinuous(numbers):\n if not numbers or len(numbers) < 1 :\n return False\n\n numbers.sort()\n number_of_zero = 0\n number_of_gap = 0\n for i in range(len(numbers)):\n if numbers[i] == 0:\n number_of_zero += 1\n\n small = number_of_zero\n big = small + 1\n while(big < len(numbers)):\n if numbers[small] == numbers[big]:\n return False\n\n number_of_gap += (numbers[big] - numbers[small] - 1)\n small = big\n big += 1\n\n if number_of_gap <= number_of_zero:\n return True\n else:\n return False\n\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def write_csv(url, recursive=False, writer=None, token=''):
    """Variant copy: write one page of datastore results; follow
    pagination cursors when *recursive* is true."""
    response = fetch(url)
    if recursive:
        write_rows(writer, response)
        cursor = next_cursor(response)
        if cursor is not None:
            print(f'next cursor exists...{cursor}')
            ret = urlparse(url)
            # Rebuild the page URL with the next cursor and the API token.
            next_url = (
                f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'
                )
            write_csv(next_url, recursive=True, writer=writer, token=token)
    else:
        write_rows(writer, response)
<|reserved_special_token_0|>
def write_rows(writer, response):
    """Append one CSV row per result message, columns ordered as HEADER."""
    for msg in response['results']:
        values = [msg[k] for k in HEADER]
        writer.writerow(values)
def next_cursor(response):
    """Return the pagination cursor from the response metadata (may be None)."""
    return response['meta']['cursor']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Variant copy of the datastore export script (module imports and the
# HEADER constant are masked out of this dataset slice).
def parse_options():
    """Parse -H/--host, -t/--token and -r/--recursive command-line flags."""
    parser = OptionParser()
    parser.add_option('-H', '--host')
    parser.add_option('-t', '--token')
    parser.add_option('-r', '--recursive', action='store_true', default=False)
    return parser.parse_args()
def write_csv(url, recursive=False, writer=None, token=''):
    """Write one page of results; follow pagination cursors when recursive."""
    response = fetch(url)
    if recursive:
        write_rows(writer, response)
        cursor = next_cursor(response)
        if cursor is not None:
            print(f'next cursor exists...{cursor}')
            ret = urlparse(url)
            # Rebuild the page URL with the next cursor and the API token.
            next_url = (
                f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'
                )
            write_csv(next_url, recursive=True, writer=writer, token=token)
    else:
        write_rows(writer, response)
def fetch(url):
    """GET *url* and decode the JSON body using the declared charset."""
    print(f'url...{url}\n')
    urlData = request.urlopen(url)
    data = urlData.read()
    encoding = urlData.info().get_content_charset('utf-8')
    return json.loads(data.decode(encoding))
def write_rows(writer, response):
    """Append one CSV row per result message, columns ordered as HEADER."""
    for msg in response['results']:
        values = [msg[k] for k in HEADER]
        writer.writerow(values)
def next_cursor(response):
    """Return the pagination cursor from the response metadata (may be None)."""
    return response['meta']['cursor']
if __name__ == '__main__':
    opt, args = parse_options()
    # An explicit -H host overrides the public API endpoint.
    if opt.host is not None:
        url = urljoin(f'https://{opt.host}',
            f'datastore/v1/channels?token={opt.token}')
    else:
        url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'
    f = open('./datastore.csv', 'w')
    writer = csv.writer(f, lineterminator='\n')
    writer.writerow(HEADER)
    write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)
    f.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Variant copy of the datastore export script (module imports masked out
# of this dataset slice).
# Column order for both the CSV header and each data row.
HEADER = ['id', 'module', 'channel', 'type', 'value', 'datetime']
def parse_options():
    """Parse -H/--host, -t/--token and -r/--recursive command-line flags."""
    parser = OptionParser()
    parser.add_option('-H', '--host')
    parser.add_option('-t', '--token')
    parser.add_option('-r', '--recursive', action='store_true', default=False)
    return parser.parse_args()
def write_csv(url, recursive=False, writer=None, token=''):
    """Write one page of results; follow pagination cursors when recursive."""
    response = fetch(url)
    if recursive:
        write_rows(writer, response)
        cursor = next_cursor(response)
        if cursor is not None:
            print(f'next cursor exists...{cursor}')
            ret = urlparse(url)
            # Rebuild the page URL with the next cursor and the API token.
            next_url = (
                f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'
                )
            write_csv(next_url, recursive=True, writer=writer, token=token)
    else:
        write_rows(writer, response)
def fetch(url):
    """GET *url* and decode the JSON body using the declared charset."""
    print(f'url...{url}\n')
    urlData = request.urlopen(url)
    data = urlData.read()
    encoding = urlData.info().get_content_charset('utf-8')
    return json.loads(data.decode(encoding))
def write_rows(writer, response):
    """Append one CSV row per result message, columns ordered as HEADER."""
    for msg in response['results']:
        values = [msg[k] for k in HEADER]
        writer.writerow(values)
def next_cursor(response):
    """Return the pagination cursor from the response metadata (may be None)."""
    return response['meta']['cursor']
if __name__ == '__main__':
    opt, args = parse_options()
    # An explicit -H host overrides the public API endpoint.
    if opt.host is not None:
        url = urljoin(f'https://{opt.host}',
            f'datastore/v1/channels?token={opt.token}')
    else:
        url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'
    f = open('./datastore.csv', 'w')
    writer = csv.writer(f, lineterminator='\n')
    writer.writerow(HEADER)
    write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)
    f.close()
<|reserved_special_token_1|>
import csv
import json
from urllib import request
from urllib.error import HTTPError
from urllib.parse import urljoin, urlparse, quote_plus
from optparse import OptionParser
# Complete variant copy of the datastore export script (imports, HEADER,
# functions and entry point).
# Column order for both the CSV header and each data row.
HEADER = ['id', 'module', 'channel', 'type', 'value', 'datetime']
def parse_options():
    """Parse -H/--host, -t/--token and -r/--recursive command-line flags."""
    parser = OptionParser()
    parser.add_option('-H', '--host')
    parser.add_option('-t', '--token')
    parser.add_option('-r', '--recursive', action='store_true', default=False)
    return parser.parse_args()
def write_csv(url, recursive=False, writer=None, token=''):
    """Write one page of results; follow pagination cursors when recursive."""
    response = fetch(url)
    if recursive:
        write_rows(writer, response)
        cursor = next_cursor(response)
        if cursor is not None:
            print(f'next cursor exists...{cursor}')
            ret = urlparse(url)
            # Rebuild the page URL with the next cursor and the API token.
            next_url = (
                f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'
                )
            write_csv(next_url, recursive=True, writer=writer, token=token)
    else:
        write_rows(writer, response)
def fetch(url):
    """GET *url* and decode the JSON body using the declared charset."""
    print(f'url...{url}\n')
    urlData = request.urlopen(url)
    data = urlData.read()
    encoding = urlData.info().get_content_charset('utf-8')
    return json.loads(data.decode(encoding))
def write_rows(writer, response):
    """Append one CSV row per result message, columns ordered as HEADER."""
    for msg in response['results']:
        values = [msg[k] for k in HEADER]
        writer.writerow(values)
def next_cursor(response):
    """Return the pagination cursor from the response metadata (may be None)."""
    return response['meta']['cursor']
if __name__ == '__main__':
    opt, args = parse_options()
    # An explicit -H host overrides the public API endpoint.
    if opt.host is not None:
        url = urljoin(f'https://{opt.host}',
            f'datastore/v1/channels?token={opt.token}')
    else:
        url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'
    f = open('./datastore.csv', 'w')
    writer = csv.writer(f, lineterminator='\n')
    writer.writerow(HEADER)
    write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)
    f.close()
<|reserved_special_token_1|>
import csv
import json
from urllib import request
from urllib.error import HTTPError
from urllib.parse import urljoin, urlparse, quote_plus
from optparse import OptionParser
HEADER = ["id", "module", "channel", "type", "value", "datetime"]
def parse_options():
    """Parse the command line: -H/--host, -t/--token, -r/--recursive.

    Returns the (options, positional_args) pair from OptionParser.
    """
    parser = OptionParser()
    for flags in (("-H", "--host"), ("-t", "--token")):
        parser.add_option(*flags)
    parser.add_option("-r", "--recursive", action="store_true", default=False)
    return parser.parse_args()
def write_csv(url, recursive=False, writer=None, token=""):
    """Write one page of datastore results to *writer*.

    When *recursive* is true, keep following pagination cursors until
    the API stops returning one.  The scheme/netloc/path of *url* are
    reused for every follow-up page (only cursor and token change).
    """
    response = fetch(url)
    write_rows(writer, response)
    if not recursive:
        return
    cursor = next_cursor(response)
    while cursor is not None:
        print(f"next cursor exists...{cursor}")
        parts = urlparse(url)
        page_url = f"{parts.scheme}://{parts.netloc}{parts.path}?cursor={quote_plus(cursor)}&token={token}"
        response = fetch(page_url)
        write_rows(writer, response)
        cursor = next_cursor(response)
def fetch(url):
    """GET *url* and return its JSON payload as a Python object.

    The body is decoded with the charset declared in the response
    headers, defaulting to UTF-8.
    """
    print(f"url...{url}\n")
    resp = request.urlopen(url)
    body = resp.read()
    charset = resp.info().get_content_charset("utf-8")
    return json.loads(body.decode(charset))
def write_rows(writer, response):
    """Append one CSV row per result message, columns ordered as HEADER."""
    for record in response["results"]:
        writer.writerow([record[field] for field in HEADER])
def next_cursor(response):
    """Return the pagination cursor reported by the API (None when done)."""
    meta = response["meta"]
    return meta["cursor"]
if __name__ == "__main__":
    opt, args = parse_options()
    # Build the first page URL; an explicit -H host overrides the public
    # API endpoint.
    if opt.host is not None:
        url = urljoin(f"https://{opt.host}",
            f"datastore/v1/channels?token={opt.token}")
    else:
        url = f"https://api.sakura.io/datastore/v1/channels?token={opt.token}"
    f = open('./datastore.csv', 'w')
    writer = csv.writer(f, lineterminator="\n")
    # write header
    writer.writerow(HEADER)
    write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)
    f.close()
|
flexible
|
{
"blob_id": "b47f15a79f7a82304c2be6af00a5854ff0f6ad3e",
"index": 6987,
"step-1": "<mask token>\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\n<mask token>\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option('-H', '--host')\n parser.add_option('-t', '--token')\n parser.add_option('-r', '--recursive', action='store_true', default=False)\n return parser.parse_args()\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\ndef fetch(url):\n print(f'url...{url}\\n')\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset('utf-8')\n return json.loads(data.decode(encoding))\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\nif __name__ == '__main__':\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f'https://{opt.host}',\n f'datastore/v1/channels?token={opt.token}')\n else:\n url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'\n f = open('./datastore.csv', 'w')\n writer = csv.writer(f, lineterminator='\\n')\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()\n",
"step-3": "<mask token>\nHEADER = ['id', 'module', 'channel', 'type', 'value', 'datetime']\n\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option('-H', '--host')\n parser.add_option('-t', '--token')\n parser.add_option('-r', '--recursive', action='store_true', default=False)\n return parser.parse_args()\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\ndef fetch(url):\n print(f'url...{url}\\n')\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset('utf-8')\n return json.loads(data.decode(encoding))\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\nif __name__ == '__main__':\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f'https://{opt.host}',\n f'datastore/v1/channels?token={opt.token}')\n else:\n url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'\n f = open('./datastore.csv', 'w')\n writer = csv.writer(f, lineterminator='\\n')\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()\n",
"step-4": "import csv\nimport json\nfrom urllib import request\nfrom urllib.error import HTTPError\nfrom urllib.parse import urljoin, urlparse, quote_plus\nfrom optparse import OptionParser\nHEADER = ['id', 'module', 'channel', 'type', 'value', 'datetime']\n\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option('-H', '--host')\n parser.add_option('-t', '--token')\n parser.add_option('-r', '--recursive', action='store_true', default=False)\n return parser.parse_args()\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\ndef fetch(url):\n print(f'url...{url}\\n')\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset('utf-8')\n return json.loads(data.decode(encoding))\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\nif __name__ == '__main__':\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f'https://{opt.host}',\n f'datastore/v1/channels?token={opt.token}')\n else:\n url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'\n f = open('./datastore.csv', 'w')\n writer = csv.writer(f, lineterminator='\\n')\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()\n",
"step-5": "import csv\nimport json\nfrom urllib import request\nfrom urllib.error import HTTPError\nfrom urllib.parse import urljoin, urlparse, quote_plus\nfrom optparse import OptionParser\n\nHEADER = [\"id\", \"module\", \"channel\", \"type\", \"value\", \"datetime\"]\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option(\"-H\", \"--host\")\n parser.add_option(\"-t\", \"--token\")\n parser.add_option(\"-r\", \"--recursive\", action=\"store_true\", default=False)\n return parser.parse_args()\n\ndef write_csv(url, recursive=False, writer=None, token=\"\"):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f\"next cursor exists...{cursor}\")\n ret = urlparse(url)\n next_url = f\"{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}\"\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\ndef fetch(url):\n print(f\"url...{url}\\n\")\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset(\"utf-8\")\n return json.loads(data.decode(encoding))\n\ndef write_rows(writer, response):\n for msg in response[\"results\"]:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\ndef next_cursor(response):\n return response[\"meta\"][\"cursor\"]\n\nif __name__ == \"__main__\":\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f\"https://{opt.host}\",\n f\"datastore/v1/channels?token={opt.token}\")\n else:\n url = f\"https://api.sakura.io/datastore/v1/channels?token={opt.token}\"\n f = open('./datastore.csv', 'w')\n\n writer = csv.writer(f, lineterminator=\"\\n\")\n # write header\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()",
"step-ids": [
3,
6,
7,
8,
9
]
}
|
[
3,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Fragment of the word-count script (setup lines masked out of this
# dataset slice).  NOTE(review): dict.has_key() was removed in Python 3,
# and the max-of-counts formula double-counts all-lower/all-upper words
# (count(word) duplicates count(word.lower()) or count(word.upper())).
for word in words:
    if word_dict.has_key(word):
        word_dict[word.lower()] = max(word_dict[word.lower()], words.count(
            word.lower()) + words.count(word.upper()) + words.count(word))
    else:
        word_dict[word.lower()] = max(0, words.count(word.lower()) + words.
            count(word.upper()) + words.count(word))
for word, number in word_dict.items():
    fout.write(word + ':%d\n' % number)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Variant copy of the word-count script (import masked out of this
# dataset slice).  NOTE(review): two Python 3 blockers — dict.has_key()
# no longer exists, and '\x08' in the pattern is a literal backspace
# left over from a non-raw "\b" word-boundary escape.
fin = open('example', 'r')
fout = open('reuslt.txt', 'w')
str = fin.read()
reObj = re.compile('\x08?([a-zA-Z]+)\x08?')
words = reObj.findall(str)
word_dict = {}
for word in words:
    if word_dict.has_key(word):
        word_dict[word.lower()] = max(word_dict[word.lower()], words.count(
            word.lower()) + words.count(word.upper()) + words.count(word))
    else:
        word_dict[word.lower()] = max(0, words.count(word.lower()) + words.
            count(word.upper()) + words.count(word))
for word, number in word_dict.items():
    fout.write(word + ':%d\n' % number)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import re
# Variant copy of the word-count script.  NOTE(review): two Python 3
# blockers — dict.has_key() no longer exists, and '\x08' in the pattern
# is a literal backspace left over from a non-raw "\b" escape.
fin = open('example', 'r')
fout = open('reuslt.txt', 'w')
str = fin.read()
reObj = re.compile('\x08?([a-zA-Z]+)\x08?')
words = reObj.findall(str)
word_dict = {}
for word in words:
    if word_dict.has_key(word):
        word_dict[word.lower()] = max(word_dict[word.lower()], words.count(
            word.lower()) + words.count(word.upper()) + words.count(word))
    else:
        word_dict[word.lower()] = max(0, words.count(word.lower()) + words.
            count(word.upper()) + words.count(word))
for word, number in word_dict.items():
    fout.write(word + ':%d\n' % number)
<|reserved_special_token_1|>
# coding=utf-8
"""Count word frequencies (case-insensitive) in the file ``example``.

By @liuxingpuu
"""
import re

# BUG FIX: the original pattern "\b?([a-zA-Z]+)\b?" was a non-raw string,
# so "\b" was a literal backspace (\x08), not a word boundary.  A raw
# string restores the intended regex (the \b anchors are redundant for
# [a-zA-Z]+ but kept for fidelity to the original intent).
word_re = re.compile(r"\b([a-zA-Z]+)\b")

# BUG FIX: file handles were never closed; use context managers.
with open("example", "r") as fin:
    text = fin.read()

words = word_re.findall(text)

# BUG FIX: dict.has_key() was removed in Python 3, and the original
# formula double-counted all-lower/all-upper words by summing
# count(lower) + count(upper) + count(exact).  Count every case variant
# of a word under its lower-cased key instead.
word_dict = {}
for word in words:
    key = word.lower()
    word_dict[key] = word_dict.get(key, 0) + 1

# Output file name kept byte-for-byte ("reuslt" typo included) so any
# downstream consumer of the original script still finds it.
with open("reuslt.txt", "w") as fout:
    for word, number in word_dict.items():
        fout.write(word + ":%d\n" % number)
|
flexible
|
{
"blob_id": "addab37cb23abead2d9f77a65336cd6026c52c68",
"index": 8559,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor word in words:\n if word_dict.has_key(word):\n word_dict[word.lower()] = max(word_dict[word.lower()], words.count(\n word.lower()) + words.count(word.upper()) + words.count(word))\n else:\n word_dict[word.lower()] = max(0, words.count(word.lower()) + words.\n count(word.upper()) + words.count(word))\nfor word, number in word_dict.items():\n fout.write(word + ':%d\\n' % number)\n",
"step-3": "<mask token>\nfin = open('example', 'r')\nfout = open('reuslt.txt', 'w')\nstr = fin.read()\nreObj = re.compile('\\x08?([a-zA-Z]+)\\x08?')\nwords = reObj.findall(str)\nword_dict = {}\nfor word in words:\n if word_dict.has_key(word):\n word_dict[word.lower()] = max(word_dict[word.lower()], words.count(\n word.lower()) + words.count(word.upper()) + words.count(word))\n else:\n word_dict[word.lower()] = max(0, words.count(word.lower()) + words.\n count(word.upper()) + words.count(word))\nfor word, number in word_dict.items():\n fout.write(word + ':%d\\n' % number)\n",
"step-4": "<mask token>\nimport re\nfin = open('example', 'r')\nfout = open('reuslt.txt', 'w')\nstr = fin.read()\nreObj = re.compile('\\x08?([a-zA-Z]+)\\x08?')\nwords = reObj.findall(str)\nword_dict = {}\nfor word in words:\n if word_dict.has_key(word):\n word_dict[word.lower()] = max(word_dict[word.lower()], words.count(\n word.lower()) + words.count(word.upper()) + words.count(word))\n else:\n word_dict[word.lower()] = max(0, words.count(word.lower()) + words.\n count(word.upper()) + words.count(word))\nfor word, number in word_dict.items():\n fout.write(word + ':%d\\n' % number)\n",
"step-5": "#coding=utf-8\n'''\nfind words and count\nBy @liuxingpuu\n'''\nimport re\n\nfin= open(\"example\",\"r\")\nfout = open(\"reuslt.txt\",\"w\")\nstr=fin.read()\nreObj = re.compile(\"\\b?([a-zA-Z]+)\\b?\")\nwords = reObj.findall(str)\nword_dict={}\nfor word in words:\n if(word_dict.has_key(word)):\n word_dict[word.lower()]=max(word_dict[word.lower()],words.count(word.lower())+words.count(word.upper())+words.count(word))\n else:\n word_dict[word.lower()]=max(0,words.count(word.lower())+words.count(word.upper())+words.count(word))\nfor(word,number) in word_dict.items():\n fout.write(word+\":%d\\n\"%number)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import sys
# Local-judging harness: feed input() from the bundled sample file.
sys.stdin = open("sample_input_17.txt","r")
T = int(input())  # number of test cases
def code(N):  # find the scan row that contains the security code
    """Return the last row of the global grid S whose columns 49-52
    contain a '1'; returns [] when no row matches.

    Only rows 0..N-5 and the 49-52 column window are scanned — bounds
    presumably specific to this problem's sample layout; TODO confirm.
    NOTE: the local variable shadows the function's own name, which is
    safe here but easy to misread.
    """
    code = []
    for i in range(N-4):
        for j in range(49,53):
            if S[i][j] == "1" :
                code = S[i]
    return code
def code_s(code):
    """Slice the 56-character code pattern out of a scan row.

    Scans from the right for the last '1' (the code's final bit) and
    returns the 56 characters (8 digits x 7 bits) ending there; returns
    None implicitly when the row has no '1' past index 0.  Reads the
    module-level row width M.
    """
    for end in range(M - 1, 0, -1):
        if code[end] == "1":
            return code[end - 55:end + 1]
def code_c(code_s):
    """Decode the 56-char barcode string into a list of digits.

    Splits the input into eight 7-character chunks and maps each chunk to
    its index in the global ``numbers`` pattern table; chunks that match no
    pattern are silently skipped (as in the original).
    """
    digits = []
    for chunk_idx in range(8):
        chunk = code_s[chunk_idx * 7:(chunk_idx + 1) * 7]
        for digit, pattern in enumerate(numbers):
            if chunk == pattern:
                digits.append(digit)
    return digits
for tc in range(T):
    # Per test case: grid size, the grid itself, and the EAN-8 digit patterns.
    N,M = map(int,input().split())
    S = [input() for _ in range(N)]
    numbers = ["0001101","0011001","0010011","0111101","0100011",
               "0110001","0101111","0111011","0110111","0001011"]
    print(f"#{tc+1}",end=" ")
    # Decode once; the original re-ran the full code_c(code_s(code(N)))
    # pipeline three times per test case for the same result.
    decoded = code_c(code_s(code(N)))
    # Checksum: 3 * (digits at even positions) + (digits at odd positions)
    # must be a multiple of 10 for a valid barcode.
    if not (sum(decoded[0:7:2]) * 3 + sum(decoded[1:8:2])) % 10:
        print(sum(decoded))  # valid: print the digit sum
    else:
        print(0)  # invalid barcode
|
normal
|
{
"blob_id": "b739c1de6c008158ee3806bed9fa2865eb484b4f",
"index": 5596,
"step-1": "<mask token>\n\n\ndef code(N):\n code = []\n for i in range(N - 4):\n for j in range(49, 53):\n if S[i][j] == '1':\n code = S[i]\n return code\n\n\ndef code_s(code):\n for x in range(M - 1, 0, -1):\n if code[x] == '1':\n return code[x - 55:x + 1]\n\n\ndef code_c(code_s):\n lists = []\n for n in range(8):\n for m in range(10):\n if code_s[n * 7:(n + 1) * 7] == numbers[m]:\n lists.append(m)\n return lists\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef code(N):\n code = []\n for i in range(N - 4):\n for j in range(49, 53):\n if S[i][j] == '1':\n code = S[i]\n return code\n\n\ndef code_s(code):\n for x in range(M - 1, 0, -1):\n if code[x] == '1':\n return code[x - 55:x + 1]\n\n\ndef code_c(code_s):\n lists = []\n for n in range(8):\n for m in range(10):\n if code_s[n * 7:(n + 1) * 7] == numbers[m]:\n lists.append(m)\n return lists\n\n\nfor tc in range(T):\n N, M = map(int, input().split())\n S = [input() for _ in range(N)]\n numbers = ['0001101', '0011001', '0010011', '0111101', '0100011',\n '0110001', '0101111', '0111011', '0110111', '0001011']\n print(f'#{tc + 1}', end=' ')\n if not (sum(code_c(code_s(code(N)))[0:7:2]) * 3 + sum(code_c(code_s(\n code(N)))[1:8:2])) % 10:\n print(sum(code_c(code_s(code(N)))))\n else:\n print(0)\n",
"step-3": "<mask token>\nsys.stdin = open('sample_input_17.txt', 'r')\nT = int(input())\n\n\ndef code(N):\n code = []\n for i in range(N - 4):\n for j in range(49, 53):\n if S[i][j] == '1':\n code = S[i]\n return code\n\n\ndef code_s(code):\n for x in range(M - 1, 0, -1):\n if code[x] == '1':\n return code[x - 55:x + 1]\n\n\ndef code_c(code_s):\n lists = []\n for n in range(8):\n for m in range(10):\n if code_s[n * 7:(n + 1) * 7] == numbers[m]:\n lists.append(m)\n return lists\n\n\nfor tc in range(T):\n N, M = map(int, input().split())\n S = [input() for _ in range(N)]\n numbers = ['0001101', '0011001', '0010011', '0111101', '0100011',\n '0110001', '0101111', '0111011', '0110111', '0001011']\n print(f'#{tc + 1}', end=' ')\n if not (sum(code_c(code_s(code(N)))[0:7:2]) * 3 + sum(code_c(code_s(\n code(N)))[1:8:2])) % 10:\n print(sum(code_c(code_s(code(N)))))\n else:\n print(0)\n",
"step-4": "import sys\nsys.stdin = open('sample_input_17.txt', 'r')\nT = int(input())\n\n\ndef code(N):\n code = []\n for i in range(N - 4):\n for j in range(49, 53):\n if S[i][j] == '1':\n code = S[i]\n return code\n\n\ndef code_s(code):\n for x in range(M - 1, 0, -1):\n if code[x] == '1':\n return code[x - 55:x + 1]\n\n\ndef code_c(code_s):\n lists = []\n for n in range(8):\n for m in range(10):\n if code_s[n * 7:(n + 1) * 7] == numbers[m]:\n lists.append(m)\n return lists\n\n\nfor tc in range(T):\n N, M = map(int, input().split())\n S = [input() for _ in range(N)]\n numbers = ['0001101', '0011001', '0010011', '0111101', '0100011',\n '0110001', '0101111', '0111011', '0110111', '0001011']\n print(f'#{tc + 1}', end=' ')\n if not (sum(code_c(code_s(code(N)))[0:7:2]) * 3 + sum(code_c(code_s(\n code(N)))[1:8:2])) % 10:\n print(sum(code_c(code_s(code(N)))))\n else:\n print(0)\n",
"step-5": "import sys\nsys.stdin = open(\"sample_input_17.txt\",\"r\")\n\nT = int(input())\n\ndef code(N): # 암호코드가 있는 열의 위치를 찾음\n code = []\n for i in range(N-4):\n for j in range(49,53):\n if S[i][j] == \"1\" :\n code = S[i]\n return code\n\ndef code_s(code): # 암호코드의 행 위치를 찾아 슬라이싱\n for x in range(M-1,0,-1):\n if code[x] == \"1\" :\n return code[x-55:x+1]\n\ndef code_c(code_s) : # 암호코드를 7개의 숫자로 슬라이싱하여 해독 정보와 비교\n lists = []\n for n in range(8):\n for m in range(10):\n if code_s[n*7:(n+1)*7] == numbers[m] :\n lists.append(m) \n return lists # 해독 코드\n\nfor tc in range(T):\n N,M = map(int,input().split())\n S = [input() for _ in range(N)]\n numbers = [\"0001101\",\"0011001\",\"0010011\",\"0111101\",\"0100011\",\n \"0110001\",\"0101111\",\"0111011\",\"0110111\",\"0001011\"]\n\n print(f\"#{tc+1}\",end=\" \")\n if not (sum(code_c(code_s(code(N)))[0:7:2])*3+sum(code_c(code_s(code(N)))[1:8:2]))%10 : # 해독코드 10배수인지 확인\n print(sum(code_c(code_s(code(N))))) # 10배수면 암호코드의 1을 모두 더함\n else : # 아니라면 0\n print(0)\n\n\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# Read a 1-indexed permutation of size n and print its inverse:
# position i of the output holds the 1-based index where value i appeared.
count = int(input())
perm = [int(tok) for tok in input().split()]
inverse = [0] * count
for pos, val in enumerate(perm, start=1):
    inverse[val - 1] = pos
print(*inverse)
|
normal
|
{
"blob_id": "f74e2e6b59330bd63fee9192e74a72178abc1cab",
"index": 8195,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(n):\n s = a[i]\n ans[s - 1] = i + 1\nprint(*ans)\n",
"step-3": "n = int(input())\na = [int(e) for e in input().split()]\nans = [0] * n\nfor i in range(n):\n s = a[i]\n ans[s - 1] = i + 1\nprint(*ans)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# -*- coding: utf-8 -*-
import scrapy
import MySQLdb
import openpyxl
from scrapy.crawler import CrawlerProcess
import sys
class AllabolaSpider(scrapy.Spider):
    """Scrape board-member ("befattningar") data from allabolag.se.

    Reads company registration numbers from an Excel sheet, requests each
    company's positions page, and writes the people found both to a CSV
    file (Facebook custom-audience format) and to a MySQL table.

    NOTE(review): the CSV header write, the MySQL connection and the
    CREATE TABLE statement all execute at class-definition (import) time,
    not per crawl — confirm this is intentional.
    """
    name = 'allabola'
    allowed_domains = ['https://www.allabolag.se']
    start_urls = []
    #'https://www.allabolag.se/7696250484/befattningar'
    # MySQL connection settings (hard-coded credentials).
    host = '104.197.180.57'
    user = 'root'
    password = 'root'
    DB_name = "db_allabolag"
    # Start a fresh CSV with the Facebook custom-audience header row.
    f = open('Facebook_Auidance.csv', 'w')
    f.write('fn,ln,zip,ct,st,country,dob,doby,gen,age,uid')
    f.write('\n')
    f.close()
    try:
        connection = MySQLdb.connect(host, user, password,DB_name ,charset='utf8')
        cursor = connection.cursor()
    except Exception as e:
        print(str(e))
    try:
        # Implicit string concatenation: "CREATE TABLE tbl_allabola" + """( ... )""".
        # Fails (and is merely printed) when the table already exists.
        strquery2 = "CREATE TABLE tbl_allabola""""(Id INT NOT NULL AUTO_INCREMENT,
        Registration_no varchar(250) DEFAULT NULL,
        First_name varchar(250) DEFAULT NULL,
        Middle_name varchar(250) DEFAULT NULL,
        Famaily_name varchar(250) DEFAULT NULL,
        Gender longtext DEFAULT NULL,
        Year longtext DEFAULT NULL,
        Board_member longtext DEFAULT NULL,
        PRIMARY KEY (`Id`))"""
        cursor.execute(strquery2)
    except Exception as e:
        print(str(e))
    def start_requests(self):
        """Yield one request per registration number read from the Excel file."""
        try:
            wb = openpyxl.load_workbook(
                '/home//Business_numbers.xlsx')
            ws = wb.get_active_sheet()
            row_count = ws.max_row
            for h in range(2,row_count):
                # Column 2: registration number, column 4: postal code.
                regi_number = ws.cell(row=h, column=2).value
                Post_Code = ws.cell(row=h, column=4).value
                main_link = 'https://www.allabolag.se/'+str(regi_number)+'/befattningar'
                yield scrapy.FormRequest(main_link,callback=self.parse,dont_filter=True,meta={'Post_Code':Post_Code})
        except Exception as e:
            print(e)
    def parse(self, response):
        """Extract each person's name, gender, birth year and role; append to CSV and insert into MySQL."""
        Post_Code = response.meta['Post_Code']
        # Registration number is the path segment after '.se/' in the URL.
        Registration_no = response.url
        Registration_no = Registration_no.split('.se/')[1]
        Registration_no = Registration_no.split('/')[0]
        print(Registration_no)
        ALl_data = response.xpath('//*[@class="list--personnel accordion-body"]/li')
        for datas in ALl_data:
            # CSS class looks like '...--male' / '...--female'; keep the suffix.
            gender = datas.xpath(".//div[1]/span[contains(@class,'male')]/@class").extract_first()
            gender = gender.split('--')[1]
            gender = gender.encode('utf-8')
            # NOTE(review): under Python 3 `gender` is bytes here, so these
            # str comparisons never match; `reload(sys)` below is Python 2
            # only — presumably this spider targets Python 2. Confirm.
            if gender == 'male':
                gender = 'm'
            elif gender == 'female':
                gender = 'f'
            # Link text looks like 'First [Middle ...] Last (f. YYYY)'.
            name = datas.xpath('.//div[2]/a/text()').extract_first()
            name = name.strip()
            name = name.split(' (f. ')
            year = name[1].replace(')','')
            if year != None:
                age = str(2019 - int(year))  # age relative to hard-coded year 2019
            fullname = name[0]
            # try:
            # fullname = str(fullname)
            # except Exception as e:
            # print e
            # Split the full name into first / middle / family parts.
            fullname = fullname.split(' ')
            firstname = ''
            middlename = ''
            familyname = ''
            if len(fullname) == 3:
                firstname = fullname[0]
                middlename = fullname[1]
                familyname = fullname[2]
            elif len(fullname) == 2:
                firstname = fullname[0]
                middlename = fullname[1]
            elif len(fullname) > 3:
                # More than three parts: everything between the first and
                # last token becomes the middle name.
                firstname = fullname[0]
                familyname = fullname[-1]
                middlename = ''
                for k in range(1,len(fullname)-1):
                    if middlename == '':
                        middlename = fullname[k]
                    else:
                        middlename = middlename + ' ' + fullname[k]
            # Third text node under div[2] holds the board role/title.
            type = datas.xpath('.//div[2]/text()').extract()[2]
            Board_member = type.replace('\n','').strip()
            if gender != '':
                # Append one CSV row in the audience format written above.
                f = open('Facebook_Auidance.csv', 'a')
                try:
                    f.write(firstname+','+familyname+','+Post_Code+','+''+','+''+','+'Sweden'+','+''+','+year+','+gender+','+age+','+'')
                except Exception as e:
                    f.close()
                try:
                    f.write('\n')
                    f.close()
                except Exception as e:
                    ''
            if gender != '':
                try:
                    # Python 2 idiom to force utf-8 default encoding before
                    # the DB insert; see NOTE(review) above.
                    reload(sys)
                    sys.setdefaultencoding('utf8')
                    self.cursor.execute(
                        """INSERT INTO tbl_allabola(Registration_no,First_name,Middle_name,Famaily_name,Gender,Year,Board_member)VALUES (%s,%s,%s,%s,%s,%s,%s)""",
                        (Registration_no, firstname, middlename,familyname,gender,year,Board_member))
                    self.connection.commit()
                except Exception as e:
                    print(e)
# Run the spider in-process with Scrapy's logging disabled.
process = CrawlerProcess({'LOG_ENABLED': False})
process.crawl(AllabolaSpider)
try:
    process.start()
except:
    # NOTE(review): bare except swallows every error (including Ctrl-C);
    # presumably meant to mask reactor shutdown errors — confirm.
    pass
|
normal
|
{
"blob_id": "d60a2100127db859162890204655d313cdc2a4a5",
"index": 4614,
"step-1": "<mask token>\n\n\nclass AllabolaSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n f.write('fn,ln,zip,ct,st,country,dob,doby,gen,age,uid')\n f.write('\\n')\n f.close()\n try:\n connection = MySQLdb.connect(host, user, password, DB_name, charset\n ='utf8')\n cursor = connection.cursor()\n except Exception as e:\n print(str(e))\n try:\n strquery2 = \"\"\"CREATE TABLE tbl_allabola(Id INT NOT NULL AUTO_INCREMENT,\n Registration_no varchar(250) DEFAULT NULL,\n First_name varchar(250) DEFAULT NULL,\n Middle_name varchar(250) DEFAULT NULL,\n Famaily_name varchar(250) DEFAULT NULL,\n Gender longtext DEFAULT NULL,\n Year longtext DEFAULT NULL,\n Board_member longtext DEFAULT NULL,\n PRIMARY KEY (`Id`))\"\"\"\n cursor.execute(strquery2)\n except Exception as e:\n print(str(e))\n <mask token>\n\n def parse(self, response):\n Post_Code = response.meta['Post_Code']\n Registration_no = response.url\n Registration_no = Registration_no.split('.se/')[1]\n Registration_no = Registration_no.split('/')[0]\n print(Registration_no)\n ALl_data = response.xpath(\n '//*[@class=\"list--personnel accordion-body\"]/li')\n for datas in ALl_data:\n gender = datas.xpath(\n \".//div[1]/span[contains(@class,'male')]/@class\"\n ).extract_first()\n gender = gender.split('--')[1]\n gender = gender.encode('utf-8')\n if gender == 'male':\n gender = 'm'\n elif gender == 'female':\n gender = 'f'\n name = datas.xpath('.//div[2]/a/text()').extract_first()\n name = name.strip()\n name = name.split(' (f. 
')\n year = name[1].replace(')', '')\n if year != None:\n age = str(2019 - int(year))\n fullname = name[0]\n fullname = fullname.split(' ')\n firstname = ''\n middlename = ''\n familyname = ''\n if len(fullname) == 3:\n firstname = fullname[0]\n middlename = fullname[1]\n familyname = fullname[2]\n elif len(fullname) == 2:\n firstname = fullname[0]\n middlename = fullname[1]\n elif len(fullname) > 3:\n firstname = fullname[0]\n familyname = fullname[-1]\n middlename = ''\n for k in range(1, len(fullname) - 1):\n if middlename == '':\n middlename = fullname[k]\n else:\n middlename = middlename + ' ' + fullname[k]\n type = datas.xpath('.//div[2]/text()').extract()[2]\n Board_member = type.replace('\\n', '').strip()\n if gender != '':\n f = open('Facebook_Auidance.csv', 'a')\n try:\n f.write(firstname + ',' + familyname + ',' + Post_Code +\n ',' + '' + ',' + '' + ',' + 'Sweden' + ',' + '' +\n ',' + year + ',' + gender + ',' + age + ',' + '')\n except Exception as e:\n f.close()\n try:\n f.write('\\n')\n f.close()\n except Exception as e:\n \"\"\"\"\"\"\n if gender != '':\n try:\n reload(sys)\n sys.setdefaultencoding('utf8')\n self.cursor.execute(\n 'INSERT INTO tbl_allabola(Registration_no,First_name,Middle_name,Famaily_name,Gender,Year,Board_member)VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (Registration_no, firstname, middlename,\n familyname, gender, year, Board_member))\n self.connection.commit()\n except Exception as e:\n print(e)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass AllabolaSpider(scrapy.Spider):\n name = 'allabola'\n allowed_domains = ['https://www.allabolag.se']\n start_urls = []\n host = '104.197.180.57'\n user = 'root'\n password = 'root'\n DB_name = 'db_allabolag'\n f = open('Facebook_Auidance.csv', 'w')\n f.write('fn,ln,zip,ct,st,country,dob,doby,gen,age,uid')\n f.write('\\n')\n f.close()\n try:\n connection = MySQLdb.connect(host, user, password, DB_name, charset\n ='utf8')\n cursor = connection.cursor()\n except Exception as e:\n print(str(e))\n try:\n strquery2 = \"\"\"CREATE TABLE tbl_allabola(Id INT NOT NULL AUTO_INCREMENT,\n Registration_no varchar(250) DEFAULT NULL,\n First_name varchar(250) DEFAULT NULL,\n Middle_name varchar(250) DEFAULT NULL,\n Famaily_name varchar(250) DEFAULT NULL,\n Gender longtext DEFAULT NULL,\n Year longtext DEFAULT NULL,\n Board_member longtext DEFAULT NULL,\n PRIMARY KEY (`Id`))\"\"\"\n cursor.execute(strquery2)\n except Exception as e:\n print(str(e))\n\n def start_requests(self):\n try:\n wb = openpyxl.load_workbook('/home//Business_numbers.xlsx')\n ws = wb.get_active_sheet()\n row_count = ws.max_row\n for h in range(2, row_count):\n regi_number = ws.cell(row=h, column=2).value\n Post_Code = ws.cell(row=h, column=4).value\n main_link = 'https://www.allabolag.se/' + str(regi_number\n ) + '/befattningar'\n yield scrapy.FormRequest(main_link, callback=self.parse,\n dont_filter=True, meta={'Post_Code': Post_Code})\n except Exception as e:\n print(e)\n\n def parse(self, response):\n Post_Code = response.meta['Post_Code']\n Registration_no = response.url\n Registration_no = Registration_no.split('.se/')[1]\n Registration_no = Registration_no.split('/')[0]\n print(Registration_no)\n ALl_data = response.xpath(\n '//*[@class=\"list--personnel accordion-body\"]/li')\n for datas in ALl_data:\n gender = datas.xpath(\n \".//div[1]/span[contains(@class,'male')]/@class\"\n ).extract_first()\n gender = gender.split('--')[1]\n gender = gender.encode('utf-8')\n if 
gender == 'male':\n gender = 'm'\n elif gender == 'female':\n gender = 'f'\n name = datas.xpath('.//div[2]/a/text()').extract_first()\n name = name.strip()\n name = name.split(' (f. ')\n year = name[1].replace(')', '')\n if year != None:\n age = str(2019 - int(year))\n fullname = name[0]\n fullname = fullname.split(' ')\n firstname = ''\n middlename = ''\n familyname = ''\n if len(fullname) == 3:\n firstname = fullname[0]\n middlename = fullname[1]\n familyname = fullname[2]\n elif len(fullname) == 2:\n firstname = fullname[0]\n middlename = fullname[1]\n elif len(fullname) > 3:\n firstname = fullname[0]\n familyname = fullname[-1]\n middlename = ''\n for k in range(1, len(fullname) - 1):\n if middlename == '':\n middlename = fullname[k]\n else:\n middlename = middlename + ' ' + fullname[k]\n type = datas.xpath('.//div[2]/text()').extract()[2]\n Board_member = type.replace('\\n', '').strip()\n if gender != '':\n f = open('Facebook_Auidance.csv', 'a')\n try:\n f.write(firstname + ',' + familyname + ',' + Post_Code +\n ',' + '' + ',' + '' + ',' + 'Sweden' + ',' + '' +\n ',' + year + ',' + gender + ',' + age + ',' + '')\n except Exception as e:\n f.close()\n try:\n f.write('\\n')\n f.close()\n except Exception as e:\n \"\"\"\"\"\"\n if gender != '':\n try:\n reload(sys)\n sys.setdefaultencoding('utf8')\n self.cursor.execute(\n 'INSERT INTO tbl_allabola(Registration_no,First_name,Middle_name,Famaily_name,Gender,Year,Board_member)VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (Registration_no, firstname, middlename,\n familyname, gender, year, Board_member))\n self.connection.commit()\n except Exception as e:\n print(e)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass AllabolaSpider(scrapy.Spider):\n name = 'allabola'\n allowed_domains = ['https://www.allabolag.se']\n start_urls = []\n host = '104.197.180.57'\n user = 'root'\n password = 'root'\n DB_name = 'db_allabolag'\n f = open('Facebook_Auidance.csv', 'w')\n f.write('fn,ln,zip,ct,st,country,dob,doby,gen,age,uid')\n f.write('\\n')\n f.close()\n try:\n connection = MySQLdb.connect(host, user, password, DB_name, charset\n ='utf8')\n cursor = connection.cursor()\n except Exception as e:\n print(str(e))\n try:\n strquery2 = \"\"\"CREATE TABLE tbl_allabola(Id INT NOT NULL AUTO_INCREMENT,\n Registration_no varchar(250) DEFAULT NULL,\n First_name varchar(250) DEFAULT NULL,\n Middle_name varchar(250) DEFAULT NULL,\n Famaily_name varchar(250) DEFAULT NULL,\n Gender longtext DEFAULT NULL,\n Year longtext DEFAULT NULL,\n Board_member longtext DEFAULT NULL,\n PRIMARY KEY (`Id`))\"\"\"\n cursor.execute(strquery2)\n except Exception as e:\n print(str(e))\n\n def start_requests(self):\n try:\n wb = openpyxl.load_workbook('/home//Business_numbers.xlsx')\n ws = wb.get_active_sheet()\n row_count = ws.max_row\n for h in range(2, row_count):\n regi_number = ws.cell(row=h, column=2).value\n Post_Code = ws.cell(row=h, column=4).value\n main_link = 'https://www.allabolag.se/' + str(regi_number\n ) + '/befattningar'\n yield scrapy.FormRequest(main_link, callback=self.parse,\n dont_filter=True, meta={'Post_Code': Post_Code})\n except Exception as e:\n print(e)\n\n def parse(self, response):\n Post_Code = response.meta['Post_Code']\n Registration_no = response.url\n Registration_no = Registration_no.split('.se/')[1]\n Registration_no = Registration_no.split('/')[0]\n print(Registration_no)\n ALl_data = response.xpath(\n '//*[@class=\"list--personnel accordion-body\"]/li')\n for datas in ALl_data:\n gender = datas.xpath(\n \".//div[1]/span[contains(@class,'male')]/@class\"\n ).extract_first()\n gender = gender.split('--')[1]\n gender = gender.encode('utf-8')\n if 
gender == 'male':\n gender = 'm'\n elif gender == 'female':\n gender = 'f'\n name = datas.xpath('.//div[2]/a/text()').extract_first()\n name = name.strip()\n name = name.split(' (f. ')\n year = name[1].replace(')', '')\n if year != None:\n age = str(2019 - int(year))\n fullname = name[0]\n fullname = fullname.split(' ')\n firstname = ''\n middlename = ''\n familyname = ''\n if len(fullname) == 3:\n firstname = fullname[0]\n middlename = fullname[1]\n familyname = fullname[2]\n elif len(fullname) == 2:\n firstname = fullname[0]\n middlename = fullname[1]\n elif len(fullname) > 3:\n firstname = fullname[0]\n familyname = fullname[-1]\n middlename = ''\n for k in range(1, len(fullname) - 1):\n if middlename == '':\n middlename = fullname[k]\n else:\n middlename = middlename + ' ' + fullname[k]\n type = datas.xpath('.//div[2]/text()').extract()[2]\n Board_member = type.replace('\\n', '').strip()\n if gender != '':\n f = open('Facebook_Auidance.csv', 'a')\n try:\n f.write(firstname + ',' + familyname + ',' + Post_Code +\n ',' + '' + ',' + '' + ',' + 'Sweden' + ',' + '' +\n ',' + year + ',' + gender + ',' + age + ',' + '')\n except Exception as e:\n f.close()\n try:\n f.write('\\n')\n f.close()\n except Exception as e:\n \"\"\"\"\"\"\n if gender != '':\n try:\n reload(sys)\n sys.setdefaultencoding('utf8')\n self.cursor.execute(\n 'INSERT INTO tbl_allabola(Registration_no,First_name,Middle_name,Famaily_name,Gender,Year,Board_member)VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (Registration_no, firstname, middlename,\n familyname, gender, year, Board_member))\n self.connection.commit()\n except Exception as e:\n print(e)\n\n\n<mask token>\nprocess.crawl(AllabolaSpider)\ntry:\n process.start()\nexcept:\n pass\n",
"step-4": "<mask token>\n\n\nclass AllabolaSpider(scrapy.Spider):\n name = 'allabola'\n allowed_domains = ['https://www.allabolag.se']\n start_urls = []\n host = '104.197.180.57'\n user = 'root'\n password = 'root'\n DB_name = 'db_allabolag'\n f = open('Facebook_Auidance.csv', 'w')\n f.write('fn,ln,zip,ct,st,country,dob,doby,gen,age,uid')\n f.write('\\n')\n f.close()\n try:\n connection = MySQLdb.connect(host, user, password, DB_name, charset\n ='utf8')\n cursor = connection.cursor()\n except Exception as e:\n print(str(e))\n try:\n strquery2 = \"\"\"CREATE TABLE tbl_allabola(Id INT NOT NULL AUTO_INCREMENT,\n Registration_no varchar(250) DEFAULT NULL,\n First_name varchar(250) DEFAULT NULL,\n Middle_name varchar(250) DEFAULT NULL,\n Famaily_name varchar(250) DEFAULT NULL,\n Gender longtext DEFAULT NULL,\n Year longtext DEFAULT NULL,\n Board_member longtext DEFAULT NULL,\n PRIMARY KEY (`Id`))\"\"\"\n cursor.execute(strquery2)\n except Exception as e:\n print(str(e))\n\n def start_requests(self):\n try:\n wb = openpyxl.load_workbook('/home//Business_numbers.xlsx')\n ws = wb.get_active_sheet()\n row_count = ws.max_row\n for h in range(2, row_count):\n regi_number = ws.cell(row=h, column=2).value\n Post_Code = ws.cell(row=h, column=4).value\n main_link = 'https://www.allabolag.se/' + str(regi_number\n ) + '/befattningar'\n yield scrapy.FormRequest(main_link, callback=self.parse,\n dont_filter=True, meta={'Post_Code': Post_Code})\n except Exception as e:\n print(e)\n\n def parse(self, response):\n Post_Code = response.meta['Post_Code']\n Registration_no = response.url\n Registration_no = Registration_no.split('.se/')[1]\n Registration_no = Registration_no.split('/')[0]\n print(Registration_no)\n ALl_data = response.xpath(\n '//*[@class=\"list--personnel accordion-body\"]/li')\n for datas in ALl_data:\n gender = datas.xpath(\n \".//div[1]/span[contains(@class,'male')]/@class\"\n ).extract_first()\n gender = gender.split('--')[1]\n gender = gender.encode('utf-8')\n if 
gender == 'male':\n gender = 'm'\n elif gender == 'female':\n gender = 'f'\n name = datas.xpath('.//div[2]/a/text()').extract_first()\n name = name.strip()\n name = name.split(' (f. ')\n year = name[1].replace(')', '')\n if year != None:\n age = str(2019 - int(year))\n fullname = name[0]\n fullname = fullname.split(' ')\n firstname = ''\n middlename = ''\n familyname = ''\n if len(fullname) == 3:\n firstname = fullname[0]\n middlename = fullname[1]\n familyname = fullname[2]\n elif len(fullname) == 2:\n firstname = fullname[0]\n middlename = fullname[1]\n elif len(fullname) > 3:\n firstname = fullname[0]\n familyname = fullname[-1]\n middlename = ''\n for k in range(1, len(fullname) - 1):\n if middlename == '':\n middlename = fullname[k]\n else:\n middlename = middlename + ' ' + fullname[k]\n type = datas.xpath('.//div[2]/text()').extract()[2]\n Board_member = type.replace('\\n', '').strip()\n if gender != '':\n f = open('Facebook_Auidance.csv', 'a')\n try:\n f.write(firstname + ',' + familyname + ',' + Post_Code +\n ',' + '' + ',' + '' + ',' + 'Sweden' + ',' + '' +\n ',' + year + ',' + gender + ',' + age + ',' + '')\n except Exception as e:\n f.close()\n try:\n f.write('\\n')\n f.close()\n except Exception as e:\n \"\"\"\"\"\"\n if gender != '':\n try:\n reload(sys)\n sys.setdefaultencoding('utf8')\n self.cursor.execute(\n 'INSERT INTO tbl_allabola(Registration_no,First_name,Middle_name,Famaily_name,Gender,Year,Board_member)VALUES (%s,%s,%s,%s,%s,%s,%s)'\n , (Registration_no, firstname, middlename,\n familyname, gender, year, Board_member))\n self.connection.commit()\n except Exception as e:\n print(e)\n\n\nprocess = CrawlerProcess({'LOG_ENABLED': False})\nprocess.crawl(AllabolaSpider)\ntry:\n process.start()\nexcept:\n pass\n",
"step-5": "# -*- coding: utf-8 -*-\nimport scrapy\nimport MySQLdb\nimport openpyxl\nfrom scrapy.crawler import CrawlerProcess\nimport sys\n\n\nclass AllabolaSpider(scrapy.Spider):\n name = 'allabola'\n allowed_domains = ['https://www.allabolag.se']\n start_urls = []\n #'https://www.allabolag.se/7696250484/befattningar'\n host = '104.197.180.57'\n user = 'root'\n password = 'root'\n DB_name = \"db_allabolag\"\n f = open('Facebook_Auidance.csv', 'w')\n f.write('fn,ln,zip,ct,st,country,dob,doby,gen,age,uid')\n f.write('\\n')\n f.close()\n try:\n connection = MySQLdb.connect(host, user, password,DB_name ,charset='utf8')\n cursor = connection.cursor()\n except Exception as e:\n print(str(e))\n\n try:\n strquery2 = \"CREATE TABLE tbl_allabola\"\"\"\"(Id INT NOT NULL AUTO_INCREMENT,\n Registration_no varchar(250) DEFAULT NULL,\n First_name varchar(250) DEFAULT NULL,\n Middle_name varchar(250) DEFAULT NULL,\n Famaily_name varchar(250) DEFAULT NULL,\n Gender longtext DEFAULT NULL,\n Year longtext DEFAULT NULL,\n Board_member longtext DEFAULT NULL,\n PRIMARY KEY (`Id`))\"\"\"\n\n cursor.execute(strquery2)\n except Exception as e:\n print(str(e))\n\n def start_requests(self):\n try:\n\n wb = openpyxl.load_workbook(\n '/home//Business_numbers.xlsx')\n ws = wb.get_active_sheet()\n\n row_count = ws.max_row\n\n\n\n for h in range(2,row_count):\n regi_number = ws.cell(row=h, column=2).value\n Post_Code = ws.cell(row=h, column=4).value\n main_link = 'https://www.allabolag.se/'+str(regi_number)+'/befattningar'\n yield scrapy.FormRequest(main_link,callback=self.parse,dont_filter=True,meta={'Post_Code':Post_Code})\n except Exception as e:\n print(e)\n\n def parse(self, response):\n\n Post_Code = response.meta['Post_Code']\n Registration_no = response.url\n Registration_no = Registration_no.split('.se/')[1]\n Registration_no = Registration_no.split('/')[0]\n print(Registration_no)\n ALl_data = response.xpath('//*[@class=\"list--personnel accordion-body\"]/li')\n\n for datas in 
ALl_data:\n\n gender = datas.xpath(\".//div[1]/span[contains(@class,'male')]/@class\").extract_first()\n gender = gender.split('--')[1]\n gender = gender.encode('utf-8')\n if gender == 'male':\n gender = 'm'\n elif gender == 'female':\n gender = 'f'\n\n name = datas.xpath('.//div[2]/a/text()').extract_first()\n name = name.strip()\n name = name.split(' (f. ')\n year = name[1].replace(')','')\n if year != None:\n age = str(2019 - int(year))\n fullname = name[0]\n # try:\n # fullname = str(fullname)\n # except Exception as e:\n # print e\n fullname = fullname.split(' ')\n firstname = ''\n middlename = ''\n familyname = ''\n if len(fullname) == 3:\n firstname = fullname[0]\n middlename = fullname[1]\n familyname = fullname[2]\n elif len(fullname) == 2:\n firstname = fullname[0]\n middlename = fullname[1]\n elif len(fullname) > 3:\n firstname = fullname[0]\n familyname = fullname[-1]\n middlename = ''\n for k in range(1,len(fullname)-1):\n if middlename == '':\n middlename = fullname[k]\n else:\n middlename = middlename + ' ' + fullname[k]\n\n\n type = datas.xpath('.//div[2]/text()').extract()[2]\n Board_member = type.replace('\\n','').strip()\n if gender != '':\n\n f = open('Facebook_Auidance.csv', 'a')\n try:\n f.write(firstname+','+familyname+','+Post_Code+','+''+','+''+','+'Sweden'+','+''+','+year+','+gender+','+age+','+'')\n except Exception as e:\n f.close()\n try:\n f.write('\\n')\n f.close()\n except Exception as e:\n ''\n\n if gender != '':\n try:\n reload(sys)\n sys.setdefaultencoding('utf8')\n self.cursor.execute(\n \"\"\"INSERT INTO tbl_allabola(Registration_no,First_name,Middle_name,Famaily_name,Gender,Year,Board_member)VALUES (%s,%s,%s,%s,%s,%s,%s)\"\"\",\n (Registration_no, firstname, middlename,familyname,gender,year,Board_member))\n self.connection.commit()\n except Exception as e:\n print(e)\n\n\nprocess = CrawlerProcess({'LOG_ENABLED': False})\nprocess.crawl(AllabolaSpider)\ntry:\n process.start()\nexcept:\n pass\n\n\n",
"step-ids": [
2,
4,
5,
6,
8
]
}
|
[
2,
4,
5,
6,
8
] |
from django.shortcuts import render
from rest_framework.response import Response
from .serializers import *
from rest_framework import generics, status
class HistoryMyList(generics.ListCreateAPIView):
    """GET: list every History record; POST: create one via HistorySer."""
    queryset = History.objects.all()
    serializer_class = HistorySer
class HistoryListView(generics.GenericAPIView):
    """POST endpoint that validates a payload and echoes it back."""

    serializer_class = HistorySerializer

    def post(self, request):
        # Validate the incoming payload; invalid data raises a 400 via
        # raise_exception=True.
        serializer = self.serializer_class(data=request.data)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
class HistoryView(generics.RetrieveUpdateDestroyAPIView):
    # Retrieve / update / delete a single record.
    # NOTE(review): the serializer is HistorySerializer but the queryset is
    # Sentiment.objects.all() — looks like a copy-paste slip; confirm
    # whether this should be History.objects.all().
    serializer_class = HistorySerializer
    queryset = Sentiment.objects.all()
class SentimenListView(generics.ListCreateAPIView):
    """List all Sentiment records (GET) or create one (POST).

    Fix: ``serializer_class`` must be the serializer *class*, not an
    instance. DRF generic views instantiate it themselves via
    ``get_serializer()`` (passing ``many=True`` automatically for list
    responses), so ``SentimenSerializer(many=True)`` broke every request.
    """
    queryset = Sentiment.objects.all()
    serializer_class = SentimenSerializer
class SentimenView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete a single Sentiment record."""
    queryset = Sentiment.objects.all()
    serializer_class = SentimenSerializer
|
normal
|
{
"blob_id": "8edca4c50e48734073e80de85088964837247696",
"index": 2597,
"step-1": "<mask token>\n\n\nclass HistoryListView(generics.GenericAPIView):\n <mask token>\n\n def post(self, request):\n serializer_class = self.serializer_class(data=request.data)\n serializer_class.is_valid(raise_exception=True)\n return Response(serializer_class.data, status=status.HTTP_200_OK)\n\n\nclass HistoryView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = HistorySerializer\n queryset = Sentiment.objects.all()\n\n\nclass SentimenListView(generics.ListCreateAPIView):\n queryset = Sentiment.objects.all()\n serializer_class = SentimenSerializer(many=True)\n\n\nclass SentimenView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = SentimenSerializer\n queryset = Sentiment.objects.all()\n",
"step-2": "<mask token>\n\n\nclass HistoryListView(generics.GenericAPIView):\n serializer_class = HistorySerializer\n\n def post(self, request):\n serializer_class = self.serializer_class(data=request.data)\n serializer_class.is_valid(raise_exception=True)\n return Response(serializer_class.data, status=status.HTTP_200_OK)\n\n\nclass HistoryView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = HistorySerializer\n queryset = Sentiment.objects.all()\n\n\nclass SentimenListView(generics.ListCreateAPIView):\n queryset = Sentiment.objects.all()\n serializer_class = SentimenSerializer(many=True)\n\n\nclass SentimenView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = SentimenSerializer\n queryset = Sentiment.objects.all()\n",
"step-3": "<mask token>\n\n\nclass HistoryMyList(generics.ListCreateAPIView):\n serializer_class = HistorySer\n queryset = History.objects.all()\n\n\nclass HistoryListView(generics.GenericAPIView):\n serializer_class = HistorySerializer\n\n def post(self, request):\n serializer_class = self.serializer_class(data=request.data)\n serializer_class.is_valid(raise_exception=True)\n return Response(serializer_class.data, status=status.HTTP_200_OK)\n\n\nclass HistoryView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = HistorySerializer\n queryset = Sentiment.objects.all()\n\n\nclass SentimenListView(generics.ListCreateAPIView):\n queryset = Sentiment.objects.all()\n serializer_class = SentimenSerializer(many=True)\n\n\nclass SentimenView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = SentimenSerializer\n queryset = Sentiment.objects.all()\n",
"step-4": "from django.shortcuts import render\nfrom rest_framework.response import Response\nfrom .serializers import *\nfrom rest_framework import generics, status\n\n\nclass HistoryMyList(generics.ListCreateAPIView):\n serializer_class = HistorySer\n queryset = History.objects.all()\n\n\nclass HistoryListView(generics.GenericAPIView):\n serializer_class = HistorySerializer\n\n def post(self, request):\n serializer_class = self.serializer_class(data=request.data)\n serializer_class.is_valid(raise_exception=True)\n return Response(serializer_class.data, status=status.HTTP_200_OK)\n\n\nclass HistoryView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = HistorySerializer\n queryset = Sentiment.objects.all()\n\n\nclass SentimenListView(generics.ListCreateAPIView):\n queryset = Sentiment.objects.all()\n serializer_class = SentimenSerializer(many=True)\n\n\nclass SentimenView(generics.RetrieveUpdateDestroyAPIView):\n serializer_class = SentimenSerializer\n queryset = Sentiment.objects.all()\n",
"step-5": null,
"step-ids": [
8,
9,
11,
12
]
}
|
[
8,
9,
11,
12
] |
from django.urls import path
from . import views
# url configuration for view.index function
app_name = 'movies'
urlpatterns = [
path('', views.index, name='index'), # represents a root of this app
path('<int:movie_id>', views.detail, name='detail')
]
|
normal
|
{
"blob_id": "5aaac757b766b0143ca3ea54d8fc4b8936160ec7",
"index": 5090,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'movies'\nurlpatterns = [path('', views.index, name='index'), path('<int:movie_id>',\n views.detail, name='detail')]\n",
"step-3": "from django.urls import path\nfrom . import views\napp_name = 'movies'\nurlpatterns = [path('', views.index, name='index'), path('<int:movie_id>',\n views.detail, name='detail')]\n",
"step-4": "from django.urls import path\nfrom . import views\n\n# url configuration for view.index function\napp_name = 'movies'\nurlpatterns = [\n path('', views.index, name='index'), # represents a root of this app\n path('<int:movie_id>', views.detail, name='detail')\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def group(arr):
low, mid, high = 0, 0, len(arr)-1
while mid <= high:
print(arr)
if arr[mid] == 'R' :
arr[low], arr[mid] = arr[mid], arr[low]
low += 1
mid += 1
elif arr[mid] == 'G':
mid += 1
else:
arr[high], arr[mid] = arr[mid], arr[high]
high -= 1
return arr
*arr, = map(str, input("enter the list of R, G, B").split())
print(group(arr))
|
normal
|
{
"blob_id": "8ad47bf292e0046550cc0ef6f6bb75cf179ebd4b",
"index": 7477,
"step-1": "<mask token>\n",
"step-2": "def group(arr):\n low, mid, high = 0, 0, len(arr) - 1\n while mid <= high:\n print(arr)\n if arr[mid] == 'R':\n arr[low], arr[mid] = arr[mid], arr[low]\n low += 1\n mid += 1\n elif arr[mid] == 'G':\n mid += 1\n else:\n arr[high], arr[mid] = arr[mid], arr[high]\n high -= 1\n return arr\n\n\n<mask token>\n",
"step-3": "def group(arr):\n low, mid, high = 0, 0, len(arr) - 1\n while mid <= high:\n print(arr)\n if arr[mid] == 'R':\n arr[low], arr[mid] = arr[mid], arr[low]\n low += 1\n mid += 1\n elif arr[mid] == 'G':\n mid += 1\n else:\n arr[high], arr[mid] = arr[mid], arr[high]\n high -= 1\n return arr\n\n\n<mask token>\nprint(group(arr))\n",
"step-4": "def group(arr):\n low, mid, high = 0, 0, len(arr) - 1\n while mid <= high:\n print(arr)\n if arr[mid] == 'R':\n arr[low], arr[mid] = arr[mid], arr[low]\n low += 1\n mid += 1\n elif arr[mid] == 'G':\n mid += 1\n else:\n arr[high], arr[mid] = arr[mid], arr[high]\n high -= 1\n return arr\n\n\n*arr, = map(str, input('enter the list of R, G, B').split())\nprint(group(arr))\n",
"step-5": "def group(arr):\r\n low, mid, high = 0, 0, len(arr)-1\r\n while mid <= high:\r\n print(arr)\r\n if arr[mid] == 'R' :\r\n arr[low], arr[mid] = arr[mid], arr[low]\r\n low += 1\r\n mid += 1\r\n elif arr[mid] == 'G':\r\n mid += 1\r\n else:\r\n arr[high], arr[mid] = arr[mid], arr[high]\r\n high -= 1\r\n return arr\r\n \r\n*arr, = map(str, input(\"enter the list of R, G, B\").split())\r\n\r\nprint(group(arr))\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#coding=utf-8
import urllib.parse
import json
'''转化从charles复制下来的字串,转为json格式'''
def to_str(body_str):
'''检查需要转化的str是否符合标准'''
if not body_str == '':
par = body_str.split("&")
# print(par)
_temp = []
try:
for each in par:
if "=" not in each:
print("参数不合规,请检查")
return ''
if len(each.split("=")) != 2:
print("参数不合规,请检查")
return ''
if each.split("=")[1] != '':
_temp.append(each.split('=')[1])
except:
print("参数不合规,请检查")
return ''
else:
print("传入为空:%s"%body_str)
return ''
return urllib.parse.unquote(body_str)
def to_json(body_str):
'''转化格式'''
try:
body_str = to_str(body_str)
except:
return False
body_dict = {}
# print(body_str)
for each in body_str.split("&"):
body_dict[str(each.split("=")[0])] = str(each.split("=")[1])
print(body_dict)
with open("demo.json","w") as demo:
demo.write(json.dumps(body_dict,indent=4))
if __name__ == '__main__':
bstr = '123'
to_json(bstr)
|
normal
|
{
"blob_id": "d8e9b9f7a8d5ec2a72f083ec2283e8c0724dbe0d",
"index": 9119,
"step-1": "<mask token>\n\n\ndef to_json(body_str):\n \"\"\"转化格式\"\"\"\n try:\n body_str = to_str(body_str)\n except:\n return False\n body_dict = {}\n for each in body_str.split('&'):\n body_dict[str(each.split('=')[0])] = str(each.split('=')[1])\n print(body_dict)\n with open('demo.json', 'w') as demo:\n demo.write(json.dumps(body_dict, indent=4))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef to_str(body_str):\n \"\"\"检查需要转化的str是否符合标准\"\"\"\n if not body_str == '':\n par = body_str.split('&')\n _temp = []\n try:\n for each in par:\n if '=' not in each:\n print('参数不合规,请检查')\n return ''\n if len(each.split('=')) != 2:\n print('参数不合规,请检查')\n return ''\n if each.split('=')[1] != '':\n _temp.append(each.split('=')[1])\n except:\n print('参数不合规,请检查')\n return ''\n else:\n print('传入为空:%s' % body_str)\n return ''\n return urllib.parse.unquote(body_str)\n\n\ndef to_json(body_str):\n \"\"\"转化格式\"\"\"\n try:\n body_str = to_str(body_str)\n except:\n return False\n body_dict = {}\n for each in body_str.split('&'):\n body_dict[str(each.split('=')[0])] = str(each.split('=')[1])\n print(body_dict)\n with open('demo.json', 'w') as demo:\n demo.write(json.dumps(body_dict, indent=4))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef to_str(body_str):\n \"\"\"检查需要转化的str是否符合标准\"\"\"\n if not body_str == '':\n par = body_str.split('&')\n _temp = []\n try:\n for each in par:\n if '=' not in each:\n print('参数不合规,请检查')\n return ''\n if len(each.split('=')) != 2:\n print('参数不合规,请检查')\n return ''\n if each.split('=')[1] != '':\n _temp.append(each.split('=')[1])\n except:\n print('参数不合规,请检查')\n return ''\n else:\n print('传入为空:%s' % body_str)\n return ''\n return urllib.parse.unquote(body_str)\n\n\ndef to_json(body_str):\n \"\"\"转化格式\"\"\"\n try:\n body_str = to_str(body_str)\n except:\n return False\n body_dict = {}\n for each in body_str.split('&'):\n body_dict[str(each.split('=')[0])] = str(each.split('=')[1])\n print(body_dict)\n with open('demo.json', 'w') as demo:\n demo.write(json.dumps(body_dict, indent=4))\n\n\nif __name__ == '__main__':\n bstr = '123'\n to_json(bstr)\n",
"step-4": "import urllib.parse\nimport json\n<mask token>\n\n\ndef to_str(body_str):\n \"\"\"检查需要转化的str是否符合标准\"\"\"\n if not body_str == '':\n par = body_str.split('&')\n _temp = []\n try:\n for each in par:\n if '=' not in each:\n print('参数不合规,请检查')\n return ''\n if len(each.split('=')) != 2:\n print('参数不合规,请检查')\n return ''\n if each.split('=')[1] != '':\n _temp.append(each.split('=')[1])\n except:\n print('参数不合规,请检查')\n return ''\n else:\n print('传入为空:%s' % body_str)\n return ''\n return urllib.parse.unquote(body_str)\n\n\ndef to_json(body_str):\n \"\"\"转化格式\"\"\"\n try:\n body_str = to_str(body_str)\n except:\n return False\n body_dict = {}\n for each in body_str.split('&'):\n body_dict[str(each.split('=')[0])] = str(each.split('=')[1])\n print(body_dict)\n with open('demo.json', 'w') as demo:\n demo.write(json.dumps(body_dict, indent=4))\n\n\nif __name__ == '__main__':\n bstr = '123'\n to_json(bstr)\n",
"step-5": "#coding=utf-8\nimport urllib.parse\nimport json\n'''转化从charles复制下来的字串,转为json格式'''\ndef to_str(body_str):\n '''检查需要转化的str是否符合标准'''\n if not body_str == '':\n par = body_str.split(\"&\")\n # print(par)\n _temp = []\n try:\n for each in par:\n if \"=\" not in each:\n print(\"参数不合规,请检查\")\n return ''\n if len(each.split(\"=\")) != 2:\n print(\"参数不合规,请检查\")\n return ''\n if each.split(\"=\")[1] != '':\n _temp.append(each.split('=')[1])\n except:\n print(\"参数不合规,请检查\")\n return ''\n else:\n print(\"传入为空:%s\"%body_str)\n return ''\n return urllib.parse.unquote(body_str)\n\ndef to_json(body_str):\n '''转化格式'''\n try:\n body_str = to_str(body_str)\n except:\n return False\n body_dict = {}\n # print(body_str)\n for each in body_str.split(\"&\"):\n body_dict[str(each.split(\"=\")[0])] = str(each.split(\"=\")[1])\n print(body_dict)\n with open(\"demo.json\",\"w\") as demo:\n demo.write(json.dumps(body_dict,indent=4))\nif __name__ == '__main__':\n bstr = '123'\n to_json(bstr)",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
ii = [('LeakWTI2.py', 6)]
|
flexible
|
{
"blob_id": "997b68e42547b8f8a1059776c55c3ad16df494da",
"index": 1468,
"step-1": "<mask token>\n",
"step-2": "ii = [('LeakWTI2.py', 6)]\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
animal = 'cat'
def f():
global animal
animal = 'dog'
print('local_scope:', animal)
print('local:', locals())
f()
print('global_scope:', animal)
print('global:', locals())
|
normal
|
{
"blob_id": "4f3908e12102cfd58737952803c710772e960b0e",
"index": 2385,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef f():\n global animal\n animal = 'dog'\n print('local_scope:', animal)\n print('local:', locals())\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef f():\n global animal\n animal = 'dog'\n print('local_scope:', animal)\n print('local:', locals())\n\n\nf()\nprint('global_scope:', animal)\nprint('global:', locals())\n",
"step-4": "animal = 'cat'\n\n\ndef f():\n global animal\n animal = 'dog'\n print('local_scope:', animal)\n print('local:', locals())\n\n\nf()\nprint('global_scope:', animal)\nprint('global:', locals())\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def addnumber(i,j):
sum= i+j
print(sum)
num1 = int(input("Enter 1st number"))
num2 = int(input("Enter 2nd number"))
z = addnumber(num1,num2)
|
normal
|
{
"blob_id": "2350c2ab05499f1b40ba61f2101c51d9581d57f6",
"index": 8668,
"step-1": "<mask token>\n",
"step-2": "def addnumber(i, j):\n sum = i + j\n print(sum)\n\n\n<mask token>\n",
"step-3": "def addnumber(i, j):\n sum = i + j\n print(sum)\n\n\nnum1 = int(input('Enter 1st number'))\nnum2 = int(input('Enter 2nd number'))\nz = addnumber(num1, num2)\n",
"step-4": "\n\n\ndef addnumber(i,j):\n sum= i+j\n print(sum)\n\nnum1 = int(input(\"Enter 1st number\"))\nnum2 = int(input(\"Enter 2nd number\"))\nz = addnumber(num1,num2)\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def train(hp):
os.makedirs(hp.out_dir, exist_ok=True)
device = torch.device('cuda' if hp.use_cuda else 'cpu')
dataset = SVHN(root='svhn', split='train', download=True, transform=
ToTensor())
eval_dataset = SVHN(root='svhn', split='test', download=True, transform
=ToTensor())
model = VAE(hp.z_dim).to(device)
print_model_info(model)
opt = get_optimizer(hp.opt_name, model.parameters(), lr=hp.lr, **hp.
opt_kwargs)
logger = Logger(hp.out_dir)
total_step = 0
error_occured = False
start_time = time.time()
stats = {'loss': [], 'loss_kl': [], 'loss_rec': [], 'eval_loss': [],
'start_time': start_time, 'epoch_times': []}
for epoch in range(1, hp.epochs + 1):
loader = DataLoader(dataset=dataset, batch_size=256, shuffle=True)
for x, _ in loader:
total_step += 1
x = x.to(device) * 2 - 1.0
z, mu, sigma, x_hat = model(x)
loss_rec = 0.5 * sumflat((x - x_hat) ** 2)
loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)
loss = (loss_rec + loss_kl).mean()
if torch.isnan(loss).item():
error_occured = True
break
opt.zero_grad()
loss.backward()
opt.step()
if total_step % 10 == 0:
stats['loss'].append(loss.cpu().item())
stats['loss_rec'].append(loss_rec.cpu().mean().item())
stats['loss_kl'].append(loss_kl.cpu().mean().item())
logger.log_scalars({'train/loss': stats['loss'][-1],
'train/loss_rec': stats['loss_rec'][-1],
'train/loss_kl': stats['loss_kl'][-1]}, total_step)
print(
f"\rep {epoch:02d} step {total_step:03d} loss {stats['loss'][-1]:.2f} loss_rec {stats['loss_rec'][-1]:.2f} loss_kl {stats['loss_kl'][-1]:.2f} ({time.time() - start_time:.2f} sec) "
, end='', flush=True)
print()
if error_occured:
print('NaN detected -- Ending training!')
break
stats['epoch_times'].append(time.time())
eval_loss = evaluate(model=model, dataset=eval_dataset, logger=
logger, step=total_step, epoch=epoch, device=device, hparams=hp)
stats['eval_loss'].append(eval_loss.cpu().mean().item())
if epoch % hp.ckpt_freq == 0 or epoch == hp.epochs:
torch.save({'model_state_dict': model.state_dict(), 'epoch':
epoch, 'total_step': total_step, 'stats': stats, 'hparams':
vars(hp)}, os.path.join(hp.out_dir, f'ckpt_ep={epoch:03d}.pt'))
end_time = time.time()
with open(os.path.join(hp.out_dir, 'FINISHED'), 'w') as f:
f.write(f'Started: {start_time}\n')
f.write(f'Finished: {end_time}\n')
f.write(f'Total time: {end_time - start_time:.2f}\n')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def train(hp):
os.makedirs(hp.out_dir, exist_ok=True)
device = torch.device('cuda' if hp.use_cuda else 'cpu')
dataset = SVHN(root='svhn', split='train', download=True, transform=
ToTensor())
eval_dataset = SVHN(root='svhn', split='test', download=True, transform
=ToTensor())
model = VAE(hp.z_dim).to(device)
print_model_info(model)
opt = get_optimizer(hp.opt_name, model.parameters(), lr=hp.lr, **hp.
opt_kwargs)
logger = Logger(hp.out_dir)
total_step = 0
error_occured = False
start_time = time.time()
stats = {'loss': [], 'loss_kl': [], 'loss_rec': [], 'eval_loss': [],
'start_time': start_time, 'epoch_times': []}
for epoch in range(1, hp.epochs + 1):
loader = DataLoader(dataset=dataset, batch_size=256, shuffle=True)
for x, _ in loader:
total_step += 1
x = x.to(device) * 2 - 1.0
z, mu, sigma, x_hat = model(x)
loss_rec = 0.5 * sumflat((x - x_hat) ** 2)
loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)
loss = (loss_rec + loss_kl).mean()
if torch.isnan(loss).item():
error_occured = True
break
opt.zero_grad()
loss.backward()
opt.step()
if total_step % 10 == 0:
stats['loss'].append(loss.cpu().item())
stats['loss_rec'].append(loss_rec.cpu().mean().item())
stats['loss_kl'].append(loss_kl.cpu().mean().item())
logger.log_scalars({'train/loss': stats['loss'][-1],
'train/loss_rec': stats['loss_rec'][-1],
'train/loss_kl': stats['loss_kl'][-1]}, total_step)
print(
f"\rep {epoch:02d} step {total_step:03d} loss {stats['loss'][-1]:.2f} loss_rec {stats['loss_rec'][-1]:.2f} loss_kl {stats['loss_kl'][-1]:.2f} ({time.time() - start_time:.2f} sec) "
, end='', flush=True)
print()
if error_occured:
print('NaN detected -- Ending training!')
break
stats['epoch_times'].append(time.time())
eval_loss = evaluate(model=model, dataset=eval_dataset, logger=
logger, step=total_step, epoch=epoch, device=device, hparams=hp)
stats['eval_loss'].append(eval_loss.cpu().mean().item())
if epoch % hp.ckpt_freq == 0 or epoch == hp.epochs:
torch.save({'model_state_dict': model.state_dict(), 'epoch':
epoch, 'total_step': total_step, 'stats': stats, 'hparams':
vars(hp)}, os.path.join(hp.out_dir, f'ckpt_ep={epoch:03d}.pt'))
end_time = time.time()
with open(os.path.join(hp.out_dir, 'FINISHED'), 'w') as f:
f.write(f'Started: {start_time}\n')
f.write(f'Finished: {end_time}\n')
f.write(f'Total time: {end_time - start_time:.2f}\n')
@torch.no_grad()
def evaluate(*, model: torch.nn.Module, dataset, logger: Logger, step: int,
epoch: int, device, hparams):
loader = DataLoader(dataset=dataset, batch_size=256, shuffle=False,
drop_last=False)
model.eval()
losses = []
for i, (x, _) in enumerate(loader):
x = x.to(device) * 2 - 1.0
z, mu, sigma, x_hat = model(x)
loss_rec = 0.5 * sumflat((x - x_hat) ** 2)
loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)
loss = loss_rec + loss_kl
losses.append(loss.cpu())
if i == 0 and (epoch % hparams.sample_freq == 0 or epoch == hparams
.epochs):
n = 6
samples = model.decoder(torch.randn(n ** 2, hparams.z_dim,
device=device))
logger.log_image_grid('reconstructions', tanh_to_uint8(x_hat[:n **
2]), step, nrow=n)
logger.log_image_grid('samples', tanh_to_uint8(samples), step,
nrow=n)
losses = torch.cat(losses)
logger.log_scalar('eval/loss', losses.mean().item(), step)
model.train()
return losses
<|reserved_special_token_1|>
import os
import time
import torch
from torch.utils.data import DataLoader
from torchvision.datasets import SVHN
from torchvision.transforms import ToTensor
from lib.utils import Logger, normal_logpdf, sumflat, print_model_info, tanh_to_uint8, get_optimizer
from lib.vae import VAE
def train(hp):
os.makedirs(hp.out_dir, exist_ok=True)
device = torch.device('cuda' if hp.use_cuda else 'cpu')
dataset = SVHN(root='svhn', split='train', download=True, transform=
ToTensor())
eval_dataset = SVHN(root='svhn', split='test', download=True, transform
=ToTensor())
model = VAE(hp.z_dim).to(device)
print_model_info(model)
opt = get_optimizer(hp.opt_name, model.parameters(), lr=hp.lr, **hp.
opt_kwargs)
logger = Logger(hp.out_dir)
total_step = 0
error_occured = False
start_time = time.time()
stats = {'loss': [], 'loss_kl': [], 'loss_rec': [], 'eval_loss': [],
'start_time': start_time, 'epoch_times': []}
for epoch in range(1, hp.epochs + 1):
loader = DataLoader(dataset=dataset, batch_size=256, shuffle=True)
for x, _ in loader:
total_step += 1
x = x.to(device) * 2 - 1.0
z, mu, sigma, x_hat = model(x)
loss_rec = 0.5 * sumflat((x - x_hat) ** 2)
loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)
loss = (loss_rec + loss_kl).mean()
if torch.isnan(loss).item():
error_occured = True
break
opt.zero_grad()
loss.backward()
opt.step()
if total_step % 10 == 0:
stats['loss'].append(loss.cpu().item())
stats['loss_rec'].append(loss_rec.cpu().mean().item())
stats['loss_kl'].append(loss_kl.cpu().mean().item())
logger.log_scalars({'train/loss': stats['loss'][-1],
'train/loss_rec': stats['loss_rec'][-1],
'train/loss_kl': stats['loss_kl'][-1]}, total_step)
print(
f"\rep {epoch:02d} step {total_step:03d} loss {stats['loss'][-1]:.2f} loss_rec {stats['loss_rec'][-1]:.2f} loss_kl {stats['loss_kl'][-1]:.2f} ({time.time() - start_time:.2f} sec) "
, end='', flush=True)
print()
if error_occured:
print('NaN detected -- Ending training!')
break
stats['epoch_times'].append(time.time())
eval_loss = evaluate(model=model, dataset=eval_dataset, logger=
logger, step=total_step, epoch=epoch, device=device, hparams=hp)
stats['eval_loss'].append(eval_loss.cpu().mean().item())
if epoch % hp.ckpt_freq == 0 or epoch == hp.epochs:
torch.save({'model_state_dict': model.state_dict(), 'epoch':
epoch, 'total_step': total_step, 'stats': stats, 'hparams':
vars(hp)}, os.path.join(hp.out_dir, f'ckpt_ep={epoch:03d}.pt'))
end_time = time.time()
with open(os.path.join(hp.out_dir, 'FINISHED'), 'w') as f:
f.write(f'Started: {start_time}\n')
f.write(f'Finished: {end_time}\n')
f.write(f'Total time: {end_time - start_time:.2f}\n')
@torch.no_grad()
def evaluate(*, model: torch.nn.Module, dataset, logger: Logger, step: int,
epoch: int, device, hparams):
loader = DataLoader(dataset=dataset, batch_size=256, shuffle=False,
drop_last=False)
model.eval()
losses = []
for i, (x, _) in enumerate(loader):
x = x.to(device) * 2 - 1.0
z, mu, sigma, x_hat = model(x)
loss_rec = 0.5 * sumflat((x - x_hat) ** 2)
loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)
loss = loss_rec + loss_kl
losses.append(loss.cpu())
if i == 0 and (epoch % hparams.sample_freq == 0 or epoch == hparams
.epochs):
n = 6
samples = model.decoder(torch.randn(n ** 2, hparams.z_dim,
device=device))
logger.log_image_grid('reconstructions', tanh_to_uint8(x_hat[:n **
2]), step, nrow=n)
logger.log_image_grid('samples', tanh_to_uint8(samples), step,
nrow=n)
losses = torch.cat(losses)
logger.log_scalar('eval/loss', losses.mean().item(), step)
model.train()
return losses
<|reserved_special_token_1|>
import os
import time
import torch
from torch.utils.data import DataLoader
from torchvision.datasets import SVHN
from torchvision.transforms import ToTensor
from lib.utils import Logger, normal_logpdf, sumflat, print_model_info, tanh_to_uint8, get_optimizer
from lib.vae import VAE
def train(hp):
os.makedirs(hp.out_dir, exist_ok=True)
device = torch.device('cuda' if hp.use_cuda else 'cpu')
dataset = SVHN(root='svhn', split='train', download=True, transform=ToTensor())
eval_dataset = SVHN(root='svhn', split='test', download=True, transform=ToTensor())
model = VAE(hp.z_dim).to(device)
print_model_info(model)
opt = get_optimizer(hp.opt_name, model.parameters(), lr=hp.lr, **hp.opt_kwargs)
logger = Logger(hp.out_dir)
total_step = 0
error_occured = False
start_time = time.time()
stats = {
'loss': [],
'loss_kl': [],
'loss_rec': [],
'eval_loss': [],
'start_time': start_time,
'epoch_times': [],
}
for epoch in range(1, hp.epochs+1):
loader = DataLoader(dataset=dataset, batch_size=256, shuffle=True)
for x, _ in loader:
total_step += 1
x = x.to(device) * 2 - 1.0
z, mu, sigma, x_hat = model(x)
loss_rec = 0.5 * sumflat((x - x_hat) ** 2)
loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)
loss = (loss_rec + loss_kl).mean()
if torch.isnan(loss).item():
error_occured = True
break
opt.zero_grad()
loss.backward()
opt.step()
if total_step % 10 == 0:
stats['loss'].append(loss.cpu().item())
stats['loss_rec'].append(loss_rec.cpu().mean().item())
stats['loss_kl'].append(loss_kl.cpu().mean().item())
logger.log_scalars({
'train/loss': stats['loss'][-1],
'train/loss_rec': stats['loss_rec'][-1],
'train/loss_kl': stats['loss_kl'][-1],
}, total_step)
print(f'\rep {epoch:02d} step {total_step:03d} '
f'loss {stats["loss"][-1]:.2f} '
f'loss_rec {stats["loss_rec"][-1]:.2f} '
f'loss_kl {stats["loss_kl"][-1]:.2f} '
f'({time.time() - start_time:.2f} sec) '
' ',
end='', flush=True)
print()
if error_occured:
print('NaN detected -- Ending training!')
break
stats['epoch_times'].append(time.time())
eval_loss = evaluate(model=model, dataset=eval_dataset, logger=logger,
step=total_step, epoch=epoch, device=device, hparams=hp)
stats['eval_loss'].append(eval_loss.cpu().mean().item())
if epoch % hp.ckpt_freq == 0 or epoch == hp.epochs:
torch.save(
{
'model_state_dict': model.state_dict(),
'epoch': epoch,
'total_step': total_step,
'stats': stats,
'hparams': vars(hp),
},
os.path.join(hp.out_dir, f'ckpt_ep={epoch:03d}.pt'))
end_time = time.time()
with open(os.path.join(hp.out_dir, 'FINISHED'), 'w') as f:
f.write(f'Started: {start_time}\n')
f.write(f'Finished: {end_time}\n')
f.write(f'Total time: {end_time - start_time:.2f}\n')
@torch.no_grad()
def evaluate(*, model: torch.nn.Module, dataset, logger: Logger, step: int, epoch: int, device, hparams):
loader = DataLoader(dataset=dataset, batch_size=256, shuffle=False, drop_last=False)
model.eval()
losses = []
for i, (x, _) in enumerate(loader):
x = x.to(device) * 2 - 1.0
z, mu, sigma, x_hat = model(x)
loss_rec = 0.5 * sumflat((x - x_hat) ** 2)
loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)
loss = loss_rec + loss_kl
losses.append(loss.cpu())
if i == 0 and (epoch % hparams.sample_freq == 0 or epoch == hparams.epochs):
n = 6
samples = model.decoder(torch.randn(n**2, hparams.z_dim, device=device))
logger.log_image_grid('reconstructions', tanh_to_uint8(x_hat[:n**2]), step, nrow=n)
logger.log_image_grid('samples', tanh_to_uint8(samples), step, nrow=n)
losses = torch.cat(losses)
logger.log_scalar('eval/loss', losses.mean().item(), step)
model.train()
return losses
|
flexible
|
{
"blob_id": "43db8ed10face1c668aeadd3cbc5b13f87fb0126",
"index": 4997,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef train(hp):\n os.makedirs(hp.out_dir, exist_ok=True)\n device = torch.device('cuda' if hp.use_cuda else 'cpu')\n dataset = SVHN(root='svhn', split='train', download=True, transform=\n ToTensor())\n eval_dataset = SVHN(root='svhn', split='test', download=True, transform\n =ToTensor())\n model = VAE(hp.z_dim).to(device)\n print_model_info(model)\n opt = get_optimizer(hp.opt_name, model.parameters(), lr=hp.lr, **hp.\n opt_kwargs)\n logger = Logger(hp.out_dir)\n total_step = 0\n error_occured = False\n start_time = time.time()\n stats = {'loss': [], 'loss_kl': [], 'loss_rec': [], 'eval_loss': [],\n 'start_time': start_time, 'epoch_times': []}\n for epoch in range(1, hp.epochs + 1):\n loader = DataLoader(dataset=dataset, batch_size=256, shuffle=True)\n for x, _ in loader:\n total_step += 1\n x = x.to(device) * 2 - 1.0\n z, mu, sigma, x_hat = model(x)\n loss_rec = 0.5 * sumflat((x - x_hat) ** 2)\n loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)\n loss = (loss_rec + loss_kl).mean()\n if torch.isnan(loss).item():\n error_occured = True\n break\n opt.zero_grad()\n loss.backward()\n opt.step()\n if total_step % 10 == 0:\n stats['loss'].append(loss.cpu().item())\n stats['loss_rec'].append(loss_rec.cpu().mean().item())\n stats['loss_kl'].append(loss_kl.cpu().mean().item())\n logger.log_scalars({'train/loss': stats['loss'][-1],\n 'train/loss_rec': stats['loss_rec'][-1],\n 'train/loss_kl': stats['loss_kl'][-1]}, total_step)\n print(\n f\"\\rep {epoch:02d} step {total_step:03d} loss {stats['loss'][-1]:.2f} loss_rec {stats['loss_rec'][-1]:.2f} loss_kl {stats['loss_kl'][-1]:.2f} ({time.time() - start_time:.2f} sec) \"\n , end='', flush=True)\n print()\n if error_occured:\n print('NaN detected -- Ending training!')\n break\n stats['epoch_times'].append(time.time())\n eval_loss = evaluate(model=model, dataset=eval_dataset, logger=\n logger, step=total_step, epoch=epoch, device=device, hparams=hp)\n 
stats['eval_loss'].append(eval_loss.cpu().mean().item())\n if epoch % hp.ckpt_freq == 0 or epoch == hp.epochs:\n torch.save({'model_state_dict': model.state_dict(), 'epoch':\n epoch, 'total_step': total_step, 'stats': stats, 'hparams':\n vars(hp)}, os.path.join(hp.out_dir, f'ckpt_ep={epoch:03d}.pt'))\n end_time = time.time()\n with open(os.path.join(hp.out_dir, 'FINISHED'), 'w') as f:\n f.write(f'Started: {start_time}\\n')\n f.write(f'Finished: {end_time}\\n')\n f.write(f'Total time: {end_time - start_time:.2f}\\n')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef train(hp):\n os.makedirs(hp.out_dir, exist_ok=True)\n device = torch.device('cuda' if hp.use_cuda else 'cpu')\n dataset = SVHN(root='svhn', split='train', download=True, transform=\n ToTensor())\n eval_dataset = SVHN(root='svhn', split='test', download=True, transform\n =ToTensor())\n model = VAE(hp.z_dim).to(device)\n print_model_info(model)\n opt = get_optimizer(hp.opt_name, model.parameters(), lr=hp.lr, **hp.\n opt_kwargs)\n logger = Logger(hp.out_dir)\n total_step = 0\n error_occured = False\n start_time = time.time()\n stats = {'loss': [], 'loss_kl': [], 'loss_rec': [], 'eval_loss': [],\n 'start_time': start_time, 'epoch_times': []}\n for epoch in range(1, hp.epochs + 1):\n loader = DataLoader(dataset=dataset, batch_size=256, shuffle=True)\n for x, _ in loader:\n total_step += 1\n x = x.to(device) * 2 - 1.0\n z, mu, sigma, x_hat = model(x)\n loss_rec = 0.5 * sumflat((x - x_hat) ** 2)\n loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)\n loss = (loss_rec + loss_kl).mean()\n if torch.isnan(loss).item():\n error_occured = True\n break\n opt.zero_grad()\n loss.backward()\n opt.step()\n if total_step % 10 == 0:\n stats['loss'].append(loss.cpu().item())\n stats['loss_rec'].append(loss_rec.cpu().mean().item())\n stats['loss_kl'].append(loss_kl.cpu().mean().item())\n logger.log_scalars({'train/loss': stats['loss'][-1],\n 'train/loss_rec': stats['loss_rec'][-1],\n 'train/loss_kl': stats['loss_kl'][-1]}, total_step)\n print(\n f\"\\rep {epoch:02d} step {total_step:03d} loss {stats['loss'][-1]:.2f} loss_rec {stats['loss_rec'][-1]:.2f} loss_kl {stats['loss_kl'][-1]:.2f} ({time.time() - start_time:.2f} sec) \"\n , end='', flush=True)\n print()\n if error_occured:\n print('NaN detected -- Ending training!')\n break\n stats['epoch_times'].append(time.time())\n eval_loss = evaluate(model=model, dataset=eval_dataset, logger=\n logger, step=total_step, epoch=epoch, device=device, hparams=hp)\n 
stats['eval_loss'].append(eval_loss.cpu().mean().item())\n if epoch % hp.ckpt_freq == 0 or epoch == hp.epochs:\n torch.save({'model_state_dict': model.state_dict(), 'epoch':\n epoch, 'total_step': total_step, 'stats': stats, 'hparams':\n vars(hp)}, os.path.join(hp.out_dir, f'ckpt_ep={epoch:03d}.pt'))\n end_time = time.time()\n with open(os.path.join(hp.out_dir, 'FINISHED'), 'w') as f:\n f.write(f'Started: {start_time}\\n')\n f.write(f'Finished: {end_time}\\n')\n f.write(f'Total time: {end_time - start_time:.2f}\\n')\n\n\n@torch.no_grad()\ndef evaluate(*, model: torch.nn.Module, dataset, logger: Logger, step: int,\n epoch: int, device, hparams):\n loader = DataLoader(dataset=dataset, batch_size=256, shuffle=False,\n drop_last=False)\n model.eval()\n losses = []\n for i, (x, _) in enumerate(loader):\n x = x.to(device) * 2 - 1.0\n z, mu, sigma, x_hat = model(x)\n loss_rec = 0.5 * sumflat((x - x_hat) ** 2)\n loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)\n loss = loss_rec + loss_kl\n losses.append(loss.cpu())\n if i == 0 and (epoch % hparams.sample_freq == 0 or epoch == hparams\n .epochs):\n n = 6\n samples = model.decoder(torch.randn(n ** 2, hparams.z_dim,\n device=device))\n logger.log_image_grid('reconstructions', tanh_to_uint8(x_hat[:n **\n 2]), step, nrow=n)\n logger.log_image_grid('samples', tanh_to_uint8(samples), step,\n nrow=n)\n losses = torch.cat(losses)\n logger.log_scalar('eval/loss', losses.mean().item(), step)\n model.train()\n return losses\n",
"step-4": "import os\nimport time\nimport torch\nfrom torch.utils.data import DataLoader\nfrom torchvision.datasets import SVHN\nfrom torchvision.transforms import ToTensor\nfrom lib.utils import Logger, normal_logpdf, sumflat, print_model_info, tanh_to_uint8, get_optimizer\nfrom lib.vae import VAE\n\n\ndef train(hp):\n os.makedirs(hp.out_dir, exist_ok=True)\n device = torch.device('cuda' if hp.use_cuda else 'cpu')\n dataset = SVHN(root='svhn', split='train', download=True, transform=\n ToTensor())\n eval_dataset = SVHN(root='svhn', split='test', download=True, transform\n =ToTensor())\n model = VAE(hp.z_dim).to(device)\n print_model_info(model)\n opt = get_optimizer(hp.opt_name, model.parameters(), lr=hp.lr, **hp.\n opt_kwargs)\n logger = Logger(hp.out_dir)\n total_step = 0\n error_occured = False\n start_time = time.time()\n stats = {'loss': [], 'loss_kl': [], 'loss_rec': [], 'eval_loss': [],\n 'start_time': start_time, 'epoch_times': []}\n for epoch in range(1, hp.epochs + 1):\n loader = DataLoader(dataset=dataset, batch_size=256, shuffle=True)\n for x, _ in loader:\n total_step += 1\n x = x.to(device) * 2 - 1.0\n z, mu, sigma, x_hat = model(x)\n loss_rec = 0.5 * sumflat((x - x_hat) ** 2)\n loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)\n loss = (loss_rec + loss_kl).mean()\n if torch.isnan(loss).item():\n error_occured = True\n break\n opt.zero_grad()\n loss.backward()\n opt.step()\n if total_step % 10 == 0:\n stats['loss'].append(loss.cpu().item())\n stats['loss_rec'].append(loss_rec.cpu().mean().item())\n stats['loss_kl'].append(loss_kl.cpu().mean().item())\n logger.log_scalars({'train/loss': stats['loss'][-1],\n 'train/loss_rec': stats['loss_rec'][-1],\n 'train/loss_kl': stats['loss_kl'][-1]}, total_step)\n print(\n f\"\\rep {epoch:02d} step {total_step:03d} loss {stats['loss'][-1]:.2f} loss_rec {stats['loss_rec'][-1]:.2f} loss_kl {stats['loss_kl'][-1]:.2f} ({time.time() - start_time:.2f} sec) \"\n , end='', flush=True)\n print()\n if 
error_occured:\n print('NaN detected -- Ending training!')\n break\n stats['epoch_times'].append(time.time())\n eval_loss = evaluate(model=model, dataset=eval_dataset, logger=\n logger, step=total_step, epoch=epoch, device=device, hparams=hp)\n stats['eval_loss'].append(eval_loss.cpu().mean().item())\n if epoch % hp.ckpt_freq == 0 or epoch == hp.epochs:\n torch.save({'model_state_dict': model.state_dict(), 'epoch':\n epoch, 'total_step': total_step, 'stats': stats, 'hparams':\n vars(hp)}, os.path.join(hp.out_dir, f'ckpt_ep={epoch:03d}.pt'))\n end_time = time.time()\n with open(os.path.join(hp.out_dir, 'FINISHED'), 'w') as f:\n f.write(f'Started: {start_time}\\n')\n f.write(f'Finished: {end_time}\\n')\n f.write(f'Total time: {end_time - start_time:.2f}\\n')\n\n\n@torch.no_grad()\ndef evaluate(*, model: torch.nn.Module, dataset, logger: Logger, step: int,\n epoch: int, device, hparams):\n loader = DataLoader(dataset=dataset, batch_size=256, shuffle=False,\n drop_last=False)\n model.eval()\n losses = []\n for i, (x, _) in enumerate(loader):\n x = x.to(device) * 2 - 1.0\n z, mu, sigma, x_hat = model(x)\n loss_rec = 0.5 * sumflat((x - x_hat) ** 2)\n loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)\n loss = loss_rec + loss_kl\n losses.append(loss.cpu())\n if i == 0 and (epoch % hparams.sample_freq == 0 or epoch == hparams\n .epochs):\n n = 6\n samples = model.decoder(torch.randn(n ** 2, hparams.z_dim,\n device=device))\n logger.log_image_grid('reconstructions', tanh_to_uint8(x_hat[:n **\n 2]), step, nrow=n)\n logger.log_image_grid('samples', tanh_to_uint8(samples), step,\n nrow=n)\n losses = torch.cat(losses)\n logger.log_scalar('eval/loss', losses.mean().item(), step)\n model.train()\n return losses\n",
"step-5": "import os\nimport time\nimport torch\nfrom torch.utils.data import DataLoader\nfrom torchvision.datasets import SVHN\nfrom torchvision.transforms import ToTensor \nfrom lib.utils import Logger, normal_logpdf, sumflat, print_model_info, tanh_to_uint8, get_optimizer\nfrom lib.vae import VAE\n\n\ndef train(hp):\n os.makedirs(hp.out_dir, exist_ok=True)\n device = torch.device('cuda' if hp.use_cuda else 'cpu')\n dataset = SVHN(root='svhn', split='train', download=True, transform=ToTensor())\n eval_dataset = SVHN(root='svhn', split='test', download=True, transform=ToTensor())\n model = VAE(hp.z_dim).to(device)\n print_model_info(model)\n opt = get_optimizer(hp.opt_name, model.parameters(), lr=hp.lr, **hp.opt_kwargs)\n logger = Logger(hp.out_dir)\n total_step = 0\n error_occured = False\n\n start_time = time.time()\n stats = {\n 'loss': [],\n 'loss_kl': [],\n 'loss_rec': [],\n 'eval_loss': [],\n 'start_time': start_time,\n 'epoch_times': [],\n }\n for epoch in range(1, hp.epochs+1):\n loader = DataLoader(dataset=dataset, batch_size=256, shuffle=True)\n for x, _ in loader:\n total_step += 1\n x = x.to(device) * 2 - 1.0\n z, mu, sigma, x_hat = model(x)\n\n loss_rec = 0.5 * sumflat((x - x_hat) ** 2)\n loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)\n loss = (loss_rec + loss_kl).mean()\n if torch.isnan(loss).item():\n error_occured = True\n break\n\n opt.zero_grad()\n loss.backward()\n opt.step()\n\n if total_step % 10 == 0:\n stats['loss'].append(loss.cpu().item())\n stats['loss_rec'].append(loss_rec.cpu().mean().item())\n stats['loss_kl'].append(loss_kl.cpu().mean().item())\n logger.log_scalars({\n 'train/loss': stats['loss'][-1],\n 'train/loss_rec': stats['loss_rec'][-1],\n 'train/loss_kl': stats['loss_kl'][-1],\n\n }, total_step)\n\n print(f'\\rep {epoch:02d} step {total_step:03d} '\n f'loss {stats[\"loss\"][-1]:.2f} '\n f'loss_rec {stats[\"loss_rec\"][-1]:.2f} '\n f'loss_kl {stats[\"loss_kl\"][-1]:.2f} '\n f'({time.time() - start_time:.2f} sec) '\n ' 
',\n end='', flush=True)\n\n print()\n if error_occured:\n print('NaN detected -- Ending training!')\n break\n stats['epoch_times'].append(time.time())\n eval_loss = evaluate(model=model, dataset=eval_dataset, logger=logger,\n step=total_step, epoch=epoch, device=device, hparams=hp)\n stats['eval_loss'].append(eval_loss.cpu().mean().item())\n\n if epoch % hp.ckpt_freq == 0 or epoch == hp.epochs:\n torch.save(\n {\n 'model_state_dict': model.state_dict(),\n 'epoch': epoch,\n 'total_step': total_step,\n 'stats': stats,\n 'hparams': vars(hp),\n },\n os.path.join(hp.out_dir, f'ckpt_ep={epoch:03d}.pt'))\n\n end_time = time.time()\n with open(os.path.join(hp.out_dir, 'FINISHED'), 'w') as f:\n f.write(f'Started: {start_time}\\n')\n f.write(f'Finished: {end_time}\\n')\n f.write(f'Total time: {end_time - start_time:.2f}\\n')\n\n\n@torch.no_grad()\ndef evaluate(*, model: torch.nn.Module, dataset, logger: Logger, step: int, epoch: int, device, hparams):\n loader = DataLoader(dataset=dataset, batch_size=256, shuffle=False, drop_last=False)\n\n model.eval()\n losses = []\n for i, (x, _) in enumerate(loader):\n x = x.to(device) * 2 - 1.0\n z, mu, sigma, x_hat = model(x)\n\n loss_rec = 0.5 * sumflat((x - x_hat) ** 2)\n loss_kl = normal_logpdf(z, mu, sigma) - normal_logpdf(z)\n loss = loss_rec + loss_kl\n losses.append(loss.cpu())\n\n if i == 0 and (epoch % hparams.sample_freq == 0 or epoch == hparams.epochs):\n n = 6\n samples = model.decoder(torch.randn(n**2, hparams.z_dim, device=device))\n logger.log_image_grid('reconstructions', tanh_to_uint8(x_hat[:n**2]), step, nrow=n)\n logger.log_image_grid('samples', tanh_to_uint8(samples), step, nrow=n)\n\n losses = torch.cat(losses)\n logger.log_scalar('eval/loss', losses.mean().item(), step)\n model.train()\n return losses",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class Convolution_Layer(Layer):
def __init__(self, shape, mean, stddev):
super(Convolution_Layer, self).__init__(shape, mean, stddev)
def feed_forward(self, input_data, stride):
conv = tf.nn.conv2d(input_data, self.weights, stride, padding='VALID')
output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))
return output_data
class Output_Layer(Layer):
def __init__(self, shape, mean, stddev):
super(Output_Layer, self).__init__(shape, mean, stddev)
def feed_forward(self, input_data, stride):
output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights,
stride, padding='VALID'), self.biases)
return output_data
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Layer:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Convolution_Layer(Layer):
def __init__(self, shape, mean, stddev):
super(Convolution_Layer, self).__init__(shape, mean, stddev)
def feed_forward(self, input_data, stride):
conv = tf.nn.conv2d(input_data, self.weights, stride, padding='VALID')
output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))
return output_data
class Output_Layer(Layer):
def __init__(self, shape, mean, stddev):
super(Output_Layer, self).__init__(shape, mean, stddev)
def feed_forward(self, input_data, stride):
output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights,
stride, padding='VALID'), self.biases)
return output_data
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Layer:
def __init__(self, shape, mean, stddev):
self.weights = tf.Variable(tf.random_normal(shape=shape, mean=mean,
stddev=stddev))
self.biases = tf.Variable(tf.zeros(shape=[shape[-1]]))
<|reserved_special_token_0|>
class Convolution_Layer(Layer):
def __init__(self, shape, mean, stddev):
super(Convolution_Layer, self).__init__(shape, mean, stddev)
def feed_forward(self, input_data, stride):
conv = tf.nn.conv2d(input_data, self.weights, stride, padding='VALID')
output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))
return output_data
class Output_Layer(Layer):
def __init__(self, shape, mean, stddev):
super(Output_Layer, self).__init__(shape, mean, stddev)
def feed_forward(self, input_data, stride):
output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights,
stride, padding='VALID'), self.biases)
return output_data
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Layer:
def __init__(self, shape, mean, stddev):
self.weights = tf.Variable(tf.random_normal(shape=shape, mean=mean,
stddev=stddev))
self.biases = tf.Variable(tf.zeros(shape=[shape[-1]]))
def feed_forward(self, input_data, stride=None):
raise NotImplementedError
class Convolution_Layer(Layer):
def __init__(self, shape, mean, stddev):
super(Convolution_Layer, self).__init__(shape, mean, stddev)
def feed_forward(self, input_data, stride):
conv = tf.nn.conv2d(input_data, self.weights, stride, padding='VALID')
output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))
return output_data
class Output_Layer(Layer):
def __init__(self, shape, mean, stddev):
super(Output_Layer, self).__init__(shape, mean, stddev)
def feed_forward(self, input_data, stride):
output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights,
stride, padding='VALID'), self.biases)
return output_data
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 23 20:33:08 2018
@author: ashima.garg
"""
import tensorflow as tf
class Layer():
def __init__(self, shape, mean, stddev):
self.weights = tf.Variable(tf.random_normal(shape=shape, mean=mean, stddev=stddev))
self.biases = tf.Variable(tf.zeros(shape=[shape[-1]]))
def feed_forward(self, input_data, stride=None):
raise NotImplementedError
class Convolution_Layer(Layer):
def __init__(self, shape, mean, stddev):
super(Convolution_Layer, self).__init__(shape, mean, stddev)
def feed_forward(self, input_data, stride):
conv = tf.nn.conv2d(input_data, self.weights, stride, padding="VALID")
output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))
return output_data
class Output_Layer(Layer):
def __init__(self, shape, mean, stddev):
super(Output_Layer, self).__init__(shape, mean, stddev)
def feed_forward(self, input_data, stride):
output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights, stride, padding="VALID"), self.biases)
return output_data
|
flexible
|
{
"blob_id": "ed246f2887f19ccf922a4d386918f0f0771fb443",
"index": 5106,
"step-1": "<mask token>\n\n\nclass Convolution_Layer(Layer):\n\n def __init__(self, shape, mean, stddev):\n super(Convolution_Layer, self).__init__(shape, mean, stddev)\n\n def feed_forward(self, input_data, stride):\n conv = tf.nn.conv2d(input_data, self.weights, stride, padding='VALID')\n output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))\n return output_data\n\n\nclass Output_Layer(Layer):\n\n def __init__(self, shape, mean, stddev):\n super(Output_Layer, self).__init__(shape, mean, stddev)\n\n def feed_forward(self, input_data, stride):\n output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights,\n stride, padding='VALID'), self.biases)\n return output_data\n",
"step-2": "<mask token>\n\n\nclass Layer:\n <mask token>\n <mask token>\n\n\nclass Convolution_Layer(Layer):\n\n def __init__(self, shape, mean, stddev):\n super(Convolution_Layer, self).__init__(shape, mean, stddev)\n\n def feed_forward(self, input_data, stride):\n conv = tf.nn.conv2d(input_data, self.weights, stride, padding='VALID')\n output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))\n return output_data\n\n\nclass Output_Layer(Layer):\n\n def __init__(self, shape, mean, stddev):\n super(Output_Layer, self).__init__(shape, mean, stddev)\n\n def feed_forward(self, input_data, stride):\n output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights,\n stride, padding='VALID'), self.biases)\n return output_data\n",
"step-3": "<mask token>\n\n\nclass Layer:\n\n def __init__(self, shape, mean, stddev):\n self.weights = tf.Variable(tf.random_normal(shape=shape, mean=mean,\n stddev=stddev))\n self.biases = tf.Variable(tf.zeros(shape=[shape[-1]]))\n <mask token>\n\n\nclass Convolution_Layer(Layer):\n\n def __init__(self, shape, mean, stddev):\n super(Convolution_Layer, self).__init__(shape, mean, stddev)\n\n def feed_forward(self, input_data, stride):\n conv = tf.nn.conv2d(input_data, self.weights, stride, padding='VALID')\n output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))\n return output_data\n\n\nclass Output_Layer(Layer):\n\n def __init__(self, shape, mean, stddev):\n super(Output_Layer, self).__init__(shape, mean, stddev)\n\n def feed_forward(self, input_data, stride):\n output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights,\n stride, padding='VALID'), self.biases)\n return output_data\n",
"step-4": "<mask token>\n\n\nclass Layer:\n\n def __init__(self, shape, mean, stddev):\n self.weights = tf.Variable(tf.random_normal(shape=shape, mean=mean,\n stddev=stddev))\n self.biases = tf.Variable(tf.zeros(shape=[shape[-1]]))\n\n def feed_forward(self, input_data, stride=None):\n raise NotImplementedError\n\n\nclass Convolution_Layer(Layer):\n\n def __init__(self, shape, mean, stddev):\n super(Convolution_Layer, self).__init__(shape, mean, stddev)\n\n def feed_forward(self, input_data, stride):\n conv = tf.nn.conv2d(input_data, self.weights, stride, padding='VALID')\n output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))\n return output_data\n\n\nclass Output_Layer(Layer):\n\n def __init__(self, shape, mean, stddev):\n super(Output_Layer, self).__init__(shape, mean, stddev)\n\n def feed_forward(self, input_data, stride):\n output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights,\n stride, padding='VALID'), self.biases)\n return output_data\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sat Jun 23 20:33:08 2018\n\n@author: ashima.garg\n\"\"\"\n\nimport tensorflow as tf\n\nclass Layer():\n\n def __init__(self, shape, mean, stddev):\n self.weights = tf.Variable(tf.random_normal(shape=shape, mean=mean, stddev=stddev))\n self.biases = tf.Variable(tf.zeros(shape=[shape[-1]]))\n\n def feed_forward(self, input_data, stride=None):\n raise NotImplementedError\n\n\nclass Convolution_Layer(Layer):\n\n def __init__(self, shape, mean, stddev):\n super(Convolution_Layer, self).__init__(shape, mean, stddev)\n\n def feed_forward(self, input_data, stride):\n conv = tf.nn.conv2d(input_data, self.weights, stride, padding=\"VALID\")\n output_data = tf.nn.relu(tf.nn.bias_add(conv, self.biases))\n return output_data\n\n\nclass Output_Layer(Layer):\n\n def __init__(self, shape, mean, stddev):\n super(Output_Layer, self).__init__(shape, mean, stddev)\n\n def feed_forward(self, input_data, stride):\n output_data = tf.nn.bias_add(tf.nn.conv2d(input_data, self.weights, stride, padding=\"VALID\"), self.biases)\n return output_data\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
plt.show()
<|reserved_special_token_1|>
import Individual
import Grupal
import matplotlib.pyplot as plt
import pandas as pd
plt.show()
|
flexible
|
{
"blob_id": "bb1caf4d04c8a42279afa0ac586ced991e0dff84",
"index": 4574,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplt.show()\n",
"step-3": "import Individual\nimport Grupal\nimport matplotlib.pyplot as plt\nimport pandas as pd\nplt.show()\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import pickle
import numpy as np
in_dir = "C:\\Users\\ganga\\Github\\Generative-Models\\Project\\Data\\Dynamics\\"
out_dir = f"C:\\Users\\ganga\\Github\\Generative-Models\\Project\\Data\\Dynamics\\"
# Read frames
train_frames = pickle.load( open(in_dir +'\\train_frames.pkl' , 'rb' ))
test_frames = pickle.load( open(in_dir +'\\test_frames.pkl' , 'rb' ))
# Read the rbm learned weights
rbm_dir = "C:\\Users\\ganga\\Github\\Generative-Models\\Project\\Outputs\\RBM\\"
W, b_h, b_v = pickle.load( open(rbm_dir+'\\weights.pkl' , 'rb' ))
print("Loaded learned weights from RBM")
print("W", W.shape)
print("b_h", b_h.shape)
print("b_v", b_v.shape)
def sigmoid(x):
#Sigmoid activation
#Implemented interms of tanh for increased stability
return .5 * (1 + np.tanh(.5 * x))
def bernoulli_array(prob_array, dim):
# Simulating Bernoulli from uniform
sample = np.zeros(dim)
# Draw x~Uni[0,1]
uni_sample = np.random.uniform(0, 1, dim)
# return 1 if x < p else return 0
diff = uni_sample - prob_array
coords = np.argwhere(diff<0)
sample[[*coords.T]] = 1
return sample
# ------------------------ Train Data ----------------------------------------
for count in range(5):
hidden = []
for i in range(train_frames.shape[0]):
v = train_frames[i].T
# Getting hidden states of RBM using frames
# (h x v) @ (v x b) + (h x 1) = (h x b)
p_h_v = sigmoid(W @ v + b_h)
h = bernoulli_array(p_h_v, (p_h_v.shape[0], p_h_v.shape[1]))
hidden.append(h.T)
hidden = np.array(hidden)
print("Train Hidden h ", count, ": ", hidden.shape)
pickle.dump(hidden, open(f"{out_dir}\\train_h_{count}.pkl" , 'wb' ) )
hidden = []
for i in range(train_frames.shape[0]):
v = train_frames[i].T
# Getting hidden states of RBM using frames
# (h x v) @ (v x b) + (h x 1) = (h x b)
p_h_v = sigmoid(W @ v + b_h)
hidden.append(p_h_v.T)
hidden = np.array(hidden)
print("Train Hidden p_h_v : ", hidden.shape)
pickle.dump(hidden, open(f"{out_dir}\\train_p_h_v.pkl" , 'wb' ) )
# ------------------------ Test Data ----------------------------------------
for count in range(5):
hidden = []
for i in range(test_frames.shape[0]):
v = test_frames[i].T
# Getting hidden states of RBM using frames
# (h x v) @ (v x b) + (h x 1) = (h x b)
p_h_v = sigmoid(W @ v + b_h)
h = bernoulli_array(p_h_v, (p_h_v.shape[0], p_h_v.shape[1]))
hidden.append(h.T)
hidden = np.array(hidden)
print("Test Latent Dynamics h ", count, ": ", hidden.shape)
pickle.dump(hidden, open(f"{out_dir}\\test_h_{count}.pkl" , 'wb' ) )
hidden = []
for i in range(test_frames.shape[0]):
v = test_frames[i].T
# Getting hidden states of RBM using frames
# (h x v) @ (v x b) + (h x 1) = (h x b)
p_h_v = sigmoid(W @ v + b_h)
hidden.append(p_h_v.T)
hidden = np.array(hidden)
print("Test Hidden p_h_v : ", hidden.shape)
pickle.dump(hidden, open(f"{out_dir}\\test_p_h_v.pkl" , 'wb' ) )
|
normal
|
{
"blob_id": "e048170775c589cf0a9fb3d54c72dab4df3f1bcb",
"index": 7558,
"step-1": "<mask token>\n\n\ndef sigmoid(x):\n return 0.5 * (1 + np.tanh(0.5 * x))\n\n\ndef bernoulli_array(prob_array, dim):\n sample = np.zeros(dim)\n uni_sample = np.random.uniform(0, 1, dim)\n diff = uni_sample - prob_array\n coords = np.argwhere(diff < 0)\n sample[[*coords.T]] = 1\n return sample\n\n\n<mask token>\n",
"step-2": "<mask token>\nprint('Loaded learned weights from RBM')\nprint('W', W.shape)\nprint('b_h', b_h.shape)\nprint('b_v', b_v.shape)\n\n\ndef sigmoid(x):\n return 0.5 * (1 + np.tanh(0.5 * x))\n\n\ndef bernoulli_array(prob_array, dim):\n sample = np.zeros(dim)\n uni_sample = np.random.uniform(0, 1, dim)\n diff = uni_sample - prob_array\n coords = np.argwhere(diff < 0)\n sample[[*coords.T]] = 1\n return sample\n\n\nfor count in range(5):\n hidden = []\n for i in range(train_frames.shape[0]):\n v = train_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n h = bernoulli_array(p_h_v, (p_h_v.shape[0], p_h_v.shape[1]))\n hidden.append(h.T)\n hidden = np.array(hidden)\n print('Train Hidden h ', count, ': ', hidden.shape)\n pickle.dump(hidden, open(f'{out_dir}\\\\train_h_{count}.pkl', 'wb'))\n<mask token>\nfor i in range(train_frames.shape[0]):\n v = train_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n hidden.append(p_h_v.T)\n<mask token>\nprint('Train Hidden p_h_v : ', hidden.shape)\npickle.dump(hidden, open(f'{out_dir}\\\\train_p_h_v.pkl', 'wb'))\nfor count in range(5):\n hidden = []\n for i in range(test_frames.shape[0]):\n v = test_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n h = bernoulli_array(p_h_v, (p_h_v.shape[0], p_h_v.shape[1]))\n hidden.append(h.T)\n hidden = np.array(hidden)\n print('Test Latent Dynamics h ', count, ': ', hidden.shape)\n pickle.dump(hidden, open(f'{out_dir}\\\\test_h_{count}.pkl', 'wb'))\n<mask token>\nfor i in range(test_frames.shape[0]):\n v = test_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n hidden.append(p_h_v.T)\n<mask token>\nprint('Test Hidden p_h_v : ', hidden.shape)\npickle.dump(hidden, open(f'{out_dir}\\\\test_p_h_v.pkl', 'wb'))\n",
"step-3": "<mask token>\nin_dir = (\n 'C:\\\\Users\\\\ganga\\\\Github\\\\Generative-Models\\\\Project\\\\Data\\\\Dynamics\\\\')\nout_dir = (\n f'C:\\\\Users\\\\ganga\\\\Github\\\\Generative-Models\\\\Project\\\\Data\\\\Dynamics\\\\')\ntrain_frames = pickle.load(open(in_dir + '\\\\train_frames.pkl', 'rb'))\ntest_frames = pickle.load(open(in_dir + '\\\\test_frames.pkl', 'rb'))\nrbm_dir = (\n 'C:\\\\Users\\\\ganga\\\\Github\\\\Generative-Models\\\\Project\\\\Outputs\\\\RBM\\\\')\nW, b_h, b_v = pickle.load(open(rbm_dir + '\\\\weights.pkl', 'rb'))\nprint('Loaded learned weights from RBM')\nprint('W', W.shape)\nprint('b_h', b_h.shape)\nprint('b_v', b_v.shape)\n\n\ndef sigmoid(x):\n return 0.5 * (1 + np.tanh(0.5 * x))\n\n\ndef bernoulli_array(prob_array, dim):\n sample = np.zeros(dim)\n uni_sample = np.random.uniform(0, 1, dim)\n diff = uni_sample - prob_array\n coords = np.argwhere(diff < 0)\n sample[[*coords.T]] = 1\n return sample\n\n\nfor count in range(5):\n hidden = []\n for i in range(train_frames.shape[0]):\n v = train_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n h = bernoulli_array(p_h_v, (p_h_v.shape[0], p_h_v.shape[1]))\n hidden.append(h.T)\n hidden = np.array(hidden)\n print('Train Hidden h ', count, ': ', hidden.shape)\n pickle.dump(hidden, open(f'{out_dir}\\\\train_h_{count}.pkl', 'wb'))\nhidden = []\nfor i in range(train_frames.shape[0]):\n v = train_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n hidden.append(p_h_v.T)\nhidden = np.array(hidden)\nprint('Train Hidden p_h_v : ', hidden.shape)\npickle.dump(hidden, open(f'{out_dir}\\\\train_p_h_v.pkl', 'wb'))\nfor count in range(5):\n hidden = []\n for i in range(test_frames.shape[0]):\n v = test_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n h = bernoulli_array(p_h_v, (p_h_v.shape[0], p_h_v.shape[1]))\n hidden.append(h.T)\n hidden = np.array(hidden)\n print('Test Latent Dynamics h ', count, ': ', hidden.shape)\n pickle.dump(hidden, open(f'{out_dir}\\\\test_h_{count}.pkl', 'wb'))\nhidden = []\nfor i in 
range(test_frames.shape[0]):\n v = test_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n hidden.append(p_h_v.T)\nhidden = np.array(hidden)\nprint('Test Hidden p_h_v : ', hidden.shape)\npickle.dump(hidden, open(f'{out_dir}\\\\test_p_h_v.pkl', 'wb'))\n",
"step-4": "import pickle\nimport numpy as np\nin_dir = (\n 'C:\\\\Users\\\\ganga\\\\Github\\\\Generative-Models\\\\Project\\\\Data\\\\Dynamics\\\\')\nout_dir = (\n f'C:\\\\Users\\\\ganga\\\\Github\\\\Generative-Models\\\\Project\\\\Data\\\\Dynamics\\\\')\ntrain_frames = pickle.load(open(in_dir + '\\\\train_frames.pkl', 'rb'))\ntest_frames = pickle.load(open(in_dir + '\\\\test_frames.pkl', 'rb'))\nrbm_dir = (\n 'C:\\\\Users\\\\ganga\\\\Github\\\\Generative-Models\\\\Project\\\\Outputs\\\\RBM\\\\')\nW, b_h, b_v = pickle.load(open(rbm_dir + '\\\\weights.pkl', 'rb'))\nprint('Loaded learned weights from RBM')\nprint('W', W.shape)\nprint('b_h', b_h.shape)\nprint('b_v', b_v.shape)\n\n\ndef sigmoid(x):\n return 0.5 * (1 + np.tanh(0.5 * x))\n\n\ndef bernoulli_array(prob_array, dim):\n sample = np.zeros(dim)\n uni_sample = np.random.uniform(0, 1, dim)\n diff = uni_sample - prob_array\n coords = np.argwhere(diff < 0)\n sample[[*coords.T]] = 1\n return sample\n\n\nfor count in range(5):\n hidden = []\n for i in range(train_frames.shape[0]):\n v = train_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n h = bernoulli_array(p_h_v, (p_h_v.shape[0], p_h_v.shape[1]))\n hidden.append(h.T)\n hidden = np.array(hidden)\n print('Train Hidden h ', count, ': ', hidden.shape)\n pickle.dump(hidden, open(f'{out_dir}\\\\train_h_{count}.pkl', 'wb'))\nhidden = []\nfor i in range(train_frames.shape[0]):\n v = train_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n hidden.append(p_h_v.T)\nhidden = np.array(hidden)\nprint('Train Hidden p_h_v : ', hidden.shape)\npickle.dump(hidden, open(f'{out_dir}\\\\train_p_h_v.pkl', 'wb'))\nfor count in range(5):\n hidden = []\n for i in range(test_frames.shape[0]):\n v = test_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n h = bernoulli_array(p_h_v, (p_h_v.shape[0], p_h_v.shape[1]))\n hidden.append(h.T)\n hidden = np.array(hidden)\n print('Test Latent Dynamics h ', count, ': ', hidden.shape)\n pickle.dump(hidden, open(f'{out_dir}\\\\test_h_{count}.pkl', 'wb'))\nhidden = 
[]\nfor i in range(test_frames.shape[0]):\n v = test_frames[i].T\n p_h_v = sigmoid(W @ v + b_h)\n hidden.append(p_h_v.T)\nhidden = np.array(hidden)\nprint('Test Hidden p_h_v : ', hidden.shape)\npickle.dump(hidden, open(f'{out_dir}\\\\test_p_h_v.pkl', 'wb'))\n",
"step-5": "import pickle\nimport numpy as np\n\nin_dir = \"C:\\\\Users\\\\ganga\\\\Github\\\\Generative-Models\\\\Project\\\\Data\\\\Dynamics\\\\\"\nout_dir = f\"C:\\\\Users\\\\ganga\\\\Github\\\\Generative-Models\\\\Project\\\\Data\\\\Dynamics\\\\\"\n\n\n# Read frames\ntrain_frames = pickle.load( open(in_dir +'\\\\train_frames.pkl' , 'rb' ))\ntest_frames = pickle.load( open(in_dir +'\\\\test_frames.pkl' , 'rb' ))\n\n# Read the rbm learned weights\nrbm_dir = \"C:\\\\Users\\\\ganga\\\\Github\\\\Generative-Models\\\\Project\\\\Outputs\\\\RBM\\\\\"\nW, b_h, b_v = pickle.load( open(rbm_dir+'\\\\weights.pkl' , 'rb' ))\n\nprint(\"Loaded learned weights from RBM\")\nprint(\"W\", W.shape)\nprint(\"b_h\", b_h.shape)\nprint(\"b_v\", b_v.shape)\n\ndef sigmoid(x): \n\t#Sigmoid activation \n\t#Implemented interms of tanh for increased stability\n\treturn .5 * (1 + np.tanh(.5 * x))\n\ndef bernoulli_array(prob_array, dim):\n\t# Simulating Bernoulli from uniform\n\tsample = np.zeros(dim)\n\n\t# Draw x~Uni[0,1]\n\tuni_sample = np.random.uniform(0, 1, dim)\n\n\t# return 1 if x < p else return 0\n\tdiff = uni_sample - prob_array\n\tcoords = np.argwhere(diff<0)\n\tsample[[*coords.T]] = 1 \n\n\treturn sample\n\n# ------------------------ Train Data ----------------------------------------\n\nfor count in range(5):\n\thidden = []\n\tfor i in range(train_frames.shape[0]):\n\n\t\tv = train_frames[i].T\n\n\t\t# Getting hidden states of RBM using frames\n\t\t# (h x v) @ (v x b) + (h x 1) = (h x b)\n\t\tp_h_v = sigmoid(W @ v + b_h)\n\t\th = bernoulli_array(p_h_v, (p_h_v.shape[0], p_h_v.shape[1]))\n\n\t\thidden.append(h.T)\n\n\thidden = np.array(hidden)\n\tprint(\"Train Hidden h \", count, \": \", hidden.shape)\n\n\tpickle.dump(hidden, open(f\"{out_dir}\\\\train_h_{count}.pkl\" , 'wb' ) )\n\n\nhidden = []\nfor i in range(train_frames.shape[0]):\n\n\tv = train_frames[i].T\n\n\t# Getting hidden states of RBM using frames\n\t# (h x v) @ (v x b) + (h x 1) = (h x b)\n\tp_h_v = sigmoid(W @ v + 
b_h)\n\n\thidden.append(p_h_v.T)\n\nhidden = np.array(hidden)\nprint(\"Train Hidden p_h_v : \", hidden.shape)\n\npickle.dump(hidden, open(f\"{out_dir}\\\\train_p_h_v.pkl\" , 'wb' ) )\n\n\n# ------------------------ Test Data ----------------------------------------\n\nfor count in range(5):\n\thidden = []\n\tfor i in range(test_frames.shape[0]):\n\n\t\tv = test_frames[i].T\n\n\t\t# Getting hidden states of RBM using frames\n\t\t# (h x v) @ (v x b) + (h x 1) = (h x b)\n\t\tp_h_v = sigmoid(W @ v + b_h)\n\t\th = bernoulli_array(p_h_v, (p_h_v.shape[0], p_h_v.shape[1]))\n\n\t\thidden.append(h.T)\n\n\thidden = np.array(hidden)\n\tprint(\"Test Latent Dynamics h \", count, \": \", hidden.shape)\n\n\tpickle.dump(hidden, open(f\"{out_dir}\\\\test_h_{count}.pkl\" , 'wb' ) )\n\n\n\nhidden = []\nfor i in range(test_frames.shape[0]):\n\n\tv = test_frames[i].T\n\n\t# Getting hidden states of RBM using frames\n\t# (h x v) @ (v x b) + (h x 1) = (h x b)\n\tp_h_v = sigmoid(W @ v + b_h)\n\n\thidden.append(p_h_v.T)\n\nhidden = np.array(hidden)\nprint(\"Test Hidden p_h_v : \", hidden.shape)\n\npickle.dump(hidden, open(f\"{out_dir}\\\\test_p_h_v.pkl\" , 'wb' ) )",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
dbindexer.autodiscover()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
dbindexer.autodiscover()
urlpatterns = patterns('harvester.views', url('^$', 'home', name='home'),
url('^settings/', 'settings', name='settings'))
<|reserved_special_token_1|>
from django.conf.urls import patterns, include, url
import dbindexer
dbindexer.autodiscover()
urlpatterns = patterns('harvester.views', url('^$', 'home', name='home'),
url('^settings/', 'settings', name='settings'))
<|reserved_special_token_1|>
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
import dbindexer
dbindexer.autodiscover() #This needs to happen before anything else, hence strange import ordering
urlpatterns = patterns('harvester.views',
url(r'^$', 'home', name='home'),
url(r'^settings/', 'settings', name='settings'),
# Examples:
# url(r'^$', 'harvester.views.home', name='home'),
# url(r'^harvester/', include('harvester.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
|
flexible
|
{
"blob_id": "9fc9d766915bcefde4f0ba5c24cb83e33fc66272",
"index": 1094,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndbindexer.autodiscover()\n<mask token>\n",
"step-3": "<mask token>\ndbindexer.autodiscover()\nurlpatterns = patterns('harvester.views', url('^$', 'home', name='home'),\n url('^settings/', 'settings', name='settings'))\n",
"step-4": "from django.conf.urls import patterns, include, url\nimport dbindexer\ndbindexer.autodiscover()\nurlpatterns = patterns('harvester.views', url('^$', 'home', name='home'),\n url('^settings/', 'settings', name='settings'))\n",
"step-5": "from django.conf.urls import patterns, include, url\n\n# Uncomment the next two lines to enable the admin:\n# from django.contrib import admin\n# admin.autodiscover()\nimport dbindexer\ndbindexer.autodiscover() #This needs to happen before anything else, hence strange import ordering\n\nurlpatterns = patterns('harvester.views',\n url(r'^$', 'home', name='home'),\n url(r'^settings/', 'settings', name='settings'),\n # Examples:\n # url(r'^$', 'harvester.views.home', name='home'),\n # url(r'^harvester/', include('harvester.foo.urls')),\n\n # Uncomment the admin/doc line below to enable admin documentation:\n # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),\n\n # Uncomment the next line to enable the admin:\n # url(r'^admin/', include(admin.site.urls)),\n)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
__version__ = '0.90.03'
|
flexible
|
{
"blob_id": "284e4f79748c17d44518f2ce424db5b1697373dc",
"index": 3156,
"step-1": "<mask token>\n",
"step-2": "__version__ = '0.90.03'\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from DHT_Python import dht22
from oled96 import oled
from PiBlynk import Blynk
# read data using pin 4
instance = dht22.DHT22(pin=4)
token = "---token---"
blynk = Blynk(token)
def cnct_cb():
print ("Connected: ")
blynk.on_connect(cnct_cb)
def _funCb(ACT):
result = instance.read()
if result.is_valid():
strTemp=("%.2f" % result.temperature)
strHumi=("%.2f" % result.humidity)
# Show temperature and humidity on OLED
oled.yell2("Temp="+strTemp,"Humi="+strHumi)
blynk.virtual_write(1,strTemp) # User Virtual port V1
blynk.virtual_write(2,strHumi) # User Virtual port V2
blynk.Ticker(_funCb, 140, False) # ~2 Hz
blynk.gpio_auto("button")
blynk.run()
|
normal
|
{
"blob_id": "e95ebb2aa6526e3bf3789da17d144e71cdb49aca",
"index": 2712,
"step-1": "<mask token>\n\n\ndef cnct_cb():\n print('Connected: ')\n\n\n<mask token>\n\n\ndef _funCb(ACT):\n result = instance.read()\n if result.is_valid():\n strTemp = '%.2f' % result.temperature\n strHumi = '%.2f' % result.humidity\n oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)\n blynk.virtual_write(1, strTemp)\n blynk.virtual_write(2, strHumi)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef cnct_cb():\n print('Connected: ')\n\n\nblynk.on_connect(cnct_cb)\n\n\ndef _funCb(ACT):\n result = instance.read()\n if result.is_valid():\n strTemp = '%.2f' % result.temperature\n strHumi = '%.2f' % result.humidity\n oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)\n blynk.virtual_write(1, strTemp)\n blynk.virtual_write(2, strHumi)\n\n\nblynk.Ticker(_funCb, 140, False)\nblynk.gpio_auto('button')\nblynk.run()\n",
"step-3": "<mask token>\ninstance = dht22.DHT22(pin=4)\ntoken = '---token---'\nblynk = Blynk(token)\n\n\ndef cnct_cb():\n print('Connected: ')\n\n\nblynk.on_connect(cnct_cb)\n\n\ndef _funCb(ACT):\n result = instance.read()\n if result.is_valid():\n strTemp = '%.2f' % result.temperature\n strHumi = '%.2f' % result.humidity\n oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)\n blynk.virtual_write(1, strTemp)\n blynk.virtual_write(2, strHumi)\n\n\nblynk.Ticker(_funCb, 140, False)\nblynk.gpio_auto('button')\nblynk.run()\n",
"step-4": "from DHT_Python import dht22\nfrom oled96 import oled\nfrom PiBlynk import Blynk\ninstance = dht22.DHT22(pin=4)\ntoken = '---token---'\nblynk = Blynk(token)\n\n\ndef cnct_cb():\n print('Connected: ')\n\n\nblynk.on_connect(cnct_cb)\n\n\ndef _funCb(ACT):\n result = instance.read()\n if result.is_valid():\n strTemp = '%.2f' % result.temperature\n strHumi = '%.2f' % result.humidity\n oled.yell2('Temp=' + strTemp, 'Humi=' + strHumi)\n blynk.virtual_write(1, strTemp)\n blynk.virtual_write(2, strHumi)\n\n\nblynk.Ticker(_funCb, 140, False)\nblynk.gpio_auto('button')\nblynk.run()\n",
"step-5": "from DHT_Python import dht22\nfrom oled96 import oled \nfrom PiBlynk import Blynk\n\n# read data using pin 4\ninstance = dht22.DHT22(pin=4)\n\ntoken = \"---token---\"\nblynk = Blynk(token)\ndef cnct_cb():\n\tprint (\"Connected: \")\n\t\nblynk.on_connect(cnct_cb)\n\ndef _funCb(ACT):\n\tresult = instance.read()\n\tif result.is_valid():\n\t\tstrTemp=(\"%.2f\" % result.temperature)\n\t\tstrHumi=(\"%.2f\" % result.humidity)\n\t\t# Show temperature and humidity on OLED\n\t\toled.yell2(\"Temp=\"+strTemp,\"Humi=\"+strHumi) \n\t\tblynk.virtual_write(1,strTemp) # User Virtual port V1\n\t\tblynk.virtual_write(2,strHumi) # User Virtual port V2\nblynk.Ticker(_funCb, 140, False) # ~2 Hz\n\nblynk.gpio_auto(\"button\")\n\nblynk.run()\n\n\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from page_objects import PageObject, PageElement
class MainPage(PageObject):
level_menu_opened = False
level_menu_created = False
css_input = PageElement(css='input.input-strobe')
level_text_span = PageElement(css='span.level-text')
instruction_h2 = PageElement(css='h2.order')
enter_button = PageElement(css='div.enter-button')
level_menu = PageElement(
xpath='//div[@class="level-menu-toggle-wrapper"]')
def __init__(self, webdriver, root_uri=None):
super(MainPage, self).__init__(webdriver, root_uri)
# hack to initialize all the menu items
self.open_level_menu()
self.close_level_menu()
def ensure_menu_created(self):
if not self.level_menu_created:
self.open_level_menu()
self.close_level_menu()
def open_level_menu(self):
if not self.level_menu_opened:
self.level_menu.click()
self.level_menu_opened = True
self.level_menu_created = True
def close_level_menu(self):
if self.level_menu_opened:
self.level_menu.click()
self.level_menu_opened = False
def get_level_link(self, level_number):
return PageElement(
xpath='//span[@class="level-number" and text() = "{0}"]/..'
.format(level_number)
)
def open_level(self, level_number):
self.open_level_menu()
self.get_level_link(level_number).click()
def css_write(self, css):
self.css_input = css
self.enter_button.click()
def do_level1(self):
self.open_level(1)
self.css_write("page")
self.level1_link.click()
|
normal
|
{
"blob_id": "c6cf085330f47ffb139c5acc91d91e9758f5396a",
"index": 274,
"step-1": "<mask token>\n\n\nclass MainPage(PageObject):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, webdriver, root_uri=None):\n super(MainPage, self).__init__(webdriver, root_uri)\n self.open_level_menu()\n self.close_level_menu()\n <mask token>\n <mask token>\n\n def close_level_menu(self):\n if self.level_menu_opened:\n self.level_menu.click()\n self.level_menu_opened = False\n\n def get_level_link(self, level_number):\n return PageElement(xpath=\n '//span[@class=\"level-number\" and text() = \"{0}\"]/..'.format(\n level_number))\n\n def open_level(self, level_number):\n self.open_level_menu()\n self.get_level_link(level_number).click()\n\n def css_write(self, css):\n self.css_input = css\n self.enter_button.click()\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass MainPage(PageObject):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, webdriver, root_uri=None):\n super(MainPage, self).__init__(webdriver, root_uri)\n self.open_level_menu()\n self.close_level_menu()\n <mask token>\n <mask token>\n\n def close_level_menu(self):\n if self.level_menu_opened:\n self.level_menu.click()\n self.level_menu_opened = False\n\n def get_level_link(self, level_number):\n return PageElement(xpath=\n '//span[@class=\"level-number\" and text() = \"{0}\"]/..'.format(\n level_number))\n\n def open_level(self, level_number):\n self.open_level_menu()\n self.get_level_link(level_number).click()\n\n def css_write(self, css):\n self.css_input = css\n self.enter_button.click()\n\n def do_level1(self):\n self.open_level(1)\n self.css_write('page')\n self.level1_link.click()\n",
"step-3": "<mask token>\n\n\nclass MainPage(PageObject):\n level_menu_opened = False\n level_menu_created = False\n css_input = PageElement(css='input.input-strobe')\n level_text_span = PageElement(css='span.level-text')\n instruction_h2 = PageElement(css='h2.order')\n enter_button = PageElement(css='div.enter-button')\n level_menu = PageElement(xpath='//div[@class=\"level-menu-toggle-wrapper\"]')\n\n def __init__(self, webdriver, root_uri=None):\n super(MainPage, self).__init__(webdriver, root_uri)\n self.open_level_menu()\n self.close_level_menu()\n\n def ensure_menu_created(self):\n if not self.level_menu_created:\n self.open_level_menu()\n self.close_level_menu()\n\n def open_level_menu(self):\n if not self.level_menu_opened:\n self.level_menu.click()\n self.level_menu_opened = True\n self.level_menu_created = True\n\n def close_level_menu(self):\n if self.level_menu_opened:\n self.level_menu.click()\n self.level_menu_opened = False\n\n def get_level_link(self, level_number):\n return PageElement(xpath=\n '//span[@class=\"level-number\" and text() = \"{0}\"]/..'.format(\n level_number))\n\n def open_level(self, level_number):\n self.open_level_menu()\n self.get_level_link(level_number).click()\n\n def css_write(self, css):\n self.css_input = css\n self.enter_button.click()\n\n def do_level1(self):\n self.open_level(1)\n self.css_write('page')\n self.level1_link.click()\n",
"step-4": "from page_objects import PageObject, PageElement\n\n\nclass MainPage(PageObject):\n level_menu_opened = False\n level_menu_created = False\n css_input = PageElement(css='input.input-strobe')\n level_text_span = PageElement(css='span.level-text')\n instruction_h2 = PageElement(css='h2.order')\n enter_button = PageElement(css='div.enter-button')\n level_menu = PageElement(xpath='//div[@class=\"level-menu-toggle-wrapper\"]')\n\n def __init__(self, webdriver, root_uri=None):\n super(MainPage, self).__init__(webdriver, root_uri)\n self.open_level_menu()\n self.close_level_menu()\n\n def ensure_menu_created(self):\n if not self.level_menu_created:\n self.open_level_menu()\n self.close_level_menu()\n\n def open_level_menu(self):\n if not self.level_menu_opened:\n self.level_menu.click()\n self.level_menu_opened = True\n self.level_menu_created = True\n\n def close_level_menu(self):\n if self.level_menu_opened:\n self.level_menu.click()\n self.level_menu_opened = False\n\n def get_level_link(self, level_number):\n return PageElement(xpath=\n '//span[@class=\"level-number\" and text() = \"{0}\"]/..'.format(\n level_number))\n\n def open_level(self, level_number):\n self.open_level_menu()\n self.get_level_link(level_number).click()\n\n def css_write(self, css):\n self.css_input = css\n self.enter_button.click()\n\n def do_level1(self):\n self.open_level(1)\n self.css_write('page')\n self.level1_link.click()\n",
"step-5": "from page_objects import PageObject, PageElement\n\n\nclass MainPage(PageObject):\n level_menu_opened = False\n level_menu_created = False\n css_input = PageElement(css='input.input-strobe')\n level_text_span = PageElement(css='span.level-text')\n instruction_h2 = PageElement(css='h2.order')\n enter_button = PageElement(css='div.enter-button')\n\n level_menu = PageElement(\n xpath='//div[@class=\"level-menu-toggle-wrapper\"]')\n\n def __init__(self, webdriver, root_uri=None):\n super(MainPage, self).__init__(webdriver, root_uri)\n # hack to initialize all the menu items\n self.open_level_menu()\n self.close_level_menu()\n\n def ensure_menu_created(self):\n if not self.level_menu_created:\n self.open_level_menu()\n self.close_level_menu()\n\n def open_level_menu(self):\n if not self.level_menu_opened:\n self.level_menu.click()\n self.level_menu_opened = True\n self.level_menu_created = True\n\n def close_level_menu(self):\n if self.level_menu_opened:\n self.level_menu.click()\n self.level_menu_opened = False\n\n def get_level_link(self, level_number):\n return PageElement(\n xpath='//span[@class=\"level-number\" and text() = \"{0}\"]/..'\n .format(level_number)\n )\n\n def open_level(self, level_number):\n self.open_level_menu()\n self.get_level_link(level_number).click()\n\n def css_write(self, css):\n self.css_input = css\n self.enter_button.click()\n\n def do_level1(self):\n self.open_level(1)\n self.css_write(\"page\")\n self.level1_link.click()\n",
"step-ids": [
6,
7,
10,
11,
12
]
}
|
[
6,
7,
10,
11,
12
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
fig.tight_layout()
fig.subplots_adjust(wspace=0.05)
<|reserved_special_token_0|>
for year in years:
train = get(year, features, index)
train = pre(train)
for method in methods:
ax = axes[i, j]
Z = linkage(train, method=method)
dn = dendrogram(Z, ax=ax, labels=index)
ax.set_yticks([])
i += 1
j += 1
i = 0
for i in range(3):
axes[i, 0].set_ylabel(methods[i], rotation=0, labelpad=25)
axes[3, 0].set_ylabel('WARD', rotation=0, labelpad=25, color='r')
for j in range(3):
axes[0, j].set_title(years[j])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
index = ['BAC', 'JPM', 'GS', 'C', 'AAPL', 'IBM', 'MSFT', 'ORCL']
years = [2010, 2013, 2016]
features = ['TOTAL ASSETS', 'Cash & Equivalents',
'Receivables - Total (Net)', 'Inventories - Total', 'Sales (Net)',
'Cost of Good Sold', 'GROSS PROFIT']
methods = ['single', 'complete', 'average', 'ward']
fig, axes = plt.subplots(4, 3, figsize=(16, 9))
fig.tight_layout()
fig.subplots_adjust(wspace=0.05)
i = 0
j = 0
for year in years:
train = get(year, features, index)
train = pre(train)
for method in methods:
ax = axes[i, j]
Z = linkage(train, method=method)
dn = dendrogram(Z, ax=ax, labels=index)
ax.set_yticks([])
i += 1
j += 1
i = 0
for i in range(3):
axes[i, 0].set_ylabel(methods[i], rotation=0, labelpad=25)
axes[3, 0].set_ylabel('WARD', rotation=0, labelpad=25, color='r')
for j in range(3):
axes[0, j].set_title(years[j])
<|reserved_special_token_1|>
from scipy.cluster.hierarchy import dendrogram, linkage
from get_train import get, pre
import matplotlib.pyplot as plt
index = ['BAC', 'JPM', 'GS', 'C', 'AAPL', 'IBM', 'MSFT', 'ORCL']
years = [2010, 2013, 2016]
features = ['TOTAL ASSETS', 'Cash & Equivalents',
'Receivables - Total (Net)', 'Inventories - Total', 'Sales (Net)',
'Cost of Good Sold', 'GROSS PROFIT']
methods = ['single', 'complete', 'average', 'ward']
fig, axes = plt.subplots(4, 3, figsize=(16, 9))
fig.tight_layout()
fig.subplots_adjust(wspace=0.05)
i = 0
j = 0
for year in years:
train = get(year, features, index)
train = pre(train)
for method in methods:
ax = axes[i, j]
Z = linkage(train, method=method)
dn = dendrogram(Z, ax=ax, labels=index)
ax.set_yticks([])
i += 1
j += 1
i = 0
for i in range(3):
axes[i, 0].set_ylabel(methods[i], rotation=0, labelpad=25)
axes[3, 0].set_ylabel('WARD', rotation=0, labelpad=25, color='r')
for j in range(3):
axes[0, j].set_title(years[j])
<|reserved_special_token_1|>
from scipy.cluster.hierarchy import dendrogram, linkage
from get_train import get, pre
import matplotlib.pyplot as plt
#%%
index = [
'BAC',
'JPM',
'GS',
'C',
'AAPL',
'IBM',
'MSFT',
'ORCL'
]
years = [
2010,
2013,
2016
]
features = [
'TOTAL ASSETS',
'Cash & Equivalents',
'Receivables - Total (Net)',
'Inventories - Total',
'Sales (Net)',
'Cost of Good Sold',
'GROSS PROFIT'
]
methods = [
'single',
'complete',
'average',
'ward'
]
#%%
fig, axes = plt.subplots(4, 3, figsize=(16, 9))
fig.tight_layout()
fig.subplots_adjust(wspace=0.05)
i = 0
j = 0
for year in years:
train = get(year, features, index)
train = pre(train)
for method in methods:
ax = axes[i, j]
Z = linkage(train, method=method)
dn = dendrogram(Z, ax=ax, labels=index)
ax.set_yticks([])
i += 1
j += 1
i = 0
for i in range(3):
axes[i, 0].set_ylabel(
methods[i],
rotation=0,
labelpad=25
)
axes[3, 0].set_ylabel(
'WARD',
rotation=0,
labelpad=25,
color='r'
)
for j in range(3):
axes[0, j].set_title(years[j])
|
flexible
|
{
"blob_id": "8279f8a80d96a7231e35100d2c39fa5e1f34f5f5",
"index": 9777,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfig.tight_layout()\nfig.subplots_adjust(wspace=0.05)\n<mask token>\nfor year in years:\n train = get(year, features, index)\n train = pre(train)\n for method in methods:\n ax = axes[i, j]\n Z = linkage(train, method=method)\n dn = dendrogram(Z, ax=ax, labels=index)\n ax.set_yticks([])\n i += 1\n j += 1\n i = 0\nfor i in range(3):\n axes[i, 0].set_ylabel(methods[i], rotation=0, labelpad=25)\naxes[3, 0].set_ylabel('WARD', rotation=0, labelpad=25, color='r')\nfor j in range(3):\n axes[0, j].set_title(years[j])\n",
"step-3": "<mask token>\nindex = ['BAC', 'JPM', 'GS', 'C', 'AAPL', 'IBM', 'MSFT', 'ORCL']\nyears = [2010, 2013, 2016]\nfeatures = ['TOTAL ASSETS', 'Cash & Equivalents',\n 'Receivables - Total (Net)', 'Inventories - Total', 'Sales (Net)',\n 'Cost of Good Sold', 'GROSS PROFIT']\nmethods = ['single', 'complete', 'average', 'ward']\nfig, axes = plt.subplots(4, 3, figsize=(16, 9))\nfig.tight_layout()\nfig.subplots_adjust(wspace=0.05)\ni = 0\nj = 0\nfor year in years:\n train = get(year, features, index)\n train = pre(train)\n for method in methods:\n ax = axes[i, j]\n Z = linkage(train, method=method)\n dn = dendrogram(Z, ax=ax, labels=index)\n ax.set_yticks([])\n i += 1\n j += 1\n i = 0\nfor i in range(3):\n axes[i, 0].set_ylabel(methods[i], rotation=0, labelpad=25)\naxes[3, 0].set_ylabel('WARD', rotation=0, labelpad=25, color='r')\nfor j in range(3):\n axes[0, j].set_title(years[j])\n",
"step-4": "from scipy.cluster.hierarchy import dendrogram, linkage\nfrom get_train import get, pre\nimport matplotlib.pyplot as plt\nindex = ['BAC', 'JPM', 'GS', 'C', 'AAPL', 'IBM', 'MSFT', 'ORCL']\nyears = [2010, 2013, 2016]\nfeatures = ['TOTAL ASSETS', 'Cash & Equivalents',\n 'Receivables - Total (Net)', 'Inventories - Total', 'Sales (Net)',\n 'Cost of Good Sold', 'GROSS PROFIT']\nmethods = ['single', 'complete', 'average', 'ward']\nfig, axes = plt.subplots(4, 3, figsize=(16, 9))\nfig.tight_layout()\nfig.subplots_adjust(wspace=0.05)\ni = 0\nj = 0\nfor year in years:\n train = get(year, features, index)\n train = pre(train)\n for method in methods:\n ax = axes[i, j]\n Z = linkage(train, method=method)\n dn = dendrogram(Z, ax=ax, labels=index)\n ax.set_yticks([])\n i += 1\n j += 1\n i = 0\nfor i in range(3):\n axes[i, 0].set_ylabel(methods[i], rotation=0, labelpad=25)\naxes[3, 0].set_ylabel('WARD', rotation=0, labelpad=25, color='r')\nfor j in range(3):\n axes[0, j].set_title(years[j])\n",
"step-5": "from scipy.cluster.hierarchy import dendrogram, linkage\r\nfrom get_train import get, pre\r\nimport matplotlib.pyplot as plt\r\n#%%\r\nindex = [\r\n 'BAC', \r\n 'JPM', \r\n 'GS', \r\n 'C',\r\n 'AAPL', \r\n 'IBM', \r\n 'MSFT', \r\n 'ORCL'\r\n ]\r\n\r\nyears = [\r\n 2010,\r\n 2013,\r\n 2016\r\n ]\r\n\r\nfeatures = [\r\n 'TOTAL ASSETS', \r\n 'Cash & Equivalents',\r\n 'Receivables - Total (Net)',\r\n 'Inventories - Total',\r\n 'Sales (Net)',\r\n 'Cost of Good Sold',\r\n 'GROSS PROFIT'\r\n ]\r\n\r\nmethods = [\r\n 'single', \r\n 'complete', \r\n 'average', \r\n 'ward'\r\n ]\r\n\r\n#%%\r\nfig, axes = plt.subplots(4, 3, figsize=(16, 9))\r\nfig.tight_layout()\r\nfig.subplots_adjust(wspace=0.05)\r\n\r\ni = 0\r\nj = 0\r\nfor year in years:\r\n train = get(year, features, index)\r\n train = pre(train)\r\n \r\n for method in methods:\r\n ax = axes[i, j]\r\n Z = linkage(train, method=method)\r\n dn = dendrogram(Z, ax=ax, labels=index)\r\n ax.set_yticks([])\r\n \r\n i += 1\r\n \r\n j += 1\r\n i = 0\r\n\r\nfor i in range(3):\r\n axes[i, 0].set_ylabel(\r\n methods[i], \r\n rotation=0, \r\n labelpad=25\r\n )\r\naxes[3, 0].set_ylabel(\r\n 'WARD', \r\n rotation=0, \r\n labelpad=25,\r\n color='r'\r\n )\r\n \r\nfor j in range(3):\r\n axes[0, j].set_title(years[j])\r\n ",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class location_accommodation(models.AbstractModel):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@api.multi
def render_html(self, docids, data=None):
report = self.env['report']._get_report_from_name(
'sg_accommodation.view_location_report')
records = self.env['accommodation.accommodation'].browse(self.ids)
docargs = {'doc_ids': self.ids, 'doc_model': report.model, 'data':
data, 'docs': records, 'time': time, 'get_companies': self.
get_companies}
return self.env['report'].render(
'sg_accommodation.view_location_report', docargs)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class location_accommodation(models.AbstractModel):
<|reserved_special_token_0|>
@api.model
def get_companies(self):
company_list = []
self.td_list = []
comp_ids = self.env['res.company'].search([('tenant', '=', True)])
for comp in comp_ids:
company_list.append(comp.company_code)
if company_list:
company_list.sort()
no_of_td = company_list
for td in range(0, len(no_of_td)):
self.td_list.append(td)
return company_list
@api.multi
def render_html(self, docids, data=None):
report = self.env['report']._get_report_from_name(
'sg_accommodation.view_location_report')
records = self.env['accommodation.accommodation'].browse(self.ids)
docargs = {'doc_ids': self.ids, 'doc_model': report.model, 'data':
data, 'docs': records, 'time': time, 'get_companies': self.
get_companies}
return self.env['report'].render(
'sg_accommodation.view_location_report', docargs)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class location_accommodation(models.AbstractModel):
_name = 'report.sg_accommodation.view_location_report'
@api.model
def get_companies(self):
company_list = []
self.td_list = []
comp_ids = self.env['res.company'].search([('tenant', '=', True)])
for comp in comp_ids:
company_list.append(comp.company_code)
if company_list:
company_list.sort()
no_of_td = company_list
for td in range(0, len(no_of_td)):
self.td_list.append(td)
return company_list
@api.multi
def render_html(self, docids, data=None):
report = self.env['report']._get_report_from_name(
'sg_accommodation.view_location_report')
records = self.env['accommodation.accommodation'].browse(self.ids)
docargs = {'doc_ids': self.ids, 'doc_model': report.model, 'data':
data, 'docs': records, 'time': time, 'get_companies': self.
get_companies}
return self.env['report'].render(
'sg_accommodation.view_location_report', docargs)
<|reserved_special_token_1|>
from odoo import api, models
import time
class location_accommodation(models.AbstractModel):
_name = 'report.sg_accommodation.view_location_report'
@api.model
def get_companies(self):
company_list = []
self.td_list = []
comp_ids = self.env['res.company'].search([('tenant', '=', True)])
for comp in comp_ids:
company_list.append(comp.company_code)
if company_list:
company_list.sort()
no_of_td = company_list
for td in range(0, len(no_of_td)):
self.td_list.append(td)
return company_list
@api.multi
def render_html(self, docids, data=None):
report = self.env['report']._get_report_from_name(
'sg_accommodation.view_location_report')
records = self.env['accommodation.accommodation'].browse(self.ids)
docargs = {'doc_ids': self.ids, 'doc_model': report.model, 'data':
data, 'docs': records, 'time': time, 'get_companies': self.
get_companies}
return self.env['report'].render(
'sg_accommodation.view_location_report', docargs)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011-Today Serpent Consulting Services Pvt.Ltd. (<http://www.serpentcs.com>).
# Copyright (C) 2004 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from odoo import api, models
import time
class location_accommodation(models.AbstractModel):
_name = 'report.sg_accommodation.view_location_report'
@api.model
def get_companies(self):
company_list=[]
self.td_list = []
comp_ids=self.env['res.company'].search([('tenant', '=', True)])
for comp in comp_ids:
company_list.append(comp.company_code)
if company_list:
company_list.sort()
no_of_td=company_list
for td in range(0,len(no_of_td)):
self.td_list.append(td)
return company_list
@api.multi
def render_html(self, docids, data=None):
report = self.env['report']._get_report_from_name('sg_accommodation.view_location_report')
records = self.env['accommodation.accommodation'].browse(self.ids)
docargs = {'doc_ids' : self.ids,
'doc_model' : report.model,
'data' : data,
'docs' : records,
'time' : time,
'get_companies' : self.get_companies}
return self.env['report'].render('sg_accommodation.view_location_report', docargs)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
flexible
|
{
"blob_id": "ac99c19294661657d383b036c9ab83e7b610cb7d",
"index": 6896,
"step-1": "<mask token>\n\n\nclass location_accommodation(models.AbstractModel):\n <mask token>\n <mask token>\n\n @api.multi\n def render_html(self, docids, data=None):\n report = self.env['report']._get_report_from_name(\n 'sg_accommodation.view_location_report')\n records = self.env['accommodation.accommodation'].browse(self.ids)\n docargs = {'doc_ids': self.ids, 'doc_model': report.model, 'data':\n data, 'docs': records, 'time': time, 'get_companies': self.\n get_companies}\n return self.env['report'].render(\n 'sg_accommodation.view_location_report', docargs)\n",
"step-2": "<mask token>\n\n\nclass location_accommodation(models.AbstractModel):\n <mask token>\n\n @api.model\n def get_companies(self):\n company_list = []\n self.td_list = []\n comp_ids = self.env['res.company'].search([('tenant', '=', True)])\n for comp in comp_ids:\n company_list.append(comp.company_code)\n if company_list:\n company_list.sort()\n no_of_td = company_list\n for td in range(0, len(no_of_td)):\n self.td_list.append(td)\n return company_list\n\n @api.multi\n def render_html(self, docids, data=None):\n report = self.env['report']._get_report_from_name(\n 'sg_accommodation.view_location_report')\n records = self.env['accommodation.accommodation'].browse(self.ids)\n docargs = {'doc_ids': self.ids, 'doc_model': report.model, 'data':\n data, 'docs': records, 'time': time, 'get_companies': self.\n get_companies}\n return self.env['report'].render(\n 'sg_accommodation.view_location_report', docargs)\n",
"step-3": "<mask token>\n\n\nclass location_accommodation(models.AbstractModel):\n _name = 'report.sg_accommodation.view_location_report'\n\n @api.model\n def get_companies(self):\n company_list = []\n self.td_list = []\n comp_ids = self.env['res.company'].search([('tenant', '=', True)])\n for comp in comp_ids:\n company_list.append(comp.company_code)\n if company_list:\n company_list.sort()\n no_of_td = company_list\n for td in range(0, len(no_of_td)):\n self.td_list.append(td)\n return company_list\n\n @api.multi\n def render_html(self, docids, data=None):\n report = self.env['report']._get_report_from_name(\n 'sg_accommodation.view_location_report')\n records = self.env['accommodation.accommodation'].browse(self.ids)\n docargs = {'doc_ids': self.ids, 'doc_model': report.model, 'data':\n data, 'docs': records, 'time': time, 'get_companies': self.\n get_companies}\n return self.env['report'].render(\n 'sg_accommodation.view_location_report', docargs)\n",
"step-4": "from odoo import api, models\nimport time\n\n\nclass location_accommodation(models.AbstractModel):\n _name = 'report.sg_accommodation.view_location_report'\n\n @api.model\n def get_companies(self):\n company_list = []\n self.td_list = []\n comp_ids = self.env['res.company'].search([('tenant', '=', True)])\n for comp in comp_ids:\n company_list.append(comp.company_code)\n if company_list:\n company_list.sort()\n no_of_td = company_list\n for td in range(0, len(no_of_td)):\n self.td_list.append(td)\n return company_list\n\n @api.multi\n def render_html(self, docids, data=None):\n report = self.env['report']._get_report_from_name(\n 'sg_accommodation.view_location_report')\n records = self.env['accommodation.accommodation'].browse(self.ids)\n docargs = {'doc_ids': self.ids, 'doc_model': report.model, 'data':\n data, 'docs': records, 'time': time, 'get_companies': self.\n get_companies}\n return self.env['report'].render(\n 'sg_accommodation.view_location_report', docargs)\n",
"step-5": "# -*- coding: utf-8 -*-\n##############################################################################\n#\n# OpenERP, Open Source Management Solution\n# Copyright (C) 2011-Today Serpent Consulting Services Pvt.Ltd. (<http://www.serpentcs.com>).\n# Copyright (C) 2004 OpenERP SA (<http://www.openerp.com>)\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>\n#\n##############################################################################\nfrom odoo import api, models\nimport time\n\n\nclass location_accommodation(models.AbstractModel):\n _name = 'report.sg_accommodation.view_location_report'\n\n @api.model\n def get_companies(self):\n company_list=[]\n self.td_list = []\n comp_ids=self.env['res.company'].search([('tenant', '=', True)])\n for comp in comp_ids:\n company_list.append(comp.company_code)\n if company_list:\n company_list.sort()\n no_of_td=company_list\n for td in range(0,len(no_of_td)):\n self.td_list.append(td)\n return company_list\n\n @api.multi\n def render_html(self, docids, data=None):\n report = self.env['report']._get_report_from_name('sg_accommodation.view_location_report')\n records = self.env['accommodation.accommodation'].browse(self.ids)\n docargs = {'doc_ids' : self.ids,\n 'doc_model' : report.model,\n 'data' : data,\n 'docs' : records,\n 'time' : time,\n 'get_companies' : self.get_companies}\n return 
self.env['report'].render('sg_accommodation.view_location_report', docargs)\n \n# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# Generated by Django 2.2.6 on 2019-11-05 02:28
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('drchrono', '0011_patient_cell_phone'),
]
operations = [
migrations.AddField(
model_name='appointment',
name='date',
field=models.DateTimeField(default=None, null=True),
),
]
|
normal
|
{
"blob_id": "0c7f2412fe9a83d70d41fbc4bbaf135e6bc4149a",
"index": 8129,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('drchrono', '0011_patient_cell_phone')]\n operations = [migrations.AddField(model_name='appointment', name='date',\n field=models.DateTimeField(default=None, null=True))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('drchrono', '0011_patient_cell_phone')]\n operations = [migrations.AddField(model_name='appointment', name='date',\n field=models.DateTimeField(default=None, null=True))]\n",
"step-5": "# Generated by Django 2.2.6 on 2019-11-05 02:28\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('drchrono', '0011_patient_cell_phone'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='appointment',\n name='date',\n field=models.DateTimeField(default=None, null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def makeoutput(path):
if os.path.exists(path):
pass
else:
os.mkdir(path)
def mailinglist_cookies(mailinglist, password):
try:
cookie_request = requests.post(URL + ADMIN + mailinglist, data={
'adminpw': password})
cookie_request.raise_for_status()
return cookie_request.cookies
except:
print(messages.error_message)
return None
def make_roster(mailinglist, cookies):
roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)
roster_soup = BeautifulSoup(roster_request.text, 'html.parser')
roster_result_set = roster_soup.find_all('a')[:-4]
roster = []
for r in roster_result_set:
roster.append(r.text.replace(' at ', '@'))
return roster
def main():
makeoutput(OUTPUT_FOLDER)
print(messages.welcome_message)
while True:
mailinglist = input(
"What's the name of the mailing list you want to download?> ")
password = input('What is the list admin password?> ')
filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'
cookies = mailinglist_cookies(mailinglist, password)
if cookies != None:
roster = make_roster(mailinglist, cookies)
for count, email in enumerate(roster, 1):
print(count, '/', len(roster))
with open(filename, 'a') as output:
output.write(email + ';\n')
print('Saved', len(roster), 'email addresses in', os.path.
abspath(filename))
input('press enter to close')
break
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def makeoutput(path):
if os.path.exists(path):
pass
else:
os.mkdir(path)
def mailinglist_cookies(mailinglist, password):
try:
cookie_request = requests.post(URL + ADMIN + mailinglist, data={
'adminpw': password})
cookie_request.raise_for_status()
return cookie_request.cookies
except:
print(messages.error_message)
return None
def make_roster(mailinglist, cookies):
roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)
roster_soup = BeautifulSoup(roster_request.text, 'html.parser')
roster_result_set = roster_soup.find_all('a')[:-4]
roster = []
for r in roster_result_set:
roster.append(r.text.replace(' at ', '@'))
return roster
def main():
makeoutput(OUTPUT_FOLDER)
print(messages.welcome_message)
while True:
mailinglist = input(
"What's the name of the mailing list you want to download?> ")
password = input('What is the list admin password?> ')
filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'
cookies = mailinglist_cookies(mailinglist, password)
if cookies != None:
roster = make_roster(mailinglist, cookies)
for count, email in enumerate(roster, 1):
print(count, '/', len(roster))
with open(filename, 'a') as output:
output.write(email + ';\n')
print('Saved', len(roster), 'email addresses in', os.path.
abspath(filename))
input('press enter to close')
break
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
URL = 'https://mailman.kcl.ac.uk/mailman/'
ADMIN = 'admin/'
ROSTER = 'roster/'
OUTPUT_FOLDER = '../output/'
def makeoutput(path):
if os.path.exists(path):
pass
else:
os.mkdir(path)
def mailinglist_cookies(mailinglist, password):
try:
cookie_request = requests.post(URL + ADMIN + mailinglist, data={
'adminpw': password})
cookie_request.raise_for_status()
return cookie_request.cookies
except:
print(messages.error_message)
return None
def make_roster(mailinglist, cookies):
roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)
roster_soup = BeautifulSoup(roster_request.text, 'html.parser')
roster_result_set = roster_soup.find_all('a')[:-4]
roster = []
for r in roster_result_set:
roster.append(r.text.replace(' at ', '@'))
return roster
def main():
makeoutput(OUTPUT_FOLDER)
print(messages.welcome_message)
while True:
mailinglist = input(
"What's the name of the mailing list you want to download?> ")
password = input('What is the list admin password?> ')
filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'
cookies = mailinglist_cookies(mailinglist, password)
if cookies != None:
roster = make_roster(mailinglist, cookies)
for count, email in enumerate(roster, 1):
print(count, '/', len(roster))
with open(filename, 'a') as output:
output.write(email + ';\n')
print('Saved', len(roster), 'email addresses in', os.path.
abspath(filename))
input('press enter to close')
break
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import messages
import os
import requests
from bs4 import BeautifulSoup
URL = 'https://mailman.kcl.ac.uk/mailman/'
ADMIN = 'admin/'
ROSTER = 'roster/'
OUTPUT_FOLDER = '../output/'
def makeoutput(path):
    """Create the output directory at *path* if it does not already exist.

    Replaces the original exists()/mkdir() pair: makedirs(exist_ok=True)
    is race-free and also tolerates a missing parent directory.
    """
    os.makedirs(path, exist_ok=True)
def mailinglist_cookies(mailinglist, password):
    """Log in to the Mailman admin page for *mailinglist*.

    Posts the admin password and returns the session cookies needed by
    the roster request, or None when login fails.
    """
    try:
        cookie_request = requests.post(URL + ADMIN + mailinglist, data={
            'adminpw': password})
        # raise_for_status() turns 4XX/5XX responses (e.g. a wrong
        # password) into exceptions handled below.
        cookie_request.raise_for_status()
        return cookie_request.cookies
    except requests.exceptions.RequestException:
        # Narrowed from a bare `except:` so programming errors and
        # KeyboardInterrupt are no longer silently swallowed.
        print(messages.error_message)
        return None
def make_roster(mailinglist, cookies):
    """Fetch the member roster page and return its email addresses.

    Uses the admin *cookies* to request the roster page, drops the last
    four anchors (admin navigation links, per the original author's
    note) and converts Mailman's ' at ' display form back into '@'.
    """
    response = requests.get(URL + ROSTER + mailinglist, cookies=cookies)
    soup = BeautifulSoup(response.text, 'html.parser')
    member_links = soup.find_all('a')[:-4]
    return [link.text.replace(' at ', '@') for link in member_links]
def main():
    """Interactively download a Mailman roster and save it to a file.

    Loops until a list name/password pair yields a login cookie, writes
    each address (semicolon-terminated, one per line) into
    OUTPUT_FOLDER/<list>-mailinglist.txt, then exits.
    """
    makeoutput(OUTPUT_FOLDER)
    print(messages.welcome_message)
    while True:
        mailinglist = input(
            "What's the name of the mailing list you want to download?> ")
        password = input('What is the list admin password?> ')
        filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'
        cookies = mailinglist_cookies(mailinglist, password)
        if cookies is not None:  # PEP 8: identity test, not != None
            roster = make_roster(mailinglist, cookies)
            # Open the file once instead of re-opening it for every
            # address; append mode is kept so repeated runs still
            # accumulate, as before.
            with open(filename, 'a') as output:
                for count, email in enumerate(roster, 1):
                    print(count, '/', len(roster))
                    output.write(email + ';\n')
            print('Saved', len(roster), 'email addresses in',
                os.path.abspath(filename))
            input('press enter to close')
            break
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import messages
import os
import requests
from bs4 import BeautifulSoup
URL = "https://mailman.kcl.ac.uk/mailman/"
ADMIN = "admin/"
ROSTER = "roster/"
OUTPUT_FOLDER = "../output/"
def makeoutput(path):
if os.path.exists(path):
pass
else:
os.mkdir(path)
def mailinglist_cookies(mailinglist, password): # this opens up the admin page, enters the password, and saves the returned cookie to be passed to the next request
try:
cookie_request = requests.post(URL+ ADMIN + mailinglist, data = {'adminpw':password})
cookie_request.raise_for_status()
return cookie_request.cookies
except: # raises exception if the password is incorrect (or any other 4XX error)
print(messages.error_message)
return None
def make_roster(mailinglist, cookies): # takes the cookie from the cookie request and requests the roster
roster_request = requests.get(URL+ ROSTER + mailinglist, cookies = cookies)
roster_soup = BeautifulSoup(roster_request.text,'html.parser')
roster_result_set = roster_soup.find_all('a')[:-4] # the last 4 links on the page are admin links
roster = []
for r in roster_result_set:
roster.append(r.text.replace(' at ','@')) #the mailman list inexplicably uses a stupid ' at ' display format
return roster
def main():
makeoutput(OUTPUT_FOLDER)
print(messages.welcome_message)
while True:
mailinglist = input("What's the name of the mailing list you want to download?> ")
password = input("What is the list admin password?> ")
filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'
cookies = mailinglist_cookies(mailinglist, password)
if cookies != None:
roster = make_roster(mailinglist, cookies)
for count, email in enumerate(roster,1):
print(count,"/",len(roster))
with open(filename, 'a') as output:
output.write(email + ';\n')
print("Saved", len(roster), "email addresses in", os.path.abspath(filename))
input("press enter to close")
break
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "0e337ce21450e0fdb7688183d0542ebf902a9614",
"index": 1293,
"step-1": "<mask token>\n\n\ndef makeoutput(path):\n if os.path.exists(path):\n pass\n else:\n os.mkdir(path)\n\n\ndef mailinglist_cookies(mailinglist, password):\n try:\n cookie_request = requests.post(URL + ADMIN + mailinglist, data={\n 'adminpw': password})\n cookie_request.raise_for_status()\n return cookie_request.cookies\n except:\n print(messages.error_message)\n return None\n\n\ndef make_roster(mailinglist, cookies):\n roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)\n roster_soup = BeautifulSoup(roster_request.text, 'html.parser')\n roster_result_set = roster_soup.find_all('a')[:-4]\n roster = []\n for r in roster_result_set:\n roster.append(r.text.replace(' at ', '@'))\n return roster\n\n\ndef main():\n makeoutput(OUTPUT_FOLDER)\n print(messages.welcome_message)\n while True:\n mailinglist = input(\n \"What's the name of the mailing list you want to download?> \")\n password = input('What is the list admin password?> ')\n filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'\n cookies = mailinglist_cookies(mailinglist, password)\n if cookies != None:\n roster = make_roster(mailinglist, cookies)\n for count, email in enumerate(roster, 1):\n print(count, '/', len(roster))\n with open(filename, 'a') as output:\n output.write(email + ';\\n')\n print('Saved', len(roster), 'email addresses in', os.path.\n abspath(filename))\n input('press enter to close')\n break\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef makeoutput(path):\n if os.path.exists(path):\n pass\n else:\n os.mkdir(path)\n\n\ndef mailinglist_cookies(mailinglist, password):\n try:\n cookie_request = requests.post(URL + ADMIN + mailinglist, data={\n 'adminpw': password})\n cookie_request.raise_for_status()\n return cookie_request.cookies\n except:\n print(messages.error_message)\n return None\n\n\ndef make_roster(mailinglist, cookies):\n roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)\n roster_soup = BeautifulSoup(roster_request.text, 'html.parser')\n roster_result_set = roster_soup.find_all('a')[:-4]\n roster = []\n for r in roster_result_set:\n roster.append(r.text.replace(' at ', '@'))\n return roster\n\n\ndef main():\n makeoutput(OUTPUT_FOLDER)\n print(messages.welcome_message)\n while True:\n mailinglist = input(\n \"What's the name of the mailing list you want to download?> \")\n password = input('What is the list admin password?> ')\n filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'\n cookies = mailinglist_cookies(mailinglist, password)\n if cookies != None:\n roster = make_roster(mailinglist, cookies)\n for count, email in enumerate(roster, 1):\n print(count, '/', len(roster))\n with open(filename, 'a') as output:\n output.write(email + ';\\n')\n print('Saved', len(roster), 'email addresses in', os.path.\n abspath(filename))\n input('press enter to close')\n break\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nURL = 'https://mailman.kcl.ac.uk/mailman/'\nADMIN = 'admin/'\nROSTER = 'roster/'\nOUTPUT_FOLDER = '../output/'\n\n\ndef makeoutput(path):\n if os.path.exists(path):\n pass\n else:\n os.mkdir(path)\n\n\ndef mailinglist_cookies(mailinglist, password):\n try:\n cookie_request = requests.post(URL + ADMIN + mailinglist, data={\n 'adminpw': password})\n cookie_request.raise_for_status()\n return cookie_request.cookies\n except:\n print(messages.error_message)\n return None\n\n\ndef make_roster(mailinglist, cookies):\n roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)\n roster_soup = BeautifulSoup(roster_request.text, 'html.parser')\n roster_result_set = roster_soup.find_all('a')[:-4]\n roster = []\n for r in roster_result_set:\n roster.append(r.text.replace(' at ', '@'))\n return roster\n\n\ndef main():\n makeoutput(OUTPUT_FOLDER)\n print(messages.welcome_message)\n while True:\n mailinglist = input(\n \"What's the name of the mailing list you want to download?> \")\n password = input('What is the list admin password?> ')\n filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'\n cookies = mailinglist_cookies(mailinglist, password)\n if cookies != None:\n roster = make_roster(mailinglist, cookies)\n for count, email in enumerate(roster, 1):\n print(count, '/', len(roster))\n with open(filename, 'a') as output:\n output.write(email + ';\\n')\n print('Saved', len(roster), 'email addresses in', os.path.\n abspath(filename))\n input('press enter to close')\n break\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import messages\nimport os\nimport requests\nfrom bs4 import BeautifulSoup\nURL = 'https://mailman.kcl.ac.uk/mailman/'\nADMIN = 'admin/'\nROSTER = 'roster/'\nOUTPUT_FOLDER = '../output/'\n\n\ndef makeoutput(path):\n if os.path.exists(path):\n pass\n else:\n os.mkdir(path)\n\n\ndef mailinglist_cookies(mailinglist, password):\n try:\n cookie_request = requests.post(URL + ADMIN + mailinglist, data={\n 'adminpw': password})\n cookie_request.raise_for_status()\n return cookie_request.cookies\n except:\n print(messages.error_message)\n return None\n\n\ndef make_roster(mailinglist, cookies):\n roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)\n roster_soup = BeautifulSoup(roster_request.text, 'html.parser')\n roster_result_set = roster_soup.find_all('a')[:-4]\n roster = []\n for r in roster_result_set:\n roster.append(r.text.replace(' at ', '@'))\n return roster\n\n\ndef main():\n makeoutput(OUTPUT_FOLDER)\n print(messages.welcome_message)\n while True:\n mailinglist = input(\n \"What's the name of the mailing list you want to download?> \")\n password = input('What is the list admin password?> ')\n filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'\n cookies = mailinglist_cookies(mailinglist, password)\n if cookies != None:\n roster = make_roster(mailinglist, cookies)\n for count, email in enumerate(roster, 1):\n print(count, '/', len(roster))\n with open(filename, 'a') as output:\n output.write(email + ';\\n')\n print('Saved', len(roster), 'email addresses in', os.path.\n abspath(filename))\n input('press enter to close')\n break\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "\nimport messages\nimport os\nimport requests\nfrom bs4 import BeautifulSoup\n\nURL = \"https://mailman.kcl.ac.uk/mailman/\"\nADMIN = \"admin/\"\nROSTER = \"roster/\"\nOUTPUT_FOLDER = \"../output/\"\n\ndef makeoutput(path):\t\n\tif os.path.exists(path):\n\t\tpass\n\telse:\n\t\tos.mkdir(path)\n\ndef mailinglist_cookies(mailinglist, password): # this opens up the admin page, enters the password, and saves the returned cookie to be passed to the next request\n\ttry:\n\t\tcookie_request = requests.post(URL+ ADMIN + mailinglist, data = {'adminpw':password})\n\t\tcookie_request.raise_for_status()\n\t\treturn cookie_request.cookies \n\texcept: # raises exception if the password is incorrect (or any other 4XX error)\n\t\tprint(messages.error_message)\n\t\treturn None\n\ndef make_roster(mailinglist, cookies): # takes the cookie from the cookie request and requests the roster\n\troster_request = requests.get(URL+ ROSTER + mailinglist, cookies = cookies)\n\troster_soup = BeautifulSoup(roster_request.text,'html.parser')\n\troster_result_set = roster_soup.find_all('a')[:-4] # the last 4 links on the page are admin links\n\troster = []\n\tfor r in roster_result_set:\n\t\troster.append(r.text.replace(' at ','@')) #the mailman list inexplicably uses a stupid ' at ' display format\n\n\treturn roster\n\ndef main():\n\t\n\tmakeoutput(OUTPUT_FOLDER)\t\n\tprint(messages.welcome_message)\t\n\n\twhile True:\t\t\n\t\tmailinglist = input(\"What's the name of the mailing list you want to download?> \")\n\t\tpassword = input(\"What is the list admin password?> \")\n\t\tfilename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'\n\n\t\tcookies = mailinglist_cookies(mailinglist, password)\n\t\tif cookies != None:\n\t\t\troster = make_roster(mailinglist, cookies)\t\t\n\t\t\tfor count, email in enumerate(roster,1):\n\t\t\t\t\n\t\t\t\tprint(count,\"/\",len(roster))\n\n\t\t\t\twith open(filename, 'a') as output:\n\t\t\t\t\toutput.write(email + ';\\n')\n\t\t\t\n\t\t\tprint(\"Saved\", 
len(roster), \"email addresses in\", os.path.abspath(filename))\n\t\t\tinput(\"press enter to close\")\n\t\t\tbreak\t\t\n\nif __name__ == '__main__':\n\tmain()",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# website = urlopen("https://webservices.ulm.edu/forms/forms-list")
# data = bs(website, "lxml")
# forms = data.findAll("span", {"class": "file"})
# forms_list = []
# names = []
# for f in forms:
# forms_list.append(f.find("a")["href"])
# names.append(f.get_text())
# # print(forms_list)
# for f in forms_list:
# webbrowser.open(f)
from urllib.request import urlopen
from bs4 import BeautifulSoup as bs
import lxml
import urllib.request
import webbrowser
# download function
def downloader(url, div, classTag, className, specificData1, specificData2):
    """Scrape *url* and return attribute values from matching elements.

    Finds every *div* element whose *classTag* attribute equals
    *className* (e.g. div='span', classTag='class', className='file'),
    then inside each match finds the first *specificData1* tag
    (e.g. 'a') and collects its *specificData2* attribute (e.g. 'href').
    """
    website = urlopen(url)
    data = bs(website, "lxml")
    # BUG FIX: the original passed a one-element *set* built from a
    # malformed string ({"+class:file}"}) instead of an attribute-filter
    # dict, so findAll could never match anything.
    contents = data.findAll(div, {classTag: className})
    contents_list = []
    for file in contents:
        # BUG FIX: the original indexed the *parameter string* itself
        # (specificData1['"..."']); the intent -- shown by the commented
        # f.find("a")["href"] example above -- is tag lookup followed by
        # attribute access.
        contents_list.append(file.find(specificData1)[specificData2])
    print(contents_list)
    return contents_list
def main():
    """Prompt for the scraping parameters, run the downloader and echo
    the collected links."""
    prompts = (
        "Enter the website you want to download file from: ",
        "Enter the div/span (be as specific as you can): ",
        "Enter the class/id tag you want to extract link from: ",
        "Enter the class/id name: ",
        "Enter specific tag a, li, : ",
        "Enter specific tag inside specific1 : ",
    )
    # Ask the questions in order; the answers map positionally onto
    # downloader(url, div, classTag, className, specific1, specific2).
    answers = [input(prompt) for prompt in prompts]
    contents = downloader(*answers)
    print(contents)

main()
|
normal
|
{
"blob_id": "a61f351391ca1b18359323fd9e49f1efa4c7513c",
"index": 4007,
"step-1": "<mask token>\n\n\ndef main():\n website = input('Enter the website you want to download file from: ')\n div = input('Enter the div/span (be as specific as you can): ')\n classTag = input('Enter the class/id tag you want to extract link from: ')\n className = input('Enter the class/id name: ')\n specific1 = input('Enter specific tag a, li, : ')\n specific2 = input('Enter specific tag inside specific1 : ')\n contents = downloader(website, div, classTag, className, specific1,\n specific2)\n print(contents)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef downloader(url, div, classTag, className, specificData1, specificData2):\n website = urlopen(url)\n data = bs(website, 'lxml')\n contents = data.findAll(div, {'+' + str(classTag) + ':' + str(className\n ) + '}'})\n contents_list = []\n names_list = []\n for file in contents:\n contents_list.append(file.find(specificData1['\"' + specificData2 +\n '\"']))\n names_list.append(file.get_text())\n print(contents_list)\n return contents_list\n\n\ndef main():\n website = input('Enter the website you want to download file from: ')\n div = input('Enter the div/span (be as specific as you can): ')\n classTag = input('Enter the class/id tag you want to extract link from: ')\n className = input('Enter the class/id name: ')\n specific1 = input('Enter specific tag a, li, : ')\n specific2 = input('Enter specific tag inside specific1 : ')\n contents = downloader(website, div, classTag, className, specific1,\n specific2)\n print(contents)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef downloader(url, div, classTag, className, specificData1, specificData2):\n website = urlopen(url)\n data = bs(website, 'lxml')\n contents = data.findAll(div, {'+' + str(classTag) + ':' + str(className\n ) + '}'})\n contents_list = []\n names_list = []\n for file in contents:\n contents_list.append(file.find(specificData1['\"' + specificData2 +\n '\"']))\n names_list.append(file.get_text())\n print(contents_list)\n return contents_list\n\n\ndef main():\n website = input('Enter the website you want to download file from: ')\n div = input('Enter the div/span (be as specific as you can): ')\n classTag = input('Enter the class/id tag you want to extract link from: ')\n className = input('Enter the class/id name: ')\n specific1 = input('Enter specific tag a, li, : ')\n specific2 = input('Enter specific tag inside specific1 : ')\n contents = downloader(website, div, classTag, className, specific1,\n specific2)\n print(contents)\n\n\nmain()\n",
"step-4": "from urllib.request import urlopen\nfrom bs4 import BeautifulSoup as bs\nimport lxml\nimport urllib.request\nimport webbrowser\n\n\ndef downloader(url, div, classTag, className, specificData1, specificData2):\n website = urlopen(url)\n data = bs(website, 'lxml')\n contents = data.findAll(div, {'+' + str(classTag) + ':' + str(className\n ) + '}'})\n contents_list = []\n names_list = []\n for file in contents:\n contents_list.append(file.find(specificData1['\"' + specificData2 +\n '\"']))\n names_list.append(file.get_text())\n print(contents_list)\n return contents_list\n\n\ndef main():\n website = input('Enter the website you want to download file from: ')\n div = input('Enter the div/span (be as specific as you can): ')\n classTag = input('Enter the class/id tag you want to extract link from: ')\n className = input('Enter the class/id name: ')\n specific1 = input('Enter specific tag a, li, : ')\n specific2 = input('Enter specific tag inside specific1 : ')\n contents = downloader(website, div, classTag, className, specific1,\n specific2)\n print(contents)\n\n\nmain()\n",
"step-5": "\n\n# website = urlopen(\"https://webservices.ulm.edu/forms/forms-list\")\n# data = bs(website, \"lxml\")\n\n# forms = data.findAll(\"span\", {\"class\": \"file\"})\n\n# forms_list = []\n# names = []\n# for f in forms:\n# forms_list.append(f.find(\"a\")[\"href\"])\n# names.append(f.get_text())\n\n# # print(forms_list)\n\n# for f in forms_list:\n# webbrowser.open(f)\n\n\nfrom urllib.request import urlopen\nfrom bs4 import BeautifulSoup as bs\nimport lxml\nimport urllib.request\nimport webbrowser\n\n# download function\ndef downloader(url, div, classTag, className, specificData1, specificData2):\n website = urlopen(url)\n data = bs(website, \"lxml\")\n\n contents = data.findAll(div, {\"+\" + str(classTag) +\":\" + str(className) + \"}\"})\n\n contents_list = []\n names_list = []\n\n for file in contents:\n contents_list.append(file.find(specificData1['\"' + specificData2 + '\"']))\n names_list.append(file.get_text())\n print(contents_list)\n return contents_list\n \ndef main():\n website = input(\"Enter the website you want to download file from: \")\n div = input(\"Enter the div/span (be as specific as you can): \")\n classTag = input(\"Enter the class/id tag you want to extract link from: \")\n className = input(\"Enter the class/id name: \")\n specific1 = input(\"Enter specific tag a, li, : \")\n specific2 = input(\"Enter specific tag inside specific1 : \")\n\n # download the content\n contents = downloader(website, div, classTag, className, specific1, specific2)\n print(contents)\n\nmain()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for file in glob.glob(pwd + '/*spectrum.json'):
subj_name = os.path.basename(file)[0:6]
subj_list.append(subj_name)
df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)
<|reserved_special_token_0|>
for tract in all_tracts:
corr = np.zeros([len(subj_list), len(subj_list)])
for num in range(len(subj_list)):
for num2 in range(len(subj_list)):
corrval, pval = pearsonr(df_dict[subj_list[num]][tract],
df_dict[subj_list[num2]][tract])
corr[num, num2] = corrval
all_corrs.append(corr)
ax = fig.add_subplot(5, 4, fig_num)
ax.set_aspect('equal')
ax.set_title(tract)
im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.
cm.viridis, aspect='equal')
fig_num += 1
<|reserved_special_token_0|>
plt.colorbar(im, cax)
plt.savefig('alltractcorrelations.png', bbox_inches='tight')
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
pwd = os.getcwd()
df_dict = {}
subj_list = []
for file in glob.glob(pwd + '/*spectrum.json'):
subj_name = os.path.basename(file)[0:6]
subj_list.append(subj_name)
df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)
all_tracts = list(df_dict[subj_list[0]])[:-1]
fig = plt.figure(figsize=(18, 18))
all_corrs = []
fig_num = 1
for tract in all_tracts:
corr = np.zeros([len(subj_list), len(subj_list)])
for num in range(len(subj_list)):
for num2 in range(len(subj_list)):
corrval, pval = pearsonr(df_dict[subj_list[num]][tract],
df_dict[subj_list[num2]][tract])
corr[num, num2] = corrval
all_corrs.append(corr)
ax = fig.add_subplot(5, 4, fig_num)
ax.set_aspect('equal')
ax.set_title(tract)
im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.
cm.viridis, aspect='equal')
fig_num += 1
cax = fig.add_axes([0.9, 0.1, 0.03, 0.8])
plt.colorbar(im, cax)
plt.savefig('alltractcorrelations.png', bbox_inches='tight')
plt.show()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import pandas as pd
import numpy as np
from scipy.stats.stats import pearsonr
import matplotlib.pylab as plt
import glob
import os
# Working directory is expected to contain the per-subject
# "*spectrum.json" files produced upstream.
pwd = os.getcwd()
df_dict = {}
subj_list = []
# Load one DataFrame per subject, keyed by the 6-character subject ID
# taken from the start of the file name.
for file in glob.glob(pwd + '/*spectrum.json'):
    subj_name = os.path.basename(file)[0:6]
    subj_list.append(subj_name)
    df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)
# Tract names are the DataFrame columns; the last column is dropped
# (presumably a non-tract column -- TODO confirm against the JSON schema).
all_tracts = list(df_dict[subj_list[0]])[:-1]
fig = plt.figure(figsize=(18, 18))
all_corrs = []
fig_num = 1
# One subplot per tract (5x4 grid): an N-subject x N-subject matrix of
# Pearson correlations between the subjects' values for that tract.
for tract in all_tracts:
    corr = np.zeros([len(subj_list), len(subj_list)])
    for num in range(len(subj_list)):
        for num2 in range(len(subj_list)):
            corrval, pval = pearsonr(df_dict[subj_list[num]][tract],
                df_dict[subj_list[num2]][tract])
            corr[num, num2] = corrval
    all_corrs.append(corr)
    ax = fig.add_subplot(5, 4, fig_num)
    ax.set_aspect('equal')
    ax.set_title(tract)
    # Fixed 0..1 color scale so all subplots are directly comparable.
    im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.
        cm.viridis, aspect='equal')
    fig_num += 1
# Single shared colorbar axis along the right edge of the figure.
cax = fig.add_axes([0.9, 0.1, 0.03, 0.8])
plt.colorbar(im, cax)
plt.savefig('alltractcorrelations.png', bbox_inches='tight')
plt.show()
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 11 11:11:32 2017
@author: lindseykitchell
"""
import pandas as pd
import numpy as np
from scipy.stats.stats import pearsonr
import matplotlib.pylab as plt
import glob
import os
pwd = os.getcwd()
df_dict = {}
subj_list = []
for file in glob.glob(pwd + "/*spectrum.json"):
subj_name = os.path.basename(file)[0:6]
subj_list.append(subj_name)
df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)
all_tracts = list(df_dict[subj_list[0]])[:-1]
fig = plt.figure(figsize=(18,18))
all_corrs = []
fig_num = 1
for tract in all_tracts:
corr = np.zeros([len(subj_list), len(subj_list)])
for num in range(len(subj_list)):
for num2 in range(len(subj_list)):
corrval, pval = pearsonr(df_dict[subj_list[num]][tract], df_dict[subj_list[num2]][tract])
corr[num, num2] = corrval
all_corrs.append(corr)
ax = fig.add_subplot(5,4,fig_num)
ax.set_aspect('equal')
ax.set_title(tract)
im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.cm.viridis, aspect='equal')
#ocean hot
fig_num += 1
cax = fig.add_axes([0.9, 0.1, 0.03, 0.8])
plt.colorbar(im, cax)
plt.savefig('alltractcorrelations.png', bbox_inches='tight')
plt.show()
|
flexible
|
{
"blob_id": "f78f8f560b7eb70232658be762e2058535a68122",
"index": 9086,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor file in glob.glob(pwd + '/*spectrum.json'):\n subj_name = os.path.basename(file)[0:6]\n subj_list.append(subj_name)\n df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)\n<mask token>\nfor tract in all_tracts:\n corr = np.zeros([len(subj_list), len(subj_list)])\n for num in range(len(subj_list)):\n for num2 in range(len(subj_list)):\n corrval, pval = pearsonr(df_dict[subj_list[num]][tract],\n df_dict[subj_list[num2]][tract])\n corr[num, num2] = corrval\n all_corrs.append(corr)\n ax = fig.add_subplot(5, 4, fig_num)\n ax.set_aspect('equal')\n ax.set_title(tract)\n im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.\n cm.viridis, aspect='equal')\n fig_num += 1\n<mask token>\nplt.colorbar(im, cax)\nplt.savefig('alltractcorrelations.png', bbox_inches='tight')\nplt.show()\n",
"step-3": "<mask token>\npwd = os.getcwd()\ndf_dict = {}\nsubj_list = []\nfor file in glob.glob(pwd + '/*spectrum.json'):\n subj_name = os.path.basename(file)[0:6]\n subj_list.append(subj_name)\n df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)\nall_tracts = list(df_dict[subj_list[0]])[:-1]\nfig = plt.figure(figsize=(18, 18))\nall_corrs = []\nfig_num = 1\nfor tract in all_tracts:\n corr = np.zeros([len(subj_list), len(subj_list)])\n for num in range(len(subj_list)):\n for num2 in range(len(subj_list)):\n corrval, pval = pearsonr(df_dict[subj_list[num]][tract],\n df_dict[subj_list[num2]][tract])\n corr[num, num2] = corrval\n all_corrs.append(corr)\n ax = fig.add_subplot(5, 4, fig_num)\n ax.set_aspect('equal')\n ax.set_title(tract)\n im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.\n cm.viridis, aspect='equal')\n fig_num += 1\ncax = fig.add_axes([0.9, 0.1, 0.03, 0.8])\nplt.colorbar(im, cax)\nplt.savefig('alltractcorrelations.png', bbox_inches='tight')\nplt.show()\n",
"step-4": "<mask token>\nimport pandas as pd\nimport numpy as np\nfrom scipy.stats.stats import pearsonr\nimport matplotlib.pylab as plt\nimport glob\nimport os\npwd = os.getcwd()\ndf_dict = {}\nsubj_list = []\nfor file in glob.glob(pwd + '/*spectrum.json'):\n subj_name = os.path.basename(file)[0:6]\n subj_list.append(subj_name)\n df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)\nall_tracts = list(df_dict[subj_list[0]])[:-1]\nfig = plt.figure(figsize=(18, 18))\nall_corrs = []\nfig_num = 1\nfor tract in all_tracts:\n corr = np.zeros([len(subj_list), len(subj_list)])\n for num in range(len(subj_list)):\n for num2 in range(len(subj_list)):\n corrval, pval = pearsonr(df_dict[subj_list[num]][tract],\n df_dict[subj_list[num2]][tract])\n corr[num, num2] = corrval\n all_corrs.append(corr)\n ax = fig.add_subplot(5, 4, fig_num)\n ax.set_aspect('equal')\n ax.set_title(tract)\n im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.\n cm.viridis, aspect='equal')\n fig_num += 1\ncax = fig.add_axes([0.9, 0.1, 0.03, 0.8])\nplt.colorbar(im, cax)\nplt.savefig('alltractcorrelations.png', bbox_inches='tight')\nplt.show()\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Jul 11 11:11:32 2017\n\n@author: lindseykitchell\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nfrom scipy.stats.stats import pearsonr\nimport matplotlib.pylab as plt\nimport glob\nimport os\n\npwd = os.getcwd()\n\ndf_dict = {}\nsubj_list = []\nfor file in glob.glob(pwd + \"/*spectrum.json\"):\n subj_name = os.path.basename(file)[0:6]\n subj_list.append(subj_name)\n df_dict[os.path.basename(file)[0:6]] = pd.read_json(file)\n \nall_tracts = list(df_dict[subj_list[0]])[:-1] \n \n \n \nfig = plt.figure(figsize=(18,18))\nall_corrs = []\nfig_num = 1\nfor tract in all_tracts:\n corr = np.zeros([len(subj_list), len(subj_list)])\n for num in range(len(subj_list)):\n for num2 in range(len(subj_list)):\n corrval, pval = pearsonr(df_dict[subj_list[num]][tract], df_dict[subj_list[num2]][tract])\n corr[num, num2] = corrval\n all_corrs.append(corr)\n ax = fig.add_subplot(5,4,fig_num)\n ax.set_aspect('equal')\n ax.set_title(tract)\n im = ax.imshow(corr, interpolation='nearest', vmin=0, vmax=1, cmap=plt.cm.viridis, aspect='equal')\n #ocean hot \n fig_num += 1\ncax = fig.add_axes([0.9, 0.1, 0.03, 0.8])\nplt.colorbar(im, cax)\nplt.savefig('alltractcorrelations.png', bbox_inches='tight')\nplt.show()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for i in range(2, N + 1):
s.add(i)
for num in sorted(s):
k = num + num
while k <= N:
if k in s:
s.remove(k)
k += num
print('Primes:', end=' ')
for num in sorted(s):
print(num, end=' ')
<|reserved_special_token_1|>
# Sieve of Eratosthenes over a set: start with every candidate 2..N and
# strike out the multiples of each surviving number.
N = int(input('Max value N? '))
s = set()
for i in range(2, N + 1):
    s.add(i)
for num in sorted(s):
    # sorted(s) snapshots the set up front, so removing members inside
    # the loop is safe. Already-removed composites are revisited
    # harmlessly (their multiples were removed earlier).
    k = num + num
    while k <= N:
        if k in s:
            s.remove(k)
        k += num
print('Primes:', end=' ')
for num in sorted(s):
    print(num, end=' ')
<|reserved_special_token_1|>
N = int(input("Max value N? "))
# Sieve of Eratosthenes: keep a pool of candidates 2..N and discard
# every multiple of each number that survives.
candidates = set(range(2, N + 1))
for base in sorted(candidates):
    for multiple in range(base * 2, N + 1, base):
        candidates.discard(multiple)
print("Primes:", end = " ")
for prime in sorted(candidates):
    print(prime, end = " ")
|
flexible
|
{
"blob_id": "bf5422792533f85967a5573d9e6f370a7967a914",
"index": 120,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(2, N + 1):\n s.add(i)\nfor num in sorted(s):\n k = num + num\n while k <= N:\n if k in s:\n s.remove(k)\n k += num\nprint('Primes:', end=' ')\nfor num in sorted(s):\n print(num, end=' ')\n",
"step-3": "N = int(input('Max value N? '))\ns = set()\nfor i in range(2, N + 1):\n s.add(i)\nfor num in sorted(s):\n k = num + num\n while k <= N:\n if k in s:\n s.remove(k)\n k += num\nprint('Primes:', end=' ')\nfor num in sorted(s):\n print(num, end=' ')\n",
"step-4": "N = int(input(\"Max value N? \"))\ns = set()\nfor i in range(2, N + 1):\n s.add(i)\nfor num in sorted(s):\n k = num + num\n while k <= N:\n if k in s:\n s.remove(k)\n k += num\nprint(\"Primes:\", end = \" \")\nfor num in sorted(s):\n print(num, end = \" \")\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from telethon import events
from var import Var
from pathlib import Path
from ub.config import Config
import re, logging, inspect, sys, json, os
from asyncio import create_subprocess_shell as asyncsubshell, subprocess as asyncsub
from os import remove
from time import gmtime, strftime
from traceback import format_exc
from typing import List
from ub.javes_main.heroku_var import *
from ub import *
from sys import *
from telethon.errors.rpcerrorlist import PhoneNumberInvalidError
from telethon import TelegramClient, functions, types
from telethon.tl.types import InputMessagesFilterDocument
import traceback
import asyncio, time, io, math, os, logging, asyncio, shutil, re
def zzaacckkyy(**args):
    """Decorator factory registering a handler on all telegram clients.

    Massages the keyword arguments into a telethon ``events.NewMessage``
    filter, records the command pattern in the global CMD_LIST under the
    calling plugin's file name, and returns a decorator that attaches
    the handler to ``bot`` (and ``client2``/``client3`` when present).
    """
    # Ignore messages sent via inline bots.
    args["func"] = lambda e: e.via_bot_id is None
    # The plugin name is taken from the *caller's* file name.
    stack = inspect.stack()
    previous_stack_frame = stack[1]
    file_test = Path(previous_stack_frame.filename)
    file_test = file_test.stem.replace(".py", "")
    pattern = args.get("pattern", None)
    allow_sudo = args.get("allow_sudo", None)
    allow_edited_updates = args.get('allow_edited_updates', False)
    # Default to outgoing-only; an explicit incoming=True flips it.
    args["incoming"] = args.get("incoming", False)
    args["outgoing"] = True
    # trigger_on_inline is not a NewMessage argument -- drop it.
    if "trigger_on_inline" in args:
        del args['trigger_on_inline']
    if bool(args["incoming"]):
        args["outgoing"] = False
    try:
        # Make the pattern case-insensitive unless it already is.
        if pattern is not None and not pattern.startswith('(?i)'):
            args['pattern'] = '(?i)' + pattern
    except:
        pass
    reg = re.compile('(.*)')
    if not pattern == None:
        try:
            # Record a cleaned-up command name (regex anchors and
            # escapes stripped) for the plugin's help listing.
            cmd = re.search(reg, pattern)
            try:
                cmd = cmd.group(1).replace("$", "").replace("\\", "").replace("^", "")
            except:
                pass
            try:
                CMD_LIST[file_test].append(cmd)
            except:
                # First command for this plugin: create the list entry.
                CMD_LIST.update({file_test: [cmd]})
        except:
            pass
    if allow_sudo:
        # Sudo users may trigger the handler with incoming messages.
        args["from_users"] = list(Var.SUDO_USERS)
        args["incoming"] = True
        del allow_sudo
        try:
            del args["allow_sudo"]
        except:
            pass
    # allow_edited_updates is accepted but not forwarded to telethon.
    if "allow_edited_updates" in args:
        del args['allow_edited_updates']
    def decorator(func):
        # Attach the handler to every configured client.
        bot.add_event_handler(func, events.NewMessage(**args))
        if client2:
            client2.add_event_handler(func, events.NewMessage(**args))
        if client3:
            client3.add_event_handler(func, events.NewMessage(**args))
        try:
            LOAD_PLUG[file_test].append(func)
        except:
            LOAD_PLUG.update({file_test: [func]})
        return func
    return decorator
async def a():
    """Download every document from the ``cIient`` chat into ub/modules/.

    Counts the chat's documents, then downloads each message's media in
    turn. NOTE(review): relies on module-level ``bot``, ``client``,
    ``borg`` and ``cIient`` (capital-i) defined elsewhere -- presumably
    the documents are plugin .py files; confirm against the caller.
    """
    test1 = await bot.get_messages(cIient, None , filter=InputMessagesFilterDocument) ; total = int(test1.total) ; total_doxx = range(0, total)
    for ixo in total_doxx:
        mxo = test1[ixo].id ; await client.download_media(await borg.get_messages(cIient, ids=mxo), "ub/modules/")
def load_module(shortname):
    """Import (or re-import) the plugin ``ub/modules/<shortname>.py``.

    Names starting with "__" are skipped entirely; names ending in "_"
    are executed without any injected globals; everything else gets the
    bot client, config and logger injected before execution.
    """
    if shortname.startswith("__"):
        pass
    elif shortname.endswith("_"):
        # Trailing-underscore plugins are loaded "raw": executed without
        # the bot/Config attribute injection done in the else branch.
        import ub.events
        import sys
        import importlib
        from pathlib import Path
        path = Path(f"ub/modules/{shortname}.py")
        name = "ub.modules.{}".format(shortname)
        spec = importlib.util.spec_from_file_location(name, path)
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        print("Successfully (re)imported "+shortname)
    else:
        import ub.events
        import sys
        import importlib
        from pathlib import Path
        path = Path(f"ub/modules/{shortname}.py")
        name = "ub.modules.{}".format(shortname)
        spec = importlib.util.spec_from_file_location(name, path)
        mod = importlib.util.module_from_spec(spec)
        # Inject the userbot environment so the plugin can use these
        # names without importing them itself.
        mod.bot = bot
        mod.tgbot = bot.tgbot
        mod.Var = Var
        mod.command = command
        mod.logger = logging.getLogger(shortname)
        # Alias kept for plugins written against uniborg.
        sys.modules["uniborg.util"] = ub.events
        mod.Config = Config
        mod.borg = bot
        sys.modules["ub.events"] = ub.events
        # Execute the plugin and register it so re-imports replace it.
        spec.loader.exec_module(mod)
        sys.modules["ub.modules."+shortname] = mod
        print("Successfully (re)imported "+shortname)
def remove_plugin(shortname):
    """Detach every event handler registered by plugin *shortname*.

    Prefers the bookkeeping in ``LOAD_PLUG``; if the plugin is not tracked
    there (or removal fails), falls back to scanning telethon's internal
    event-builder list for callbacks whose ``__module__`` matches
    ``ub.modules.<shortname>``.

    Raises:
        ValueError: if the handlers could not be removed (original cause
            attached via exception chaining).
    """
    try:
        try:
            for handler in LOAD_PLUG[shortname]:
                bot.remove_event_handler(handler)
            del LOAD_PLUG[shortname]
        except Exception:
            # Not tracked (KeyError) or removal failed: fall back to scanning
            # the client's internal builder list by module name.
            name = f"ub.modules.{shortname}"
            # Iterate in reverse so deletions don't shift pending indices.
            for i in reversed(range(len(bot._event_builders))):
                ev, cb = bot._event_builders[i]
                if cb.__module__ == name:
                    del bot._event_builders[i]
    except Exception as exc:
        # Keep the historical contract (callers catch ValueError) but chain
        # the original cause instead of silently discarding it.
        raise ValueError(f"could not remove plugin {shortname!r}") from exc
def rekcah05(pattern=None, **args):
    """Build a telethon ``events.NewMessage`` filter for an admin command.

    The command (``"." + pattern``) is recorded in ``CMD_LIST`` under the
    calling plugin's file stem.  ``allow_sudo`` routes the command to
    ``Config.SUDO_USERS`` as incoming messages; otherwise it is outgoing-only.

    Fixes over the previous version:
    * regex fragments are raw strings (the old literals used invalid escape
      sequences that only worked by accident);
    * ``allow_sudo`` and ``allow_edited_updates`` are always stripped from
      ``args`` before reaching ``events.NewMessage`` (previously a
      present-but-falsy value leaked through and raised ``TypeError``);
    * dead locals removed.
    """
    # Ignore messages produced via inline bots.
    args["func"] = lambda e: e.via_bot_id is None
    # Plugin name = stem of the file that invoked this factory.
    caller_frame = inspect.stack()[1]
    file_test = Path(caller_frame.filename).stem.replace(".py", "")
    # Pop (not get) so the custom flag never reaches telethon.
    allow_sudo = args.pop("allow_sudo", False)
    if pattern is not None:
        if pattern.startswith(r"\#"):
            # Caller supplied an already-escaped hashtag pattern: use verbatim.
            args["pattern"] = re.compile(pattern)
        else:
            # Default command prefix is a literal dot.
            args["pattern"] = re.compile(r"\." + pattern)
        cmd = "." + pattern
        CMD_LIST.setdefault(file_test, []).append(cmd)
    # Accepted for API compatibility; telethon does not understand it.
    args.pop("trigger_on_inline", None)
    args["outgoing"] = True
    if allow_sudo:
        args["from_users"] = list(Config.SUDO_USERS)
        args["incoming"] = True
    elif "incoming" in args and not args["incoming"]:
        args["outgoing"] = True
    # Accepted for API compatibility; telethon does not understand it.
    args.pop("allow_edited_updates", None)
    return events.NewMessage(**args)
def javess(**args):
    """Decorator factory that registers an async handler on bot/client2/client3.

    Custom keyword options (all stripped from ``args`` before they reach
    telethon's ``events.NewMessage``): ``pattern`` (prefixed with ``(?i)``
    unless already present), ``disable_edited``, ``groups_only``,
    ``trigger_on_fwd``, ``trigger_on_inline`` and ``disable_errors``.
    The command text is recorded in ``CMD_LIST`` keyed by the calling
    plugin's file stem.  On handler failure (unless ``disable_errors``) a
    diagnostic log file is written and uploaded to the log chat.
    """
    # Ignore updates produced via inline bots.
    args["func"] = lambda e: e.via_bot_id is None
    stack = inspect.stack()
    previous_stack_frame = stack[1]
    # Plugin name = stem of the file that invoked this decorator.
    file_test = Path(previous_stack_frame.filename)
    file_test = file_test.stem.replace(".py", "")
    pattern = args.get("pattern", None)
    # NOTE(review): exact duplicate of the line above — harmless but redundant.
    pattern = args.get('pattern', None)
    disable_edited = args.get('disable_edited', True)
    groups_only = args.get('groups_only', False)
    trigger_on_fwd = args.get('trigger_on_fwd', False)
    trigger_on_inline = args.get('trigger_on_inline', False)
    disable_errors = args.get('disable_errors', False)
    reg = re.compile('(.*)')
    if not pattern == None:
        try:
            # Strip regex anchors/escapes to record the bare command text.
            cmd = re.search(reg, pattern)
            try:
                cmd = cmd.group(1).replace("$", "").replace("\\", "").replace("^", "")
            except:
                pass
            try:
                CMD_LIST[file_test].append(cmd)
            except:
                # First command for this plugin: create its list.
                CMD_LIST.update({file_test: [cmd]})
        except:
            pass
    # Make the pattern case-insensitive unless the caller already did.
    if pattern is not None and not pattern.startswith('(?i)'):
        args['pattern'] = '(?i)' + pattern
    # Remove custom options; telethon's NewMessage would reject them.
    if "trigger_on_inline" in args:
        del args['trigger_on_inline']
    if "disable_edited" in args:
        del args['disable_edited']
    if "groups_only" in args:
        del args['groups_only']
    if "disable_errors" in args:
        del args['disable_errors']
    if "trigger_on_fwd" in args:
        del args['trigger_on_fwd']
    def decorator(func):
        async def wrapper(check):
            # NOTE(review): send_to is only bound when LOGSPAMMER is truthy;
            # the send_file call below would raise NameError otherwise
            # (swallowed by its bare except) — confirm against full file.
            if LOGSPAMMER:
                send_to = BOTLOG_CHATID
            # Skip forwarded / inline-bot messages unless explicitly enabled.
            if not trigger_on_fwd and check.fwd_from:
                return
            if check.via_bot_id and not trigger_on_inline:
                return
            if groups_only and not check.is_group:
                await check.respond("`I don't think this is a group.`")
                return
            try:
                await func(check)
            except events.StopPropagation:
                # Deliberate control flow from the handler: re-raise untouched.
                raise events.StopPropagation
            except KeyboardInterrupt:
                pass
            except BaseException:
                # Build and upload a diagnostic report unless suppressed.
                if not disable_errors:
                    date = strftime("%Y-%m-%d %H:%M:%S", gmtime())
                    text = "**JAVES ERROR REPORT**\n"
                    text += "Send this to @errorsender_bot if you cant find issue\n"
                    ftext = "========== DISCLAIMER =========="
                    ftext += "\nThis file uploaded only logchat,"
                    ftext += "\nreport to admin this error if you cant find any issue"
                    ftext += "\n---------------------------------\n"
                    ftext += "================================\n\n"
                    ftext += "--------BEGIN LOG--------\n"
                    ftext += "\nDate: " + date
                    ftext += "\nChat ID: " + str(check.chat_id)
                    ftext += "\nSender ID: " + str(check.sender_id)
                    ftext += "\n\nEvent Trigger:\n"
                    ftext += str(check.text)
                    ftext += "\n\nTraceback info:\n"
                    ftext += str(format_exc())
                    ftext += "\n\nError text:\n"
                    ftext += str(sys.exc_info()[1])
                    ftext += "\n\n--------END LOG--------"
                    # Append recent git history to help locate the regression.
                    command = "git log --pretty=format:\"%an: %s\" -10"
                    ftext += "\n\n\nLast 10 commits:\n"
                    process = await asyncsubshell(command,
                                                  stdout=asyncsub.PIPE,
                                                  stderr=asyncsub.PIPE)
                    stdout, stderr = await process.communicate()
                    result = str(stdout.decode().strip()) \
                        + str(stderr.decode().strip())
                    ftext += result
                    file = open("javes_error.log", "w+")
                    file.write(ftext)
                    file.close()
                    try:
                        await check.client.send_file(send_to, "javes_error.log", caption=text)
                        remove("javes_error.log")
                    except:
                        pass
                else:
                    pass
        # Register on the main client (and edited messages unless disabled),
        # plus the optional secondary clients.
        if not disable_edited:
            bot.add_event_handler(wrapper, events.MessageEdited(**args))
        bot.add_event_handler(wrapper, events.NewMessage(**args))
        if client2:
            client2.add_event_handler(wrapper, events.NewMessage(**args))
        if client3:
            client3.add_event_handler(wrapper, events.NewMessage(**args))
        return wrapper
    return decorator
# Historical public aliases kept for plugin compatibility.
borg = bot
javes = bot
admin_cmd = rekcah05
command = zzaacckkyy
register = javess
javes05 = javess
def errors_handler(func):
    """Decorator: run async handler *func*, suppressing any exception.

    The suppression is deliberate best-effort behavior (a failing handler
    must not kill the event loop); the wrapper returns ``None`` on failure.
    Improvement: ``functools.wraps`` now preserves *func*'s name, docstring
    and module metadata on the wrapper.
    """
    from functools import wraps

    @wraps(func)
    async def wrapper(event):
        try:
            return await func(event)
        except Exception:
            # Deliberately swallowed: best-effort handler execution.
            pass
    return wrapper
async def progress(current, total, event, start, type_of_ps, file_name=None):
    """Edit *event* with a 10-segment progress bar, byte counts and an ETA.

    Throttled: only updates roughly every 10 seconds of elapsed time, or
    when the transfer completes (``current == total``).
    """
    now = time.time()
    diff = now - start
    if round(diff % 10.00) == 0 or current == total:
        percentage = current * 100 / total
        speed = current / diff
        elapsed_time = round(diff) * 1000
        time_to_completion = round((total - current) / speed) * 1000
        estimated_total_time = elapsed_time + time_to_completion
        filled = math.floor(percentage / 10)
        bar = "█" * filled + "░" * (10 - filled)
        progress_str = "[{}] {}%\n".format(bar, round(percentage, 2))
        tmp = progress_str + "{} of {}\nETA: {}".format(
            humanbytes(current),
            humanbytes(total),
            time_formatter(estimated_total_time),
        )
        if file_name:
            await event.edit("{}\nFile Name: `{}`\n{}".format(
                type_of_ps, file_name, tmp))
        else:
            await event.edit("{}\n{}".format(type_of_ps, tmp))
def humanbytes(size):
    """Render a byte count as a human-readable binary-prefixed string.

    Returns "" for falsy input (including 0).  Note: a value exactly equal
    to a power boundary (e.g. 1024) is NOT promoted to the next unit.
    """
    if not size:
        return ""
    units = ("", "Ki", "Mi", "Gi", "Ti")
    step = 2 ** 10
    exponent = 0
    while size > step:
        size /= step
        exponent += 1
    return "{} {}B".format(round(size, 2), units[exponent])
def time_formatter(milliseconds: int) -> str:
    """Convert a millisecond count into e.g. ``"1 day(s), 2 hour(s)"``.

    Zero-valued components are omitted; an input of 0 yields "".
    """
    seconds, milliseconds = divmod(int(milliseconds), 1000)
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    pieces = []
    for amount, unit in (
        (days, "day"),
        (hours, "hour"),
        (minutes, "minute"),
        (seconds, "second"),
        (milliseconds, "millisecond"),
    ):
        if amount:
            pieces.append("{} {}(s)".format(amount, unit))
    return ", ".join(pieces)
class Loader():
    """Minimal helper: registers *func* as a ``NewMessage`` handler on ``bot``."""

    def __init__(self, func=None, **args):
        # Expose the global settings object to the loaded plugin.
        self.Var = Var
        event_spec = events.NewMessage(**args)
        bot.add_event_handler(func, event_spec)
# Word -> meaning lookup table for the `meaning` helper.  Use a context
# manager so the file handle is closed (the old `json.load(open(...))`
# leaked it).
with open("ub/javes_main/extra/meaning.json") as _meaning_fp:
    data = json.load(_meaning_fp)
def meaning(w):
    """Return the stored definition for word *w* (case-insensitive).

    Returns None for unknown words, matching the original's implicit
    fall-through.
    """
    return data.get(w.lower())
|
normal
|
{
"blob_id": "4b672ad420bb67b8e2726102939ed6d369683150",
"index": 7267,
"step-1": "<mask token>\n\n\ndef load_module(shortname):\n if shortname.startswith('__'):\n pass\n elif shortname.endswith('_'):\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n print('Successfully (re)imported ' + shortname)\n else:\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n mod.bot = bot\n mod.tgbot = bot.tgbot\n mod.Var = Var\n mod.command = command\n mod.logger = logging.getLogger(shortname)\n sys.modules['uniborg.util'] = ub.events\n mod.Config = Config\n mod.borg = bot\n sys.modules['ub.events'] = ub.events\n spec.loader.exec_module(mod)\n sys.modules['ub.modules.' + shortname] = mod\n print('Successfully (re)imported ' + shortname)\n\n\n<mask token>\n\n\ndef rekcah05(pattern=None, **args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n allow_sudo = args.get('allow_sudo', False)\n if pattern is not None:\n if pattern.startswith('\\\\#'):\n args['pattern'] = re.compile(pattern)\n else:\n args['pattern'] = re.compile('\\\\.' + pattern)\n cmd = '.' 
+ pattern\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n args['outgoing'] = True\n if allow_sudo:\n args['from_users'] = list(Config.SUDO_USERS)\n args['incoming'] = True\n del args['allow_sudo']\n elif 'incoming' in args and not args['incoming']:\n args['outgoing'] = True\n allow_edited_updates = False\n if 'allow_edited_updates' in args and args['allow_edited_updates']:\n allow_edited_updates = args['allow_edited_updates']\n del args['allow_edited_updates']\n is_message_enabled = True\n return events.NewMessage(**args)\n\n\n<mask token>\n\n\nclass Loader:\n\n def __init__(self, func=None, **args):\n self.Var = Var\n bot.add_event_handler(func, events.NewMessage(**args))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_module(shortname):\n if shortname.startswith('__'):\n pass\n elif shortname.endswith('_'):\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n print('Successfully (re)imported ' + shortname)\n else:\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n mod.bot = bot\n mod.tgbot = bot.tgbot\n mod.Var = Var\n mod.command = command\n mod.logger = logging.getLogger(shortname)\n sys.modules['uniborg.util'] = ub.events\n mod.Config = Config\n mod.borg = bot\n sys.modules['ub.events'] = ub.events\n spec.loader.exec_module(mod)\n sys.modules['ub.modules.' + shortname] = mod\n print('Successfully (re)imported ' + shortname)\n\n\n<mask token>\n\n\ndef rekcah05(pattern=None, **args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n allow_sudo = args.get('allow_sudo', False)\n if pattern is not None:\n if pattern.startswith('\\\\#'):\n args['pattern'] = re.compile(pattern)\n else:\n args['pattern'] = re.compile('\\\\.' + pattern)\n cmd = '.' 
+ pattern\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n args['outgoing'] = True\n if allow_sudo:\n args['from_users'] = list(Config.SUDO_USERS)\n args['incoming'] = True\n del args['allow_sudo']\n elif 'incoming' in args and not args['incoming']:\n args['outgoing'] = True\n allow_edited_updates = False\n if 'allow_edited_updates' in args and args['allow_edited_updates']:\n allow_edited_updates = args['allow_edited_updates']\n del args['allow_edited_updates']\n is_message_enabled = True\n return events.NewMessage(**args)\n\n\n<mask token>\n\n\ndef humanbytes(size):\n if not size:\n return ''\n power = 2 ** 10\n raised_to_pow = 0\n dict_power_n = {(0): '', (1): 'Ki', (2): 'Mi', (3): 'Gi', (4): 'Ti'}\n while size > power:\n size /= power\n raised_to_pow += 1\n return str(round(size, 2)) + ' ' + dict_power_n[raised_to_pow] + 'B'\n\n\ndef time_formatter(milliseconds: int) ->str:\n seconds, milliseconds = divmod(int(milliseconds), 1000)\n minutes, seconds = divmod(seconds, 60)\n hours, minutes = divmod(minutes, 60)\n days, hours = divmod(hours, 24)\n tmp = (str(days) + ' day(s), ' if days else '') + (str(hours) +\n ' hour(s), ' if hours else '') + (str(minutes) + ' minute(s), ' if\n minutes else '') + (str(seconds) + ' second(s), ' if seconds else ''\n ) + (str(milliseconds) + ' millisecond(s), ' if milliseconds else '')\n return tmp[:-2]\n\n\nclass Loader:\n\n def __init__(self, func=None, **args):\n self.Var = Var\n bot.add_event_handler(func, events.NewMessage(**args))\n\n\n<mask token>\n\n\ndef meaning(w):\n w = w.lower()\n if w in data:\n return data[w]\n",
"step-3": "<mask token>\n\n\ndef load_module(shortname):\n if shortname.startswith('__'):\n pass\n elif shortname.endswith('_'):\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n print('Successfully (re)imported ' + shortname)\n else:\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n mod.bot = bot\n mod.tgbot = bot.tgbot\n mod.Var = Var\n mod.command = command\n mod.logger = logging.getLogger(shortname)\n sys.modules['uniborg.util'] = ub.events\n mod.Config = Config\n mod.borg = bot\n sys.modules['ub.events'] = ub.events\n spec.loader.exec_module(mod)\n sys.modules['ub.modules.' + shortname] = mod\n print('Successfully (re)imported ' + shortname)\n\n\ndef remove_plugin(shortname):\n try:\n try:\n for i in LOAD_PLUG[shortname]:\n bot.remove_event_handler(i)\n del LOAD_PLUG[shortname]\n except:\n name = f'ub.modules.{shortname}'\n for i in reversed(range(len(bot._event_builders))):\n ev, cb = bot._event_builders[i]\n if cb.__module__ == name:\n del bot._event_builders[i]\n except:\n raise ValueError\n\n\ndef rekcah05(pattern=None, **args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n allow_sudo = args.get('allow_sudo', False)\n if pattern is not None:\n if pattern.startswith('\\\\#'):\n args['pattern'] = re.compile(pattern)\n else:\n args['pattern'] = re.compile('\\\\.' + pattern)\n cmd = '.' 
+ pattern\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n args['outgoing'] = True\n if allow_sudo:\n args['from_users'] = list(Config.SUDO_USERS)\n args['incoming'] = True\n del args['allow_sudo']\n elif 'incoming' in args and not args['incoming']:\n args['outgoing'] = True\n allow_edited_updates = False\n if 'allow_edited_updates' in args and args['allow_edited_updates']:\n allow_edited_updates = args['allow_edited_updates']\n del args['allow_edited_updates']\n is_message_enabled = True\n return events.NewMessage(**args)\n\n\n<mask token>\n\n\ndef errors_handler(func):\n\n async def wrapper(event):\n try:\n return await func(event)\n except Exception:\n pass\n return wrapper\n\n\n<mask token>\n\n\ndef humanbytes(size):\n if not size:\n return ''\n power = 2 ** 10\n raised_to_pow = 0\n dict_power_n = {(0): '', (1): 'Ki', (2): 'Mi', (3): 'Gi', (4): 'Ti'}\n while size > power:\n size /= power\n raised_to_pow += 1\n return str(round(size, 2)) + ' ' + dict_power_n[raised_to_pow] + 'B'\n\n\ndef time_formatter(milliseconds: int) ->str:\n seconds, milliseconds = divmod(int(milliseconds), 1000)\n minutes, seconds = divmod(seconds, 60)\n hours, minutes = divmod(minutes, 60)\n days, hours = divmod(hours, 24)\n tmp = (str(days) + ' day(s), ' if days else '') + (str(hours) +\n ' hour(s), ' if hours else '') + (str(minutes) + ' minute(s), ' if\n minutes else '') + (str(seconds) + ' second(s), ' if seconds else ''\n ) + (str(milliseconds) + ' millisecond(s), ' if milliseconds else '')\n return tmp[:-2]\n\n\nclass Loader:\n\n def __init__(self, func=None, **args):\n self.Var = Var\n bot.add_event_handler(func, events.NewMessage(**args))\n\n\n<mask token>\n\n\ndef meaning(w):\n w = w.lower()\n if w in data:\n return data[w]\n",
"step-4": "<mask token>\n\n\ndef zzaacckkyy(**args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n pattern = args.get('pattern', None)\n allow_sudo = args.get('allow_sudo', None)\n allow_edited_updates = args.get('allow_edited_updates', False)\n args['incoming'] = args.get('incoming', False)\n args['outgoing'] = True\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n if bool(args['incoming']):\n args['outgoing'] = False\n try:\n if pattern is not None and not pattern.startswith('(?i)'):\n args['pattern'] = '(?i)' + pattern\n except:\n pass\n reg = re.compile('(.*)')\n if not pattern == None:\n try:\n cmd = re.search(reg, pattern)\n try:\n cmd = cmd.group(1).replace('$', '').replace('\\\\', '').replace(\n '^', '')\n except:\n pass\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n except:\n pass\n if allow_sudo:\n args['from_users'] = list(Var.SUDO_USERS)\n args['incoming'] = True\n del allow_sudo\n try:\n del args['allow_sudo']\n except:\n pass\n if 'allow_edited_updates' in args:\n del args['allow_edited_updates']\n\n def decorator(func):\n bot.add_event_handler(func, events.NewMessage(**args))\n if client2:\n client2.add_event_handler(func, events.NewMessage(**args))\n if client3:\n client3.add_event_handler(func, events.NewMessage(**args))\n try:\n LOAD_PLUG[file_test].append(func)\n except:\n LOAD_PLUG.update({file_test: [func]})\n return func\n return decorator\n\n\nasync def a():\n test1 = await bot.get_messages(cIient, None, filter=\n InputMessagesFilterDocument)\n total = int(test1.total)\n total_doxx = range(0, total)\n for ixo in total_doxx:\n mxo = test1[ixo].id\n await client.download_media(await borg.get_messages(cIient, ids=mxo\n ), 'ub/modules/')\n\n\ndef load_module(shortname):\n if shortname.startswith('__'):\n pass\n elif 
shortname.endswith('_'):\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n print('Successfully (re)imported ' + shortname)\n else:\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f'ub/modules/{shortname}.py')\n name = 'ub.modules.{}'.format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n mod.bot = bot\n mod.tgbot = bot.tgbot\n mod.Var = Var\n mod.command = command\n mod.logger = logging.getLogger(shortname)\n sys.modules['uniborg.util'] = ub.events\n mod.Config = Config\n mod.borg = bot\n sys.modules['ub.events'] = ub.events\n spec.loader.exec_module(mod)\n sys.modules['ub.modules.' + shortname] = mod\n print('Successfully (re)imported ' + shortname)\n\n\ndef remove_plugin(shortname):\n try:\n try:\n for i in LOAD_PLUG[shortname]:\n bot.remove_event_handler(i)\n del LOAD_PLUG[shortname]\n except:\n name = f'ub.modules.{shortname}'\n for i in reversed(range(len(bot._event_builders))):\n ev, cb = bot._event_builders[i]\n if cb.__module__ == name:\n del bot._event_builders[i]\n except:\n raise ValueError\n\n\ndef rekcah05(pattern=None, **args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n allow_sudo = args.get('allow_sudo', False)\n if pattern is not None:\n if pattern.startswith('\\\\#'):\n args['pattern'] = re.compile(pattern)\n else:\n args['pattern'] = re.compile('\\\\.' + pattern)\n cmd = '.' 
+ pattern\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n args['outgoing'] = True\n if allow_sudo:\n args['from_users'] = list(Config.SUDO_USERS)\n args['incoming'] = True\n del args['allow_sudo']\n elif 'incoming' in args and not args['incoming']:\n args['outgoing'] = True\n allow_edited_updates = False\n if 'allow_edited_updates' in args and args['allow_edited_updates']:\n allow_edited_updates = args['allow_edited_updates']\n del args['allow_edited_updates']\n is_message_enabled = True\n return events.NewMessage(**args)\n\n\ndef javess(**args):\n args['func'] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace('.py', '')\n pattern = args.get('pattern', None)\n pattern = args.get('pattern', None)\n disable_edited = args.get('disable_edited', True)\n groups_only = args.get('groups_only', False)\n trigger_on_fwd = args.get('trigger_on_fwd', False)\n trigger_on_inline = args.get('trigger_on_inline', False)\n disable_errors = args.get('disable_errors', False)\n reg = re.compile('(.*)')\n if not pattern == None:\n try:\n cmd = re.search(reg, pattern)\n try:\n cmd = cmd.group(1).replace('$', '').replace('\\\\', '').replace(\n '^', '')\n except:\n pass\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n except:\n pass\n if pattern is not None and not pattern.startswith('(?i)'):\n args['pattern'] = '(?i)' + pattern\n if 'trigger_on_inline' in args:\n del args['trigger_on_inline']\n if 'disable_edited' in args:\n del args['disable_edited']\n if 'groups_only' in args:\n del args['groups_only']\n if 'disable_errors' in args:\n del args['disable_errors']\n if 'trigger_on_fwd' in args:\n del args['trigger_on_fwd']\n\n def decorator(func):\n\n async def wrapper(check):\n if LOGSPAMMER:\n send_to = 
BOTLOG_CHATID\n if not trigger_on_fwd and check.fwd_from:\n return\n if check.via_bot_id and not trigger_on_inline:\n return\n if groups_only and not check.is_group:\n await check.respond(\"`I don't think this is a group.`\")\n return\n try:\n await func(check)\n except events.StopPropagation:\n raise events.StopPropagation\n except KeyboardInterrupt:\n pass\n except BaseException:\n if not disable_errors:\n date = strftime('%Y-%m-%d %H:%M:%S', gmtime())\n text = '**JAVES ERROR REPORT**\\n'\n text += (\n 'Send this to @errorsender_bot if you cant find issue\\n'\n )\n ftext = '========== DISCLAIMER =========='\n ftext += '\\nThis file uploaded only logchat,'\n ftext += (\n '\\nreport to admin this error if you cant find any issue'\n )\n ftext += '\\n---------------------------------\\n'\n ftext += '================================\\n\\n'\n ftext += '--------BEGIN LOG--------\\n'\n ftext += '\\nDate: ' + date\n ftext += '\\nChat ID: ' + str(check.chat_id)\n ftext += '\\nSender ID: ' + str(check.sender_id)\n ftext += '\\n\\nEvent Trigger:\\n'\n ftext += str(check.text)\n ftext += '\\n\\nTraceback info:\\n'\n ftext += str(format_exc())\n ftext += '\\n\\nError text:\\n'\n ftext += str(sys.exc_info()[1])\n ftext += '\\n\\n--------END LOG--------'\n command = 'git log --pretty=format:\"%an: %s\" -10'\n ftext += '\\n\\n\\nLast 10 commits:\\n'\n process = await asyncsubshell(command, stdout=asyncsub.\n PIPE, stderr=asyncsub.PIPE)\n stdout, stderr = await process.communicate()\n result = str(stdout.decode().strip()) + str(stderr.\n decode().strip())\n ftext += result\n file = open('javes_error.log', 'w+')\n file.write(ftext)\n file.close()\n try:\n await check.client.send_file(send_to,\n 'javes_error.log', caption=text)\n remove('javes_error.log')\n except:\n pass\n else:\n pass\n if not disable_edited:\n bot.add_event_handler(wrapper, events.MessageEdited(**args))\n bot.add_event_handler(wrapper, events.NewMessage(**args))\n if client2:\n client2.add_event_handler(wrapper, 
events.NewMessage(**args))\n if client3:\n client3.add_event_handler(wrapper, events.NewMessage(**args))\n return wrapper\n return decorator\n\n\nborg = javes = bot\nadmin_cmd = rekcah05\ncommand = zzaacckkyy\nregister = javes05 = javess\n\n\ndef errors_handler(func):\n\n async def wrapper(event):\n try:\n return await func(event)\n except Exception:\n pass\n return wrapper\n\n\nasync def progress(current, total, event, start, type_of_ps, file_name=None):\n now = time.time()\n diff = now - start\n if round(diff % 10.0) == 0 or current == total:\n percentage = current * 100 / total\n speed = current / diff\n elapsed_time = round(diff) * 1000\n time_to_completion = round((total - current) / speed) * 1000\n estimated_total_time = elapsed_time + time_to_completion\n progress_str = '[{0}{1}] {2}%\\n'.format(''.join(['█' for i in range\n (math.floor(percentage / 10))]), ''.join(['░' for i in range(10 -\n math.floor(percentage / 10))]), round(percentage, 2))\n tmp = progress_str + '{0} of {1}\\nETA: {2}'.format(humanbytes(\n current), humanbytes(total), time_formatter(estimated_total_time))\n if file_name:\n await event.edit('{}\\nFile Name: `{}`\\n{}'.format(type_of_ps,\n file_name, tmp))\n else:\n await event.edit('{}\\n{}'.format(type_of_ps, tmp))\n\n\ndef humanbytes(size):\n if not size:\n return ''\n power = 2 ** 10\n raised_to_pow = 0\n dict_power_n = {(0): '', (1): 'Ki', (2): 'Mi', (3): 'Gi', (4): 'Ti'}\n while size > power:\n size /= power\n raised_to_pow += 1\n return str(round(size, 2)) + ' ' + dict_power_n[raised_to_pow] + 'B'\n\n\ndef time_formatter(milliseconds: int) ->str:\n seconds, milliseconds = divmod(int(milliseconds), 1000)\n minutes, seconds = divmod(seconds, 60)\n hours, minutes = divmod(minutes, 60)\n days, hours = divmod(hours, 24)\n tmp = (str(days) + ' day(s), ' if days else '') + (str(hours) +\n ' hour(s), ' if hours else '') + (str(minutes) + ' minute(s), ' if\n minutes else '') + (str(seconds) + ' second(s), ' if seconds else ''\n ) + 
(str(milliseconds) + ' millisecond(s), ' if milliseconds else '')\n return tmp[:-2]\n\n\nclass Loader:\n\n def __init__(self, func=None, **args):\n self.Var = Var\n bot.add_event_handler(func, events.NewMessage(**args))\n\n\ndata = json.load(open('ub/javes_main/extra/meaning.json'))\n\n\ndef meaning(w):\n w = w.lower()\n if w in data:\n return data[w]\n",
"step-5": "from telethon import events\nfrom var import Var\nfrom pathlib import Path\nfrom ub.config import Config\nimport re, logging, inspect, sys, json, os\nfrom asyncio import create_subprocess_shell as asyncsubshell, subprocess as asyncsub\nfrom os import remove\nfrom time import gmtime, strftime\nfrom traceback import format_exc\nfrom typing import List\nfrom ub.javes_main.heroku_var import *\nfrom ub import *\nfrom sys import *\nfrom telethon.errors.rpcerrorlist import PhoneNumberInvalidError\nfrom telethon import TelegramClient, functions, types\nfrom telethon.tl.types import InputMessagesFilterDocument\nimport traceback\nimport asyncio, time, io, math, os, logging, asyncio, shutil, re\n\ndef zzaacckkyy(**args):\n args[\"func\"] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace(\".py\", \"\")\n pattern = args.get(\"pattern\", None)\n allow_sudo = args.get(\"allow_sudo\", None)\n allow_edited_updates = args.get('allow_edited_updates', False)\n args[\"incoming\"] = args.get(\"incoming\", False)\n args[\"outgoing\"] = True\n if \"trigger_on_inline\" in args:\n del args['trigger_on_inline']\n \n if bool(args[\"incoming\"]):\n args[\"outgoing\"] = False\n try:\n if pattern is not None and not pattern.startswith('(?i)'):\n args['pattern'] = '(?i)' + pattern\n except:\n pass\n reg = re.compile('(.*)')\n if not pattern == None:\n try:\n cmd = re.search(reg, pattern)\n try:\n cmd = cmd.group(1).replace(\"$\", \"\").replace(\"\\\\\", \"\").replace(\"^\", \"\")\n except:\n pass\n\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n except:\n pass\n if allow_sudo:\n args[\"from_users\"] = list(Var.SUDO_USERS)\n args[\"incoming\"] = True\n del allow_sudo\n try:\n del args[\"allow_sudo\"]\n except:\n pass\n if \"allow_edited_updates\" in args:\n del args['allow_edited_updates']\n def decorator(func): \n 
bot.add_event_handler(func, events.NewMessage(**args))\n if client2:\n \tclient2.add_event_handler(func, events.NewMessage(**args))\n if client3:\n \tclient3.add_event_handler(func, events.NewMessage(**args))\n try:\n LOAD_PLUG[file_test].append(func)\n except:\n LOAD_PLUG.update({file_test: [func]})\n return func\n return decorator\n\nasync def a(): \n test1 = await bot.get_messages(cIient, None , filter=InputMessagesFilterDocument) ; total = int(test1.total) ; total_doxx = range(0, total)\n for ixo in total_doxx:\n mxo = test1[ixo].id ; await client.download_media(await borg.get_messages(cIient, ids=mxo), \"ub/modules/\")\n \n \ndef load_module(shortname):\n if shortname.startswith(\"__\"):\n pass\n elif shortname.endswith(\"_\"):\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f\"ub/modules/{shortname}.py\")\n name = \"ub.modules.{}\".format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n print(\"Successfully (re)imported \"+shortname)\n else:\n import ub.events\n import sys\n import importlib\n from pathlib import Path\n path = Path(f\"ub/modules/{shortname}.py\")\n name = \"ub.modules.{}\".format(shortname)\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n mod.bot = bot\n mod.tgbot = bot.tgbot\n mod.Var = Var\n mod.command = command\n mod.logger = logging.getLogger(shortname)\n sys.modules[\"uniborg.util\"] = ub.events\n mod.Config = Config\n mod.borg = bot\n sys.modules[\"ub.events\"] = ub.events\n spec.loader.exec_module(mod)\n sys.modules[\"ub.modules.\"+shortname] = mod\n print(\"Successfully (re)imported \"+shortname)\n\ndef remove_plugin(shortname):\n try:\n try:\n for i in LOAD_PLUG[shortname]:\n bot.remove_event_handler(i)\n del LOAD_PLUG[shortname]\n\n except:\n name = f\"ub.modules.{shortname}\"\n\n for i in 
reversed(range(len(bot._event_builders))):\n ev, cb = bot._event_builders[i]\n if cb.__module__ == name:\n del bot._event_builders[i]\n except:\n raise ValueError\n\ndef rekcah05(pattern=None, **args):\n args[\"func\"] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace(\".py\", \"\")\n allow_sudo = args.get(\"allow_sudo\", False) \n if pattern is not None:\n if pattern.startswith(\"\\#\"):\n args[\"pattern\"] = re.compile(pattern)\n else:\n args[\"pattern\"] = re.compile(\"\\.\" + pattern)\n cmd = \".\" + pattern\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n if \"trigger_on_inline\" in args:\n del args['trigger_on_inline']\n \n args[\"outgoing\"] = True\n if allow_sudo:\n args[\"from_users\"] = list(Config.SUDO_USERS)\n args[\"incoming\"] = True\n del args[\"allow_sudo\"]\n elif \"incoming\" in args and not args[\"incoming\"]:\n args[\"outgoing\"] = True \n allow_edited_updates = False\n if \"allow_edited_updates\" in args and args[\"allow_edited_updates\"]:\n allow_edited_updates = args[\"allow_edited_updates\"]\n del args[\"allow_edited_updates\"] \n is_message_enabled = True\n return events.NewMessage(**args)\n \ndef javess(**args):\n args[\"func\"] = lambda e: e.via_bot_id is None\n stack = inspect.stack()\n previous_stack_frame = stack[1]\n file_test = Path(previous_stack_frame.filename)\n file_test = file_test.stem.replace(\".py\", \"\")\n pattern = args.get(\"pattern\", None)\n pattern = args.get('pattern', None)\n disable_edited = args.get('disable_edited', True)\n groups_only = args.get('groups_only', False)\n trigger_on_fwd = args.get('trigger_on_fwd', False)\n trigger_on_inline = args.get('trigger_on_inline', False)\n disable_errors = args.get('disable_errors', False)\n reg = re.compile('(.*)')\n if not pattern == None:\n try:\n cmd = re.search(reg, pattern)\n try:\n cmd = 
cmd.group(1).replace(\"$\", \"\").replace(\"\\\\\", \"\").replace(\"^\", \"\")\n except:\n pass\n try:\n CMD_LIST[file_test].append(cmd)\n except:\n CMD_LIST.update({file_test: [cmd]})\n except:\n pass\n if pattern is not None and not pattern.startswith('(?i)'):\n args['pattern'] = '(?i)' + pattern\n if \"trigger_on_inline\" in args:\n del args['trigger_on_inline']\n if \"disable_edited\" in args:\n del args['disable_edited']\n if \"groups_only\" in args:\n del args['groups_only']\n if \"disable_errors\" in args:\n del args['disable_errors']\n if \"trigger_on_fwd\" in args:\n del args['trigger_on_fwd']\n def decorator(func):\n async def wrapper(check):\n if LOGSPAMMER:\n send_to = BOTLOG_CHATID\n if not trigger_on_fwd and check.fwd_from:\n return\n if check.via_bot_id and not trigger_on_inline:\n return\n if groups_only and not check.is_group:\n await check.respond(\"`I don't think this is a group.`\")\n return \n try:\n await func(check) \n except events.StopPropagation:\n raise events.StopPropagation \n except KeyboardInterrupt:\n pass\n except BaseException:\n if not disable_errors:\n date = strftime(\"%Y-%m-%d %H:%M:%S\", gmtime())\n text = \"**JAVES ERROR REPORT**\\n\"\n text += \"Send this to @errorsender_bot if you cant find issue\\n\"\n ftext = \"========== DISCLAIMER ==========\"\n ftext += \"\\nThis file uploaded only logchat,\" \n ftext += \"\\nreport to admin this error if you cant find any issue\"\n ftext += \"\\n---------------------------------\\n\"\n ftext += \"================================\\n\\n\"\n ftext += \"--------BEGIN LOG--------\\n\"\n ftext += \"\\nDate: \" + date\n ftext += \"\\nChat ID: \" + str(check.chat_id)\n ftext += \"\\nSender ID: \" + str(check.sender_id)\n ftext += \"\\n\\nEvent Trigger:\\n\"\n ftext += str(check.text)\n ftext += \"\\n\\nTraceback info:\\n\"\n ftext += str(format_exc())\n ftext += \"\\n\\nError text:\\n\"\n ftext += str(sys.exc_info()[1])\n ftext += \"\\n\\n--------END LOG--------\"\n command = \"git log 
--pretty=format:\\\"%an: %s\\\" -10\"\n ftext += \"\\n\\n\\nLast 10 commits:\\n\"\n process = await asyncsubshell(command,\n stdout=asyncsub.PIPE,\n stderr=asyncsub.PIPE)\n stdout, stderr = await process.communicate()\n result = str(stdout.decode().strip()) \\\n + str(stderr.decode().strip())\n ftext += result\n file = open(\"javes_error.log\", \"w+\")\n file.write(ftext)\n file.close()\n try: \n await check.client.send_file(send_to, \"javes_error.log\", caption=text)\n remove(\"javes_error.log\")\n except:\n pass\n \n else:\n pass \n if not disable_edited:\n bot.add_event_handler(wrapper, events.MessageEdited(**args))\n bot.add_event_handler(wrapper, events.NewMessage(**args))\n if client2:\n client2.add_event_handler(wrapper, events.NewMessage(**args))\n if client3:\n client3.add_event_handler(wrapper, events.NewMessage(**args))\n return wrapper\n return decorator\n\n\nborg = javes = bot ; admin_cmd = rekcah05 ; command = zzaacckkyy ; register = javes05 = javess\n\n\ndef errors_handler(func):\n async def wrapper(event):\n try:\n return await func(event)\n except Exception:\n pass\n return wrapper\n\nasync def progress(current, total, event, start, type_of_ps, file_name=None):\n now = time.time()\n diff = now - start\n if round(diff % 10.00) == 0 or current == total:\n percentage = current * 100 / total\n speed = current / diff\n elapsed_time = round(diff) * 1000\n time_to_completion = round((total - current) / speed) * 1000\n estimated_total_time = elapsed_time + time_to_completion\n progress_str = \"[{0}{1}] {2}%\\n\".format(\n ''.join([\"█\" for i in range(math.floor(percentage / 10))]),\n ''.join([\"░\" for i in range(10 - math.floor(percentage / 10))]),\n round(percentage, 2))\n tmp = progress_str + \\\n \"{0} of {1}\\nETA: {2}\".format(\n humanbytes(current),\n humanbytes(total),\n time_formatter(estimated_total_time)\n )\n if file_name:\n await event.edit(\"{}\\nFile Name: `{}`\\n{}\".format(\n type_of_ps, file_name, tmp))\n else:\n await 
event.edit(\"{}\\n{}\".format(type_of_ps, tmp))\n\n\ndef humanbytes(size):\n if not size:\n return \"\"\n power = 2**10\n raised_to_pow = 0\n dict_power_n = {0: \"\", 1: \"Ki\", 2: \"Mi\", 3: \"Gi\", 4: \"Ti\"}\n while size > power:\n size /= power\n raised_to_pow += 1\n return str(round(size, 2)) + \" \" + dict_power_n[raised_to_pow] + \"B\"\n\n\ndef time_formatter(milliseconds: int) -> str:\n seconds, milliseconds = divmod(int(milliseconds), 1000)\n minutes, seconds = divmod(seconds, 60)\n hours, minutes = divmod(minutes, 60)\n days, hours = divmod(hours, 24)\n tmp = ((str(days) + \" day(s), \") if days else \"\") + \\\n ((str(hours) + \" hour(s), \") if hours else \"\") + \\\n ((str(minutes) + \" minute(s), \") if minutes else \"\") + \\\n ((str(seconds) + \" second(s), \") if seconds else \"\") + \\\n ((str(milliseconds) + \" millisecond(s), \") if milliseconds else \"\")\n return tmp[:-2]\n\nclass Loader():\n def __init__(self, func=None, **args):\n self.Var = Var\n bot.add_event_handler(func, events.NewMessage(**args))\n\n\ndata = json.load(open(\"ub/javes_main/extra/meaning.json\")) \ndef meaning(w): \n\tw = w.lower() \n\tif w in data: \n\t\treturn data[w] \n\n",
"step-ids": [
4,
7,
9,
13,
15
]
}
|
[
4,
7,
9,
13,
15
] |
# Read an integer from stdin and print the smallest even number strictly
# greater than it (s + 1 when s is odd, s + 2 when s is even).
n = int(input())
print(n + 2 - n % 2)
|
normal
|
{
"blob_id": "0412369f89842e2f55aa115e63f46a1b71a0f322",
"index": 2685,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(s + 2 - s % 2)\n",
"step-3": "s = int(input())\nprint(s + 2 - s % 2)\n",
"step-4": "s=int(input())\nprint(s+2-(s%2))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def shift(v, i, j):
if i <= j:
return v
store = v[i]
for k in range(0, i - j - 1):
v[i - k] = v[i - k - 1]
v[j] = store
return v
def insertion(v):
for i in range(1, len(v)):
j = i
while v[i] < v[j - 1] and j > 0:
j = j - 1
shift(v, i, j)
return v
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def shift(v, i, j):
if i <= j:
return v
store = v[i]
for k in range(0, i - j - 1):
v[i - k] = v[i - k - 1]
v[j] = store
return v
def insertion(v):
for i in range(1, len(v)):
j = i
while v[i] < v[j - 1] and j > 0:
j = j - 1
shift(v, i, j)
return v
<|reserved_special_token_0|>
print(v)
<|reserved_special_token_0|>
print(sorted)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def shift(v, i, j):
if i <= j:
return v
store = v[i]
for k in range(0, i - j - 1):
v[i - k] = v[i - k - 1]
v[j] = store
return v
def insertion(v):
for i in range(1, len(v)):
j = i
while v[i] < v[j - 1] and j > 0:
j = j - 1
shift(v, i, j)
return v
v = [5, 5, 1, 4, 5, 8]
print(v)
sorted = insertion(v)
print(sorted)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import numpy as np
def shift(v, i, j):
if i <= j:
return v
store = v[i]
for k in range(0, i - j - 1):
v[i - k] = v[i - k - 1]
v[j] = store
return v
def insertion(v):
for i in range(1, len(v)):
j = i
while v[i] < v[j - 1] and j > 0:
j = j - 1
shift(v, i, j)
return v
v = [5, 5, 1, 4, 5, 8]
print(v)
sorted = insertion(v)
print(sorted)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 15 19:27:59 2020
@author: Dan
"""
import numpy as np
def shift(v,i,j):
if i <= j:
return v
store = v[i]
for k in range(0, i-j-1):
v[i-k] = v[i-k-1]
v[j] = store
return v
def insertion(v):
for i in range(1, len(v)):
j = i
while v[i] < v[j-1] and j > 0:
j = j-1
shift(v,i,j)
return v
# v = np.random.randint(1,50,20)
v = [5,5,1,4,5,8]
print(v)
sorted = insertion(v)
print(sorted)
|
flexible
|
{
"blob_id": "35288c9ad4d3550003e3c2f9e9034f4bce1df830",
"index": 3626,
"step-1": "<mask token>\n\n\ndef shift(v, i, j):\n if i <= j:\n return v\n store = v[i]\n for k in range(0, i - j - 1):\n v[i - k] = v[i - k - 1]\n v[j] = store\n return v\n\n\ndef insertion(v):\n for i in range(1, len(v)):\n j = i\n while v[i] < v[j - 1] and j > 0:\n j = j - 1\n shift(v, i, j)\n return v\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef shift(v, i, j):\n if i <= j:\n return v\n store = v[i]\n for k in range(0, i - j - 1):\n v[i - k] = v[i - k - 1]\n v[j] = store\n return v\n\n\ndef insertion(v):\n for i in range(1, len(v)):\n j = i\n while v[i] < v[j - 1] and j > 0:\n j = j - 1\n shift(v, i, j)\n return v\n\n\n<mask token>\nprint(v)\n<mask token>\nprint(sorted)\n",
"step-3": "<mask token>\n\n\ndef shift(v, i, j):\n if i <= j:\n return v\n store = v[i]\n for k in range(0, i - j - 1):\n v[i - k] = v[i - k - 1]\n v[j] = store\n return v\n\n\ndef insertion(v):\n for i in range(1, len(v)):\n j = i\n while v[i] < v[j - 1] and j > 0:\n j = j - 1\n shift(v, i, j)\n return v\n\n\nv = [5, 5, 1, 4, 5, 8]\nprint(v)\nsorted = insertion(v)\nprint(sorted)\n",
"step-4": "<mask token>\nimport numpy as np\n\n\ndef shift(v, i, j):\n if i <= j:\n return v\n store = v[i]\n for k in range(0, i - j - 1):\n v[i - k] = v[i - k - 1]\n v[j] = store\n return v\n\n\ndef insertion(v):\n for i in range(1, len(v)):\n j = i\n while v[i] < v[j - 1] and j > 0:\n j = j - 1\n shift(v, i, j)\n return v\n\n\nv = [5, 5, 1, 4, 5, 8]\nprint(v)\nsorted = insertion(v)\nprint(sorted)\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jul 15 19:27:59 2020\n\n@author: Dan\n\"\"\"\n\nimport numpy as np\n\ndef shift(v,i,j):\n if i <= j:\n return v\n store = v[i]\n for k in range(0, i-j-1):\n v[i-k] = v[i-k-1]\n v[j] = store\n return v\n\ndef insertion(v):\n for i in range(1, len(v)):\n j = i\n while v[i] < v[j-1] and j > 0:\n j = j-1\n shift(v,i,j)\n return v\n\n# v = np.random.randint(1,50,20)\nv = [5,5,1,4,5,8]\nprint(v)\nsorted = insertion(v)\nprint(sorted)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13,
5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15,
8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429,
145011267381.10236]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985,
148817892429.6303]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536,
138194745977.8172]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13,
5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15,
8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045,
148499957167.59894]
[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13,
4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568,
138376625633.08905]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468,
147143586736.12967]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,
8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985,
148817892429.6303]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13,
5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15,
8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06,
-1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952,
149819556305.94864]
[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13,
5.0376537605765665e-11, -1.7763084077799175e-10, -
1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -
2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10,
108694336300.90585, 154375559012.27695]
[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13,
4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15,
8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06,
-1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514,
195080915978.15582]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13,
4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15,
8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337,
143318140783.98648]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789,
160453198244.84198]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13,
4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06,
-1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114,
109895891048.79645]
[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13,
4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327,
122880053749.32047]
[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13,
4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06,
-1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218,
130994741061.18477]
[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205,
148716985588.15564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098,
101545825010.15762]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15,
8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779,
101879284463.33914]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885,
102270797763.39908]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874,
106305215455.77405]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13,
4.7704075824842225e-11, -1.8975666267494283e-10, -
1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05,
-1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10,
166731944707.48343, 109962566902.69849]
[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15,
8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -
1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018,
111850971687.16727]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15,
8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794,
128488226222.4665]
[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15,
8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -
1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544,
172987399752.44284]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824,
100937635343.36494]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142,
101220474756.5564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603,
101440046940.62292]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283,
101479475091.5385]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353,
101522685052.87083]
[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158,
102059630396.96977]
[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06,
-1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558,
102134941196.42899]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273,
102270797763.3992]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13,
4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435,
102270797763.39929]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15,
8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374,
102518032445.5969]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15,
8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432,
102577021916.3392]
[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874,
106305215455.77405]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382,
112061347287.60056]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06,
-1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097,
136457449593.06062]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096,
160562679389.67618]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007,
100125948657.42978]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744,
101215117638.35565]
[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142,
101220474756.5564]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664,
101220474756.55742]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617,
101440046940.6675]
[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283,
101479475091.5385]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997,
101479475091.5439]
[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134,
101707557509.25955]
[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668,
101910116331.42278]
[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13,
4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892,
101942928295.47075]
[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095,
104790698646.6004]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8168585276282465e-11, -1.4675478300173032e-10, -
1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05,
-1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10,
160649925757.17908, 106424978687.80653]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564,
106648081137.30634]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15,
8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595,
106784848298.00577]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962,
106918161793.97298]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13,
4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648,
117274357359.96004]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109,
118996909122.33968]
[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083,
125656067768.88814]
[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951,
191438895729.71088]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697,
99223644222.007]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007,
100125948657.42978]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534,
100180028793.61896]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686,
100223589650.82378]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13,
4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221,
100558408593.70113]
[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8877585360256924e-11, -1.4675478300173032e-10, -
1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05,
-1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10,
193351738763.71564, 100949387586.23102]
[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13,
4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133,
101220474756.86967]
[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13,
4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612,
101440046940.05927]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15,
8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655,
101467426817.57397]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.8327983670281894e-11, -1.4675478300173032e-10, -
1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -
1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10,
193392923341.53983, 101900620617.14302]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13,
4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602,
103131734300.077]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997,
103180541968.40872]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538,
103805616436.34537]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15,
8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -
1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226,
106843736334.12831]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826,
110030788135.34956]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15,
8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885,
111006224451.55664]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314,
113087422800.04585]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723,
115101067854.69138]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216,
126984206927.84627]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858,
98138013390.26245]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816,
98829512345.71414]
[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13,
4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15,
8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157,
98891303611.42876]
[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723,
99638222233.03885]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506,
99962477826.90034]
[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526,
100180028793.6191]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237,
100290100926.3771]
[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853,
100447140164.3877]
[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13,
4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15,
8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385,
100872818268.9527]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177,
101076246798.6337]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375,
101683114493.3993]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255,
105699410466.83022]
[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207,
105861289429.36061]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723,
106068644665.40553]
[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06,
-1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741,
109638154986.2024]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15,
8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976,
114344342719.97507]
[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13,
4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15,
8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632,
115101067854.31332]
[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339,
120797794814.05704]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -
1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072,
133721716481.47603]
[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -
1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489,
147005409641.27127]
[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15,
9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978,
156722470654.13324]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13,
4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868,
167972224844.19583]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13,
4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635,
167972224843.92523]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189,
192873830899.82352]
[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13,
4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952,
160840990423.46024]
[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13,
4.6589669053151376e-11, -1.4986345441105813e-10, -
2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -
1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10,
96467208837.94556, 179586543004.98117]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06,
-1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293,
187118262382.8758]
[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06,
-1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736,
187415567631.77402]
[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189,
192873830899.82352]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.6154548476823616e-11, -1.8724359625458014e-10, -
2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05,
-1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10,
117723326371.02731, 192873830899.82806]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13,
4.9793760275117476e-11, -2.0780774158604122e-10, -
2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05,
-1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10,
170388218306.66492, 168925348515.4128]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13,
4.9793760275117476e-11, -2.0780774158604122e-10, -
2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10,
191821821495.1242, 158798904598.69617]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,
8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985,
163375067226.8736]
[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084,
152444791757.7255]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13,
4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15,
7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06,
-1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325,
153164597685.87036]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826,
155849166742.8801]
[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13,
5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673,
161472427331.15216]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16,
2.0053347897812537e-05, -1.7639524821935923e-06, -
1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852,
175966043507.07343]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15,
7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06,
-1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572,
184829802626.36642]
[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588,
189416231139.84406]
[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13,
4.9793760275117476e-11, -2.0772853669541976e-10, -
1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10,
160631139543.06137, 122019730569.7476]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116,
128597452665.91768]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,
-1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,
8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16,
1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08,
8.728626586100963e-10, 100156348461.68698, 161778485371.36353]
[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272,
171303112707.4717]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
4.9793760275117476e-11, -1.7352085678160897e-10, -
1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05,
-1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10,
97245352689.07887, 174341101475.58182]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16,
2.0053347897812537e-05, -1.7639524821935923e-06, -
1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355,
185221791801.95062]
[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13,
5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936,
189416231139.85312]
[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125,
190153350507.14474]
[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13,
4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279,
197738317572.1617]
[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11,
-1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16,
2.0200374650352852e-05, -1.7758673160173464e-06, -
1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334,
119035825863.27417]
[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13,
4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604,
120144468135.82727]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -
1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346,
120359956158.03543]
[0.0, -1.1984578022968498e-12, -2.094909506024221e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06,
-1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575,
120995758664.39177]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13,
4.9967768219433575e-11, -1.7352085678160897e-10, -
1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10,
151029089477.88403, 121221447183.73479]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212,
129257349906.46594]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574,
129372470770.49553]
[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11,
-1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16,
1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,
8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987,
132029509845.4832]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15,
8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735,
137741348069.72827]
[0.0, -1.2344709098355012e-12, -2.090479539659853e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,
7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -
1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327,
143862344272.2216]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159,
143868003797.30536]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13,
4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457,
151496866956.06183]
[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13,
5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617,
154679332976.7693]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,
-1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.760544278271184e-10, 107979663117.77498, 158587944243.3901]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13,
4.9793760275117476e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05,
-1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10,
101036412554.48618, 178952195751.12357]
[0.0, -1.2071709641632366e-12, -2.088572649745598e-13,
4.9793760275117476e-11, -1.7352085678160897e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05,
-1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10,
101115281125.52821, 181312381109.07834]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15,
7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06,
-1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515,
193403737351.61066]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06,
-1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167,
199093039398.6542]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,
7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -
1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637,
120593643708.66519]
[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13,
5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895,
121269083493.68436]
[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11,
-1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16,
1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.711551918674385e-10, 168378423128.42877, 121439949900.90005]
[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15,
7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06,
-1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705,
122027384226.92]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13,
4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15,
7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -
1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388,
122750625888.09634]
[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13,
5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15,
7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886,
122935226427.98189]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545,
131702579310.68652]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863,
133211383937.09729]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263,
143105235055.608]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511,
143860615432.91934]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999,
145092770865.8836]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487,
155477031697.76462]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15,
8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412,
158587944243.89005]
[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11,
-1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.760544278271184e-10, 99132279868.34593, 171185572417.85907]
[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15,
8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461,
180430143233.58368]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15,
7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06,
-1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496,
183449646874.34637]
[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13,
4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935,
191076754457.2524]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,
7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06,
-1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465,
194275355409.06598]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11,
-1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16,
2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08,
8.760544278271184e-10, 86984982238.58047, 194967876303.00238]
[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15,
7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182,
198112832281.90573]
[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067,
115813093887.0164]
[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06,
-1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041,
118508631814.89664]
[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395,
119478476003.54858]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902,
119746195767.88297]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686,
120002114057.9749]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245,
124495463707.0261]
[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13,
5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236,
127226107362.62663]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766,
128048566261.66084]
[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375,
129146670219.88675]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287,
132556338910.10567]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873,
132653030892.18918]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999,
140436120253.29218]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377,
143105235055.60883]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417,
143860615432.91846]
[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159,
143868003797.30536]
[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13,
5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966,
144269444777.14786]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957,
145085114899.6645]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15,
7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602,
145085114900.12366]
[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217,
145590447784.79443]
[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044,
153694065180.84283]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13,
5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157,
154263245256.49524]
[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876,
164710456294.5225]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035,
170174200265.44513]
<|reserved_special_token_1|>
one = [7.236287049225701e-06, -1.445911565527231e-12, -
1.7498772740084537e-13, 5.109944355076077e-11, -2.5430545472048434e-10,
-1.1709514644876058e-15, 3.210132219509301e-16, 2.502027767038304e-05,
-1.975229899156637e-06, -1.4769695480936238e-08, 8.945619840357268e-10,
135323228000.64511, 130464457208.5385]
two = [6.101651991514008e-06, -1.2764740103418866e-12, -
1.9703439809858206e-13, 4.396430723625485e-11, -7.256876412950873e-11,
-1.0739249647595844e-15, 3.658727722774004e-16, 2.9622074287767617e-05,
-1.9615179204309246e-06, -1.518516920005905e-08, 8.601004856702239e-10,
194360719320.3122, 75684271432.82758]
three = [6.4442734160126695e-06, -1.2463732938819767e-12, -
1.7912928652160854e-13, 3.990379556815055e-11, -7.256876412950873e-11,
-1.128505986956859e-15, 3.855466000081844e-16, 2.7105518268805634e-05,
-1.918022677712299e-06, -1.648586510957147e-08, 8.952907812465134e-10,
40874176708.45886, 129961018217.7445]
four = [5.591985036569838e-06, -1.5732644861037622e-12, -
1.2586540738798186e-13, 5.508993685740796e-11, -2.345347836605763e-10,
-2.1583737575101563e-15, 3.315525502908504e-16, 2.240369111953624e-05,
-1.8808495402864136e-06, -1.5154818034574072e-08, 9.134128217572173e-10,
95538034865.65512, 192689393537.75766]
five = [5.9877501684316964e-06, -1.4725222964411265e-12, -
2.0184675219747084e-13, 4.503520441436847e-11, -2.195719309752964e-10,
-1.1996862422718706e-15, 3.172649531291829e-16, 2.235294071412983e-05,
-1.7673862518012629e-06, -1.593810591566234e-08, 8.495479067416047e-10,
172629547544.72174, 121012464101.10771]
six = [6.525636151737385e-10, -1.5516831882387681e-12, -
1.7065883936338436e-13, 4.6265959327559024e-11, -2.669670220497726e-10,
-1.0739249647595844e-15, 9.085513864943156e-16, 2.5963751617497687e-05,
-1.9757021060346727e-06, -1.5031696163247857e-08, 8.945619840357268e-10,
99871865434.22476, 123933224114.80229]
first1_gen = [[6.417695307686038e-06, -1.2416886913890308e-12, -
1.791907685050265e-13, 3.983180616117193e-11, -7.243488055496258e-11, -
1.1211433897576025e-15, 3.855466000081844e-16, 2.7255618460061466e-05,
-1.917823676019374e-06, -1.6515339421288782e-08, 9.011563904603084e-10,
37866240406.859344, 251532289608.81], [5.974092884160685e-06, -
1.4591405170404072e-12, -2.0184675219747084e-13, 4.3821744446480515e-11,
-7.22093644433135e-11, -1.0712173220027044e-15, 3.65758224365464e-16,
2.235294071412983e-05, -1.763797302814154e-06, -1.6059311052756668e-08,
8.601004856702239e-10, 50907349656.8246, 117645129547.73723], [
7.171513003462397e-06, -1.4334443716578728e-12, -1.749514610735409e-13,
5.509823004788858e-11, -2.5310572250093563e-10, -1.1729621402736547e-15,
3.321162280251396e-16, 2.4812886502853343e-05, -1.964119169077712e-06,
-1.4799846596325615e-08, 8.965548334484032e-10, 85071583311.774,
128667385131.30013], [7.3000149385339486e-06, -1.4508582334938624e-12,
-1.7446896418754742e-13, 5.109944355076077e-11, -2.5448794058714256e-10,
-1.1658376910672744e-15, 3.1827015830354867e-16, 2.502027767038304e-05,
-1.9664311146400523e-06, -1.4730561693079958e-08, 8.945619840357268e-10,
88113858040.47986, 127558862768.52084], [5.581899283069486e-06, -
1.5683042319109065e-12, -1.2586540738798186e-13, 5.535493146365402e-11,
-2.359264703422783e-10, -2.1583737575101563e-15, 3.2921934547988314e-16,
2.2287538734129395e-05, -1.8740196054647742e-06, -
1.5117323048065992e-08, 9.114608510796109e-10, 90926368846.81926,
202187413440.1054], [7.283321725975412e-06, -1.4356567410151954e-12, -
1.7340660013452496e-13, 5.090884822547887e-11, -2.5483963758954753e-10,
-1.139281753854116e-15, 3.1970242364315826e-16, 2.7105518268805634e-05,
-1.963160298901409e-06, -1.4681586301228543e-08, 8.916460477308206e-10,
142505061534.36484, 476063714570.38367], [5.591985036569838e-06, -
1.582675728169255e-12, -1.7359285477580936e-13, 5.508993685740796e-11,
-2.5320893657294154e-10, -2.1583737575101563e-15, 3.210132219509301e-16,
2.511654073479438e-05, -1.965555797894771e-06, -1.5140087108671845e-08,
9.214909160927855e-10, 154168790181.56195, 151975095946.00134], [
6.4442734160126695e-06, -1.5732644861037622e-12, -
1.8036634758606428e-13, 5.508993685740796e-11, -7.27534017567909e-11, -
2.1583737575101563e-15, 3.306758579127667e-16, 2.2271668826613973e-05,
-1.8701423073554431e-06, -1.501078224172373e-08, 8.952907812465134e-10,
267883353895.00665, 158759045786.36343], [6.460391520361948e-06, -
1.2647094709156108e-12, -1.7971415732486973e-13, 4.396430723625485e-11,
-7.247266456377939e-11, -1.1373744765683215e-15, 3.658727722774004e-16,
2.7105518268805634e-05, -1.9663482803776534e-06, -
1.6397993463300374e-08, 8.923803313149724e-10, 349965962553.9084,
297837273933.3269], [5.6272383047081095e-06, -1.5732644861037622e-12, -
1.2571170147507106e-13, 5.534697362808701e-11, -2.3610413258218975e-10,
-1.1709514644876058e-15, 3.2295817320330796e-16, 2.2314117324425535e-05,
-1.8663649176622442e-06, -1.4769695480936238e-08, 9.134128217572173e-10,
393807734620.02893, 1450122303072.2456], [6.437914022666636e-06, -
1.2546731037733632e-12, -1.7844406460041829e-13, 5.488975389250315e-11,
-7.259445338393382e-11, -2.1597092009682793e-15, 3.3041861616205316e-16,
2.240369111953624e-05, -1.876360375320595e-06, -1.648586510957147e-08,
9.134128217572173e-10, 630890128752.3734, 431834854178.85406], [
6.046575120541287e-06, -1.2764740103418866e-12, -1.746683186012092e-13,
5.109944355076077e-11, -2.520608616913497e-10, -1.0704525109919603e-15,
3.6772692838424905e-16, 2.971296945414015e-05, -1.951293357817624e-06,
-1.4769695480936238e-08, 8.939102135383639e-10, 871857905030.9667,
2328286443290.7437], [6.051000675950963e-06, -1.2846825520511646e-12, -
1.268060597488819e-13, 5.490952472465525e-11, -2.3244121922778247e-10,
-2.1424540029363198e-15, 3.673980081076506e-16, 2.961326937497751e-05,
-1.895367635724618e-06, -1.5034205062876655e-08, 9.16195585945909e-10,
1374938673042.5493, 4524615824537.332], [5.6149092148265474e-06, -
1.4639678768975506e-12, -1.253161090730697e-13, 4.481233479664715e-11,
-2.335516269047763e-10, -2.1416544930348844e-15, 3.3108330528832777e-16,
2.22837679272578e-05, -1.8681878215606722e-06, -1.528899727808779e-08,
8.573199342562181e-10, 1914602582873.603, 2013877892656.268], [
6.101651991514008e-06, -1.5833077943313046e-12, -1.9703439809858206e-13,
5.500949944067544e-11, -7.256876412950873e-11, -1.0739249647595844e-15,
3.658727722774004e-16, 2.970517711660123e-05, -1.8738366196528042e-06,
-1.522166132952199e-08, 9.123763139194573e-10, 3105022967535.493,
7589715261899.736], [7.169307360099383e-06, -1.475336624504327e-12, -
2.0167346748799746e-13, 4.53859215469466e-11, -2.1795530264429259e-10,
-1.209364174087727e-15, 3.179525403817121e-16, 2.248948490803903e-05, -
1.9732992714201345e-06, -1.4769695480936238e-08, 8.472670825115021e-10,
3105580314530.341, 4622017117439.275]]
second1_gen = [[6.473615077297489e-06, -1.2416886913890308e-12, -
1.7473505716030156e-13, 3.966285637236728e-11, -7.243488055496258e-11,
-1.1645955168783485e-15, 3.1918479761370934e-16, 2.7255618460061466e-05,
-1.912188850787629e-06, -1.6430064111592607e-08, 8.970550453733459e-10,
35685411688.23251, 231044368946.34586], [6.393923513974502e-06, -
1.2418411778899226e-12, -1.7798884315456173e-13, 3.983180616117193e-11,
-7.243742739542879e-11, -1.128236668058653e-15, 3.855466000081844e-16,
2.7200371659468664e-05, -1.9285560276423494e-06, -1.636514926725132e-08,
9.071692193685023e-10, 57865021002.9106, 360571654391.1672], [
7.230454358781939e-06, -1.423600316370741e-12, -1.7526876652912844e-13,
5.484412599476033e-11, -7.222102668803471e-11, -1.1795054510279537e-15,
3.642469974043324e-16, 2.4721354631465055e-05, -1.7738362153245365e-06,
-1.6042437181983083e-08, 8.601004856702239e-10, 60788722272.11295,
440230270157.01904], [6.435449388867622e-06, -1.2416886913890308e-12, -
1.807074860305897e-13, 5.4624696474782334e-11, -7.299561923303083e-11,
-1.1155657493946243e-15, 3.855466000081844e-16, 2.4639345261867096e-05,
-1.92912357850029e-06, -1.4800406168095671e-08, 9.011563904603084e-10,
90541420172.20418, 503189560104.03455], [6.417695307686038e-06, -
1.2339817339229541e-12, -1.7924803979756243e-13, 5.5902899343682586e-11,
-7.217875877484109e-11, -1.120826019773443e-15, 3.8364837768074985e-16,
2.2074405673546407e-05, -1.904212437644655e-06, -1.509791791618086e-08,
8.960324081400173e-10, 91138056935.866, 156256693553.4698], [
7.235432436183002e-06, -1.444519147741974e-12, -1.7273464723057338e-13,
5.517809418856912e-11, -2.5310572250093563e-10, -1.1658376910672744e-15,
3.3048095015500005e-16, 2.4812886502853343e-05, -1.964119169077712e-06,
-1.4777953862585708e-08, 8.945619840357268e-10, 98015149423.40909,
125389712442.99564], [6.382295596647026e-06, -1.5683042319109065e-12, -
1.271182130914441e-13, 3.9709881372590666e-11, -2.3411267641257417e-10,
-1.1298867172210502e-15, 3.273827033054119e-16, 2.71828464025051e-05, -
1.86879521538149e-06, -1.6615697675064263e-08, 8.938783145101195e-10,
108132988244.55444, 600937075323.7117], [7.3000149385339486e-06, -
1.4649443926376347e-12, -1.740251215699652e-13, 5.5040821609381877e-11,
-2.5448794058714256e-10, -1.1729621402736547e-15, 3.321162280251396e-16,
2.492985953688089e-05, -1.95260325957056e-06, -1.4879723555310096e-08,
8.886352647229086e-10, 118040637271.1665, 119637343045.177], [
5.595995170722691e-06, -1.5775800984465949e-12, -1.2531378473105398e-13,
5.5737478708430025e-11, -2.359264703422783e-10, -2.141274549861917e-15,
3.2670998922499434e-16, 2.2375793269713536e-05, -1.8912926681237391e-06,
-1.5244852134327217e-08, 9.114608510796109e-10, 193706809398.06177,
145429438824.56485], [6.417695307686038e-06, -1.2390179448049186e-12, -
2.0184675219747084e-13, 3.996761820973954e-11, -7.30077645678233e-11, -
1.0733818300903034e-15, 3.6521589033170274e-16, 2.7380751148035565e-05,
-1.901967051200766e-06, -1.6531476837456585e-08, 8.659462633971021e-10,
291714681643.4888, 219358626907.00577], [7.269087955666727e-06, -
1.4398732474157131e-12, -1.745771866624504e-13, 5.5370858680922966e-11,
-2.5212090845365535e-10, -1.1547640084684547e-15,
3.1826570991307717e-16, 2.4799848604697875e-05, -1.9802449310363633e-06,
-1.4932011828861567e-08, 8.916225586049855e-10, 291814703950.912,
265497905413.09335], [5.9575073045674184e-06, -1.4591405170404072e-12,
-1.7515686156504634e-13, 5.071091939607585e-11, -7.251972289899038e-11,
-1.172163868062928e-15, 3.2003450301868095e-16, 2.236559796692659e-05,
-1.964000257622103e-06, -1.461000086726312e-08, 8.924031273079037e-10,
441351014961.37744, 513124822279.29816], [7.118156558728498e-06, -
1.4213484509322684e-12, -1.7594919642528414e-13, 5.502275447498347e-11,
-2.359264703422783e-10, -2.146866081339977e-15, 3.3020925008057705e-16,
2.48800717576552e-05, -1.8740196054647742e-06, -1.4681760148497176e-08,
9.194043116452982e-10, 480601682287.2741, 2166349399584.3464], [
6.435379358296727e-06, -1.449279705541305e-12, -1.791907685050265e-13,
4.013727926643595e-11, -2.561628978573389e-10, -1.1658376910672744e-15,
3.1916771926698506e-16, 2.706170262409588e-05, -1.9747493962051268e-06,
-1.6529378614728517e-08, 8.945619840357268e-10, 480690251628.6576,
455217335045.56067], [7.273965294010602e-06, -1.4508582334938624e-12, -
1.2640181562203036e-13, 5.1256890020829106e-11, -2.347526011960417e-10,
-1.1573810914157072e-15, 3.313802025100971e-16, 2.5248996663846427e-05,
-1.8890715225154116e-06, -1.4830513494585048e-08, 9.024560997678787e-10,
513022508534.7746, 1741282758378.8208], [7.171513003462397e-06, -
1.4334443716578728e-12, -1.258745292341622e-13, 5.562080442549079e-11,
-2.5310572250093563e-10, -2.177369178159867e-15, 3.269368594462498e-16,
2.5052523082312023e-05, -1.9593459141604013e-06, -
1.4665768665138152e-08, 8.920318373308913e-10, 559251400205.1976,
313686240874.89294]]
third1_gen = [[6.428534934734018e-06, -1.2348251959432863e-12, -
1.767418187059626e-13, 3.954772029523348e-11, -7.292041892016764e-11, -
1.1216042005993232e-15, 3.8462974452187554e-16, 2.732021800880368e-05,
-1.912188850787629e-06, -1.6465861899672315e-08, 8.953663972360121e-10,
35914970214.05617, 208658422545.5101], [6.449609175276781e-06, -
1.2355212093166627e-12, -1.7892996139776768e-13, 3.978108705811362e-11,
-7.260470610345522e-11, -1.128236668058653e-15, 3.8262320992212617e-16,
2.699492740612888e-05, -1.9285560276423494e-06, -1.6459368248390354e-08,
9.071692193685023e-10, 37667755025.66565, 260591174431.75333], [
6.393923513974502e-06, -1.2329510175057565e-12, -1.7878217157136278e-13,
4.009121098742944e-11, -7.243742739542879e-11, -1.119215448440791e-15,
3.855466000081844e-16, 2.7170577516281446e-05, -1.946180426984478e-06,
-1.6356719885598995e-08, 9.071692193685023e-10, 41822657912.61174,
187148082730.9518], [6.393923513974502e-06, -1.2418411778899226e-12, -
1.7764720872488035e-13, 5.5839617178535e-11, -7.217875877484109e-11, -
1.1285205693786809e-15, 3.8241419562917457e-16, 2.727322263242888e-05,
-1.9285560276423494e-06, -1.6299569164241514e-08, 8.954758973117168e-10,
45658359101.85514, 143455126000.2526], [6.412748625088242e-06, -
1.2418411778899226e-12, -1.7788474362949836e-13, 3.98996561577576e-11,
-7.290920324596793e-11, -1.1258830930124426e-15, 3.8322709394594156e-16,
2.6978084672522227e-05, -1.9285560276423494e-06, -
1.6212095851483947e-08, 9.06465374180439e-10, 61888825971.955795,
378668457219.4866], [7.2950079161541e-06, -1.423600316370741e-12, -
1.8067111524974517e-13, 5.467528933636526e-11, -7.269174548770519e-11,
-1.1131382577055909e-15, 3.642469974043324e-16, 2.442302310111588e-05,
-1.9365154780516644e-06, -1.4736235919210341e-08, 9.02573445716291e-10,
72168008768.07632, 429565720321.34186], [7.277641363649251e-06, -
1.4186237292635021e-12, -1.7672076654522444e-13, 5.4875348972838477e-11,
-7.250728822785179e-11, -1.1805107762756462e-15, 3.880180132520679e-16,
2.7230117388865188e-05, -1.79140018540739e-06, -1.6042437181983083e-08,
8.524740779894739e-10, 144497176198.74966, 733034177617.006], [
6.435449388867622e-06, -1.2375432988348708e-12, -1.8114977137612309e-13,
3.9353291584632385e-11, -7.306938943468394e-11, -1.1645955168783485e-15,
3.887993677152085e-16, 2.4432920122355823e-05, -1.927081007099796e-06,
-1.644170413651962e-08, 9.09149545755435e-10, 151124978488.96066,
169172823395.74277], [7.278147471012389e-06, -1.4279386093057266e-12, -
1.7683419692117291e-13, 5.493758019518918e-11, -7.289146026177328e-11,
-1.1733747472097884e-15, 3.675691109659462e-16, 2.4721354631465055e-05,
-1.7638896999117907e-06, -1.588988736168235e-08, 8.632841256471107e-10,
202474467398.45615, 922092113586.5779], [7.177079530800026e-06, -
1.234976832476029e-12, -1.7526876652912844e-13, 5.534254133122458e-11,
-7.205830797649949e-11, -1.120826019773443e-15, 3.8364837768074985e-16,
2.2258192147086412e-05, -1.7878127478583311e-06, -1.620023857736605e-08,
8.601004856702239e-10, 213869103072.6637, 175609972725.89545], [
6.350923506939188e-06, -1.2525603780194753e-12, -1.7993410193080307e-13,
5.465765498048408e-11, -7.243742739542879e-11, -1.1188147125437704e-15,
3.855466000081844e-16, 2.47790541156232e-05, -1.9163436765125797e-06, -
1.4800406168095671e-08, 9.043461740243768e-10, 224990894591.97565,
940216435276.2135], [6.375685299492019e-06, -1.2470011129066444e-12, -
1.7556981763399573e-13, 5.482994274294271e-11, -7.247391358991481e-11,
-1.1737410455893592e-15, 3.8256427214483946e-16, 2.4747394888572957e-05,
-1.921085601798487e-06, -1.655011267092608e-08, 9.011563904603084e-10,
242139334921.33466, 239644754200.97003], [6.474178960026375e-06, -
1.436844524248817e-12, -1.766513283684079e-13, 3.940038642964773e-11, -
7.181977887130175e-11, -1.1548751736666541e-15, 3.1745148598988346e-16,
2.707077658308786e-05, -1.92536072773705e-06, -1.6138736645669917e-08,
8.669699125562364e-10, 435950975348.6226, 363915964843.3034], [
6.393923513974502e-06, -1.4269415936091027e-12, -1.7684911527276688e-13,
5.480211712359269e-11, -7.243742739542879e-11, -1.1795054510279537e-15,
3.8683254669914693e-16, 2.7200371659468664e-05, -1.925930700762681e-06,
-1.643396668485197e-08, 8.601004856702239e-10, 840789439847.5613,
886246867017.2574], [6.5292806963971566e-06, -1.2521788644307235e-12, -
1.752024719240228e-13, 5.432423395298522e-11, -7.243160061946103e-11, -
1.1728842336075722e-15, 3.642469974043324e-16, 2.4721354631465055e-05,
-1.9201275577069358e-06, -1.6042437181983083e-08, 8.613978338195112e-10,
1220087240914.9465, 1538404370735.8923], [7.222746286095911e-06, -
1.4287928653696903e-12, -1.7798884315456173e-13, 5.47608522234827e-11,
-7.177949793819456e-11, -1.1234835849356116e-15, 3.638627899273496e-16,
2.4725904181789833e-05, -1.7849753358990938e-06, -
1.6004659818379623e-08, 9.095587982641099e-10, 1457214324700.6113,
3971854766728.4727]]
[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13,
5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15,
8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429,
145011267381.10236]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985,
148817892429.6303]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536,
138194745977.8172]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13,
5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15,
8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045,
148499957167.59894]
[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13,
4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568,
138376625633.08905]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15,
9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,
-1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468,
147143586736.12967]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13,
4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,
8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,
-1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985,
148817892429.6303]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13,
5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15,
8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06,
-1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952,
149819556305.94864]
[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13,
5.0376537605765665e-11, -1.7763084077799175e-10, -
1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -
2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10,
108694336300.90585, 154375559012.27695]
[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13,
4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15,
8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06,
-1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514,
195080915978.15582]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13,
4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15,
8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337,
143318140783.98648]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789,
160453198244.84198]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13,
4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06,
-1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114,
109895891048.79645]
[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13,
4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327,
122880053749.32047]
[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13,
4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06,
-1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218,
130994741061.18477]
[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15,
8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06,
-1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205,
148716985588.15564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098,
101545825010.15762]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15,
8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779,
101879284463.33914]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885,
102270797763.39908]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874,
106305215455.77405]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13,
4.7704075824842225e-11, -1.8975666267494283e-10, -
1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05,
-1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10,
166731944707.48343, 109962566902.69849]
[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15,
8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -
1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018,
111850971687.16727]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15,
8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794,
128488226222.4665]
[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15,
8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -
1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544,
172987399752.44284]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824,
100937635343.36494]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142,
101220474756.5564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603,
101440046940.62292]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283,
101479475091.5385]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353,
101522685052.87083]
[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158,
102059630396.96977]
[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06,
-1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558,
102134941196.42899]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273,
102270797763.3992]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13,
4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435,
102270797763.39929]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15,
8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374,
102518032445.5969]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15,
8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432,
102577021916.3392]
[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874,
106305215455.77405]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382,
112061347287.60056]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06,
-1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097,
136457449593.06062]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,
8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096,
160562679389.67618]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007,
100125948657.42978]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744,
101215117638.35565]
[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142,
101220474756.5564]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664,
101220474756.55742]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617,
101440046940.6675]
[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283,
101479475091.5385]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13,
4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997,
101479475091.5439]
[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134,
101707557509.25955]
[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668,
101910116331.42278]
[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13,
4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892,
101942928295.47075]
[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095,
104790698646.6004]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8168585276282465e-11, -1.4675478300173032e-10, -
1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05,
-1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10,
160649925757.17908, 106424978687.80653]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564,
106648081137.30634]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15,
8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595,
106784848298.00577]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962,
106918161793.97298]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13,
4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648,
117274357359.96004]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13,
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,
8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109,
118996909122.33968]
[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13,
4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15,
8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083,
125656067768.88814]
[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951,
191438895729.71088]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448,
98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697,
99223644222.007]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007,
100125948657.42978]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534,
100180028793.61896]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686,
100223589650.82378]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13,
4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221,
100558408593.70113]
[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8877585360256924e-11, -1.4675478300173032e-10, -
1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05,
-1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10,
193351738763.71564, 100949387586.23102]
[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13,
4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133,
101220474756.86967]
[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13,
4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612,
101440046940.05927]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15,
8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655,
101467426817.57397]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13,
4.8327983670281894e-11, -1.4675478300173032e-10, -
1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -
1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10,
193392923341.53983, 101900620617.14302]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13,
4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602,
103131734300.077]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997,
103180541968.40872]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15,
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538,
103805616436.34537]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15,
8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -
1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226,
106843736334.12831]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826,
110030788135.34956]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15,
8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885,
111006224451.55664]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314,
113087422800.04585]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723,
115101067854.69138]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216,
126984206927.84627]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858,
98138013390.26245]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816,
98829512345.71414]
[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13,
4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15,
8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157,
98891303611.42876]
[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723,
99638222233.03885]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506,
99962477826.90034]
[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -
1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526,
100180028793.6191]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237,
100290100926.3771]
[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13,
4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15,
8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853,
100447140164.3877]
[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13,
4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15,
8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385,
100872818268.9527]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,
8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177,
101076246798.6337]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,
8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06,
-1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375,
101683114493.3993]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255,
105699410466.83022]
[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13,
4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15,
8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207,
105861289429.36061]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15,
8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723,
106068644665.40553]
[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15,
8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06,
-1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741,
109638154986.2024]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15,
8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976,
114344342719.97507]
[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13,
4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15,
8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632,
115101067854.31332]
[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,
-1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339,
120797794814.05704]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -
1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072,
133721716481.47603]
[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,
4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,
8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -
1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489,
147005409641.27127]
[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15,
9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978,
156722470654.13324]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13,
4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868,
167972224844.19583]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13,
4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635,
167972224843.92523]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189,
192873830899.82352]
[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13,
4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15,
8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,
-1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952,
160840990423.46024]
[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13,
4.6589669053151376e-11, -1.4986345441105813e-10, -
2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -
1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10,
96467208837.94556, 179586543004.98117]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06,
-1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293,
187118262382.8758]
[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06,
-1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736,
187415567631.77402]
[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13,
4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,
9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,
-1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189,
192873830899.82352]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13,
4.6154548476823616e-11, -1.8724359625458014e-10, -
2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05,
-1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10,
117723326371.02731, 192873830899.82806]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13,
4.9793760275117476e-11, -2.0780774158604122e-10, -
2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05,
-1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10,
170388218306.66492, 168925348515.4128]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13,
4.9793760275117476e-11, -2.0780774158604122e-10, -
2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10,
191821821495.1242, 158798904598.69617]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,
8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985,
163375067226.8736]
[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084,
152444791757.7255]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13,
4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15,
7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06,
-1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325,
153164597685.87036]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826,
155849166742.8801]
[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13,
5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673,
161472427331.15216]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16,
2.0053347897812537e-05, -1.7639524821935923e-06, -
1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852,
175966043507.07343]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15,
7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06,
-1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572,
184829802626.36642]
[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588,
189416231139.84406]
[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13,
4.9793760275117476e-11, -2.0772853669541976e-10, -
1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10,
160631139543.06137, 122019730569.7476]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116,
128597452665.91768]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,
-1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,
8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16,
1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08,
8.728626586100963e-10, 100156348461.68698, 161778485371.36353]
[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272,
171303112707.4717]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
4.9793760275117476e-11, -1.7352085678160897e-10, -
1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05,
-1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10,
97245352689.07887, 174341101475.58182]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,
-1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16,
2.0053347897812537e-05, -1.7639524821935923e-06, -
1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355,
185221791801.95062]
[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13,
5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936,
189416231139.85312]
[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125,
190153350507.14474]
[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13,
4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,
7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279,
197738317572.1617]
[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11,
-1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16,
2.0200374650352852e-05, -1.7758673160173464e-06, -
1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334,
119035825863.27417]
[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13,
4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604,
120144468135.82727]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -
1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346,
120359956158.03543]
[0.0, -1.1984578022968498e-12, -2.094909506024221e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,
7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06,
-1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575,
120995758664.39177]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13,
4.9967768219433575e-11, -1.7352085678160897e-10, -
1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05,
-1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10,
151029089477.88403, 121221447183.73479]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212,
129257349906.46594]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574,
129372470770.49553]
[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11,
-1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16,
1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,
8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987,
132029509845.4832]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15,
8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735,
137741348069.72827]
[0.0, -1.2344709098355012e-12, -2.090479539659853e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,
7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -
1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327,
143862344272.2216]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159,
143868003797.30536]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13,
4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457,
151496866956.06183]
[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13,
5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617,
154679332976.7693]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,
-1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.760544278271184e-10, 107979663117.77498, 158587944243.3901]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,
8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659,
161449199082.99103]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13,
4.9793760275117476e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05,
-1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10,
101036412554.48618, 178952195751.12357]
[0.0, -1.2071709641632366e-12, -2.088572649745598e-13,
4.9793760275117476e-11, -1.7352085678160897e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05,
-1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10,
101115281125.52821, 181312381109.07834]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
4.9675085987122204e-11, -1.7558160485557454e-10, -
1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -
1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,
92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.1065990049856794e-13,
5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15,
7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06,
-1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515,
193403737351.61066]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06,
-1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167,
199093039398.6542]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,
7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -
1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637,
120593643708.66519]
[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13,
5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895,
121269083493.68436]
[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11,
-1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16,
1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.711551918674385e-10, 168378423128.42877, 121439949900.90005]
[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15,
7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06,
-1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705,
122027384226.92]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13,
4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15,
7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -
1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388,
122750625888.09634]
[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13,
5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15,
7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886,
122935226427.98189]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545,
131702579310.68652]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863,
133211383937.09729]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263,
143105235055.608]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511,
143860615432.91934]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999,
145092770865.8836]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487,
155477031697.76462]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15,
8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412,
158587944243.89005]
[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11,
-1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16,
1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08,
8.760544278271184e-10, 99132279868.34593, 171185572417.85907]
[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15,
8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461,
180430143233.58368]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15,
7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06,
-1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496,
183449646874.34637]
[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13,
4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06,
-1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935,
191076754457.2524]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,
7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06,
-1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465,
194275355409.06598]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11,
-1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16,
2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08,
8.760544278271184e-10, 86984982238.58047, 194967876303.00238]
[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13,
5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15,
7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182,
198112832281.90573]
[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067,
115813093887.0164]
[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06,
-1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041,
118508631814.89664]
[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395,
119478476003.54858]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902,
119746195767.88297]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686,
120002114057.9749]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149,
123962248783.03809]
[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245,
124495463707.0261]
[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13,
5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236,
127226107362.62663]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766,
128048566261.66084]
[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13,
5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375,
129146670219.88675]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287,
132556338910.10567]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873,
132653030892.18918]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06,
-1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999,
140436120253.29218]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377,
143105235055.60883]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417,
143860615432.91846]
[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159,
143868003797.30536]
[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13,
5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15,
7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966,
144269444777.14786]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957,
145085114899.6645]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15,
7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602,
145085114900.12366]
[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13,
5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217,
145590447784.79443]
[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044,
153694065180.84283]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13,
5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15,
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06,
-1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157,
154263245256.49524]
[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13,
5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15,
7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06,
-1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876,
164710456294.5225]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13,
5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15,
7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,
-1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035,
170174200265.44513]
<|reserved_special_token_1|>
one=[7.236287049225701e-06, -1.445911565527231e-12, -1.7498772740084537e-13, 5.109944355076077e-11, -2.5430545472048434e-10, -1.1709514644876058e-15, 3.210132219509301e-16, 2.502027767038304e-05, -1.975229899156637e-06, -1.4769695480936238e-08, 8.945619840357268e-10, 135323228000.64511, 130464457208.5385]
two=[6.101651991514008e-06, -1.2764740103418866e-12, -1.9703439809858206e-13, 4.396430723625485e-11, -7.256876412950873e-11, -1.0739249647595844e-15, 3.658727722774004e-16, 2.9622074287767617e-05, -1.9615179204309246e-06, -1.518516920005905e-08, 8.601004856702239e-10, 194360719320.3122, 75684271432.82758]
three=[6.4442734160126695e-06, -1.2463732938819767e-12, -1.7912928652160854e-13, 3.990379556815055e-11, -7.256876412950873e-11, -1.128505986956859e-15, 3.855466000081844e-16, 2.7105518268805634e-05, -1.918022677712299e-06, -1.648586510957147e-08, 8.952907812465134e-10, 40874176708.45886, 129961018217.7445]
four=[5.591985036569838e-06, -1.5732644861037622e-12, -1.2586540738798186e-13, 5.508993685740796e-11, -2.345347836605763e-10, -2.1583737575101563e-15, 3.315525502908504e-16, 2.240369111953624e-05, -1.8808495402864136e-06, -1.5154818034574072e-08, 9.134128217572173e-10, 95538034865.65512, 192689393537.75766]
five=[5.9877501684316964e-06, -1.4725222964411265e-12, -2.0184675219747084e-13, 4.503520441436847e-11, -2.195719309752964e-10, -1.1996862422718706e-15, 3.172649531291829e-16, 2.235294071412983e-05, -1.7673862518012629e-06, -1.593810591566234e-08, 8.495479067416047e-10, 172629547544.72174, 121012464101.10771]
six = [6.525636151737385e-10, -1.5516831882387681e-12, -1.7065883936338436e-13, 4.6265959327559024e-11, -2.669670220497726e-10, -1.0739249647595844e-15, 9.085513864943156e-16, 2.5963751617497686e-05, -1.9757021060346726e-06, -1.5031696163247858e-08, 8.945619840357268e-10, 99871865434.22476, 123933224114.80229]
first1_gen= [[6.417695307686038e-06, -1.2416886913890308e-12, -1.791907685050265e-13, 3.983180616117193e-11, -7.243488055496258e-11, -1.1211433897576025e-15, 3.855466000081844e-16, 2.7255618460061466e-05, -1.917823676019374e-06, -1.6515339421288782e-08, 9.011563904603084e-10, 37866240406.859344, 251532289608.81], [5.974092884160685e-06, -1.4591405170404072e-12, -2.0184675219747084e-13, 4.3821744446480515e-11, -7.22093644433135e-11, -1.0712173220027044e-15, 3.65758224365464e-16, 2.235294071412983e-05, -1.763797302814154e-06, -1.6059311052756668e-08, 8.601004856702239e-10, 50907349656.8246, 117645129547.73723], [7.171513003462397e-06, -1.4334443716578728e-12, -1.749514610735409e-13, 5.509823004788858e-11, -2.5310572250093563e-10, -1.1729621402736547e-15, 3.321162280251396e-16, 2.4812886502853343e-05, -1.964119169077712e-06, -1.4799846596325615e-08, 8.965548334484032e-10, 85071583311.774, 128667385131.30013], [7.3000149385339486e-06, -1.4508582334938624e-12, -1.7446896418754742e-13, 5.109944355076077e-11, -2.5448794058714256e-10, -1.1658376910672744e-15, 3.1827015830354867e-16, 2.502027767038304e-05, -1.9664311146400523e-06, -1.4730561693079958e-08, 8.945619840357268e-10, 88113858040.47986, 127558862768.52084], [5.581899283069486e-06, -1.5683042319109065e-12, -1.2586540738798186e-13, 5.535493146365402e-11, -2.359264703422783e-10, -2.1583737575101563e-15, 3.2921934547988314e-16, 2.2287538734129395e-05, -1.8740196054647742e-06, -1.5117323048065992e-08, 9.114608510796109e-10, 90926368846.81926, 202187413440.1054], [7.283321725975412e-06, -1.4356567410151954e-12, -1.7340660013452496e-13, 5.090884822547887e-11, -2.5483963758954753e-10, -1.139281753854116e-15, 3.1970242364315826e-16, 2.7105518268805634e-05, -1.963160298901409e-06, -1.4681586301228543e-08, 8.916460477308206e-10, 142505061534.36484, 476063714570.38367], [5.591985036569838e-06, -1.582675728169255e-12, -1.7359285477580936e-13, 5.508993685740796e-11, -2.5320893657294154e-10, -2.1583737575101563e-15, 
3.210132219509301e-16, 2.511654073479438e-05, -1.965555797894771e-06, -1.5140087108671845e-08, 9.214909160927855e-10, 154168790181.56195, 151975095946.00134], [6.4442734160126695e-06, -1.5732644861037622e-12, -1.8036634758606428e-13, 5.508993685740796e-11, -7.27534017567909e-11, -2.1583737575101563e-15, 3.306758579127667e-16, 2.2271668826613973e-05, -1.8701423073554431e-06, -1.501078224172373e-08, 8.952907812465134e-10, 267883353895.00665, 158759045786.36343], [6.460391520361948e-06, -1.2647094709156108e-12, -1.7971415732486973e-13, 4.396430723625485e-11, -7.247266456377939e-11, -1.1373744765683215e-15, 3.658727722774004e-16, 2.7105518268805634e-05, -1.9663482803776534e-06, -1.6397993463300374e-08, 8.923803313149724e-10, 349965962553.9084, 297837273933.3269], [5.6272383047081095e-06, -1.5732644861037622e-12, -1.2571170147507106e-13, 5.534697362808701e-11, -2.3610413258218975e-10, -1.1709514644876058e-15, 3.2295817320330796e-16, 2.2314117324425535e-05, -1.8663649176622442e-06, -1.4769695480936238e-08, 9.134128217572173e-10, 393807734620.02893, 1450122303072.2456], [6.437914022666636e-06, -1.2546731037733632e-12, -1.7844406460041829e-13, 5.488975389250315e-11, -7.259445338393382e-11, -2.1597092009682793e-15, 3.3041861616205316e-16, 2.240369111953624e-05, -1.876360375320595e-06, -1.648586510957147e-08, 9.134128217572173e-10, 630890128752.3734, 431834854178.85406], [6.046575120541287e-06, -1.2764740103418866e-12, -1.746683186012092e-13, 5.109944355076077e-11, -2.520608616913497e-10, -1.0704525109919603e-15, 3.6772692838424905e-16, 2.971296945414015e-05, -1.951293357817624e-06, -1.4769695480936238e-08, 8.939102135383639e-10, 871857905030.9667, 2328286443290.7437], [6.051000675950963e-06, -1.2846825520511646e-12, -1.268060597488819e-13, 5.490952472465525e-11, -2.3244121922778247e-10, -2.1424540029363198e-15, 3.673980081076506e-16, 2.961326937497751e-05, -1.895367635724618e-06, -1.5034205062876655e-08, 9.16195585945909e-10, 1374938673042.5493, 4524615824537.332], 
[5.6149092148265474e-06, -1.4639678768975506e-12, -1.253161090730697e-13, 4.481233479664715e-11, -2.335516269047763e-10, -2.1416544930348844e-15, 3.3108330528832777e-16, 2.22837679272578e-05, -1.8681878215606722e-06, -1.528899727808779e-08, 8.573199342562181e-10, 1914602582873.603, 2013877892656.268], [6.101651991514008e-06, -1.5833077943313046e-12, -1.9703439809858206e-13, 5.500949944067544e-11, -7.256876412950873e-11, -1.0739249647595844e-15, 3.658727722774004e-16, 2.970517711660123e-05, -1.8738366196528042e-06, -1.522166132952199e-08, 9.123763139194573e-10, 3105022967535.493, 7589715261899.736], [7.169307360099383e-06, -1.475336624504327e-12, -2.0167346748799746e-13, 4.53859215469466e-11, -2.1795530264429259e-10, -1.209364174087727e-15, 3.179525403817121e-16, 2.248948490803903e-05, -1.9732992714201345e-06, -1.4769695480936238e-08, 8.472670825115021e-10, 3105580314530.341, 4622017117439.275]]
second1_gen= [[6.473615077297489e-06, -1.2416886913890308e-12, -1.7473505716030156e-13, 3.966285637236728e-11, -7.243488055496258e-11, -1.1645955168783485e-15, 3.1918479761370934e-16, 2.7255618460061466e-05, -1.912188850787629e-06, -1.6430064111592607e-08, 8.970550453733459e-10, 35685411688.23251, 231044368946.34586], [6.393923513974502e-06, -1.2418411778899226e-12, -1.7798884315456173e-13, 3.983180616117193e-11, -7.243742739542879e-11, -1.128236668058653e-15, 3.855466000081844e-16, 2.7200371659468664e-05, -1.9285560276423494e-06, -1.636514926725132e-08, 9.071692193685023e-10, 57865021002.9106, 360571654391.1672], [7.230454358781939e-06, -1.423600316370741e-12, -1.7526876652912844e-13, 5.484412599476033e-11, -7.222102668803471e-11, -1.1795054510279537e-15, 3.642469974043324e-16, 2.4721354631465055e-05, -1.7738362153245365e-06, -1.6042437181983083e-08, 8.601004856702239e-10, 60788722272.11295, 440230270157.01904], [6.435449388867622e-06, -1.2416886913890308e-12, -1.807074860305897e-13, 5.4624696474782334e-11, -7.299561923303083e-11, -1.1155657493946243e-15, 3.855466000081844e-16, 2.4639345261867096e-05, -1.92912357850029e-06, -1.4800406168095671e-08, 9.011563904603084e-10, 90541420172.20418, 503189560104.03455], [6.417695307686038e-06, -1.2339817339229541e-12, -1.7924803979756243e-13, 5.5902899343682586e-11, -7.217875877484109e-11, -1.120826019773443e-15, 3.8364837768074985e-16, 2.2074405673546407e-05, -1.904212437644655e-06, -1.509791791618086e-08, 8.960324081400173e-10, 91138056935.866, 156256693553.4698], [7.235432436183002e-06, -1.444519147741974e-12, -1.7273464723057338e-13, 5.517809418856912e-11, -2.5310572250093563e-10, -1.1658376910672744e-15, 3.3048095015500005e-16, 2.4812886502853343e-05, -1.964119169077712e-06, -1.4777953862585708e-08, 8.945619840357268e-10, 98015149423.40909, 125389712442.99564], [6.382295596647026e-06, -1.5683042319109065e-12, -1.271182130914441e-13, 3.9709881372590666e-11, -2.3411267641257417e-10, -1.1298867172210502e-15, 
3.273827033054119e-16, 2.71828464025051e-05, -1.86879521538149e-06, -1.6615697675064263e-08, 8.938783145101195e-10, 108132988244.55444, 600937075323.7117], [7.3000149385339486e-06, -1.4649443926376347e-12, -1.740251215699652e-13, 5.5040821609381877e-11, -2.5448794058714256e-10, -1.1729621402736547e-15, 3.321162280251396e-16, 2.492985953688089e-05, -1.95260325957056e-06, -1.4879723555310096e-08, 8.886352647229086e-10, 118040637271.1665, 119637343045.177], [5.595995170722691e-06, -1.5775800984465949e-12, -1.2531378473105398e-13, 5.5737478708430025e-11, -2.359264703422783e-10, -2.141274549861917e-15, 3.2670998922499434e-16, 2.2375793269713536e-05, -1.8912926681237391e-06, -1.5244852134327217e-08, 9.114608510796109e-10, 193706809398.06177, 145429438824.56485], [6.417695307686038e-06, -1.2390179448049186e-12, -2.0184675219747084e-13, 3.996761820973954e-11, -7.30077645678233e-11, -1.0733818300903034e-15, 3.6521589033170274e-16, 2.7380751148035565e-05, -1.901967051200766e-06, -1.6531476837456585e-08, 8.659462633971021e-10, 291714681643.4888, 219358626907.00577], [7.269087955666727e-06, -1.4398732474157131e-12, -1.745771866624504e-13, 5.5370858680922966e-11, -2.5212090845365535e-10, -1.1547640084684547e-15, 3.1826570991307717e-16, 2.4799848604697875e-05, -1.9802449310363633e-06, -1.4932011828861567e-08, 8.916225586049855e-10, 291814703950.912, 265497905413.09335], [5.9575073045674184e-06, -1.4591405170404072e-12, -1.7515686156504634e-13, 5.071091939607585e-11, -7.251972289899038e-11, -1.172163868062928e-15, 3.2003450301868095e-16, 2.236559796692659e-05, -1.964000257622103e-06, -1.461000086726312e-08, 8.924031273079037e-10, 441351014961.37744, 513124822279.29816], [7.118156558728498e-06, -1.4213484509322684e-12, -1.7594919642528414e-13, 5.502275447498347e-11, -2.359264703422783e-10, -2.146866081339977e-15, 3.3020925008057705e-16, 2.48800717576552e-05, -1.8740196054647742e-06, -1.4681760148497176e-08, 9.194043116452982e-10, 480601682287.2741, 2166349399584.3464], 
[6.435379358296727e-06, -1.449279705541305e-12, -1.791907685050265e-13, 4.013727926643595e-11, -2.561628978573389e-10, -1.1658376910672744e-15, 3.1916771926698506e-16, 2.706170262409588e-05, -1.9747493962051268e-06, -1.6529378614728517e-08, 8.945619840357268e-10, 480690251628.6576, 455217335045.56067], [7.273965294010602e-06, -1.4508582334938624e-12, -1.2640181562203036e-13, 5.1256890020829106e-11, -2.347526011960417e-10, -1.1573810914157072e-15, 3.313802025100971e-16, 2.5248996663846427e-05, -1.8890715225154116e-06, -1.4830513494585048e-08, 9.024560997678787e-10, 513022508534.7746, 1741282758378.8208], [7.171513003462397e-06, -1.4334443716578728e-12, -1.258745292341622e-13, 5.562080442549079e-11, -2.5310572250093563e-10, -2.177369178159867e-15, 3.269368594462498e-16, 2.5052523082312023e-05, -1.9593459141604013e-06, -1.4665768665138152e-08, 8.920318373308913e-10, 559251400205.1976, 313686240874.89294]]
third1_gen= [[6.428534934734018e-06, -1.2348251959432863e-12, -1.767418187059626e-13, 3.954772029523348e-11, -7.292041892016764e-11, -1.1216042005993232e-15, 3.8462974452187554e-16, 2.732021800880368e-05, -1.912188850787629e-06, -1.6465861899672315e-08, 8.953663972360121e-10, 35914970214.05617, 208658422545.5101], [6.449609175276781e-06, -1.2355212093166627e-12, -1.7892996139776768e-13, 3.978108705811362e-11, -7.260470610345522e-11, -1.128236668058653e-15, 3.8262320992212617e-16, 2.699492740612888e-05, -1.9285560276423494e-06, -1.6459368248390354e-08, 9.071692193685023e-10, 37667755025.66565, 260591174431.75333], [6.393923513974502e-06, -1.2329510175057565e-12, -1.7878217157136278e-13, 4.009121098742944e-11, -7.243742739542879e-11, -1.119215448440791e-15, 3.855466000081844e-16, 2.7170577516281446e-05, -1.946180426984478e-06, -1.6356719885598995e-08, 9.071692193685023e-10, 41822657912.61174, 187148082730.9518], [6.393923513974502e-06, -1.2418411778899226e-12, -1.7764720872488035e-13, 5.5839617178535e-11, -7.217875877484109e-11, -1.1285205693786809e-15, 3.8241419562917457e-16, 2.727322263242888e-05, -1.9285560276423494e-06, -1.6299569164241514e-08, 8.954758973117168e-10, 45658359101.85514, 143455126000.2526], [6.412748625088242e-06, -1.2418411778899226e-12, -1.7788474362949836e-13, 3.98996561577576e-11, -7.290920324596793e-11, -1.1258830930124426e-15, 3.8322709394594156e-16, 2.6978084672522227e-05, -1.9285560276423494e-06, -1.6212095851483947e-08, 9.06465374180439e-10, 61888825971.955795, 378668457219.4866], [7.2950079161541e-06, -1.423600316370741e-12, -1.8067111524974517e-13, 5.467528933636526e-11, -7.269174548770519e-11, -1.1131382577055909e-15, 3.642469974043324e-16, 2.442302310111588e-05, -1.9365154780516644e-06, -1.4736235919210341e-08, 9.02573445716291e-10, 72168008768.07632, 429565720321.34186], [7.277641363649251e-06, -1.4186237292635021e-12, -1.7672076654522444e-13, 5.4875348972838477e-11, -7.250728822785179e-11, -1.1805107762756462e-15, 
3.880180132520679e-16, 2.7230117388865188e-05, -1.79140018540739e-06, -1.6042437181983083e-08, 8.524740779894739e-10, 144497176198.74966, 733034177617.006], [6.435449388867622e-06, -1.2375432988348708e-12, -1.8114977137612309e-13, 3.9353291584632385e-11, -7.306938943468394e-11, -1.1645955168783485e-15, 3.887993677152085e-16, 2.4432920122355823e-05, -1.927081007099796e-06, -1.644170413651962e-08, 9.09149545755435e-10, 151124978488.96066, 169172823395.74277], [7.278147471012389e-06, -1.4279386093057266e-12, -1.7683419692117291e-13, 5.493758019518918e-11, -7.289146026177328e-11, -1.1733747472097884e-15, 3.675691109659462e-16, 2.4721354631465055e-05, -1.7638896999117907e-06, -1.588988736168235e-08, 8.632841256471107e-10, 202474467398.45615, 922092113586.5779], [7.177079530800026e-06, -1.234976832476029e-12, -1.7526876652912844e-13, 5.534254133122458e-11, -7.205830797649949e-11, -1.120826019773443e-15, 3.8364837768074985e-16, 2.2258192147086412e-05, -1.7878127478583311e-06, -1.620023857736605e-08, 8.601004856702239e-10, 213869103072.6637, 175609972725.89545], [6.350923506939188e-06, -1.2525603780194753e-12, -1.7993410193080307e-13, 5.465765498048408e-11, -7.243742739542879e-11, -1.1188147125437704e-15, 3.855466000081844e-16, 2.47790541156232e-05, -1.9163436765125797e-06, -1.4800406168095671e-08, 9.043461740243768e-10, 224990894591.97565, 940216435276.2135], [6.375685299492019e-06, -1.2470011129066444e-12, -1.7556981763399573e-13, 5.482994274294271e-11, -7.247391358991481e-11, -1.1737410455893592e-15, 3.8256427214483946e-16, 2.4747394888572957e-05, -1.921085601798487e-06, -1.655011267092608e-08, 9.011563904603084e-10, 242139334921.33466, 239644754200.97003], [6.474178960026375e-06, -1.436844524248817e-12, -1.766513283684079e-13, 3.940038642964773e-11, -7.181977887130175e-11, -1.1548751736666541e-15, 3.1745148598988346e-16, 2.707077658308786e-05, -1.92536072773705e-06, -1.6138736645669917e-08, 8.669699125562364e-10, 435950975348.6226, 363915964843.3034], 
[6.393923513974502e-06, -1.4269415936091027e-12, -1.7684911527276688e-13, 5.480211712359269e-11, -7.243742739542879e-11, -1.1795054510279537e-15, 3.8683254669914693e-16, 2.7200371659468664e-05, -1.925930700762681e-06, -1.643396668485197e-08, 8.601004856702239e-10, 840789439847.5613, 886246867017.2574], [6.5292806963971566e-06, -1.2521788644307235e-12, -1.752024719240228e-13, 5.432423395298522e-11, -7.243160061946103e-11, -1.1728842336075722e-15, 3.642469974043324e-16, 2.4721354631465055e-05, -1.9201275577069358e-06, -1.6042437181983083e-08, 8.613978338195112e-10, 1220087240914.9465, 1538404370735.8923], [7.222746286095911e-06, -1.4287928653696903e-12, -1.7798884315456173e-13, 5.47608522234827e-11, -7.177949793819456e-11, -1.1234835849356116e-15, 3.638627899273496e-16, 2.4725904181789833e-05, -1.7849753358990938e-06, -1.6004659818379623e-08, 9.095587982641099e-10, 1457214324700.6113, 3971854766728.4727]]
[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13, 5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15, 8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429, 145011267381.10236]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, 148817892429.6303]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536, 138194745977.8172]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13, 5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15, 8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045, 148499957167.59894]
[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13, 4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568, 138376625633.08905]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468, 147143586736.12967]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15, 8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, 148817892429.6303]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13, 5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15, 8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06, -1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952, 149819556305.94864]
[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13, 5.0376537605765665e-11, -1.7763084077799175e-10, -1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10, 108694336300.90585, 154375559012.27695]
[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13, 4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15, 8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06, -1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514, 195080915978.15582]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13, 4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15, 8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337, 143318140783.98648]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789, 160453198244.84198]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, 4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06, -1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114, 109895891048.79645]
[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13, 4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327, 122880053749.32047]
[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13, 4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06, -1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218, 130994741061.18477]
[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205, 148716985588.15564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098, 101545825010.15762]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15, 8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779, 101879284463.33914]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885, 102270797763.39908]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, 106305215455.77405]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, 4.7704075824842225e-11, -1.8975666267494283e-10, -1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05, -1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10, 166731944707.48343, 109962566902.69849]
[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15, 8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018, 111850971687.16727]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15, 8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794, 128488226222.4665]
[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15, 8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544, 172987399752.44284]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]
[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824, 100937635343.36494]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, 101220474756.5564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603, 101440046940.62292]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, 101479475091.5385]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353, 101522685052.87083]
[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158, 102059630396.96977]
[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06, -1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558, 102134941196.42899]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273, 102270797763.3992]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13, 4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435, 102270797763.39929]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15, 8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374, 102518032445.5969]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15, 8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432, 102577021916.3392]
[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, 106305215455.77405]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382, 112061347287.60056]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06, -1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097, 136457449593.06062]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06, -1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096, 160562679389.67618]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, 100125948657.42978]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744, 101215117638.35565]
[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, 101220474756.5564]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664, 101220474756.55742]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617, 101440046940.6675]
[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, 101479475091.5385]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997, 101479475091.5439]
[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06, -1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134, 101707557509.25955]
[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668, 101910116331.42278]
[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13, 4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892, 101942928295.47075]
[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095, 104790698646.6004]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8168585276282465e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10, 160649925757.17908, 106424978687.80653]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564, 106648081137.30634]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15, 8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595, 106784848298.00577]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962, 106918161793.97298]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13, 4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648, 117274357359.96004]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109, 118996909122.33968]
[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083, 125656067768.88814]
[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951, 191438895729.71088]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697, 99223644222.007]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, 100125948657.42978]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534, 100180028793.61896]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686, 100223589650.82378]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13, 4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221, 100558408593.70113]
[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8877585360256924e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 193351738763.71564, 100949387586.23102]
[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13, 4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133, 101220474756.86967]
[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13, 4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612, 101440046940.05927]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15, 8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655, 101467426817.57397]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.8327983670281894e-11, -1.4675478300173032e-10, -1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10, 193392923341.53983, 101900620617.14302]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13, 4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06, -1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602, 103131734300.077]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997, 103180541968.40872]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538, 103805616436.34537]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15, 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226, 106843736334.12831]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06, -1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826, 110030788135.34956]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15, 8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885, 111006224451.55664]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314, 113087422800.04585]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723, 115101067854.69138]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216, 126984206927.84627]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858, 98138013390.26245]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816, 98829512345.71414]
[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13, 4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15, 8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157, 98891303611.42876]
[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723, 99638222233.03885]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506, 99962477826.90034]
[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526, 100180028793.6191]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237, 100290100926.3771]
[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853, 100447140164.3877]
[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13, 4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15, 8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385, 100872818268.9527]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177, 101076246798.6337]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375, 101683114493.3993]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255, 105699410466.83022]
[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06, -1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207, 105861289429.36061]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723, 106068644665.40553]
[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06, -1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741, 109638154986.2024]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15, 8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976, 114344342719.97507]
[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13, 4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15, 8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632, 115101067854.31332]
[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339, 120797794814.05704]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072, 133721716481.47603]
[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489, 147005409641.27127]
[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15, 9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978, 156722470654.13324]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, 4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868, 167972224844.19583]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, 4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635, 167972224843.92523]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, 192873830899.82352]
[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13, 4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952, 160840990423.46024]
[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13, 4.6589669053151376e-11, -1.4986345441105813e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 96467208837.94556, 179586543004.98117]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06, -1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293, 187118262382.8758]
[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06, -1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736, 187415567631.77402]
[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, 192873830899.82352]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.6154548476823616e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.02731, 192873830899.82806]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13, 4.9793760275117476e-11, -2.0780774158604122e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10, 170388218306.66492, 168925348515.4128]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13, 4.9793760275117476e-11, -2.0780774158604122e-10, -2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10, 191821821495.1242, 158798904598.69617]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985, 163375067226.8736]
[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084, 152444791757.7255]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, 4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15, 7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06, -1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325, 153164597685.87036]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826, 155849166742.8801]
[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13, 5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673, 161472427331.15216]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16, 2.0053347897812537e-05, -1.7639524821935923e-06, -1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852, 175966043507.07343]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15, 7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06, -1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572, 184829802626.36642]
[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588, 189416231139.84406]
[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13, 4.9793760275117476e-11, -2.0772853669541976e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10, 160631139543.06137, 122019730569.7476]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116, 128597452665.91768]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.728626586100963e-10, 100156348461.68698, 161778485371.36353]
[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272, 171303112707.4717]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 4.9793760275117476e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 97245352689.07887, 174341101475.58182]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16, 2.0053347897812537e-05, -1.7639524821935923e-06, -1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355, 185221791801.95062]
[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, 5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936, 189416231139.85312]
[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125, 190153350507.14474]
[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13, 4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279, 197738317572.1617]
[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11, -1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16, 2.0200374650352852e-05, -1.7758673160173464e-06, -1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334, 119035825863.27417]
[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604, 120144468135.82727]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346, 120359956158.03543]
[0.0, -1.1984578022968498e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06, -1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575, 120995758664.39177]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, 4.9967768219433575e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10, 151029089477.88403, 121221447183.73479]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]
[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212, 129257349906.46594]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06, -1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574, 129372470770.49553]
[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987, 132029509845.4832]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735, 137741348069.72827]
[0.0, -1.2344709098355012e-12, -2.090479539659853e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15, 7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327, 143862344272.2216]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, 143868003797.30536]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13, 4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457, 151496866956.06183]
[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13, 5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617, 154679332976.7693]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.77498, 158587944243.3901]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]
[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13, 4.9793760275117476e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05, -1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 101036412554.48618, 178952195751.12357]
[0.0, -1.2071709641632366e-12, -2.088572649745598e-13, 4.9793760275117476e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10, 101115281125.52821, 181312381109.07834]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]
[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15, 7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515, 193403737351.61066]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06, -1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167, 199093039398.6542]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15, 7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637, 120593643708.66519]
[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13, 5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895, 121269083493.68436]
[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16, 1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.711551918674385e-10, 168378423128.42877, 121439949900.90005]
[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15, 7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06, -1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705, 122027384226.92]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13, 4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15, 7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388, 122750625888.09634]
[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13, 5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15, 7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886, 122935226427.98189]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545, 131702579310.68652]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863, 133211383937.09729]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263, 143105235055.608]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511, 143860615432.91934]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999, 145092770865.8836]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487, 155477031697.76462]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15, 8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412, 158587944243.89005]
[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 99132279868.34593, 171185572417.85907]
[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461, 180430143233.58368]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15, 7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06, -1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496, 183449646874.34637]
[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935, 191076754457.2524]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15, 7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06, -1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465, 194275355409.06598]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08, 8.760544278271184e-10, 86984982238.58047, 194967876303.00238]
[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182, 198112832281.90573]
[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067, 115813093887.0164]
[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06, -1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041, 118508631814.89664]
[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395, 119478476003.54858]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902, 119746195767.88297]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686, 120002114057.9749]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]
[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245, 124495463707.0261]
[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13, 5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236, 127226107362.62663]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766, 128048566261.66084]
[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06, -1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375, 129146670219.88675]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287, 132556338910.10567]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873, 132653030892.18918]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06, -1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999, 140436120253.29218]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377, 143105235055.60883]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417, 143860615432.91846]
[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, 143868003797.30536]
[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13, 5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966, 144269444777.14786]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957, 145085114899.6645]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15, 7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602, 145085114900.12366]
[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217, 145590447784.79443]
[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044, 153694065180.84283]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13, 5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157, 154263245256.49524]
[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876, 164710456294.5225]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035, 170174200265.44513]
|
flexible
|
{
"blob_id": "bdf3cb1830021b10d6c8966b3341fd9297d9a371",
"index": 2045,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13, \n 5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15,\n 8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06, \n -1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429, \n 145011267381.10236]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, \n 148817892429.6303]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536, \n 138194745977.8172]\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13, \n 5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15,\n 8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,\n -1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045, \n 148499957167.59894]\n[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13, \n 4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568, \n 138376625633.08905]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,\n -1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468, \n 147143586736.12967]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 
4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,\n 8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, \n 148817892429.6303]\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13, \n 5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15,\n 8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06,\n -1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952, \n 149819556305.94864]\n[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13, \n 5.0376537605765665e-11, -1.7763084077799175e-10, -\n 1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -\n 2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10, \n 108694336300.90585, 154375559012.27695]\n[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13, \n 4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15,\n 8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06,\n -1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514, \n 195080915978.15582]\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13,\n 4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15,\n 8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337, \n 143318140783.98648]\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789, \n 160453198244.84198]\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, \n 4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 
2.7122228639393258e-05, -1.8099079507631247e-06,\n -1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114, \n 109895891048.79645]\n[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13,\n 4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15, \n 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327, \n 122880053749.32047]\n[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13, \n 4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06, \n -1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218, \n 130994741061.18477]\n[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205, \n 148716985588.15564]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098, \n 101545825010.15762]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15,\n 8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779, \n 101879284463.33914]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 
9.087619653117874e-10, 178582869424.88885, \n 102270797763.39908]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, \n 106305215455.77405]\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, \n 4.7704075824842225e-11, -1.8975666267494283e-10, -\n 1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05, \n -1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10,\n 166731944707.48343, 109962566902.69849]\n[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15,\n 8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -\n 1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018, \n 111850971687.16727]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15,\n 8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794, \n 128488226222.4665]\n[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15,\n 8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -\n 1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544, \n 172987399752.44284]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, 
-1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824, \n 100937635343.36494]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, \n 101220474756.5564]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603, \n 101440046940.62292]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, \n 101479475091.5385]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353, \n 101522685052.87083]\n[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158, \n 102059630396.96977]\n[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, 
-1.475667375214216e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06,\n -1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558, \n 102134941196.42899]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273, \n 102270797763.3992]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13, \n 4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435, \n 102270797763.39929]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15,\n 8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374, \n 102518032445.5969]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15,\n 8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432, \n 102577021916.3392]\n[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, \n 106305215455.77405]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 
2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382, \n 112061347287.60056]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06,\n -1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097, \n 136457449593.06062]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096, \n 160562679389.67618]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, \n 100125948657.42978]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744, \n 101215117638.35565]\n[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,\n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 
197397120635.11142, \n 101220474756.5564]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664, \n 101220474756.55742]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617, \n 101440046940.6675]\n[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, \n 101479475091.5385]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997, \n 101479475091.5439]\n[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134, \n 101707557509.25955]\n[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668, \n 101910116331.42278]\n[-2.0926038768787875e-10, -1.3481496678499343e-12, 
-1.9612804716494087e-13,\n 4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892, \n 101942928295.47075]\n[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095, \n 104790698646.6004]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8168585276282465e-11, -1.4675478300173032e-10, -\n 1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, \n -1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10,\n 160649925757.17908, 106424978687.80653]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564, \n 106648081137.30634]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15, \n 8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595, \n 106784848298.00577]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962, \n 106918161793.97298]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13, \n 4.85631967683728e-11, -1.4675478300173032e-10, 
-1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648, \n 117274357359.96004]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13, \n 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109, \n 118996909122.33968]\n[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083, \n 125656067768.88814]\n[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951, \n 191438895729.71088]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697, \n 99223644222.007]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n 
-1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, \n 100125948657.42978]\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534, \n 100180028793.61896]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686, \n 100223589650.82378]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13, \n 4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15, \n 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221, \n 100558408593.70113]\n[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8877585360256924e-11, -1.4675478300173032e-10, -\n 1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05, \n -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10,\n 193351738763.71564, 100949387586.23102]\n[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13, \n 4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133, \n 101220474756.86967]\n[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13, \n 4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612, \n 
101440046940.05927]\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15, \n 8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655, \n 101467426817.57397]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.8327983670281894e-11, -1.4675478300173032e-10, -\n 1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -\n 1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10,\n 193392923341.53983, 101900620617.14302]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13, \n 4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602, \n 103131734300.077]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997, \n 103180541968.40872]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538, \n 103805616436.34537]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15, \n 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -\n 1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226, \n 106843736334.12831]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13, \n 
4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826, \n 110030788135.34956]\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15, \n 8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885, \n 111006224451.55664]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, \n -1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314, \n 113087422800.04585]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723, \n 115101067854.69138]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216, \n 126984206927.84627]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858, \n 98138013390.26245]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 
8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816, \n 98829512345.71414]\n[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13, \n 4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15,\n 8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157, \n 98891303611.42876]\n[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723, \n 99638222233.03885]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506, \n 99962477826.90034]\n[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526, \n 100180028793.6191]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237, \n 100290100926.3771]\n[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 
9.087619653117874e-10, 193159834117.98853, \n 100447140164.3877]\n[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13, \n 4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15, \n 8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385, \n 100872818268.9527]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177, \n 101076246798.6337]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06, \n -1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375, \n 101683114493.3993]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15, \n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255, \n 105699410466.83022]\n[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13, \n 4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207, \n 105861289429.36061]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15,\n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723, \n 106068644665.40553]\n[-8.372802930516975e-10, 
-1.3292316984383345e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06, \n -1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741, \n 109638154986.2024]\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15,\n 8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976, \n 114344342719.97507]\n[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13, \n 4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15,\n 8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632, \n 115101067854.31332]\n[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339, \n 120797794814.05704]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -\n 1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072, \n 133721716481.47603]\n[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -\n 1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489, \n 147005409641.27127]\n[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, 
-2.1792166675464865e-15,\n 9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978, \n 156722470654.13324]\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, \n 4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868, \n 167972224844.19583]\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, \n 4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635, \n 167972224843.92523]\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, \n 192873830899.82352]\n[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13, \n 4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952, \n 160840990423.46024]\n[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13, \n 4.6589669053151376e-11, -1.4986345441105813e-10, -\n 2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -\n 1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10,\n 96467208837.94556, 179586543004.98117]\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, 
-1.8218396850604304e-06,\n -1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293, \n 187118262382.8758]\n[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06, \n -1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736, \n 187415567631.77402]\n[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, \n 192873830899.82352]\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, \n 4.6154548476823616e-11, -1.8724359625458014e-10, -\n 2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05, \n -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10,\n 117723326371.02731, 192873830899.82806]\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13, \n 4.9793760275117476e-11, -2.0780774158604122e-10, -\n 2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, \n -1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10,\n 170388218306.66492, 168925348515.4128]\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13, \n 4.9793760275117476e-11, -2.0780774158604122e-10, -\n 2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10,\n 191821821495.1242, 158798904598.69617]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,\n 8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 
177069079234.4985, \n 163375067226.8736]\n[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084, \n 152444791757.7255]\n[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, \n 4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15,\n 7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06,\n -1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325, \n 153164597685.87036]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826, \n 155849166742.8801]\n[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13, \n 5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673, \n 161472427331.15216]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16,\n 2.0053347897812537e-05, -1.7639524821935923e-06, -\n 1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852, \n 175966043507.07343]\n[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15,\n 7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06,\n -1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572, \n 184829802626.36642]\n[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, 
-1.7420072583381303e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588, \n 189416231139.84406]\n[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 4.9793760275117476e-11, -2.0772853669541976e-10, -\n 1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10,\n 160631139543.06137, 122019730569.7476]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116, \n 128597452665.91768]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,\n -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,\n 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16,\n 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08,\n 8.728626586100963e-10, 100156348461.68698, 161778485371.36353]\n[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5091093694835327e-08, 8.760544278271184e-10, 
100072993312.46272, \n 171303112707.4717]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 4.9793760275117476e-11, -1.7352085678160897e-10, -\n 1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05, \n -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10,\n 97245352689.07887, 174341101475.58182]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 92503635735.71886, 182996786041.40976]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16,\n 2.0053347897812537e-05, -1.7639524821935923e-06, -\n 1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355, \n 185221791801.95062]\n[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936, \n 189416231139.85312]\n[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125, \n 190153350507.14474]\n[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13, \n 4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279, \n 197738317572.1617]\n[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 
5.0102593857564815e-11,\n -1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16,\n 2.0200374650352852e-05, -1.7758673160173464e-06, -\n 1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334, \n 119035825863.27417]\n[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13, \n 4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604, \n 120144468135.82727]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -\n 1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346, \n 120359956158.03543]\n[0.0, -1.1984578022968498e-12, -2.094909506024221e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06,\n -1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575, \n 120995758664.39177]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, \n 4.9967768219433575e-11, -1.7352085678160897e-10, -\n 1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10,\n 151029089477.88403, 121221447183.73479]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 
1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212, \n 129257349906.46594]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574, \n 129372470770.49553]\n[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11, \n -1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16, \n 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,\n 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13,\n 5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987, \n 132029509845.4832]\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15,\n 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735, \n 137741348069.72827]\n[0.0, -1.2344709098355012e-12, -2.090479539659853e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,\n 7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -\n 1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327, \n 143862344272.2216]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, \n 
143868003797.30536]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13, \n 4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457, \n 151496866956.06183]\n[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617, \n 154679332976.7693]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,\n -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.760544278271184e-10, 107979663117.77498, 158587944243.3901]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13, \n 4.9793760275117476e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05, \n -1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10,\n 101036412554.48618, 178952195751.12357]\n[0.0, -1.2071709641632366e-12, -2.088572649745598e-13, \n 4.9793760275117476e-11, 
-1.7352085678160897e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, \n -1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10,\n 101115281125.52821, 181312381109.07834]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 92503635735.71886, 182996786041.40976]\n[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 92503635735.71886, 182996786041.40976]\n[0.0, -1.223723210207519e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15,\n 7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06, \n -1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515, \n 193403737351.61066]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06,\n -1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167, \n 199093039398.6542]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,\n 7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -\n 1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637, \n 120593643708.66519]\n[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13, \n 5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, 
-1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895, \n 121269083493.68436]\n[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11, \n -1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16,\n 1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.711551918674385e-10, 168378423128.42877, 121439949900.90005]\n[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15, \n 7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06,\n -1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705, \n 122027384226.92]\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13, \n 4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15,\n 7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -\n 1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388, \n 122750625888.09634]\n[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13, \n 5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886, \n 122935226427.98189]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545, \n 
131702579310.68652]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863, \n 133211383937.09729]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263, \n 143105235055.608]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511, \n 143860615432.91934]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999, \n 145092770865.8836]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487, \n 155477031697.76462]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15, \n 8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412, \n 158587944243.89005]\n[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11, \n -1.7352085678160897e-10, 
-1.82610373802557e-15, 7.430575474541962e-16, \n 1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.760544278271184e-10, 99132279868.34593, 171185572417.85907]\n[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, \n 8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461, \n 180430143233.58368]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15,\n 7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06,\n -1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496, \n 183449646874.34637]\n[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13, \n 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15,\n 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935, \n 191076754457.2524]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,\n 7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06, \n -1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465, \n 194275355409.06598]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11,\n -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08,\n 8.760544278271184e-10, 86984982238.58047, 194967876303.00238]\n[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, \n 7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.4682044872577598e-08, 8.724478065416361e-10, 
82147238279.93182, \n 198112832281.90573]\n[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067, \n 115813093887.0164]\n[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06,\n -1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041, \n 118508631814.89664]\n[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395, \n 119478476003.54858]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902, \n 119746195767.88297]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686, \n 120002114057.9749]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.2152115305769157e-10, -1.1981340041661674e-12, 
-2.0952905567462806e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245, \n 124495463707.0261]\n[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13, \n 5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236, \n 127226107362.62663]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766, \n 128048566261.66084]\n[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15, \n 7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06, \n -1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375, \n 129146670219.88675]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287, \n 132556338910.10567]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873, \n 132653030892.18918]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 
7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06, \n -1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999, \n 140436120253.29218]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377, \n 143105235055.60883]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417, \n 143860615432.91846]\n[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, \n 143868003797.30536]\n[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13, \n 5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966, \n 144269444777.14786]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957, \n 145085114899.6645]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15,\n 7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 
8.750599822793858e-10, 118220156709.04602, \n 145085114900.12366]\n[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217, \n 145590447784.79443]\n[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044, \n 153694065180.84283]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13, \n 5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15,\n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157, \n 154263245256.49524]\n[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876, \n 164710456294.5225]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035, \n 170174200265.44513]\n",
"step-3": "one = [7.236287049225701e-06, -1.445911565527231e-12, -\n 1.7498772740084537e-13, 5.109944355076077e-11, -2.5430545472048434e-10,\n -1.1709514644876058e-15, 3.210132219509301e-16, 2.502027767038304e-05, \n -1.975229899156637e-06, -1.4769695480936238e-08, 8.945619840357268e-10,\n 135323228000.64511, 130464457208.5385]\ntwo = [6.101651991514008e-06, -1.2764740103418866e-12, -\n 1.9703439809858206e-13, 4.396430723625485e-11, -7.256876412950873e-11, \n -1.0739249647595844e-15, 3.658727722774004e-16, 2.9622074287767617e-05,\n -1.9615179204309246e-06, -1.518516920005905e-08, 8.601004856702239e-10,\n 194360719320.3122, 75684271432.82758]\nthree = [6.4442734160126695e-06, -1.2463732938819767e-12, -\n 1.7912928652160854e-13, 3.990379556815055e-11, -7.256876412950873e-11, \n -1.128505986956859e-15, 3.855466000081844e-16, 2.7105518268805634e-05, \n -1.918022677712299e-06, -1.648586510957147e-08, 8.952907812465134e-10, \n 40874176708.45886, 129961018217.7445]\nfour = [5.591985036569838e-06, -1.5732644861037622e-12, -\n 1.2586540738798186e-13, 5.508993685740796e-11, -2.345347836605763e-10, \n -2.1583737575101563e-15, 3.315525502908504e-16, 2.240369111953624e-05, \n -1.8808495402864136e-06, -1.5154818034574072e-08, 9.134128217572173e-10,\n 95538034865.65512, 192689393537.75766]\nfive = [5.9877501684316964e-06, -1.4725222964411265e-12, -\n 2.0184675219747084e-13, 4.503520441436847e-11, -2.195719309752964e-10, \n -1.1996862422718706e-15, 3.172649531291829e-16, 2.235294071412983e-05, \n -1.7673862518012629e-06, -1.593810591566234e-08, 8.495479067416047e-10,\n 172629547544.72174, 121012464101.10771]\nsix = [6.525636151737385e-10, -1.5516831882387681e-12, -\n 1.7065883936338436e-13, 4.6265959327559024e-11, -2.669670220497726e-10,\n -1.0739249647595844e-15, 9.085513864943156e-16, 2.5963751617497687e-05,\n -1.9757021060346727e-06, -1.5031696163247857e-08, 8.945619840357268e-10,\n 99871865434.22476, 123933224114.80229]\nfirst1_gen = [[6.417695307686038e-06, 
-1.2416886913890308e-12, -\n 1.791907685050265e-13, 3.983180616117193e-11, -7.243488055496258e-11, -\n 1.1211433897576025e-15, 3.855466000081844e-16, 2.7255618460061466e-05, \n -1.917823676019374e-06, -1.6515339421288782e-08, 9.011563904603084e-10,\n 37866240406.859344, 251532289608.81], [5.974092884160685e-06, -\n 1.4591405170404072e-12, -2.0184675219747084e-13, 4.3821744446480515e-11,\n -7.22093644433135e-11, -1.0712173220027044e-15, 3.65758224365464e-16, \n 2.235294071412983e-05, -1.763797302814154e-06, -1.6059311052756668e-08,\n 8.601004856702239e-10, 50907349656.8246, 117645129547.73723], [\n 7.171513003462397e-06, -1.4334443716578728e-12, -1.749514610735409e-13,\n 5.509823004788858e-11, -2.5310572250093563e-10, -1.1729621402736547e-15,\n 3.321162280251396e-16, 2.4812886502853343e-05, -1.964119169077712e-06, \n -1.4799846596325615e-08, 8.965548334484032e-10, 85071583311.774, \n 128667385131.30013], [7.3000149385339486e-06, -1.4508582334938624e-12, \n -1.7446896418754742e-13, 5.109944355076077e-11, -2.5448794058714256e-10,\n -1.1658376910672744e-15, 3.1827015830354867e-16, 2.502027767038304e-05,\n -1.9664311146400523e-06, -1.4730561693079958e-08, 8.945619840357268e-10,\n 88113858040.47986, 127558862768.52084], [5.581899283069486e-06, -\n 1.5683042319109065e-12, -1.2586540738798186e-13, 5.535493146365402e-11,\n -2.359264703422783e-10, -2.1583737575101563e-15, 3.2921934547988314e-16,\n 2.2287538734129395e-05, -1.8740196054647742e-06, -\n 1.5117323048065992e-08, 9.114608510796109e-10, 90926368846.81926, \n 202187413440.1054], [7.283321725975412e-06, -1.4356567410151954e-12, -\n 1.7340660013452496e-13, 5.090884822547887e-11, -2.5483963758954753e-10,\n -1.139281753854116e-15, 3.1970242364315826e-16, 2.7105518268805634e-05,\n -1.963160298901409e-06, -1.4681586301228543e-08, 8.916460477308206e-10,\n 142505061534.36484, 476063714570.38367], [5.591985036569838e-06, -\n 1.582675728169255e-12, -1.7359285477580936e-13, 5.508993685740796e-11, \n -2.5320893657294154e-10, 
-2.1583737575101563e-15, 3.210132219509301e-16,\n 2.511654073479438e-05, -1.965555797894771e-06, -1.5140087108671845e-08,\n 9.214909160927855e-10, 154168790181.56195, 151975095946.00134], [\n 6.4442734160126695e-06, -1.5732644861037622e-12, -\n 1.8036634758606428e-13, 5.508993685740796e-11, -7.27534017567909e-11, -\n 2.1583737575101563e-15, 3.306758579127667e-16, 2.2271668826613973e-05, \n -1.8701423073554431e-06, -1.501078224172373e-08, 8.952907812465134e-10,\n 267883353895.00665, 158759045786.36343], [6.460391520361948e-06, -\n 1.2647094709156108e-12, -1.7971415732486973e-13, 4.396430723625485e-11,\n -7.247266456377939e-11, -1.1373744765683215e-15, 3.658727722774004e-16,\n 2.7105518268805634e-05, -1.9663482803776534e-06, -\n 1.6397993463300374e-08, 8.923803313149724e-10, 349965962553.9084, \n 297837273933.3269], [5.6272383047081095e-06, -1.5732644861037622e-12, -\n 1.2571170147507106e-13, 5.534697362808701e-11, -2.3610413258218975e-10,\n -1.1709514644876058e-15, 3.2295817320330796e-16, 2.2314117324425535e-05,\n -1.8663649176622442e-06, -1.4769695480936238e-08, 9.134128217572173e-10,\n 393807734620.02893, 1450122303072.2456], [6.437914022666636e-06, -\n 1.2546731037733632e-12, -1.7844406460041829e-13, 5.488975389250315e-11,\n -7.259445338393382e-11, -2.1597092009682793e-15, 3.3041861616205316e-16,\n 2.240369111953624e-05, -1.876360375320595e-06, -1.648586510957147e-08, \n 9.134128217572173e-10, 630890128752.3734, 431834854178.85406], [\n 6.046575120541287e-06, -1.2764740103418866e-12, -1.746683186012092e-13,\n 5.109944355076077e-11, -2.520608616913497e-10, -1.0704525109919603e-15,\n 3.6772692838424905e-16, 2.971296945414015e-05, -1.951293357817624e-06, \n -1.4769695480936238e-08, 8.939102135383639e-10, 871857905030.9667, \n 2328286443290.7437], [6.051000675950963e-06, -1.2846825520511646e-12, -\n 1.268060597488819e-13, 5.490952472465525e-11, -2.3244121922778247e-10, \n -2.1424540029363198e-15, 3.673980081076506e-16, 2.961326937497751e-05, \n 
-1.895367635724618e-06, -1.5034205062876655e-08, 9.16195585945909e-10, \n 1374938673042.5493, 4524615824537.332], [5.6149092148265474e-06, -\n 1.4639678768975506e-12, -1.253161090730697e-13, 4.481233479664715e-11, \n -2.335516269047763e-10, -2.1416544930348844e-15, 3.3108330528832777e-16,\n 2.22837679272578e-05, -1.8681878215606722e-06, -1.528899727808779e-08, \n 8.573199342562181e-10, 1914602582873.603, 2013877892656.268], [\n 6.101651991514008e-06, -1.5833077943313046e-12, -1.9703439809858206e-13,\n 5.500949944067544e-11, -7.256876412950873e-11, -1.0739249647595844e-15,\n 3.658727722774004e-16, 2.970517711660123e-05, -1.8738366196528042e-06, \n -1.522166132952199e-08, 9.123763139194573e-10, 3105022967535.493, \n 7589715261899.736], [7.169307360099383e-06, -1.475336624504327e-12, -\n 2.0167346748799746e-13, 4.53859215469466e-11, -2.1795530264429259e-10, \n -1.209364174087727e-15, 3.179525403817121e-16, 2.248948490803903e-05, -\n 1.9732992714201345e-06, -1.4769695480936238e-08, 8.472670825115021e-10,\n 3105580314530.341, 4622017117439.275]]\nsecond1_gen = [[6.473615077297489e-06, -1.2416886913890308e-12, -\n 1.7473505716030156e-13, 3.966285637236728e-11, -7.243488055496258e-11, \n -1.1645955168783485e-15, 3.1918479761370934e-16, 2.7255618460061466e-05,\n -1.912188850787629e-06, -1.6430064111592607e-08, 8.970550453733459e-10,\n 35685411688.23251, 231044368946.34586], [6.393923513974502e-06, -\n 1.2418411778899226e-12, -1.7798884315456173e-13, 3.983180616117193e-11,\n -7.243742739542879e-11, -1.128236668058653e-15, 3.855466000081844e-16, \n 2.7200371659468664e-05, -1.9285560276423494e-06, -1.636514926725132e-08,\n 9.071692193685023e-10, 57865021002.9106, 360571654391.1672], [\n 7.230454358781939e-06, -1.423600316370741e-12, -1.7526876652912844e-13,\n 5.484412599476033e-11, -7.222102668803471e-11, -1.1795054510279537e-15,\n 3.642469974043324e-16, 2.4721354631465055e-05, -1.7738362153245365e-06,\n -1.6042437181983083e-08, 8.601004856702239e-10, 60788722272.11295, \n 
440230270157.01904], [6.435449388867622e-06, -1.2416886913890308e-12, -\n 1.807074860305897e-13, 5.4624696474782334e-11, -7.299561923303083e-11, \n -1.1155657493946243e-15, 3.855466000081844e-16, 2.4639345261867096e-05,\n -1.92912357850029e-06, -1.4800406168095671e-08, 9.011563904603084e-10, \n 90541420172.20418, 503189560104.03455], [6.417695307686038e-06, -\n 1.2339817339229541e-12, -1.7924803979756243e-13, 5.5902899343682586e-11,\n -7.217875877484109e-11, -1.120826019773443e-15, 3.8364837768074985e-16,\n 2.2074405673546407e-05, -1.904212437644655e-06, -1.509791791618086e-08,\n 8.960324081400173e-10, 91138056935.866, 156256693553.4698], [\n 7.235432436183002e-06, -1.444519147741974e-12, -1.7273464723057338e-13,\n 5.517809418856912e-11, -2.5310572250093563e-10, -1.1658376910672744e-15,\n 3.3048095015500005e-16, 2.4812886502853343e-05, -1.964119169077712e-06,\n -1.4777953862585708e-08, 8.945619840357268e-10, 98015149423.40909, \n 125389712442.99564], [6.382295596647026e-06, -1.5683042319109065e-12, -\n 1.271182130914441e-13, 3.9709881372590666e-11, -2.3411267641257417e-10,\n -1.1298867172210502e-15, 3.273827033054119e-16, 2.71828464025051e-05, -\n 1.86879521538149e-06, -1.6615697675064263e-08, 8.938783145101195e-10, \n 108132988244.55444, 600937075323.7117], [7.3000149385339486e-06, -\n 1.4649443926376347e-12, -1.740251215699652e-13, 5.5040821609381877e-11,\n -2.5448794058714256e-10, -1.1729621402736547e-15, 3.321162280251396e-16,\n 2.492985953688089e-05, -1.95260325957056e-06, -1.4879723555310096e-08, \n 8.886352647229086e-10, 118040637271.1665, 119637343045.177], [\n 5.595995170722691e-06, -1.5775800984465949e-12, -1.2531378473105398e-13,\n 5.5737478708430025e-11, -2.359264703422783e-10, -2.141274549861917e-15,\n 3.2670998922499434e-16, 2.2375793269713536e-05, -1.8912926681237391e-06,\n -1.5244852134327217e-08, 9.114608510796109e-10, 193706809398.06177, \n 145429438824.56485], [6.417695307686038e-06, -1.2390179448049186e-12, -\n 2.0184675219747084e-13, 
3.996761820973954e-11, -7.30077645678233e-11, -\n 1.0733818300903034e-15, 3.6521589033170274e-16, 2.7380751148035565e-05,\n -1.901967051200766e-06, -1.6531476837456585e-08, 8.659462633971021e-10,\n 291714681643.4888, 219358626907.00577], [7.269087955666727e-06, -\n 1.4398732474157131e-12, -1.745771866624504e-13, 5.5370858680922966e-11,\n -2.5212090845365535e-10, -1.1547640084684547e-15, \n 3.1826570991307717e-16, 2.4799848604697875e-05, -1.9802449310363633e-06,\n -1.4932011828861567e-08, 8.916225586049855e-10, 291814703950.912, \n 265497905413.09335], [5.9575073045674184e-06, -1.4591405170404072e-12, \n -1.7515686156504634e-13, 5.071091939607585e-11, -7.251972289899038e-11,\n -1.172163868062928e-15, 3.2003450301868095e-16, 2.236559796692659e-05, \n -1.964000257622103e-06, -1.461000086726312e-08, 8.924031273079037e-10, \n 441351014961.37744, 513124822279.29816], [7.118156558728498e-06, -\n 1.4213484509322684e-12, -1.7594919642528414e-13, 5.502275447498347e-11,\n -2.359264703422783e-10, -2.146866081339977e-15, 3.3020925008057705e-16,\n 2.48800717576552e-05, -1.8740196054647742e-06, -1.4681760148497176e-08,\n 9.194043116452982e-10, 480601682287.2741, 2166349399584.3464], [\n 6.435379358296727e-06, -1.449279705541305e-12, -1.791907685050265e-13, \n 4.013727926643595e-11, -2.561628978573389e-10, -1.1658376910672744e-15,\n 3.1916771926698506e-16, 2.706170262409588e-05, -1.9747493962051268e-06,\n -1.6529378614728517e-08, 8.945619840357268e-10, 480690251628.6576, \n 455217335045.56067], [7.273965294010602e-06, -1.4508582334938624e-12, -\n 1.2640181562203036e-13, 5.1256890020829106e-11, -2.347526011960417e-10,\n -1.1573810914157072e-15, 3.313802025100971e-16, 2.5248996663846427e-05,\n -1.8890715225154116e-06, -1.4830513494585048e-08, 9.024560997678787e-10,\n 513022508534.7746, 1741282758378.8208], [7.171513003462397e-06, -\n 1.4334443716578728e-12, -1.258745292341622e-13, 5.562080442549079e-11, \n -2.5310572250093563e-10, -2.177369178159867e-15, 3.269368594462498e-16,\n 
2.5052523082312023e-05, -1.9593459141604013e-06, -\n 1.4665768665138152e-08, 8.920318373308913e-10, 559251400205.1976, \n 313686240874.89294]]\nthird1_gen = [[6.428534934734018e-06, -1.2348251959432863e-12, -\n 1.767418187059626e-13, 3.954772029523348e-11, -7.292041892016764e-11, -\n 1.1216042005993232e-15, 3.8462974452187554e-16, 2.732021800880368e-05, \n -1.912188850787629e-06, -1.6465861899672315e-08, 8.953663972360121e-10,\n 35914970214.05617, 208658422545.5101], [6.449609175276781e-06, -\n 1.2355212093166627e-12, -1.7892996139776768e-13, 3.978108705811362e-11,\n -7.260470610345522e-11, -1.128236668058653e-15, 3.8262320992212617e-16,\n 2.699492740612888e-05, -1.9285560276423494e-06, -1.6459368248390354e-08,\n 9.071692193685023e-10, 37667755025.66565, 260591174431.75333], [\n 6.393923513974502e-06, -1.2329510175057565e-12, -1.7878217157136278e-13,\n 4.009121098742944e-11, -7.243742739542879e-11, -1.119215448440791e-15, \n 3.855466000081844e-16, 2.7170577516281446e-05, -1.946180426984478e-06, \n -1.6356719885598995e-08, 9.071692193685023e-10, 41822657912.61174, \n 187148082730.9518], [6.393923513974502e-06, -1.2418411778899226e-12, -\n 1.7764720872488035e-13, 5.5839617178535e-11, -7.217875877484109e-11, -\n 1.1285205693786809e-15, 3.8241419562917457e-16, 2.727322263242888e-05, \n -1.9285560276423494e-06, -1.6299569164241514e-08, 8.954758973117168e-10,\n 45658359101.85514, 143455126000.2526], [6.412748625088242e-06, -\n 1.2418411778899226e-12, -1.7788474362949836e-13, 3.98996561577576e-11, \n -7.290920324596793e-11, -1.1258830930124426e-15, 3.8322709394594156e-16,\n 2.6978084672522227e-05, -1.9285560276423494e-06, -\n 1.6212095851483947e-08, 9.06465374180439e-10, 61888825971.955795, \n 378668457219.4866], [7.2950079161541e-06, -1.423600316370741e-12, -\n 1.8067111524974517e-13, 5.467528933636526e-11, -7.269174548770519e-11, \n -1.1131382577055909e-15, 3.642469974043324e-16, 2.442302310111588e-05, \n -1.9365154780516644e-06, -1.4736235919210341e-08, 
9.02573445716291e-10,\n 72168008768.07632, 429565720321.34186], [7.277641363649251e-06, -\n 1.4186237292635021e-12, -1.7672076654522444e-13, 5.4875348972838477e-11,\n -7.250728822785179e-11, -1.1805107762756462e-15, 3.880180132520679e-16,\n 2.7230117388865188e-05, -1.79140018540739e-06, -1.6042437181983083e-08,\n 8.524740779894739e-10, 144497176198.74966, 733034177617.006], [\n 6.435449388867622e-06, -1.2375432988348708e-12, -1.8114977137612309e-13,\n 3.9353291584632385e-11, -7.306938943468394e-11, -1.1645955168783485e-15,\n 3.887993677152085e-16, 2.4432920122355823e-05, -1.927081007099796e-06, \n -1.644170413651962e-08, 9.09149545755435e-10, 151124978488.96066, \n 169172823395.74277], [7.278147471012389e-06, -1.4279386093057266e-12, -\n 1.7683419692117291e-13, 5.493758019518918e-11, -7.289146026177328e-11, \n -1.1733747472097884e-15, 3.675691109659462e-16, 2.4721354631465055e-05,\n -1.7638896999117907e-06, -1.588988736168235e-08, 8.632841256471107e-10,\n 202474467398.45615, 922092113586.5779], [7.177079530800026e-06, -\n 1.234976832476029e-12, -1.7526876652912844e-13, 5.534254133122458e-11, \n -7.205830797649949e-11, -1.120826019773443e-15, 3.8364837768074985e-16,\n 2.2258192147086412e-05, -1.7878127478583311e-06, -1.620023857736605e-08,\n 8.601004856702239e-10, 213869103072.6637, 175609972725.89545], [\n 6.350923506939188e-06, -1.2525603780194753e-12, -1.7993410193080307e-13,\n 5.465765498048408e-11, -7.243742739542879e-11, -1.1188147125437704e-15,\n 3.855466000081844e-16, 2.47790541156232e-05, -1.9163436765125797e-06, -\n 1.4800406168095671e-08, 9.043461740243768e-10, 224990894591.97565, \n 940216435276.2135], [6.375685299492019e-06, -1.2470011129066444e-12, -\n 1.7556981763399573e-13, 5.482994274294271e-11, -7.247391358991481e-11, \n -1.1737410455893592e-15, 3.8256427214483946e-16, 2.4747394888572957e-05,\n -1.921085601798487e-06, -1.655011267092608e-08, 9.011563904603084e-10, \n 242139334921.33466, 239644754200.97003], [6.474178960026375e-06, -\n 
1.436844524248817e-12, -1.766513283684079e-13, 3.940038642964773e-11, -\n 7.181977887130175e-11, -1.1548751736666541e-15, 3.1745148598988346e-16,\n 2.707077658308786e-05, -1.92536072773705e-06, -1.6138736645669917e-08, \n 8.669699125562364e-10, 435950975348.6226, 363915964843.3034], [\n 6.393923513974502e-06, -1.4269415936091027e-12, -1.7684911527276688e-13,\n 5.480211712359269e-11, -7.243742739542879e-11, -1.1795054510279537e-15,\n 3.8683254669914693e-16, 2.7200371659468664e-05, -1.925930700762681e-06,\n -1.643396668485197e-08, 8.601004856702239e-10, 840789439847.5613, \n 886246867017.2574], [6.5292806963971566e-06, -1.2521788644307235e-12, -\n 1.752024719240228e-13, 5.432423395298522e-11, -7.243160061946103e-11, -\n 1.1728842336075722e-15, 3.642469974043324e-16, 2.4721354631465055e-05, \n -1.9201275577069358e-06, -1.6042437181983083e-08, 8.613978338195112e-10,\n 1220087240914.9465, 1538404370735.8923], [7.222746286095911e-06, -\n 1.4287928653696903e-12, -1.7798884315456173e-13, 5.47608522234827e-11, \n -7.177949793819456e-11, -1.1234835849356116e-15, 3.638627899273496e-16,\n 2.4725904181789833e-05, -1.7849753358990938e-06, -\n 1.6004659818379623e-08, 9.095587982641099e-10, 1457214324700.6113, \n 3971854766728.4727]]\n[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13, \n 5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15,\n 8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06, \n -1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429, \n 145011267381.10236]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, \n 148817892429.6303]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.6265959327559024e-11, -1.669670220497726e-10, 
-1.7924226413310876e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536, \n 138194745977.8172]\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13, \n 5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15,\n 8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,\n -1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045, \n 148499957167.59894]\n[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13, \n 4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568, \n 138376625633.08905]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,\n -1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468, \n 147143586736.12967]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,\n 8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, \n 148817892429.6303]\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13, \n 5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15,\n 8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06,\n -1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952, \n 149819556305.94864]\n[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13, \n 5.0376537605765665e-11, -1.7763084077799175e-10, -\n 1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -\n 2.04721003e-06, 
-1.5842532923181598e-08, 9.632771875757591e-10, \n 108694336300.90585, 154375559012.27695]\n[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13, \n 4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15,\n 8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06,\n -1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514, \n 195080915978.15582]\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13,\n 4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15,\n 8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337, \n 143318140783.98648]\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789, \n 160453198244.84198]\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, \n 4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06,\n -1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114, \n 109895891048.79645]\n[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13,\n 4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15, \n 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327, \n 122880053749.32047]\n[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13, \n 4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06, \n -1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218, \n 
130994741061.18477]\n[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205, \n 148716985588.15564]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098, \n 101545825010.15762]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15,\n 8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779, \n 101879284463.33914]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885, \n 102270797763.39908]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, \n 106305215455.77405]\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, \n 4.7704075824842225e-11, -1.8975666267494283e-10, -\n 1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05, \n -1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10,\n 166731944707.48343, 109962566902.69849]\n[-2.0926038768787875e-10, -1.3235354562894133e-12, 
-1.9531413192683389e-13,\n 4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15,\n 8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -\n 1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018, \n 111850971687.16727]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15,\n 8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794, \n 128488226222.4665]\n[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15,\n 8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -\n 1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544, \n 172987399752.44284]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824, \n 100937635343.36494]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, \n 101220474756.5564]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15,\n 
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603, \n 101440046940.62292]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, \n 101479475091.5385]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353, \n 101522685052.87083]\n[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158, \n 102059630396.96977]\n[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06,\n -1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558, \n 102134941196.42899]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273, \n 102270797763.3992]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13, \n 4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n 
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435, \n 102270797763.39929]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15,\n 8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374, \n 102518032445.5969]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15,\n 8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432, \n 102577021916.3392]\n[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, \n 106305215455.77405]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382, \n 112061347287.60056]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06,\n -1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097, \n 136457449593.06062]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096, \n 
160562679389.67618]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, \n 100125948657.42978]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744, \n 101215117638.35565]\n[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,\n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, \n 101220474756.5564]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664, \n 101220474756.55742]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617, \n 101440046940.6675]\n[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, \n 101479475091.5385]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997, \n 101479475091.5439]\n[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134, \n 101707557509.25955]\n[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668, \n 101910116331.42278]\n[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13,\n 4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892, \n 101942928295.47075]\n[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095, \n 104790698646.6004]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8168585276282465e-11, -1.4675478300173032e-10, -\n 1.8210829282495652e-15, 
8.725909439109588e-16, 2.4957985197946978e-05, \n -1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10,\n 160649925757.17908, 106424978687.80653]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564, \n 106648081137.30634]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15, \n 8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595, \n 106784848298.00577]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962, \n 106918161793.97298]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13, \n 4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648, \n 117274357359.96004]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13, \n 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109, \n 118996909122.33968]\n[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n 
-1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083, \n 125656067768.88814]\n[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951, \n 191438895729.71088]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697, \n 99223644222.007]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, \n 100125948657.42978]\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534, \n 100180028793.61896]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686, \n 
100223589650.82378]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13, \n 4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15, \n 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221, \n 100558408593.70113]\n[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8877585360256924e-11, -1.4675478300173032e-10, -\n 1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05, \n -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10,\n 193351738763.71564, 100949387586.23102]\n[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13, \n 4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133, \n 101220474756.86967]\n[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13, \n 4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612, \n 101440046940.05927]\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15, \n 8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655, \n 101467426817.57397]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.8327983670281894e-11, -1.4675478300173032e-10, -\n 1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -\n 1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10,\n 193392923341.53983, 101900620617.14302]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13, 
\n 4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602, \n 103131734300.077]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997, \n 103180541968.40872]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538, \n 103805616436.34537]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15, \n 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -\n 1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226, \n 106843736334.12831]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13, \n 4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826, \n 110030788135.34956]\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15, \n 8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885, \n 111006224451.55664]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.83613368865103e-16, 
2.4957985197946978e-05, -1.8015495121292713e-06, \n -1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314, \n 113087422800.04585]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723, \n 115101067854.69138]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216, \n 126984206927.84627]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858, \n 98138013390.26245]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816, \n 98829512345.71414]\n[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13, \n 4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15,\n 8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157, \n 98891303611.42876]\n[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 
176341783374.723, \n 99638222233.03885]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506, \n 99962477826.90034]\n[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526, \n 100180028793.6191]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237, \n 100290100926.3771]\n[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853, \n 100447140164.3877]\n[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13, \n 4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15, \n 8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385, \n 100872818268.9527]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177, \n 101076246798.6337]\n[-8.372802930516975e-10, -1.3359785407261977e-12, 
-1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06, \n -1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375, \n 101683114493.3993]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15, \n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255, \n 105699410466.83022]\n[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13, \n 4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207, \n 105861289429.36061]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15,\n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723, \n 106068644665.40553]\n[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06, \n -1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741, \n 109638154986.2024]\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15,\n 8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976, \n 114344342719.97507]\n[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13, \n 4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15,\n 
8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632, \n 115101067854.31332]\n[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339, \n 120797794814.05704]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -\n 1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072, \n 133721716481.47603]\n[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -\n 1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489, \n 147005409641.27127]\n[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15,\n 9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978, \n 156722470654.13324]\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, \n 4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868, \n 167972224844.19583]\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, \n 4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 
8.693561802236366e-10, 108072398467.75635, \n 167972224843.92523]\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, \n 192873830899.82352]\n[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13, \n 4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952, \n 160840990423.46024]\n[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13, \n 4.6589669053151376e-11, -1.4986345441105813e-10, -\n 2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -\n 1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10,\n 96467208837.94556, 179586543004.98117]\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06,\n -1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293, \n 187118262382.8758]\n[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06, \n -1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736, \n 187415567631.77402]\n[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, \n 192873830899.82352]\n[-9.212545260772544e-10, 
-1.290047843436073e-12, -1.8356995493902235e-13, \n 4.6154548476823616e-11, -1.8724359625458014e-10, -\n 2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05, \n -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10,\n 117723326371.02731, 192873830899.82806]\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13, \n 4.9793760275117476e-11, -2.0780774158604122e-10, -\n 2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, \n -1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10,\n 170388218306.66492, 168925348515.4128]\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13, \n 4.9793760275117476e-11, -2.0780774158604122e-10, -\n 2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10,\n 191821821495.1242, 158798904598.69617]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,\n 8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985, \n 163375067226.8736]\n[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084, \n 152444791757.7255]\n[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, \n 4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15,\n 7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06,\n -1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325, \n 153164597685.87036]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, 
-1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826, \n 155849166742.8801]\n[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13, \n 5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673, \n 161472427331.15216]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16,\n 2.0053347897812537e-05, -1.7639524821935923e-06, -\n 1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852, \n 175966043507.07343]\n[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15,\n 7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06,\n -1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572, \n 184829802626.36642]\n[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588, \n 189416231139.84406]\n[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 4.9793760275117476e-11, -2.0772853669541976e-10, -\n 1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10,\n 160631139543.06137, 122019730569.7476]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 
153487531028.94116, \n 128597452665.91768]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,\n -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,\n 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16,\n 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08,\n 8.728626586100963e-10, 100156348461.68698, 161778485371.36353]\n[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272, \n 171303112707.4717]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 4.9793760275117476e-11, -1.7352085678160897e-10, -\n 1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05, \n -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10,\n 97245352689.07887, 174341101475.58182]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 92503635735.71886, 182996786041.40976]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, 
-2.1612081417375267e-15, 7.470344646267989e-16,\n 2.0053347897812537e-05, -1.7639524821935923e-06, -\n 1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355, \n 185221791801.95062]\n[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936, \n 189416231139.85312]\n[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125, \n 190153350507.14474]\n[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13, \n 4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279, \n 197738317572.1617]\n[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11,\n -1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16,\n 2.0200374650352852e-05, -1.7758673160173464e-06, -\n 1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334, \n 119035825863.27417]\n[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13, \n 4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604, \n 120144468135.82727]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -\n 
1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346, \n 120359956158.03543]\n[0.0, -1.1984578022968498e-12, -2.094909506024221e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06,\n -1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575, \n 120995758664.39177]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, \n 4.9967768219433575e-11, -1.7352085678160897e-10, -\n 1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10,\n 151029089477.88403, 121221447183.73479]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212, \n 129257349906.46594]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574, \n 129372470770.49553]\n[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11, \n -1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16, \n 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,\n 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n[-5.2595470648843136e-09, -1.2003583976149596e-12, 
-2.5161591646068603e-13,\n 5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987, \n 132029509845.4832]\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15,\n 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735, \n 137741348069.72827]\n[0.0, -1.2344709098355012e-12, -2.090479539659853e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,\n 7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -\n 1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327, \n 143862344272.2216]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, \n 143868003797.30536]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13, \n 4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457, \n 151496866956.06183]\n[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617, \n 154679332976.7693]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,\n -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 
1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.760544278271184e-10, 107979663117.77498, 158587944243.3901]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13, \n 4.9793760275117476e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05, \n -1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10,\n 101036412554.48618, 178952195751.12357]\n[0.0, -1.2071709641632366e-12, -2.088572649745598e-13, \n 4.9793760275117476e-11, -1.7352085678160897e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, \n -1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10,\n 101115281125.52821, 181312381109.07834]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 92503635735.71886, 182996786041.40976]\n[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 
92503635735.71886, 182996786041.40976]\n[0.0, -1.223723210207519e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15,\n 7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06, \n -1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515, \n 193403737351.61066]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06,\n -1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167, \n 199093039398.6542]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,\n 7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -\n 1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637, \n 120593643708.66519]\n[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13, \n 5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895, \n 121269083493.68436]\n[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11, \n -1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16,\n 1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.711551918674385e-10, 168378423128.42877, 121439949900.90005]\n[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15, \n 7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06,\n -1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705, \n 122027384226.92]\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13, \n 4.9793760275117476e-11, 
-1.7849498396021264e-10, -1.841021101878205e-15,\n 7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -\n 1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388, \n 122750625888.09634]\n[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13, \n 5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886, \n 122935226427.98189]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545, \n 131702579310.68652]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863, \n 133211383937.09729]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263, \n 143105235055.608]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, 
-1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511, \n 143860615432.91934]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999, \n 145092770865.8836]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487, \n 155477031697.76462]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15, \n 8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412, \n 158587944243.89005]\n[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11, \n -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.760544278271184e-10, 99132279868.34593, 171185572417.85907]\n[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, \n 8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461, \n 180430143233.58368]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15,\n 7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06,\n -1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496, \n 183449646874.34637]\n[7.863427642383715e-09, -1.2031098015567e-12, 
-2.5161591646068603e-13, \n 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15,\n 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935, \n 191076754457.2524]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,\n 7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06, \n -1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465, \n 194275355409.06598]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11,\n -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08,\n 8.760544278271184e-10, 86984982238.58047, 194967876303.00238]\n[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, \n 7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182, \n 198112832281.90573]\n[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067, \n 115813093887.0164]\n[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06,\n -1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041, \n 118508631814.89664]\n[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, 
-1.7639524821935923e-06, \n -1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395, \n 119478476003.54858]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902, \n 119746195767.88297]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686, \n 120002114057.9749]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245, \n 124495463707.0261]\n[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13, \n 5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236, \n 127226107362.62663]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766, \n 
128048566261.66084]\n[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15, \n 7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06, \n -1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375, \n 129146670219.88675]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287, \n 132556338910.10567]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873, \n 132653030892.18918]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06, \n -1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999, \n 140436120253.29218]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377, \n 143105235055.60883]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417, \n 143860615432.91846]\n[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, \n 
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, \n 143868003797.30536]\n[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13, \n 5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966, \n 144269444777.14786]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957, \n 145085114899.6645]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15,\n 7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602, \n 145085114900.12366]\n[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217, \n 145590447784.79443]\n[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044, \n 153694065180.84283]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13, \n 5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15,\n 
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157, \n 154263245256.49524]\n[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876, \n 164710456294.5225]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035, \n 170174200265.44513]\n",
"step-4": "one=[7.236287049225701e-06, -1.445911565527231e-12, -1.7498772740084537e-13, 5.109944355076077e-11, -2.5430545472048434e-10, -1.1709514644876058e-15, 3.210132219509301e-16, 2.502027767038304e-05, -1.975229899156637e-06, -1.4769695480936238e-08, 8.945619840357268e-10, 135323228000.64511, 130464457208.5385]\ntwo=[6.101651991514008e-06, -1.2764740103418866e-12, -1.9703439809858206e-13, 4.396430723625485e-11, -7.256876412950873e-11, -1.0739249647595844e-15, 3.658727722774004e-16, 2.9622074287767617e-05, -1.9615179204309246e-06, -1.518516920005905e-08, 8.601004856702239e-10, 194360719320.3122, 75684271432.82758]\nthree=[6.4442734160126695e-06, -1.2463732938819767e-12, -1.7912928652160854e-13, 3.990379556815055e-11, -7.256876412950873e-11, -1.128505986956859e-15, 3.855466000081844e-16, 2.7105518268805634e-05, -1.918022677712299e-06, -1.648586510957147e-08, 8.952907812465134e-10, 40874176708.45886, 129961018217.7445]\nfour=[5.591985036569838e-06, -1.5732644861037622e-12, -1.2586540738798186e-13, 5.508993685740796e-11, -2.345347836605763e-10, -2.1583737575101563e-15, 3.315525502908504e-16, 2.240369111953624e-05, -1.8808495402864136e-06, -1.5154818034574072e-08, 9.134128217572173e-10, 95538034865.65512, 192689393537.75766]\nfive=[5.9877501684316964e-06, -1.4725222964411265e-12, -2.0184675219747084e-13, 4.503520441436847e-11, -2.195719309752964e-10, -1.1996862422718706e-15, 3.172649531291829e-16, 2.235294071412983e-05, -1.7673862518012629e-06, -1.593810591566234e-08, 8.495479067416047e-10, 172629547544.72174, 121012464101.10771]\nsix = [6.525636151737385e-10, -1.5516831882387681e-12, -1.7065883936338436e-13, 4.6265959327559024e-11, -2.669670220497726e-10, -1.0739249647595844e-15, 9.085513864943156e-16, 2.5963751617497686e-05, -1.9757021060346726e-06, -1.5031696163247858e-08, 8.945619840357268e-10, 99871865434.22476, 123933224114.80229]\n\nfirst1_gen= [[6.417695307686038e-06, -1.2416886913890308e-12, -1.791907685050265e-13, 3.983180616117193e-11, 
-7.243488055496258e-11, -1.1211433897576025e-15, 3.855466000081844e-16, 2.7255618460061466e-05, -1.917823676019374e-06, -1.6515339421288782e-08, 9.011563904603084e-10, 37866240406.859344, 251532289608.81], [5.974092884160685e-06, -1.4591405170404072e-12, -2.0184675219747084e-13, 4.3821744446480515e-11, -7.22093644433135e-11, -1.0712173220027044e-15, 3.65758224365464e-16, 2.235294071412983e-05, -1.763797302814154e-06, -1.6059311052756668e-08, 8.601004856702239e-10, 50907349656.8246, 117645129547.73723], [7.171513003462397e-06, -1.4334443716578728e-12, -1.749514610735409e-13, 5.509823004788858e-11, -2.5310572250093563e-10, -1.1729621402736547e-15, 3.321162280251396e-16, 2.4812886502853343e-05, -1.964119169077712e-06, -1.4799846596325615e-08, 8.965548334484032e-10, 85071583311.774, 128667385131.30013], [7.3000149385339486e-06, -1.4508582334938624e-12, -1.7446896418754742e-13, 5.109944355076077e-11, -2.5448794058714256e-10, -1.1658376910672744e-15, 3.1827015830354867e-16, 2.502027767038304e-05, -1.9664311146400523e-06, -1.4730561693079958e-08, 8.945619840357268e-10, 88113858040.47986, 127558862768.52084], [5.581899283069486e-06, -1.5683042319109065e-12, -1.2586540738798186e-13, 5.535493146365402e-11, -2.359264703422783e-10, -2.1583737575101563e-15, 3.2921934547988314e-16, 2.2287538734129395e-05, -1.8740196054647742e-06, -1.5117323048065992e-08, 9.114608510796109e-10, 90926368846.81926, 202187413440.1054], [7.283321725975412e-06, -1.4356567410151954e-12, -1.7340660013452496e-13, 5.090884822547887e-11, -2.5483963758954753e-10, -1.139281753854116e-15, 3.1970242364315826e-16, 2.7105518268805634e-05, -1.963160298901409e-06, -1.4681586301228543e-08, 8.916460477308206e-10, 142505061534.36484, 476063714570.38367], [5.591985036569838e-06, -1.582675728169255e-12, -1.7359285477580936e-13, 5.508993685740796e-11, -2.5320893657294154e-10, -2.1583737575101563e-15, 3.210132219509301e-16, 2.511654073479438e-05, -1.965555797894771e-06, -1.5140087108671845e-08, 9.214909160927855e-10, 
154168790181.56195, 151975095946.00134], [6.4442734160126695e-06, -1.5732644861037622e-12, -1.8036634758606428e-13, 5.508993685740796e-11, -7.27534017567909e-11, -2.1583737575101563e-15, 3.306758579127667e-16, 2.2271668826613973e-05, -1.8701423073554431e-06, -1.501078224172373e-08, 8.952907812465134e-10, 267883353895.00665, 158759045786.36343], [6.460391520361948e-06, -1.2647094709156108e-12, -1.7971415732486973e-13, 4.396430723625485e-11, -7.247266456377939e-11, -1.1373744765683215e-15, 3.658727722774004e-16, 2.7105518268805634e-05, -1.9663482803776534e-06, -1.6397993463300374e-08, 8.923803313149724e-10, 349965962553.9084, 297837273933.3269], [5.6272383047081095e-06, -1.5732644861037622e-12, -1.2571170147507106e-13, 5.534697362808701e-11, -2.3610413258218975e-10, -1.1709514644876058e-15, 3.2295817320330796e-16, 2.2314117324425535e-05, -1.8663649176622442e-06, -1.4769695480936238e-08, 9.134128217572173e-10, 393807734620.02893, 1450122303072.2456], [6.437914022666636e-06, -1.2546731037733632e-12, -1.7844406460041829e-13, 5.488975389250315e-11, -7.259445338393382e-11, -2.1597092009682793e-15, 3.3041861616205316e-16, 2.240369111953624e-05, -1.876360375320595e-06, -1.648586510957147e-08, 9.134128217572173e-10, 630890128752.3734, 431834854178.85406], [6.046575120541287e-06, -1.2764740103418866e-12, -1.746683186012092e-13, 5.109944355076077e-11, -2.520608616913497e-10, -1.0704525109919603e-15, 3.6772692838424905e-16, 2.971296945414015e-05, -1.951293357817624e-06, -1.4769695480936238e-08, 8.939102135383639e-10, 871857905030.9667, 2328286443290.7437], [6.051000675950963e-06, -1.2846825520511646e-12, -1.268060597488819e-13, 5.490952472465525e-11, -2.3244121922778247e-10, -2.1424540029363198e-15, 3.673980081076506e-16, 2.961326937497751e-05, -1.895367635724618e-06, -1.5034205062876655e-08, 9.16195585945909e-10, 1374938673042.5493, 4524615824537.332], [5.6149092148265474e-06, -1.4639678768975506e-12, -1.253161090730697e-13, 4.481233479664715e-11, -2.335516269047763e-10, 
-2.1416544930348844e-15, 3.3108330528832777e-16, 2.22837679272578e-05, -1.8681878215606722e-06, -1.528899727808779e-08, 8.573199342562181e-10, 1914602582873.603, 2013877892656.268], [6.101651991514008e-06, -1.5833077943313046e-12, -1.9703439809858206e-13, 5.500949944067544e-11, -7.256876412950873e-11, -1.0739249647595844e-15, 3.658727722774004e-16, 2.970517711660123e-05, -1.8738366196528042e-06, -1.522166132952199e-08, 9.123763139194573e-10, 3105022967535.493, 7589715261899.736], [7.169307360099383e-06, -1.475336624504327e-12, -2.0167346748799746e-13, 4.53859215469466e-11, -2.1795530264429259e-10, -1.209364174087727e-15, 3.179525403817121e-16, 2.248948490803903e-05, -1.9732992714201345e-06, -1.4769695480936238e-08, 8.472670825115021e-10, 3105580314530.341, 4622017117439.275]]\nsecond1_gen= [[6.473615077297489e-06, -1.2416886913890308e-12, -1.7473505716030156e-13, 3.966285637236728e-11, -7.243488055496258e-11, -1.1645955168783485e-15, 3.1918479761370934e-16, 2.7255618460061466e-05, -1.912188850787629e-06, -1.6430064111592607e-08, 8.970550453733459e-10, 35685411688.23251, 231044368946.34586], [6.393923513974502e-06, -1.2418411778899226e-12, -1.7798884315456173e-13, 3.983180616117193e-11, -7.243742739542879e-11, -1.128236668058653e-15, 3.855466000081844e-16, 2.7200371659468664e-05, -1.9285560276423494e-06, -1.636514926725132e-08, 9.071692193685023e-10, 57865021002.9106, 360571654391.1672], [7.230454358781939e-06, -1.423600316370741e-12, -1.7526876652912844e-13, 5.484412599476033e-11, -7.222102668803471e-11, -1.1795054510279537e-15, 3.642469974043324e-16, 2.4721354631465055e-05, -1.7738362153245365e-06, -1.6042437181983083e-08, 8.601004856702239e-10, 60788722272.11295, 440230270157.01904], [6.435449388867622e-06, -1.2416886913890308e-12, -1.807074860305897e-13, 5.4624696474782334e-11, -7.299561923303083e-11, -1.1155657493946243e-15, 3.855466000081844e-16, 2.4639345261867096e-05, -1.92912357850029e-06, -1.4800406168095671e-08, 9.011563904603084e-10, 90541420172.20418, 
503189560104.03455], [6.417695307686038e-06, -1.2339817339229541e-12, -1.7924803979756243e-13, 5.5902899343682586e-11, -7.217875877484109e-11, -1.120826019773443e-15, 3.8364837768074985e-16, 2.2074405673546407e-05, -1.904212437644655e-06, -1.509791791618086e-08, 8.960324081400173e-10, 91138056935.866, 156256693553.4698], [7.235432436183002e-06, -1.444519147741974e-12, -1.7273464723057338e-13, 5.517809418856912e-11, -2.5310572250093563e-10, -1.1658376910672744e-15, 3.3048095015500005e-16, 2.4812886502853343e-05, -1.964119169077712e-06, -1.4777953862585708e-08, 8.945619840357268e-10, 98015149423.40909, 125389712442.99564], [6.382295596647026e-06, -1.5683042319109065e-12, -1.271182130914441e-13, 3.9709881372590666e-11, -2.3411267641257417e-10, -1.1298867172210502e-15, 3.273827033054119e-16, 2.71828464025051e-05, -1.86879521538149e-06, -1.6615697675064263e-08, 8.938783145101195e-10, 108132988244.55444, 600937075323.7117], [7.3000149385339486e-06, -1.4649443926376347e-12, -1.740251215699652e-13, 5.5040821609381877e-11, -2.5448794058714256e-10, -1.1729621402736547e-15, 3.321162280251396e-16, 2.492985953688089e-05, -1.95260325957056e-06, -1.4879723555310096e-08, 8.886352647229086e-10, 118040637271.1665, 119637343045.177], [5.595995170722691e-06, -1.5775800984465949e-12, -1.2531378473105398e-13, 5.5737478708430025e-11, -2.359264703422783e-10, -2.141274549861917e-15, 3.2670998922499434e-16, 2.2375793269713536e-05, -1.8912926681237391e-06, -1.5244852134327217e-08, 9.114608510796109e-10, 193706809398.06177, 145429438824.56485], [6.417695307686038e-06, -1.2390179448049186e-12, -2.0184675219747084e-13, 3.996761820973954e-11, -7.30077645678233e-11, -1.0733818300903034e-15, 3.6521589033170274e-16, 2.7380751148035565e-05, -1.901967051200766e-06, -1.6531476837456585e-08, 8.659462633971021e-10, 291714681643.4888, 219358626907.00577], [7.269087955666727e-06, -1.4398732474157131e-12, -1.745771866624504e-13, 5.5370858680922966e-11, -2.5212090845365535e-10, -1.1547640084684547e-15, 
3.1826570991307717e-16, 2.4799848604697875e-05, -1.9802449310363633e-06, -1.4932011828861567e-08, 8.916225586049855e-10, 291814703950.912, 265497905413.09335], [5.9575073045674184e-06, -1.4591405170404072e-12, -1.7515686156504634e-13, 5.071091939607585e-11, -7.251972289899038e-11, -1.172163868062928e-15, 3.2003450301868095e-16, 2.236559796692659e-05, -1.964000257622103e-06, -1.461000086726312e-08, 8.924031273079037e-10, 441351014961.37744, 513124822279.29816], [7.118156558728498e-06, -1.4213484509322684e-12, -1.7594919642528414e-13, 5.502275447498347e-11, -2.359264703422783e-10, -2.146866081339977e-15, 3.3020925008057705e-16, 2.48800717576552e-05, -1.8740196054647742e-06, -1.4681760148497176e-08, 9.194043116452982e-10, 480601682287.2741, 2166349399584.3464], [6.435379358296727e-06, -1.449279705541305e-12, -1.791907685050265e-13, 4.013727926643595e-11, -2.561628978573389e-10, -1.1658376910672744e-15, 3.1916771926698506e-16, 2.706170262409588e-05, -1.9747493962051268e-06, -1.6529378614728517e-08, 8.945619840357268e-10, 480690251628.6576, 455217335045.56067], [7.273965294010602e-06, -1.4508582334938624e-12, -1.2640181562203036e-13, 5.1256890020829106e-11, -2.347526011960417e-10, -1.1573810914157072e-15, 3.313802025100971e-16, 2.5248996663846427e-05, -1.8890715225154116e-06, -1.4830513494585048e-08, 9.024560997678787e-10, 513022508534.7746, 1741282758378.8208], [7.171513003462397e-06, -1.4334443716578728e-12, -1.258745292341622e-13, 5.562080442549079e-11, -2.5310572250093563e-10, -2.177369178159867e-15, 3.269368594462498e-16, 2.5052523082312023e-05, -1.9593459141604013e-06, -1.4665768665138152e-08, 8.920318373308913e-10, 559251400205.1976, 313686240874.89294]]\nthird1_gen= [[6.428534934734018e-06, -1.2348251959432863e-12, -1.767418187059626e-13, 3.954772029523348e-11, -7.292041892016764e-11, -1.1216042005993232e-15, 3.8462974452187554e-16, 2.732021800880368e-05, -1.912188850787629e-06, -1.6465861899672315e-08, 8.953663972360121e-10, 35914970214.05617, 
208658422545.5101], [6.449609175276781e-06, -1.2355212093166627e-12, -1.7892996139776768e-13, 3.978108705811362e-11, -7.260470610345522e-11, -1.128236668058653e-15, 3.8262320992212617e-16, 2.699492740612888e-05, -1.9285560276423494e-06, -1.6459368248390354e-08, 9.071692193685023e-10, 37667755025.66565, 260591174431.75333], [6.393923513974502e-06, -1.2329510175057565e-12, -1.7878217157136278e-13, 4.009121098742944e-11, -7.243742739542879e-11, -1.119215448440791e-15, 3.855466000081844e-16, 2.7170577516281446e-05, -1.946180426984478e-06, -1.6356719885598995e-08, 9.071692193685023e-10, 41822657912.61174, 187148082730.9518], [6.393923513974502e-06, -1.2418411778899226e-12, -1.7764720872488035e-13, 5.5839617178535e-11, -7.217875877484109e-11, -1.1285205693786809e-15, 3.8241419562917457e-16, 2.727322263242888e-05, -1.9285560276423494e-06, -1.6299569164241514e-08, 8.954758973117168e-10, 45658359101.85514, 143455126000.2526], [6.412748625088242e-06, -1.2418411778899226e-12, -1.7788474362949836e-13, 3.98996561577576e-11, -7.290920324596793e-11, -1.1258830930124426e-15, 3.8322709394594156e-16, 2.6978084672522227e-05, -1.9285560276423494e-06, -1.6212095851483947e-08, 9.06465374180439e-10, 61888825971.955795, 378668457219.4866], [7.2950079161541e-06, -1.423600316370741e-12, -1.8067111524974517e-13, 5.467528933636526e-11, -7.269174548770519e-11, -1.1131382577055909e-15, 3.642469974043324e-16, 2.442302310111588e-05, -1.9365154780516644e-06, -1.4736235919210341e-08, 9.02573445716291e-10, 72168008768.07632, 429565720321.34186], [7.277641363649251e-06, -1.4186237292635021e-12, -1.7672076654522444e-13, 5.4875348972838477e-11, -7.250728822785179e-11, -1.1805107762756462e-15, 3.880180132520679e-16, 2.7230117388865188e-05, -1.79140018540739e-06, -1.6042437181983083e-08, 8.524740779894739e-10, 144497176198.74966, 733034177617.006], [6.435449388867622e-06, -1.2375432988348708e-12, -1.8114977137612309e-13, 3.9353291584632385e-11, -7.306938943468394e-11, -1.1645955168783485e-15, 
3.887993677152085e-16, 2.4432920122355823e-05, -1.927081007099796e-06, -1.644170413651962e-08, 9.09149545755435e-10, 151124978488.96066, 169172823395.74277], [7.278147471012389e-06, -1.4279386093057266e-12, -1.7683419692117291e-13, 5.493758019518918e-11, -7.289146026177328e-11, -1.1733747472097884e-15, 3.675691109659462e-16, 2.4721354631465055e-05, -1.7638896999117907e-06, -1.588988736168235e-08, 8.632841256471107e-10, 202474467398.45615, 922092113586.5779], [7.177079530800026e-06, -1.234976832476029e-12, -1.7526876652912844e-13, 5.534254133122458e-11, -7.205830797649949e-11, -1.120826019773443e-15, 3.8364837768074985e-16, 2.2258192147086412e-05, -1.7878127478583311e-06, -1.620023857736605e-08, 8.601004856702239e-10, 213869103072.6637, 175609972725.89545], [6.350923506939188e-06, -1.2525603780194753e-12, -1.7993410193080307e-13, 5.465765498048408e-11, -7.243742739542879e-11, -1.1188147125437704e-15, 3.855466000081844e-16, 2.47790541156232e-05, -1.9163436765125797e-06, -1.4800406168095671e-08, 9.043461740243768e-10, 224990894591.97565, 940216435276.2135], [6.375685299492019e-06, -1.2470011129066444e-12, -1.7556981763399573e-13, 5.482994274294271e-11, -7.247391358991481e-11, -1.1737410455893592e-15, 3.8256427214483946e-16, 2.4747394888572957e-05, -1.921085601798487e-06, -1.655011267092608e-08, 9.011563904603084e-10, 242139334921.33466, 239644754200.97003], [6.474178960026375e-06, -1.436844524248817e-12, -1.766513283684079e-13, 3.940038642964773e-11, -7.181977887130175e-11, -1.1548751736666541e-15, 3.1745148598988346e-16, 2.707077658308786e-05, -1.92536072773705e-06, -1.6138736645669917e-08, 8.669699125562364e-10, 435950975348.6226, 363915964843.3034], [6.393923513974502e-06, -1.4269415936091027e-12, -1.7684911527276688e-13, 5.480211712359269e-11, -7.243742739542879e-11, -1.1795054510279537e-15, 3.8683254669914693e-16, 2.7200371659468664e-05, -1.925930700762681e-06, -1.643396668485197e-08, 8.601004856702239e-10, 840789439847.5613, 886246867017.2574], 
[6.5292806963971566e-06, -1.2521788644307235e-12, -1.752024719240228e-13, 5.432423395298522e-11, -7.243160061946103e-11, -1.1728842336075722e-15, 3.642469974043324e-16, 2.4721354631465055e-05, -1.9201275577069358e-06, -1.6042437181983083e-08, 8.613978338195112e-10, 1220087240914.9465, 1538404370735.8923], [7.222746286095911e-06, -1.4287928653696903e-12, -1.7798884315456173e-13, 5.47608522234827e-11, -7.177949793819456e-11, -1.1234835849356116e-15, 3.638627899273496e-16, 2.4725904181789833e-05, -1.7849753358990938e-06, -1.6004659818379623e-08, 9.095587982641099e-10, 1457214324700.6113, 3971854766728.4727]]\n\n[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13, 5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15, 8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429, 145011267381.10236]\n\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, 148817892429.6303]\n\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536, 138194745977.8172]\n\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13, 5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15, 8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045, 148499957167.59894]\n\n[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13, 4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15, 
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568, 138376625633.08905]\n\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468, 147143586736.12967]\n\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15, 8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, 148817892429.6303]\n\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13, 5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15, 8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06, -1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952, 149819556305.94864]\n\n[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13, 5.0376537605765665e-11, -1.7763084077799175e-10, -1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10, 108694336300.90585, 154375559012.27695]\n\n[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13, 4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15, 8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06, -1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514, 195080915978.15582]\n\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13, 4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15, 8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337, 
143318140783.98648]\n\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789, 160453198244.84198]\n\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, 4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06, -1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114, 109895891048.79645]\n\n[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13, 4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327, 122880053749.32047]\n\n[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13, 4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06, -1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218, 130994741061.18477]\n\n[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205, 148716985588.15564]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098, 101545825010.15762]\n\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, 
-1.4446129047664535e-10, -1.8210829282495652e-15, 8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779, 101879284463.33914]\n\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885, 102270797763.39908]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, 106305215455.77405]\n\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, 4.7704075824842225e-11, -1.8975666267494283e-10, -1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05, -1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10, 166731944707.48343, 109962566902.69849]\n\n[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15, 8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018, 111850971687.16727]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15, 8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794, 128488226222.4665]\n\n[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15, 8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -1.836034443165441e-08, 
9.148927620445716e-10, 115664967416.85544, 172987399752.44284]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]\n\n[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824, 100937635343.36494]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, 101220474756.5564]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603, 101440046940.62292]\n\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, 101479475091.5385]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353, 101522685052.87083]\n\n[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 
4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158, 102059630396.96977]\n\n[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06, -1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558, 102134941196.42899]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273, 102270797763.3992]\n\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13, 4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435, 102270797763.39929]\n\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15, 8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374, 102518032445.5969]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15, 8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432, 102577021916.3392]\n\n[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06, 
-1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, 106305215455.77405]\n\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382, 112061347287.60056]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06, -1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097, 136457449593.06062]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06, -1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096, 160562679389.67618]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, 100125948657.42978]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744, 101215117638.35565]\n\n[-2.0926038768787875e-10, -1.3382357152930057e-12, 
-1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, 101220474756.5564]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664, 101220474756.55742]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617, 101440046940.6675]\n\n[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, 101479475091.5385]\n\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997, 101479475091.5439]\n\n[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06, -1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134, 101707557509.25955]\n\n[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.815489945689696e-16, 2.492800478197597e-05, 
-1.799557982850986e-06, -1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668, 101910116331.42278]\n\n[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13, 4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892, 101942928295.47075]\n\n[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095, 104790698646.6004]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8168585276282465e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10, 160649925757.17908, 106424978687.80653]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564, 106648081137.30634]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15, 8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595, 106784848298.00577]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962, 106918161793.97298]\n\n[-8.372802930516975e-10, 
-1.3359785407261977e-12, -1.9347415380665696e-13, 4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648, 117274357359.96004]\n\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109, 118996909122.33968]\n\n[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083, 125656067768.88814]\n\n[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951, 191438895729.71088]\n\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697, 99223644222.007]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 
2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, 100125948657.42978]\n\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534, 100180028793.61896]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686, 100223589650.82378]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13, 4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221, 100558408593.70113]\n\n[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8877585360256924e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 193351738763.71564, 100949387586.23102]\n\n[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13, 4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133, 101220474756.86967]\n\n[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13, 4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612, 
101440046940.05927]\n\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15, 8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655, 101467426817.57397]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.8327983670281894e-11, -1.4675478300173032e-10, -1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10, 193392923341.53983, 101900620617.14302]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13, 4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06, -1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602, 103131734300.077]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997, 103180541968.40872]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538, 103805616436.34537]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15, 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226, 106843736334.12831]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13, 4.855683396544643e-11, -1.4675478300173032e-10, 
-1.8130493256774034e-15, 8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06, -1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826, 110030788135.34956]\n\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15, 8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885, 111006224451.55664]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314, 113087422800.04585]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723, 115101067854.69138]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216, 126984206927.84627]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858, 98138013390.26245]\n\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 
195862055252.45816, 98829512345.71414]\n\n[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13, 4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15, 8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157, 98891303611.42876]\n\n[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723, 99638222233.03885]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506, 99962477826.90034]\n\n[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526, 100180028793.6191]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237, 100290100926.3771]\n\n[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853, 100447140164.3877]\n\n[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13, 4.852404641399239e-11, 
-1.463585775827913e-10, -1.812045689500589e-15, 8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385, 100872818268.9527]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177, 101076246798.6337]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375, 101683114493.3993]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255, 105699410466.83022]\n\n[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06, -1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207, 105861289429.36061]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723, 106068644665.40553]\n\n[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06, -1.836034443165441e-08, 
9.087979750822277e-10, 147793960453.4741, 109638154986.2024]\n\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15, 8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976, 114344342719.97507]\n\n[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13, 4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15, 8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632, 115101067854.31332]\n\n[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339, 120797794814.05704]\n\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072, 133721716481.47603]\n\n[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489, 147005409641.27127]\n\n[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15, 9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978, 156722470654.13324]\n\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, 
4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868, 167972224844.19583]\n\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, 4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635, 167972224843.92523]\n\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, 192873830899.82352]\n\n[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13, 4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952, 160840990423.46024]\n\n[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13, 4.6589669053151376e-11, -1.4986345441105813e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 96467208837.94556, 179586543004.98117]\n\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06, -1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293, 187118262382.8758]\n\n[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06, 
-1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736, 187415567631.77402]\n\n[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, 192873830899.82352]\n\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.6154548476823616e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.02731, 192873830899.82806]\n\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13, 4.9793760275117476e-11, -2.0780774158604122e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10, 170388218306.66492, 168925348515.4128]\n\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13, 4.9793760275117476e-11, -2.0780774158604122e-10, -2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10, 191821821495.1242, 158798904598.69617]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985, 163375067226.8736]\n\n[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084, 152444791757.7255]\n\n[0.0, -1.223723210207519e-12, 
-2.1051647732787472e-13, 4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15, 7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06, -1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325, 153164597685.87036]\n\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826, 155849166742.8801]\n\n[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13, 5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673, 161472427331.15216]\n\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16, 2.0053347897812537e-05, -1.7639524821935923e-06, -1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852, 175966043507.07343]\n\n[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15, 7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06, -1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572, 184829802626.36642]\n\n[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588, 189416231139.84406]\n\n[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13, 4.9793760275117476e-11, -2.0772853669541976e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 
8.728626586100963e-10, 160631139543.06137, 122019730569.7476]\n\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116, 128597452665.91768]\n\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]\n\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.728626586100963e-10, 100156348461.68698, 161778485371.36353]\n\n[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272, 171303112707.4717]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 4.9793760275117476e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 97245352689.07887, 174341101475.58182]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, 
-1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]\n\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16, 2.0053347897812537e-05, -1.7639524821935923e-06, -1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355, 185221791801.95062]\n\n[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, 5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936, 189416231139.85312]\n\n[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125, 190153350507.14474]\n\n[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13, 4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279, 197738317572.1617]\n\n[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11, -1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16, 2.0200374650352852e-05, -1.7758673160173464e-06, -1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334, 119035825863.27417]\n\n[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604, 
120144468135.82727]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346, 120359956158.03543]\n\n[0.0, -1.1984578022968498e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06, -1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575, 120995758664.39177]\n\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, 4.9967768219433575e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10, 151029089477.88403, 121221447183.73479]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]\n\n[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212, 129257349906.46594]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06, -1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574, 129372470770.49553]\n\n[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16, 
1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n\n[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987, 132029509845.4832]\n\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735, 137741348069.72827]\n\n[0.0, -1.2344709098355012e-12, -2.090479539659853e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15, 7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327, 143862344272.2216]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, 143868003797.30536]\n\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13, 4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457, 151496866956.06183]\n\n[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13, 5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617, 154679332976.7693]\n\n[0.0, -1.2031098015567e-12, 
-2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.77498, 158587944243.3901]\n\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]\n\n[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13, 4.9793760275117476e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05, -1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 101036412554.48618, 178952195751.12357]\n\n[0.0, -1.2071709641632366e-12, -2.088572649745598e-13, 4.9793760275117476e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10, 101115281125.52821, 181312381109.07834]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]\n\n[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, 
-1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]\n\n[0.0, -1.223723210207519e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15, 7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515, 193403737351.61066]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06, -1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167, 199093039398.6542]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15, 7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637, 120593643708.66519]\n\n[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13, 5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895, 121269083493.68436]\n\n[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16, 1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.711551918674385e-10, 168378423128.42877, 121439949900.90005]\n\n[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15, 7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06, -1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705, 122027384226.92]\n\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13, 
4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15, 7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388, 122750625888.09634]\n\n[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13, 5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15, 7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886, 122935226427.98189]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545, 131702579310.68652]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863, 133211383937.09729]\n\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263, 143105235055.608]\n\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, 
-1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511, 143860615432.91934]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999, 145092770865.8836]\n\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487, 155477031697.76462]\n\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15, 8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412, 158587944243.89005]\n\n[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 99132279868.34593, 171185572417.85907]\n\n[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461, 180430143233.58368]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15, 7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06, -1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496, 183449646874.34637]\n\n[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, 
-1.82610373802557e-15, 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935, 191076754457.2524]\n\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15, 7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06, -1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465, 194275355409.06598]\n\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08, 8.760544278271184e-10, 86984982238.58047, 194967876303.00238]\n\n[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182, 198112832281.90573]\n\n[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067, 115813093887.0164]\n\n[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06, -1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041, 118508631814.89664]\n\n[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395, 
119478476003.54858]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902, 119746195767.88297]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686, 120002114057.9749]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]\n\n[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245, 124495463707.0261]\n\n[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13, 5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236, 127226107362.62663]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766, 128048566261.66084]\n\n[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.747166095423015e-10, 
-1.842789515995345e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06, -1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375, 129146670219.88675]\n\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287, 132556338910.10567]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873, 132653030892.18918]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06, -1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999, 140436120253.29218]\n\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377, 143105235055.60883]\n\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417, 143860615432.91846]\n\n[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, 
143868003797.30536]\n\n[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13, 5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966, 144269444777.14786]\n\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957, 145085114899.6645]\n\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15, 7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602, 145085114900.12366]\n\n[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217, 145590447784.79443]\n\n[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044, 153694065180.84283]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13, 5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157, 154263245256.49524]\n\n[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.021867485100539e-11, 
-1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876, 164710456294.5225]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035, 170174200265.44513]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# from dataclasses import InitVar, dataclass
# standard library imports
from math import floor
# third-party imports
import gym
import torch
from torch.nn import Conv2d, Linear, MaxPool2d, Module, ModuleList, ReLU, Sequential
from torch.nn import functional as F
# local imports
from tmrl.nn import TanhNormalLayer
from tmrl.sac_models import ActorModule, MlpActionValue, SacLinear, prod
import logging
# === Trackmania =======================================================================================================
def num_flat_features(x):
    """Return the per-sample flattened feature count of tensor *x*.

    Multiplies together every dimension of *x* except the first
    (batch) dimension.
    """
    # Fold all non-batch dimensions into a single product.
    features = 1
    for dim in x.shape[1:]:
        features *= dim
    return features
def conv2d_out_dims(conv_layer, h_in, w_in):
    """Compute the output (height, width) of *conv_layer* for an input of
    spatial size (h_in, w_in), using the torch.nn.Conv2d shape formula.
    """
    def _out_size(size_in, axis):
        # Standard Conv2d output-size formula along one spatial axis.
        pad = conv_layer.padding[axis]
        dil = conv_layer.dilation[axis]
        ker = conv_layer.kernel_size[axis]
        stride = conv_layer.stride[axis]
        return floor((size_in + 2 * pad - dil * (ker - 1) - 1) / stride + 1)

    return _out_size(h_in, 0), _out_size(w_in, 1)
class Net(Module):
    """Small convolutional feature extractor: 3-channel input -> 253 features.

    Three conv+ReLU+max-pool stages, then a fully-connected layer.
    """

    def __init__(self):
        super(Net, self).__init__()
        # NOTE(review): conv4 is created but never used in forward() —
        # kept so the module's parameter set stays unchanged; confirm intent.
        self.conv1 = Conv2d(3, 8, (8, 8))
        self.conv2 = Conv2d(8, 16, (4, 4))
        self.conv3 = Conv2d(16, 32, (3, 3))
        self.conv4 = Conv2d(32, 64, (3, 3))
        self.fc1 = Linear(672, 253)

    def forward(self, x):
        """Run the three pooled conv stages, flatten, then fc1 with ReLU."""
        for conv in (self.conv1, self.conv2, self.conv3):
            x = F.max_pool2d(F.relu(conv(x)), (4, 4))
        x = x.view(-1, num_flat_features(x))
        return F.relu(self.fc1(x))
class DeepmindCNN(Module):
    """Convolutional trunk in the classic DQN layout.

    Three conv layers (8x8 stride 4, 4x4 stride 2, 3x3 stride 1), each
    followed by ReLU in forward(); activations are flattened per sample.
    Tracks the resulting spatial size and flat feature count at build time.
    """

    def __init__(self, h_in, w_in, channels_in):
        super(DeepmindCNN, self).__init__()
        # Track spatial dims layer by layer so flat_features is known up front.
        self.h_out, self.w_out = h_in, w_in
        # Keyword args below are spelled out even where they match the
        # Conv2d defaults (no padding/dilation, bias on, zero pad mode).
        self.conv1 = Conv2d(in_channels=channels_in, out_channels=32,
                            kernel_size=(8, 8), stride=4, padding=0,
                            dilation=1, bias=True, padding_mode='zeros')
        self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out, self.w_out)
        self.conv2 = Conv2d(in_channels=32, out_channels=64,
                            kernel_size=(4, 4), stride=2, padding=0,
                            dilation=1, bias=True, padding_mode='zeros')
        self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out, self.w_out)
        self.conv3 = Conv2d(in_channels=64, out_channels=64,
                            kernel_size=(3, 3), stride=1, padding=0,
                            dilation=1, bias=True, padding_mode='zeros')
        self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out, self.w_out)
        self.out_channels = self.conv3.out_channels
        self.flat_features = self.out_channels * self.h_out * self.w_out
        logging.debug(f" h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}")

    def forward(self, x):
        """Apply the three ReLU'd convolutions and flatten per sample."""
        logging.debug(f" forward, shape x :{x.shape}")
        for conv in (self.conv1, self.conv2, self.conv3):
            x = F.relu(conv(x))
        flat_features = num_flat_features(x)
        # Sanity check: runtime shape must agree with the build-time count.
        assert flat_features == self.flat_features, f"x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}"
        return x.view(-1, flat_features)
class BigCNN(Module):
    """Four-layer convolutional trunk (64 -> 64 -> 128 -> 128 channels,
    all stride 2), each layer followed by ReLU, then flattened per sample.

    Spatial output size and flat feature count are computed at build time.
    """

    def __init__(self, h_in, w_in, channels_in):
        super(BigCNN, self).__init__()
        # Track spatial dims through the stack so flat_features is exact.
        height, width = h_in, w_in
        self.conv1 = Conv2d(channels_in, 64, 8, stride=2)
        height, width = conv2d_out_dims(self.conv1, height, width)
        self.conv2 = Conv2d(64, 64, 4, stride=2)
        height, width = conv2d_out_dims(self.conv2, height, width)
        self.conv3 = Conv2d(64, 128, 4, stride=2)
        height, width = conv2d_out_dims(self.conv3, height, width)
        self.conv4 = Conv2d(128, 128, 4, stride=2)
        height, width = conv2d_out_dims(self.conv4, height, width)
        self.h_out, self.w_out = height, width
        self.out_channels = self.conv4.out_channels
        self.flat_features = self.out_channels * self.h_out * self.w_out
        logging.debug(f" h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}")

    def forward(self, x):
        # TODO kept from original: Simon uses leaky relu instead of relu,
        # see what works best.
        for conv in (self.conv1, self.conv2, self.conv3, self.conv4):
            x = F.relu(conv(x))
        flat_features = num_flat_features(x)
        # Sanity check: runtime shape must agree with the build-time count.
        assert flat_features == self.flat_features, f"x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}"
        return x.view(-1, flat_features)
class TM20CNNModule(Module):
    """Shared Trackmania 2020 feature module for the SAC actor and critics.

    Observation tuple layout (from __init__'s indexing): x[0]=velocity,
    x[1]=gear, x[2]=rpm, x[3]=stacked images, x[4:]=buffered actions.
    Produces a 512-dim embedding (no final activation; the enclosing
    Sequential applies ReLU).
    """

    def __init__(self, observation_space, action_space, is_q_network, act_buf_len=0):
        super().__init__()
        assert isinstance(observation_space, gym.spaces.Tuple)
        # torch.autograd.set_detect_anomaly(True) # FIXME: remove for optimization
        self.is_q_network = is_q_network
        self.act_buf_len = act_buf_len
        self.act_dim = action_space.shape[0]
        self.img_dims = observation_space[3].shape
        self.vel_dim = observation_space[0].shape[0]
        self.gear_dim = observation_space[1].shape[0]
        self.rpm_dim = observation_space[2].shape[0]
        logging.debug(f" self.img_dims: {self.img_dims}")
        height = self.img_dims[2]
        width = self.img_dims[3]
        # Successive images are stacked along the channel dimension.
        in_channels = self.img_dims[0] * self.img_dims[1]
        self.cnn = BigCNN(h_in=height, w_in=width, channels_in=in_channels)
        fc1_in = self.cnn.flat_features + self.vel_dim + self.gear_dim + self.rpm_dim
        if self.is_q_network:
            # Q-network also receives the current action as input.
            fc1_in += self.act_dim
        if self.act_buf_len:
            fc1_in += self.act_dim * self.act_buf_len
        self.fc1 = Linear(fc1_in, 512)

    def forward(self, x):
        """Embed an observation tuple (plus action for the Q-network)."""
        vel = x[0].float()
        gear = x[1].float()
        rpm = x[2].float()
        ims = x[3].float()
        if not self.act_buf_len:
            # Only the action-buffer configuration is implemented.
            raise NotImplementedError
        all_acts = torch.cat((x[4:]), dim=1).float()  # if q network, the last action will be act
        # Concatenate the four successive frames on the channel dimension.
        # TODO : check device
        cat_im = torch.cat((ims[:, 0], ims[:, 1], ims[:, 2], ims[:, 3]), dim=1)
        h = self.cnn(cat_im)
        h = torch.cat((h, vel, gear, rpm, all_acts), dim=1)
        # No ReLU here — the enclosing Sequential applies it.
        return self.fc1(h)
class TMActionValue(Sequential):
    """Q-value head: CNN module -> ReLU -> 512->256 -> ReLU -> 256->2.

    Outputs two values because the reward components are predicted
    separately.
    """

    def __init__(self, observation_space, action_space, act_buf_len=0):
        stack = (
            TM20CNNModule(observation_space, action_space, is_q_network=True, act_buf_len=act_buf_len),
            ReLU(),
            Linear(512, 256),
            ReLU(),
            Linear(256, 2),  # we separate reward components
        )
        super().__init__(*stack)

    # noinspection PyMethodOverriding
    def forward(self, obs, action):
        """Append the action to the observation tuple and run the stack."""
        res = super().forward((*obs, action))
        # logging.debug(f" av res:{res}")
        return res
class TMPolicy(Sequential):
    """Policy head: CNN module -> ReLU -> 512->256 -> ReLU -> TanhNormal
    layer producing the action distribution parameters.
    """

    def __init__(self, observation_space, action_space, act_buf_len=0):
        super().__init__(
            TM20CNNModule(observation_space, action_space, is_q_network=False, act_buf_len=act_buf_len),
            ReLU(),
            Linear(512, 256),
            ReLU(),
            TanhNormalLayer(256, action_space.shape[0]),
        )

    # noinspection PyMethodOverriding
    def forward(self, obs):
        """Run the observation tuple through the stack."""
        # res = super().forward(torch.cat(obs, 1))
        res = super().forward(obs)
        # logging.debug(f" po res:{res}")
        return res
class Tm_hybrid_1(ActorModule):
    # Actor-critic bundle: one TMPolicy actor plus `num_critics` TMActionValue critics.
    def __init__(self, observation_space, action_space, hidden_units: int = 512, num_critics: int = 2, act_buf_len=0):
        """Build the actor and critics over a gym Tuple observation space.

        NOTE(review): `hidden_units` is accepted but not used anywhere in
        this constructor — confirm whether it should size the heads.
        """
        super().__init__()
        assert isinstance(observation_space, gym.spaces.Tuple), f"{observation_space} is not a spaces.Tuple"
        self.critics = ModuleList(TMActionValue(observation_space, action_space, act_buf_len=act_buf_len) for _ in range(num_critics))
        self.actor = TMPolicy(observation_space, action_space, act_buf_len=act_buf_len)
        # Last layer of each critic Sequential, kept for external access.
        self.critic_output_layers = [c[-1] for c in self.critics]
|
normal
|
{
"blob_id": "6f6d3fbb9a6a118e0f4026a7f9054b90b8cf2fca",
"index": 5677,
"step-1": "<mask token>\n\n\nclass BigCNN(Module):\n\n def __init__(self, h_in, w_in, channels_in):\n super(BigCNN, self).__init__()\n self.h_out, self.w_out = h_in, w_in\n self.conv1 = Conv2d(channels_in, 64, 8, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out,\n self.w_out)\n self.conv2 = Conv2d(64, 64, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out,\n self.w_out)\n self.conv3 = Conv2d(64, 128, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out,\n self.w_out)\n self.conv4 = Conv2d(128, 128, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv4, self.h_out,\n self.w_out)\n self.out_channels = self.conv4.out_channels\n self.flat_features = self.out_channels * self.h_out * self.w_out\n logging.debug(\n f' h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}'\n )\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n x = F.relu(self.conv2(x))\n x = F.relu(self.conv3(x))\n x = F.relu(self.conv4(x))\n flat_features = num_flat_features(x)\n assert flat_features == self.flat_features, f'x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}'\n x = x.view(-1, flat_features)\n return x\n\n\nclass TM20CNNModule(Module):\n\n def __init__(self, observation_space, action_space, is_q_network,\n act_buf_len=0):\n super().__init__()\n assert isinstance(observation_space, gym.spaces.Tuple)\n self.img_dims = observation_space[3].shape\n self.vel_dim = observation_space[0].shape[0]\n self.gear_dim = observation_space[1].shape[0]\n self.rpm_dim = observation_space[2].shape[0]\n self.is_q_network = is_q_network\n self.act_buf_len = act_buf_len\n self.act_dim = action_space.shape[0]\n logging.debug(f' self.img_dims: {self.img_dims}')\n h_in = self.img_dims[2]\n w_in = self.img_dims[3]\n channels_in = self.img_dims[0] * self.img_dims[1]\n self.cnn = 
BigCNN(h_in=h_in, w_in=w_in, channels_in=channels_in)\n dim_fc1_in = (self.cnn.flat_features + self.vel_dim + self.gear_dim +\n self.rpm_dim)\n if self.is_q_network:\n dim_fc1_in += self.act_dim\n if self.act_buf_len:\n dim_fc1_in += self.act_dim * self.act_buf_len\n self.fc1 = Linear(dim_fc1_in, 512)\n\n def forward(self, x):\n vel = x[0].float()\n gear = x[1].float()\n rpm = x[2].float()\n ims = x[3].float()\n im1 = ims[:, 0]\n im2 = ims[:, 1]\n im3 = ims[:, 2]\n im4 = ims[:, 3]\n if self.act_buf_len:\n all_acts = torch.cat(x[4:], dim=1).float()\n else:\n raise NotImplementedError\n cat_im = torch.cat((im1, im2, im3, im4), dim=1)\n h = self.cnn(cat_im)\n h = torch.cat((h, vel, gear, rpm, all_acts), dim=1)\n h = self.fc1(h)\n return h\n\n\nclass TMActionValue(Sequential):\n\n def __init__(self, observation_space, action_space, act_buf_len=0):\n super().__init__(TM20CNNModule(observation_space, action_space,\n is_q_network=True, act_buf_len=act_buf_len), ReLU(), Linear(512,\n 256), ReLU(), Linear(256, 2))\n\n def forward(self, obs, action):\n x = *obs, action\n res = super().forward(x)\n return res\n\n\nclass TMPolicy(Sequential):\n\n def __init__(self, observation_space, action_space, act_buf_len=0):\n super().__init__(TM20CNNModule(observation_space, action_space,\n is_q_network=False, act_buf_len=act_buf_len), ReLU(), Linear(\n 512, 256), ReLU(), TanhNormalLayer(256, action_space.shape[0]))\n\n def forward(self, obs):\n res = super().forward(obs)\n return res\n\n\nclass Tm_hybrid_1(ActorModule):\n\n def __init__(self, observation_space, action_space, hidden_units: int=\n 512, num_critics: int=2, act_buf_len=0):\n super().__init__()\n assert isinstance(observation_space, gym.spaces.Tuple\n ), f'{observation_space} is not a spaces.Tuple'\n self.critics = ModuleList(TMActionValue(observation_space,\n action_space, act_buf_len=act_buf_len) for _ in range(num_critics))\n self.actor = TMPolicy(observation_space, action_space, act_buf_len=\n act_buf_len)\n 
self.critic_output_layers = [c[-1] for c in self.critics]\n",
"step-2": "<mask token>\n\n\nclass DeepmindCNN(Module):\n <mask token>\n\n def forward(self, x):\n logging.debug(f' forward, shape x :{x.shape}')\n x = F.relu(self.conv1(x))\n x = F.relu(self.conv2(x))\n x = F.relu(self.conv3(x))\n flat_features = num_flat_features(x)\n assert flat_features == self.flat_features, f'x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}'\n x = x.view(-1, flat_features)\n return x\n\n\nclass BigCNN(Module):\n\n def __init__(self, h_in, w_in, channels_in):\n super(BigCNN, self).__init__()\n self.h_out, self.w_out = h_in, w_in\n self.conv1 = Conv2d(channels_in, 64, 8, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out,\n self.w_out)\n self.conv2 = Conv2d(64, 64, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out,\n self.w_out)\n self.conv3 = Conv2d(64, 128, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out,\n self.w_out)\n self.conv4 = Conv2d(128, 128, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv4, self.h_out,\n self.w_out)\n self.out_channels = self.conv4.out_channels\n self.flat_features = self.out_channels * self.h_out * self.w_out\n logging.debug(\n f' h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}'\n )\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n x = F.relu(self.conv2(x))\n x = F.relu(self.conv3(x))\n x = F.relu(self.conv4(x))\n flat_features = num_flat_features(x)\n assert flat_features == self.flat_features, f'x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}'\n x = x.view(-1, flat_features)\n return x\n\n\nclass TM20CNNModule(Module):\n\n def __init__(self, observation_space, action_space, is_q_network,\n act_buf_len=0):\n super().__init__()\n assert isinstance(observation_space, 
gym.spaces.Tuple)\n self.img_dims = observation_space[3].shape\n self.vel_dim = observation_space[0].shape[0]\n self.gear_dim = observation_space[1].shape[0]\n self.rpm_dim = observation_space[2].shape[0]\n self.is_q_network = is_q_network\n self.act_buf_len = act_buf_len\n self.act_dim = action_space.shape[0]\n logging.debug(f' self.img_dims: {self.img_dims}')\n h_in = self.img_dims[2]\n w_in = self.img_dims[3]\n channels_in = self.img_dims[0] * self.img_dims[1]\n self.cnn = BigCNN(h_in=h_in, w_in=w_in, channels_in=channels_in)\n dim_fc1_in = (self.cnn.flat_features + self.vel_dim + self.gear_dim +\n self.rpm_dim)\n if self.is_q_network:\n dim_fc1_in += self.act_dim\n if self.act_buf_len:\n dim_fc1_in += self.act_dim * self.act_buf_len\n self.fc1 = Linear(dim_fc1_in, 512)\n\n def forward(self, x):\n vel = x[0].float()\n gear = x[1].float()\n rpm = x[2].float()\n ims = x[3].float()\n im1 = ims[:, 0]\n im2 = ims[:, 1]\n im3 = ims[:, 2]\n im4 = ims[:, 3]\n if self.act_buf_len:\n all_acts = torch.cat(x[4:], dim=1).float()\n else:\n raise NotImplementedError\n cat_im = torch.cat((im1, im2, im3, im4), dim=1)\n h = self.cnn(cat_im)\n h = torch.cat((h, vel, gear, rpm, all_acts), dim=1)\n h = self.fc1(h)\n return h\n\n\nclass TMActionValue(Sequential):\n\n def __init__(self, observation_space, action_space, act_buf_len=0):\n super().__init__(TM20CNNModule(observation_space, action_space,\n is_q_network=True, act_buf_len=act_buf_len), ReLU(), Linear(512,\n 256), ReLU(), Linear(256, 2))\n\n def forward(self, obs, action):\n x = *obs, action\n res = super().forward(x)\n return res\n\n\nclass TMPolicy(Sequential):\n\n def __init__(self, observation_space, action_space, act_buf_len=0):\n super().__init__(TM20CNNModule(observation_space, action_space,\n is_q_network=False, act_buf_len=act_buf_len), ReLU(), Linear(\n 512, 256), ReLU(), TanhNormalLayer(256, action_space.shape[0]))\n\n def forward(self, obs):\n res = super().forward(obs)\n return res\n\n\nclass 
Tm_hybrid_1(ActorModule):\n\n def __init__(self, observation_space, action_space, hidden_units: int=\n 512, num_critics: int=2, act_buf_len=0):\n super().__init__()\n assert isinstance(observation_space, gym.spaces.Tuple\n ), f'{observation_space} is not a spaces.Tuple'\n self.critics = ModuleList(TMActionValue(observation_space,\n action_space, act_buf_len=act_buf_len) for _ in range(num_critics))\n self.actor = TMPolicy(observation_space, action_space, act_buf_len=\n act_buf_len)\n self.critic_output_layers = [c[-1] for c in self.critics]\n",
"step-3": "<mask token>\n\n\nclass Net(Module):\n\n def __init__(self):\n super(Net, self).__init__()\n self.conv1 = Conv2d(3, 8, (8, 8))\n self.conv2 = Conv2d(8, 16, (4, 4))\n self.conv3 = Conv2d(16, 32, (3, 3))\n self.conv4 = Conv2d(32, 64, (3, 3))\n self.fc1 = Linear(672, 253)\n <mask token>\n\n\nclass DeepmindCNN(Module):\n\n def __init__(self, h_in, w_in, channels_in):\n super(DeepmindCNN, self).__init__()\n self.h_out, self.w_out = h_in, w_in\n self.conv1 = Conv2d(in_channels=channels_in, out_channels=32,\n kernel_size=(8, 8), stride=4, padding=0, dilation=1, bias=True,\n padding_mode='zeros')\n self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out,\n self.w_out)\n self.conv2 = Conv2d(in_channels=32, out_channels=64, kernel_size=(4,\n 4), stride=2, padding=0, dilation=1, bias=True, padding_mode=\n 'zeros')\n self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out,\n self.w_out)\n self.conv3 = Conv2d(in_channels=64, out_channels=64, kernel_size=(3,\n 3), stride=1, padding=0, dilation=1, bias=True, padding_mode=\n 'zeros')\n self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out,\n self.w_out)\n self.out_channels = self.conv3.out_channels\n self.flat_features = self.out_channels * self.h_out * self.w_out\n logging.debug(\n f' h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}'\n )\n\n def forward(self, x):\n logging.debug(f' forward, shape x :{x.shape}')\n x = F.relu(self.conv1(x))\n x = F.relu(self.conv2(x))\n x = F.relu(self.conv3(x))\n flat_features = num_flat_features(x)\n assert flat_features == self.flat_features, f'x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}'\n x = x.view(-1, flat_features)\n return x\n\n\nclass BigCNN(Module):\n\n def __init__(self, h_in, w_in, channels_in):\n super(BigCNN, self).__init__()\n self.h_out, self.w_out = h_in, w_in\n self.conv1 = Conv2d(channels_in, 64, 8, 
stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out,\n self.w_out)\n self.conv2 = Conv2d(64, 64, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out,\n self.w_out)\n self.conv3 = Conv2d(64, 128, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out,\n self.w_out)\n self.conv4 = Conv2d(128, 128, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv4, self.h_out,\n self.w_out)\n self.out_channels = self.conv4.out_channels\n self.flat_features = self.out_channels * self.h_out * self.w_out\n logging.debug(\n f' h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}'\n )\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n x = F.relu(self.conv2(x))\n x = F.relu(self.conv3(x))\n x = F.relu(self.conv4(x))\n flat_features = num_flat_features(x)\n assert flat_features == self.flat_features, f'x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}'\n x = x.view(-1, flat_features)\n return x\n\n\nclass TM20CNNModule(Module):\n\n def __init__(self, observation_space, action_space, is_q_network,\n act_buf_len=0):\n super().__init__()\n assert isinstance(observation_space, gym.spaces.Tuple)\n self.img_dims = observation_space[3].shape\n self.vel_dim = observation_space[0].shape[0]\n self.gear_dim = observation_space[1].shape[0]\n self.rpm_dim = observation_space[2].shape[0]\n self.is_q_network = is_q_network\n self.act_buf_len = act_buf_len\n self.act_dim = action_space.shape[0]\n logging.debug(f' self.img_dims: {self.img_dims}')\n h_in = self.img_dims[2]\n w_in = self.img_dims[3]\n channels_in = self.img_dims[0] * self.img_dims[1]\n self.cnn = BigCNN(h_in=h_in, w_in=w_in, channels_in=channels_in)\n dim_fc1_in = (self.cnn.flat_features + self.vel_dim + self.gear_dim +\n self.rpm_dim)\n if self.is_q_network:\n dim_fc1_in += self.act_dim\n if self.act_buf_len:\n 
dim_fc1_in += self.act_dim * self.act_buf_len\n self.fc1 = Linear(dim_fc1_in, 512)\n\n def forward(self, x):\n vel = x[0].float()\n gear = x[1].float()\n rpm = x[2].float()\n ims = x[3].float()\n im1 = ims[:, 0]\n im2 = ims[:, 1]\n im3 = ims[:, 2]\n im4 = ims[:, 3]\n if self.act_buf_len:\n all_acts = torch.cat(x[4:], dim=1).float()\n else:\n raise NotImplementedError\n cat_im = torch.cat((im1, im2, im3, im4), dim=1)\n h = self.cnn(cat_im)\n h = torch.cat((h, vel, gear, rpm, all_acts), dim=1)\n h = self.fc1(h)\n return h\n\n\nclass TMActionValue(Sequential):\n\n def __init__(self, observation_space, action_space, act_buf_len=0):\n super().__init__(TM20CNNModule(observation_space, action_space,\n is_q_network=True, act_buf_len=act_buf_len), ReLU(), Linear(512,\n 256), ReLU(), Linear(256, 2))\n\n def forward(self, obs, action):\n x = *obs, action\n res = super().forward(x)\n return res\n\n\nclass TMPolicy(Sequential):\n\n def __init__(self, observation_space, action_space, act_buf_len=0):\n super().__init__(TM20CNNModule(observation_space, action_space,\n is_q_network=False, act_buf_len=act_buf_len), ReLU(), Linear(\n 512, 256), ReLU(), TanhNormalLayer(256, action_space.shape[0]))\n\n def forward(self, obs):\n res = super().forward(obs)\n return res\n\n\nclass Tm_hybrid_1(ActorModule):\n\n def __init__(self, observation_space, action_space, hidden_units: int=\n 512, num_critics: int=2, act_buf_len=0):\n super().__init__()\n assert isinstance(observation_space, gym.spaces.Tuple\n ), f'{observation_space} is not a spaces.Tuple'\n self.critics = ModuleList(TMActionValue(observation_space,\n action_space, act_buf_len=act_buf_len) for _ in range(num_critics))\n self.actor = TMPolicy(observation_space, action_space, act_buf_len=\n act_buf_len)\n self.critic_output_layers = [c[-1] for c in self.critics]\n",
"step-4": "<mask token>\n\n\ndef num_flat_features(x):\n size = x.size()[1:]\n num_features = 1\n for s in size:\n num_features *= s\n return num_features\n\n\ndef conv2d_out_dims(conv_layer, h_in, w_in):\n h_out = floor((h_in + 2 * conv_layer.padding[0] - conv_layer.dilation[0\n ] * (conv_layer.kernel_size[0] - 1) - 1) / conv_layer.stride[0] + 1)\n w_out = floor((w_in + 2 * conv_layer.padding[1] - conv_layer.dilation[1\n ] * (conv_layer.kernel_size[1] - 1) - 1) / conv_layer.stride[1] + 1)\n return h_out, w_out\n\n\nclass Net(Module):\n\n def __init__(self):\n super(Net, self).__init__()\n self.conv1 = Conv2d(3, 8, (8, 8))\n self.conv2 = Conv2d(8, 16, (4, 4))\n self.conv3 = Conv2d(16, 32, (3, 3))\n self.conv4 = Conv2d(32, 64, (3, 3))\n self.fc1 = Linear(672, 253)\n\n def forward(self, x):\n x = F.max_pool2d(F.relu(self.conv1(x)), (4, 4))\n x = F.max_pool2d(F.relu(self.conv2(x)), (4, 4))\n x = F.max_pool2d(F.relu(self.conv3(x)), (4, 4))\n x = x.view(-1, num_flat_features(x))\n x = F.relu(self.fc1(x))\n return x\n\n\nclass DeepmindCNN(Module):\n\n def __init__(self, h_in, w_in, channels_in):\n super(DeepmindCNN, self).__init__()\n self.h_out, self.w_out = h_in, w_in\n self.conv1 = Conv2d(in_channels=channels_in, out_channels=32,\n kernel_size=(8, 8), stride=4, padding=0, dilation=1, bias=True,\n padding_mode='zeros')\n self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out,\n self.w_out)\n self.conv2 = Conv2d(in_channels=32, out_channels=64, kernel_size=(4,\n 4), stride=2, padding=0, dilation=1, bias=True, padding_mode=\n 'zeros')\n self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out,\n self.w_out)\n self.conv3 = Conv2d(in_channels=64, out_channels=64, kernel_size=(3,\n 3), stride=1, padding=0, dilation=1, bias=True, padding_mode=\n 'zeros')\n self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out,\n self.w_out)\n self.out_channels = self.conv3.out_channels\n self.flat_features = self.out_channels * self.h_out * self.w_out\n 
logging.debug(\n f' h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}'\n )\n\n def forward(self, x):\n logging.debug(f' forward, shape x :{x.shape}')\n x = F.relu(self.conv1(x))\n x = F.relu(self.conv2(x))\n x = F.relu(self.conv3(x))\n flat_features = num_flat_features(x)\n assert flat_features == self.flat_features, f'x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}'\n x = x.view(-1, flat_features)\n return x\n\n\nclass BigCNN(Module):\n\n def __init__(self, h_in, w_in, channels_in):\n super(BigCNN, self).__init__()\n self.h_out, self.w_out = h_in, w_in\n self.conv1 = Conv2d(channels_in, 64, 8, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out,\n self.w_out)\n self.conv2 = Conv2d(64, 64, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out,\n self.w_out)\n self.conv3 = Conv2d(64, 128, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out,\n self.w_out)\n self.conv4 = Conv2d(128, 128, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv4, self.h_out,\n self.w_out)\n self.out_channels = self.conv4.out_channels\n self.flat_features = self.out_channels * self.h_out * self.w_out\n logging.debug(\n f' h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}'\n )\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n x = F.relu(self.conv2(x))\n x = F.relu(self.conv3(x))\n x = F.relu(self.conv4(x))\n flat_features = num_flat_features(x)\n assert flat_features == self.flat_features, f'x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}'\n x = x.view(-1, flat_features)\n return x\n\n\nclass TM20CNNModule(Module):\n\n def __init__(self, observation_space, action_space, is_q_network,\n act_buf_len=0):\n 
super().__init__()\n assert isinstance(observation_space, gym.spaces.Tuple)\n self.img_dims = observation_space[3].shape\n self.vel_dim = observation_space[0].shape[0]\n self.gear_dim = observation_space[1].shape[0]\n self.rpm_dim = observation_space[2].shape[0]\n self.is_q_network = is_q_network\n self.act_buf_len = act_buf_len\n self.act_dim = action_space.shape[0]\n logging.debug(f' self.img_dims: {self.img_dims}')\n h_in = self.img_dims[2]\n w_in = self.img_dims[3]\n channels_in = self.img_dims[0] * self.img_dims[1]\n self.cnn = BigCNN(h_in=h_in, w_in=w_in, channels_in=channels_in)\n dim_fc1_in = (self.cnn.flat_features + self.vel_dim + self.gear_dim +\n self.rpm_dim)\n if self.is_q_network:\n dim_fc1_in += self.act_dim\n if self.act_buf_len:\n dim_fc1_in += self.act_dim * self.act_buf_len\n self.fc1 = Linear(dim_fc1_in, 512)\n\n def forward(self, x):\n vel = x[0].float()\n gear = x[1].float()\n rpm = x[2].float()\n ims = x[3].float()\n im1 = ims[:, 0]\n im2 = ims[:, 1]\n im3 = ims[:, 2]\n im4 = ims[:, 3]\n if self.act_buf_len:\n all_acts = torch.cat(x[4:], dim=1).float()\n else:\n raise NotImplementedError\n cat_im = torch.cat((im1, im2, im3, im4), dim=1)\n h = self.cnn(cat_im)\n h = torch.cat((h, vel, gear, rpm, all_acts), dim=1)\n h = self.fc1(h)\n return h\n\n\nclass TMActionValue(Sequential):\n\n def __init__(self, observation_space, action_space, act_buf_len=0):\n super().__init__(TM20CNNModule(observation_space, action_space,\n is_q_network=True, act_buf_len=act_buf_len), ReLU(), Linear(512,\n 256), ReLU(), Linear(256, 2))\n\n def forward(self, obs, action):\n x = *obs, action\n res = super().forward(x)\n return res\n\n\nclass TMPolicy(Sequential):\n\n def __init__(self, observation_space, action_space, act_buf_len=0):\n super().__init__(TM20CNNModule(observation_space, action_space,\n is_q_network=False, act_buf_len=act_buf_len), ReLU(), Linear(\n 512, 256), ReLU(), TanhNormalLayer(256, action_space.shape[0]))\n\n def forward(self, obs):\n res = 
super().forward(obs)\n return res\n\n\nclass Tm_hybrid_1(ActorModule):\n\n def __init__(self, observation_space, action_space, hidden_units: int=\n 512, num_critics: int=2, act_buf_len=0):\n super().__init__()\n assert isinstance(observation_space, gym.spaces.Tuple\n ), f'{observation_space} is not a spaces.Tuple'\n self.critics = ModuleList(TMActionValue(observation_space,\n action_space, act_buf_len=act_buf_len) for _ in range(num_critics))\n self.actor = TMPolicy(observation_space, action_space, act_buf_len=\n act_buf_len)\n self.critic_output_layers = [c[-1] for c in self.critics]\n",
"step-5": "# from dataclasses import InitVar, dataclass\n# standard library imports\nfrom math import floor\n\n# third-party imports\nimport gym\nimport torch\nfrom torch.nn import Conv2d, Linear, MaxPool2d, Module, ModuleList, ReLU, Sequential\nfrom torch.nn import functional as F\n\n# local imports\nfrom tmrl.nn import TanhNormalLayer\nfrom tmrl.sac_models import ActorModule, MlpActionValue, SacLinear, prod\nimport logging\n# === Trackmania =======================================================================================================\n\n\ndef num_flat_features(x):\n size = x.size()[1:]\n num_features = 1\n for s in size:\n num_features *= s\n return num_features\n\n\ndef conv2d_out_dims(conv_layer, h_in, w_in):\n h_out = floor((h_in + 2 * conv_layer.padding[0] - conv_layer.dilation[0] * (conv_layer.kernel_size[0] - 1) - 1) / conv_layer.stride[0] + 1)\n w_out = floor((w_in + 2 * conv_layer.padding[1] - conv_layer.dilation[1] * (conv_layer.kernel_size[1] - 1) - 1) / conv_layer.stride[1] + 1)\n return h_out, w_out\n\n\nclass Net(Module):\n def __init__(self):\n super(Net, self).__init__()\n self.conv1 = Conv2d(3, 8, (8, 8))\n self.conv2 = Conv2d(8, 16, (4, 4))\n self.conv3 = Conv2d(16, 32, (3, 3))\n self.conv4 = Conv2d(32, 64, (3, 3))\n self.fc1 = Linear(672, 253)\n\n def forward(self, x):\n x = F.max_pool2d(F.relu(self.conv1(x)), (4, 4))\n x = F.max_pool2d(F.relu(self.conv2(x)), (4, 4))\n x = F.max_pool2d(F.relu(self.conv3(x)), (4, 4))\n x = x.view(-1, num_flat_features(x))\n x = F.relu(self.fc1(x))\n return x\n\n\nclass DeepmindCNN(Module):\n def __init__(self, h_in, w_in, channels_in):\n super(DeepmindCNN, self).__init__()\n self.h_out, self.w_out = h_in, w_in\n\n self.conv1 = Conv2d(in_channels=channels_in, out_channels=32, kernel_size=(8, 8), stride=4, padding=0, dilation=1, bias=True, padding_mode='zeros')\n self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out, self.w_out)\n self.conv2 = Conv2d(in_channels=32, out_channels=64, 
kernel_size=(4, 4), stride=2, padding=0, dilation=1, bias=True, padding_mode='zeros')\n self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out, self.w_out)\n self.conv3 = Conv2d(in_channels=64, out_channels=64, kernel_size=(3, 3), stride=1, padding=0, dilation=1, bias=True, padding_mode='zeros')\n self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out, self.w_out)\n self.out_channels = self.conv3.out_channels\n self.flat_features = self.out_channels * self.h_out * self.w_out\n\n logging.debug(f\" h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}\")\n\n def forward(self, x):\n logging.debug(f\" forward, shape x :{x.shape}\")\n x = F.relu(self.conv1(x))\n x = F.relu(self.conv2(x))\n x = F.relu(self.conv3(x))\n flat_features = num_flat_features(x)\n assert flat_features == self.flat_features, f\"x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}\"\n x = x.view(-1, flat_features)\n return x\n\n\nclass BigCNN(Module):\n def __init__(self, h_in, w_in, channels_in):\n super(BigCNN, self).__init__()\n self.h_out, self.w_out = h_in, w_in\n\n self.conv1 = Conv2d(channels_in, 64, 8, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out, self.w_out)\n self.conv2 = Conv2d(64, 64, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out, self.w_out)\n self.conv3 = Conv2d(64, 128, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out, self.w_out)\n self.conv4 = Conv2d(128, 128, 4, stride=2)\n self.h_out, self.w_out = conv2d_out_dims(self.conv4, self.h_out, self.w_out)\n self.out_channels = self.conv4.out_channels\n self.flat_features = self.out_channels * self.h_out * self.w_out\n\n logging.debug(f\" h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}\")\n\n def forward(self, x): # TODO: Simon uses leaky relu 
instead of relu, see what works best\n # logging.debug(f\" forward, shape x :{x.shape}\")\n x = F.relu(self.conv1(x))\n x = F.relu(self.conv2(x))\n x = F.relu(self.conv3(x))\n x = F.relu(self.conv4(x))\n flat_features = num_flat_features(x)\n assert flat_features == self.flat_features, f\"x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}\"\n x = x.view(-1, flat_features)\n return x\n\n\nclass TM20CNNModule(Module):\n def __init__(self, observation_space, action_space, is_q_network, act_buf_len=0):\n super().__init__()\n assert isinstance(observation_space, gym.spaces.Tuple)\n # torch.autograd.set_detect_anomaly(True) # FIXME: remove for optimization\n self.img_dims = observation_space[3].shape\n self.vel_dim = observation_space[0].shape[0]\n self.gear_dim = observation_space[1].shape[0]\n self.rpm_dim = observation_space[2].shape[0]\n self.is_q_network = is_q_network\n self.act_buf_len = act_buf_len\n self.act_dim = action_space.shape[0]\n\n logging.debug(f\" self.img_dims: {self.img_dims}\")\n h_in = self.img_dims[2]\n w_in = self.img_dims[3]\n channels_in = self.img_dims[0] * self.img_dims[1] # successive images as channels\n\n self.cnn = BigCNN(h_in=h_in, w_in=w_in, channels_in=channels_in)\n\n dim_fc1_in = self.cnn.flat_features + self.vel_dim + self.gear_dim + self.rpm_dim\n if self.is_q_network:\n dim_fc1_in += self.act_dim\n if self.act_buf_len:\n dim_fc1_in += self.act_dim * self.act_buf_len\n self.fc1 = Linear(dim_fc1_in, 512)\n\n def forward(self, x):\n # assert isinstance(x, tuple), f\"x is not a tuple: {x}\"\n vel = x[0].float()\n gear = x[1].float()\n rpm = x[2].float()\n ims = x[3].float()\n im1 = ims[:, 0]\n im2 = ims[:, 1]\n im3 = ims[:, 2]\n im4 = ims[:, 3]\n # logging.debug(f\" forward: im1.shape:{im1.shape}\")\n if self.act_buf_len:\n all_acts = torch.cat((x[4:]), dim=1).float() # if q network, the last action will be act\n else:\n raise NotImplementedError\n 
cat_im = torch.cat((im1, im2, im3, im4), dim=1) # cat on channel dimension # TODO : check device\n h = self.cnn(cat_im)\n h = torch.cat((h, vel, gear, rpm, all_acts), dim=1)\n h = self.fc1(h) # No ReLU here because this is done in the Sequential\n return h\n\n\nclass TMActionValue(Sequential):\n def __init__(self, observation_space, action_space, act_buf_len=0):\n super().__init__(\n TM20CNNModule(observation_space, action_space, is_q_network=True, act_buf_len=act_buf_len),\n ReLU(),\n Linear(512, 256),\n ReLU(),\n Linear(256, 2) # we separate reward components\n )\n\n # noinspection PyMethodOverriding\n def forward(self, obs, action):\n x = (*obs, action)\n res = super().forward(x)\n # logging.debug(f\" av res:{res}\")\n return res\n\n\nclass TMPolicy(Sequential):\n def __init__(self, observation_space, action_space, act_buf_len=0):\n super().__init__(TM20CNNModule(observation_space, action_space, is_q_network=False, act_buf_len=act_buf_len), ReLU(), Linear(512, 256), ReLU(), TanhNormalLayer(256, action_space.shape[0]))\n\n # noinspection PyMethodOverriding\n def forward(self, obs):\n # res = super().forward(torch.cat(obs, 1))\n res = super().forward(obs)\n # logging.debug(f\" po res:{res}\")\n return res\n\n\nclass Tm_hybrid_1(ActorModule):\n def __init__(self, observation_space, action_space, hidden_units: int = 512, num_critics: int = 2, act_buf_len=0):\n super().__init__()\n assert isinstance(observation_space, gym.spaces.Tuple), f\"{observation_space} is not a spaces.Tuple\"\n self.critics = ModuleList(TMActionValue(observation_space, action_space, act_buf_len=act_buf_len) for _ in range(num_critics))\n self.actor = TMPolicy(observation_space, action_space, act_buf_len=act_buf_len)\n self.critic_output_layers = [c[-1] for c in self.critics]\n",
"step-ids": [
14,
16,
19,
22,
24
]
}
|
[
14,
16,
19,
22,
24
] |
<|reserved_special_token_0|>
class MyAdmin(admin.ModelAdmin):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class CalcResultAdmin(MyAdmin):
list_display = 'result', 'message', 'time'
search_fields = 'result', 'message', 'time'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MyAdmin(admin.ModelAdmin):
<|reserved_special_token_0|>
def has_delete_permission(self, request, obj=None):
return False
class CalcResultAdmin(MyAdmin):
list_display = 'result', 'message', 'time'
search_fields = 'result', 'message', 'time'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MyAdmin(admin.ModelAdmin):
def has_add_permission(self, request, obj=None):
return False
def has_delete_permission(self, request, obj=None):
return False
class CalcResultAdmin(MyAdmin):
list_display = 'result', 'message', 'time'
search_fields = 'result', 'message', 'time'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MyAdmin(admin.ModelAdmin):
def has_add_permission(self, request, obj=None):
return False
def has_delete_permission(self, request, obj=None):
return False
class CalcResultAdmin(MyAdmin):
list_display = 'result', 'message', 'time'
search_fields = 'result', 'message', 'time'
admin.site.register(CalcResult, CalcResultAdmin)
<|reserved_special_token_1|>
from django.contrib import admin
from calc.models import CalcResult
class MyAdmin(admin.ModelAdmin):
    """Read-only-ish base admin: records can be viewed but not added or deleted."""

    def has_add_permission(self, request, obj=None):
        """Disallow creating records through the admin."""
        return False

    def has_delete_permission(self, request, obj=None):
        """Disallow deleting records through the admin."""
        return False
class CalcResultAdmin(MyAdmin):
    """Admin for CalcResult rows; add/delete are disabled via the MyAdmin base."""
    # Columns shown on the change-list page.
    list_display = ('result', 'message', 'time',)
    # Fields scanned by the admin search box.
    search_fields = ('result', 'message', 'time',)

admin.site.register(CalcResult, CalcResultAdmin)  # expose CalcResult in the admin site
|
flexible
|
{
"blob_id": "e2573a5dc507e9aeb811fbc254129aeb6e54cc0b",
"index": 2483,
"step-1": "<mask token>\n\n\nclass MyAdmin(admin.ModelAdmin):\n <mask token>\n <mask token>\n\n\nclass CalcResultAdmin(MyAdmin):\n list_display = 'result', 'message', 'time'\n search_fields = 'result', 'message', 'time'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass MyAdmin(admin.ModelAdmin):\n <mask token>\n\n def has_delete_permission(self, request, obj=None):\n return False\n\n\nclass CalcResultAdmin(MyAdmin):\n list_display = 'result', 'message', 'time'\n search_fields = 'result', 'message', 'time'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass MyAdmin(admin.ModelAdmin):\n\n def has_add_permission(self, request, obj=None):\n return False\n\n def has_delete_permission(self, request, obj=None):\n return False\n\n\nclass CalcResultAdmin(MyAdmin):\n list_display = 'result', 'message', 'time'\n search_fields = 'result', 'message', 'time'\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass MyAdmin(admin.ModelAdmin):\n\n def has_add_permission(self, request, obj=None):\n return False\n\n def has_delete_permission(self, request, obj=None):\n return False\n\n\nclass CalcResultAdmin(MyAdmin):\n list_display = 'result', 'message', 'time'\n search_fields = 'result', 'message', 'time'\n\n\nadmin.site.register(CalcResult, CalcResultAdmin)\n",
"step-5": "from django.contrib import admin\n\nfrom calc.models import CalcResult\n\n\nclass MyAdmin(admin.ModelAdmin):\n def has_add_permission(self, request, obj=None):\n return False\n\n def has_delete_permission(self, request, obj=None):\n return False\n\n\nclass CalcResultAdmin(MyAdmin):\n list_display = ('result', 'message', 'time',)\n search_fields = ('result', 'message', 'time',)\n\nadmin.site.register(CalcResult, CalcResultAdmin)\n",
"step-ids": [
3,
4,
5,
6,
8
]
}
|
[
3,
4,
5,
6,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('sandbox_report',
'0006_sandboxreportlink_sandboxreportval')]
operations = [migrations.DeleteModel(name='SandboxReportLink'),
migrations.DeleteModel(name='SandboxReportVal'), migrations.
DeleteModel(name='SandboxTask')]
<|reserved_special_token_1|>
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('sandbox_report',
'0006_sandboxreportlink_sandboxreportval')]
operations = [migrations.DeleteModel(name='SandboxReportLink'),
migrations.DeleteModel(name='SandboxReportVal'), migrations.
DeleteModel(name='SandboxTask')]
<|reserved_special_token_1|>
# Generated by Django 2.1.1 on 2019-11-20 12:34
from django.db import migrations
class Migration(migrations.Migration):
    """Delete the SandboxReportLink, SandboxReportVal and SandboxTask models."""

    # Must apply after the migration that last defined these models.
    dependencies = [
        ('sandbox_report', '0006_sandboxreportlink_sandboxreportval'),
    ]

    # DeleteModel drops each model's table; any stored rows are lost on apply.
    operations = [
        migrations.DeleteModel(
            name='SandboxReportLink',
        ),
        migrations.DeleteModel(
            name='SandboxReportVal',
        ),
        migrations.DeleteModel(
            name='SandboxTask',
        ),
    ]
|
flexible
|
{
"blob_id": "b92497396e711d705760db547b43cc65beba6cfd",
"index": 6172,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('sandbox_report',\n '0006_sandboxreportlink_sandboxreportval')]\n operations = [migrations.DeleteModel(name='SandboxReportLink'),\n migrations.DeleteModel(name='SandboxReportVal'), migrations.\n DeleteModel(name='SandboxTask')]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('sandbox_report',\n '0006_sandboxreportlink_sandboxreportval')]\n operations = [migrations.DeleteModel(name='SandboxReportLink'),\n migrations.DeleteModel(name='SandboxReportVal'), migrations.\n DeleteModel(name='SandboxTask')]\n",
"step-5": "# Generated by Django 2.1.1 on 2019-11-20 12:34\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('sandbox_report', '0006_sandboxreportlink_sandboxreportval'),\n ]\n\n operations = [\n migrations.DeleteModel(\n name='SandboxReportLink',\n ),\n migrations.DeleteModel(\n name='SandboxReportVal',\n ),\n migrations.DeleteModel(\n name='SandboxTask',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 5 10:04:05 2019
@author: cristina

Bogoliubov-de Gennes tight-binding model of two 1-D superconducting chains
separated by M empty sites: builds the Hamiltonian in 2x2 Nambu blocks,
checks Hermiticity, diagonalizes it, then (below) computes and plots the
local density of states at the edges and in the bulk of each chain.
"""
import numpy as np
from itertools import chain
from numpy import linalg as LA
diag = LA.eigh  # Hermitian eigensolver: returns (eigenvalues, eigenvectors)
import matplotlib.pyplot as plt
plt.rcParams.update({'font.size': 13})
import time
pi = np.pi
exp = np.exp
t1 = time.time()  # wall-clock start; elapsed time printed at the end
N = 2000 #number of sites
M = 200 #number of empty sites
m = 1.0 #effective mass
delta =1.35/27211.6 #SC gap
mu = 1.0/27211.6 #chemical potential
mu = 0.0  # NOTE(review): deliberately overrides the value above — mu switched off? confirm
a = 4.98/0.529 ##lattice constant
phi = pi/2.0#phase of second SC
phi = 0.0  # NOTE(review): overrides the pi/2 above — both chains end up in phase; confirm
# Full BdG Hamiltonian (2 Nambu components per site) and its per-site block form.
H = np.zeros([2*(2*N + M), 2*(2*N + M)], dtype=complex)
h = np.zeros([2*N + M, 2*N + M, 2, 2], dtype=complex)
factor = 1/(m*a**2) - mu    # on-site energy of the discretized kinetic term
factor_2 = -1/(2*m*a**2)    # nearest-neighbour hopping amplitude
hopping = factor_2*10
hopping = 0.0  # NOTE(review): inter-chain hopping disabled by this override — confirm
#diagonal terms
range1_diagonal = range(N)               # sites of chain 1
range2_diagonal = range(N+M, 2*N+M - 1)  # NOTE(review): stops at 2*N+M-2, so the last
                                         # site of chain 2 gets no on-site/pairing term
                                         # — possible off-by-one, confirm
for i in range1_diagonal:
    g_i = i
    h[g_i, g_i, 0, 1] = delta
    h[g_i, g_i, 1, 0] = delta
    h[g_i, g_i, 0, 0] = factor
    h[g_i, g_i, 1, 1] = - factor
for i in range2_diagonal:
    g_i = i
    # Chain 2 carries the phase phi on its pairing (off-diagonal Nambu) terms.
    h[g_i, g_i, 0, 1] = delta*exp(1j*phi)
    h[g_i, g_i, 1, 0] = delta*exp(-1j*phi)
    h[g_i, g_i, 0, 0] = factor
    h[g_i, g_i, 1, 1] = - factor
#off - diagonal terms
range1_offdiagonal = range(N - 1)
range2_offdiagonal = range(N+M, 2*N+M - 1)
range_offdiagonal = chain(range1_offdiagonal, range2_offdiagonal)
for i in range_offdiagonal:
    g_i = i
    g_j = i + 1
    # Nearest-neighbour hopping; opposite sign in the hole (1,1) sector.
    h[g_i, g_j, 0, 0] = factor_2
    h[g_i, g_j, 1, 1] = - factor_2
    h[g_j, g_i, 0, 0] = factor_2
    h[g_j, g_i, 1, 1] = - factor_2
#hopping between the 2 Chains
h[N - 1, N + M, 0, 0] = hopping
h[N - 1, N + M, 1, 1] = - hopping
h[N + M, N - 1, 0, 0] = hopping
h[N + M, N - 1, 1, 1] = - hopping
# Flatten the (site, site, 2, 2) block array into the full square matrix.
for i in range(2*N + M):
    for j in range(2*N + M):
        for t_i in range(2):
            for t_j in range(2):
                H[(i) * 2 + t_i, (j) * 2 + t_j] = h[i, j, t_i, t_j]
H = np.matrix(H)
T = np.allclose(H, H.getH())###check if Hermitian
print('Is H an Hermitian matrix?', T)
(E, psi) = diag(H)####diagonalize H
####LDOS functions
def LDOS_up(omega, E, u, Damping):
    """Lorentzian-broadened LDOS of the particle (u) components at energy omega.

    Sums |u_n|^2 weighted Green's-function poles at the eigenvalues E_n,
    each broadened by `Damping`, and returns -Im(sum)/pi.
    """
    green = sum(u**2 / (omega - E + 1j*Damping))
    return -1/pi*np.imag(green)
def LDOS_down(omega, E, v, Damping):
    """Lorentzian-broadened LDOS of the hole (v) components at energy omega.

    Same as LDOS_up but with poles mirrored to -E_n (omega + E in the
    denominator), broadened by `Damping`.
    """
    green = sum(v**2 / (omega + E + 1j*Damping))
    return -1/pi*np.imag(green)
#### u and v components in the Nth atom
# Extract the particle (u) and hole (v) amplitudes of every eigenstate at four
# probe sites: the edge of each chain and the middle (bulk) of each chain.
u_borde1 = np.zeros(len(E))
v_borde1 = np.zeros(len(E))
I = N - 1
u_borde2 = np.zeros(len(E))
v_borde2 = np.zeros(len(E))
I2 = N + M - 1  # NOTE(review): N+M-1 is the last *empty* site; the first site
                # of chain 2 is N+M — confirm which edge was intended
u_bulk1 = np.zeros(len(E))
v_bulk1 = np.zeros(len(E))
I3 = int(N/2) - 1
u_bulk2 = np.zeros(len(E))
v_bulk2 = np.zeros(len(E))
I4 = N + M + int(N/2.0) - 1
I = N  # NOTE(review): overrides I = N - 1 above — confirm intended probe site
for i in range(len(E)):
    # psi columns are eigenvectors; rows alternate (u, v) per site, so site I
    # occupies rows 2*I-2 (u) and 2*I-1 (v) under this 1-based site convention.
    u_borde1[i] = psi[2*I-2,i]
    v_borde1[i] = psi[2*I-1,i]
    u_borde2[i] = psi[2*I2-2,i]
    v_borde2[i] = psi[2*I2-1,i]
    u_bulk1[i] = psi[2*I3-2,i]
    v_bulk1[i] = psi[2*I3-1,i]
    u_bulk2[i] = psi[2*I4-2,i]
    v_bulk2[i] = psi[2*I4-1,i]
###calculate LDOS
omega = np.linspace(-4*delta, 4*delta, 2000)#omega vector
LDOS_borde1_up = np.zeros(len(omega))
LDOS_borde1_down = np.zeros(len(omega))
LDOS_borde2_up = np.zeros(len(omega))
LDOS_borde2_down = np.zeros(len(omega))
LDOS_bulk1_up = np.zeros(len(omega))
LDOS_bulk1_down = np.zeros(len(omega))
LDOS_bulk2_up = np.zeros(len(omega))
LDOS_bulk2_down = np.zeros(len(omega))
D = 0.02/27211.6  # Lorentzian broadening of the spectral peaks
# NOTE(review): the "down" curves are also computed with LDOS_up (poles at
# omega - E); LDOS_down (poles at omega + E) is defined above but never used
# — confirm whether that substitution is intentional.
for i in range(len(omega)):
    LDOS_borde1_up[i] = LDOS_up(omega[i], E, u_borde1, D)
    LDOS_borde1_down[i] = LDOS_up(omega[i], E, v_borde1, D)
    LDOS_borde2_up[i] = LDOS_up(omega[i], E, u_borde2, D)
    LDOS_borde2_down[i] = LDOS_up(omega[i], E, v_borde2, D)
    LDOS_bulk1_up[i] = LDOS_up(omega[i], E, u_bulk1, D)
    LDOS_bulk1_down[i] = LDOS_up(omega[i], E, v_bulk1, D)
    LDOS_bulk2_up[i] = LDOS_up(omega[i], E, u_bulk2, D)
    LDOS_bulk2_down[i] = LDOS_up(omega[i], E, v_bulk2, D)
###plot LDOS
# One figure per probe site: total, up and down curves versus energy
# (energies rescaled by 27211.6 for the plot axis).
plt.figure(1)
plt.plot(omega*27211.6, LDOS_borde1_up + LDOS_borde1_down)
plt.plot(omega*27211.6, LDOS_borde1_up, label = 'up')
plt.plot(omega*27211.6, LDOS_borde1_down, label = 'down')
plt.title('Borde SC 1')
#plt.title('Site %i' %I)
plt.legend()
plt.figure(2)
plt.plot(omega*27211.6, LDOS_borde2_up + LDOS_borde2_down)
plt.plot(omega*27211.6, LDOS_borde2_up, label = 'up')
plt.plot(omega*27211.6, LDOS_borde2_down, label = 'down')
plt.title('Borde SC 2')
#plt.title('Site %i' %I)
plt.legend()
plt.figure(3)
plt.plot(omega*27211.6, LDOS_bulk1_up + LDOS_bulk1_down)
plt.plot(omega*27211.6, LDOS_bulk1_up, label = 'up')
plt.plot(omega*27211.6, LDOS_bulk1_down, label = 'down')
plt.title('Bulk SC 1')
#plt.title('Site %i' %I)
plt.legend()
plt.figure(4)
plt.plot(omega*27211.6, LDOS_bulk2_up + LDOS_bulk2_down)
plt.plot(omega*27211.6, LDOS_bulk2_up, label = 'up')
plt.plot(omega*27211.6, LDOS_bulk2_down, label = 'down')
plt.title('Bulk SC 2')
#plt.title('Site %i' %I)
plt.legend()
t2 = time.time()
print('Program finished after', (t2 - t1)/60.0, 'mins')
|
normal
|
{
"blob_id": "f2ad95574b65b4d3e44b85c76f3a0150a3275cec",
"index": 2356,
"step-1": "<mask token>\n\n\ndef LDOS_up(omega, E, u, Damping):\n t = sum(u ** 2 / (omega - E + 1.0j * Damping))\n tt = -1 / pi * np.imag(t)\n return tt\n\n\ndef LDOS_down(omega, E, v, Damping):\n t = sum(v ** 2 / (omega + E + 1.0j * Damping))\n tt = -1 / pi * np.imag(t)\n return tt\n\n\n<mask token>\n",
"step-2": "<mask token>\nplt.rcParams.update({'font.size': 13})\n<mask token>\nfor i in range1_diagonal:\n g_i = i\n h[g_i, g_i, 0, 1] = delta\n h[g_i, g_i, 1, 0] = delta\n h[g_i, g_i, 0, 0] = factor\n h[g_i, g_i, 1, 1] = -factor\nfor i in range2_diagonal:\n g_i = i\n h[g_i, g_i, 0, 1] = delta * exp(1.0j * phi)\n h[g_i, g_i, 1, 0] = delta * exp(-1.0j * phi)\n h[g_i, g_i, 0, 0] = factor\n h[g_i, g_i, 1, 1] = -factor\n<mask token>\nfor i in range_offdiagonal:\n g_i = i\n g_j = i + 1\n h[g_i, g_j, 0, 0] = factor_2\n h[g_i, g_j, 1, 1] = -factor_2\n h[g_j, g_i, 0, 0] = factor_2\n h[g_j, g_i, 1, 1] = -factor_2\n<mask token>\nfor i in range(2 * N + M):\n for j in range(2 * N + M):\n for t_i in range(2):\n for t_j in range(2):\n H[i * 2 + t_i, j * 2 + t_j] = h[i, j, t_i, t_j]\n<mask token>\nprint('Is H an Hermitian matrix?', T)\n<mask token>\n\n\ndef LDOS_up(omega, E, u, Damping):\n t = sum(u ** 2 / (omega - E + 1.0j * Damping))\n tt = -1 / pi * np.imag(t)\n return tt\n\n\ndef LDOS_down(omega, E, v, Damping):\n t = sum(v ** 2 / (omega + E + 1.0j * Damping))\n tt = -1 / pi * np.imag(t)\n return tt\n\n\n<mask token>\nfor i in range(len(E)):\n u_borde1[i] = psi[2 * I - 2, i]\n v_borde1[i] = psi[2 * I - 1, i]\n u_borde2[i] = psi[2 * I2 - 2, i]\n v_borde2[i] = psi[2 * I2 - 1, i]\n u_bulk1[i] = psi[2 * I3 - 2, i]\n v_bulk1[i] = psi[2 * I3 - 1, i]\n u_bulk2[i] = psi[2 * I4 - 2, i]\n v_bulk2[i] = psi[2 * I4 - 1, i]\n<mask token>\nfor i in range(len(omega)):\n LDOS_borde1_up[i] = LDOS_up(omega[i], E, u_borde1, D)\n LDOS_borde1_down[i] = LDOS_up(omega[i], E, v_borde1, D)\n LDOS_borde2_up[i] = LDOS_up(omega[i], E, u_borde2, D)\n LDOS_borde2_down[i] = LDOS_up(omega[i], E, v_borde2, D)\n LDOS_bulk1_up[i] = LDOS_up(omega[i], E, u_bulk1, D)\n LDOS_bulk1_down[i] = LDOS_up(omega[i], E, v_bulk1, D)\n LDOS_bulk2_up[i] = LDOS_up(omega[i], E, u_bulk2, D)\n LDOS_bulk2_down[i] = LDOS_up(omega[i], E, v_bulk2, D)\nplt.figure(1)\nplt.plot(omega * 27211.6, LDOS_borde1_up + 
LDOS_borde1_down)\nplt.plot(omega * 27211.6, LDOS_borde1_up, label='up')\nplt.plot(omega * 27211.6, LDOS_borde1_down, label='down')\nplt.title('Borde SC 1')\nplt.legend()\nplt.figure(2)\nplt.plot(omega * 27211.6, LDOS_borde2_up + LDOS_borde2_down)\nplt.plot(omega * 27211.6, LDOS_borde2_up, label='up')\nplt.plot(omega * 27211.6, LDOS_borde2_down, label='down')\nplt.title('Borde SC 2')\nplt.legend()\nplt.figure(3)\nplt.plot(omega * 27211.6, LDOS_bulk1_up + LDOS_bulk1_down)\nplt.plot(omega * 27211.6, LDOS_bulk1_up, label='up')\nplt.plot(omega * 27211.6, LDOS_bulk1_down, label='down')\nplt.title('Bulk SC 1')\nplt.legend()\nplt.figure(4)\nplt.plot(omega * 27211.6, LDOS_bulk2_up + LDOS_bulk2_down)\nplt.plot(omega * 27211.6, LDOS_bulk2_up, label='up')\nplt.plot(omega * 27211.6, LDOS_bulk2_down, label='down')\nplt.title('Bulk SC 2')\nplt.legend()\n<mask token>\nprint('Program finished after', (t2 - t1) / 60.0, 'mins')\n",
"step-3": "<mask token>\ndiag = LA.eigh\n<mask token>\nplt.rcParams.update({'font.size': 13})\n<mask token>\npi = np.pi\nexp = np.exp\nt1 = time.time()\nN = 2000\nM = 200\nm = 1.0\ndelta = 1.35 / 27211.6\nmu = 1.0 / 27211.6\nmu = 0.0\na = 4.98 / 0.529\nphi = pi / 2.0\nphi = 0.0\nH = np.zeros([2 * (2 * N + M), 2 * (2 * N + M)], dtype=complex)\nh = np.zeros([2 * N + M, 2 * N + M, 2, 2], dtype=complex)\nfactor = 1 / (m * a ** 2) - mu\nfactor_2 = -1 / (2 * m * a ** 2)\nhopping = factor_2 * 10\nhopping = 0.0\nrange1_diagonal = range(N)\nrange2_diagonal = range(N + M, 2 * N + M - 1)\nfor i in range1_diagonal:\n g_i = i\n h[g_i, g_i, 0, 1] = delta\n h[g_i, g_i, 1, 0] = delta\n h[g_i, g_i, 0, 0] = factor\n h[g_i, g_i, 1, 1] = -factor\nfor i in range2_diagonal:\n g_i = i\n h[g_i, g_i, 0, 1] = delta * exp(1.0j * phi)\n h[g_i, g_i, 1, 0] = delta * exp(-1.0j * phi)\n h[g_i, g_i, 0, 0] = factor\n h[g_i, g_i, 1, 1] = -factor\nrange1_offdiagonal = range(N - 1)\nrange2_offdiagonal = range(N + M, 2 * N + M - 1)\nrange_offdiagonal = chain(range1_offdiagonal, range2_offdiagonal)\nfor i in range_offdiagonal:\n g_i = i\n g_j = i + 1\n h[g_i, g_j, 0, 0] = factor_2\n h[g_i, g_j, 1, 1] = -factor_2\n h[g_j, g_i, 0, 0] = factor_2\n h[g_j, g_i, 1, 1] = -factor_2\nh[N - 1, N + M, 0, 0] = hopping\nh[N - 1, N + M, 1, 1] = -hopping\nh[N + M, N - 1, 0, 0] = hopping\nh[N + M, N - 1, 1, 1] = -hopping\nfor i in range(2 * N + M):\n for j in range(2 * N + M):\n for t_i in range(2):\n for t_j in range(2):\n H[i * 2 + t_i, j * 2 + t_j] = h[i, j, t_i, t_j]\nH = np.matrix(H)\nT = np.allclose(H, H.getH())\nprint('Is H an Hermitian matrix?', T)\nE, psi = diag(H)\n\n\ndef LDOS_up(omega, E, u, Damping):\n t = sum(u ** 2 / (omega - E + 1.0j * Damping))\n tt = -1 / pi * np.imag(t)\n return tt\n\n\ndef LDOS_down(omega, E, v, Damping):\n t = sum(v ** 2 / (omega + E + 1.0j * Damping))\n tt = -1 / pi * np.imag(t)\n return tt\n\n\nu_borde1 = np.zeros(len(E))\nv_borde1 = np.zeros(len(E))\nI = N - 1\nu_borde2 = 
np.zeros(len(E))\nv_borde2 = np.zeros(len(E))\nI2 = N + M - 1\nu_bulk1 = np.zeros(len(E))\nv_bulk1 = np.zeros(len(E))\nI3 = int(N / 2) - 1\nu_bulk2 = np.zeros(len(E))\nv_bulk2 = np.zeros(len(E))\nI4 = N + M + int(N / 2.0) - 1\nI = N\nfor i in range(len(E)):\n u_borde1[i] = psi[2 * I - 2, i]\n v_borde1[i] = psi[2 * I - 1, i]\n u_borde2[i] = psi[2 * I2 - 2, i]\n v_borde2[i] = psi[2 * I2 - 1, i]\n u_bulk1[i] = psi[2 * I3 - 2, i]\n v_bulk1[i] = psi[2 * I3 - 1, i]\n u_bulk2[i] = psi[2 * I4 - 2, i]\n v_bulk2[i] = psi[2 * I4 - 1, i]\nomega = np.linspace(-4 * delta, 4 * delta, 2000)\nLDOS_borde1_up = np.zeros(len(omega))\nLDOS_borde1_down = np.zeros(len(omega))\nLDOS_borde2_up = np.zeros(len(omega))\nLDOS_borde2_down = np.zeros(len(omega))\nLDOS_bulk1_up = np.zeros(len(omega))\nLDOS_bulk1_down = np.zeros(len(omega))\nLDOS_bulk2_up = np.zeros(len(omega))\nLDOS_bulk2_down = np.zeros(len(omega))\nD = 0.02 / 27211.6\nfor i in range(len(omega)):\n LDOS_borde1_up[i] = LDOS_up(omega[i], E, u_borde1, D)\n LDOS_borde1_down[i] = LDOS_up(omega[i], E, v_borde1, D)\n LDOS_borde2_up[i] = LDOS_up(omega[i], E, u_borde2, D)\n LDOS_borde2_down[i] = LDOS_up(omega[i], E, v_borde2, D)\n LDOS_bulk1_up[i] = LDOS_up(omega[i], E, u_bulk1, D)\n LDOS_bulk1_down[i] = LDOS_up(omega[i], E, v_bulk1, D)\n LDOS_bulk2_up[i] = LDOS_up(omega[i], E, u_bulk2, D)\n LDOS_bulk2_down[i] = LDOS_up(omega[i], E, v_bulk2, D)\nplt.figure(1)\nplt.plot(omega * 27211.6, LDOS_borde1_up + LDOS_borde1_down)\nplt.plot(omega * 27211.6, LDOS_borde1_up, label='up')\nplt.plot(omega * 27211.6, LDOS_borde1_down, label='down')\nplt.title('Borde SC 1')\nplt.legend()\nplt.figure(2)\nplt.plot(omega * 27211.6, LDOS_borde2_up + LDOS_borde2_down)\nplt.plot(omega * 27211.6, LDOS_borde2_up, label='up')\nplt.plot(omega * 27211.6, LDOS_borde2_down, label='down')\nplt.title('Borde SC 2')\nplt.legend()\nplt.figure(3)\nplt.plot(omega * 27211.6, LDOS_bulk1_up + LDOS_bulk1_down)\nplt.plot(omega * 27211.6, LDOS_bulk1_up, label='up')\nplt.plot(omega 
* 27211.6, LDOS_bulk1_down, label='down')\nplt.title('Bulk SC 1')\nplt.legend()\nplt.figure(4)\nplt.plot(omega * 27211.6, LDOS_bulk2_up + LDOS_bulk2_down)\nplt.plot(omega * 27211.6, LDOS_bulk2_up, label='up')\nplt.plot(omega * 27211.6, LDOS_bulk2_down, label='down')\nplt.title('Bulk SC 2')\nplt.legend()\nt2 = time.time()\nprint('Program finished after', (t2 - t1) / 60.0, 'mins')\n",
"step-4": "<mask token>\nimport numpy as np\nfrom itertools import chain\nfrom numpy import linalg as LA\ndiag = LA.eigh\nimport matplotlib.pyplot as plt\nplt.rcParams.update({'font.size': 13})\nimport time\npi = np.pi\nexp = np.exp\nt1 = time.time()\nN = 2000\nM = 200\nm = 1.0\ndelta = 1.35 / 27211.6\nmu = 1.0 / 27211.6\nmu = 0.0\na = 4.98 / 0.529\nphi = pi / 2.0\nphi = 0.0\nH = np.zeros([2 * (2 * N + M), 2 * (2 * N + M)], dtype=complex)\nh = np.zeros([2 * N + M, 2 * N + M, 2, 2], dtype=complex)\nfactor = 1 / (m * a ** 2) - mu\nfactor_2 = -1 / (2 * m * a ** 2)\nhopping = factor_2 * 10\nhopping = 0.0\nrange1_diagonal = range(N)\nrange2_diagonal = range(N + M, 2 * N + M - 1)\nfor i in range1_diagonal:\n g_i = i\n h[g_i, g_i, 0, 1] = delta\n h[g_i, g_i, 1, 0] = delta\n h[g_i, g_i, 0, 0] = factor\n h[g_i, g_i, 1, 1] = -factor\nfor i in range2_diagonal:\n g_i = i\n h[g_i, g_i, 0, 1] = delta * exp(1.0j * phi)\n h[g_i, g_i, 1, 0] = delta * exp(-1.0j * phi)\n h[g_i, g_i, 0, 0] = factor\n h[g_i, g_i, 1, 1] = -factor\nrange1_offdiagonal = range(N - 1)\nrange2_offdiagonal = range(N + M, 2 * N + M - 1)\nrange_offdiagonal = chain(range1_offdiagonal, range2_offdiagonal)\nfor i in range_offdiagonal:\n g_i = i\n g_j = i + 1\n h[g_i, g_j, 0, 0] = factor_2\n h[g_i, g_j, 1, 1] = -factor_2\n h[g_j, g_i, 0, 0] = factor_2\n h[g_j, g_i, 1, 1] = -factor_2\nh[N - 1, N + M, 0, 0] = hopping\nh[N - 1, N + M, 1, 1] = -hopping\nh[N + M, N - 1, 0, 0] = hopping\nh[N + M, N - 1, 1, 1] = -hopping\nfor i in range(2 * N + M):\n for j in range(2 * N + M):\n for t_i in range(2):\n for t_j in range(2):\n H[i * 2 + t_i, j * 2 + t_j] = h[i, j, t_i, t_j]\nH = np.matrix(H)\nT = np.allclose(H, H.getH())\nprint('Is H an Hermitian matrix?', T)\nE, psi = diag(H)\n\n\ndef LDOS_up(omega, E, u, Damping):\n t = sum(u ** 2 / (omega - E + 1.0j * Damping))\n tt = -1 / pi * np.imag(t)\n return tt\n\n\ndef LDOS_down(omega, E, v, Damping):\n t = sum(v ** 2 / (omega + E + 1.0j * Damping))\n tt = -1 / pi * np.imag(t)\n 
return tt\n\n\nu_borde1 = np.zeros(len(E))\nv_borde1 = np.zeros(len(E))\nI = N - 1\nu_borde2 = np.zeros(len(E))\nv_borde2 = np.zeros(len(E))\nI2 = N + M - 1\nu_bulk1 = np.zeros(len(E))\nv_bulk1 = np.zeros(len(E))\nI3 = int(N / 2) - 1\nu_bulk2 = np.zeros(len(E))\nv_bulk2 = np.zeros(len(E))\nI4 = N + M + int(N / 2.0) - 1\nI = N\nfor i in range(len(E)):\n u_borde1[i] = psi[2 * I - 2, i]\n v_borde1[i] = psi[2 * I - 1, i]\n u_borde2[i] = psi[2 * I2 - 2, i]\n v_borde2[i] = psi[2 * I2 - 1, i]\n u_bulk1[i] = psi[2 * I3 - 2, i]\n v_bulk1[i] = psi[2 * I3 - 1, i]\n u_bulk2[i] = psi[2 * I4 - 2, i]\n v_bulk2[i] = psi[2 * I4 - 1, i]\nomega = np.linspace(-4 * delta, 4 * delta, 2000)\nLDOS_borde1_up = np.zeros(len(omega))\nLDOS_borde1_down = np.zeros(len(omega))\nLDOS_borde2_up = np.zeros(len(omega))\nLDOS_borde2_down = np.zeros(len(omega))\nLDOS_bulk1_up = np.zeros(len(omega))\nLDOS_bulk1_down = np.zeros(len(omega))\nLDOS_bulk2_up = np.zeros(len(omega))\nLDOS_bulk2_down = np.zeros(len(omega))\nD = 0.02 / 27211.6\nfor i in range(len(omega)):\n LDOS_borde1_up[i] = LDOS_up(omega[i], E, u_borde1, D)\n LDOS_borde1_down[i] = LDOS_up(omega[i], E, v_borde1, D)\n LDOS_borde2_up[i] = LDOS_up(omega[i], E, u_borde2, D)\n LDOS_borde2_down[i] = LDOS_up(omega[i], E, v_borde2, D)\n LDOS_bulk1_up[i] = LDOS_up(omega[i], E, u_bulk1, D)\n LDOS_bulk1_down[i] = LDOS_up(omega[i], E, v_bulk1, D)\n LDOS_bulk2_up[i] = LDOS_up(omega[i], E, u_bulk2, D)\n LDOS_bulk2_down[i] = LDOS_up(omega[i], E, v_bulk2, D)\nplt.figure(1)\nplt.plot(omega * 27211.6, LDOS_borde1_up + LDOS_borde1_down)\nplt.plot(omega * 27211.6, LDOS_borde1_up, label='up')\nplt.plot(omega * 27211.6, LDOS_borde1_down, label='down')\nplt.title('Borde SC 1')\nplt.legend()\nplt.figure(2)\nplt.plot(omega * 27211.6, LDOS_borde2_up + LDOS_borde2_down)\nplt.plot(omega * 27211.6, LDOS_borde2_up, label='up')\nplt.plot(omega * 27211.6, LDOS_borde2_down, label='down')\nplt.title('Borde SC 2')\nplt.legend()\nplt.figure(3)\nplt.plot(omega * 27211.6, 
LDOS_bulk1_up + LDOS_bulk1_down)\nplt.plot(omega * 27211.6, LDOS_bulk1_up, label='up')\nplt.plot(omega * 27211.6, LDOS_bulk1_down, label='down')\nplt.title('Bulk SC 1')\nplt.legend()\nplt.figure(4)\nplt.plot(omega * 27211.6, LDOS_bulk2_up + LDOS_bulk2_down)\nplt.plot(omega * 27211.6, LDOS_bulk2_up, label='up')\nplt.plot(omega * 27211.6, LDOS_bulk2_down, label='down')\nplt.title('Bulk SC 2')\nplt.legend()\nt2 = time.time()\nprint('Program finished after', (t2 - t1) / 60.0, 'mins')\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jun 5 10:04:05 2019\n\n@author: cristina\n\"\"\"\n\nimport numpy as np\nfrom itertools import chain\nfrom numpy import linalg as LA\ndiag = LA.eigh\nimport matplotlib.pyplot as plt\nplt.rcParams.update({'font.size': 13})\nimport time\n\npi = np.pi\nexp = np.exp\nt1 = time.time()\n\nN = 2000 #number of sites\nM = 200 #number of empty sites\nm = 1.0 #effective mass\ndelta =1.35/27211.6 #SC gap\nmu = 1.0/27211.6 #chemical potential\nmu = 0.0\na = 4.98/0.529 ##lattice constant\nphi = pi/2.0#phase of second SC\nphi = 0.0\n\nH = np.zeros([2*(2*N + M), 2*(2*N + M)], dtype=complex)\nh = np.zeros([2*N + M, 2*N + M, 2, 2], dtype=complex)\n\nfactor = 1/(m*a**2) - mu\nfactor_2 = -1/(2*m*a**2)\nhopping = factor_2*10\nhopping = 0.0\n\n#diagonal terms\nrange1_diagonal = range(N)\nrange2_diagonal = range(N+M, 2*N+M - 1)\n\nfor i in range1_diagonal:\n g_i = i\n \n h[g_i, g_i, 0, 1] = delta\n h[g_i, g_i, 1, 0] = delta \n \n h[g_i, g_i, 0, 0] = factor\n h[g_i, g_i, 1, 1] = - factor\n \n \nfor i in range2_diagonal:\n g_i = i\n \n h[g_i, g_i, 0, 1] = delta*exp(1j*phi)\n h[g_i, g_i, 1, 0] = delta*exp(-1j*phi) \n \n h[g_i, g_i, 0, 0] = factor\n h[g_i, g_i, 1, 1] = - factor\n\n#off - diagonal terms\nrange1_offdiagonal = range(N - 1)\nrange2_offdiagonal = range(N+M, 2*N+M - 1)\nrange_offdiagonal = chain(range1_offdiagonal, range2_offdiagonal)\n\nfor i in range_offdiagonal:\n g_i = i\n g_j = i + 1\n \n h[g_i, g_j, 0, 0] = factor_2\n h[g_i, g_j, 1, 1] = - factor_2 \n \n h[g_j, g_i, 0, 0] = factor_2\n h[g_j, g_i, 1, 1] = - factor_2\n \n\n#hopping between the 2 Chains\nh[N - 1, N + M, 0, 0] = hopping\nh[N - 1, N + M, 1, 1] = - hopping\n\nh[N + M, N - 1, 0, 0] = hopping\nh[N + M, N - 1, 1, 1] = - hopping\n\nfor i in range(2*N + M):\n for j in range(2*N + M):\n for t_i in range(2):\n for t_j in range(2):\n H[(i) * 2 + t_i, (j) * 2 + t_j] = h[i, j, t_i, t_j]\n \nH = np.matrix(H) \nT = np.allclose(H, 
H.getH())###check if Hermitian\nprint('Is H an Hermitian matrix?', T)\n\n(E, psi) = diag(H)####diagonalize H\n\n\n####LDOS functions\ndef LDOS_up(omega, E, u, Damping):\n t = sum ( u**2 / (omega - E + 1j*Damping) )\n tt = -1/pi*np.imag(t)\n return(tt)\n \ndef LDOS_down(omega, E, v, Damping):\n t = sum ( v**2 / (omega + E + 1j*Damping) )\n tt = -1/pi*np.imag(t)\n return(tt)\n\n\n\n#### u and v components in the Nth atom\nu_borde1 = np.zeros(len(E))\nv_borde1 = np.zeros(len(E))\nI = N - 1\n\nu_borde2 = np.zeros(len(E))\nv_borde2 = np.zeros(len(E))\nI2 = N + M - 1\n\nu_bulk1 = np.zeros(len(E))\nv_bulk1 = np.zeros(len(E))\nI3 = int(N/2) - 1\n\nu_bulk2 = np.zeros(len(E))\nv_bulk2 = np.zeros(len(E))\nI4 = N + M + int(N/2.0) - 1\n\nI = N \nfor i in range(len(E)):\n u_borde1[i] = psi[2*I-2,i]\n v_borde1[i] = psi[2*I-1,i]\n \n u_borde2[i] = psi[2*I2-2,i]\n v_borde2[i] = psi[2*I2-1,i]\n \n u_bulk1[i] = psi[2*I3-2,i]\n v_bulk1[i] = psi[2*I3-1,i]\n \n u_bulk2[i] = psi[2*I4-2,i]\n v_bulk2[i] = psi[2*I4-1,i]\n\n###calculate LDOS\nomega = np.linspace(-4*delta, 4*delta, 2000)#omega vector \n\nLDOS_borde1_up = np.zeros(len(omega))\nLDOS_borde1_down = np.zeros(len(omega))\n\nLDOS_borde2_up = np.zeros(len(omega))\nLDOS_borde2_down = np.zeros(len(omega))\n\nLDOS_bulk1_up = np.zeros(len(omega))\nLDOS_bulk1_down = np.zeros(len(omega))\n\nLDOS_bulk2_up = np.zeros(len(omega))\nLDOS_bulk2_down = np.zeros(len(omega))\n\nD = 0.02/27211.6\nfor i in range(len(omega)):\n\n LDOS_borde1_up[i] = LDOS_up(omega[i], E, u_borde1, D) \n LDOS_borde1_down[i] = LDOS_up(omega[i], E, v_borde1, D)\n \n LDOS_borde2_up[i] = LDOS_up(omega[i], E, u_borde2, D) \n LDOS_borde2_down[i] = LDOS_up(omega[i], E, v_borde2, D)\n \n LDOS_bulk1_up[i] = LDOS_up(omega[i], E, u_bulk1, D) \n LDOS_bulk1_down[i] = LDOS_up(omega[i], E, v_bulk1, D)\n \n LDOS_bulk2_up[i] = LDOS_up(omega[i], E, u_bulk2, D) \n LDOS_bulk2_down[i] = LDOS_up(omega[i], E, v_bulk2, D)\n\n\n###plot LDOS \nplt.figure(1)\nplt.plot(omega*27211.6, 
LDOS_borde1_up + LDOS_borde1_down) \nplt.plot(omega*27211.6, LDOS_borde1_up, label = 'up') \nplt.plot(omega*27211.6, LDOS_borde1_down, label = 'down')\nplt.title('Borde SC 1')\n#plt.title('Site %i' %I) \nplt.legend() \n\nplt.figure(2)\nplt.plot(omega*27211.6, LDOS_borde2_up + LDOS_borde2_down) \nplt.plot(omega*27211.6, LDOS_borde2_up, label = 'up') \nplt.plot(omega*27211.6, LDOS_borde2_down, label = 'down')\nplt.title('Borde SC 2')\n#plt.title('Site %i' %I) \nplt.legend() \n\nplt.figure(3)\nplt.plot(omega*27211.6, LDOS_bulk1_up + LDOS_bulk1_down) \nplt.plot(omega*27211.6, LDOS_bulk1_up, label = 'up') \nplt.plot(omega*27211.6, LDOS_bulk1_down, label = 'down')\nplt.title('Bulk SC 1')\n#plt.title('Site %i' %I) \nplt.legend()\n\nplt.figure(4)\nplt.plot(omega*27211.6, LDOS_bulk2_up + LDOS_bulk2_down) \nplt.plot(omega*27211.6, LDOS_bulk2_up, label = 'up') \nplt.plot(omega*27211.6, LDOS_bulk2_down, label = 'down')\nplt.title('Bulk SC 2')\n#plt.title('Site %i' %I) \nplt.legend() \n\n\n\nt2 = time.time()\nprint('Program finished after', (t2 - t1)/60.0, 'mins')\n\n\n\n\n\n\n\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import sys
from bs4 import BeautifulSoup
def get_classes(html):
    """
    returns a list of classes and titles, parsing through 'html'

    'html' is a parsed document (e.g. a BeautifulSoup object) whose
    <span class="code"> and <span class="title"> elements are paired
    positionally.  Returns a list of (code, title) tuples with
    non-breaking spaces (\\xa0) normalized to ordinary spaces.
    """
    # The implementation was previously commented out, leaving the function to
    # return None despite its docstring; restore it so the contract holds.
    elements = html.find_all("span", "code")
    titles = html.find_all("span", "title")
    classes = []
    # zip pairs codes with titles positionally (and stops at the shorter list
    # instead of raising IndexError if the counts ever disagree).
    for element, title in zip(elements, titles):
        classes.append((element.text.replace('\xa0', ' '),
                        title.text.replace('\xa0', ' ')))
    return classes
|
normal
|
{
"blob_id": "9bb8e0f732eac474dbc01c374f9c74178f65dc36",
"index": 3063,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_classes(html):\n \"\"\"\n returns a list of classes and titles, parsing through 'html'\n \"\"\"\n",
"step-3": "import sys\nfrom bs4 import BeautifulSoup\n\n\ndef get_classes(html):\n \"\"\"\n returns a list of classes and titles, parsing through 'html'\n \"\"\"\n",
"step-4": "import sys\nfrom bs4 import BeautifulSoup\n\n\ndef get_classes(html):\n \"\"\"\n returns a list of classes and titles, parsing through 'html'\n \"\"\"\n # elements = html.find_all(\"span\", \"code\")\n # titles = html.find_all(\"span\", \"title\")\n # classes = []\n # for i in range(len(elements)):\n # item = elements[i]\n # tit = titles[i]\n # classes += [(item.text.replace('\\xa0', ' '), tit.text.replace('\\xa0', ' '))]\n # return classes\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class ConfigLoader:
<|reserved_special_token_0|>
def __init__(self, level):
self._log = Logger('configloader', level)
self._log.info('ready.')
def configure(self, filename='config.yaml'):
"""
Read and return configuration from the specified YAML file.
Pretty-prints the configuration object if the log level is set to DEBUG.
"""
self._log.info('reading from yaml configuration file {}...'.format(
filename))
_config = yaml.safe_load(open(filename, 'r'))
if self._log.level == Level.DEBUG:
self._log.debug('YAML configuration as read:')
print(Fore.BLUE)
pp = pprint.PrettyPrinter(width=80, indent=2)
pp.pprint(_config)
print(Style.RESET_ALL)
self._log.info('configuration read.')
return _config
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ConfigLoader:
"""
Has just one method: configure() reads a YAML file.
"""
def __init__(self, level):
self._log = Logger('configloader', level)
self._log.info('ready.')
def configure(self, filename='config.yaml'):
"""
Read and return configuration from the specified YAML file.
Pretty-prints the configuration object if the log level is set to DEBUG.
"""
self._log.info('reading from yaml configuration file {}...'.format(
filename))
_config = yaml.safe_load(open(filename, 'r'))
if self._log.level == Level.DEBUG:
self._log.debug('YAML configuration as read:')
print(Fore.BLUE)
pp = pprint.PrettyPrinter(width=80, indent=2)
pp.pprint(_config)
print(Style.RESET_ALL)
self._log.info('configuration read.')
return _config
<|reserved_special_token_1|>
<|reserved_special_token_0|>
init()
try:
import yaml
except ImportError:
exit(
'This script requires the pyyaml module\nInstall with: pip3 install --user pyyaml'
)
<|reserved_special_token_0|>
class ConfigLoader:
"""
Has just one method: configure() reads a YAML file.
"""
def __init__(self, level):
self._log = Logger('configloader', level)
self._log.info('ready.')
def configure(self, filename='config.yaml'):
"""
Read and return configuration from the specified YAML file.
Pretty-prints the configuration object if the log level is set to DEBUG.
"""
self._log.info('reading from yaml configuration file {}...'.format(
filename))
_config = yaml.safe_load(open(filename, 'r'))
if self._log.level == Level.DEBUG:
self._log.debug('YAML configuration as read:')
print(Fore.BLUE)
pp = pprint.PrettyPrinter(width=80, indent=2)
pp.pprint(_config)
print(Style.RESET_ALL)
self._log.info('configuration read.')
return _config
<|reserved_special_token_1|>
import pprint
from colorama import init, Fore, Style
init()
try:
import yaml
except ImportError:
exit(
'This script requires the pyyaml module\nInstall with: pip3 install --user pyyaml'
)
from core.logger import Level, Logger
class ConfigLoader:
"""
Has just one method: configure() reads a YAML file.
"""
def __init__(self, level):
self._log = Logger('configloader', level)
self._log.info('ready.')
def configure(self, filename='config.yaml'):
"""
Read and return configuration from the specified YAML file.
Pretty-prints the configuration object if the log level is set to DEBUG.
"""
self._log.info('reading from yaml configuration file {}...'.format(
filename))
_config = yaml.safe_load(open(filename, 'r'))
if self._log.level == Level.DEBUG:
self._log.debug('YAML configuration as read:')
print(Fore.BLUE)
pp = pprint.PrettyPrinter(width=80, indent=2)
pp.pprint(_config)
print(Style.RESET_ALL)
self._log.info('configuration read.')
return _config
<|reserved_special_token_1|>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020-2021 by Murray Altheim. All rights reserved. This file is part
# of the Robot Operating System project, released under the MIT License. Please
# see the LICENSE file included as part of this package.
#
# author: Murray Altheim
# created: 2020-04-15
# modified: 2020-04-15
import pprint
from colorama import init, Fore, Style
init()
try:
import yaml
except ImportError:
exit("This script requires the pyyaml module\nInstall with: pip3 install --user pyyaml")
from core.logger import Level, Logger
class ConfigLoader():
    '''
    Has just one method: configure() reads a YAML file.
    '''
    def __init__(self, level):
        '''
        :param level:  the log Level used for this loader's Logger.
        '''
        self._log = Logger('configloader', level)
        self._log.info('ready.')

    # ..........................................................................
    def configure(self, filename='config.yaml'):
        '''
        Read and return configuration from the specified YAML file.

        Pretty-prints the configuration object if the log level is set to DEBUG.

        :param filename:  path to the YAML configuration file.
        :return:          the parsed configuration (typically a dict).
        '''
        self._log.info('reading from yaml configuration file {}...'.format(filename))
        # Use a context manager so the file handle is closed promptly even if
        # parsing raises (the previous open() call leaked the handle until GC).
        with open(filename, 'r') as _source:
            _config = yaml.safe_load(_source)
        if self._log.level == Level.DEBUG:
            self._log.debug('YAML configuration as read:')
            print(Fore.BLUE)
            pp = pprint.PrettyPrinter(width=80, indent=2)
            pp.pprint(_config)
            print(Style.RESET_ALL)
        self._log.info('configuration read.')
        return _config
#EOF
|
flexible
|
{
"blob_id": "3a6038cb80548b98fc7e4a328092f1dc1ffd6dfd",
"index": 1154,
"step-1": "<mask token>\n\n\nclass ConfigLoader:\n <mask token>\n\n def __init__(self, level):\n self._log = Logger('configloader', level)\n self._log.info('ready.')\n\n def configure(self, filename='config.yaml'):\n \"\"\"\n Read and return configuration from the specified YAML file.\n\n Pretty-prints the configuration object if the log level is set to DEBUG.\n \"\"\"\n self._log.info('reading from yaml configuration file {}...'.format(\n filename))\n _config = yaml.safe_load(open(filename, 'r'))\n if self._log.level == Level.DEBUG:\n self._log.debug('YAML configuration as read:')\n print(Fore.BLUE)\n pp = pprint.PrettyPrinter(width=80, indent=2)\n pp.pprint(_config)\n print(Style.RESET_ALL)\n self._log.info('configuration read.')\n return _config\n",
"step-2": "<mask token>\n\n\nclass ConfigLoader:\n \"\"\"\n Has just one method: configure() reads a YAML file.\n \"\"\"\n\n def __init__(self, level):\n self._log = Logger('configloader', level)\n self._log.info('ready.')\n\n def configure(self, filename='config.yaml'):\n \"\"\"\n Read and return configuration from the specified YAML file.\n\n Pretty-prints the configuration object if the log level is set to DEBUG.\n \"\"\"\n self._log.info('reading from yaml configuration file {}...'.format(\n filename))\n _config = yaml.safe_load(open(filename, 'r'))\n if self._log.level == Level.DEBUG:\n self._log.debug('YAML configuration as read:')\n print(Fore.BLUE)\n pp = pprint.PrettyPrinter(width=80, indent=2)\n pp.pprint(_config)\n print(Style.RESET_ALL)\n self._log.info('configuration read.')\n return _config\n",
"step-3": "<mask token>\ninit()\ntry:\n import yaml\nexcept ImportError:\n exit(\n 'This script requires the pyyaml module\\nInstall with: pip3 install --user pyyaml'\n )\n<mask token>\n\n\nclass ConfigLoader:\n \"\"\"\n Has just one method: configure() reads a YAML file.\n \"\"\"\n\n def __init__(self, level):\n self._log = Logger('configloader', level)\n self._log.info('ready.')\n\n def configure(self, filename='config.yaml'):\n \"\"\"\n Read and return configuration from the specified YAML file.\n\n Pretty-prints the configuration object if the log level is set to DEBUG.\n \"\"\"\n self._log.info('reading from yaml configuration file {}...'.format(\n filename))\n _config = yaml.safe_load(open(filename, 'r'))\n if self._log.level == Level.DEBUG:\n self._log.debug('YAML configuration as read:')\n print(Fore.BLUE)\n pp = pprint.PrettyPrinter(width=80, indent=2)\n pp.pprint(_config)\n print(Style.RESET_ALL)\n self._log.info('configuration read.')\n return _config\n",
"step-4": "import pprint\nfrom colorama import init, Fore, Style\ninit()\ntry:\n import yaml\nexcept ImportError:\n exit(\n 'This script requires the pyyaml module\\nInstall with: pip3 install --user pyyaml'\n )\nfrom core.logger import Level, Logger\n\n\nclass ConfigLoader:\n \"\"\"\n Has just one method: configure() reads a YAML file.\n \"\"\"\n\n def __init__(self, level):\n self._log = Logger('configloader', level)\n self._log.info('ready.')\n\n def configure(self, filename='config.yaml'):\n \"\"\"\n Read and return configuration from the specified YAML file.\n\n Pretty-prints the configuration object if the log level is set to DEBUG.\n \"\"\"\n self._log.info('reading from yaml configuration file {}...'.format(\n filename))\n _config = yaml.safe_load(open(filename, 'r'))\n if self._log.level == Level.DEBUG:\n self._log.debug('YAML configuration as read:')\n print(Fore.BLUE)\n pp = pprint.PrettyPrinter(width=80, indent=2)\n pp.pprint(_config)\n print(Style.RESET_ALL)\n self._log.info('configuration read.')\n return _config\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n#\n# Copyright 2020-2021 by Murray Altheim. All rights reserved. This file is part\n# of the Robot Operating System project, released under the MIT License. Please\n# see the LICENSE file included as part of this package.\n#\n# author: Murray Altheim\n# created: 2020-04-15\n# modified: 2020-04-15\n\nimport pprint\nfrom colorama import init, Fore, Style\ninit()\ntry:\n import yaml\nexcept ImportError:\n exit(\"This script requires the pyyaml module\\nInstall with: pip3 install --user pyyaml\")\n\nfrom core.logger import Level, Logger\n\nclass ConfigLoader():\n '''\n Has just one method: configure() reads a YAML file.\n '''\n def __init__(self, level):\n self._log = Logger('configloader', level)\n self._log.info('ready.')\n\n # ..........................................................................\n def configure(self, filename='config.yaml'):\n '''\n Read and return configuration from the specified YAML file.\n\n Pretty-prints the configuration object if the log level is set to DEBUG.\n '''\n self._log.info('reading from yaml configuration file {}...'.format(filename))\n _config = yaml.safe_load(open(filename, 'r'))\n if self._log.level == Level.DEBUG:\n self._log.debug('YAML configuration as read:')\n print(Fore.BLUE)\n pp = pprint.PrettyPrinter(width=80, indent=2)\n pp.pprint(_config)\n print(Style.RESET_ALL)\n self._log.info('configuration read.')\n return _config\n\n#EOF\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
# Public API of this package. Previously spelled `_all__` (single leading
# underscore), which has no meaning to Python and silently did nothing.
__all__ = ['minning_algo']
<|reserved_special_token_1|>
# Public API of this package. Previously spelled `_all__` (single leading
# underscore), which has no meaning to Python and silently did nothing.
__all__ = ["minning_algo"]
|
flexible
|
{
"blob_id": "5a7b68648898818e0db47f225f3d4b0972cd5b99",
"index": 7521,
"step-1": "<mask token>\n",
"step-2": "_all__ = ['minning_algo']\n",
"step-3": "_all__ = [\"minning_algo\"]\n\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class CNP(torch.nn.Module):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class ANP(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,
dec_layer, nhead):
super(ANP, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.
ReLU()]
for i in range(en_layer - 2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim, out_dim)
else:
self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.
nn.ReLU()]
for i in range(dec_layer - 2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
self.projector = torch.nn.Linear(query_dim, hidden_dim)
self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,
num_heads=nhead)
def forward(self, context, key, query):
query = query.view(query.shape[0], -1)
key = key.view(key.shape[0], -1)
h = self.encoder(context)
h.unsqueeze_(1)
q_t = self.projector(query)
k_t = self.projector(key)
q_t.unsqueeze_(1)
k_t.unsqueeze_(1)
h, _ = self.attention(query=q_t, key=k_t, value=h)
h.squeeze_(1)
pred = self.decoder(h)
return pred
class ANPv2(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,
dec_layer, nhead):
super(ANPv2, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.
ReLU()]
for i in range(en_layer - 2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim, out_dim)
else:
self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.
nn.ReLU()]
for i in range(dec_layer - 2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,
hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,
hidden_dim))
self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,
hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,
hidden_dim))
self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,
num_heads=nhead)
def forward(self, context, key, query):
query = query.view(query.shape[0], -1)
key = key.view(key.shape[0], -1)
h = self.encoder(context)
h.unsqueeze_(1)
q_t = self.query_mlp(query)
k_t = self.key_mlp(key)
q_t.unsqueeze_(1)
k_t.unsqueeze_(1)
h, _ = self.attention(query=q_t, key=k_t, value=h)
h.squeeze_(1)
pred = self.decoder(h)
return pred
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CNP(torch.nn.Module):
<|reserved_special_token_0|>
def forward(self, context, query, key=None):
query = query.view(query.shape[0], -1)
h = self.encoder(context)
h = h.mean(dim=0)
h = torch.stack([h] * query.shape[0], dim=0)
r = torch.cat([h, query], dim=1)
out = self.decoder(r)
return out
class ANP(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,
dec_layer, nhead):
super(ANP, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.
ReLU()]
for i in range(en_layer - 2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim, out_dim)
else:
self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.
nn.ReLU()]
for i in range(dec_layer - 2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
self.projector = torch.nn.Linear(query_dim, hidden_dim)
self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,
num_heads=nhead)
def forward(self, context, key, query):
query = query.view(query.shape[0], -1)
key = key.view(key.shape[0], -1)
h = self.encoder(context)
h.unsqueeze_(1)
q_t = self.projector(query)
k_t = self.projector(key)
q_t.unsqueeze_(1)
k_t.unsqueeze_(1)
h, _ = self.attention(query=q_t, key=k_t, value=h)
h.squeeze_(1)
pred = self.decoder(h)
return pred
class ANPv2(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,
dec_layer, nhead):
super(ANPv2, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.
ReLU()]
for i in range(en_layer - 2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim, out_dim)
else:
self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.
nn.ReLU()]
for i in range(dec_layer - 2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,
hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,
hidden_dim))
self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,
hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,
hidden_dim))
self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,
num_heads=nhead)
def forward(self, context, key, query):
query = query.view(query.shape[0], -1)
key = key.view(key.shape[0], -1)
h = self.encoder(context)
h.unsqueeze_(1)
q_t = self.query_mlp(query)
k_t = self.key_mlp(key)
q_t.unsqueeze_(1)
k_t.unsqueeze_(1)
h, _ = self.attention(query=q_t, key=k_t, value=h)
h.squeeze_(1)
pred = self.decoder(h)
return pred
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CNP(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,
dec_layer):
super(CNP, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.
ReLU()]
for i in range(en_layer - 2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim + query_dim, out_dim)
else:
self.decoder = [torch.nn.Linear(hidden_dim + query_dim,
hidden_dim), torch.nn.ReLU()]
for i in range(dec_layer - 2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
def forward(self, context, query, key=None):
query = query.view(query.shape[0], -1)
h = self.encoder(context)
h = h.mean(dim=0)
h = torch.stack([h] * query.shape[0], dim=0)
r = torch.cat([h, query], dim=1)
out = self.decoder(r)
return out
class ANP(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,
dec_layer, nhead):
super(ANP, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.
ReLU()]
for i in range(en_layer - 2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim, out_dim)
else:
self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.
nn.ReLU()]
for i in range(dec_layer - 2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
self.projector = torch.nn.Linear(query_dim, hidden_dim)
self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,
num_heads=nhead)
def forward(self, context, key, query):
query = query.view(query.shape[0], -1)
key = key.view(key.shape[0], -1)
h = self.encoder(context)
h.unsqueeze_(1)
q_t = self.projector(query)
k_t = self.projector(key)
q_t.unsqueeze_(1)
k_t.unsqueeze_(1)
h, _ = self.attention(query=q_t, key=k_t, value=h)
h.squeeze_(1)
pred = self.decoder(h)
return pred
class ANPv2(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,
dec_layer, nhead):
super(ANPv2, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.
ReLU()]
for i in range(en_layer - 2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim, out_dim)
else:
self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.
nn.ReLU()]
for i in range(dec_layer - 2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,
hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,
hidden_dim))
self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,
hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,
hidden_dim))
self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,
num_heads=nhead)
def forward(self, context, key, query):
query = query.view(query.shape[0], -1)
key = key.view(key.shape[0], -1)
h = self.encoder(context)
h.unsqueeze_(1)
q_t = self.query_mlp(query)
k_t = self.key_mlp(key)
q_t.unsqueeze_(1)
k_t.unsqueeze_(1)
h, _ = self.attention(query=q_t, key=k_t, value=h)
h.squeeze_(1)
pred = self.decoder(h)
return pred
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def posterior_predictive(X_s, X_train, Y_train, l=1.0, sigma_f=1.0, sigma_y
=1e-08):
""" Computes the sufficient statistics of the GP posterior predictive distribution from m training data X_train and Y_train and n new inputs X_s. Args: X_s: New input locations (n x d). X_train: Training locations (m x d). Y_train: Training targets (m x 1). l: Kernel length parameter. sigma_f: Kernel vertical variation parameter. sigma_y: Noise parameter. Returns: Posterior mean vector (n x d) and covariance matrix (n x n). """
K = kernel(X_train, X_train, l, sigma_f) + sigma_y ** 2 * np.eye(len(
X_train))
K_s = kernel(X_s, X_train, l, sigma_f)
K_ss = kernel(X_s, X_s, l, sigma_f) + sigma_y ** 2 * np.eye(len(X_s))
mu_s = np.matmul(K_s, np.linalg.solve(K, Y_train))
cov_s = K_ss - np.matmul(K_s, np.linalg.solve(K, K_s.T))
return mu_s, cov_s
class CNP(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,
dec_layer):
super(CNP, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.
ReLU()]
for i in range(en_layer - 2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim + query_dim, out_dim)
else:
self.decoder = [torch.nn.Linear(hidden_dim + query_dim,
hidden_dim), torch.nn.ReLU()]
for i in range(dec_layer - 2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
def forward(self, context, query, key=None):
query = query.view(query.shape[0], -1)
h = self.encoder(context)
h = h.mean(dim=0)
h = torch.stack([h] * query.shape[0], dim=0)
r = torch.cat([h, query], dim=1)
out = self.decoder(r)
return out
class ANP(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,
dec_layer, nhead):
super(ANP, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.
ReLU()]
for i in range(en_layer - 2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim, out_dim)
else:
self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.
nn.ReLU()]
for i in range(dec_layer - 2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
self.projector = torch.nn.Linear(query_dim, hidden_dim)
self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,
num_heads=nhead)
def forward(self, context, key, query):
query = query.view(query.shape[0], -1)
key = key.view(key.shape[0], -1)
h = self.encoder(context)
h.unsqueeze_(1)
q_t = self.projector(query)
k_t = self.projector(key)
q_t.unsqueeze_(1)
k_t.unsqueeze_(1)
h, _ = self.attention(query=q_t, key=k_t, value=h)
h.squeeze_(1)
pred = self.decoder(h)
return pred
class ANPv2(torch.nn.Module):
def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,
dec_layer, nhead):
super(ANPv2, self).__init__()
if en_layer == 1:
self.encoder = torch.nn.Linear(in_dim, hidden_dim)
else:
self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.
ReLU()]
for i in range(en_layer - 2):
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder.append(torch.nn.ReLU())
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.encoder = torch.nn.Sequential(*self.encoder)
if dec_layer == 1:
self.decoder = torch.nn.Linear(hidden_dim, out_dim)
else:
self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.
nn.ReLU()]
for i in range(dec_layer - 2):
self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))
self.decoder.append(torch.nn.ReLU())
self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))
self.decoder = torch.nn.Sequential(*self.decoder)
self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,
hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,
hidden_dim))
self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,
hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,
hidden_dim))
self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,
num_heads=nhead)
def forward(self, context, key, query):
query = query.view(query.shape[0], -1)
key = key.view(key.shape[0], -1)
h = self.encoder(context)
h.unsqueeze_(1)
q_t = self.query_mlp(query)
k_t = self.key_mlp(key)
q_t.unsqueeze_(1)
k_t.unsqueeze_(1)
h, _ = self.attention(query=q_t, key=k_t, value=h)
h.squeeze_(1)
pred = self.decoder(h)
return pred
<|reserved_special_token_1|>
import torch
import numpy as np
# source: https://github.com/krasserm/bayesian-machine-learning/blob/master/gaussian_processes.ipynb
def kernel(X1, X2, l=1.0, sigma_f=1.0):
    """
    Isotropic squared exponential (RBF) kernel.

    Computes a covariance matrix from points in X1 and X2.

    Args:
        X1: Array of m points (m x d).
        X2: Array of n points (n x d).
        l: Length-scale parameter.
        sigma_f: Vertical-variation (signal) parameter.

    Returns:
        Covariance matrix (m x n).
    """
    # Pairwise squared distances via the expansion |a-b|^2 = |a|^2 + |b|^2 - 2 a.b
    sq_norm1 = np.sum(X1 ** 2, 1).reshape(-1, 1)
    sq_norm2 = np.sum(X2 ** 2, 1)
    cross = np.dot(X1, X2.T)
    sqdist = sq_norm1 + sq_norm2 - 2 * cross
    return sigma_f ** 2 * np.exp(-0.5 / l ** 2 * sqdist)
# source: # https://github.com/krasserm/bayesian-machine-learning/blob/master/gaussian_processes.ipynb
def posterior_predictive(X_s, X_train, Y_train, l=1.0, sigma_f=1.0, sigma_y=1e-8):
    """
    Sufficient statistics of the GP posterior predictive distribution,
    computed from m training pairs (X_train, Y_train) at n new inputs X_s.

    Args:
        X_s: New input locations (n x d).
        X_train: Training locations (m x d).
        Y_train: Training targets (m x 1).
        l: Kernel length parameter.
        sigma_f: Kernel vertical variation parameter.
        sigma_y: Observation-noise parameter.

    Returns:
        Posterior mean vector (n x d) and covariance matrix (n x n).
    """
    noise = sigma_y ** 2
    K_train = kernel(X_train, X_train, l, sigma_f) + noise * np.eye(len(X_train))
    K_cross = kernel(X_s, X_train, l, sigma_f)
    K_test = kernel(X_s, X_s, l, sigma_f) + noise * np.eye(len(X_s))
    # Standard GP posterior equations, solving against K rather than
    # explicitly inverting it (numerically preferable).
    alpha = np.linalg.solve(K_train, Y_train)
    mu_s = np.matmul(K_cross, alpha)
    cov_s = K_test - np.matmul(K_cross, np.linalg.solve(K_train, K_cross.T))
    return mu_s, cov_s
class CNP(torch.nn.Module):
    """
    Conditional Neural Process: an encoder MLP maps each context point to a
    representation, the representations are mean-pooled into one summary, and
    a decoder MLP maps (summary, query) pairs to predictions.
    """

    def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer):
        super(CNP, self).__init__()

        def _mlp(first_in, n_layers, last_out):
            # Build an MLP of `n_layers` Linear layers with ReLUs in between;
            # a single layer degenerates to one Linear (no Sequential wrapper).
            if n_layers == 1:
                return torch.nn.Linear(first_in, last_out)
            layers = [torch.nn.Linear(first_in, hidden_dim), torch.nn.ReLU()]
            for _ in range(n_layers - 2):
                layers.append(torch.nn.Linear(hidden_dim, hidden_dim))
                layers.append(torch.nn.ReLU())
            layers.append(torch.nn.Linear(hidden_dim, last_out))
            return torch.nn.Sequential(*layers)

        self.encoder = _mlp(in_dim, en_layer, hidden_dim)
        # Decoder consumes the pooled summary concatenated with the query.
        self.decoder = _mlp(hidden_dim + query_dim, dec_layer, out_dim)

    def forward(self, context, query, key=None):
        """
        Predict values at `query` given the context set.

        `key` is accepted only for interface compatibility with the attentive
        variants (ANP/ANPv2) and is ignored.
        """
        query = query.view(query.shape[0], -1)
        # Encode every context point, then aggregate by averaging.
        summary = self.encoder(context).mean(dim=0)
        # Pair the shared summary with each query point.
        tiled = torch.stack([summary] * query.shape[0], dim=0)
        joint = torch.cat([tiled, query], dim=1)
        return self.decoder(joint)
class ANP(torch.nn.Module):
    """
    Attentive Neural Process: like CNP, but instead of mean-pooling the
    context it attends over the encoded context points, with `key` and
    `query` coordinates mapped through a single shared linear projection.
    """

    def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer, nhead):
        super(ANP, self).__init__()

        def _mlp(first_in, n_layers, last_out):
            # `n_layers` Linear layers separated by ReLUs; one layer
            # degenerates to a bare Linear (no Sequential wrapper).
            if n_layers == 1:
                return torch.nn.Linear(first_in, last_out)
            layers = [torch.nn.Linear(first_in, hidden_dim), torch.nn.ReLU()]
            for _ in range(n_layers - 2):
                layers.append(torch.nn.Linear(hidden_dim, hidden_dim))
                layers.append(torch.nn.ReLU())
            layers.append(torch.nn.Linear(hidden_dim, last_out))
            return torch.nn.Sequential(*layers)

        self.encoder = _mlp(in_dim, en_layer, hidden_dim)
        self.decoder = _mlp(hidden_dim, dec_layer, out_dim)
        # One projection shared by both attention keys and queries.
        self.projector = torch.nn.Linear(query_dim, hidden_dim)
        self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim, num_heads=nhead)

    def forward(self, context, key, query):
        """Attend the query over the encoded context and decode a prediction."""
        query = query.view(query.shape[0], -1)
        key = key.view(key.shape[0], -1)
        # Encoded context points act as attention values, shaped (m, 1, hidden)
        # for nn.MultiheadAttention's (seq, batch, embed) convention.
        values = self.encoder(context).unsqueeze(1)
        q_proj = self.projector(query).unsqueeze(1)
        k_proj = self.projector(key).unsqueeze(1)
        attended, _ = self.attention(query=q_proj, key=k_proj, value=values)
        return self.decoder(attended.squeeze(1))
class ANPv2(torch.nn.Module):
    """
    Variant of ANP that embeds the attention keys and queries through two
    separate two-layer MLPs instead of one shared linear projection.
    """

    def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer, nhead):
        super(ANPv2, self).__init__()

        def _mlp(first_in, n_layers, last_out):
            # `n_layers` Linear layers separated by ReLUs; one layer
            # degenerates to a bare Linear (no Sequential wrapper).
            if n_layers == 1:
                return torch.nn.Linear(first_in, last_out)
            layers = [torch.nn.Linear(first_in, hidden_dim), torch.nn.ReLU()]
            for _ in range(n_layers - 2):
                layers.append(torch.nn.Linear(hidden_dim, hidden_dim))
                layers.append(torch.nn.ReLU())
            layers.append(torch.nn.Linear(hidden_dim, last_out))
            return torch.nn.Sequential(*layers)

        self.encoder = _mlp(in_dim, en_layer, hidden_dim)
        self.decoder = _mlp(hidden_dim, dec_layer, out_dim)
        # Separate learned embeddings for attention keys and queries.
        self.key_mlp = torch.nn.Sequential(
            torch.nn.Linear(query_dim, hidden_dim),
            torch.nn.ReLU(),
            torch.nn.Linear(hidden_dim, hidden_dim))
        self.query_mlp = torch.nn.Sequential(
            torch.nn.Linear(query_dim, hidden_dim),
            torch.nn.ReLU(),
            torch.nn.Linear(hidden_dim, hidden_dim))
        self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim, num_heads=nhead)

    def forward(self, context, key, query):
        """Attend the embedded query over the encoded context and decode."""
        query = query.view(query.shape[0], -1)
        key = key.view(key.shape[0], -1)
        # Encoded context points act as attention values, shaped (m, 1, hidden)
        # for nn.MultiheadAttention's (seq, batch, embed) convention.
        values = self.encoder(context).unsqueeze(1)
        q_emb = self.query_mlp(query).unsqueeze(1)
        k_emb = self.key_mlp(key).unsqueeze(1)
        attended, _ = self.attention(query=q_emb, key=k_emb, value=values)
        return self.decoder(attended.squeeze(1))
|
flexible
|
{
"blob_id": "82c3bde5746d04c126a93851844f775e7ce65f4b",
"index": 9442,
"step-1": "<mask token>\n\n\nclass CNP(torch.nn.Module):\n <mask token>\n <mask token>\n\n\nclass ANP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.projector = torch.nn.Linear(query_dim, hidden_dim)\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.projector(query)\n k_t = self.projector(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n\n\nclass ANPv2(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANPv2, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n 
self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.query_mlp(query)\n k_t = self.key_mlp(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n",
"step-2": "<mask token>\n\n\nclass CNP(torch.nn.Module):\n <mask token>\n\n def forward(self, context, query, key=None):\n query = query.view(query.shape[0], -1)\n h = self.encoder(context)\n h = h.mean(dim=0)\n h = torch.stack([h] * query.shape[0], dim=0)\n r = torch.cat([h, query], dim=1)\n out = self.decoder(r)\n return out\n\n\nclass ANP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.projector = torch.nn.Linear(query_dim, hidden_dim)\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.projector(query)\n k_t = self.projector(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n\n\nclass ANPv2(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANPv2, self).__init__()\n if en_layer == 
1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.query_mlp(query)\n k_t = self.key_mlp(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n",
"step-3": "<mask token>\n\n\nclass CNP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer):\n super(CNP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim + query_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim + query_dim,\n hidden_dim), torch.nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n\n def forward(self, context, query, key=None):\n query = query.view(query.shape[0], -1)\n h = self.encoder(context)\n h = h.mean(dim=0)\n h = torch.stack([h] * query.shape[0], dim=0)\n r = torch.cat([h, query], dim=1)\n out = self.decoder(r)\n return out\n\n\nclass ANP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i 
in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.projector = torch.nn.Linear(query_dim, hidden_dim)\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.projector(query)\n k_t = self.projector(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n\n\nclass ANPv2(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANPv2, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n 
hidden_dim))\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.query_mlp(query)\n k_t = self.key_mlp(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n",
"step-4": "<mask token>\n\n\ndef posterior_predictive(X_s, X_train, Y_train, l=1.0, sigma_f=1.0, sigma_y\n =1e-08):\n \"\"\" Computes the sufficient statistics of the GP posterior predictive distribution from m training data X_train and Y_train and n new inputs X_s. Args: X_s: New input locations (n x d). X_train: Training locations (m x d). Y_train: Training targets (m x 1). l: Kernel length parameter. sigma_f: Kernel vertical variation parameter. sigma_y: Noise parameter. Returns: Posterior mean vector (n x d) and covariance matrix (n x n). \"\"\"\n K = kernel(X_train, X_train, l, sigma_f) + sigma_y ** 2 * np.eye(len(\n X_train))\n K_s = kernel(X_s, X_train, l, sigma_f)\n K_ss = kernel(X_s, X_s, l, sigma_f) + sigma_y ** 2 * np.eye(len(X_s))\n mu_s = np.matmul(K_s, np.linalg.solve(K, Y_train))\n cov_s = K_ss - np.matmul(K_s, np.linalg.solve(K, K_s.T))\n return mu_s, cov_s\n\n\nclass CNP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer):\n super(CNP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim + query_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim + query_dim,\n hidden_dim), torch.nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n\n def forward(self, context, query, key=None):\n query = query.view(query.shape[0], -1)\n h = self.encoder(context)\n h = 
h.mean(dim=0)\n h = torch.stack([h] * query.shape[0], dim=0)\n r = torch.cat([h, query], dim=1)\n out = self.decoder(r)\n return out\n\n\nclass ANP(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.projector = torch.nn.Linear(query_dim, hidden_dim)\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.projector(query)\n k_t = self.projector(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n\n\nclass ANPv2(torch.nn.Module):\n\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer,\n dec_layer, nhead):\n super(ANPv2, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [torch.nn.Linear(in_dim, hidden_dim), torch.nn.\n ReLU()]\n for i in range(en_layer - 2):\n 
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [torch.nn.Linear(hidden_dim, hidden_dim), torch.\n nn.ReLU()]\n for i in range(dec_layer - 2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.key_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.query_mlp = torch.nn.Sequential(torch.nn.Linear(query_dim,\n hidden_dim), torch.nn.ReLU(), torch.nn.Linear(hidden_dim,\n hidden_dim))\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim,\n num_heads=nhead)\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n h = self.encoder(context)\n h.unsqueeze_(1)\n q_t = self.query_mlp(query)\n k_t = self.key_mlp(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n pred = self.decoder(h)\n return pred\n",
"step-5": "import torch\nimport numpy as np\n\n\n# source: https://github.com/krasserm/bayesian-machine-learning/blob/master/gaussian_processes.ipynb\ndef kernel(X1, X2, l=1.0, sigma_f=1.0):\n ''' Isotropic squared exponential kernel. Computes a covariance matrix from points in X1 and X2. Args: X1: Array of m points (m x d). X2: Array of n points (n x d). Returns: Covariance matrix (m x n). '''\n sqdist = np.sum(X1**2, 1).reshape(-1, 1) + np.sum(X2**2, 1) - 2 * np.dot(X1, X2.T)\n return sigma_f**2 * np.exp(-0.5 / l**2 * sqdist)\n \n# source: # https://github.com/krasserm/bayesian-machine-learning/blob/master/gaussian_processes.ipynb\ndef posterior_predictive(X_s, X_train, Y_train, l=1.0, sigma_f=1.0, sigma_y=1e-8):\n ''' Computes the sufficient statistics of the GP posterior predictive distribution from m training data X_train and Y_train and n new inputs X_s. Args: X_s: New input locations (n x d). X_train: Training locations (m x d). Y_train: Training targets (m x 1). l: Kernel length parameter. sigma_f: Kernel vertical variation parameter. sigma_y: Noise parameter. Returns: Posterior mean vector (n x d) and covariance matrix (n x n). 
'''\n K = kernel(X_train, X_train, l, sigma_f) + sigma_y**2 * np.eye(len(X_train))\n K_s = kernel(X_s, X_train, l, sigma_f)\n K_ss = kernel(X_s, X_s, l, sigma_f) + sigma_y**2 * np.eye(len(X_s))\n \n mu_s = np.matmul(K_s, np.linalg.solve(K, Y_train))\n cov_s = K_ss - np.matmul(K_s, np.linalg.solve(K, K_s.T))\n \n return mu_s, cov_s\n\nclass CNP(torch.nn.Module):\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer):\n super(CNP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [\n torch.nn.Linear(in_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(en_layer-2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n \n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim+query_dim, out_dim)\n else:\n self.decoder = [\n torch.nn.Linear(hidden_dim+query_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(dec_layer-2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n \n def forward(self, context, query, key=None):\n query = query.view(query.shape[0], -1)\n # encode\n h = self.encoder(context)\n # aggregate\n h = h.mean(dim=0)\n h = torch.stack([h]*(query.shape[0]), dim=0)\n r = torch.cat([h, query], dim=1)\n # predict\n out = self.decoder(r)\n return out\n\n\nclass ANP(torch.nn.Module):\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer, nhead):\n super(ANP, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [\n torch.nn.Linear(in_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(en_layer-2):\n 
self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n \n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [\n torch.nn.Linear(hidden_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(dec_layer-2):\n self.decoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n self.projector = torch.nn.Linear(query_dim, hidden_dim)\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim, num_heads=nhead)\n\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n # encode\n h = self.encoder(context)\n h.unsqueeze_(1)\n # aggregate\n q_t = self.projector(query)\n k_t = self.projector(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n # predict\n pred = self.decoder(h)\n return pred\n\nclass ANPv2(torch.nn.Module):\n def __init__(self, in_dim, hidden_dim, query_dim, out_dim, en_layer, dec_layer, nhead):\n super(ANPv2, self).__init__()\n if en_layer == 1:\n self.encoder = torch.nn.Linear(in_dim, hidden_dim)\n else:\n self.encoder = [\n torch.nn.Linear(in_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(en_layer-2):\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder.append(torch.nn.ReLU())\n self.encoder.append(torch.nn.Linear(hidden_dim, hidden_dim))\n self.encoder = torch.nn.Sequential(*self.encoder)\n \n if dec_layer == 1:\n self.decoder = torch.nn.Linear(hidden_dim, out_dim)\n else:\n self.decoder = [\n torch.nn.Linear(hidden_dim, hidden_dim),\n torch.nn.ReLU()\n ]\n for i in range(dec_layer-2):\n self.decoder.append(torch.nn.Linear(hidden_dim, 
hidden_dim))\n self.decoder.append(torch.nn.ReLU())\n self.decoder.append(torch.nn.Linear(hidden_dim, out_dim))\n self.decoder = torch.nn.Sequential(*self.decoder)\n \n self.key_mlp = torch.nn.Sequential(\n torch.nn.Linear(query_dim, hidden_dim),\n torch.nn.ReLU(),\n torch.nn.Linear(hidden_dim, hidden_dim)\n )\n\n self.query_mlp = torch.nn.Sequential(\n torch.nn.Linear(query_dim, hidden_dim),\n torch.nn.ReLU(),\n torch.nn.Linear(hidden_dim, hidden_dim)\n )\n\n self.attention = torch.nn.MultiheadAttention(embed_dim=hidden_dim, num_heads=nhead)\n\n\n def forward(self, context, key, query):\n query = query.view(query.shape[0], -1)\n key = key.view(key.shape[0], -1)\n # encode\n h = self.encoder(context)\n h.unsqueeze_(1)\n # aggregate\n q_t = self.query_mlp(query)\n k_t = self.key_mlp(key)\n q_t.unsqueeze_(1)\n k_t.unsqueeze_(1)\n h, _ = self.attention(query=q_t, key=k_t, value=h)\n h.squeeze_(1)\n # predict\n pred = self.decoder(h)\n return pred\n",
"step-ids": [
7,
8,
9,
10,
13
]
}
|
[
7,
8,
9,
10,
13
] |
import PySimpleGUI as sg
class TelaLisatrClientes():
    """Modal PySimpleGUI window that shows a list of clients."""

    def __init__(self):
        # The window is created lazily by init_components().
        self.__window = None

    def init_components(self, lista_clientes):
        """Build the window layout around the given client list."""
        rows = [
            [sg.Text('Dados do cliente')],
            [sg.Listbox(values=lista_clientes, size=(60, 10))],
            [sg.Submit()],
        ]
        self.__window = sg.Window('Lista de clientes').Layout(rows)

    def lista_clientes(self, lista_clientes):
        """Show the list, block until the user responds, return the event."""
        self.init_components(lista_clientes)
        button, values = self.__window.Read()
        self.__window.Close()
        return button, values
|
normal
|
{
"blob_id": "624b34d160ea6db4f5249544f1614a20f506ca9e",
"index": 895,
"step-1": "<mask token>\n\n\nclass TelaLisatrClientes:\n <mask token>\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass TelaLisatrClientes:\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TelaLisatrClientes:\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n\n def lista_clientes(self, lista_clientes):\n self.init_components(lista_clientes)\n button, values = self.__window.Read()\n self.__window.Close()\n return button, values\n",
"step-4": "import PySimpleGUI as sg\n\n\nclass TelaLisatrClientes:\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n layout = [[sg.Text('Dados do cliente')], [sg.Listbox(values=\n lista_clientes, size=(60, 10))], [sg.Submit()]]\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n\n def lista_clientes(self, lista_clientes):\n self.init_components(lista_clientes)\n button, values = self.__window.Read()\n self.__window.Close()\n return button, values\n",
"step-5": "import PySimpleGUI as sg\n\nclass TelaLisatrClientes():\n\n def __init__(self):\n self.__window = None\n\n def init_components(self, lista_clientes):\n\n layout = [\n [sg.Text('Dados do cliente')],\n [sg.Listbox(values=lista_clientes, size=(60, 10))],\n [sg.Submit()]\n ]\n\n self.__window = sg.Window('Lista de clientes').Layout(layout)\n\n def lista_clientes(self, lista_clientes):\n\n self.init_components(lista_clientes)\n\n button, values = self.__window.Read()\n\n self.__window.Close()\n\n return button, values\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Drop legacy permission FKs and unique constraints from guac_auth."""

    dependencies = [
        ('guac_auth', '0001_initial'),
    ]

    # Order matters: the unique_together constraints referencing the
    # permission columns are relaxed before those columns are removed.
    operations = [
        migrations.RemoveField(
            model_name='guacamoleconnectiongroup',
            name='type',
        ),
    ] + [
        migrations.AlterUniqueTogether(name=model, unique_together=set())
        for model in (
            'guacamoleconnectiongrouppermission',
            'guacamoleconnectionpermission',
            'guacamolesystempermission',
            'guacamoleuserpermission',
        )
    ] + [
        migrations.RemoveField(model_name=model, name='permission')
        for model in (
            'guacamoleconnectiongrouppermission',
            'guacamoleconnectionpermission',
            'guacamolesystempermission',
            'guacamoleuserpermission',
        )
    ]
|
normal
|
{
"blob_id": "7f63097265b1058785e90441f85b7f0088946717",
"index": 7785,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('guac_auth', '0001_initial')]\n operations = [migrations.RemoveField(model_name=\n 'guacamoleconnectiongroup', name='type'), migrations.\n AlterUniqueTogether(name='guacamoleconnectiongrouppermission',\n unique_together=set([])), migrations.AlterUniqueTogether(name=\n 'guacamoleconnectionpermission', unique_together=set([])),\n migrations.AlterUniqueTogether(name='guacamolesystempermission',\n unique_together=set([])), migrations.AlterUniqueTogether(name=\n 'guacamoleuserpermission', unique_together=set([])), migrations.\n RemoveField(model_name='guacamoleconnectiongrouppermission', name=\n 'permission'), migrations.RemoveField(model_name=\n 'guacamoleconnectionpermission', name='permission'), migrations.\n RemoveField(model_name='guacamolesystempermission', name=\n 'permission'), migrations.RemoveField(model_name=\n 'guacamoleuserpermission', name='permission')]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('guac_auth', '0001_initial')]\n operations = [migrations.RemoveField(model_name=\n 'guacamoleconnectiongroup', name='type'), migrations.\n AlterUniqueTogether(name='guacamoleconnectiongrouppermission',\n unique_together=set([])), migrations.AlterUniqueTogether(name=\n 'guacamoleconnectionpermission', unique_together=set([])),\n migrations.AlterUniqueTogether(name='guacamolesystempermission',\n unique_together=set([])), migrations.AlterUniqueTogether(name=\n 'guacamoleuserpermission', unique_together=set([])), migrations.\n RemoveField(model_name='guacamoleconnectiongrouppermission', name=\n 'permission'), migrations.RemoveField(model_name=\n 'guacamoleconnectionpermission', name='permission'), migrations.\n RemoveField(model_name='guacamolesystempermission', name=\n 'permission'), migrations.RemoveField(model_name=\n 'guacamoleuserpermission', name='permission')]\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('guac_auth', '0001_initial'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='guacamoleconnectiongroup',\n name='type',\n ),\n migrations.AlterUniqueTogether(\n name='guacamoleconnectiongrouppermission',\n unique_together=set([]),\n ),\n migrations.AlterUniqueTogether(\n name='guacamoleconnectionpermission',\n unique_together=set([]),\n ),\n migrations.AlterUniqueTogether(\n name='guacamolesystempermission',\n unique_together=set([]),\n ),\n migrations.AlterUniqueTogether(\n name='guacamoleuserpermission',\n unique_together=set([]),\n ),\n migrations.RemoveField(\n model_name='guacamoleconnectiongrouppermission',\n name='permission',\n ),\n migrations.RemoveField(\n model_name='guacamoleconnectionpermission',\n name='permission',\n ),\n migrations.RemoveField(\n model_name='guacamolesystempermission',\n name='permission',\n ),\n migrations.RemoveField(\n model_name='guacamoleuserpermission',\n name='permission',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
''' Brief: there are four digits: 1, 2, 3, 4.
Question: how many distinct three-digit numbers with no repeated digit can they form, and what are they? '''
from itertools import permutations

# permutations() yields the ordered triples of distinct digits in the same
# lexicographic order as the original triple loop; it also replaces the
# bitwise `&` that was (incorrectly, though harmlessly here) used in place
# of the boolean `and`.
for x, y, z in permutations(range(1, 5), 3):
    print(x, y, z)
|
normal
|
{
"blob_id": "caac877bf6c42217ea41f51717f6a704a3a9774b",
"index": 6838,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor x in range(1, 5):\n for y in range(1, 5):\n for z in range(1, 5):\n if (x != y) & (x != z) & (y != z):\n print(x, y, z)\n",
"step-3": "''' 简述:这里有四个数字,分别是:1、2、3、4\n提问:能组成多少个互不相同且无重复数字的三位数?各是多少? '''\n\nfor x in range(1,5):\n for y in range(1,5):\n for z in range(1,5):\n if (x != y) & (x != z) & (y != z):\n print(x,y,z)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def auto_int(x):
    """Parse an integer literal, auto-detecting the base from its prefix.

    Base 0 lets int() honour 0x/0o/0b prefixes (and plain decimal).
    """
    value = int(x, 0)
    return value
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def auto_int(x):
    """Parse an integer literal, auto-detecting the base from its prefix.

    Base 0 lets int() honour 0x/0o/0b prefixes (and plain decimal).
    """
    value = int(x, 0)
    return value
def load_options():
global parsed_args
base_parser = argparse.ArgumentParser(add_help=False)
base_parser.add_argument('-i', '--input-file', help=
'Input (log) file. If omitted, stdin will be read.')
base_parser.add_argument('-o', '--output-file', help=
'Output file. If omitted, the output will be written to stdout.')
base_parser.add_argument('-n', '--no-timestamps', action='store_true',
help='Specifies whether or not the input file contains timestamps. ')
base_parser.add_argument('-d', '--desc-str', nargs='+', type=str, help=
'Description string(s) of the dumps. Only dumps with a prefix matching any of the provided desc strings will be analyzed. If no --desc-str option is given, no description filtering will be performed. The prefix of a hexdump is the short description string before the address in each line of the dump, i.e the hexdump prefix. --desc-str is normally used to select between RX and TX logs and should be combined with a proper --data-direction option.'
)
base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,
help=
'This option is used to specify how the hexdata should be interpreted. Valid values are: t2h (target to host) or h2t (host to target). With t2h, RX trailers will be printed if --print-data is used. h2t is default. This option should be combined with an applicable --desc-str option. '
)
base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,
help=
'Description string(s) of the dumps to be. excluded. Similar to --desc-str, but all matching prefixes will be excluded from the analysis.'
)
base_parser.add_argument('-s', '--short-htc-header', action=
'store_true', help=
'Use 6 byte HTC header ("old" format) instead of 8 bytes.')
base_parser.add_argument('-t', '--keep-timestamps', action='store_true',
help=
'Keep the timestamps associated with each hexdump in the output. This option will only have effect if the log file contains timestamps.'
)
parser = argparse.ArgumentParser(prog='qca_hex_analyzer', description=
description, parents=[base_parser])
subparsers = parser.add_subparsers(dest='subparser_name')
parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl', help=wmi_ctrl_help,
description=wmi_ctrl_description, parents=[base_parser])
parser_wmi_ctrl.add_argument('--wmi-old', action='store_true', help=
'Specifies whether or not the WMI messages are according to the "old" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'
)
parser_wmi_ctrl.add_argument('-p', '--print-data', action='store_true',
help=
'Print WMI data message payload (and not just WMI message ID) for all encountered messages. '
)
parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,
type=int, default=[2], help=
'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'
)
parser_wmi_ctrl.add_argument('--tlv', action='store_true', help=
'TLV analysis.Each WMI message will be interpreted as a TLV message and the content of the message will be. written out in text (instead of hexdump). If the encountered message is not supported by the parser, the hex data will be printed instead.'
)
parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID', nargs=
'+', type=auto_int, help=
"WMI message id filter. Only WMI messages with an id matching any of the provided id's will be included in the output. If no --id | --msg-id option is given, no filtering will be performed. "
)
parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',
nargs='+', type=auto_int, help=
"WMI message id exclude filter. Similar to --id | --msg-id, but all matching id's will be excluded from the output. "
)
parser_htc_ctrl = subparsers.add_parser('htc-ctrl', help=htc_ctrl_help,
description=htc_ctrl_description, parents=[base_parser])
parser_htc_ctrl.add_argument('-p', '--print-data', action='store_true',
help=
'Print HTC ctrl data message payload (and not just message ID) for all encountered messages. '
)
parser_htt = subparsers.add_parser('htt', help=htt_help, description=
htt_description, parents=[base_parser])
parser_htt.add_argument('-p', '--print-data', action='store_true', help
=
'Print HTT data message payload (and not just HTT message ID) for all encountered messages. '
)
parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1, type=
int, default=[1], help=
'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'
)
parser_all = subparsers.add_parser('all', help=all_help, description=
all_description, parents=[base_parser])
parser_all.add_argument('-p', '--print-data', action='store_true', help
=
'Print message payload (and not just message ID) for all encountered messages. '
)
parser_all.add_argument('--wmi-old', action='store_true', help=
'Specifies whether or not the WMI messages are according to the "old" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'
)
parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1, type=int,
default=[1], help=
'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'
)
parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1, type
=int, default=[2], help=
'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'
)
parsed_args = parser.parse_args()
def main():
global parsed_args
load_options()
try:
if parsed_args.input_file:
infp = open(parsed_args.input_file, 'r')
else:
infp = sys.stdin
if parsed_args.output_file:
outfp = open(parsed_args.output_file, 'w')
else:
outfp = sys.stdout
if parsed_args.data_direction:
if parsed_args.data_direction[0] == 't2h':
t2h = True
elif parsed_args.data_direction[0] == 'h2t':
t2h = False
else:
sys.stderr.write('Unsupported data direction: {}\n'.format(
parsed_args.data_direction[0]))
exit(1)
else:
t2h = False
hf = hexfilter.HexFilterLinux(skip_timestamps=not parsed_args.
keep_timestamps, abs_timestamps=True, dump_desc=parsed_args.
desc_str, dump_desc_invert=parsed_args.desc_str_invert,
log_has_timestamps=not parsed_args.no_timestamps,
include_dump_desc_in_output=False, remove_ascii_part=True)
if parsed_args.subparser_name == 'wmi-ctrl':
analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],
wmi_unified=not parsed_args.wmi_old, short_htc_hdr=
parsed_args.short_htc_header, timestamps=parsed_args.
keep_timestamps, t2h=t2h, tlv_analysis=parsed_args.tlv,
msg_id_filter=parsed_args.id, msg_id_exclude_filter=
parsed_args.skip_id)
if parsed_args.tlv:
parsed_args.print_data = True
elif parsed_args.subparser_name == 'htc-ctrl':
analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.
short_htc_header, timestamps=parsed_args.keep_timestamps,
t2h=t2h)
elif parsed_args.subparser_name == 'htt':
analyzer = HttAnalyzer(eid=parsed_args.ep_id[0], short_htc_hdr=
parsed_args.short_htc_header, timestamps=parsed_args.
keep_timestamps, t2h=t2h)
elif parsed_args.subparser_name == 'all':
analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[
0], htt_eid=parsed_args.htt_ep_id[0], wmi_unified=not
parsed_args.wmi_old, short_htc_hdr=parsed_args.
short_htc_header, timestamps=parsed_args.keep_timestamps,
t2h=t2h)
else:
sys.stderr.write('Unsupported subcommand: {}\n'.format(
parsed_args.subparser_name))
for line in infp:
if hf.parse_line(line):
hexdata = hf.get_hex()
if analyzer.parse_hexdata(hexdata):
str = analyzer.get_id_str()
outfp.write(str)
if parsed_args.print_data:
analyzer.print_data(outfp)
except IOError as err:
sys.stderr.write('{}\n'.format(err))
except:
type, value, tb = sys.exc_info()
traceback.print_exc()
pdb.post_mortem(tb)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
description = (
'Tool used to analyze hexdumps produced by a qca wireless kernel driver (such as ath6kl, ath10k or qcacld2.0). The hexdumps are assumed to contain dumps of the traffic between the driver and the target. No special preprocessing of the log files is required. Filter strings (description strings) can be used to limit the output (only RX or TX etc.). The driver must of course be configured to log all necessary debug data (for ath6kl and ath10k this means a proper debug mask). '
)
wmi_ctrl_help = (
'Subcommand for WMI control message parsing. This subcommand is used to extract WMI control messages from the input. '
)
wmi_ctrl_description = (
"Extracts WMI control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid WMI control message ID's will be printed together with the message enum string (from ath6kl source code). The --wmi-old option must be used if the driver does not use the WMI unified protocol (ath6kl). The WMI control message payload will also be printed together with message ID's if the --print-data option is used."
)
htc_ctrl_help = (
'Subcommand for HTC control message parsing. This subcommand is used to extract HTC control messages from the input. '
)
htc_ctrl_description = (
"Extracts HTC control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). All valid HTC control message ID's will be printed together with the message enum string (from ath6kl source code). The message payload will also be printed together with the message ID's if the --print-data option is used. HTC control messages will always be extracted from endpoint 0."
)
htt_help = (
'Subcommand for HTT message parsing. This subcommand is used to extract HTT messages from the input. '
)
htt_description = (
"Extracts HTT message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid HTT message ID's will be printed together with the message enum string (from ath10k source code). The message payload will also be printed together with message ID's if the --print-data option is used."
)
all_help = (
'Subcommand for parsing of all supported message types. This subcommand is used to extract both WMI control, HTC control and HTT messages from the input. '
)
all_description = (
"Extracts message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output-file). The messages can be any of the supported message types (currently only WMI controli, HTC control and HTT). --wmi-ctrl-ep-id and --htt-ep-id is used to determine from which endpoints WMI and HTT data will be extracted (see description of those options below). HTC control messages will always be extracted from ep 0. All valid message ID's will be printed together with a corresponding message enum string. The message payload will also be printed together with message ID's if the --print-data option is used."
)
def auto_int(x):
return int(x, 0)
def load_options():
global parsed_args
base_parser = argparse.ArgumentParser(add_help=False)
base_parser.add_argument('-i', '--input-file', help=
'Input (log) file. If omitted, stdin will be read.')
base_parser.add_argument('-o', '--output-file', help=
'Output file. If omitted, the output will be written to stdout.')
base_parser.add_argument('-n', '--no-timestamps', action='store_true',
help='Specifies whether or not the input file contains timestamps. ')
base_parser.add_argument('-d', '--desc-str', nargs='+', type=str, help=
'Description string(s) of the dumps. Only dumps with a prefix matching any of the provided desc strings will be analyzed. If no --desc-str option is given, no description filtering will be performed. The prefix of a hexdump is the short description string before the address in each line of the dump, i.e the hexdump prefix. --desc-str is normally used to select between RX and TX logs and should be combined with a proper --data-direction option.'
)
base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,
help=
'This option is used to specify how the hexdata should be interpreted. Valid values are: t2h (target to host) or h2t (host to target). With t2h, RX trailers will be printed if --print-data is used. h2t is default. This option should be combined with an applicable --desc-str option. '
)
base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,
help=
'Description string(s) of the dumps to be. excluded. Similar to --desc-str, but all matching prefixes will be excluded from the analysis.'
)
base_parser.add_argument('-s', '--short-htc-header', action=
'store_true', help=
'Use 6 byte HTC header ("old" format) instead of 8 bytes.')
base_parser.add_argument('-t', '--keep-timestamps', action='store_true',
help=
'Keep the timestamps associated with each hexdump in the output. This option will only have effect if the log file contains timestamps.'
)
parser = argparse.ArgumentParser(prog='qca_hex_analyzer', description=
description, parents=[base_parser])
subparsers = parser.add_subparsers(dest='subparser_name')
parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl', help=wmi_ctrl_help,
description=wmi_ctrl_description, parents=[base_parser])
parser_wmi_ctrl.add_argument('--wmi-old', action='store_true', help=
'Specifies whether or not the WMI messages are according to the "old" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'
)
parser_wmi_ctrl.add_argument('-p', '--print-data', action='store_true',
help=
'Print WMI data message payload (and not just WMI message ID) for all encountered messages. '
)
parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,
type=int, default=[2], help=
'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'
)
parser_wmi_ctrl.add_argument('--tlv', action='store_true', help=
'TLV analysis.Each WMI message will be interpreted as a TLV message and the content of the message will be. written out in text (instead of hexdump). If the encountered message is not supported by the parser, the hex data will be printed instead.'
)
parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID', nargs=
'+', type=auto_int, help=
"WMI message id filter. Only WMI messages with an id matching any of the provided id's will be included in the output. If no --id | --msg-id option is given, no filtering will be performed. "
)
parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',
nargs='+', type=auto_int, help=
"WMI message id exclude filter. Similar to --id | --msg-id, but all matching id's will be excluded from the output. "
)
parser_htc_ctrl = subparsers.add_parser('htc-ctrl', help=htc_ctrl_help,
description=htc_ctrl_description, parents=[base_parser])
parser_htc_ctrl.add_argument('-p', '--print-data', action='store_true',
help=
'Print HTC ctrl data message payload (and not just message ID) for all encountered messages. '
)
parser_htt = subparsers.add_parser('htt', help=htt_help, description=
htt_description, parents=[base_parser])
parser_htt.add_argument('-p', '--print-data', action='store_true', help
=
'Print HTT data message payload (and not just HTT message ID) for all encountered messages. '
)
parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1, type=
int, default=[1], help=
'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'
)
parser_all = subparsers.add_parser('all', help=all_help, description=
all_description, parents=[base_parser])
parser_all.add_argument('-p', '--print-data', action='store_true', help
=
'Print message payload (and not just message ID) for all encountered messages. '
)
parser_all.add_argument('--wmi-old', action='store_true', help=
'Specifies whether or not the WMI messages are according to the "old" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'
)
parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1, type=int,
default=[1], help=
'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'
)
parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1, type
=int, default=[2], help=
'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'
)
parsed_args = parser.parse_args()
def main():
global parsed_args
load_options()
try:
if parsed_args.input_file:
infp = open(parsed_args.input_file, 'r')
else:
infp = sys.stdin
if parsed_args.output_file:
outfp = open(parsed_args.output_file, 'w')
else:
outfp = sys.stdout
if parsed_args.data_direction:
if parsed_args.data_direction[0] == 't2h':
t2h = True
elif parsed_args.data_direction[0] == 'h2t':
t2h = False
else:
sys.stderr.write('Unsupported data direction: {}\n'.format(
parsed_args.data_direction[0]))
exit(1)
else:
t2h = False
hf = hexfilter.HexFilterLinux(skip_timestamps=not parsed_args.
keep_timestamps, abs_timestamps=True, dump_desc=parsed_args.
desc_str, dump_desc_invert=parsed_args.desc_str_invert,
log_has_timestamps=not parsed_args.no_timestamps,
include_dump_desc_in_output=False, remove_ascii_part=True)
if parsed_args.subparser_name == 'wmi-ctrl':
analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],
wmi_unified=not parsed_args.wmi_old, short_htc_hdr=
parsed_args.short_htc_header, timestamps=parsed_args.
keep_timestamps, t2h=t2h, tlv_analysis=parsed_args.tlv,
msg_id_filter=parsed_args.id, msg_id_exclude_filter=
parsed_args.skip_id)
if parsed_args.tlv:
parsed_args.print_data = True
elif parsed_args.subparser_name == 'htc-ctrl':
analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.
short_htc_header, timestamps=parsed_args.keep_timestamps,
t2h=t2h)
elif parsed_args.subparser_name == 'htt':
analyzer = HttAnalyzer(eid=parsed_args.ep_id[0], short_htc_hdr=
parsed_args.short_htc_header, timestamps=parsed_args.
keep_timestamps, t2h=t2h)
elif parsed_args.subparser_name == 'all':
analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[
0], htt_eid=parsed_args.htt_ep_id[0], wmi_unified=not
parsed_args.wmi_old, short_htc_hdr=parsed_args.
short_htc_header, timestamps=parsed_args.keep_timestamps,
t2h=t2h)
else:
sys.stderr.write('Unsupported subcommand: {}\n'.format(
parsed_args.subparser_name))
for line in infp:
if hf.parse_line(line):
hexdata = hf.get_hex()
if analyzer.parse_hexdata(hexdata):
str = analyzer.get_id_str()
outfp.write(str)
if parsed_args.print_data:
analyzer.print_data(outfp)
except IOError as err:
sys.stderr.write('{}\n'.format(err))
except:
type, value, tb = sys.exc_info()
traceback.print_exc()
pdb.post_mortem(tb)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
from collections import namedtuple
import argparse
import pdb
import traceback
import sys
import os
from qca_hex_analyzer import WmiCtrlAnalyzer, HtcCtrlAnalyzer, HttAnalyzer, AllAnalyzer
import hexfilter
description = (
'Tool used to analyze hexdumps produced by a qca wireless kernel driver (such as ath6kl, ath10k or qcacld2.0). The hexdumps are assumed to contain dumps of the traffic between the driver and the target. No special preprocessing of the log files is required. Filter strings (description strings) can be used to limit the output (only RX or TX etc.). The driver must of course be configured to log all necessary debug data (for ath6kl and ath10k this means a proper debug mask). '
)
wmi_ctrl_help = (
'Subcommand for WMI control message parsing. This subcommand is used to extract WMI control messages from the input. '
)
wmi_ctrl_description = (
"Extracts WMI control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid WMI control message ID's will be printed together with the message enum string (from ath6kl source code). The --wmi-old option must be used if the driver does not use the WMI unified protocol (ath6kl). The WMI control message payload will also be printed together with message ID's if the --print-data option is used."
)
htc_ctrl_help = (
'Subcommand for HTC control message parsing. This subcommand is used to extract HTC control messages from the input. '
)
htc_ctrl_description = (
"Extracts HTC control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). All valid HTC control message ID's will be printed together with the message enum string (from ath6kl source code). The message payload will also be printed together with the message ID's if the --print-data option is used. HTC control messages will always be extracted from endpoint 0."
)
htt_help = (
'Subcommand for HTT message parsing. This subcommand is used to extract HTT messages from the input. '
)
htt_description = (
"Extracts HTT message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid HTT message ID's will be printed together with the message enum string (from ath10k source code). The message payload will also be printed together with message ID's if the --print-data option is used."
)
all_help = (
'Subcommand for parsing of all supported message types. This subcommand is used to extract both WMI control, HTC control and HTT messages from the input. '
)
all_description = (
"Extracts message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output-file). The messages can be any of the supported message types (currently only WMI controli, HTC control and HTT). --wmi-ctrl-ep-id and --htt-ep-id is used to determine from which endpoints WMI and HTT data will be extracted (see description of those options below). HTC control messages will always be extracted from ep 0. All valid message ID's will be printed together with a corresponding message enum string. The message payload will also be printed together with message ID's if the --print-data option is used."
)
def auto_int(x):
return int(x, 0)
def load_options():
global parsed_args
base_parser = argparse.ArgumentParser(add_help=False)
base_parser.add_argument('-i', '--input-file', help=
'Input (log) file. If omitted, stdin will be read.')
base_parser.add_argument('-o', '--output-file', help=
'Output file. If omitted, the output will be written to stdout.')
base_parser.add_argument('-n', '--no-timestamps', action='store_true',
help='Specifies whether or not the input file contains timestamps. ')
base_parser.add_argument('-d', '--desc-str', nargs='+', type=str, help=
'Description string(s) of the dumps. Only dumps with a prefix matching any of the provided desc strings will be analyzed. If no --desc-str option is given, no description filtering will be performed. The prefix of a hexdump is the short description string before the address in each line of the dump, i.e the hexdump prefix. --desc-str is normally used to select between RX and TX logs and should be combined with a proper --data-direction option.'
)
base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,
help=
'This option is used to specify how the hexdata should be interpreted. Valid values are: t2h (target to host) or h2t (host to target). With t2h, RX trailers will be printed if --print-data is used. h2t is default. This option should be combined with an applicable --desc-str option. '
)
base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,
help=
'Description string(s) of the dumps to be. excluded. Similar to --desc-str, but all matching prefixes will be excluded from the analysis.'
)
base_parser.add_argument('-s', '--short-htc-header', action=
'store_true', help=
'Use 6 byte HTC header ("old" format) instead of 8 bytes.')
base_parser.add_argument('-t', '--keep-timestamps', action='store_true',
help=
'Keep the timestamps associated with each hexdump in the output. This option will only have effect if the log file contains timestamps.'
)
parser = argparse.ArgumentParser(prog='qca_hex_analyzer', description=
description, parents=[base_parser])
subparsers = parser.add_subparsers(dest='subparser_name')
parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl', help=wmi_ctrl_help,
description=wmi_ctrl_description, parents=[base_parser])
parser_wmi_ctrl.add_argument('--wmi-old', action='store_true', help=
'Specifies whether or not the WMI messages are according to the "old" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'
)
parser_wmi_ctrl.add_argument('-p', '--print-data', action='store_true',
help=
'Print WMI data message payload (and not just WMI message ID) for all encountered messages. '
)
parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,
type=int, default=[2], help=
'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'
)
parser_wmi_ctrl.add_argument('--tlv', action='store_true', help=
'TLV analysis.Each WMI message will be interpreted as a TLV message and the content of the message will be. written out in text (instead of hexdump). If the encountered message is not supported by the parser, the hex data will be printed instead.'
)
parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID', nargs=
'+', type=auto_int, help=
"WMI message id filter. Only WMI messages with an id matching any of the provided id's will be included in the output. If no --id | --msg-id option is given, no filtering will be performed. "
)
parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',
nargs='+', type=auto_int, help=
"WMI message id exclude filter. Similar to --id | --msg-id, but all matching id's will be excluded from the output. "
)
parser_htc_ctrl = subparsers.add_parser('htc-ctrl', help=htc_ctrl_help,
description=htc_ctrl_description, parents=[base_parser])
parser_htc_ctrl.add_argument('-p', '--print-data', action='store_true',
help=
'Print HTC ctrl data message payload (and not just message ID) for all encountered messages. '
)
parser_htt = subparsers.add_parser('htt', help=htt_help, description=
htt_description, parents=[base_parser])
parser_htt.add_argument('-p', '--print-data', action='store_true', help
=
'Print HTT data message payload (and not just HTT message ID) for all encountered messages. '
)
parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1, type=
int, default=[1], help=
'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'
)
parser_all = subparsers.add_parser('all', help=all_help, description=
all_description, parents=[base_parser])
parser_all.add_argument('-p', '--print-data', action='store_true', help
=
'Print message payload (and not just message ID) for all encountered messages. '
)
parser_all.add_argument('--wmi-old', action='store_true', help=
'Specifies whether or not the WMI messages are according to the "old" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'
)
parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1, type=int,
default=[1], help=
'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'
)
parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1, type
=int, default=[2], help=
'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'
)
parsed_args = parser.parse_args()
def main():
global parsed_args
load_options()
try:
if parsed_args.input_file:
infp = open(parsed_args.input_file, 'r')
else:
infp = sys.stdin
if parsed_args.output_file:
outfp = open(parsed_args.output_file, 'w')
else:
outfp = sys.stdout
if parsed_args.data_direction:
if parsed_args.data_direction[0] == 't2h':
t2h = True
elif parsed_args.data_direction[0] == 'h2t':
t2h = False
else:
sys.stderr.write('Unsupported data direction: {}\n'.format(
parsed_args.data_direction[0]))
exit(1)
else:
t2h = False
hf = hexfilter.HexFilterLinux(skip_timestamps=not parsed_args.
keep_timestamps, abs_timestamps=True, dump_desc=parsed_args.
desc_str, dump_desc_invert=parsed_args.desc_str_invert,
log_has_timestamps=not parsed_args.no_timestamps,
include_dump_desc_in_output=False, remove_ascii_part=True)
if parsed_args.subparser_name == 'wmi-ctrl':
analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],
wmi_unified=not parsed_args.wmi_old, short_htc_hdr=
parsed_args.short_htc_header, timestamps=parsed_args.
keep_timestamps, t2h=t2h, tlv_analysis=parsed_args.tlv,
msg_id_filter=parsed_args.id, msg_id_exclude_filter=
parsed_args.skip_id)
if parsed_args.tlv:
parsed_args.print_data = True
elif parsed_args.subparser_name == 'htc-ctrl':
analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.
short_htc_header, timestamps=parsed_args.keep_timestamps,
t2h=t2h)
elif parsed_args.subparser_name == 'htt':
analyzer = HttAnalyzer(eid=parsed_args.ep_id[0], short_htc_hdr=
parsed_args.short_htc_header, timestamps=parsed_args.
keep_timestamps, t2h=t2h)
elif parsed_args.subparser_name == 'all':
analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[
0], htt_eid=parsed_args.htt_ep_id[0], wmi_unified=not
parsed_args.wmi_old, short_htc_hdr=parsed_args.
short_htc_header, timestamps=parsed_args.keep_timestamps,
t2h=t2h)
else:
sys.stderr.write('Unsupported subcommand: {}\n'.format(
parsed_args.subparser_name))
for line in infp:
if hf.parse_line(line):
hexdata = hf.get_hex()
if analyzer.parse_hexdata(hexdata):
str = analyzer.get_id_str()
outfp.write(str)
if parsed_args.print_data:
analyzer.print_data(outfp)
except IOError as err:
sys.stderr.write('{}\n'.format(err))
except:
type, value, tb = sys.exc_info()
traceback.print_exc()
pdb.post_mortem(tb)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
from collections import namedtuple
import argparse
import pdb
import traceback
import sys
import os
from qca_hex_analyzer import WmiCtrlAnalyzer, HtcCtrlAnalyzer, HttAnalyzer, AllAnalyzer
import hexfilter
# User-facing help and description text for the argparse command line
# interface built in load_options(). Fixed typos: "--output -file" ->
# "--output-file" and "WMI controli" -> "WMI control".
description = \
    "Tool used to analyze hexdumps produced by a qca wireless kernel " \
    "driver (such as ath6kl, ath10k or qcacld2.0). " \
    "The hexdumps are assumed to contain dumps of the traffic " \
    "between the driver and the target. " \
    "No special preprocessing of the log files is required. " \
    "Filter strings (description strings) can be used to limit the output " \
    "(only RX or TX etc.). " \
    "The driver must of course be configured to log all necessary debug " \
    "data (for ath6kl and ath10k this means a proper debug mask). "
wmi_ctrl_help = \
    "Subcommand for WMI control message parsing. " \
    "This subcommand is used to extract WMI control messages from the input. "
wmi_ctrl_description = \
    "Extracts WMI control message hexdata from an input (--input-file). " \
    "The extracted messages will be printed to the output (--output-file). " \
    "--ep-id is used to determine from which HTC endpoint the data will " \
    "be extracted (see description of that option below). " \
    "All valid WMI control message ID's will be printed together with the " \
    "message enum string (from ath6kl source code). " \
    "The --wmi-old option must be used if the driver does not use the WMI " \
    "unified protocol (ath6kl). " \
    "The WMI control message payload will also be printed together with " \
    "message ID's if the --print-data option is used."
htc_ctrl_help = \
    "Subcommand for HTC control message parsing. " \
    "This subcommand is used to extract HTC control messages from the input. "
htc_ctrl_description = \
    "Extracts HTC control message hexdata from an input (--input-file). " \
    "The extracted messages will be printed to the output (--output-file). " \
    "All valid HTC control message ID's will be printed together with the " \
    "message enum string (from ath6kl source code). " \
    "The message payload will also be printed together with the " \
    "message ID's if the --print-data option is used. " \
    "HTC control messages will always be extracted from endpoint 0."
htt_help = \
    "Subcommand for HTT message parsing. " \
    "This subcommand is used to extract HTT messages from the input. "
htt_description = \
    "Extracts HTT message hexdata from an input (--input-file). " \
    "The extracted messages will be printed to the output (--output-file). " \
    "--ep-id is used to determine from which HTC endpoint the data will " \
    "be extracted (see description of that option below). " \
    "All valid HTT message ID's will be printed together with the " \
    "message enum string (from ath10k source code). " \
    "The message payload will also be printed together with " \
    "message ID's if the --print-data option is used."
all_help = \
    "Subcommand for parsing of all supported message types. " \
    "This subcommand is used to extract both WMI control, " \
    "HTC control and HTT messages from the input. "
all_description = \
    "Extracts message hexdata from an input (--input-file). " \
    "The extracted messages will be printed to the output (--output-file). " \
    "The messages can be any of the supported message types " \
    "(currently only WMI control, HTC control and HTT). " \
    "--wmi-ctrl-ep-id and --htt-ep-id is used to determine from which " \
    "endpoints WMI and HTT data will be extracted " \
    "(see description of those options below). " \
    "HTC control messages will always be extracted from ep 0. " \
    "All valid message ID's will be printed together " \
    "with a corresponding message enum string. " \
    "The message payload will also be printed together with " \
    "message ID's if the --print-data option is used."
def auto_int(x):
    """Convert *x* to an int, auto-detecting the base from its prefix.

    With base 0, ``int`` honours ``0x``/``0o``/``0b`` prefixes and treats
    unprefixed strings as decimal, matching C-style integer literals.
    Used as an argparse ``type=`` callback for message-id options.
    """
    return int(x, base=0)
def load_options():
    """Parse the command line and store the result in global ``parsed_args``.

    Builds a base parser holding the options shared by every subcommand
    (input/output files, timestamp handling, dump-description filters,
    data direction, HTC header size) and one subparser per analyzer:
    ``wmi-ctrl``, ``htc-ctrl``, ``htt`` and ``all``. The parsed namespace
    is assigned to the module-level ``parsed_args`` rather than returned.

    Fixed help-text typos: "TLV analysis.Each" (missing space),
    "will be. written out" and "dumps to be. excluded" (stray periods).
    """
    global parsed_args
    # Shared options; re-used by every subparser via parents=[base_parser].
    base_parser = argparse.ArgumentParser(add_help=False)
    base_parser.add_argument('-i', '--input-file',
                             help="Input (log) file. If omitted, "
                                  "stdin will be read.")
    base_parser.add_argument('-o', '--output-file',
                             help="Output file. If omitted, "
                                  "the output will be written to stdout.")
    base_parser.add_argument('-n', '--no-timestamps', action="store_true",
                             help="Specifies whether or not the input file "
                                  "contains timestamps. ")
    base_parser.add_argument('-d', '--desc-str', nargs='+', type=str,
                             help="Description string(s) of the dumps. "
                                  "Only dumps with a prefix "
                                  "matching any of the provided desc strings "
                                  "will be analyzed. "
                                  "If no --desc-str option is given, no "
                                  "description filtering will be performed. "
                                  "The prefix of a hexdump is the short "
                                  "description string before the address "
                                  "in each line of the dump, i.e the hexdump "
                                  "prefix. "
                                  "--desc-str is normally used to select "
                                  "between RX and TX logs and should be "
                                  "combined with a proper --data-direction "
                                  "option.")
    base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,
                             help="This option is used to specify how the "
                                  "hexdata should be interpreted. "
                                  "Valid values are: "
                                  "t2h (target to host) or h2t (host to target). "
                                  "With t2h, RX trailers will be printed if "
                                  "--print-data is used. h2t is default. "
                                  "This option should be combined with an "
                                  "applicable --desc-str option. ")
    base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,
                             help="Description string(s) of the dumps to be "
                                  "excluded. Similar to --desc-str, but all "
                                  "matching prefixes will be excluded from "
                                  "the analysis.")
    base_parser.add_argument('-s', '--short-htc-header', action="store_true",
                             help="Use 6 byte HTC header (\"old\" format) "
                                  "instead of 8 bytes.")
    base_parser.add_argument('-t', '--keep-timestamps', action="store_true",
                             help="Keep the timestamps associated with each "
                                  "hexdump in the output. "
                                  "This option will only have effect if the "
                                  "log file contains timestamps.")
    parser = argparse.ArgumentParser(prog="qca_hex_analyzer",
                                     description=description,
                                     parents=[base_parser])
    # subparser_name records which subcommand was chosen; main() dispatches on it.
    subparsers = parser.add_subparsers(dest="subparser_name")
    parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl',
                                            help=wmi_ctrl_help,
                                            description=wmi_ctrl_description,
                                            parents=[base_parser])
    parser_wmi_ctrl.add_argument('--wmi-old', action="store_true",
                                 help="Specifies whether or not the WMI messages "
                                      "are according to the \"old\" WMI protocol. "
                                      "If not set, the messages will be interpreted "
                                      "according to the unified WMI format")
    parser_wmi_ctrl.add_argument('-p', '--print-data', action="store_true",
                                 help="Print WMI data message payload (and not just "
                                      "WMI message ID) for all encountered messages. ")
    parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,
                                 type=int, default=[2],
                                 help="WMI control service endpoint ID. "
                                      "This is the endpoint where the WMI control data is "
                                      "expected to be present. Make sure the endpoint "
                                      "matches the endpoint id associated with the "
                                      "control service endpoint (service id 0x100) "
                                      "of the driver (the endpoint received from the "
                                      "target in the HTC service connect response). "
                                      "If this option is omitted a default value of 2 "
                                      "will be used.")
    parser_wmi_ctrl.add_argument('--tlv', action="store_true",
                                 help="TLV analysis. "
                                      "Each WMI message will be interpreted as a TLV "
                                      "message and the content of the message will be "
                                      "written out in text (instead of hexdump). "
                                      "If the encountered message is not supported by "
                                      "the parser, the hex data will be printed instead.")
    parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID',
                                 nargs='+', type=auto_int,
                                 help="WMI message id filter. "
                                      "Only WMI messages with an id matching any of the "
                                      "provided id's will be included in the output. "
                                      "If no --id | --msg-id option is given, no "
                                      "filtering will be performed. ")
    parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',
                                 nargs='+', type=auto_int,
                                 help="WMI message id exclude filter. "
                                      "Similar to --id | --msg-id, but all matching "
                                      "id's will be excluded from the output. ")
    parser_htc_ctrl = subparsers.add_parser('htc-ctrl',
                                            help=htc_ctrl_help,
                                            description=htc_ctrl_description,
                                            parents=[base_parser])
    parser_htc_ctrl.add_argument('-p', '--print-data', action="store_true",
                                 help="Print HTC ctrl data message payload (and not just "
                                      "message ID) for all encountered messages. ")
    parser_htt = subparsers.add_parser('htt',
                                       help=htt_help,
                                       description=htt_description,
                                       parents=[base_parser])
    parser_htt.add_argument('-p', '--print-data', action="store_true",
                            help="Print HTT data message payload (and not just "
                                 "HTT message ID) for all encountered messages. ")
    parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1,
                            type=int, default=[1],
                            help="HTT service endpoint ID. "
                                 "This is the endpoint where the HTT data is "
                                 "expected to be present. Make sure the endpoint "
                                 "matches the endpoint id associated with the "
                                 "HTT endpoint (service id 0x300) "
                                 "of the driver (the endpoint received from the "
                                 "target in the HTC service connect response). "
                                 "If this option is omitted a default value of 1 "
                                 "will be used.")
    parser_all = subparsers.add_parser('all',
                                       help=all_help,
                                       description=all_description,
                                       parents=[base_parser])
    parser_all.add_argument('-p', '--print-data', action="store_true",
                            help="Print message payload (and not just "
                                 "message ID) for all encountered messages. ")
    parser_all.add_argument('--wmi-old', action="store_true",
                            help="Specifies whether or not the WMI messages "
                                 "are according to the \"old\" WMI protocol. "
                                 "If not set, the messages will be interpreted "
                                 "according to the unified WMI format")
    parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1,
                            type=int, default=[1],
                            help="HTT service endpoint ID. "
                                 "This is the endpoint where the HTT data is "
                                 "expected to be present. Make sure the endpoint "
                                 "matches the endpoint id associated with the "
                                 "HTT endpoint (service id 0x300) "
                                 "of the driver (the endpoint received from the "
                                 "target in the HTC service connect response). "
                                 "If this option is omitted a default value of 1 "
                                 "will be used.")
    parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1,
                            type=int, default=[2],
                            help="WMI control service endpoint ID. "
                                 "This is the endpoint where the WMI control data is "
                                 "expected to be present. Make sure the endpoint "
                                 "matches the endpoint id associated with the "
                                 "control service endpoint (service id 0x100) "
                                 "of the driver (the endpoint received from the "
                                 "target in the HTC service connect response). "
                                 "If this option is omitted a default value of 2 "
                                 "will be used.")
    parsed_args = parser.parse_args()
def main():
    """Entry point: parse options, build the selected analyzer, process the log.

    Reads hexdump lines from the input (file or stdin), filters them with
    hexfilter and feeds the extracted hex data to the analyzer chosen by
    the subcommand. Matched message id strings (and optionally payloads)
    are written to the output (file or stdout).

    Fixes over the previous version: an unsupported subcommand now exits
    instead of falling through to the parse loop with ``analyzer`` unbound
    (NameError); the bare ``except:`` no longer swallows ``SystemExit``
    from the error paths into the debugger; opened files are closed; the
    builtins ``str``/``type`` are no longer shadowed.
    """
    global parsed_args
    load_options()
    # Default to the standard streams; replaced below if files were given.
    infp = sys.stdin
    outfp = sys.stdout
    try:
        if parsed_args.input_file:
            infp = open(parsed_args.input_file, "r")
        if parsed_args.output_file:
            outfp = open(parsed_args.output_file, "w")
        if parsed_args.data_direction:
            if parsed_args.data_direction[0] == 't2h':
                t2h = True
            elif parsed_args.data_direction[0] == 'h2t':
                t2h = False
            else:
                sys.stderr.write('Unsupported data direction: {}\n'.format(
                    parsed_args.data_direction[0]))
                sys.exit(1)
        else:
            # Interpreting the data as host -> target is the default behaviour.
            t2h = False
        hf = hexfilter.HexFilterLinux(skip_timestamps=(not parsed_args.keep_timestamps),
                                      abs_timestamps=True,
                                      dump_desc=parsed_args.desc_str,
                                      dump_desc_invert=parsed_args.desc_str_invert,
                                      log_has_timestamps=(not parsed_args.no_timestamps),
                                      include_dump_desc_in_output=False,
                                      remove_ascii_part=True)
        # Dispatch on the chosen subcommand to build the matching analyzer.
        if parsed_args.subparser_name == 'wmi-ctrl':
            analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],
                                       wmi_unified=(not parsed_args.wmi_old),
                                       short_htc_hdr=parsed_args.short_htc_header,
                                       timestamps=parsed_args.keep_timestamps,
                                       t2h=t2h,
                                       tlv_analysis=parsed_args.tlv,
                                       msg_id_filter=parsed_args.id,
                                       msg_id_exclude_filter=parsed_args.skip_id)
            if parsed_args.tlv:
                # TLV analysis implies printing the (decoded) message content.
                parsed_args.print_data = True
        elif parsed_args.subparser_name == 'htc-ctrl':
            analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.short_htc_header,
                                       timestamps=parsed_args.keep_timestamps,
                                       t2h=t2h)
        elif parsed_args.subparser_name == 'htt':
            analyzer = HttAnalyzer(eid=parsed_args.ep_id[0],
                                   short_htc_hdr=parsed_args.short_htc_header,
                                   timestamps=parsed_args.keep_timestamps,
                                   t2h=t2h)
        elif parsed_args.subparser_name == 'all':
            analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[0],
                                   htt_eid=parsed_args.htt_ep_id[0],
                                   wmi_unified=(not parsed_args.wmi_old),
                                   short_htc_hdr=parsed_args.short_htc_header,
                                   timestamps=parsed_args.keep_timestamps,
                                   t2h=t2h)
        else:
            sys.stderr.write('Unsupported subcommand: {}\n'.format(
                parsed_args.subparser_name))
            # Bug fix: previously fell through to the loop below with
            # `analyzer` unbound, raising NameError.
            sys.exit(1)
        for line in infp:
            if hf.parse_line(line):
                hexdata = hf.get_hex()
                if analyzer.parse_hexdata(hexdata):
                    id_str = analyzer.get_id_str()
                    outfp.write(id_str)
                    if parsed_args.print_data:
                        analyzer.print_data(outfp)
    except IOError as err:
        sys.stderr.write('{}\n'.format(err))
    except Exception:
        # Unexpected error: print the traceback and drop into a post-mortem
        # debugger. `except Exception` (not bare) so SystemExit/KeyboardInterrupt
        # propagate normally.
        traceback.print_exc()
        pdb.post_mortem(sys.exc_info()[2])
    finally:
        # Close only the streams this function opened itself.
        if infp is not sys.stdin:
            infp.close()
        if outfp is not sys.stdout:
            outfp.close()
# Script entry point: run the analyzer when executed directly.
if __name__ == "__main__":
    main()
|
flexible
|
{
"blob_id": "3b381668dbb9b4e5a2e323dc4d6b5e3951736882",
"index": 1804,
"step-1": "<mask token>\n\n\ndef auto_int(x):\n return int(x, 0)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef auto_int(x):\n return int(x, 0)\n\n\ndef load_options():\n global parsed_args\n base_parser = argparse.ArgumentParser(add_help=False)\n base_parser.add_argument('-i', '--input-file', help=\n 'Input (log) file. If omitted, stdin will be read.')\n base_parser.add_argument('-o', '--output-file', help=\n 'Output file. If omitted, the output will be written to stdout.')\n base_parser.add_argument('-n', '--no-timestamps', action='store_true',\n help='Specifies whether or not the input file contains timestamps. ')\n base_parser.add_argument('-d', '--desc-str', nargs='+', type=str, help=\n 'Description string(s) of the dumps. Only dumps with a prefix matching any of the provided desc strings will be analyzed. If no --desc-str option is given, no description filtering will be performed. The prefix of a hexdump is the short description string before the address in each line of the dump, i.e the hexdump prefix. --desc-str is normally used to select between RX and TX logs and should be combined with a proper --data-direction option.'\n )\n base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,\n help=\n 'This option is used to specify how the hexdata should be interpreted. Valid values are: t2h (target to host) or h2t (host to target). With t2h, RX trailers will be printed if --print-data is used. h2t is default. This option should be combined with an applicable --desc-str option. '\n )\n base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,\n help=\n 'Description string(s) of the dumps to be. excluded. Similar to --desc-str, but all matching prefixes will be excluded from the analysis.'\n )\n base_parser.add_argument('-s', '--short-htc-header', action=\n 'store_true', help=\n 'Use 6 byte HTC header (\"old\" format) instead of 8 bytes.')\n base_parser.add_argument('-t', '--keep-timestamps', action='store_true',\n help=\n 'Keep the timestamps associated with each hexdump in the output. 
This option will only have effect if the log file contains timestamps.'\n )\n parser = argparse.ArgumentParser(prog='qca_hex_analyzer', description=\n description, parents=[base_parser])\n subparsers = parser.add_subparsers(dest='subparser_name')\n parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl', help=wmi_ctrl_help,\n description=wmi_ctrl_description, parents=[base_parser])\n parser_wmi_ctrl.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_wmi_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print WMI data message payload (and not just WMI message ID) for all encountered messages. '\n )\n parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,\n type=int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'\n )\n parser_wmi_ctrl.add_argument('--tlv', action='store_true', help=\n 'TLV analysis.Each WMI message will be interpreted as a TLV message and the content of the message will be. written out in text (instead of hexdump). If the encountered message is not supported by the parser, the hex data will be printed instead.'\n )\n parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID', nargs=\n '+', type=auto_int, help=\n \"WMI message id filter. Only WMI messages with an id matching any of the provided id's will be included in the output. If no --id | --msg-id option is given, no filtering will be performed. 
\"\n )\n parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',\n nargs='+', type=auto_int, help=\n \"WMI message id exclude filter. Similar to --id | --msg-id, but all matching id's will be excluded from the output. \"\n )\n parser_htc_ctrl = subparsers.add_parser('htc-ctrl', help=htc_ctrl_help,\n description=htc_ctrl_description, parents=[base_parser])\n parser_htc_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print HTC ctrl data message payload (and not just message ID) for all encountered messages. '\n )\n parser_htt = subparsers.add_parser('htt', help=htt_help, description=\n htt_description, parents=[base_parser])\n parser_htt.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print HTT data message payload (and not just HTT message ID) for all encountered messages. '\n )\n parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1, type=\n int, default=[1], help=\n 'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all = subparsers.add_parser('all', help=all_help, description=\n all_description, parents=[base_parser])\n parser_all.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print message payload (and not just message ID) for all encountered messages. '\n )\n parser_all.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1, type=int,\n default=[1], help=\n 'HTT service endpoint ID. 
This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1, type\n =int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'\n )\n parsed_args = parser.parse_args()\n\n\ndef main():\n global parsed_args\n load_options()\n try:\n if parsed_args.input_file:\n infp = open(parsed_args.input_file, 'r')\n else:\n infp = sys.stdin\n if parsed_args.output_file:\n outfp = open(parsed_args.output_file, 'w')\n else:\n outfp = sys.stdout\n if parsed_args.data_direction:\n if parsed_args.data_direction[0] == 't2h':\n t2h = True\n elif parsed_args.data_direction[0] == 'h2t':\n t2h = False\n else:\n sys.stderr.write('Unsupported data direction: {}\\n'.format(\n parsed_args.data_direction[0]))\n exit(1)\n else:\n t2h = False\n hf = hexfilter.HexFilterLinux(skip_timestamps=not parsed_args.\n keep_timestamps, abs_timestamps=True, dump_desc=parsed_args.\n desc_str, dump_desc_invert=parsed_args.desc_str_invert,\n log_has_timestamps=not parsed_args.no_timestamps,\n include_dump_desc_in_output=False, remove_ascii_part=True)\n if parsed_args.subparser_name == 'wmi-ctrl':\n analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],\n wmi_unified=not parsed_args.wmi_old, short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h, tlv_analysis=parsed_args.tlv,\n msg_id_filter=parsed_args.id, 
msg_id_exclude_filter=\n parsed_args.skip_id)\n if parsed_args.tlv:\n parsed_args.print_data = True\n elif parsed_args.subparser_name == 'htc-ctrl':\n analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n elif parsed_args.subparser_name == 'htt':\n analyzer = HttAnalyzer(eid=parsed_args.ep_id[0], short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h)\n elif parsed_args.subparser_name == 'all':\n analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[\n 0], htt_eid=parsed_args.htt_ep_id[0], wmi_unified=not\n parsed_args.wmi_old, short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n else:\n sys.stderr.write('Unsupported subcommand: {}\\n'.format(\n parsed_args.subparser_name))\n for line in infp:\n if hf.parse_line(line):\n hexdata = hf.get_hex()\n if analyzer.parse_hexdata(hexdata):\n str = analyzer.get_id_str()\n outfp.write(str)\n if parsed_args.print_data:\n analyzer.print_data(outfp)\n except IOError as err:\n sys.stderr.write('{}\\n'.format(err))\n except:\n type, value, tb = sys.exc_info()\n traceback.print_exc()\n pdb.post_mortem(tb)\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\ndescription = (\n 'Tool used to analyze hexdumps produced by a qca wireless kernel driver (such as ath6kl, ath10k or qcacld2.0). The hexdumps are assumed to contain dumps of the traffic between the driver and the target. No special preprocessing of the log files is required. Filter strings (description strings) can be used to limit the output (only RX or TX etc.). The driver must of course be configured to log all necessary debug data (for ath6kl and ath10k this means a proper debug mask). '\n )\nwmi_ctrl_help = (\n 'Subcommand for WMI control message parsing. This subcommand is used to extract WMI control messages from the input. '\n )\nwmi_ctrl_description = (\n \"Extracts WMI control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid WMI control message ID's will be printed together with the message enum string (from ath6kl source code). The --wmi-old option must be used if the driver does not use the WMI unified protocol (ath6kl). The WMI control message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\nhtc_ctrl_help = (\n 'Subcommand for HTC control message parsing. This subcommand is used to extract HTC control messages from the input. '\n )\nhtc_ctrl_description = (\n \"Extracts HTC control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). All valid HTC control message ID's will be printed together with the message enum string (from ath6kl source code). The message payload will also be printed together with the message ID's if the --print-data option is used. HTC control messages will always be extracted from endpoint 0.\"\n )\nhtt_help = (\n 'Subcommand for HTT message parsing. 
This subcommand is used to extract HTT messages from the input. '\n )\nhtt_description = (\n \"Extracts HTT message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid HTT message ID's will be printed together with the message enum string (from ath10k source code). The message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\nall_help = (\n 'Subcommand for parsing of all supported message types. This subcommand is used to extract both WMI control, HTC control and HTT messages from the input. '\n )\nall_description = (\n \"Extracts message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output-file). The messages can be any of the supported message types (currently only WMI controli, HTC control and HTT). --wmi-ctrl-ep-id and --htt-ep-id is used to determine from which endpoints WMI and HTT data will be extracted (see description of those options below). HTC control messages will always be extracted from ep 0. All valid message ID's will be printed together with a corresponding message enum string. The message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\n\n\ndef auto_int(x):\n return int(x, 0)\n\n\ndef load_options():\n global parsed_args\n base_parser = argparse.ArgumentParser(add_help=False)\n base_parser.add_argument('-i', '--input-file', help=\n 'Input (log) file. If omitted, stdin will be read.')\n base_parser.add_argument('-o', '--output-file', help=\n 'Output file. If omitted, the output will be written to stdout.')\n base_parser.add_argument('-n', '--no-timestamps', action='store_true',\n help='Specifies whether or not the input file contains timestamps. 
')\n base_parser.add_argument('-d', '--desc-str', nargs='+', type=str, help=\n 'Description string(s) of the dumps. Only dumps with a prefix matching any of the provided desc strings will be analyzed. If no --desc-str option is given, no description filtering will be performed. The prefix of a hexdump is the short description string before the address in each line of the dump, i.e the hexdump prefix. --desc-str is normally used to select between RX and TX logs and should be combined with a proper --data-direction option.'\n )\n base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,\n help=\n 'This option is used to specify how the hexdata should be interpreted. Valid values are: t2h (target to host) or h2t (host to target). With t2h, RX trailers will be printed if --print-data is used. h2t is default. This option should be combined with an applicable --desc-str option. '\n )\n base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,\n help=\n 'Description string(s) of the dumps to be. excluded. Similar to --desc-str, but all matching prefixes will be excluded from the analysis.'\n )\n base_parser.add_argument('-s', '--short-htc-header', action=\n 'store_true', help=\n 'Use 6 byte HTC header (\"old\" format) instead of 8 bytes.')\n base_parser.add_argument('-t', '--keep-timestamps', action='store_true',\n help=\n 'Keep the timestamps associated with each hexdump in the output. This option will only have effect if the log file contains timestamps.'\n )\n parser = argparse.ArgumentParser(prog='qca_hex_analyzer', description=\n description, parents=[base_parser])\n subparsers = parser.add_subparsers(dest='subparser_name')\n parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl', help=wmi_ctrl_help,\n description=wmi_ctrl_description, parents=[base_parser])\n parser_wmi_ctrl.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. 
If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_wmi_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print WMI data message payload (and not just WMI message ID) for all encountered messages. '\n )\n parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,\n type=int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'\n )\n parser_wmi_ctrl.add_argument('--tlv', action='store_true', help=\n 'TLV analysis.Each WMI message will be interpreted as a TLV message and the content of the message will be. written out in text (instead of hexdump). If the encountered message is not supported by the parser, the hex data will be printed instead.'\n )\n parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID', nargs=\n '+', type=auto_int, help=\n \"WMI message id filter. Only WMI messages with an id matching any of the provided id's will be included in the output. If no --id | --msg-id option is given, no filtering will be performed. \"\n )\n parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',\n nargs='+', type=auto_int, help=\n \"WMI message id exclude filter. Similar to --id | --msg-id, but all matching id's will be excluded from the output. \"\n )\n parser_htc_ctrl = subparsers.add_parser('htc-ctrl', help=htc_ctrl_help,\n description=htc_ctrl_description, parents=[base_parser])\n parser_htc_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print HTC ctrl data message payload (and not just message ID) for all encountered messages. 
'\n )\n parser_htt = subparsers.add_parser('htt', help=htt_help, description=\n htt_description, parents=[base_parser])\n parser_htt.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print HTT data message payload (and not just HTT message ID) for all encountered messages. '\n )\n parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1, type=\n int, default=[1], help=\n 'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all = subparsers.add_parser('all', help=all_help, description=\n all_description, parents=[base_parser])\n parser_all.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print message payload (and not just message ID) for all encountered messages. '\n )\n parser_all.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1, type=int,\n default=[1], help=\n 'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1, type\n =int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. 
Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'\n )\n parsed_args = parser.parse_args()\n\n\ndef main():\n global parsed_args\n load_options()\n try:\n if parsed_args.input_file:\n infp = open(parsed_args.input_file, 'r')\n else:\n infp = sys.stdin\n if parsed_args.output_file:\n outfp = open(parsed_args.output_file, 'w')\n else:\n outfp = sys.stdout\n if parsed_args.data_direction:\n if parsed_args.data_direction[0] == 't2h':\n t2h = True\n elif parsed_args.data_direction[0] == 'h2t':\n t2h = False\n else:\n sys.stderr.write('Unsupported data direction: {}\\n'.format(\n parsed_args.data_direction[0]))\n exit(1)\n else:\n t2h = False\n hf = hexfilter.HexFilterLinux(skip_timestamps=not parsed_args.\n keep_timestamps, abs_timestamps=True, dump_desc=parsed_args.\n desc_str, dump_desc_invert=parsed_args.desc_str_invert,\n log_has_timestamps=not parsed_args.no_timestamps,\n include_dump_desc_in_output=False, remove_ascii_part=True)\n if parsed_args.subparser_name == 'wmi-ctrl':\n analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],\n wmi_unified=not parsed_args.wmi_old, short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h, tlv_analysis=parsed_args.tlv,\n msg_id_filter=parsed_args.id, msg_id_exclude_filter=\n parsed_args.skip_id)\n if parsed_args.tlv:\n parsed_args.print_data = True\n elif parsed_args.subparser_name == 'htc-ctrl':\n analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n elif parsed_args.subparser_name == 'htt':\n analyzer = HttAnalyzer(eid=parsed_args.ep_id[0], short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h)\n elif parsed_args.subparser_name == 'all':\n analyzer = 
AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[\n 0], htt_eid=parsed_args.htt_ep_id[0], wmi_unified=not\n parsed_args.wmi_old, short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n else:\n sys.stderr.write('Unsupported subcommand: {}\\n'.format(\n parsed_args.subparser_name))\n for line in infp:\n if hf.parse_line(line):\n hexdata = hf.get_hex()\n if analyzer.parse_hexdata(hexdata):\n str = analyzer.get_id_str()\n outfp.write(str)\n if parsed_args.print_data:\n analyzer.print_data(outfp)\n except IOError as err:\n sys.stderr.write('{}\\n'.format(err))\n except:\n type, value, tb = sys.exc_info()\n traceback.print_exc()\n pdb.post_mortem(tb)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "from collections import namedtuple\nimport argparse\nimport pdb\nimport traceback\nimport sys\nimport os\nfrom qca_hex_analyzer import WmiCtrlAnalyzer, HtcCtrlAnalyzer, HttAnalyzer, AllAnalyzer\nimport hexfilter\ndescription = (\n 'Tool used to analyze hexdumps produced by a qca wireless kernel driver (such as ath6kl, ath10k or qcacld2.0). The hexdumps are assumed to contain dumps of the traffic between the driver and the target. No special preprocessing of the log files is required. Filter strings (description strings) can be used to limit the output (only RX or TX etc.). The driver must of course be configured to log all necessary debug data (for ath6kl and ath10k this means a proper debug mask). '\n )\nwmi_ctrl_help = (\n 'Subcommand for WMI control message parsing. This subcommand is used to extract WMI control messages from the input. '\n )\nwmi_ctrl_description = (\n \"Extracts WMI control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid WMI control message ID's will be printed together with the message enum string (from ath6kl source code). The --wmi-old option must be used if the driver does not use the WMI unified protocol (ath6kl). The WMI control message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\nhtc_ctrl_help = (\n 'Subcommand for HTC control message parsing. This subcommand is used to extract HTC control messages from the input. '\n )\nhtc_ctrl_description = (\n \"Extracts HTC control message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). All valid HTC control message ID's will be printed together with the message enum string (from ath6kl source code). 
The message payload will also be printed together with the message ID's if the --print-data option is used. HTC control messages will always be extracted from endpoint 0.\"\n )\nhtt_help = (\n 'Subcommand for HTT message parsing. This subcommand is used to extract HTT messages from the input. '\n )\nhtt_description = (\n \"Extracts HTT message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output -file). --ep-id is used to determine from which HTC endpoint the data will be extracted (see description of that option below). All valid HTT message ID's will be printed together with the message enum string (from ath10k source code). The message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\nall_help = (\n 'Subcommand for parsing of all supported message types. This subcommand is used to extract both WMI control, HTC control and HTT messages from the input. '\n )\nall_description = (\n \"Extracts message hexdata from an input (--input-file). The extracted messages will be printed to the output (--output-file). The messages can be any of the supported message types (currently only WMI controli, HTC control and HTT). --wmi-ctrl-ep-id and --htt-ep-id is used to determine from which endpoints WMI and HTT data will be extracted (see description of those options below). HTC control messages will always be extracted from ep 0. All valid message ID's will be printed together with a corresponding message enum string. The message payload will also be printed together with message ID's if the --print-data option is used.\"\n )\n\n\ndef auto_int(x):\n return int(x, 0)\n\n\ndef load_options():\n global parsed_args\n base_parser = argparse.ArgumentParser(add_help=False)\n base_parser.add_argument('-i', '--input-file', help=\n 'Input (log) file. If omitted, stdin will be read.')\n base_parser.add_argument('-o', '--output-file', help=\n 'Output file. 
If omitted, the output will be written to stdout.')\n base_parser.add_argument('-n', '--no-timestamps', action='store_true',\n help='Specifies whether or not the input file contains timestamps. ')\n base_parser.add_argument('-d', '--desc-str', nargs='+', type=str, help=\n 'Description string(s) of the dumps. Only dumps with a prefix matching any of the provided desc strings will be analyzed. If no --desc-str option is given, no description filtering will be performed. The prefix of a hexdump is the short description string before the address in each line of the dump, i.e the hexdump prefix. --desc-str is normally used to select between RX and TX logs and should be combined with a proper --data-direction option.'\n )\n base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,\n help=\n 'This option is used to specify how the hexdata should be interpreted. Valid values are: t2h (target to host) or h2t (host to target). With t2h, RX trailers will be printed if --print-data is used. h2t is default. This option should be combined with an applicable --desc-str option. '\n )\n base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,\n help=\n 'Description string(s) of the dumps to be. excluded. Similar to --desc-str, but all matching prefixes will be excluded from the analysis.'\n )\n base_parser.add_argument('-s', '--short-htc-header', action=\n 'store_true', help=\n 'Use 6 byte HTC header (\"old\" format) instead of 8 bytes.')\n base_parser.add_argument('-t', '--keep-timestamps', action='store_true',\n help=\n 'Keep the timestamps associated with each hexdump in the output. 
This option will only have effect if the log file contains timestamps.'\n )\n parser = argparse.ArgumentParser(prog='qca_hex_analyzer', description=\n description, parents=[base_parser])\n subparsers = parser.add_subparsers(dest='subparser_name')\n parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl', help=wmi_ctrl_help,\n description=wmi_ctrl_description, parents=[base_parser])\n parser_wmi_ctrl.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_wmi_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print WMI data message payload (and not just WMI message ID) for all encountered messages. '\n )\n parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,\n type=int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'\n )\n parser_wmi_ctrl.add_argument('--tlv', action='store_true', help=\n 'TLV analysis.Each WMI message will be interpreted as a TLV message and the content of the message will be. written out in text (instead of hexdump). If the encountered message is not supported by the parser, the hex data will be printed instead.'\n )\n parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID', nargs=\n '+', type=auto_int, help=\n \"WMI message id filter. Only WMI messages with an id matching any of the provided id's will be included in the output. If no --id | --msg-id option is given, no filtering will be performed. 
\"\n )\n parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',\n nargs='+', type=auto_int, help=\n \"WMI message id exclude filter. Similar to --id | --msg-id, but all matching id's will be excluded from the output. \"\n )\n parser_htc_ctrl = subparsers.add_parser('htc-ctrl', help=htc_ctrl_help,\n description=htc_ctrl_description, parents=[base_parser])\n parser_htc_ctrl.add_argument('-p', '--print-data', action='store_true',\n help=\n 'Print HTC ctrl data message payload (and not just message ID) for all encountered messages. '\n )\n parser_htt = subparsers.add_parser('htt', help=htt_help, description=\n htt_description, parents=[base_parser])\n parser_htt.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print HTT data message payload (and not just HTT message ID) for all encountered messages. '\n )\n parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1, type=\n int, default=[1], help=\n 'HTT service endpoint ID. This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all = subparsers.add_parser('all', help=all_help, description=\n all_description, parents=[base_parser])\n parser_all.add_argument('-p', '--print-data', action='store_true', help\n =\n 'Print message payload (and not just message ID) for all encountered messages. '\n )\n parser_all.add_argument('--wmi-old', action='store_true', help=\n 'Specifies whether or not the WMI messages are according to the \"old\" WMI protocol. If not set, the messages will be interpreted according to the unified WMI format'\n )\n parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1, type=int,\n default=[1], help=\n 'HTT service endpoint ID. 
This is the endpoint where the HTT data is expected to be present. Make sure the endpoint matches the endpoint id associated with the HTT endpoint (service id 0x300) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 1 will be used.'\n )\n parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1, type\n =int, default=[2], help=\n 'WMI control service endpoint ID. This is the endpoint where the WMI control data is expected to be present. Make sure the endpoint matches the endpoint id associated with the control service endpoint (service id 0x100) of the driver (the endpoint received from the target in the HTC service connect response). If this option is omitted a default value of 2 will be used.'\n )\n parsed_args = parser.parse_args()\n\n\ndef main():\n global parsed_args\n load_options()\n try:\n if parsed_args.input_file:\n infp = open(parsed_args.input_file, 'r')\n else:\n infp = sys.stdin\n if parsed_args.output_file:\n outfp = open(parsed_args.output_file, 'w')\n else:\n outfp = sys.stdout\n if parsed_args.data_direction:\n if parsed_args.data_direction[0] == 't2h':\n t2h = True\n elif parsed_args.data_direction[0] == 'h2t':\n t2h = False\n else:\n sys.stderr.write('Unsupported data direction: {}\\n'.format(\n parsed_args.data_direction[0]))\n exit(1)\n else:\n t2h = False\n hf = hexfilter.HexFilterLinux(skip_timestamps=not parsed_args.\n keep_timestamps, abs_timestamps=True, dump_desc=parsed_args.\n desc_str, dump_desc_invert=parsed_args.desc_str_invert,\n log_has_timestamps=not parsed_args.no_timestamps,\n include_dump_desc_in_output=False, remove_ascii_part=True)\n if parsed_args.subparser_name == 'wmi-ctrl':\n analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],\n wmi_unified=not parsed_args.wmi_old, short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h, tlv_analysis=parsed_args.tlv,\n msg_id_filter=parsed_args.id, 
msg_id_exclude_filter=\n parsed_args.skip_id)\n if parsed_args.tlv:\n parsed_args.print_data = True\n elif parsed_args.subparser_name == 'htc-ctrl':\n analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n elif parsed_args.subparser_name == 'htt':\n analyzer = HttAnalyzer(eid=parsed_args.ep_id[0], short_htc_hdr=\n parsed_args.short_htc_header, timestamps=parsed_args.\n keep_timestamps, t2h=t2h)\n elif parsed_args.subparser_name == 'all':\n analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[\n 0], htt_eid=parsed_args.htt_ep_id[0], wmi_unified=not\n parsed_args.wmi_old, short_htc_hdr=parsed_args.\n short_htc_header, timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n else:\n sys.stderr.write('Unsupported subcommand: {}\\n'.format(\n parsed_args.subparser_name))\n for line in infp:\n if hf.parse_line(line):\n hexdata = hf.get_hex()\n if analyzer.parse_hexdata(hexdata):\n str = analyzer.get_id_str()\n outfp.write(str)\n if parsed_args.print_data:\n analyzer.print_data(outfp)\n except IOError as err:\n sys.stderr.write('{}\\n'.format(err))\n except:\n type, value, tb = sys.exc_info()\n traceback.print_exc()\n pdb.post_mortem(tb)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from collections import namedtuple\n\nimport argparse\nimport pdb\nimport traceback\nimport sys\nimport os\nfrom qca_hex_analyzer import WmiCtrlAnalyzer, HtcCtrlAnalyzer, HttAnalyzer, AllAnalyzer\nimport hexfilter\n\ndescription = \\\n \"Tool used to analyze hexdumps produced by a qca wireless kernel \" \\\n \"driver (such as ath6kl, ath10k or qcacld2.0). \" \\\n \"The hexdumps are assumed to contain dumps of the traffic \" \\\n \"between the driver and the target. \" \\\n \"No special preprocessing of the log files is required. \" \\\n \"Filter strings (description strings) can be used to limit the output \" \\\n \"(only RX or TX etc.). \" \\\n \"The driver must of course be configured to log all necessary debug \" \\\n \"data (for ath6kl and ath10k this means a proper debug mask). \"\n\nwmi_ctrl_help = \\\n \"Subcommand for WMI control message parsing. \" \\\n \"This subcommand is used to extract WMI control messages from the input. \"\n\nwmi_ctrl_description = \\\n \"Extracts WMI control message hexdata from an input (--input-file). \" \\\n \"The extracted messages will be printed to the output (--output -file). \" \\\n \"--ep-id is used to determine from which HTC endpoint the data will \" \\\n \"be extracted (see description of that option below). \" \\\n \"All valid WMI control message ID's will be printed together with the \" \\\n \"message enum string (from ath6kl source code). \" \\\n \"The --wmi-old option must be used if the driver does not use the WMI \" \\\n \"unified protocol (ath6kl). \" \\\n \"The WMI control message payload will also be printed together with \" \\\n \"message ID's if the --print-data option is used.\"\n\nhtc_ctrl_help = \\\n \"Subcommand for HTC control message parsing. \" \\\n \"This subcommand is used to extract HTC control messages from the input. \"\n\nhtc_ctrl_description = \\\n \"Extracts HTC control message hexdata from an input (--input-file). 
\" \\\n \"The extracted messages will be printed to the output (--output -file). \" \\\n \"All valid HTC control message ID's will be printed together with the \" \\\n \"message enum string (from ath6kl source code). \" \\\n \"The message payload will also be printed together with the \" \\\n \"message ID's if the --print-data option is used. \" \\\n \"HTC control messages will always be extracted from endpoint 0.\"\n\nhtt_help = \\\n \"Subcommand for HTT message parsing. \" \\\n \"This subcommand is used to extract HTT messages from the input. \"\n\nhtt_description = \\\n \"Extracts HTT message hexdata from an input (--input-file). \" \\\n \"The extracted messages will be printed to the output (--output -file). \" \\\n \"--ep-id is used to determine from which HTC endpoint the data will \" \\\n \"be extracted (see description of that option below). \" \\\n \"All valid HTT message ID's will be printed together with the \" \\\n \"message enum string (from ath10k source code). \" \\\n \"The message payload will also be printed together with \" \\\n \"message ID's if the --print-data option is used.\"\n\nall_help = \\\n \"Subcommand for parsing of all supported message types. \" \\\n \"This subcommand is used to extract both WMI control, \" \\\n \"HTC control and HTT messages from the input. \"\n\nall_description = \\\n \"Extracts message hexdata from an input (--input-file). \" \\\n \"The extracted messages will be printed to the output (--output-file). \" \\\n \"The messages can be any of the supported message types \" \\\n \"(currently only WMI controli, HTC control and HTT). \" \\\n \"--wmi-ctrl-ep-id and --htt-ep-id is used to determine from which \" \\\n \"endpoints WMI and HTT data will be extracted \" \\\n \"(see description of those options below). \" \\\n \"HTC control messages will always be extracted from ep 0. \" \\\n \"All valid message ID's will be printed together \" \\\n \"with a corresponding message enum string. 
\" \\\n \"The message payload will also be printed together with \" \\\n \"message ID's if the --print-data option is used.\"\n\n\ndef auto_int(x):\n\n return int(x, 0)\n\n\ndef load_options():\n\n global parsed_args\n base_parser = argparse.ArgumentParser(add_help=False)\n\n base_parser.add_argument('-i', '--input-file',\n help=\"Input (log) file. If omitted, \"\n \"stdin will be read.\")\n base_parser.add_argument('-o', '--output-file',\n help=\"Output file. If omitted, \"\n \"the output will be written to stdout.\")\n base_parser.add_argument('-n', '--no-timestamps', action=\"store_true\",\n help=\"Specifies whether or not the input file \"\n \"contains timestamps. \")\n base_parser.add_argument('-d', '--desc-str', nargs='+', type=str,\n help=\"Description string(s) of the dumps. \"\n \"Only dumps with a prefix \"\n \"matching any of the provided desc strings \"\n \"will be analyzed. \"\n \"If no --desc-str option is given, no \"\n \"description filtering will be performed. \"\n \"The prefix of a hexdump is the short \"\n \"description string before the address \"\n \"in each line of the dump, i.e the hexdump \"\n \"prefix. \"\n \"--desc-str is normally used to select \"\n \"between RX and TX logs and should be \"\n \"combined with a proper --data-direction \"\n \"option.\")\n base_parser.add_argument('-a', '--data-direction', nargs=1, type=str,\n help=\"This option is used to specify how the \"\n \"hexdata should be interpreted. \"\n \"Valid values are: \"\n \"t2h (target to host) or h2t (host to target). \"\n \"With t2h, RX trailers will be printed if \"\n \"--print-data is used. h2t is default. \"\n \"This option should be combined with an \"\n \"applicable --desc-str option. \")\n base_parser.add_argument('-v', '--desc-str-invert', nargs='+', type=str,\n help=\"Description string(s) of the dumps to be. \"\n \"excluded. 
Similar to --desc-str, but all \"\n \"matching prefixes will be excluded from \"\n \"the analysis.\")\n base_parser.add_argument('-s', '--short-htc-header', action=\"store_true\",\n help=\"Use 6 byte HTC header (\\\"old\\\" format) \"\n \"instead of 8 bytes.\")\n base_parser.add_argument('-t', '--keep-timestamps', action=\"store_true\",\n help=\"Keep the timestamps associated with each \"\n \"hexdump in the output. \"\n \"This option will only have effect if the \"\n \"log file contains timestamps.\")\n\n parser = argparse.ArgumentParser(prog=\"qca_hex_analyzer\",\n description=description,\n parents=[base_parser])\n\n subparsers = parser.add_subparsers(dest=\"subparser_name\")\n parser_wmi_ctrl = subparsers.add_parser('wmi-ctrl',\n help=wmi_ctrl_help,\n description=wmi_ctrl_description,\n parents=[base_parser])\n parser_wmi_ctrl.add_argument('--wmi-old', action=\"store_true\",\n help=\"Specifies whether or not the WMI messages \"\n \"are according to the \\\"old\\\" WMI protocol. \"\n \"If not set, the messages will be interpreted \"\n \"according to the unified WMI format\")\n parser_wmi_ctrl.add_argument('-p', '--print-data', action=\"store_true\",\n help=\"Print WMI data message payload (and not just \"\n \"WMI message ID) for all encountered messages. \")\n parser_wmi_ctrl.add_argument('-e', '--ep-id', metavar='ID', nargs=1,\n type=int, default=[2],\n help=\"WMI control service endpoint ID. \"\n \"This is the endpoint where the WMI control data is \"\n \"expected to be present. Make sure the endpoint \"\n \"matches the endpoint id associated with the \"\n \"control service endpoint (service id 0x100) \"\n \"of the driver (the endpoint received from the \"\n \"target in the HTC service connect response). 
\"\n \"If this option is omitted a default value of 2 \"\n \"will be used.\")\n parser_wmi_ctrl.add_argument('--tlv', action=\"store_true\",\n help=\"TLV analysis.\"\n \"Each WMI message will be interpreted as a TLV \"\n \"message and the content of the message will be. \"\n \"written out in text (instead of hexdump). \"\n \"If the encountered message is not supported by \"\n \"the parser, the hex data will be printed instead.\")\n parser_wmi_ctrl.add_argument('--id', '--msg-id', metavar='ID',\n nargs='+', type=auto_int,\n help=\"WMI message id filter. \"\n \"Only WMI messages with an id matching any of the \"\n \"provided id's will be included in the output. \"\n \"If no --id | --msg-id option is given, no \"\n \"filtering will be performed. \")\n parser_wmi_ctrl.add_argument('--skip-id', '--skip-msg-id', metavar='ID',\n nargs='+', type=auto_int,\n help=\"WMI message id exclude filter. \"\n \"Similar to --id | --msg-id, but all matching \"\n \"id's will be excluded from the output. \")\n parser_htc_ctrl = subparsers.add_parser('htc-ctrl',\n help=htc_ctrl_help,\n description=htc_ctrl_description,\n parents=[base_parser])\n parser_htc_ctrl.add_argument('-p', '--print-data', action=\"store_true\",\n help=\"Print HTC ctrl data message payload (and not just \"\n \"message ID) for all encountered messages. \")\n parser_htt = subparsers.add_parser('htt',\n help=htt_help,\n description=htt_description,\n parents=[base_parser])\n parser_htt.add_argument('-p', '--print-data', action=\"store_true\",\n help=\"Print HTT data message payload (and not just \"\n \"HTT message ID) for all encountered messages. \")\n parser_htt.add_argument('-e', '--ep-id', metavar='ID', nargs=1,\n type=int, default=[1],\n help=\"HTT service endpoint ID. \"\n \"This is the endpoint where the HTT data is \"\n \"expected to be present. 
Make sure the endpoint \"\n \"matches the endpoint id associated with the \"\n \"HTT endpoint (service id 0x300) \"\n \"of the driver (the endpoint received from the \"\n \"target in the HTC service connect response). \"\n \"If this option is omitted a default value of 1 \"\n \"will be used.\")\n parser_all = subparsers.add_parser('all',\n help=all_help,\n description=all_description,\n parents=[base_parser])\n parser_all.add_argument('-p', '--print-data', action=\"store_true\",\n help=\"Print message payload (and not just \"\n \"message ID) for all encountered messages. \")\n parser_all.add_argument('--wmi-old', action=\"store_true\",\n help=\"Specifies whether or not the WMI messages \"\n \"are according to the \\\"old\\\" WMI protocol. \"\n \"If not set, the messages will be interpreted \"\n \"according to the unified WMI format\")\n parser_all.add_argument('--htt-ep-id', metavar='ID', nargs=1,\n type=int, default=[1],\n help=\"HTT service endpoint ID. \"\n \"This is the endpoint where the HTT data is \"\n \"expected to be present. Make sure the endpoint \"\n \"matches the endpoint id associated with the \"\n \"HTT endpoint (service id 0x300) \"\n \"of the driver (the endpoint received from the \"\n \"target in the HTC service connect response). \"\n \"If this option is omitted a default value of 1 \"\n \"will be used.\")\n parser_all.add_argument('--wmi-ctrl-ep-id', metavar='ID', nargs=1,\n type=int, default=[2],\n help=\"WMI control service endpoint ID. \"\n \"This is the endpoint where the WMI control data is \"\n \"expected to be present. Make sure the endpoint \"\n \"matches the endpoint id associated with the \"\n \"control service endpoint (service id 0x100) \"\n \"of the driver (the endpoint received from the \"\n \"target in the HTC service connect response). 
\"\n \"If this option is omitted a default value of 2 \"\n \"will be used.\")\n parsed_args = parser.parse_args()\n\n\ndef main():\n global parsed_args\n load_options()\n\n try:\n if parsed_args.input_file:\n infp = open(parsed_args.input_file, \"r\")\n else:\n infp = sys.stdin\n if parsed_args.output_file:\n outfp = open(parsed_args.output_file, \"w\")\n else:\n outfp = sys.stdout\n\n if parsed_args.data_direction:\n if parsed_args.data_direction[0] == 't2h':\n t2h = True\n elif parsed_args.data_direction[0] == 'h2t':\n t2h = False\n else:\n sys.stderr.write('Unsupported data direction: {}\\n'.format(parsed_args.data_direction[0]))\n exit(1)\n else:\n # Interpret the data as host -> target is the default behaviour\n t2h = False\n\n hf = hexfilter.HexFilterLinux(skip_timestamps=(not parsed_args.keep_timestamps),\n abs_timestamps=True,\n dump_desc=parsed_args.desc_str,\n dump_desc_invert=parsed_args.desc_str_invert,\n log_has_timestamps=(not parsed_args.no_timestamps),\n include_dump_desc_in_output=False,\n remove_ascii_part=True)\n\n if parsed_args.subparser_name == 'wmi-ctrl':\n analyzer = WmiCtrlAnalyzer(eid=parsed_args.ep_id[0],\n wmi_unified=(not parsed_args.wmi_old),\n short_htc_hdr=parsed_args.short_htc_header,\n timestamps=parsed_args.keep_timestamps,\n t2h=t2h,\n tlv_analysis=parsed_args.tlv,\n msg_id_filter=parsed_args.id,\n msg_id_exclude_filter=parsed_args.skip_id)\n if parsed_args.tlv:\n parsed_args.print_data = True\n elif parsed_args.subparser_name == 'htc-ctrl':\n analyzer = HtcCtrlAnalyzer(short_htc_hdr=parsed_args.short_htc_header,\n timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n elif parsed_args.subparser_name == 'htt':\n analyzer = HttAnalyzer(eid=parsed_args.ep_id[0],\n short_htc_hdr=parsed_args.short_htc_header,\n timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n elif parsed_args.subparser_name == 'all':\n analyzer = AllAnalyzer(wmi_ctrl_eid=parsed_args.wmi_ctrl_ep_id[0],\n htt_eid=parsed_args.htt_ep_id[0],\n wmi_unified=(not 
parsed_args.wmi_old),\n short_htc_hdr=parsed_args.short_htc_header,\n timestamps=parsed_args.keep_timestamps,\n t2h=t2h)\n else:\n sys.stderr.write('Unsupported subcommand: {}\\n'.format(parsed_args.subparser_name))\n\n for line in infp:\n if hf.parse_line(line):\n hexdata = hf.get_hex()\n if analyzer.parse_hexdata(hexdata):\n str = analyzer.get_id_str()\n outfp.write(str)\n if parsed_args.print_data:\n analyzer.print_data(outfp)\n\n except IOError as err:\n sys.stderr.write('{}\\n'.format(err))\n except:\n type, value, tb = sys.exc_info()\n traceback.print_exc()\n pdb.post_mortem(tb)\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
1,
4,
5,
6,
7
]
}
|
[
1,
4,
5,
6,
7
] |
from armulator.armv6.bits_ops import add_with_carry, bit_not
from armulator.armv6.enums import InstrSet
from armulator.armv6.opcodes.opcode import Opcode
class SubsPcLrThumb(Opcode):
def __init__(self, instruction, imm32, n):
super().__init__(instruction)
self.imm32 = imm32
self.n = n
def execute(self, processor):
if processor.condition_passed():
if (processor.registers.current_mode_is_user_or_system() or
processor.registers.current_instr_set() == InstrSet.THUMB_EE):
print('unpredictable')
else:
operand2 = self.imm32
result = add_with_carry(processor.registers.get(self.n), bit_not(operand2, 32), 1)[0]
if (processor.registers.cpsr.m == 0b11010 and
processor.registers.cpsr.j and
processor.registers.cpsr.t):
print('unpredictable')
else:
processor.branch_write_pc(result)
|
normal
|
{
"blob_id": "89376b2464dfb724197a1c1e164af8277e03ad59",
"index": 2507,
"step-1": "<mask token>\n\n\nclass SubsPcLrThumb(Opcode):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass SubsPcLrThumb(Opcode):\n\n def __init__(self, instruction, imm32, n):\n super().__init__(instruction)\n self.imm32 = imm32\n self.n = n\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass SubsPcLrThumb(Opcode):\n\n def __init__(self, instruction, imm32, n):\n super().__init__(instruction)\n self.imm32 = imm32\n self.n = n\n\n def execute(self, processor):\n if processor.condition_passed():\n if processor.registers.current_mode_is_user_or_system(\n ) or processor.registers.current_instr_set(\n ) == InstrSet.THUMB_EE:\n print('unpredictable')\n else:\n operand2 = self.imm32\n result = add_with_carry(processor.registers.get(self.n),\n bit_not(operand2, 32), 1)[0]\n if (processor.registers.cpsr.m == 26 and processor.\n registers.cpsr.j and processor.registers.cpsr.t):\n print('unpredictable')\n else:\n processor.branch_write_pc(result)\n",
"step-4": "from armulator.armv6.bits_ops import add_with_carry, bit_not\nfrom armulator.armv6.enums import InstrSet\nfrom armulator.armv6.opcodes.opcode import Opcode\n\n\nclass SubsPcLrThumb(Opcode):\n\n def __init__(self, instruction, imm32, n):\n super().__init__(instruction)\n self.imm32 = imm32\n self.n = n\n\n def execute(self, processor):\n if processor.condition_passed():\n if processor.registers.current_mode_is_user_or_system(\n ) or processor.registers.current_instr_set(\n ) == InstrSet.THUMB_EE:\n print('unpredictable')\n else:\n operand2 = self.imm32\n result = add_with_carry(processor.registers.get(self.n),\n bit_not(operand2, 32), 1)[0]\n if (processor.registers.cpsr.m == 26 and processor.\n registers.cpsr.j and processor.registers.cpsr.t):\n print('unpredictable')\n else:\n processor.branch_write_pc(result)\n",
"step-5": "from armulator.armv6.bits_ops import add_with_carry, bit_not\nfrom armulator.armv6.enums import InstrSet\nfrom armulator.armv6.opcodes.opcode import Opcode\n\n\nclass SubsPcLrThumb(Opcode):\n def __init__(self, instruction, imm32, n):\n super().__init__(instruction)\n self.imm32 = imm32\n self.n = n\n\n def execute(self, processor):\n if processor.condition_passed():\n if (processor.registers.current_mode_is_user_or_system() or\n processor.registers.current_instr_set() == InstrSet.THUMB_EE):\n print('unpredictable')\n else:\n operand2 = self.imm32\n result = add_with_carry(processor.registers.get(self.n), bit_not(operand2, 32), 1)[0]\n if (processor.registers.cpsr.m == 0b11010 and\n processor.registers.cpsr.j and\n processor.registers.cpsr.t):\n print('unpredictable')\n else:\n processor.branch_write_pc(result)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
year = int(input('西暦>'))
if year % 4 == 0 and year % 100 != 0:
print('閏年')
pass
elif year % 400 == 0:
print('閏年')
pass
else:
print('平年')
pass
|
normal
|
{
"blob_id": "b381d1110e6a7570cd872d689a43aba2d2580a23",
"index": 8449,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif year % 4 == 0 and year % 100 != 0:\n print('閏年')\n pass\nelif year % 400 == 0:\n print('閏年')\n pass\nelse:\n print('平年')\n pass\n",
"step-3": "year = int(input('西暦>'))\nif year % 4 == 0 and year % 100 != 0:\n print('閏年')\n pass\nelif year % 400 == 0:\n print('閏年')\n pass\nelse:\n print('平年')\n pass\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def register_int_signal_handler():
def stop_thread_handler(signum, frame):
log.info('Received signal {0}. Will stop all task threads'.format(
signum))
for _ in range(len(THREAD_STOP_FLAGS)):
THREAD_STOP_FLAGS[_] = True
if platform.platform().startswith('Windows'):
signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)
else:
signal.signal(signal.SIGINT, stop_thread_handler)
<|reserved_special_token_0|>
def create_task_list(path_list):
"""
Walks path recursively, and create a task list
:param path_list: a list of (path, rating)
:return: a list of ImportTask objects
"""
current_video_id = Video.objects.all().aggregate(Max('video_id'))[
'video_id__max']
if not current_video_id:
current_video_id = 0
task_list = []
for path, rating in path_list:
base_path = os.path.split(path)[0]
if os.path.isfile(path):
file_name = os.path.basename(path)
if is_valid_video_file(path, file_name):
video_id, current_video_id = next_video_id(current_video_id,
path)
task_list.append(ImportTask(video_id, base_path, path, rating))
continue
for root, dirs, files in os.walk(path):
for file_name in files:
try:
file_path = os.path.join(root, file_name)
if os.path.isdir(file_path):
continue
if is_valid_video_file(file_path, file_name):
video_id, current_video_id = next_video_id(
current_video_id, file_path)
task_list.append(ImportTask(video_id, base_path,
file_path, rating))
except:
log.error('#Error while proceeding: {0}'.format(file_name))
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value,
exc_traceback, limit=2, file=sys.stdout)
return task_list
def start_tasks(task_list):
global task_queue
for task in task_list:
task_queue.put(task)
if not THREAD_STOP_FLAGS:
for _ in range(MAX_THREAD_NUM):
THREAD_STOP_FLAGS.append(True)
if not os.path.isdir(COVER_DIR):
os.mkdir(COVER_DIR)
if not os.path.isdir(THUMB_DIR):
os.mkdir(THUMB_DIR)
if not os.path.isdir(FLIP_DIR):
os.mkdir(FLIP_DIR)
for _ in range(MAX_THREAD_NUM):
if THREAD_STOP_FLAGS[_]:
t = Thread(target=import_worker, kwargs={'thread_index': _})
t.name = str(_)
t.daemon = False
t.start()
task_queue.join()
<|reserved_special_token_0|>
class ImportTask(object):
def __init__(self, video_id, base_path, path, rating=Video.P):
"""
Create an import task object.
:param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.
:param base_path: path prefix that will be ignored when creating keywords from path.
:param path: path of the file
:param rating: rating of the video, highest by default.
"""
self.video_id = video_id
self.base_path = base_path
self.file_path = path
self.rating = rating
<|reserved_special_token_0|>
def is_valid_video_file(file_path, file_name):
if file_name.startswith('.') or not file_name.endswith('.mp4'):
return False
if os.path.getsize(file_path) == 0:
log.info('Remove invalid video file: {0}'.format(file_path))
os.remove(file_path)
return False
return True
def load_keyword_blacklist_from_file():
blacklist = set()
keyword_file = 'keywords.blacklist'
try:
with open(keyword_file, 'r') as kfp:
for line in kfp:
line = line.strip('\n')
if line:
blacklist.add(line)
log.info('Keywords blacklist: {0}'.format(blacklist))
except Exception as e:
log.error('Error while processing {0}:{1}'.format(keyword_file, e))
return blacklist
def get_keywords(prefix, file_path, blacklist):
"""
Get keywords from file path
:param prefix: Prefix of the dir path, so we can ignore them
:param file_path: full path of the video file
:param blacklist: A set of words/symbols that should be ignored
:return: a list of keywords
"""
file_path = str(file_path).replace(prefix, '')
file_path = os.path.splitext(file_path)[0]
file_path = str(file_path).lower()
for bad_keyword in blacklist:
file_path = file_path.replace(bad_keyword, ' ')
file_path = re.sub('\\s+', ' ', file_path)
keywords = file_path.split(' ')
keywords = [k for k in keywords if k]
return keywords
class KeywordDictDataObj(object):
def __init__(self):
self.count = 0
self.files = set()
<|reserved_special_token_0|>
def gen_thumb(video_path, thumb_path):
"""
Generate thumb image for the given video, and grabs duration from output
:return: (success, duration)
"""
if os.path.isfile(thumb_path):
os.remove(thumb_path)
global THUMB_SIZE
cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',
'-f', 'apng', '-s', THUMB_SIZE, thumb_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
output = p.communicate()[1]
duration = search_duration_from_text(output)
if not duration:
tlog = get_logger(current_thread().name)
tlog.error('Failed to find duration for {0}'.format(video_path))
duration = 0
return p.returncode == 0, duration
def gen_flips(video_path, video_id, duration, flip_path, flip_num):
"""
Generate flips for the given video
:param video_path: path of the video
:param video_id: id of the file
:param duration: duration of video in seconds
:param flip_path: path dir to put the flips
:param flip_num: number of flips to generate
:return: True on success, False otherwise
"""
if not G_GEN_IMAGE:
return True
duration = float(duration)
flip_num = float(flip_num)
interval = duration / flip_num
if interval <= 0.0:
tlog = get_logger(current_thread().name)
tlog.error('Cannot generate flips. Duration: {0} FlipNum:{1}'.
format(duration, flip_num))
return False
fps = 'fps=1/' + str(interval)
global THUMB_SIZE
flip_path = os.path.join(flip_path, str(video_id))
for _ in range(FLIP_NUM + 3):
flip_file = '{0}-{1}.png'.format(flip_path, _)
if os.path.isfile(flip_file):
os.remove(flip_file)
flip_path_template = flip_path + '-%d.png'
cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,
flip_path_template]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
<|reserved_special_token_0|>
def search_duration_from_text(text):
regExp = re.compile('Duration: (\\d{2}):(\\d{2}):(\\d{2})')
result = regExp.search(text, re.M | re.U)
if result is not None:
hour, min, sec = result.groups()
duration = int(hour) * 3600 + int(min) * 60 + int(sec)
return duration
return None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def register_int_signal_handler():
def stop_thread_handler(signum, frame):
log.info('Received signal {0}. Will stop all task threads'.format(
signum))
for _ in range(len(THREAD_STOP_FLAGS)):
THREAD_STOP_FLAGS[_] = True
if platform.platform().startswith('Windows'):
signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)
else:
signal.signal(signal.SIGINT, stop_thread_handler)
def next_video_id(current, path):
existing = Video.objects.filter(path=path)
if existing:
return existing[0].video_id, current
current += 1
return current, current
def create_task_list(path_list):
"""
Walks path recursively, and create a task list
:param path_list: a list of (path, rating)
:return: a list of ImportTask objects
"""
current_video_id = Video.objects.all().aggregate(Max('video_id'))[
'video_id__max']
if not current_video_id:
current_video_id = 0
task_list = []
for path, rating in path_list:
base_path = os.path.split(path)[0]
if os.path.isfile(path):
file_name = os.path.basename(path)
if is_valid_video_file(path, file_name):
video_id, current_video_id = next_video_id(current_video_id,
path)
task_list.append(ImportTask(video_id, base_path, path, rating))
continue
for root, dirs, files in os.walk(path):
for file_name in files:
try:
file_path = os.path.join(root, file_name)
if os.path.isdir(file_path):
continue
if is_valid_video_file(file_path, file_name):
video_id, current_video_id = next_video_id(
current_video_id, file_path)
task_list.append(ImportTask(video_id, base_path,
file_path, rating))
except:
log.error('#Error while proceeding: {0}'.format(file_name))
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value,
exc_traceback, limit=2, file=sys.stdout)
return task_list
def start_tasks(task_list):
global task_queue
for task in task_list:
task_queue.put(task)
if not THREAD_STOP_FLAGS:
for _ in range(MAX_THREAD_NUM):
THREAD_STOP_FLAGS.append(True)
if not os.path.isdir(COVER_DIR):
os.mkdir(COVER_DIR)
if not os.path.isdir(THUMB_DIR):
os.mkdir(THUMB_DIR)
if not os.path.isdir(FLIP_DIR):
os.mkdir(FLIP_DIR)
for _ in range(MAX_THREAD_NUM):
if THREAD_STOP_FLAGS[_]:
t = Thread(target=import_worker, kwargs={'thread_index': _})
t.name = str(_)
t.daemon = False
t.start()
task_queue.join()
<|reserved_special_token_0|>
class ImportTask(object):
def __init__(self, video_id, base_path, path, rating=Video.P):
"""
Create an import task object.
:param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.
:param base_path: path prefix that will be ignored when creating keywords from path.
:param path: path of the file
:param rating: rating of the video, highest by default.
"""
self.video_id = video_id
self.base_path = base_path
self.file_path = path
self.rating = rating
<|reserved_special_token_0|>
def is_valid_video_file(file_path, file_name):
if file_name.startswith('.') or not file_name.endswith('.mp4'):
return False
if os.path.getsize(file_path) == 0:
log.info('Remove invalid video file: {0}'.format(file_path))
os.remove(file_path)
return False
return True
def load_keyword_blacklist_from_file():
blacklist = set()
keyword_file = 'keywords.blacklist'
try:
with open(keyword_file, 'r') as kfp:
for line in kfp:
line = line.strip('\n')
if line:
blacklist.add(line)
log.info('Keywords blacklist: {0}'.format(blacklist))
except Exception as e:
log.error('Error while processing {0}:{1}'.format(keyword_file, e))
return blacklist
def get_keywords(prefix, file_path, blacklist):
"""
Get keywords from file path
:param prefix: Prefix of the dir path, so we can ignore them
:param file_path: full path of the video file
:param blacklist: A set of words/symbols that should be ignored
:return: a list of keywords
"""
file_path = str(file_path).replace(prefix, '')
file_path = os.path.splitext(file_path)[0]
file_path = str(file_path).lower()
for bad_keyword in blacklist:
file_path = file_path.replace(bad_keyword, ' ')
file_path = re.sub('\\s+', ' ', file_path)
keywords = file_path.split(' ')
keywords = [k for k in keywords if k]
return keywords
class KeywordDictDataObj(object):
def __init__(self):
self.count = 0
self.files = set()
def get_thumb_path(fn):
return './static/thumb/' + str(fn) + '.png'
<|reserved_special_token_0|>
def gen_thumb(video_path, thumb_path):
"""
Generate thumb image for the given video, and grabs duration from output
:return: (success, duration)
"""
if os.path.isfile(thumb_path):
os.remove(thumb_path)
global THUMB_SIZE
cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',
'-f', 'apng', '-s', THUMB_SIZE, thumb_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
output = p.communicate()[1]
duration = search_duration_from_text(output)
if not duration:
tlog = get_logger(current_thread().name)
tlog.error('Failed to find duration for {0}'.format(video_path))
duration = 0
return p.returncode == 0, duration
def gen_flips(video_path, video_id, duration, flip_path, flip_num):
"""
Generate flips for the given video
:param video_path: path of the video
:param video_id: id of the file
:param duration: duration of video in seconds
:param flip_path: path dir to put the flips
:param flip_num: number of flips to generate
:return: True on success, False otherwise
"""
if not G_GEN_IMAGE:
return True
duration = float(duration)
flip_num = float(flip_num)
interval = duration / flip_num
if interval <= 0.0:
tlog = get_logger(current_thread().name)
tlog.error('Cannot generate flips. Duration: {0} FlipNum:{1}'.
format(duration, flip_num))
return False
fps = 'fps=1/' + str(interval)
global THUMB_SIZE
flip_path = os.path.join(flip_path, str(video_id))
for _ in range(FLIP_NUM + 3):
flip_file = '{0}-{1}.png'.format(flip_path, _)
if os.path.isfile(flip_file):
os.remove(flip_file)
flip_path_template = flip_path + '-%d.png'
cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,
flip_path_template]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def gen_cover(video_path, cover_path):
if not G_GEN_IMAGE:
return True
if os.path.isfile(cover_path):
os.remove(cover_path)
cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path, '-vframes', '1',
'-f', 'apng', cover_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
<|reserved_special_token_0|>
def search_duration_from_text(text):
regExp = re.compile('Duration: (\\d{2}):(\\d{2}):(\\d{2})')
result = regExp.search(text, re.M | re.U)
if result is not None:
hour, min, sec = result.groups()
duration = int(hour) * 3600 + int(min) * 60 + int(sec)
return duration
return None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def register_int_signal_handler():
def stop_thread_handler(signum, frame):
log.info('Received signal {0}. Will stop all task threads'.format(
signum))
for _ in range(len(THREAD_STOP_FLAGS)):
THREAD_STOP_FLAGS[_] = True
if platform.platform().startswith('Windows'):
signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)
else:
signal.signal(signal.SIGINT, stop_thread_handler)
def next_video_id(current, path):
existing = Video.objects.filter(path=path)
if existing:
return existing[0].video_id, current
current += 1
return current, current
def create_task_list(path_list):
"""
Walks path recursively, and create a task list
:param path_list: a list of (path, rating)
:return: a list of ImportTask objects
"""
current_video_id = Video.objects.all().aggregate(Max('video_id'))[
'video_id__max']
if not current_video_id:
current_video_id = 0
task_list = []
for path, rating in path_list:
base_path = os.path.split(path)[0]
if os.path.isfile(path):
file_name = os.path.basename(path)
if is_valid_video_file(path, file_name):
video_id, current_video_id = next_video_id(current_video_id,
path)
task_list.append(ImportTask(video_id, base_path, path, rating))
continue
for root, dirs, files in os.walk(path):
for file_name in files:
try:
file_path = os.path.join(root, file_name)
if os.path.isdir(file_path):
continue
if is_valid_video_file(file_path, file_name):
video_id, current_video_id = next_video_id(
current_video_id, file_path)
task_list.append(ImportTask(video_id, base_path,
file_path, rating))
except:
log.error('#Error while proceeding: {0}'.format(file_name))
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value,
exc_traceback, limit=2, file=sys.stdout)
return task_list
def start_tasks(task_list):
global task_queue
for task in task_list:
task_queue.put(task)
if not THREAD_STOP_FLAGS:
for _ in range(MAX_THREAD_NUM):
THREAD_STOP_FLAGS.append(True)
if not os.path.isdir(COVER_DIR):
os.mkdir(COVER_DIR)
if not os.path.isdir(THUMB_DIR):
os.mkdir(THUMB_DIR)
if not os.path.isdir(FLIP_DIR):
os.mkdir(FLIP_DIR)
for _ in range(MAX_THREAD_NUM):
if THREAD_STOP_FLAGS[_]:
t = Thread(target=import_worker, kwargs={'thread_index': _})
t.name = str(_)
t.daemon = False
t.start()
task_queue.join()
<|reserved_special_token_0|>
class ImportTask(object):
def __init__(self, video_id, base_path, path, rating=Video.P):
"""
Create an import task object.
:param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.
:param base_path: path prefix that will be ignored when creating keywords from path.
:param path: path of the file
:param rating: rating of the video, highest by default.
"""
self.video_id = video_id
self.base_path = base_path
self.file_path = path
self.rating = rating
def import_worker(thread_index):
"""
Thread worker that deals with tasks.
:return:
"""
THREAD_STOP_FLAGS[thread_index] = False
while not (THREAD_STOP_FLAGS[thread_index] or task_queue.empty()):
task = task_queue.get()
do_import_video_task(task)
task_queue.task_done()
THREAD_STOP_FLAGS[thread_index] = True
def do_import_video_task(task):
video_id = task.video_id
file_path = task.file_path
rating = task.rating
file_name = os.path.basename(file_path)[:-4]
tlog = get_logger(current_thread().name)
videos = Video.objects.filter(path=file_path)
if videos:
tlog.info('Existing video: {0}'.format(task.file_path))
return
video = Video()
video.video_id = video_id
video.rating = rating
thumb_path = get_thumb_path(video.video_id)
cover_path = get_cover_path(video.video_id)
if not gen_cover(task.file_path, cover_path):
tlog.error('Failed to gen cover for {0}'.format(file_path))
return
success, duration = gen_thumb(file_path, thumb_path)
if success:
if not gen_flips(file_path, video.video_id, duration, FLIP_DIR,
FLIP_NUM):
tlog.error('Failed to gen flips for {0}'.format(file_path))
else:
tlog.error('Failed to gen thumb for {0}'.format(file_path))
video.title = file_name
video.path = file_path
video.duration = duration
video.save()
tlog.info('#Video: {0} [{1}] {2}'.format(video.title, video.duration,
video.path))
def is_valid_video_file(file_path, file_name):
if file_name.startswith('.') or not file_name.endswith('.mp4'):
return False
if os.path.getsize(file_path) == 0:
log.info('Remove invalid video file: {0}'.format(file_path))
os.remove(file_path)
return False
return True
def load_keyword_blacklist_from_file():
blacklist = set()
keyword_file = 'keywords.blacklist'
try:
with open(keyword_file, 'r') as kfp:
for line in kfp:
line = line.strip('\n')
if line:
blacklist.add(line)
log.info('Keywords blacklist: {0}'.format(blacklist))
except Exception as e:
log.error('Error while processing {0}:{1}'.format(keyword_file, e))
return blacklist
def get_keywords(prefix, file_path, blacklist):
"""
Get keywords from file path
:param prefix: Prefix of the dir path, so we can ignore them
:param file_path: full path of the video file
:param blacklist: A set of words/symbols that should be ignored
:return: a list of keywords
"""
file_path = str(file_path).replace(prefix, '')
file_path = os.path.splitext(file_path)[0]
file_path = str(file_path).lower()
for bad_keyword in blacklist:
file_path = file_path.replace(bad_keyword, ' ')
file_path = re.sub('\\s+', ' ', file_path)
keywords = file_path.split(' ')
keywords = [k for k in keywords if k]
return keywords
class KeywordDictDataObj(object):
def __init__(self):
self.count = 0
self.files = set()
def get_thumb_path(fn):
return './static/thumb/' + str(fn) + '.png'
<|reserved_special_token_0|>
def gen_thumb(video_path, thumb_path):
"""
Generate thumb image for the given video, and grabs duration from output
:return: (success, duration)
"""
if os.path.isfile(thumb_path):
os.remove(thumb_path)
global THUMB_SIZE
cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',
'-f', 'apng', '-s', THUMB_SIZE, thumb_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
output = p.communicate()[1]
duration = search_duration_from_text(output)
if not duration:
tlog = get_logger(current_thread().name)
tlog.error('Failed to find duration for {0}'.format(video_path))
duration = 0
return p.returncode == 0, duration
def gen_flips(video_path, video_id, duration, flip_path, flip_num):
"""
Generate flips for the given video
:param video_path: path of the video
:param video_id: id of the file
:param duration: duration of video in seconds
:param flip_path: path dir to put the flips
:param flip_num: number of flips to generate
:return: True on success, False otherwise
"""
if not G_GEN_IMAGE:
return True
duration = float(duration)
flip_num = float(flip_num)
interval = duration / flip_num
if interval <= 0.0:
tlog = get_logger(current_thread().name)
tlog.error('Cannot generate flips. Duration: {0} FlipNum:{1}'.
format(duration, flip_num))
return False
fps = 'fps=1/' + str(interval)
global THUMB_SIZE
flip_path = os.path.join(flip_path, str(video_id))
for _ in range(FLIP_NUM + 3):
flip_file = '{0}-{1}.png'.format(flip_path, _)
if os.path.isfile(flip_file):
os.remove(flip_file)
flip_path_template = flip_path + '-%d.png'
cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,
flip_path_template]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def gen_cover(video_path, cover_path):
if not G_GEN_IMAGE:
return True
if os.path.isfile(cover_path):
os.remove(cover_path)
cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path, '-vframes', '1',
'-f', 'apng', cover_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def convert_video_to_mp4(video_path, dest_path):
tlog = get_logger(current_thread().name)
if os.path.isfile(dest_path):
tlog.info('#Already converted, skip: {0}'.format(dest_path))
return True
tlog.info('#Converting: {0} => {1}\n', video_path, dest_path)
cmd = ['ffmpeg', '-i', video_path, '-vcodec', 'h264', '-acodec', 'aac',
dest_path]
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
p.communicate()
return p.returncode == 0
def search_duration_from_text(text):
regExp = re.compile('Duration: (\\d{2}):(\\d{2}):(\\d{2})')
result = regExp.search(text, re.M | re.U)
if result is not None:
hour, min, sec = result.groups()
duration = int(hour) * 3600 + int(min) * 60 + int(sec)
return duration
return None
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def register_int_signal_handler():
def stop_thread_handler(signum, frame):
log.info('Received signal {0}. Will stop all task threads'.format(
signum))
for _ in range(len(THREAD_STOP_FLAGS)):
THREAD_STOP_FLAGS[_] = True
if platform.platform().startswith('Windows'):
signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)
else:
signal.signal(signal.SIGINT, stop_thread_handler)
def next_video_id(current, path):
existing = Video.objects.filter(path=path)
if existing:
return existing[0].video_id, current
current += 1
return current, current
def create_task_list(path_list):
"""
Walks path recursively, and create a task list
:param path_list: a list of (path, rating)
:return: a list of ImportTask objects
"""
current_video_id = Video.objects.all().aggregate(Max('video_id'))[
'video_id__max']
if not current_video_id:
current_video_id = 0
task_list = []
for path, rating in path_list:
base_path = os.path.split(path)[0]
if os.path.isfile(path):
file_name = os.path.basename(path)
if is_valid_video_file(path, file_name):
video_id, current_video_id = next_video_id(current_video_id,
path)
task_list.append(ImportTask(video_id, base_path, path, rating))
continue
for root, dirs, files in os.walk(path):
for file_name in files:
try:
file_path = os.path.join(root, file_name)
if os.path.isdir(file_path):
continue
if is_valid_video_file(file_path, file_name):
video_id, current_video_id = next_video_id(
current_video_id, file_path)
task_list.append(ImportTask(video_id, base_path,
file_path, rating))
except:
log.error('#Error while proceeding: {0}'.format(file_name))
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value,
exc_traceback, limit=2, file=sys.stdout)
return task_list
def start_tasks(task_list):
global task_queue
for task in task_list:
task_queue.put(task)
if not THREAD_STOP_FLAGS:
for _ in range(MAX_THREAD_NUM):
THREAD_STOP_FLAGS.append(True)
if not os.path.isdir(COVER_DIR):
os.mkdir(COVER_DIR)
if not os.path.isdir(THUMB_DIR):
os.mkdir(THUMB_DIR)
if not os.path.isdir(FLIP_DIR):
os.mkdir(FLIP_DIR)
for _ in range(MAX_THREAD_NUM):
if THREAD_STOP_FLAGS[_]:
t = Thread(target=import_worker, kwargs={'thread_index': _})
t.name = str(_)
t.daemon = False
t.start()
task_queue.join()
<|reserved_special_token_0|>
class ImportTask(object):
def __init__(self, video_id, base_path, path, rating=Video.P):
"""
Create an import task object.
:param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.
:param base_path: path prefix that will be ignored when creating keywords from path.
:param path: path of the file
:param rating: rating of the video, highest by default.
"""
self.video_id = video_id
self.base_path = base_path
self.file_path = path
self.rating = rating
def import_worker(thread_index):
"""
Thread worker that deals with tasks.
:return:
"""
THREAD_STOP_FLAGS[thread_index] = False
while not (THREAD_STOP_FLAGS[thread_index] or task_queue.empty()):
task = task_queue.get()
do_import_video_task(task)
task_queue.task_done()
THREAD_STOP_FLAGS[thread_index] = True
def do_import_video_task(task):
    """Import one video: generate cover/thumb/flips and save a Video row.

    Files whose path already exists in the database are skipped.
    """
    tlog = get_logger(current_thread().name)
    file_path = task.file_path
    # Skip paths that were imported by a previous run.
    if Video.objects.filter(path=file_path):
        tlog.info('Existing video: {0}'.format(task.file_path))
        return
    video = Video()
    video.video_id = task.video_id
    video.rating = task.rating
    thumb_path = get_thumb_path(video.video_id)
    cover_path = get_cover_path(video.video_id)
    if not gen_cover(task.file_path, cover_path):
        tlog.error('Failed to gen cover for {0}'.format(file_path))
        return
    # gen_thumb also parses the duration out of ffmpeg's stderr.
    success, duration = gen_thumb(file_path, thumb_path)
    if success:
        if not gen_flips(file_path, video.video_id, duration, FLIP_DIR,
                         FLIP_NUM):
            tlog.error('Failed to gen flips for {0}'.format(file_path))
    else:
        tlog.error('Failed to gen thumb for {0}'.format(file_path))
    # Title is the file name without its 4-char ".mp4" extension.
    video.title = os.path.basename(file_path)[:-4]
    video.path = file_path
    video.duration = duration
    video.save()
    tlog.info('#Video: {0} [{1}] {2}'.format(video.title, video.duration,
                                             video.path))
def is_valid_video_file(file_path, file_name):
    """Return True if *file_name* looks like an importable .mp4 video.

    Hidden files and non-mp4 extensions are rejected; an existing but
    zero-byte file is deleted on sight and rejected as well.
    """
    if file_name.startswith('.'):
        return False
    if not file_name.endswith('.mp4'):
        return False
    if os.path.getsize(file_path) != 0:
        return True
    log.info('Remove invalid video file: {0}'.format(file_path))
    os.remove(file_path)
    return False
def load_keyword_blacklist_from_file():
    """Read 'keywords.blacklist' (one entry per line) into a set.

    A missing or unreadable file is logged and yields an empty set.
    """
    keyword_file = 'keywords.blacklist'
    blacklist = set()
    try:
        with open(keyword_file, 'r') as kfp:
            stripped = (line.strip('\n') for line in kfp)
            blacklist.update(entry for entry in stripped if entry)
        log.info('Keywords blacklist: {0}'.format(blacklist))
    except Exception as e:
        log.error('Error while processing {0}:{1}'.format(keyword_file, e))
    return blacklist
def get_keywords(prefix, file_path, blacklist):
    """
    Derive search keywords from a video file path.

    :param prefix: leading dir path to strip.  Only a *leading* match is
        removed; the old ``str.replace`` deleted every occurrence of the
        prefix anywhere in the path, mangling keywords that happened to
        contain it.
    :param file_path: full path of the video file
    :param blacklist: a set of words/symbols that should be ignored
    :return: a list of non-empty, lower-cased keywords
    """
    file_path = str(file_path)
    if prefix and file_path.startswith(prefix):
        file_path = file_path[len(prefix):]
    # Drop the extension, normalize case, blank out blacklisted fragments.
    file_path = os.path.splitext(file_path)[0].lower()
    for bad_keyword in blacklist:
        file_path = file_path.replace(bad_keyword, ' ')
    # Collapse runs of whitespace so split() below yields clean tokens.
    file_path = re.sub(r'\s+', ' ', file_path)
    return [k for k in file_path.split(' ') if k]
class KeywordDictDataObj(object):
    """Aggregate for one keyword: occurrence count plus the files using it."""

    def __init__(self):
        # Number of times the keyword has been seen.
        self.count = 0
        # Set of file paths the keyword appeared in.
        self.files = set()
def get_thumb_path(fn):
    """Return the relative path of the thumbnail image for video id *fn*."""
    return './static/thumb/{0}.png'.format(fn)
def get_cover_path(fn):
    """Return the relative path of the cover image for video id *fn*."""
    return './static/cover/{0}.png'.format(fn)
def gen_thumb(video_path, thumb_path):
    """Create a thumbnail for *video_path* and read the video's duration.

    ffmpeg prints the stream info (including duration) on stderr, which is
    parsed here as a side benefit of grabbing the frame.

    :return: tuple (success, duration_in_seconds)
    """
    global THUMB_SIZE
    if os.path.isfile(thumb_path):
        # ffmpeg refuses to overwrite an existing output file.
        os.remove(thumb_path)
    cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path,
           '-vframes', '1', '-f', 'apng', '-s', THUMB_SIZE, thumb_path]
    proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    stderr_text = proc.communicate()[1]
    duration = search_duration_from_text(stderr_text)
    if not duration:
        tlog = get_logger(current_thread().name)
        tlog.error('Failed to find duration for {0}'.format(video_path))
        duration = 0
    return proc.returncode == 0, duration
def gen_flips(video_path, video_id, duration, flip_path, flip_num):
    """
    Generate preview flips (frames taken at regular intervals) for a video.

    :param video_path: path of the video
    :param video_id: id of the file
    :param duration: duration of video in seconds
    :param flip_num: number of flips to generate
    :param flip_path: path dir to put the flips
    :return: True on success, False otherwise
    """
    if not G_GEN_IMAGE:
        return True
    duration = float(duration)
    flip_num = float(flip_num)
    # Guard first: the old code divided unconditionally and raised
    # ZeroDivisionError when flip_num was 0.
    interval = duration / flip_num if flip_num > 0 else 0.0
    if interval <= 0.0:
        tlog = get_logger(current_thread().name)
        tlog.error('Cannot generate flips. Duration: {0} FlipNum:{1}'.
                   format(duration, flip_num))
        return False
    fps = 'fps=1/' + str(interval)
    global THUMB_SIZE
    flip_path = os.path.join(flip_path, str(video_id))
    # Remove stale flips from a previous run.  Uses the flip_num argument;
    # the old code read the global FLIP_NUM here, ignoring the parameter.
    for i in range(int(flip_num) + 3):
        flip_file = '{0}-{1}.png'.format(flip_path, i)
        if os.path.isfile(flip_file):
            os.remove(flip_file)
    flip_path_template = flip_path + '-%d.png'
    cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,
           flip_path_template]
    p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    p.communicate()
    return p.returncode == 0
def gen_cover(video_path, cover_path):
    """Grab a cover frame (~1s into the video); True on success."""
    if not G_GEN_IMAGE:
        return True
    if os.path.isfile(cover_path):
        # ffmpeg refuses to overwrite an existing output file.
        os.remove(cover_path)
    cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path,
           '-vframes', '1', '-f', 'apng', cover_path]
    proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    proc.communicate()
    return proc.returncode == 0
def convert_video_to_mp4(video_path, dest_path):
    """Re-encode *video_path* to h264/aac mp4 at *dest_path*.

    :return: True if the destination already exists or ffmpeg succeeded
    """
    tlog = get_logger(current_thread().name)
    if os.path.isfile(dest_path):
        tlog.info('#Already converted, skip: {0}'.format(dest_path))
        return True
    # The old call passed the paths as extra positional args without
    # .format(), so they never appeared in the log line.
    tlog.info('#Converting: {0} => {1}'.format(video_path, dest_path))
    cmd = ['ffmpeg', '-i', video_path, '-vcodec', 'h264', '-acodec', 'aac',
           dest_path]
    p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    p.communicate()
    return p.returncode == 0
def search_duration_from_text(text):
    """
    Extract a video duration from ffmpeg console output.

    :param text: output containing a "Duration: HH:MM:SS" line
    :return: duration in whole seconds, or None if no duration is found
    """
    # Flags belong in re.compile(): the second positional argument of
    # pattern.search() is the start *position*, so the old call
    # search(text, re.M | re.U) silently skipped the first 40 characters.
    regExp = re.compile(r'Duration: (\d{2}):(\d{2}):(\d{2})', re.M | re.U)
    result = regExp.search(text)
    if result is not None:
        hour, minute, sec = result.groups()
        return int(hour) * 3600 + int(minute) * 60 + int(sec)
    return None
<|reserved_special_token_1|>
# coding=utf8
# encoding: utf-8
import os
import platform
import re
import signal
import sys
import traceback
from subprocess import Popen, PIPE
from threading import Thread, current_thread
from Queue import Queue
from util.log import get_logger, log
from video.models import Video, KeywordVideoId
from django.db.models import Max
from collect_video import G_GEN_IMAGE
MAX_THREAD_NUM = 4
THREAD_STOP_FLAGS = []
THUMB_DIR = './static/thumb'
THUMB_SIZE = '180x135'
COVER_DIR = './static/cover'
FLIP_DIR = './static/flip'
FLIP_NUM = 10
task_queue = Queue(maxsize=2000)
def register_int_signal_handler():
    """Install a Ctrl-C handler that asks every worker thread to stop."""

    def stop_thread_handler(signum, frame):
        log.info("Received signal {0}. Will stop all task threads".format(signum))
        for i in range(len(THREAD_STOP_FLAGS)):
            THREAD_STOP_FLAGS[i] = True

    # signal.signal() only accepts real signal numbers; CTRL_C_EVENT is an
    # os.kill()-only constant on Windows and raises ValueError here.  Windows
    # delivers Ctrl-C as SIGINT, so SIGINT works on every platform.
    signal.signal(signal.SIGINT, stop_thread_handler)
def next_video_id(current, path):
    """Return (video_id, new_current).

    Reuses the id of an already-imported path; otherwise allocates the
    next sequential id and advances the counter.
    """
    match = Video.objects.filter(path=path)
    if match:
        return match[0].video_id, current
    allocated = current + 1
    return allocated, allocated
def create_task_list(path_list):
    """
    Walks path recursively, and create a task list
    :param path_list: a list of (path, rating)
    :return: a list of ImportTask objects
    """
    # Continue numbering after the highest existing video_id.
    current_video_id = Video.objects.all().aggregate(Max('video_id'))['video_id__max']
    if not current_video_id:
        current_video_id = 0
    task_list = []
    for (path, rating) in path_list:
        base_path = os.path.split(path)[0]
        if os.path.isfile(path):
            # Single-file entry: import it directly, no directory walk.
            file_name = os.path.basename(path)
            if is_valid_video_file(path, file_name):
                video_id, current_video_id = next_video_id(current_video_id, path)
                task_list.append(ImportTask(video_id, base_path, path, rating))
            continue
        for (root, dirs, files) in os.walk(path):
            for file_name in files:
                try:
                    file_path = os.path.join(root, file_name)
                    if os.path.isdir(file_path):
                        continue
                    if is_valid_video_file(file_path, file_name):
                        video_id, current_video_id = next_video_id(current_video_id, file_path)
                        task_list.append(ImportTask(video_id, base_path, file_path, rating))
                # A bare `except:` would also swallow KeyboardInterrupt and
                # SystemExit; catch Exception so the process stays stoppable.
                except Exception:
                    log.error('#Error while proceeding: {0}'.format(file_name))
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2, file=sys.stdout)
    return task_list
def start_tasks(task_list):
    """Queue *task_list*, ensure output dirs exist, and (re)start workers.

    Blocks until the queue has been fully processed.
    """
    global task_queue
    for task in task_list:
        task_queue.put(task)
    if not THREAD_STOP_FLAGS:
        # First run: one stop-flag slot per worker thread.
        THREAD_STOP_FLAGS.extend([True] * MAX_THREAD_NUM)
    for out_dir in (COVER_DIR, THUMB_DIR, FLIP_DIR):
        if not os.path.isdir(out_dir):
            os.mkdir(out_dir)
    for index in range(MAX_THREAD_NUM):
        # A True flag means that worker slot is not running; start it.
        if THREAD_STOP_FLAGS[index]:
            worker = Thread(target=import_worker, kwargs={'thread_index': index})
            worker.name = str(index)
            worker.daemon = False
            worker.start()
    task_queue.join()
def add_keywords_to_db(task_list):
    """Extract keywords from each task's path and persist them for the video.

    Keywords already linked to the video are skipped; failures on a single
    keyword are logged and do not abort the rest of the batch.
    """
    blacklist = load_keyword_blacklist_from_file()
    for task in task_list:
        base_path = task.base_path
        file_path = task.file_path
        video_id = task.video_id
        keywords = get_keywords(base_path, file_path, blacklist)
        # Bug fix: the message had no '{0}' placeholder, so the keyword list
        # was never actually included in the log output.
        log.info('#Keywords: {0}'.format(keywords))
        for key in keywords:
            try:
                if KeywordVideoId.objects.filter(keyword=key, video_id=video_id):
                    log.info("Existing keyword {0} for {1}".format(key, video_id))
                    continue
                keyword_record = KeywordVideoId()
                keyword_record.keyword = key
                keyword_record.video = Video.objects.get(video_id=video_id)
                keyword_record.save()
                log.info('#Added keyword:{0} for video_id: {1}'.format(key, video_id))
            except Exception as e:
                log.error("Error while adding keyword {0} to video {1}: {2}".format(key, video_id, e))
class ImportTask(object):
    """A unit of work describing one video file to import."""

    def __init__(self, video_id, base_path, path, rating=Video.P):
        """
        Build an import task.
        :param video_id: pre-allocated numeric id, so worker threads need no db lock.
        :param base_path: prefix stripped from the path when deriving keywords.
        :param path: full path of the video file.
        :param rating: rating of the video; highest by default.
        """
        self.rating = rating
        self.file_path = path
        self.base_path = base_path
        self.video_id = video_id
def import_worker(thread_index):
    """
    Worker loop: pull tasks from the shared queue until stopped or drained.
    :return:
    """
    THREAD_STOP_FLAGS[thread_index] = False
    while True:
        # Exit when asked to stop or when there is no work left.
        if THREAD_STOP_FLAGS[thread_index] or task_queue.empty():
            break
        current = task_queue.get()
        do_import_video_task(current)
        task_queue.task_done()
    THREAD_STOP_FLAGS[thread_index] = True
def do_import_video_task(task):
    """Import a single video: generate cover/thumb/flips and save the db record.

    Skips files whose path is already in the database. Image-generation
    failures are logged; the record is still saved if the thumb succeeded.
    """
    video_id = task.video_id
    file_path = task.file_path
    rating = task.rating
    # Improvement: derive the title with splitext instead of a hard-coded
    # [:-4] slice, so extensions of any length are handled correctly.
    file_name = os.path.splitext(os.path.basename(file_path))[0]
    tlog = get_logger(current_thread().name)
    videos = Video.objects.filter(path=file_path)
    if videos:
        tlog.info("Existing video: {0}".format(task.file_path))
        return
    video = Video()
    video.video_id = video_id
    video.rating = rating
    thumb_path = get_thumb_path(video.video_id)
    cover_path = get_cover_path(video.video_id)
    if not gen_cover(task.file_path, cover_path):
        tlog.error("Failed to gen cover for {0}".format(file_path))
        return
    success, duration = gen_thumb(file_path, thumb_path)
    if success:
        if not gen_flips(file_path, video.video_id, duration, FLIP_DIR, FLIP_NUM):
            tlog.error("Failed to gen flips for {0}".format(file_path))
    else:
        tlog.error("Failed to gen thumb for {0}".format(file_path))
    video.title = file_name
    video.path = file_path
    video.duration = duration
    video.save()
    tlog.info('#Video: {0} [{1}] {2}'.format(video.title, video.duration, video.path))
def is_valid_video_file(file_path, file_name):
    """Return True when the file looks like a usable .mp4 video.

    Hidden files and non-mp4 extensions are rejected outright; zero-byte
    files are deleted on sight and rejected as well.
    """
    hidden = file_name.startswith('.')
    is_mp4 = file_name.endswith('.mp4')
    if hidden or not is_mp4:
        return False
    if os.path.getsize(file_path) != 0:
        return True
    log.info('Remove invalid video file: {0}'.format(file_path))
    os.remove(file_path)
    return False
def load_keyword_blacklist_from_file():
    """Read 'keywords.blacklist' and return its non-empty lines as a set.

    Any error (e.g. missing file) is logged and an empty/partial set is
    returned, so callers never have to handle exceptions.
    """
    keyword_file = 'keywords.blacklist'
    blacklist = set()
    try:
        with open(keyword_file, 'r') as handle:
            for raw_line in handle:
                entry = raw_line.strip('\n')
                if entry:
                    blacklist.add(entry)
        log.info("Keywords blacklist: {0}".format(blacklist))
    except Exception as e:
        log.error("Error while processing {0}:{1}".format(keyword_file, e))
    return blacklist
def get_keywords(prefix, file_path, blacklist):
    """
    Derive search keywords from a video file's path.
    :param prefix: leading directory prefix to drop from the path
    :param file_path: full path of the video file
    :param blacklist: a set of words/symbols to strip before splitting
    :return: a list of non-empty, lower-cased keywords
    """
    stripped = str(file_path).replace(prefix, '')     # drop base dir
    stripped = os.path.splitext(stripped)[0]          # drop extension
    stripped = str(stripped).lower()
    for banned in blacklist:
        stripped = stripped.replace(banned, ' ')
    collapsed = re.sub(r'\s+', ' ', stripped)         # squeeze whitespace
    return [word for word in collapsed.split(' ') if word]
class KeywordDictDataObj(object):
    """Per-keyword aggregate: an occurrence count plus the set of files it appears in."""

    def __init__(self):
        self.files = set()
        self.count = 0
def get_thumb_path(fn):
    """Return the thumbnail image path for the given video id."""
    return './static/thumb/{0}.png'.format(fn)
def get_cover_path(fn):
    """Return the cover image path for the given video id."""
    return './static/cover/{0}.png'.format(fn)
def gen_thumb(video_path, thumb_path):
    """
    Generate a thumb image for the given video, and grab the clip duration
    from ffmpeg's stderr output.
    :return: (success, duration)
    """
    global THUMB_SIZE
    if os.path.isfile(thumb_path):
        os.remove(thumb_path)
    command = ['ffmpeg', '-itsoffset', '-5', '-i', video_path,
               '-vframes', '1', '-f', 'apng', '-s', THUMB_SIZE, thumb_path]
    proc = Popen(command, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    stderr_output = proc.communicate()[1]
    duration = search_duration_from_text(stderr_output)
    if not duration:
        tlog = get_logger(current_thread().name)
        tlog.error("Failed to find duration for {0}".format(video_path))
        duration = 0
    return proc.returncode == 0, duration
def gen_flips(video_path, video_id, duration, flip_path, flip_num):
    """
    Generate flips for the given video
    :param video_path: path of the video
    :param video_id: id of the file
    :param duration: duration of video in seconds
    :param flip_path: path dir to put the flips
    :param flip_num: number of flips to generate
    :return: True on success, False otherwise
    """
    if not G_GEN_IMAGE:
        return True
    # Remember the requested count before the float conversion below.
    # Fix: the stale-file cleanup loop previously used the FLIP_NUM global,
    # ignoring this function's flip_num parameter (callers pass FLIP_NUM,
    # so current behavior is unchanged).
    flip_count = int(flip_num)
    duration = float(duration)
    flip_num = float(flip_num)
    interval = duration / flip_num
    if interval <= 0.0:
        tlog = get_logger(current_thread().name)
        tlog.error("Cannot generate flips. Duration: {0} FlipNum:{1}".format(duration, flip_num))
        return False
    fps = 'fps=1/' + str(interval)
    global THUMB_SIZE
    flip_path = os.path.join(flip_path, str(video_id))
    # Remove stale flips; a few extra indices in case ffmpeg produced more.
    for idx in range(flip_count + 3):
        flip_file = "{0}-{1}.png".format(flip_path, idx)
        if os.path.isfile(flip_file):
            os.remove(flip_file)
    flip_path_template = flip_path + '-%d.png'
    cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE, flip_path_template]
    p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    p.communicate()
    return p.returncode == 0
def gen_cover(video_path, cover_path):
    """Grab a cover frame for the video via ffmpeg; True on success."""
    if not G_GEN_IMAGE:
        return True
    if os.path.isfile(cover_path):
        os.remove(cover_path)
    command = ['ffmpeg', '-itsoffset', '-1', '-i', video_path,
               '-vframes', '1', '-f', 'apng', cover_path]
    proc = Popen(command, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    proc.communicate()
    return proc.returncode == 0
# Convert video to mp4
def convert_video_to_mp4(video_path, dest_path):
    """Re-encode *video_path* to an h264/aac mp4 at *dest_path*.

    Skips (and returns True) when the destination already exists.
    :return: True on success, False otherwise
    """
    tlog = get_logger(current_thread().name)
    if os.path.isfile(dest_path):
        tlog.info('#Already converted, skip: {0}'.format(dest_path))
        return True
    # Bug fix: the message used str.format placeholders but the paths were
    # passed as extra logger arguments (which use %-style), so they were
    # never interpolated into the log line.
    tlog.info('#Converting: {0} => {1}\n'.format(video_path, dest_path))
    cmd = ['ffmpeg', '-i', video_path, '-vcodec', 'h264', '-acodec', 'aac', dest_path]
    p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    p.communicate()
    return p.returncode == 0
# Search the duration from given text
def search_duration_from_text(text):
    """
    Find a duration like "Duration: 00:24:14" in ffmpeg output.
    :param text: text to scan (typically ffmpeg stderr)
    :return: duration in whole seconds, or None when no match is found
    """
    # Bug fix: flags were previously passed as re.search()'s second
    # positional argument, which is the *start position* (re.M | re.U == 40),
    # so the first 40 characters of the text were silently skipped and short
    # outputs never matched. Flags belong in re.compile().
    pattern = re.compile(r'Duration: (\d{2}):(\d{2}):(\d{2})', re.M | re.U)
    result = pattern.search(text)
    if result is not None:
        (hours, minutes, seconds) = result.groups()
        return int(hours) * 3600 + int(minutes) * 60 + int(seconds)
    return None
|
flexible
|
{
"blob_id": "fbd5400823a8148adf358a2acc58fde146a25313",
"index": 2275,
"step-1": "<mask token>\n\n\ndef register_int_signal_handler():\n\n def stop_thread_handler(signum, frame):\n log.info('Received signal {0}. Will stop all task threads'.format(\n signum))\n for _ in range(len(THREAD_STOP_FLAGS)):\n THREAD_STOP_FLAGS[_] = True\n if platform.platform().startswith('Windows'):\n signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)\n else:\n signal.signal(signal.SIGINT, stop_thread_handler)\n\n\n<mask token>\n\n\ndef create_task_list(path_list):\n \"\"\"\n Walks path recursively, and create a task list\n :param path_list: a list of (path, rating)\n :return: a list of ImportTask objects\n \"\"\"\n current_video_id = Video.objects.all().aggregate(Max('video_id'))[\n 'video_id__max']\n if not current_video_id:\n current_video_id = 0\n task_list = []\n for path, rating in path_list:\n base_path = os.path.split(path)[0]\n if os.path.isfile(path):\n file_name = os.path.basename(path)\n if is_valid_video_file(path, file_name):\n video_id, current_video_id = next_video_id(current_video_id,\n path)\n task_list.append(ImportTask(video_id, base_path, path, rating))\n continue\n for root, dirs, files in os.walk(path):\n for file_name in files:\n try:\n file_path = os.path.join(root, file_name)\n if os.path.isdir(file_path):\n continue\n if is_valid_video_file(file_path, file_name):\n video_id, current_video_id = next_video_id(\n current_video_id, file_path)\n task_list.append(ImportTask(video_id, base_path,\n file_path, rating))\n except:\n log.error('#Error while proceeding: {0}'.format(file_name))\n exc_type, exc_value, exc_traceback = sys.exc_info()\n traceback.print_exception(exc_type, exc_value,\n exc_traceback, limit=2, file=sys.stdout)\n return task_list\n\n\ndef start_tasks(task_list):\n global task_queue\n for task in task_list:\n task_queue.put(task)\n if not THREAD_STOP_FLAGS:\n for _ in range(MAX_THREAD_NUM):\n THREAD_STOP_FLAGS.append(True)\n if not os.path.isdir(COVER_DIR):\n os.mkdir(COVER_DIR)\n if not os.path.isdir(THUMB_DIR):\n 
os.mkdir(THUMB_DIR)\n if not os.path.isdir(FLIP_DIR):\n os.mkdir(FLIP_DIR)\n for _ in range(MAX_THREAD_NUM):\n if THREAD_STOP_FLAGS[_]:\n t = Thread(target=import_worker, kwargs={'thread_index': _})\n t.name = str(_)\n t.daemon = False\n t.start()\n task_queue.join()\n\n\n<mask token>\n\n\nclass ImportTask(object):\n\n def __init__(self, video_id, base_path, path, rating=Video.P):\n \"\"\"\n Create an import task object.\n :param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.\n :param base_path: path prefix that will be ignored when creating keywords from path.\n :param path: path of the file\n :param rating: rating of the video, highest by default.\n \"\"\"\n self.video_id = video_id\n self.base_path = base_path\n self.file_path = path\n self.rating = rating\n\n\n<mask token>\n\n\ndef is_valid_video_file(file_path, file_name):\n if file_name.startswith('.') or not file_name.endswith('.mp4'):\n return False\n if os.path.getsize(file_path) == 0:\n log.info('Remove invalid video file: {0}'.format(file_path))\n os.remove(file_path)\n return False\n return True\n\n\ndef load_keyword_blacklist_from_file():\n blacklist = set()\n keyword_file = 'keywords.blacklist'\n try:\n with open(keyword_file, 'r') as kfp:\n for line in kfp:\n line = line.strip('\\n')\n if line:\n blacklist.add(line)\n log.info('Keywords blacklist: {0}'.format(blacklist))\n except Exception as e:\n log.error('Error while processing {0}:{1}'.format(keyword_file, e))\n return blacklist\n\n\ndef get_keywords(prefix, file_path, blacklist):\n \"\"\"\n Get keywords from file path\n :param prefix: Prefix of the dir path, so we can ignore them\n :param file_path: full path of the video file\n :param blacklist: A set of words/symbols that should be ignored\n :return: a list of keywords\n \"\"\"\n file_path = str(file_path).replace(prefix, '')\n file_path = os.path.splitext(file_path)[0]\n file_path = str(file_path).lower()\n for bad_keyword in blacklist:\n 
file_path = file_path.replace(bad_keyword, ' ')\n file_path = re.sub('\\\\s+', ' ', file_path)\n keywords = file_path.split(' ')\n keywords = [k for k in keywords if k]\n return keywords\n\n\nclass KeywordDictDataObj(object):\n\n def __init__(self):\n self.count = 0\n self.files = set()\n\n\n<mask token>\n\n\ndef gen_thumb(video_path, thumb_path):\n \"\"\"\n Generate thumb image for the given video, and grabs duration from output\n :return: (success, duration)\n \"\"\"\n if os.path.isfile(thumb_path):\n os.remove(thumb_path)\n global THUMB_SIZE\n cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',\n '-f', 'apng', '-s', THUMB_SIZE, thumb_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n output = p.communicate()[1]\n duration = search_duration_from_text(output)\n if not duration:\n tlog = get_logger(current_thread().name)\n tlog.error('Failed to find duration for {0}'.format(video_path))\n duration = 0\n return p.returncode == 0, duration\n\n\ndef gen_flips(video_path, video_id, duration, flip_path, flip_num):\n \"\"\"\n Generate flips for the given video\n :param video_path: path of the video\n :param video_id: id of the file\n :param duration: duration of video in seconds\n :param flip_path: path dir to put the flips\n :param flip_num: number of flips to generate\n :return: True on success, False otherwise\n \"\"\"\n if not G_GEN_IMAGE:\n return True\n duration = float(duration)\n flip_num = float(flip_num)\n interval = duration / flip_num\n if interval <= 0.0:\n tlog = get_logger(current_thread().name)\n tlog.error('Cannot generate flips. 
Duration: {0} FlipNum:{1}'.\n format(duration, flip_num))\n return False\n fps = 'fps=1/' + str(interval)\n global THUMB_SIZE\n flip_path = os.path.join(flip_path, str(video_id))\n for _ in range(FLIP_NUM + 3):\n flip_file = '{0}-{1}.png'.format(flip_path, _)\n if os.path.isfile(flip_file):\n os.remove(flip_file)\n flip_path_template = flip_path + '-%d.png'\n cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,\n flip_path_template]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n return p.returncode == 0\n\n\n<mask token>\n\n\ndef search_duration_from_text(text):\n regExp = re.compile('Duration: (\\\\d{2}):(\\\\d{2}):(\\\\d{2})')\n result = regExp.search(text, re.M | re.U)\n if result is not None:\n hour, min, sec = result.groups()\n duration = int(hour) * 3600 + int(min) * 60 + int(sec)\n return duration\n return None\n",
"step-2": "<mask token>\n\n\ndef register_int_signal_handler():\n\n def stop_thread_handler(signum, frame):\n log.info('Received signal {0}. Will stop all task threads'.format(\n signum))\n for _ in range(len(THREAD_STOP_FLAGS)):\n THREAD_STOP_FLAGS[_] = True\n if platform.platform().startswith('Windows'):\n signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)\n else:\n signal.signal(signal.SIGINT, stop_thread_handler)\n\n\ndef next_video_id(current, path):\n existing = Video.objects.filter(path=path)\n if existing:\n return existing[0].video_id, current\n current += 1\n return current, current\n\n\ndef create_task_list(path_list):\n \"\"\"\n Walks path recursively, and create a task list\n :param path_list: a list of (path, rating)\n :return: a list of ImportTask objects\n \"\"\"\n current_video_id = Video.objects.all().aggregate(Max('video_id'))[\n 'video_id__max']\n if not current_video_id:\n current_video_id = 0\n task_list = []\n for path, rating in path_list:\n base_path = os.path.split(path)[0]\n if os.path.isfile(path):\n file_name = os.path.basename(path)\n if is_valid_video_file(path, file_name):\n video_id, current_video_id = next_video_id(current_video_id,\n path)\n task_list.append(ImportTask(video_id, base_path, path, rating))\n continue\n for root, dirs, files in os.walk(path):\n for file_name in files:\n try:\n file_path = os.path.join(root, file_name)\n if os.path.isdir(file_path):\n continue\n if is_valid_video_file(file_path, file_name):\n video_id, current_video_id = next_video_id(\n current_video_id, file_path)\n task_list.append(ImportTask(video_id, base_path,\n file_path, rating))\n except:\n log.error('#Error while proceeding: {0}'.format(file_name))\n exc_type, exc_value, exc_traceback = sys.exc_info()\n traceback.print_exception(exc_type, exc_value,\n exc_traceback, limit=2, file=sys.stdout)\n return task_list\n\n\ndef start_tasks(task_list):\n global task_queue\n for task in task_list:\n task_queue.put(task)\n if not 
THREAD_STOP_FLAGS:\n for _ in range(MAX_THREAD_NUM):\n THREAD_STOP_FLAGS.append(True)\n if not os.path.isdir(COVER_DIR):\n os.mkdir(COVER_DIR)\n if not os.path.isdir(THUMB_DIR):\n os.mkdir(THUMB_DIR)\n if not os.path.isdir(FLIP_DIR):\n os.mkdir(FLIP_DIR)\n for _ in range(MAX_THREAD_NUM):\n if THREAD_STOP_FLAGS[_]:\n t = Thread(target=import_worker, kwargs={'thread_index': _})\n t.name = str(_)\n t.daemon = False\n t.start()\n task_queue.join()\n\n\n<mask token>\n\n\nclass ImportTask(object):\n\n def __init__(self, video_id, base_path, path, rating=Video.P):\n \"\"\"\n Create an import task object.\n :param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.\n :param base_path: path prefix that will be ignored when creating keywords from path.\n :param path: path of the file\n :param rating: rating of the video, highest by default.\n \"\"\"\n self.video_id = video_id\n self.base_path = base_path\n self.file_path = path\n self.rating = rating\n\n\n<mask token>\n\n\ndef is_valid_video_file(file_path, file_name):\n if file_name.startswith('.') or not file_name.endswith('.mp4'):\n return False\n if os.path.getsize(file_path) == 0:\n log.info('Remove invalid video file: {0}'.format(file_path))\n os.remove(file_path)\n return False\n return True\n\n\ndef load_keyword_blacklist_from_file():\n blacklist = set()\n keyword_file = 'keywords.blacklist'\n try:\n with open(keyword_file, 'r') as kfp:\n for line in kfp:\n line = line.strip('\\n')\n if line:\n blacklist.add(line)\n log.info('Keywords blacklist: {0}'.format(blacklist))\n except Exception as e:\n log.error('Error while processing {0}:{1}'.format(keyword_file, e))\n return blacklist\n\n\ndef get_keywords(prefix, file_path, blacklist):\n \"\"\"\n Get keywords from file path\n :param prefix: Prefix of the dir path, so we can ignore them\n :param file_path: full path of the video file\n :param blacklist: A set of words/symbols that should be ignored\n :return: a list of keywords\n 
\"\"\"\n file_path = str(file_path).replace(prefix, '')\n file_path = os.path.splitext(file_path)[0]\n file_path = str(file_path).lower()\n for bad_keyword in blacklist:\n file_path = file_path.replace(bad_keyword, ' ')\n file_path = re.sub('\\\\s+', ' ', file_path)\n keywords = file_path.split(' ')\n keywords = [k for k in keywords if k]\n return keywords\n\n\nclass KeywordDictDataObj(object):\n\n def __init__(self):\n self.count = 0\n self.files = set()\n\n\ndef get_thumb_path(fn):\n return './static/thumb/' + str(fn) + '.png'\n\n\n<mask token>\n\n\ndef gen_thumb(video_path, thumb_path):\n \"\"\"\n Generate thumb image for the given video, and grabs duration from output\n :return: (success, duration)\n \"\"\"\n if os.path.isfile(thumb_path):\n os.remove(thumb_path)\n global THUMB_SIZE\n cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',\n '-f', 'apng', '-s', THUMB_SIZE, thumb_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n output = p.communicate()[1]\n duration = search_duration_from_text(output)\n if not duration:\n tlog = get_logger(current_thread().name)\n tlog.error('Failed to find duration for {0}'.format(video_path))\n duration = 0\n return p.returncode == 0, duration\n\n\ndef gen_flips(video_path, video_id, duration, flip_path, flip_num):\n \"\"\"\n Generate flips for the given video\n :param video_path: path of the video\n :param video_id: id of the file\n :param duration: duration of video in seconds\n :param flip_path: path dir to put the flips\n :param flip_num: number of flips to generate\n :return: True on success, False otherwise\n \"\"\"\n if not G_GEN_IMAGE:\n return True\n duration = float(duration)\n flip_num = float(flip_num)\n interval = duration / flip_num\n if interval <= 0.0:\n tlog = get_logger(current_thread().name)\n tlog.error('Cannot generate flips. 
Duration: {0} FlipNum:{1}'.\n format(duration, flip_num))\n return False\n fps = 'fps=1/' + str(interval)\n global THUMB_SIZE\n flip_path = os.path.join(flip_path, str(video_id))\n for _ in range(FLIP_NUM + 3):\n flip_file = '{0}-{1}.png'.format(flip_path, _)\n if os.path.isfile(flip_file):\n os.remove(flip_file)\n flip_path_template = flip_path + '-%d.png'\n cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,\n flip_path_template]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n return p.returncode == 0\n\n\ndef gen_cover(video_path, cover_path):\n if not G_GEN_IMAGE:\n return True\n if os.path.isfile(cover_path):\n os.remove(cover_path)\n cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path, '-vframes', '1',\n '-f', 'apng', cover_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n return p.returncode == 0\n\n\n<mask token>\n\n\ndef search_duration_from_text(text):\n regExp = re.compile('Duration: (\\\\d{2}):(\\\\d{2}):(\\\\d{2})')\n result = regExp.search(text, re.M | re.U)\n if result is not None:\n hour, min, sec = result.groups()\n duration = int(hour) * 3600 + int(min) * 60 + int(sec)\n return duration\n return None\n",
"step-3": "<mask token>\n\n\ndef register_int_signal_handler():\n\n def stop_thread_handler(signum, frame):\n log.info('Received signal {0}. Will stop all task threads'.format(\n signum))\n for _ in range(len(THREAD_STOP_FLAGS)):\n THREAD_STOP_FLAGS[_] = True\n if platform.platform().startswith('Windows'):\n signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)\n else:\n signal.signal(signal.SIGINT, stop_thread_handler)\n\n\ndef next_video_id(current, path):\n existing = Video.objects.filter(path=path)\n if existing:\n return existing[0].video_id, current\n current += 1\n return current, current\n\n\ndef create_task_list(path_list):\n \"\"\"\n Walks path recursively, and create a task list\n :param path_list: a list of (path, rating)\n :return: a list of ImportTask objects\n \"\"\"\n current_video_id = Video.objects.all().aggregate(Max('video_id'))[\n 'video_id__max']\n if not current_video_id:\n current_video_id = 0\n task_list = []\n for path, rating in path_list:\n base_path = os.path.split(path)[0]\n if os.path.isfile(path):\n file_name = os.path.basename(path)\n if is_valid_video_file(path, file_name):\n video_id, current_video_id = next_video_id(current_video_id,\n path)\n task_list.append(ImportTask(video_id, base_path, path, rating))\n continue\n for root, dirs, files in os.walk(path):\n for file_name in files:\n try:\n file_path = os.path.join(root, file_name)\n if os.path.isdir(file_path):\n continue\n if is_valid_video_file(file_path, file_name):\n video_id, current_video_id = next_video_id(\n current_video_id, file_path)\n task_list.append(ImportTask(video_id, base_path,\n file_path, rating))\n except:\n log.error('#Error while proceeding: {0}'.format(file_name))\n exc_type, exc_value, exc_traceback = sys.exc_info()\n traceback.print_exception(exc_type, exc_value,\n exc_traceback, limit=2, file=sys.stdout)\n return task_list\n\n\ndef start_tasks(task_list):\n global task_queue\n for task in task_list:\n task_queue.put(task)\n if not 
THREAD_STOP_FLAGS:\n for _ in range(MAX_THREAD_NUM):\n THREAD_STOP_FLAGS.append(True)\n if not os.path.isdir(COVER_DIR):\n os.mkdir(COVER_DIR)\n if not os.path.isdir(THUMB_DIR):\n os.mkdir(THUMB_DIR)\n if not os.path.isdir(FLIP_DIR):\n os.mkdir(FLIP_DIR)\n for _ in range(MAX_THREAD_NUM):\n if THREAD_STOP_FLAGS[_]:\n t = Thread(target=import_worker, kwargs={'thread_index': _})\n t.name = str(_)\n t.daemon = False\n t.start()\n task_queue.join()\n\n\n<mask token>\n\n\nclass ImportTask(object):\n\n def __init__(self, video_id, base_path, path, rating=Video.P):\n \"\"\"\n Create an import task object.\n :param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.\n :param base_path: path prefix that will be ignored when creating keywords from path.\n :param path: path of the file\n :param rating: rating of the video, highest by default.\n \"\"\"\n self.video_id = video_id\n self.base_path = base_path\n self.file_path = path\n self.rating = rating\n\n\ndef import_worker(thread_index):\n \"\"\"\n Thread worker that deals with tasks.\n :return:\n \"\"\"\n THREAD_STOP_FLAGS[thread_index] = False\n while not (THREAD_STOP_FLAGS[thread_index] or task_queue.empty()):\n task = task_queue.get()\n do_import_video_task(task)\n task_queue.task_done()\n THREAD_STOP_FLAGS[thread_index] = True\n\n\ndef do_import_video_task(task):\n video_id = task.video_id\n file_path = task.file_path\n rating = task.rating\n file_name = os.path.basename(file_path)[:-4]\n tlog = get_logger(current_thread().name)\n videos = Video.objects.filter(path=file_path)\n if videos:\n tlog.info('Existing video: {0}'.format(task.file_path))\n return\n video = Video()\n video.video_id = video_id\n video.rating = rating\n thumb_path = get_thumb_path(video.video_id)\n cover_path = get_cover_path(video.video_id)\n if not gen_cover(task.file_path, cover_path):\n tlog.error('Failed to gen cover for {0}'.format(file_path))\n return\n success, duration = gen_thumb(file_path, 
thumb_path)\n if success:\n if not gen_flips(file_path, video.video_id, duration, FLIP_DIR,\n FLIP_NUM):\n tlog.error('Failed to gen flips for {0}'.format(file_path))\n else:\n tlog.error('Failed to gen thumb for {0}'.format(file_path))\n video.title = file_name\n video.path = file_path\n video.duration = duration\n video.save()\n tlog.info('#Video: {0} [{1}] {2}'.format(video.title, video.duration,\n video.path))\n\n\ndef is_valid_video_file(file_path, file_name):\n if file_name.startswith('.') or not file_name.endswith('.mp4'):\n return False\n if os.path.getsize(file_path) == 0:\n log.info('Remove invalid video file: {0}'.format(file_path))\n os.remove(file_path)\n return False\n return True\n\n\ndef load_keyword_blacklist_from_file():\n blacklist = set()\n keyword_file = 'keywords.blacklist'\n try:\n with open(keyword_file, 'r') as kfp:\n for line in kfp:\n line = line.strip('\\n')\n if line:\n blacklist.add(line)\n log.info('Keywords blacklist: {0}'.format(blacklist))\n except Exception as e:\n log.error('Error while processing {0}:{1}'.format(keyword_file, e))\n return blacklist\n\n\ndef get_keywords(prefix, file_path, blacklist):\n \"\"\"\n Get keywords from file path\n :param prefix: Prefix of the dir path, so we can ignore them\n :param file_path: full path of the video file\n :param blacklist: A set of words/symbols that should be ignored\n :return: a list of keywords\n \"\"\"\n file_path = str(file_path).replace(prefix, '')\n file_path = os.path.splitext(file_path)[0]\n file_path = str(file_path).lower()\n for bad_keyword in blacklist:\n file_path = file_path.replace(bad_keyword, ' ')\n file_path = re.sub('\\\\s+', ' ', file_path)\n keywords = file_path.split(' ')\n keywords = [k for k in keywords if k]\n return keywords\n\n\nclass KeywordDictDataObj(object):\n\n def __init__(self):\n self.count = 0\n self.files = set()\n\n\ndef get_thumb_path(fn):\n return './static/thumb/' + str(fn) + '.png'\n\n\n<mask token>\n\n\ndef gen_thumb(video_path, 
thumb_path):\n \"\"\"\n Generate thumb image for the given video, and grabs duration from output\n :return: (success, duration)\n \"\"\"\n if os.path.isfile(thumb_path):\n os.remove(thumb_path)\n global THUMB_SIZE\n cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',\n '-f', 'apng', '-s', THUMB_SIZE, thumb_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n output = p.communicate()[1]\n duration = search_duration_from_text(output)\n if not duration:\n tlog = get_logger(current_thread().name)\n tlog.error('Failed to find duration for {0}'.format(video_path))\n duration = 0\n return p.returncode == 0, duration\n\n\ndef gen_flips(video_path, video_id, duration, flip_path, flip_num):\n \"\"\"\n Generate flips for the given video\n :param video_path: path of the video\n :param video_id: id of the file\n :param duration: duration of video in seconds\n :param flip_path: path dir to put the flips\n :param flip_num: number of flips to generate\n :return: True on success, False otherwise\n \"\"\"\n if not G_GEN_IMAGE:\n return True\n duration = float(duration)\n flip_num = float(flip_num)\n interval = duration / flip_num\n if interval <= 0.0:\n tlog = get_logger(current_thread().name)\n tlog.error('Cannot generate flips. 
Duration: {0} FlipNum:{1}'.\n format(duration, flip_num))\n return False\n fps = 'fps=1/' + str(interval)\n global THUMB_SIZE\n flip_path = os.path.join(flip_path, str(video_id))\n for _ in range(FLIP_NUM + 3):\n flip_file = '{0}-{1}.png'.format(flip_path, _)\n if os.path.isfile(flip_file):\n os.remove(flip_file)\n flip_path_template = flip_path + '-%d.png'\n cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,\n flip_path_template]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n return p.returncode == 0\n\n\ndef gen_cover(video_path, cover_path):\n if not G_GEN_IMAGE:\n return True\n if os.path.isfile(cover_path):\n os.remove(cover_path)\n cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path, '-vframes', '1',\n '-f', 'apng', cover_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n return p.returncode == 0\n\n\ndef convert_video_to_mp4(video_path, dest_path):\n tlog = get_logger(current_thread().name)\n if os.path.isfile(dest_path):\n tlog.info('#Already converted, skip: {0}'.format(dest_path))\n return True\n tlog.info('#Converting: {0} => {1}\\n', video_path, dest_path)\n cmd = ['ffmpeg', '-i', video_path, '-vcodec', 'h264', '-acodec', 'aac',\n dest_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n return p.returncode == 0\n\n\ndef search_duration_from_text(text):\n regExp = re.compile('Duration: (\\\\d{2}):(\\\\d{2}):(\\\\d{2})')\n result = regExp.search(text, re.M | re.U)\n if result is not None:\n hour, min, sec = result.groups()\n duration = int(hour) * 3600 + int(min) * 60 + int(sec)\n return duration\n return None\n",
"step-4": "<mask token>\n\n\ndef register_int_signal_handler():\n\n def stop_thread_handler(signum, frame):\n log.info('Received signal {0}. Will stop all task threads'.format(\n signum))\n for _ in range(len(THREAD_STOP_FLAGS)):\n THREAD_STOP_FLAGS[_] = True\n if platform.platform().startswith('Windows'):\n signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)\n else:\n signal.signal(signal.SIGINT, stop_thread_handler)\n\n\ndef next_video_id(current, path):\n existing = Video.objects.filter(path=path)\n if existing:\n return existing[0].video_id, current\n current += 1\n return current, current\n\n\ndef create_task_list(path_list):\n \"\"\"\n Walks path recursively, and create a task list\n :param path_list: a list of (path, rating)\n :return: a list of ImportTask objects\n \"\"\"\n current_video_id = Video.objects.all().aggregate(Max('video_id'))[\n 'video_id__max']\n if not current_video_id:\n current_video_id = 0\n task_list = []\n for path, rating in path_list:\n base_path = os.path.split(path)[0]\n if os.path.isfile(path):\n file_name = os.path.basename(path)\n if is_valid_video_file(path, file_name):\n video_id, current_video_id = next_video_id(current_video_id,\n path)\n task_list.append(ImportTask(video_id, base_path, path, rating))\n continue\n for root, dirs, files in os.walk(path):\n for file_name in files:\n try:\n file_path = os.path.join(root, file_name)\n if os.path.isdir(file_path):\n continue\n if is_valid_video_file(file_path, file_name):\n video_id, current_video_id = next_video_id(\n current_video_id, file_path)\n task_list.append(ImportTask(video_id, base_path,\n file_path, rating))\n except:\n log.error('#Error while proceeding: {0}'.format(file_name))\n exc_type, exc_value, exc_traceback = sys.exc_info()\n traceback.print_exception(exc_type, exc_value,\n exc_traceback, limit=2, file=sys.stdout)\n return task_list\n\n\ndef start_tasks(task_list):\n global task_queue\n for task in task_list:\n task_queue.put(task)\n if not 
THREAD_STOP_FLAGS:\n for _ in range(MAX_THREAD_NUM):\n THREAD_STOP_FLAGS.append(True)\n if not os.path.isdir(COVER_DIR):\n os.mkdir(COVER_DIR)\n if not os.path.isdir(THUMB_DIR):\n os.mkdir(THUMB_DIR)\n if not os.path.isdir(FLIP_DIR):\n os.mkdir(FLIP_DIR)\n for _ in range(MAX_THREAD_NUM):\n if THREAD_STOP_FLAGS[_]:\n t = Thread(target=import_worker, kwargs={'thread_index': _})\n t.name = str(_)\n t.daemon = False\n t.start()\n task_queue.join()\n\n\n<mask token>\n\n\nclass ImportTask(object):\n\n def __init__(self, video_id, base_path, path, rating=Video.P):\n \"\"\"\n Create an import task object.\n :param video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.\n :param base_path: path prefix that will be ignored when creating keywords from path.\n :param path: path of the file\n :param rating: rating of the video, highest by default.\n \"\"\"\n self.video_id = video_id\n self.base_path = base_path\n self.file_path = path\n self.rating = rating\n\n\ndef import_worker(thread_index):\n \"\"\"\n Thread worker that deals with tasks.\n :return:\n \"\"\"\n THREAD_STOP_FLAGS[thread_index] = False\n while not (THREAD_STOP_FLAGS[thread_index] or task_queue.empty()):\n task = task_queue.get()\n do_import_video_task(task)\n task_queue.task_done()\n THREAD_STOP_FLAGS[thread_index] = True\n\n\ndef do_import_video_task(task):\n video_id = task.video_id\n file_path = task.file_path\n rating = task.rating\n file_name = os.path.basename(file_path)[:-4]\n tlog = get_logger(current_thread().name)\n videos = Video.objects.filter(path=file_path)\n if videos:\n tlog.info('Existing video: {0}'.format(task.file_path))\n return\n video = Video()\n video.video_id = video_id\n video.rating = rating\n thumb_path = get_thumb_path(video.video_id)\n cover_path = get_cover_path(video.video_id)\n if not gen_cover(task.file_path, cover_path):\n tlog.error('Failed to gen cover for {0}'.format(file_path))\n return\n success, duration = gen_thumb(file_path, 
thumb_path)\n if success:\n if not gen_flips(file_path, video.video_id, duration, FLIP_DIR,\n FLIP_NUM):\n tlog.error('Failed to gen flips for {0}'.format(file_path))\n else:\n tlog.error('Failed to gen thumb for {0}'.format(file_path))\n video.title = file_name\n video.path = file_path\n video.duration = duration\n video.save()\n tlog.info('#Video: {0} [{1}] {2}'.format(video.title, video.duration,\n video.path))\n\n\ndef is_valid_video_file(file_path, file_name):\n if file_name.startswith('.') or not file_name.endswith('.mp4'):\n return False\n if os.path.getsize(file_path) == 0:\n log.info('Remove invalid video file: {0}'.format(file_path))\n os.remove(file_path)\n return False\n return True\n\n\ndef load_keyword_blacklist_from_file():\n blacklist = set()\n keyword_file = 'keywords.blacklist'\n try:\n with open(keyword_file, 'r') as kfp:\n for line in kfp:\n line = line.strip('\\n')\n if line:\n blacklist.add(line)\n log.info('Keywords blacklist: {0}'.format(blacklist))\n except Exception as e:\n log.error('Error while processing {0}:{1}'.format(keyword_file, e))\n return blacklist\n\n\ndef get_keywords(prefix, file_path, blacklist):\n \"\"\"\n Get keywords from file path\n :param prefix: Prefix of the dir path, so we can ignore them\n :param file_path: full path of the video file\n :param blacklist: A set of words/symbols that should be ignored\n :return: a list of keywords\n \"\"\"\n file_path = str(file_path).replace(prefix, '')\n file_path = os.path.splitext(file_path)[0]\n file_path = str(file_path).lower()\n for bad_keyword in blacklist:\n file_path = file_path.replace(bad_keyword, ' ')\n file_path = re.sub('\\\\s+', ' ', file_path)\n keywords = file_path.split(' ')\n keywords = [k for k in keywords if k]\n return keywords\n\n\nclass KeywordDictDataObj(object):\n\n def __init__(self):\n self.count = 0\n self.files = set()\n\n\ndef get_thumb_path(fn):\n return './static/thumb/' + str(fn) + '.png'\n\n\ndef get_cover_path(fn):\n return './static/cover/' + 
str(fn) + '.png'\n\n\ndef gen_thumb(video_path, thumb_path):\n \"\"\"\n Generate thumb image for the given video, and grabs duration from output\n :return: (success, duration)\n \"\"\"\n if os.path.isfile(thumb_path):\n os.remove(thumb_path)\n global THUMB_SIZE\n cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1',\n '-f', 'apng', '-s', THUMB_SIZE, thumb_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n output = p.communicate()[1]\n duration = search_duration_from_text(output)\n if not duration:\n tlog = get_logger(current_thread().name)\n tlog.error('Failed to find duration for {0}'.format(video_path))\n duration = 0\n return p.returncode == 0, duration\n\n\ndef gen_flips(video_path, video_id, duration, flip_path, flip_num):\n \"\"\"\n Generate flips for the given video\n :param video_path: path of the video\n :param video_id: id of the file\n :param duration: duration of video in seconds\n :param flip_path: path dir to put the flips\n :param flip_num: number of flips to generate\n :return: True on success, False otherwise\n \"\"\"\n if not G_GEN_IMAGE:\n return True\n duration = float(duration)\n flip_num = float(flip_num)\n interval = duration / flip_num\n if interval <= 0.0:\n tlog = get_logger(current_thread().name)\n tlog.error('Cannot generate flips. 
Duration: {0} FlipNum:{1}'.\n format(duration, flip_num))\n return False\n fps = 'fps=1/' + str(interval)\n global THUMB_SIZE\n flip_path = os.path.join(flip_path, str(video_id))\n for _ in range(FLIP_NUM + 3):\n flip_file = '{0}-{1}.png'.format(flip_path, _)\n if os.path.isfile(flip_file):\n os.remove(flip_file)\n flip_path_template = flip_path + '-%d.png'\n cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE,\n flip_path_template]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n return p.returncode == 0\n\n\ndef gen_cover(video_path, cover_path):\n if not G_GEN_IMAGE:\n return True\n if os.path.isfile(cover_path):\n os.remove(cover_path)\n cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path, '-vframes', '1',\n '-f', 'apng', cover_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n return p.returncode == 0\n\n\ndef convert_video_to_mp4(video_path, dest_path):\n tlog = get_logger(current_thread().name)\n if os.path.isfile(dest_path):\n tlog.info('#Already converted, skip: {0}'.format(dest_path))\n return True\n tlog.info('#Converting: {0} => {1}\\n', video_path, dest_path)\n cmd = ['ffmpeg', '-i', video_path, '-vcodec', 'h264', '-acodec', 'aac',\n dest_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n return p.returncode == 0\n\n\ndef search_duration_from_text(text):\n regExp = re.compile('Duration: (\\\\d{2}):(\\\\d{2}):(\\\\d{2})')\n result = regExp.search(text, re.M | re.U)\n if result is not None:\n hour, min, sec = result.groups()\n duration = int(hour) * 3600 + int(min) * 60 + int(sec)\n return duration\n return None\n",
"step-5": "# coding=utf8\n# encoding: utf-8\n\nimport os\nimport platform\nimport re\nimport signal\nimport sys\nimport traceback\nfrom subprocess import Popen, PIPE\nfrom threading import Thread, current_thread\n\nfrom Queue import Queue\n\nfrom util.log import get_logger, log\nfrom video.models import Video, KeywordVideoId\nfrom django.db.models import Max\nfrom collect_video import G_GEN_IMAGE\n\nMAX_THREAD_NUM = 4\nTHREAD_STOP_FLAGS = []\n\nTHUMB_DIR = './static/thumb'\nTHUMB_SIZE = '180x135'\nCOVER_DIR = './static/cover'\nFLIP_DIR = './static/flip'\n\nFLIP_NUM = 10\n\ntask_queue = Queue(maxsize=2000)\n\n\ndef register_int_signal_handler():\n def stop_thread_handler(signum, frame):\n log.info(\"Received signal {0}. Will stop all task threads\".format(signum))\n for _ in range(len(THREAD_STOP_FLAGS)):\n THREAD_STOP_FLAGS[_] = True\n\n if platform.platform().startswith('Windows'):\n signal.signal(signal.CTRL_C_EVENT, stop_thread_handler)\n else:\n signal.signal(signal.SIGINT, stop_thread_handler)\n\n\ndef next_video_id(current, path):\n existing = Video.objects.filter(path=path)\n if existing:\n return existing[0].video_id, current\n current += 1\n return current, current\n\ndef create_task_list(path_list):\n \"\"\"\n Walks path recursively, and create a task list\n :param path_list: a list of (path, rating)\n :return: a list of ImportTask objects\n \"\"\"\n current_video_id = Video.objects.all().aggregate(Max('video_id'))['video_id__max']\n if not current_video_id:\n current_video_id = 0\n\n task_list = []\n for (path, rating) in path_list:\n base_path = os.path.split(path)[0]\n if os.path.isfile(path):\n file_name = os.path.basename(path)\n if is_valid_video_file(path, file_name):\n video_id, current_video_id = next_video_id(current_video_id, path)\n task_list.append(ImportTask(video_id, base_path, path, rating))\n continue\n for (root, dirs, files) in os.walk(path):\n for file_name in files:\n try:\n file_path = os.path.join(root, file_name)\n if 
os.path.isdir(file_path):\n continue\n if is_valid_video_file(file_path, file_name):\n video_id, current_video_id = next_video_id(current_video_id, file_path)\n task_list.append(ImportTask(video_id, base_path, file_path, rating))\n except:\n log.error('#Error while proceeding: {0}'.format(file_name))\n exc_type, exc_value, exc_traceback = sys.exc_info()\n traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2, file=sys.stdout)\n return task_list\n\n\ndef start_tasks(task_list):\n global task_queue\n for task in task_list:\n task_queue.put(task)\n\n if not THREAD_STOP_FLAGS:\n for _ in range(MAX_THREAD_NUM):\n THREAD_STOP_FLAGS.append(True)\n\n if not os.path.isdir(COVER_DIR):\n os.mkdir(COVER_DIR)\n if not os.path.isdir(THUMB_DIR):\n os.mkdir(THUMB_DIR)\n if not os.path.isdir(FLIP_DIR):\n os.mkdir(FLIP_DIR)\n for _ in range(MAX_THREAD_NUM):\n if THREAD_STOP_FLAGS[_]:\n t = Thread(target=import_worker, kwargs={'thread_index': _})\n t.name = str(_)\n t.daemon = False\n t.start()\n\n task_queue.join()\n\n\ndef add_keywords_to_db(task_list):\n blacklist = load_keyword_blacklist_from_file()\n for task in task_list:\n base_path = task.base_path\n file_path = task.file_path\n video_id = task.video_id\n\n keywords = get_keywords(base_path, file_path, blacklist)\n\n log.info('#Keywords:'.format(keywords))\n for key in keywords:\n try:\n if KeywordVideoId.objects.filter(keyword=key, video_id=video_id):\n log.info(\"Existing keyword {0} for {1}\".format(key, video_id))\n continue\n keyword_record = KeywordVideoId()\n keyword_record.keyword = key\n keyword_record.video = Video.objects.get(video_id=video_id)\n keyword_record.save()\n log.info('#Added keyword:{0} for video_id: {1}'.format(key, video_id))\n except Exception as e:\n log.error(\"Error while adding keyword {0} to video {1}: {2}\".format(key, video_id, e))\n\n\nclass ImportTask(object):\n def __init__(self, video_id, base_path, path, rating=Video.P):\n \"\"\"\n Create an import task object.\n :param 
video_id: a pre-allocated video_id in number, so we don't need to lock db in multiple thread.\n :param base_path: path prefix that will be ignored when creating keywords from path.\n :param path: path of the file\n :param rating: rating of the video, highest by default.\n \"\"\"\n self.video_id = video_id\n self.base_path = base_path\n self.file_path = path\n self.rating = rating\n\n\ndef import_worker(thread_index):\n \"\"\"\n Thread worker that deals with tasks.\n :return:\n \"\"\"\n THREAD_STOP_FLAGS[thread_index] = False\n while not (THREAD_STOP_FLAGS[thread_index] or task_queue.empty()):\n task = task_queue.get()\n do_import_video_task(task)\n task_queue.task_done()\n THREAD_STOP_FLAGS[thread_index] = True\n\n\ndef do_import_video_task(task):\n video_id = task.video_id\n file_path = task.file_path\n rating = task.rating\n file_name = os.path.basename(file_path)[:-4]\n\n tlog = get_logger(current_thread().name)\n videos = Video.objects.filter(path=file_path)\n if videos:\n tlog.info(\"Existing video: {0}\".format(task.file_path))\n return\n video = Video()\n video.video_id = video_id\n video.rating = rating\n\n thumb_path = get_thumb_path(video.video_id)\n cover_path = get_cover_path(video.video_id)\n if not gen_cover(task.file_path, cover_path):\n tlog.error(\"Failed to gen cover for {0}\".format(file_path))\n return\n\n success, duration = gen_thumb(file_path, thumb_path)\n if success:\n if not gen_flips(file_path, video.video_id, duration, FLIP_DIR, FLIP_NUM):\n tlog.error(\"Failed to gen flips for {0}\".format(file_path))\n else:\n tlog.error(\"Failed to gen thumb for {0}\".format(file_path))\n\n video.title = file_name\n video.path = file_path\n video.duration = duration\n video.save()\n tlog.info('#Video: {0} [{1}] {2}'.format(video.title, video.duration, video.path))\n\n\ndef is_valid_video_file(file_path, file_name):\n # skip hidden files (possibly not valid video files)\n if file_name.startswith('.') or (not file_name.endswith('.mp4')):\n return 
False\n if os.path.getsize(file_path) == 0:\n log.info('Remove invalid video file: {0}'.format(file_path))\n os.remove(file_path)\n return False\n return True\n\n\ndef load_keyword_blacklist_from_file():\n blacklist = set()\n keyword_file = 'keywords.blacklist'\n try:\n with open(keyword_file, 'r') as kfp:\n for line in kfp:\n line = line.strip('\\n')\n if line:\n blacklist.add(line)\n log.info(\"Keywords blacklist: {0}\".format(blacklist))\n except Exception as e:\n log.error(\"Error while processing {0}:{1}\".format(keyword_file, e))\n return blacklist\n\n\ndef get_keywords(prefix, file_path, blacklist):\n \"\"\"\n Get keywords from file path\n :param prefix: Prefix of the dir path, so we can ignore them\n :param file_path: full path of the video file\n :param blacklist: A set of words/symbols that should be ignored\n :return: a list of keywords\n \"\"\"\n file_path = str(file_path).replace(prefix, '') # remove base_dir from file_path\n file_path = os.path.splitext(file_path)[0] # Only keep the part without extension\n file_path = str(file_path).lower()\n for bad_keyword in blacklist:\n file_path = file_path.replace(bad_keyword, ' ')\n file_path = re.sub(r'\\s+', ' ', file_path) # Replace multiple spaces to single one\n keywords = file_path.split(' ')\n keywords = [k for k in keywords if k]\n\n return keywords\n\n\n\nclass KeywordDictDataObj(object):\n def __init__(self):\n self.count = 0\n self.files = set()\n\n\ndef get_thumb_path(fn):\n return './static/thumb/' + str(fn) + '.png'\n\n\ndef get_cover_path(fn):\n return './static/cover/' + str(fn) + '.png'\n\n\ndef gen_thumb(video_path, thumb_path):\n \"\"\"\n Generate thumb image for the given video, and grabs duration from output\n :return: (success, duration)\n \"\"\"\n if os.path.isfile(thumb_path):\n os.remove(thumb_path)\n\n global THUMB_SIZE\n cmd = ['ffmpeg', '-itsoffset', '-5', '-i', video_path, '-vframes', '1', '-f', 'apng', '-s', THUMB_SIZE, thumb_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, 
stderr=PIPE)\n output = p.communicate()[1]\n\n duration = search_duration_from_text(output)\n if not duration:\n tlog = get_logger(current_thread().name)\n tlog.error(\"Failed to find duration for {0}\".format(video_path))\n duration = 0\n\n return p.returncode == 0, duration\n\n\ndef gen_flips(video_path, video_id, duration, flip_path, flip_num):\n \"\"\"\n Generate flips for the given video\n :param video_path: path of the video\n :param video_id: id of the file\n :param duration: duration of video in seconds\n :param flip_path: path dir to put the flips\n :param flip_num: number of flips to generate\n :return: True on success, False otherwise\n \"\"\"\n if not G_GEN_IMAGE:\n return True\n\n duration = float(duration)\n flip_num = float(flip_num)\n interval = duration / flip_num\n if interval <= 0.0:\n tlog = get_logger(current_thread().name)\n tlog.error(\"Cannot generate flips. Duration: {0} FlipNum:{1}\".format(duration, flip_num))\n return False\n fps = 'fps=1/' + str(interval)\n global THUMB_SIZE\n flip_path = os.path.join(flip_path, str(video_id))\n for _ in range(FLIP_NUM+3):\n flip_file = \"{0}-{1}.png\".format(flip_path, _)\n if os.path.isfile(flip_file):\n os.remove(flip_file)\n flip_path_template = flip_path + '-%d.png'\n cmd = ['ffmpeg', '-i', video_path, '-vf', fps, '-s', THUMB_SIZE, flip_path_template]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n\n return p.returncode == 0\n\n\ndef gen_cover(video_path, cover_path):\n if not G_GEN_IMAGE:\n return True\n if os.path.isfile(cover_path):\n os.remove(cover_path)\n\n cmd = ['ffmpeg', '-itsoffset', '-1', '-i', video_path, '-vframes', '1', '-f', 'apng', cover_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n\n return p.returncode == 0\n\n\n# Convert video to mp4\ndef convert_video_to_mp4(video_path, dest_path):\n tlog = get_logger(current_thread().name)\n if os.path.isfile(dest_path):\n tlog.info('#Already converted, skip: 
{0}'.format(dest_path))\n return True\n tlog.info('#Converting: {0} => {1}\\n', video_path, dest_path)\n\n cmd = ['ffmpeg', '-i', video_path, '-vcodec', 'h264', '-acodec', 'aac', dest_path]\n p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n p.communicate()\n\n return p.returncode == 0\n\n\n# Search the duration from given text\ndef search_duration_from_text(text):\n # Match pattern like Duration: 00:24:14.91, s\n regExp = re.compile(r'Duration: (\\d{2}):(\\d{2}):(\\d{2})')\n result = regExp.search(text, re.M | re.U)\n\n if result is not None:\n (hour, min, sec) = result.groups()\n duration = int(hour) * 3600 + int(min) * 60 + int(sec)\n return duration\n return None\n",
"step-ids": [
13,
16,
19,
20,
24
]
}
|
[
13,
16,
19,
20,
24
] |
from introduction import give_speech
from staring import stare_at_people
from dow_jones import visualize_dow_jones
from art_critic import give_art_critiques
from hipster import try_hipster_social_interaction
from empathy import share_feelings_with_everyone
from slapstick import perform_slapstick_humor
from ending import finish
def performance():
    """Run every act of the show in its fixed order, ending with the finale."""
    # The ordering of the acts is part of the performance itself.
    acts = (
        give_speech,
        visualize_dow_jones,
        give_art_critiques,
        stare_at_people,
        try_hipster_social_interaction,
        share_feelings_with_everyone,
        perform_slapstick_humor,
        finish,
    )
    for act in acts:
        act()
# Entry point: run the show only when executed as a script, not when imported.
if __name__ == '__main__':
    performance()
|
normal
|
{
"blob_id": "d218b72d1992a30ad07a1edca1caf04b7b1985f6",
"index": 7834,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef performance():\n give_speech()\n visualize_dow_jones()\n give_art_critiques()\n stare_at_people()\n try_hipster_social_interaction()\n share_feelings_with_everyone()\n perform_slapstick_humor()\n finish()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef performance():\n give_speech()\n visualize_dow_jones()\n give_art_critiques()\n stare_at_people()\n try_hipster_social_interaction()\n share_feelings_with_everyone()\n perform_slapstick_humor()\n finish()\n\n\nif __name__ == '__main__':\n performance()\n",
"step-4": "from introduction import give_speech\nfrom staring import stare_at_people\nfrom dow_jones import visualize_dow_jones\nfrom art_critic import give_art_critiques\nfrom hipster import try_hipster_social_interaction\nfrom empathy import share_feelings_with_everyone\nfrom slapstick import perform_slapstick_humor\nfrom ending import finish\n\n\ndef performance():\n give_speech()\n visualize_dow_jones()\n give_art_critiques()\n stare_at_people()\n try_hipster_social_interaction()\n share_feelings_with_everyone()\n perform_slapstick_humor()\n finish()\n\n\nif __name__ == '__main__':\n performance()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main(connection, info, args, world):
"""Resets a users money"""
money = shelve.open('money-%s.db' % world.hostnicks[connection.host],
writeback=True)
money[info['sender']] = {'money': 100000, 'maxmoney': 100000, 'items':
[], 'coinchance': [(True) for x in range(50)] + [(False) for x in
range(50)]}
money.sync()
connection.ircsend(info['channel'],
'%s: Your money data has been reset.' % info['sender'])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
arguments = ['self', 'info', 'args', 'world']
minlevel = 2
helpstring = 'moneyreset'
def main(connection, info, args, world):
"""Resets a users money"""
money = shelve.open('money-%s.db' % world.hostnicks[connection.host],
writeback=True)
money[info['sender']] = {'money': 100000, 'maxmoney': 100000, 'items':
[], 'coinchance': [(True) for x in range(50)] + [(False) for x in
range(50)]}
money.sync()
connection.ircsend(info['channel'],
'%s: Your money data has been reset.' % info['sender'])
<|reserved_special_token_1|>
import shelve
arguments = ['self', 'info', 'args', 'world']
minlevel = 2
helpstring = 'moneyreset'
def main(connection, info, args, world):
"""Resets a users money"""
money = shelve.open('money-%s.db' % world.hostnicks[connection.host],
writeback=True)
money[info['sender']] = {'money': 100000, 'maxmoney': 100000, 'items':
[], 'coinchance': [(True) for x in range(50)] + [(False) for x in
range(50)]}
money.sync()
connection.ircsend(info['channel'],
'%s: Your money data has been reset.' % info['sender'])
<|reserved_special_token_1|>
import shelve
arguments = ["self", "info", "args", "world"]
minlevel = 2
helpstring = "moneyreset"
def main(connection, info, args, world) :
"""Resets a users money"""
money = shelve.open("money-%s.db" % (world.hostnicks[connection.host]), writeback=True)
money[info["sender"]] = {"money":100000, "maxmoney":100000, "items":[], "coinchance":[True for x in range(50)] + [False for x in range(50)]}
money.sync()
connection.ircsend(info["channel"], "%s: Your money data has been reset." % (info["sender"]))
|
flexible
|
{
"blob_id": "95021cc01c0b85b512fd466797d4d128472773c3",
"index": 2943,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main(connection, info, args, world):\n \"\"\"Resets a users money\"\"\"\n money = shelve.open('money-%s.db' % world.hostnicks[connection.host],\n writeback=True)\n money[info['sender']] = {'money': 100000, 'maxmoney': 100000, 'items':\n [], 'coinchance': [(True) for x in range(50)] + [(False) for x in\n range(50)]}\n money.sync()\n connection.ircsend(info['channel'], \n '%s: Your money data has been reset.' % info['sender'])\n",
"step-3": "<mask token>\narguments = ['self', 'info', 'args', 'world']\nminlevel = 2\nhelpstring = 'moneyreset'\n\n\ndef main(connection, info, args, world):\n \"\"\"Resets a users money\"\"\"\n money = shelve.open('money-%s.db' % world.hostnicks[connection.host],\n writeback=True)\n money[info['sender']] = {'money': 100000, 'maxmoney': 100000, 'items':\n [], 'coinchance': [(True) for x in range(50)] + [(False) for x in\n range(50)]}\n money.sync()\n connection.ircsend(info['channel'], \n '%s: Your money data has been reset.' % info['sender'])\n",
"step-4": "import shelve\narguments = ['self', 'info', 'args', 'world']\nminlevel = 2\nhelpstring = 'moneyreset'\n\n\ndef main(connection, info, args, world):\n \"\"\"Resets a users money\"\"\"\n money = shelve.open('money-%s.db' % world.hostnicks[connection.host],\n writeback=True)\n money[info['sender']] = {'money': 100000, 'maxmoney': 100000, 'items':\n [], 'coinchance': [(True) for x in range(50)] + [(False) for x in\n range(50)]}\n money.sync()\n connection.ircsend(info['channel'], \n '%s: Your money data has been reset.' % info['sender'])\n",
"step-5": "import shelve\narguments = [\"self\", \"info\", \"args\", \"world\"]\nminlevel = 2\nhelpstring = \"moneyreset\"\n\ndef main(connection, info, args, world) :\n \"\"\"Resets a users money\"\"\"\n money = shelve.open(\"money-%s.db\" % (world.hostnicks[connection.host]), writeback=True)\n money[info[\"sender\"]] = {\"money\":100000, \"maxmoney\":100000, \"items\":[], \"coinchance\":[True for x in range(50)] + [False for x in range(50)]}\n money.sync()\n connection.ircsend(info[\"channel\"], \"%s: Your money data has been reset.\" % (info[\"sender\"]))\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy as np
def get_mask(mask):
    """Collapse an RGB mask image into a {-1, 0, +1} matrix.

    Marked red-channel pixels map to -1 and marked green-channel pixels
    map to +1; unmarked pixels stay 0.  (Downstream, this matrix is
    scaled and added to the seam energy, biasing seams toward -1 cells
    and away from +1 cells.)
    """
    red = mask[:, :, 0]
    green = mask[:, :, 1]
    # `or 1` guards against dividing by zero when a channel is entirely empty.
    red_flag = red // (red.max() or 1)
    green_flag = green // (green.max() or 1)
    return green_flag - red_flag
def calculate_brightness(image):
    """Per-pixel brightness of an RGB image.

    Uses the BT.601 luma coefficients 0.299 R + 0.587 G + 0.114 B.

    :param image: array of shape (H, W, 3)
    :return: float matrix of shape (H, W)
    """
    luma_coeffs = np.array([0.299, 0.587, 0.114])
    # Matrix product over the trailing (channel) axis == weighted channel sum.
    return image @ luma_coeffs
def calculate_energy(brightness):
x_gradient = np.hstack((
(brightness[:, 1] - brightness[:, 0])[:, np.newaxis],
brightness[:, 2:] - brightness[:, :-2],
(brightness[:, -1] - brightness[:, -2])[:, np.newaxis]
))
y_gradient = np.vstack((
brightness[1, :] - brightness[0, :],
brightness[2:, :] - brightness[:-2, :],
brightness[-1, :] - brightness[-2, :]
))
return np.sqrt(x_gradient ** 2 + y_gradient ** 2)
def calculate_minimal_seam_matrix(pre_energy, mask=None):
    """Dynamic-programming table of cumulative minimal seam energies.

    Each cell of row i becomes its own energy plus the cheapest of the
    three reachable cells (left-up, up, right-up) in row i-1; border
    columns clamp to the edge.  When *mask* is given it is added to the
    energy first, so positive mask cells repel seams and negative cells
    attract them.
    """
    acc = pre_energy.copy() if mask is None else pre_energy + mask
    for row_idx in range(1, acc.shape[0]):
        prev = acc[row_idx - 1]
        # Edge columns reuse their own value as the out-of-range neighbour.
        shifted_left = np.concatenate(([prev[0]], prev[:-1]))
        shifted_right = np.concatenate((prev[1:], [prev[-1]]))
        acc[row_idx] += np.minimum(np.minimum(shifted_left, prev), shifted_right)
    return acc
def get_minimal_seam(min_seam):
    """Backtrack the minimal vertical seam through a DP table.

    Starts from the cheapest cell in the bottom row and walks upward,
    at each step choosing the cheapest of the (at most three) cells
    that could have led to the current column.

    :return: int32 array of column indices, one per row (top to bottom).
    """
    rows, cols = min_seam.shape
    path = np.zeros(rows, dtype=np.int32)
    path[-1] = np.argmin(min_seam[-1])
    for r in range(rows - 2, -1, -1):
        prev_col = path[r + 1]
        # Clamp the 3-wide search window to the table's columns.
        lo = max(prev_col - 1, 0)
        hi = min(prev_col + 2, cols)
        path[r] = lo + np.argmin(min_seam[r, lo:hi])
    return path
def cut(image, mask):
    """Remove one minimal vertical seam from *image*.

    :param image: (H, W, 3) uint8 image
    :param mask: optional (H, W) bias matrix as produced by get_mask, or None
    :return: (image minus the seam, shrunk mask or None, binary seam matrix)
    """
    energy = calculate_energy(calculate_brightness(image))
    # Scale the mask far above any attainable energy so it dominates the DP.
    weight = image.shape[0] * image.shape[1] * 256
    seam_table = calculate_minimal_seam_matrix(
        energy, None if mask is None else mask * weight)
    seam = get_minimal_seam(seam_table)

    height, width = image.shape[:2]
    carved = np.empty((height, width - 1, 3), np.uint8)
    carved_mask = None if mask is None else np.empty((height, width - 1), np.int32)
    seam_map = np.zeros((height, width), dtype=np.uint8)
    for row, col in enumerate(seam):
        carved[row] = np.delete(image[row], col, axis=0)
        if mask is not None:
            carved_mask[row] = np.delete(mask[row], col, axis=0)
        seam_map[row][col] = 1
    return carved, carved_mask, seam_map
def extend(image, mask):
    """Insert one duplicated vertical seam into *image*.

    The minimal-energy seam is found exactly as in ``cut``.  Each seam
    pixel gains a right neighbour averaged with the next column (or a
    plain copy at the right border).  In the returned mask both seam
    columns are set to 1, which biases later passes away from
    re-selecting the same seam.

    :return: (widened image, widened mask or None, binary seam matrix)
    """
    energy = calculate_energy(calculate_brightness(image))
    # Scale the mask far above any attainable energy so it dominates the DP.
    weight = image.shape[0] * image.shape[1] * 256
    seam_table = calculate_minimal_seam_matrix(
        energy, None if mask is None else mask * weight)
    seam = get_minimal_seam(seam_table)

    height, width = image.shape[:2]
    widened = np.empty((height, width + 1, 3), np.uint8)
    widened_mask = None if mask is None else np.zeros((height, width + 1), np.int32)
    seam_map = np.zeros((height, width), dtype=np.uint8)
    for row, col in enumerate(seam):
        if col >= width - 1:
            # Seam hugs the right border: duplicate the last pixel.
            widened[row] = np.concatenate((image[row], [image[row][-1]]), axis=0)
            if mask is not None:
                widened_mask[row] = np.append(mask[row], 0)
                widened_mask[row][-2] = 1
                widened_mask[row][-1] = 1
        else:
            # Halve before adding so uint8 arithmetic cannot overflow.
            blended = image[row][col] // 2 + image[row][col + 1] // 2
            widened[row] = np.insert(image[row], col + 1, blended, axis=0)
            if mask is not None:
                widened_mask[row] = np.insert(mask[row], col + 1, 0, axis=0)
                widened_mask[row][col] = 1
                widened_mask[row][col + 1] = 1
        seam_map[row][col] = 1
    return widened, widened_mask, seam_map
def seam_carve(image, mode, mask):
    """Perform one seam-carving step selected by *mode*.

    Horizontal modes operate on the image directly; vertical modes
    transpose, run the corresponding horizontal operation, and transpose
    the results back.  Any unrecognised mode falls through to a vertical
    expand, matching the original dispatch.
    """
    operation = cut if mode in ('horizontal shrink', 'vertical shrink') else extend
    if mode in ('horizontal shrink', 'horizontal expand'):
        return operation(image, mask)
    # Vertical case: swap axes, carve horizontally, swap back.
    image_t = np.transpose(image, (1, 0, 2))
    mask_t = None if mask is None else mask.T
    out_image, out_mask, out_seam = operation(image_t, mask_t)
    return (np.transpose(out_image, (1, 0, 2)),
            None if mask is None else out_mask.T,
            out_seam.T)
|
normal
|
{
"blob_id": "7130a382784955780a3f258c81ce05c61915af56",
"index": 5000,
"step-1": "<mask token>\n\n\ndef get_mask(mask):\n r = mask[:, :, 0]\n g = mask[:, :, 1]\n return r // (r.max() or 1) * -1 + g // (g.max() or 1)\n\n\n<mask token>\n\n\ndef extend(image, mask):\n brightness = calculate_brightness(image)\n energy = calculate_energy(brightness)\n mult = image.shape[0] * image.shape[1] * 256\n min_seam = calculate_minimal_seam_matrix(energy, mask * mult if mask is not\n None else None)\n seam = get_minimal_seam(min_seam)\n copy = np.empty((image.shape[0], image.shape[1] + 1, 3), np.uint8)\n copy_mask = np.zeros((image.shape[0], image.shape[1] + 1), np.int32\n ) if mask is not None else None\n seam_mask = np.zeros(image.shape[:2], dtype=np.uint8)\n for row, i in enumerate(seam):\n if i >= image.shape[1] - 1:\n copy[row] = np.concatenate((image[row], [image[row][-1]]), axis=0)\n if mask is not None:\n copy_mask[row] = np.append(mask[row], 0)\n copy_mask[row][-2] = 1\n copy_mask[row][-1] = 1\n else:\n copy[row] = np.insert(image[row], i + 1, image[row][i] // 2 + \n image[row][i + 1] // 2, axis=0)\n if mask is not None:\n copy_mask[row] = np.insert(mask[row], i + 1, 0, axis=0)\n copy_mask[row][i] = 1\n copy_mask[row][i + 1] = 1\n seam_mask[row][i] = 1\n return copy, copy_mask, seam_mask\n\n\ndef seam_carve(image, mode, mask):\n if mode == 'horizontal shrink':\n return cut(image, mask)\n elif mode == 'vertical shrink':\n transposed_image, transposed_mask, transposed_seam_mask = cut(np.\n transpose(image, (1, 0, 2)), mask.T if mask is not None else None)\n return (np.transpose(transposed_image, (1, 0, 2)), transposed_mask.\n T if mask is not None else None, transposed_seam_mask.T)\n elif mode == 'horizontal expand':\n return extend(image, mask)\n else:\n transposed_image, transposed_mask, transposed_seam_mask = extend(np\n .transpose(image, (1, 0, 2)), mask.T if mask is not None else None)\n return (np.transpose(transposed_image, (1, 0, 2)), transposed_mask.\n T if mask is not None else None, transposed_seam_mask.T)\n",
"step-2": "<mask token>\n\n\ndef get_mask(mask):\n r = mask[:, :, 0]\n g = mask[:, :, 1]\n return r // (r.max() or 1) * -1 + g // (g.max() or 1)\n\n\ndef calculate_brightness(image):\n weights = np.array([0.299, 0.587, 0.114])\n brightness_matrix = (image * weights).sum(axis=2)\n return brightness_matrix\n\n\ndef calculate_energy(brightness):\n x_gradient = np.hstack(((brightness[:, 1] - brightness[:, 0])[:, np.\n newaxis], brightness[:, 2:] - brightness[:, :-2], (brightness[:, -1\n ] - brightness[:, -2])[:, np.newaxis]))\n y_gradient = np.vstack((brightness[1, :] - brightness[0, :], brightness\n [2:, :] - brightness[:-2, :], brightness[-1, :] - brightness[-2, :]))\n return np.sqrt(x_gradient ** 2 + y_gradient ** 2)\n\n\n<mask token>\n\n\ndef cut(image, mask):\n brightness = calculate_brightness(image)\n energy = calculate_energy(brightness)\n mult = image.shape[0] * image.shape[1] * 256\n min_seam = calculate_minimal_seam_matrix(energy, mask * mult if mask is not\n None else None)\n seam = get_minimal_seam(min_seam)\n copy = np.empty((image.shape[0], image.shape[1] - 1, 3), np.uint8)\n copy_mask = np.empty((image.shape[0], image.shape[1] - 1), np.int32\n ) if mask is not None else None\n seam_mask = np.zeros(image.shape[:2], dtype=np.uint8)\n for row, i in enumerate(seam):\n copy[row] = np.delete(image[row], i, axis=0)\n if mask is not None:\n copy_mask[row] = np.delete(mask[row], i, axis=0)\n seam_mask[row][i] = 1\n return copy, copy_mask, seam_mask\n\n\ndef extend(image, mask):\n brightness = calculate_brightness(image)\n energy = calculate_energy(brightness)\n mult = image.shape[0] * image.shape[1] * 256\n min_seam = calculate_minimal_seam_matrix(energy, mask * mult if mask is not\n None else None)\n seam = get_minimal_seam(min_seam)\n copy = np.empty((image.shape[0], image.shape[1] + 1, 3), np.uint8)\n copy_mask = np.zeros((image.shape[0], image.shape[1] + 1), np.int32\n ) if mask is not None else None\n seam_mask = np.zeros(image.shape[:2], dtype=np.uint8)\n 
for row, i in enumerate(seam):\n if i >= image.shape[1] - 1:\n copy[row] = np.concatenate((image[row], [image[row][-1]]), axis=0)\n if mask is not None:\n copy_mask[row] = np.append(mask[row], 0)\n copy_mask[row][-2] = 1\n copy_mask[row][-1] = 1\n else:\n copy[row] = np.insert(image[row], i + 1, image[row][i] // 2 + \n image[row][i + 1] // 2, axis=0)\n if mask is not None:\n copy_mask[row] = np.insert(mask[row], i + 1, 0, axis=0)\n copy_mask[row][i] = 1\n copy_mask[row][i + 1] = 1\n seam_mask[row][i] = 1\n return copy, copy_mask, seam_mask\n\n\ndef seam_carve(image, mode, mask):\n if mode == 'horizontal shrink':\n return cut(image, mask)\n elif mode == 'vertical shrink':\n transposed_image, transposed_mask, transposed_seam_mask = cut(np.\n transpose(image, (1, 0, 2)), mask.T if mask is not None else None)\n return (np.transpose(transposed_image, (1, 0, 2)), transposed_mask.\n T if mask is not None else None, transposed_seam_mask.T)\n elif mode == 'horizontal expand':\n return extend(image, mask)\n else:\n transposed_image, transposed_mask, transposed_seam_mask = extend(np\n .transpose(image, (1, 0, 2)), mask.T if mask is not None else None)\n return (np.transpose(transposed_image, (1, 0, 2)), transposed_mask.\n T if mask is not None else None, transposed_seam_mask.T)\n",
"step-3": "<mask token>\n\n\ndef get_mask(mask):\n r = mask[:, :, 0]\n g = mask[:, :, 1]\n return r // (r.max() or 1) * -1 + g // (g.max() or 1)\n\n\ndef calculate_brightness(image):\n weights = np.array([0.299, 0.587, 0.114])\n brightness_matrix = (image * weights).sum(axis=2)\n return brightness_matrix\n\n\ndef calculate_energy(brightness):\n x_gradient = np.hstack(((brightness[:, 1] - brightness[:, 0])[:, np.\n newaxis], brightness[:, 2:] - brightness[:, :-2], (brightness[:, -1\n ] - brightness[:, -2])[:, np.newaxis]))\n y_gradient = np.vstack((brightness[1, :] - brightness[0, :], brightness\n [2:, :] - brightness[:-2, :], brightness[-1, :] - brightness[-2, :]))\n return np.sqrt(x_gradient ** 2 + y_gradient ** 2)\n\n\ndef calculate_minimal_seam_matrix(pre_energy, mask=None):\n min_seam_searcher = (pre_energy + mask if mask is not None else\n pre_energy.copy())\n for i in range(1, min_seam_searcher.shape[0]):\n row = min_seam_searcher[i - 1]\n minimum = np.vstack((np.insert(row[:-1], 0, row[0]), row, np.append\n (row[1:], row[-1]))).min(axis=0)\n min_seam_searcher[i] += minimum\n return min_seam_searcher\n\n\n<mask token>\n\n\ndef cut(image, mask):\n brightness = calculate_brightness(image)\n energy = calculate_energy(brightness)\n mult = image.shape[0] * image.shape[1] * 256\n min_seam = calculate_minimal_seam_matrix(energy, mask * mult if mask is not\n None else None)\n seam = get_minimal_seam(min_seam)\n copy = np.empty((image.shape[0], image.shape[1] - 1, 3), np.uint8)\n copy_mask = np.empty((image.shape[0], image.shape[1] - 1), np.int32\n ) if mask is not None else None\n seam_mask = np.zeros(image.shape[:2], dtype=np.uint8)\n for row, i in enumerate(seam):\n copy[row] = np.delete(image[row], i, axis=0)\n if mask is not None:\n copy_mask[row] = np.delete(mask[row], i, axis=0)\n seam_mask[row][i] = 1\n return copy, copy_mask, seam_mask\n\n\ndef extend(image, mask):\n brightness = calculate_brightness(image)\n energy = calculate_energy(brightness)\n mult = 
image.shape[0] * image.shape[1] * 256\n min_seam = calculate_minimal_seam_matrix(energy, mask * mult if mask is not\n None else None)\n seam = get_minimal_seam(min_seam)\n copy = np.empty((image.shape[0], image.shape[1] + 1, 3), np.uint8)\n copy_mask = np.zeros((image.shape[0], image.shape[1] + 1), np.int32\n ) if mask is not None else None\n seam_mask = np.zeros(image.shape[:2], dtype=np.uint8)\n for row, i in enumerate(seam):\n if i >= image.shape[1] - 1:\n copy[row] = np.concatenate((image[row], [image[row][-1]]), axis=0)\n if mask is not None:\n copy_mask[row] = np.append(mask[row], 0)\n copy_mask[row][-2] = 1\n copy_mask[row][-1] = 1\n else:\n copy[row] = np.insert(image[row], i + 1, image[row][i] // 2 + \n image[row][i + 1] // 2, axis=0)\n if mask is not None:\n copy_mask[row] = np.insert(mask[row], i + 1, 0, axis=0)\n copy_mask[row][i] = 1\n copy_mask[row][i + 1] = 1\n seam_mask[row][i] = 1\n return copy, copy_mask, seam_mask\n\n\ndef seam_carve(image, mode, mask):\n if mode == 'horizontal shrink':\n return cut(image, mask)\n elif mode == 'vertical shrink':\n transposed_image, transposed_mask, transposed_seam_mask = cut(np.\n transpose(image, (1, 0, 2)), mask.T if mask is not None else None)\n return (np.transpose(transposed_image, (1, 0, 2)), transposed_mask.\n T if mask is not None else None, transposed_seam_mask.T)\n elif mode == 'horizontal expand':\n return extend(image, mask)\n else:\n transposed_image, transposed_mask, transposed_seam_mask = extend(np\n .transpose(image, (1, 0, 2)), mask.T if mask is not None else None)\n return (np.transpose(transposed_image, (1, 0, 2)), transposed_mask.\n T if mask is not None else None, transposed_seam_mask.T)\n",
"step-4": "import numpy as np\n\n\ndef get_mask(mask):\n r = mask[:, :, 0]\n g = mask[:, :, 1]\n return r // (r.max() or 1) * -1 + g // (g.max() or 1)\n\n\ndef calculate_brightness(image):\n weights = np.array([0.299, 0.587, 0.114])\n brightness_matrix = (image * weights).sum(axis=2)\n return brightness_matrix\n\n\ndef calculate_energy(brightness):\n x_gradient = np.hstack(((brightness[:, 1] - brightness[:, 0])[:, np.\n newaxis], brightness[:, 2:] - brightness[:, :-2], (brightness[:, -1\n ] - brightness[:, -2])[:, np.newaxis]))\n y_gradient = np.vstack((brightness[1, :] - brightness[0, :], brightness\n [2:, :] - brightness[:-2, :], brightness[-1, :] - brightness[-2, :]))\n return np.sqrt(x_gradient ** 2 + y_gradient ** 2)\n\n\ndef calculate_minimal_seam_matrix(pre_energy, mask=None):\n min_seam_searcher = (pre_energy + mask if mask is not None else\n pre_energy.copy())\n for i in range(1, min_seam_searcher.shape[0]):\n row = min_seam_searcher[i - 1]\n minimum = np.vstack((np.insert(row[:-1], 0, row[0]), row, np.append\n (row[1:], row[-1]))).min(axis=0)\n min_seam_searcher[i] += minimum\n return min_seam_searcher\n\n\ndef get_minimal_seam(min_seam):\n seam = np.zeros(min_seam.shape[0], dtype=np.int32)\n seam[-1] = np.argmin(min_seam[-1])\n for i in range(min_seam.shape[0] - 2, -1, -1):\n last = seam[i + 1]\n if last == 0:\n seam[i] = np.argmin(min_seam[i, :2])\n elif last == min_seam.shape[1] - 1:\n seam[i] = last + np.argmin(min_seam[i, last - 1:]) - 1\n else:\n seam[i] = last + np.argmin(min_seam[i, last - 1:last + 2]) - 1\n return seam\n\n\ndef cut(image, mask):\n brightness = calculate_brightness(image)\n energy = calculate_energy(brightness)\n mult = image.shape[0] * image.shape[1] * 256\n min_seam = calculate_minimal_seam_matrix(energy, mask * mult if mask is not\n None else None)\n seam = get_minimal_seam(min_seam)\n copy = np.empty((image.shape[0], image.shape[1] - 1, 3), np.uint8)\n copy_mask = np.empty((image.shape[0], image.shape[1] - 1), np.int32\n ) if 
mask is not None else None\n seam_mask = np.zeros(image.shape[:2], dtype=np.uint8)\n for row, i in enumerate(seam):\n copy[row] = np.delete(image[row], i, axis=0)\n if mask is not None:\n copy_mask[row] = np.delete(mask[row], i, axis=0)\n seam_mask[row][i] = 1\n return copy, copy_mask, seam_mask\n\n\ndef extend(image, mask):\n brightness = calculate_brightness(image)\n energy = calculate_energy(brightness)\n mult = image.shape[0] * image.shape[1] * 256\n min_seam = calculate_minimal_seam_matrix(energy, mask * mult if mask is not\n None else None)\n seam = get_minimal_seam(min_seam)\n copy = np.empty((image.shape[0], image.shape[1] + 1, 3), np.uint8)\n copy_mask = np.zeros((image.shape[0], image.shape[1] + 1), np.int32\n ) if mask is not None else None\n seam_mask = np.zeros(image.shape[:2], dtype=np.uint8)\n for row, i in enumerate(seam):\n if i >= image.shape[1] - 1:\n copy[row] = np.concatenate((image[row], [image[row][-1]]), axis=0)\n if mask is not None:\n copy_mask[row] = np.append(mask[row], 0)\n copy_mask[row][-2] = 1\n copy_mask[row][-1] = 1\n else:\n copy[row] = np.insert(image[row], i + 1, image[row][i] // 2 + \n image[row][i + 1] // 2, axis=0)\n if mask is not None:\n copy_mask[row] = np.insert(mask[row], i + 1, 0, axis=0)\n copy_mask[row][i] = 1\n copy_mask[row][i + 1] = 1\n seam_mask[row][i] = 1\n return copy, copy_mask, seam_mask\n\n\ndef seam_carve(image, mode, mask):\n if mode == 'horizontal shrink':\n return cut(image, mask)\n elif mode == 'vertical shrink':\n transposed_image, transposed_mask, transposed_seam_mask = cut(np.\n transpose(image, (1, 0, 2)), mask.T if mask is not None else None)\n return (np.transpose(transposed_image, (1, 0, 2)), transposed_mask.\n T if mask is not None else None, transposed_seam_mask.T)\n elif mode == 'horizontal expand':\n return extend(image, mask)\n else:\n transposed_image, transposed_mask, transposed_seam_mask = extend(np\n .transpose(image, (1, 0, 2)), mask.T if mask is not None else None)\n return 
(np.transpose(transposed_image, (1, 0, 2)), transposed_mask.\n T if mask is not None else None, transposed_seam_mask.T)\n",
"step-5": "import numpy as np\n\n\ndef get_mask(mask):\n r = mask[:, :, 0]\n g = mask[:, :, 1]\n return r // (r.max() or 1) * -1 + g // (g.max() or 1)\n\n\ndef calculate_brightness(image):\n weights = np.array([0.299, 0.587, 0.114])\n brightness_matrix = (image*weights).sum(axis=2)\n return brightness_matrix\n\n\ndef calculate_energy(brightness):\n x_gradient = np.hstack((\n (brightness[:, 1] - brightness[:, 0])[:, np.newaxis],\n brightness[:, 2:] - brightness[:, :-2],\n (brightness[:, -1] - brightness[:, -2])[:, np.newaxis]\n ))\n y_gradient = np.vstack((\n brightness[1, :] - brightness[0, :],\n brightness[2:, :] - brightness[:-2, :],\n brightness[-1, :] - brightness[-2, :]\n ))\n return np.sqrt(x_gradient ** 2 + y_gradient ** 2)\n\n\ndef calculate_minimal_seam_matrix(pre_energy, mask=None):\n min_seam_searcher = pre_energy + mask if mask is not None else pre_energy.copy()\n for i in range(1, min_seam_searcher.shape[0]):\n row = min_seam_searcher[i-1]\n minimum = np.vstack((np.insert(row[:-1], 0, row[0]), row, np.append(row[1:], row[-1]))).min(axis=0)\n min_seam_searcher[i] += minimum\n return min_seam_searcher\n\n\ndef get_minimal_seam(min_seam):\n seam = np.zeros(min_seam.shape[0], dtype=np.int32)\n seam[-1] = np.argmin(min_seam[-1])\n for i in range(min_seam.shape[0] - 2, -1, -1):\n last = seam[i+1]\n if last == 0:\n seam[i] = np.argmin(min_seam[i, : 2])\n elif last == min_seam.shape[1] - 1:\n seam[i] = last + np.argmin(min_seam[i, (last - 1):]) - 1\n else:\n seam[i] = last + np.argmin(min_seam[i, (last - 1): (last + 2)]) - 1\n return seam\n\n\ndef cut(image, mask):\n brightness = calculate_brightness(image)\n energy = calculate_energy(brightness)\n mult = image.shape[0] * image.shape[1] * 256\n min_seam = calculate_minimal_seam_matrix(energy, mask * mult if mask is not None else None)\n seam = get_minimal_seam(min_seam)\n copy = np.empty((image.shape[0], image.shape[1] - 1, 3), np.uint8)\n copy_mask = np.empty((image.shape[0], image.shape[1] - 1), np.int32) if 
mask is not None else None\n seam_mask = np.zeros(image.shape[:2], dtype=np.uint8)\n for row, i in enumerate(seam):\n copy[row] = np.delete(image[row], i, axis=0)\n if mask is not None:\n copy_mask[row] = np.delete(mask[row], i, axis=0)\n seam_mask[row][i] = 1\n return copy, copy_mask, seam_mask\n\n\ndef extend(image, mask):\n brightness = calculate_brightness(image)\n energy = calculate_energy(brightness)\n mult = image.shape[0] * image.shape[1] * 256\n min_seam = calculate_minimal_seam_matrix(energy, mask * mult if mask is not None else None)\n seam = get_minimal_seam(min_seam)\n copy = np.empty((image.shape[0], image.shape[1] + 1, 3), np.uint8)\n copy_mask = np.zeros((image.shape[0], image.shape[1] + 1), np.int32) if mask is not None else None\n seam_mask = np.zeros(image.shape[:2], dtype=np.uint8)\n for row, i in enumerate(seam):\n if i >= image.shape[1] - 1:\n copy[row] = np.concatenate((image[row], [image[row][-1]]), axis=0)\n if mask is not None:\n copy_mask[row] = np.append(mask[row], 0)\n copy_mask[row][-2] = 1\n copy_mask[row][-1] = 1\n else:\n copy[row] = np.insert(image[row], i+1, image[row][i] // 2 + image[row][i+1] // 2, axis=0)\n if mask is not None:\n copy_mask[row] = np.insert(mask[row], i+1, 0, axis=0)\n copy_mask[row][i] = 1\n copy_mask[row][i+1] = 1\n seam_mask[row][i] = 1\n return copy, copy_mask, seam_mask\n\n\ndef seam_carve(image, mode, mask):\n if mode == 'horizontal shrink':\n return cut(image, mask)\n elif mode == 'vertical shrink':\n transposed_image, transposed_mask, transposed_seam_mask = cut(\n np.transpose(image, (1, 0, 2)), mask.T if mask is not None else None\n )\n return (np.transpose(transposed_image, (1, 0, 2)),\n transposed_mask.T if mask is not None else None,\n transposed_seam_mask.T)\n elif mode == 'horizontal expand':\n return extend(image, mask)\n else:\n transposed_image, transposed_mask, transposed_seam_mask = extend(\n np.transpose(image, (1, 0, 2)), mask.T if mask is not None else None\n )\n return 
(np.transpose(transposed_image, (1, 0, 2)),\n transposed_mask.T if mask is not None else None,\n transposed_seam_mask.T)\n",
"step-ids": [
3,
6,
7,
9,
10
]
}
|
[
3,
6,
7,
9,
10
] |
def reverse_letters(text):
    """Return *text* with its alphabetic characters reversed in place.

    Non-letter characters keep their original positions.  This fixes the
    original script's bug: it recorded non-letter positions with
    ``str.index``, which always returns the FIRST occurrence, so any
    repeated non-letter character (e.g. ``"!a!b"``) was re-inserted at
    the wrong position.
    """
    # Collect the letters once; pop() then yields them in reverse order.
    letters = [ch for ch in text if ch.isalpha()]
    result = []
    for ch in text:
        result.append(letters.pop() if ch.isalpha() else ch)
    return ''.join(result)


if __name__ == '__main__':
    # Preserve the original CLI behaviour: read one line, print the result.
    print(reverse_letters(input()))
|
normal
|
{
"blob_id": "8fedaeb13fde117cf6b7ace23b59c26e4aab2bc2",
"index": 4492,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in a:\n if i.isalpha():\n b.append(i)\n else:\n ind.append(a.index(i))\n<mask token>\nfor i in ind:\n c.insert(i, a[i])\nprint(''.join(c))\n",
"step-3": "a = input()\nb = []\nind = []\nfor i in a:\n if i.isalpha():\n b.append(i)\n else:\n ind.append(a.index(i))\nc = list(reversed(b))\nfor i in ind:\n c.insert(i, a[i])\nprint(''.join(c))\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def read_ints():
return list(map(int, input().strip().split(' ')))
def solve():
K, S = read_ints()
total = 0
for X in range(K + 1):
if S - X < 0:
break
Y_min = max(S - X - K, 0)
Y_max = min(S - X, K)
if Y_min <= Y_max:
total += Y_max - Y_min + 1
return total
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def read_int():
return int(input().strip())
def read_ints():
return list(map(int, input().strip().split(' ')))
def solve():
K, S = read_ints()
total = 0
for X in range(K + 1):
if S - X < 0:
break
Y_min = max(S - X - K, 0)
Y_max = min(S - X, K)
if Y_min <= Y_max:
total += Y_max - Y_min + 1
return total
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def read_int():
return int(input().strip())
def read_ints():
return list(map(int, input().strip().split(' ')))
def solve():
K, S = read_ints()
total = 0
for X in range(K + 1):
if S - X < 0:
break
Y_min = max(S - X - K, 0)
Y_max = min(S - X, K)
if Y_min <= Y_max:
total += Y_max - Y_min + 1
return total
if __name__ == '__main__':
print(solve())
<|reserved_special_token_1|>
def read_int():
    """Read a single integer from one line of stdin."""
    return int(input().strip())


def read_ints():
    """Read a line of whitespace-separated integers from stdin.

    Uses ``str.split()`` (any whitespace run) instead of ``split(' ')``
    so repeated spaces or tabs no longer raise ``ValueError`` on the
    empty tokens they used to produce.
    """
    return list(map(int, input().strip().split()))


def solve(K=None, S=None):
    """Count triples (X, Y, Z) with 0 <= X, Y, Z <= K and X + Y + Z == S.

    When *K* and *S* are not supplied they are read from stdin, which
    preserves the original command-line behaviour while making the
    function directly testable.
    """
    if K is None or S is None:
        K, S = read_ints()
    total = 0
    for X in range(K + 1):
        rest = S - X  # Y + Z must sum to this
        if rest < 0:
            break  # rest only shrinks as X grows, so no further X can work
        # Y must leave Z = rest - Y inside [0, K].
        Y_min = max(rest - K, 0)
        Y_max = min(rest, K)
        if Y_min <= Y_max:
            total += Y_max - Y_min + 1
    return total


if __name__ == '__main__':
    print(solve())
|
flexible
|
{
"blob_id": "46b1fc975fbeedcafaa66c85c378e2249a495647",
"index": 8827,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef read_ints():\n return list(map(int, input().strip().split(' ')))\n\n\ndef solve():\n K, S = read_ints()\n total = 0\n for X in range(K + 1):\n if S - X < 0:\n break\n Y_min = max(S - X - K, 0)\n Y_max = min(S - X, K)\n if Y_min <= Y_max:\n total += Y_max - Y_min + 1\n return total\n\n\n<mask token>\n",
"step-3": "def read_int():\n return int(input().strip())\n\n\ndef read_ints():\n return list(map(int, input().strip().split(' ')))\n\n\ndef solve():\n K, S = read_ints()\n total = 0\n for X in range(K + 1):\n if S - X < 0:\n break\n Y_min = max(S - X - K, 0)\n Y_max = min(S - X, K)\n if Y_min <= Y_max:\n total += Y_max - Y_min + 1\n return total\n\n\n<mask token>\n",
"step-4": "def read_int():\n return int(input().strip())\n\n\ndef read_ints():\n return list(map(int, input().strip().split(' ')))\n\n\ndef solve():\n K, S = read_ints()\n total = 0\n for X in range(K + 1):\n if S - X < 0:\n break\n Y_min = max(S - X - K, 0)\n Y_max = min(S - X, K)\n if Y_min <= Y_max:\n total += Y_max - Y_min + 1\n return total\n\n\nif __name__ == '__main__':\n print(solve())\n",
"step-5": "\n\ndef read_int():\n return int(input().strip())\n\n\ndef read_ints():\n return list(map(int, input().strip().split(' ')))\n\n\ndef solve():\n K, S = read_ints()\n # X+Y+Z = S\n # 0 <= X,Y,Z <= K\n total = 0\n for X in range(K+1):\n if S-X < 0:\n break\n # Y+Z=S-X\n Y_min = max(S-X-K, 0)\n Y_max = min(S-X, K)\n if Y_min <= Y_max:\n total += Y_max-Y_min+1\n return total\n\n\nif __name__ == '__main__':\n print(solve())\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
setup(name='TF_Speech', version='0.2.0', extras_require={'tensorflow': [
'tensorflow'], 'tensorflow with gpu': ['tensorflow-gpu']})
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from setuptools import setup
setup(name='TF_Speech', version='0.2.0', extras_require={'tensorflow': [
'tensorflow'], 'tensorflow with gpu': ['tensorflow-gpu']})
<|reserved_special_token_1|>
"""
Python package setup file.
"""
from setuptools import setup

# Declare the TF_Speech distribution.  TensorFlow is deliberately kept out
# of the hard requirements: users opt in with
#   pip install "TF_Speech[tensorflow]"          # CPU build
# or the GPU extra for tensorflow-gpu.
# NOTE(review): the extras key 'tensorflow with gpu' contains spaces, which
# is not a valid extras name under PEP 508 — confirm it installs as intended
# before relying on it.
setup(
    name="TF_Speech",
    version="0.2.0",
    extras_require={'tensorflow': ['tensorflow'],
                    'tensorflow with gpu': ['tensorflow-gpu']},
)
|
flexible
|
{
"blob_id": "97ebdeada3d797a971b5c3851b75f9754595f67c",
"index": 358,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='TF_Speech', version='0.2.0', extras_require={'tensorflow': [\n 'tensorflow'], 'tensorflow with gpu': ['tensorflow-gpu']})\n",
"step-3": "<mask token>\nfrom setuptools import setup\nsetup(name='TF_Speech', version='0.2.0', extras_require={'tensorflow': [\n 'tensorflow'], 'tensorflow with gpu': ['tensorflow-gpu']})\n",
"step-4": "\"\"\"\nPython package setup file.\n\"\"\"\n\nfrom setuptools import setup\n\nsetup(\n name=\"TF_Speech\",\n version=\"0.2.0\",\n extras_require={'tensorflow': ['tensorflow'],\n 'tensorflow with gpu': ['tensorflow-gpu']},\n)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
###
# This Python module contains commented out classifiers that I will no longer
# be using
###
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import BaggingClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsClassifier
# Using Decision trees
# dt = DecisionTreeClassifier(max_depth=None)
# dt.fit(X_train_cv, y_train)
# print("DT Accuracy = " + str(dt.score(X_dev_cv, y_dev)))
# Using AdaBoost (takes too long)
# clf = DecisionTreeClassifier()
# ada = AdaBoostClassifier(clf)
# ada.fit(X_train_cv, y_train)
# print("ADA accuracy = " + str(ada.score(X_dev_cv, y_dev)))
# Using Bagging as a classifier with KNN
# clf = KNeighborsClassifier(n_neighbors=10)
# bag = BaggingClassifier(clf, max_features=0.5, max_samples=0.5)
# bag.fit(X_top10_train, y_top10_train)
# print("Bag accuracy = " + str(bag.score(X_top10_dev, y_top10_dev)))
# Using a random forest classifier
# rforest = RandomForestClassifier(max_depth=10000)
# rforest.fit(X_train_cv, y_train)
# print("Random Forest accuracy = " + str(rforest.score(X_dev_cv, y_dev)))
|
normal
|
{
"blob_id": "5029f3e2000c25d6044f93201c698773e310d452",
"index": 3391,
"step-1": "<mask token>\n",
"step-2": "from sklearn.tree import DecisionTreeClassifier\nfrom sklearn.ensemble import BaggingClassifier\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn.neighbors import KNeighborsClassifier\n",
"step-3": "###\n# This Python module contains commented out classifiers that I will no longer\n# be using\n###\n\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.ensemble import BaggingClassifier\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn.neighbors import KNeighborsClassifier\n\n\n# Using Decision trees\n# dt = DecisionTreeClassifier(max_depth=None)\n# dt.fit(X_train_cv, y_train)\n# print(\"DT Accuracy = \" + str(dt.score(X_dev_cv, y_dev)))\n\n# Using AdaBoost (takes too long)\n# clf = DecisionTreeClassifier()\n# ada = AdaBoostClassifier(clf)\n# ada.fit(X_train_cv, y_train)\n# print(\"ADA accuracy = \" + str(ada.score(X_dev_cv, y_dev)))\n\n# Using Bagging as a classifier with KNN\n# clf = KNeighborsClassifier(n_neighbors=10)\n# bag = BaggingClassifier(clf, max_features=0.5, max_samples=0.5)\n# bag.fit(X_top10_train, y_top10_train)\n# print(\"Bag accuracy = \" + str(bag.score(X_top10_dev, y_top10_dev)))\n\n# Using a random forest classifier\n# rforest = RandomForestClassifier(max_depth=10000)\n# rforest.fit(X_train_cv, y_train)\n# print(\"Random Forest accuracy = \" + str(rforest.score(X_dev_cv, y_dev)))\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('HMS', '0009_auto_20200329_0911')]
operations = [migrations.CreateModel(name='mess_timetable', fields=[(
'id', models.AutoField(auto_created=True, primary_key=True,
serialize=False, verbose_name='ID')), ('time', models.CharField(
choices=[('Breakfast', 'Breakfast'), ('Lunch', 'Lunch'), ('Dinner',
'Dinner')], max_length=10)), ('Monday', models.CharField(max_length
=100)), ('Tuesday', models.CharField(max_length=100)), ('Wednesday',
models.CharField(max_length=100)), ('Thursday', models.CharField(
max_length=100)), ('Friday', models.CharField(max_length=100)), (
'Saturday', models.CharField(max_length=100)), ('Sunday', models.
CharField(max_length=100))])]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('HMS', '0009_auto_20200329_0911')]
operations = [migrations.CreateModel(name='mess_timetable', fields=[(
'id', models.AutoField(auto_created=True, primary_key=True,
serialize=False, verbose_name='ID')), ('time', models.CharField(
choices=[('Breakfast', 'Breakfast'), ('Lunch', 'Lunch'), ('Dinner',
'Dinner')], max_length=10)), ('Monday', models.CharField(max_length
=100)), ('Tuesday', models.CharField(max_length=100)), ('Wednesday',
models.CharField(max_length=100)), ('Thursday', models.CharField(
max_length=100)), ('Friday', models.CharField(max_length=100)), (
'Saturday', models.CharField(max_length=100)), ('Sunday', models.
CharField(max_length=100))])]
<|reserved_special_token_1|>
# Generated by Django 3.0.3 on 2020-04-24 14:03

from django.db import migrations, models


class Migration(migrations.Migration):
    """Create the ``mess_timetable`` table.

    One row per meal slot (Breakfast / Lunch / Dinner), with a free-text
    menu entry column for each day of the week.
    """

    dependencies = [
        ('HMS', '0009_auto_20200329_0911'),
    ]

    operations = [
        migrations.CreateModel(
            name='mess_timetable',
            fields=[
                # Auto-increment primary key added implicitly by Django.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Meal slot, restricted to the three fixed choices.
                ('time', models.CharField(choices=[('Breakfast', 'Breakfast'), ('Lunch', 'Lunch'), ('Dinner', 'Dinner')], max_length=10)),
                # One free-text menu description per weekday.
                ('Monday', models.CharField(max_length=100)),
                ('Tuesday', models.CharField(max_length=100)),
                ('Wednesday', models.CharField(max_length=100)),
                ('Thursday', models.CharField(max_length=100)),
                ('Friday', models.CharField(max_length=100)),
                ('Saturday', models.CharField(max_length=100)),
                ('Sunday', models.CharField(max_length=100)),
            ],
        ),
    ]
|
flexible
|
{
"blob_id": "e307bcc28526081141f1f2204c225d8e5f0100a8",
"index": 9015,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('HMS', '0009_auto_20200329_0911')]\n operations = [migrations.CreateModel(name='mess_timetable', fields=[(\n 'id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('time', models.CharField(\n choices=[('Breakfast', 'Breakfast'), ('Lunch', 'Lunch'), ('Dinner',\n 'Dinner')], max_length=10)), ('Monday', models.CharField(max_length\n =100)), ('Tuesday', models.CharField(max_length=100)), ('Wednesday',\n models.CharField(max_length=100)), ('Thursday', models.CharField(\n max_length=100)), ('Friday', models.CharField(max_length=100)), (\n 'Saturday', models.CharField(max_length=100)), ('Sunday', models.\n CharField(max_length=100))])]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('HMS', '0009_auto_20200329_0911')]\n operations = [migrations.CreateModel(name='mess_timetable', fields=[(\n 'id', models.AutoField(auto_created=True, primary_key=True,\n serialize=False, verbose_name='ID')), ('time', models.CharField(\n choices=[('Breakfast', 'Breakfast'), ('Lunch', 'Lunch'), ('Dinner',\n 'Dinner')], max_length=10)), ('Monday', models.CharField(max_length\n =100)), ('Tuesday', models.CharField(max_length=100)), ('Wednesday',\n models.CharField(max_length=100)), ('Thursday', models.CharField(\n max_length=100)), ('Friday', models.CharField(max_length=100)), (\n 'Saturday', models.CharField(max_length=100)), ('Sunday', models.\n CharField(max_length=100))])]\n",
"step-5": "# Generated by Django 3.0.3 on 2020-04-24 14:03\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('HMS', '0009_auto_20200329_0911'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='mess_timetable',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('time', models.CharField(choices=[('Breakfast', 'Breakfast'), ('Lunch', 'Lunch'), ('Dinner', 'Dinner')], max_length=10)),\n ('Monday', models.CharField(max_length=100)),\n ('Tuesday', models.CharField(max_length=100)),\n ('Wednesday', models.CharField(max_length=100)),\n ('Thursday', models.CharField(max_length=100)),\n ('Friday', models.CharField(max_length=100)),\n ('Saturday', models.CharField(max_length=100)),\n ('Sunday', models.CharField(max_length=100)),\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='AuditLog', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('action_time', models.DateTimeField(
auto_now=True, verbose_name='操作时间')), ('user', models.CharField(
max_length=64, verbose_name='操作者')), ('obj', models.TextField(blank
=True, null=True, verbose_name='操作对象')), ('operate_type', models.
CharField(max_length=32, verbose_name='操作类型')), ('change_message',
models.TextField(blank=True, verbose_name='操作信息'))], options={
'verbose_name': '操作日志', 'ordering': ['-id']})]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='AuditLog', fields=[('id',
models.AutoField(auto_created=True, primary_key=True, serialize=
False, verbose_name='ID')), ('action_time', models.DateTimeField(
auto_now=True, verbose_name='操作时间')), ('user', models.CharField(
max_length=64, verbose_name='操作者')), ('obj', models.TextField(blank
=True, null=True, verbose_name='操作对象')), ('operate_type', models.
CharField(max_length=32, verbose_name='操作类型')), ('change_message',
models.TextField(blank=True, verbose_name='操作信息'))], options={
'verbose_name': '操作日志', 'ordering': ['-id']})]
<|reserved_special_token_1|>
# Generated by Django 3.1.7 on 2021-04-16 14:03
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='AuditLog',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('action_time', models.DateTimeField(auto_now=True, verbose_name='操作时间')),
('user', models.CharField(max_length=64, verbose_name='操作者')),
('obj', models.TextField(blank=True, null=True, verbose_name='操作对象')),
('operate_type', models.CharField(max_length=32, verbose_name='操作类型')),
('change_message', models.TextField(blank=True, verbose_name='操作信息')),
],
options={
'verbose_name': '操作日志',
'ordering': ['-id'],
},
),
]
|
flexible
|
{
"blob_id": "d65d85b4573728ed32ccf987459d5a228e2a8897",
"index": 5196,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='AuditLog', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('action_time', models.DateTimeField(\n auto_now=True, verbose_name='操作时间')), ('user', models.CharField(\n max_length=64, verbose_name='操作者')), ('obj', models.TextField(blank\n =True, null=True, verbose_name='操作对象')), ('operate_type', models.\n CharField(max_length=32, verbose_name='操作类型')), ('change_message',\n models.TextField(blank=True, verbose_name='操作信息'))], options={\n 'verbose_name': '操作日志', 'ordering': ['-id']})]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='AuditLog', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('action_time', models.DateTimeField(\n auto_now=True, verbose_name='操作时间')), ('user', models.CharField(\n max_length=64, verbose_name='操作者')), ('obj', models.TextField(blank\n =True, null=True, verbose_name='操作对象')), ('operate_type', models.\n CharField(max_length=32, verbose_name='操作类型')), ('change_message',\n models.TextField(blank=True, verbose_name='操作信息'))], options={\n 'verbose_name': '操作日志', 'ordering': ['-id']})]\n",
"step-5": "# Generated by Django 3.1.7 on 2021-04-16 14:03\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='AuditLog',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('action_time', models.DateTimeField(auto_now=True, verbose_name='操作时间')),\n ('user', models.CharField(max_length=64, verbose_name='操作者')),\n ('obj', models.TextField(blank=True, null=True, verbose_name='操作对象')),\n ('operate_type', models.CharField(max_length=32, verbose_name='操作类型')),\n ('change_message', models.TextField(blank=True, verbose_name='操作信息')),\n ],\n options={\n 'verbose_name': '操作日志',\n 'ordering': ['-id'],\n },\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def RepresentsInt(s):
try:
int(s)
return True
except ValueError:
return False
<|reserved_special_token_0|>
def drawOneEllipse(aoi, img, draw):
if DEBUG:
print('Ellipse centered at [{0}, {1}] with {2} {3}'.format(aoi[0],
aoi[1], aoi[2], aoi[3]))
imgDim = img.size
cx = aoi[0]
cy = aoi[1]
w = 2 * aoi[2]
h = 2 * aoi[3]
imgArea = imgDim[0] * imgDim[1]
LeftX = cx - aoi[2]
RightX = cx + aoi[2]
TopY = cy - aoi[3]
BottomY = cy + aoi[3]
draw.ellipse(((LeftX, TopY), (RightX, BottomY)), fill='white', outline=
'white')
def drawOneRect(aoi, img, draw):
if DEBUG:
print('Rectangle with Coordinates {0}'.format(aoi))
imgDim = img.size
TopY = aoi[3]
BottomY = aoi[1]
LeftX = aoi[0]
RightX = aoi[2]
if DEBUG:
print(' Top:{0}, Bottom:{1}, Left:{2}, Right: {3}'.format(TopY,
BottomY, LeftX, RightX))
imgArea = imgDim[0] * imgDim[1]
draw.rectangle(((LeftX, TopY), (RightX, BottomY)), fill='white',
outline='white')
def stat(img, mask=None):
if mask == None:
return ImageStat.Stat(img)
else:
return ImageStat.Stat(img, mask)
<|reserved_special_token_0|>
def results_for_mask(withColors, original, pictureName, key, mask):
mask_inverted = ImageOps.invert(mask)
stats_mask = stat(mask)
stats_in = stat(original, mask)
stats_out = stat(original, mask_inverted)
stats_in_image = Image.new('RGBA', original.size, 'black')
stats_in_image.paste(original, mask=mask)
stats_out_image = Image.new('RGBA', original.size, 'black')
stats_out_image.paste(original, mask=mask_inverted)
try:
if withColors:
return {(key + '_mask_lum'): stats_mask.mean[0] / 256.0, (key +
'_in_lum'): luminance(stats_in.mean) / 256.0, (key +
'_in_r'): stats_in.mean[0] / 256.0, (key + '_in_g'):
stats_in.mean[1] / 256.0, (key + '_in_b'): stats_in.mean[2] /
256.0, (key + '_in_complexity'): complexity(pictureName,
key + 'in', stats_in_image), (key + '_out_lum'): luminance(
stats_out.mean) / 256.0, (key + '_out_r'): stats_out.mean[0
] / 256.0, (key + '_out_g'): stats_out.mean[1] / 256.0, (
key + '_out_b'): stats_out.mean[2] / 256.0, (key +
'_out_complexity'): complexity(pictureName, key + 'out',
stats_out_image)}
else:
return {(key + '_in_lum'): luminance(stats_in.mean) / 256.0, (
key + '_out_lum'): luminance(stats_out.mean) / 256.0}
except ZeroDivisionError:
return {}
def do_saliency(original, masks, path, prefix, pictureName, results):
saliency = Image.open(path + pictureName + '.png')
if saliency.mode != 'RGBA':
saliency = saliency.convert('RGBA')
saliency = saliency.resize(original.size)
stats_saliency = stat(saliency)
results[prefix + '_lum'] = luminance(stats_saliency.mean) / 256.0
for i, mask in zip(MASK_NAMES, masks):
stuff = results_for_mask(False, saliency, pictureName, prefix + i, mask
)
results.update(stuff)
saliency_bw = saliency.convert('L')
s_array = numpy.array(saliency_bw)
m_array = numpy.array(masks[0])
dot = numpy.dot(s_array, numpy.rot90(m_array))
results[prefix + '_aoi_dotproduct_sum'] = numpy.sum(dot)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def RepresentsInt(s):
try:
int(s)
return True
except ValueError:
return False
<|reserved_special_token_0|>
def drawOneEllipse(aoi, img, draw):
if DEBUG:
print('Ellipse centered at [{0}, {1}] with {2} {3}'.format(aoi[0],
aoi[1], aoi[2], aoi[3]))
imgDim = img.size
cx = aoi[0]
cy = aoi[1]
w = 2 * aoi[2]
h = 2 * aoi[3]
imgArea = imgDim[0] * imgDim[1]
LeftX = cx - aoi[2]
RightX = cx + aoi[2]
TopY = cy - aoi[3]
BottomY = cy + aoi[3]
draw.ellipse(((LeftX, TopY), (RightX, BottomY)), fill='white', outline=
'white')
def drawOneRect(aoi, img, draw):
if DEBUG:
print('Rectangle with Coordinates {0}'.format(aoi))
imgDim = img.size
TopY = aoi[3]
BottomY = aoi[1]
LeftX = aoi[0]
RightX = aoi[2]
if DEBUG:
print(' Top:{0}, Bottom:{1}, Left:{2}, Right: {3}'.format(TopY,
BottomY, LeftX, RightX))
imgArea = imgDim[0] * imgDim[1]
draw.rectangle(((LeftX, TopY), (RightX, BottomY)), fill='white',
outline='white')
def stat(img, mask=None):
if mask == None:
return ImageStat.Stat(img)
else:
return ImageStat.Stat(img, mask)
def brightness(img, mask=None):
return stat(img, mask).rms[0]
<|reserved_special_token_0|>
def results_for_mask(withColors, original, pictureName, key, mask):
mask_inverted = ImageOps.invert(mask)
stats_mask = stat(mask)
stats_in = stat(original, mask)
stats_out = stat(original, mask_inverted)
stats_in_image = Image.new('RGBA', original.size, 'black')
stats_in_image.paste(original, mask=mask)
stats_out_image = Image.new('RGBA', original.size, 'black')
stats_out_image.paste(original, mask=mask_inverted)
try:
if withColors:
return {(key + '_mask_lum'): stats_mask.mean[0] / 256.0, (key +
'_in_lum'): luminance(stats_in.mean) / 256.0, (key +
'_in_r'): stats_in.mean[0] / 256.0, (key + '_in_g'):
stats_in.mean[1] / 256.0, (key + '_in_b'): stats_in.mean[2] /
256.0, (key + '_in_complexity'): complexity(pictureName,
key + 'in', stats_in_image), (key + '_out_lum'): luminance(
stats_out.mean) / 256.0, (key + '_out_r'): stats_out.mean[0
] / 256.0, (key + '_out_g'): stats_out.mean[1] / 256.0, (
key + '_out_b'): stats_out.mean[2] / 256.0, (key +
'_out_complexity'): complexity(pictureName, key + 'out',
stats_out_image)}
else:
return {(key + '_in_lum'): luminance(stats_in.mean) / 256.0, (
key + '_out_lum'): luminance(stats_out.mean) / 256.0}
except ZeroDivisionError:
return {}
def do_saliency(original, masks, path, prefix, pictureName, results):
saliency = Image.open(path + pictureName + '.png')
if saliency.mode != 'RGBA':
saliency = saliency.convert('RGBA')
saliency = saliency.resize(original.size)
stats_saliency = stat(saliency)
results[prefix + '_lum'] = luminance(stats_saliency.mean) / 256.0
for i, mask in zip(MASK_NAMES, masks):
stuff = results_for_mask(False, saliency, pictureName, prefix + i, mask
)
results.update(stuff)
saliency_bw = saliency.convert('L')
s_array = numpy.array(saliency_bw)
m_array = numpy.array(masks[0])
dot = numpy.dot(s_array, numpy.rot90(m_array))
results[prefix + '_aoi_dotproduct_sum'] = numpy.sum(dot)
def write_stats(writer, filename, pictureName):
original = Image.open(IMG_DIR + filename)
if original.mode != 'RGBA':
original = original.convert('RGBA')
masks = createAOIMasks(pictureName, original.size)
if masks == None:
print('No masks found in: ' + filename)
return False
stats_orig = stat(original)
results = {'image_name': pictureName, 'orig_lum': luminance(stats_orig.
mean) / 256.0, 'orig_r': stats_orig.mean[0] / 256.0, 'orig_g':
stats_orig.mean[1] / 256.0, 'orig_b': stats_orig.mean[2] / 256.0,
'orig_complexity': complexity(pictureName, 'original', original)}
for i, mask in zip(MASK_NAMES, masks):
stuff = results_for_mask(True, original, pictureName, 'aoi' + i, mask)
results.update(stuff)
do_saliency(original, masks, SALIENCY_DIR, 'saliency', pictureName, results
)
do_saliency(original, masks, SUN_SALIENCY_DIR, 'sun_saliency',
pictureName, results)
writer.writerow(results)
if DEBUG:
print('Generated stats for ' + filename)
return True
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def RepresentsInt(s):
try:
int(s)
return True
except ValueError:
return False
def getCoordinates(picturename):
aoiName = picturename + '.OBT'
aoiList = []
obtfile = '{0}/{1}'.format(AOI_DIR, aoiName)
if not os.path.exists(obtfile):
if DEBUG:
print('WARNING: No OBT file found for ' + picturename)
return []
with open(obtfile) as file:
stringContent = file.readlines()
for string in stringContent:
dirtyContent = re.split(', | |=', string)
content = map(int, [x for x in dirtyContent if RepresentsInt(x)])
if content and content != [0]:
aoiList.append(content)
return aoiList
<|reserved_special_token_0|>
def createAOIMasks(pictureName, size):
if DEBUG:
print('Displaying AOIs for picture {0}'.format(pictureName))
aoiList = getCoordinates(pictureName)
if aoiList == []:
return None
masks = []
img = Image.new('L', size, 0)
draw = ImageDraw.Draw(img)
for aoi in aoiList:
drawAOI(aoi, img, draw)
masks.append(img)
emo = Image.new('L', size, 0)
emo_draw = ImageDraw.Draw(emo)
for aoi in aoiList[1:]:
drawAOI(aoi, emo, emo_draw)
masks.append(emo)
for aoi in aoiList:
individual = Image.new('L', size, 0)
individual_draw = ImageDraw.Draw(individual)
drawAOI(aoi, individual, individual_draw)
masks.append(individual)
return masks
def drawOneEllipse(aoi, img, draw):
if DEBUG:
print('Ellipse centered at [{0}, {1}] with {2} {3}'.format(aoi[0],
aoi[1], aoi[2], aoi[3]))
imgDim = img.size
cx = aoi[0]
cy = aoi[1]
w = 2 * aoi[2]
h = 2 * aoi[3]
imgArea = imgDim[0] * imgDim[1]
LeftX = cx - aoi[2]
RightX = cx + aoi[2]
TopY = cy - aoi[3]
BottomY = cy + aoi[3]
draw.ellipse(((LeftX, TopY), (RightX, BottomY)), fill='white', outline=
'white')
def drawOneRect(aoi, img, draw):
if DEBUG:
print('Rectangle with Coordinates {0}'.format(aoi))
imgDim = img.size
TopY = aoi[3]
BottomY = aoi[1]
LeftX = aoi[0]
RightX = aoi[2]
if DEBUG:
print(' Top:{0}, Bottom:{1}, Left:{2}, Right: {3}'.format(TopY,
BottomY, LeftX, RightX))
imgArea = imgDim[0] * imgDim[1]
draw.rectangle(((LeftX, TopY), (RightX, BottomY)), fill='white',
outline='white')
def stat(img, mask=None):
if mask == None:
return ImageStat.Stat(img)
else:
return ImageStat.Stat(img, mask)
def brightness(img, mask=None):
return stat(img, mask).rms[0]
def luminance(c):
if len(c) < 3 or len(c) > 4:
raise Exception('Luminance got values: ', c)
r = c[0]
b = c[1]
g = c[2]
lum = r * 0.2126 + g * 0.7152 + b * 0.0722
if len(c) == 4:
result = lum * (c[3] / 255.0)
else:
result = lum
if math.isnan(result):
return 0.0
else:
return result
def complexity(pictureName, key, img):
name = 'masks/{0}-{1}.jpg'.format(pictureName, key)
img.save(name, quality=80, format='JPEG', optimize=True, progressive=True)
size = os.path.getsize(name)
return size
def results_for_mask(withColors, original, pictureName, key, mask):
mask_inverted = ImageOps.invert(mask)
stats_mask = stat(mask)
stats_in = stat(original, mask)
stats_out = stat(original, mask_inverted)
stats_in_image = Image.new('RGBA', original.size, 'black')
stats_in_image.paste(original, mask=mask)
stats_out_image = Image.new('RGBA', original.size, 'black')
stats_out_image.paste(original, mask=mask_inverted)
try:
if withColors:
return {(key + '_mask_lum'): stats_mask.mean[0] / 256.0, (key +
'_in_lum'): luminance(stats_in.mean) / 256.0, (key +
'_in_r'): stats_in.mean[0] / 256.0, (key + '_in_g'):
stats_in.mean[1] / 256.0, (key + '_in_b'): stats_in.mean[2] /
256.0, (key + '_in_complexity'): complexity(pictureName,
key + 'in', stats_in_image), (key + '_out_lum'): luminance(
stats_out.mean) / 256.0, (key + '_out_r'): stats_out.mean[0
] / 256.0, (key + '_out_g'): stats_out.mean[1] / 256.0, (
key + '_out_b'): stats_out.mean[2] / 256.0, (key +
'_out_complexity'): complexity(pictureName, key + 'out',
stats_out_image)}
else:
return {(key + '_in_lum'): luminance(stats_in.mean) / 256.0, (
key + '_out_lum'): luminance(stats_out.mean) / 256.0}
except ZeroDivisionError:
return {}
def do_saliency(original, masks, path, prefix, pictureName, results):
saliency = Image.open(path + pictureName + '.png')
if saliency.mode != 'RGBA':
saliency = saliency.convert('RGBA')
saliency = saliency.resize(original.size)
stats_saliency = stat(saliency)
results[prefix + '_lum'] = luminance(stats_saliency.mean) / 256.0
for i, mask in zip(MASK_NAMES, masks):
stuff = results_for_mask(False, saliency, pictureName, prefix + i, mask
)
results.update(stuff)
saliency_bw = saliency.convert('L')
s_array = numpy.array(saliency_bw)
m_array = numpy.array(masks[0])
dot = numpy.dot(s_array, numpy.rot90(m_array))
results[prefix + '_aoi_dotproduct_sum'] = numpy.sum(dot)
def write_stats(writer, filename, pictureName):
original = Image.open(IMG_DIR + filename)
if original.mode != 'RGBA':
original = original.convert('RGBA')
masks = createAOIMasks(pictureName, original.size)
if masks == None:
print('No masks found in: ' + filename)
return False
stats_orig = stat(original)
results = {'image_name': pictureName, 'orig_lum': luminance(stats_orig.
mean) / 256.0, 'orig_r': stats_orig.mean[0] / 256.0, 'orig_g':
stats_orig.mean[1] / 256.0, 'orig_b': stats_orig.mean[2] / 256.0,
'orig_complexity': complexity(pictureName, 'original', original)}
for i, mask in zip(MASK_NAMES, masks):
stuff = results_for_mask(True, original, pictureName, 'aoi' + i, mask)
results.update(stuff)
do_saliency(original, masks, SALIENCY_DIR, 'saliency', pictureName, results
)
do_saliency(original, masks, SUN_SALIENCY_DIR, 'sun_saliency',
pictureName, results)
writer.writerow(results)
if DEBUG:
print('Generated stats for ' + filename)
return True
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def RepresentsInt(s):
try:
int(s)
return True
except ValueError:
return False
def getCoordinates(picturename):
aoiName = picturename + '.OBT'
aoiList = []
obtfile = '{0}/{1}'.format(AOI_DIR, aoiName)
if not os.path.exists(obtfile):
if DEBUG:
print('WARNING: No OBT file found for ' + picturename)
return []
with open(obtfile) as file:
stringContent = file.readlines()
for string in stringContent:
dirtyContent = re.split(', | |=', string)
content = map(int, [x for x in dirtyContent if RepresentsInt(x)])
if content and content != [0]:
aoiList.append(content)
return aoiList
def drawAOI(aoi, i, d):
if aoi[0] == 1:
drawOneRect(aoi[1:5], i, d)
else:
drawOneEllipse(aoi[1:5], i, d)
def createAOIMasks(pictureName, size):
if DEBUG:
print('Displaying AOIs for picture {0}'.format(pictureName))
aoiList = getCoordinates(pictureName)
if aoiList == []:
return None
masks = []
img = Image.new('L', size, 0)
draw = ImageDraw.Draw(img)
for aoi in aoiList:
drawAOI(aoi, img, draw)
masks.append(img)
emo = Image.new('L', size, 0)
emo_draw = ImageDraw.Draw(emo)
for aoi in aoiList[1:]:
drawAOI(aoi, emo, emo_draw)
masks.append(emo)
for aoi in aoiList:
individual = Image.new('L', size, 0)
individual_draw = ImageDraw.Draw(individual)
drawAOI(aoi, individual, individual_draw)
masks.append(individual)
return masks
def drawOneEllipse(aoi, img, draw):
if DEBUG:
print('Ellipse centered at [{0}, {1}] with {2} {3}'.format(aoi[0],
aoi[1], aoi[2], aoi[3]))
imgDim = img.size
cx = aoi[0]
cy = aoi[1]
w = 2 * aoi[2]
h = 2 * aoi[3]
imgArea = imgDim[0] * imgDim[1]
LeftX = cx - aoi[2]
RightX = cx + aoi[2]
TopY = cy - aoi[3]
BottomY = cy + aoi[3]
draw.ellipse(((LeftX, TopY), (RightX, BottomY)), fill='white', outline=
'white')
def drawOneRect(aoi, img, draw):
if DEBUG:
print('Rectangle with Coordinates {0}'.format(aoi))
imgDim = img.size
TopY = aoi[3]
BottomY = aoi[1]
LeftX = aoi[0]
RightX = aoi[2]
if DEBUG:
print(' Top:{0}, Bottom:{1}, Left:{2}, Right: {3}'.format(TopY,
BottomY, LeftX, RightX))
imgArea = imgDim[0] * imgDim[1]
draw.rectangle(((LeftX, TopY), (RightX, BottomY)), fill='white',
outline='white')
def stat(img, mask=None):
if mask == None:
return ImageStat.Stat(img)
else:
return ImageStat.Stat(img, mask)
def brightness(img, mask=None):
return stat(img, mask).rms[0]
def luminance(c):
if len(c) < 3 or len(c) > 4:
raise Exception('Luminance got values: ', c)
r = c[0]
b = c[1]
g = c[2]
lum = r * 0.2126 + g * 0.7152 + b * 0.0722
if len(c) == 4:
result = lum * (c[3] / 255.0)
else:
result = lum
if math.isnan(result):
return 0.0
else:
return result
def complexity(pictureName, key, img):
name = 'masks/{0}-{1}.jpg'.format(pictureName, key)
img.save(name, quality=80, format='JPEG', optimize=True, progressive=True)
size = os.path.getsize(name)
return size
def results_for_mask(withColors, original, pictureName, key, mask):
mask_inverted = ImageOps.invert(mask)
stats_mask = stat(mask)
stats_in = stat(original, mask)
stats_out = stat(original, mask_inverted)
stats_in_image = Image.new('RGBA', original.size, 'black')
stats_in_image.paste(original, mask=mask)
stats_out_image = Image.new('RGBA', original.size, 'black')
stats_out_image.paste(original, mask=mask_inverted)
try:
if withColors:
return {(key + '_mask_lum'): stats_mask.mean[0] / 256.0, (key +
'_in_lum'): luminance(stats_in.mean) / 256.0, (key +
'_in_r'): stats_in.mean[0] / 256.0, (key + '_in_g'):
stats_in.mean[1] / 256.0, (key + '_in_b'): stats_in.mean[2] /
256.0, (key + '_in_complexity'): complexity(pictureName,
key + 'in', stats_in_image), (key + '_out_lum'): luminance(
stats_out.mean) / 256.0, (key + '_out_r'): stats_out.mean[0
] / 256.0, (key + '_out_g'): stats_out.mean[1] / 256.0, (
key + '_out_b'): stats_out.mean[2] / 256.0, (key +
'_out_complexity'): complexity(pictureName, key + 'out',
stats_out_image)}
else:
return {(key + '_in_lum'): luminance(stats_in.mean) / 256.0, (
key + '_out_lum'): luminance(stats_out.mean) / 256.0}
except ZeroDivisionError:
return {}
def do_saliency(original, masks, path, prefix, pictureName, results):
saliency = Image.open(path + pictureName + '.png')
if saliency.mode != 'RGBA':
saliency = saliency.convert('RGBA')
saliency = saliency.resize(original.size)
stats_saliency = stat(saliency)
results[prefix + '_lum'] = luminance(stats_saliency.mean) / 256.0
for i, mask in zip(MASK_NAMES, masks):
stuff = results_for_mask(False, saliency, pictureName, prefix + i, mask
)
results.update(stuff)
saliency_bw = saliency.convert('L')
s_array = numpy.array(saliency_bw)
m_array = numpy.array(masks[0])
dot = numpy.dot(s_array, numpy.rot90(m_array))
results[prefix + '_aoi_dotproduct_sum'] = numpy.sum(dot)
def write_stats(writer, filename, pictureName):
original = Image.open(IMG_DIR + filename)
if original.mode != 'RGBA':
original = original.convert('RGBA')
masks = createAOIMasks(pictureName, original.size)
if masks == None:
print('No masks found in: ' + filename)
return False
stats_orig = stat(original)
results = {'image_name': pictureName, 'orig_lum': luminance(stats_orig.
mean) / 256.0, 'orig_r': stats_orig.mean[0] / 256.0, 'orig_g':
stats_orig.mean[1] / 256.0, 'orig_b': stats_orig.mean[2] / 256.0,
'orig_complexity': complexity(pictureName, 'original', original)}
for i, mask in zip(MASK_NAMES, masks):
stuff = results_for_mask(True, original, pictureName, 'aoi' + i, mask)
results.update(stuff)
do_saliency(original, masks, SALIENCY_DIR, 'saliency', pictureName, results
)
do_saliency(original, masks, SUN_SALIENCY_DIR, 'sun_saliency',
pictureName, results)
writer.writerow(results)
if DEBUG:
print('Generated stats for ' + filename)
return True
<|reserved_special_token_0|>
<|reserved_special_token_1|>
#!/usr/bin/env python
"""
maskAOI.py
Dan Fitch 20150618
"""
from __future__ import print_function
import sys, os, glob, shutil, fnmatch, math, re, numpy, csv
from PIL import Image, ImageFile, ImageDraw, ImageColor, ImageOps, ImageStat
# Enlarge PIL's write buffer so progressive/optimized JPEG saves don't fail.
ImageFile.MAXBLOCK = 1048576

# Verbose tracing of AOI parsing and drawing.
DEBUG = False

# Site-specific absolute paths for inputs (AOI .OBT files, source PNGs)
# and the two saliency-map directories.
AOI_DIR='/study/reference/public/IAPS/IAPS/IAPS_2008_1-20_800x600BMP/IAPS_2008_AOIs/'
IMG_DIR='/study/midus/IAPS2005png/'
SALIENCY_DIR='/home/fitch/aoi/saliency/'
SUN_SALIENCY_DIR='/home/fitch/aoi/sunsaliency/'

# Column-name suffixes for the masks returned by createAOIMasks:
# "0" = all AOIs, "E" = emotional AOIs (2nd record onward), then individuals.
MASK_NAMES = ["0", "E", "1", "2", "3", "4"]
def RepresentsInt(s):
    """Return True when *s* parses as an integer (negatives included)."""
    try:
        int(s)
    except ValueError:
        return False
    return True
#Function to return only the main, averaged AOI files (the .OBT) and their coordinates.
def getCoordinates(picturename):
    """Read <picturename>.OBT from AOI_DIR and return a list of AOI records.

    Each record is the list of integers found on one line of the OBT file
    (shape code followed by coordinates).  Lines containing no integers, or
    whose only integer is 0, are skipped.  Returns [] when no OBT file exists.
    """
    aoiName = picturename + ".OBT"
    aoiList = []
    obtfile = "{0}/{1}".format(AOI_DIR, aoiName)
    if not os.path.exists(obtfile):
        if DEBUG: print("WARNING: No OBT file found for " + picturename)
        return []
    with open(obtfile) as file:
        stringContent = file.readlines()
    for string in stringContent:
        dirtyContent = re.split(", | |=", string)
        # BUGFIX (py3 compatibility): a list comprehension instead of map().
        # On Python 3, map() returns a lazy iterator that is always truthy
        # and never equal to [0], so the filter below silently admitted
        # every line (and appended un-iterated map objects).
        content = [int(x) for x in dirtyContent if RepresentsInt(x)]
        if content and content != [0]:
            aoiList.append(content)
    return aoiList
def drawAOI(aoi, i, d):
    """Render one AOI record onto image *i* using draw context *d*.

    aoi[0] is the shape code (1 = rectangle, anything else = ellipse);
    aoi[1:5] holds the four shape coordinates.
    """
    shape_coords = aoi[1:5]
    if aoi[0] == 1:
        drawOneRect(shape_coords, i, d)
    else:
        drawOneEllipse(shape_coords, i, d)
# Function to display the AOI as masks
def createAOIMasks(pictureName, size):
    """Build grayscale ("L") masks for pictureName's AOIs.

    Returns None when no AOIs exist; otherwise a list of masks:
      [0]  all AOI records combined,
      [1]  the "emotional" AOIs (every record after the first),
      [2:] one mask per individual AOI record.
    """
    if DEBUG:
        print("Displaying AOIs for picture {0}".format(pictureName))
    aoiList = getCoordinates(pictureName)
    if aoiList == []:
        return None

    def blank_canvas():
        # Fresh black grayscale image plus its draw context.
        canvas = Image.new("L", size, 0)
        return canvas, ImageDraw.Draw(canvas)

    masks = []

    combined, combined_draw = blank_canvas()
    for record in aoiList:
        drawAOI(record, combined, combined_draw)
    masks.append(combined)

    emotional, emotional_draw = blank_canvas()
    for record in aoiList[1:]:
        drawAOI(record, emotional, emotional_draw)
    masks.append(emotional)

    for record in aoiList:
        single, single_draw = blank_canvas()
        drawAOI(record, single, single_draw)
        masks.append(single)

    return masks
def drawOneEllipse(aoi, img, draw):
    """Draw one filled white ellipse on the mask via *draw*.

    aoi is [center_x, center_y, radius_x, radius_y]; the bounding box is
    derived from the centre and the two radii.  *img* is kept only so the
    signature matches drawOneRect's.
    """
    if DEBUG: print("Ellipse centered at [{0}, {1}] with {2} {3}".format(aoi[0], aoi[1], aoi[2], aoi[3]))
    # Removed unused locals (w, h, imgArea, imgDim) from the original.
    cx = aoi[0]
    cy = aoi[1]
    LeftX = cx - aoi[2]
    RightX = cx + aoi[2]
    TopY = cy - aoi[3]
    BottomY = cy + aoi[3]
    draw.ellipse(((LeftX, TopY), (RightX, BottomY)), fill="white", outline="white")
def drawOneRect(aoi, img, draw):
    """Draw one filled white rectangle on the mask via *draw*.

    aoi is [left, bottom, right, top] as stored in the OBT file, i.e.
    aoi[1] is the bottom Y and aoi[3] the top Y (presumably a bottom-up
    coordinate convention — TODO confirm against the OBT format).  *img*
    is kept only so the signature matches drawOneEllipse's.
    """
    if DEBUG: print("Rectangle with Coordinates {0}".format(aoi))
    TopY = aoi[3]
    BottomY = aoi[1]
    LeftX = aoi[0]
    RightX = aoi[2]
    if DEBUG: print(" Top:{0}, Bottom:{1}, Left:{2}, Right: {3}".format(TopY, BottomY, LeftX, RightX))
    # Removed unused locals (imgDim, imgArea) from the original.
    draw.rectangle(((LeftX, TopY), (RightX, BottomY)), fill="white", outline="white")
def stat(img, mask=None):
    """ImageStat.Stat for *img*, optionally restricted to *mask*."""
    if mask == None:
        return ImageStat.Stat(img)
    return ImageStat.Stat(img, mask)
def brightness(img, mask=None):
    """Root-mean-square value of *img*'s first band, optionally masked."""
    stats = stat(img, mask)
    return stats.rms[0]
def luminance(c):
    """Rec. 709 relative luminance of an RGB(A) channel-mean sequence.

    c is a 3- or 4-element sequence of channel means ordered (R, G, B[, A]),
    as produced by ImageStat.Stat(...).mean on an RGB(A) image.  When an
    alpha channel is present the luminance is scaled by alpha/255.  A NaN
    result (e.g. stats over an all-masked region) collapses to 0.0.

    Raises Exception when c has fewer than 3 or more than 4 values.
    """
    if len(c) < 3 or len(c) > 4:
        raise Exception("Luminance got values: ", c)
    r = c[0]
    # BUGFIX: green and blue were previously swapped (b = c[1], g = c[2]),
    # which applied the 0.7152 green weight to the blue channel.
    g = c[1]
    b = c[2]
    lum = r * 0.2126 + g * 0.7152 + b * 0.0722
    if len(c) == 4:
        # Multiply by alpha... kind of hokey but should work for most cases
        result = lum * (c[3] / 255.0)
    else:
        result = lum
    if math.isnan(result):
        return 0.0
    else:
        return result
def complexity(pictureName, key, img):
    """Proxy for visual complexity: byte size of *img* saved as an
    80%-quality optimized progressive JPEG under masks/.

    The intermediate file is intentionally left on disk.
    """
    out_path = "masks/{0}-{1}.jpg".format(pictureName, key)
    img.save(out_path, quality=80, format="JPEG", optimize=True, progressive=True)
    return os.path.getsize(out_path)
def results_for_mask(withColors, original, pictureName, key, mask):
    """Compute luminance/color/complexity stats inside and outside *mask*.

    Returns a dict whose keys are prefixed with *key*.  With withColors=True
    the dict also carries per-channel means, mask coverage, and JPEG-size
    complexity for the masked-in and masked-out regions; otherwise only the
    two luminance values.  Returns {} when the mask covers nothing or
    everything (ImageStat raises ZeroDivisionError on an empty region).
    """
    # We also want the area outside of the mask
    mask_inverted = ImageOps.invert(mask)
    stats_mask = stat(mask)
    stats_in = stat(original, mask)
    stats_out = stat(original, mask_inverted)
    # Complexity uses the resultant image saved as jpg, so we need to prepare some actual images
    stats_in_image = Image.new('RGBA', original.size, "black")
    stats_in_image.paste(original, mask=mask)
    stats_out_image = Image.new('RGBA', original.size, "black")
    stats_out_image.paste(original, mask=mask_inverted)
    try:
        if withColors:
            # All means are normalized to [0, 1) by dividing by 256.
            return {
                key + '_mask_lum': stats_mask.mean[0] / 256.0,
                key + '_in_lum': luminance(stats_in.mean) / 256.0,
                key + '_in_r': stats_in.mean[0] / 256.0,
                key + '_in_g': stats_in.mean[1] / 256.0,
                key + '_in_b': stats_in.mean[2] / 256.0,
                key + '_in_complexity': complexity(pictureName, key + "in", stats_in_image),
                key + '_out_lum': luminance(stats_out.mean) / 256.0,
                key + '_out_r': stats_out.mean[0] / 256.0,
                key + '_out_g': stats_out.mean[1] / 256.0,
                key + '_out_b': stats_out.mean[2] / 256.0,
                key + '_out_complexity': complexity(pictureName, key + "out", stats_out_image),
            }
        else:
            return {
                key + '_in_lum': luminance(stats_in.mean) / 256.0,
                key + '_out_lum': luminance(stats_out.mean) / 256.0,
            }
    except ZeroDivisionError:
        # Degenerate mask (no pixels selected) — skip this mask's columns.
        return {}
def do_saliency(original, masks, path, prefix, pictureName, results):
    """Accumulate saliency-map statistics into *results* (mutated in place).

    Loads <path>/<pictureName>.png, resizes it to the original image's size,
    records its overall luminance, per-mask in/out luminance, and a
    saliency-vs-AOI dot-product sum, all under *prefix*-prefixed keys.
    """
    saliency = Image.open(path + pictureName + ".png")
    if saliency.mode != "RGBA":
        saliency = saliency.convert("RGBA")
    saliency = saliency.resize(original.size)
    stats_saliency = stat(saliency)
    results[prefix + '_lum'] = luminance(stats_saliency.mean) / 256.0
    for i, mask in zip(MASK_NAMES, masks):
        stuff = results_for_mask(False, saliency, pictureName, prefix + i, mask)
        results.update(stuff)
    saliency_bw = saliency.convert("L")
    s_array = numpy.array(saliency_bw)
    m_array = numpy.array(masks[0])
    # NOTE(review): this is a matrix product of the (H,W) saliency array with
    # the rotated (W,H) all-AOI mask, then summed — not an element-wise
    # overlap. Presumably intended as a saliency/AOI co-occurrence score;
    # confirm the rot90 + dot formulation is what was meant.
    dot = numpy.dot(s_array, numpy.rot90(m_array))
    results[prefix + "_aoi_dotproduct_sum"] = numpy.sum(dot)
def write_stats(writer, filename, pictureName):
    """Compute all stats for one image and write one CSV row via *writer*.

    Returns False (after printing a notice) when the picture has no AOI
    masks; returns True on success.  Raises whatever PIL raises if the
    image or a saliency map cannot be opened.
    """
    original = Image.open(IMG_DIR + filename)
    if original.mode != "RGBA":
        # P is palette. Did you know BMP *and* PNG files can have 8-bit palettes? WHAAAT
        original = original.convert("RGBA")
    # First, draw the AOI masks in white on black
    # This returns a list, the first mask is ALL AOIs, the second is the "emotional" ones >=2, and the rest are each individual shape
    masks = createAOIMasks(pictureName, original.size)
    if masks == None:
        print("No masks found in: " + filename)
        return False
    stats_orig = stat(original)
    # Whole-image stats first; per-mask columns are merged in below.
    results = {
        'image_name': pictureName,
        'orig_lum': luminance(stats_orig.mean) / 256.0,
        'orig_r': stats_orig.mean[0] / 256.0,
        'orig_g': stats_orig.mean[1] / 256.0,
        'orig_b': stats_orig.mean[2] / 256.0,
        'orig_complexity': complexity(pictureName, "original", original),
    }
    for i, mask in zip(MASK_NAMES, masks):
        stuff = results_for_mask(True, original, pictureName, 'aoi' + i, mask)
        results.update(stuff)
    # And finally we get the saliency image and resize it and do a bunch of garbage with it and the AOI masks
    do_saliency(original, masks, SALIENCY_DIR, "saliency", pictureName, results)
    do_saliency(original, masks, SUN_SALIENCY_DIR, "sun_saliency", pictureName, results)
    writer.writerow(results)
    if DEBUG: print("Generated stats for " + filename)
    return True
# Entry point: build the CSV header (whole-image columns, then per-mask AOI
# columns, then per-mask columns for each of the two saliency sources) and
# emit one row per PNG found in IMG_DIR.
# NOTE(review): 'wb' mode for a csv writer is the Python 2 idiom; under
# Python 3 this would need open('stats.csv', 'w', newline='') — confirm the
# target interpreter before changing.
with open('stats.csv', 'wb') as csvfile:
    # Column suffixes produced by results_for_mask(withColors=True, ...).
    per_mask_fields = [
        '_mask_lum',
        '_in_lum',
        '_in_r',
        '_in_g',
        '_in_b',
        '_in_complexity',
        '_out_lum',
        '_out_r',
        '_out_g',
        '_out_b',
        '_out_complexity',
    ]
    # Column suffixes produced by results_for_mask(withColors=False, ...).
    per_saliency_fields = [
        '_in_lum',
        '_out_lum',
    ]
    fields = [
        'image_name',
        'orig_lum',
        'orig_r',
        'orig_g',
        'orig_b',
        'orig_complexity',
    ]
    for i in MASK_NAMES:
        for f in per_mask_fields:
            fields.append("aoi{0}{1}".format(i,f))
    fields.append("saliency_aoi_dotproduct_sum")
    fields.append("saliency_lum")
    for i in MASK_NAMES:
        for f in per_saliency_fields:
            fields.append("saliency{0}{1}".format(i,f))
    fields.append("sun_saliency_aoi_dotproduct_sum")
    fields.append("sun_saliency_lum")
    for i in MASK_NAMES:
        for f in per_saliency_fields:
            fields.append("sun_saliency{0}{1}".format(i,f))
    writer = csv.DictWriter(csvfile, fieldnames=fields)
    # Header row: each field name mapped to itself.
    writer.writerow(dict(zip(fields,fields)))
    for filename in sorted(os.listdir(IMG_DIR)):
        if not ".png" in filename:
            continue
        pictureName = filename.replace(".png", "")
        try:
            write_stats(writer, filename, pictureName)
        except:
            # Identify the failing file on stderr, then re-raise so the run
            # stops with the original traceback (not a silent swallow).
            print("Error on file " + pictureName, file=sys.stderr)
            raise
|
flexible
|
{
"blob_id": "833053a5a75636267feaad5ddaa21dce1de34038",
"index": 5319,
"step-1": "<mask token>\n\n\ndef RepresentsInt(s):\n try:\n int(s)\n return True\n except ValueError:\n return False\n\n\n<mask token>\n\n\ndef drawOneEllipse(aoi, img, draw):\n if DEBUG:\n print('Ellipse centered at [{0}, {1}] with {2} {3}'.format(aoi[0],\n aoi[1], aoi[2], aoi[3]))\n imgDim = img.size\n cx = aoi[0]\n cy = aoi[1]\n w = 2 * aoi[2]\n h = 2 * aoi[3]\n imgArea = imgDim[0] * imgDim[1]\n LeftX = cx - aoi[2]\n RightX = cx + aoi[2]\n TopY = cy - aoi[3]\n BottomY = cy + aoi[3]\n draw.ellipse(((LeftX, TopY), (RightX, BottomY)), fill='white', outline=\n 'white')\n\n\ndef drawOneRect(aoi, img, draw):\n if DEBUG:\n print('Rectangle with Coordinates {0}'.format(aoi))\n imgDim = img.size\n TopY = aoi[3]\n BottomY = aoi[1]\n LeftX = aoi[0]\n RightX = aoi[2]\n if DEBUG:\n print(' Top:{0}, Bottom:{1}, Left:{2}, Right: {3}'.format(TopY,\n BottomY, LeftX, RightX))\n imgArea = imgDim[0] * imgDim[1]\n draw.rectangle(((LeftX, TopY), (RightX, BottomY)), fill='white',\n outline='white')\n\n\ndef stat(img, mask=None):\n if mask == None:\n return ImageStat.Stat(img)\n else:\n return ImageStat.Stat(img, mask)\n\n\n<mask token>\n\n\ndef results_for_mask(withColors, original, pictureName, key, mask):\n mask_inverted = ImageOps.invert(mask)\n stats_mask = stat(mask)\n stats_in = stat(original, mask)\n stats_out = stat(original, mask_inverted)\n stats_in_image = Image.new('RGBA', original.size, 'black')\n stats_in_image.paste(original, mask=mask)\n stats_out_image = Image.new('RGBA', original.size, 'black')\n stats_out_image.paste(original, mask=mask_inverted)\n try:\n if withColors:\n return {(key + '_mask_lum'): stats_mask.mean[0] / 256.0, (key +\n '_in_lum'): luminance(stats_in.mean) / 256.0, (key +\n '_in_r'): stats_in.mean[0] / 256.0, (key + '_in_g'): \n stats_in.mean[1] / 256.0, (key + '_in_b'): stats_in.mean[2] /\n 256.0, (key + '_in_complexity'): complexity(pictureName, \n key + 'in', stats_in_image), (key + '_out_lum'): luminance(\n stats_out.mean) / 256.0, (key + 
'_out_r'): stats_out.mean[0\n ] / 256.0, (key + '_out_g'): stats_out.mean[1] / 256.0, (\n key + '_out_b'): stats_out.mean[2] / 256.0, (key +\n '_out_complexity'): complexity(pictureName, key + 'out',\n stats_out_image)}\n else:\n return {(key + '_in_lum'): luminance(stats_in.mean) / 256.0, (\n key + '_out_lum'): luminance(stats_out.mean) / 256.0}\n except ZeroDivisionError:\n return {}\n\n\ndef do_saliency(original, masks, path, prefix, pictureName, results):\n saliency = Image.open(path + pictureName + '.png')\n if saliency.mode != 'RGBA':\n saliency = saliency.convert('RGBA')\n saliency = saliency.resize(original.size)\n stats_saliency = stat(saliency)\n results[prefix + '_lum'] = luminance(stats_saliency.mean) / 256.0\n for i, mask in zip(MASK_NAMES, masks):\n stuff = results_for_mask(False, saliency, pictureName, prefix + i, mask\n )\n results.update(stuff)\n saliency_bw = saliency.convert('L')\n s_array = numpy.array(saliency_bw)\n m_array = numpy.array(masks[0])\n dot = numpy.dot(s_array, numpy.rot90(m_array))\n results[prefix + '_aoi_dotproduct_sum'] = numpy.sum(dot)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef RepresentsInt(s):\n try:\n int(s)\n return True\n except ValueError:\n return False\n\n\n<mask token>\n\n\ndef drawOneEllipse(aoi, img, draw):\n if DEBUG:\n print('Ellipse centered at [{0}, {1}] with {2} {3}'.format(aoi[0],\n aoi[1], aoi[2], aoi[3]))\n imgDim = img.size\n cx = aoi[0]\n cy = aoi[1]\n w = 2 * aoi[2]\n h = 2 * aoi[3]\n imgArea = imgDim[0] * imgDim[1]\n LeftX = cx - aoi[2]\n RightX = cx + aoi[2]\n TopY = cy - aoi[3]\n BottomY = cy + aoi[3]\n draw.ellipse(((LeftX, TopY), (RightX, BottomY)), fill='white', outline=\n 'white')\n\n\ndef drawOneRect(aoi, img, draw):\n if DEBUG:\n print('Rectangle with Coordinates {0}'.format(aoi))\n imgDim = img.size\n TopY = aoi[3]\n BottomY = aoi[1]\n LeftX = aoi[0]\n RightX = aoi[2]\n if DEBUG:\n print(' Top:{0}, Bottom:{1}, Left:{2}, Right: {3}'.format(TopY,\n BottomY, LeftX, RightX))\n imgArea = imgDim[0] * imgDim[1]\n draw.rectangle(((LeftX, TopY), (RightX, BottomY)), fill='white',\n outline='white')\n\n\ndef stat(img, mask=None):\n if mask == None:\n return ImageStat.Stat(img)\n else:\n return ImageStat.Stat(img, mask)\n\n\ndef brightness(img, mask=None):\n return stat(img, mask).rms[0]\n\n\n<mask token>\n\n\ndef results_for_mask(withColors, original, pictureName, key, mask):\n mask_inverted = ImageOps.invert(mask)\n stats_mask = stat(mask)\n stats_in = stat(original, mask)\n stats_out = stat(original, mask_inverted)\n stats_in_image = Image.new('RGBA', original.size, 'black')\n stats_in_image.paste(original, mask=mask)\n stats_out_image = Image.new('RGBA', original.size, 'black')\n stats_out_image.paste(original, mask=mask_inverted)\n try:\n if withColors:\n return {(key + '_mask_lum'): stats_mask.mean[0] / 256.0, (key +\n '_in_lum'): luminance(stats_in.mean) / 256.0, (key +\n '_in_r'): stats_in.mean[0] / 256.0, (key + '_in_g'): \n stats_in.mean[1] / 256.0, (key + '_in_b'): stats_in.mean[2] /\n 256.0, (key + '_in_complexity'): complexity(pictureName, \n key + 'in', stats_in_image), 
(key + '_out_lum'): luminance(\n stats_out.mean) / 256.0, (key + '_out_r'): stats_out.mean[0\n ] / 256.0, (key + '_out_g'): stats_out.mean[1] / 256.0, (\n key + '_out_b'): stats_out.mean[2] / 256.0, (key +\n '_out_complexity'): complexity(pictureName, key + 'out',\n stats_out_image)}\n else:\n return {(key + '_in_lum'): luminance(stats_in.mean) / 256.0, (\n key + '_out_lum'): luminance(stats_out.mean) / 256.0}\n except ZeroDivisionError:\n return {}\n\n\ndef do_saliency(original, masks, path, prefix, pictureName, results):\n saliency = Image.open(path + pictureName + '.png')\n if saliency.mode != 'RGBA':\n saliency = saliency.convert('RGBA')\n saliency = saliency.resize(original.size)\n stats_saliency = stat(saliency)\n results[prefix + '_lum'] = luminance(stats_saliency.mean) / 256.0\n for i, mask in zip(MASK_NAMES, masks):\n stuff = results_for_mask(False, saliency, pictureName, prefix + i, mask\n )\n results.update(stuff)\n saliency_bw = saliency.convert('L')\n s_array = numpy.array(saliency_bw)\n m_array = numpy.array(masks[0])\n dot = numpy.dot(s_array, numpy.rot90(m_array))\n results[prefix + '_aoi_dotproduct_sum'] = numpy.sum(dot)\n\n\ndef write_stats(writer, filename, pictureName):\n original = Image.open(IMG_DIR + filename)\n if original.mode != 'RGBA':\n original = original.convert('RGBA')\n masks = createAOIMasks(pictureName, original.size)\n if masks == None:\n print('No masks found in: ' + filename)\n return False\n stats_orig = stat(original)\n results = {'image_name': pictureName, 'orig_lum': luminance(stats_orig.\n mean) / 256.0, 'orig_r': stats_orig.mean[0] / 256.0, 'orig_g': \n stats_orig.mean[1] / 256.0, 'orig_b': stats_orig.mean[2] / 256.0,\n 'orig_complexity': complexity(pictureName, 'original', original)}\n for i, mask in zip(MASK_NAMES, masks):\n stuff = results_for_mask(True, original, pictureName, 'aoi' + i, mask)\n results.update(stuff)\n do_saliency(original, masks, SALIENCY_DIR, 'saliency', pictureName, results\n )\n 
do_saliency(original, masks, SUN_SALIENCY_DIR, 'sun_saliency',\n pictureName, results)\n writer.writerow(results)\n if DEBUG:\n print('Generated stats for ' + filename)\n return True\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef RepresentsInt(s):\n try:\n int(s)\n return True\n except ValueError:\n return False\n\n\ndef getCoordinates(picturename):\n aoiName = picturename + '.OBT'\n aoiList = []\n obtfile = '{0}/{1}'.format(AOI_DIR, aoiName)\n if not os.path.exists(obtfile):\n if DEBUG:\n print('WARNING: No OBT file found for ' + picturename)\n return []\n with open(obtfile) as file:\n stringContent = file.readlines()\n for string in stringContent:\n dirtyContent = re.split(', | |=', string)\n content = map(int, [x for x in dirtyContent if RepresentsInt(x)])\n if content and content != [0]:\n aoiList.append(content)\n return aoiList\n\n\n<mask token>\n\n\ndef createAOIMasks(pictureName, size):\n if DEBUG:\n print('Displaying AOIs for picture {0}'.format(pictureName))\n aoiList = getCoordinates(pictureName)\n if aoiList == []:\n return None\n masks = []\n img = Image.new('L', size, 0)\n draw = ImageDraw.Draw(img)\n for aoi in aoiList:\n drawAOI(aoi, img, draw)\n masks.append(img)\n emo = Image.new('L', size, 0)\n emo_draw = ImageDraw.Draw(emo)\n for aoi in aoiList[1:]:\n drawAOI(aoi, emo, emo_draw)\n masks.append(emo)\n for aoi in aoiList:\n individual = Image.new('L', size, 0)\n individual_draw = ImageDraw.Draw(individual)\n drawAOI(aoi, individual, individual_draw)\n masks.append(individual)\n return masks\n\n\ndef drawOneEllipse(aoi, img, draw):\n if DEBUG:\n print('Ellipse centered at [{0}, {1}] with {2} {3}'.format(aoi[0],\n aoi[1], aoi[2], aoi[3]))\n imgDim = img.size\n cx = aoi[0]\n cy = aoi[1]\n w = 2 * aoi[2]\n h = 2 * aoi[3]\n imgArea = imgDim[0] * imgDim[1]\n LeftX = cx - aoi[2]\n RightX = cx + aoi[2]\n TopY = cy - aoi[3]\n BottomY = cy + aoi[3]\n draw.ellipse(((LeftX, TopY), (RightX, BottomY)), fill='white', outline=\n 'white')\n\n\ndef drawOneRect(aoi, img, draw):\n if DEBUG:\n print('Rectangle with Coordinates {0}'.format(aoi))\n imgDim = img.size\n TopY = aoi[3]\n BottomY = aoi[1]\n LeftX = aoi[0]\n RightX = aoi[2]\n if DEBUG:\n print(' 
Top:{0}, Bottom:{1}, Left:{2}, Right: {3}'.format(TopY,\n BottomY, LeftX, RightX))\n imgArea = imgDim[0] * imgDim[1]\n draw.rectangle(((LeftX, TopY), (RightX, BottomY)), fill='white',\n outline='white')\n\n\ndef stat(img, mask=None):\n if mask == None:\n return ImageStat.Stat(img)\n else:\n return ImageStat.Stat(img, mask)\n\n\ndef brightness(img, mask=None):\n return stat(img, mask).rms[0]\n\n\ndef luminance(c):\n if len(c) < 3 or len(c) > 4:\n raise Exception('Luminance got values: ', c)\n r = c[0]\n b = c[1]\n g = c[2]\n lum = r * 0.2126 + g * 0.7152 + b * 0.0722\n if len(c) == 4:\n result = lum * (c[3] / 255.0)\n else:\n result = lum\n if math.isnan(result):\n return 0.0\n else:\n return result\n\n\ndef complexity(pictureName, key, img):\n name = 'masks/{0}-{1}.jpg'.format(pictureName, key)\n img.save(name, quality=80, format='JPEG', optimize=True, progressive=True)\n size = os.path.getsize(name)\n return size\n\n\ndef results_for_mask(withColors, original, pictureName, key, mask):\n mask_inverted = ImageOps.invert(mask)\n stats_mask = stat(mask)\n stats_in = stat(original, mask)\n stats_out = stat(original, mask_inverted)\n stats_in_image = Image.new('RGBA', original.size, 'black')\n stats_in_image.paste(original, mask=mask)\n stats_out_image = Image.new('RGBA', original.size, 'black')\n stats_out_image.paste(original, mask=mask_inverted)\n try:\n if withColors:\n return {(key + '_mask_lum'): stats_mask.mean[0] / 256.0, (key +\n '_in_lum'): luminance(stats_in.mean) / 256.0, (key +\n '_in_r'): stats_in.mean[0] / 256.0, (key + '_in_g'): \n stats_in.mean[1] / 256.0, (key + '_in_b'): stats_in.mean[2] /\n 256.0, (key + '_in_complexity'): complexity(pictureName, \n key + 'in', stats_in_image), (key + '_out_lum'): luminance(\n stats_out.mean) / 256.0, (key + '_out_r'): stats_out.mean[0\n ] / 256.0, (key + '_out_g'): stats_out.mean[1] / 256.0, (\n key + '_out_b'): stats_out.mean[2] / 256.0, (key +\n '_out_complexity'): complexity(pictureName, key + 'out',\n 
stats_out_image)}\n else:\n return {(key + '_in_lum'): luminance(stats_in.mean) / 256.0, (\n key + '_out_lum'): luminance(stats_out.mean) / 256.0}\n except ZeroDivisionError:\n return {}\n\n\ndef do_saliency(original, masks, path, prefix, pictureName, results):\n saliency = Image.open(path + pictureName + '.png')\n if saliency.mode != 'RGBA':\n saliency = saliency.convert('RGBA')\n saliency = saliency.resize(original.size)\n stats_saliency = stat(saliency)\n results[prefix + '_lum'] = luminance(stats_saliency.mean) / 256.0\n for i, mask in zip(MASK_NAMES, masks):\n stuff = results_for_mask(False, saliency, pictureName, prefix + i, mask\n )\n results.update(stuff)\n saliency_bw = saliency.convert('L')\n s_array = numpy.array(saliency_bw)\n m_array = numpy.array(masks[0])\n dot = numpy.dot(s_array, numpy.rot90(m_array))\n results[prefix + '_aoi_dotproduct_sum'] = numpy.sum(dot)\n\n\ndef write_stats(writer, filename, pictureName):\n original = Image.open(IMG_DIR + filename)\n if original.mode != 'RGBA':\n original = original.convert('RGBA')\n masks = createAOIMasks(pictureName, original.size)\n if masks == None:\n print('No masks found in: ' + filename)\n return False\n stats_orig = stat(original)\n results = {'image_name': pictureName, 'orig_lum': luminance(stats_orig.\n mean) / 256.0, 'orig_r': stats_orig.mean[0] / 256.0, 'orig_g': \n stats_orig.mean[1] / 256.0, 'orig_b': stats_orig.mean[2] / 256.0,\n 'orig_complexity': complexity(pictureName, 'original', original)}\n for i, mask in zip(MASK_NAMES, masks):\n stuff = results_for_mask(True, original, pictureName, 'aoi' + i, mask)\n results.update(stuff)\n do_saliency(original, masks, SALIENCY_DIR, 'saliency', pictureName, results\n )\n do_saliency(original, masks, SUN_SALIENCY_DIR, 'sun_saliency',\n pictureName, results)\n writer.writerow(results)\n if DEBUG:\n print('Generated stats for ' + filename)\n return True\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef RepresentsInt(s):\n try:\n int(s)\n return True\n except ValueError:\n return False\n\n\ndef getCoordinates(picturename):\n aoiName = picturename + '.OBT'\n aoiList = []\n obtfile = '{0}/{1}'.format(AOI_DIR, aoiName)\n if not os.path.exists(obtfile):\n if DEBUG:\n print('WARNING: No OBT file found for ' + picturename)\n return []\n with open(obtfile) as file:\n stringContent = file.readlines()\n for string in stringContent:\n dirtyContent = re.split(', | |=', string)\n content = map(int, [x for x in dirtyContent if RepresentsInt(x)])\n if content and content != [0]:\n aoiList.append(content)\n return aoiList\n\n\ndef drawAOI(aoi, i, d):\n if aoi[0] == 1:\n drawOneRect(aoi[1:5], i, d)\n else:\n drawOneEllipse(aoi[1:5], i, d)\n\n\ndef createAOIMasks(pictureName, size):\n if DEBUG:\n print('Displaying AOIs for picture {0}'.format(pictureName))\n aoiList = getCoordinates(pictureName)\n if aoiList == []:\n return None\n masks = []\n img = Image.new('L', size, 0)\n draw = ImageDraw.Draw(img)\n for aoi in aoiList:\n drawAOI(aoi, img, draw)\n masks.append(img)\n emo = Image.new('L', size, 0)\n emo_draw = ImageDraw.Draw(emo)\n for aoi in aoiList[1:]:\n drawAOI(aoi, emo, emo_draw)\n masks.append(emo)\n for aoi in aoiList:\n individual = Image.new('L', size, 0)\n individual_draw = ImageDraw.Draw(individual)\n drawAOI(aoi, individual, individual_draw)\n masks.append(individual)\n return masks\n\n\ndef drawOneEllipse(aoi, img, draw):\n if DEBUG:\n print('Ellipse centered at [{0}, {1}] with {2} {3}'.format(aoi[0],\n aoi[1], aoi[2], aoi[3]))\n imgDim = img.size\n cx = aoi[0]\n cy = aoi[1]\n w = 2 * aoi[2]\n h = 2 * aoi[3]\n imgArea = imgDim[0] * imgDim[1]\n LeftX = cx - aoi[2]\n RightX = cx + aoi[2]\n TopY = cy - aoi[3]\n BottomY = cy + aoi[3]\n draw.ellipse(((LeftX, TopY), (RightX, BottomY)), fill='white', outline=\n 'white')\n\n\ndef drawOneRect(aoi, img, draw):\n if DEBUG:\n print('Rectangle with Coordinates {0}'.format(aoi))\n imgDim = 
img.size\n TopY = aoi[3]\n BottomY = aoi[1]\n LeftX = aoi[0]\n RightX = aoi[2]\n if DEBUG:\n print(' Top:{0}, Bottom:{1}, Left:{2}, Right: {3}'.format(TopY,\n BottomY, LeftX, RightX))\n imgArea = imgDim[0] * imgDim[1]\n draw.rectangle(((LeftX, TopY), (RightX, BottomY)), fill='white',\n outline='white')\n\n\ndef stat(img, mask=None):\n if mask == None:\n return ImageStat.Stat(img)\n else:\n return ImageStat.Stat(img, mask)\n\n\ndef brightness(img, mask=None):\n return stat(img, mask).rms[0]\n\n\ndef luminance(c):\n if len(c) < 3 or len(c) > 4:\n raise Exception('Luminance got values: ', c)\n r = c[0]\n b = c[1]\n g = c[2]\n lum = r * 0.2126 + g * 0.7152 + b * 0.0722\n if len(c) == 4:\n result = lum * (c[3] / 255.0)\n else:\n result = lum\n if math.isnan(result):\n return 0.0\n else:\n return result\n\n\ndef complexity(pictureName, key, img):\n name = 'masks/{0}-{1}.jpg'.format(pictureName, key)\n img.save(name, quality=80, format='JPEG', optimize=True, progressive=True)\n size = os.path.getsize(name)\n return size\n\n\ndef results_for_mask(withColors, original, pictureName, key, mask):\n mask_inverted = ImageOps.invert(mask)\n stats_mask = stat(mask)\n stats_in = stat(original, mask)\n stats_out = stat(original, mask_inverted)\n stats_in_image = Image.new('RGBA', original.size, 'black')\n stats_in_image.paste(original, mask=mask)\n stats_out_image = Image.new('RGBA', original.size, 'black')\n stats_out_image.paste(original, mask=mask_inverted)\n try:\n if withColors:\n return {(key + '_mask_lum'): stats_mask.mean[0] / 256.0, (key +\n '_in_lum'): luminance(stats_in.mean) / 256.0, (key +\n '_in_r'): stats_in.mean[0] / 256.0, (key + '_in_g'): \n stats_in.mean[1] / 256.0, (key + '_in_b'): stats_in.mean[2] /\n 256.0, (key + '_in_complexity'): complexity(pictureName, \n key + 'in', stats_in_image), (key + '_out_lum'): luminance(\n stats_out.mean) / 256.0, (key + '_out_r'): stats_out.mean[0\n ] / 256.0, (key + '_out_g'): stats_out.mean[1] / 256.0, (\n key + '_out_b'): 
stats_out.mean[2] / 256.0, (key +\n '_out_complexity'): complexity(pictureName, key + 'out',\n stats_out_image)}\n else:\n return {(key + '_in_lum'): luminance(stats_in.mean) / 256.0, (\n key + '_out_lum'): luminance(stats_out.mean) / 256.0}\n except ZeroDivisionError:\n return {}\n\n\ndef do_saliency(original, masks, path, prefix, pictureName, results):\n saliency = Image.open(path + pictureName + '.png')\n if saliency.mode != 'RGBA':\n saliency = saliency.convert('RGBA')\n saliency = saliency.resize(original.size)\n stats_saliency = stat(saliency)\n results[prefix + '_lum'] = luminance(stats_saliency.mean) / 256.0\n for i, mask in zip(MASK_NAMES, masks):\n stuff = results_for_mask(False, saliency, pictureName, prefix + i, mask\n )\n results.update(stuff)\n saliency_bw = saliency.convert('L')\n s_array = numpy.array(saliency_bw)\n m_array = numpy.array(masks[0])\n dot = numpy.dot(s_array, numpy.rot90(m_array))\n results[prefix + '_aoi_dotproduct_sum'] = numpy.sum(dot)\n\n\ndef write_stats(writer, filename, pictureName):\n original = Image.open(IMG_DIR + filename)\n if original.mode != 'RGBA':\n original = original.convert('RGBA')\n masks = createAOIMasks(pictureName, original.size)\n if masks == None:\n print('No masks found in: ' + filename)\n return False\n stats_orig = stat(original)\n results = {'image_name': pictureName, 'orig_lum': luminance(stats_orig.\n mean) / 256.0, 'orig_r': stats_orig.mean[0] / 256.0, 'orig_g': \n stats_orig.mean[1] / 256.0, 'orig_b': stats_orig.mean[2] / 256.0,\n 'orig_complexity': complexity(pictureName, 'original', original)}\n for i, mask in zip(MASK_NAMES, masks):\n stuff = results_for_mask(True, original, pictureName, 'aoi' + i, mask)\n results.update(stuff)\n do_saliency(original, masks, SALIENCY_DIR, 'saliency', pictureName, results\n )\n do_saliency(original, masks, SUN_SALIENCY_DIR, 'sun_saliency',\n pictureName, results)\n writer.writerow(results)\n if DEBUG:\n print('Generated stats for ' + filename)\n return 
True\n\n\n<mask token>\n",
"step-5": "#!/usr/bin/env python\n\n\"\"\"\nmaskAOI.py\n\nDan Fitch 20150618\n\"\"\"\n\nfrom __future__ import print_function\n\nimport sys, os, glob, shutil, fnmatch, math, re, numpy, csv\nfrom PIL import Image, ImageFile, ImageDraw, ImageColor, ImageOps, ImageStat\nImageFile.MAXBLOCK = 1048576\n\nDEBUG = False\n\nAOI_DIR='/study/reference/public/IAPS/IAPS/IAPS_2008_1-20_800x600BMP/IAPS_2008_AOIs/'\nIMG_DIR='/study/midus/IAPS2005png/'\nSALIENCY_DIR='/home/fitch/aoi/saliency/'\nSUN_SALIENCY_DIR='/home/fitch/aoi/sunsaliency/'\nMASK_NAMES = [\"0\", \"E\", \"1\", \"2\", \"3\", \"4\"]\n\n\n# A wrapper function to check if a string is a number (and account for negatives)\ndef RepresentsInt(s):\n\ttry: \n\t\tint(s)\n\t\treturn True\n\texcept ValueError:\n\t\treturn False\n\t\t\n\n#Function to return only the main, averaged AOI files (the .OBT) and their coordinates.\ndef getCoordinates(picturename):\n #Load one current image\n aoiName = picturename + \".OBT\"\n aoiList = []\n obtfile = \"{0}/{1}\".format(AOI_DIR, aoiName)\n if not os.path.exists(obtfile):\n if DEBUG: print(\"WARNING: No OBT file found for \" + picturename)\n return []\n with open(obtfile) as file:\n stringContent = file.readlines()\n for string in stringContent:\n dirtyContent = re.split(\", | |=\", string)\n content = map(int, [ x for x in dirtyContent if RepresentsInt(x) ])\n if content and content != [0]:\n aoiList.append(content)\n return aoiList\n\n\ndef drawAOI(aoi, i, d):\n if aoi[0] == 1:\n drawOneRect(aoi[1:5], i, d)\n else:\n drawOneEllipse(aoi[1:5], i, d)\n\n# Function to display the AOI as masks\ndef createAOIMasks(pictureName, size):\n if DEBUG: print(\"Displaying AOIs for picture {0}\".format(pictureName))\n aoiList = getCoordinates(pictureName)\n\n if aoiList == []: return None\n\n masks = []\n\n # L is grayscale\n img = Image.new(\"L\", size, 0)\n draw = ImageDraw.Draw(img)\n\n for aoi in aoiList:\n drawAOI(aoi, img, draw)\n\n masks.append(img)\n\n # Now the \"emotional\" masks, index 2 
and up theoretically\n emo = Image.new(\"L\", size, 0)\n emo_draw = ImageDraw.Draw(emo)\n\n for aoi in aoiList[1:]:\n drawAOI(aoi, emo, emo_draw)\n\n masks.append(emo)\n\n # Now we draw each mask individually\n for aoi in aoiList:\n individual = Image.new(\"L\", size, 0)\n individual_draw = ImageDraw.Draw(individual)\n drawAOI(aoi, individual, individual_draw)\n masks.append(individual)\n\n return masks\n\n\t\t\ndef drawOneEllipse(aoi, img, draw):\n #Draw one ellipse on the figure given\n if DEBUG: print(\"Ellipse centered at [{0}, {1}] with {2} {3}\".format(aoi[0], aoi[1], aoi[2], aoi[3]))\n imgDim = img.size\n cx=aoi[0]\n cy=aoi[1]\n w=2*aoi[2]\n h=2*aoi[3]\n imgArea=imgDim[0]*imgDim[1]\n LeftX=cx-aoi[2]\n RightX=cx+aoi[2]\n TopY=cy-aoi[3]\n BottomY=cy+aoi[3]\n draw.ellipse(((LeftX,TopY),(RightX,BottomY)), fill=\"white\", outline=\"white\")\n\t\ndef drawOneRect(aoi, img, draw):\n #Draw one rectangle on the figure given\n if DEBUG: print(\"Rectangle with Coordinates {0}\".format(aoi))\n imgDim = img.size\n TopY=aoi[3]\n BottomY=aoi[1]\n LeftX=aoi[0]\n RightX=aoi[2]\n if DEBUG: print(\" Top:{0}, Bottom:{1}, Left:{2}, Right: {3}\".format(TopY, BottomY, LeftX, RightX))\n imgArea=imgDim[0]*imgDim[1]\n draw.rectangle(((LeftX,TopY),(RightX,BottomY)), fill=\"white\", outline=\"white\")\n\ndef stat(img, mask=None):\n if mask == None:\n return ImageStat.Stat(img)\n else:\n return ImageStat.Stat(img, mask)\n\ndef brightness(img, mask=None):\n return stat(img,mask).rms[0]\n\t\ndef luminance(c):\n if len(c) < 3 or len(c) > 4:\n raise Exception(\"Luminance got values: \", c)\n r = c[0]\n b = c[1]\n g = c[2]\n lum = r*0.2126 + g*0.7152 + b*0.0722\n if len(c) == 4:\n # Multiply by alpha... 
kind of hokey but should work for most cases\n result = lum * (c[3] / 255.0)\n else:\n result = lum\n\n if math.isnan(result):\n return 0.0\n else:\n return result\n\ndef complexity(pictureName, key, img):\n name = \"masks/{0}-{1}.jpg\".format(pictureName, key)\n img.save(name, quality=80, format=\"JPEG\", optimize=True, progressive=True)\n size = os.path.getsize(name)\n #os.remove(name)\n return size\n\n\n\ndef results_for_mask(withColors, original, pictureName, key, mask):\n # We also want the area outside of the mask\n mask_inverted = ImageOps.invert(mask)\n stats_mask = stat(mask)\n stats_in = stat(original, mask)\n stats_out = stat(original, mask_inverted)\n\n # Complexity uses the resultant image saved as jpg, so we need to prepare some actual images\n\n stats_in_image = Image.new('RGBA', original.size, \"black\")\n stats_in_image.paste(original, mask=mask)\n stats_out_image = Image.new('RGBA', original.size, \"black\")\n stats_out_image.paste(original, mask=mask_inverted)\n\n try:\n if withColors:\n return {\n key + '_mask_lum': stats_mask.mean[0] / 256.0,\n key + '_in_lum': luminance(stats_in.mean) / 256.0,\n key + '_in_r': stats_in.mean[0] / 256.0,\n key + '_in_g': stats_in.mean[1] / 256.0,\n key + '_in_b': stats_in.mean[2] / 256.0,\n key + '_in_complexity': complexity(pictureName, key + \"in\", stats_in_image),\n key + '_out_lum': luminance(stats_out.mean) / 256.0,\n key + '_out_r': stats_out.mean[0] / 256.0,\n key + '_out_g': stats_out.mean[1] / 256.0,\n key + '_out_b': stats_out.mean[2] / 256.0,\n key + '_out_complexity': complexity(pictureName, key + \"out\", stats_out_image),\n }\n else:\n return {\n key + '_in_lum': luminance(stats_in.mean) / 256.0,\n key + '_out_lum': luminance(stats_out.mean) / 256.0,\n }\n except ZeroDivisionError:\n return {}\n\ndef do_saliency(original, masks, path, prefix, pictureName, results):\n saliency = Image.open(path + pictureName + \".png\")\n if saliency.mode != \"RGBA\":\n saliency = saliency.convert(\"RGBA\")\n 
saliency = saliency.resize(original.size)\n stats_saliency = stat(saliency)\n results[prefix + '_lum'] = luminance(stats_saliency.mean) / 256.0\n\n for i, mask in zip(MASK_NAMES, masks):\n stuff = results_for_mask(False, saliency, pictureName, prefix + i, mask)\n results.update(stuff)\n\n saliency_bw = saliency.convert(\"L\")\n s_array = numpy.array(saliency_bw)\n m_array = numpy.array(masks[0])\n dot = numpy.dot(s_array, numpy.rot90(m_array))\n\n results[prefix + \"_aoi_dotproduct_sum\"] = numpy.sum(dot)\n\n\ndef write_stats(writer, filename, pictureName):\n\n original = Image.open(IMG_DIR + filename)\n\n if original.mode != \"RGBA\":\n # P is palette. Did you know BMP *and* PNG files can have 8-bit palettes? WHAAAT\n original = original.convert(\"RGBA\")\n\n # First, draw the AOI masks in white on black\n # This returns a list, the first mask is ALL AOIs, the second is the \"emotional\" ones >=2, and the rest are each individual shape\n masks = createAOIMasks(pictureName, original.size)\n\n if masks == None:\n print(\"No masks found in: \" + filename)\n return False\n\n stats_orig = stat(original)\n\n results = {\n 'image_name': pictureName,\n 'orig_lum': luminance(stats_orig.mean) / 256.0,\n 'orig_r': stats_orig.mean[0] / 256.0,\n 'orig_g': stats_orig.mean[1] / 256.0,\n 'orig_b': stats_orig.mean[2] / 256.0,\n 'orig_complexity': complexity(pictureName, \"original\", original),\n }\n\n for i, mask in zip(MASK_NAMES, masks):\n stuff = results_for_mask(True, original, pictureName, 'aoi' + i, mask)\n results.update(stuff)\n\n # And finally we get the saliency image and resize it and do a bunch of garbage with it and the AOI masks\n\n do_saliency(original, masks, SALIENCY_DIR, \"saliency\", pictureName, results)\n do_saliency(original, masks, SUN_SALIENCY_DIR, \"sun_saliency\", pictureName, results)\n\n\n writer.writerow(results)\n if DEBUG: print(\"Generated stats for \" + filename)\n return True\n\n\n\nwith open('stats.csv', 'wb') as csvfile:\n per_mask_fields = [\n 
'_mask_lum',\n '_in_lum',\n '_in_r',\n '_in_g',\n '_in_b',\n '_in_complexity',\n '_out_lum',\n '_out_r',\n '_out_g',\n '_out_b',\n '_out_complexity',\n ]\n\n per_saliency_fields = [\n '_in_lum',\n '_out_lum',\n ]\n\n fields = [\n 'image_name',\n 'orig_lum',\n 'orig_r',\n 'orig_g',\n 'orig_b',\n 'orig_complexity',\n ]\n\n for i in MASK_NAMES:\n for f in per_mask_fields:\n fields.append(\"aoi{0}{1}\".format(i,f))\n\n fields.append(\"saliency_aoi_dotproduct_sum\")\n fields.append(\"saliency_lum\")\n\n for i in MASK_NAMES:\n for f in per_saliency_fields:\n fields.append(\"saliency{0}{1}\".format(i,f))\n\n fields.append(\"sun_saliency_aoi_dotproduct_sum\")\n fields.append(\"sun_saliency_lum\")\n\n for i in MASK_NAMES:\n for f in per_saliency_fields:\n fields.append(\"sun_saliency{0}{1}\".format(i,f))\n\n writer = csv.DictWriter(csvfile, fieldnames=fields)\n writer.writerow(dict(zip(fields,fields)))\n\n for filename in sorted(os.listdir(IMG_DIR)):\n if not \".png\" in filename:\n continue\n\n pictureName = filename.replace(\".png\", \"\")\n\n try:\n write_stats(writer, filename, pictureName)\n\n except:\n print(\"Error on file \" + pictureName, file=sys.stderr)\n raise\n\n\n",
"step-ids": [
6,
8,
12,
13,
17
]
}
|
[
6,
8,
12,
13,
17
] |
from multiprocessing import Pool
from pathlib import Path
import os
import re
import json
import string
import math
import GLOBALS
# Standard English stopword set; query tokens found here are dropped before the
# per-term index files are opened.
# NOTE(review): search() strips apostrophes from the query BEFORE this check,
# so the apostrophe-bearing entries ("aren't", "can't", ...) can never match a
# cleaned token ("arent", "cant") — presumably the index was built the same
# way, but verify against the indexer.
stopWords = {"a", "about", "above", "after", "again", "against", "all", "am", "an", "and", "any", "are", "aren't",
             "as", "at", "be", "because", "been", "before", "being", "below", "between", "both", "but", "by",
             "can't",
             "cannot", "could", "couldn't", "did", "didn't", "do", "does", "doesn't", "doing", "don't", "down",
             "during",
             "each", "few", "for", "from", "further", "had", "hadn't", "has", "hasn't", "have", "haven't", "having",
             "he", "he'd",
             "he'll", "he's", "her", "here", "here's", "hers", "herself", "him", "himself", "his", "how", "how's",
             "i", "i'd", "i'll",
             "i'm", "i've", "if", "in", "into", "is", "isn't", "it", "it's", "its", "itself", "let's", "me", "more",
             "most", "mustn't", "my",
             "myself", "no", "nor", "not", "of", "off", "on", "once", "only", "or", "other", "ought", "our", "ours",
             "ourselves", "out", "over",
             "own", "same", "shan't", "she", "she'd", "she'll", "she's", "should", "shouldn't", "so", "some",
             "such", "than", "that", "that's",
             "the", "their", "theirs", "them", "themselves", "then", "there", "there's", "these", "they", "they'd",
             "they'll", "they're", "they've",
             "this", "those", "through", "to", "too", "under", "until", "up", "very", "was", "wasn't", "we", "we'd",
             "we'll", "we're", "we've", "were", "weren't",
             "what", "what's", "when", "when's", "where", "where's", "which", "while", "who", "who's", "whom",
             "why", "why's", "with", "won't", "would", "wouldn't",
             "you", "you'd", "you'll", "you're", "you've", "your", "yours", "yourself", "yourselves"}
# Main Functions (aka functions called in __main__)
# Takes in query as str. Returns list of docs that match the OR query (inclusive)
def search(query, finalIndexPath):
    """Return a dict mapping docID -> accumulated tf-idf score for all docs
    matching ANY cleaned query term (inclusive OR over terms).

    Tokens are lowercased, apostrophes stripped, stopwords removed, and
    duplicates collapsed via a set so each posting file is opened at most once.
    Terms with no posting file contribute nothing.
    """
    listOfDicts = list()
    queryList = set()  # set() removes duplicate terms, so we never open a file twice
    tempList = query.strip().lower().replace("'", "").split(" ")
    for word in tempList:
        # Bug fix: also skip empty tokens — doubled spaces (or an empty query)
        # produce "" from split(), which previously crashed on word[0] below.
        if word and word not in stopWords:
            queryList.add(word)
    print("Cleaned query tokens:")
    print(queryList, "\n")  # query tokens with stopwords removed
    # convert set to list to iterate in a stable snapshot
    queryList = list(queryList)
    for word in queryList:
        charPath = word[0]  # first char of the term names the index subdirectory
        # Path of the final_indexed token.json posting file
        jsonFilePath = str(Path(finalIndexPath) / charPath / word) + ".json"
        try:
            with open(jsonFilePath, "r") as file:
                jsonObj = json.loads(file.read())
            listOfDicts.append(jsonObj["docList"])
        except (OSError, json.JSONDecodeError, KeyError):
            # Narrowed from a bare `except:` — only "term not indexed" or a
            # malformed posting file should be silently skipped.
            pass
    return intersectDicts(listOfDicts)
def getDocURLs(intersectedDocs, indexPath, cacheURLs):
    """Translate merged doc scores into displayable (url, score) pairs.

    docIDs missing from the cacheURLs lookup table are silently dropped.
    indexPath is currently unused; it is kept for interface compatibility.
    """
    return [
        (cacheURLs[docID], score)
        for docID, score in intersectedDocs.items()
        if docID in cacheURLs
    ]
# Helper Functions (aka functions called by other functions)
# Returns unique dict of file urls from hashurl.txt (or hasthtable.txt)
def intersectDicts(listOfDicts):
    """Merge per-term doc-score dicts into one, summing tf-idf weights.

    NOTE: despite the name, this is an inclusive-OR union — a document that
    appears under any term is kept, and its weights are added together.
    A single-element input is returned as-is (no merge, no debug print).
    """
    if len(listOfDicts) == 1:
        return listOfDicts[0]
    merged = {}
    for termDocs in listOfDicts:
        for docID, weight in termDocs.items():
            if docID in merged:
                merged[docID] += weight  # accumulate tf-idf weight
            else:
                merged[docID] = weight
    print("intersection = ", merged)
    return merged
def flaskBackendQuery(queryUser, cacheURLs):
    """Run a user query for the Flask frontend.

    Returns up to 10 (url, score) tuples sorted by descending tf-idf score.
    Empty / whitespace-only queries return [] immediately.
    """
    # Bug fix: the empty-query guard previously only printed a warning and
    # fell through into search(), which crashed on the empty token.  The
    # guard also moved above the GLOBALS access so nothing else runs first.
    if queryUser.strip() == "":
        print("Query needs to be at least one character")
        return []
    indexPath = GLOBALS.FINAL_INDEX
    unsortedDocs = search(queryUser, indexPath)  # docID -> summed tf-idf
    # Change docIDs to website URLs for displaying
    unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)
    # Sort docs by the TF-IDF score, highest first
    sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)
    return sortedURLs[0:10]  # return at most 10 results
if __name__ == '__main__':
    # Developer-specific paths (adjust for your machine):
    #   Aljon:   C:\Users\aljon\Documents\CS_121\Assignment_3\CS121_InvertedIndex\final_index
    #   Jerome:  C:\Users\arkse\Desktop\CS121_InvertedIndex\DEV
    #   Art:     C:\Users\aghar\Downloads\DEV  (windows) / /home/anon/Downloads/DEV (linux)
    indexPath = "C:\\1_Repos\\developer"
    finalIndexPath = "C:\\1_Repos\\developer"

    # Get query from user
    query = input("Enter a search query: ")
    if query.strip() == "":
        # Bug fix: previously this only printed a warning and kept going,
        # which crashed inside search() on the empty token.
        print("Query needs to be at least one character")
        raise SystemExit(1)

    # Bug fix: getDocURLs() requires a docID -> URL table as its third
    # argument; the original call omitted it and raised TypeError.  Load it
    # from hashurls.txt (the format hinted at in getDocURLs' history).
    cacheURLs = {}
    try:
        with open(Path(indexPath) / "hashurls.txt", "r") as file:
            cacheURLs = json.loads(file.read())
    except (OSError, json.JSONDecodeError):
        print("WARNING: could not load hashurls.txt; no URLs will be shown")

    # Fetch all results of the query (inclusive OR, weights summed)
    unsortedDocs = search(query, finalIndexPath)
    # Change docIDs to website URLs for displaying
    unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)
    # Sort docs by the TF-IDF score, highest first
    sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)

    # Print top 5 ranked file-urls for the given query
    print(f"\n------------ Top 5 Docs for '{query}' ------------\n")
    # Off-by-one fix: the old `if i > 5: break` printed SIX results.
    for url, score in sortedURLs[:5]:
        print(url, " = ", score)
    print("\n------------ DONE! ------------\n")
|
normal
|
{
"blob_id": "19f17044d48c8cc0f9d366cde7edc846ff343462",
"index": 2598,
"step-1": "<mask token>\n\n\ndef search(query, finalIndexPath):\n listOfDicts = list()\n queryList = set()\n tempList = query.strip().lower().replace(\"'\", '').split(' ')\n for word in tempList:\n if word not in stopWords:\n queryList.add(word)\n print('Cleaned query tokens:')\n print(queryList, '\\n')\n queryList = list(queryList)\n for word in queryList:\n charPath = word[0]\n jsonFilePath = str(Path(finalIndexPath) / charPath / word) + '.json'\n try:\n with open(jsonFilePath, 'r') as file:\n data = file.read()\n jsonObj = json.loads(data)\n docsDict = jsonObj['docList']\n listOfDicts.append(docsDict)\n except:\n pass\n return intersectDicts(listOfDicts)\n\n\ndef getDocURLs(intersectedDocs, indexPath, cacheURLs):\n listUrls = list()\n for docID in intersectedDocs:\n if docID in cacheURLs:\n fileUrl = cacheURLs[docID]\n listUrls.append((fileUrl, intersectedDocs[docID]))\n return listUrls\n\n\ndef intersectDicts(listOfDicts):\n if len(listOfDicts) == 1:\n return listOfDicts[0]\n intersection = {}\n for dictItem in listOfDicts:\n for doc in dictItem:\n if doc not in intersection:\n intersection[doc] = dictItem[doc]\n else:\n intersection[doc] += dictItem[doc]\n print('intersection = ', intersection)\n return intersection\n\n\ndef flaskBackendQuery(queryUser, cacheURLs):\n indexPath = GLOBALS.FINAL_INDEX\n if queryUser.strip() == '':\n print('Query needs to be at least one character')\n unsortedDocs = search(queryUser, indexPath)\n unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)\n sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)\n return sortedURLs[0:10]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef search(query, finalIndexPath):\n listOfDicts = list()\n queryList = set()\n tempList = query.strip().lower().replace(\"'\", '').split(' ')\n for word in tempList:\n if word not in stopWords:\n queryList.add(word)\n print('Cleaned query tokens:')\n print(queryList, '\\n')\n queryList = list(queryList)\n for word in queryList:\n charPath = word[0]\n jsonFilePath = str(Path(finalIndexPath) / charPath / word) + '.json'\n try:\n with open(jsonFilePath, 'r') as file:\n data = file.read()\n jsonObj = json.loads(data)\n docsDict = jsonObj['docList']\n listOfDicts.append(docsDict)\n except:\n pass\n return intersectDicts(listOfDicts)\n\n\ndef getDocURLs(intersectedDocs, indexPath, cacheURLs):\n listUrls = list()\n for docID in intersectedDocs:\n if docID in cacheURLs:\n fileUrl = cacheURLs[docID]\n listUrls.append((fileUrl, intersectedDocs[docID]))\n return listUrls\n\n\ndef intersectDicts(listOfDicts):\n if len(listOfDicts) == 1:\n return listOfDicts[0]\n intersection = {}\n for dictItem in listOfDicts:\n for doc in dictItem:\n if doc not in intersection:\n intersection[doc] = dictItem[doc]\n else:\n intersection[doc] += dictItem[doc]\n print('intersection = ', intersection)\n return intersection\n\n\ndef flaskBackendQuery(queryUser, cacheURLs):\n indexPath = GLOBALS.FINAL_INDEX\n if queryUser.strip() == '':\n print('Query needs to be at least one character')\n unsortedDocs = search(queryUser, indexPath)\n unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)\n sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)\n return sortedURLs[0:10]\n\n\nif __name__ == '__main__':\n indexPath = 'C:\\\\1_Repos\\\\developer'\n finalIndexPath = 'C:\\\\1_Repos\\\\developer'\n query = input('Enter a search query: ')\n if query.strip() == '':\n print('Query needs to be at least one character')\n unsortedDocs = search(query, finalIndexPath)\n unsortedURLs = getDocURLs(unsortedDocs, indexPath)\n sortedURLs = sorted(unsortedURLs, 
key=lambda x: x[1], reverse=True)\n print(f\"\\n------------ Top 5 Docs for '{query}' ------------\\n\")\n for i, doc in enumerate(sortedURLs):\n if i > 5:\n break\n print(doc[0], ' = ', doc[1])\n print('\\n------------ DONE! ------------\\n')\n",
"step-3": "<mask token>\nstopWords = {'a', 'about', 'above', 'after', 'again', 'against', 'all',\n 'am', 'an', 'and', 'any', 'are', \"aren't\", 'as', 'at', 'be', 'because',\n 'been', 'before', 'being', 'below', 'between', 'both', 'but', 'by',\n \"can't\", 'cannot', 'could', \"couldn't\", 'did', \"didn't\", 'do', 'does',\n \"doesn't\", 'doing', \"don't\", 'down', 'during', 'each', 'few', 'for',\n 'from', 'further', 'had', \"hadn't\", 'has', \"hasn't\", 'have', \"haven't\",\n 'having', 'he', \"he'd\", \"he'll\", \"he's\", 'her', 'here', \"here's\",\n 'hers', 'herself', 'him', 'himself', 'his', 'how', \"how's\", 'i', \"i'd\",\n \"i'll\", \"i'm\", \"i've\", 'if', 'in', 'into', 'is', \"isn't\", 'it', \"it's\",\n 'its', 'itself', \"let's\", 'me', 'more', 'most', \"mustn't\", 'my',\n 'myself', 'no', 'nor', 'not', 'of', 'off', 'on', 'once', 'only', 'or',\n 'other', 'ought', 'our', 'ours', 'ourselves', 'out', 'over', 'own',\n 'same', \"shan't\", 'she', \"she'd\", \"she'll\", \"she's\", 'should',\n \"shouldn't\", 'so', 'some', 'such', 'than', 'that', \"that's\", 'the',\n 'their', 'theirs', 'them', 'themselves', 'then', 'there', \"there's\",\n 'these', 'they', \"they'd\", \"they'll\", \"they're\", \"they've\", 'this',\n 'those', 'through', 'to', 'too', 'under', 'until', 'up', 'very', 'was',\n \"wasn't\", 'we', \"we'd\", \"we'll\", \"we're\", \"we've\", 'were', \"weren't\",\n 'what', \"what's\", 'when', \"when's\", 'where', \"where's\", 'which',\n 'while', 'who', \"who's\", 'whom', 'why', \"why's\", 'with', \"won't\",\n 'would', \"wouldn't\", 'you', \"you'd\", \"you'll\", \"you're\", \"you've\",\n 'your', 'yours', 'yourself', 'yourselves'}\n\n\ndef search(query, finalIndexPath):\n listOfDicts = list()\n queryList = set()\n tempList = query.strip().lower().replace(\"'\", '').split(' ')\n for word in tempList:\n if word not in stopWords:\n queryList.add(word)\n print('Cleaned query tokens:')\n print(queryList, '\\n')\n queryList = list(queryList)\n for word in queryList:\n 
charPath = word[0]\n jsonFilePath = str(Path(finalIndexPath) / charPath / word) + '.json'\n try:\n with open(jsonFilePath, 'r') as file:\n data = file.read()\n jsonObj = json.loads(data)\n docsDict = jsonObj['docList']\n listOfDicts.append(docsDict)\n except:\n pass\n return intersectDicts(listOfDicts)\n\n\ndef getDocURLs(intersectedDocs, indexPath, cacheURLs):\n listUrls = list()\n for docID in intersectedDocs:\n if docID in cacheURLs:\n fileUrl = cacheURLs[docID]\n listUrls.append((fileUrl, intersectedDocs[docID]))\n return listUrls\n\n\ndef intersectDicts(listOfDicts):\n if len(listOfDicts) == 1:\n return listOfDicts[0]\n intersection = {}\n for dictItem in listOfDicts:\n for doc in dictItem:\n if doc not in intersection:\n intersection[doc] = dictItem[doc]\n else:\n intersection[doc] += dictItem[doc]\n print('intersection = ', intersection)\n return intersection\n\n\ndef flaskBackendQuery(queryUser, cacheURLs):\n indexPath = GLOBALS.FINAL_INDEX\n if queryUser.strip() == '':\n print('Query needs to be at least one character')\n unsortedDocs = search(queryUser, indexPath)\n unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)\n sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)\n return sortedURLs[0:10]\n\n\nif __name__ == '__main__':\n indexPath = 'C:\\\\1_Repos\\\\developer'\n finalIndexPath = 'C:\\\\1_Repos\\\\developer'\n query = input('Enter a search query: ')\n if query.strip() == '':\n print('Query needs to be at least one character')\n unsortedDocs = search(query, finalIndexPath)\n unsortedURLs = getDocURLs(unsortedDocs, indexPath)\n sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)\n print(f\"\\n------------ Top 5 Docs for '{query}' ------------\\n\")\n for i, doc in enumerate(sortedURLs):\n if i > 5:\n break\n print(doc[0], ' = ', doc[1])\n print('\\n------------ DONE! ------------\\n')\n",
"step-4": "from multiprocessing import Pool\nfrom pathlib import Path\nimport os\nimport re\nimport json\nimport string\nimport math\nimport GLOBALS\nstopWords = {'a', 'about', 'above', 'after', 'again', 'against', 'all',\n 'am', 'an', 'and', 'any', 'are', \"aren't\", 'as', 'at', 'be', 'because',\n 'been', 'before', 'being', 'below', 'between', 'both', 'but', 'by',\n \"can't\", 'cannot', 'could', \"couldn't\", 'did', \"didn't\", 'do', 'does',\n \"doesn't\", 'doing', \"don't\", 'down', 'during', 'each', 'few', 'for',\n 'from', 'further', 'had', \"hadn't\", 'has', \"hasn't\", 'have', \"haven't\",\n 'having', 'he', \"he'd\", \"he'll\", \"he's\", 'her', 'here', \"here's\",\n 'hers', 'herself', 'him', 'himself', 'his', 'how', \"how's\", 'i', \"i'd\",\n \"i'll\", \"i'm\", \"i've\", 'if', 'in', 'into', 'is', \"isn't\", 'it', \"it's\",\n 'its', 'itself', \"let's\", 'me', 'more', 'most', \"mustn't\", 'my',\n 'myself', 'no', 'nor', 'not', 'of', 'off', 'on', 'once', 'only', 'or',\n 'other', 'ought', 'our', 'ours', 'ourselves', 'out', 'over', 'own',\n 'same', \"shan't\", 'she', \"she'd\", \"she'll\", \"she's\", 'should',\n \"shouldn't\", 'so', 'some', 'such', 'than', 'that', \"that's\", 'the',\n 'their', 'theirs', 'them', 'themselves', 'then', 'there', \"there's\",\n 'these', 'they', \"they'd\", \"they'll\", \"they're\", \"they've\", 'this',\n 'those', 'through', 'to', 'too', 'under', 'until', 'up', 'very', 'was',\n \"wasn't\", 'we', \"we'd\", \"we'll\", \"we're\", \"we've\", 'were', \"weren't\",\n 'what', \"what's\", 'when', \"when's\", 'where', \"where's\", 'which',\n 'while', 'who', \"who's\", 'whom', 'why', \"why's\", 'with', \"won't\",\n 'would', \"wouldn't\", 'you', \"you'd\", \"you'll\", \"you're\", \"you've\",\n 'your', 'yours', 'yourself', 'yourselves'}\n\n\ndef search(query, finalIndexPath):\n listOfDicts = list()\n queryList = set()\n tempList = query.strip().lower().replace(\"'\", '').split(' ')\n for word in tempList:\n if word not in stopWords:\n 
queryList.add(word)\n print('Cleaned query tokens:')\n print(queryList, '\\n')\n queryList = list(queryList)\n for word in queryList:\n charPath = word[0]\n jsonFilePath = str(Path(finalIndexPath) / charPath / word) + '.json'\n try:\n with open(jsonFilePath, 'r') as file:\n data = file.read()\n jsonObj = json.loads(data)\n docsDict = jsonObj['docList']\n listOfDicts.append(docsDict)\n except:\n pass\n return intersectDicts(listOfDicts)\n\n\ndef getDocURLs(intersectedDocs, indexPath, cacheURLs):\n listUrls = list()\n for docID in intersectedDocs:\n if docID in cacheURLs:\n fileUrl = cacheURLs[docID]\n listUrls.append((fileUrl, intersectedDocs[docID]))\n return listUrls\n\n\ndef intersectDicts(listOfDicts):\n if len(listOfDicts) == 1:\n return listOfDicts[0]\n intersection = {}\n for dictItem in listOfDicts:\n for doc in dictItem:\n if doc not in intersection:\n intersection[doc] = dictItem[doc]\n else:\n intersection[doc] += dictItem[doc]\n print('intersection = ', intersection)\n return intersection\n\n\ndef flaskBackendQuery(queryUser, cacheURLs):\n indexPath = GLOBALS.FINAL_INDEX\n if queryUser.strip() == '':\n print('Query needs to be at least one character')\n unsortedDocs = search(queryUser, indexPath)\n unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)\n sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)\n return sortedURLs[0:10]\n\n\nif __name__ == '__main__':\n indexPath = 'C:\\\\1_Repos\\\\developer'\n finalIndexPath = 'C:\\\\1_Repos\\\\developer'\n query = input('Enter a search query: ')\n if query.strip() == '':\n print('Query needs to be at least one character')\n unsortedDocs = search(query, finalIndexPath)\n unsortedURLs = getDocURLs(unsortedDocs, indexPath)\n sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)\n print(f\"\\n------------ Top 5 Docs for '{query}' ------------\\n\")\n for i, doc in enumerate(sortedURLs):\n if i > 5:\n break\n print(doc[0], ' = ', doc[1])\n print('\\n------------ DONE! 
------------\\n')\n",
"step-5": "from multiprocessing import Pool\nfrom pathlib import Path\nimport os\nimport re\nimport json\nimport string\nimport math\nimport GLOBALS\n\nstopWords = {\"a\", \"about\", \"above\", \"after\", \"again\", \"against\", \"all\", \"am\", \"an\", \"and\", \"any\", \"are\", \"aren't\",\n \"as\", \"at\", \"be\", \"because\", \"been\", \"before\", \"being\", \"below\", \"between\", \"both\", \"but\", \"by\",\n \"can't\",\n \"cannot\", \"could\", \"couldn't\", \"did\", \"didn't\", \"do\", \"does\", \"doesn't\", \"doing\", \"don't\", \"down\",\n \"during\",\n \"each\", \"few\", \"for\", \"from\", \"further\", \"had\", \"hadn't\", \"has\", \"hasn't\", \"have\", \"haven't\", \"having\",\n \"he\", \"he'd\",\n \"he'll\", \"he's\", \"her\", \"here\", \"here's\", \"hers\", \"herself\", \"him\", \"himself\", \"his\", \"how\", \"how's\",\n \"i\", \"i'd\", \"i'll\",\n \"i'm\", \"i've\", \"if\", \"in\", \"into\", \"is\", \"isn't\", \"it\", \"it's\", \"its\", \"itself\", \"let's\", \"me\", \"more\",\n \"most\", \"mustn't\", \"my\",\n \"myself\", \"no\", \"nor\", \"not\", \"of\", \"off\", \"on\", \"once\", \"only\", \"or\", \"other\", \"ought\", \"our\", \"ours\",\n \"ourselves\", \"out\", \"over\",\n \"own\", \"same\", \"shan't\", \"she\", \"she'd\", \"she'll\", \"she's\", \"should\", \"shouldn't\", \"so\", \"some\",\n \"such\", \"than\", \"that\", \"that's\",\n \"the\", \"their\", \"theirs\", \"them\", \"themselves\", \"then\", \"there\", \"there's\", \"these\", \"they\", \"they'd\",\n \"they'll\", \"they're\", \"they've\",\n \"this\", \"those\", \"through\", \"to\", \"too\", \"under\", \"until\", \"up\", \"very\", \"was\", \"wasn't\", \"we\", \"we'd\",\n \"we'll\", \"we're\", \"we've\", \"were\", \"weren't\",\n \"what\", \"what's\", \"when\", \"when's\", \"where\", \"where's\", \"which\", \"while\", \"who\", \"who's\", \"whom\",\n \"why\", \"why's\", \"with\", \"won't\", \"would\", \"wouldn't\",\n \"you\", \"you'd\", \"you'll\", \"you're\", \"you've\", \"your\", 
\"yours\", \"yourself\", \"yourselves\"}\n\n\n\n# Main Functions (aka functions called in __main__)\n\n# Takes in query as str. Returns list of docs that match the OR query (inclusive)\ndef search(query, finalIndexPath):\n listOfDicts = list()\n queryList = set() # We use set() to remove duplicate terms, and we won't have to open a file twice\n tempList = query.strip().lower().replace(\"'\", \"\").split(\" \")\n\n for word in tempList:\n if word not in stopWords:\n queryList.add(word)\n\n print(\"Cleaned query tokens:\")\n print(queryList, \"\\n\") # query tokens with stopwords removed and replacing apostrohe and lower()\n\n #convert set to list to enumerate\n queryList = list(queryList)\n\n for word in queryList:\n charPath = word[0] #Get 1st char of current word, use to find subdir\n\n # Get the file path of the final_indexed token.json file\n jsonFilePath = str(Path(finalIndexPath) / charPath / word) + \".json\"\n\n try:\n with open(jsonFilePath, \"r\") as file:\n data = file.read()\n jsonObj = json.loads(data)\n docsDict = jsonObj[\"docList\"]\n listOfDicts.append(docsDict)\n except:\n pass\n\n return intersectDicts(listOfDicts)\n\n\ndef getDocURLs(intersectedDocs, indexPath, cacheURLs):\n listUrls = list() # holds unique file paths of .json files\n #\n # hashTablePath = Path(indexPath) / \"hashurls.txt\"\n # with open(hashTablePath, \"r\") as file:\n # data = file.read()\n # hashSet = json.loads(data)\n\n for docID in intersectedDocs:\n if(docID in cacheURLs):\n fileUrl = cacheURLs[docID]\n listUrls.append( (fileUrl, intersectedDocs[docID]) )\n\n return listUrls\n\n\n\n# Helper Functions (aka functions called by other functions)\n\n# Returns unique dict of file urls from hashurl.txt (or hasthtable.txt)\ndef intersectDicts(listOfDicts):\n if len(listOfDicts) == 1:\n return listOfDicts[0]\n\n intersection = {}\n for dictItem in listOfDicts:\n for doc in dictItem:\n if doc not in intersection:\n intersection[doc] = dictItem[doc] #\n else:\n intersection[doc] += 
dictItem[doc] #adding tfidf weights\n print(\"intersection = \", intersection)\n return intersection\n\n\ndef flaskBackendQuery(queryUser, cacheURLs):\n indexPath = GLOBALS.FINAL_INDEX\n\n if (queryUser.strip() == \"\"):\n print(\"Query needs to be at least one character\")\n\n unsortedDocs = search(queryUser, indexPath) #list of dictionaries\n\n # Change filepaths to website URLs for displaying\n unsortedURLs = getDocURLs(unsortedDocs, indexPath, cacheURLs)\n\n # Sort docs by the TF-IDF score\n sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True) #highest scores shown first\n\n return sortedURLs[0:10] #return 10 results\n\n\nif __name__ == '__main__':\n #####\n # Aljon\n # finalIndexPath = \"C:\\\\Users\\\\aljon\\\\Documents\\\\CS_121\\\\Assignment_3\\\\CS121_InvertedIndex\\\\final_index\"\n # indexPath = \"C:\\\\Users\\\\aljon\\\\Documents\\\\CS_121\\\\Assignment_3\\\\CS121_InvertedIndex\\\\index\"\n\n # William\n # folderPath = \"C:\\\\1_Repos\\\\developer\\\\partial_indexes\"\n # folderPath = \"C:\\\\Anaconda3\\\\envs\\\\Projects\\\\developer\\\\partial_indexes\"\n indexPath = \"C:\\\\1_Repos\\\\developer\"\n finalIndexPath = \"C:\\\\1_Repos\\\\developer\"\n\n # Jerome\n #folderPath = \"C:\\\\Users\\\\arkse\\\\Desktop\\\\CS121_InvertedIndex\\\\DEV\"\n\n # Art\n # windows\n #folderPath = \"C:\\\\Users\\\\aghar\\\\Downloads\\\\DEV\"\n # linux\n #folderPath = \"/home/anon/Downloads/DEV\"\n #####\n\n\n # Get query from user\n query = input(\"Enter a search query: \")\n if(query.strip() == \"\"):\n print(\"Query needs to be at least one character\")\n # Fetch all results of query, intersect them to follow Bool-AND logic\n unsortedDocs = search(query, finalIndexPath)\n\n # Change filepaths to website URLs for displaying\n unsortedURLs = getDocURLs(unsortedDocs, indexPath)\n\n # Sort docs by the TF-IDF score\n sortedURLs = sorted(unsortedURLs, key=lambda x: x[1], reverse=True)\n \n # Print top 5 ranked file-urls for given query\n 
print(f\"\\n------------ Top 5 Docs for '{query}' ------------\\n\")\n for i, doc in enumerate(sortedURLs):\n if (i > 5):\n break\n print(doc[0], \" = \", doc[1])\n\n print(\"\\n------------ DONE! ------------\\n\")\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
class Page(webapp.RequestHandler):
def get(self):
if users.get_current_user():
url = users.create_logout_url(self.request.uri)
linktext = 'Logout'
user = users.get_current_user()
else:
url = users.create_login_url(self.request.uri)
linktext = 'Login'
user = 'Anonymous Coward'
path = join(dirname(__file__), 'index.html')
self.response.out.write(template.render(path, locals()))
def post(self):
content = self.request.get('content')
if content:
message = Message()
if users.get_current_user():
message.author = users.get_current_user()
message.content = self.request.get('content')
message.put()
self.redirect('/')
class Messages(webapp.RequestHandler):
def get(self, mode=''):
messages_query = Message.all().order('date')
session = sessions.Session()
if mode != '/all':
if 'last' in session:
messages_query.filter('date >', session['last'])
session['last'] = datetime.utcnow()
result = json.encode(messages_query.fetch(20))
self.response.headers['Content-Type'
] = 'application/json; charset=utf-8'
self.response.out.write(result)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Message(db.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Page(webapp.RequestHandler):
def get(self):
if users.get_current_user():
url = users.create_logout_url(self.request.uri)
linktext = 'Logout'
user = users.get_current_user()
else:
url = users.create_login_url(self.request.uri)
linktext = 'Login'
user = 'Anonymous Coward'
path = join(dirname(__file__), 'index.html')
self.response.out.write(template.render(path, locals()))
def post(self):
content = self.request.get('content')
if content:
message = Message()
if users.get_current_user():
message.author = users.get_current_user()
message.content = self.request.get('content')
message.put()
self.redirect('/')
class Messages(webapp.RequestHandler):
def get(self, mode=''):
messages_query = Message.all().order('date')
session = sessions.Session()
if mode != '/all':
if 'last' in session:
messages_query.filter('date >', session['last'])
session['last'] = datetime.utcnow()
result = json.encode(messages_query.fetch(20))
self.response.headers['Content-Type'
] = 'application/json; charset=utf-8'
self.response.out.write(result)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Message(db.Model):
author = db.UserProperty()
content = db.StringProperty(multiline=True)
date = db.DateTimeProperty(auto_now=True, auto_now_add=True)
class Page(webapp.RequestHandler):
def get(self):
if users.get_current_user():
url = users.create_logout_url(self.request.uri)
linktext = 'Logout'
user = users.get_current_user()
else:
url = users.create_login_url(self.request.uri)
linktext = 'Login'
user = 'Anonymous Coward'
path = join(dirname(__file__), 'index.html')
self.response.out.write(template.render(path, locals()))
def post(self):
content = self.request.get('content')
if content:
message = Message()
if users.get_current_user():
message.author = users.get_current_user()
message.content = self.request.get('content')
message.put()
self.redirect('/')
class Messages(webapp.RequestHandler):
def get(self, mode=''):
messages_query = Message.all().order('date')
session = sessions.Session()
if mode != '/all':
if 'last' in session:
messages_query.filter('date >', session['last'])
session['last'] = datetime.utcnow()
result = json.encode(messages_query.fetch(20))
self.response.headers['Content-Type'
] = 'application/json; charset=utf-8'
self.response.out.write(result)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from os.path import *
from datetime import datetime
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp import template
import json
from appengine_utilities import sessions
class Message(db.Model):
author = db.UserProperty()
content = db.StringProperty(multiline=True)
date = db.DateTimeProperty(auto_now=True, auto_now_add=True)
class Page(webapp.RequestHandler):
def get(self):
if users.get_current_user():
url = users.create_logout_url(self.request.uri)
linktext = 'Logout'
user = users.get_current_user()
else:
url = users.create_login_url(self.request.uri)
linktext = 'Login'
user = 'Anonymous Coward'
path = join(dirname(__file__), 'index.html')
self.response.out.write(template.render(path, locals()))
def post(self):
content = self.request.get('content')
if content:
message = Message()
if users.get_current_user():
message.author = users.get_current_user()
message.content = self.request.get('content')
message.put()
self.redirect('/')
class Messages(webapp.RequestHandler):
def get(self, mode=''):
messages_query = Message.all().order('date')
session = sessions.Session()
if mode != '/all':
if 'last' in session:
messages_query.filter('date >', session['last'])
session['last'] = datetime.utcnow()
result = json.encode(messages_query.fetch(20))
self.response.headers['Content-Type'
] = 'application/json; charset=utf-8'
self.response.out.write(result)
if __name__ == '__main__':
application = webapp.WSGIApplication([('/', Page), ('/messages(.*)',
Messages)], debug=True)
util.run_wsgi_app(application)
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# FISL Live
# =========
# Copyright (c) 2010, Triveos Tecnologia Ltda.
# License: AGPLv3
from os.path import *
from datetime import datetime
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp import template
# from GAE Samples (to serialize models to JSON)
import json
# from gaeutilities.appspot.com
from appengine_utilities import sessions
class Message(db.Model):
author = db.UserProperty()
content = db.StringProperty(multiline=True)
date = db.DateTimeProperty(auto_now=True, auto_now_add=True)
class Page(webapp.RequestHandler):
def get(self):
if users.get_current_user():
url = users.create_logout_url(self.request.uri)
linktext = 'Logout'
user = users.get_current_user()
else:
url = users.create_login_url(self.request.uri)
linktext = 'Login'
user = "Anonymous Coward"
path = join(dirname(__file__), 'index.html')
self.response.out.write(template.render(path, locals()))
def post(self):
content = self.request.get('content')
if content:
message = Message()
if users.get_current_user():
message.author = users.get_current_user()
message.content = self.request.get('content')
message.put()
self.redirect("/")
class Messages(webapp.RequestHandler):
def get(self, mode=""):
messages_query = Message.all().order('date')
session = sessions.Session()
if mode != "/all":
if 'last' in session:
messages_query.filter("date >", session['last'])
session["last"] = datetime.utcnow()
result = json.encode(messages_query.fetch(20))
self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
self.response.out.write(result)
if __name__ == "__main__":
application = webapp.WSGIApplication([
('/', Page),
('/messages(.*)', Messages),
], debug=True)
util.run_wsgi_app(application)
# vim:ts=4:sw=4:et:sm:si:ai
|
flexible
|
{
"blob_id": "64ed3c512894902f85d619020b78338e228dddb6",
"index": 4380,
"step-1": "<mask token>\n\n\nclass Page(webapp.RequestHandler):\n\n def get(self):\n if users.get_current_user():\n url = users.create_logout_url(self.request.uri)\n linktext = 'Logout'\n user = users.get_current_user()\n else:\n url = users.create_login_url(self.request.uri)\n linktext = 'Login'\n user = 'Anonymous Coward'\n path = join(dirname(__file__), 'index.html')\n self.response.out.write(template.render(path, locals()))\n\n def post(self):\n content = self.request.get('content')\n if content:\n message = Message()\n if users.get_current_user():\n message.author = users.get_current_user()\n message.content = self.request.get('content')\n message.put()\n self.redirect('/')\n\n\nclass Messages(webapp.RequestHandler):\n\n def get(self, mode=''):\n messages_query = Message.all().order('date')\n session = sessions.Session()\n if mode != '/all':\n if 'last' in session:\n messages_query.filter('date >', session['last'])\n session['last'] = datetime.utcnow()\n result = json.encode(messages_query.fetch(20))\n self.response.headers['Content-Type'\n ] = 'application/json; charset=utf-8'\n self.response.out.write(result)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Message(db.Model):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Page(webapp.RequestHandler):\n\n def get(self):\n if users.get_current_user():\n url = users.create_logout_url(self.request.uri)\n linktext = 'Logout'\n user = users.get_current_user()\n else:\n url = users.create_login_url(self.request.uri)\n linktext = 'Login'\n user = 'Anonymous Coward'\n path = join(dirname(__file__), 'index.html')\n self.response.out.write(template.render(path, locals()))\n\n def post(self):\n content = self.request.get('content')\n if content:\n message = Message()\n if users.get_current_user():\n message.author = users.get_current_user()\n message.content = self.request.get('content')\n message.put()\n self.redirect('/')\n\n\nclass Messages(webapp.RequestHandler):\n\n def get(self, mode=''):\n messages_query = Message.all().order('date')\n session = sessions.Session()\n if mode != '/all':\n if 'last' in session:\n messages_query.filter('date >', session['last'])\n session['last'] = datetime.utcnow()\n result = json.encode(messages_query.fetch(20))\n self.response.headers['Content-Type'\n ] = 'application/json; charset=utf-8'\n self.response.out.write(result)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Message(db.Model):\n author = db.UserProperty()\n content = db.StringProperty(multiline=True)\n date = db.DateTimeProperty(auto_now=True, auto_now_add=True)\n\n\nclass Page(webapp.RequestHandler):\n\n def get(self):\n if users.get_current_user():\n url = users.create_logout_url(self.request.uri)\n linktext = 'Logout'\n user = users.get_current_user()\n else:\n url = users.create_login_url(self.request.uri)\n linktext = 'Login'\n user = 'Anonymous Coward'\n path = join(dirname(__file__), 'index.html')\n self.response.out.write(template.render(path, locals()))\n\n def post(self):\n content = self.request.get('content')\n if content:\n message = Message()\n if users.get_current_user():\n message.author = users.get_current_user()\n message.content = self.request.get('content')\n message.put()\n self.redirect('/')\n\n\nclass Messages(webapp.RequestHandler):\n\n def get(self, mode=''):\n messages_query = Message.all().order('date')\n session = sessions.Session()\n if mode != '/all':\n if 'last' in session:\n messages_query.filter('date >', session['last'])\n session['last'] = datetime.utcnow()\n result = json.encode(messages_query.fetch(20))\n self.response.headers['Content-Type'\n ] = 'application/json; charset=utf-8'\n self.response.out.write(result)\n\n\n<mask token>\n",
"step-4": "from os.path import *\nfrom datetime import datetime\nfrom google.appengine.api import users\nfrom google.appengine.ext import db\nfrom google.appengine.ext import webapp\nfrom google.appengine.ext.webapp import util\nfrom google.appengine.ext.webapp import template\nimport json\nfrom appengine_utilities import sessions\n\n\nclass Message(db.Model):\n author = db.UserProperty()\n content = db.StringProperty(multiline=True)\n date = db.DateTimeProperty(auto_now=True, auto_now_add=True)\n\n\nclass Page(webapp.RequestHandler):\n\n def get(self):\n if users.get_current_user():\n url = users.create_logout_url(self.request.uri)\n linktext = 'Logout'\n user = users.get_current_user()\n else:\n url = users.create_login_url(self.request.uri)\n linktext = 'Login'\n user = 'Anonymous Coward'\n path = join(dirname(__file__), 'index.html')\n self.response.out.write(template.render(path, locals()))\n\n def post(self):\n content = self.request.get('content')\n if content:\n message = Message()\n if users.get_current_user():\n message.author = users.get_current_user()\n message.content = self.request.get('content')\n message.put()\n self.redirect('/')\n\n\nclass Messages(webapp.RequestHandler):\n\n def get(self, mode=''):\n messages_query = Message.all().order('date')\n session = sessions.Session()\n if mode != '/all':\n if 'last' in session:\n messages_query.filter('date >', session['last'])\n session['last'] = datetime.utcnow()\n result = json.encode(messages_query.fetch(20))\n self.response.headers['Content-Type'\n ] = 'application/json; charset=utf-8'\n self.response.out.write(result)\n\n\nif __name__ == '__main__':\n application = webapp.WSGIApplication([('/', Page), ('/messages(.*)',\n Messages)], debug=True)\n util.run_wsgi_app(application)\n",
"step-5": "#!/usr/bin/env python\n# -*- encoding: utf-8 -*-\n#\n# FISL Live\n# =========\n# Copyright (c) 2010, Triveos Tecnologia Ltda.\n# License: AGPLv3\n\nfrom os.path import *\nfrom datetime import datetime\n\nfrom google.appengine.api import users\nfrom google.appengine.ext import db\nfrom google.appengine.ext import webapp\nfrom google.appengine.ext.webapp import util\nfrom google.appengine.ext.webapp import template\n\n # from GAE Samples (to serialize models to JSON)\nimport json\n\n# from gaeutilities.appspot.com\nfrom appengine_utilities import sessions\n\n\nclass Message(db.Model):\n author = db.UserProperty()\n content = db.StringProperty(multiline=True)\n date = db.DateTimeProperty(auto_now=True, auto_now_add=True)\n\n\nclass Page(webapp.RequestHandler):\n def get(self):\n if users.get_current_user():\n url = users.create_logout_url(self.request.uri)\n linktext = 'Logout'\n user = users.get_current_user()\n else:\n url = users.create_login_url(self.request.uri)\n linktext = 'Login'\n user = \"Anonymous Coward\"\n\n path = join(dirname(__file__), 'index.html')\n self.response.out.write(template.render(path, locals()))\n\n def post(self):\n content = self.request.get('content')\n if content:\n message = Message()\n if users.get_current_user():\n message.author = users.get_current_user()\n message.content = self.request.get('content')\n message.put()\n self.redirect(\"/\")\n\n\nclass Messages(webapp.RequestHandler):\n def get(self, mode=\"\"):\n messages_query = Message.all().order('date')\n\n session = sessions.Session()\n if mode != \"/all\":\n if 'last' in session:\n messages_query.filter(\"date >\", session['last'])\n\n session[\"last\"] = datetime.utcnow()\n\n result = json.encode(messages_query.fetch(20))\n self.response.headers['Content-Type'] = 'application/json; charset=utf-8'\n self.response.out.write(result)\n\nif __name__ == \"__main__\":\n application = webapp.WSGIApplication([\n ('/', Page),\n ('/messages(.*)', Messages),\n ], debug=True)\n 
util.run_wsgi_app(application)\n\n# vim:ts=4:sw=4:et:sm:si:ai",
"step-ids": [
5,
6,
7,
9,
10
]
}
|
[
5,
6,
7,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def load_json(file_name='data.json'):
with open(file_name, 'r') as json_fp:
json_data = json_fp.read()
data_arr = json.loads(json_data)
return data_arr
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def load_json(file_name='data.json'):
with open(file_name, 'r') as json_fp:
json_data = json_fp.read()
data_arr = json.loads(json_data)
return data_arr
if __name__ == '__main__':
json_file = 'data.json'
load_json(json_file)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import json
from city import City
def load_json(file_name='data.json'):
with open(file_name, 'r') as json_fp:
json_data = json_fp.read()
data_arr = json.loads(json_data)
return data_arr
if __name__ == '__main__':
json_file = 'data.json'
load_json(json_file)
<|reserved_special_token_1|>
"""""""""""""""
Write Data
"""""""""""""""
import json
from city import City
def load_json(file_name='data.json'):
with open(file_name, 'r') as json_fp:
json_data = json_fp.read()
data_arr = json.loads(json_data)
return data_arr
if __name__ == '__main__':
json_file = 'data.json'
load_json(json_file)
|
flexible
|
{
"blob_id": "63068a15d750abb29398d687495d6001ba17ab8a",
"index": 9435,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_json(file_name='data.json'):\n with open(file_name, 'r') as json_fp:\n json_data = json_fp.read()\n data_arr = json.loads(json_data)\n return data_arr\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef load_json(file_name='data.json'):\n with open(file_name, 'r') as json_fp:\n json_data = json_fp.read()\n data_arr = json.loads(json_data)\n return data_arr\n\n\nif __name__ == '__main__':\n json_file = 'data.json'\n load_json(json_file)\n",
"step-4": "<mask token>\nimport json\nfrom city import City\n\n\ndef load_json(file_name='data.json'):\n with open(file_name, 'r') as json_fp:\n json_data = json_fp.read()\n data_arr = json.loads(json_data)\n return data_arr\n\n\nif __name__ == '__main__':\n json_file = 'data.json'\n load_json(json_file)\n",
"step-5": "\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\nWrite Data\n\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n\nimport json\nfrom city import City\n\ndef load_json(file_name='data.json'):\n with open(file_name, 'r') as json_fp:\n json_data = json_fp.read()\n data_arr = json.loads(json_data)\n return data_arr\n\nif __name__ == '__main__':\n json_file = 'data.json'\n load_json(json_file)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.